code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.lang.scala
import scala.language.postfixOps
import org.junit.Assert._
import org.junit.Test
import org.scalatest.junit.JUnitSuite
class ConstructorTest extends JUnitSuite {
@Test def toObservable() {
val xs = List(1,2,3).toObservable.toBlocking.toList
assertEquals(List(1,2,3), xs)
val ys = Observable.from(List(1,2,3)).toBlocking.toList
assertEquals(List(1,2,3), xs)
val zs = Observable.items(1,2,3).toBlocking.toList
assertEquals(List(1,2,3), xs)
}
}
| mttkay/RxJava | language-adaptors/rxjava-scala/src/test/scala/rx/lang/scala/ConstructorTest.scala | Scala | apache-2.0 | 1,095 |
/*
* Copyright 2013 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s
package parser
import org.http4s.headers.`Content-Language`
import org.http4s.syntax.all._
class ContentLanguageSpec extends Http4sSuite with HeaderParserHelper[`Content-Language`] {
val en = `Content-Language`(LanguageTag("en"))
val en_IN = `Content-Language`(LanguageTag("en", "IN"))
val en_IN_en_US = `Content-Language`(LanguageTag("en", "IN"), LanguageTag("en", "US"))
val multi_lang =
`Content-Language`(LanguageTag("en"), LanguageTag("fr"), LanguageTag("da"), LanguageTag("id"))
test("Content-Language should Give correct value") {
assertEquals(en.value, "en")
assertEquals(en_IN.value, "en-IN")
assertEquals(en_IN_en_US.value, "en-IN, en-US")
assertEquals(multi_lang.value, "en, fr, da, id")
}
test("Content-Language should Parse Properly") {
assertEquals(roundTrip(en), en)
assertEquals(roundTrip(en_IN), en_IN)
assertEquals(roundTrip(en_IN_en_US), en_IN_en_US)
assertEquals(roundTrip(multi_lang), multi_lang)
}
}
| http4s/http4s | tests/shared/src/test/scala/org/http4s/parser/ContentLanguageSpec.scala | Scala | apache-2.0 | 1,595 |
package com.lightning.walletapp.ln
import fr.acinq.bitcoin.Crypto._
import com.softwaremill.quicklens._
import com.lightning.walletapp.ln.wire._
import com.lightning.walletapp.ln.Scripts._
import com.lightning.walletapp.ln.LNParams._
import com.lightning.walletapp.ln.ChanErrorCodes._
import com.lightning.walletapp.ln.LNParams.broadcaster._
import com.lightning.walletapp.ln.CommitmentSpec.{HtlcAndFail, HtlcAndFulfill}
import com.lightning.walletapp.ln.crypto.{Generators, ShaChain, ShaHashesWithIndex}
import com.lightning.walletapp.ln.Helpers.Closing.{SuccessAndClaim, TimeoutAndClaim}
import com.lightning.walletapp.ln.wire.LightningMessageCodecs.{LNDirectionalMessage, LNMessageVector, RedeemScriptAndSig}
import fr.acinq.bitcoin.{Satoshi, Transaction}
import org.bitcoinj.core.Batch
import scodec.bits.ByteVector
import fr.acinq.eclair.UInt64
sealed trait Command
case class CMDHostedStateOverride(so: StateOverride) extends Command
case class CMDShutdown(scriptPubKey: Option[ByteVector] = None) extends Command
case class CMDConfirmed(tx: Transaction) extends Command
case class CMDFeerate(satPerKw: Long) extends Command
case class CMDSpent(tx: Transaction) extends Command
case object CMDChainTipKnown extends Command
case object CMDSocketOffline extends Command
case object CMDSocketOnline extends Command
case object CMDProceed extends Command
case class CMDOpenChannel(localParams: LocalParams, tempChanId: ByteVector, initialFeeratePerKw: Long, batch: Batch,
fundingSat: Long, channelFlags: ChannelFlags = ChannelFlags(0), pushMsat: Long = 0L) extends Command
case class CMDFailMalformedHtlc(id: Long, onionHash: ByteVector, code: Int) extends Command
case class CMDFulfillHtlc(add: UpdateAddHtlc, preimage: ByteVector) extends Command
case class CMDFailHtlc(id: Long, reason: ByteVector) extends Command
// CHANNEL DATA
sealed trait ChannelData { val announce: NodeAnnouncement }
sealed trait HasNormalCommits extends ChannelData { val commitments: NormalCommits }
case class InitData(announce: NodeAnnouncement) extends ChannelData
// HOSTED CHANNEL
case class WaitRemoteHostedReply(announce: NodeAnnouncement, refundScriptPubKey: ByteVector, secret: ByteVector) extends ChannelData {
require(Helpers isValidFinalScriptPubkey refundScriptPubKey, "Invalid refundScriptPubKey when opening a hosted channel")
lazy val invokeMsg = InvokeHostedChannel(LNParams.chainHash, refundScriptPubKey, secret)
}
case class WaitRemoteHostedStateUpdate(announce: NodeAnnouncement, hc: HostedCommits) extends ChannelData
// INCOMING CHANNEL
case class WaitFundingCreatedRemote(announce: NodeAnnouncement, localParams: LocalParams, remoteParams: AcceptChannel, open: OpenChannel) extends ChannelData
// OUTGOING CHANNEL
case class WaitAcceptData(announce: NodeAnnouncement, cmd: CMDOpenChannel) extends ChannelData
// Funding tx may arrive locally or from external funder
case class WaitFundingSignedCore(localParams: LocalParams, channelId: ByteVector, channelFlags: Option[ChannelFlags],
remoteParams: AcceptChannel, localSpec: CommitmentSpec, remoteCommit: RemoteCommit) {
def makeCommitments(signedLocalCommitTx: CommitTx) =
NormalCommits(localParams, remoteParams, LocalCommit(index = 0L, localSpec, Nil, signedLocalCommitTx), remoteCommit,
localChanges = Changes(Vector.empty, Vector.empty, Vector.empty), remoteChanges = Changes(Vector.empty, Vector.empty, Vector.empty),
localNextHtlcId = 0L, remoteNextHtlcId = 0L, remoteNextCommitInfo = Right(Tools.randomPrivKey.toPoint), signedLocalCommitTx.input,
ShaHashesWithIndex(Map.empty, None), channelId, updateOpt = None, channelFlags, startedAt = System.currentTimeMillis)
}
case class WaitFundingSignedData(announce: NodeAnnouncement, core: WaitFundingSignedCore,
localCommitTx: CommitTx, fundingTx: Transaction) extends ChannelData
// ALL THE DATA BELOW WILL BE STORED
case class WaitBroadcastRemoteData(announce: NodeAnnouncement, core: WaitFundingSignedCore,
commitments: NormalCommits, their: Option[FundingLocked] = None,
fundingError: Option[String] = None) extends HasNormalCommits {
def isLost: Boolean = fundingError match {
case None => commitments.startedAt < System.currentTimeMillis - 3600 * 24 * 21 * 1000L
case _ => commitments.startedAt < System.currentTimeMillis - 3600 * 24 * 7 * 1000L
}
}
case class WaitFundingDoneData(announce: NodeAnnouncement, our: Option[FundingLocked], their: Option[FundingLocked],
fundingTx: Transaction, commitments: NormalCommits) extends HasNormalCommits
case class NormalData(announce: NodeAnnouncement, commitments: NormalCommits, unknownSpend: Option[Transaction] = None,
localShutdown: Option[Shutdown] = None, remoteShutdown: Option[Shutdown] = None) extends HasNormalCommits
case class ClosingTxProposed(unsignedTx: ClosingTx, localClosingSigned: ClosingSigned)
case class NegotiationsData(announce: NodeAnnouncement, commitments: NormalCommits, localShutdown: Shutdown, remoteShutdown: Shutdown,
localProposals: Seq[ClosingTxProposed], lastSignedTx: Option[ClosingTx] = None) extends HasNormalCommits
case class RefundingData(announce: NodeAnnouncement, remoteLatestPoint: Option[Point], commitments: NormalCommits) extends HasNormalCommits
case class ClosingData(announce: NodeAnnouncement,
commitments: NormalCommits, localProposals: Seq[ClosingTxProposed] = Nil,
mutualClose: Seq[Transaction] = Nil, localCommit: Seq[LocalCommitPublished] = Nil,
remoteCommit: Seq[RemoteCommitPublished] = Nil, nextRemoteCommit: Seq[RemoteCommitPublished] = Nil,
refundRemoteCommit: Seq[RemoteCommitPublished] = Nil, revokedCommit: Seq[RevokedCommitPublished] = Nil,
closedAt: Long = System.currentTimeMillis) extends HasNormalCommits {
lazy val commitTxs = realTier12Closings.map(_.commitTx)
lazy val realTier12Closings = revokedCommit ++ localCommit ++ remoteCommit ++ nextRemoteCommit ++ refundRemoteCommit
def canBeRemoved: Boolean = System.currentTimeMillis > closedAt + 1000L * 3600 * 24 * 28
def tier12States: Seq[PublishStatus] = realTier12Closings.flatMap(_.getState)
def bestClosing: CommitPublished = {
// At least one closing is guaranteed to be present
val mutualWrappers = mutualClose map MutualCommitPublished
(mutualWrappers ++ realTier12Closings).maxBy(_.getDepth)
}
}
sealed trait CommitPublished {
def getState: Seq[PublishStatus] = Nil
def commitTx: Transaction
def getDepth: Int = {
val txDepth \\ isDead = getStatus(commitTx.txid)
if (isDead) -txDepth else txDepth
}
}
case class LocalCommitPublished(claimMainDelayed: Seq[ClaimDelayedOutputTx], claimHtlcSuccess: Seq[SuccessAndClaim],
claimHtlcTimeout: Seq[TimeoutAndClaim], commitTx: Transaction) extends CommitPublished {
override def getState = {
val success = for (tier1 \\ tier2 <- claimHtlcSuccess) yield HideReady(tier1.tx) :: csvShowDelayed(tier1, tier2, commitTx) :: Nil
val timeout = for (t1 \\ t2 <- claimHtlcTimeout) yield HideDelayed(cltv(commitTx, t1.tx), t1.tx) :: csvShowDelayed(t1, t2, commitTx) :: Nil
val main = for (t1 <- claimMainDelayed) yield ShowDelayed(csv(commitTx, t1.tx), t1.tx, commitTx, t1 -- t1, t1.tx.allOutputsAmount) :: Nil
main.flatten ++ success.flatten ++ timeout.flatten
}
}
case class RemoteCommitPublished(claimMain: Seq[ClaimP2WPKHOutputTx], claimHtlcSuccess: Seq[ClaimHtlcSuccessTx],
claimHtlcTimeout: Seq[ClaimHtlcTimeoutTx], commitTx: Transaction) extends CommitPublished {
override def getState = {
val timeout = for (t1 <- claimHtlcTimeout) yield ShowDelayed(cltv(commitTx, t1.tx), t1.tx, commitTx, t1 -- t1, t1.tx.allOutputsAmount)
val success = for (tier1 <- claimHtlcSuccess) yield ShowReady(tier1.tx, tier1 -- tier1, tier1.tx.allOutputsAmount)
val main = for (t1 <- claimMain) yield ShowReady(t1.tx, t1 -- t1, t1.tx.allOutputsAmount)
main ++ success ++ timeout
}
}
case class MutualCommitPublished(commitTx: Transaction) extends CommitPublished
case class RevokedCommitPublished(claimMain: Seq[ClaimP2WPKHOutputTx], claimTheirMainPenalty: Seq[MainPenaltyTx],
htlcPenalty: Seq[HtlcPenaltyTx], commitTx: Transaction) extends CommitPublished {
def spendsFromRevoked(htlcTx: Transaction): Boolean =
htlcTx.txIn.map(_.outPoint.txid).contains(commitTx.txid)
override def getState = {
val main = for (t1 <- claimMain) yield ShowReady(t1.tx, t1 -- t1, t1.tx.allOutputsAmount)
val their = for (t1 <- claimTheirMainPenalty) yield ShowReady(t1.tx, t1 -- t1, t1.tx.allOutputsAmount)
val penalty = for (t1 <- htlcPenalty) yield ShowReady(t1.tx, t1 -- t1, t1.tx.allOutputsAmount)
main ++ their ++ penalty
}
}
case class RevocationInfo(redeemScriptsToSigs: List[RedeemScriptAndSig],
claimMainTxSig: Option[ByteVector], claimPenaltyTxSig: Option[ByteVector], feeRate: Long,
dustLimit: Long, finalScriptPubKey: ByteVector, toSelfDelay: Int, localPubKey: PublicKey,
remoteRevocationPubkey: PublicKey, remoteDelayedPaymentKey: PublicKey) {
lazy val dustLim = Satoshi(dustLimit)
def makeClaimP2WPKHOutput(tx: Transaction) = Scripts.makeClaimP2WPKHOutputTx(tx, localPubKey, finalScriptPubKey, feeRate, dustLim)
def makeHtlcPenalty(finder: PubKeyScriptIndexFinder)(redeemScript: ByteVector) = Scripts.makeHtlcPenaltyTx(finder, redeemScript, finalScriptPubKey, feeRate, dustLim)
def makeMainPenalty(tx: Transaction) = Scripts.makeMainPenaltyTx(tx, remoteRevocationPubkey, finalScriptPubKey, toSelfDelay, remoteDelayedPaymentKey, feeRate, dustLim)
}
// COMMITMENTS
case class Htlc(incoming: Boolean, add: UpdateAddHtlc)
case class CommitmentSpec(feeratePerKw: Long, toLocalMsat: Long, toRemoteMsat: Long,
htlcs: Set[Htlc] = Set.empty, fulfilled: Set[HtlcAndFulfill] = Set.empty,
failed: Set[HtlcAndFail] = Set.empty, malformed: Set[Htlc] = Set.empty) {
lazy val fulfilledIncoming = fulfilled collect { case Htlc(true, add) \\ _ => add }
lazy val fulfilledOutgoing = fulfilled collect { case Htlc(false, add) \\ _ => add }
def directedHtlcsAndSum(incoming: Boolean) = {
val filtered = htlcs.filter(_.incoming == incoming)
filtered -> filtered.toVector.map(_.add.amountMsat).sum
}
}
object CommitmentSpec {
def findHtlcById(cs: CommitmentSpec, id: Long, isIncoming: Boolean): Option[Htlc] =
cs.htlcs.find(htlc => htlc.add.id == id && htlc.incoming == isIncoming)
type HtlcAndFulfill = (Htlc, UpdateFulfillHtlc)
def fulfill(cs: CommitmentSpec, isIncoming: Boolean, m: UpdateFulfillHtlc) = findHtlcById(cs, m.id, isIncoming) match {
case Some(h) if h.incoming => cs.copy(toLocalMsat = cs.toLocalMsat + h.add.amountMsat, fulfilled = cs.fulfilled + Tuple2(h, m), htlcs = cs.htlcs - h)
case Some(h) => cs.copy(toRemoteMsat = cs.toRemoteMsat + h.add.amountMsat, fulfilled = cs.fulfilled + Tuple2(h, m), htlcs = cs.htlcs - h)
case None => cs
}
type HtlcAndFail = (Htlc, UpdateFailHtlc)
def fail(cs: CommitmentSpec, isIncoming: Boolean, m: UpdateFailHtlc) = findHtlcById(cs, m.id, isIncoming) match {
case Some(h) if h.incoming => cs.copy(toRemoteMsat = cs.toRemoteMsat + h.add.amountMsat, failed = cs.failed + Tuple2(h, m), htlcs = cs.htlcs - h)
case Some(h) => cs.copy(toLocalMsat = cs.toLocalMsat + h.add.amountMsat, failed = cs.failed + Tuple2(h, m), htlcs = cs.htlcs - h)
case None => cs
}
def failMalformed(cs: CommitmentSpec, isIncoming: Boolean, m: UpdateFailMalformedHtlc) = findHtlcById(cs, m.id, isIncoming) match {
case Some(h) if h.incoming => cs.copy(toRemoteMsat = cs.toRemoteMsat + h.add.amountMsat, malformed = cs.malformed + h, htlcs = cs.htlcs - h)
case Some(h) => cs.copy(toLocalMsat = cs.toLocalMsat + h.add.amountMsat, malformed = cs.malformed + h, htlcs = cs.htlcs - h)
case None => cs
}
def plusOutgoing(data: UpdateAddHtlc, cs: CommitmentSpec) = cs.copy(htlcs = cs.htlcs + Htlc(incoming = false, add = data), toLocalMsat = cs.toLocalMsat - data.amountMsat)
def plusIncoming(data: UpdateAddHtlc, cs: CommitmentSpec) = cs.copy(htlcs = cs.htlcs + Htlc(incoming = true, add = data), toRemoteMsat = cs.toRemoteMsat - data.amountMsat)
def reduce(cs: CommitmentSpec, local: LNMessageVector, remote: LNMessageVector) = {
val spec1 = cs.copy(fulfilled = Set.empty, failed = Set.empty, malformed = Set.empty)
val spec2 = (spec1 /: local) { case (s, add: UpdateAddHtlc) => plusOutgoing(add, s) case s \\ _ => s }
val spec3 = (spec2 /: remote) { case (s, add: UpdateAddHtlc) => plusIncoming(add, s) case s \\ _ => s }
val spec4 = (spec3 /: local) {
case (s, msg: UpdateFee) => s.copy(feeratePerKw = msg.feeratePerKw)
case (s, msg: UpdateFulfillHtlc) => fulfill(s, isIncoming = true, msg)
case (s, msg: UpdateFailMalformedHtlc) => failMalformed(s, isIncoming = true, msg)
case (s, msg: UpdateFailHtlc) => fail(s, isIncoming = true, msg)
case s \\ _ => s
}
(spec4 /: remote) {
case (s, msg: UpdateFee) => s.copy(feeratePerKw = msg.feeratePerKw)
case (s, msg: UpdateFulfillHtlc) => fulfill(s, isIncoming = false, msg)
case (s, msg: UpdateFailMalformedHtlc) => failMalformed(s, isIncoming = false, msg)
case (s, msg: UpdateFailHtlc) => fail(s, isIncoming = false, msg)
case s \\ _ => s
}
}
}
case class LocalParams(maxHtlcValueInFlightMsat: UInt64, channelReserveSat: Long, toSelfDelay: Int,
maxAcceptedHtlcs: Int, fundingPrivKey: PrivateKey, revocationSecret: Scalar,
paymentKey: Scalar, delayedPaymentKey: Scalar, htlcKey: Scalar,
defaultFinalScriptPubKey: ByteVector, dustLimit: Satoshi,
shaSeed: ByteVector, isFunder: Boolean) {
lazy val delayedPaymentBasepoint = delayedPaymentKey.toPoint
lazy val revocationBasepoint = revocationSecret.toPoint
lazy val paymentBasepoint = paymentKey.toPoint
lazy val htlcBasepoint = htlcKey.toPoint
}
case class WaitingForRevocation(nextRemoteCommit: RemoteCommit, sent: CommitSig, localCommitIndexSnapshot: Long)
case class LocalCommit(index: Long, spec: CommitmentSpec, htlcTxsAndSigs: Seq[HtlcTxAndSigs], commitTx: CommitTx)
case class RemoteCommit(index: Long, spec: CommitmentSpec, txOpt: Option[Transaction], remotePerCommitmentPoint: Point)
case class HtlcTxAndSigs(txinfo: TransactionWithInputInfo, localSig: ByteVector, remoteSig: ByteVector)
case class Changes(proposed: LNMessageVector, signed: LNMessageVector, acked: LNMessageVector)
sealed trait Commitments {
val updateOpt: Option[ChannelUpdate]
val localSpec: CommitmentSpec
val channelId: ByteVector
val startedAt: Long
}
case class ReducedState(spec: CommitmentSpec, canSendMsat: Long, canReceiveMsat: Long, myFeeSat: Long)
case class NormalCommits(localParams: LocalParams, remoteParams: AcceptChannel, localCommit: LocalCommit,
remoteCommit: RemoteCommit, localChanges: Changes, remoteChanges: Changes, localNextHtlcId: Long,
remoteNextHtlcId: Long, remoteNextCommitInfo: Either[WaitingForRevocation, Point], commitInput: InputInfo,
remotePerCommitmentSecrets: ShaHashesWithIndex, channelId: ByteVector, updateOpt: Option[ChannelUpdate],
channelFlags: Option[ChannelFlags], startedAt: Long) extends Commitments { me =>
lazy val reducedRemoteState = {
val reduced = CommitmentSpec.reduce(latestRemoteCommit.spec, remoteChanges.acked, localChanges.proposed)
val commitFeeSat = Scripts.commitTxFee(remoteParams.dustLimitSat, reduced).toLong
val theirFeeSat = if (localParams.isFunder) 0L else commitFeeSat
val myFeeSat = if (localParams.isFunder) commitFeeSat else 0L
val canSendMsat = reduced.toRemoteMsat - (myFeeSat + remoteParams.channelReserveSatoshis) * 1000L
val canReceiveMsat = reduced.toLocalMsat - (theirFeeSat + localParams.channelReserveSat) * 1000L
ReducedState(reduced, canSendMsat, canReceiveMsat, myFeeSat)
}
lazy val localSpec = localCommit.spec
def latestRemoteCommit = remoteNextCommitInfo.left.toOption.map(_.nextRemoteCommit) getOrElse remoteCommit
def localHasUnsignedOutgoing = localChanges.proposed.collectFirst { case msg: UpdateAddHtlc => msg }.isDefined
def remoteHasUnsignedOutgoing = remoteChanges.proposed.collectFirst { case msg: UpdateAddHtlc => msg }.isDefined
def addRemoteProposal(update: LightningMessage) = me.modify(_.remoteChanges.proposed).using(_ :+ update)
def addLocalProposal(update: LightningMessage) = me.modify(_.localChanges.proposed).using(_ :+ update)
def nextDummyReduced = addLocalProposal(Tools.nextDummyHtlc).reducedRemoteState
def getHtlcCrossSigned(incomingRelativeToLocal: Boolean, htlcId: Long) = for {
_ <- CommitmentSpec.findHtlcById(latestRemoteCommit.spec, htlcId, !incomingRelativeToLocal)
htlcOut <- CommitmentSpec.findHtlcById(localSpec, htlcId, incomingRelativeToLocal)
} yield htlcOut.add
def ensureSenderCanAffordChange = {
val reduced = CommitmentSpec.reduce(localSpec, localChanges.acked, remoteChanges.proposed)
val feesSat = if (localParams.isFunder) 0L else Scripts.commitTxFee(localParams.dustLimit, reduced).amount
if (reduced.toRemoteMsat - (feesSat + localParams.channelReserveSat) * 1000L < 0L) throw new LightningException
me -> reduced
}
def sendFee(ratePerKw: Long) = {
if (!localParams.isFunder) throw new LightningException
val updateFeeMessage = UpdateFee(channelId, ratePerKw)
val c1 = addLocalProposal(update = updateFeeMessage)
if (c1.reducedRemoteState.canSendMsat < 0L) None
else Some(c1 -> updateFeeMessage)
}
def receiveFee(fee: UpdateFee) = {
if (localParams.isFunder) throw new LightningException
if (fee.feeratePerKw < minFeeratePerKw) throw new LightningException
val c1 \\ _ = addRemoteProposal(fee).ensureSenderCanAffordChange
c1
}
def sendAdd(rd: RoutingData) = {
// Let's compute the current commitment transaction *as seen by remote peer* with this change taken into account
val add = UpdateAddHtlc(channelId, localNextHtlcId, rd.lastMsat, rd.pr.paymentHash, rd.lastExpiry, rd.onion.packet)
val c1 = addLocalProposal(add).modify(_.localNextHtlcId).using(_ + 1)
// This is their point of view so our outgoing HTLCs are their incoming
val outHtlcs \\ inFlight = c1.reducedRemoteState.spec.directedHtlcsAndSum(incoming = true)
if (c1.reducedRemoteState.canSendMsat < 0L) throw CMDAddImpossible(rd, ERR_LOCAL_AMOUNT_HIGH)
if (rd.firstMsat < remoteParams.htlcMinimumMsat) throw CMDAddImpossible(rd, ERR_REMOTE_AMOUNT_LOW, remoteParams.htlcMinimumMsat)
if (!c1.localParams.isFunder && c1.reducedRemoteState.canReceiveMsat < 0L) throw CMDAddImpossible(rd, ERR_REMOTE_AMOUNT_HIGH)
if (UInt64(inFlight) > remoteParams.maxHtlcValueInFlightMsat) throw CMDAddImpossible(rd, ERR_REMOTE_AMOUNT_HIGH)
if (outHtlcs.size > remoteParams.maxAcceptedHtlcs) throw CMDAddImpossible(rd, ERR_TOO_MANY_HTLC)
c1 -> add
}
def receiveAdd(add: UpdateAddHtlc) = {
// We should both check if WE can accept another HTLC and if PEER can send another HTLC
// let's compute the current commitment *as seen by us* with this payment change taken into account
val c1 \\ reduced = addRemoteProposal(add).modify(_.remoteNextHtlcId).using(_ + 1).ensureSenderCanAffordChange
// This is our point of view because `ensureSenderCanAffordChange` returns reduced local commits
val inHtlcs \\ inFlight = reduced.directedHtlcsAndSum(incoming = true)
if (add.amountMsat < 1L) throw new LightningException
if (add.id != remoteNextHtlcId) throw new LightningException
if (inHtlcs.size > localParams.maxAcceptedHtlcs) throw new LightningException
if (UInt64(inFlight) > localParams.maxHtlcValueInFlightMsat) throw new LightningException
c1
}
def receiveFulfill(fulfill: UpdateFulfillHtlc) =
getHtlcCrossSigned(incomingRelativeToLocal = false, fulfill.id) match {
case Some(add) if fulfill.paymentHash == add.paymentHash => addRemoteProposal(fulfill)
case None => throw new LightningException("Peer has fulfilled a non-cross-signed payment")
}
def sendFulfill(cmd: CMDFulfillHtlc) = {
val fulfill = UpdateFulfillHtlc(channelId, cmd.add.id, cmd.preimage)
val notFound = getHtlcCrossSigned(incomingRelativeToLocal = true, cmd.add.id).isEmpty
if (notFound) throw new LightningException else addLocalProposal(fulfill) -> fulfill
}
def sendFail(cmd: CMDFailHtlc) = {
val fail = UpdateFailHtlc(channelId, cmd.id, cmd.reason)
val notFound = getHtlcCrossSigned(incomingRelativeToLocal = true, cmd.id).isEmpty
if (notFound) throw new LightningException else addLocalProposal(fail) -> fail
}
def sendFailMalformed(cmd: CMDFailMalformedHtlc) = {
val failMalformed = UpdateFailMalformedHtlc(channelId, cmd.id, cmd.onionHash, cmd.code)
val notFound = getHtlcCrossSigned(incomingRelativeToLocal = true, htlcId = cmd.id).isEmpty
if (notFound) throw new LightningException else addLocalProposal(failMalformed) -> failMalformed
}
def receiveFail(fail: UpdateFailHtlc) = {
val notFound = getHtlcCrossSigned(incomingRelativeToLocal = false, fail.id).isEmpty
if (notFound) throw new LightningException else addRemoteProposal(fail)
}
def receiveFailMalformed(fail: UpdateFailMalformedHtlc) = {
val notFound = getHtlcCrossSigned(incomingRelativeToLocal = false, fail.id).isEmpty
if (fail.failureCode.&(FailureMessageCodecs.BADONION) == 0) throw new LightningException
if (notFound) throw new LightningException else addRemoteProposal(fail)
}
def sendCommit(remoteNextPerCommitmentPoint: Point) = {
val htlcKey = Generators.derivePrivKey(localParams.htlcKey, remoteNextPerCommitmentPoint)
val spec = CommitmentSpec.reduce(remoteCommit.spec, remoteChanges.acked, localChanges.proposed)
val (remoteCommitTx, htlcTimeoutTxs, htlcSuccessTxs, _, _) =
Helpers.makeRemoteTxs(remoteCommit.index + 1, localParams, remoteParams,
commitInput, remoteNextPerCommitmentPoint, spec)
// Generate signatures
val sortedHtlcTxs = (htlcTimeoutTxs ++ htlcSuccessTxs).sortBy(_.input.outPoint.index)
val htlcSigs = for (info <- sortedHtlcTxs) yield Scripts.sign(htlcKey)(info)
// Update commitment data
val remoteChanges1 = remoteChanges.copy(acked = Vector.empty, signed = remoteChanges.acked)
val localChanges1 = localChanges.copy(proposed = Vector.empty, signed = localChanges.proposed)
val commitSig = CommitSig(channelId, Scripts.sign(localParams.fundingPrivKey)(remoteCommitTx), htlcSigs.toList)
val remoteCommit1 = RemoteCommit(remoteCommit.index + 1, spec, Some(remoteCommitTx.tx), remoteNextPerCommitmentPoint)
val wait = WaitingForRevocation(nextRemoteCommit = remoteCommit1, commitSig, localCommitIndexSnapshot = localCommit.index)
copy(remoteNextCommitInfo = Left(wait), localChanges = localChanges1, remoteChanges = remoteChanges1) -> commitSig
}
def receiveCommit(commit: CommitSig) = {
val spec = CommitmentSpec.reduce(localSpec, localChanges.acked, remoteChanges.proposed)
val localPerCommitmentSecret = Generators.perCommitSecret(localParams.shaSeed, localCommit.index)
val localPerCommitmentPoint = Generators.perCommitPoint(localParams.shaSeed, localCommit.index + 1)
val localNextPerCommitmentPoint = Generators.perCommitPoint(localParams.shaSeed, localCommit.index + 2)
val remoteHtlcPubkey = Generators.derivePubKey(remoteParams.htlcBasepoint, localPerCommitmentPoint)
val localHtlcKey = Generators.derivePrivKey(localParams.htlcKey, localPerCommitmentPoint)
val (localCommitTx, htlcTimeoutTxs, htlcSuccessTxs) =
Helpers.makeLocalTxs(localCommit.index + 1, localParams,
remoteParams, commitInput, localPerCommitmentPoint, spec)
val sortedHtlcTxs = (htlcTimeoutTxs ++ htlcSuccessTxs).sortBy(_.input.outPoint.index)
val signedLocalCommitTx = Scripts.addSigs(localCommitTx, localParams.fundingPrivKey.publicKey,
remoteParams.fundingPubkey, Scripts.sign(localParams.fundingPrivKey)(localCommitTx), commit.signature)
if (commit.htlcSignatures.size != sortedHtlcTxs.size) throw new LightningException
if (Scripts.checkValid(signedLocalCommitTx).isFailure) throw new LightningException
val htlcSigs = for (info <- sortedHtlcTxs) yield Scripts.sign(localHtlcKey)(info)
val combined = (sortedHtlcTxs, htlcSigs, commit.htlcSignatures).zipped.toList
val htlcTxsAndSigs = combined collect {
case (htlcTx: HtlcTimeoutTx, localSig, remoteSig) =>
val check = Scripts checkValid Scripts.addSigs(htlcTx, localSig, remoteSig)
if (check.isSuccess) HtlcTxAndSigs(htlcTx, localSig, remoteSig)
else throw new LightningException
case (htlcTx: HtlcSuccessTx, localSig, remoteSig) =>
val sigValid = Scripts.checkSig(htlcTx, remoteSig, remoteHtlcPubkey)
if (sigValid) HtlcTxAndSigs(htlcTx, localSig, remoteSig)
else throw new LightningException
}
val localCommit1 = LocalCommit(localCommit.index + 1, spec, htlcTxsAndSigs, signedLocalCommitTx)
val remoteChanges1 = remoteChanges.copy(proposed = Vector.empty, acked = remoteChanges.acked ++ remoteChanges.proposed)
val c1 = copy(localChanges = localChanges.copy(acked = Vector.empty), remoteChanges = remoteChanges1, localCommit = localCommit1)
c1 -> RevokeAndAck(channelId, localPerCommitmentSecret, localNextPerCommitmentPoint)
}
def receiveRevocation(rev: RevokeAndAck) = remoteNextCommitInfo match {
case Left(_) if remoteCommit.remotePerCommitmentPoint != rev.perCommitmentSecret.toPoint =>
throw new LightningException("Peer has supplied a wrong per commitment secret")
case Left(wait) =>
val nextIndex = ShaChain.largestTxIndex - remoteCommit.index
val secrets1 = ShaChain.addHash(remotePerCommitmentSecrets, rev.perCommitmentSecret.toBin.toArray, nextIndex)
val localChanges1 = localChanges.copy(signed = Vector.empty, acked = localChanges.acked ++ localChanges.signed)
val remoteChanges1 = remoteChanges.copy(signed = Vector.empty)
copy(localChanges = localChanges1, remoteChanges = remoteChanges1, remoteCommit = wait.nextRemoteCommit,
remoteNextCommitInfo = Right(rev.nextPerCommitmentPoint), remotePerCommitmentSecrets = secrets1)
case _ =>
// Unexpected revocation
throw new LightningException
}
}
case class HostedCommits(announce: NodeAnnouncement, lastCrossSignedState: LastCrossSignedState, futureUpdates: Vector[LNDirectionalMessage],
localSpec: CommitmentSpec, updateOpt: Option[ChannelUpdate], localError: Option[Error], remoteError: Option[Error],
startedAt: Long = System.currentTimeMillis) extends Commitments with ChannelData { me =>
lazy val Tuple4(nextLocalUpdates, nextRemoteUpdates, nextTotalLocal, nextTotalRemote) =
(Tuple4(Vector.empty[LightningMessage], Vector.empty[LightningMessage], lastCrossSignedState.localUpdates, lastCrossSignedState.remoteUpdates) /: futureUpdates) {
case (localMessages, remoteMessages, totalLocalNumber, totalRemoteNumber) \\ (msg \\ true) => (localMessages :+ msg, remoteMessages, totalLocalNumber + 1, totalRemoteNumber)
case (localMessages, remoteMessages, totalLocalNumber, totalRemoteNumber) \\ (msg \\ false) => (localMessages, remoteMessages :+ msg, totalLocalNumber, totalRemoteNumber + 1)
}
val channelId = announce.hostedChanId
lazy val invokeMsg = InvokeHostedChannel(chainHash, lastCrossSignedState.refundScriptPubKey, ByteVector.empty)
lazy val nextLocalSpec = CommitmentSpec.reduce(localSpec, nextLocalUpdates, nextRemoteUpdates)
lazy val currentAndNextInFlight = localSpec.htlcs ++ nextLocalSpec.htlcs
def nextLocalUnsignedLCSS(blockDay: Long) = {
val incomingHtlcs \\ outgoingHtlcs = nextLocalSpec.htlcs.toList.partition(_.incoming)
LastCrossSignedState(lastCrossSignedState.refundScriptPubKey, lastCrossSignedState.initHostedChannel,
blockDay, nextLocalSpec.toLocalMsat, nextLocalSpec.toRemoteMsat, nextTotalLocal, nextTotalRemote,
incomingHtlcs = incomingHtlcs.map(_.add), outgoingHtlcs = outgoingHtlcs.map(_.add),
localSigOfRemote = ByteVector.empty, remoteSigOfLocal = ByteVector.empty)
}
def findState(remoteLCSS: LastCrossSignedState) = for {
// Find a future state which matches their update numbers
previousIndex <- futureUpdates.indices drop 1
previousHC = me.copy(futureUpdates = futureUpdates take previousIndex)
if previousHC.nextLocalUnsignedLCSS(remoteLCSS.blockDay).isEven(remoteLCSS)
} yield previousHC
def getError: Option[Error] = localError.orElse(remoteError)
def addProposal(update: LNDirectionalMessage) = copy(futureUpdates = futureUpdates :+ update)
def newLocalBalanceMsat(so: StateOverride) = lastCrossSignedState.initHostedChannel.channelCapacityMsat - so.localBalanceMsat
def hostedState = HostedState(channelId, nextLocalUpdates, nextRemoteUpdates, lastCrossSignedState)
def sentPreimages = for {
UpdateFulfillHtlc(_, id, paymentPreimage) <- nextLocalUpdates
htlc <- CommitmentSpec.findHtlcById(localSpec, id, isIncoming = true)
} yield paymentPreimage -> htlc.add.expiry
def sendAdd(rd: RoutingData) = {
// Let's add this change and see if the new state violates any of constraints including those imposed by them on us
val add = UpdateAddHtlc(channelId, nextTotalLocal + 1, rd.lastMsat, rd.pr.paymentHash, rd.lastExpiry, rd.onion.packet)
val commits1 = addProposal(add.local)
val inHtlcs \\ inFlight = commits1.nextLocalSpec.directedHtlcsAndSum(incoming = false)
if (commits1.nextLocalSpec.toLocalMsat < 0L) throw CMDAddImpossible(rd, ERR_REMOTE_AMOUNT_HIGH)
if (rd.firstMsat < lastCrossSignedState.initHostedChannel.htlcMinimumMsat) throw CMDAddImpossible(rd, ERR_REMOTE_AMOUNT_LOW, lastCrossSignedState.initHostedChannel.htlcMinimumMsat)
if (UInt64(inFlight) > lastCrossSignedState.initHostedChannel.maxHtlcValueInFlightMsat) throw CMDAddImpossible(rd, ERR_REMOTE_AMOUNT_HIGH)
if (inHtlcs.size > lastCrossSignedState.initHostedChannel.maxAcceptedHtlcs) throw CMDAddImpossible(rd, ERR_TOO_MANY_HTLC)
commits1 -> add
}
def receiveAdd(add: UpdateAddHtlc) = {
val commits1 = addProposal(add.remote)
if (add.id != nextTotalRemote + 1) throw new LightningException
if (commits1.nextLocalSpec.toRemoteMsat < 0L) throw new LightningException
val inHtlcs \\ inFlight = commits1.nextLocalSpec.directedHtlcsAndSum(incoming = true)
if (inHtlcs.size > lastCrossSignedState.initHostedChannel.maxAcceptedHtlcs) throw new LightningException
if (UInt64(inFlight) > lastCrossSignedState.initHostedChannel.maxHtlcValueInFlightMsat) throw new LightningException
commits1
}
def receiveFulfill(fulfill: UpdateFulfillHtlc) =
CommitmentSpec.findHtlcById(localSpec, fulfill.id, isIncoming = false) match {
case Some(htlc) if fulfill.paymentHash == htlc.add.paymentHash => addProposal(fulfill.remote)
case None => throw new LightningException("Peer has fulfilled a non-existing payment")
}
def receiveFail(fail: UpdateFailHtlc) = {
val notFound = CommitmentSpec.findHtlcById(localSpec, fail.id, isIncoming = false).isEmpty
if (notFound) throw new LightningException else addProposal(fail.remote)
}
def receiveFailMalformed(fail: UpdateFailMalformedHtlc) = {
val notFound = CommitmentSpec.findHtlcById(localSpec, fail.id, isIncoming = false).isEmpty
if (fail.failureCode.&(FailureMessageCodecs.BADONION) == 0) throw new LightningException
if (notFound) throw new LightningException else addProposal(fail.remote)
}
} | btcontract/lnwallet | app/src/main/java/com/lightning/walletapp/ln/ChannelData.scala | Scala | apache-2.0 | 32,125 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.wordspec
import org.scalatest._
import SharedHelpers.{EventRecordingReporter, thisLineNumber}
import scala.concurrent.{Promise, ExecutionContext, Future}
import org.scalatest.concurrent.SleepHelper
import org.scalatest.events.{InfoProvided, MarkupProvided}
import org.scalatest.exceptions.{DuplicateTestNameException, NotAllowedException}
import org.scalactic.Prettifier
import scala.util.Success
import org.scalatest
import org.scalatest.wordspec
class FixtureAsyncWordSpecSpec extends scalatest.funspec.AnyFunSpec {
private val prettifier = Prettifier.default
describe("AsyncWordSpec") {
it("can be used for tests that return Future under parallel async test execution") {
class ExampleSpec extends wordspec.FixtureAsyncWordSpec with ParallelTestExecution {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
val a = 1
"test 1" in { fixture =>
Future {
assert(a == 1)
}
}
"test 2" in { fixture =>
Future {
assert(a == 2)
}
}
"test 3" in { fixture =>
Future {
pending
}
}
"test 4" in { fixture =>
Future {
cancel()
}
}
"test 5" ignore { fixture =>
Future {
cancel()
}
}
override def newInstance = new ExampleSpec
}
val rep = new EventRecordingReporter
val spec = new ExampleSpec
val status = spec.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 4)
assert(rep.testSucceededEventsReceived.length == 1)
assert(rep.testSucceededEventsReceived(0).testName == "test 1")
assert(rep.testFailedEventsReceived.length == 1)
assert(rep.testFailedEventsReceived(0).testName == "test 2")
assert(rep.testPendingEventsReceived.length == 1)
assert(rep.testPendingEventsReceived(0).testName == "test 3")
assert(rep.testCanceledEventsReceived.length == 1)
assert(rep.testCanceledEventsReceived(0).testName == "test 4")
assert(rep.testIgnoredEventsReceived.length == 1)
assert(rep.testIgnoredEventsReceived(0).testName == "test 5")
}
it("can be used for tests that did not return Future under parallel async test execution") {
class ExampleSpec extends wordspec.FixtureAsyncWordSpec with ParallelTestExecution {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
val a = 1
"test 1" in { fixture =>
assert(a == 1)
}
"test 2" in { fixture =>
assert(a == 2)
}
"test 3" in { fixture =>
pending
}
"test 4" in { fixture =>
cancel()
}
"test 5" ignore { fixture =>
cancel()
}
override def newInstance = new ExampleSpec
}
val rep = new EventRecordingReporter
val spec = new ExampleSpec
val status = spec.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 4)
assert(rep.testSucceededEventsReceived.length == 1)
assert(rep.testSucceededEventsReceived(0).testName == "test 1")
assert(rep.testFailedEventsReceived.length == 1)
assert(rep.testFailedEventsReceived(0).testName == "test 2")
assert(rep.testPendingEventsReceived.length == 1)
assert(rep.testPendingEventsReceived(0).testName == "test 3")
assert(rep.testCanceledEventsReceived.length == 1)
assert(rep.testCanceledEventsReceived(0).testName == "test 4")
assert(rep.testIgnoredEventsReceived.length == 1)
assert(rep.testIgnoredEventsReceived(0).testName == "test 5")
}
it("should run tests that return Future in serial by default") {
@volatile var count = 0
class ExampleSpec extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
"test 1" in { fixture =>
Future {
SleepHelper.sleep(30)
assert(count == 0)
count = 1
succeed
}
}
"test 2" in { fixture =>
Future {
assert(count == 1)
SleepHelper.sleep(50)
count = 2
succeed
}
}
"test 3" in { fixture =>
Future {
assert(count == 2)
}
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 3)
assert(rep.testSucceededEventsReceived.length == 3)
}
it("should run tests that does not return Future in serial by default") {
@volatile var count = 0
class ExampleSpec extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
"test 1" in { fixture =>
SleepHelper.sleep(30)
assert(count == 0)
count = 1
succeed
}
"test 2" in { fixture =>
assert(count == 1)
SleepHelper.sleep(50)
count = 2
succeed
}
"test 3" in { fixture =>
assert(count == 2)
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 3)
assert(rep.testSucceededEventsReceived.length == 3)
}
// SKIP-SCALATESTJS,NATIVE-START
it("should run tests and its future in same main thread when use SerialExecutionContext") {
var mainThread = Thread.currentThread
var test1Thread: Option[Thread] = None
var test2Thread: Option[Thread] = None
var onCompleteThread: Option[Thread] = None
class ExampleSpec extends wordspec.FixtureAsyncWordSpec {
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
"test 1" in { fixture =>
Future {
test1Thread = Some(Thread.currentThread)
succeed
}
}
"test 2" in { fixture =>
Future {
test2Thread = Some(Thread.currentThread)
succeed
}
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
status.whenCompleted { s =>
onCompleteThread = Some(Thread.currentThread)
}
status.waitUntilCompleted()
assert(test1Thread.isDefined)
assert(test1Thread.get == mainThread)
assert(test2Thread.isDefined)
assert(test2Thread.get == mainThread)
assert(onCompleteThread.isDefined)
assert(onCompleteThread.get == mainThread)
}
it("should run tests and its true async future in the same thread when use SerialExecutionContext") {
var mainThread = Thread.currentThread
@volatile var test1Thread: Option[Thread] = None
@volatile var test2Thread: Option[Thread] = None
var onCompleteThread: Option[Thread] = None
class ExampleSpec extends wordspec.FixtureAsyncWordSpec {
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
"test 1" in { fixture =>
val promise = Promise[Assertion]
val timer = new java.util.Timer
timer.schedule(
new java.util.TimerTask {
def run(): Unit = {
promise.complete(Success(succeed))
}
},
1000
)
promise.future.map { s =>
test1Thread = Some(Thread.currentThread)
s
}
}
"test 2" in { fixture =>
val promise = Promise[Assertion]
val timer = new java.util.Timer
timer.schedule(
new java.util.TimerTask {
def run(): Unit = {
promise.complete(Success(succeed))
}
},
500
)
promise.future.map { s =>
test2Thread = Some(Thread.currentThread)
s
}
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
status.whenCompleted { s =>
onCompleteThread = Some(Thread.currentThread)
}
status.waitUntilCompleted()
assert(test1Thread.isDefined)
assert(test1Thread.get == mainThread)
assert(test2Thread.isDefined)
assert(test2Thread.get == mainThread)
assert(onCompleteThread.isDefined)
assert(onCompleteThread.get == mainThread)
}
it("should not run out of stack space with nested futures when using SerialExecutionContext") {
class ExampleSpec extends wordspec.FixtureAsyncWordSpec {
// Note we get a StackOverflowError with the following execution
// context.
// override implicit def executionContext: ExecutionContext = new ExecutionContext { def execute(runnable: Runnable) = runnable.run; def reportFailure(cause: Throwable) = () }
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
def sum(xs: List[Int]): Future[Int] =
xs match {
case Nil => Future.successful(0)
case x :: xs => Future(x).flatMap(xx => sum(xs).map(xxx => xx + xxx))
}
"test 1" in { fixture =>
val fut: Future[Int] = sum((1 to 50000).toList)
fut.map(total => assert(total == 1250025000))
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
status.waitUntilCompleted()
assert(!rep.testSucceededEventsReceived.isEmpty)
}
// SKIP-SCALATESTJS,NATIVE-END
it("should run tests that returns Future and report their result in serial") {
class ExampleSpec extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
"test 1" in { fixture =>
Future {
SleepHelper.sleep(60)
succeed
}
}
"test 2" in { fixture =>
Future {
SleepHelper.sleep(30)
succeed
}
}
"test 3" in { fixture =>
Future {
succeed
}
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 3)
assert(rep.testStartingEventsReceived(0).testName == "test 1")
assert(rep.testStartingEventsReceived(1).testName == "test 2")
assert(rep.testStartingEventsReceived(2).testName == "test 3")
assert(rep.testSucceededEventsReceived.length == 3)
assert(rep.testSucceededEventsReceived(0).testName == "test 1")
assert(rep.testSucceededEventsReceived(1).testName == "test 2")
assert(rep.testSucceededEventsReceived(2).testName == "test 3")
}
it("should run tests that does not return Future and report their result in serial") {
class ExampleSpec extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
"test 1" in { fixture =>
SleepHelper.sleep(60)
succeed
}
"test 2" in { fixture =>
SleepHelper.sleep(30)
succeed
}
"test 3" in { fixture =>
succeed
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 3)
assert(rep.testStartingEventsReceived(0).testName == "test 1")
assert(rep.testStartingEventsReceived(1).testName == "test 2")
assert(rep.testStartingEventsReceived(2).testName == "test 3")
assert(rep.testSucceededEventsReceived.length == 3)
assert(rep.testSucceededEventsReceived(0).testName == "test 1")
assert(rep.testSucceededEventsReceived(1).testName == "test 2")
assert(rep.testSucceededEventsReceived(2).testName == "test 3")
}
it("should send an InfoProvided event for an info in main spec body") {
class MySuite extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
info(
"hi there"
)
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val infoList = reporter.infoProvidedEventsReceived
assert(infoList.size == 1)
assert(infoList(0).message == "hi there")
}
it("should send an InfoProvided event for an info in scope body") {
class MySuite extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
"test feature" should {
info(
"hi there"
)
"test 1" in { fixture => succeed }
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val infoList = reporter.infoProvidedEventsReceived
assert(infoList.size == 1)
assert(infoList(0).message == "hi there")
}
it("should send an InfoProvided event for an info in test body") {
class MySuite extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
"test feature" should {
"test 1" in { fixture =>
info("hi there")
succeed
}
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val infoList = reporter.infoProvidedEventsReceived
assert(infoList.size == 0)
val testSucceededList = reporter.testSucceededEventsReceived
assert(testSucceededList.size == 1)
assert(testSucceededList(0).recordedEvents.size == 1)
val recordedEvent = testSucceededList(0).recordedEvents(0)
assert(recordedEvent.isInstanceOf[InfoProvided])
val infoProvided = recordedEvent.asInstanceOf[InfoProvided]
assert(infoProvided.message == "hi there")
}
it("should send an InfoProvided event for an info in Future returned by test body") {
class MySuite extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
"test feature" should {
"test 1" in { fixture =>
Future {
info("hi there")
succeed
}
}
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val infoList = reporter.infoProvidedEventsReceived
assert(infoList.size == 0)
val testSucceededList = reporter.testSucceededEventsReceived
assert(testSucceededList.size == 1)
assert(testSucceededList(0).recordedEvents.size == 1)
val recordedEvent = testSucceededList(0).recordedEvents(0)
assert(recordedEvent.isInstanceOf[InfoProvided])
val infoProvided = recordedEvent.asInstanceOf[InfoProvided]
assert(infoProvided.message == "hi there")
}
it("should send a NoteProvided event for a note in main spec body") {
class MySuite extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
note(
"hi there"
)
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val noteList = reporter.noteProvidedEventsReceived
assert(noteList.size == 1)
assert(noteList(0).message == "hi there")
}
it("should send a NoteProvided event for a note in scope body") {
class MySuite extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
"test feature" should {
note(
"hi there"
)
"test 1" in { fixture => succeed }
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val noteList = reporter.noteProvidedEventsReceived
assert(noteList.size == 1)
assert(noteList(0).message == "hi there")
}
it("should send a NoteProvided event for a note in test body") {
class MySuite extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
"test feature" should {
"test 1" in { fixture =>
note("hi there")
succeed
}
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val noteList = reporter.noteProvidedEventsReceived
assert(noteList.size == 1)
assert(noteList(0).message == "hi there")
}
it("should send a NoteProvided event for a note in Future returned by test body") {
class MySuite extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
"test feature" should {
"test 1" in { fixture =>
Future {
note("hi there")
succeed
}
}
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val noteList = reporter.noteProvidedEventsReceived
assert(noteList.size == 1)
assert(noteList(0).message == "hi there")
}
it("should send an AlertProvided event for an alert in main spec body") {
class MySuite extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
alert(
"hi there"
)
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val alertList = reporter.alertProvidedEventsReceived
assert(alertList.size == 1)
assert(alertList(0).message == "hi there")
}
it("should send an AlertProvided event for an alert in scope body") {
class MySuite extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
"test feature" should {
alert(
"hi there"
)
"test 1" in { fixture => succeed }
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val alertList = reporter.alertProvidedEventsReceived
assert(alertList.size == 1)
assert(alertList(0).message == "hi there")
}
it("should send an AlertProvided event for an alert in test body") {
class MySuite extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
"test feature" should {
"test 1" in { fixture =>
alert("hi there")
succeed
}
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val alertList = reporter.alertProvidedEventsReceived
assert(alertList.size == 1)
assert(alertList(0).message == "hi there")
}
it("should send an AlertProvided event for an alert in Future returned by test body") {
class MySuite extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
"test feature" should {
"test 1" in { fixture =>
Future {
alert("hi there")
succeed
}
}
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val alertList = reporter.alertProvidedEventsReceived
assert(alertList.size == 1)
assert(alertList(0).message == "hi there")
}
it("should send a MarkupProvided event for a markup in main spec body") {
class MySuite extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
markup(
"hi there"
)
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val markupList = reporter.markupProvidedEventsReceived
assert(markupList.size == 1)
assert(markupList(0).text == "hi there")
}
it("should send a MarkupProvided event for a markup in scope body") {
class MySuite extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
"test feature" should {
markup(
"hi there"
)
"test 1" in { fixture => succeed }
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val markupList = reporter.markupProvidedEventsReceived
assert(markupList.size == 1)
assert(markupList(0).text == "hi there")
}
it("should send a MarkupProvided event for a markup in test body") {
class MySuite extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
"test feature" should {
"test 1" in { fixture =>
markup("hi there")
succeed
}
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val markupList = reporter.markupProvidedEventsReceived
assert(markupList.size == 0)
val testSucceededList = reporter.testSucceededEventsReceived
assert(testSucceededList.size == 1)
assert(testSucceededList(0).recordedEvents.size == 1)
val recordedEvent = testSucceededList(0).recordedEvents(0)
assert(recordedEvent.isInstanceOf[MarkupProvided])
val markupProvided = recordedEvent.asInstanceOf[MarkupProvided]
assert(markupProvided.text == "hi there")
}
it("should send a MarkupProvided event for a markup in Future returned by test body") {
class MySuite extends wordspec.FixtureAsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
"test feature" should {
"test 1" in { fixture =>
Future {
markup("hi there")
succeed
}
}
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val markupList = reporter.markupProvidedEventsReceived
assert(markupList.size == 0)
val testSucceededList = reporter.testSucceededEventsReceived
assert(testSucceededList.size == 1)
assert(testSucceededList(0).recordedEvents.size == 1)
val recordedEvent = testSucceededList(0).recordedEvents(0)
assert(recordedEvent.isInstanceOf[MarkupProvided])
val markupProvided = recordedEvent.asInstanceOf[MarkupProvided]
assert(markupProvided.text == "hi there")
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside when") {
class TestSpec extends wordspec.FixtureAsyncWordSpec {
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome = { test("hi") }
"a feature" when {
"test 1" in { fixture => succeed }
"test 1" in { fixture => succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("FixtureAsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInWhenClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature when test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature when test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside shorthand when") {
class TestSpec extends wordspec.FixtureAsyncWordSpec {
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome = { test("hi") }
"a feature" when {
//DOTTY-ONLY ()
}
it when {
"test 1" in { fixture => succeed }
"test 1" in { fixture => succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("FixtureAsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInWhenClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature when test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature when test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside should") {
class TestSpec extends wordspec.FixtureAsyncWordSpec {
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome = { test("hi") }
"a feature" should {
"test 1" in { fixture => succeed }
"test 1" in { fixture => succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("FixtureAsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInShouldClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature should test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature should test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside shorthand should") {
class TestSpec extends wordspec.FixtureAsyncWordSpec {
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome = { test("hi") }
"a feature" should {
//DOTTY-ONLY ()
}
it should {
"test 1" in { fixture => succeed }
"test 1" in { fixture => succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("FixtureAsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInShouldClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature should test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature should test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside must") {
class TestSpec extends wordspec.FixtureAsyncWordSpec {
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome = { test("hi") }
"a feature" must {
"test 1" in { fixture => succeed }
"test 1" in { fixture => succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("FixtureAsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInMustClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature must test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature must test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside shorthand must") {
class TestSpec extends wordspec.FixtureAsyncWordSpec {
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome = { test("hi") }
"a feature" must {
//DOTTY-ONLY ()
}
it must {
"test 1" in { fixture => succeed }
"test 1" in { fixture => succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("FixtureAsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInMustClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature must test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature must test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside that") {
class TestSpec extends wordspec.FixtureAsyncWordSpec {
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome = { test("hi") }
"a feature" that {
"test 1" in { fixture => succeed }
"test 1" in { fixture => succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("FixtureAsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInThatClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature that", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature that test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature that test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside which") {
class TestSpec extends wordspec.FixtureAsyncWordSpec {
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome = { test("hi") }
"a feature" which {
"test 1" in { fixture => succeed }
"test 1" in { fixture => succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("FixtureAsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInWhichClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature which", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature which test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature which test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside can") {
class TestSpec extends wordspec.FixtureAsyncWordSpec {
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome = { test("hi") }
"a feature" can {
"test 1" in { fixture => succeed }
"test 1" in { fixture => succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("FixtureAsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInCanClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature can test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature can test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside shorthand can") {
class TestSpec extends wordspec.FixtureAsyncWordSpec {
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome = { test("hi") }
"a feature" can {
//DOTTY-ONLY ()
}
it can {
"test 1" in { fixture => succeed }
"test 1" in { fixture => succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("FixtureAsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInCanClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature can test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature can test 1")))
}
it("should allow other execution context to be used") {
class TestSpec extends wordspec.FixtureAsyncWordSpec {
// SKIP-SCALATESTJS,NATIVE-START
override implicit val executionContext = scala.concurrent.ExecutionContext.Implicits.global
// SKIP-SCALATESTJS,NATIVE-END
// SCALATESTJS-ONLY override implicit val executionContext = scala.scalajs.concurrent.JSExecutionContext.runNow
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome = { test("hi") }
val a = 1
"feature 1" should {
"test A" in { fixture =>
Future { assert(a == 1) }
}
}
"feature 2" should {
"test B" in { fixture =>
Future { assert(a == 1) }
}
}
"feature 3" should {
"test C" in { fixture =>
Future { assert(a == 1) }
}
}
}
val suite = new TestSpec
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(reporter.scopeOpenedEventsReceived.length == 3)
assert(reporter.scopeClosedEventsReceived.length == 3)
assert(reporter.testStartingEventsReceived.length == 3)
assert(reporter.testSucceededEventsReceived.length == 3)
}
}
}
| scalatest/scalatest | jvm/wordspec-test/src/test/scala/org/scalatest/wordspec/FixtureAsyncWordSpecSpec.scala | Scala | apache-2.0 | 44,992 |
/*
* Copyright 2015 – 2016 Martin Seeler
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.oanda.instruments
import io.circe.Decoder
import io.circe.generic.semiauto._
case class Instrument(
instrument: String,
displayName: String,
pip: Double,
precision: Double,
maxTradeUnits: Int,
maxTrailingStop: Double,
minTrailingStop: Double,
marginRate: Double,
halted: Boolean
)
object Instrument {
implicit val decodeInstrument: Decoder[Instrument] =
deriveDecoder
implicit val decodeInstruments =
Decoder.instance(_.get[Vector[Instrument]]("instruments"))
}
| MartinSeeler/rx-oanda | src/main/scala/rx/oanda/instruments/Instrument.scala | Scala | apache-2.0 | 1,116 |
package blended.streams.dispatcher.internal.builder
import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.scaladsl.{Flow, GraphDSL, RunnableGraph, Source}
import akka.stream.{ActorMaterializer, Graph, Materializer, SinkShape}
import blended.jms.bridge.BridgeProviderConfig
import blended.jms.utils.{JmsDestination, JmsQueue}
import blended.streams.dispatcher.internal.builder.DispatcherOutbound.DispatcherTarget
import blended.streams.jms.JmsFlowSupport
import blended.streams.message.{FlowEnvelope, FlowMessage}
import blended.streams.processor.Collector
import blended.streams.worklist.WorklistState.WorklistState
import blended.streams.worklist.{WorklistEvent, WorklistState}
import org.scalatest.Matchers
import scala.concurrent.ExecutionContext
class DispatcherOutboundSpec extends DispatcherSpecSupport
with Matchers {
override def loggerName: String = classOf[DispatcherOutboundSpec].getName()
private def runnableOutbound(
ctxt : DispatcherExecContext,
testMsg : FlowEnvelope,
send : Flow[FlowEnvelope, FlowEnvelope, NotUsed]
) : (Collector[WorklistEvent], Collector[FlowEnvelope], RunnableGraph[NotUsed]) = {
implicit val system : ActorSystem = ctxt.system
val outColl = Collector[WorklistEvent]("out")(_ => {})
val errColl = Collector[FlowEnvelope]("err")(_.acknowledge())
val source = Source.single[FlowEnvelope](testMsg)
val sinkGraph : Graph[SinkShape[FlowEnvelope], NotUsed] = {
GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val outStep = b.add(DispatcherBuilder(ctxt.idSvc, ctxt.cfg, send)(ctxt.bs).outbound())
val out = b.add(outColl.sink)
val err = b.add(errColl.sink)
outStep.out0 ~> out
outStep.out1 ~> err
SinkShape(outStep.in)
}
}
(outColl, errColl, source.to(sinkGraph))
}
def testOutbound(expectedState: WorklistState, send: Flow[FlowEnvelope, FlowEnvelope, NotUsed]) : Unit = {
withDispatcherConfig { ctxt =>
implicit val system : ActorSystem = ctxt.system
implicit val eCtxt : ExecutionContext = system.dispatcher
implicit val materializer : Materializer = ActorMaterializer()
val envelope = FlowEnvelope().withHeader(ctxt.bs.headerConfig.headerBranch, "outbound").get
val (outColl, errColl, out) = runnableOutbound(ctxt, envelope, send)
try {
out.run()
val result = for {
err <- errColl.result
evt <- outColl.result
} yield (err, evt)
result.map { case (error, events) =>
error should be (empty)
events should have size 1
val event = events.head
event.worklist.items should have size 1
event.worklist.id should be (envelope.id)
event.state should be (expectedState)
}
} finally {
system.stop(outColl.actor)
system.stop(errColl.actor)
}
}
}
"The outbound flow of the dispatcher should" - {
"produce a worklist completed event for successfull completions of the outbound flow" in {
val good = Flow.fromFunction[FlowEnvelope, FlowEnvelope]{ env => env}
testOutbound(WorklistState.Completed, good)
}
"produce a worklist failed event after unsuccessfull completions of the outbound flow" in {
val bad = Flow.fromFunction[FlowEnvelope, FlowEnvelope]{ env => env.withException(new Exception("Boom !")) }
testOutbound(WorklistState.Failed, bad)
}
}
"The outbound routing decider should" - {
val provider = BridgeProviderConfig(
vendor = "sonic75",
provider = "central",
internal = false,
inbound = JmsDestination.create("in").get,
outbound = JmsDestination.create("out").get,
errors = JmsDestination.create("error").get,
transactions = JmsDestination.create("trans").get,
cbes = JmsDestination.create("cbes").get
)
val prefix : DispatcherExecContext => String = ctxt => ctxt.bs.headerConfig.prefix
val srcVendorHeader : DispatcherExecContext => String = ctxt => JmsFlowSupport.srcVendorHeader(prefix(ctxt))
val srcProviderHeader : DispatcherExecContext => String = ctxt => JmsFlowSupport.srcProviderHeader(prefix(ctxt))
val replyToHeader : DispatcherExecContext => String = ctxt => JmsFlowSupport.replyToHeader(prefix(ctxt))
val srcDestHeader : DispatcherExecContext => String = ctxt => JmsFlowSupport.srcDestHeader(prefix(ctxt))
"resolve a replyTo destination if no outbound destination is set in resource type router" in {
withDispatcherConfig { ctxt =>
val env : FlowEnvelope = FlowEnvelope(
FlowMessage(FlowMessage.noProps)
)
.withHeader(srcVendorHeader(ctxt), "activemq").get
.withHeader(srcProviderHeader(ctxt), "activemq").get
.withHeader(replyToHeader(ctxt), "response").get
.withHeader(srcDestHeader(ctxt), JmsDestination.create("Dummy").get.asString).get
.withContextObject(ctxt.bs.bridgeProviderKey, provider)
// This will trigger the replyto routing
.withContextObject(ctxt.bs.bridgeDestinationKey, None)
val routing : DispatcherTarget = DispatcherOutbound.outboundRouting(
dispatcherCfg = ctxt.cfg,
idSvc = ctxt.idSvc,
bs = ctxt.bs
)(env).get
routing should be (DispatcherTarget("activemq", "activemq", JmsDestination.create("response").get))
}
}
"resolve a replyTo destination if the outbound destination is set to 'replyTo' in the config" in {
withDispatcherConfig { ctxt =>
val env : FlowEnvelope = FlowEnvelope(
FlowMessage(FlowMessage.noProps)
)
.withHeader(srcVendorHeader(ctxt), "activemq").get
.withHeader(srcProviderHeader(ctxt), "activemq").get
.withHeader(replyToHeader(ctxt), "response").get
.withHeader(srcDestHeader(ctxt), JmsDestination.create("Dummy").get.asString).get
.withContextObject(ctxt.bs.bridgeProviderKey, provider)
// This will trigger the replyto routing
.withContextObject(ctxt.bs.bridgeDestinationKey, Some(JmsFlowSupport.replyToQueueName))
val routing : DispatcherTarget = DispatcherOutbound.outboundRouting(
dispatcherCfg = ctxt.cfg,
idSvc = ctxt.idSvc,
bs = ctxt.bs
)(env).get
routing should be (DispatcherTarget("activemq", "activemq", JmsDestination.create("response").get))
}
}
"resolve to the configured target destination" in {
withDispatcherConfig { ctxt =>
val env : FlowEnvelope = FlowEnvelope(
FlowMessage(FlowMessage.noProps)
)
.withHeader(srcVendorHeader(ctxt), "activemq").get
.withHeader(srcProviderHeader(ctxt), "activemq").get
.withHeader(replyToHeader(ctxt), "response").get
.withHeader(srcDestHeader(ctxt), JmsDestination.create("Dummy").get.asString).get
.withContextObject(ctxt.bs.bridgeProviderKey, provider)
.withContextObject(ctxt.bs.bridgeDestinationKey, Some("centralDest"))
val routing : DispatcherTarget = DispatcherOutbound.outboundRouting(
dispatcherCfg = ctxt.cfg,
idSvc = ctxt.idSvc,
bs = ctxt.bs
)(env).get
routing should be (DispatcherTarget(provider.vendor, provider.provider, JmsDestination.create("centralDest").get))
}
}
}
}
| lefou/blended | blended.streams.dispatcher/src/test/scala/blended/streams/dispatcher/internal/builder/DispatcherOutboundSpec.scala | Scala | apache-2.0 | 7,436 |
package greetings {
object hello {
def speak = println("Hello!")
}
}
| grzegorzbalcerek/scala-book-examples | examples/PackageNameConflict1.scala | Scala | mit | 77 |
import sbt._
/**
* @param packageName Used as the prefix for: (1) handout name, (2) the Scala package, (3) source folder.
* @param key Per assignment key specified by coursera.
* @param partId Identifies the part of the assignment. (We always have one-part assignments.)
* @param maxScore Maximum score that can be given for the assignment. Must match the value in the WebAPI.
* @param styleScoreRatio Defines the portion of the grade that is assigned to style.
* @param dependencies Library dependencies specific to this module.
* @param styleSheet Path to the scalastyle configuration for this assignment.
* @param options Options passed to the java process or coursera infrastructure. Following values are
* supported:
*
* NAME DEFAULT DESCRIPTION
* Xms 10m -Xms for jvm
* Xmx 256m -Xmx for jvm, should less than `grader-memory`
* individualTimeout 240 time out of one test case
* totalTimeout 850 total time out, should less than `grader-timeout`
* grader-cpu 1 number of cpu for coursera infrastructure
* grader-memory 1024 memory for coursera infrastructure
* grader-timeout 1200 grading timeout for coursera infrastructure
*/
case class Assignment(packageName: String,
key: String,
itemId: String,
partId: String,
maxScore: Double,
styleScoreRatio: Double = 0.0d,
styleSheet: String = "",
dependencies: Seq[ModuleID] = Seq(),
options: Map[String, String] = Map()) {
assert(!(styleScoreRatio == 0.0d ^ styleSheet == ""), "Style sheet and style ratio should be defined in pair.")
}
trait CommonBuild extends Build {
val course = SettingKey[String]("course")
val assignment = SettingKey[String]("assignment")
val assignmentsMap = SettingKey[Map[String, Assignment]]("assignmentsMap")
val courseId = SettingKey[String]("courseId")
val commonSourcePackages = SettingKey[Seq[String]]("commonSourcePackages")
lazy val scalaTestDependency = "org.scalatest" %% "scalatest" % "2.2.4"
}
| alvsanand/scala-spark-big-data | wikipedia/project/CommonBuild.scala | Scala | apache-2.0 | 2,734 |
/*
* Copyright 2001-2012 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalautils
class DefaultEquality[A] extends Equality[A] {
def areEqual(a: A, b: Any): Boolean = {
a match {
case arr: Array[_] =>
b match {
case brr: Array[_] => arr.deep == brr.deep
case _ => arr.deep == b
}
case _ => {
b match {
case brr: Array[_] => a == brr.deep
case _ => a == b
}
}
}
}
}
| hubertp/scalatest | src/main/scala/org/scalautils/DefaultEquality.scala | Scala | apache-2.0 | 1,011 |
/*
* Copyright (c) 2015.
* Created by MrTJP.
* All rights reserved.
*/
package mrtjp.core.fx
import codechicken.lib.vec.Vector3
import mrtjp.core.fx.particles.CoreParticle
import net.minecraft.util.math.BlockPos
trait TPositionedParticle extends CoreParticle
{
//Implement manually because x, y, z are protected now
def x:Double
def y:Double
def z:Double
def x_=(x:Double){setPosition(x, y, z)}
def y_=(y:Double){setPosition(x, y, z)}
def z_=(z:Double){setPosition(x, y, z)}
def px:Double
def py:Double
def pz:Double
def px_=(x:Double)
def py_=(y:Double)
def pz_=(z:Double)
def dx = x-px
def dy = y-py
def dz = z-pz
def position = new Vector3(x, y, z)
def prevPosition = new Vector3(px, py, pz)
def setPos(pos:Vector3)
{
setPosition(pos.x, pos.y, pos.z)
}
def setPrevPos(pos:Vector3)
{
px = pos.x
py = pos.y
pz = pos.z
}
def blockPosition = new BlockPos(math.floor(x).toInt, math.floor(y).toInt, math.floor(z).toInt)
abstract override def tick()
{
super.tick()
px = x
py = y
pz = z
}
}
class PositionChangeToAction extends ParticleAction
{
var target = Vector3.ZERO
var duration = 0.0
override def canOperate(p:CoreParticle) = p.isInstanceOf[TPositionedParticle]
override def operate(p:CoreParticle, time:Double)
{
val pp = p.asInstanceOf[TPositionedParticle]
val pos = pp.position
if (time < duration)
{
val dpos = target.copy.subtract(pos)
val speed = dpos.copy.multiply(1/(duration-time)).multiply(deltaTime(time))
pp.setPos(pos.add(speed))
}
else isFinished = true
}
override def compile(p:CoreParticle){}
override def copy = ParticleAction.moveTo(target.x, target.y, target.z, duration)
}
class PositionChangeForAction extends ParticleAction
{
var delta = Vector3.ZERO
var duration = 0.0
override def canOperate(p:CoreParticle) = p.isInstanceOf[TPositionedParticle]
override def operate(p:CoreParticle, time:Double)
{
val pp = p.asInstanceOf[TPositionedParticle]
if (time < duration) pp.setPos(pp.position.add(delta.copy.multiply(deltaTime(time))))
else isFinished = true
}
override def compile(p:CoreParticle){}
override def copy = ParticleAction.moveFor(delta.x, delta.y, delta.z, duration)
}
| MrTJP/MrTJPCore | src/main/scala/mrtjp/core/fx/PositionChangeAction.scala | Scala | lgpl-3.0 | 2,474 |
// Databricks notebook source
// MAGIC %md
// MAGIC ScaDaMaLe Course [site](https://lamastex.github.io/scalable-data-science/sds/3/x/) and [book](https://lamastex.github.io/ScaDaMaLe/index.html)
// COMMAND ----------
// MAGIC %md # Data Processing
// COMMAND ----------
// MAGIC %md ### Load datasets
// COMMAND ----------
// Paths to datasets of different regions.
val paths: List[String] = List("dbfs:/FileStore/shared_uploads/hugower@kth.se/sequences_oceania.fasta",
"dbfs:/FileStore/shared_uploads/hugower@kth.se/sequences_northamerica.fasta",
"dbfs:/FileStore/shared_uploads/hugower@kth.se/sequences_southamerica.fasta",
"dbfs:/FileStore/shared_uploads/hugower@kth.se/sequences_europe.fasta",
"dbfs:/FileStore/shared_uploads/hugower@kth.se/sequences_africa.fasta",
"dbfs:/FileStore/shared_uploads/hugower@kth.se/sequences_asia.fasta")
// COMMAND ----------
import scala.util.matching.Regex
// regex pattern to take region name, label, from complete path name (Must be changed accordingly if path follows a different structure)
val pattern: Regex = "/[a-zA-Z]+_([a-zA-Z]+)\\\\.".r
def read_datasets(paths:List[String]): List[RDD[(String,String)]] = {
if (paths.size < 1) { // return an empty RDD
return List.fill(0) (sc.emptyRDD)
}
else {
pattern.findFirstMatchIn(paths.head) match { // extract the label based on the pattern defined above
case Some(x) => {
val label:String = x.group(1) // create the label based on the path name
return (sc.textFile(paths.head).filter(_ != "").map(_.trim()).map(s => (s,label)))::read_datasets(paths.tail) // read the file in path and attach the data with its label to RDD list
}
case None => throw new RuntimeException("no label found")
}
}
}
// COMMAND ----------
// read data and set the delimiter as ">" which seperates each sample in fasta format
sc.hadoopConfiguration.set("textinputformat.record.delimiter",">")
val datasets = read_datasets(paths)
// COMMAND ----------
datasets.length
// COMMAND ----------
datasets(0).take(1)
// COMMAND ----------
// combine the datasets into one and cache for optimization
val data = datasets.reduce( (a,b) => a++b).cache()
// COMMAND ----------
data.take(1)
// COMMAND ----------
// get the headers for each sample (the first line of each sample is a header)
val headers = data.map( {case (genome,label) => (genome.split("\\n").head.split('|'),label)})
headers.count
// COMMAND ----------
headers.take(5)
// COMMAND ----------
// remove the headers and only get genome sequences of samples.
val samples = data.map( {case (genome,label) => (genome.split("\\n").tail.mkString(""), label)}).cache()
samples.count
// COMMAND ----------
// get the genome lengths per sample (this is just to check if there are extreme cases so we would remove those)
val genome_length_per_s = samples.map({case (genome,label) => genome.length()})
// COMMAND ----------
// check the statistics if there is any significant variation
genome_length_per_s.stats
// COMMAND ----------
// MAGIC %md #### Extract (overlapping or nonoverlapping) 3-mers
// COMMAND ----------
// A tail recursive overlapping subsequence function
// ex1: input: ("abcd", 2, true) -> output: "ab bc cd":
// ex2: input: ("abcd", 2, false) -> output: "ab cd"
def subsequence_str( sequence:String, k:Int, overlapping:Boolean ): String = {
def helper(seq:String, acc:String): String = {
if (seq.length < k ) {
return acc
}
else {
val sub = seq.substring(0,k)
if(overlapping) helper(seq.tail, acc + sub + " ")
else helper(seq.substring(k), acc + sub + " ")
}
}
return helper(sequence, "")
}
// COMMAND ----------
// Extract the subsequences, kmers, for each sample
val k_mers = samples.map( {case (genome,label) => (subsequence_str(genome, 3, false),label)} ).cache()
// COMMAND ----------
k_mers.take(1)
// COMMAND ----------
// index kmers
val kmers_df = k_mers.zipWithIndex.map({case ((a,b),c) => (a,b,c)}).toDF("genome", "label", "id").cache()
// COMMAND ----------
kmers_df.take(1)
// COMMAND ----------
// MAGIC %md ### Split dataset as train and test
// COMMAND ----------
// split train and test data
val split = kmers_df.randomSplit(Array(0.7, 0.3), seed=42)
// COMMAND ----------
val train = split(0).cache()
train.take(1)
// COMMAND ----------
train.count
// COMMAND ----------
val test = split(1).cache()
test.take(1)
// COMMAND ----------
test.count
// COMMAND ----------
// MAGIC %md ### Save the results
// COMMAND ----------
// save the results for the next notebook
dbutils.fs.rm("/FileStore/shared_uploads/caylak@kth.se/data_test_nonoverlapping", recurse=true) // remove existing folder
test.write.parquet("dbfs:/FileStore/shared_uploads/caylak@kth.se/data_test_nonoverlapping")
dbutils.fs.rm("/FileStore/shared_uploads/caylak@kth.se/data_train_nonoverlapping", recurse=true) // remove existing folder
train.write.parquet("dbfs:/FileStore/shared_uploads/caylak@kth.se/data_train_nonoverlapping") | lamastex/scalable-data-science | dbcArchives/2021/000_0-sds-3-x-projects/student-project-09_group-TopicModeling/02_Data_Processing.scala | Scala | unlicense | 5,158 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package statements
package params
import com.intellij.lang.ASTNode
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
import org.jetbrains.plugins.scala.lang.psi.api.base.ScPrimaryConstructor
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScFunctionExpr
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.api.statements.params._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScClass
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory._
import org.jetbrains.plugins.scala.lang.psi.stubs.ScParamClauseStub
import org.jetbrains.plugins.scala.macroAnnotations.{Cached, CachedInsidePsiElement, ModCount}
/**
* @author Alexander Podkhalyuzin
* Date: 22.02.2008
*/
class ScParameterClauseImpl private(stub: ScParamClauseStub, node: ASTNode)
extends ScalaStubBasedElementImpl(stub, ScalaElementTypes.PARAM_CLAUSE, node) with ScParameterClause {
def this(node: ASTNode) = this(null, node)
def this(stub: ScParamClauseStub) = this(stub, null)
override def toString: String = "ParametersClause"
@Cached(ModCount.anyScalaPsiModificationCount, this)
def parameters: Seq[ScParameter] = {
getStubOrPsiChildren[ScParameter](TokenSets.PARAMETERS, JavaArrayFactoryUtil.ScParameterFactory).toSeq
}
@CachedInsidePsiElement(this, ModCount.getBlockModificationCount)
override def effectiveParameters: Seq[ScParameter] = {
if (!isImplicit) return parameters
//getParent is sufficient (not getContext), for synthetic clause, getParent will return other PSI,
//which is ok, it will not add anything more
getParent match {
case clauses: ScParameters =>
val element =
clauses.getParent match {
case f: ScFunction => f
case p: ScPrimaryConstructor =>
p.containingClass match {
case c: ScClass => c
case _ => return parameters
}
case _ => return parameters
}
val syntheticClause = ScalaPsiUtil.syntheticParamClause(element, clauses, element.isInstanceOf[ScClass], hasImplicit = false)
syntheticClause match {
case Some(sClause) =>
val synthParameters = sClause.parameters
synthParameters.foreach(_.setContext(this, null))
synthParameters ++ parameters
case _ => parameters
}
case _ => parameters
}
}
@Cached(ModCount.anyScalaPsiModificationCount, this)
def isImplicit: Boolean = byStubOrPsi(_.isImplicit)(findChildByType(ScalaTokenTypes.kIMPLICIT) != null)
def addParameter(param: ScParameter): ScParameterClause = {
val params = parameters
val vararg =
if (params.isEmpty) false
else params.last.isRepeatedParameter
val rParen = if (vararg) params.last.getNode else getLastChild.getNode
val node = getNode
def comma = createComma.getNode
def space = createNewLineNode(" ")
if (params.nonEmpty && !vararg) {
node.addChild(comma, rParen)
node.addChild(space, rParen)
}
node.addChild(param.getNode, rParen)
if (vararg) {
node.addChild(comma, rParen)
node.addChild(space, rParen)
}
this
}
override def owner: PsiElement = {
ScalaPsiUtil.getContextOfType(this, true, classOf[ScFunctionExpr], classOf[ScFunction], classOf[ScPrimaryConstructor])
}
}
| loskutov/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/impl/statements/params/ScParameterClauseImpl.scala | Scala | apache-2.0 | 3,599 |
/**
* BasePlate API
* An API for BasePlate to connect with Loanapp.
*
* OpenAPI spec version: 1.0.0
* Contact: apiteam@swagger.io
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.wordnik.client.api._
import akka.actor.ActorSystem
import io.swagger.app.{ResourcesApp, SwaggerApp}
import javax.servlet.ServletContext
import org.scalatra.LifeCycle
class ScalatraBootstrap extends LifeCycle {
implicit val swagger = new SwaggerApp
override def init(context: ServletContext) {
implicit val system = ActorSystem("appActorSystem")
try {
context mount (new ApiApi, "/Api/*")
context mount (new ResourcesApp, "/api-docs/*")
} catch {
case e: Throwable => e.printStackTrace()
}
}
} | garywong89/PetStoreAPI | scalatra/src/main/scala/ScalatraBootstrap.scala | Scala | apache-2.0 | 1,415 |
package slick.jdbc
import org.slf4j.LoggerFactory
import scala.concurrent.{ExecutionContext, Future}
import scala.reflect.ClassTag
import scala.util.{Failure, Success}
import java.sql.DatabaseMetaData
import slick.SlickException
import slick.ast.ColumnOption
import slick.dbio._
import slick.jdbc.meta._
import slick.{model => m}
import slick.relational.RelationalProfile
import slick.sql.SqlProfile
import slick.util.Logging
/** Build a Slick model from introspecting the JDBC metadata.
*
* In most cases you are better off transforming the generated model instead of overriding functionality here. It is
* only useful if you need easy access to the JDBC metadata in order to influence how the model is generated. A good
* use case would be interpreting column types or default values that Slick doesn't understand out of the box. If you
* just want to remove or hard code some default values, transform the resulting model instead.
*
* The tight coupling can easily lead to source code incompatibilities in future versions. Avoid hooking in here if you
* don't have to.
*
* @param ignoreInvalidDefaults see JdbcModelBuilder#ColumnBuilder#default
*/
class JdbcModelBuilder(mTables: Seq[MTable], ignoreInvalidDefaults: Boolean)(implicit ec: ExecutionContext) extends Logging {
////////////////////////////////////////////////////////////////////// Actions for reading the required JDBC metadata
/** Read the column metadata for a table in ordinal position order */
def readColumns(t: MTable): DBIO[Vector[MColumn]] = t.getColumns.map(_.sortBy(_.ordinalPosition))
/** Read the primary key metadata for a table in key sequence order */
def readPrimaryKeys(t: MTable): DBIO[Vector[MPrimaryKey]] = t.getPrimaryKeys.map(_.sortBy(_.keySeq))
/** Read the foreign key metadata for a table grouped by name and in key sequence order */
def readForeignKeys(t: MTable): DBIO[Seq[Seq[MForeignKey]]] = t.getImportedKeys.map(
// remove foreign keys pointing to tables which were not included
_.filter(fk => tableNamersByQName.isDefinedAt(fk.pkTable))
.groupBy(fk => (fk.pkTable,fk.fkName,fk.pkName,fk.fkTable))
.toSeq
.sortBy{case (key,_) => (key._1.name,key._2,key._3,key._4.name)}
.map(_._2.sortBy(_.keySeq)) // respect order
)
/** Read the index metadata grouped by name and in ordinal position order */
def readIndices(t: MTable): DBIO[Seq[Seq[MIndexInfo]]] = t.getIndexInfo().asTry.map {
case Success(iis) =>
iis.groupBy(_.indexName).toSeq.sortBy(_._1).map(_._2.sortBy(_.ordinalPosition)) // respect order
case Failure(e: java.sql.SQLException) => // TODO: this needs a test!
logger.debug(s"Skipping indices of table ${t.name.name} due to exception during getIndexInfo: "+e.getMessage.trim)
Seq()
case Failure(e) => throw e
}
///////////////////////////////////////////////////////////////////////////////////////////// Builder factory methods
def createTableNamer(meta: MTable): TableNamer = new TableNamer(meta)
/** Column model builder factory. Override for customization.
* @group Basic customization overrides */
def createColumnBuilder(tableBuilder: TableBuilder, meta: MColumn): ColumnBuilder = new ColumnBuilder(tableBuilder, meta)
def createPrimaryKeyBuilder(tableBuilder: TableBuilder, meta: Seq[MPrimaryKey]): PrimaryKeyBuilder = new PrimaryKeyBuilder(tableBuilder, meta)
def createForeignKeyBuilder(tableBuilder: TableBuilder, meta: Seq[MForeignKey]): ForeignKeyBuilder = new ForeignKeyBuilder(tableBuilder, meta)
def createIndexBuilder(tableBuilder: TableBuilder, meta: Seq[MIndexInfo]): IndexBuilder = new IndexBuilder(tableBuilder, meta)
//////////////////////////////////////////////////////////////////////////////////////////////////////// Main builder
lazy val tableNamers: Seq[TableNamer] = mTables.map(createTableNamer)
lazy val tableNamersByQName: Map[MQName, TableNamer] = mTables.map(m => m.name).zip(tableNamers).toMap
/** Table model builder factory. Override for customization.
* @group Basic customization overrides */
def createTableBuilder(namer: TableNamer): DBIO[TableBuilder] = for {
cs <- readColumns(namer.meta)
pks <- readPrimaryKeys(namer.meta)
fks <- readForeignKeys(namer.meta)
idxs <- readIndices(namer.meta)
} yield new TableBuilder(namer.meta, namer, cs, pks, fks, idxs)
/** Creates a Slick data model from jdbc meta data. Foreign keys pointing out of the given tables
* are not included. */
def buildModel: DBIO[m.Model] = for {
ts <- DBIO.sequence(tableNamers.map(createTableBuilder))
tablesByQName = ts.map(t => t.meta.name -> t).toMap
builders = createBuilders(tablesByQName)
} yield m.Model(ts.sortBy(_.meta.name.name).map(_.buildModel(builders)))
def createBuilders(tablesByQName: Map[MQName, TableBuilder]) = new Builders(tablesByQName)
class Builders(val tablesByQName: Map[MQName, TableBuilder])
/** Converts from java.sql.Types w/ type name to the corresponding Java class name (with fully qualified path). */
def jdbcTypeToScala(jdbcType: Int, typeName: String = ""): ClassTag[_] = {
import java.sql.Types._
import scala.reflect.classTag
// see TABLE B-1 of JSR-000221 JBDCTM API Specification 4.1 Maintenance Release
// Mapping to corresponding Scala types where applicable
jdbcType match {
case CHAR | VARCHAR | LONGVARCHAR | NCHAR | NVARCHAR | LONGNVARCHAR => classTag[String]
case NUMERIC | DECIMAL => classTag[BigDecimal]
case BIT | BOOLEAN => classTag[Boolean]
case TINYINT => classTag[Byte]
case SMALLINT => classTag[Short]
case INTEGER => classTag[Int]
case BIGINT => classTag[Long]
case REAL => classTag[Float]
case FLOAT | DOUBLE => classTag[Double]
case BINARY | VARBINARY | LONGVARBINARY | BLOB => classTag[java.sql.Blob]
case DATE => classTag[java.sql.Date]
case TIME => classTag[java.sql.Time]
case TIMESTAMP => classTag[java.sql.Timestamp]
case CLOB => classTag[java.sql.Clob]
// case ARRAY => classTag[java.sql.Array]
// case STRUCT => classTag[java.sql.Struct]
// case REF => classTag[java.sql.Ref]
// case DATALINK => classTag[java.net.URL]
// case ROWID => classTag[java.sql.RowId]
// case NCLOB => classTag[java.sql.NClob]
// case SQLXML => classTag[java.sql.SQLXML]
case NULL => classTag[Null]
case DISTINCT => logger.warn(s"Found jdbc type DISTINCT. Assuming Blob. This may be wrong. You can override ModelBuilder#Table#Column#tpe to fix this."); classTag[java.sql.Blob] // FIXME
case t => logger.warn(s"Found unknown jdbc type $t. Assuming String. This may be wrong. You can override ModelBuilder#Table#Column#tpe to fix this."); classTag[String] // FIXME
}
}
///////////////////////////////////////////////////////////////////////////////////////////// Builder implementations
class TableNamer(val meta: MTable) {
/** Table name */
def name: String = meta.name.name
/** Optional table schema
* @group Basic customization overrides */
def schema: Option[String] = meta.name.schema
/** Optional table catalog
* @group Basic customization overrides */
def catalog = meta.name.catalog
/** Fully qualified table name */
final lazy val qualifiedName = m.QualifiedName(name,schema,catalog)
}
/** Table model builder
* @group Basic customization overrides */
class TableBuilder(val meta: MTable,
val namer: TableNamer,
val mColumns: Seq[MColumn],
val mPrimaryKeys: Seq[MPrimaryKey],
val mForeignKeys: Seq[Seq[MForeignKey]],
val mIndices: Seq[Seq[MIndexInfo]]) { table =>
// models
def buildModel(builders: Builders) = m.Table(namer.qualifiedName, columns, primaryKey, buildForeignKeys(builders), indices)
/** Column models in ordinal position order */
final lazy val columns: Seq[m.Column] = mColumns.map(c => createColumnBuilder(this, c).model)
/** Column models by name */
final lazy val columnsByName: Map[String,m.Column] = columns.map(c => c.name -> c).toMap
/** Primary key models in key sequence order */
final lazy val primaryKey: Option[m.PrimaryKey] = createPrimaryKeyBuilder(this, mPrimaryKeys).model
/** Foreign key models by key sequence order */
final def buildForeignKeys(builders: Builders) =
mForeignKeys.map(mf => createForeignKeyBuilder(this, mf).buildModel(builders)).flatten
/** Index models by ordinal position order */
final lazy val indices: Seq[m.Index] = mIndices.map(mi => createIndexBuilder(this, mi).model).flatten
}
/** Column model builder.
* @group Basic customization overrides */
class ColumnBuilder(tableBuilder: TableBuilder, meta: MColumn) {
/** Regex matcher to extract string out ouf surrounding '' */
final val StringPattern = """^'(.*)'$""".r
/** Scala type this column is mapped to */
def tpe = jdbcTypeToScala(meta.sqlType).toString match {
case "java.lang.String" => if(meta.size == Some(1)) "Char" else "String"
case t => t
}
def name = meta.name
/** Indicates whether this is a nullable column */
def nullable = meta.nullable.getOrElse(true)
/** Indicates whether this is an auto increment column */
def autoInc: Boolean = meta.isAutoInc.getOrElse(false)
/** Indicates whether a ColumnOption Primary key should be put into the model.
* Only valid for single column primary keys. */
def createPrimaryKeyColumnOption: Boolean =
tableBuilder.mPrimaryKeys.size == 1 && tableBuilder.mPrimaryKeys.head.column == meta.name
/** A (potentially non-portable) database column type for string types, this should not
* include a length ascription for other types it should */
def dbType: Option[String] = Some(meta.typeName)
/** Column length of string types */
def length: Option[Int] = if(tpe == "String") meta.size else None // Only valid for strings!
/** Indicates wether this should be a varchar in case of a string column.
* Currently defaults to true. Should be based on the value of dbType in the future. */
def varying: Boolean =
Seq(java.sql.Types.NVARCHAR, java.sql.Types.VARCHAR, java.sql.Types.LONGVARCHAR, java.sql.Types.LONGNVARCHAR) contains meta.sqlType
def rawDefault = meta.columnDef
/** The default value for the column. The outer option is used to indicate if a default value is given. The inner
* Option is used to allow giving None for a nullable column. This method must not return Some(None) for a
* non-nullable column.
*
* Default values for autoInc column are automatically ignored (as if returning None).
*
* If `ignoreInvalidDefaults = true`, Slick catches scala.MatchError and java.lang.NumberFormatException thrown by
* this method, logs the message and treats it as no default value for convenience. */
def default: Option[Option[Any]] = rawDefault.map { v =>
if(v == "NULL") None else {
// NOTE: When extending this list, please also extend the code generator accordingly
Some((v,tpe) match {
case (v,"Byte") => v.toByte
case (v,"Short") => v.toShort
case (v,"Int") => v.toInt
case (v,"Long") => v.toLong
case (v,"Double") => v.toDouble
case (v,"Float") => v.toFloat
case (v,"Char") =>
v.length match {
case 1 => v(0)
case 3 => v(1) // quoted character
}
case (v,"String") if meta.typeName == "CHAR" => v.head // FIXME: check length
case (v,"scala.math.BigDecimal") => v // FIXME: probably we shouldn't use a string here
case (StringPattern(str),"String") => str
case ("TRUE","Boolean") => true
case ("FALSE","Boolean") => false
})
}
}
private def formatDefault(v:Any) =
s" default value $v for column ${tableBuilder.namer.qualifiedName.asString}.$name of type $tpe, meta data: "+meta.toString
/** The default value for the column as a ColumnOption Default or None if no default. The value wrapped by
* ColumnOption Default needs to be an Option in case of a nullable column but can't be an Option in case of a
* non-nullable Column.
*
* Default values for autoInc columns are automatically ignored.
*
* If `ignoreInvalidDefaults = true`, Slick catches scala.MatchError and java.lang.NumberFormatException thrown by
* this method, logs the message and treats it as no default value for convenience. */
def defaultColumnOption: Option[RelationalProfile.ColumnOption.Default[_]] = rawDefault.map(v => (v,tpe)).collect {
case (v,_) if Seq("NOW","CURRENT_TIMESTAMP","CURRENT_DATE","CURRENT_TIME").contains(v.stripSuffix("()").toUpperCase) =>
logger.debug(s"Ignoring"+formatDefault(v))
None
}.getOrElse {
default.map( d =>
RelationalProfile.ColumnOption.Default(
if(nullable) d
else d.getOrElse(throw new SlickException(s"Invalid default value $d for non-nullable column ${tableBuilder.namer.qualifiedName.asString}.$name of type $tpe, meta data: "+meta.toString))
)
)
}
private def convenientDefault: Option[RelationalProfile.ColumnOption.Default[_]] =
try defaultColumnOption catch {
case e: java.lang.NumberFormatException if ignoreInvalidDefaults =>
logger.debug(s"NumberFormatException: Could not parse"+formatDefault(rawDefault))
None
case e: scala.MatchError =>
val msg = "Could not parse" + formatDefault(rawDefault)
if(ignoreInvalidDefaults) {
logger.debug(s"SlickException: $msg")
None
} else throw new SlickException(msg, e)
}
def model = m.Column(name=name, table=tableBuilder.namer.qualifiedName, tpe=tpe, nullable=nullable,
options = Set() ++
dbType.map(SqlProfile.ColumnOption.SqlType) ++
(if(autoInc) Some(ColumnOption.AutoInc) else None) ++
(if(createPrimaryKeyColumnOption) Some(ColumnOption.PrimaryKey) else None) ++
length.map(RelationalProfile.ColumnOption.Length.apply(_,varying=varying)) ++
(if(!autoInc) convenientDefault else None) )
}
class PrimaryKeyBuilder(tableBuilder: TableBuilder, meta: Seq[MPrimaryKey]){
/** Indicates wether a primary key should be generated. Disabled by default for single column primary keys in favor
* of ColumnOption PrimaryKey via Column#createPrimaryKeyColumnOption. */
def enabled: Boolean = meta.size > 1
def name: Option[String] = meta.head.pkName.filter(_ != "")
def columns = meta.map(_.column)
// single column primary keys excluded in favor of PrimaryKey column option
final def model: Option[m.PrimaryKey] = if(!enabled) None else Some(
m.PrimaryKey(name, tableBuilder.namer.qualifiedName,columns.map(tableBuilder.columnsByName))
)
}
class ForeignKeyBuilder(tableBuilder: TableBuilder, meta: Seq[MForeignKey]) {
private val fk = meta.head
def enabled: Boolean = true
def name: Option[String] = fk.fkName.filter(_ != "")
def referencedColumns = meta.map(_.fkColumn)
private val referencingColumns = meta.map(_.pkColumn)
assert(referencingColumns.size == referencedColumns.size)
def updateRule: m.ForeignKeyAction = fk.updateRule
def deleteRule: m.ForeignKeyAction = fk.deleteRule
final def buildModel(builders: Builders): Option[m.ForeignKey] = {
assert(meta.size >= 1)
assert(tableBuilder.namer.qualifiedName == tableNamersByQName(fk.fkTable).qualifiedName)
if(!enabled) None else Some(m.ForeignKey(
name,
tableBuilder.namer.qualifiedName,
referencedColumns.map(tableBuilder.columnsByName),
tableNamersByQName(fk.pkTable).qualifiedName,
referencingColumns.map(builders.tablesByQName(fk.pkTable).columnsByName),
updateRule,
deleteRule
))
}
}
class IndexBuilder(tableBuilder: TableBuilder, meta: Seq[MIndexInfo]) {
private val idx = meta.head
assert(meta.size >= 1)
assert(meta.forall(_.indexName == idx.indexName))
assert(meta.forall(_.nonUnique == idx.nonUnique))
/** Indicates wether an index should be generated. Disabled by default for:
* - indexType == tableIndexStatistic
* - indices matching primary key
* - non-unique indices matching foreign keys referencing columns
* - indices matching foreign keys referenced columns */
def enabled = (
idx.indexType != DatabaseMetaData.tableIndexStatistic &&
(tableBuilder.mPrimaryKeys.isEmpty || tableBuilder.mPrimaryKeys.map(_.column).toSet != columns.toSet) &&
// preserve additional uniqueness constraints on (usually not unique) fk columns
(unique || tableBuilder.mForeignKeys.forall(_.map(_.fkColumn).toSet != columns.toSet)) &&
// postgres may refer to column oid, skipping index for now. Maybe we should generate a column and include it
// instead. And maybe this should be moved into PostgresModelBuilder.
// TODO: This needs a test case!
columns.forall(tableBuilder.columnsByName.isDefinedAt)
)
def unique = !idx.nonUnique
def columns = meta.flatMap(_.column)
def name = idx.indexName.filter(_ != "")
final def model: Option[m.Index] =
if(!enabled) None
else Some(m.Index(name, tableBuilder.namer.qualifiedName, columns.map(tableBuilder.columnsByName), unique))
}
}
| knoldus/slick-1 | slick/src/main/scala/slick/jdbc/JdbcModelBuilder.scala | Scala | bsd-2-clause | 17,626 |
package x0
class x0 {
def x1 =
x2 match
]
case x3[] => x0
| som-snytt/dotty | tests/fuzzy/parser-stability-15.scala | Scala | apache-2.0 | 58 |
/*
* Copyright (c) 2017 Magomed Abdurakhmanov, Hypertino
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
*/
package com.hypertino.hyperbus.transport.api
trait ServiceEndpoint {
def hostname: String
def port: Option[Int]
}
| hypertino/hyperbus | hyperbus/src/main/scala/com/hypertino/hyperbus/transport/api/ServiceEndpoint.scala | Scala | mpl-2.0 | 387 |
package cpup.mc.lib.network
import net.minecraft.network.PacketBuffer
trait CPupMessage[DATA <: AnyRef] {
// def this(EntityPlayer, PacketBuffer, DATA)
def writeTo(buf: PacketBuffer)
def handle(data: DATA): Option[CPupMessage[DATA]]
}
| CoderPuppy/cpup-mc | src/main/scala/cpup/mc/lib/network/CPupMessage.scala | Scala | mit | 240 |
package cz.kamenitxan.labelprinter.generators.impl.i13x5
import cz.kamenitxan.labelprinter.generators.Generators
import cz.kamenitxan.labelprinter.utils.AltXAddress
class AltxBonusInfo extends InkBonusInfo with AltXAddress {
override def getFolderName: String = Generators.INK_ALTX_INFO.folder
override val firm: String = company
}
| kamenitxan/Label-printer | src/main/java/cz/kamenitxan/labelprinter/generators/impl/i13x5/AltxBonusInfo.scala | Scala | bsd-3-clause | 336 |
/*
* Copyright 2013 Commonwealth Computer Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package geomesa.core
import org.apache.accumulo.core.data.Value
import org.apache.hadoop.io.Text
package object data {
val INSTANCE_ID = "geomesa.instance.id"
val ZOOKEEPERS = "geomesa.zookeepers"
val ACCUMULO_USER = "geomesa.user"
val ACCUMULO_PASS = "geomesa.pass"
val AUTHS = "geomesa.auths"
val TABLE = "geomesa.table"
val FEATURE_NAME = "geomesa.feature.name"
val ATTRIBUTES_CF = new Text("attributes")
val BOUNDS_CF = new Text("bounds")
val SCHEMA_CF = new Text("schema")
val METADATA_TAG = "~METADATA"
val EMPTY_STRING = ""
val EMPTY_VALUE = new Value(Array[Byte]())
val EMPTY_COLQ = new Text(EMPTY_STRING)
val WHOLE_WORLD_BOUNDS = "-180.0:180.0:-90.0:90.0"
val UNLIKELY_FIRST_ROWID = "\\u0000"
val UNLIKELY_LAST_ROWID = "~"*50
}
| anthonyccri/geomesa | geomesa-core/src/main/scala/geomesa/core/data/package.scala | Scala | apache-2.0 | 1,389 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.stream.table
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.typeutils.{GenericTypeInfo, RowTypeInfo}
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.datastream.DataStream
import org.apache.flink.streaming.api.environment.{StreamExecutionEnvironment => JExecEnv}
import org.apache.flink.streaming.api.functions.ProcessFunction
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.api.internal.TableEnvironmentInternal
import org.apache.flink.table.runtime.utils.{CommonTestData, StreamITCase}
import org.apache.flink.table.sources.StreamTableSource
import org.apache.flink.table.utils._
import org.apache.flink.test.util.AbstractTestBase
import org.apache.flink.types.Row
import org.apache.flink.util.Collector
import org.apache.calcite.runtime.SqlFunctions.{internalToTimestamp => toTimestamp}
import org.junit.Assert._
import org.junit.{Before, Test}
import java.lang.{Boolean => JBool, Integer => JInt, Long => JLong}
import scala.collection.JavaConverters._
class TableSourceITCase extends AbstractTestBase {
@Before
def setup(): Unit = {
StreamITCase.clear
}
@Test(expected = classOf[TableException])
def testInvalidDatastreamType(): Unit = {
val env = StreamExecutionEnvironment.getExecutionEnvironment
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
val tableSource = new StreamTableSource[Row]() {
private val fieldNames: Array[String] = Array("name", "id", "value")
private val fieldTypes: Array[TypeInformation[_]] = Array(Types.STRING, Types.LONG, Types.INT)
.asInstanceOf[Array[TypeInformation[_]]]
override def getDataStream(execEnv: JExecEnv): DataStream[Row] = {
val data = List(Row.of("Mary", new JLong(1L), new JInt(1))).asJava
// return DataStream[Row] with GenericTypeInfo
execEnv.fromCollection(data, new GenericTypeInfo[Row](classOf[Row]))
}
override def getReturnType: TypeInformation[Row] = new RowTypeInfo(fieldTypes, fieldNames)
override def getTableSchema: TableSchema = new TableSchema(fieldNames, fieldTypes)
}
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("T", tableSource)
tEnv.scan("T")
.select('value, 'name)
.addSink(new StreamITCase.StringSink[Row])
env.execute()
// test should fail because type info of returned DataStream does not match type return type
// info.
}
@Test
def testUnregisteredCsvTableSource(): Unit = {
val csvTable = CommonTestData.getCsvTableSource
val env = StreamExecutionEnvironment.getExecutionEnvironment
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
tEnv.fromTableSource(csvTable)
.where('id > 4)
.select('last, 'score * 2)
.toAppendStream[Row]
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq(
"Williams,69.0",
"Miller,13.56",
"Smith,180.2",
"Williams,4.68")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testCsvTableSource(): Unit = {
val csvTable = CommonTestData.getCsvTableSource
val env = StreamExecutionEnvironment.getExecutionEnvironment
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("csvTable", csvTable)
tEnv.scan("csvTable")
.where('id > 4)
.select('last, 'score * 2)
.toAppendStream[Row]
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq(
"Williams,69.0",
"Miller,13.56",
"Smith,180.2",
"Williams,4.68")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testCsvTableSourceWithFilterable(): Unit = {
val tableName = "MyTable"
val env = StreamExecutionEnvironment.getExecutionEnvironment
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
tableName, TestFilterableTableSource())
tEnv.scan(tableName)
.where($"amount" > 4 && $"price" < 9)
.select($"id", $"name")
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq("5,Record_5", "6,Record_6", "7,Record_7", "8,Record_8")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testRowtimeRowTableSource(): Unit = {
val tableName = "MyTable"
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
val data = Seq(
Row.of("Mary", new JLong(1L), new JInt(10)),
Row.of("Bob", new JLong(2L), new JInt(20)),
Row.of("Mary", new JLong(2L), new JInt(30)),
Row.of("Liz", new JLong(2001L), new JInt(40)))
val fieldNames = Array("name", "rtime", "amount")
val schema = new TableSchema(fieldNames, Array(Types.STRING, Types.SQL_TIMESTAMP, Types.INT))
val rowType = new RowTypeInfo(
Array(Types.STRING, Types.LONG, Types.INT).asInstanceOf[Array[TypeInformation[_]]],
fieldNames)
val tableSource = new TestTableSourceWithTime(schema, rowType, data, "rtime", null)
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)
tEnv.scan(tableName)
.window(Tumble over 1.second on 'rtime as 'w)
.groupBy('name, 'w)
.select('name, 'w.start, 'amount.sum)
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq(
"Mary,1970-01-01 00:00:00.0,40",
"Bob,1970-01-01 00:00:00.0,20",
"Liz,1970-01-01 00:00:02.0,40")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testProctimeRowTableSource(): Unit = {
val tableName = "MyTable"
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
val data = Seq(
Row.of("Mary", new JLong(1L), new JInt(10)),
Row.of("Bob", new JLong(2L), new JInt(20)),
Row.of("Mary", new JLong(2L), new JInt(30)),
Row.of("Liz", new JLong(2001L), new JInt(40)))
val fieldNames = Array("name", "rtime", "amount")
val schema = new TableSchema(
fieldNames :+ "ptime",
Array(Types.STRING, Types.LONG, Types.INT, Types.SQL_TIMESTAMP))
val rowType = new RowTypeInfo(
Array(Types.STRING, Types.LONG, Types.INT).asInstanceOf[Array[TypeInformation[_]]],
fieldNames)
val tableSource = new TestTableSourceWithTime(schema, rowType, data, null, "ptime")
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)
tEnv.scan(tableName)
.where('ptime.cast(Types.LONG) > 0L)
.select('name, 'amount)
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq(
"Mary,10",
"Bob,20",
"Mary,30",
"Liz,40")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testRowtimeProctimeRowTableSource(): Unit = {
val tableName = "MyTable"
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
val data = Seq(
Row.of("Mary", new JLong(1L), new JInt(10)),
Row.of("Bob", new JLong(2L), new JInt(20)),
Row.of("Mary", new JLong(2L), new JInt(30)),
Row.of("Liz", new JLong(2001L), new JInt(40)))
val fieldNames = Array("name", "rtime", "amount")
val schema = new TableSchema(
fieldNames :+ "ptime",
Array(Types.STRING, Types.SQL_TIMESTAMP, Types.INT, Types.SQL_TIMESTAMP))
val rowType = new RowTypeInfo(
Array(Types.STRING, Types.LONG, Types.INT).asInstanceOf[Array[TypeInformation[_]]],
fieldNames)
val tableSource = new TestTableSourceWithTime(schema, rowType, data, "rtime", "ptime")
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)
tEnv.scan(tableName)
.window(Tumble over 1.second on 'rtime as 'w)
.groupBy('name, 'w)
.select('name, 'w.start, 'amount.sum)
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq(
"Mary,1970-01-01 00:00:00.0,40",
"Bob,1970-01-01 00:00:00.0,20",
"Liz,1970-01-01 00:00:02.0,40")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testRowtimeAsTimestampRowTableSource(): Unit = {
val tableName = "MyTable"
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
val data = Seq(
Row.of("Mary", toTimestamp(1L), new JInt(10)),
Row.of("Bob", toTimestamp(2L), new JInt(20)),
Row.of("Mary", toTimestamp(2L), new JInt(30)),
Row.of("Liz", toTimestamp(2001L), new JInt(40)))
val fieldNames = Array("name", "rtime", "amount")
val schema = new TableSchema(fieldNames, Array(Types.STRING, Types.SQL_TIMESTAMP, Types.INT))
val rowType = new RowTypeInfo(
Array(Types.STRING, Types.SQL_TIMESTAMP, Types.INT).asInstanceOf[Array[TypeInformation[_]]],
fieldNames)
val tableSource = new TestTableSourceWithTime(schema, rowType, data, "rtime", null)
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)
tEnv.scan(tableName)
.window(Tumble over 1.second on 'rtime as 'w)
.groupBy('name, 'w)
.select('name, 'w.start, 'amount.sum)
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq(
"Mary,1970-01-01 00:00:00.0,40",
"Bob,1970-01-01 00:00:00.0,20",
"Liz,1970-01-01 00:00:02.0,40")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testRowtimeLongTableSource(): Unit = {
val tableName = "MyTable"
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
val data = Seq(new JLong(1L), new JLong(2L), new JLong(2L), new JLong(2001L), new JLong(4001L))
val schema = new TableSchema(Array("rtime"), Array(Types.SQL_TIMESTAMP))
val returnType = Types.LONG
val tableSource = new TestTableSourceWithTime(schema, returnType, data, "rtime", null)
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)
tEnv.scan(tableName)
.window(Tumble over 1.second on 'rtime as 'w)
.groupBy('w)
.select('w.start, 1.count)
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq(
"1970-01-01 00:00:00.0,3",
"1970-01-01 00:00:02.0,1",
"1970-01-01 00:00:04.0,1")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testRowtimeStringTableSource(): Unit = {
val tableName = "MyTable"
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
val data = Seq(
"1970-01-01 00:00:00",
"1970-01-01 00:00:01",
"1970-01-01 00:00:01",
"1970-01-01 00:00:02",
"1970-01-01 00:00:04")
val schema = new TableSchema(Array("rtime"), Array(Types.SQL_TIMESTAMP))
val returnType = Types.STRING
val tableSource = new TestTableSourceWithTime(schema, returnType, data, "rtime", null)
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)
tEnv.scan(tableName)
.window(Tumble over 1.second on 'rtime as 'w)
.groupBy('w)
.select('w.start, 1.count)
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq(
"1970-01-01 00:00:00.0,1",
"1970-01-01 00:00:01.0,2",
"1970-01-01 00:00:02.0,1",
"1970-01-01 00:00:04.0,1")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testProctimeStringTableSource(): Unit = {
val tableName = "MyTable"
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
val data = Seq("Mary", "Peter", "Bob", "Liz")
val schema = new TableSchema(Array("name", "ptime"), Array(Types.STRING, Types.SQL_TIMESTAMP))
val returnType = Types.STRING
val tableSource = new TestTableSourceWithTime(schema, returnType, data, null, "ptime")
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)
tEnv.scan(tableName)
.where('ptime.cast(Types.LONG) > 1)
.select('name)
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq("Mary", "Peter", "Bob", "Liz")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testRowtimeProctimeLongTableSource(): Unit = {
val tableName = "MyTable"
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
val data = Seq(new JLong(1L), new JLong(2L), new JLong(2L), new JLong(2001L), new JLong(4001L))
val schema = new TableSchema(
Array("rtime", "ptime"),
Array(Types.SQL_TIMESTAMP, Types.SQL_TIMESTAMP))
val returnType = Types.LONG
val tableSource = new TestTableSourceWithTime(schema, returnType, data, "rtime", "ptime")
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)
tEnv.scan(tableName)
.where('ptime.cast(Types.LONG) > 1)
.window(Tumble over 1.second on 'rtime as 'w)
.groupBy('w)
.select('w.start, 1.count)
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq(
"1970-01-01 00:00:00.0,3",
"1970-01-01 00:00:02.0,1",
"1970-01-01 00:00:04.0,1")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testFieldMappingTableSource(): Unit = {
val tableName = "MyTable"
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
val data = Seq(
Row.of("Mary", new JLong(1L), new JInt(10)),
Row.of("Bob", new JLong(2L), new JInt(20)),
Row.of("Mary", new JLong(2L), new JInt(30)),
Row.of("Liz", new JLong(2001L), new JInt(40)))
val schema = new TableSchema(
Array("ptime", "amount", "name", "rtime"),
Array(Types.SQL_TIMESTAMP, Types.INT, Types.STRING, Types.SQL_TIMESTAMP))
val returnType = new RowTypeInfo(Types.STRING, Types.LONG, Types.INT)
val mapping = Map("amount" -> "f2", "name" -> "f0", "rtime" -> "f1")
val source = new TestTableSourceWithTime(schema, returnType, data, "rtime", "ptime", mapping)
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, source)
tEnv.scan(tableName)
.window(Tumble over 1.second on 'rtime as 'w)
.groupBy('name, 'w)
.select('name, 'w.start, 'amount.sum)
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq(
"Mary,1970-01-01 00:00:00.0,40",
"Bob,1970-01-01 00:00:00.0,20",
"Liz,1970-01-01 00:00:02.0,40")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testProjectWithoutRowtimeProctime(): Unit = {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
val data = Seq(
Row.of(new JInt(1), "Mary", new JLong(10L), new JLong(1)),
Row.of(new JInt(2), "Bob", new JLong(20L), new JLong(2)),
Row.of(new JInt(3), "Mike", new JLong(30L), new JLong(2)),
Row.of(new JInt(4), "Liz", new JLong(40L), new JLong(2001)))
val tableSchema = new TableSchema(
Array("id", "rtime", "val", "ptime", "name"),
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.INT, Types.STRING, Types.LONG, Types.LONG)
.asInstanceOf[Array[TypeInformation[_]]],
Array("id", "name", "val", "rtime"))
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"T",
new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))
tEnv.scan("T")
.select('name, 'val, 'id)
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq(
"Mary,10,1",
"Bob,20,2",
"Mike,30,3",
"Liz,40,4")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testProjectWithoutProctime(): Unit = {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
val data = Seq(
Row.of(new JInt(1), "Mary", new JLong(10L), new JLong(1)),
Row.of(new JInt(2), "Bob", new JLong(20L), new JLong(2)),
Row.of(new JInt(3), "Mike", new JLong(30L), new JLong(2)),
Row.of(new JInt(4), "Liz", new JLong(40L), new JLong(2001)))
val tableSchema = new TableSchema(
Array("id", "rtime", "val", "ptime", "name"),
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.INT, Types.STRING, Types.LONG, Types.LONG)
.asInstanceOf[Array[TypeInformation[_]]],
Array("id", "name", "val", "rtime"))
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"T",
new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))
tEnv.scan("T")
.select('rtime, 'name, 'id)
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq(
"1970-01-01 00:00:00.001,Mary,1",
"1970-01-01 00:00:00.002,Bob,2",
"1970-01-01 00:00:00.002,Mike,3",
"1970-01-01 00:00:02.001,Liz,4")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testProjectWithoutRowtime(): Unit = {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
val data = Seq(
Row.of(new JInt(1), "Mary", new JLong(10L), new JLong(1)),
Row.of(new JInt(2), "Bob", new JLong(20L), new JLong(2)),
Row.of(new JInt(3), "Mike", new JLong(30L), new JLong(2)),
Row.of(new JInt(4), "Liz", new JLong(40L), new JLong(2001)))
val tableSchema = new TableSchema(
Array("id", "rtime", "val", "ptime", "name"),
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.INT, Types.STRING, Types.LONG, Types.LONG)
.asInstanceOf[Array[TypeInformation[_]]],
Array("id", "name", "val", "rtime"))
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"T",
new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))
tEnv.scan("T")
.filter('ptime.cast(Types.LONG) > 0)
.select('name, 'id)
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq(
"Mary,1",
"Bob,2",
"Mike,3",
"Liz,4")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
def testProjectOnlyProctime(): Unit = {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
val data = Seq(
Row.of(new JInt(1), new JLong(1), new JLong(10L), "Mary"),
Row.of(new JInt(2), new JLong(2L), new JLong(20L), "Bob"),
Row.of(new JInt(3), new JLong(2L), new JLong(30L), "Mike"),
Row.of(new JInt(4), new JLong(2001L), new JLong(30L), "Liz"))
val tableSchema = new TableSchema(
Array("id", "rtime", "val", "ptime", "name"),
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.INT, Types.LONG, Types.LONG, Types.STRING)
.asInstanceOf[Array[TypeInformation[_]]],
Array("id", "rtime", "val", "name"))
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"T",
new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))
tEnv.scan("T")
.select('ptime > 0)
.select(1.count)
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq("4")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
def testProjectOnlyRowtime(): Unit = {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
val data = Seq(
Row.of(new JInt(1), new JLong(1), new JLong(10L), "Mary"),
Row.of(new JInt(2), new JLong(2L), new JLong(20L), "Bob"),
Row.of(new JInt(3), new JLong(2L), new JLong(30L), "Mike"),
Row.of(new JInt(4), new JLong(2001L), new JLong(30L), "Liz"))
val tableSchema = new TableSchema(
Array("id", "rtime", "val", "ptime", "name"),
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.INT, Types.LONG, Types.LONG, Types.STRING)
.asInstanceOf[Array[TypeInformation[_]]],
Array("id", "rtime", "val", "name"))
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"T",
new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))
tEnv.scan("T")
.select('rtime)
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq(
"1970-01-01 00:00:00.001",
"1970-01-01 00:00:00.002",
"1970-01-01 00:00:00.002",
"1970-01-01 00:00:02.001")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testProjectWithMapping(): Unit = {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
val data = Seq(
Row.of(new JLong(1), new JInt(1), "Mary", new JLong(10)),
Row.of(new JLong(2), new JInt(2), "Bob", new JLong(20)),
Row.of(new JLong(2), new JInt(3), "Mike", new JLong(30)),
Row.of(new JLong(2001), new JInt(4), "Liz", new JLong(40)))
val tableSchema = new TableSchema(
Array("id", "rtime", "val", "ptime", "name"),
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.LONG, Types.INT, Types.STRING, Types.LONG)
.asInstanceOf[Array[TypeInformation[_]]],
Array("p-rtime", "p-id", "p-name", "p-val"))
val mapping = Map("rtime" -> "p-rtime", "id" -> "p-id", "val" -> "p-val", "name" -> "p-name")
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"T",
new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime", mapping))
tEnv.scan("T")
.select('name, 'rtime, 'val)
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq(
"Mary,1970-01-01 00:00:00.001,10",
"Bob,1970-01-01 00:00:00.002,20",
"Mike,1970-01-01 00:00:00.002,30",
"Liz,1970-01-01 00:00:02.001,40")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testNestedProject(): Unit = {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
val data = Seq(
Row.of(new JLong(1),
Row.of(
Row.of("Sarah", new JInt(100)),
Row.of(new JInt(1000), new JBool(true))
),
Row.of("Peter", new JInt(10000)),
"Mary"),
Row.of(new JLong(2),
Row.of(
Row.of("Rob", new JInt(200)),
Row.of(new JInt(2000), new JBool(false))
),
Row.of("Lucy", new JInt(20000)),
"Bob"),
Row.of(new JLong(3),
Row.of(
Row.of("Mike", new JInt(300)),
Row.of(new JInt(3000), new JBool(true))
),
Row.of("Betty", new JInt(30000)),
"Liz"))
val nested1 = new RowTypeInfo(
Array(Types.STRING, Types.INT).asInstanceOf[Array[TypeInformation[_]]],
Array("name", "value")
)
val nested2 = new RowTypeInfo(
Array(Types.INT, Types.BOOLEAN).asInstanceOf[Array[TypeInformation[_]]],
Array("num", "flag")
)
val deepNested = new RowTypeInfo(
Array(nested1, nested2).asInstanceOf[Array[TypeInformation[_]]],
Array("nested1", "nested2")
)
val tableSchema = new TableSchema(
Array("id", "deepNested", "nested", "name"),
Array(Types.LONG, deepNested, nested1, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.LONG, deepNested, nested1, Types.STRING).asInstanceOf[Array[TypeInformation[_]]],
Array("id", "deepNested", "nested", "name"))
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"T",
new TestNestedProjectableTableSource(tableSchema, returnType, data))
tEnv
.scan("T")
.select('id,
'deepNested.get("nested1").get("name") as 'nestedName,
'nested.get("value") as 'nestedValue,
'deepNested.get("nested2").get("flag") as 'nestedFlag,
'deepNested.get("nested2").get("num") as 'nestedNum)
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq(
"1,Sarah,10000,true,1000",
"2,Rob,20000,false,2000",
"3,Mike,30000,true,3000")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testRowtimeTableSourcePreserveWatermarks(): Unit = {
val tableName = "MyTable"
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val settings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv = StreamTableEnvironment.create(env, settings)
// rows with timestamps and watermarks
val data = Seq(
Right(1L),
Left(5L, Row.of(new JInt(1), new JLong(5), "A")),
Left(2L, Row.of(new JInt(2), new JLong(1), "B")),
Right(10L),
Left(8L, Row.of(new JInt(6), new JLong(8), "C")),
Right(20L),
Left(21L, Row.of(new JInt(6), new JLong(21), "D")),
Right(30L)
)
val fieldNames = Array("id", "rtime", "name")
val schema = new TableSchema(fieldNames, Array(Types.INT, Types.SQL_TIMESTAMP, Types.STRING))
val rowType = new RowTypeInfo(
Array(Types.INT, Types.LONG, Types.STRING).asInstanceOf[Array[TypeInformation[_]]],
fieldNames)
val tableSource = new TestPreserveWMTableSource(schema, rowType, data, "rtime")
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, tableSource)
tEnv.scan(tableName)
.where('rtime.cast(Types.LONG) > 3L)
.select('id, 'name)
.toAppendStream[Row]
// append current watermark to each row to verify that original watermarks were preserved
.process(new ProcessFunction[Row, (Row, Long)] {
override def processElement(
value: Row,
ctx: ProcessFunction[Row, (Row, Long)]#Context,
out: Collector[(Row, Long)]): Unit = {
out.collect(value, ctx.timerService().currentWatermark())
}
})
.addSink(new StreamITCase.StringSink[(Row, Long)])
env.execute()
val expected = Seq("(1,A,1)", "(6,C,10)", "(6,D,20)")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
}
| tzulitai/flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/table/TableSourceITCase.scala | Scala | apache-2.0 | 31,222 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.common
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.planner.utils.{TableTestBase, TableTestUtil}
import org.junit.Test
import java.sql.Timestamp
/**
* Test for UNNEST queries.
*/
abstract class UnnestTestBase extends TableTestBase {
protected val util: TableTestUtil = getTableTestUtil
protected def getTableTestUtil: TableTestUtil
@Test
def testUnnestPrimitiveArrayFromTable(): Unit = {
util.addTableSource[(Int, Array[Int], Array[Array[Int]])]("MyTable", 'a, 'b, 'c)
util.verifyPlan("SELECT a, b, s FROM MyTable, UNNEST(MyTable.b) AS A (s)")
}
@Test
def testUnnestArrayOfArrayFromTable(): Unit = {
util.addTableSource[(Int, Array[Int], Array[Array[Int]])]("MyTable", 'a, 'b, 'c)
util.verifyPlan("SELECT a, s FROM MyTable, UNNEST(MyTable.c) AS A (s)")
}
@Test
def testUnnestObjectArrayFromTableWithFilter(): Unit = {
util.addTableSource[(Int, Array[(Int, String)])]("MyTable", 'a, 'b)
util.verifyPlan("SELECT a, b, s, t FROM MyTable, UNNEST(MyTable.b) AS A (s, t) WHERE s > 13")
}
@Test
def testUnnestMultiSetFromCollectResult(): Unit = {
util.addDataStream[(Int, Int, (Int, String))]("MyTable", 'a, 'b, 'c)
val sqlQuery =
"""
|WITH T AS (SELECT b, COLLECT(c) as `set` FROM MyTable GROUP BY b)
|SELECT b, id, point FROM T, UNNEST(T.`set`) AS A(id, point) WHERE b < 3
""".stripMargin
util.verifyPlan(sqlQuery)
}
@Test
def testLeftUnnestMultiSetFromCollectResult(): Unit = {
util.addDataStream[(Int, String, String)]("MyTable", 'a, 'b, 'c)
val sqlQuery =
"""
|WITH T AS (SELECT a, COLLECT(b) as `set` FROM MyTable GROUP BY a)
|SELECT a, s FROM T LEFT JOIN UNNEST(T.`set`) AS A(s) ON TRUE WHERE a < 5
""".stripMargin
util.verifyPlan(sqlQuery)
}
@Test
def testTumbleWindowAggregateWithCollectUnnest(): Unit = {
util.addDataStream[(Int, Long, String, Timestamp)]("MyTable", 'a, 'b, 'c, 'rowtime.rowtime)
val sqlQuery =
"""
|WITH T AS (SELECT b, COLLECT(b) as `set`
| FROM MyTable
| GROUP BY b, TUMBLE(rowtime, INTERVAL '3' SECOND)
|)
|SELECT b, s FROM T, UNNEST(T.`set`) AS A(s) where b < 3
""".stripMargin
util.verifyPlan(sqlQuery)
}
@Test
def testCrossWithUnnest(): Unit = {
util.addTableSource[(Int, Long, Array[String])]("MyTable", 'a, 'b, 'c)
util.verifyPlan("SELECT a, s FROM MyTable, UNNEST(MyTable.c) as A (s)")
}
@Test
def testCrossWithUnnestForMap(): Unit = {
util.addTableSource("MyTable",
Array[TypeInformation[_]](Types.INT,
Types.LONG,
Types.MAP(Types.STRING, Types.STRING)),
Array("a", "b", "c"))
util.verifyPlan("SELECT a, b, v FROM MyTable CROSS JOIN UNNEST(c) as f(k, v)")
}
@Test
def testJoinWithUnnestOfTuple(): Unit = {
util.addTableSource[(Int, Array[(Int, String)])]("MyTable", 'a, 'b)
val sqlQuery =
"""
|SELECT a, b, x, y FROM
| (SELECT a, b FROM MyTable WHERE a < 3) as tf,
| UNNEST(tf.b) as A (x, y)
|WHERE x > a
""".stripMargin
util.verifyPlan(sqlQuery)
}
@Test
def testUnnestObjectArrayWithoutAlias(): Unit = {
util.addTableSource[(Int, Array[(Int, String)])]("MyTable", 'a, 'b)
util.verifyPlan("SELECT a, b, A._1, A._2 FROM MyTable, UNNEST(MyTable.b) AS A where A._1 > 1")
}
}
| tzulitai/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/common/UnnestTestBase.scala | Scala | apache-2.0 | 4,344 |
// ' Project: smath
// Module: commons / linear
// Description: Implementation of MatD using Apache math-commons
//
// Copyright (c) 2015 Johannes Kastner <jokade@karchedon.de>
package biz.enef.smath.linear.commons
import biz.enef.smath.linear.{Vec, Mat, MatD}
import biz.enef.smath.linear.MatD
import org.apache.commons.math3.linear.{Array2DRowRealMatrix, RealMatrix}
class MatDCommons(protected[commons] val mat: RealMatrix) extends MatD {
def this(rows: Int, cols: Int) = this(new Array2DRowRealMatrix(rows,cols))
override def copy(): MatD = new MatDCommons( mat.copy() )
@inline
override def rows: Int = mat.getRowDimension
@inline
override def cols: Int = mat.getColumnDimension
@inline
override def size: Int = rows*cols
@inline
override def apply(row: Int, col: Int): Double = mat.getEntry(row,col)
@inline
override def update(row: Int, col: Int, v: Double): Unit = mat.setEntry(row,col,v)
override def operate(v: Vec[Double]): Vec[Double] = v match {
case v: VecDCommons => operate(v)
case _ => ???
}
@inline
def operate(v: VecDCommons) : VecDCommons = new VecDCommons(mat.operate(v.vec))
override def multiply(m: Mat[Double]): Mat[Double] = m match {
case m: MatDCommons => multiply(m)
case _ => ???
}
@inline
def multiply(m: MatDCommons) : MatDCommons = new MatDCommons(mat.multiply(m.mat))
//override def analyzeEigenvalues() : EigenvalueAnalysisCommons = new EigenvalueAnalysisCommons(this)
override def toString() = mat.toString
}
| jokade/smath | commons/src/main/scala/biz/enef/smath/linear/commons/MatDCommons.scala | Scala | mit | 1,526 |
/*
* This is an initialization file for the Atlas Shell. Each time a Shell linked
* with this project is opened or restarted, the code in this file will be run as scala code. Below
* is included the default initialization code for the Shell. As long as this file exists only
* the code in this file will be run on Shell startup; this default code will not be run if you
* remove it from this file.
*
* You do not need to put initialization code in a scala object or class.
*/
import com.ensoftcorp.atlas.core.query.Q
import com.ensoftcorp.atlas.core.query.Attr
import com.ensoftcorp.atlas.core.query.Attr.Edge
import com.ensoftcorp.atlas.core.query.Attr.Node
import com.ensoftcorp.atlas.core.script.Common
import com.ensoftcorp.atlas.core.script.Common._
import com.ensoftcorp.atlas.ui.shell.lib.Common
import com.ensoftcorp.atlas.ui.shell.lib.Common._
import com.ensoftcorp.atlas.core.script.CommonQueries
import com.ensoftcorp.atlas.core.script.CommonQueries._
import com.ensoftcorp.atlas.core.db.Accuracy._
import com.ensoftcorp.atlas.core.db.graph.Graph
// color for graph highlighting
import java.awt.Color
// android essentials
import com.ensoftcorp.open.android.essentials._
import com.ensoftcorp.open.android.essentials.permissions._
import com.ensoftcorp.open.android.essentials.permissions.mappings._
// toolbox.analysis project
import toolbox.analysis._ | MaTriXy/android-essentials-toolbox | example.shell/shellInit.scala | Scala | mit | 1,379 |
/*
* Copyright © 2015-2019 the contributors (see Contributors.md).
*
* This file is part of Knora.
*
* Knora is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Knora is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public
* License along with Knora. If not, see <http://www.gnu.org/licenses/>.
*/
package org.knora.webapi
import akka.http.scaladsl.model.{ContentType, HttpCharsets, MediaType, MediaTypes}
/**
* Represents media types supported by the Knora API server for representing RDF data, and provides
* convenience methods for transforming media types.
*/
object RdfMediaTypes {
val `application/json`: MediaType.WithFixedCharset = MediaTypes.`application/json`
val `application/ld+json`: MediaType.WithFixedCharset = MediaType.customWithFixedCharset(
mainType = "application",
subType = "ld+json",
charset = HttpCharsets.`UTF-8`,
fileExtensions = List("jsonld")
)
val `text/turtle`: MediaType.WithFixedCharset = MediaType.customWithFixedCharset(
mainType = "text",
subType = "turtle",
charset = HttpCharsets.`UTF-8`,
fileExtensions = List("ttl")
)
val `application/rdf+xml`: MediaType.WithOpenCharset = MediaType.customWithOpenCharset(
mainType = "application",
subType = "rdf+xml",
fileExtensions = List("rdf")
)
/**
* A map of MIME types (strings) to supported RDF media types.
*/
val registry: Map[String, MediaType.NonBinary] = Set(
`application/json`,
`application/ld+json`,
`text/turtle`,
`application/rdf+xml`
).map {
mediaType => mediaType.toString -> mediaType
}.toMap
/**
* Ensures that a media specifies the UTF-8 charset if necessary.
*
* @param mediaType a non-binary media type.
* @return the same media type, specifying the UTF-8 charset if necessary.
*/
def toUTF8ContentType(mediaType: MediaType.NonBinary): ContentType.NonBinary = {
mediaType match {
case withFixedCharset: MediaType.WithFixedCharset => withFixedCharset.toContentType
case withOpenCharset: MediaType.WithOpenCharset => withOpenCharset.toContentType(HttpCharsets.`UTF-8`)
}
}
/**
* Converts less specific media types to more specific ones if necessary (e.g. specifying
* JSON-LD instead of JSON).
*
* @param mediaType a non-binary media type.
* @return the most specific similar media type that the Knora API server supports.
*/
def toMostSpecificMediaType(mediaType: MediaType.NonBinary): MediaType.NonBinary = {
mediaType match {
case `application/json` => `application/ld+json`
case other => other
}
}
}
| musicEnfanthen/Knora | webapi/src/main/scala/org/knora/webapi/RdfMediaTypes.scala | Scala | agpl-3.0 | 3,227 |
/*
* Copyright 2001-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.fixture
abstract class AsyncFeatureSpec extends AsyncFeatureSpecLike {
/**
* Returns a user friendly string for this suite, composed of the
* simple name of the class (possibly simplified further by removing dollar signs if added by the Scala interpeter) and, if this suite
* contains nested suites, the result of invoking <code>toString</code> on each
* of the nested suites, separated by commas and surrounded by parentheses.
*
* @return a user-friendly string for this suite
*/
override def toString: String = org.scalatest.Suite.suiteToString(None, this)
} | rahulkavale/scalatest | scalatest/src/main/scala/org/scalatest/fixture/AsyncFeatureSpec.scala | Scala | apache-2.0 | 1,210 |
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600j.v3
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.box.CtValidation
import uk.gov.hmrc.ct.ct600.v3.retriever.CT600BoxRetriever
class J5Spec extends WordSpec with MockitoSugar with Matchers {
"J5 validate" should {
"not return errors when B140 is false" in {
val mockBoxRetriever = mock[CT600BoxRetriever]
when(mockBoxRetriever.retrieveB140()).thenReturn(B140(Some(false)))
J5(None).validate(mockBoxRetriever) shouldBe Set()
}
"not return errors when B140 is true and J5 is valid" in {
val mockBoxRetriever = mock[CT600BoxRetriever]
when(mockBoxRetriever.retrieveB140()).thenReturn(B140(Some(true)))
J5(Some("12345678")).validate(mockBoxRetriever) shouldBe Set()
}
"return required error when B140 is true and J5 is blank" in {
val mockBoxRetriever = mock[CT600BoxRetriever]
when(mockBoxRetriever.retrieveB140()).thenReturn(B140(Some(true)))
J5(None).validate(mockBoxRetriever) shouldBe Set(CtValidation(Some("J5"), "error.J5.required", None))
}
"return regex error when B140 is true and J5 is invalid" in {
val mockBoxRetriever = mock[CT600BoxRetriever]
when(mockBoxRetriever.retrieveB140()).thenReturn(B140(Some(true)))
J5(Some("xyz")).validate(mockBoxRetriever) shouldBe Set(CtValidation(Some("J5"), "error.J5.regexFailure", None))
}
}
}
| ahudspith-equalexperts/ct-calculations | src/test/scala/uk/gov/hmrc/ct/ct600j/v3/J5Spec.scala | Scala | apache-2.0 | 2,078 |
package dotty.tools
package dotc
package ast
import core._
import Names._, Types._ , Symbols._, StdNames._, Flags._, Contexts._
import org.junit.Test
class DesugarTests extends DottyTest {
import tpd._
private def validSym(sym: Symbol)(using Context): Unit = {
assert(
// remaining symbols must be either synthetic:
sym.is(Synthetic) ||
// or be a constructor:
sym.name == nme.CONSTRUCTOR,
s"found: $sym (${sym.flagsString})"
)
}
@Test def caseClassHasCorrectMembers: Unit =
checkCompile("typer", "case class Foo(x: Int, y: String)") { (tree, context) =>
given Context = context
val ccTree = tree.find(tree => tree.symbol.name == typeName("Foo")).get
val List(_, foo) = defPath(ccTree.symbol, tree).map(_.symbol.info)
val x :: y :: rest = foo.decls.toList
// Make sure we extracted the correct values from foo:
assert(x.name == termName("x"))
assert(y.name == termName("y"))
rest.foreach(validSym)
}
@Test def caseClassCompanionHasCorrectMembers: Unit =
checkCompile("typer", "case class Foo(x: Int, y: String)") { (tree, context) =>
given Context = context
val ccTree = tree.find(tree => tree.symbol.name == termName("Foo")).get
val List(_, foo) = defPath(ccTree.symbol, tree).map(_.symbol.info)
foo.decls.foreach(validSym)
}
}
| lampepfl/dotty | compiler/test/dotty/tools/dotc/ast/DesugarTests.scala | Scala | apache-2.0 | 1,389 |
package com.twitter.gizzard.util
import scala.collection.SeqProxy
import scala.collection.generic.CanBuildFrom
import java.util.concurrent._
import com.twitter.util.{Duration, Time}
import com.twitter.conversions.time._
import com.twitter.gizzard.Stats
class Future(name: String, poolSize: Int, maxPoolSize: Int, keepAlive: Duration,
val timeout: Duration) {
var executor = new ThreadPoolExecutor(poolSize, maxPoolSize, keepAlive.inSeconds,
TimeUnit.SECONDS, new LinkedBlockingQueue[Runnable], new NamedPoolThreadFactory(name))
Stats.addGauge("future-" + name + "-queue-size") { executor.getQueue().size() }
def apply[A](a: => A) = {
val trans = Stats.transactionOpt.map { _.createChild }
val future = new FutureTask(new Callable[A] {
val startTime = Time.now
def call = {
trans.foreach { t => Stats.setTransaction(t) }
val timeInQueue = Time.now - startTime
Stats.transaction.record("Time spent in future queue: "+timeInQueue.inMillis)
if (timeInQueue > timeout) {
Stats.incr("future-" + name + "-timeout")
throw new TimeoutException("future spent too long in queue")
}
val threadExecTime = Time.now
try {
a
} catch {
case e: Exception =>
Stats.transaction.record("Caught exception: "+e)
throw e
} finally {
val duration = Time.now - threadExecTime
Stats.transaction.record("Total duration: "+duration.inMillis)
trans.foreach { t => Stats.endTransaction() }
}
}
})
executor.execute(future)
future
}
def shutdown() {
executor.shutdown()
executor.awaitTermination(60, TimeUnit.SECONDS)
}
}
class ParallelSeq[A](seq: Seq[A], future: Future) extends SeqProxy[A] {
def self = seq
override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Seq[A], B, That]): That = {
val coll: Seq[B] = if (seq.size <= 1) {
seq.map(f)
} else {
seq.map { a => future(f(a)) }.map { _.get(future.timeout.inMillis, TimeUnit.MILLISECONDS) }
}
val b = bf(repr)
for (x <- coll) b += x
b.sizeHint(coll)
b.result
}
override def flatMap[B, That](f: A => Traversable[B])(implicit bf: CanBuildFrom[Seq[A], B, That]): That = {
val coll: Seq[B] = if (seq.size <= 1) {
seq.flatMap(f)
} else {
seq.map { a => future(f(a)) }.flatMap { _.get(future.timeout.inMillis, TimeUnit.MILLISECONDS) }
}
val b = bf(repr)
for (x <- coll) b += x
b.sizeHint(coll)
b.result
}
}
| kangkot/gizzard | src/main/scala/com/twitter/gizzard/util/Future.scala | Scala | apache-2.0 | 2,582 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.tail
import cats.laws._
import cats.laws.discipline._
import cats.syntax.eq._
import monix.eval.{Coeval, Task}
import monix.execution.exceptions.DummyException
import monix.execution.internal.Platform
import monix.tail.batches.{Batch, BatchCursor}
object IterantRepeatSuite extends BaseTestSuite {
test("Iterant.repeat works for one item") { _ =>
val count = if (Platform.isJVM) 5000 else 500
val r = Iterant[Coeval].pure(1).repeat.take(count).sumL.value()
assertEquals(r, count)
}
test("Iterant.repeat works for many many") { _ =>
val count = if (Platform.isJVM) 10 else 500
check2 { (list: List[Int], index: Int) =>
val fa = arbitraryListToIterant[Coeval, Int](list, index, allowErrors = false)
.repeat
.take(count)
val expected =
if (list.isEmpty) Iterant.empty[Coeval, Int] else {
Iterant[Coeval].fromIterable(
(0 until (count / list.length + 1))
.flatMap(_ => list)
.take(count))
}
fa <-> expected
}
}
test("Iterant.repeat terminates on exception") { implicit s =>
var effect = 0
var values = List[Int]()
val expectedValues = List.fill(6)(1)
val dummy = DummyException("dummy")
val source = Iterant[Coeval].nextS(1, Coeval(Iterant[Coeval].empty[Int]))
intercept[DummyException] {
source.repeat.map { x =>
if (effect == 6) throw dummy else {
effect += 1
values ::= x
x
}
}.toListL.value()}
assertEquals(values, expectedValues)
}
test("Iterant.repeat protects against broken batches") { implicit s =>
check1 { (iter: Iterant[Task, Int]) =>
val dummy = DummyException("dummy")
val suffix = Iterant[Task].nextBatchS[Int](new ThrowExceptionBatch(dummy), Task.now(Iterant[Task].empty))
val stream = iter.onErrorIgnore ++ suffix
val received = stream.repeat
received <-> iter.onErrorIgnore ++ Iterant[Task].haltS[Int](Some(dummy))
}
}
test("Iterant.repeat protects against broken cursors") { implicit s =>
check1 { (iter: Iterant[Task, Int]) =>
val dummy = DummyException("dummy")
val suffix = Iterant[Task].nextCursorS[Int](new ThrowExceptionCursor(dummy), Task.now(Iterant[Task].empty))
val stream = iter.onErrorIgnore ++ suffix
val received = stream.repeat
received <-> iter.onErrorIgnore ++ Iterant[Task].haltS[Int](Some(dummy))
}
}
test("Iterant.repeat terminates streams that end in error") { _ =>
check1 { (stream: Iterant[Coeval, Int]) =>
val dummy = DummyException("dummy")
val fa = stream ++ Iterant[Coeval].raiseError[Int](dummy)
fa.repeat <-> fa
}
}
test("Iterant.repeat terminates if the source is empty") { implicit s =>
val source1 = Iterant[Coeval].empty[Int]
val source2 = Iterant[Coeval].suspendS(Coeval(source1))
val source3 = Iterant[Coeval].nextCursorS[Int](BatchCursor(), Coeval(source2))
val source4 = Iterant[Coeval].nextBatchS[Int](Batch.empty[Int], Coeval(source3))
assertEquals(source1.repeat.toListL.value(), List.empty[Int])
assertEquals(source2.repeat.toListL.value(), List.empty[Int])
assertEquals(source3.repeat.toListL.value(), List.empty[Int])
assertEquals(source4.repeat.toListL.value(), List.empty[Int])
}
test("Iterant.repeatEval captures effects") { _ =>
check1 { (xs: Vector[Int]) =>
val iterator = xs.iterator
val evaluated = Iterant[Coeval]
.repeatEval(iterator.next())
.take(xs.length)
evaluated <-> Iterant[Coeval].fromIterator(xs.iterator)
}
}
test("Iterant.repeatEval terminates on exceptions") { _ =>
val dummy = DummyException("dummy")
val xs = Iterant[Coeval].repeatEval[Int] {
throw dummy
}
assert(xs === Iterant[Coeval].raiseError(dummy))
}
test("Iterant.repeatEvalF repeats effectful values") { _ =>
val repeats = 66
var effect = 0
val increment = Coeval { effect += 1 }
Iterant[Coeval].repeatEvalF(increment).take(repeats)
.completedL.value()
assertEquals(effect, repeats)
}
test("Iterant.repeatEvalF terminates on exceptions raised in F") { _ =>
val dummy = DummyException("dummy")
val xs = Iterant[Coeval].repeatEvalF(Coeval.raiseError[Int](dummy))
assert(xs === Iterant[Coeval].raiseError(dummy))
}
} | Wogan/monix | monix-tail/shared/src/test/scala/monix/tail/IterantRepeatSuite.scala | Scala | apache-2.0 | 5,052 |
package uk.gov.dvla.vehicles.presentation.common.mappings
import play.api.data.Forms.{number, optional}
import play.api.data.Mapping
object Mileage {
private final val MinLength = 1
final val MaxLength = 6
final val Max = 999999 // confirmed as max size by BAs
final val Pattern = s"\\d{$MinLength,$MaxLength}" // Digits only with specified size.
def mileage: Mapping[Option[Int]] = optional(number(min = 0, max = Max))
}
| dvla/vehicles-presentation-common | app/uk/gov/dvla/vehicles/presentation/common/mappings/Mileage.scala | Scala | mit | 435 |
package com.ing.baker.types.modules
import com.ing.baker.types
import com.ing.baker.types.Converters
import org.joda.time.{DateTime, LocalDate, LocalDateTime}
import org.scalacheck.Gen
import org.scalacheck.Test.Parameters.defaultVerbose
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatestplus.scalacheck.Checkers
class JodaTimeModuleSpec extends AnyWordSpecLike with Matchers with Checkers {
private val minSuccessfulTests = 100
// Long.MaxValue is not supported by joda time for local dates, resulting in a integer overflow
// This shifts the long max value 1 bit to the right (divides by 2)
// This translates to the date: Fri Apr 24 17:36:27 CEST 146140482
private val maxMillis = Long.MaxValue >> 1
private val numGen: Gen[Long] = Gen.chooseNum[Long](
0L, maxMillis, 0, maxMillis
)
"The JodaTimeModule" should {
"be able to parse the types of DateTime, LocalDateTime and LocalDate" in {
Converters.readJavaType[DateTime] shouldBe types.Date
Converters.readJavaType[LocalDateTime] shouldBe types.Date
Converters.readJavaType[LocalDate] shouldBe types.Date
}
"be able to read/write all DateTime instances" in {
val dateTimeGen: Gen[DateTime] = numGen.map(millis => new DateTime(millis))
check(transitivityProperty[DateTime](dateTimeGen), defaultVerbose.withMinSuccessfulTests(minSuccessfulTests))
}
"be able to read/write all LocalDateTime instances" in {
val localDateTimeGen: Gen[LocalDateTime] = numGen.map(millis => new LocalDateTime(millis))
check(transitivityProperty[LocalDateTime](localDateTimeGen), defaultVerbose.withMinSuccessfulTests(minSuccessfulTests))
}
"be able to read/write all LocalDate instances" in {
val localDateGen: Gen[LocalDate] = numGen.map(millis => new LocalDate(millis))
check(transitivityProperty[LocalDate](localDateGen), defaultVerbose.withMinSuccessfulTests(minSuccessfulTests))
}
}
}
| ing-bank/baker | core/baker-types/src/test/scala/com/ing/baker/types/modules/JodaTimeModuleSpec.scala | Scala | mit | 2,008 |
package org.jetbrains.plugins.scala
package console
import com.intellij.execution.configurations._
import com.intellij.execution.filters.TextConsoleBuilderImpl
import com.intellij.execution.ui.ConsoleView
import com.intellij.openapi.project.Project
import java.lang.String
import org.jdom.Element
import com.intellij.openapi.options.SettingsEditor
import com.intellij.execution.runners.ExecutionEnvironment
import com.intellij.openapi.util.JDOMExternalizer
import com.intellij.execution.Executor
import org.jetbrains.plugins.scala.runner.BaseRunConfiguration
/**
* User: Alexander Podkhalyuzin
* Date: 10.02.2009
*/
class ScalaConsoleRunConfiguration(project: Project, configurationFactory: ConfigurationFactory, name: String)
extends BaseRunConfiguration(project, configurationFactory, name) {
val mainClass = "org.jetbrains.plugins.scala.compiler.rt.ConsoleRunner"
def apply(params: ScalaConsoleRunConfigurationForm) {
javaOptions = params.getJavaOptions
consoleArgs = params.getConsoleArgs
workingDirectory = params.getWorkingDirectory
setModule(params.getModule)
}
def getState(executor: Executor, env: ExecutionEnvironment): RunProfileState = {
val state = new JavaCommandLineState(env) {
protected override def createJavaParameters: JavaParameters = {
val params = createParams
params.getProgramParametersList.addParametersString(consoleArgs)
params
}
}
val consoleBuilder = new TextConsoleBuilderImpl(project) {
override def getConsole: ConsoleView = {
val consoleView = new ScalaLanguageConsoleView(getProject)
consoleView.getConsole.setPrompt(null)
consoleView
}
}
state.setConsoleBuilder(consoleBuilder)
state
}
def getConfigurationEditor: SettingsEditor[_ <: RunConfiguration] = new ScalaConsoleRunConfigurationEditor(project, this)
override def writeExternal(element: Element) {
super.writeExternal(element)
JDOMExternalizer.write(element, "consoleArgs", consoleArgs)
}
override def readExternal(element: Element) {
super.readExternal(element)
consoleArgs = JDOMExternalizer.readString(element, "consoleArgs")
}
}
| consulo/consulo-scala | src/org/jetbrains/plugins/scala/console/ScalaConsoleRunConfiguration.scala | Scala | apache-2.0 | 2,197 |
package com.arkxu.aaas.api
import akka.http.scaladsl.model.{StatusCodes, HttpResponse}
import com.arkxu.aaas.Implicits
import com.arkxu.aaas.auth.BasicAuth
import com.arkxu.aaas.model.operation.AssetsDataOperation
import akka.http.scaladsl.server.Directives._
import scala.concurrent.Future
/**
* Created by arkxu on 12/24/15.
*/
trait DeleteRoutes extends BaseRoute with AssetsDataOperation with Implicits{
val deleteRoutes =
delete {
authenticateBasic(realm = "aaas realm", BasicAuth.authenticator) { user =>
path(JavaUUID ~ RestPath) { (id, rest) =>
onSuccess(model.delete(id)) {
case _ =>
complete {
HttpResponse(status = StatusCodes.OK)
}
}
} ~ {
path(Segments ~ Slash.?) { segments =>
val assetByPaths = model.findByPath(segments)
val done = assetByPaths.flatMap { abps =>
Future {
for (abp <- abps) {
model.delete(abp.id)
}
}
}
onSuccess(done) {
complete {
HttpResponse(status = StatusCodes.OK)
}
}
}
}
}
}
}
| arkxu/aaas | src/main/scala/com/arkxu/aaas/api/DeleteRoutes.scala | Scala | mit | 1,248 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources
import java.util.Locale
import scala.collection.JavaConverters._
import org.apache.hadoop.fs.Path
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.apache.spark.SparkUpgradeException
import org.apache.spark.sql.{SPARK_LEGACY_DATETIME, SPARK_LEGACY_INT96, SPARK_VERSION_METADATA_KEY}
import org.apache.spark.sql.catalyst.catalog.{CatalogTable, CatalogUtils}
import org.apache.spark.sql.catalyst.expressions.{AttributeReference, AttributeSet, Expression, ExpressionSet, PredicateHelper}
import org.apache.spark.sql.catalyst.util.RebaseDateTime
import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
import org.apache.spark.sql.execution.datasources.parquet.ParquetOptions
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy
import org.apache.spark.sql.sources.BaseRelation
import org.apache.spark.sql.types._
import org.apache.spark.sql.util.CaseInsensitiveStringMap
import org.apache.spark.util.Utils
object DataSourceUtils extends PredicateHelper {
/**
* The key to use for storing partitionBy columns as options.
*/
val PARTITIONING_COLUMNS_KEY = "__partition_columns"
/**
* Utility methods for converting partitionBy columns to options and back.
*/
private implicit val formats = Serialization.formats(NoTypeHints)
def encodePartitioningColumns(columns: Seq[String]): String = {
Serialization.write(columns)
}
def decodePartitioningColumns(str: String): Seq[String] = {
Serialization.read[Seq[String]](str)
}
/**
* Verify if the field name is supported in datasource. This verification should be done
* in a driver side.
*/
def checkFieldNames(format: FileFormat, schema: StructType): Unit = {
schema.foreach { field =>
if (!format.supportFieldName(field.name)) {
throw QueryCompilationErrors.columnNameContainsInvalidCharactersError(field.name)
}
field.dataType match {
case s: StructType => checkFieldNames(format, s)
case _ =>
}
}
}
/**
* Verify if the schema is supported in datasource. This verification should be done
* in a driver side.
*/
def verifySchema(format: FileFormat, schema: StructType): Unit = {
schema.foreach { field =>
if (!format.supportDataType(field.dataType)) {
throw QueryCompilationErrors.dataTypeUnsupportedByDataSourceError(format.toString, field)
}
}
checkFieldNames(format, schema)
}
// SPARK-24626: Metadata files and temporary files should not be
// counted as data files, so that they shouldn't participate in tasks like
// location size calculation.
private[sql] def isDataPath(path: Path): Boolean = isDataFile(path.getName)
private[sql] def isDataFile(fileName: String) =
!(fileName.startsWith("_") || fileName.startsWith("."))
/**
* Returns if the given relation's V1 datasource provider supports nested predicate pushdown.
*/
private[sql] def supportNestedPredicatePushdown(relation: BaseRelation): Boolean =
relation match {
case hs: HadoopFsRelation =>
val supportedDatasources =
Utils.stringToSeq(SQLConf.get.getConf(SQLConf.NESTED_PREDICATE_PUSHDOWN_FILE_SOURCE_LIST)
.toLowerCase(Locale.ROOT))
supportedDatasources.contains(hs.toString)
case _ => false
}
def datetimeRebaseMode(
lookupFileMeta: String => String,
modeByConfig: String): LegacyBehaviorPolicy.Value = {
if (Utils.isTesting && SQLConf.get.getConfString("spark.test.forceNoRebase", "") == "true") {
return LegacyBehaviorPolicy.CORRECTED
}
// If there is no version, we return the mode specified by the config.
Option(lookupFileMeta(SPARK_VERSION_METADATA_KEY)).map { version =>
// Files written by Spark 2.4 and earlier follow the legacy hybrid calendar and we need to
// rebase the datetime values.
// Files written by Spark 3.0 and latter may also need the rebase if they were written with
// the "LEGACY" rebase mode.
if (version < "3.0.0" || lookupFileMeta(SPARK_LEGACY_DATETIME) != null) {
LegacyBehaviorPolicy.LEGACY
} else {
LegacyBehaviorPolicy.CORRECTED
}
}.getOrElse(LegacyBehaviorPolicy.withName(modeByConfig))
}
def int96RebaseMode(
lookupFileMeta: String => String,
modeByConfig: String): LegacyBehaviorPolicy.Value = {
if (Utils.isTesting && SQLConf.get.getConfString("spark.test.forceNoRebase", "") == "true") {
return LegacyBehaviorPolicy.CORRECTED
}
// If there is no version, we return the mode specified by the config.
Option(lookupFileMeta(SPARK_VERSION_METADATA_KEY)).map { version =>
// Files written by Spark 3.0 and earlier follow the legacy hybrid calendar and we need to
// rebase the INT96 timestamp values.
// Files written by Spark 3.1 and latter may also need the rebase if they were written with
// the "LEGACY" rebase mode.
if (version < "3.1.0" || lookupFileMeta(SPARK_LEGACY_INT96) != null) {
LegacyBehaviorPolicy.LEGACY
} else {
LegacyBehaviorPolicy.CORRECTED
}
}.getOrElse(LegacyBehaviorPolicy.withName(modeByConfig))
}
def newRebaseExceptionInRead(format: String): SparkUpgradeException = {
val (config, option) = format match {
case "Parquet INT96" =>
(SQLConf.PARQUET_INT96_REBASE_MODE_IN_READ.key, ParquetOptions.INT96_REBASE_MODE)
case "Parquet" =>
(SQLConf.PARQUET_REBASE_MODE_IN_READ.key, ParquetOptions.DATETIME_REBASE_MODE)
case "Avro" =>
(SQLConf.AVRO_REBASE_MODE_IN_READ.key, "datetimeRebaseMode")
case _ => throw QueryExecutionErrors.unrecognizedFileFormatError(format)
}
QueryExecutionErrors.sparkUpgradeInReadingDatesError(format, config, option)
}
def newRebaseExceptionInWrite(format: String): SparkUpgradeException = {
val config = format match {
case "Parquet INT96" => SQLConf.PARQUET_INT96_REBASE_MODE_IN_WRITE.key
case "Parquet" => SQLConf.PARQUET_REBASE_MODE_IN_WRITE.key
case "Avro" => SQLConf.AVRO_REBASE_MODE_IN_WRITE.key
case _ => throw QueryExecutionErrors.unrecognizedFileFormatError(format)
}
QueryExecutionErrors.sparkUpgradeInWritingDatesError(format, config)
}
def creteDateRebaseFuncInRead(
rebaseMode: LegacyBehaviorPolicy.Value,
format: String): Int => Int = rebaseMode match {
case LegacyBehaviorPolicy.EXCEPTION => days: Int =>
if (days < RebaseDateTime.lastSwitchJulianDay) {
throw DataSourceUtils.newRebaseExceptionInRead(format)
}
days
case LegacyBehaviorPolicy.LEGACY => RebaseDateTime.rebaseJulianToGregorianDays
case LegacyBehaviorPolicy.CORRECTED => identity[Int]
}
def creteDateRebaseFuncInWrite(
rebaseMode: LegacyBehaviorPolicy.Value,
format: String): Int => Int = rebaseMode match {
case LegacyBehaviorPolicy.EXCEPTION => days: Int =>
if (days < RebaseDateTime.lastSwitchGregorianDay) {
throw DataSourceUtils.newRebaseExceptionInWrite(format)
}
days
case LegacyBehaviorPolicy.LEGACY => RebaseDateTime.rebaseGregorianToJulianDays
case LegacyBehaviorPolicy.CORRECTED => identity[Int]
}
def creteTimestampRebaseFuncInRead(
rebaseMode: LegacyBehaviorPolicy.Value,
format: String): Long => Long = rebaseMode match {
case LegacyBehaviorPolicy.EXCEPTION => micros: Long =>
if (micros < RebaseDateTime.lastSwitchJulianTs) {
throw DataSourceUtils.newRebaseExceptionInRead(format)
}
micros
case LegacyBehaviorPolicy.LEGACY => RebaseDateTime.rebaseJulianToGregorianMicros
case LegacyBehaviorPolicy.CORRECTED => identity[Long]
}
def creteTimestampRebaseFuncInWrite(
rebaseMode: LegacyBehaviorPolicy.Value,
format: String): Long => Long = rebaseMode match {
case LegacyBehaviorPolicy.EXCEPTION => micros: Long =>
if (micros < RebaseDateTime.lastSwitchGregorianTs) {
throw DataSourceUtils.newRebaseExceptionInWrite(format)
}
micros
case LegacyBehaviorPolicy.LEGACY => RebaseDateTime.rebaseGregorianToJulianMicros
case LegacyBehaviorPolicy.CORRECTED => identity[Long]
}
def generateDatasourceOptions(
extraOptions: CaseInsensitiveStringMap, table: CatalogTable): Map[String, String] = {
val pathOption = table.storage.locationUri.map("path" -> CatalogUtils.URIToString(_))
val options = table.storage.properties ++ pathOption
if (!SQLConf.get.getConf(SQLConf.LEGACY_EXTRA_OPTIONS_BEHAVIOR)) {
// Check the same key with different values
table.storage.properties.foreach { case (k, v) =>
if (extraOptions.containsKey(k) && extraOptions.get(k) != v) {
throw QueryCompilationErrors.failToResolveDataSourceForTableError(table, k)
}
}
// To keep the original key from table properties, here we filter all case insensitive
// duplicate keys out from extra options.
val lowerCasedDuplicatedKeys =
table.storage.properties.keySet.map(_.toLowerCase(Locale.ROOT))
.intersect(extraOptions.keySet.asScala)
extraOptions.asCaseSensitiveMap().asScala.filterNot {
case (k, _) => lowerCasedDuplicatedKeys.contains(k.toLowerCase(Locale.ROOT))
}.toMap ++ options
} else {
options
}
}
def getPartitionFiltersAndDataFilters(
partitionSchema: StructType,
normalizedFilters: Seq[Expression]): (Seq[Expression], Seq[Expression]) = {
val partitionColumns = normalizedFilters.flatMap { expr =>
expr.collect {
case attr: AttributeReference if partitionSchema.names.contains(attr.name) =>
attr
}
}
val partitionSet = AttributeSet(partitionColumns)
val (partitionFilters, dataFilters) = normalizedFilters.partition(f =>
f.references.subsetOf(partitionSet)
)
val extraPartitionFilter =
dataFilters.flatMap(extractPredicatesWithinOutputSet(_, partitionSet))
(ExpressionSet(partitionFilters ++ extraPartitionFilter).toSeq, dataFilters)
}
}
| nchammas/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceUtils.scala | Scala | apache-2.0 | 10,957 |
import models.Game.Setup
import play.api.Configuration
import scala.reflect.runtime.currentMirror
import scala.tools.reflect.ToolBox
import scala.util.Random
package object config {
final case class GameConfig(fps: Int, secs: Int, setupCode: String) {
lazy val setup: Setup = {
setupCode match {
case "random" => (_, _) => Random.nextBoolean()
case other => evaluate("($r: Int, $c: Int) => { " + other + " }: Boolean")
}
}
private def evaluate[A](string: String): A = {
val tb = currentMirror.mkToolBox()
tb.eval(tb.parse(string)).asInstanceOf[A]
}
}
object GameConfig {
def apply(c: Configuration): GameConfig = {
val game = c.get[Configuration]("game")
import game._
GameConfig(
fps = get[Int]("fps"),
secs = get[Int]("secs"),
setupCode = get[String]("setup"),
)
}
}
}
| christian-schlichtherle/play-scala-game-of-life | server/app/config/package.scala | Scala | apache-2.0 | 898 |
package dk.gp.gpc.util
import dk.bayes.factorgraph2.variable.CanonicalGaussianVariable
import dk.gp.gpc.GpcModel
import dk.bayes.factorgraph2.variable.BernVariable
import dk.bayes.factorgraph2.factor.StepFunctionFactor
import dk.bayes.factorgraph2.factor.CanonicalGaussianFactor
import breeze.linalg.DenseMatrix
import breeze.linalg.DenseVector
case class GpcFactorGraph(model: GpcModel) {
private val covX = model.covFunc.cov(model.x, model.x, model.covFuncParams) + DenseMatrix.eye[Double](model.x.rows) * 1e-7
private val meanX = DenseVector.zeros[Double](model.x.rows) + model.gpMean
/**
* Create variables
*/
val fVariable = CanonicalGaussianVariable()
val yVariables = model.y.toArray.map { y =>
val k = if (y == 1) 1 else 0
BernVariable(k)
}
/**
* Create factors
*/
val fFactor = CanonicalGaussianFactor(fVariable, meanX, covX)
val yFactors = model.y.toArray.zipWithIndex.map {
case (y, i) =>
StepFunctionFactor(fVariable, yVariables(i), model.x.rows, i, v = 1)
}
def updateFfactor(covFuncParams: DenseVector[Double], gpMean: Double) = {
val newCovX = model.covFunc.cov(model.x, model.x, covFuncParams) + DenseMatrix.eye[Double](model.x.rows) * 1e-7
val newMeanX = DenseVector.zeros[Double](model.x.rows) + gpMean
fFactor.updateMeanAndVariance(newMeanX, newCovX)
}
} | danielkorzekwa/bayes-scala-gp | src/main/scala/dk/gp/gpc/util/GpcFactorGraph.scala | Scala | bsd-2-clause | 1,350 |
package shapeless.datatype.tensorflow
import java.net.URI
import org.joda.time.Instant
import org.scalacheck.Prop.{all, forAll}
import org.scalacheck._
import org.tensorflow.example.Example
import magnolify.scalacheck.auto._
import shapeless._
import shapeless.datatype.record._
object TensorFlowTypeSpec extends Properties("TensorFlowType") {
import shapeless.datatype.test.Records._
import shapeless.datatype.test.SerializableUtils._
implicit def compareByteArrays(x: Array[Byte], y: Array[Byte]) = java.util.Arrays.equals(x, y)
implicit def compareIntArrays(x: Array[Int], y: Array[Int]) = java.util.Arrays.equals(x, y)
implicit def compareDouble(x: Double, y: Double) = x.toFloat == y.toFloat
def roundTrip[A, L <: HList](m: A)(implicit
gen: LabelledGeneric.Aux[A, L],
fromL: FromFeatures[L],
toL: ToFeatures[L],
mr: MatchRecord[L]
): Prop = {
val t = ensureSerializable(TensorFlowType[A])
val f1: SerializableFunction[A, Example] =
new SerializableFunction[A, Example] {
override def apply(m: A): Example = t.toExample(m)
}
val f2: SerializableFunction[Example, Option[A]] =
new SerializableFunction[Example, Option[A]] {
override def apply(m: Example): Option[A] = t.fromExample(m)
}
val f3: SerializableFunction[A, Example.Builder] =
new SerializableFunction[A, Example.Builder] {
override def apply(m: A): Example.Builder = t.toExampleBuilder(m)
}
val f4: SerializableFunction[Example.Builder, Option[A]] =
new SerializableFunction[Example.Builder, Option[A]] {
override def apply(m: Example.Builder): Option[A] = t.fromExampleBuilder(m)
}
val toFn1 = ensureSerializable(f1)
val fromFn1 = ensureSerializable(f2)
val toFn2 = ensureSerializable(f3)
val fromFn2 = ensureSerializable(f4)
val copy1 = fromFn1(toFn1(m))
val copy2 = fromFn2(toFn2(m))
val rm = RecordMatcher[A]
all(copy1.exists(rm(_, m)), copy2.exists(rm(_, m)))
}
implicit val timestampTensorFlowMappableType = TensorFlowType.at[Instant](
TensorFlowType.toLongs(_).map(new Instant(_)),
xs => TensorFlowType.fromLongs(xs.map(_.getMillis))
)
property("required") = forAll { m: Required => roundTrip(m) }
property("optional") = forAll { m: Optional => roundTrip(m) }
property("repeated") = forAll { m: Repeated => roundTrip(m) }
property("mixed") = forAll { m: Mixed => roundTrip(m) }
property("seqs") = forAll { m: Seqs => roundTrip(m) }
implicit val uriTensorFlowType = TensorFlowType.at[URI](
TensorFlowType.toStrings(_).map(URI.create),
xs => TensorFlowType.fromStrings(xs.map(_.toString))
)
property("custom") = forAll { m: Custom => roundTrip(m) }
}
| nevillelyh/shapeless-datatype | tensorflow/src/test/scala/shapeless/datatype/tensorflow/TensorFlowTypeSpec.scala | Scala | apache-2.0 | 2,728 |
package ch.epfl.scala.index
package api
import scala.concurrent.Future
trait Api {
def autocomplete(q: String): Future[List[Autocompletion]]
}
case class Autocompletion(organization: String, repository: String, description: String)
| adamwy/scaladex | shared/shared/src/main/scala/ch.epfl.scala.index.api/Api.scala | Scala | bsd-3-clause | 237 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.nodes.logical
import org.apache.flink.table.plan.nodes.FlinkConventions
import org.apache.flink.table.plan.nodes.common.CommonCalc
import org.apache.calcite.plan._
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.convert.ConverterRule
import org.apache.calcite.rel.core.Calc
import org.apache.calcite.rel.logical.LogicalCalc
import org.apache.calcite.rex.RexProgram
/**
* Sub-class of [[Calc]] that is a relational expression which computes project expressions
* and also filters in Flink.
*/
class FlinkLogicalCalc(
cluster: RelOptCluster,
traitSet: RelTraitSet,
input: RelNode,
calcProgram: RexProgram)
extends CommonCalc(cluster, traitSet, input, calcProgram)
with FlinkLogicalRel {
override def copy(traitSet: RelTraitSet, child: RelNode, program: RexProgram): Calc = {
new FlinkLogicalCalc(cluster, traitSet, child, program)
}
}
private class FlinkLogicalCalcConverter
extends ConverterRule(
classOf[LogicalCalc],
Convention.NONE,
FlinkConventions.LOGICAL,
"FlinkLogicalCalcConverter") {
override def convert(rel: RelNode): RelNode = {
val calc = rel.asInstanceOf[LogicalCalc]
val newInput = RelOptRule.convert(calc.getInput, FlinkConventions.LOGICAL)
FlinkLogicalCalc.create(newInput, calc.getProgram)
}
}
object FlinkLogicalCalc {
val CONVERTER: ConverterRule = new FlinkLogicalCalcConverter()
def create(
input: RelNode,
calcProgram: RexProgram): FlinkLogicalCalc = {
val cluster = input.getCluster
val traitSet = cluster.traitSet.replace(FlinkConventions.LOGICAL).simplify()
new FlinkLogicalCalc(cluster, traitSet, input, calcProgram)
}
}
| ueshin/apache-flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/plan/nodes/logical/FlinkLogicalCalc.scala | Scala | apache-2.0 | 2,517 |
/*
* Copyright (C)2014 D. Plaindoux.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; either version 2, or (at your option) any
* later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; see the file COPYING. If not, write to
* the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
*/
package smallibs.rapido.lang.checker
import smallibs.rapido.lang.ast._
/**
* The type check validates the specification checking object
* type compatibilities and definition in each service scope
*/
// ---------------------------------------------------------------------------------------------------------------------
// Service checker
// ---------------------------------------------------------------------------------------------------------------------
class ClientChecker(entities: Entities) {
def missingDefinitions(notifier: ErrorNotifier): ErrorNotifier =
entities.clients.foldLeft[ErrorNotifier](notifier) {
case (notifier, (name, client)) =>
(for (name <- client.provides if !entities.services.contains(name))
yield name) match {
case Nil => notifier
case l =>
notifier.
locate(client.pos).
undefined(l).
unlocate
}
}
}
object ClientChecker {
def apply(entities: Entities): ClientChecker = new ClientChecker(entities)
def apply(entities: List[Entity]): ClientChecker = this(Entities(entities))
def apply(entities: Entity*): ClientChecker = this(entities.toList)
} | d-plaindoux/rapido | src/main/scala/smallibs/rapido/lang/checker/clientchecker.scala | Scala | lgpl-2.1 | 1,973 |
package controllers
import play.api._
import play.api.mvc._
import play.api.i18n._
import play.api.cache.Cache
import play.api.cache.Cached
import play.api.cache._
import play.api.Play.current
import scala.io.Source
import scala.xml._
import java.io._
import scala.io.Source
import play.api.libs.ws._
import scala.concurrent.Future
object Application extends Controller {
def index = {
Action {
Ok(views.html.wxrequest())
}
}
def mobile = Cached("mobile") {
Action {
Ok(views.html.mobile())
}
}
//def wx = Action{ Ok(views.xml.response("echostr","msg_signature","timestamp","nonce")) }
def wx = Action { request => Ok("Got request [" + request + "]") }
}
| MKendo/OctopusMail | app/controllers/Application.scala | Scala | mit | 702 |
package cats.abstractcat
import com.typesafe.config.Config
/**
* Created by ruguer on 3/20/15.
*/
abstract class Cat(val name:String,config: Config) {
def act() : Unit
}
| raymondpoling/CatsOfUlthar | src/main/scala/cats/abstractcat/Cat.scala | Scala | apache-2.0 | 177 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.load
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapreduce.{TaskAttemptID, TaskType}
import org.apache.hadoop.mapreduce.lib.input.{FileInputFormat, FileSplit}
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{CarbonToSparkAdapter, SparkSession}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
import org.apache.spark.sql.execution.datasources.{FilePartition, FileScanRDD, PartitionedFile}
import org.apache.spark.sql.util.SparkSQLUtil
import org.apache.spark.sql.util.SparkSQLUtil.sessionState
import org.apache.carbondata.common.logging.LogServiceFactory
import org.apache.carbondata.core.datastore.block.{Distributable, TableBlockInfo}
import org.apache.carbondata.core.datastore.impl.FileFactory
import org.apache.carbondata.core.util.{CarbonProperties, ThreadLocalSessionInfo}
import org.apache.carbondata.hadoop.util.CarbonInputFormatUtil
import org.apache.carbondata.processing.loading.csvinput.{BlockDetails, CSVInputFormat}
import org.apache.carbondata.processing.loading.model.CarbonLoadModel
import org.apache.carbondata.spark.rdd.CarbonDataRDDFactory.getNodeBlockMapping
import org.apache.carbondata.spark.util.{CarbonSparkUtil, CommonUtil}
object CsvRDDHelper {
private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
/**
* create a RDD that does reading of multiple CSV files
*/
def csvFileScanRDD(
spark: SparkSession,
model: CarbonLoadModel,
hadoopConf: Configuration
): RDD[InternalRow] = {
// 1. partition
val defaultMaxSplitBytes = sessionState(spark).conf.filesMaxPartitionBytes
val openCostInBytes = sessionState(spark).conf.filesOpenCostInBytes
val defaultParallelism = spark.sparkContext.defaultParallelism
CommonUtil.configureCSVInputFormat(hadoopConf, model)
hadoopConf.set(FileInputFormat.INPUT_DIR, model.getFactFilePath)
val jobContext = CarbonSparkUtil.createHadoopJob(hadoopConf)
val inputFormat = new CSVInputFormat()
val rawSplits = inputFormat.getSplits(jobContext).toArray
var totalLength = 0L
val splitFiles = rawSplits.map { split =>
val fileSplit = split.asInstanceOf[FileSplit]
totalLength = totalLength + fileSplit.getLength
PartitionedFile(
InternalRow.empty,
fileSplit.getPath.toString,
fileSplit.getStart,
fileSplit.getLength,
fileSplit.getLocations)
}.sortBy(_.length)(implicitly[Ordering[Long]].reverse)
model.setTotalSize(totalLength)
val totalBytes = splitFiles.map(_.length + openCostInBytes).sum
val bytesPerCore = totalBytes / defaultParallelism
val maxSplitBytes = Math.min(defaultMaxSplitBytes, Math.max(openCostInBytes, bytesPerCore))
LOGGER.info(s"Planning scan with bin packing, max size: $maxSplitBytes bytes, " +
s"open cost is considered as scanning $openCostInBytes bytes.")
val partitions = new ArrayBuffer[FilePartition]
val currentFiles = new ArrayBuffer[PartitionedFile]
var currentSize = 0L
def closePartition(): Unit = {
if (currentFiles.nonEmpty) {
val newPartition =
CarbonToSparkAdapter.createFilePartition(
partitions.size,
currentFiles)
partitions += newPartition
}
currentFiles.clear()
currentSize = 0
}
splitFiles.foreach { file =>
if (currentSize + file.length > maxSplitBytes) {
closePartition()
}
// Add the given file to the current partition.
currentSize += file.length + openCostInBytes
currentFiles += file
}
closePartition()
// 2. read function
val readFunction = getReadFunction(hadoopConf)
new FileScanRDD(spark, readFunction, partitions)
}
/**
* create a RDD that does reading of multiple CSV files based on data locality
*/
def csvFileScanRDDForLocalSort(
spark: SparkSession,
model: CarbonLoadModel,
hadoopConf: Configuration
): RDD[InternalRow] = {
CommonUtil.configureCSVInputFormat(hadoopConf, model)
// divide the blocks among the nodes as per the data locality
val nodeBlockMapping = getNodeBlockMapping(spark.sqlContext, hadoopConf, model)
val partitions = new ArrayBuffer[FilePartition]
// create file partition
nodeBlockMapping.map { entry =>
val files = entry._2.asScala.map(distributable => {
val tableBlock = distributable.asInstanceOf[TableBlockInfo]
PartitionedFile(
InternalRow.empty,
tableBlock.getFilePath,
tableBlock.getBlockOffset,
tableBlock.getBlockLength,
tableBlock.getLocations)
}).toArray
val newPartition =
CarbonToSparkAdapter.createFilePartition(
partitions.size,
collection.mutable.ArrayBuffer(files: _*))
partitions += newPartition
}
// 2. read function
val readFunction = getReadFunction(hadoopConf)
new FileScanRDD(spark, readFunction, partitions)
}
private def getReadFunction(configuration: Configuration): (PartitionedFile =>
Iterator[InternalRow]) = {
val serializableConfiguration = SparkSQLUtil.getSerializableConfigurableInstance(configuration)
new (PartitionedFile => Iterator[InternalRow]) with Serializable {
override def apply(file: PartitionedFile): Iterator[InternalRow] = {
new Iterator[InternalRow] {
ThreadLocalSessionInfo.setConfigurationToCurrentThread(serializableConfiguration.value)
val jobTrackerId = CarbonInputFormatUtil.createJobTrackerID()
val attemptId = new TaskAttemptID(jobTrackerId, 0, TaskType.MAP, 0, 0)
val hadoopAttemptContext = new TaskAttemptContextImpl(FileFactory.getConfiguration,
attemptId)
val inputSplit =
new FileSplit(new Path(file.filePath), file.start, file.length, file.locations)
var finished = false
val inputFormat = new CSVInputFormat()
val reader = inputFormat.createRecordReader(inputSplit, hadoopAttemptContext)
reader.initialize(inputSplit, hadoopAttemptContext)
override def hasNext: Boolean = {
if (!finished) {
if (reader != null) {
if (reader.nextKeyValue()) {
true
} else {
finished = true
reader.close()
false
}
} else {
finished = true
false
}
} else {
false
}
}
override def next(): InternalRow = {
new GenericInternalRow(reader.getCurrentValue.get().asInstanceOf[Array[Any]])
}
}
}
}
}
}
| zzcclp/carbondata | integration/spark/src/main/scala/org/apache/carbondata/spark/load/CsvRDDHelper.scala | Scala | apache-2.0 | 7,817 |
package io.iohk.ethereum.ets.blockchain
import akka.util.ByteString
import io.iohk.ethereum.domain._
import io.iohk.ethereum.ets.common.AccountState
case class BlockchainScenario(
blocks: List[BlockDef],
genesisBlockHeader: BlockHeaderDef,
genesisRLP: Option[ByteString],
lastblockhash: ByteString,
network: String,
postState: Option[Map[Address, AccountState]],
postStateHash: Option[ByteString],
pre: Map[Address, AccountState],
sealEngine: Option[String]
)
case class BlockDef(
rlp: String,
blocknumber: Option[BigInt],
blockHeader: Option[BlockHeaderDef],
transactions: Option[Seq[TransactionDef]],
uncleHeaders: Option[Seq[BlockHeaderDef]]
)
case class TransactionDef(
nonce: BigInt,
gasPrice: BigInt,
gasLimit: BigInt,
to: Option[Address],
value: BigInt,
data:ByteString,
r: BigInt,
s: BigInt,
v: ByteString
)
case class BlockHeaderDef(
parentHash: ByteString,
uncleHash: ByteString,
coinbase: ByteString,
stateRoot: ByteString,
transactionsTrie: ByteString,
receiptTrie: ByteString,
bloom: ByteString,
difficulty: BigInt,
number: BigInt,
gasLimit: BigInt,
gasUsed: BigInt,
timestamp: Long,
extraData: ByteString,
mixHash: ByteString,
nonce: ByteString
) {
def toBlockHeader: BlockHeader =
BlockHeader(parentHash, uncleHash, coinbase, stateRoot, transactionsTrie, receiptTrie, bloom,
difficulty, number, gasLimit, gasUsed, timestamp, extraData, mixHash, nonce
)
}
| input-output-hk/etc-client | src/ets/scala/io/iohk/ethereum/ets/blockchain/Scenario.scala | Scala | mit | 1,481 |
package io.github.lucienh.common.dao
import org.junit.Test
import org.junit.runner.RunWith
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.core.io.Resource
import org.springframework.test.context.ContextConfiguration
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner
/**
* Created by h on 15-10-29.
*/
@RunWith(classOf[SpringJUnit4ClassRunner])
@ContextConfiguration(Array("classpath*:ioc-*.xml"))
class ElasticSearchTemplateExtTest {
var elasticSearchTemplateExt: ElasticSearchTemplateExt = null
@Autowired
def setElasticSearchTemplateExt(es: ElasticSearchTemplateExt): Unit = {
elasticSearchTemplateExt = es
}
@Test
def testAdd(): Unit = {
println(elasticSearchTemplateExt)
}
}
| vulntest/sqlinjection | src/test/scala/io/github/lucienh/common/dao/ElasticSearchTemplateExtTest.scala | Scala | mit | 775 |
package filodb.memory.format
import java.nio.ByteBuffer
import java.sql.Timestamp
import scala.reflect.ClassTag
import org.agrona.DirectBuffer
import org.agrona.concurrent.UnsafeBuffer
import org.joda.time.DateTime
import spire.syntax.cfor._
import filodb.memory.format.vectors.Histogram
/**
* A generic trait for reading typed values out of a row of data.
* Used for both reading out of Filo vectors as well as for RowToVectorBuilder,
* which means it can be used to compose heterogeneous Filo vectors together.
*/
// scalastyle:off
trait RowReader {
def notNull(columnNo: Int): Boolean
def getBoolean(columnNo: Int): Boolean
def getInt(columnNo: Int): Int
def getLong(columnNo: Int): Long
def getDouble(columnNo: Int): Double
def getFloat(columnNo: Int): Float
def getString(columnNo: Int): String
def getAny(columnNo: Int): Any
def getBlobBase(columnNo: Int): Any
def getBlobOffset(columnNo: Int): Long
def getBlobNumBytes(columnNo: Int): Int // Total number of bytes for the blob
// By default this is not implemented as histograms can be parsed from multiple serialized forms or actual objects
def getHistogram(columnNo: Int): Histogram = ???
/**
* Retrieves a view into the blob at column columnNo without duplicating contents.
* Smart implementations could reuse the same UnsafeBuffer to avoid allocations.
* This default implementation simply allocates a new one.
*/
def blobAsBuffer(columnNo: Int): DirectBuffer = {
val buf = new UnsafeBuffer(Array.empty[Byte])
UnsafeUtils.wrapDirectBuf(getBlobBase(columnNo), getBlobOffset(columnNo), getBlobNumBytes(columnNo), buf)
buf
}
final def getBuffer(columnNo: Int): ByteBuffer = {
val length = getBlobNumBytes(columnNo)
getBlobBase(columnNo) match {
case UnsafeUtils.ZeroPointer => // offheap
UnsafeUtils.asDirectBuffer(getBlobOffset(columnNo), length)
case array: Array[Byte] =>
ByteBuffer.wrap(array, (getBlobOffset(columnNo) - UnsafeUtils.arayOffset).toInt, length)
}
}
// def getUtf8MediumOffset(columnNo: Int): Long
// Please override final def if your RowReader has a faster implementation
def filoUTF8String(columnNo: Int): ZeroCopyUTF8String = getAny(columnNo) match {
case s: String =>
Option(s).map(ZeroCopyUTF8String.apply).getOrElse(ZeroCopyUTF8String.empty)
case z: ZeroCopyUTF8String => z
case null => ZeroCopyUTF8String.NA
}
/**
* This method serves two purposes.
* For RowReaders that need to parse from some input source, such as CSV,
* the ClassTag gives a way for per-type parsing for non-primitive types.
* For RowReaders for fast reading paths, such as Spark, the default
* implementation serves as a fast way to read from objects.
*/
def as[T: ClassTag](columnNo: Int): T = getAny(columnNo).asInstanceOf[T]
}
import filodb.memory.format.RowReader._
// A RowReader that knows how to hashcode and compare its individual elements. Extractors must
// correspond to the schema. This could allow partition keys to be wrapped directly around raw ingest
// elements without converting to BinaryRecord first
trait SchemaRowReader extends RowReader {
def extractors: Array[TypedFieldExtractor[_]]
// NOTE: This is an EXTREMELY HOT code path, needs to be super optimized. No standard Scala collection
// or slow functional code here.
override def hashCode: Int = {
var hash = 0
cforRange { 0 until extractors.size } { i =>
hash ^= extractors(i).getField(this, i).hashCode
}
hash
}
override def equals(other: Any): Boolean = other match {
case reader: RowReader =>
cforRange { 0 until extractors.size } { i =>
if (extractors(i).compare(this, reader, i) != 0) return false
}
true
case other: Any =>
false
}
}
/**
* An example of a RowReader that can read from Scala tuples containing Option[_]
*/
final case class TupleRowReader(tuple: Product) extends RowReader {
def notNull(columnNo: Int): Boolean =
tuple.productElement(columnNo).asInstanceOf[Option[Any]].nonEmpty
def getBoolean(columnNo: Int): Boolean = tuple.productElement(columnNo) match {
case Some(x: Boolean) => x
case None => false
}
def getInt(columnNo: Int): Int = tuple.productElement(columnNo) match {
case Some(x: Int) => x
case None => 0
}
def getLong(columnNo: Int): Long = tuple.productElement(columnNo) match {
case Some(x: Long) => x
case Some(x: Timestamp) => x.getTime
case None => 0L
}
def getDouble(columnNo: Int): Double = tuple.productElement(columnNo) match {
case Some(x: Double) => x
case None => 0.0
}
def getFloat(columnNo: Int): Float = tuple.productElement(columnNo) match {
case Some(x: Float) => x
case None => 0.0F
}
def getString(columnNo: Int): String = tuple.productElement(columnNo) match {
case Some(x: String) => x
case None => null
}
def getAny(columnNo: Int): Any =
tuple.productElement(columnNo).asInstanceOf[Option[Any]].getOrElse(null)
override def getBlobBase(columnNo: Int): Any = ???
override def getBlobOffset(columnNo: Int): Long = ???
override def getBlobNumBytes(columnNo: Int): Int = ???
}
/**
* A RowReader for working with OpenCSV or anything else that emits string[]
*/
final case class ArrayStringRowReader(strings: Array[String]) extends RowReader {
//scalastyle:off
def notNull(columnNo: Int): Boolean = strings(columnNo) != null && strings(columnNo) != ""
//scalastyle:on
def getBoolean(columnNo: Int): Boolean = strings(columnNo).toBoolean
def getInt(columnNo: Int): Int = strings(columnNo).toInt
def getLong(columnNo: Int): Long = try {
strings(columnNo).toLong
} catch {
case ex: NumberFormatException => DateTime.parse(strings(columnNo)).getMillis
}
def getDouble(columnNo: Int): Double = strings(columnNo).toDouble
def getFloat(columnNo: Int): Float = strings(columnNo).toFloat
def getString(columnNo: Int): String = strings(columnNo)
def getAny(columnNo: Int): Any = strings(columnNo)
override def as[T: ClassTag](columnNo: Int): T = {
implicitly[ClassTag[T]].runtimeClass.asInstanceOf[T]
}
override def toString: String = s"ArrayStringRR(${strings.mkString(", ")})"
override def getBlobBase(columnNo: Int): Any = ???
override def getBlobOffset(columnNo: Int): Long = ???
override def getBlobNumBytes(columnNo: Int): Int = ???
// override def getUtf8MediumOffset(columnNo: Int): Long = ???
}
// scalastyle:off
/**
* A RowReader that changes the column numbers around of an original RowReader. It could be used to
* present a subset of the original columns, for example.
* @param columnRoutes an array of original column numbers for the column in question. For example:
* Array(0, 2, 5) means an getInt(1) would map to a getInt(2) for the original RowReader
*/
//noinspection ScalaStyle
trait RoutingReader extends RowReader {
def origReader: RowReader
def columnRoutes: Array[Int]
final def notNull(columnNo: Int): Boolean = origReader.notNull(columnRoutes(columnNo))
final def getBoolean(columnNo: Int): Boolean = origReader.getBoolean(columnRoutes(columnNo))
final def getInt(columnNo: Int): Int = origReader.getInt(columnRoutes(columnNo))
final def getLong(columnNo: Int): Long = origReader.getLong(columnRoutes(columnNo))
final def getDouble(columnNo: Int): Double = origReader.getDouble(columnRoutes(columnNo))
final def getFloat(columnNo: Int): Float = origReader.getFloat(columnRoutes(columnNo))
final def getString(columnNo: Int): String = origReader.getString(columnRoutes(columnNo))
final def getAny(columnNo: Int): Any = origReader.getAny(columnRoutes(columnNo))
final def getBlobBase(columnNo: Int): Any = ???
final def getBlobOffset(columnNo: Int): Long = ???
final def getBlobNumBytes(columnNo: Int): Int = ???
override def equals(other: Any): Boolean = other match {
case RoutingRowReader(orig, _) => orig.equals(origReader)
case r: RowReader => r.equals(origReader)
case other: Any => false
}
}
final case class RoutingRowReader(origReader: RowReader, columnRoutes: Array[Int]) extends RoutingReader
// A RoutingRowReader which is also a SchemaRowReader
final case class SchemaRoutingRowReader(origReader: RowReader,
columnRoutes: Array[Int],
extractors: Array[TypedFieldExtractor[_]])
extends RoutingReader with SchemaRowReader {
override def toString: String = s"SchemaRoutingRR($origReader, ${columnRoutes.toList})"
}
final case class SingleValueRowReader(value: Any) extends RowReader {
def notNull(columnNo: Int): Boolean = Option(value).isDefined
def getBoolean(columnNo: Int): Boolean = value.asInstanceOf[Boolean]
def getInt(columnNo: Int): Int = value.asInstanceOf[Int]
def getLong(columnNo: Int): Long = value.asInstanceOf[Long]
def getDouble(columnNo: Int): Double = value.asInstanceOf[Double]
def getFloat(columnNo: Int): Float = value.asInstanceOf[Float]
def getString(columnNo: Int): String = value.asInstanceOf[String]
override def getHistogram(columnNo: Int): Histogram = value.asInstanceOf[Histogram]
def getAny(columnNo: Int): Any = value
def getBlobBase(columnNo: Int): Any = value
def getBlobOffset(columnNo: Int): Long = 0
def getBlobNumBytes(columnNo: Int): Int = value.asInstanceOf[Array[Byte]].length
}
final case class SeqRowReader(sequence: Seq[Any]) extends RowReader {
def notNull(columnNo: Int): Boolean = true
def getBoolean(columnNo: Int): Boolean = sequence(columnNo).asInstanceOf[Boolean]
def getInt(columnNo: Int): Int = sequence(columnNo).asInstanceOf[Int]
def getLong(columnNo: Int): Long = sequence(columnNo).asInstanceOf[Long]
def getDouble(columnNo: Int): Double = sequence(columnNo).asInstanceOf[Double]
def getFloat(columnNo: Int): Float = sequence(columnNo).asInstanceOf[Float]
def getString(columnNo: Int): String = sequence(columnNo).asInstanceOf[String]
override def getHistogram(columnNo: Int): Histogram = sequence(columnNo).asInstanceOf[Histogram]
def getAny(columnNo: Int): Any = sequence(columnNo)
def getBlobBase(columnNo: Int): Any = ???
def getBlobOffset(columnNo: Int): Long = ???
def getBlobNumBytes(columnNo: Int): Int = ???
}
final case class UTF8MapIteratorRowReader(records: Iterator[Map[ZeroCopyUTF8String, ZeroCopyUTF8String]]) extends Iterator[RowReader] {
var currVal: Map[ZeroCopyUTF8String, ZeroCopyUTF8String] = _
private val rowReader = new RowReader {
def notNull(columnNo: Int): Boolean = true
def getBoolean(columnNo: Int): Boolean = ???
def getInt(columnNo: Int): Int = ???
def getLong(columnNo: Int): Long = ???
def getDouble(columnNo: Int): Double = ???
def getFloat(columnNo: Int): Float = ???
def getString(columnNo: Int): String = currVal.toString
def getAny(columnNo: Int): Any = currVal
def getBlobBase(columnNo: Int): Any = ???
def getBlobOffset(columnNo: Int): Long = ???
def getBlobNumBytes(columnNo: Int): Int = ???
}
override def hasNext: Boolean = records.hasNext
override def next(): RowReader = {
currVal = records.next()
rowReader
}
}
final case class SchemaSeqRowReader(sequence: Seq[Any],
extractors: Array[TypedFieldExtractor[_]]) extends SchemaRowReader {
def notNull(columnNo: Int): Boolean = true
def getBoolean(columnNo: Int): Boolean = sequence(columnNo).asInstanceOf[Boolean]
def getInt(columnNo: Int): Int = sequence(columnNo).asInstanceOf[Int]
def getLong(columnNo: Int): Long = sequence(columnNo).asInstanceOf[Long]
def getDouble(columnNo: Int): Double = sequence(columnNo).asInstanceOf[Double]
def getFloat(columnNo: Int): Float = sequence(columnNo).asInstanceOf[Float]
def getString(columnNo: Int): String = sequence(columnNo).asInstanceOf[String]
override def getHistogram(columnNo: Int): Histogram = sequence(columnNo).asInstanceOf[Histogram]
def getAny(columnNo: Int): Any = sequence(columnNo)
def getBlobBase(columnNo: Int): Any = sequence(columnNo).asInstanceOf[Array[Byte]]
def getBlobOffset(columnNo: Int): Long = 0
def getBlobNumBytes(columnNo: Int): Int = sequence(columnNo).asInstanceOf[Array[Byte]].length
}
object RowReader {
import DefaultValues._
// Type class for extracting a field of a specific type .. and comparing a field from two RowReaders
trait TypedFieldExtractor[@specialized F] {
def getField(reader: RowReader, columnNo: Int): F
def getFieldOrDefault(reader: RowReader, columnNo: Int): F = getField(reader, columnNo)
def compare(reader: RowReader, other: RowReader, columnNo: Int): Int
}
// A generic FieldExtractor for objects
case class ObjectFieldExtractor[T: ClassTag](default: T) extends TypedFieldExtractor[T] {
final def getField(reader: RowReader, columnNo: Int): T = reader.as[T](columnNo)
final override def getFieldOrDefault(reader: RowReader, columnNo: Int): T =
if (reader.notNull(columnNo)) getField(reader, columnNo) else default
final def compare(reader: RowReader, other: RowReader, columnNo: Int): Int =
if (getFieldOrDefault(reader, columnNo) == getFieldOrDefault(other, columnNo)) 0 else 1
}
class WrappedExtractor[@specialized T, F: TypedFieldExtractor](func: F => T)
extends TypedFieldExtractor[T] {
val orig = implicitly[TypedFieldExtractor[F]]
def getField(reader: RowReader, columnNo: Int): T = func(orig.getField(reader, columnNo))
def compare(reader: RowReader, other: RowReader, col: Int): Int = orig.compare(reader, other, col)
}
implicit object BooleanFieldExtractor extends TypedFieldExtractor[Boolean] {
final def getField(reader: RowReader, columnNo: Int): Boolean = reader.getBoolean(columnNo)
final def compare(reader: RowReader, other: RowReader, columnNo: Int): Int =
java.lang.Boolean.compare(getFieldOrDefault(reader, columnNo), getFieldOrDefault(other, columnNo))
}
implicit object LongFieldExtractor extends TypedFieldExtractor[Long] {
final def getField(reader: RowReader, columnNo: Int): Long = reader.getLong(columnNo)
final def compare(reader: RowReader, other: RowReader, columnNo: Int): Int =
java.lang.Long.compare(getFieldOrDefault(reader, columnNo), getFieldOrDefault(other, columnNo))
}
implicit object IntFieldExtractor extends TypedFieldExtractor[Int] {
final def getField(reader: RowReader, columnNo: Int): Int = reader.getInt(columnNo)
final def compare(reader: RowReader, other: RowReader, columnNo: Int): Int =
java.lang.Integer.compare(getFieldOrDefault(reader, columnNo), getFieldOrDefault(other, columnNo))
}
implicit object DoubleFieldExtractor extends TypedFieldExtractor[Double] {
final def getField(reader: RowReader, columnNo: Int): Double = reader.getDouble(columnNo)
final def compare(reader: RowReader, other: RowReader, columnNo: Int): Int =
java.lang.Double.compare(getFieldOrDefault(reader, columnNo), getFieldOrDefault(other, columnNo))
}
implicit object FloatFieldExtractor extends TypedFieldExtractor[Float] {
final def getField(reader: RowReader, columnNo: Int): Float = reader.getFloat(columnNo)
final def compare(reader: RowReader, other: RowReader, columnNo: Int): Int =
java.lang.Float.compare(getFieldOrDefault(reader, columnNo), getFieldOrDefault(other, columnNo))
}
implicit object StringFieldExtractor extends TypedFieldExtractor[String] {
final def getField(reader: RowReader, columnNo: Int): String = reader.getString(columnNo)
override final def getFieldOrDefault(reader: RowReader, columnNo: Int): String = {
val str = reader.getString(columnNo)
if (str == null) DefaultString else str
}
final def compare(reader: RowReader, other: RowReader, columnNo: Int): Int =
getFieldOrDefault(reader, columnNo).compareTo(getFieldOrDefault(other, columnNo))
}
implicit object UTF8StringFieldExtractor extends TypedFieldExtractor[ZeroCopyUTF8String] {
final def getField(reader: RowReader, columnNo: Int): ZeroCopyUTF8String =
reader.filoUTF8String(columnNo)
// TODO: do UTF8 comparison so we can avoid having to deserialize
final def compare(reader: RowReader, other: RowReader, columnNo: Int): Int =
getFieldOrDefault(reader, columnNo).compareTo(getFieldOrDefault(other, columnNo))
}
implicit object DateTimeFieldExtractor extends TypedFieldExtractor[DateTime] {
final def getField(reader: RowReader, columnNo: Int): DateTime = reader.as[DateTime](columnNo)
override final def getFieldOrDefault(reader: RowReader, columnNo: Int): DateTime = {
val dt = reader.as[DateTime](columnNo)
if (dt == null) DefaultDateTime else dt
}
final def compare(reader: RowReader, other: RowReader, columnNo: Int): Int =
getFieldOrDefault(reader, columnNo).compareTo(getFieldOrDefault(other, columnNo))
}
implicit object TimestampFieldExtractor extends TypedFieldExtractor[Timestamp] {
final def getField(reader: RowReader, columnNo: Int): Timestamp = reader.as[Timestamp](columnNo)
override final def getFieldOrDefault(reader: RowReader, columnNo: Int): Timestamp = {
val ts = reader.as[Timestamp](columnNo)
if (ts == null) DefaultTimestamp else ts
}
// TODO: compare the Long, instead of deserializing and comparing Timestamp object
final def compare(reader: RowReader, other: RowReader, columnNo: Int): Int =
getFieldOrDefault(reader, columnNo).compareTo(getFieldOrDefault(other, columnNo))
}
implicit object HistogramExtractor extends TypedFieldExtractor[Histogram] {
final def getField(reader: RowReader, columnNo: Int): Histogram = reader.getHistogram(columnNo)
final def compare(reader: RowReader, other: RowReader, columnNo: Int): Int =
getFieldOrDefault(reader, columnNo).compare(getFieldOrDefault(other, columnNo))
}
}
| tuplejump/FiloDB | memory/src/main/scala/filodb.memory/format/RowReader.scala | Scala | apache-2.0 | 17,956 |
trait A {
type T[_]
type S = (T with T)[A]
}
| folone/dotty | tests/untried/neg/t0207.scala | Scala | bsd-3-clause | 49 |
trait SeqLike[+Repr]
trait Seq extends SeqLike[Seq]
trait MySeq extends Seq with SeqLike[MySub]
trait MySub extends MySeq
| yusuke2255/dotty | tests/untried/pos/t3676.scala | Scala | bsd-3-clause | 123 |
package com.lynbrookrobotics.potassium.commons.drivetrain.unicycle.control
import com.lynbrookrobotics.potassium.clock.Clock
import com.lynbrookrobotics.potassium.commons.drivetrain.unicycle.control.purePursuit.PurePursuitControllers
import com.lynbrookrobotics.potassium.commons.drivetrain.unicycle.{UnicycleSignal, UnicycleVelocity}
import com.lynbrookrobotics.potassium.streams.Stream
import com.lynbrookrobotics.potassium.tasks.{ContinuousTask, FiniteTask}
import com.lynbrookrobotics.potassium.{Component, Signal}
import squants.motion.{AngularVelocity, DegreesPerSecond}
import squants.space.{Degrees, Feet}
import squants.{Acceleration, Angle, Dimensionless, Length, Percent, Quantity, Time, Velocity}
import scala.collection.immutable.Queue
trait UnicycleCoreTasks {
val controllers: UnicycleCoreControllers with UnicycleMotionProfileControllers with PurePursuitControllers
type Drivetrain <: Component[controllers.DriveSignal]
import controllers._
class DriveOpenLoop(forward: Stream[Dimensionless], turn: Stream[Dimensionless])(drive: Drivetrain)(
implicit hardware: DrivetrainHardware,
props: Signal[DrivetrainProperties]
) extends ContinuousTask {
override def onStart(): Unit = {
val combined = forward.zip(turn).map(t => UnicycleSignal(t._1, t._2))
drive.setController(childOpenLoop(combined))
}
override def onEnd(): Unit = {
drive.resetToDefault()
}
}
class ContinuousClosedDrive(forward: Stream[Dimensionless], turn: Stream[Dimensionless])(drive: Drivetrain)(
implicit hardware: DrivetrainHardware,
props: Signal[DrivetrainProperties]
) extends ContinuousTask {
override def onStart(): Unit = {
val combined = forward.zip(turn).map(t => UnicycleSignal(t._1, t._2))
drive.setController(childVelocityControl(combined))
}
override def onEnd(): Unit = {
drive.resetToDefault()
}
}
class ContinuousVelocityDrive(forward: Stream[Velocity], turn: Stream[AngularVelocity])(drive: Drivetrain)(
implicit hardware: DrivetrainHardware,
props: Signal[DrivetrainProperties]
) extends ContinuousTask {
override def onStart(): Unit = {
val combined = forward.zip(turn).map(t => UnicycleVelocity(t._1, t._2))
drive.setController(childVelocityControl(velocityControl(combined)))
}
override def onEnd(): Unit = {
drive.resetToDefault()
}
}
class DriveDistance(distance: Length, tolerance: Length)(drive: Drivetrain)(
implicit hardware: DrivetrainHardware,
props: Signal[DrivetrainProperties]
) extends FiniteTask {
override def onStart(): Unit = {
val absoluteDistance = hardware.forwardPosition.currentValue.map(_ + distance)
val (controller, error) = forwardPositionControl(absoluteDistance)
val checkedController = controller.withCheckZipped(error) { error =>
if (error.abs < tolerance) {
finished()
}
}
drive.setController(childVelocityControl(speedControl(checkedController)))
}
override def onEnd(): Unit = {
drive.resetToDefault()
}
}
class DriveDistanceWithTrapezoidalProfile(
cruisingVelocity: Velocity,
finalVelocity: Velocity,
acceleration: Acceleration,
deceleration: Acceleration,
targetDistance: Length,
tolerance: Length,
toleranceAngle: Angle
)(drive: Drivetrain)(implicit hardware: DrivetrainHardware, properties: Signal[DrivetrainProperties])
extends FiniteTask {
if (cruisingVelocity.abs > properties.get.maxForwardVelocity) {
throw new IllegalArgumentException(
"Input speed: " +
cruisingVelocity.abs.toFeetPerSecond +
" ft/s is greater than max speed"
)
}
override final def onStart(): Unit = {
val (idealVelocity, forwardError) = trapezoidalDriveControl(
cruisingVelocity,
finalVelocity,
acceleration,
deceleration,
hardware.forwardPosition,
hardware.forwardPosition.currentValue.map(_ + targetDistance),
hardware.forwardVelocity
)
val absoluteAngleTarget = hardware.turnPosition.currentValue
val (turnController, turnError) = turnPositionControl(absoluteAngleTarget)
val forwardOutput = idealVelocity.map(UnicycleVelocity(_, DegreesPerSecond(0)).toUnicycleSignal)
val combinedController = forwardOutput.zip(turnController).map(t => t._1 + t._2)
val uncheckedController = childVelocityControl(speedControl(combinedController))
val zippedError = forwardError.zip(turnError)
drive.setController(uncheckedController.withCheckZipped(zippedError) {
case (forwardError, turnError) =>
if (forwardError.abs < tolerance && turnError.abs < toleranceAngle) {
finished()
}
})
}
override def onEnd(): Unit = {
drive.resetToDefault()
}
}
/**
* drives the target distance with default values for acceleration and cruising velocity
* TODO: finish adding docs
* @param targetForwardDistance
* @param finalVelocity
* @param drive
* @param hardware
* @param properties
*/
class DriveDistanceSmooth(targetForwardDistance: Length, finalVelocity: Velocity)(drive: Drivetrain)(
implicit hardware: DrivetrainHardware,
properties: Signal[DrivetrainProperties]
) extends DriveDistanceWithTrapezoidalProfile(
0.5 * properties.get.maxForwardVelocity,
finalVelocity,
properties.get.maxAcceleration,
properties.get.maxDeceleration,
targetForwardDistance,
Feet(.1),
Degrees(5)
)(drive)
class DriveDistanceStraight(
distance: Length,
toleranceForward: Length,
toleranceAngle: Angle,
maxSpeed: Dimensionless,
minStableTicks: Int = 10
)(drive: Drivetrain)(implicit hardware: DrivetrainHardware, props: Signal[DrivetrainProperties])
extends FiniteTask {
var stableTicks = 0
override def onStart(): Unit = {
val absoluteDistance = hardware.forwardPosition.currentValue.map(_ + distance)
val (forwardController, forwardError) = forwardPositionControl(absoluteDistance)
val limitedForward = forwardController.map { u =>
UnicycleSignal(u.forward max (-maxSpeed) min maxSpeed, u.turn)
}
val targetAngleAbsolute = hardware.turnPosition.currentValue
val (turnController, turnError) = turnPositionControl(targetAngleAbsolute)
val combinedController = limitedForward.zip(turnController).map(t => t._1 + t._2)
val zippedError = forwardError.zip(turnError)
val checkedController = combinedController.withCheckZipped(zippedError) {
case (forwardError, turnError) =>
if (forwardError.abs < toleranceForward && turnError.abs < toleranceAngle) {
stableTicks += 1
if (stableTicks >= minStableTicks) {
finished()
}
} else {
stableTicks = 0
}
}
drive.setController(
childVelocityControl(speedControl(checkedController))
)
}
override def onEnd(): Unit = {
stableTicks = 0
drive.resetToDefault()
}
}
class DriveDistanceAtAngle(
distance: Length,
toleranceForward: Length,
targetAngle: Angle,
toleranceAngle: Angle,
maxSpeed: Dimensionless
)(drive: Drivetrain)(implicit hardware: DrivetrainHardware, props: Signal[DrivetrainProperties])
extends FiniteTask {
override def onStart(): Unit = {
val absoluteDistance = hardware.forwardPosition.currentValue.map(_ + distance)
val (forwardController, forwardError) = forwardPositionControl(absoluteDistance)
val limitedForward = forwardController.map { u =>
UnicycleSignal(u.forward max (-maxSpeed) min maxSpeed, u.turn)
}
val (turnController, turnError) = turnPositionControl(targetAngle)
val combinedController = limitedForward.zip(turnController).map(t => t._1 + t._2)
val zippedError = forwardError.zip(turnError)
val checkedController = combinedController.withCheckZipped(zippedError) {
case (forwardError, turnError) =>
if (forwardError.abs < toleranceForward && turnError.abs < toleranceAngle) {
finished()
}
}
drive.setController(
childVelocityControl(speedControl(checkedController))
)
}
override def onEnd(): Unit = {
drive.resetToDefault()
}
}
class DriveBeyondStraight(distance: Length, toleranceForward: Length, toleranceAngle: Angle, maxSpeed: Dimensionless)(
drive: Drivetrain
)(implicit hardware: DrivetrainHardware, props: Signal[DrivetrainProperties])
extends FiniteTask {
override def onStart(): Unit = {
val (forwardController, forwardError) = (
if (distance.value > 0) {
hardware.forwardPosition.mapToConstant(maxSpeed)
} else {
hardware.forwardPosition.mapToConstant(-maxSpeed)
},
hardware.forwardPosition.relativize((initial, current) => {
val absoluteTarget = initial + distance
absoluteTarget - current
})
)
val limitedForward = forwardController.map { u =>
UnicycleSignal(u, Percent(0))
}
val absoluteAngle = hardware.turnPosition.currentValue
val (turnController, turnError) = turnPositionControl(absoluteAngle)
val combinedController = limitedForward.zip(turnController).map(t => t._1 + t._2)
val zippedError = forwardError.zip(turnError)
val checkedController = combinedController.withCheckZipped(zippedError) {
case (forwardError, turnError) =>
val beyond = if (distance.value > 0) {
forwardError.value < 0
} else {
forwardError.value > 0
}
if (beyond) {
finished()
}
}
drive.setController(
childVelocityControl(speedControl(checkedController))
)
}
override def onEnd(): Unit = {
drive.resetToDefault()
}
}
class RotateByAngle(relativeAngle: Angle, tolerance: Angle, timeWithinTolerance: Int)(drive: Drivetrain)(
implicit hardware: DrivetrainHardware,
props: Signal[DrivetrainProperties]
) extends FiniteTask {
override def onStart(): Unit = {
val absoluteAngle = hardware.turnPosition.currentValue.map(_ + relativeAngle)
// val absoluteAngle = hardware.turnPosition.get + relativeAngle
val (controller, error) = turnPositionControl(absoluteAngle)
var ticksWithinTolerance = 0
val checkedController = controller.withCheckZipped(error) { error =>
if (error.abs < tolerance) {
ticksWithinTolerance += 1
} else {
ticksWithinTolerance = 0
}
if (ticksWithinTolerance >= timeWithinTolerance) {
finished()
}
}
drive.setController(childVelocityControl(speedControl(checkedController)))
}
override def onEnd(): Unit = {
drive.resetToDefault()
}
}
class RotateToAngle(absoluteAngle: Angle, tolerance: Angle)(drive: Drivetrain)(
implicit hardware: DrivetrainHardware,
props: Signal[DrivetrainProperties]
) extends FiniteTask {
override def onStart(): Unit = {
val (controller, error) = turnPositionControl(absoluteAngle)
val checkedController = controller.withCheckZipped(error) { error =>
if (error.abs < tolerance) {
finished()
}
}
drive.setController(childVelocityControl(speedControl(checkedController)))
}
override def onEnd(): Unit = {
drive.resetToDefault()
}
}
class CorrectOffsetWithLatency(timestampedOffset: Stream[(Angle, Time)], tolerance: Angle)(drive: Drivetrain)(
implicit hardware: DrivetrainHardware,
props: Signal[DrivetrainProperties],
clock: Clock
) extends FiniteTask {
val positionSlide: Stream[Seq[(Angle, Time)]] = hardware.turnPosition.zipWithTime.sliding(20)
private def calculateTargetFromOffsetWithLatency[T <: Quantity[T]](
timestampedOffset: Stream[(T, Time)],
positionSlide: Stream[Seq[(T, Time)]]
) = {
positionSlide.zip(timestampedOffset).map { t =>
val (positionHistory, (offset, offsetTime)) = t
val closestTimeSoFar = positionHistory.minBy {
case (position, positionTime) =>
Math.abs(positionTime.value - offsetTime.value)
}
closestTimeSoFar._1 + offset
}
}
override def onStart(): Unit = {
val targetAbsolute = calculateTargetFromOffsetWithLatency(timestampedOffset, positionSlide)
val (controller, error) = continuousTurnPositionControl(targetAbsolute)
val checkedController = controller
.zip(error)
.withCheck { t =>
val (_, e) = t
if (e.abs < tolerance) {
finished()
}
}
.map(_._1)
drive.setController(childVelocityControl(speedControl(checkedController)))
}
override def onEnd(): Unit = {
drive.resetToDefault()
}
}
class DriveToTargetWithConstantSpeed(
drivetrainComponent: Drivetrain,
distanceToTarget: Stream[Option[Length]],
angleToTarget: Stream[Angle],
forwardVelocity: Dimensionless,
maxTurnVelocity: Dimensionless,
minDistance: Length
)(implicit drivetrainHardware: DrivetrainHardware, props: Signal[DrivetrainProperties])
extends FiniteTask {
override def onStart(): Unit = {
val absoluteTargetAngle = drivetrainHardware.turnPosition.zipAsync(angleToTarget).map { t =>
t._1 + t._2
}
val turnController = turnPositionControl(absoluteTargetAngle)._1
val out = childVelocityControl(
turnController.map { p =>
UnicycleSignal(forwardVelocity, p.turn min maxTurnVelocity max -maxTurnVelocity)
}
)
drivetrainComponent.setController(out.withCheckZipped(distanceToTarget) { distanceToTarget =>
{
if (distanceToTarget.exists(_ <= minDistance)) {
finished()
}
}
})
}
override def onEnd(): Unit = {
drivetrainComponent.resetToDefault()
}
}
}
| Team846/potassium | commons/src/main/scala/com/lynbrookrobotics/potassium/commons/drivetrain/unicycle/control/UnicycleCoreTasks.scala | Scala | mit | 14,158 |
package com.acework.js.components
import scala.scalajs.js
package object bootstrap extends js.GlobalScope {
val jQuery: JQueryStatic = js.native
import scala.language.implicitConversions
implicit def jq2bootstrap(jq: JQuery): BootstrapJQuery = jq.asInstanceOf[BootstrapJQuery]
}
| weiyinteo/scalajs-react-bootstrap | core/src/main/scala/com/acework/js/components/bootstrap/package.scala | Scala | mit | 290 |
package com.seanshubin.hello.domain
import java.nio.file.Path
trait FilesContract {
def readAllBytes(path: Path): Array[Byte]
}
| SeanShubin/hello | domain/src/main/scala/com/seanshubin/hello/domain/FilesContract.scala | Scala | unlicense | 132 |
package blended.akka
import akka.actor.{Actor, ActorLogging, ActorRef}
import scala.collection.mutable.ListBuffer
trait MemoryStash { this : Actor with ActorLogging =>
val requests = ListBuffer.empty[(ActorRef, Any)]
def stashing : Receive = {
case msg =>
log.debug(s"Stashing [${msg}]")
requests.prepend((sender, msg))
}
def unstash() : Unit = {
log.debug(s"Unstashing [${requests.size}] messages.")
val r = requests.reverse.toList
requests.clear()
r.foreach { case (requestor, msg) =>
self.tell(msg, requestor)
}
requests.clear()
}
}
| lefou/blended | blended.akka/src/main/scala/blended/akka/MemoryStash.scala | Scala | apache-2.0 | 598 |
package controllers
import play.api.mvc.{Action, RequestHeader, Result, Controller}
import models.User
trait Authentication {
self:Controller =>
var accessConditions: List[Conditions.Condition] = List.empty
def AuthenticateMe(f: User => Result) = Action { implicit request =>
val user = AuthUtils.parseUserFromRequest
if(user.isEmpty)
Forbidden("Invalid username or password")
else {
accessConditions.map(condition => condition(user.get)).collectFirst[String]{case Left(error) => error}
match {
case Some(error) => Forbidden(s"Conditions not met: $error")
case _ => f(user.get)
}
}
}
}
object Conditions {
type Condition = (User => Either[String, Unit])
def isPremiumUser:Condition = {
user => if(user.isPremium)
Right()
else
Left("User must be premium")
}
def balanceGreaterThan(required:Int):Condition = {
user => if(user.balance > required)
Right()
else
Left(s"User balance must be > $required")
}
}
trait PremiumUsersOnly {
self:Authentication =>
accessConditions = accessConditions :+ Conditions.isPremiumUser
}
trait BalanceCheck {
self:Authentication =>
def getRequiredBalance:Int
accessConditions = accessConditions :+ Conditions.balanceGreaterThan(getRequiredBalance)
}
object AuthUtils {
def parseUserFromCookie(implicit request: RequestHeader) = request.session.get("username").flatMap(username => User.find(username))
def parseUserFromQueryString(implicit request:RequestHeader) = {
val query = request.queryString.map { case (k, v) => k -> v.mkString }
val username = query get ("username")
val password = query get ("password")
(username, password) match {
case (Some(u), Some(p)) => User.find(u).filter(user => user.checkPassword(p))
case _ => None
}
}
def parseUserFromRequest(implicit request:RequestHeader):Option[User] = {
parseUserFromQueryString orElse parseUserFromCookie
}
}
| TheTunnelBear/PlayAuthenticationSample | app/controllers/Authentication.scala | Scala | mit | 1,983 |
import sbt.Keys._
import sbt._
import com.typesafe.sbt.packager.archetypes.JavaAppPackaging
import com.typesafe.sbt.packager.jdkpackager.JDKPackagerPlugin
import com.reactific.sbt.ProjectPlugin
import com.reactific.sbt.ProjectPlugin.autoImport._
import sbtbuildinfo.BuildInfoKeys._
import scoverage.ScoverageSbtPlugin
/** Main Build Definition For RestOmnia */
object JFXtensionsBuild extends Build {
val jfxtend_dependencies = Seq(
"de.codecentric.centerdevice" % "javafxsvg" % "1.1.0",
"org.testfx" % "testfx-core" % "4.0.4-alpha" % "test",
"org.testfx" % "openjfx-monocle" % "8u76-b04" % "test"
)
val classesIgnoredByScoverage : String = Seq[String](
"<empty>", // Avoids warnings from scoverage
"com.reactific.jfxtend.BuildInfo"
).mkString(";")
lazy val jfxtend = Project("jfxtend", file(".")).
enablePlugins(ProjectPlugin, JavaAppPackaging, JDKPackagerPlugin).
settings(
scalaVersion := "2.11.7",
organization := "com.reactific",
titleForDocs := "JavaFX Extensions",
codePackage := "com.reactific.jfxtend",
copyrightHolder := "Reactific Software LLC",
copyrightYears := Seq(2016),
developerUrl := url("http://www.reactific.com/jfxtend"),
maxErrors := 50,
buildInfoObject := "BuildInfo",
buildInfoPackage := "com.reactific.jfxtend",
ScoverageSbtPlugin.ScoverageKeys.coverageMinimum := 90,
ScoverageSbtPlugin.ScoverageKeys.coverageFailOnMinimum := true,
ScoverageSbtPlugin.ScoverageKeys.coverageExcludedPackages := classesIgnoredByScoverage,
libraryDependencies ++= jfxtend_dependencies
)
val sbt_version = sys.props.getOrElse("sbt.version","0.13.11")
override def rootProject = Some(jfxtend)
}
| reactific/jfxtensions | project/JFXtensionsBuild.scala | Scala | apache-2.0 | 1,739 |
package util
import org.eclipse.jgit.api.Git
import util.Directory._
import util.StringUtil._
import util.ControlUtil._
import scala.collection.JavaConverters._
import org.eclipse.jgit.lib._
import org.eclipse.jgit.revwalk._
import org.eclipse.jgit.revwalk.filter._
import org.eclipse.jgit.treewalk._
import org.eclipse.jgit.treewalk.filter._
import org.eclipse.jgit.diff.DiffEntry.ChangeType
import org.eclipse.jgit.errors.MissingObjectException
import java.util.Date
import org.eclipse.jgit.api.errors.NoHeadException
import service.RepositoryService
import org.eclipse.jgit.dircache.DirCacheEntry
/**
* Provides complex JGit operations.
*/
object JGitUtil {
/**
* The repository data.
*
* @param owner the user name of the repository owner
* @param name the repository name
* @param url the repository URL
* @param commitCount the commit count. If the repository has over 1000 commits then this property is 1001.
* @param branchList the list of branch names
* @param tags the list of tags
*/
case class RepositoryInfo(owner: String, name: String, url: String, commitCount: Int, branchList: List[String], tags: List[TagInfo])
/**
* The file data for the file list of the repository viewer.
*
* @param id the object id
* @param isDirectory whether is it directory
* @param name the file (or directory) name
* @param time the last modified time
* @param message the last commit message
* @param commitId the last commit id
* @param committer the last committer name
* @param mailAddress the committer's mail address
*/
case class FileInfo(id: ObjectId, isDirectory: Boolean, name: String, time: Date, message: String, commitId: String,
committer: String, mailAddress: String)
/**
* The commit data.
*
* @param id the commit id
* @param time the commit time
* @param committer the committer name
* @param mailAddress the mail address of the committer
* @param shortMessage the short message
* @param fullMessage the full message
* @param parents the list of parent commit id
*/
case class CommitInfo(id: String, time: Date, committer: String, mailAddress: String,
shortMessage: String, fullMessage: String, parents: List[String]){
def this(rev: org.eclipse.jgit.revwalk.RevCommit) = this(
rev.getName,
rev.getCommitterIdent.getWhen,
rev.getCommitterIdent.getName,
rev.getCommitterIdent.getEmailAddress,
rev.getShortMessage,
rev.getFullMessage,
rev.getParents().map(_.name).toList)
val summary = defining(fullMessage.trim.indexOf("\\n")){ i =>
defining(if(i >= 0) fullMessage.trim.substring(0, i).trim else fullMessage){ firstLine =>
if(firstLine.length > shortMessage.length) shortMessage else firstLine
}
}
val description = defining(fullMessage.trim.indexOf("\\n")){ i =>
if(i >= 0){
Some(fullMessage.trim.substring(i).trim)
} else None
}
}
case class DiffInfo(changeType: ChangeType, oldPath: String, newPath: String, oldContent: Option[String], newContent: Option[String])
/**
* The file content data for the file content view of the repository viewer.
*
* @param viewType "image", "large" or "other"
* @param content the string content
*/
case class ContentInfo(viewType: String, content: Option[String])
/**
* The tag data.
*
* @param name the tag name
* @param time the tagged date
* @param id the commit id
*/
case class TagInfo(name: String, time: Date, id: String)
/**
* Returns RevCommit from the commit or tag id.
*
* @param git the Git object
* @param objectId the ObjectId of the commit or tag
* @return the RevCommit for the specified commit or tag
*/
def getRevCommitFromId(git: Git, objectId: ObjectId): RevCommit = {
val revWalk = new RevWalk(git.getRepository)
val revCommit = revWalk.parseAny(objectId) match {
case r: RevTag => revWalk.parseCommit(r.getObject)
case _ => revWalk.parseCommit(objectId)
}
revWalk.dispose
revCommit
}
/**
* Returns the repository information. It contains branch names and tag names.
*/
def getRepositoryInfo(owner: String, repository: String, baseUrl: String): RepositoryInfo = {
using(Git.open(getRepositoryDir(owner, repository))){ git =>
try {
// get commit count
val commitCount = git.log.all.call.iterator.asScala.map(_ => 1).take(1000).sum
RepositoryInfo(
owner, repository, s"${baseUrl}/git/${owner}/${repository}.git",
// commit count
commitCount,
// branches
git.branchList.call.asScala.map { ref =>
ref.getName.replaceFirst("^refs/heads/", "")
}.toList,
// tags
git.tagList.call.asScala.map { ref =>
val revCommit = getRevCommitFromId(git, ref.getObjectId)
TagInfo(ref.getName.replaceFirst("^refs/tags/", ""), revCommit.getCommitterIdent.getWhen, revCommit.getName)
}.toList
)
} catch {
// not initialized
case e: NoHeadException => RepositoryInfo(
owner, repository, s"${baseUrl}/git/${owner}/${repository}.git", 0, Nil, Nil)
}
}
}
/**
* Returns the file list of the specified path.
*
* @param git the Git object
* @param revision the branch name or commit id
* @param path the directory path (optional)
* @return HTML of the file list
*/
def getFileList(git: Git, revision: String, path: String = "."): List[FileInfo] = {
val list = new scala.collection.mutable.ListBuffer[(ObjectId, FileMode, String, String)]
using(new RevWalk(git.getRepository)){ revWalk =>
val objectId = git.getRepository.resolve(revision)
val revCommit = revWalk.parseCommit(objectId)
using(new TreeWalk(git.getRepository)){ treeWalk =>
treeWalk.addTree(revCommit.getTree)
if(path != "."){
treeWalk.setRecursive(true)
treeWalk.setFilter(new TreeFilter(){
var stopRecursive = false
def include(walker: TreeWalk): Boolean = {
val targetPath = walker.getPathString
if((path + "/").startsWith(targetPath)){
true
} else if(targetPath.startsWith(path + "/") && targetPath.substring(path.length + 1).indexOf("/") < 0){
stopRecursive = true
treeWalk.setRecursive(false)
true
} else {
false
}
}
def shouldBeRecursive(): Boolean = !stopRecursive
override def clone: TreeFilter = return this
})
}
while (treeWalk.next()) {
list.append((treeWalk.getObjectId(0), treeWalk.getFileMode(0), treeWalk.getPathString, treeWalk.getNameString))
}
}
}
val commits = getLatestCommitFromPaths(git, list.toList.map(_._3), revision)
list.map { case (objectId, fileMode, path, name) =>
FileInfo(
objectId,
fileMode == FileMode.TREE,
name,
commits(path).getCommitterIdent.getWhen,
commits(path).getShortMessage,
commits(path).getName,
commits(path).getCommitterIdent.getName,
commits(path).getCommitterIdent.getEmailAddress)
}.sortWith { (file1, file2) =>
(file1.isDirectory, file2.isDirectory) match {
case (true , false) => true
case (false, true ) => false
case _ => file1.name.compareTo(file2.name) < 0
}
}.toList
}
/**
* Returns the commit list of the specified branch.
*
* @param git the Git object
* @param revision the branch name or commit id
* @param page the page number (1-)
* @param limit the number of commit info per page. 0 (default) means unlimited.
* @param path filters by this path. default is no filter.
* @return a tuple of the commit list and whether has next, or the error message
*/
def getCommitLog(git: Git, revision: String, page: Int = 1, limit: Int = 0, path: String = ""): Either[String, (List[CommitInfo], Boolean)] = {
val fixedPage = if(page <= 0) 1 else page
@scala.annotation.tailrec
def getCommitLog(i: java.util.Iterator[RevCommit], count: Int, logs: List[CommitInfo]): (List[CommitInfo], Boolean) =
i.hasNext match {
case true if(limit <= 0 || logs.size < limit) => {
val commit = i.next
getCommitLog(i, count + 1, if(limit <= 0 || (fixedPage - 1) * limit <= count) logs :+ new CommitInfo(commit) else logs)
}
case _ => (logs, i.hasNext)
}
using(new RevWalk(git.getRepository)){ revWalk =>
defining(git.getRepository.resolve(revision)){ objectId =>
if(objectId == null){
Left(s"${revision} can't be resolved.")
} else {
revWalk.markStart(revWalk.parseCommit(objectId))
if(path.nonEmpty){
revWalk.setRevFilter(new RevFilter(){
def include(walk: RevWalk, commit: RevCommit): Boolean = {
getDiffs(git, commit.getName, false)._1.find(_.newPath == path).nonEmpty
}
override def clone(): RevFilter = this
})
}
Right(getCommitLog(revWalk.iterator, 0, Nil))
}
}
}
}
def getCommitLogs(git: Git, begin: String, includesLastCommit: Boolean = false)
(endCondition: RevCommit => Boolean): List[CommitInfo] = {
@scala.annotation.tailrec
def getCommitLog(i: java.util.Iterator[RevCommit], logs: List[CommitInfo]): List[CommitInfo] =
i.hasNext match {
case true => {
val revCommit = i.next
if(endCondition(revCommit)){
if(includesLastCommit) logs :+ new CommitInfo(revCommit) else logs
} else {
getCommitLog(i, logs :+ new CommitInfo(revCommit))
}
}
case false => logs
}
using(new RevWalk(git.getRepository)){ revWalk =>
revWalk.markStart(revWalk.parseCommit(git.getRepository.resolve(begin)))
getCommitLog(revWalk.iterator, Nil).reverse
}
}
/**
* Returns the commit list between two revisions.
*
* @param git the Git object
* @param from the from revision
* @param to the to revision
* @return the commit list
*/
// TODO swap parameters 'from' and 'to'!?
def getCommitLog(git: Git, from: String, to: String): List[CommitInfo] =
getCommitLogs(git, to)(_.getName == from)
/**
* Returns the latest RevCommit of the specified path.
*
* @param git the Git object
* @param path the path
* @param revision the branch name or commit id
* @return the latest commit
*/
def getLatestCommitFromPath(git: Git, path: String, revision: String): Option[RevCommit] =
getLatestCommitFromPaths(git, List(path), revision).get(path)
/**
* Returns the list of latest RevCommit of the specified paths.
*
* @param git the Git object
* @param paths the list of paths
* @param revision the branch name or commit id
* @return the list of latest commit
*/
def getLatestCommitFromPaths(git: Git, paths: List[String], revision: String): Map[String, RevCommit] = {
val start = getRevCommitFromId(git, git.getRepository.resolve(revision))
paths.map { path =>
val commit = git.log.add(start).addPath(path).setMaxCount(1).call.iterator.next
(path, commit)
}.toMap
}
/**
* Get object content of the given id as String from the Git repository.
*
* @param git the Git object
* @param id the object id
* @param large if false then returns None for the large file
* @return the object or None if object does not exist
*/
def getContent(git: Git, id: ObjectId, large: Boolean): Option[Array[Byte]] = try {
val loader = git.getRepository.getObjectDatabase.open(id)
if(large == false && FileUtil.isLarge(loader.getSize)){
None
} else {
using(git.getRepository.getObjectDatabase){ db =>
Some(db.open(id).getBytes)
}
}
} catch {
case e: MissingObjectException => None
}
/**
* Returns the tuple of diff of the given commit and the previous commit id.
*/
def getDiffs(git: Git, id: String, fetchContent: Boolean = true): (List[DiffInfo], Option[String]) = {
@scala.annotation.tailrec
def getCommitLog(i: java.util.Iterator[RevCommit], logs: List[RevCommit]): List[RevCommit] =
i.hasNext match {
case true if(logs.size < 2) => getCommitLog(i, logs :+ i.next)
case _ => logs
}
using(new RevWalk(git.getRepository)){ revWalk =>
revWalk.markStart(revWalk.parseCommit(git.getRepository.resolve(id)))
val commits = getCommitLog(revWalk.iterator, Nil)
val revCommit = commits(0)
if(commits.length >= 2){
// not initial commit
val oldCommit = commits(1)
(getDiffs(git, oldCommit.getName, id, fetchContent), Some(oldCommit.getName))
} else {
// initial commit
using(new TreeWalk(git.getRepository)){ treeWalk =>
treeWalk.addTree(revCommit.getTree)
val buffer = new scala.collection.mutable.ListBuffer[DiffInfo]()
while(treeWalk.next){
buffer.append((if(!fetchContent){
DiffInfo(ChangeType.ADD, null, treeWalk.getPathString, None, None)
} else {
DiffInfo(ChangeType.ADD, null, treeWalk.getPathString, None,
JGitUtil.getContent(git, treeWalk.getObjectId(0), false).filter(FileUtil.isText).map(convertFromByteArray))
}))
}
(buffer.toList, None)
}
}
}
}
def getDiffs(git: Git, from: String, to: String, fetchContent: Boolean): List[DiffInfo] = {
val reader = git.getRepository.newObjectReader
val oldTreeIter = new CanonicalTreeParser
oldTreeIter.reset(reader, git.getRepository.resolve(from + "^{tree}"))
val newTreeIter = new CanonicalTreeParser
newTreeIter.reset(reader, git.getRepository.resolve(to + "^{tree}"))
import scala.collection.JavaConverters._
git.diff.setNewTree(newTreeIter).setOldTree(oldTreeIter).call.asScala.map { diff =>
if(!fetchContent || FileUtil.isImage(diff.getOldPath) || FileUtil.isImage(diff.getNewPath)){
DiffInfo(diff.getChangeType, diff.getOldPath, diff.getNewPath, None, None)
} else {
DiffInfo(diff.getChangeType, diff.getOldPath, diff.getNewPath,
JGitUtil.getContent(git, diff.getOldId.toObjectId, false).filter(FileUtil.isText).map(convertFromByteArray),
JGitUtil.getContent(git, diff.getNewId.toObjectId, false).filter(FileUtil.isText).map(convertFromByteArray))
}
}.toList
}
/**
* Returns the list of branch names of the specified commit.
*/
def getBranchesOfCommit(git: Git, commitId: String): List[String] =
using(new RevWalk(git.getRepository)){ revWalk =>
defining(revWalk.parseCommit(git.getRepository.resolve(commitId + "^0"))){ commit =>
git.getRepository.getAllRefs.entrySet.asScala.filter { e =>
(e.getKey.startsWith(Constants.R_HEADS) && revWalk.isMergedInto(commit, revWalk.parseCommit(e.getValue.getObjectId)))
}.map { e =>
e.getValue.getName.substring(org.eclipse.jgit.lib.Constants.R_HEADS.length)
}.toList.sorted
}
}
/**
* Returns the list of tags of the specified commit.
*/
def getTagsOfCommit(git: Git, commitId: String): List[String] =
using(new RevWalk(git.getRepository)){ revWalk =>
defining(revWalk.parseCommit(git.getRepository.resolve(commitId + "^0"))){ commit =>
git.getRepository.getAllRefs.entrySet.asScala.filter { e =>
(e.getKey.startsWith(Constants.R_TAGS) && revWalk.isMergedInto(commit, revWalk.parseCommit(e.getValue.getObjectId)))
}.map { e =>
e.getValue.getName.substring(org.eclipse.jgit.lib.Constants.R_TAGS.length)
}.toList.sorted.reverse
}
}
def initRepository(dir: java.io.File): Unit =
using(new RepositoryBuilder().setGitDir(dir).setBare.build){ repository =>
repository.create
setReceivePack(repository)
}
def cloneRepository(from: java.io.File, to: java.io.File): Unit =
using(Git.cloneRepository.setURI(from.toURI.toString).setDirectory(to).setBare(true).call){ git =>
setReceivePack(git.getRepository)
}
def isEmpty(git: Git): Boolean = git.getRepository.resolve(Constants.HEAD) == null
private def setReceivePack(repository: org.eclipse.jgit.lib.Repository): Unit =
defining(repository.getConfig){ config =>
config.setBoolean("http", null, "receivepack", true)
config.save
}
def getDefaultBranch(git: Git, repository: RepositoryService.RepositoryInfo,
revstr: String = ""): Option[(ObjectId, String)] = {
Seq(
Some(if(revstr.isEmpty) repository.repository.defaultBranch else revstr),
repository.branchList.headOption
).flatMap {
case Some(rev) => Some((git.getRepository.resolve(rev), rev))
case None => None
}.find(_._1 != null)
}
def createDirCacheEntry(path: String, mode: FileMode, objectId: ObjectId): DirCacheEntry = {
val entry = new DirCacheEntry(path)
entry.setFileMode(mode)
entry.setObjectId(objectId)
entry
}
def createNewCommit(git: Git, inserter: ObjectInserter, headId: AnyObjectId, treeId: AnyObjectId,
fullName: String, mailAddress: String, message: String): String = {
val newCommit = new CommitBuilder()
newCommit.setCommitter(new PersonIdent(fullName, mailAddress))
newCommit.setAuthor(new PersonIdent(fullName, mailAddress))
newCommit.setMessage(message)
if(headId != null){
newCommit.setParentIds(List(headId).asJava)
}
newCommit.setTreeId(treeId)
val newHeadId = inserter.insert(newCommit)
inserter.flush()
inserter.release()
val refUpdate = git.getRepository.updateRef(Constants.HEAD)
refUpdate.setNewObjectId(newHeadId)
refUpdate.update()
newHeadId.getName
}
}
| iambowen/gitbucket | src/main/scala/util/JGitUtil.scala | Scala | apache-2.0 | 18,170 |
package ru.biocad.ig.igcont.common
import scala.collection.immutable.HashMap
/**
* Created with IntelliJ IDEA.
* User: pavel
* Date: 25.02.14
* Time: 10:19
*/
case class VariantsResult(symbol : Char, variants : Iterable[Char],
annotations : HashMap[String, String])
| zmactep/igcat | lib/ig-container/src/main/scala/ru/biocad/ig/igcont/common/VariantsResult.scala | Scala | bsd-2-clause | 299 |
/* *\\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2015, Gary Keorkunian **
** **
\\* */
package squants.experimental.json
import org.scalatest._
import org.json4s.DefaultFormats
import squants.energy._
import squants.time._
import org.json4s.native.Serialization._
import squants.market._
import squants.market.Price
import squants.mass.{ Mass, Pounds }
class QuantitySerializerSpec extends FlatSpec with MustMatchers {
object QuantitySerializerMarshaller {
implicit val formats = DefaultFormats.withBigDecimal +
new PowerSerializer +
new EnergyPriceSerializer +
new MassPriceSerializer +
new MoneySerializer +
new TimeSerializer +
new MassSerializer +
new TimePriceSerializer
}
behavior of "QuantitySerializer"
import QuantitySerializerMarshaller._
val seedValue = 10.22
val seedValueInt = 10
val jsonkW = "{\\"value\\":10.22,\\"unit\\":\\"kW\\"}"
val jsonMW = "{\\"value\\":10.22,\\"unit\\":\\"MW\\"}"
val jsonGW = "{\\"value\\":10.22,\\"unit\\":\\"GW\\"}"
val jsonPowerInt = "{\\"value\\":10,\\"unit\\":\\"kW\\"}"
val jsonMillis = "{\\"value\\":10.22,\\"unit\\":\\"ms\\"}"
val jsonSeconds = "{\\"value\\":10.22,\\"unit\\":\\"s\\"}"
val jsonMinutes = "{\\"value\\":10.22,\\"unit\\":\\"m\\"}"
val jsonHours = "{\\"value\\":10.22,\\"unit\\":\\"h\\"}"
val jsonTimeInt = "{\\"value\\":10,\\"unit\\":\\"s\\"}"
it must "serialize a Power value" in {
val json = write[Power](Kilowatts(seedValue))
json must be(jsonkW)
val json2 = write[Power](Megawatts(seedValue))
json2 must be(jsonMW)
val json3 = write[Power](Gigawatts(seedValue))
json3 must be(jsonGW)
}
it must "deserialize a Power value" in {
val power = read[Power](jsonkW)
power must be(Kilowatts(seedValue))
val powerMW = read[Power](jsonMW)
powerMW must be(Megawatts(seedValue))
val powerGW = read[Power](jsonGW)
powerGW must be(Gigawatts(seedValue))
val powerInt = read[Power](jsonPowerInt)
powerInt must be(Kilowatts(seedValueInt))
}
it must "serialize a Time value" in {
val json = write[Time](Milliseconds(seedValue))
json must be(jsonMillis)
val json2 = write[Time](Seconds(seedValue))
json2 must be(jsonSeconds)
val json3 = write[Time](Minutes(seedValue))
json3 must be(jsonMinutes)
val json4 = write[Time](Hours(seedValue))
json4 must be(jsonHours)
}
it must "deserialize a Time value" in {
val time = read[Time](jsonMillis)
time must be(Milliseconds(seedValue))
val time2 = read[Time](jsonSeconds)
time2 must be(Seconds(seedValue))
val time3 = read[Time](jsonMinutes)
time3 must be(Minutes(seedValue))
val time4 = read[Time](jsonHours)
time4 must be(Hours(seedValue))
val time5 = read[Time](jsonTimeInt)
time5 must be(Seconds(seedValueInt))
}
it must "serialize a Mass value" in {
val json = write[Mass](Pounds(seedValue))
json must be("{\\"value\\":10.22,\\"unit\\":\\"lb\\"}")
}
behavior of "MoneySerializer"
val jsonMoney = "{\\"amount\\":10.22,\\"currency\\":\\"USD\\"}"
it must "serialize a Money value" in {
val json = write[Money](USD(seedValue))
json must be(jsonMoney)
}
it must "deserialize a Money value" in {
val money = read[Money](jsonMoney)
money must be(USD(seedValue))
}
behavior of "PriceSerializer"
val jsonEnergyPrice = "{\\"amount\\":10.22,\\"currency\\":\\"USD\\",\\"per\\":\\"1.0 MWh\\"}"
val jsonMassPrice = "{\\"amount\\":10.22,\\"currency\\":\\"USD\\",\\"per\\":\\"1.0 lb\\"}"
val jsonTimePrice = "{\\"amount\\":10.22,\\"currency\\":\\"USD\\",\\"per\\":\\"30.0 m\\"}"
it must "serialize an Energy Price" in {
val json = write[Price[Energy]](USD(seedValue) / MegawattHours(1))
json must be(jsonEnergyPrice)
}
it must "deserialize an Energy Price" in {
val price = read[Price[Energy]](jsonEnergyPrice)
price must be(USD(seedValue) / MegawattHours(1))
}
it must "serialize an Mass Price" in {
val json = write[Price[Mass]](USD(seedValue) / Pounds(1))
json must be(jsonMassPrice)
}
it must "deserialize an Mass Price" in {
val price = read[Price[Mass]](jsonMassPrice)
price must be(USD(seedValue) / Pounds(1))
}
it must "serialize a Time Price" in {
val json = write[Price[Time]](USD(seedValue) / Minutes(30))
json must be(jsonTimePrice)
}
it must "deserialize a Time Price" in {
val price = read[Price[Time]](jsonTimePrice)
price must be(USD(seedValue) / Minutes(30))
}
}
| derekmorr/squants | shared/src/test/scala/squants/experimental/json/QuantitySerializerSpec.scala | Scala | apache-2.0 | 4,864 |
package dotty.tools
package dotc
package parsing
import scala.language.unsafeNulls
import collection.immutable.BitSet
import core.Decorators._
import core.StdNames.nme
abstract class TokensCommon {
def maxToken: Int
type Token = Int
type TokenSet = BitSet
def tokenRange(lo: Int, hi: Int): TokenSet = BitSet(lo to hi: _*)
def showTokenDetailed(token: Int): String = debugString(token)
def showToken(token: Int): String = {
val str = tokenString(token)
if (isKeyword(token)) s"'$str'" else str
}
val tokenString, debugString: Array[String] = new Array[String](maxToken + 1)
def enter(token: Int, str: String, debugStr: String = ""): Unit = {
assert(tokenString(token) == null)
tokenString(token) = str
debugString(token) = if (debugStr.isEmpty) str else debugStr
}
/** special tokens */
inline val EMPTY = 0; enter(EMPTY, "<empty>") // a missing token, used in lookahead
inline val ERROR = 1; enter(ERROR, "erroneous token") // an erroneous token
inline val EOF = 2; enter(EOF, "eof")
/** literals */
inline val CHARLIT = 3; enter(CHARLIT, "character literal")
inline val INTLIT = 4; enter(INTLIT, "integer literal")
inline val DECILIT = 5; enter(DECILIT, "number literal") // with decimal point
inline val EXPOLIT = 6; enter(EXPOLIT, "number literal with exponent")
inline val LONGLIT = 7; enter(LONGLIT, "long literal")
inline val FLOATLIT = 8; enter(FLOATLIT, "float literal")
inline val DOUBLELIT = 9; enter(DOUBLELIT, "double literal")
inline val STRINGLIT = 10; enter(STRINGLIT, "string literal")
inline val STRINGPART = 11; enter(STRINGPART, "string literal", "string literal part")
//inline val INTERPOLATIONID = 12; enter(INTERPOLATIONID, "string interpolator")
//inline val QUOTEID = 13; enter(QUOTEID, "quoted identifier") // TODO: deprecate
/** identifiers */
inline val IDENTIFIER = 14; enter(IDENTIFIER, "identifier")
//inline val BACKQUOTED_IDENT = 15; enter(BACKQUOTED_IDENT, "identifier", "backquoted ident")
/** alphabetic keywords */
inline val IF = 20; enter(IF, "if")
inline val FOR = 21; enter(FOR, "for")
inline val ELSE = 22; enter(ELSE, "else")
inline val THIS = 23; enter(THIS, "this")
inline val NULL = 24; enter(NULL, "null")
inline val NEW = 25; enter(NEW, "new")
//inline val WITH = 26; enter(WITH, "with")
inline val SUPER = 27; enter(SUPER, "super")
//inline val CASE = 28; enter(CASE, "case")
//inline val CASECLASS = 29; enter(CASECLASS, "case class")
//inline val CASEOBJECT = 30; enter(CASEOBJECT, "case object")
//inline val VAL = 31; enter(VAL, "val")
inline val ABSTRACT = 32; enter(ABSTRACT, "abstract")
inline val FINAL = 33; enter(FINAL, "final")
inline val PRIVATE = 34; enter(PRIVATE, "private")
inline val PROTECTED = 35; enter(PROTECTED, "protected")
inline val OVERRIDE = 36; enter(OVERRIDE, "override")
//inline val IMPLICIT = 37; enter(IMPLICIT, "implicit")
//inline val VAR = 38; enter(VAR, "var")
//inline val DEF = 39; enter(DEF, "def")
//inline val TYPE = 40; enter(TYPE, "type")
inline val EXTENDS = 41; enter(EXTENDS, "extends")
inline val TRUE = 42; enter(TRUE, "true")
inline val FALSE = 43; enter(FALSE, "false")
//inline val OBJECT = 44; enter(OBJECT, "object")
inline val CLASS = 45; enter(CLASS, "class")
inline val IMPORT = 46; enter(IMPORT, "import")
inline val PACKAGE = 47; enter(PACKAGE, "package")
//inline val YIELD = 48; enter(YIELD, "yield")
inline val DO = 49; enter(DO, "do")
//inline val TRAIT = 50; enter(TRAIT, "trait")
//inline val SEALED = 51; enter(SEALED, "sealed")
inline val THROW = 52; enter(THROW, "throw")
inline val TRY = 53; enter(TRY, "try")
inline val CATCH = 54; enter(CATCH, "catch")
inline val FINALLY = 55; enter(FINALLY, "finally")
inline val WHILE = 56; enter(WHILE, "while")
inline val RETURN = 57; enter(RETURN, "return")
//inline val MATCH = 58; enter(MATCH, "match")
//inline val LAZY = 59; enter(LAZY, "lazy")
//inline val THEN = 60; enter(THEN, "then")
//inline val FORSOME = 61; enter(FORSOME, "forSome") // TODO: deprecate
//inline val ENUM = 62; enter(ENUM, "enum")
/** special symbols */
inline val COMMA = 70; enter(COMMA, "','")
inline val SEMI = 71; enter(SEMI, "';'")
inline val DOT = 72; enter(DOT, "'.'")
//inline val NEWLINE = 78; enter(NEWLINE, "end of statement", "new line")
//inline val NEWLINES = 79; enter(NEWLINES, "end of statement", "new lines")
/** special keywords */
//inline val USCORE = 73; enter(USCORE, "_")
inline val COLON = 74; enter(COLON, ":")
inline val EQUALS = 75; enter(EQUALS, "=")
//inline val LARROW = 76; enter(LARROW, "<-")
//inline val ARROW = 77; enter(ARROW, "=>")
//inline val SUBTYPE = 80; enter(SUBTYPE, "<:")
//inline val SUPERTYPE = 81; enter(SUPERTYPE, ">:")
//inline val HASH = 82; enter(HASH, "#")
inline val AT = 83; enter(AT, "@")
//inline val VIEWBOUND = 84; enter(VIEWBOUND, "<%")
val keywords: TokenSet
def isKeyword(token: Token): Boolean = keywords contains token
/** parentheses */
inline val LPAREN = 91; enter(LPAREN, "'('")
inline val RPAREN = 92; enter(RPAREN, "')'")
inline val LBRACKET = 93; enter(LBRACKET, "'['")
inline val RBRACKET = 94; enter(RBRACKET, "']'")
inline val LBRACE = 95; enter(LBRACE, "'{'")
inline val RBRACE = 96; enter(RBRACE, "'}'")
inline val INDENT = 97; enter(INDENT, "indent")
inline val OUTDENT = 98; enter(OUTDENT, "unindent")
inline val firstParen = LPAREN
inline val lastParen = OUTDENT
def buildKeywordArray(keywords: TokenSet): (Int, Array[Int]) = {
def start(tok: Token) = tokenString(tok).toTermName.asSimpleName.start
def sourceKeywords = keywords.toList.filter { (kw: Token) =>
val ts = tokenString(kw)
(ts != null) && !ts.contains(' ')
}
val lastKeywordStart = sourceKeywords.map(start).max
val arr = Array.fill(lastKeywordStart + 1)(IDENTIFIER)
for (kw <- sourceKeywords) arr(start(kw)) = kw
(lastKeywordStart, arr)
}
}
object Tokens extends TokensCommon {
inline val minToken = EMPTY
final def maxToken: Int = XMLSTART
inline val INTERPOLATIONID = 12; enter(INTERPOLATIONID, "string interpolator")
inline val QUOTEID = 13; enter(QUOTEID, "quoted identifier") // TODO: deprecate
inline val BACKQUOTED_IDENT = 15; enter(BACKQUOTED_IDENT, "identifier", "backquoted ident")
final val identifierTokens: TokenSet = BitSet(IDENTIFIER, BACKQUOTED_IDENT)
def isIdentifier(token : Int): Boolean =
token >= IDENTIFIER && token <= BACKQUOTED_IDENT
/** alphabetic keywords */
inline val WITH = 26; enter(WITH, "with")
inline val CASE = 28; enter(CASE, "case")
inline val CASECLASS = 29; enter(CASECLASS, "case class")
inline val CASEOBJECT = 30; enter(CASEOBJECT, "case object")
inline val VAL = 31; enter(VAL, "val")
inline val IMPLICIT = 37; enter(IMPLICIT, "implicit")
inline val VAR = 38; enter(VAR, "var")
inline val DEF = 39; enter(DEF, "def")
inline val TYPE = 40; enter(TYPE, "type")
inline val OBJECT = 44; enter(OBJECT, "object")
inline val YIELD = 48; enter(YIELD, "yield")
inline val TRAIT = 50; enter(TRAIT, "trait")
inline val SEALED = 51; enter(SEALED, "sealed")
inline val MATCH = 58; enter(MATCH, "match")
inline val LAZY = 59; enter(LAZY, "lazy")
inline val THEN = 60; enter(THEN, "then")
inline val FORSOME = 61; enter(FORSOME, "forSome") // TODO: deprecate
inline val ENUM = 62; enter(ENUM, "enum")
inline val GIVEN = 63; enter(GIVEN, "given")
inline val EXPORT = 64; enter(EXPORT, "export")
inline val MACRO = 65; enter(MACRO, "macro") // TODO: remove
inline val END = 66; enter(END, "end")
/** special symbols */
inline val NEWLINE = 78; enter(NEWLINE, "end of statement", "new line")
inline val NEWLINES = 79; enter(NEWLINES, "end of statement", "new lines")
/** special keywords */
inline val USCORE = 73; enter(USCORE, "_")
inline val LARROW = 76; enter(LARROW, "<-")
inline val ARROW = 77; enter(ARROW, "=>")
inline val SUBTYPE = 80; enter(SUBTYPE, "<:")
inline val SUPERTYPE = 81; enter(SUPERTYPE, ">:")
inline val HASH = 82; enter(HASH, "#")
inline val VIEWBOUND = 84; enter(VIEWBOUND, "<%")
inline val TLARROW = 85; enter(TLARROW, "=>>")
inline val CTXARROW = 86; enter(CTXARROW, "?=>")
inline val QUOTE = 87; enter(QUOTE, "'")
inline val COLONEOL = 88; enter(COLONEOL, ":", ": at eol")
inline val SELFARROW = 89; enter(SELFARROW, "=>") // reclassified ARROW following self-type
/** XML mode */
inline val XMLSTART = 99; enter(XMLSTART, "$XMLSTART$<") // TODO: deprecate
final val alphaKeywords: TokenSet = tokenRange(IF, END)
final val symbolicKeywords: TokenSet = tokenRange(USCORE, CTXARROW)
final val keywords: TokenSet = alphaKeywords | symbolicKeywords
final val allTokens: TokenSet = tokenRange(minToken, maxToken)
final val simpleLiteralTokens: TokenSet =
tokenRange(CHARLIT, STRINGLIT) | BitSet(TRUE, FALSE)
final val literalTokens: TokenSet = simpleLiteralTokens | BitSet(INTERPOLATIONID, QUOTEID, NULL) // TODO: drop QUOTEID when symbol literals are gone
final val atomicExprTokens: TokenSet = literalTokens | identifierTokens | BitSet(
USCORE, NULL, THIS, SUPER, TRUE, FALSE, RETURN, QUOTEID, XMLSTART)
final val openParensTokens = BitSet(LBRACE, LPAREN, LBRACKET)
final val canStartExprTokens3: TokenSet =
atomicExprTokens
| openParensTokens
| BitSet(INDENT, QUOTE, IF, WHILE, FOR, NEW, TRY, THROW)
final val canStartExprTokens2: TokenSet = canStartExprTokens3 | BitSet(DO)
final val canStartTypeTokens: TokenSet = literalTokens | identifierTokens | BitSet(
THIS, SUPER, USCORE, LPAREN, AT)
final val templateIntroTokens: TokenSet = BitSet(CLASS, TRAIT, OBJECT, ENUM, CASECLASS, CASEOBJECT)
final val dclIntroTokens: TokenSet = BitSet(DEF, VAL, VAR, TYPE, GIVEN)
final val defIntroTokens: TokenSet = templateIntroTokens | dclIntroTokens
final val localModifierTokens: TokenSet = BitSet(ABSTRACT, FINAL, SEALED, IMPLICIT, LAZY)
final val accessModifierTokens: TokenSet = BitSet(
PRIVATE, PROTECTED)
final val modifierTokens: TokenSet = localModifierTokens | accessModifierTokens | BitSet(
OVERRIDE)
final val modifierTokensOrCase: TokenSet = modifierTokens | BitSet(CASE)
final val modifierFollowers = modifierTokensOrCase | defIntroTokens
/** Is token only legal as start of statement (eof also included)? */
final val mustStartStatTokens: TokenSet = defIntroTokens | modifierTokens | BitSet(IMPORT, EXPORT, PACKAGE)
final val canStartStatTokens2: TokenSet = canStartExprTokens2 | mustStartStatTokens | BitSet(
AT, CASE, END) // END is included since it might be tested before being converted back to IDENTIFIER
final val canStartStatTokens3: TokenSet = canStartExprTokens3 | mustStartStatTokens | BitSet(
AT, CASE, END)
final val canEndStatTokens: TokenSet = atomicExprTokens | BitSet(TYPE, GIVEN, RPAREN, RBRACE, RBRACKET, OUTDENT)
/** Tokens that stop a lookahead scan search for a `<-`, `then`, or `do`.
* Used for disambiguating between old and new syntax.
*/
final val stopScanTokens: BitSet = mustStartStatTokens |
BitSet(IF, ELSE, WHILE, DO, FOR, YIELD, NEW, TRY, CATCH, FINALLY, THROW, RETURN, MATCH, SEMI, EOF)
final val numericLitTokens: TokenSet = BitSet(INTLIT, DECILIT, EXPOLIT, LONGLIT, FLOATLIT, DOUBLELIT)
final val statCtdTokens: BitSet = BitSet(THEN, ELSE, DO, CATCH, FINALLY, YIELD, MATCH)
final val closingRegionTokens = BitSet(RBRACE, RPAREN, RBRACKET, CASE) | statCtdTokens
final val canStartIndentTokens: BitSet =
statCtdTokens | BitSet(COLONEOL, WITH, EQUALS, ARROW, CTXARROW, LARROW, WHILE, TRY, FOR, IF, THROW, RETURN)
/** Faced with the choice between a type and a formal parameter, the following
* tokens determine it's a formal parameter.
*/
final val startParamTokens: BitSet = modifierTokens | BitSet(VAL, VAR, AT)
final val scala3keywords = BitSet(ENUM, GIVEN)
final val endMarkerTokens = identifierTokens | BitSet(IF, WHILE, FOR, MATCH, TRY, NEW, THROW, GIVEN, VAL, THIS)
final val skipStopTokens = BitSet(SEMI, NEWLINE, NEWLINES, RBRACE, RPAREN, RBRACKET, OUTDENT)
final val softModifierNames = Set(nme.inline, nme.opaque, nme.open, nme.transparent, nme.infix)
}
| lampepfl/dotty | compiler/src/dotty/tools/dotc/parsing/Tokens.scala | Scala | apache-2.0 | 13,511 |
/*-------------------------------------------------------------------------*\
** ScalaCheck **
** Copyright (c) 2007-2018 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
\*------------------------------------------------------------------------ */
package org.scalacheck
import language.higherKinds
import language.implicitConversions
import rng.Seed
import util.Buildable
import util.SerializableCanBuildFroms._
import ScalaVersionSpecific._
import scala.annotation.tailrec
import scala.collection.immutable.TreeMap
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration.{Duration, FiniteDuration}
import java.util.{ Calendar, UUID }
sealed abstract class Gen[+T] extends Serializable { self =>
//// Private interface ////
import Gen.{R, gen}
/** Just an alias */
private type P = Gen.Parameters
/** Should be a copy of R.sieve. Used internally in Gen when some generators
* with suchThat-clause are created (when R is not available). This method
* actually breaks covariance, but since this method will only ever be
* called with a value of exactly type T, it is OK. */
private[scalacheck] def sieveCopy(x: Any): Boolean = true
private[scalacheck] def doApply(p: P, seed: Seed): R[T]
//// Public interface ////
/** A class supporting filtered operations. */
final class WithFilter(p: T => Boolean) {
def map[U](f: T => U): Gen[U] = Gen.this.suchThat(p).map(f)
def flatMap[U](f: T => Gen[U]): Gen[U] = Gen.this.suchThat(p).flatMap(f)
def withFilter(q: T => Boolean): WithFilter = Gen.this.withFilter(x => p(x) && q(x))
}
/** Evaluate this generator with the given parameters */
def apply(p: Gen.Parameters, seed: Seed): Option[T] =
doApply(p, seed).retrieve
def doPureApply(p: Gen.Parameters, seed: Seed, retries: Int = 100): Gen.R[T] = {
@tailrec def loop(r: Gen.R[T], i: Int): Gen.R[T] =
if (r.retrieve.isDefined) r
else if (i > 0) loop(doApply(p, r.seed), i - 1)
else throw new Gen.RetrievalError()
loop(doApply(p, seed), retries)
}
/**
* Evaluate this generator with the given parameters.
*
* The generator will attempt to generate a valid `T` value. If a
* valid value is not produced it may retry several times,
* determined by the `retries` parameter (which defaults to 100).
*
* If all the retries fail it will throw a `Gen.RetrievalError`
* exception.
*/
def pureApply(p: Gen.Parameters, seed: Seed, retries: Int = 100): T =
doPureApply(p, seed, retries).retrieve.get
/** Create a new generator by mapping the result of this generator */
def map[U](f: T => U): Gen[U] = gen { (p, seed) => doApply(p, seed).map(f) }
/** Create a new generator by flat-mapping the result of this generator */
def flatMap[U](f: T => Gen[U]): Gen[U] = gen { (p, seed) =>
val rt = doApply(p, seed)
rt.flatMap(t => f(t).doApply(p, rt.seed))
}
/** Create a new generator that uses this generator to produce a value
* that fulfills the given condition. If the condition is not fulfilled,
* the generator fails (returns None). Also, make sure that the provided
* test property is side-effect free, eg it should not use external vars. */
def filter(p: T => Boolean): Gen[T] = suchThat(p)
/** Create a new generator that uses this generator to produce a value
* that doesn't fulfill the given condition. If the condition is fulfilled,
* the generator fails (returns None). Also, make sure that the provided
* test property is side-effect free, eg it should not use external vars. */
def filterNot(p: T => Boolean): Gen[T] = suchThat(x => !p(x))
/** Creates a non-strict filtered version of this generator. */
def withFilter(p: T => Boolean): WithFilter = new WithFilter(p)
/** Create a new generator that uses this generator to produce a value
* that fulfills the given condition. If the condition is not fulfilled,
* the generator fails (returns None). Also, make sure that the provided
* test property is side-effect free, eg it should not use external vars.
* This method is identical to [Gen.filter]. */
def suchThat(f: T => Boolean): Gen[T] = new Gen[T] {
def doApply(p: P, seed: Seed) =
p.useInitialSeed(seed) { (p0, s0) =>
val res = Gen.this.doApply(p0, s0)
res.copy(s = { x:T => res.sieve(x) && f(x) })
}
override def sieveCopy(x: Any) =
try Gen.this.sieveCopy(x) && f(x.asInstanceOf[T])
catch { case _: java.lang.ClassCastException => false }
}
case class RetryUntilException(n: Int) extends RuntimeException(s"retryUntil failed after $n attempts")
/**
* Create a generator that calls this generator repeatedly until the
* given condition is fulfilled. The generated value is then
* returned. Make sure that the provided test property is
* side-effect free (it should not use external vars).
*
* If the generator fails more than maxTries, a RetryUntilException
* will be thrown.
*/
def retryUntil(p: T => Boolean, maxTries: Int): Gen[T] = {
require(maxTries > 0)
def loop(params: P, seed: Seed, tries: Int): R[T] =
if (tries > maxTries) throw RetryUntilException(tries) else {
val r = self.doApply(params, seed)
if (r.retrieve.exists(p)) r else loop(params, r.seed, tries + 1)
}
Gen.gen((params, seed) => loop(params, seed, 1))
}
/**
* Create a generator that calls this generator repeatedly until the
* given condition is fulfilled. The generated value is then
* returned. Make sure that the provided test property is
* side-effect free (it should not use external vars).
*
*
* If the generator fails more than 10000 times, a
* RetryUntilException will be thrown. You can call `retryUntil`
* with a second parameter to change this number.
*/
def retryUntil(p: T => Boolean): Gen[T] =
retryUntil(p, 10000)
def sample: Option[T] =
doApply(Gen.Parameters.default, Seed.random()).retrieve
/** Returns a new property that holds if and only if both this
* and the given generator generates the same result, or both
* generators generate no result. */
def ==[U](g: Gen[U]) = Prop { prms =>
// test equality using a random seed
val seed = Seed.random()
val lhs = doApply(prms, seed).retrieve
val rhs = g.doApply(prms, seed).retrieve
if (lhs == rhs) Prop.proved(prms) else Prop.falsified(prms)
}
def !=[U](g: Gen[U]) = Prop.forAll(this)(r => Prop.forAll(g)(_ != r))
def !==[U](g: Gen[U]) = Prop { prms =>
// test inequality using a random seed
val seed = Seed.random()
val lhs = doApply(prms, seed).retrieve
val rhs = g.doApply(prms, seed).retrieve
if (lhs != rhs) Prop.proved(prms) else Prop.falsified(prms)
}
/** Put a label on the generator to make test reports clearer */
def label(l: String): Gen[T] = new Gen[T] {
def doApply(p: P, seed: Seed) =
p.useInitialSeed(seed) { (p0, s0) =>
val r = Gen.this.doApply(p0, s0)
r.copy(l = r.labels + l)
}
override def sieveCopy(x: Any) = Gen.this.sieveCopy(x)
}
/** Put a label on the generator to make test reports clearer */
def :|(l: String) = label(l)
/** Put a label on the generator to make test reports clearer */
def |:(l: String) = label(l)
/** Put a label on the generator to make test reports clearer */
def :|(l: Symbol) = label(l.name)
/** Put a label on the generator to make test reports clearer */
def |:(l: Symbol) = label(l.name)
/** Perform some RNG perturbation before generating */
def withPerturb(f: Seed => Seed): Gen[T] =
Gen.gen((p, seed) => doApply(p, f(seed)))
}
object Gen extends GenArities with GenVersionSpecific {
//// Private interface ////
import Arbitrary.arbitrary
/** Just an alias */
private type P = Parameters
class RetrievalError extends RuntimeException("couldn't generate value")
private[scalacheck] trait R[+T] {
def labels: Set[String] = Set()
def sieve[U >: T]: U => Boolean = _ => true
protected def result: Option[T]
def seed: Seed
def retrieve: Option[T] = result.filter(sieve)
def copy[U >: T](
l: Set[String] = this.labels,
s: U => Boolean = this.sieve,
r: Option[U] = this.result,
sd: Seed = this.seed
): R[U] = new R[U] {
override val labels = l
override def sieve[V >: U] = { (x: Any) =>
try s(x.asInstanceOf[U])
catch { case _: java.lang.ClassCastException => false }
}
val seed = sd
val result = r
}
def map[U](f: T => U): R[U] = r(retrieve.map(f), seed).copy(l = labels)
def flatMap[U](f: T => R[U]): R[U] = retrieve match {
case None => r(None, seed).copy(l = labels)
case Some(t) =>
val r = f(t)
r.copy(l = labels ++ r.labels, sd = r.seed)
}
}
private[scalacheck] def r[T](r: Option[T], sd: Seed): R[T] = new R[T] {
val result = r
val seed = sd
}
/** Generator factory method */
private[scalacheck] def gen[T](f: (P, Seed) => R[T]): Gen[T] = new Gen[T] {
def doApply(p: P, seed: Seed): R[T] = p.useInitialSeed(seed)(f)
}
//// Public interface ////
/** Generator parameters, used by [[org.scalacheck.Gen.apply]] */
sealed abstract class Parameters extends Serializable {
/**
* The size of the generated value. Generator implementations are
* allowed to freely interpret (or ignore) this value. During test
* execution, the value of this parameter is controlled by
* [[Test.Parameters.minSize]] and [[Test.Parameters.maxSize]].
*/
val size: Int
/**
* Create a copy of this [[Gen.Parameters]] instance with
* [[Gen.Parameters.size]] set to the specified value.
*/
def withSize(size: Int): Parameters =
cp(size = size)
/**
*
*/
val initialSeed: Option[Seed]
def withInitialSeed(seed: Seed): Parameters =
cp(initialSeed = Some(seed))
def withInitialSeed(n: Long): Parameters =
cp(initialSeed = Some(Seed(n)))
def withNoInitialSeed: Parameters =
cp(initialSeed = None)
def useInitialSeed[A](seed: Seed)(f: (Parameters, Seed) => A): A =
initialSeed match {
case Some(s) => f(this.withNoInitialSeed, s)
case None => f(this, seed)
}
// private since we can't guarantee binary compatibility for this one
private case class cp(size: Int = size, initialSeed: Option[Seed] = None) extends Parameters
}
/** Provides methods for creating [[org.scalacheck.Gen.Parameters]] values */
object Parameters {
/** Default generator parameters instance. */
val default: Parameters = new Parameters {
val size: Int = 100
val initialSeed: Option[Seed] = None
}
}
/** A wrapper type for range types */
trait Choose[T] extends Serializable {
/** Creates a generator that returns a value in the given inclusive range */
def choose(min: T, max: T): Gen[T]
}
/** Provides implicit [[org.scalacheck.Gen.Choose]] instances */
object Choose {
class IllegalBoundsError[A](low: A, high: A)
extends IllegalArgumentException(s"invalid bounds: low=$low, high=$high")
/**
* This method gets a ton of use -- so we want it to be as fast as
* possible for many of our common cases.
*/
private def chLng(l: Long, h: Long)(p: P, seed: Seed): R[Long] = {
if (h < l) {
throw new IllegalBoundsError(l, h)
} else if (h == l) {
const(l).doApply(p, seed)
} else if (l == Long.MinValue && h == Long.MaxValue) {
val (n, s) = seed.long
r(Some(n), s)
} else if (l == Int.MinValue && h == Int.MaxValue) {
val (n, s) = seed.long
r(Some(n.toInt.toLong), s)
} else if (l == Short.MinValue && h == Short.MaxValue) {
val (n, s) = seed.long
r(Some(n.toShort.toLong), s)
} else if (l == 0L && h == Char.MaxValue) {
val (n, s) = seed.long
r(Some(n.toChar.toLong), s)
} else if (l == Byte.MinValue && h == Byte.MaxValue) {
val (n, s) = seed.long
r(Some(n.toByte.toLong), s)
} else {
val d = h - l + 1
if (d <= 0) {
var tpl = seed.long
var n = tpl._1
var s = tpl._2
while (n < l || n > h) {
tpl = s.long
n = tpl._1
s = tpl._2
}
r(Some(n), s)
} else {
val (n, s) = seed.long
r(Some(l + (n & 0x7fffffffffffffffL) % d), s)
}
}
}
private def chDbl(l: Double, h: Double)(p: P, seed: Seed): R[Double] = {
val d = h - l
if (d < 0) {
throw new IllegalBoundsError(l, h)
} else if (d > Double.MaxValue) {
val (x, seed2) = seed.long
if (x < 0) chDbl(l, 0d)(p, seed2) else chDbl(0d, h)(p, seed2)
} else if (d == 0) {
r(Some(l), seed)
} else {
val (n, s) = seed.double
r(Some(n * (h-l) + l), s)
}
}
implicit val chooseLong: Choose[Long] =
new Choose[Long] {
def choose(low: Long, high: Long): Gen[Long] =
if (low > high) throw new IllegalBoundsError(low, high)
else gen(chLng(low,high))
}
implicit val chooseInt: Choose[Int] =
Choose.xmap[Long, Int](_.toInt, _.toLong)
implicit val chooseShort: Choose[Short] =
Choose.xmap[Long, Short](_.toShort, _.toLong)
implicit val chooseChar: Choose[Char] =
Choose.xmap[Long, Char](_.toChar, _.toLong)
implicit val chooseByte: Choose[Byte] =
Choose.xmap[Long, Byte](_.toByte, _.toLong)
implicit val chooseDouble: Choose[Double] =
new Choose[Double] {
def choose(low: Double, high: Double) =
if (low > high) throw new IllegalBoundsError(low, high)
else if (low == Double.NegativeInfinity)
frequency(1 -> const(Double.NegativeInfinity),
9 -> choose(Double.MinValue, high))
else if (high == Double.PositiveInfinity)
frequency(1 -> const(Double.PositiveInfinity),
9 -> choose(low, Double.MaxValue))
else gen(chDbl(low,high))
}
implicit val chooseFloat: Choose[Float] =
Choose.xmap[Double, Float](_.toFloat, _.toDouble)
implicit val chooseFiniteDuration: Choose[FiniteDuration] =
Choose.xmap[Long, FiniteDuration](Duration.fromNanos, _.toNanos)
/** Transform a Choose[T] to a Choose[U] where T and U are two isomorphic
* types whose relationship is described by the provided transformation
* functions. (exponential functor map) */
def xmap[T, U](from: T => U, to: U => T)(implicit c: Choose[T]): Choose[U] =
new Choose[U] {
def choose(low: U, high: U): Gen[U] =
c.choose(to(low), to(high)).map(from)
}
}
//// Various Generator Combinators ////
/** A generator that always generates the given value */
implicit def const[T](x: T): Gen[T] = gen((p, seed) => r(Some(x), seed))
/** A generator that never generates a value */
def fail[T]: Gen[T] = gen((p, seed) => failed[T](seed))
/** A result that never contains a value */
private[scalacheck] def failed[T](seed0: Seed): R[T] =
new R[T] {
val result: Option[T] = None
override def sieve[U >: T]: U => Boolean = _ => false
val seed = seed0
}
/** A generator that generates a random value in the given (inclusive)
* range. If the range is invalid, the generator will not generate
* any value. */
def choose[T](min: T, max: T)(implicit c: Choose[T]): Gen[T] =
c.choose(min, max)
/** Sequences generators. If any of the given generators fails, the
* resulting generator will also fail. */
def sequence[C,T](gs: Traversable[Gen[T]])(implicit b: Buildable[T,C]): Gen[C] = {
val g = gen { (p, seed) =>
gs.foldLeft(r(Some(Vector.empty[T]), seed)) {
case (rs,g) =>
val rt = g.doApply(p, rs.seed)
rt.flatMap(t => rs.map(_ :+ t)).copy(sd = rt.seed)
}
}
g.map(b.fromIterable)
}
/** Monadic recursion on Gen
* This is a stack-safe loop that is the same as:
*
* {{{
*
* fn(a).flatMap {
* case Left(a) => tailRec(a)(fn)
* case Right(b) => Gen.const(b)
* }
*
* }}}
*
* which is useful for doing monadic loops without blowing up the
* stack
*/
def tailRecM[A, B](a0: A)(fn: A => Gen[Either[A, B]]): Gen[B] = {
@tailrec
def tailRecMR(a: A, seed: Seed, labs: Set[String])(fn: (A, Seed) => R[Either[A, B]]): R[B] = {
val re = fn(a, seed)
val nextLabs = labs | re.labels
re.retrieve match {
case None => r(None, re.seed).copy(l = nextLabs)
case Some(Right(b)) => r(Some(b), re.seed).copy(l = nextLabs)
case Some(Left(a)) => tailRecMR(a, re.seed, nextLabs)(fn)
}
}
// This is the "Reader-style" appoach to making a stack-safe loop:
// we put one outer closure around an explicitly tailrec loop
gen[B] { (p: P, seed: Seed) =>
tailRecMR(a0, seed, Set.empty) { (a, seed) => fn(a).doApply(p, seed) }
}
}
/** Wraps a generator lazily. The given parameter is only evaluated once,
* and not until the wrapper generator is evaluated. */
def lzy[T](g: => Gen[T]): Gen[T] = {
lazy val h = g
gen { (p, seed) => h.doApply(p, seed) }
}
/** Wraps a generator for later evaluation. The given parameter is
* evaluated each time the wrapper generator is evaluated. */
def delay[T](g: => Gen[T]): Gen[T] =
gen { (p, seed) => g.doApply(p, seed) }
/** Creates a generator that can access its generation parameters */
def parameterized[T](f: Parameters => Gen[T]): Gen[T] =
gen { (p, seed) => f(p).doApply(p, seed) }
/** Creates a generator that can access its generation size */
def sized[T](f: Int => Gen[T]): Gen[T] =
gen { (p, seed) => f(p.size).doApply(p, seed) }
/** A generator that returns the current generation size */
lazy val size: Gen[Int] = sized { sz => sz }
/** Creates a resized version of a generator */
def resize[T](s: Int, g: Gen[T]) = gen((p, seed) => g.doApply(p.withSize(s), seed))
/** Picks a random value from a list */
def oneOf[T](xs: Seq[T]): Gen[T] =
if (xs.isEmpty) {
throw new IllegalArgumentException("oneOf called on empty collection")
} else {
val vector = xs.toVector
choose(0, vector.size - 1).map(vector(_))
}
/** Picks a random value from a list */
def oneOf[T](t0: T, t1: T, tn: T*): Gen[T] = oneOf(t0 +: t1 +: tn)
/** Picks a random generator from a list */
def oneOf[T](g0: Gen[T], g1: Gen[T], gn: Gen[T]*): Gen[T] = {
val gs = g0 +: g1 +: gn
choose(0,gs.size-1).flatMap(gs(_)).suchThat(x => gs.exists(_.sieveCopy(x)))
}
/** Makes a generator result optional. Either `Some(T)` or `None` will be provided. */
def option[T](g: Gen[T]): Gen[Option[T]] =
frequency(1 -> const(None), 9 -> some(g))
/** A generator that returns `Some(T)` */
def some[T](g: Gen[T]): Gen[Option[T]] =
g.map(Some.apply)
/** Chooses one of the given generators with a weighted random distribution */
def frequency[T](gs: (Int, Gen[T])*): Gen[T] = {
val filtered = gs.iterator.filter(_._1 > 0).toVector
if (filtered.isEmpty) {
throw new IllegalArgumentException("no items with positive weights")
} else {
var total = 0L
val builder = TreeMap.newBuilder[Long, Gen[T]]
filtered.foreach { case (weight, value) =>
total += weight
builder += ((total, value))
}
val tree = builder.result
choose(1L, total).flatMap(r => tree.rangeFrom(r).head._2).suchThat { x =>
gs.exists(_._2.sieveCopy(x))
}
}
}
/** Implicit convenience method for using the `frequency` method
* like this:
* {{{
* frequency((1, "foo"), (3, "bar"))
* }}}
*/
implicit def freqTuple[T](t: (Int,T)): (Int,Gen[T]) = (t._1, const(t._2))
//// List Generators ////
/** Generates a container of any Traversable type for which there exists an
* implicit [[org.scalacheck.util.Buildable]] instance. The elements in the
* container will be generated by the given generator. The size of the
* generated container is limited by `n`. Depending on what kind of container
* that is generated, the resulting container may contain fewer elements than
* `n`, but not more. If the given generator fails generating a value, the
* complete container generator will also fail. */
def buildableOfN[C,T](n: Int, g: Gen[T])(implicit
evb: Buildable[T,C], evt: C => Traversable[T]
): Gen[C] =
sequence[C,T](Traversable.fill(n)(g)) suchThat { c =>
// TODO: Can we guarantee c.size == n (See issue #89)?
c.forall(g.sieveCopy)
}
/** Generates a container of any Traversable type for which there exists an
* implicit [[org.scalacheck.util.Buildable]] instance. The elements in the
* container will be generated by the given generator. The size of the
* container is bounded by the size parameter used when generating values. */
def buildableOf[C,T](g: Gen[T])(implicit
evb: Buildable[T,C], evt: C => Traversable[T]
): Gen[C] =
sized(s => choose(0, s max 0).flatMap(buildableOfN[C,T](_,g))) suchThat { c =>
if (c == null) g.sieveCopy(null) else c.forall(g.sieveCopy)
}
/** Generates a non-empty container of any Traversable type for which there
* exists an implicit [[org.scalacheck.util.Buildable]] instance. The
* elements in the container will be generated by the given generator. The
* size of the container is bounded by the size parameter used when
* generating values. */
def nonEmptyBuildableOf[C,T](g: Gen[T])(implicit
evb: Buildable[T,C], evt: C => Traversable[T]
): Gen[C] =
sized(s => choose(1, s max 1).flatMap(buildableOfN[C,T](_,g))) suchThat(_.size > 0)
/** A convenience method for calling `buildableOfN[C[T],T](n,g)`. */
def containerOfN[C[_],T](n: Int, g: Gen[T])(implicit
evb: Buildable[T,C[T]], evt: C[T] => Traversable[T]
): Gen[C[T]] = buildableOfN[C[T],T](n,g)
/** A convenience method for calling `buildableOf[C[T],T](g)`. */
def containerOf[C[_],T](g: Gen[T])(implicit
evb: Buildable[T,C[T]], evt: C[T] => Traversable[T]
): Gen[C[T]] = buildableOf[C[T],T](g)
/** A convenience method for calling `nonEmptyBuildableOf[C[T],T](g)`. */
def nonEmptyContainerOf[C[_],T](g: Gen[T])(implicit
evb: Buildable[T,C[T]], evt: C[T] => Traversable[T]
): Gen[C[T]] = nonEmptyBuildableOf[C[T],T](g)
/** Generates a list of random length. The maximum length depends on the
* size parameter. This method is equal to calling
* `containerOf[List,T](g)`. */
def listOf[T](g: => Gen[T]) = buildableOf[List[T],T](g)
/** Generates a non-empty list of random length. The maximum length depends
* on the size parameter. This method is equal to calling
* `nonEmptyContainerOf[List,T](g)`. */
def nonEmptyListOf[T](g: => Gen[T]) = nonEmptyBuildableOf[List[T],T](g)
/** Generates a list of the given length. This method is equal to calling
* `containerOfN[List,T](n,g)`. */
def listOfN[T](n: Int, g: Gen[T]) = buildableOfN[List[T],T](n,g)
/** Generates a map of random length. The maximum length depends on the
* size parameter. This method is equal to calling
* <code>containerOf[Map,T,U](g)</code>. */
def mapOf[T,U](g: => Gen[(T,U)]) = buildableOf[Map[T,U],(T,U)](g)
/** Generates a non-empty map of random length. The maximum length depends
* on the size parameter. This method is equal to calling
* <code>nonEmptyContainerOf[Map,T,U](g)</code>. */
def nonEmptyMap[T,U](g: => Gen[(T,U)]) = nonEmptyBuildableOf[Map[T,U],(T,U)](g)
/** Generates a map with at most the given number of elements. This method
* is equal to calling <code>containerOfN[Map,T,U](n,g)</code>. */
def mapOfN[T,U](n: Int, g: Gen[(T,U)]) = buildableOfN[Map[T,U],(T,U)](n,g)
/** Generates an infinite stream. */
def infiniteStream[T](g: => Gen[T]): Gen[Stream[T]] = {
def unfold[A, S](z: S)(f: S => Option[(A, S)]): Stream[A] = f(z) match {
case Some((h, s)) => h #:: unfold(s)(f)
case None => Stream.empty
}
gen { (p, seed0) =>
new R[Stream[T]] {
val result: Option[Stream[T]] = Some(unfold(seed0)(s => Some(g.pureApply(p, s) -> s.next)))
val seed: Seed = seed0.next
}
}
}
/** A generator that picks a random number of elements from a list */
def someOf[T](l: Iterable[T]) = choose(0,l.size).flatMap(pick(_,l))
/** A generator that picks a random number of elements from a list */
def someOf[T](g1: Gen[T], g2: Gen[T], gs: Gen[T]*) =
choose(0, gs.length+2).flatMap(pick(_, g1, g2, gs: _*))
/** A generator that picks at least one element from a list */
def atLeastOne[T](l: Iterable[T]) = {
require(l.size > 0, "There has to be at least one option to choose from")
choose(1,l.size).flatMap(pick(_,l))
}
/** A generator that picks at least one element from a list */
def atLeastOne[T](g1: Gen[T], g2: Gen[T], gs: Gen[T]*) =
choose(1, gs.length+2).flatMap(pick(_, g1, g2, gs: _*))
/** A generator that picks a given number of elements from a list, randomly */
def pick[T](n: Int, l: Iterable[T]): Gen[collection.Seq[T]] = {
if (n > l.size || n < 0) throw new IllegalArgumentException(s"invalid choice: $n")
else if (n == 0) Gen.const(Nil)
else gen { (p, seed0) =>
val buf = ArrayBuffer.empty[T]
val it = l.iterator
var seed = seed0
var count = 0
while (it.hasNext) {
val t = it.next
count += 1
if (count <= n) {
buf += t
} else {
val (x, s) = seed.long
val i = (x & 0x7fffffff).toInt % count
if (i < n) buf(i) = t
seed = s
}
}
r(Some(buf), seed)
}
}
/** A generator that picks a given number of elements from a list, randomly */
def pick[T](n: Int, g1: Gen[T], g2: Gen[T], gn: Gen[T]*): Gen[Seq[T]] = {
val gs = g1 +: g2 +: gn
pick(n, 0 until gs.size).flatMap(idxs =>
sequence[List[T],T](idxs.toList.map(gs(_)))
).suchThat(_.forall(x => gs.exists(_.sieveCopy(x))))
}
/** Takes a function and returns a generator that generates arbitrary
* results of that function by feeding it with arbitrarily generated input
* parameters. */
def resultOf[T,R0](f: T => R0)(implicit a: Arbitrary[T]): Gen[R0] =
arbitrary[T] map f
/** Creates a Function0 generator. */
def function0[A](g: Gen[A]): Gen[() => A] =
g.map(a => () => a)
//// Character Generators ////
/** Generates a numerical character */
def numChar: Gen[Char] = choose(48.toChar, 57.toChar)
/** Generates an upper-case alpha character */
def alphaUpperChar: Gen[Char] = choose(65.toChar, 90.toChar)
/** Generates a lower-case alpha character */
def alphaLowerChar: Gen[Char] = choose(97.toChar, 122.toChar)
/** Generates an alpha character */
def alphaChar = frequency((1,alphaUpperChar), (9,alphaLowerChar))
/** Generates an alphanumerical character */
def alphaNumChar = frequency((1,numChar), (9,alphaChar))
/** Generates a ASCII character, with extra weighting for printable characters */
def asciiChar: Gen[Char] = chooseNum(0, 127, 32 to 126:_*).map(_.toChar)
/** Generates a ASCII printable character */
def asciiPrintableChar: Gen[Char] = choose(32.toChar, 126.toChar)
//// String Generators ////
/** Generates a string that starts with a lower-case alpha character,
* and only contains alphanumerical characters */
def identifier: Gen[String] = (for {
c <- alphaLowerChar
cs <- listOf(alphaNumChar)
} yield (c::cs).mkString)
/** Generates a string of digits */
def numStr: Gen[String] =
listOf(numChar).map(_.mkString)
/** Generates a string of upper-case alpha characters */
def alphaUpperStr: Gen[String] =
listOf(alphaUpperChar).map(_.mkString)
/** Generates a string of lower-case alpha characters */
def alphaLowerStr: Gen[String] =
listOf(alphaLowerChar).map(_.mkString)
/** Generates a string of alpha characters */
def alphaStr: Gen[String] =
listOf(alphaChar).map(_.mkString)
/** Generates a string of alphanumerical characters */
def alphaNumStr: Gen[String] =
listOf(alphaNumChar).map(_.mkString)
/** Generates a string of ASCII characters, with extra weighting for printable characters */
def asciiStr: Gen[String] =
listOf(asciiChar).map(_.mkString)
/** Generates a string of ASCII printable characters */
def asciiPrintableStr: Gen[String] =
listOf(asciiPrintableChar).map(_.mkString)
//// Number Generators ////
/** Generates positive numbers of uniform distribution, with an
* upper bound of the generation size parameter. */
def posNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = {
import num._
sized(n => c.choose(one, max(fromInt(n), one)))
}
/** Generates negative numbers of uniform distribution, with an
* lower bound of the negated generation size parameter. */
def negNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = {
import num._
sized(n => c.choose(min(-fromInt(n), -one), -one))
}
/** Generates numbers within the given inclusive range, with
* extra weight on zero, +/- unity, both extremities, and any special
* numbers provided. The special numbers must lie within the given range,
* otherwise they won't be included. */
def chooseNum[T](minT: T, maxT: T, specials: T*)(
implicit num: Numeric[T], c: Choose[T]
): Gen[T] = {
import num._
val basics = List(minT, maxT, zero, one, -one)
val basicsAndSpecials = for {
t <- specials ++ basics if t >= minT && t <= maxT
} yield (1, const(t))
val other = (basicsAndSpecials.length, c.choose(minT, maxT))
val allGens = basicsAndSpecials :+ other
frequency(allGens: _*)
}
//// Misc Generators ////
/** Generates a version 4 (random) UUID. */
lazy val uuid: Gen[UUID] = for {
l1 <- Gen.choose(Long.MinValue, Long.MaxValue)
l2 <- Gen.choose(Long.MinValue, Long.MaxValue)
y <- Gen.oneOf('8', '9', 'a', 'b')
} yield UUID.fromString(
new UUID(l1,l2).toString.updated(14, '4').updated(19, y)
)
lazy val calendar: Gen[Calendar] = {
import Calendar._
def adjust(c: Calendar)(f: Calendar => Unit): Calendar = { f(c); c }
// We want to be sure we always initialize the calendar's time. By
// default, Calendar.getInstance uses the system time. We always
// overwrite it with a determinisitcally-geneated time to be sure
// that calendar generation is also deterministic.
//
// We limit the time (in milliseconds) because extreme values will
// cause Calendar.getTime calls to fail. This range is relatively
// large but safe:
//
// -62135751600000 is 1 CE
// 64087186649116 is 4000 CE
val calendar: Gen[Calendar] =
Gen.chooseNum(-62135751600000L, 64087186649116L).map { t =>
adjust(Calendar.getInstance)(_.setTimeInMillis(t))
}
def yearGen(c: Calendar): Gen[Int] =
Gen.chooseNum(c.getGreatestMinimum(YEAR), c.getLeastMaximum(YEAR))
def moveToNearestLeapDate(c: Calendar, year: Int): Calendar = {
@tailrec def loop(y: Int): Calendar = {
c.set(YEAR, y)
if (c.getActualMaximum(DAY_OF_YEAR) > 365) c else loop(y + 1)
}
loop(if (year + 4 > c.getLeastMaximum(YEAR)) year - 5 else year)
}
val beginningOfDayGen: Gen[Calendar] =
calendar.map(c => adjust(c) { c =>
c.set(HOUR_OF_DAY, 0)
c.set(MINUTE, 0)
c.set(SECOND, 0)
c.set(MILLISECOND, 0)
})
val endOfDayGen: Gen[Calendar] =
calendar.map(c => adjust(c) { c =>
c.set(HOUR_OF_DAY, 23)
c.set(MINUTE, 59)
c.set(SECOND, 59)
c.set(MILLISECOND, 59)
})
val firstDayOfYearGen: Gen[Calendar] =
for { c <- calendar; y <- yearGen(c) } yield adjust(c)(_.set(y, JANUARY, 1))
val lastDayOfYearGen: Gen[Calendar] =
for { c <- calendar; y <- yearGen(c) } yield adjust(c)(_.set(y, DECEMBER, 31))
val closestLeapDateGen: Gen[Calendar] =
for { c <- calendar; y <- yearGen(c) } yield moveToNearestLeapDate(c, y)
val lastDayOfMonthGen: Gen[Calendar] =
calendar.map(c => adjust(c)(_.set(DAY_OF_MONTH, c.getActualMaximum(DAY_OF_MONTH))))
val firstDayOfMonthGen: Gen[Calendar] =
calendar.map(c => adjust(c)(_.set(DAY_OF_MONTH, 1)))
Gen.frequency(
(1, firstDayOfYearGen),
(1, lastDayOfYearGen),
(1, closestLeapDateGen),
(1, beginningOfDayGen),
(1, endOfDayGen),
(1, firstDayOfMonthGen),
(1, lastDayOfMonthGen),
(7, calendar))
}
val finiteDuration: Gen[FiniteDuration] =
// Duration.fromNanos doesn't allow Long.MinValue since it would create a
// duration that cannot be negated.
chooseNum(Long.MinValue + 1, Long.MaxValue).map(Duration.fromNanos)
/**
* Generates instance of Duration.
*
* In addition to `FiniteDuration` values, this can generate `Duration.Inf`,
* `Duration.MinusInf`, and `Duration.Undefined`.
*/
val duration: Gen[Duration] = frequency(
1 -> const(Duration.Inf),
1 -> const(Duration.MinusInf),
1 -> const(Duration.Undefined),
1 -> const(Duration.Zero),
6 -> finiteDuration)
}
| martijnhoekstra/scala | src/scalacheck/org/scalacheck/Gen.scala | Scala | apache-2.0 | 33,784 |
package com.recursivity.jpa
import util.DynamicVariable
import collection.mutable.HashMap
import javax.persistence.{EntityTransaction, EntityManager}
/**
* Created by IntelliJ IDEA.
* User: wfaler
* Date: 09/02/2011
* Time: 23:35
* To change this template use File | Settings | File Templates.
*/
object Jpa{
private val _em = new DynamicVariable[HashMap[String, EntityManager]](null)
private val _tx = new DynamicVariable[HashMap[EntityManager, EntityTransaction]](null)
def transaction(function: => Any): Any = {
if(_em.value != null)
return function
else{
_em.withValue(new HashMap[String, EntityManager]){
_tx.withValue(new HashMap[EntityManager, EntityTransaction]){
try{
val any = function
commit
return any
}catch{
case e: Exception => {
rollback
throw e
}
}
}
}
}
}
def entityManager: EntityManager = {
entityManager(PersistenceUnit.unitName)
}
def entityManager(unit: String): EntityManager = {
if(_em.value == null)
throw new IllegalArgumentException("You can only get an EntityManager if you are within a transaction-closure context: transaction{..yourcode..}")
if(_em.value.contains(unit)){
return _em.value(unit)
}else{
val em = PersistenceManagerFactory(unit)
_em.value.put(unit, em)
val tx = em.getTransaction
tx.begin
_tx.value.put(em, tx)
return em
}
}
private def commit = {
_em.value.keysIterator.foreach(k => {
val em = _em.value(k)
val tx = _tx.value(em)
if(tx.isActive)
tx.commit
if(em.isOpen)
em.close
})
}
private def rollback = {
_em.value.keysIterator.foreach(k => {
val em = _em.value(k)
val tx = _tx.value(em)
if(tx.isActive)
tx.commit
if(em.isOpen)
em.close
})
}
}
| bowler-framework/recursivity-jpa | src/main/scala/com/recursivity/jpa/Jpa.scala | Scala | bsd-3-clause | 1,978 |
/*
* Copyright (C) 2011 Mathias Doenitz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// package sjsonnew
// package support.spray
// import spray.json._
// object AdditionalFormatsSpec extends verify.BasicTestSuite {
// case class Container[A](inner: Option[A])
// object ReaderProtocol extends BasicJsonProtocol {
// implicit def containerReader[T :JsonFormat]: JsonFormat[Container[T]] = liftFormat {
// new JsonReader[Container[T]] {
// def read(value: JsValue) = value match {
// case JsObject(fields) if fields.contains("content") => Container(Some(jsonReader[T].read(fields("content"))))
// case _ => deserializationError("Unexpected format: " + value.toString)
// }
// }
// }
// }
// object WriterProtocol extends BasicJsonProtocol {
// implicit def containerWriter[T :JsonFormat]: JsonFormat[Container[T]] = liftFormat {
// new JsonWriter[Container[T]] {
// def write(obj: Container[T]) = JsObject("content" -> obj.inner.toJson)
// }
// }
// }
// val obj = Container(Some(Container(Some(List(1, 2, 3)))))
// test("The liftJsonWriter should properly write a Container[Container[List[Int]]] to JSON") {
// import WriterProtocol._
// Predef.assert(obj.toJson.toString == """{"content":{"content":[1,2,3]}}""")
// }
// test("The liftJsonWriter should properly read a Container[Container[List[Int]]] from JSON") {
// import ReaderProtocol._
// Predef.assert("""{"content":{"content":[1,2,3]}}""".parseJson.convertTo[Container[Container[List[Int]]]] == obj)
// }
// // case class Foo(id: Long, name: String, foos: Option[List[Foo]] = None)
// // object FooProtocol extends DefaultJsonProtocol {
// // implicit val fooProtocol: JsonFormat[Foo] = lazyFormat(jsonFormat(Foo, "id", "name", "foos"))
// // }
// // "The lazyFormat wrapper" should {
// // "enable recursive format definitions" in {
// // import FooProtocol._
// // Foo(1, "a", Some(Foo(2, "b", Some(Foo(3, "c") :: Nil)) :: Foo(4, "d") :: Nil)).toJson.toString mustEqual
// // """{"id":1,"name":"a","foos":[{"id":2,"name":"b","foos":[{"id":3,"name":"c"}]},{"id":4,"name":"d"}]}"""
// // }
// // }
// }
| eed3si9n/sjson-new | support/spray/src/test/scala/sjsonnew/support/spray/AdditionalFormatsSpec.scala | Scala | apache-2.0 | 2,749 |
/*
* This file is part of Kiama.
*
* Copyright (C) 2011-2015 Anthony M Sloane, Macquarie University.
*
* Kiama is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* Kiama is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
* more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Kiama. (See files COPYING and COPYING.LESSER.) If not, see
* <http://www.gnu.org/licenses/>.
*/
package org.kiama
package example.prolog
import org.kiama.util.PositionedParserUtilities
/**
* Module containing parsers for Prolog.
*/
class SyntaxAnalyser extends PositionedParserUtilities {
import PrologTree._
import scala.language.postfixOps
lazy val program =
phrase ((clause+) ^^ Program)
lazy val query =
phrase (literal <~ ".")
lazy val clause =
literal ~ (":-" ~> literals) <~ "." ^^ Rule |
literal <~ "." ^^ Fact
lazy val literal : PackratParser[Literal] =
atom ~ ("(" ~> terms <~ ")") ^^ Pred |
atom ^^ Atom
lazy val literals =
rep1sep (literal | cut, ",")
lazy val cut =
"!" ^^ { case _ => Cut () }
lazy val terms =
rep1sep (term, ",")
lazy val term =
literal |
varr ^^ Var |
integer |
list
lazy val list =
"[" ~> "]" ^^ { case _ => Pred ("nil", Nil) } |
"[" ~> listterms <~ "]"
lazy val listterms : PackratParser[Literal] =
term ~ ("," ~> listterms) ^^ {
case h ~ t => Pred ("cons", List (h, t))
} |
term ^^ {
case h => Pred ("cons", List (h, Pred ("nil", Nil)))
}
lazy val atom =
regex ("[a-z][a-zA-Z]*".r)
lazy val varr =
regex ("[A-Z][a-zA-Z]*".r)
lazy val integer =
regex ("[0-9]+".r) ^^ { case s => Integer (s.toInt) }
}
| adeze/kiama | library/src/org/kiama/example/prolog/SyntaxAnalyser.scala | Scala | gpl-3.0 | 2,220 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.catalyst.expressions.{Ascending, Attribute, AttributeReference, Expression, Literal, SortOrder, SpecificInternalRow, UnsafeProjection, UnsafeRow}
import org.apache.spark.sql.catalyst.plans.logical.{LogicalKeyedState, ProcessingTimeTimeout}
import org.apache.spark.sql.catalyst.plans.physical.{ClusteredDistribution, Distribution, Partitioning}
import org.apache.spark.sql.execution._
import org.apache.spark.sql.execution.streaming.state._
import org.apache.spark.sql.streaming.{KeyedStateTimeout, OutputMode}
import org.apache.spark.sql.types.{BooleanType, IntegerType}
import org.apache.spark.util.CompletionIterator
/**
* Physical operator for executing `FlatMapGroupsWithState.`
*
* @param func function called on each group
* @param keyDeserializer used to extract the key object for each group.
* @param valueDeserializer used to extract the items in the iterator from an input row.
* @param groupingAttributes used to group the data
* @param dataAttributes used to read the data
* @param outputObjAttr used to define the output object
* @param stateEncoder used to serialize/deserialize state before calling `func`
* @param outputMode the output mode of `func`
* @param timeout used to timeout groups that have not received data in a while
* @param batchTimestampMs processing timestamp of the current batch.
*/
case class FlatMapGroupsWithStateExec(
func: (Any, Iterator[Any], LogicalKeyedState[Any]) => Iterator[Any],
keyDeserializer: Expression,
valueDeserializer: Expression,
groupingAttributes: Seq[Attribute],
dataAttributes: Seq[Attribute],
outputObjAttr: Attribute,
stateId: Option[OperatorStateId],
stateEncoder: ExpressionEncoder[Any],
outputMode: OutputMode,
timeout: KeyedStateTimeout,
batchTimestampMs: Long,
child: SparkPlan) extends UnaryExecNode with ObjectProducerExec with StateStoreWriter {
private val isTimeoutEnabled = timeout == ProcessingTimeTimeout
private val timestampTimeoutAttribute =
AttributeReference("timeoutTimestamp", dataType = IntegerType, nullable = false)()
private val stateAttributes: Seq[Attribute] = {
val encSchemaAttribs = stateEncoder.schema.toAttributes
if (isTimeoutEnabled) encSchemaAttribs :+ timestampTimeoutAttribute else encSchemaAttribs
}
import KeyedStateImpl._
/** Distribute by grouping attributes */
override def requiredChildDistribution: Seq[Distribution] =
ClusteredDistribution(groupingAttributes) :: Nil
/** Ordering needed for using GroupingIterator */
override def requiredChildOrdering: Seq[Seq[SortOrder]] =
Seq(groupingAttributes.map(SortOrder(_, Ascending)))
override protected def doExecute(): RDD[InternalRow] = {
metrics // force lazy init at driver
child.execute().mapPartitionsWithStateStore[InternalRow](
getStateId.checkpointLocation,
getStateId.operatorId,
getStateId.batchId,
groupingAttributes.toStructType,
stateAttributes.toStructType,
sqlContext.sessionState,
Some(sqlContext.streams.stateStoreCoordinator)) { case (store, iterator) =>
val updater = new StateStoreUpdater(store)
// Generate a iterator that returns the rows grouped by the grouping function
// Note that this code ensures that the filtering for timeout occurs only after
// all the data has been processed. This is to ensure that the timeout information of all
// the keys with data is updated before they are processed for timeouts.
val outputIterator =
updater.updateStateForKeysWithData(iterator) ++ updater.updateStateForTimedOutKeys()
// Return an iterator of all the rows generated by all the keys, such that when fully
// consumed, all the state updates will be committed by the state store
CompletionIterator[InternalRow, Iterator[InternalRow]](
outputIterator,
{
store.commit()
longMetric("numTotalStateRows") += store.numKeys()
}
)
}
}
/** Helper class to update the state store */
class StateStoreUpdater(store: StateStore) {
// Converters for translating input keys, values, output data between rows and Java objects
private val getKeyObj =
ObjectOperator.deserializeRowToObject(keyDeserializer, groupingAttributes)
private val getValueObj =
ObjectOperator.deserializeRowToObject(valueDeserializer, dataAttributes)
private val getOutputRow = ObjectOperator.wrapObjectToRow(outputObjAttr.dataType)
// Converter for translating state rows to Java objects
private val getStateObjFromRow = ObjectOperator.deserializeRowToObject(
stateEncoder.resolveAndBind().deserializer, stateAttributes)
// Converter for translating state Java objects to rows
private val stateSerializer = {
val encoderSerializer = stateEncoder.namedExpressions
if (isTimeoutEnabled) {
encoderSerializer :+ Literal(KeyedStateImpl.TIMEOUT_TIMESTAMP_NOT_SET)
} else {
encoderSerializer
}
}
private val getStateRowFromObj = ObjectOperator.serializeObjectToRow(stateSerializer)
// Index of the additional metadata fields in the state row
private val timeoutTimestampIndex = stateAttributes.indexOf(timestampTimeoutAttribute)
// Metrics
private val numUpdatedStateRows = longMetric("numUpdatedStateRows")
private val numOutputRows = longMetric("numOutputRows")
/**
* For every group, get the key, values and corresponding state and call the function,
* and return an iterator of rows
*/
def updateStateForKeysWithData(dataIter: Iterator[InternalRow]): Iterator[InternalRow] = {
val groupedIter = GroupedIterator(dataIter, groupingAttributes, child.output)
groupedIter.flatMap { case (keyRow, valueRowIter) =>
val keyUnsafeRow = keyRow.asInstanceOf[UnsafeRow]
callFunctionAndUpdateState(
keyUnsafeRow,
valueRowIter,
store.get(keyUnsafeRow),
hasTimedOut = false)
}
}
/** Find the groups that have timeout set and are timing out right now, and call the function */
def updateStateForTimedOutKeys(): Iterator[InternalRow] = {
if (isTimeoutEnabled) {
val timingOutKeys = store.filter { case (_, stateRow) =>
val timeoutTimestamp = getTimeoutTimestamp(stateRow)
timeoutTimestamp != TIMEOUT_TIMESTAMP_NOT_SET && timeoutTimestamp < batchTimestampMs
}
timingOutKeys.flatMap { case (keyRow, stateRow) =>
callFunctionAndUpdateState(
keyRow,
Iterator.empty,
Some(stateRow),
hasTimedOut = true)
}
} else Iterator.empty
}
/**
* Call the user function on a key's data, update the state store, and return the return data
* iterator. Note that the store updating is lazy, that is, the store will be updated only
* after the returned iterator is fully consumed.
*/
private def callFunctionAndUpdateState(
keyRow: UnsafeRow,
valueRowIter: Iterator[InternalRow],
prevStateRowOption: Option[UnsafeRow],
hasTimedOut: Boolean): Iterator[InternalRow] = {
val keyObj = getKeyObj(keyRow) // convert key to objects
val valueObjIter = valueRowIter.map(getValueObj.apply) // convert value rows to objects
val stateObjOption = getStateObj(prevStateRowOption)
val keyedState = new KeyedStateImpl(
stateObjOption, batchTimestampMs, isTimeoutEnabled, hasTimedOut)
// Call function, get the returned objects and convert them to rows
val mappedIterator = func(keyObj, valueObjIter, keyedState).map { obj =>
numOutputRows += 1
getOutputRow(obj)
}
// When the iterator is consumed, then write changes to state
def onIteratorCompletion: Unit = {
// Has the timeout information changed
if (keyedState.hasRemoved) {
store.remove(keyRow)
numUpdatedStateRows += 1
} else {
val previousTimeoutTimestamp = prevStateRowOption match {
case Some(row) => getTimeoutTimestamp(row)
case None => TIMEOUT_TIMESTAMP_NOT_SET
}
val stateRowToWrite = if (keyedState.hasUpdated) {
getStateRow(keyedState.get)
} else {
prevStateRowOption.orNull
}
val hasTimeoutChanged = keyedState.getTimeoutTimestamp != previousTimeoutTimestamp
val shouldWriteState = keyedState.hasUpdated || hasTimeoutChanged
if (shouldWriteState) {
if (stateRowToWrite == null) {
// This should never happen because checks in KeyedStateImpl should avoid cases
// where empty state would need to be written
throw new IllegalStateException(
"Attempting to write empty state")
}
setTimeoutTimestamp(stateRowToWrite, keyedState.getTimeoutTimestamp)
store.put(keyRow.copy(), stateRowToWrite.copy())
numUpdatedStateRows += 1
}
}
}
// Return an iterator of rows such that fully consumed, the updated state value will be saved
CompletionIterator[InternalRow, Iterator[InternalRow]](mappedIterator, onIteratorCompletion)
}
/** Returns the state as Java object if defined */
def getStateObj(stateRowOption: Option[UnsafeRow]): Option[Any] = {
stateRowOption.map(getStateObjFromRow)
}
/** Returns the row for an updated state */
def getStateRow(obj: Any): UnsafeRow = {
getStateRowFromObj(obj)
}
/** Returns the timeout timestamp of a state row is set */
def getTimeoutTimestamp(stateRow: UnsafeRow): Long = {
if (isTimeoutEnabled) stateRow.getLong(timeoutTimestampIndex) else TIMEOUT_TIMESTAMP_NOT_SET
}
/** Set the timestamp in a state row */
def setTimeoutTimestamp(stateRow: UnsafeRow, timeoutTimestamps: Long): Unit = {
if (isTimeoutEnabled) stateRow.setLong(timeoutTimestampIndex, timeoutTimestamps)
}
}
}
| jianran/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/FlatMapGroupsWithStateExec.scala | Scala | apache-2.0 | 11,098 |
/*
*
* Copyright 2015 Foundational Development
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package pro.foundev.benchmarks.ft_spark_streaming
import pro.foundev.commons.messaging.ZeroMQCapable
class StreamingWithWALEnabled extends ZeroMQCapable{
}
| rssvihla/datastax_work | spark_commons/benchmarks/ft_spark_streaming/src/main/scala/pro/foundev/benchmarks/ft_spark_streaming/StreamingWithWALEnabled.scala | Scala | apache-2.0 | 795 |
package org.functionalkoans.forscala
import org.functionalkoans.forscala.support.KoanFunSuite
import org.scalatest.Matchers
case class Board(length: Int, height: Int) {
case class Coordinate(x: Int, y: Int) {
require(0 <= x && x < length && 0 <= y && y < height)
}
val occupied = scala.collection.mutable.Set[Coordinate]()
}
class AboutPathDependentTypes extends KoanFunSuite with Matchers {
koan(
"""When a class is instantiated inside of another object, it belongs to the object. This is a path
| dependent type. Once established, it cannot be placed inside of another object""") {
val b1 = Board(20, 20)
val b2 = Board(30, 30)
val c1 = b1.Coordinate(15, 15)
val c2 = b2.Coordinate(25, 25)
b1.occupied += c1
b2.occupied += c2
// Next line doesn't compile, uncomment to try, then comment and answer the koan next
// b1.occupied += c2
c1.x should be (__)
}
}
| pharmpress/codingdojo | scala-koans/src/test/scala/org/functionalkoans/forscala/AboutPathDependentTypes.scala | Scala | apache-2.0 | 929 |
package xyz.hyperreal.typesetter
import java.awt.GraphicsEnvironment
object FontsMain extends App
{
println( GraphicsEnvironment.getLocalGraphicsEnvironment.getAllFonts filter (_.getFamily matches "Free.*") mkString (", ") )
} | edadma/typesetter | src/main/scala/FontsMain.scala | Scala | mit | 230 |
import akka.actor._
import org.json4s._
import org.json4s.native.JsonMethods._
object FollowerExtractor {
// messages
case class Extract(val login:String, val jsonResponse:JArray)
// props factory method
def props(manager:ActorRef) =
Props(classOf[FollowerExtractor], manager)
}
class FollowerExtractor(manager:ActorRef)
extends Actor with ActorLogging {
import FollowerExtractor._
def receive = {
case Extract(login, followerArray) =>
val followers = extractFollowers(followerArray)
followers foreach { f =>
manager ! FetcherManager.AddToQueue(f)
}
}
def extractFollowers(followerArray:JArray) = for {
JObject(follower) <- followerArray
JField("login", JString(login)) <- follower
} yield login
}
| pbugnion/s4ds | chap09/ghub_crawler_fault_tolerant/FollowerExtractor.scala | Scala | apache-2.0 | 771 |
package picasso.frontend.compilerPlugin
import picasso.math.Boolean3Valued._
import picasso.math.hol._
import Annotations._
import scala.tools.nsc.Global
import picasso.utils.{LogCritical, LogError, LogWarning, LogNotice, LogInfo, LogDebug, Logger}
import utils.TreeUtils
//TODO this is a wrapper around the HOL formulas and the Trees and FunCheck ?
trait Predicates {
self: TreeUtils =>
val global: Global
import global._
import global.definitions._
lazy val annotations = getModule("picasso.frontend.compilerPlugin.Annotations")
lazy val predAnnotation = annotations.info.decls.find(s => s.name.toString == "Predicate").get
//TODO is there a nicer way of getting nested classes ?
def isPredicate(vd: ValDef) = {
vd.symbol.annotations.exists( annot => {
annot.atp.typeSymbol == predAnnotation
})
}
abstract class Predicate {
/** Does given symbol apprears in the predicate ? */
def contains(s :Symbol): Boolean = containing(s)
/** The set of symbols that are apprears in this predicate. */
def containing: Set[Symbol]
/** The tree (or the closest thing) that corresponds to this predicate. */
def tree: Tree
/** The formula corresponding to this predicate. */
def formula: Formula
}
class GivenByUserPredicate(vd: ValDef) extends Predicate {
assert(isPredicate(vd))
private val contained = {
val all = symbolsInvloved(vd)
assert(all.size == 1)
all.head._2.toSet
}
def containing = contained
def tree = vd.rhs
def formula = sys.error("TODO")
}
/**
class BooleanValDefPredicate(vd: ValDef) extends Predicate {
assert(BooleanValDefPredicate.is(vd))
def containing = Set[Symbol](vd.symbol)
val select = {
Select(This(vd.symbol.owner) setType vd.symbol.owner.info, vd.symbol) setType vd.symbol.info
}
def tree = select
val variable = {
val v = Variable(vd.symbol.name + "#" + vd.symbol.id)
v.tpe = Bool
v
}
def formula = variable
}
object BooleanValDefPredicate {
def is(t: Tree) = t match {
case vd@ValDef(_,_,_,_) => vd.symbol.info == BooleanClass.tpe
case _ => false
}
def apply(t: Tree) = t match {
case vd@ValDef(_,_,_,_) if is(vd) => new BooleanValDefPredicate(vd)
case _ => Logger.logAndThrow("Plugin", LogError, "Cannot create a BooleanValDefPredicate out of " + t)
}
}
*/
}
| dzufferey/picasso | frontend/compilerPlugin/src/main/scala/picasso/frontend/compilerPlugin/Predicates.scala | Scala | bsd-2-clause | 2,434 |
package first_example.after
import first_example.after.movie.Movie
/**
* 租赁
*/
class Rental(var movie: Movie, var daysRented: Int)
| zj-lingxin/refactoring | src/main/scala/first_example/after/Rental.scala | Scala | mit | 141 |
/*
* Copyright 2017 FOLIO Co., Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.folio_sec.reladomo.scala_api.service.scala_lang
import com.folio_sec.reladomo.scala_api._
import com.gs.fw.common.mithra.MithraDatedTransactionalObject
import com.gs.fw.finder.OrderBy
import scala.concurrent.{ ExecutionContext, Future }
/**
* A TransactionalObjectService which supports scala-lang Future interface.
*
* @tparam TxObject the actual transactional object type
*/
trait TemporalTransactionalObjectService[TxObject <: TemporalTransactionalObject,
TxObjectList <: TemporalTransactionalList[TxObject, MithraTxObject],
MithraTxObject <: MithraDatedTransactionalObject] {
type TxObjectListFinder =
TemporalTransactionalObjectFinder[TxObject, TxObjectList, MithraTxObject]#ListFinder[
TxObject,
TxObjectList,
MithraTxObject
]
val finder: TemporalTransactionalObjectFinder[TxObject, TxObjectList, MithraTxObject]
def findOne(operation: FinderOperation)(implicit ctx: ExecutionContext): Future[Option[TxObject]] = {
Future(finder.findOne(operation))
}
def findOneWith(
operation: (finder.type) => FinderOperation
)(implicit ctx: ExecutionContext): Future[Option[TxObject]] = {
Future(finder.findOne(operation.apply(finder)))
}
def findOneBypassCache(operation: FinderOperation)(implicit ctx: ExecutionContext): Future[Option[TxObject]] = {
Future(finder.findOneBypassCache(operation))
}
def findOneBypassCacheWith(
operation: (finder.type) => FinderOperation
)(implicit ctx: ExecutionContext): Future[Option[TxObject]] = {
Future(finder.findOneBypassCache(operation.apply(finder)))
}
def findMany(operation: FinderOperation)(implicit ctx: ExecutionContext): Future[TxObjectListFinder] = {
Future(finder.findMany(operation))
}
def findManyWith(
operation: (finder.type) => FinderOperation
)(implicit ctx: ExecutionContext): Future[TxObjectListFinder] = {
Future(finder.findMany(operation.apply(finder)))
}
def findSortedManyWith(
operation: (finder.type) => FinderOperation,
limit: Int = 0,
orderBy: (finder.type) => OrderBy[_] = (_) => null
)(implicit ctx: ExecutionContext): Future[TxObjectListFinder] = {
Future {
(limit, orderBy.apply(finder)) match {
case (size, null) if size <= 0 => finder.findManyWith(operation)
case (size, null) => finder.findManyWith(operation).limit(size)
case (size, _) if size <= 0 => finder.findManyWith(operation).orderByWith(orderBy)
case (size, _) => finder.findManyWith(operation).limit(size).orderByWith(orderBy)
}
}
}
def findManyBypassCache(
operation: FinderOperation
)(implicit ctx: ExecutionContext): Future[TxObjectListFinder] = {
Future(finder.findManyBypassCache(operation))
}
def findManyBypassCacheWith(
operation: (finder.type) => FinderOperation
)(implicit ctx: ExecutionContext): Future[TxObjectListFinder] = {
Future(finder.findManyBypassCache(operation.apply(finder)))
}
def findSortedManyBypassCacheWith(
operation: (finder.type) => FinderOperation,
limit: Int = 0,
orderBy: (finder.type) => OrderBy[_] = (_) => null
)(implicit ctx: ExecutionContext): Future[TxObjectListFinder] = {
Future {
(limit, orderBy.apply(finder)) match {
case (size, null) if size <= 0 => finder.findManyBypassCacheWith(operation)
case (size, null) => finder.findManyBypassCacheWith(operation).limit(size)
case (size, _) if size <= 0 => finder.findManyBypassCacheWith(operation).orderByWith(orderBy)
case (size, _) => finder.findManyBypassCacheWith(operation).limit(size).orderByWith(orderBy)
}
}
}
}
| folio-sec/reladomo-scala | reladomo-scala-common/src/main/scala/com/folio_sec/reladomo/scala_api/service/scala_lang/TemporalTransactionalObjectService.scala | Scala | apache-2.0 | 4,397 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy
import org.json4s.JsonAST.JObject
import org.json4s.JsonDSL._
import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
import org.apache.spark.deploy.master._
import org.apache.spark.deploy.master.RecoveryState.MasterState
import org.apache.spark.deploy.worker.ExecutorRunner
private[deploy] object JsonProtocol {
/**
* Export the [[WorkerInfo]] to a Json object. A [[WorkerInfo]] consists of the information of a
* worker.
*
* @return a Json object containing the following fields:
* `id` a string identifier of the worker
* `host` the host that the worker is running on
* `port` the port that the worker is bound to
* `webuiaddress` the address used in web UI
* `cores` total cores of the worker
* `coresused` allocated cores of the worker
* `coresfree` free cores of the worker
* `memory` total memory of the worker
* `memoryused` allocated memory of the worker
* `memoryfree` free memory of the worker
* `state` state of the worker, see [[WorkerState]]
* `lastheartbeat` time in milliseconds that the latest heart beat message from the
* worker is received
*/
def writeWorkerInfo(obj: WorkerInfo): JObject = {
("id" -> obj.id) ~
("host" -> obj.host) ~
("port" -> obj.port) ~
("webuiaddress" -> obj.webUiAddress) ~
("cores" -> obj.cores) ~
("coresused" -> obj.coresUsed) ~
("coresfree" -> obj.coresFree) ~
("memory" -> obj.memory) ~
("memoryused" -> obj.memoryUsed) ~
("memoryfree" -> obj.memoryFree) ~
("state" -> obj.state.toString) ~
("lastheartbeat" -> obj.lastHeartbeat)
}
/**
* Export the [[ApplicationInfo]] to a Json objec. An [[ApplicationInfo]] consists of the
* information of an application.
*
* @return a Json object containing the following fields:
* `id` a string identifier of the application
* `starttime` time in milliseconds that the application starts
* `name` the description of the application
* `cores` total cores granted to the application
* `user` name of the user who submitted the application
* `memoryperslave` minimal memory in MB required to each executor
* `submitdate` time in Date that the application is submitted
* `state` state of the application, see [[ApplicationState]]
* `duration` time in milliseconds that the application has been running
*/
def writeApplicationInfo(obj: ApplicationInfo): JObject = {
("id" -> obj.id) ~
("starttime" -> obj.startTime) ~
("name" -> obj.desc.name) ~
("cores" -> obj.coresGranted) ~
("user" -> obj.desc.user) ~
("memoryperslave" -> obj.desc.memoryPerExecutorMB) ~
("submitdate" -> obj.submitDate.toString) ~
("state" -> obj.state.toString) ~
("duration" -> obj.duration)
}
/**
* Export the [[ApplicationDescription]] to a Json object. An [[ApplicationDescription]] consists
* of the description of an application.
*
* @return a Json object containing the following fields:
* `name` the description of the application
* `cores` max cores that can be allocated to the application, 0 means unlimited
* `memoryperslave` minimal memory in MB required to each executor
* `user` name of the user who submitted the application
* `command` the command string used to submit the application
*/
def writeApplicationDescription(obj: ApplicationDescription): JObject = {
("name" -> obj.name) ~
("cores" -> obj.maxCores.getOrElse(0)) ~
("memoryperslave" -> obj.memoryPerExecutorMB) ~
("user" -> obj.user) ~
("command" -> obj.command.toString)
}
/**
* Export the [[ExecutorRunner]] to a Json object. An [[ExecutorRunner]] consists of the
* information of an executor.
*
* @return a Json object containing the following fields:
* `id` an integer identifier of the executor
* `memory` memory in MB allocated to the executor
* `appid` a string identifier of the application that the executor is working on
* `appdesc` a Json object of the [[ApplicationDescription]] of the application that the
* executor is working on
*/
def writeExecutorRunner(obj: ExecutorRunner): JObject = {
("id" -> obj.execId) ~
("memory" -> obj.memory) ~
("appid" -> obj.appId) ~
("appdesc" -> writeApplicationDescription(obj.appDesc))
}
/**
* Export the [[DriverInfo]] to a Json object. A [[DriverInfo]] consists of the information of a
* driver.
*
* @return a Json object containing the following fields:
* `id` a string identifier of the driver
* `starttime` time in milliseconds that the driver starts
* `state` state of the driver, see [[DriverState]]
* `cores` cores allocated to the driver
* `memory` memory in MB allocated to the driver
* `submitdate` time in Date that the driver is created
* `worker` identifier of the worker that the driver is running on
* `mainclass` main class of the command string that started the driver
*/
def writeDriverInfo(obj: DriverInfo): JObject = {
("id" -> obj.id) ~
("starttime" -> obj.startTime.toString) ~
("state" -> obj.state.toString) ~
("cores" -> obj.desc.cores) ~
("memory" -> obj.desc.mem) ~
("submitdate" -> obj.submitDate.toString) ~
("worker" -> obj.worker.map(_.id).getOrElse("None")) ~
("mainclass" -> obj.desc.command.arguments(2))
}
/**
* Export the [[MasterStateResponse]] to a Json object. A [[MasterStateResponse]] consists the
* information of a master node.
*
* @return a Json object containing the following fields:
* `url` the url of the master node
* `workers` a list of Json objects of [[WorkerInfo]] of the workers allocated to the
* master
* `aliveworkers` size of alive workers allocated to the master
* `cores` total cores available of the master
* `coresused` cores used by the master
* `memory` total memory available of the master
* `memoryused` memory used by the master
* `activeapps` a list of Json objects of [[ApplicationInfo]] of the active applications
* running on the master
* `completedapps` a list of Json objects of [[ApplicationInfo]] of the applications
* completed in the master
* `activedrivers` a list of Json objects of [[DriverInfo]] of the active drivers of the
* master
* `completeddrivers` a list of Json objects of [[DriverInfo]] of the completed drivers
* of the master
* `status` status of the master, see [[MasterState]]
*/
def writeMasterState(obj: MasterStateResponse): JObject = {
val aliveWorkers = obj.workers.filter(_.isAlive())
("url" -> obj.uri) ~
("workers" -> obj.workers.toList.map(writeWorkerInfo)) ~
("aliveworkers" -> aliveWorkers.length) ~
("cores" -> aliveWorkers.map(_.cores).sum) ~
("coresused" -> aliveWorkers.map(_.coresUsed).sum) ~
("memory" -> aliveWorkers.map(_.memory).sum) ~
("memoryused" -> aliveWorkers.map(_.memoryUsed).sum) ~
("activeapps" -> obj.activeApps.toList.map(writeApplicationInfo)) ~
("completedapps" -> obj.completedApps.toList.map(writeApplicationInfo)) ~
("activedrivers" -> obj.activeDrivers.toList.map(writeDriverInfo)) ~
("completeddrivers" -> obj.completedDrivers.toList.map(writeDriverInfo)) ~
("status" -> obj.status.toString)
}
/**
* Export the [[WorkerStateResponse]] to a Json object. A [[WorkerStateResponse]] consists the
* information of a worker node.
*
* @return a Json object containing the following fields:
* `id` a string identifier of the worker node
* `masterurl` url of the master node of the worker
* `masterwebuiurl` the address used in web UI of the master node of the worker
* `cores` total cores of the worker
* `coreused` used cores of the worker
* `memory` total memory of the worker
* `memoryused` used memory of the worker
* `executors` a list of Json objects of [[ExecutorRunner]] of the executors running on
* the worker
* `finishedexecutors` a list of Json objects of [[ExecutorRunner]] of the finished
* executors of the worker
*/
def writeWorkerState(obj: WorkerStateResponse): JObject = {
("id" -> obj.workerId) ~
("masterurl" -> obj.masterUrl) ~
("masterwebuiurl" -> obj.masterWebUiUrl) ~
("cores" -> obj.cores) ~
("coresused" -> obj.coresUsed) ~
("memory" -> obj.memory) ~
("memoryused" -> obj.memoryUsed) ~
("executors" -> obj.executors.map(writeExecutorRunner)) ~
("finishedexecutors" -> obj.finishedExecutors.map(writeExecutorRunner))
}
}
| bravo-zhang/spark | core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala | Scala | apache-2.0 | 9,867 |
package com.avsystem.commons
package misc
import org.scalatest.funsuite.AnyFunSuite
class ValueEnumTest extends AnyFunSuite {
class SomeValueEnum(implicit enumCtx: EnumCtx) extends AbstractValueEnum
object SomeValueEnum extends AbstractValueEnumCompanion[SomeValueEnum] {
final val One, Two, Three: Value = new SomeValueEnum
final val Four: Value = new SomeValueEnum {}
final val Five_? : Value = new SomeValueEnum
}
test("value enum test") {
import SomeValueEnum._
assert(values == List(One, Two, Three, Four, Five_?))
assert(values.map(_.ordinal) == List.range(0, 5))
assert(values.map(_.name) == List("One", "Two", "Three", "Four", "Five_?"))
}
test("enum constant member validation") {
assertCompiles(
"""
|final class Enumz(implicit enumCtx: EnumCtx) extends AbstractValueEnum
|object Enumz extends AbstractValueEnumCompanion[Enumz] {
| final val Constant: Value = new Enumz
|}
""".stripMargin
)
assertDoesNotCompile(
"""
|final class Enumz(implicit enumCtx: EnumCtx) extends AbstractValueEnum
|object Enumz extends AbstractValueEnumCompanion[Enumz] {
| private final val Constant: Value = new Enumz
|}
""".stripMargin
)
assertDoesNotCompile(
"""
|final class Enumz(implicit enumCtx: EnumCtx) extends AbstractValueEnum
|object Enumz extends AbstractValueEnumCompanion[Enumz] {
| final def Constant: Value = new Enumz
|}
""".stripMargin
)
assertDoesNotCompile(
"""
|final class Enumz(implicit enumCtx: EnumCtx) extends AbstractValueEnum
|object Enumz extends AbstractValueEnumCompanion[Enumz] {
| val Constant: Value = new Enumz
|}
""".stripMargin
)
assertDoesNotCompile(
"""
|final class Enumz(implicit enumCtx: EnumCtx) extends AbstractValueEnum
|object Enumz extends AbstractValueEnumCompanion[Enumz] {
| final lazy val Constant: Value = new Enumz
|}
""".stripMargin
)
assertDoesNotCompile(
"""
|final class Enumz(implicit enumCtx: EnumCtx) extends AbstractValueEnum
|object Enumz extends AbstractValueEnumCompanion[Enumz] {
| final val Constant = new Enumz
|}
""".stripMargin
)
assertDoesNotCompile(
"""
|final class Enumz(implicit enumCtx: EnumCtx) extends AbstractValueEnum
|object Enumz extends AbstractValueEnumCompanion[Enumz] {
| object Inner {
| final val Constant: Value = new Enumz
| }
|}
""".stripMargin
)
}
}
| AVSystem/scala-commons | commons-core/src/test/scala/com/avsystem/commons/misc/ValueEnumTest.scala | Scala | mit | 2,665 |
/*
Copyright 2014 Commonwealth Bank of Australia
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package au.com.cba.omnia.six
import com.twitter.scalding._
import cascading.flow.FlowDef
import com.twitter.scalding.TDsl._
/**
* Common args, Input formats, etc.
*/
trait JobCommon {
type I = String; type F = String; type V = String; type L = String
def getMissingValues(args : Args) : Set[String] =
args.getOrElse("missing-values",null) match {
case null => Set[String]()
case str => str.split(",").toSet
}
/** iid\01fid\01value */
def fromOsv3[X : Manifest](src : String)(implicit flowDef : FlowDef, mode : Mode) = TypedOsv[(I,F,X)](src)
/** iid|fid|type|value */
def fromPsv4[X : Manifest](src : String)(implicit flowDef : FlowDef, mode : Mode) = TypedPsv[(I,F,String,X)](src).map(r => (r._1,r._2,r._4))
def getFrom[X : Manifest](args : Args, what : String)(implicit flowDef : FlowDef, mode : Mode) : TypedPipe[(I,F,X)] = {
val argVal = args(what)
args.getOrElse(what+"-encoding","osv3") match {
case "osv3" => fromOsv3[X](argVal)
case "psv4" => fromPsv4[X](argVal)
case x => sys.error("Unknown "+what+"-encoding: " + x)
}
}
def getLabelPipe(args : Args)(implicit flowDef : FlowDef, mode : Mode) = getFrom[L](args,"labels")
def getFeaturePipe(args : Args)(implicit flowDef : FlowDef, mode : Mode) = getFrom[F](args,"features")
}
| CommBank/six | src/main/scala/au/com/cba/omnia/six/JobCommon.scala | Scala | apache-2.0 | 1,924 |
package org.opencompare.api.scala.io
class ImportMatrix extends IOMatrix[ImportCell] {
def removeDuplicatedRows(): Unit = {
// Remove duplicated rows
val distinctRows = rows().distinct
// Update matrix
updateMatrix(distinctRows)
// Update maximum number of rows
maxRow = distinctRows.size - 1
}
def removeEmptyRows(): Unit = {
// Remove empty rows
val nonEmptyRows = rows().filter { cells =>
!cells.forall { cell =>
cell.isEmpty || cell.get.content.matches("\\\\s*")
}
}
// Update matrix
updateMatrix(nonEmptyRows)
maxRow = nonEmptyRows.size - 1
}
private def updateMatrix(rows : List[List[Option[ImportCell]]]): Unit = {
cells = rows.zipWithIndex.flatMap { case (row, rowIndex) =>
for ((cell, columnIndex) <- row.zipWithIndex if cell.isDefined) yield {
(rowIndex, columnIndex) -> cell.get
}
}.toMap
}
}
| OpenCompare/OpenCompare | org.opencompare/api-scala/src/main/scala/org/opencompare/api/scala/io/ImportMatrix.scala | Scala | apache-2.0 | 924 |
package autolift.cats
import cats.implicits._
import autolift.cats.fold._
class LiftMaximumByTest extends BaseSpec{
"liftMaximumBy on a List with identity" should "work" in{
val in = List(1, 2, 3)
val out = in.liftMaxBy(identity[Int])
same[Option[Int]](out, Option(3))
}
"liftMaximumBy on a Option[List]" should "work" in{
val in = Option(List("1", "2", "3"))
val out = in.liftMaxBy(s2i)
same[Option[Option[String]]](out, Option(Option("3")))
}
"liftMaximumBy on a List[List]" should "work with functions" in{
val in = List(List("1"), List("2"), List("3"))
val out = in.liftMaxBy(anyF)
same[Option[List[String]]](out, Option(List("3")))
}
"liftMaximumBy on a List[Option]" should "work" in{
val in = List(None, None, Some("1"))
val out = in.liftMaxBy(s2i)
same[List[Option[String]]](out, List(None, None, Some("1")))
}
"LiftedMaximumBy" should "work" in{
val fn = liftMaxBy(s2i)
val out = fn(List(List("1", "2")))
same[List[Option[String]]](out, List(Option("2")))
}
"LiftedMaximumBy" should "map" in{
val lf = liftMaxBy(s2i)
val fn = lf map {_ + 1}
val out = fn(List(List("1", "2")))
same[List[Option[String]]](out, List(Option("2")))
}
} | wheaties/AutoLifts | autolift-cats/src/test/scala/autolift/cats/LiftMaximumByTest.scala | Scala | apache-2.0 | 1,252 |
package vault
trait GeneratedFromDb { this: FromDb.type =>
implicit def FromDbTuple2[A: FromDb, B: FromDb]: FromDb[(A, B)] = for {
a <- of[A]
b <- of[B]
} yield (a, b)
implicit def FromDbTuple3[A: FromDb, B: FromDb, C: FromDb]: FromDb[(A, B, C)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
} yield (a, b, c)
implicit def FromDbTuple4[A: FromDb, B: FromDb, C: FromDb, D: FromDb]: FromDb[(A, B, C, D)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
d <- of[D]
} yield (a, b, c, d)
implicit def FromDbTuple5[A: FromDb, B: FromDb, C: FromDb, D: FromDb, E: FromDb]: FromDb[(A, B, C, D, E)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
d <- of[D]
e <- of[E]
} yield (a, b, c, d, e)
implicit def FromDbTuple6[A: FromDb, B: FromDb, C: FromDb, D: FromDb, E: FromDb, F: FromDb]: FromDb[(A, B, C, D, E, F)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
d <- of[D]
e <- of[E]
f <- of[F]
} yield (a, b, c, d, e, f)
implicit def FromDbTuple7[A: FromDb, B: FromDb, C: FromDb, D: FromDb, E: FromDb, F: FromDb, G: FromDb]: FromDb[(A, B, C, D, E, F, G)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
d <- of[D]
e <- of[E]
f <- of[F]
g <- of[G]
} yield (a, b, c, d, e, f, g)
implicit def FromDbTuple8[A: FromDb, B: FromDb, C: FromDb, D: FromDb, E: FromDb, F: FromDb, G: FromDb, H: FromDb]: FromDb[(A, B, C, D, E, F, G, H)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
d <- of[D]
e <- of[E]
f <- of[F]
g <- of[G]
h <- of[H]
} yield (a, b, c, d, e, f, g, h)
implicit def FromDbTuple9[A: FromDb, B: FromDb, C: FromDb, D: FromDb, E: FromDb, F: FromDb, G: FromDb, H: FromDb, I: FromDb]: FromDb[(A, B, C, D, E, F, G, H, I)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
d <- of[D]
e <- of[E]
f <- of[F]
g <- of[G]
h <- of[H]
i <- of[I]
} yield (a, b, c, d, e, f, g, h, i)
implicit def FromDbTuple10[A: FromDb, B: FromDb, C: FromDb, D: FromDb, E: FromDb, F: FromDb, G: FromDb, H: FromDb, I: FromDb, J: FromDb]: FromDb[(A, B, C, D, E, F, G, H, I, J)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
d <- of[D]
e <- of[E]
f <- of[F]
g <- of[G]
h <- of[H]
i <- of[I]
j <- of[J]
} yield (a, b, c, d, e, f, g, h, i, j)
implicit def FromDbTuple11[A: FromDb, B: FromDb, C: FromDb, D: FromDb, E: FromDb, F: FromDb, G: FromDb, H: FromDb, I: FromDb, J: FromDb, K: FromDb]: FromDb[(A, B, C, D, E, F, G, H, I, J, K)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
d <- of[D]
e <- of[E]
f <- of[F]
g <- of[G]
h <- of[H]
i <- of[I]
j <- of[J]
k <- of[K]
} yield (a, b, c, d, e, f, g, h, i, j, k)
implicit def FromDbTuple12[A: FromDb, B: FromDb, C: FromDb, D: FromDb, E: FromDb, F: FromDb, G: FromDb, H: FromDb, I: FromDb, J: FromDb, K: FromDb, L: FromDb]: FromDb[(A, B, C, D, E, F, G, H, I, J, K, L)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
d <- of[D]
e <- of[E]
f <- of[F]
g <- of[G]
h <- of[H]
i <- of[I]
j <- of[J]
k <- of[K]
l <- of[L]
} yield (a, b, c, d, e, f, g, h, i, j, k, l)
implicit def FromDbTuple13[A: FromDb, B: FromDb, C: FromDb, D: FromDb, E: FromDb, F: FromDb, G: FromDb, H: FromDb, I: FromDb, J: FromDb, K: FromDb, L: FromDb, M: FromDb]: FromDb[(A, B, C, D, E, F, G, H, I, J, K, L, M)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
d <- of[D]
e <- of[E]
f <- of[F]
g <- of[G]
h <- of[H]
i <- of[I]
j <- of[J]
k <- of[K]
l <- of[L]
m <- of[M]
} yield (a, b, c, d, e, f, g, h, i, j, k, l, m)
implicit def FromDbTuple14[A: FromDb, B: FromDb, C: FromDb, D: FromDb, E: FromDb, F: FromDb, G: FromDb, H: FromDb, I: FromDb, J: FromDb, K: FromDb, L: FromDb, M: FromDb, N: FromDb]: FromDb[(A, B, C, D, E, F, G, H, I, J, K, L, M, N)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
d <- of[D]
e <- of[E]
f <- of[F]
g <- of[G]
h <- of[H]
i <- of[I]
j <- of[J]
k <- of[K]
l <- of[L]
m <- of[M]
n <- of[N]
} yield (a, b, c, d, e, f, g, h, i, j, k, l, m, n)
implicit def FromDbTuple15[A: FromDb, B: FromDb, C: FromDb, D: FromDb, E: FromDb, F: FromDb, G: FromDb, H: FromDb, I: FromDb, J: FromDb, K: FromDb, L: FromDb, M: FromDb, N: FromDb, O: FromDb]: FromDb[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
d <- of[D]
e <- of[E]
f <- of[F]
g <- of[G]
h <- of[H]
i <- of[I]
j <- of[J]
k <- of[K]
l <- of[L]
m <- of[M]
n <- of[N]
o <- of[O]
} yield (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o)
implicit def FromDbTuple16[A: FromDb, B: FromDb, C: FromDb, D: FromDb, E: FromDb, F: FromDb, G: FromDb, H: FromDb, I: FromDb, J: FromDb, K: FromDb, L: FromDb, M: FromDb, N: FromDb, O: FromDb, P: FromDb]: FromDb[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
d <- of[D]
e <- of[E]
f <- of[F]
g <- of[G]
h <- of[H]
i <- of[I]
j <- of[J]
k <- of[K]
l <- of[L]
m <- of[M]
n <- of[N]
o <- of[O]
p <- of[P]
} yield (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p)
implicit def FromDbTuple17[A: FromDb, B: FromDb, C: FromDb, D: FromDb, E: FromDb, F: FromDb, G: FromDb, H: FromDb, I: FromDb, J: FromDb, K: FromDb, L: FromDb, M: FromDb, N: FromDb, O: FromDb, P: FromDb, Q: FromDb]: FromDb[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
d <- of[D]
e <- of[E]
f <- of[F]
g <- of[G]
h <- of[H]
i <- of[I]
j <- of[J]
k <- of[K]
l <- of[L]
m <- of[M]
n <- of[N]
o <- of[O]
p <- of[P]
q <- of[Q]
} yield (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q)
implicit def FromDbTuple18[A: FromDb, B: FromDb, C: FromDb, D: FromDb, E: FromDb, F: FromDb, G: FromDb, H: FromDb, I: FromDb, J: FromDb, K: FromDb, L: FromDb, M: FromDb, N: FromDb, O: FromDb, P: FromDb, Q: FromDb, R: FromDb]: FromDb[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
d <- of[D]
e <- of[E]
f <- of[F]
g <- of[G]
h <- of[H]
i <- of[I]
j <- of[J]
k <- of[K]
l <- of[L]
m <- of[M]
n <- of[N]
o <- of[O]
p <- of[P]
q <- of[Q]
r <- of[R]
} yield (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r)
implicit def FromDbTuple19[A: FromDb, B: FromDb, C: FromDb, D: FromDb, E: FromDb, F: FromDb, G: FromDb, H: FromDb, I: FromDb, J: FromDb, K: FromDb, L: FromDb, M: FromDb, N: FromDb, O: FromDb, P: FromDb, Q: FromDb, R: FromDb, S: FromDb]: FromDb[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
d <- of[D]
e <- of[E]
f <- of[F]
g <- of[G]
h <- of[H]
i <- of[I]
j <- of[J]
k <- of[K]
l <- of[L]
m <- of[M]
n <- of[N]
o <- of[O]
p <- of[P]
q <- of[Q]
r <- of[R]
s <- of[S]
} yield (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s)
implicit def FromDbTuple20[A: FromDb, B: FromDb, C: FromDb, D: FromDb, E: FromDb, F: FromDb, G: FromDb, H: FromDb, I: FromDb, J: FromDb, K: FromDb, L: FromDb, M: FromDb, N: FromDb, O: FromDb, P: FromDb, Q: FromDb, R: FromDb, S: FromDb, T: FromDb]: FromDb[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
d <- of[D]
e <- of[E]
f <- of[F]
g <- of[G]
h <- of[H]
i <- of[I]
j <- of[J]
k <- of[K]
l <- of[L]
m <- of[M]
n <- of[N]
o <- of[O]
p <- of[P]
q <- of[Q]
r <- of[R]
s <- of[S]
t <- of[T]
} yield (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t)
implicit def FromDbTuple21[A: FromDb, B: FromDb, C: FromDb, D: FromDb, E: FromDb, F: FromDb, G: FromDb, H: FromDb, I: FromDb, J: FromDb, K: FromDb, L: FromDb, M: FromDb, N: FromDb, O: FromDb, P: FromDb, Q: FromDb, R: FromDb, S: FromDb, T: FromDb, U: FromDb]: FromDb[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
d <- of[D]
e <- of[E]
f <- of[F]
g <- of[G]
h <- of[H]
i <- of[I]
j <- of[J]
k <- of[K]
l <- of[L]
m <- of[M]
n <- of[N]
o <- of[O]
p <- of[P]
q <- of[Q]
r <- of[R]
s <- of[S]
t <- of[T]
u <- of[U]
} yield (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u)
implicit def FromDbTuple22[A: FromDb, B: FromDb, C: FromDb, D: FromDb, E: FromDb, F: FromDb, G: FromDb, H: FromDb, I: FromDb, J: FromDb, K: FromDb, L: FromDb, M: FromDb, N: FromDb, O: FromDb, P: FromDb, Q: FromDb, R: FromDb, S: FromDb, T: FromDb, U: FromDb, V: FromDb]: FromDb[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)] = for {
a <- of[A]
b <- of[B]
c <- of[C]
d <- of[D]
e <- of[E]
f <- of[F]
g <- of[G]
h <- of[H]
i <- of[I]
j <- of[J]
k <- of[K]
l <- of[L]
m <- of[M]
n <- of[N]
o <- of[O]
p <- of[P]
q <- of[Q]
r <- of[R]
s <- of[S]
t <- of[T]
u <- of[U]
v <- of[V]
} yield (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v)
}
| markhibberd/vault | src/main/scala/vault/GeneratedFromDb.scala | Scala | bsd-3-clause | 9,358 |
package ch.bsisa.hyperbird.dao.xqs
import ch.bsisa.hyperbird.util.format.JsonXmlConverter
/**
* TODO: to be removed
*
* Predefined XQueries meant for prototyping/testing
*
* @author Patrick Refondini
*
*/
object TestQueries {
/**
* Parametrised query string
*
* Excludes 'credit special'
* IDENTIFIANT/DE must be text as it may contain data such as 2005|2006|2007
*/
def noSpecialCreditQueryString(year: Int, owner: String): String = {
s"""collection("/db/hb4/G20081113902512302")//ELFIN[not(contains(@GROUPE,'dit')) and not(contains(@GROUPE,'cial'))][contains(./IDENTIFIANT/DE,'${year}')][PARTENAIRE/PROPRIETAIRE/@NOM='${owner}']"""
}
// /**
// * Query parameters
// */
// val start: Double = 1
// val length: Double = 10
// val year = 2006
// val yearMinusOne = year - 1
// val owner = "NE"
// Query current year
//val xmls = XQueryHelper.seqOfElem(noSpecialCreditQueryString(year, owner))
// Query current year minus one
//val xmlsYearMinusOne = XQueryHelper.seqOfElem(noSpecialCreditQueryString(yearMinusOne, owner))
def noSpecialCreditAsJson(year: Int, owner: String): String = {
val xqueryString = noSpecialCreditQueryString(year, owner)
val xmlSeqResult = XQueryHelper.seqOfElem(xqueryString)
val jsonResult = JsonXmlConverter.xmlSeqToJson(xmlSeqResult)
jsonResult
}
} | bsisa/hb-api | app/ch/bsisa/hyperbird/dao/xqs/TestQueries.scala | Scala | gpl-2.0 | 1,384 |
package mayton.primes
import mayton.primes.PrimeLib._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.must.Matchers
class PermutationSpec extends AnyFlatSpec with Matchers {
it must "throw IllegalArgumentException any argument is negative" in {
a [IllegalArgumentException] must be thrownBy {
permutations(BigInt(-1),BigInt(0))
}
a [IllegalArgumentException] must be thrownBy {
permutations(BigInt(0),BigInt(-1))
}
}
"permutations(1,3)" must "be equals to 3" in {
permutations(BigInt(1),BigInt(3)) must be (BigInt(3))
}
"permutations(2,3)" must "be equals to 6" in {
permutations(BigInt(2),BigInt(3)) must be (BigInt(6))
}
"permutations(2,15)" must "be equals to 210" in {
permutations(BigInt(2),BigInt(15)) must be (BigInt(210))
}
def test1: Unit = {
}
}
| Mark-Kovalyov/primegen-experiments | mayton/mtn-primelib/src/test/scala/mayton/primes/PermutationSpec.scala | Scala | gpl-3.0 | 854 |
/*
* Copyright 2016 Coursera Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.coursera.naptime
import com.linkedin.data.schema.NamedDataSchema
import com.linkedin.data.schema.RecordDataSchema
import org.coursera.naptime.model.KeyFormat
import org.coursera.naptime.model.Keyed
import play.api.libs.json.JsArray
import play.api.libs.json.JsObject
import play.api.libs.json.JsValue
import play.api.libs.json.Json
import play.api.libs.json.OWrites
import play.api.mvc.RequestHeader
import scala.collection.JavaConverters._
import scala.language.existentials
/**
* Helpers to work with Json.
*/
private[naptime] object JsonUtilities {
def filterJsonFields(jsObj: JsObject, fields: RequestFields): JsObject = {
JsObject(jsObj.fields.filter { case (field, _) =>
field == KeyFormat.ID_FIELD || fields.hasField(field)
})
}
def outputOneObj[K, T](obj: Keyed[K, T], fields: RequestFields)
(implicit writes: OWrites[T], keyFormat: KeyFormat[K]): JsObject = {
val filtered = JsonUtilities.filterJsonFields(Keyed.writes.writes(obj), fields)
val keyFields = keyFormat.format.writes(obj.key)
keyFields ++ filtered
}
def outputSeq[K, T](objs: Seq[Keyed[K, T]], fields: RequestFields)
(implicit writes: OWrites[T], keyWrites: KeyFormat[K]): Seq[JsObject] = {
objs.map { obj =>
outputOneObj(obj, fields)
}
}
def formatInclude[K, M](related: Ok.Related[K, M], fields: RequestFields): (String, JsArray) = {
related.resourceName.identifier -> JsArray(related.toJson(fields))
}
def formatIncludes(
ok: Ok[_],
requestFields: RequestFields,
queryIncludes: QueryIncludes,
fields: Fields[_]): Option[JsObject] = {
if (ok.related.isEmpty || queryIncludes.fields.isEmpty) {
None
} else {
case class FieldsHolder(fields: Fields[_])
val fieldsMap = ok.related.map { related =>
related._1 -> FieldsHolder(related._2.fields)
}
val oneHopResourcesToInclude = queryIncludes.fields.flatMap { fieldName =>
fields.relations.get(fieldName)
}.toSet
val multiHopeResourcesToInclude = (for {
(resourceName, fieldNames) <- queryIncludes.resources
fieldName <- fieldNames
resourceFields <- fieldsMap.get(resourceName)
resourceToInclude <- resourceFields.fields.relations.get(fieldName)
} yield resourceToInclude).toSet
val resourcesToInclude = oneHopResourcesToInclude ++ multiHopeResourcesToInclude
val filteredRelated = ok.related.filter { case (name, _) =>
resourcesToInclude.contains(name)
}.values
Some(JsObject(filteredRelated.map(formatInclude(_, requestFields)).toList))
}
}
def requestAskingForLinksInformation(request: RequestHeader): Boolean = {
request.queryString.get("includes").exists { includes =>
includes.exists(_.contains("_links"))
}
}
def formatLinksMeta(
queryIncludes: QueryIncludes,
requestFields: RequestFields,
fields: Fields[_],
ok: Ok[_]): JsObject = {
// Don't bother outputting metadata if there are no fields present in the response.
val visibleIncludes = ok.related.filterKeys(requestFields.forResource(_).isDefined)
val formatted = visibleIncludes.map { case (name, related) =>
name.identifier ->
related.fields.makeMetaRelationsMap(queryIncludes.resources.getOrElse(name, Set.empty))
}.toList
JsObject("elements" -> fields.makeMetaRelationsMap(queryIncludes.fields) :: formatted)
}
// TODO(future): Format differently based on the request header accepts.
def formatSuccessfulResponseBody(
response: Ok[_],
elements: JsValue,
fields: Fields[_],
request: RequestHeader,
requestFields: RequestFields,
queryIncludes: QueryIncludes): JsObject = {
var obj = Json.obj(
"elements" -> elements,
"paging" -> Json.toJson(response.pagination),
"linked" -> formatIncludes(response, requestFields, queryIncludes, fields)
)
if (requestAskingForLinksInformation(request)) {
obj = obj + ("links" -> formatLinksMeta(queryIncludes, requestFields, fields, response))
}
obj
}
}
/**
* Helpers for working with Courier/Pegasus Schemas.
*/
object SchemaUtils {
/**
* Fixes up inferred schemas with additional type information.
*
* When computing the schemas for resources, if the model or value type is not a courier
* (or known type), the macro calls [[org.coursera.naptime.courier.SchemaInference]], which
* attempts to infer the schema via reflection. There are a number of types that the schema
* inferencer cannot handle. This function will take a scala-guice map-binding and fix up
* the inappropriately inferred schemas with configured types.
*
* Note: this is a destructive operation, and mutates the input record data schema. Please
* use with care to ensure that the schemas are not being modified inappropriately.
*/
def fixupInferredSchemas(
schemaToFix: RecordDataSchema,
typeOverrides: NaptimeModule.SchemaTypeOverrides,
visitedFields: Set[String] = Set.empty): Unit = {
schemaToFix.getFields.asScala.foreach { field =>
val fieldType = field.getType
fieldType.getDereferencedDataSchema match {
case named: NamedDataSchema if typeOverrides.contains(named.getFullName) =>
val overrideType = typeOverrides(named.getFullName)
field.setType(overrideType)
case recordType: RecordDataSchema if !visitedFields.contains(recordType.getFullName) =>
val updatedVisitedFields = visitedFields + recordType.getFullName
fixupInferredSchemas(recordType, typeOverrides, updatedVisitedFields)
case _ => // Do nothing otherwise.
}
}
}
}
| josh-newman/naptime | naptime/src/main/scala/org/coursera/naptime/utilities.scala | Scala | apache-2.0 | 6,304 |
/**
* TABuddy-Model - a human-centric K,V framework
*
* Copyright (c) 2014 Alexey Aksenov ezh@ezh.msk.ru
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.digimead.tabuddy.model.serialization.signature
import java.io.{ InputStream, OutputStream }
import java.net.URI
import java.security.PublicKey
import java.util.concurrent.atomic.AtomicReference
import org.digimead.tabuddy.model.Model
import org.digimead.tabuddy.model.element.Element
import org.digimead.tabuddy.model.graph.Graph
import org.digimead.tabuddy.model.serialization.SData
import org.digimead.tabuddy.model.serialization.transport.Transport
import scala.ref.SoftReference
/**
* Signature mechanism interface.
*/
trait Mechanism {
/** Identifier of the signature mechanism. */
val identifier: Mechanism.Identifier
/** Get mechanism parameters. */
def apply(algorithmName: String, args: String*): Mechanism.Parameters
/** Just invoked before freeze completion. */
def afterFreeze(parameters: Mechanism.Parameters, graph: Graph[_ <: Model.Like], transport: Transport, sData: SData)
/** Initialize SData for acquire process. */
def initAcquire(sData: SData): SData
/** Initialize SData for freeze process. */
def initFreeze(sData: SData): SData
/** Just invoked after read beginning. */
def readFilter(parameters: Mechanism.Parameters, context: AtomicReference[SoftReference[AnyRef]],
modified: Element.Timestamp, is: InputStream, uri: URI, transport: Transport, sData: SData): InputStream
/** Just invoked after write beginning. */
def writeFilter(parameters: Mechanism.Parameters, os: OutputStream,
uri: URI, transport: Transport, sData: SData): OutputStream
}
object Mechanism {
/**
* Identifier that is associated with the signature mechanism.
*/
trait Identifier extends Equals with java.io.Serializable {
/** Mechanism name. */
val name: String
/** Mechanism description. */
val description: String
override def canEqual(that: Any) = that.isInstanceOf[Identifier]
override def equals(that: Any): Boolean = that match {
case that: Identifier ⇒ that.canEqual(this) && that.name.equals(this.name)
case _ ⇒ false
}
override def hashCode = name.##
override def toString = s"Mechanism.Identifier(${name})"
}
/**
* Mechanism parameters.
*/
trait Parameters extends Product with java.io.Serializable {
/** Signature algorithm name. */
def algorithm: String
/** Signature parameters as sequence of strings. */
def arguments: Seq[String]
/** Mechanism instance. */
def mechanism: Mechanism
/** Get public key for the current parameter. */
def publicKey: PublicKey
}
}
| digimead/digi-TABuddy-model | src/main/scala/org/digimead/tabuddy/model/serialization/signature/Mechanism.scala | Scala | apache-2.0 | 3,206 |
package hoecoga.slack
import java.net.{URI, URL}
import hoecoga.Config
import play.api.libs.json.{JsValue, Json}
import scalaj.http.Http
/**
* @see [[https://api.slack.com/methods]]
*/
class SlackWebApi(config: Config) {
private[this] def request(path: String, params: (String, String)*): JsValue = {
val http = (params.toList :+ ("token" -> config.slack.token)).foldRight(Http(new URL(config.slack.baseUrl, path).toString)) {
case ((key, value), req) =>
req.param(key, value)
}
Json.parse(http.asBytes.body)
}
/**
* @see [[https://api.slack.com/methods/channels.info]]
*/
def info(channel: SlackChannel): String = {
val path = "/api/channels.info"
val json = request(path, "channel" -> channel.id)
val name = (json \\ "channel" \\ "name").as[String]
name
}
/**
* @see [[https://api.slack.com/methods/rtm.start]]
*/
def start(): (URI, SlackUser) = {
val path = "/api/rtm.start"
val json = request(path)
val url = (json \\ "url").as[String]
val selfId = (json \\ "self" \\ "id").as[String]
(new URI(url), SlackUser(selfId))
}
}
| hoecoga/hoecoga-bot | src/main/scala/hoecoga/slack/SlackWebApi.scala | Scala | mit | 1,120 |
/*
* #%L
* MITH General Utilities
* %%
* Copyright (C) 2011 - 2012 Maryland Institute for Technology in the Humanities
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package edu.umd.mith.util.hathi
import java.io.File
import scala.io.Source
import edu.umd.mith.util.{ RichFile, ZipReader }
case class TextInfo(
val id: String,
val metsFile: File,
val zipFile: File
)
class Collection(private val base: String) {
def escape(id: String): (String, String) = {
val first = id.indexOf(".")
val collection = id.substring(0, first)
val remainder = id.substring(first + 1)
val dirName = remainder.replaceAll("\\\\.", ",")
.replaceAll("\\\\:", "+")
.replaceAll("\\\\/", "=")
(collection, dirName)
}
def unescape(dirName: String) =
dirName.replaceAll("\\\\,", ".")
.replaceAll("\\\\+", ":")
.replaceAll("\\\\=", "/")
def texts: Iterator[TextInfo] = {
new File(this.base).listFiles.sorted.toIterator.flatMap { collection =>
new RichFile(new File(collection, "pairtree_root")).leaves.map { path =>
val metsFile = new File(path, path.getName + ".mets.xml")
val zipFile = new File(path, path.getName + ".zip")
assert(metsFile.exists)
assert(zipFile.exists)
TextInfo(
collection.getName + "." + this.unescape(path.getName),
metsFile,
zipFile
)
}
}
}
def findTextInfo(id: String): Option[TextInfo] = {
val (collection, name) = this.escape(id)
val parts = collection +: "pairtree_root" +: name.grouped(2).toList :+ name
val path = new File(this.base, parts.mkString(File.separator))
if (path.exists) {
val metsFile = new File(path, path.getName + ".mets.xml")
val zipFile = new File(path, path.getName + ".zip")
if (metsFile.exists && zipFile.exists) {
Some(TextInfo(id, metsFile, zipFile))
} else None
} else {
System.err.println("ERROR: no such file: " + path)
None
}
}
def extractPages(text: TextInfo): Iterator[(Int, String)] = {
val reader = new ZipReader(text.zipFile)
reader.iterator.drop(1).flatMap {
case (path, source) => {
val Array(_, name) = path.split("\\\\/")
val Array(number, _) = name.split("\\\\.")
try Some(number.toInt, source.mkString)
catch { case e: NumberFormatException => None }
}
}
}
def malletFormat(id: String): Option[Iterator[String]] = {
this.findTextInfo(id).map {
this.extractPages(_).map {
case (number, content) =>
"%s.%04d _ %s".format(id, number, content.replaceAll("\\n", " "))
}
}
}
}
/**
* Simple example of how to extract texts from the Hathi collection, in this
* case to export nineteenth-century texts for topic modeling with MALLET.
*/
object MalletConverter {
def main(args: Array[String]) {
import edu.umd.mith.util.SimpleDateCleaner.parseYearField
val metadata = new MetadataParser(args(0))
val collection = new Collection(args(1))
val out = new java.io.PrintWriter(args(2))
val pages = metadata.par.flatMap {
case (id, fields) =>
fields.get("date").flatMap(
_.headOption.flatMap(parseYearField(_))
).flatMap {
case year if year.start >= 1800 && year.end < 1900 =>
println(id)
collection.malletFormat(id)
case _ => None
}.flatten
}
pages.foreach(out.println)
out.close()
}
}
| umd-mith/mith-jvm-lib | util/src/main/scala/edu/umd/mith/util/hathi/Collection.scala | Scala | apache-2.0 | 4,052 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import java.security.InvalidKeyException
import java.util.Base64
import javax.crypto.spec.SecretKeySpec
import javax.crypto.{Mac, SecretKey}
import kafka.common.{NotificationHandler, ZkNodeChangeNotificationListener}
import kafka.metrics.KafkaMetricsGroup
import kafka.utils.{CoreUtils, Json, Logging}
import kafka.zk.{DelegationTokenChangeNotificationSequenceZNode, DelegationTokenChangeNotificationZNode, DelegationTokensZNode, KafkaZkClient}
import org.apache.kafka.common.protocol.Errors
import org.apache.kafka.common.security.auth.KafkaPrincipal
import org.apache.kafka.common.security.scram.internals.{ScramFormatter, ScramMechanism}
import org.apache.kafka.common.security.scram.ScramCredential
import org.apache.kafka.common.security.token.delegation.internals.DelegationTokenCache
import org.apache.kafka.common.security.token.delegation.{DelegationToken, TokenInformation}
import org.apache.kafka.common.utils.{Sanitizer, SecurityUtils, Time}
import scala.jdk.CollectionConverters._
import scala.collection.mutable
object DelegationTokenManager {
val DefaultHmacAlgorithm = "HmacSHA512"
val OwnerKey ="owner"
val RenewersKey = "renewers"
val IssueTimestampKey = "issueTimestamp"
val MaxTimestampKey = "maxTimestamp"
val ExpiryTimestampKey = "expiryTimestamp"
val TokenIdKey = "tokenId"
val VersionKey = "version"
val CurrentVersion = 1
val ErrorTimestamp = -1
/**
*
* @param tokenId
* @param secretKey
* @return
*/
def createHmac(tokenId: String, secretKey: String) : Array[Byte] = {
createHmac(tokenId, createSecretKey(secretKey.getBytes(StandardCharsets.UTF_8)))
}
/**
* Convert the byte[] to a secret key
* @param keybytes the byte[] to create the secret key from
* @return the secret key
*/
def createSecretKey(keybytes: Array[Byte]) : SecretKey = {
new SecretKeySpec(keybytes, DefaultHmacAlgorithm)
}
/**
*
*
* @param tokenId
* @param secretKey
* @return
*/
def createBase64HMAC(tokenId: String, secretKey: SecretKey) : String = {
val hmac = createHmac(tokenId, secretKey)
Base64.getEncoder.encodeToString(hmac)
}
/**
* Compute HMAC of the identifier using the secret key
* @param tokenId the bytes of the identifier
* @param secretKey the secret key
* @return String of the generated hmac
*/
def createHmac(tokenId: String, secretKey: SecretKey) : Array[Byte] = {
val mac = Mac.getInstance(DefaultHmacAlgorithm)
try
mac.init(secretKey)
catch {
case ike: InvalidKeyException => throw new IllegalArgumentException("Invalid key to HMAC computation", ike);
}
mac.doFinal(tokenId.getBytes(StandardCharsets.UTF_8))
}
def toJsonCompatibleMap(token: DelegationToken): Map[String, Any] = {
val tokenInfo = token.tokenInfo
val tokenInfoMap = mutable.Map[String, Any]()
tokenInfoMap(VersionKey) = CurrentVersion
tokenInfoMap(OwnerKey) = Sanitizer.sanitize(tokenInfo.ownerAsString)
tokenInfoMap(RenewersKey) = tokenInfo.renewersAsString.asScala.map(e => Sanitizer.sanitize(e)).asJava
tokenInfoMap(IssueTimestampKey) = tokenInfo.issueTimestamp
tokenInfoMap(MaxTimestampKey) = tokenInfo.maxTimestamp
tokenInfoMap(ExpiryTimestampKey) = tokenInfo.expiryTimestamp
tokenInfoMap(TokenIdKey) = tokenInfo.tokenId()
tokenInfoMap.toMap
}
def fromBytes(bytes: Array[Byte]): Option[TokenInformation] = {
if (bytes == null || bytes.isEmpty)
return None
Json.parseBytes(bytes) match {
case Some(js) =>
val mainJs = js.asJsonObject
require(mainJs(VersionKey).to[Int] == CurrentVersion)
val owner = SecurityUtils.parseKafkaPrincipal(Sanitizer.desanitize(mainJs(OwnerKey).to[String]))
val renewerStr = mainJs(RenewersKey).to[Seq[String]]
val renewers = renewerStr.map(Sanitizer.desanitize(_)).map(SecurityUtils.parseKafkaPrincipal(_))
val issueTimestamp = mainJs(IssueTimestampKey).to[Long]
val expiryTimestamp = mainJs(ExpiryTimestampKey).to[Long]
val maxTimestamp = mainJs(MaxTimestampKey).to[Long]
val tokenId = mainJs(TokenIdKey).to[String]
val tokenInfo = new TokenInformation(tokenId, owner, renewers.asJava,
issueTimestamp, maxTimestamp, expiryTimestamp)
Some(tokenInfo)
case None =>
None
}
}
def filterToken(requestedPrincipal: KafkaPrincipal, owners : Option[List[KafkaPrincipal]], token: TokenInformation, authorizeToken: String => Boolean) : Boolean = {
val allow =
//exclude tokens which are not requested
if (!owners.isEmpty && !owners.get.exists(owner => token.ownerOrRenewer(owner))) {
false
//Owners and the renewers can describe their own tokens
} else if (token.ownerOrRenewer(requestedPrincipal)) {
true
// Check permission for non-owned tokens
} else if ((authorizeToken(token.tokenId))) {
true
}
else {
false
}
allow
}
}
class DelegationTokenManager(val config: KafkaConfig,
val tokenCache: DelegationTokenCache,
val time: Time,
val zkClient: KafkaZkClient) extends Logging with KafkaMetricsGroup {
this.logIdent = s"[Token Manager on Broker ${config.brokerId}]: "
import DelegationTokenManager._
type CreateResponseCallback = CreateTokenResult => Unit
type RenewResponseCallback = (Errors, Long) => Unit
type ExpireResponseCallback = (Errors, Long) => Unit
type DescribeResponseCallback = (Errors, List[DelegationToken]) => Unit
val secretKey = {
val keyBytes = if (config.tokenAuthEnabled) config.delegationTokenSecretKey.value.getBytes(StandardCharsets.UTF_8) else null
if (keyBytes == null || keyBytes.length == 0) null
else
createSecretKey(keyBytes)
}
val tokenMaxLifetime: Long = config.delegationTokenMaxLifeMs
val defaultTokenRenewTime: Long = config.delegationTokenExpiryTimeMs
val tokenRemoverScanInterval: Long = config.delegationTokenExpiryCheckIntervalMs
private val lock = new Object()
private var tokenChangeListener: ZkNodeChangeNotificationListener = null
def startup() = {
if (config.tokenAuthEnabled) {
zkClient.createDelegationTokenPaths()
loadCache()
tokenChangeListener = new ZkNodeChangeNotificationListener(zkClient, DelegationTokenChangeNotificationZNode.path, DelegationTokenChangeNotificationSequenceZNode.SequenceNumberPrefix, TokenChangedNotificationHandler)
tokenChangeListener.init()
}
}
def shutdown() = {
if (config.tokenAuthEnabled) {
if (tokenChangeListener != null) tokenChangeListener.close()
}
}
private def loadCache(): Unit = {
lock.synchronized {
val tokens = zkClient.getChildren(DelegationTokensZNode.path)
info(s"Loading the token cache. Total token count: ${tokens.size}")
for (tokenId <- tokens) {
try {
getTokenFromZk(tokenId) match {
case Some(token) => updateCache(token)
case None =>
}
} catch {
case ex: Throwable => error(s"Error while getting Token for tokenId: $tokenId", ex)
}
}
}
}
private def getTokenFromZk(tokenId: String): Option[DelegationToken] = {
zkClient.getDelegationTokenInfo(tokenId) match {
case Some(tokenInformation) => {
val hmac = createHmac(tokenId, secretKey)
Some(new DelegationToken(tokenInformation, hmac))
}
case None =>
None
}
}
/**
*
* @param token
*/
private def updateCache(token: DelegationToken): Unit = {
val hmacString = token.hmacAsBase64String
val scramCredentialMap = prepareScramCredentials(hmacString)
tokenCache.updateCache(token, scramCredentialMap.asJava)
}
/**
* @param hmacString
*/
private def prepareScramCredentials(hmacString: String) : Map[String, ScramCredential] = {
val scramCredentialMap = mutable.Map[String, ScramCredential]()
def scramCredential(mechanism: ScramMechanism): ScramCredential = {
new ScramFormatter(mechanism).generateCredential(hmacString, mechanism.minIterations)
}
for (mechanism <- ScramMechanism.values)
scramCredentialMap(mechanism.mechanismName) = scramCredential(mechanism)
scramCredentialMap.toMap
}
/**
*
* @param owner
* @param renewers
* @param maxLifeTimeMs
* @param responseCallback
*/
def createToken(owner: KafkaPrincipal,
renewers: List[KafkaPrincipal],
maxLifeTimeMs: Long,
responseCallback: CreateResponseCallback): Unit = {
if (!config.tokenAuthEnabled) {
responseCallback(CreateTokenResult(-1, -1, -1, "", Array[Byte](), Errors.DELEGATION_TOKEN_AUTH_DISABLED))
} else {
lock.synchronized {
val tokenId = CoreUtils.generateUuidAsBase64()
val issueTimeStamp = time.milliseconds
val maxLifeTime = if (maxLifeTimeMs <= 0) tokenMaxLifetime else Math.min(maxLifeTimeMs, tokenMaxLifetime)
val maxLifeTimeStamp = issueTimeStamp + maxLifeTime
val expiryTimeStamp = Math.min(maxLifeTimeStamp, issueTimeStamp + defaultTokenRenewTime)
val tokenInfo = new TokenInformation(tokenId, owner, renewers.asJava, issueTimeStamp, maxLifeTimeStamp, expiryTimeStamp)
val hmac = createHmac(tokenId, secretKey)
val token = new DelegationToken(tokenInfo, hmac)
updateToken(token)
info(s"Created a delegation token: $tokenId for owner: $owner")
responseCallback(CreateTokenResult(issueTimeStamp, expiryTimeStamp, maxLifeTimeStamp, tokenId, hmac, Errors.NONE))
}
}
}
/**
*
* @param principal
* @param hmac
* @param renewLifeTimeMs
* @param renewCallback
*/
def renewToken(principal: KafkaPrincipal,
hmac: ByteBuffer,
renewLifeTimeMs: Long,
renewCallback: RenewResponseCallback): Unit = {
if (!config.tokenAuthEnabled) {
renewCallback(Errors.DELEGATION_TOKEN_AUTH_DISABLED, -1)
} else {
lock.synchronized {
getToken(hmac) match {
case Some(token) => {
val now = time.milliseconds
val tokenInfo = token.tokenInfo
if (!allowedToRenew(principal, tokenInfo)) {
renewCallback(Errors.DELEGATION_TOKEN_OWNER_MISMATCH, -1)
} else if (tokenInfo.maxTimestamp < now || tokenInfo.expiryTimestamp < now) {
renewCallback(Errors.DELEGATION_TOKEN_EXPIRED, -1)
} else {
val renewLifeTime = if (renewLifeTimeMs < 0) defaultTokenRenewTime else renewLifeTimeMs
val renewTimeStamp = now + renewLifeTime
val expiryTimeStamp = Math.min(tokenInfo.maxTimestamp, renewTimeStamp)
tokenInfo.setExpiryTimestamp(expiryTimeStamp)
updateToken(token)
info(s"Delegation token renewed for token: ${tokenInfo.tokenId} for owner: ${tokenInfo.owner}")
renewCallback(Errors.NONE, expiryTimeStamp)
}
}
case None => renewCallback(Errors.DELEGATION_TOKEN_NOT_FOUND, -1)
}
}
}
}
/**
* @param token
*/
private def updateToken(token: DelegationToken): Unit = {
zkClient.setOrCreateDelegationToken(token)
updateCache(token)
zkClient.createTokenChangeNotification(token.tokenInfo.tokenId())
}
/**
*
* @param hmac
* @return
*/
private def getToken(hmac: ByteBuffer): Option[DelegationToken] = {
try {
val byteArray = new Array[Byte](hmac.remaining)
hmac.get(byteArray)
val base64Pwd = Base64.getEncoder.encodeToString(byteArray)
val tokenInfo = tokenCache.tokenForHmac(base64Pwd)
if (tokenInfo == null) None else Some(new DelegationToken(tokenInfo, byteArray))
} catch {
case e: Exception =>
error("Exception while getting token for hmac", e)
None
}
}
/**
*
* @param principal
* @param tokenInfo
* @return
*/
private def allowedToRenew(principal: KafkaPrincipal, tokenInfo: TokenInformation): Boolean = {
if (principal.equals(tokenInfo.owner) || tokenInfo.renewers.asScala.toList.contains(principal)) true else false
}
/**
*
* @param tokenId
* @return
*/
def getToken(tokenId: String): Option[DelegationToken] = {
val tokenInfo = tokenCache.token(tokenId)
if (tokenInfo != null) Some(getToken(tokenInfo)) else None
}
/**
*
* @param tokenInfo
* @return
*/
private def getToken(tokenInfo: TokenInformation): DelegationToken = {
val hmac = createHmac(tokenInfo.tokenId, secretKey)
new DelegationToken(tokenInfo, hmac)
}
/**
*
* @param principal
* @param hmac
* @param expireLifeTimeMs
* @param expireResponseCallback
*/
def expireToken(principal: KafkaPrincipal,
hmac: ByteBuffer,
expireLifeTimeMs: Long,
expireResponseCallback: ExpireResponseCallback): Unit = {
if (!config.tokenAuthEnabled) {
expireResponseCallback(Errors.DELEGATION_TOKEN_AUTH_DISABLED, -1)
} else {
lock.synchronized {
getToken(hmac) match {
case Some(token) => {
val tokenInfo = token.tokenInfo
val now = time.milliseconds
if (!allowedToRenew(principal, tokenInfo)) {
expireResponseCallback(Errors.DELEGATION_TOKEN_OWNER_MISMATCH, -1)
} else if (tokenInfo.maxTimestamp < now || tokenInfo.expiryTimestamp < now) {
expireResponseCallback(Errors.DELEGATION_TOKEN_EXPIRED, -1)
} else if (expireLifeTimeMs < 0) { //expire immediately
removeToken(tokenInfo.tokenId)
info(s"Token expired for token: ${tokenInfo.tokenId} for owner: ${tokenInfo.owner}")
expireResponseCallback(Errors.NONE, now)
} else {
//set expiry time stamp
val expiryTimeStamp = Math.min(tokenInfo.maxTimestamp, now + expireLifeTimeMs)
tokenInfo.setExpiryTimestamp(expiryTimeStamp)
updateToken(token)
info(s"Updated expiry time for token: ${tokenInfo.tokenId} for owner: ${tokenInfo.owner}")
expireResponseCallback(Errors.NONE, expiryTimeStamp)
}
}
case None => expireResponseCallback(Errors.DELEGATION_TOKEN_NOT_FOUND, -1)
}
}
}
}
/**
*
* @param tokenId
*/
private def removeToken(tokenId: String): Unit = {
zkClient.deleteDelegationToken(tokenId)
removeCache(tokenId)
zkClient.createTokenChangeNotification(tokenId)
}
/**
*
* @param tokenId
*/
private def removeCache(tokenId: String): Unit = {
tokenCache.removeCache(tokenId)
}
/**
*
* @return
*/
def expireTokens(): Unit = {
lock.synchronized {
for (tokenInfo <- getAllTokenInformation) {
val now = time.milliseconds
if (tokenInfo.maxTimestamp < now || tokenInfo.expiryTimestamp < now) {
info(s"Delegation token expired for token: ${tokenInfo.tokenId} for owner: ${tokenInfo.owner}")
removeToken(tokenInfo.tokenId)
}
}
}
}
def getAllTokenInformation: List[TokenInformation] = tokenCache.tokens.asScala.toList
def getTokens(filterToken: TokenInformation => Boolean): List[DelegationToken] = {
getAllTokenInformation.filter(filterToken).map(token => getToken(token))
}
object TokenChangedNotificationHandler extends NotificationHandler {
override def processNotification(tokenIdBytes: Array[Byte]): Unit = {
lock.synchronized {
val tokenId = new String(tokenIdBytes, StandardCharsets.UTF_8)
info(s"Processing Token Notification for tokenId: $tokenId")
getTokenFromZk(tokenId) match {
case Some(token) => updateCache(token)
case None => removeCache(tokenId)
}
}
}
}
}
case class CreateTokenResult(issueTimestamp: Long,
expiryTimestamp: Long,
maxTimestamp: Long,
tokenId: String,
hmac: Array[Byte],
error: Errors) {
override def equals(other: Any): Boolean = {
other match {
case that: CreateTokenResult =>
error.equals(that.error) &&
tokenId.equals(that.tokenId) &&
issueTimestamp.equals(that.issueTimestamp) &&
expiryTimestamp.equals(that.expiryTimestamp) &&
maxTimestamp.equals(that.maxTimestamp) &&
(hmac sameElements that.hmac)
case _ => false
}
}
override def hashCode(): Int = {
val fields = Seq(issueTimestamp, expiryTimestamp, maxTimestamp, tokenId, hmac, error)
fields.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
}
}
| guozhangwang/kafka | core/src/main/scala/kafka/server/DelegationTokenManager.scala | Scala | apache-2.0 | 17,781 |
package org.jetbrains.plugins.scala
package compiler
import java.io.{File, IOException}
import javax.swing.event.HyperlinkEvent
import com.intellij.notification.{Notification, NotificationListener, NotificationType, Notifications}
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.components.ApplicationComponent
import com.intellij.openapi.project.Project
import com.intellij.openapi.projectRoots.{JavaSdk, ProjectJdkTable}
import com.intellij.openapi.roots.ProjectRootManager
import com.intellij.openapi.util.io.FileUtil
import com.intellij.util.PathUtil
import com.intellij.util.net.NetUtils
import gnu.trove.TByteArrayList
import org.jetbrains.jps.incremental.BuilderService
import org.jetbrains.plugins.scala.compiler.CompileServerLauncher._
import org.jetbrains.plugins.scala.extensions._
import scala.collection.JavaConverters._
import scala.util.control.Exception._
/**
* @author Pavel Fatin
*/
class CompileServerLauncher extends ApplicationComponent {
private var serverInstance: Option[ServerInstance] = None
def initComponent() {}
def disposeComponent() {
if (running) stop()
}
def tryToStart(project: Project): Boolean = {
if (!running) {
val started = start(project)
if (started) {
try new RemoteServerRunner(project).send("addDisconnectListener", Seq.empty, null)
catch {
case e: Exception =>
}
}
started
}
else true
}
private def start(project: Project): Boolean = {
val applicationSettings = ScalaCompileServerSettings.getInstance
if (applicationSettings.COMPILE_SERVER_SDK == null) {
// Try to find a suitable JDK
val choice = Option(ProjectRootManager.getInstance(project).getProjectSdk).orElse {
val all = ProjectJdkTable.getInstance.getSdksOfType(JavaSdk.getInstance()).asScala
all.headOption
}
choice.foreach(sdk => applicationSettings.COMPILE_SERVER_SDK = sdk.getName)
// val message = "JVM SDK is automatically selected: " + name +
// "\\n(can be changed in Application Settings / Scala)"
// Notifications.Bus.notify(new Notification("scala", "Scala compile server",
// message, NotificationType.INFORMATION))
}
findJdkByName(applicationSettings.COMPILE_SERVER_SDK)
.left.map(_ + "\\nPlease either disable Scala compile server or configure a valid JVM SDK for it.")
.right.flatMap(start(project, _)) match {
case Left(error) =>
val title = "Cannot start Scala compile server"
val content = s"<html><body>${error.replace("\\n", "<br>")} <a href=''>Configure</a></body></html>"
Notifications.Bus.notify(new Notification("scala", title, content, NotificationType.ERROR, ConfigureLinkListener))
false
case Right(_) =>
ApplicationManager.getApplication invokeLater new Runnable {
override def run() {
CompileServerManager.instance(project).configureWidget()
}
}
true
}
}
private def start(project: Project, jdk: JDK): Either[String, Process] = {
import org.jetbrains.plugins.scala.compiler.CompileServerLauncher.{compilerJars, jvmParameters}
compilerJars.partition(_.exists) match {
case (presentFiles, Seq()) =>
val classpath = (jdk.tools +: presentFiles).map(_.canonicalPath).mkString(File.pathSeparator)
val settings = ScalaCompileServerSettings.getInstance
val freePort = CompileServerLauncher.findFreePort
if (settings.COMPILE_SERVER_PORT != freePort) {
new RemoteServerStopper(settings.COMPILE_SERVER_PORT).sendStop()
settings.COMPILE_SERVER_PORT = freePort
ApplicationManager.getApplication.saveSettings()
}
val ngRunnerFqn = "org.jetbrains.plugins.scala.nailgun.NailgunRunner"
val id = settings.COMPILE_SERVER_ID
val commands = jdk.executable.canonicalPath +: "-cp" +: classpath +: jvmParameters ++:
ngRunnerFqn +: freePort.toString +: id.toString +: Nil
val builder = new ProcessBuilder(commands.asJava)
if (settings.USE_PROJECT_HOME_AS_WORKING_DIR) {
projectHome(project).foreach(dir => builder.directory(dir))
}
catching(classOf[IOException]).either(builder.start())
.left.map(_.getMessage)
.right.map { process =>
val watcher = new ProcessWatcher(process)
serverInstance = Some(ServerInstance(watcher, freePort, builder.directory()))
watcher.startNotify()
process
}
case (_, absentFiles) =>
val paths = absentFiles.map(_.getPath).mkString(", ")
Left("Required file(s) not found: " + paths)
}
}
// TODO stop server more gracefully
def stop() {
serverInstance.foreach { it =>
it.destroyProcess()
}
}
def stop(project: Project) {
stop()
ApplicationManager.getApplication invokeLater new Runnable {
override def run() {
CompileServerManager.instance(project).configureWidget()
}
}
}
def running: Boolean = serverInstance.exists(_.running)
def errors(): Seq[String] = serverInstance.map(_.errors()).getOrElse(Seq.empty)
def port: Option[Int] = serverInstance.map(_.port)
def getComponentName = getClass.getSimpleName
}
object CompileServerLauncher {
def instance = ApplicationManager.getApplication.getComponent(classOf[CompileServerLauncher])
def compilerJars = {
val jpsBuildersJar = new File(PathUtil.getJarPathForClass(classOf[BuilderService]))
val utilJar = new File(PathUtil.getJarPathForClass(classOf[FileUtil]))
val trove4jJar = new File(PathUtil.getJarPathForClass(classOf[TByteArrayList]))
val pluginRoot = new File(PathUtil.getJarPathForClass(getClass)).getParent
val jpsRoot = new File(pluginRoot, "jps")
Seq(
jpsBuildersJar,
utilJar,
trove4jJar,
new File(pluginRoot, "scala-library.jar"),
new File(pluginRoot, "scala-nailgun-runner.jar"),
new File(pluginRoot, "compiler-settings.jar"),
new File(jpsRoot, "nailgun.jar"),
new File(jpsRoot, "sbt-interface.jar"),
new File(jpsRoot, "incremental-compiler.jar"),
new File(jpsRoot, "jline.jar"),
new File(jpsRoot, "scala-jps-plugin.jar"))
}
def jvmParameters: Seq[String] = {
val settings = ScalaCompileServerSettings.getInstance
val xmx = settings.COMPILE_SERVER_MAXIMUM_HEAP_SIZE |> { size =>
if (size.isEmpty) Nil else List("-Xmx%sm".format(size))
}
val (userMaxPermSize, otherParams) = settings.COMPILE_SERVER_JVM_PARAMETERS.split(" ").partition(_.contains("-XX:MaxPermSize"))
val defaultMaxPermSize = Some("-XX:MaxPermSize=256m")
val needMaxPermSize = settings.COMPILE_SERVER_SDK < "1.8"
val maxPermSize = if (needMaxPermSize) userMaxPermSize.headOption.orElse(defaultMaxPermSize) else None
xmx ++ otherParams ++ maxPermSize
}
def ensureServerRunning(project: Project) {
val launcher = CompileServerLauncher.instance
if (needRestart(project)) launcher.stop()
if (!launcher.running) launcher.tryToStart(project)
}
def needRestart(project: Project): Boolean = {
val launcher = CompileServerLauncher.instance
ScalaCompileServerSettings.getInstance().USE_PROJECT_HOME_AS_WORKING_DIR &&
projectHome(project) != launcher.serverInstance.map(_.workingDir)
}
def ensureNotRunning(project: Project) {
val launcher = CompileServerLauncher.instance
if (launcher.running) launcher.stop(project)
}
def findFreePort: Int = {
val port = ScalaCompileServerSettings.getInstance().COMPILE_SERVER_PORT
if (NetUtils.canConnectToSocket("localhost", port))
NetUtils.findAvailableSocketPort()
else port
}
private def projectHome(project: Project): Option[File] = {
for {
dir <- Option(project.getBaseDir)
path <- Option(dir.getCanonicalPath)
file = new File(path)
if file.exists()
} yield file
}
}
private case class ServerInstance(watcher: ProcessWatcher, port: Int, workingDir: File) {
private var stopped = false
def running: Boolean = !stopped && watcher.running
def errors(): Seq[String] = watcher.errors()
def destroyProcess() {
stopped = true
watcher.destroyProcess()
}
}
private object ConfigureLinkListener extends NotificationListener.Adapter {
def hyperlinkActivated(notification: Notification, event: HyperlinkEvent) {
CompileServerManager.showCompileServerSettingsDialog()
notification.expire()
}
} | LPTK/intellij-scala | src/org/jetbrains/plugins/scala/compiler/CompileServerLauncher.scala | Scala | apache-2.0 | 8,593 |
/*
Copyright (c) 2017, Robby, Kansas State University
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.sireum
import org.sireum.$internal.{Boxer, MSMarker}
object MS {
class WithFilter[I, V](ms: MS[I, V], p: V => B) {
def foreach[U](f: V => U): Unit = {
var i = Z.MP.zero
while (i < ms.length) {
val v = ms.boxer.lookup[V](ms.data, i)
if (p(v)) {
f(v)
}
i = i.increase
}
}
def map[V2](f: V => V2): MS[I, V2] =
if (ms.isEmpty) MS[I, V2]()(ms.companion)
else {
var a: AnyRef = null
var boxer2: Boxer = null
var i = Z.MP.zero
var j = Z.MP.zero
while (i < ms.length) {
val v = ms.boxer.lookup[V](ms.data, i)
if (p(v)) {
val v2 = f(v)
if (boxer2 == null) {
boxer2 = Boxer.boxer(v2)
a = boxer2.create(ms.length)
}
boxer2.store(a, j, helper.assign(v2))
j = j.increase
}
i = i.increase
}
MS[I, V2](ms.companion, a, j, if (boxer2 == null) $internal.IdentityBoxer else boxer2)
}
def flatMap[V2](f: V => MS[I, V2]): MS[I, V2] =
if (ms.isEmpty) MS[I, V2]()(ms.companion)
else {
val es = ms.elements
var r = f(es.head)
for (e <- es.tail if p(e)) {
r = r ++ f(e)
}
r
}
def withFilter(p2: V => B): WithFilter[I, V] = new WithFilter[I, V](ms, v => p(v) && p2(v))
}
def checkSize[I](size: Z)(implicit companion: $ZCompanion[I]): Unit = {
assert(Z.MP.zero <= size, s"Slang MS requires a non-negative size.")
assert(
!companion.hasMax || companion.Index.asInstanceOf[ZLike[_]].toMP + size - 1 <= companion.Max
.asInstanceOf[ZLike[_]]
.toMP,
s"Slang IS requires its index (${companion.Index}) plus its size ($size) less than or equal to it max ${companion.Max} plus one."
)
}
def apply[I, V](args: V*)(implicit companion: $ZCompanion[I]): MS[I, V] = {
checkSize(Z.MP(args.length))(companion)
val boxer = Boxer.boxerSeq(args)
val length = Z.MP(args.length)
val a = boxer.create(length)
var i = Z.MP.zero
for (arg <- args) {
boxer.store(a, i, helper.assign(arg))
i = i.increase
}
MS[I, V](companion, a, length, boxer)
}
def create[I, V](size: I, default: V)(implicit companion: $ZCompanion[I]): MS[I, V] = {
val length = size.asInstanceOf[ZLike[_]].toMP
checkSize(length)(companion)
val boxer = Boxer.boxer(default)
val a = boxer.create(length)
var i = Z.MP.zero
while (i < length) {
boxer.store(a, i, helper.assign(default))
i = i.increase
}
MS[I, V](companion, a, length, boxer)
}
def zreate[I, V](size: Z, default: V)(implicit companion: $ZCompanion[I]): MS[I, V] = {
val length = size
checkSize(length)(companion)
val boxer = Boxer.boxer(default)
val a = boxer.create(length)
var i = Z.MP.zero
while (i < length) {
boxer.store(a, i, helper.assign(default))
i = i.increase
}
MS[I, V](companion, a, length, boxer)
}
def apply[I, V](companion: $ZCompanion[I], data: scala.AnyRef, length: Z, boxer: Boxer): MS[I, V] =
new MS[I, V](companion, data, length, boxer)
def unapplySeq[I, V](o: MS[I, V]): scala.Option[scala.Seq[V]] = scala.Some(o.elements.map(helper.cloneAssign))
}
final class MS[I, V](val companion: $ZCompanion[I], val data: scala.AnyRef, val length: Z, val boxer: Boxer)
extends Mutable with MSMarker with _root_.java.lang.Iterable[V] {
private var isOwned: scala.Boolean = false
override def $owned: scala.Boolean = isOwned
override def $owned_=(b: scala.Boolean): this.type = {
isOwned = b
this
}
def hash: Z = hashCode
def $clone: MS[I, V] = {
val a = boxer.cloneMut(data, length, length, Z.MP.zero)
MS[I, V](companion, a, length, boxer)
}
def isEmpty: B = length == Z.MP.zero
def nonEmpty: B = length != Z.MP.zero
def isInBound(i: I): B = {
val iMP = i.asInstanceOf[ZLike[_]].toMP
0 <= iMP && iMP < length
}
def :+(e: V): MS[I, V] =
if (isEmpty) MS[I, V](e)(companion)
else {
val newLength = length.increase
MS.checkSize(newLength)
val a = boxer.cloneMut(data, length, newLength, Z.MP.zero)
boxer.store(a, length, helper.assign(e))
MS[I, V](companion, a, newLength, boxer)
}
def +:(e: V): MS[I, V] =
if (isEmpty) MS[I, V](e)(companion)
else {
val newLength = length.increase
MS.checkSize(newLength)
val a = boxer.cloneMut(data, length, newLength, Z.MP.one)
boxer.store(a, Z.MP.zero, helper.assign(e))
MS[I, V](companion, a, newLength, boxer)
}
def ++[I2](other: MS[I2, V]): MS[I, V] = {
val bxr = if (isEmpty) other.boxer else boxer
if (other.length == Z.MP.zero) return this
val newLength = length + other.length
MS.checkSize(newLength)
val a = bxr.cloneMut(data, length, newLength, Z.MP.zero)
var i = length
var j = Z.MP.zero
while (i < newLength) {
bxr.store(a, i, helper.assign(other.boxer.lookup[V](other.data, j)))
i = i.increase
j = j.increase
}
MS[I, V](companion, a, newLength, bxr)
}
def --[I2](other: MS[I2, V]): MS[I, V] =
if (isEmpty || other.length == Z.MP.zero) this
else {
val otherElements = other.elements
var sm = elements.withFilter(_ != otherElements.head)
for (e <- other.elements.tail) {
sm = sm.withFilter(_ != e)
}
val s = sm.map(identity)
val newLength = Z.MP(s.size)
val a = boxer.create(newLength)
var i = Z.MP.zero
for (e <- s) {
boxer.store(a, i, helper.assign(e))
i = i.increase
}
MS[I, V](companion, a, newLength, boxer)
}
def -(e: V): MS[I, V] = if (isEmpty) this else filter(_ != e)
def indices: ZRange[I] = {
var j = companion.Index.asInstanceOf[ZLike[_]]
var i = Z.MP.zero
while (i < length) {
i = i.increase
j = j.increase.asInstanceOf[ZLike[_]]
}
ZRange[I](
T,
companion.Index,
j.decrease.asInstanceOf[I],
1,
new ZRange.CondIncDec[I] {
@pure def cond(i: I): B = T
@pure override def increase(i: I): I = i.asInstanceOf[ZLike[_]].increase.asInstanceOf[I]
@pure override def decrease(i: I): I = i.asInstanceOf[ZLike[_]].decrease.asInstanceOf[I]
}
)
}
def map[V2](f: V => V2): MS[I, V2] =
if (isEmpty) MS[I, V2]()(companion)
else {
var a: AnyRef = null
var boxer2: Boxer = null
var i = Z.MP.zero
while (i < length) {
val v2 = f(boxer.lookup[V](data, i))
if (boxer2 == null) {
boxer2 = Boxer.boxer(v2)
a = boxer2.create(length)
}
boxer2.store(a, i, helper.assign(v2))
i = i.increase
}
MS[I, V2](companion, a, length, if (boxer2 == null) $internal.IdentityBoxer else boxer2)
}
def foreach[U](f: V => U): Unit = {
var i = Z.MP.zero
while (i < length) {
f(boxer.lookup[V](data, i))
i = i.increase
}
}
def flatMap[V2](f: V => MS[I, V2]): MS[I, V2] =
if (isEmpty) MS[I, V2]()(companion)
else {
val es = elements
var r = f(es.head)
for (e <- es.tail) {
r = r ++ f(e)
}
r
}
def filter(p: V => B @pure): MS[I, V] =
if (isEmpty) this
else {
val a: AnyRef = boxer.create(length)
var i = Z.MP.zero
var j = Z.MP.zero
while (i < length) {
val v = boxer.lookup[V](data, i)
if (p(v)) {
boxer.store(a, j, helper.assign(v))
j = j.increase
}
i = i.increase
}
MS[I, V](companion, a, j, boxer)
}
def withFilter(p: V => B): MS.WithFilter[I, V] = new MS.WithFilter(this, p)
def iterator(): _root_.java.util.Iterator[V] = new _root_.java.util.Iterator[V] {
var i: scala.Int = 0
val es: Seq[V] = elements
override def next(): V = {
assert(hasNext)
val r = helper.cloneAssign(es(i))
i = i + 1
r
}
override def hasNext: scala.Boolean = i <= es.length
}
def size: Z = length
def firstIndex: I = if (companion.isZeroIndex) companion(0) else companion.Min
def lastIndex: I = {
assert(nonEmpty, "lastIndex can only be used on non-empty MS")
if (companion.isZeroIndex) companion(length) else {
var r = companion.Min
for (_ <- 0 until length) {
r = r.asInstanceOf[ZLike[_]].increase.asInstanceOf[I]
}
return r
}
}
def toIS: IS[I, V] = {
new IS[I, V](companion, boxer.clone(data, length, length, Z.MP.zero), length, boxer)
}
def apply(index: I): V = {
val i = index.asInstanceOf[ZLike[_]].toIndex
assert(Z.MP.zero <= i && i <= length, s"Array indexing out of bounds: $index")
boxer.lookup[V](data, i)
}
def apply(args: (I, V)*): MS[I, V] = {
val a = boxer.clone(data, length, length, Z.MP.zero)
for ((index, v) <- args) {
val i = index.asInstanceOf[ZLike[_]].toIndex
assert(Z.MP.zero <= i && i <= length, s"Array indexing out of bounds: $index")
boxer.store(a, i, v)
}
MS[I, V](companion, a, length, boxer)
}
def update(index: I, value: V): Unit = {
val i = index.asInstanceOf[ZLike[_]].toIndex
assert(Z.MP.zero <= i && i <= length, s"Array indexing out of bounds: $index")
boxer.store(data, i, helper.assign(value))
}
def elements: scala.Seq[V] = {
val r = new Array[Any](length.toInt)
var i = Z.MP.zero
while (i < length) {
r(i.toInt) = boxer.lookup[V](data, i)
i = i.increase
}
r.toSeq.asInstanceOf[scala.Seq[V]]
}
override def hashCode: scala.Int = {
(companion, elements).hashCode
}
override def equals(other: scala.Any): scala.Boolean =
if (this eq other.asInstanceOf[scala.AnyRef]) true
else
other match {
case other: MS[_, _] =>
if (companion ne other.companion) return false
if (length != other.length) return false
if (data eq other.data) return true
val b1 = boxer
val b2 = other.boxer
val data1 = data
val data2 = other.data
for (i <- Z.MP.zero until length) {
val iMP = i.toMP
if (b1.lookup[V](data1, iMP) != b2.lookup[V](data2, iMP)) return false
}
true
case _ => false
}
def string: String = toString
override def toString: Predef.String = boxer.toString(data, length)
}
| sireum/v3-runtime | library/shared/src/main/scala/org/sireum/MS.scala | Scala | bsd-2-clause | 11,820 |
package jadeutils.xmpp.utils
import org.scalatest.junit.JUnitRunner
import org.scalatest.FunSuite
import org.junit.runner.RunWith
import java.io.Reader
import scala.actors._
import scala.actors.Actor._
import scala.xml.Elem
import jadeutils.common.Logging
class MockReader(lines: List[String]) extends Reader {
val logger = MockReader.logger
private[this] var lineIdx = 0
def close() {}
def read(buffer: Array[Char], idx: Int, len: Int): Int = {
if (lineIdx < lines.length) {
logger.debug("read line: {}", lines(lineIdx))
val la = lines(lineIdx).toCharArray
for (i <- 0 until la.length) {
buffer(i) = la(i)
}
lineIdx = lineIdx + 1
la.length
} else {
Thread.sleep(10 * 60 * 1000)
0
}
}
}
object MockReader extends Logging
class MockMessageProcesser extends MessageProcesser {
val msgHandlers = Nil
}
object MockMessageProcesser extends Logging
@RunWith(classOf[JUnitRunner])
class IOTest extends FunSuite {
def load(data: List[String]) {
var pkgReader = new PacketReader(new ReaderStatusHelper(new MockReader(data), new MockMessageProcesser()))
pkgReader.init()
pkgReader.start()
Thread.sleep(1 * 1000)
println("\\n\\n\\n")
}
test("Test-Read-XML-Head") {
load(List( """<?xml version='1.0'?>"""))
load(List( """<?x""", """ml version='1.0'?>"""))
}
test("Test-Slash") {
load(List( """<stream:stream xmlns:stream='http://etherx.jabber.org/streams' """ +
"""xmlns='jabber:client' from='jabber.org' id='fbe3166a9974bdc3' """ +
"""version='1.0'>"""))
}
test("Test-Close-XML") {
load(List(
"""<stream:features>""",
"""<starttls xmlns='urn:ietf:params:xml:ns:xmpp-tls'/>""",
"""<mechanisms xmlns='urn:ietf:params:xml:ns:xmpp-sasl'>""",
"""<mechanism>SCRAM-SHA-1</mechanism>""",
"""</mechanisms>""",
"""</stream:features>""",
"""<proceed/>"""))
}
test("Test-Read-XML-Example") {
load(List("""<?xml version='1.0'?>""",
"""<stream:stream xmlns='jabber:client' xmlns:stream='http://etherx.jabber.org/streams' from='jabber.org' id='fbe3166a9974bdc3' version='1.0'>""",
"""<stream:features>""",
"""<starttls xmlns='urn:ietf:params:xml:ns:xmpp-tls'/>""",
"""<mechanisms xmlns='urn:ietf:params:xml:ns:xmpp-sasl'>""",
"""<mechanism>SCRAM-SHA-1</mechanism><mechanism>DIGEST-MD5</mechanism><mechanism>CRAM-MD5</mechanism><mechanism>PLAIN</mechanism><mechanism>LOGIN</mechanism>""",
"""</mechanisms>""",
"""</stream:features>""",
"""<proceed xmlns='urn:ietf:params:xml:ns:xmpp-tls'/>"""))
}
}
| Jade-Shan/Jade-XMPP | src/test/scala/utils/IOTest.scala | Scala | gpl-3.0 | 2,511 |
/*
* Copyright (c) 2014-2021 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
// SBT
import sbt._
import Keys._
import com.typesafe.sbt.packager.Keys._
import com.typesafe.sbt.packager.docker.DockerPlugin.autoImport.Docker
import com.typesafe.sbt.packager.docker._
// Scoverage plugin
import scoverage.ScoverageKeys._
// Scalafmt plugin
import org.scalafmt.sbt.ScalafmtPlugin.autoImport._
object BuildSettings {
// Basic settings for our app
lazy val basicSettings = Seq(
organization := "com.snowplowanalytics",
scalaVersion := "2.13.6",
resolvers ++= Dependencies.resolvers
)
/** Add example config for integration tests */
lazy val addExampleConfToTestCp = Seq(
Test / unmanagedClasspath += {
baseDirectory.value / "config"
}
)
lazy val dockerSettings = Seq(
Docker / maintainer := "Snowplow Analytics Ltd. <support@snowplowanalytics.com>",
Docker / daemonUser := "daemon",
Docker / packageName := "snowplow/snowplow-s3-loader",
dockerBaseImage := "adoptopenjdk:11-jre-hotspot-focal",
dockerUpdateLatest := true,
dockerCommands := {
val installLzo = Seq(Cmd("RUN", "mkdir -p /var/lib/apt/lists/partial && apt-get update && apt-get install -y lzop && apt-get purge -y"))
val (h, t) = dockerCommands.value.splitAt(dockerCommands.value.size-4)
h ++ installLzo ++ t
}
)
// Makes our SBT app settings available from within the app
lazy val scalifySettings = Seq(
Compile / sourceGenerators += Def.task {
val file = (Compile / sourceManaged).value / "settings.scala"
IO.write(file, """package com.snowplowanalytics.s3.loader.generated
|object Settings {
| val organization = "%s"
| val version = "%s"
| val name = "%s"
|}
|""".stripMargin.format(organization.value, version.value, name.value))
Seq(file)
}.taskValue
)
// sbt-assembly settings for building a fat jar
import sbtassembly.AssemblyPlugin.autoImport._
lazy val sbtAssemblySettings = Seq(
assembly / assemblyJarName := { s"${name.value}-${version.value}.jar" },
assembly / assemblyMergeStrategy := {
case PathList("javax", "servlet", xs @ _*) => MergeStrategy.first
case PathList("org", "objectweb", "asm", xs @ _*) => MergeStrategy.first
case PathList("org", "objectweb", "asm", xs @ _*) => MergeStrategy.first
case PathList("org", "apache", "log4j", _*) => MergeStrategy.last // handled by log4j-over-slf4j
case PathList(ps @ _*) if ps.last endsWith ".html" => MergeStrategy.first
case "application.conf" => MergeStrategy.concat
case "module-info.class" => MergeStrategy.discard
case x =>
val oldStrategy = (assembly / assemblyMergeStrategy).value
oldStrategy(x)
}
)
lazy val scoverageSettings = Seq(
coverageMinimum := 50,
coverageFailOnMinimum := true,
coverageHighlighting := false,
(Test / test) := {
(coverageReport dependsOn (Test / test)).value
}
)
lazy val formattingSettings = Seq(
scalafmtConfig := file(".scalafmt.conf"),
scalafmtOnCompile := false
)
}
| snowplow/kinesis-s3 | project/BuildSettings.scala | Scala | apache-2.0 | 3,865 |
package org.jetbrains.plugins.scala
package lang
package completion3
import com.intellij.codeInsight.completion.CompletionType
import org.jetbrains.plugins.scala.util.runners.{RunWithScalaVersions, TestScalaVersion}
/**
* @author Alefas
* @since 04.09.13
*/
class ScalaSuperParametersTest extends SameSignatureCallParametersProviderTestBase {
import ScalaCodeInsightTestBase._
import icons.Icons.{PARAMETER, PATTERN_VAL}
def testConstructorCall(): Unit = doCompletionTest(
fileText =
s"""class A(x: Int, y: Int) {
| def this(x: Int, y: Int, z: Int) = this(x, y)
|}
|
|class B(x: Int, y: Int, z: Int) extends A($CARET)
""".stripMargin,
resultText =
s"""class A(x: Int, y: Int) {
| def this(x: Int, y: Int, z: Int) = this(x, y)
|}
|
|class B(x: Int, y: Int, z: Int) extends A(x, y, z)$CARET
""".stripMargin,
item = "x, y, z"
)
def testConstructorCall2(): Unit = doCompletionTest(
fileText =
s"""class A(x: Int, y: Int) {
| def this(x: Int, y: Int, z: Int) = this(x, y)
|}
|
|class B(x: Int, y: Int, z: Int) extends A($CARET)
""".stripMargin,
resultText =
s"""class A(x: Int, y: Int) {
| def this(x: Int, y: Int, z: Int) = this(x, y)
|}
|
|class B(x: Int, y: Int, z: Int) extends A(x, y)$CARET
""".stripMargin,
item = "x, y"
)
def testConstructorCall2Smart(): Unit = doCompletionTest(
fileText =
s"""class A(x: Int, y: Int) {
| def this(x: Int, y: Int, z: Int) = this(x, y)
|}
|
|class B(x: Int, y: Int, z: Int) extends A($CARET)
""".stripMargin,
resultText =
s"""class A(x: Int, y: Int) {
| def this(x: Int, y: Int, z: Int) = this(x, y)
|}
|
|class B(x: Int, y: Int, z: Int) extends A(x, y)$CARET
""".stripMargin,
item = "x, y",
completionType = CompletionType.SMART
)
def testConstructorCallLookupElement(): Unit = checkLookupElement(
fileText =
s"""class A(x: Int, y: Int)
|
|class B(x: Int, y: Int) extends A($CARET)
""".stripMargin,
resultText =
s"""class A(x: Int, y: Int)
|
|class B(x: Int, y: Int) extends A(x, y)$CARET
""".stripMargin,
item = "x, y",
isSuper = true,
icons = PARAMETER, PARAMETER
)
def testAfterParenthesisOnlyInConstructor(): Unit = checkNoCompletion(
s"""class A(x: Int, y: Int)
|
|class B(x: Int, y: Int) extends A(x, $CARET)
|""".stripMargin
)
def testBeforeParenthesisOnlyInConstructor(): Unit = checkNoCompletion(
s"""class A(x: Int, y: Int)
|
|class B(x: Int, y: Int) extends A($CARET, y)
|""".stripMargin
)
def testPositionInConstructor(): Unit = doCompletionTest(
fileText =
s"""class A(x: Int, y: Int, z: Int)
|
|class B(y: Int, z: Int) extends A(, $CARET)
|""".stripMargin,
resultText =
s"""class A(x: Int, y: Int, z: Int)
|
|class B(y: Int, z: Int) extends A(, y, z)$CARET
|""".stripMargin,
item = "y, z"
)
def testEmptyConstructor(): Unit = checkNoCompletion(
s"""class A()
|
|class B(x: Int, y: Int) extends A($CARET)
|""".stripMargin
)
def testTooShortConstructor(): Unit = checkNoCompletion(
s"""class A(x: Int)
|
|class B(x: Int, y: Int) extends A($CARET)
|""".stripMargin
)
// should have (x = ???, y = ???) but not (x, y)
def testNoNameMatchingConstructor(): Unit = checkNoCompletionWithoutTailText(
fileText =
s"""class A(x: Int, y: Int)
|
|class B(x: Int, z: Int) extends A($CARET)
|""".stripMargin,
lookupString = "x, y"
)
// should have (x = ???, y = ???) but not (x, y)
def testNoTypeMatchingConstructor(): Unit = checkNoCompletionWithoutTailText(
fileText =
s"""class A(x: Int, y: Int)
|
|class B(x: Int, y: String) extends A($CARET)
|""".stripMargin,
lookupString = "x, y"
)
def testConstructorAssignment(): Unit = doRawCompletionTest(
fileText =
s"""class A(x: Int, y: Int)
|
|class B extends A($CARET)
|""".stripMargin,
resultText =
s"""class A(x: Int, y: Int)
|
|class B extends A(x = ???, y = ???)$CARET
|""".stripMargin,
) {
hasItemText(_, "x, y")(tailText = " = ")
}
def testPositionInConstructorAssignment(): Unit = doCompletionTest(
fileText =
s"""class A(x: Int, y: Int, z: Int)
|
|class B extends A(, $CARET)
|""".stripMargin,
resultText =
s"""class A(x: Int, y: Int, z: Int)
|
|class B extends A(, y = ???, z = ???)$CARET
|""".stripMargin,
item = "y, z"
)
def testConstructorAssignmentLookupElement(): Unit = checkLookupElement(
fileText =
s"""class A(x: Int, y: Int)
|
|class B(x: Int) extends A(x$CARET)
|""".stripMargin,
resultText =
s"""class A(x: Int, y: Int)
|
|class B(x: Int) extends A(x = ???, y = ???)$CARET
|""".stripMargin,
item = "x, y",
isSuper = false,
icons = PARAMETER, PARAMETER
)
//todo fix for Scala 3
@RunWithScalaVersions(Array(
TestScalaVersion.Scala_2_12,
))
def testConstructorCallAfterNew(): Unit = doCompletionTest(
fileText =
s"""class A(x: Int, y: Int)
|
|val x: Int = ???
|val y: Int = ???
|
|new A($CARET)
""".stripMargin,
resultText =
s"""class A(x: Int, y: Int)
|
|val x: Int = ???
|val y: Int = ???
|
|new A(x, y)$CARET
""".stripMargin,
item = "x, y"
)
@RunWithScalaVersions(Array(
TestScalaVersion.Scala_2_12,
))
def testConstructorCallAfterNew2(): Unit = doCompletionTest(
fileText =
s"""class A(x: Int, y: Int) {
| def this(x: Int, y: Int, z: Int) = this(x, y)
|}
|
|val x: Int = ???
|val y: Int = ???
|val z: Int = ???
|
|new A($CARET)
""".stripMargin,
resultText =
s"""class A(x: Int, y: Int) {
| def this(x: Int, y: Int, z: Int) = this(x, y)
|}
|
|val x: Int = ???
|val y: Int = ???
|val z: Int = ???
|
|new A(x, y, z)$CARET
""".stripMargin,
item = "x, y, z"
)
@RunWithScalaVersions(Array(
TestScalaVersion.Scala_2_12,
))
def testConstructorCallAfterNew3(): Unit = doCompletionTest(
fileText =
s"""class A(x: Int, y: Int) {
| def this(x: Int, y: Int, z: Int) = this(x, y)
|}
|
|val x: Int = ???
|val y: Int = ???
|val z: Int = ???
|
|new A($CARET)
""".stripMargin,
resultText =
s"""class A(x: Int, y: Int) {
| def this(x: Int, y: Int, z: Int) = this(x, y)
|}
|
|val x: Int = ???
|val y: Int = ???
|val z: Int = ???
|
|new A(x, y)$CARET
""".stripMargin,
item = "x, y"
)
@RunWithScalaVersions(Array(
TestScalaVersion.Scala_2_12,
))
def testConstructorCallAfterNew3Smart(): Unit = doCompletionTest(
fileText =
s"""class A(x: Int, y: Int) {
| def this(x: Int, y: Int, z: Int) = this(x, y)
|}
|
|val x: Int = ???
|val y: Int = ???
|val z: Int = ???
|
|new A($CARET)
""".stripMargin,
resultText =
s"""class A(x: Int, y: Int) {
| def this(x: Int, y: Int, z: Int) = this(x, y)
|}
|
|val x: Int = ???
|val y: Int = ???
|val z: Int = ???
|
|new A(x, y)$CARET
""".stripMargin,
item = "x, y",
completionType = CompletionType.SMART
)
@RunWithScalaVersions(Array(
TestScalaVersion.Scala_2_12,
))
def testConstructorCallAfterNewLookupElement(): Unit = checkLookupElement(
fileText =
s"""class A(x: Int, y: Int)
|
|val x: Int = ???
|val y: Int = ???
|
|new A($CARET)
""".stripMargin,
resultText =
s"""class A(x: Int, y: Int)
|
|val x: Int = ???
|val y: Int = ???
|
|new A(x, y)$CARET
""".stripMargin,
item = "x, y",
isSuper = true,
icons = PATTERN_VAL, PATTERN_VAL
)
@RunWithScalaVersions(Array(
TestScalaVersion.Scala_2_12,
))
def testAfterParenthesisOnlyInConstructorAfterNew(): Unit = checkNoCompletion(
fileText =
s"""class A(x: Int, y: Int)
|
|val x: Int = ???
|val y: Int = ???
|
|new A(x, $CARET)
""".stripMargin
)
def testBeforeParenthesisOnlyInConstructorAfterNew(): Unit = checkNoCompletion(
fileText =
s"""class A(x: Int, y: Int)
|
|val x: Int = ???
|val y: Int = ???
|
|new A($CARET, y)
""".stripMargin
)
@RunWithScalaVersions(Array(
TestScalaVersion.Scala_2_12,
))
def testPositionInConstructorAfterNew(): Unit = doCompletionTest(
fileText =
s"""class A(x: Int, y: Int, z: Int)
|
|val y: Int = ???
|val z: Int = ???
|
|new A(, $CARET)
""".stripMargin,
resultText =
s"""class A(x: Int, y: Int, z: Int)
|
|val y: Int = ???
|val z: Int = ???
|
|new A(, y, z)$CARET
""".stripMargin,
item = "y, z"
)
def testEmptyConstructorAfterNew(): Unit = checkNoCompletion(
s"""class A()
|
|val x: Int = ???
|val y: Int = ???
|
|new A($CARET)
|""".stripMargin
)
def testTooShortConstructorAfterNew(): Unit = checkNoCompletion(
s"""class A(x: Int)
|
|val x: Int = ???
|val y: Int = ???
|
|new A($CARET)
|""".stripMargin
)
// should have (x = ???, y = ???) but not (x, y)
def testNoNameMatchingConstructorAfterNew(): Unit = checkNoCompletionWithoutTailText(
fileText =
s"""class A(x: Int, y: Int)
|
|val x: Int = ???
|val z: Int = ???
|
|new A($CARET)
|""".stripMargin,
lookupString = "x, y"
)
// should have (x = ???, y = ???) but not (x, y)
def testNoTypeMatchingConstructorAfterNew(): Unit = checkNoCompletionWithoutTailText(
fileText =
s"""class A(x: Int, y: Int)
|
|val x: Int = ???
|val y: String = ???
|
|new A($CARET)
|""".stripMargin,
lookupString = "x, y"
)
def testConstructorAssignmentAfterNew(): Unit = doRawCompletionTest(
fileText =
s"""class A()(x: Int, y: Int)
|
|new A()(x$CARET)
|""".stripMargin,
resultText =
s"""class A()(x: Int, y: Int)
|
|new A()(x = ???, y = ???)$CARET
|""".stripMargin,
) {
hasItemText(_, "x, y")(tailText = " = ")
}
def testPositionInConstructorAssignmentAfterNew(): Unit = doCompletionTest(
fileText =
s"""class A(x: Int, y: Int, z: Int)
|
|new A(, $CARET)
|""".stripMargin,
resultText =
s"""class A(x: Int, y: Int, z: Int)
|
|new A(, y = ???, z = ???)$CARET
|""".stripMargin,
item = "y, z"
)
def testConstructorAssignmentLookupElementAfterNew(): Unit = checkLookupElement(
fileText =
s"""class A(x: Int, y: Int)
|
|new A(x$CARET)
|""".stripMargin,
resultText =
s"""class A(x: Int, y: Int)
|
|new A(x = ???, y = ???)$CARET
|""".stripMargin,
item = "x, y",
isSuper = false,
icons = PARAMETER, PARAMETER
)
def testSuperCall(): Unit = doCompletionTest(
fileText =
s"""class A {
| def foo(x: Int, y: Int, z: Int) = 1
| def foo(x: Int, y: Int) = 2
|}
|
|class B extends A {
| override def foo(x: Int, y: Int, z: Int) = {
| super.foo($CARET)
| }
|}
""".stripMargin,
resultText =
s"""class A {
| def foo(x: Int, y: Int, z: Int) = 1
| def foo(x: Int, y: Int) = 2
|}
|
|class B extends A {
| override def foo(x: Int, y: Int, z: Int) = {
| super.foo(x, y)$CARET
| }
|}
""".stripMargin,
item = "x, y"
)
def testSuperCall2(): Unit = doCompletionTest(
fileText =
s"""class A {
| def foo(x: Int, y: Int, z: Int) = 1
| def foo(x: Int, y: Int) = 2
|}
|
|class B extends A {
| override def foo(x: Int, y: Int, z: Int) = {
| super.foo($CARET)
| }
|}
""".stripMargin,
resultText =
s"""class A {
| def foo(x: Int, y: Int, z: Int) = 1
| def foo(x: Int, y: Int) = 2
|}
|
|class B extends A {
| override def foo(x: Int, y: Int, z: Int) = {
| super.foo(x, y, z)$CARET
| }
|}
""".stripMargin,
item = "x, y, z"
)
def testSuperCallLookupElement(): Unit = checkLookupElement(
fileText =
s"""class A {
| def foo(x: Int, y: Int) = 42
|}
|
|class B extends A {
| override def foo(x: Int, y: Int) =
| super.foo($CARET)
|}
|""".stripMargin,
resultText =
s"""class A {
| def foo(x: Int, y: Int) = 42
|}
|
|class B extends A {
| override def foo(x: Int, y: Int) =
| super.foo(x, y)$CARET
|}
|""".stripMargin,
item = "x, y",
isSuper = true,
icons = PARAMETER, PARAMETER
)
def testAfterParenthesisOnlyInSuperMethod(): Unit = checkNoCompletion(
s"""class A {
| def foo(x: Int, y: Int) = 42
|}
|
|class B extends A {
| override def foo(x: Int, y: Int) =
| super.foo(x, $CARET)
|}
|""".stripMargin
)
def testBeforeParenthesisOnlyInSuperMethod(): Unit = checkNoCompletion(
s"""class A {
| def foo(x: Int, y: Int) = 42
|}
|
|class B extends A {
| override def foo(x: Int, y: Int) =
| super.foo($CARET, y)
|}
|""".stripMargin
)
def testPositionInSuperMethod(): Unit = doCompletionTest(
fileText =
s"""class A {
| def foo(x: Int, y: Int, z: Int) = 42
|}
|
|class B extends A {
| override def foo(x: Int, y: Int, z: Int) =
| super.foo(, $CARET)
|}
|""".stripMargin,
resultText =
s"""class A {
| def foo(x: Int, y: Int, z: Int) = 42
|}
|
|class B extends A {
| override def foo(x: Int, y: Int, z: Int) =
| super.foo(, y, z)$CARET
|}
|""".stripMargin,
item = "y, z"
)
def testEmptySuperMethod(): Unit = checkNoCompletion(
s"""class A {
| def foo() = 42
|}
|
|class B extends A {
| override def foo() =
| super.foo($CARET)
|}
|""".stripMargin
)
def testTooShortSuperMethod(): Unit = checkNoCompletion(
s"""class A {
| def foo(x: Int) = 42
|}
|
|class B extends A {
| override def foo(x: Int) =
| super.foo($CARET)
|}
|""".stripMargin
)
def testNoNameMatchingSuperMethod(): Unit = checkNoBasicCompletion(
fileText =
s"""class A {
| def foo(x: Int, y: Int) = 42
|}
|
|class B extends A {
| override def foo(x: Int, z: Int) =
| super.foo($CARET)
|}
|""".stripMargin,
item = "x, y"
)
def testMethodCall(): Unit = doCompletionTest(
fileText =
s"""class A {
| def foo(x: Int, y: Int) = 1
|}
|
|class B extends A {
| def bar(x: Int, y: Int) = {
| foo($CARET)
| }
|}
""".stripMargin,
resultText =
s"""class A {
| def foo(x: Int, y: Int) = 1
|}
|
|class B extends A {
| def bar(x: Int, y: Int) = {
| foo(x, y)$CARET
| }
|}
""".stripMargin,
item = "x, y"
)
def testQualifiedMethodCall(): Unit = doCompletionTest(
fileText =
s"""class A {
| def foo(x: Int, y: Int) = 1
|}
|
|class B {
| private val a = new A
|
| def bar(x: Int, y: Int) = {
| a.foo($CARET)
| }
|}
""".stripMargin,
resultText =
s"""class A {
| def foo(x: Int, y: Int) = 1
|}
|
|class B {
| private val a = new A
|
| def bar(x: Int, y: Int) = {
| a.foo(x, y)$CARET
| }
|}
""".stripMargin,
item = "x, y"
)
def testQualifiedMethodCallCompletionChar(): Unit = doCompletionTest(
fileText =
s"""class A {
| def foo(x: Int, y: Int) = 1
|}
|
|class B {
| private val a = new A
|
| def bar(x: Int, y: Int) = {
| a.foo($CARET)
| }
|}
""".stripMargin,
resultText =
s"""class A {
| def foo(x: Int, y: Int) = 1
|}
|
|class B {
| private val a = new A
|
| def bar(x: Int, y: Int) = {
| a.foo(x, y)$CARET
| }
|}
""".stripMargin,
item = "x, y",
char = ')'
)
def testMethodCallLookupElement(): Unit = checkLookupElement(
fileText =
s"""class A {
| def foo(x: Int, y: Int) = 42
|}
|
|class B extends A {
| def bar(x: Int, y: Int) =
| foo($CARET)
|}
|""".stripMargin,
resultText =
s"""class A {
| def foo(x: Int, y: Int) = 42
|}
|
|class B extends A {
| def bar(x: Int, y: Int) =
| foo(x, y)$CARET
|}
|""".stripMargin,
item = "x, y",
isSuper = false,
icons = PARAMETER, PARAMETER
)
def testAfterParenthesisOnlyInMethodCall(): Unit = checkNoCompletion(
s"""class A {
| def foo(x: Int, y: Int) = 42
|}
|
|class B extends A {
| def bar(x: Int, y: Int) =
| foo(x, $CARET)
|}
|""".stripMargin
)
def testBeforeParenthesisOnlyInMethodCall(): Unit = checkNoCompletion(
s"""class A {
| def foo(x: Int, y: Int) = 42
|}
|
|class B extends A {
| def bar(x: Int, y: Int) =
| foo($CARET, y)
|}
|""".stripMargin
)
def testPositionInMethodCall(): Unit = doCompletionTest(
fileText =
s"""class A {
| def foo(x: Int, y: Int, z: Int) = 42
|}
|
|class B extends A {
| def bar(y: Int, z: Int) =
| foo(, $CARET)
|}
|""".stripMargin,
resultText =
s"""class A {
| def foo(x: Int, y: Int, z: Int) = 42
|}
|
|class B extends A {
| def bar(y: Int, z: Int) =
| foo(, y, z)$CARET
|}
|""".stripMargin,
item = "y, z"
)
def testEmptyMethod(): Unit = checkNoCompletion(
s"""class A {
| def foo() = 42
|}
|
|class B extends A {
| def bar() =
| foo($CARET)
|}
|""".stripMargin
)
def testTooShortMethod(): Unit = checkNoCompletion(
s"""class A {
| def foo(x: Int) = 42
|}
|
|class B extends A {
| def bar(x: Int) =
| foo($CARET)
|}
|""".stripMargin
)
def testNoNameMatchingMethod(): Unit = checkNoBasicCompletion(
fileText =
s"""class A {
| def foo(x: Int, y: Int) = 42
|}
|
|class B extends A {
| def bar(x: Int, z: Int) =
| foo($CARET)
|}
|""".stripMargin,
item = "x, y"
)
def testNoTypeMatchingMethod(): Unit = checkNoBasicCompletion(
fileText =
s"""class A {
| def foo(x: Int, y: Int) = 42
|}
|
|class B extends A {
| def bar(x: Int, y: String) =
| foo($CARET)
|}
|""".stripMargin,
item = "x, y"
)
def testCaseClass(): Unit = doCompletionTest(
fileText =
s"""case class A(x: Int, y: Int)
|
|class B {
| def bar(x: Int, y: Int) = {
| A($CARET)
| }
|}
""".stripMargin,
resultText =
s"""case class A(x: Int, y: Int)
|
|class B {
| def bar(x: Int, y: Int) = {
| A(x, y)$CARET
| }
|}
""".stripMargin,
item = "x, y"
)
def testCaseClassLookupElement(): Unit = checkLookupElement(
fileText =
s"""case class A(x: Int, y: Int)
|
|class B {
| def bar(x: Int, y: Int) =
| A($CARET)
|}
|""".stripMargin,
resultText =
s"""case class A(x: Int, y: Int)
|
|class B {
| def bar(x: Int, y: Int) =
| A(x, y)$CARET
|}
|""".stripMargin,
item = "x, y",
isSuper = false,
icons = PARAMETER, PARAMETER
)
def testBeforeParenthesisOnlyInCaseClass(): Unit = checkNoCompletion(
s"""case class A(x: Int, y: Int)
|
|class B {
| def bar(x: Int, y: Int) =
| A(x, $CARET)
|}
|""".stripMargin
)
def testAfterParenthesisOnlyInCaseClass(): Unit = checkNoCompletion(
s"""case class A(x: Int, y: Int)
|
|class B {
| def bar(x: Int, y: Int) =
| A($CARET, y)
|}
|""".stripMargin
)
def testPositionInCaseClass(): Unit = doCompletionTest(
fileText =
s"""case class A(x: Int, y: Int, z: Int)
|
|class B {
| def bar(y: Int, z: Int) =
| A(, $CARET)
|}
|""".stripMargin,
resultText =
s"""case class A(x: Int, y: Int, z: Int)
|
|class B {
| def bar(y: Int, z: Int) =
| A(, y, z)$CARET
|}
|""".stripMargin,
item = "y, z"
)
def testEmptyCaseClassArgumentsList(): Unit = checkNoCompletion(
s"""case class A()
|
|class B {
| def bar() =
| A($CARET)
|}
|""".stripMargin
)
def testTooShortCaseClassArgumentsList(): Unit = checkNoCompletion(
s"""case class A(x: Int)
|
|class B {
| def bar(x: Int) =
| A($CARET)
|}
|""".stripMargin
)
// should have (x = ???, y = ???) but not (x, y)
def testNoNameMatchingCaseClass(): Unit = checkNoCompletionWithoutTailText(
fileText =
s"""case class A(x: Int, y: Int)
|
|class B {
| def bar(x: Int, z: Int) =
| A($CARET)
|}
|""".stripMargin,
lookupString = "x, y"
)
// should have (x = ???, y = ???) but not (x, y)
def testNoTypeMatchingCaseClass(): Unit = checkNoCompletionWithoutTailText(
fileText =
s"""case class A(x: Int, y: Int)
|
|class B {
| def bar(x: Int, y: String) =
| A($CARET)
|}
|""".stripMargin,
lookupString = "x, y"
)
def testCaseClassAssignment(): Unit = doRawCompletionTest(
fileText =
s"""final case class Foo()(foo: Int, bar: Int)
|
|Foo()(f$CARET)
|""".stripMargin,
resultText =
s"""final case class Foo()(foo: Int, bar: Int)
|
|Foo()(foo = ???, bar = ???)$CARET
|""".stripMargin,
) {
hasItemText(_, "foo, bar")(tailText = " = ")
}
def testPhysicalApplyMethodAssignment(): Unit = doCompletionTest(
fileText =
s"""final class Foo private(val foo: Int,
| val bar: Int,
| val baz: Int)
|
|object Foo {
|
| def apply(foo: Int,
| bar: Int,
| baz: Int) =
| new Foo(foo, bar, baz)
|
| def apply(foo: Int,
| bar: Int) =
| new Foo(foo, bar, 42)
|}
|
|Foo(f$CARET)
|""".stripMargin,
resultText =
s"""final class Foo private(val foo: Int,
| val bar: Int,
| val baz: Int)
|
|object Foo {
|
| def apply(foo: Int,
| bar: Int,
| baz: Int) =
| new Foo(foo, bar, baz)
|
| def apply(foo: Int,
| bar: Int) =
| new Foo(foo, bar, 42)
|}
|
|Foo(foo = ???, bar = ???, baz = ???)$CARET
|""".stripMargin,
item = "foo, bar, baz"
)
def testPhysicalApplyMethodAssignment2(): Unit = doCompletionTest(
fileText =
s"""final class Foo private(val foo: Int,
| val bar: Int,
| val baz: Int)
|
|object Foo {
|
| def apply(foo: Int,
| bar: Int,
| baz: Int) =
| new Foo(foo, bar, baz)
|
| def apply(foo: Int,
| bar: Int) =
| new Foo(foo, bar, 42)
|}
|
|Foo(f$CARET)
|""".stripMargin,
resultText =
s"""final class Foo private(val foo: Int,
| val bar: Int,
| val baz: Int)
|
|object Foo {
|
| def apply(foo: Int,
| bar: Int,
| baz: Int) =
| new Foo(foo, bar, baz)
|
| def apply(foo: Int,
| bar: Int) =
| new Foo(foo, bar, 42)
|}
|
|Foo(foo = ???, bar = ???)$CARET
|""".stripMargin,
item = "foo, bar"
)
def testApplyCallAssignmentLookupElement(): Unit = checkLookupElement(
fileText =
s"""final case class Foo(foo: Int, bar: Int)
|
|Foo(f$CARET)
|""".stripMargin,
resultText =
s"""final case class Foo(foo: Int, bar: Int)
|
|Foo(foo = ???, bar = ???)$CARET
|""".stripMargin,
item = "foo, bar",
isSuper = false,
icons = PARAMETER, PARAMETER
)
def testCaseClassCompletionChar(): Unit = doCompletionTest(
fileText =
s"""final case class Foo(foo: Int, bar: Int)
|
|Foo(f$CARET)
|""".stripMargin,
resultText =
s"""final case class Foo(foo: Int, bar: Int)
|
|Foo(foo, bar)$CARET
|""".stripMargin,
item = "foo, bar",
char = ')'
)
def testAfterParenthesisOnlyInApplyCall(): Unit = checkNoCompletion(
s"""final case class Foo(foo: Int, bar: Int)
|
|Foo(foo, $CARET)
|""".stripMargin
)
def testBeforeParenthesisOnlyInApplyCall(): Unit = checkNoCompletion(
s"""final case class Foo(foo: Int, bar: Int)
|
|Foo($CARET, bar)
|""".stripMargin
)
def testPositionInApplyCallAssignment(): Unit = doCompletionTest(
fileText =
s"""final case class Foo(foo: Int, bar: Int, baz: Int)
|
|Foo(, $CARET)
|""".stripMargin,
resultText =
s"""final case class Foo(foo: Int, bar: Int, baz: Int)
|
|Foo(, bar = ???, baz = ???)$CARET
|""".stripMargin,
item = "bar, baz"
)
def testEmptyCaseClass(): Unit = checkNoCompletion(
s"""final case class Foo()
|
|Foo(f$CARET)
|""".stripMargin
)
def testTooShortCaseClass(): Unit = checkNoCompletion(
s"""final case class Foo(foo: Int)
|
|Foo(f$CARET)
|""".stripMargin
)
def testNonApplyMethod(): Unit = checkNoCompletion(
s"""object Foo {
| def baz(foo: Int, bar: Int): Unit = {}
|}
|
|Foo.baz(f$CARET)
|""".stripMargin
)
def testClauseLookupElement(): Unit = checkLookupElement(
fileText =
s"""def foo(bar: Int, baz: String): Int = 42
|
|def foo(bar: Int): Int = {
| val baz = ""
| foo(b$CARET)
|}
|""".stripMargin,
resultText =
s"""def foo(bar: Int, baz: String): Int = 42
|
|def foo(bar: Int): Int = {
| val baz = ""
| foo(bar, baz)$CARET
|}
|""".stripMargin,
item = "bar, baz",
isSuper = false,
icons = PARAMETER, PATTERN_VAL
)
def testClauseLookupElement2(): Unit = checkLookupElement(
fileText =
s"""def foo(bar: Int,
| baz: String): Unit = {}
|
|var bar = 42
|"" match {
| case baz => foo(b$CARET)
|}
|""".stripMargin,
resultText =
s"""def foo(bar: Int,
| baz: String): Unit = {}
|
|var bar = 42
|"" match {
| case baz => foo(bar, baz)$CARET
|}
|""".stripMargin,
item = "bar, baz",
isSuper = false,
icons = PATTERN_VAL, PATTERN_VAL
)
def testClauseLookupElement3(): Unit = checkNoBasicCompletion(
fileText =
s"""import java.util.{Collections, List}
|import Thread._
|
|def emptyList = Collections.emptyList[String]
|
|def foo(emptyList: List[String],
| currentThread: Thread,
| defaultUncaughtExceptionHandler: UncaughtExceptionHandler): Unit = {}
|
|foo(e$CARET)
|""".stripMargin,
item = "emptyList, currentThread, defaultUncaughtExceptionHandler",
)
def testClauseLookupElementAccessAll(): Unit = doCompletionTest(
fileText =
s"""import java.util.{Collections, List}
|import Thread._
|
|def emptyList = Collections.emptyList[String]
|
|def foo(emptyList: List[String],
| currentThread: Thread,
| defaultUncaughtExceptionHandler: UncaughtExceptionHandler): Unit = {}
|
|foo(emptyL$CARET)
|""".stripMargin,
resultText =
s"""import java.util.{Collections, List}
|import Thread._
|
|def emptyList = Collections.emptyList[String]
|
|def foo(emptyList: List[String],
| currentThread: Thread,
| defaultUncaughtExceptionHandler: UncaughtExceptionHandler): Unit = {}
|
|foo(emptyList, currentThread, defaultUncaughtExceptionHandler)$CARET
|""".stripMargin,
item = "emptyList, currentThread, defaultUncaughtExceptionHandler",
time = 2
)
def testPositionInClause(): Unit = doCompletionTest(
fileText =
s"""def foo(bar: Int,
| baz: String,
| barBaz: Boolean): Unit =
| foo(, $CARET)
|""".stripMargin,
resultText =
s"""def foo(bar: Int,
| baz: String,
| barBaz: Boolean): Unit =
| foo(, baz, barBaz)$CARET
|""".stripMargin,
item = "baz, barBaz"
)
def testEmptyClause(): Unit = checkNoCompletion(
s"""def foo() = 42
|
|foo(f$CARET)
|""".stripMargin
)
def testTooShortClause(): Unit = checkNoCompletion(
s"""def foo(bar: Int) = 42
|
|foo(b$CARET)
|""".stripMargin
)
def testNoNameMatchingClause(): Unit = checkNoCompletion(
s"""def foo(bar: Int, baz: String): Int = 42
|
|def foo(bar: Int): Int = {
| val barBaz = ""
| foo(b$CARET)
|}
|""".stripMargin
)
def testNoTypeMatchingClause(): Unit = checkNoCompletion(
s"""def foo(bar: Int, baz: String): Int = 42
|
|def foo(bar: Int): Int = {
| val baz = 42
| foo(b$CARET)
|}
|""".stripMargin
)
def testMultipleClause(): Unit = doCompletionTest(
fileText =
s"""def foo(foo: Int)
| (bar: Int, baz: String): Int = 42
|
|def foo(bar: Int): Int = {
| val baz = ""
| foo()(b$CARET)
|}
|""".stripMargin,
resultText =
s"""def foo(foo: Int)
| (bar: Int, baz: String): Int = 42
|
|def foo(bar: Int): Int = {
| val baz = ""
| foo()(bar, baz)$CARET
|}
|""".stripMargin,
item = "bar, baz"
)
def testMultipleClausePosition(): Unit = checkNoCompletion(
s"""def foo(bar: Int, baz: String)(): Int = 42
|
|def foo(bar: Int): Int = {
| val baz = ""
| foo()($CARET)
|}
|""".stripMargin
)
}
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/lang/completion3/ScalaSuperParametersTest.scala | Scala | apache-2.0 | 33,450 |
package extracells.integration.igw
import java.util
import cpw.mods.fml.common.Optional
import cpw.mods.fml.common.registry.GameRegistry
import extracells.Extracells
import extracells.integration.Integration
import extracells.registries.{BlockEnum, ItemEnum}
import igwmod.api.WikiRegistry
import net.minecraft.item.{Item, ItemStack}
import scala.collection.JavaConversions._
object IGW {
def initNotifier {
IGWSupportNotifier
}
@Optional.Method(modid = "IGWMod")
def init{
for(item <- ItemEnum.values()){
if(item != ItemEnum.CRAFTINGPATTERN && item != ItemEnum.FLUIDITEM) {
if(item == ItemEnum.FLUIDPATTERN){
WikiRegistry.registerBlockAndItemPageEntry(item.getSizedStack(1), item.getSizedStack(1).getUnlocalizedName.replace(".", "/"))
}else if (item == ItemEnum.STORAGECOMPONET || item == ItemEnum.STORAGECASING){
val list = new util.ArrayList[java.lang.Object]
item.getItem.getSubItems(item.getItem, Extracells.ModTab, list)
for (sub <- list) {
val stack = sub.asInstanceOf[ItemStack]
WikiRegistry.registerBlockAndItemPageEntry(stack, "extracells/item/crafting")
}
}else{
val list = new util.ArrayList[java.lang.Object]
item.getItem.getSubItems(item.getItem, Extracells.ModTab, list)
for (sub <- list) {
val stack = sub.asInstanceOf[ItemStack]
WikiRegistry.registerBlockAndItemPageEntry(stack, stack.getUnlocalizedName.replace(".", "/"))
}
}
}
}
if(Integration.Mods.OPENCOMPUTERS.isEnabled){
val stack = GameRegistry.findItemStack("extracells", "oc.upgrade", 1)
WikiRegistry.registerBlockAndItemPageEntry(stack.getItem, stack.getUnlocalizedName.replace(".", "/"))
}
for(block <- BlockEnum.values()){
val list = new util.ArrayList[java.lang.Object]
Item.getItemFromBlock(block.getBlock).getSubItems(Item.getItemFromBlock(block.getBlock), Extracells.ModTab, list)
for(sub <- list){
val stack = sub.asInstanceOf[ItemStack]
WikiRegistry.registerBlockAndItemPageEntry(stack, stack.getUnlocalizedName.replace(".", "/").replace("tile/", ""))
}
}
}
}
| ieatbedrock/Bedrocks-AE2-addons | src/main/scala/extracells/integration/igw/IGW.scala | Scala | mit | 2,226 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.abstractnn.AbstractModule
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.{T, Table}
import scala.reflect.ClassTag
/**
* Creates a module that takes a table as input and outputs the subtable starting at index
* offset having length elements (defaults to 1 element). The elements can be either
* a table or a Tensor. If `length` is negative, it means selecting the elements from the
* offset to element which located at the abs(`length`) to the last element of the input.
*
* @param offset the start index of table
* @param length the length want to select
*/
@SerialVersionUID(8046335768231475724L)
class NarrowTable[T: ClassTag](var offset: Int, val length: Int = 1)
(implicit ev: TensorNumeric[T]) extends AbstractModule[Table, Table, T]{
var len = length
override def updateOutput(input: Table): Table = {
output = T()
if (length < 0) {
len = input.length() - offset + 2 + length
}
var i = 1
while (i <= len) {
output.insert(i, input(offset + i -1))
i += 1
}
output
}
override def updateGradInput(input: Table, gradOutput: Table): Table = {
gradInput = T()
if (length < 0) {
len = input.length() - offset + 2 + length
}
var i = 1
while (i <= gradOutput.length()) {
gradInput.insert(offset + i - 1, gradOutput(i))
i += 1
}
i = 1
while (i <= input.length()) {
if (!gradInput.contains(i)) gradInput(i) = Tensor[T]()
if ((i < offset) || (i >= (offset + length))) {
gradInput(i) = Utils.recursiveResizeAs(gradInput(i), input(i))
Utils.recursiveFill(gradInput(i), 0)
}
i += 1
}
gradInput
}
override def toString(): String = {
s"${getPrintName}($offset, $length)"
}
override def canEqual(other: Any): Boolean = other.isInstanceOf[NarrowTable[T]]
override def equals(other: Any): Boolean = other match {
case that: NarrowTable[T] =>
super.equals(that) &&
(that canEqual this) &&
offset == that.offset &&
length == that.length
case _ => false
}
override def hashCode(): Int = {
val state = Seq(super.hashCode(), offset, length)
state.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
}
}
object NarrowTable {
def apply[@specialized(Float, Double) T: ClassTag](
offset: Int,
length: Int = 1)(implicit ev: TensorNumeric[T]) : NarrowTable[T] = {
new NarrowTable[T](offset, length)
}
}
| jenniew/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/NarrowTable.scala | Scala | apache-2.0 | 3,221 |
package IFDS
import org.scalatest._
import cell._
class SimpleTestBiDiRANoOPAL extends FlatSpec with Matchers {
{
val helper = new BiDiTestHelper()
var fwProblem = helper.happyPathFW
var bwProblem = helper.happyPathBW
var bidiSolver = new BiDiRAIFDSSolver[Node, String, Method](fwProblem, bwProblem, new HandlerPool(1))
bidiSolver.solve
"BiDi Happy path" should "use all forward flow functions" in {
fwProblem.allFlowFunctionsUsed should be (true)
}
it should "use all backward flow functions" in {
bwProblem.allFlowFunctionsUsed should be (true)
}
}
{
val helper = new BiDiTestHelper()
val fwProblem = helper.unbalancedReturnsBothDirectionsFW
val bwProblem = helper.unbalancedReturnsBothDirectionsBW
val bidiSolver = new BiDiRAIFDSSolver[Node, String, Method](fwProblem, bwProblem, new HandlerPool(1))
bidiSolver.solve
"BiDi Unbalanced returns both directions" should "use all forward flow functions" in {
fwProblem.allFlowFunctionsUsed should be (true)
}
it should "use all backward flow functions" in {
bwProblem.allFlowFunctionsUsed should be (true)
}
}
{
val helper = new BiDiTestHelper()
val fwProblem = helper.unbalancedReturnsNonMatchingCallSitesFW
val bwProblem = helper.unbalancedReturnsNonMatchingCallSitesBW
val bidiSolver = new BiDiRAIFDSSolver[Node, String, Method](fwProblem, bwProblem, new HandlerPool(1))
bidiSolver.solve
"BiDi Unbalanced returns non matching call sites" should "use all forward flow functions" in {
fwProblem.allFlowFunctionsUsed should be (true)
}
it should "use all backward flow functions" in {
bwProblem.allFlowFunctionsUsed should be (true)
}
}
{
val helper = new BiDiTestHelper()
val fwProblem = helper.returnsOnlyOneDirectionAndStopsFW
val bwProblem = helper.returnsOnlyOneDirectionAndStopsBW
val bidiSolver = new BiDiRAIFDSSolver[Node, String, Method](fwProblem, bwProblem, new HandlerPool(1))
bidiSolver.solve
"BiDi Returns only one direction and stops" should "use all forward flow functions" in {
fwProblem.allFlowFunctionsUsed should be (true)
}
it should "use all backward flow functions" in {
bwProblem.allFlowFunctionsUsed should be (true)
}
}
{
val helper = new BiDiTestHelper()
val fwProblem = helper.reuseSummaryFW
val bwProblem = helper.reuseSummaryBW
val bidiSolver = new BiDiRAIFDSSolver[Node, String, Method](fwProblem, bwProblem, new HandlerPool(1))
bidiSolver.solve
"BiDi reuse summary" should "use all forward flow functions" in {
fwProblem.allFlowFunctionsUsed should be (true)
}
it should "use all backward flow functions" in {
bwProblem.allFlowFunctionsUsed should be (true)
}
}
{
val helper = new BiDiTestHelper()
val fwProblem = helper.dontResumeIfReturnFlowIsKilledFW
val bwProblem = helper.dontResumeIfReturnFlowIsKilledBW
val bidiSolver = new BiDiRAIFDSSolver[Node, String, Method](fwProblem, bwProblem, new HandlerPool(1))
bidiSolver.solve
"BiDi dont resume if return flow is killed" should "use all forward flow functions" in {
fwProblem.allFlowFunctionsUsed should be (true)
}
it should "use all backward flow functions" in {
bwProblem.allFlowFunctionsUsed should be (true)
}
}
}
| packlnd/IFDS-RA | src/test/scala/IFDS/SimpleTestBiDiRANoOpal.scala | Scala | mit | 3,369 |
package com.advancedspark.serving.prediction.java
import scala.collection.JavaConverters.mapAsJavaMapConverter
trait Predictable {
def predict(inputs:java.util.Map[String,Any]): Any
}
| Resly/pipeline | prediction.ml/java/src/main/scala/com/advancedspark/serving/prediction/java/Predictable.scala | Scala | apache-2.0 | 188 |
package org.bitcoins.core.p2p
import org.bitcoins.testkitcore.node.P2PMessageTestUtil
import org.bitcoins.testkitcore.util.BitcoinSUnitTest
class NetworkMessageTest extends BitcoinSUnitTest {
"NetworkMessage" must "be able to serialize then deserialize a message and get the original hex back" in {
NetworkMessage(P2PMessageTestUtil.rawNetworkMessage).hex must be(
P2PMessageTestUtil.rawNetworkMessage)
}
it must "serialize and deserialize a version message example from the bitcoin wiki" in {
val hex = {
//taken from here with slight modifications
//https://en.bitcoin.it/wiki/Protocol_documentation#Message_structure
//this example uses an old protocol version WITHOUT the relay flag on the version message
//since we only support protocol version > 7, i added it manually
//this means the payload size is bumped by 1 byte in the NetworkHeader from 100 -> 101
//and a relay byte "00" is appended to the end of the payload
"F9BEB4D976657273696F6E000000000065000000358d4932" +
"62EA0000010000000000000011B2D05000000000010000000000000000000000000000000000FFFF000000000000010000000000000000000000000000000000FFFF0000000000003B2EB35D8CE617650F2F5361746F7368693A302E372E322FC03E0300" +
"00"
}.toLowerCase
val networkMsg = NetworkMessage.fromHex(hex)
networkMsg.hex must be(hex)
}
}
| bitcoin-s/bitcoin-s | core-test/src/test/scala/org/bitcoins/core/p2p/NetworkMessageTest.scala | Scala | mit | 1,375 |
// Starter Code for Exercise 4
// From "Vectors" atom
import com.atomicscala.AtomicTest._
theString is "The dog visited the firehouse!"
| P7h/ScalaPlayground | Atomic Scala/atomic-scala-solutions/18_Vectors/Starter-4.scala | Scala | apache-2.0 | 137 |
package com.aquamentis.nwsr
import android.app.Dialog
import android.app.ProgressDialog
import android.content.Context
import android.content.Intent
import android.content.SharedPreferences
import android.os.Bundle
import android.preference.PreferenceManager
import android.view.ContextMenu
import android.view.LayoutInflater
import android.view.Menu
import android.view.MenuItem
import android.view.View
import android.widget.AdapterView
import android.widget.ListView
import android.widget.SimpleCursorAdapter
import android.widget.TextView
import scala.collection.mutable.ArrayBuilder
import com.aquamentis.util.RichDatabase._
import com.aquamentis.util.Story
class Headlines extends DatabaseActivity with DialogFeedRetriever
with SharedPreferences.OnSharedPreferenceChangeListener {
override def onCreate(savedInstanceState: Bundle) {
super.onCreate(savedInstanceState)
setTitle(R.string.title_headlines)
setContentView(R.layout.headlines)
val inflater = LayoutInflater.from(this)
getListView.addFooterView(
inflater.inflate(R.layout.button_next_headline, null))
registerForContextMenu(getListView)
getListView.setLongClickable(false)
cursor = db.storyView()
adapter = new SimpleCursorAdapter(
this, R.layout.headline, cursor, Array("title", "link"),
Array(R.id.headline_title, R.id.headline_link))
setListAdapter(adapter)
PreferenceManager.setDefaultValues(this, R.xml.settings, false)
PreferenceManager.getDefaultSharedPreferences(this)
.registerOnSharedPreferenceChangeListener(this)
}
override def onResume() {
super.onResume()
updateView()
}
override def onCreateOptionsMenu(menu: Menu): Boolean = {
val inflater = getMenuInflater()
inflater.inflate(R.menu.headlines, menu)
true
}
override def onOptionsItemSelected(item: MenuItem): Boolean =
item.getItemId match {
case R.id.refresh => {
new ForegroundRetrieveTask().execute(FeedAll)
true
}
case R.id.feeds => {
startActivity(new Intent(this, classOf[NewsFeeds]))
true
}
case R.id.saved => {
startActivity(new Intent(this, classOf[SavedItems]))
true
}
case R.id.settings => {
startActivity(new Intent(this, classOf[Settings]))
true
}
case _ => super.onOptionsItemSelected(item)
}
override def onListItemClick(lv: ListView, v: View, pos: Int, id: Long) {
if (id < 0) {
val ids = {
val arr = ArrayBuilder.make[Long]
cursor.foreach {
arr += cursor.getLong(0)
}
arr.result()
}
db.hideStories(ids)
updateView()
getListView.setSelectionAfterHeaderView()
} else {
lv.showContextMenuForChild(v)
}
}
override def onCreateContextMenu(menu: ContextMenu, v: View,
menuInfo: ContextMenu.ContextMenuInfo) {
super.onCreateContextMenu(menu, v, menuInfo)
if (menuInfo.asInstanceOf[AdapterView.AdapterContextMenuInfo].id >= 0) {
val inflater = getMenuInflater()
inflater.inflate(R.menu.context_headlines, menu)
menu.setHeaderTitle(
menuInfo.asInstanceOf[AdapterView.AdapterContextMenuInfo].targetView
.findViewById(R.id.headline_title).asInstanceOf[TextView].getText)
}
}
override def onContextItemSelected(item: MenuItem): Boolean = {
val info = item.getMenuInfo.asInstanceOf[
AdapterView.AdapterContextMenuInfo]
item.getItemId match {
case R.id.open_browser => {
val url = info.targetView.findViewById(R.id.headline_link)
.asInstanceOf[TextView].getText.toString
db.hideStories(Array(info.id))
openInBrowser(url)
updateView()
true
}
case R.id.save => {
db.addSaved(info.id)
updateView()
true
}
case R.id.delete => {
db.hideStories(Array(info.id))
updateView()
false
}
case _ => super.onContextItemSelected(item)
}
}
def onSharedPreferenceChanged(sp: SharedPreferences, key: String) {
if (key == "stories_per_page") {
cursor = db.storyView()
adapter.changeCursor(cursor)
}
}
}
| alexclare/nwsr | src/scala/nwsr/headlines.scala | Scala | gpl-3.0 | 4,233 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.v2
import org.apache.spark.sql.catalyst.expressions.AttributeReference
import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, Statistics}
import org.apache.spark.sql.sources.v2.reader.{DataSourceV2Reader, SupportsReportStatistics}
case class DataSourceV2Relation(
output: Seq[AttributeReference],
reader: DataSourceV2Reader) extends LeafNode {
override def computeStats(): Statistics = reader match {
case r: SupportsReportStatistics =>
Statistics(sizeInBytes = r.getStatistics.sizeInBytes().orElse(conf.defaultSizeInBytes))
case _ =>
Statistics(sizeInBytes = conf.defaultSizeInBytes)
}
}
object DataSourceV2Relation {
def apply(reader: DataSourceV2Reader): DataSourceV2Relation = {
new DataSourceV2Relation(reader.readSchema().toAttributes, reader)
}
}
| shubhamchopra/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Relation.scala | Scala | apache-2.0 | 1,658 |
package fpscala.laziness
import org.scalatest.{Matchers, FlatSpec}
class lazinessTest extends FlatSpec with Matchers {
"Exercise 5.0" should "do" in {
def if2[A](cond: Boolean,
onTrue: () => A,
onFalse: () => A):A =
if (cond) onTrue() else onFalse()
def if2_1[A](cond: Boolean,
onTrue: => A,
onFalse: => A):A =
if (cond) onTrue else onFalse
if2( 22 > 1, ()=> println("big"), ()=> println("small"))
if2_1( 22 > 1, println("big"), println("small"))
if2_1(false, sys.error("fail"), 3)
def maybeTwice(b: Boolean, i: => Int) =
if(b) i+i else 0
val x = maybeTwice(true, { println("hi"); 1+41 })
x should be (84)
def maybeTwice2(b: Boolean, i: => Int ) = {
lazy val j = i
if(b) j+j else 0
}
val y = maybeTwice2(true, { println("hi2"); 1+41 })
x should be (84)
}
"Exercise 5.1" should "do" in {
/*Write a function to convert a Stream to a List, which will force its evaluation and let you look at it in the REPL.
You can convert to the regular List type in the standard library.
You can place this and other functions that operate on a Stream inside the Stream trait.*/
Stream[Int](1,2,3,4).toList should be (List(1,2,3,4))
Stream.empty.toList should be (List())
}
"Exercise 5.2" should "do" in {
/*Write the function take(n) for returning the first n elements of a Stream,
and drop(n) for skipping the first n elements of a Stream.*/
Stream(1,2,3,4).take(2).toList should be (List(1,2))
Stream(1,2,3,4).drop(2).toList should be (List(3,4))
}
"Exercise 5.3" should "do" in {
/*Write the function takeWhile for returning all starting elements of a Stream
that match the given predicate.
def takeWhile(p: A => Boolean): Stream[A]*/
Stream(1,2,3,4).takeWhile( _ < 3 ).toList should be (List(1,2))
}
"Exercise 5.4" should "do" in {
/* Implement forAll, which checks that all elements in the Stream match a given predicate.
Your implementation should terminate the traversal as soon as it encounters a nonmatching value.
def forAll(p: A => Boolean): Boolean*/
Stream(1,2,3,4).exists( _ == 3 ) should be (true)
Stream(1,2,3,4).exists( _ == 5 ) should be (false)
Stream(1,2,3,4).forAll( _ < 5 ) should be (true)
Stream(1,2,3,4).forAll( _ % 2 == 0 ) should be (false)
}
"Exercise 5.5" should "do" in {
/*Use foldRight to implement takeWhile.*/
Stream(1,2,3,4).takeWhile_byFoldRight( _ < 3).toList should be (List(1,2))
}
"Exercise 5.6" should "do" in {
/*Hard: Implement headOption using foldRight*/
}
"Exercise 5.7" should "do" in {
/*Implement map, filter, append, and flatMap using foldRight.
The append method should be non-strict in its argument*/
Stream(1,2,3,4).map( _ * 2 ).toList should be (List(2,4,6,8))
Stream(1,2,3,4).flatMap( x => Stream(x*2) ).toList should be (List(2,4,6,8))
Stream(1,2,3,4).filter( _ < 3).toList should be (List(1,2))
}
"Exercise 5.8" should "do" in {
/*Generalize ones slightly to the function constant, which returns an infinite Stream of a given value.
def constant[A](a: A): Stream[A]*/
Stream.constant(3).take(4).toList should be (List(3,3,3,3))
}
"Exercise 5.9" should "do" in {
/*Write a function that generates an infinite stream of integers,
starting from n, then n + 1, n + 2, and so on.
def from(n: Int): Stream[Int]*/
Stream.from(1).take(4).toList should be (List(1,2,3,4))
}
"Exercise 5.10" should "do" in {
/*Write a function fibs that generates the infinite stream
of Fibonacci numbers: 0, 1, 1, 2, 3, 5, 8, and so on.*/
Stream.fibs().take(6).toList should be (List(0,1,1,2,3,5))
}
"Exercise 5.11" should "do" in {
/*Write a more general stream-building function called unfold. It takes an initial state,
and a function for producing both the next state and the next value in the generated stream.
def unfold[A, S](z: S)(f: S => Option[(A, S)]): Stream[A]*/
}
"Exercise 5.12" should "do" in {
/*Write fibs, from, constant, and ones in terms of unfold*/
Stream.fibs_byUnfold.take(6).toList should be (List(0,1,1,2,3,5))
}
"Exercise 5.13" should "do" in {
}
}
sealed trait Stream[+A] {
import Stream._
def toList: List[A] = {
def go(s: Stream[A], acc: List[A]): List[A] = s match {
case Cons(h,t) => go( t(), h() :: acc)
case _ => acc
}
go(this, List()).reverse
}
// will be very slow because append very long sequence op is very time consuming
// instead, prepend and then reverse
def toList_1: List[A] = this match {
case Empty => List()
case Cons( h, t) => List(h()) ++ t().toList
}
def take(n: Int): Stream[A] = this match {
case Cons(h,t) if n > 1 => cons( h(), t().take(n-1) )
case Cons(h,_) if n == 1 => cons( h(), Stream.empty )
case _ => empty
}
def drop(n: Int): Stream[A] = this match {
case Cons(h,t) if n > 0 => t().drop(n-1)
case _ => this
}
def takeWhile(p: A => Boolean): Stream[A] = this match {
case Cons(h,t) if p(h()) => cons( h(), t().takeWhile(p))
case _ => empty
}
def exists_1(p: A => Boolean): Boolean = this match {
case Cons(h,t) => p(h()) || t().exists_1(p)
case _ => false
}
def foldRight[B](z: => B)(f: (A, => B) => B): B = this match {
case Cons(h,t) => f(h(), t().foldRight(z)(f))
case _ => z
}
def exists(p: A => Boolean): Boolean = foldRight(false)((h,t) => p(h) || t )
def forAll(p: A => Boolean): Boolean = foldRight(true)((h,t) => p(h) && t )
def takeWhile_byFoldRight(p: A => Boolean): Stream[A] =
foldRight(empty[A])((h,t) => if(p(h)) cons(h, t) else empty[A])
def headOption: Option[A] =
foldRight(None: Option[A])((h,_) => Some(h))
def map[B](f: A => B): Stream[B] = {
foldRight(empty[B])((h,t) => cons(f(h),t))
}
def filter(p: A => Boolean): Stream[A] = {
foldRight(empty[A])((h,t) => if (p(h)) cons(h,t) else t )
}
def append[B>:A](z: => Stream[B]): Stream[B] =
foldRight(z)((h,t) => cons(h,t))
def flatMap[B](f: A => Stream[B]): Stream[B] =
foldRight(empty[B])((h,t) => f(h).append(t))
def find(p: A => Boolean): Option[A] =
filter(p).headOption
}
case object Empty extends Stream[Nothing]
case class Cons[+A](h: () => A, t: () => Stream[A]) extends Stream[A]
object Stream {
// smart constructor that make the value lazily evaluated
def cons[A](hd: => A, tl: => Stream[A]): Stream[A] = {
lazy val head = hd
lazy val tail = tl
Cons(()=> head, () => tail)
}
def empty[A]: Stream[A] = Empty
def apply[A](as: A*): Stream[A] =
if (as.isEmpty) empty else cons(as.head, apply(as.tail: _*))
// This is more efficient than `cons(a, constant(a))` since it's just
// one object referencing itself.
def constant[A](a: A): Stream[A] = {
lazy val tail: Stream[A] = Cons(() => a, () => tail)
tail
}
def constant_1[A](a: A): Stream[A] = cons(a, constant(a))
def from(n: Int): Stream[Int] = cons(n, from(n+1))
def fibs(): Stream[Int] = {
def go(f0:Int, f1:Int): Stream[Int] = cons(f0, go(f1,f0+f1))
go(0,1)
}
def unfold[A,S](z: S)(f: S => Option[(A,S)]): Stream[A] = {
f(z) match {
case None => empty
case Some((a,s)) => cons(a, unfold(s)(f))
}
}
val fibs_byUnfold
= unfold((0,1)) { case (f0,f1) => Some((f0,(f1,f0+f1))) }
//def constant[A](a: A): Stream[A] = unfold(a)( s => Some(a,s) )
} | hoondori/TIL | Excercise/Fpscala/src/test/scala/fpscala/laziness/laziness.scala | Scala | cc0-1.0 | 7,562 |
package blended.testsupport.security
import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag
import javax.security.auth.login.{AppConfigurationEntry, Configuration}
import scala.jdk.CollectionConverters._
class DummyLoginConfiguration extends Configuration {
private[this] val options : Map[String, String] = Map.empty
override def getAppConfigurationEntry(name : String) : Array[AppConfigurationEntry] = {
val entry = new AppConfigurationEntry(classOf[DummyLoginModule].getName(), LoginModuleControlFlag.SUFFICIENT, options.asJava)
Array(entry)
}
}
| woq-blended/blended | blended.testsupport/src/main/scala/blended/testsupport/security/DummyLoginConfiguration.scala | Scala | apache-2.0 | 593 |
package com.kolor.docker.api.entities
import com.kolor.docker.api._
import play.api.libs.json._
trait DockerVolume extends DockerEntity {
def containerPath: String
def hostPath: String
}
sealed case class ContainerVolume(containerPath: String, hostPath: String = "") extends DockerVolume {
override def toString = s"$containerPath"
}
sealed case class BindMountVolume(containerPath: String, hostPath: String, `type`: String = "ro") extends DockerVolume{
override def toString = s"$containerPath:$hostPath:${`type`}"
}
object DockerVolume {
private val pattern = """^([^:]+):(/[^:]+):(ro|rw)$""".r
def apply(path: String, hostPath: String): DockerVolume = ContainerVolume(path)
def unapply(v: DockerVolume): Option[(String, String)] = {
v match {
case bind: BindMountVolume => Some((bind.containerPath, bind.hostPath))
case vol: ContainerVolume => Some((vol.containerPath, ""))
case _ => None
}
}
def fromString(s: String): Option[BindMountVolume] = s match {
case pattern(containerPath, hostPath, rwType) => Some(BindMountVolume(containerPath, hostPath, rwType))
case _ => None
}
} | waveinch/reactive-docker | src/main/scala/com/kolor/docker/api/entities/DockerVolume.scala | Scala | mit | 1,168 |
package controllers.sitedata.table
import javax.inject._
import play.api._
import play.api.mvc._
import play.api.data.Form
import play.api.data.Forms._
import play.api.data._
import models.sitedata.SiteInfo
import models.sitedata.SiteInfoDetail
import models.sitedata.SubZone
import models.sitedata.Zone
import models.sitedata.EquipmentModel
import models.sitedata.EquipmentName
import models.sitedata.Petname
import models.sitedata.Modality
import models.sitedata.LineType
import play.api.i18n.Messages
import play.api.i18n.I18nSupport
import play.api.i18n.MessagesApi
import services.sitedata.ISiteInfoService
import services.sitedata.ISiteInfoDetailService
import services.sitedata.ISubZoneService
import services.sitedata.IZoneService
import services.sitedata.IEquipmentModelService
import services.sitedata.IEquipmentNameService
import services.sitedata.IPetnameService
import services.sitedata.IModalityService
import services.sitedata.ILineTypeService
import play.Application
import utils.Awaits
import play.api.libs.iteratee.Enumerator
import reports.ReportBuilder
import play.api.Configuration
@Singleton
class LineTypeController @Inject() (
val messagesApi: MessagesApi,
val applicationconf: Configuration,
val service_siteinfo: ISiteInfoService,
val service_siteinfodetail: ISiteInfoDetailService,
val service_zone: IZoneService,
val service_subzone: ISubZoneService,
val service_equipmentmodel: IEquipmentModelService,
val service_equipmentname: IEquipmentNameService,
val service_petname: IPetnameService,
val service_modality: IModalityService,
val service_linetype: ILineTypeService) extends Controller with I18nSupport {
val linetypeForm: Form[LineType] = Form(
mapping(
"id" -> longNumber,
"name" -> text
)(models.sitedata.LineType.apply)(models.sitedata.LineType.unapply))
def index = Action { implicit request =>
Logger.info("/linetype -> LineTypeController index called.")
val linetype = Awaits.get(5, service_linetype.findAll()).getOrElse(Seq())
Ok(views.html.sitedata.linetype.index(linetype))
}
def blank = Action { implicit request =>
Logger.info("blank called. ")
Ok(views.html.sitedata.linetype.create(linetypeForm))
}
def details(id: Long) = Action { implicit request =>
Logger.info("details called. id: " + id)
val linetype = Awaits.get(5, service_linetype.findById(id)).get
Ok(views.html.sitedata.linetype.details(linetype))
}
def insert()= Action { implicit request =>
Logger.info("insert called.")
linetypeForm.bindFromRequest.fold(
form => {
BadRequest(views.html.sitedata.linetype.insert(form))
},
linetype => {
service_linetype.insert(linetype)
Redirect(controllers.sitedata.table.routes.LineTypeController.index)
.flashing("success" -> Messages("success.insert", "new linetype created"))
})
}
def update(id: Long) = Action { implicit request =>
Logger.info("updated called. id: " + id)
linetypeForm.bindFromRequest.fold(
form => {
Ok(views.html.sitedata.linetype.update(form))
.flashing("error" -> "Fix the errors!")
},
linetype => {
service_linetype.update(id, linetype)
Redirect(controllers.sitedata.table.routes.LineTypeController.index)
.flashing("success" -> Messages("success.update", linetype.name))
})
}
def remove(id: Long) = Action {
import play.api.libs.concurrent.Execution.Implicits.defaultContext
val result = Awaits.get(5, service_linetype.findById(id))
result.map { linetype =>
service_linetype.remove(id)
Redirect(controllers.sitedata.table.routes.LineTypeController.index)
.flashing("success" -> Messages("success.delete", linetype.name))
}.getOrElse(NotFound)
}
def report() = Action {
import play.api.libs.concurrent.Execution.Implicits.defaultContext
val url = applicationconf.getString("slick.dbs.SiteData.db.url").getOrElse("None")
Ok.chunked( Enumerator.fromStream( ReportBuilder.toPdf("LineType.jrxml", url) ) )
.withHeaders(CONTENT_TYPE -> "application/octet-stream")
.withHeaders(CONTENT_DISPOSITION -> "attachment; filename=report-linetype.pdf"
)
}
}
| tnddn/iv-web | portal/rest-portal/app/controllers/sitedata/table/LineTypeController.scala | Scala | apache-2.0 | 4,279 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.