| code (string, 5-1M) | repo_name (string, 5-109) | path (string, 6-208) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 5-1M) |
|---|---|---|---|---|---|
package com.twitter.finagle.tracing
import com.twitter.finagle.tracing.Annotation.Message
import com.twitter.logging.{Level, LogRecord}
import org.specs.SpecificationWithJUnit
class TracingLogHandlerSpec extends SpecificationWithJUnit {
"TracingLogHandler" should {
doBefore { Trace.clear() }
"send messages to Tracer" in {
val tracer = new BufferingTracer()
Trace.pushTracer(tracer)
val handler = new TracingLogHandler
val msg1 = "hello"
handler.publish(new LogRecord(Level.DEBUG, msg1))
tracer.iterator.next().annotation match {
case Message(s) => s.trim must_== msg1
case _ => fail("Message does not match")
}
}
}
}
| foursquare/finagle | finagle-core/src/test/scala/com/twitter/finagle/tracing/TracingLogHandlerSpec.scala | Scala | apache-2.0 | 698 |
package misc.sudoku
/**
* Created by sajit on 1/1/15.
*/
class Board(cells:Array[Array[Int]]) {
// def solve(boardState:Array[Array[Int]]):Array[Array[Int]] = {
//
// if(SudokuHelperUtils.isSolved(boardState)){
// boardState
// }
// else{
// boardState.zipWithIndex.map{
// case(row,rowIdx) => row.zipWithIndex.map{
// case(element,colIdx) => {
//
// val rowPossiblities = SudokuHelperUtils.getPossibleNums(row.toList)
// if(rowPossiblities.length==1){
// val clone = boardState.clone()
// clone(rowIdx)(colIdx) = rowPossiblities(0)
// return solve(clone)
// }
// else{
// return solve(boardState.clone())
// }
// //val transposed = boardState.transpose
// //val colPossiblities = SudokuHelperUtils.getPossibleNums(transposed(colIdx).toList)
// //val groupPossibles = SudokuHelperUtils.getGroupPossibleNums(boardState,rowIdx,colIdx)
//
//
// }
// }
// }
// }
// }
}
| sajit/skalad | scala/src/main/scala/misc/sudoku/Board.scala | Scala | apache-2.0 | 1,044 |
package concrete.constraint.extension
import java.util
import bitvectors.BitVector
import concrete._
import concrete.constraint.{Constraint, StatefulConstraint}
import concrete.util.SparseSet
import mdd.{MDD, MDD0, MDDLeaf, TSSet}
/* MDDRelation comes with its own timestamp */
class MDDC(_scope: Array[Variable], val mdd: MDDRelation)
extends Constraint(_scope) with StatefulConstraint[SparseSet] {
val simpleEvaluation: Int = math.min(Constraint.NP, scope.count(_.initDomain.size > 1))
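// Edge density of the MDD relative to the Cartesian product of the initial domains; advise() scales
// the current domain cardinality product by this ratio to estimate the cost of a revision.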
private val prop = mdd.edges.toDouble / scope.map(_.initDomain.size.toDouble).product
override def init(ps: ProblemState): ProblemState = {
val max = mdd.mdd.fastIdentify() + 1
ps.updateState(this, new SparseSet(max)) //new SparseSet(max))
}
// Members declared in concrete.constraint.Constraint
override def check(t: Array[Int]) = mdd.contains(t)
def advise(ps: ProblemState, event: Event, pos: Int): Int = (prop * doubleCardSize(ps)).toInt
def revise(ps: ProblemState, mod: BitVector): Outcome = {
val domains = ps.doms(scope) //Array.tabulate(arity)(p => ps.dom(scope(p)))
val supported = Array.fill(arity)(new util.HashSet[Int]())
// val unsupported = domains.map(_.to[collection.mutable.Set])
var delta = arity
var gNo = ps(this) //.clone()
var gNoChange = false
val gYes = new TSSet[MDD]()
// if (mdd.lambda < 50) {
// mdd.map(_.mkString(", ")).foreach(println)
// }
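// Depth-first traversal of the MDD: supported(i) collects the values at level i that lie on some path
// to MDDLeaf under the current domains; gYes caches nodes known to be supported, gNo caches dead nodes,
// and delta marks the first level at or beyond which every domain is fully supported, so only levels
// before delta need filtering afterwards.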
def seekSupports(g: MDD, i: Int): Boolean = {
@inline
def loop(dom: Domain): Boolean = {
var res = false
for (ak <- dom) {
val gk = g.subMDD(ak)
if (seekSupports(gk, i + 1)) {
res = true
supported(i).add(ak)
if (i + 1 == delta && supported(i).size == domains(i).size) {
delta = i
return true
}
}
}
res
}
if (g eq MDDLeaf) {
if (i < delta) {
delta = i
}
true
} else if (g eq MDD0) {
false
} else if (gYes.contains(g)) {
true
} else if (gNo.contains(g.id)) {
false
} else if (loop(domains(i))) {
gYes.put(g)
true
} else {
gNo = gNo.incl(g.id)
gNoChange = true
false
}
}
val sat = seekSupports(mdd.mdd, 0)
if (sat) {
var cs: ProblemState =
if (gNoChange) ps.updateState(this, gNo) else ps
for (p <- 0 until delta) {
if (supported(p).size < domains(p).size) {
cs = cs.updateDomNonEmptyNoCheck(scope(p), domains(p).filter(supported(p).contains))
}
}
cs.entailIfFree(this)
} else {
Contradiction(scope)
}
}
override def dataSize: Int = mdd.edges
}
| concrete-cp/concrete | src/main/scala/concrete/constraint/extension/MDDC.scala | Scala | lgpl-2.1 | 2,796 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.tools.nsc
package backend.jvm
package analysis
import scala.annotation.switch
import scala.tools.asm.Opcodes._
import scala.tools.asm.Type
import scala.tools.asm.tree._
import scala.tools.asm.tree.analysis.{Frame, Value}
import opt.BytecodeUtils._
object InstructionStackEffect {
val consShift = 3
val prodMask = (1 << consShift) - 1
def cons(i: Int) = i >>> consShift
def prod(i: Int) = i & prodMask
private def t(x: Int, y: Int): Int = (x << consShift) | y
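// Example: t(2, 1) encodes "consumes 2, produces 1" as (2 << 3) | 1 = 17, so cons(17) == 2 and prod(17) == 1.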
/**
* Returns the number of stack values consumed and produced by `insn`, encoded in a single `Int`
* (the `cons` / `prod` extract individual values). The returned values are correct for use in
* asm's Analyzer framework. For example, a LLOAD instruction produces one stack value. See also
* doc in `analysis` package object.
*
* This method requires the `frame` to be in the state **before** executing / interpreting the
* `insn`.
*/
def forAsmAnalysis[V <: Value](insn: AbstractInsnNode, frame: Frame[V]): Int = computeConsProd(insn, forClassfile = false, conservative = false, frame = frame)
/**
* Returns the maximal possible growth of the stack when executing `insn`. The returned value
* is usually the same as expected by asm's Analyzer framework, but it may be larger. For
* example, consider a POP2 instruction:
* - if two size-1 values are popped, then the asm Analyzer consumes two values
* - if a size-2 value is popped, the asm Analyzer consumes only one stack slot (see doc in the
* `analysis` package object)
*
* If a precise result is needed, invoke the `forAsmAnalysis` and provide a `frame` value that
* allows looking up the sizes of values on the stack.
*/
def maxStackGrowth(insn: AbstractInsnNode): Int = {
val prodCons = computeConsProd(insn, forClassfile = false, conservative = true)
prod(prodCons) - cons(prodCons)
}
/**
* Returns the number of stack values consumed and produced by `insn`, encoded in a single `Int`
* (the `cons` / `prod` extract individual values). The returned values are correct for writing
* into a classfile (see doc on the `analysis` package object).
*/
def forClassfile(insn: AbstractInsnNode): Int = computeConsProd(insn, forClassfile = true, conservative = false)
private def invokeConsProd(methodDesc: String, insn: AbstractInsnNode, forClassfile: Boolean): Int = {
val consumesReceiver = insn.getOpcode != INVOKESTATIC && insn.getOpcode != INVOKEDYNAMIC
if (forClassfile) {
val sizes = Type.getArgumentsAndReturnSizes(methodDesc)
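// getArgumentsAndReturnSizes packs (argSize << 2) | retSize, where argSize already includes one slot
// for the implicit receiver, hence the subtraction when there is no receiver to consume.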
val cons = (sizes >> 2) - (if (consumesReceiver) 0 else 1)
val prod = sizes & 0x03
t(cons, prod)
} else {
val cons = Type.getArgumentTypes(methodDesc).length + (if (consumesReceiver) 1 else 0)
val prod = if (Type.getReturnType(methodDesc) == Type.VOID_TYPE) 0 else 1
t(cons, prod)
}
}
private def fieldInsnIsLongOrDouble(insn: AbstractInsnNode) = {
val d = insn.asInstanceOf[FieldInsnNode].desc
d == "J" || d == "D"
}
private def computeConsProd[V <: Value](insn: AbstractInsnNode, forClassfile: Boolean, conservative: Boolean, frame: Frame[V] = null): Int = {
// not used if `forClassfile || conservative`: in these cases, `frame` is allowed to be `null`
def peekStack(n: Int): V = frame.peekStack(n)
(insn.getOpcode: @switch) match {
// The order of opcodes is the same as in Frame.execute.
case NOP => t(0, 0)
case ACONST_NULL |
ICONST_M1 |
ICONST_0 |
ICONST_1 |
ICONST_2 |
ICONST_3 |
ICONST_4 |
ICONST_5 |
FCONST_0 |
FCONST_1 |
FCONST_2 |
BIPUSH |
SIPUSH |
ILOAD |
FLOAD |
ALOAD => t(0, 1)
case LDC =>
if (forClassfile) insn.asInstanceOf[LdcInsnNode].cst match {
case _: java.lang.Long | _: java.lang.Double => t(0, 2)
case _ => t(0, 1)
} else
t(0, 1)
case LCONST_0 |
LCONST_1 |
DCONST_0 |
DCONST_1 |
LLOAD |
DLOAD => if (forClassfile) t(0, 2) else t(0, 1)
case IALOAD |
FALOAD |
AALOAD |
BALOAD |
CALOAD |
SALOAD => t(2, 1)
case LALOAD |
DALOAD => if (forClassfile) t(2, 2) else t(2, 1)
case ISTORE |
FSTORE |
ASTORE => t(1, 0)
case LSTORE |
DSTORE => if (forClassfile) t(2, 0) else t(1, 0)
case IASTORE |
FASTORE |
AASTORE |
BASTORE |
CASTORE |
SASTORE => t(3, 0)
case LASTORE |
DASTORE => if (forClassfile) t(4, 0) else t(3, 0)
case POP => t(1, 0)
case POP2 =>
if (forClassfile) t(2, 0)
else if (conservative) t(1, 0)
else {
val isSize2 = peekStack(0).getSize == 2
if (isSize2) t(1, 0) else t(2, 0)
}
case DUP => t(1, 2)
case DUP_X1 => t(2, 3)
case DUP_X2 =>
if (forClassfile || conservative) t(3, 4)
else {
val isSize2 = peekStack(1).getSize == 2
if (isSize2) t(2, 3) else t(3, 4)
}
case DUP2 =>
if (forClassfile || conservative) t(2, 4)
else {
val isSize2 = peekStack(0).getSize == 2
if (isSize2) t(1, 2) else t(2, 4)
}
case DUP2_X1 =>
if (forClassfile || conservative) t(3, 5)
else {
val isSize2 = peekStack(0).getSize == 2
if (isSize2) t(2, 3) else t(3, 5)
}
case DUP2_X2 =>
if (forClassfile || conservative) t(4, 6)
else {
val v1isSize2 = peekStack(0).getSize == 2
if (v1isSize2) {
val v2isSize2 = peekStack(1).getSize == 2
if (v2isSize2) t(2, 3) else t(3, 4)
} else {
val v3isSize2 = peekStack(2).getSize == 2
if (v3isSize2) t(3, 5) else t(4, 6)
}
}
case SWAP => t(2, 2)
case IADD |
FADD |
ISUB |
FSUB |
IMUL |
FMUL |
IDIV |
FDIV |
IREM |
FREM => t(2, 1)
case LADD |
DADD |
LSUB |
DSUB |
LMUL |
DMUL |
LDIV |
DDIV |
LREM |
DREM => if (forClassfile) t(4, 2) else t(2, 1)
case INEG |
FNEG => t(1, 1)
case LNEG |
DNEG => if (forClassfile) t(2, 2) else t(1, 1)
case ISHL |
ISHR |
IUSHR |
IAND |
IOR |
IXOR => t(2, 1)
case LSHL |
LSHR |
LUSHR => if (forClassfile) t(3, 2) else t(2, 1)
case LAND |
LOR |
LXOR => if (forClassfile) t(4, 2) else t(2, 1)
case IINC => t(0, 0)
case I2F |
F2I |
I2B |
I2C |
I2S => t(1, 1)
case I2L |
I2D |
F2L |
F2D => if (forClassfile) t(1, 2) else t(1, 1)
case L2I |
L2F |
D2I |
D2F => if (forClassfile) t(2, 1) else t(1, 1)
case L2D |
D2L => if (forClassfile) t(2, 2) else t(1, 1)
case FCMPL |
FCMPG => t(2, 1)
case LCMP |
DCMPL |
DCMPG => if (forClassfile) t(4, 1) else t(2, 1)
case IFEQ |
IFNE |
IFLT |
IFGE |
IFGT |
IFLE => t(1, 0)
case IF_ICMPEQ |
IF_ICMPNE |
IF_ICMPLT |
IF_ICMPGE |
IF_ICMPGT |
IF_ICMPLE |
IF_ACMPEQ |
IF_ACMPNE => t(2, 0)
case GOTO => t(0, 0)
case JSR => t(0, 1)
case RET => t(0, 0)
case TABLESWITCH |
LOOKUPSWITCH => t(1, 0)
case IRETURN |
FRETURN |
ARETURN => t(1, 0) // Frame.execute consumes one stack value
case LRETURN |
DRETURN => if (forClassfile) t(2, 0) else t(1, 0)
case RETURN => t(0, 0) // Frame.execute does not change the stack
case GETSTATIC =>
val prod = if (forClassfile && fieldInsnIsLongOrDouble(insn)) 2 else 1
t(0, prod)
case PUTSTATIC =>
val cons = if (forClassfile && fieldInsnIsLongOrDouble(insn)) 2 else 1
t(cons, 0)
case GETFIELD =>
val prod = if (forClassfile && fieldInsnIsLongOrDouble(insn)) 2 else 1
t(1, prod)
case PUTFIELD =>
val cons = if (forClassfile && fieldInsnIsLongOrDouble(insn)) 3 else 2
t(cons, 0)
case INVOKEVIRTUAL |
INVOKESPECIAL |
INVOKESTATIC |
INVOKEINTERFACE => invokeConsProd(insn.asInstanceOf[MethodInsnNode].desc, insn, forClassfile)
case INVOKEDYNAMIC => invokeConsProd(insn.asInstanceOf[InvokeDynamicInsnNode].desc, insn, forClassfile)
case NEW => t(0, 1)
case NEWARRAY |
ANEWARRAY |
ARRAYLENGTH => t(1, 1)
case ATHROW => t(1, 0) // Frame.execute consumes one stack value
case CHECKCAST |
INSTANCEOF => t(1, 1) // Frame.execute does push(pop()) for both of them
case MONITORENTER |
MONITOREXIT => t(1, 0)
case MULTIANEWARRAY => t(insn.asInstanceOf[MultiANewArrayInsnNode].dims, 1)
case IFNULL |
IFNONNULL => t(1, 0)
}
}
}
| scala/scala | src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala | Scala | apache-2.0 | 9,815 |
package com.buransky.plugins.scoverage.samples.sbt.multiModule.module1
import scala.util.Random
trait Beer {
val volume: Double
def isGood: Boolean = (volume > 0.0)
}
case object EmptyBeer extends {
val volume = 0.0
} with Beer
trait SlovakBeer extends Beer {
override def isGood = Random.nextBoolean
}
trait BelgianBeer extends Beer {
if (volume > 0.25)
throw new IllegalArgumentException("Too big beer for belgian beer!")
override def isGood = true
}
case class HordonBeer(volume: Double) extends SlovakBeer {
override def isGood = false
}
case class ChimayBeer(volume: Double) extends BelgianBeer
| zenderol/sonar-scoverage-plugin | samples/sbt/multi-module/module1/src/main/scala/com/buransky/plugins/scoverage/samples/sbt/multiModule/module1/Beer.scala | Scala | lgpl-3.0 | 626 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.database.cosmosdb
import akka.event.slf4j.SLF4JLogging
import ch.qos.logback.classic.LoggerContext
import ch.qos.logback.classic.spi.ILoggingEvent
import ch.qos.logback.core.AppenderBase
import com.microsoft.azure.cosmosdb.rx.internal.ResourceThrottleRetryPolicy
import org.apache.openwhisk.common.{Counter => WhiskCounter}
import kamon.metric.{Counter, MeasurementUnit}
import org.apache.openwhisk.common.{LogMarkerToken, TransactionId}
import org.apache.openwhisk.core.ConfigKeys
import org.slf4j.LoggerFactory
import pureconfig._
import pureconfig.generic.auto._
import scala.util.Try
object CosmosDBAction extends Enumeration {
val Create, Query, Get, Others = Value
}
object RetryMetricsCollector extends AppenderBase[ILoggingEvent] with SLF4JLogging {
import CosmosDBAction._
private val tokens =
Map(Create -> Token(Create), Query -> Token(Query), Get -> Token(Get), Others -> Token(Others))
val retryCounter = new WhiskCounter
private[cosmosdb] def registerIfEnabled(): Unit = {
val enabled = loadConfigOrThrow[Boolean](s"${ConfigKeys.cosmosdb}.retry-stats-enabled")
if (enabled) {
log.info("Enabling retry metrics collector")
register()
}
}
/**
* CosmosDB uses below log message
* ```
* logger.warn(
* "Operation will be retried after {} milliseconds. Current attempt {}, Cumulative delay {}",
* retryDelay.toMillis(),
* this.currentAttemptCount,
* this.cumulativeRetryDelay,
* exception);
* ```
*
*/
override def append(e: ILoggingEvent): Unit = {
val msg = e.getMessage
val errorMsg = Option(e.getThrowableProxy).map(_.getMessage).getOrElse(msg)
for {
success <- isSuccessOrFailedRetry(msg)
token <- tokens.get(operationType(errorMsg))
} {
if (success) {
token.success.counter.increment()
//Element 1 has the count
val attemptCount = getRetryAttempt(e.getArgumentArray, 1)
token.success.histogram.record(attemptCount)
//Used mostly for test mode where tags may be disabled
//and test need to determine if count is increased
if (!TransactionId.metricsKamonTags) {
retryCounter.next()
}
} else {
token.failed.counter.increment()
}
}
}
def getCounter(opType: CosmosDBAction.Value, retryPassed: Boolean = true): Option[Counter] = {
tokens.get(opType).map(t => if (retryPassed) t.success else t.failed).map { _.counter }
}
private def getRetryAttempt(args: Array[AnyRef], index: Int) = {
val t = Try {
if (args != null && args.length > index) {
args(index) match {
case n: Number => n.intValue
case _ => 0
}
} else 0
}
t.getOrElse(0)
}
private def register(): Unit = {
val logCtx = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
val retryLogger = logCtx.getLogger(classOf[ResourceThrottleRetryPolicy].getName)
start()
retryLogger.addAppender(this)
}
private def isSuccessOrFailedRetry(msg: String) = {
if (msg.startsWith("Operation will be retried after")) Some(true)
else if (msg.startsWith("Operation will NOT be retried")) Some(false)
else None
}
private def operationType(errorMsg: String) = {
if (errorMsg.contains("OperationType: Query")) Query
else if (errorMsg.contains("OperationType: Create")) Create
else if (errorMsg.contains("OperationType: Get")) Get
else Others
}
private def createToken(opType: String, retryPassed: Boolean): LogMarkerToken = {
val action = if (retryPassed) "success" else "failed"
val tags = Map("type" -> opType)
if (TransactionId.metricsKamonTags) LogMarkerToken("cosmosdb", "retry", action, tags = tags)(MeasurementUnit.none)
else LogMarkerToken("cosmosdb", "retry", action, Some(opType))(MeasurementUnit.none)
}
private case class Token(success: LogMarkerToken, failed: LogMarkerToken)
private object Token {
def apply(opType: CosmosDBAction.Value): Token =
new Token(createToken(opType.toString, retryPassed = true), createToken(opType.toString, retryPassed = false))
}
}
| jasonpet/openwhisk | common/scala/src/main/scala/org/apache/openwhisk/core/database/cosmosdb/RetryMetricsCollector.scala | Scala | apache-2.0 | 5,024 |
package momijikawa.lacquer
import akka.actor._
import akka.agent.Agent
import KanColleMessage.{ ApiStart2, Port, Slot_item }
import spray.can.Http
object KanColleWebSocketServer {
final case class Push(msg: String)
def props() = Props(classOf[KanColleWebSocketServer])
}
class KanColleWebSocketServer extends Actor with ActorLogging {
import KanColleWebSocketServer.Push
implicit val execContext = context.dispatcher.prepare()
var connections = List[ActorRef]()
val apiStart2Cache = Agent[Option[ApiStart2]](None)
val portCache = Agent[Option[Port]](None)
val slot_itemCache = Agent[Option[Slot_item]](None)
def receive = {
case Http.Connected(remoteAddress, localAddress) ⇒
log.info("ws connected")
val serverConnection = sender()
val conn = context.actorOf(KanColleWebSocketWorker.props(serverConnection))
connections = connections :+ conn
serverConnection ! Http.Register(conn)
context.watch(conn)
// If api_start2 and port caches exist, treat this as a fresh connection and send the cached data to the client.
// This spares the client a reload when it reconnects.
apiStart2Cache().foreach {
apiStart2 ⇒
log.info("Re-sending api_start2...")
Thread.sleep(200)
conn ! Push(s"""{"api":"api_start2","data":${apiStart2.jsonString}}""")
}
portCache().foreach {
port ⇒
log.info("Re-sending port...")
Thread.sleep(200)
conn ! Push(s"""{"api":"port","data":${port.jsonString}}""")
}
slot_itemCache().foreach {
slot_item ⇒
log.info("Re-sending slot_item...")
Thread.sleep(200)
conn ! Push(s"""{"api":"slot_item","data":${slot_item.jsonString}}""")
}
case Http.Closed ⇒
log.info("ws disconnected")
connections = connections.filterNot(_ == sender())
case Terminated(actor) ⇒
// Remove the actor of the disconnected connection from the list
connections = connections.filterNot(_ == actor)
case message: Port ⇒
log.info("Updating port data...")
portCache.alter { _ ⇒ Some(message) } foreach { msg: Option[Port] ⇒ log.info(s"Now, port cache is ${msg.toString.take(200)}") }
connections.map {
conn ⇒
conn ! Push(s"""{"api":"port","data":${message.jsonString}}""")
}
case message: ApiStart2 ⇒
log.info("Updating api_start2 data...")
apiStart2Cache.alter { _ ⇒ Some(message) } foreach { msg: Option[ApiStart2] ⇒ log.info(s"Now, api_start2 cache is ${msg.toString.take(200)}") }
connections.map {
conn ⇒
conn ! Push(s"""{"api":"api_start2","data":${message.jsonString}}""")
}
case message: Slot_item ⇒
log.info("Updating slot_item data...")
slot_itemCache.alter { _ ⇒ Some(message) } foreach { msg: Option[Slot_item] ⇒ log.info(s"Now, slot_item cache is ${msg.toString.take(200)}") }
connections.map {
conn ⇒
conn ! Push(s"""{"api":"slot_item","data":${message.jsonString}}""")
}
}
}
| windymelt/lacquer | src/main/scala/momijikawa/lacquer/KanColleWebSocketServer.scala | Scala | bsd-3-clause | 3,162 |
/**
* Copyright (c) 2016 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.trustedanalytics.sparktk.frame.internal.ops.flatten
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row
import org.trustedanalytics.sparktk.frame.DataTypes
import org.trustedanalytics.sparktk.frame.DataTypes.DataType
import java.util.regex.Pattern
/**
* This is a wrapper to encapsulate methods that may need to be serialized to executed on Spark worker nodes.
* If you don't know what this means please read about Closure Mishap
* [[http://ampcamp.berkeley.edu/wp-content/uploads/2012/06/matei-zaharia-part-1-amp-camp-2012-spark-intro.pdf]]
* and Task Serialization
* [[http://stackoverflow.com/questions/22592811/scala-spark-task-not-serializable-java-io-notserializableexceptionon-when]]
*/
object FlattenColumnsFunctions extends Serializable {
/**
* Flatten RDD by the column with specified column indices
* @param columns List of tuples that contain column index, data type, and delimiters.
* @param rdd RDD for flattening
* @return new RDD with columns flattened
*/
def flattenRddByColumnIndices(columns: List[(Int, DataType, String)])(rdd: RDD[Row]): RDD[Row] = {
val flattener = flattenRowByColumnIndices(columns)_
rdd.flatMap(row => flattener(row))
}
/**
* flatten a row by the column with specified column indices. Columns must be a string or vector.
* @param columns List of tuples that contain column index, data type, and delimiter
* @param row row data
* @return flattened out row/rows
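* For example, flattening the row ("a,b", "1;2;3") with columns List((0, string, ","), (1, string, ";"))
* yields rows ("a", "1"), ("b", "2"), (null, "3"); items that overflow the rows produced so far get null
* in the other flattened columns.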
*/
private[frame] def flattenRowByColumnIndices(columns: List[(Int, DataType, String)])(row: Row): Array[Row] = {
val rowBuffer = new scala.collection.mutable.ArrayBuffer[Row]()
for (i <- columns.indices) {
val (columnIndex, columnDataType, delimiter) = columns(i)
columnDataType match {
case DataTypes.string =>
val splitItems = row(columnIndex).asInstanceOf[String].split(Pattern.quote(delimiter))
if (splitItems.length > 1) {
// Loop through items being split from the string
for (rowIndex <- splitItems.indices) {
val isNewRow = rowBuffer.length <= rowIndex
val r = if (isNewRow) row.toSeq.toArray.clone() else rowBuffer(rowIndex).toSeq.toArray.clone()
r(columnIndex) = splitItems(rowIndex)
if (isNewRow) {
for (tempColIndex <- columns.indices) {
if (tempColIndex != i) {
r(columns(tempColIndex)._1) = null
}
}
rowBuffer += Row.fromSeq(r)
}
else
rowBuffer(rowIndex) = Row.fromSeq(r)
}
}
else {
// There's nothing to split, just update first row in the rowBuffer
if (rowBuffer.length == 0)
rowBuffer += row
else {
val r = rowBuffer(0).toSeq.toArray.clone()
r(columnIndex) = splitItems(0)
rowBuffer(0) = Row.fromSeq(r)
}
}
case DataTypes.vector(length) =>
val vectorItems = DataTypes.toVector(length)(row(columnIndex)).toArray
// Loop through items in the vector
for (vectorIndex <- vectorItems.indices) {
val isNewRow = rowBuffer.length <= vectorIndex
val r = if (isNewRow) row.toSeq.toArray.clone() else rowBuffer(vectorIndex).toSeq.toArray.clone()
// Set vector item in the column being flattened
r(columnIndex) = vectorItems(vectorIndex)
if (isNewRow) {
// Empty out other columns that are being flattened in the new row
for (tempColIndex <- columns.indices) {
if (tempColIndex != i) {
r(columns(tempColIndex)._1) = null
}
}
// Add new row to the rowBuffer
rowBuffer += Row.fromSeq(r)
}
else
rowBuffer(vectorIndex) = Row.fromSeq(r)
}
case _ =>
throw new IllegalArgumentException("Flatten column does not support type: " + columnDataType.toString)
}
}
rowBuffer.toArray
}
}
| dmsuehir/spark-tk | sparktk-core/src/main/scala/org/trustedanalytics/sparktk/frame/internal/ops/flatten/FlattenColumnsFunctions.scala | Scala | apache-2.0 | 4,888 |
package mr.merc.economics
import mr.merc.politics.{Election, Elites, Party, Province, Regime, State, StateElectionReport}
import MapUtil.FloatOperations._
import mr.merc.politics.VotersPolicy.NoVoting
import WorldConstants.Population._
class PoliticalSystem(startingRulingParty: Party, state: State, creationTurn: Int) {
private var lastElectionTurn = -1
val elites = new Elites(state, creationTurn, startingRulingParty)
def rulingParty_=(newRulingParty: Party): Unit = {
_rulingParty = newRulingParty
_parliament = if (_rulingParty.regime == Regime.Absolute) None
else Some(ParliamentParties(Map(_rulingParty -> 1.0d), Set(_rulingParty)))
}
def refreshElites(turn: Int): Unit = {
elites.refreshElites(turn)
}
def rulingParty: Party = _rulingParty
def nextElectionTurn: Option[Int] =
if (rulingParty.votersPolicy == NoVoting) None
else Some(lastElectionTurn + WorldConstants.Diplomacy.ElectionCycle)
def isElectionNow(turn: Int): Boolean = nextElectionTurn.contains(turn)
def doElectionsNow(turn: Int, primaryCulture: Culture, possibleParties: List[Party], regions: List[Province]): StateElectionReport = {
lastElectionTurn = turn
val fairElections = new Election(rulingParty, primaryCulture, possibleParties).doElections(regions)
val riggedElections = fairElections.riggedElections(rulingParty, RiggedElectionsQ(rulingParty.regime))
applyElectionResults(riggedElections, turn)
riggedElections
}
private var _rulingParty = startingRulingParty
private var _parliament: Option[ParliamentParties] = {
if (startingRulingParty.regime == Regime.Absolute) None
else Some(ParliamentParties(Map(startingRulingParty -> 1.0d), Set(startingRulingParty)))
}
def parliament: Option[ParliamentParties] = _parliament
def applyElectionResults(election: StateElectionReport, turn: Int): Unit = {
val resultsAfterThreshold = election.votes.scaleToSum(1d).filter(_._2 >= ElectionThreshold).scaleToSum(1d)
val coalition = findCoalition(resultsAfterThreshold)
_rulingParty = coalition.maxBy(resultsAfterThreshold)
_parliament = Some(ParliamentParties(resultsAfterThreshold, coalition))
refreshElites(turn)
}
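// Enumerates coalitions by recursively adding parties until the combined vote share exceeds 0.5, then
// picks the coalition with the smallest ideological spread, breaking ties by the largest total vote share.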
def findCoalition(results: Map[Party, Double]): Set[Party] = {
def variants(alreadyInCoalition: List[Party], remaining: List[Party]): List[List[Party]] = {
if (alreadyInCoalition.map(results).sum > 0.5) List(alreadyInCoalition)
else {
remaining.flatMap { r =>
variants(r :: alreadyInCoalition, remaining.filterNot(_ == r))
}
}
}
variants(Nil, results.keySet.toList).minBy(set => (coalitionIdeologyDifference(set), -set.map(results).sum)).toSet
}
private def coalitionIdeologyDifference(parties: List[Party]): Int = {
parties match {
case List(_) => 0
case _ =>
val diffs = for {
p1 <- parties
p2 <- parties if p1 != p2
} yield p1.politicalPosition.diffWithPosition(p2.politicalPosition)
diffs.max
}
}
def changeAbsoluteRulingParty(newParty: Party): Unit = {
require(newParty.regime == Regime.Absolute, s"new party must be absolute but is $newParty")
require(rulingParty.regime == Regime.Absolute, s"not absolute ruling party")
_rulingParty = newParty
}
def usurpPower(newParty: Party, turn: Int): Unit = {
if (newParty.regime == Regime.Absolute && rulingParty.regime == Regime.Constitutional) {
_rulingParty = newParty
_parliament = None
elites.presidentUsurpsPower(turn)
} else if (newParty.regime == Regime.Constitutional && rulingParty.regime == Regime.Democracy) {
_rulingParty = newParty
_parliament = _parliament.map { p =>
ParliamentParties((p.parties + (newParty -> 1.1)).scaleToSum(1d), Set(newParty))
}
elites.presidentUsurpsPower(turn)
} else {
sys.error(s"party $newParty instad of $rulingParty is incorrect usurpation")
}
}
def giveUpPower(newParty: Party, turn: Int): Unit = {
lastElectionTurn = turn
if (newParty.regime == Regime.Constitutional && rulingParty.regime == Regime.Absolute) {
_rulingParty = newParty
_parliament = Some(ParliamentParties(Map(newParty -> 1.0d), Set(newParty)))
refreshElites(turn)
} else if (newParty.regime == Regime.Democracy && rulingParty.regime == Regime.Constitutional) {
_rulingParty = newParty
_parliament = _parliament.map { p =>
ParliamentParties((p.parties + (newParty -> 1.05)).scaleToSum(1d), Set(newParty))
}
refreshElites(turn)
} else {
sys.error(s"party $newParty instead of $rulingParty is incorrect giving up power")
}
}
}
case class ParliamentParties(parties: Map[Party, Double], coalition: Set[Party])
| RenualdMarch/merc | src/main/scala/mr/merc/economics/PoliticalSystem.scala | Scala | gpl-3.0 | 4,779 |
package org.http4s
package server
package middleware
import cats.effect._
import cats.implicits._
import fs2._
import java.io.ByteArrayOutputStream
import java.util.zip.GZIPOutputStream
import org.http4s.dsl.io._
import org.http4s.headers._
import org.scalacheck.Properties
import org.scalacheck.Prop.forAll
class GZipSpec extends Http4sSpec {
"GZip" should {
"fall through if the route doesn't match" in {
val service = GZip(HttpService.empty[IO]) <+> HttpService[IO] {
case GET -> Root => Ok("pong")
}
val req =
Request[IO](Method.GET, Uri.uri("/")).putHeaders(`Accept-Encoding`(ContentCoding.gzip))
val resp = service.orNotFound(req).unsafeRunSync()
resp.status must_== (Status.Ok)
resp.headers.get(`Content-Encoding`) must beNone
}
"encodes random content-type if given isZippable is true" in {
val response = "Response string"
val service: HttpService[IO] = HttpService[IO] {
case GET -> Root =>
Ok(response, Header("Content-Type", "random-type; charset=utf-8"))
}
val gzipService: HttpService[IO] = GZip(service, isZippable = (_) => true)
val req: Request[IO] = Request[IO](Method.GET, Uri.uri("/"))
.putHeaders(`Accept-Encoding`(ContentCoding.gzip))
val actual: IO[Array[Byte]] =
gzipService.orNotFound(req).flatMap(_.as[Chunk[Byte]]).map(_.toArray)
val byteStream = new ByteArrayOutputStream(response.length)
val gZIPStream = new GZIPOutputStream(byteStream)
gZIPStream.write(response.getBytes)
gZIPStream.close()
actual must returnValue(byteStream.toByteArray)
}
checkAll(
"encoding",
new Properties("GZip") {
property("middleware encoding == GZIPOutputStream encoding") = forAll {
vector: Vector[Array[Byte]] =>
val service: HttpService[IO] = HttpService[IO] {
case GET -> Root => Ok(Stream.emits(vector).covary[IO])
}
val gzipService: HttpService[IO] = GZip(service)
val req: Request[IO] = Request[IO](Method.GET, Uri.uri("/"))
.putHeaders(`Accept-Encoding`(ContentCoding.gzip))
val actual: IO[Array[Byte]] =
gzipService.orNotFound(req).flatMap(_.as[Chunk[Byte]]).map(_.toArray)
val byteArrayStream = new ByteArrayOutputStream()
val gzipStream = new GZIPOutputStream(byteArrayStream)
vector.foreach(gzipStream.write)
gzipStream.close()
val expected = byteArrayStream.toByteArray
actual must returnValue(expected)
}
}
)
}
}
| reactormonk/http4s | server/src/test/scala/org/http4s/server/middleware/GZipSpec.scala | Scala | apache-2.0 | 2,643 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import java.util
import com.intel.analytics.bigdl.nn.Graph.ModuleNode
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity}
import com.intel.analytics.bigdl.nn.tf.{ControlDependency, WithoutInput}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.{Node, T}
import scala.collection.mutable
import scala.reflect.ClassTag
/**
* A graph container. The modules in the container are connected as a DAG graph.
*
* @param _inputs inputs modules, user can feed data into these modules in the forward method
* @param _outputs output modules
* @param _variables
* @tparam T Numeric type. Only support float/double now
*/
class StaticGraph[T: ClassTag](
private val _inputs : Seq[ModuleNode[T]],
private val _outputs : Seq[ModuleNode[T]],
private val _variables: Option[(Array[Tensor[T]], Array[Tensor[T]])] = None
)(implicit ev: TensorNumeric[T]) extends Graph[T](_inputs, _outputs, _variables) {
private val forwardExecution = forwardGraph.topologySort.reverse
private var backwardExecution: Array[Node[AbstractModule[Activity, Activity, T]]] = _
private val inputCache = new Array[Activity](forwardExecution.length)
private var backId2ForwardId: Array[Int] = _
private var gradOutputCache: Array[Activity] = _
buildBackwardGraph()
override def updateOutput(input: Activity): Activity = {
var i = 0
while(i < forwardExecution.length) {
val node = forwardExecution(i)
val nodeInput = findInput(node, input)
inputCache(i) = nodeInput
node.element.forward(nodeInput)
i += 1
}
output = dummyOutput.element.output
output
}
override def backward(input: Activity, gradOutput: Activity): Activity = {
val before = System.nanoTime()
val gradients = backwardExecution(input, gradOutput, true)
backwardTime = System.nanoTime() - before
gradients
}
override def updateGradInput(input: Activity, gradOutput: Activity): Activity = {
backwardExecution(input, gradOutput, false)
}
override def buildBackwardGraph(): this.type = {
super.buildBackwardGraph()
backwardExecution = backwardGraph.topologySort.reverse
backId2ForwardId = new Array[Int](backwardExecution.length)
gradOutputCache = new Array[Activity](backwardExecution.length)
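// Match each node in the backward execution order to its index in the forward order (by layer name),
// so that inputs cached during the forward pass can be reused when computing gradients.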
var i = 0
while(i < backwardExecution.length - 1) {
var j = 0
var find = false
while(j < forwardExecution.length) {
if (forwardExecution(j).element.getName() == backwardExecution(i).element.getName()) {
backId2ForwardId(i) = j
find = true
}
j += 1
}
require(find, "Cannot find backward layer in forward executions")
i += 1
}
this
}
override def accGradParameters(input: Activity, gradOutput: Activity): Unit = {
var i = 0
while (i < backwardExecution.length - 1) {
val curNode = backwardExecution(i)
val curInput = inputCache(backId2ForwardId(i))
curNode.element.accGradParameters(curInput, gradOutputCache(i))
i += 1
}
}
private def backwardExecution(input: Activity, gradOutput: Activity,
executeBackward: Boolean): Activity = {
dummyOutputGrad.element.gradInput = gradOutput
var i = 0
while (i < backwardExecution.length - 1) { // do not execute the dummy backward end
val curNode = backwardExecution(i)
val curGradOutput = findGradOutput(curNode, gradOutput)
gradOutputCache(i) = curGradOutput
val curInput = inputCache(backId2ForwardId(i))
if (!isStopGradient(curNode.element)) {
if (executeBackward) {
curNode.element.backward(curInput, curGradOutput)
} else {
curNode.element.updateGradInput(curInput, curGradOutput)
}
} else if (executeBackward) {
curNode.element.accGradParameters(curInput, curGradOutput)
}
i += 1
}
gradInput = fetchModelGradInput()
gradInput
}
}
| qiuxin2012/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/StaticGraph.scala | Scala | apache-2.0 | 4,665 |
/*
* CorrelatorCore.scala
* (LeereNull)
*
* Copyright (c) 2011-2014 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss.leerenull
import de.sciss.kontur.session.Session
import de.sciss.processor.Processor
import de.sciss.processor.Processor.Aborted
import de.sciss.swingplus.GroupPanel
import collection.breakOut
import javax.swing.table.DefaultTableModel
import scala.util.{Failure, Success}
import swing.{BorderPanel, ScrollPane, Table, Swing}
import java.util.{Locale, Date}
import java.io.File
import xml.{NodeSeq, XML}
import java.text.DateFormat
import de.sciss.app.AbstractWindow
import de.sciss.kontur.gui.AppWindow
import java.awt.BorderLayout
import de.sciss.strugatzki.{FeatureExtraction, FeatureCorrelation}
import FeatureCorrelation.{Config => CSettings, Match}
import FeatureExtraction.{Config => ESettings}
object CorrelatorSelector extends GUIGoodies with KonturGoodies with NullGoodies {
var verbose = false
var autosave = true
object Search {
private val dateFormat = DateFormat.getDateTimeInstance( DateFormat.SHORT, DateFormat.FULL, Locale.US )
def fromXMLFile( file: File ) : Search = fromXML( XML.loadFile( file ))
def fromXML( xml: NodeSeq ) : Search = {
val date = dateFormat.parse( (xml \\ "date").text )
val offset = {xml \\ "offset"}.text.toLong
val settings = CSettings.fromXML( xml \\ "settings" )
val matches: IndexedSeq[ Match ] = ((xml \\ "matches") \\ "match").map( Match.fromXML( _ ))( breakOut )
val master = {
val e = xml \\ "master"
if( e.isEmpty ) None else Some( Match.fromXML( e ))
}
val metas = {
val res: IndexedSeq[ ESettings ] = (xml \\ "metas" \\ "feature").map( ESettings.fromXML( _ ))( breakOut )
if( res.nonEmpty ) res else {
IndexedSeq( ESettings.fromXMLFile( settings.metaInput ))
}
}
// val shift = {
// val e = (xml \\ "shift").text
// if( e == "" ) None else Some( e.toDouble )
// }
val transform = CorrelatorCore.Transform.fromXML( xml \\ "transform" )
Search( date, offset, settings, matches, metas, master, transform )
}
}
final case class Search( creation: Date, offset: Long, settings: CSettings, matches: IndexedSeq[ Match ],
metas: IndexedSeq[ ESettings ], master: Option[ Match ],
transform: CorrelatorCore.Transform ) {
def toXML = <search>
<date>{Search.dateFormat.format( creation )}</date>
<offset>{offset}</offset>
<settings>{settings.toXML.child}</settings>
<matches>{matches.map(_.toXML)}</matches>
<metas>{metas.map( _.toXML )}</metas>
{master match { case Some( m ) => <master>{m.toXML.child}</master>; case None => Nil }}
<transform>{transform.toXML.child}</transform>
</search>
}
/**
* @param offset the offset of the search input with respect to its
* appearance in the main timeline
*/
def beginSearch( offset: Long, settings: CSettings, metas: IndexedSeq[ ESettings ],
master: Option[ Match ], transform: CorrelatorCore.Transform )
( implicit doc: Session ): Unit = {
if( verbose ) println( settings )
val dlg = progressDialog( "Correlating with database" )
val tim = new Date()
val fc = FeatureCorrelation( settings )
fc.addListener {
case Processor.Result(_, Success(res)) =>
dlg.stop()
if( verbose ) {
println( "Done. " + res.size + " entries:" )
res.foreach { m =>
println( "\\nFile : " + m.file.getAbsolutePath +
"\\nSimilarity: " + (m.sim * 100) +
"\\nSpan start: " + m.punch.start +
"\\nBoost in : " + ampdb( m.boostIn ))
if( settings.punchOut.isDefined ) {
println( "Span stop : " + m.punch.stop +
"\\nBoost out : " + ampdb( m.boostOut ))
}
}
}
val search = Search( tim, offset, settings, res, metas, master, transform )
if( autosave ) saveSearch( search )
Swing.onEDT( makeSelector( search ))
case Processor.Result(_, Failure(Aborted())) =>
dlg.stop()
case Processor.Result(_, Failure(e)) =>
dlg.stop()
e.printStackTrace()
case prog @ Processor.Progress(_, _) => dlg.progress = prog.toInt
}
dlg.start( fc )
}
def saveSearch( search: Search ): Unit = {
val id = plainName( search.settings.metaInput ).filter( _.isLetterOrDigit ).take( 16 )
// val df = new SimpleDateFormat( "yyMMdd'_'HHmmss'_" + id + ".xml'", Locale.US )
// val f = new File( LeereNull.searchFolder, df.format( search.creation ))
val f = stampedFile( LeereNull.searchFolder, id, ".xml", search.creation )
XML.save( f.getAbsolutePath, search.toXML, "UTF-8", xmlDecl = true)
}
def makeSelector( search: Search )( implicit doc: Session ): Unit = {
// val tls = doc.timelines
// implicit val tl = tls.tryEdit( "Add Correlator Timeline" ) { implicit ce =>
// implicit val tl = BasicTimeline.newEmpty( doc )
//// tl.span = ar0.span
// tl.name = uniqueName( tls, "$Correlator" )
// tls.editInsert( ce, tls.size, tl )
//// placeStereo( ar0, "$" )
// tl
// }
def rowStrings( m: Match ) : Seq[ String ] = Seq(
percentString( m.sim ), plainName( m.file ), timeString( m.punch, 44100 / search.transform.timeScale ),
decibelString( ampdb( m.boostIn )), decibelString( ampdb( m.boostOut ))
)
val rowData: Array[ Array[ AnyRef ]] = search.matches.map( m => {
Array[ AnyRef ]( rowStrings( m ): _* )
})( breakOut )
val columnNames = Array[ AnyRef ]( "Sim", "File", "Span", "Gain In", "Gain Out" )
val table = new Table {
peer.putClientProperty( "JComponent.sizeVariant", "small" )
showGrid = true
peer.getTableHeader.setVisible( true )
peer.setModel( new DefaultTableModel( rowData, columnNames ) {
override def isCellEditable( row: Int, col: Int ) = false
})
val cm = peer.getColumnModel
Seq( 32, 112, 144, 36, 36 ).zipWithIndex.foreach { case (w, idx) =>
cm.getColumn( idx ).setPreferredWidth( w )
}
// listenTo( selection )
selection.elementMode = Table.ElementMode.Row
selection.intervalMode = Table.IntervalMode.Single
}
val scroll = new ScrollPane( table ) {
horizontalScrollBarPolicy = ScrollPane.BarPolicy.Never
verticalScrollBarPolicy = ScrollPane.BarPolicy.Always
}
// val tlf = TimelineFrame2 { f =>
// println( "Bye..." )
// f.dispose()
// }
val a = new AppWindow( AbstractWindow.REGULAR ) {
setTitle( "Search results" )
setLocationRelativeTo( null )
}
val lbInfo = label( "<html>Search conducted for " + plainName( search.settings.metaInput ) + " at " +
search.creation + (search.master match {
case Some( m ) => rowStrings( m ).mkString( "<br>Master: ", " ", "" )
case None => ""
}) + "</html>" )
val butSelectMatch = button( "Select match" ) { b =>
table.selection.rows.headOption.foreach { row =>
// println( search.matches( row ))
CorrelatorCore.makeMatchEditor( search, row )
}
}
val panel = new GroupPanel {
horizontal = Seq( lbInfo, butSelectMatch )
vertical = Par( Baseline )( lbInfo, butSelectMatch )
}
val bp = new BorderPanel {
add( panel, BorderPanel.Position.North )
add( scroll, BorderPanel.Position.Center )
}
val cp = a.getContentPane
cp.add( bp.peer, BorderLayout.CENTER )
a.pack()
a.setVisible( true )
// tlf.bottomPanel = Some( bp )
// tlf.pack() // AndSetMinimum()
}
}
| Sciss/LeereNull | src/main/scala/de/sciss/leerenull/CorrelatorSelector.scala | Scala | gpl-3.0 | 8,372 |
/*
* LaScala
* Copyright (c) 2017. Phasmid Software
*/
package com.phasmid.laScala.fp
import org.scalatest.{FlatSpec, Matchers}
/**
* Created by scalaprof on 4/7/17.
*/
class LazySpec extends FlatSpec with Matchers {
behavior of "AtomicLazyNumber"
it should "work" in {
// val x = AtomicLazyNumber
}
}
| rchillyard/LaScala | src/test/scala/com/phasmid/laScala/fp/LazySpec.scala | Scala | lgpl-2.1 | 327 |
package org.scalameter
package reporting
import java.util.Date
import java.io._
import java.awt.Color
import org.jfree.chart._
import collection._
import xml._
import utils.Tree
import Key._
case class HtmlReporter(val renderers: HtmlReporter.Renderer*) extends Reporter {
val sep = File.separator
def head =
<head>
<title>Performance report</title>
<link type="text/css" media="screen" rel="stylesheet" href="lib/index.css"/>
</head>
def body(result: Tree[CurveData], persistor: Persistor) = {
<body>
{machineInformation}
{date(result)}
<h1>Performance test charts</h1>
{
for ((ctx, scoperesults) <- result.scopes; if scoperesults.nonEmpty) yield <p><div>
<h2>Performance test group: {ctx.scope}</h2>
{
val histories = scoperesults.map(cd => persistor.load(cd.context))
for (r <- renderers) yield r.render(ctx, scoperesults, histories)
}
</div></p>
}
</body>
}
def machineInformation =
<div>
<h1>Machine information</h1>
<p><ul>
{
for ((k, v) <- Context.machine.properties.toList.sortBy(_._1)) yield <li>
{k + ": " + v}
</li>
}
</ul></p>
</div>
def date(results: Tree[CurveData]) = {
val dateoption = for {
start <- results.context.get[Date](reports.startDate)
end <- results.context.get[Date](reports.endDate)
} yield <div>
<div>Started: {start}</div>
<div>Finished: {end}</div>
<div>Running time: {(end.getTime - start.getTime) / 1000} seconds</div>
</div>
dateoption.getOrElse(<div>No date information.</div>)
}
def report(result: CurveData, persistor: Persistor) {
// nothing - the charts are generated only at the end
}
def report(results: Tree[CurveData], persistor: Persistor) = {
val resultdir = results.context.goe(reports.resultDir, "tmp")
new File(s"$resultdir").mkdir()
new File(s"$resultdir${sep}report").mkdir()
new File(s"$resultdir${sep}report${sep}images").mkdir()
new File(s"$resultdir${sep}report${sep}lib").mkdir()
val report = <html>{head ++ body(results, persistor)}</html>
val css = getClass.getClassLoader.getResourceAsStream("css/index.css")
try {
val reader = new BufferedReader(new InputStreamReader(css))
printToFile(new File(s"$resultdir${sep}report${sep}lib${sep}index.css")) { p =>
var line = ""
while (line != null) {
p.println(line)
line = reader.readLine()
}
}
} finally {
css.close()
}
printToFile(new File(s"$resultdir${sep}report${sep}index.html")) {
_.println(report.toString)
}
true
}
def printToFile(f: java.io.File)(op: java.io.PrintWriter => Unit) {
val p = new java.io.PrintWriter(f)
try { op(p) } finally { p.close() }
}
}
object HtmlReporter {
trait Renderer {
def render(context: Context, curves: Seq[CurveData], hs: Seq[History]): Node
}
object Renderer {
def regression = Seq(Info(), Chart(ChartReporter.ChartFactory.XYLine()))
def basic = Seq(Info(), BigO(), Chart(ChartReporter.ChartFactory.XYLine()))
case class Info() extends Renderer {
def render(context: Context, curves: Seq[CurveData], hs: Seq[History]): Node =
<div>Info:
<ul>
<li>Number of runs: {context.goe(exec.benchRuns, "")}</li>
<li>Executor: {context.goe(dsl.executor, "")}</li>
</ul>
</div>
}
case class BigO() extends Renderer {
def render(context: Context, curves: Seq[CurveData], hs: Seq[History]): Node =
<div>Big O analysis:
<ul>
{
for (cd <- curves) yield <li>
{cd.context.goe(dsl.curve, "")}: {cd.context.goe(reports.bigO, "(no data)")}
</li>
}
</ul>
</div>
}
case class Chart(factory: ChartReporter.ChartFactory) extends Renderer {
def render(context: Context, curves: Seq[CurveData], hs: Seq[History]): Node = {
val resultdir = context.goe(reports.resultDir, "tmp")
val scopename = context.scope
val chart = factory.createChart(scopename, curves, hs)
val chartfile = new File(s"$resultdir${File.separator}report${File.separator}images${File.separator}$scopename.png")
ChartUtilities.saveChartAsPNG(chartfile, chart, 1600, 1200)
<div>
<p>Chart:</p>
<a href={"images/" + scopename + ".png"}>
<img src={"images/" + scopename + ".png"} alt={scopename} width="800" height="600"></img>
</a>
</div>
}
}
case class HistoryList() extends Renderer {
def render(context: Context, curves: Seq[CurveData], hs: Seq[History]): Node = {
// TODO
<div>
</div>
}
}
case class Regression(factory: ChartReporter.ChartFactory, colors: Seq[Color]) extends Renderer {
def render(context: Context, curves: Seq[CurveData], hs: Seq[History]): Node = {
val resultdir = context.goe(reports.resultDir, "tmp")
val scopename = context.scope
val chart = factory.createChart(scopename, curves, hs, colors)
val chartfile = new File(s"$resultdir${File.separator}report${File.separator}images${File.separator}$scopename.png")
ChartUtilities.saveChartAsPNG(chartfile, chart, 1600, 1200)
<div>
<p>Chart:</p>
<a href={"images/" + scopename + ".png"}>
<img src={"images/" + scopename + ".png"} alt={scopename} width="800" height="600"></img>
</a>
</div>
}
}
case class Histogram(factory: ChartReporter.ChartFactory, colors: Seq[Color]) extends Renderer {
def render(context: Context, curves: Seq[CurveData], hs: Seq[History]): Node = {
<div>
</div>
}
}
}
}
| lossyrob/scalpel | src/main/scala/org/scalameter/reporting/HtmlReporter.scala | Scala | bsd-3-clause | 5,834 |
package org.beaucatcher.bobject
import org.beaucatcher.bobject.Implicits._
import org.beaucatcher.bson._
import org.beaucatcher.mongo._
import org.beaucatcher.caseclass.ClassAnalysis
/**
* The idea of this trait is to implement a backbone.js-style set of methods
* for a MongoDB collection. backbone.js just seems like a good "spec" for
* a set of CRUD methods in REST format. You can pretty easily adapt this
* to map to HTTP differently, though.
*
* See http://documentcloud.github.com/backbone/#Sync
*
* My still-untested interpretation follows.
*
* CRUD maps to REST like this:
* create = POST /collection
* read = GET /collection[/id]
* update = PUT /collection/id
* delete = DELETE /collection/id
*
* Create and Update should return the modified fields (the fields different from those passed in)
* though they are allowed to return unmodified fields, with an efficiency cost.
*
* Read should always return all fields of the object. Without the /id, it returns a JSON array of all
* objects.
*
* Delete I guess should return an empty body, though I'm not sure what backbone.js expects.
*
* FIXME: This class probably needs some way to pass in application-specific data (such as
* the currently-logged-in-user) to the various methods. I guess you could use thread local
* but maybe there's something cleaner.
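*
* A minimal sketch of a concrete implementation (purely illustrative; the member names below are the
* ones declared in this trait, but the type parameter and bodies are assumptions left unimplemented):
* {{{
* object PersonJsonMethods extends JsonMethods[Person] {
*   protected def jsonSync(implicit context: Context) = ??? // bind to the collection storing Person
*   protected def jsonAnalysis = ??? // ClassAnalysis for Person
*   protected def createQueryForAllObjects(): BObject = ??? // e.g. restrict to the current user's objects
* }
* }}}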
*/
trait JsonMethods[SchemaType <: Product] {
/** Point this to a Collection to use to store BObject */
protected def jsonSync(implicit context: Context): BoundSyncCollection[BObject, BObject, BObject, _, _]
/** Since we're a trait, we don't have a manifest and you have to provide this */
protected def jsonAnalysis: ClassAnalysis[SchemaType]
/** If you want to override the JSON flavor, do so here */
protected val jsonFlavor: JsonFlavor.Value = JsonFlavor.CLEAN
/**
* This method creates the query that will be used to
* find an existing object, based on the URL path segment
* used in the HTTP request.
*
* By default it assumes the path segment is an ObjectId
* stored in the "_id" field. (In the future, the default
* may be smart and look at the type of the _id field
* in the schema case class.)
*
* To change that, override here.
*/
protected def createQueryForObject(path: String): BObject = {
BObject("_id" -> BObjectId(ObjectId(path)))
}
/**
* This method creates the query that will be used to list
* all of the objects in the collection. By default this
* isn't implemented, because it's unlikely anyone wants
* an API that returns an entire unfiltered MongoDB collection.
* More likely, you want to filter by user or something, which
* you can do by overriding this. This is abstract to be sure
* you consider the issues.
*/
protected def createQueryForAllObjects(): BObject
/**
* This function should convert a trailing path component to the JValue representation
* of the object ID, or throw a JsonValidationException if the path is not a well-formed ID value.
* By default, this validates an ObjectId and converts it to a BString, but you can override
* if your object's ID is not an ObjectId.
*
* (In the future, the default
* may be smart and look at the type of the _id field
* in the schema case class.)
*/
protected def parseJValueIdFromPath(path: String): JValue = {
try {
ObjectId(path)
} catch {
case _ => throw new JsonValidationException("not a valid id: " + path)
}
BString(path)
}
/**
* When creating an object, this function generates a new ID for it. By default,
* it creates a new ObjectId as a BString. Override this if your ID type is
* not ObjectId.
*/
protected def generateJValueId(): JValue = {
BString(ObjectId().toString)
}
/**
* Override this if you want to modify JSON that's coming in. You can
* also throw a JsonValidationException from here. This method will
* be called before validating the JSON against the case class schema.
* This method must return an object, not any other kind of JValue.
*
* By default this method forces the JValue to be a JObject and
* adds an "_id" field if there wasn't one before. Also by default
* if there's a path passed in, it's used for the _id and must match
* any existing _id. This method's default implementation invokes
* parseJValueIdFromPath() and generateJValueId(); usually you would
* override those to modify your ID type.
*
* In this method, you could also "fix up" JSON (for example add default values for missing fields)
* before the JSON gets validated.
*
* This method is called for creating and for updating.
*/
protected def modifyIncomingJson(path: Option[String], jvalue: JValue): JObject = {
jvalue match {
case jobject: JObject => {
if (jobject.contains("_id")) {
if (path.isDefined) {
val idInObject: JValue = jobject.get("_id").get
if (parseJValueIdFromPath(path.get) != idInObject)
throw new JsonValidationException("Posted JSON containing _id %s to path %s".format(idInObject, path.get))
}
jobject
} else {
val id: JValue = {
if (path.isDefined)
parseJValueIdFromPath(path.get)
else
generateJValueId()
}
jobject + ("_id", id)
}
}
case _ => throw new JsonValidationException("JSON value must be an object i.e. enclosed in {}")
}
}
/**
* Override this if you want to modify the BSON after it's
* validated against the case class schema and converted from
* JSON. You can also throw a JsonValidationException from here.
* So you can use this to do additional validation, or to add or remove
* or rename fields before storing in the database. This is called for
* creating and for updating.
*/
protected def modifyIncomingBson(bobject: BObject): BObject = {
bobject
}
/**
* Override this to modify BSON on its way out. Called when reading
* and when returning an object from create and update.
*/
protected def modifyOutgoingBson(bobject: BObject): BObject = {
bobject
}
/**
* Override this to modify JSON on its way out. Called when reading
* and when returning an object from create and update.
*/
protected def modifyOutgoingJson(jobject: JObject): JObject = {
jobject
}
// FIXME we want some way to return only changed fields, at least
// to the extent we can do that without doing another query and being
// slow.
private def outgoingString(bobject: BObject): String = {
modifyOutgoingJson(modifyOutgoingBson(bobject).toJValue(jsonFlavor)).toJson()
}
/**
* This function is intended to implement HTTP POST to
* a URL like "/collection", creating a new object. It returns
* a JSON string with the new object, mostly so you can see the
* newly-created ID for the object.
* See updateJson for most other details.
*/
def createJson(json: String)(implicit context: Context): String = {
val bobject = parseJson(None, json)
jsonSync.save(bobject)
// FIXME here in theory we only have to return the new ID
outgoingString(bobject)
}
/**
* This function is intended to implement HTTP PUT (updating an object).
* The provided path should be the last segment of the path, or whatever part
* identifies the object. The JSON will be validated and then stored in the
* database.
*
* By default the assumption is that "path" is an ObjectId string which
* corresponds to the _id field in the object.
* You can modify the pipeline from JSON to BSON, including the path-to-ID mapping,
* by overriding the modifyIncomingJson() or modifyIncomingBson() hooks.
*
* The new object is returned back as a JSON string.
* The returned-back new object is modified by
* the same methods that affect getJson()'s returned object.
*/
def updateJson(path: String, json: String)(implicit context: Context): String = {
val bobject = parseJson(Some(path), json)
jsonSync.update(createQueryForObject(path), bobject)
// FIXME here in theory we only have to return the changed fields
outgoingString(bobject)
}
/**
* This function is intended to implement HTTP GET, retrieving an object
* or list of objects.
*
* By default the assumption is that "path" is an ObjectId string which corresponds
* to the _id field in the object. You can change this assumption by overriding
* the pipeline from BSON to JSON, i.e. the modifyOutgoingBson() and modifyOutgoingJson()
* hooks.
*
* If no path is provided, then this reads all objects as defined by createQueryForAllObjects().
*/
def readJson(path: Option[String])(implicit context: Context): Option[String] = {
if (path.isDefined) {
// GET a single ID
jsonSync.findOne(createQueryForObject(path.get)) match {
case Some(bobject) =>
Some(outgoingString(bobject))
case _ =>
None
}
} else {
// GET all objects
val all = jsonSync.find(createQueryForAllObjects())
val b = JArray.newBuilder
for (o <- all) {
b += modifyOutgoingJson(modifyOutgoingBson(o).toJValue(jsonFlavor))
}
Some(b.result.toJson())
}
}
/**
* Intended to implement HTTP DELETE, deletes the object identified by the path.
*/
def deleteJson(path: String)(implicit context: Context): Unit = {
jsonSync.remove(createQueryForObject(path))
}
private def fromJValue(path: Option[String], jvalue: JValue): BObject = {
val jobject: JObject = modifyIncomingJson(path, jvalue)
BValue.fromJValue(jobject, jsonAnalysis, jsonFlavor) match {
case bobject: BObject =>
modifyIncomingBson(bobject)
case wtf =>
throw new JsonValidationException("JSON must be an object \"{...}\" not " + wtf.getClass.getName)
}
}
private def parseJson(path: Option[String], json: String): BObject = {
fromJValue(path, JValue.parseJson(json))
}
/**
* Parses JSON against the schema, calling modify hooks in the same way as createJson(), i.e.
* the parsed JSON ends up as it would normally be stored in MongoDB.
*/
def parseJson(json: String): BObject = {
parseJson(None, json)
}
/**
* Parses JSON containing an array of objects, validating each one against the
* schema. The parsed objects end up as they would normally be stored in MongoDB
* by createJson() or updateJson()
*/
def parseJsonArray(json: String): BArray = {
val jvalue = JValue.parseJson(json)
jvalue match {
case jarray: JArray =>
val b = BArray.newBuilder
for (o <- jarray) {
b += fromJValue(None, o)
}
b.result
case wtf =>
throw new JsonValidationException("JSON must be an array of objects \"[...]\" not " + wtf.getClass.getName)
}
}
}
|
havocp/beaucatcher
|
bobject/src/main/scala/org/beaucatcher/bobject/JsonMethods.scala
|
Scala
|
apache-2.0
| 11,782
|
package ru.pavkin.todoist.api.core.model
import org.scalacheck.Arbitrary._
import org.scalacheck.Gen
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FunSuite, Matchers}
class ProjectColorSpec extends FunSuite with Matchers with GeneratorDrivenPropertyChecks {
test("ProjectColor.unsafeBy returns color if it exists") {
forAll(Gen.choose(0, 21)) { (n: Int) =>
whenever(n >= 0 && n <= 21) {
noException should be thrownBy ProjectColor.unsafeBy(n)
ProjectColor.unsafeBy(n).isPremium shouldBe n >= 12
}
}
forAll { (n: Int) =>
whenever(n < 0 || n > 21) {
an[Exception] should be thrownBy ProjectColor.unsafeBy(n)
}
}
}
}
|
vpavkin/scalist
|
tests/src/test/scala/ru/pavkin/todoist/api/core/model/ProjectColorSpec.scala
|
Scala
|
mit
| 718
|
package org.pinky.core
import com.google.inject.servlet.ServletModule
import com.google.inject.Injector
import org.scalatest.Spec
import org.scalatest.matchers.ShouldMatchers
import org.pinky.guice.PinkyServletContextListener
/**
* Created by IntelliJ IDEA.
* User: phausel
* Date: Jan 21, 2009
* Time: 2:09:37 PM
* To change this template use File | Settings | File Templates.
*/
class PinkyServletContextListenerTest extends Spec with ShouldMatchers {
describe("A Servlet Context Listener") {
it("should_fail_since_modules_are_not_populated") {
var exceptionIsThrown = false
try {
val f = new PinkyServletContextListener() {
def getInjectorPublic(): Injector = {
super.getInjector
}
}
val i = f.getInjectorPublic
i.hashCode()
} catch {
case ex: NullPointerException => exceptionIsThrown = true
case _ =>
}
exceptionIsThrown should be (true)
}
it("should_fail_pass") {
val f = new PinkyServletContextListener() {
def getInjectorPublic(): Injector = {
super.getInjector
}
}
f.modules = Array(new ServletModule() {})
val i = f.getInjectorPublic
i.getClass.getName should equal("com.google.inject.internal.InjectorImpl")
}
}
}
|
d5nguyenvan/pinky
|
src/test/scala/org/pinky/core/PinkyServletContextListenerTest.scala
|
Scala
|
bsd-3-clause
| 1,328
|
package com.github.morikuni.locest.frequency.application.service
import com.github.morikuni.locest.frequency.application.dto.MorphemeDto
import scala.concurrent.Future
trait MorphologicalAnalysisService {
  /** Performs morphological analysis.
   *
   * @param sentence the sentence to perform morphological analysis on
   * @return Future.successful(Seq(MorphemeDto)) on success,
   *         Future.failed(IOException) when input/output fails
   */
def morphologicalAnalysis(sentence: String): Future[Seq[MorphemeDto]]
}
trait DependMorphologicalAnalysisService {
def morphologicalAnalysisService: MorphologicalAnalysisService
}
|
morikuni/locest
|
frequency/app/com/github/morikuni/locest/frequency/application/service/MorphologicalAnalysisService.scala
|
Scala
|
mit
| 620
|
package sampler.abc.actor.children
import akka.testkit.TestKit
import org.scalatest.FreeSpecLike
import akka.actor.ActorSystem
class ReceiveActorTest extends TestKit(ActorSystem("ABC-test")) with FreeSpecLike {
"TODO" in fail("TODO")
}
|
tearne/Sampler
|
sampler-abc/src/test/scala/sampler/abc/actor/children/ReceiveActorTest.scala
|
Scala
|
apache-2.0
| 239
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.tstreams.common
import org.scalatest.{FlatSpec, Matchers}
/**
* Created by Ivan Kudryavtsev on 11.06.17.
*/
class RoundRobinPartitionIterationPolicyTests extends FlatSpec with Matchers {
val PARTITIONS_COUNT = 3
it should "operate correctly over partitions" in {
val partitions = Set(0,1,2)
val roundRobinPolicy = new RoundRobinPartitionIterationPolicy(PARTITIONS_COUNT, partitions)
Seq(0, 1, 2, 0, 1, 2)
.foreach(partitionExpectation => roundRobinPolicy.getNextPartition() shouldBe partitionExpectation)
}
}
|
bwsw/t-streams
|
src/test/scala/com/bwsw/tstreams/common/RoundRobinPartitionIterationPolicyTests.scala
|
Scala
|
apache-2.0
| 1,365
|
/*
* Copyright 2013 Werner Punz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.werpu.scalaelresolver.scalaelresolvertest
import javax.faces.bean.{RequestScoped, ManagedBean}
/**
* bean2
*/
@ManagedBean
@RequestScoped
class Bean2 {
var bean2test1 = "hello world from bean2 test1"
var bean2test2 = "hello world from bean2 test2"
}
|
werpu/scalaelresolver
|
elresolvertest/src/main/scala/com/github/werpu/scalaelresolver/scalaelresolvertest/Bean2.scala
|
Scala
|
apache-2.0
| 870
|
package com.bayesianwitch.injera.deduplication
import scala.collection.mutable.HashMap
import com.google.common.cache._
import java.util.concurrent.TimeUnit
import com.google.common.hash.{BloomFilter,Funnel, PrimitiveSink}
class SimpleRepeatAvoider[T <: Object] extends RepeatAvoider[T] {
//This will likely overload your memory
private val cache = new HashMap[T,T]()
protected def check(k: T) = cache.get(k).isEmpty
protected def set(k: T) = cache += (k -> k)
}
class TimeSpaceLimitedRepeatAvoider[T <: Object](maximumSize: Long = 1024, expireTime: Int = 10) extends RepeatAvoider[T] {
private val cache: Cache[T,T] = CacheBuilder.newBuilder().maximumSize(maximumSize).expireAfterWrite(expireTime, TimeUnit.MINUTES).build();
protected def check(k: T) = (cache.getIfPresent(k) == null)
protected def set(k: T) = cache.put(k,k)
}
class BloomFilterRepeatAvoider[T <: Object](maxSize: Int=1024*16, errorProb: Double = 1e-4)(implicit funnel: Funnel[T]) extends RepeatAvoider[T] {
private val bloomFilter = BloomFilter.create(funnel, maxSize, errorProb)
protected def set(k: T) = bloomFilter.put(k)
protected def check(k: T) = !bloomFilter.mightContain(k)
}
|
bayesianwitch/injera
|
src/main/scala/injera/deduplication/RepeatAvoiders.scala
|
Scala
|
gpl-3.0
| 1,177
|
package org.rplsd.condalang.data
/**
* Created by Luqman on 11/28/2015.
*/
case class RecipeJumlah (nama_resep:String, jumlah:Int)
object RecipeJumlah {
def nama_resep = "nama_resep"
def jumlah = "jumlah"
}
|
luqmankusnadi/Tugas-RPLSD-DSL
|
src/main/scala/org/rplsd/condalang/data/RecipeJumlah.scala
|
Scala
|
mit
| 217
|
package org.kirhgoff.ap.core
import org.kirhgoff.ap.model.lifegame.LifeGameElement
trait Element {
//TODO remove from element
def x:Int
def y:Int
def getStrategy(environment: Environment): Strategy
def isAlive: Boolean
}
class EmptyElement(val x:Int, val y:Int) extends Element {
override def getStrategy(environment: Environment): Strategy = DoNothingStrategy(this)
override def isAlive: Boolean = false
}
|
kirhgoff/life-server
|
app/org/kirhgoff/ap/core/Element.scala
|
Scala
|
mit
| 428
|
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.torch
import com.intel.analytics.bigdl.nn.SoftSign
import com.intel.analytics.bigdl.tensor.Tensor
import scala.util.Random
@com.intel.analytics.bigdl.tags.Serial
class SoftSignSpec extends TorchSpec {
"A SoftSign 3D input" should "generate correct output and grad" in {
torchCheck()
val layer = new SoftSign[Double, Double]()
val input = Tensor[Double](2, 3, 4).apply1(_ => Random.nextDouble())
val gradOutput = Tensor[Double](2, 3, 4).apply1(_ => Random.nextDouble())
val start = System.nanoTime()
val output = layer.forward(input)
val gradInput = layer.backward(input, gradOutput)
val end = System.nanoTime()
val scalaTime = end - start
val code = "module = nn.SoftSign()\\n" +
"output = module:forward(input)\\n" +
"gradInput = module:backward(input,gradOutput)"
val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
Array("output", "gradInput"))
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val luaGradInput = torchResult("gradInput").asInstanceOf[Tensor[Double]]
output should be (luaOutput)
gradInput should be (luaGradInput)
println("Test case : SoftSign, Torch : " + luaTime + " s, Scala : " + scalaTime / 1e9 + " s")
}
"A SoftSign 4D input" should "generate correct output and grad" in {
torchCheck()
val layer = new SoftSign[Double, Double]()
val input = Tensor[Double](5, 4, 3, 2).apply1(_ => Random.nextDouble())
val gradOutput = Tensor[Double](5, 4, 3, 2).apply1(_ => Random.nextDouble())
val start = System.nanoTime()
val output = layer.forward(input)
val gradInput = layer.backward(input, gradOutput)
val end = System.nanoTime()
val scalaTime = end - start
val code = "module = nn.SoftSign()\\n" +
"output = module:forward(input)\\n" +
"gradInput = module:backward(input,gradOutput)"
val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
Array("output", "gradInput"))
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val luaGradInput = torchResult("gradInput").asInstanceOf[Tensor[Double]]
output should be (luaOutput)
gradInput should be (luaGradInput)
println("Test case : SoftSign, Torch : " + luaTime + " s, Scala : " + scalaTime / 1e9 + " s")
}
}
|
jenniew/BigDL
|
spark/dl/src/test/scala/com/intel/analytics/bigdl/torch/SoftSignSpec.scala
|
Scala
|
apache-2.0
| 3,006
|
package scredis.io
import java.util.concurrent.locks.ReentrantLock
import akka.actor._
import scredis.exceptions._
import scredis.protocol._
import scredis.util.UniqueNameGenerator
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.Try
/**
 * This abstract class represents a blocking connection to a `Redis` server.
*/
abstract class AkkaBlockingConnection(
system: ActorSystem,
host: String,
port: Int,
authOpt: Option[AuthConfig],
database: Int,
nameOpt: Option[String],
decodersCount: Int,
connectTimeout: FiniteDuration,
maxWriteBatchSize: Int,
tcpSendBufferSizeHint: Int,
tcpReceiveBufferSizeHint: Int,
akkaListenerDispatcherPath: String,
akkaIODispatcherPath: String,
akkaDecoderDispatcherPath: String,
failCommandOnConnecting:Boolean
) extends AbstractAkkaConnection(
system = system,
host = host,
port = port,
authOpt = authOpt,
database = database,
nameOpt = nameOpt,
decodersCount = decodersCount,
receiveTimeoutOpt = None,
connectTimeout = connectTimeout,
maxWriteBatchSize = maxWriteBatchSize,
tcpSendBufferSizeHint = tcpSendBufferSizeHint,
tcpReceiveBufferSizeHint = tcpReceiveBufferSizeHint,
akkaListenerDispatcherPath = akkaListenerDispatcherPath,
akkaIODispatcherPath = akkaIODispatcherPath,
akkaDecoderDispatcherPath = akkaDecoderDispatcherPath
) with BlockingConnection {
private val lock = new ReentrantLock()
protected val listenerActor = system.actorOf(
Props(
classOf[ListenerActor],
host,
port,
authOpt,
database,
nameOpt,
decodersCount,
receiveTimeoutOpt,
connectTimeout,
maxWriteBatchSize,
tcpSendBufferSizeHint,
tcpReceiveBufferSizeHint,
akkaIODispatcherPath,
akkaDecoderDispatcherPath,
failCommandOnConnecting
).withDispatcher(akkaListenerDispatcherPath),
UniqueNameGenerator.getUniqueName(s"${nameOpt.getOrElse(s"$host-$port")}-listener-actor")
)
private def withLock[A](f: => A): A = {
if (lock.tryLock) {
try {
f
} finally {
lock.unlock()
}
} else {
throw RedisIOException("Trying to send request on a blocked connection")
}
}
override protected[scredis] def sendBlocking[A](request: Request[A])(
implicit timeout: Duration
): Try[A] = withLock {
logger.debug(s"Sending blocking request: $request")
updateState(request)
val future = Protocol.send(request, listenerActor)
Try(Await.result(future, timeout))
}
}
|
scredis/scredis
|
src/main/scala/scredis/io/AkkaBlockingConnection.scala
|
Scala
|
apache-2.0
| 2,533
|
package io.github.yzernik.bitcoinscodec.structures
import io.github.yzernik.bitcoinscodec.CodecSuite
import scodec.bits.ByteVector
class InvVectSpec extends CodecSuite {
import InvVect._
"InvVect codec" should {
"roundtrip" in {
roundtrip(InvVect(ERROR, Hash(ByteVector.fill(32)(0x42))))
roundtrip(InvVect(MSG_TX, Hash(ByteVector.fill(32)(0x42))))
roundtrip(InvVect(MSG_BLOCK, Hash(ByteVector.fill(32)(0x43))))
}
}
}
|
yzernik/bitcoin-scodec
|
src/test/scala/io/github/yzernik/bitcoinscodec/structures/InvVectSpec.scala
|
Scala
|
mit
| 453
|
/**
* Copyright 2013 Israel Freitas (israel.araujo.freitas@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package brain.web
import javax.servlet.ServletContextListener
import javax.servlet.ServletContextEvent
import brain.db.OrientDbServer
import brain.models.ProgramD
import java.net.URL
import org.aitools.programd.Core
import java.net.MalformedURLException
import org.aitools.util.resource.URLTools
import brain.config.Config
class WebAppListener extends ServletContextListener {
def contextInitialized(servletContextEvent:ServletContextEvent) {
//System.setProperty("run.mode", "production")
Config.load
OrientDbServer.start
ProgramD.prepare
val theURL = getBaseURL(servletContextEvent);
val core = new Core(theURL, URLTools.contextualize(theURL, Config.getProgramDCoreFilePath));
ProgramD.start(core);
}
def contextDestroyed(servletContextEvent:ServletContextEvent) {
ProgramD.shutdown
OrientDbServer.stop
}
def getBaseURL(servletContextEvent:ServletContextEvent):URL = {
try {
return servletContextEvent.getServletContext().getResource("/");
}
catch {
case e:MalformedURLException => {
servletContextEvent.getServletContext().log("Unable to get the base url.", e);
}
return null;
}
}
}
|
ifreitas/brain
|
src/main/scala/brain/web/WebAppListener.scala
|
Scala
|
apache-2.0
| 1,823
|
package com.rouesnel.typedsql.examples
package coppersmith
import org.apache.hadoop.fs.Path
import com.twitter.scalding.{Config, Execution, TypedPipe}
import org.joda.time.DateTime
import commbank.coppersmith.api.{DataSource => _, _}
import scalding._
import Coppersmith._
import EavtText.{EavtEnc, eavtByDay}
import com.rouesnel.typedsql._
import com.rouesnel.typedsql.examples.coppersmith.TypedSqlCoppersmithExample.{Parameters, Sources}
import commbank.coppersmith.thrift.Eavt
import com.rouesnel.typedsql.DataSource
@SqlQuery
object TypedSqlCoppersmithExample {
def query(minimumAge: Int)(customers: Unpartitioned[Customer],
orders: Unpartitioned[Order],
orderLineItems: Unpartitioned[OrderLineItem],
payments: Unpartitioned[Payment]) =
"""
SELECT c.customer_id as customer_id,
c.age as age,
COUNT(o.order_id) as number_of_orders,
SUM(oli.item_discount * oli.item_quantity) as amount_discounted,
SUM(oli.item_price * oli.item_quantity) as gross_amount_spent
FROM ${customers} c
LEFT OUTER JOIN ${orders} o ON c.customer_id = o.customer_id
LEFT OUTER JOIN ${orderLineItems} oli ON o.order_id = oli.order_id
WHERE c.age > ${minimumAge}
GROUP BY c.customer_id, c.age
"""
object Features extends FeatureSet[Row] {
val namespace = "typedsql.example"
def entity(row: Row) = row.customerId.toString
val source = From[Row]() // FeatureSource (see above)
val select = source.featureSetBuilder(namespace, entity)
val customerAge = select(_.age.toInt)
.asFeature(Continuous, "CUSTOMER_AGE", Some(MinMaxRange(0, 130)), "Age of customer in years")
val orderCount = select(_.numberOfOrders)
.asFeature(Continuous, "ORDER_COUNT", "Number of orders the customer has made.")
val grossAmountSpent = select(_.grossAmountSpent).asFeature(
Continuous,
"GROSS_AMOUNT_SPENT",
"Amount spent by the customer (before discounts).")
val amountDiscounted = select(_.amountDiscounted).asFeature(
Continuous,
"MONEY_SAVED",
"Amount of money saved by the customer due to discounts on ordered items.")
val amountSpent = select(row => row.grossAmountSpent - row.amountDiscounted).asFeature(
Continuous,
"NET_AMOUNT_SPENT",
"Amount spent by the customer (reduced by any discounts). Also known as revenue generated by customer.")
val features = List(customerAge, orderCount, grossAmountSpent, amountDiscounted, amountSpent)
}
}
|
laurencer/typedsql
|
examples/src/main/scala/com/rouesnel/typedsql/examples/CoppersmithExample.scala
|
Scala
|
apache-2.0
| 2,632
|
/**
* Author:
* Yujian Zhang <yujian{dot}zhang[at]gmail(dot)com>
* Description:
* Definition related to bitboard.
* License:
* GNU General Public License v2
* http://www.gnu.org/licenses/gpl-2.0.html
* Copyright (C) 2010-2012 Yujian Zhang
*/
package net.whily.unichess.engine
/// **********************************************************************
/// For details on the definitions of squares, files and ranks, please refer to
/// http://chessprogramming.wikispaces.com/Square+Mapping+Considerations
// Little endian rank-file (LERF) mapping.
object Square extends Enumeration {
type Square = Value
val A1, B1, C1, D1, E1, F1, G1, H1,
A2, B2, C2, D2, E2, F2, G2, H2,
A3, B3, C3, D3, E3, F3, G3, H3,
A4, B4, C4, D4, E4, F4, G4, H4,
A5, B5, C5, D5, E5, F5, G5, H5,
A6, B6, C6, D6, E6, F6, G6, H6,
A7, B7, C7, D7, E7, F7, G7, H7,
A8, B8, C8, D8, E8, F8, G8, H8 = Value
implicit def squareToInt(x: Square): Int = x.id
}
// Little endian mapping of files.
object File extends Enumeration {
type File = Value
val FileA, FileB, FileC, FileD, FileE, FileF, FileG, FileH = Value
implicit def fileToInt(x: File): Int = x.id
}
// Little endian mapping of ranks.
object Rank extends Enumeration {
type Rank = Value
val Rank1, Rank2, Rank3, Rank4, Rank5, Rank6, Rank7, Rank8 = Value
implicit def rankToInt(x: Rank): Int = x.id
}
// BitBoard shifts.
object Shift {
private val notAFile = 0xfefefefefefefefeL
private val notABFile = 0xfcfcfcfcfcfcfcfcL
private val notHFile = 0x7f7f7f7f7f7f7f7fL
private val notGHFile = 0x3f3f3f3f3f3f3f3fL
// One step shift, post shift masks.
// http://chessprogramming.wikispaces.com/General+Setwise+Operations#OneStepOnly
def soutOne(b: Long): Long = b >>> 8
def nortOne(b: Long): Long = b << 8
def eastOne(b: Long): Long = (b & notHFile) << 1
def noEaOne(b: Long): Long = (b & notHFile) << 9
def soEaOne(b: Long): Long = (b & notHFile) >>> 7
def westOne(b: Long): Long = (b & notAFile) >>> 1
def soWeOne(b: Long): Long = (b & notAFile) >>> 9
def noWeOne(b: Long): Long = (b & notAFile) << 7
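  // For example, without the notHFile mask, eastOne would move a piece on H1 (bit 7) to A2 (bit 8),
  // since "east" is just a left shift by one in LERF mapping; masking out the H file first discards
  // such bits so they cannot wrap onto the next rank.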
// Knight moves: http://chessprogramming.wikispaces.com/Knight+Pattern
def noNoEa(b: Long): Long = (b & notHFile) << 17
def noEaEa(b: Long): Long = (b & notGHFile) << 10
def soEaEa(b: Long): Long = (b & notGHFile) >>> 6
def soSoEa(b: Long): Long = (b & notHFile) >>> 15
def noNoWe(b: Long): Long = (b & notAFile) << 15
def noWeWe(b: Long): Long = (b & notABFile) << 6
def soWeWe(b: Long): Long = (b & notABFile) >>> 10
def soSoWe(b: Long): Long = (b & notAFile) >>> 17
}
/// **********************************************************************
/// Position definition.
object Piece extends Enumeration {
type Piece = Value
val Occupied, Pawn, Knight, Bishop, Rook, Queen, King = Value
implicit def pieceToInt(x: Piece): Int = x.id
}
import Piece.Piece
// In unit of centipawns.
object PieceV {
val EmptyV = 0
val PawnV = 100
val KnightV = 300
val BishopV = 300
val RookV = 500
val QueenV = 900
val KingV = 9900
}
object Color extends Enumeration {
type Color = Value
val Black, White = Value
implicit def colorToInt(x: Color) = x.id
}
import Color.Color
/* Bitboards for one side. */
abstract class Pieces {
val pieces: Array[Long] // Size is 7
}
/**
 * Defined according to the FEN definition in
 * http://en.wikipedia.org/wiki/Forsyth-Edwards_Notation
*/
abstract class Position {
var allPieces: Array[Pieces] // Size is 2
var activeColor: Color
var castlingAvailability: Int
var enPassantTarget: Int
var halfMoveClock: Int
var fullMoveNumber: Int
def toFEN: String
}
|
whily/unichess
|
src/main/scala/net/whily/unichess/engine/BitBoard.scala
|
Scala
|
gpl-2.0
| 3,685
|
package com.twitter.finagle.netty4.pushsession
import com.twitter.finagle.{Stack, param}
import com.twitter.finagle.netty4.channel.RawNetty4ClientChannelInitializer
import com.twitter.finagle.netty4.ConnectionBuilder
import com.twitter.finagle.pushsession.{PushChannelHandle, PushSession, PushTransporter}
import com.twitter.util.Future
import io.netty.channel.{Channel, ChannelPipeline}
import java.net.SocketAddress
object Netty4PushTransporter {
private[this] def build[In, Out](
protocolInit: ChannelPipeline => Unit,
addr: SocketAddress,
params: Stack.Params
)(
implicit mOut: Manifest[Out]
): PushTransporter[In, Out] =
new Netty4PushTransporter[In, Out](_ => (), protocolInit, addr, params)
/**
* `Transporter` constructor for protocols that need direct access to the netty pipeline
* (e.g. finagle-http)
*
* @note this factory method makes no assumptions about reference counting
* of `ByteBuf` instances.
*/
def raw[In, Out](
protocolInit: ChannelPipeline => Unit,
addr: SocketAddress,
params: Stack.Params
)(
implicit mOut: Manifest[Out]
): PushTransporter[In, Out] = {
build[In, Out](protocolInit, addr, params)
}
}
class Netty4PushTransporter[In, Out](
transportInit: ChannelPipeline => Unit,
protocolInit: ChannelPipeline => Unit,
val remoteAddress: SocketAddress,
params: Stack.Params)
extends PushTransporter[In, Out] {
private[this] val builder = new ConnectionBuilder(
new RawNetty4ClientChannelInitializer(transportInit, params),
remoteAddress,
params
)
/**
* Create a future [[PushSession]]
*
* Subtypes can override this behavior to add functionality such as
* interacting with the Netty4 `Channel`.
*/
protected def initSession[T <: PushSession[In, Out]](
channel: Channel,
protocolInit: ChannelPipeline => Unit,
sessionBuilder: PushChannelHandle[In, Out] => Future[T]
): Future[T] = {
val statsReceiver = params[param.Stats].statsReceiver
val (_, sessionF) =
Netty4PushChannelHandle.install[In, Out, T](
channel,
protocolInit,
sessionBuilder,
statsReceiver
)
sessionF
}
final def apply[T <: PushSession[In, Out]](
sessionBuilder: (PushChannelHandle[In, Out]) => Future[T]
): Future[T] = builder.build(initSession(_, protocolInit, sessionBuilder))
override def toString: String = "Netty4PushTransporter"
}
|
twitter/finagle
|
finagle-netty4/src/main/scala/com/twitter/finagle/netty4/pushsession/Netty4PushTransporter.scala
|
Scala
|
apache-2.0
| 2,445
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Strategy
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.planning._
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.plans.physical._
import org.apache.spark.sql.execution
import org.apache.spark.sql.execution.columnar.{InMemoryRelation, InMemoryTableScanExec}
import org.apache.spark.sql.execution.command._
import org.apache.spark.sql.execution.exchange.ShuffleExchange
import org.apache.spark.sql.execution.joins.{BuildLeft, BuildRight}
import org.apache.spark.sql.execution.streaming._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.StreamingQuery
/**
* Converts a logical plan into zero or more SparkPlans. This API is exposed for experimenting
* with the query planner and is not designed to be stable across spark releases. Developers
* writing libraries should instead consider using the stable APIs provided in
* [[org.apache.spark.sql.sources]]
*/
abstract class SparkStrategy extends GenericStrategy[SparkPlan] {
override protected def planLater(plan: LogicalPlan): SparkPlan = PlanLater(plan)
}
case class PlanLater(plan: LogicalPlan) extends LeafExecNode {
override def output: Seq[Attribute] = plan.output
protected override def doExecute(): RDD[InternalRow] = {
throw new UnsupportedOperationException()
}
}
abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
self: SparkPlanner =>
/**
* Plans special cases of limit operators.
*/
object SpecialLimits extends Strategy {
override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
case ReturnAnswer(rootPlan) => rootPlan match {
case Limit(IntegerLiteral(limit), Sort(order, true, child)) =>
TakeOrderedAndProjectExec(limit, order, child.output, planLater(child)) :: Nil
case Limit(IntegerLiteral(limit), Project(projectList, Sort(order, true, child))) =>
TakeOrderedAndProjectExec(limit, order, projectList, planLater(child)) :: Nil
case Limit(IntegerLiteral(limit), child) =>
// With whole stage codegen, Spark releases resources only when all the output data of the
// query plan are consumed. It's possible that `CollectLimitExec` only consumes a little
// data from child plan and finishes the query without releasing resources. Here we wrap
// the child plan with `LocalLimitExec`, to stop the processing of whole stage codegen and
// trigger the resource releasing work, after we consume `limit` rows.
CollectLimitExec(limit, LocalLimitExec(limit, planLater(child))) :: Nil
case other => planLater(other) :: Nil
}
case Limit(IntegerLiteral(limit), Sort(order, true, child)) =>
TakeOrderedAndProjectExec(limit, order, child.output, planLater(child)) :: Nil
case Limit(IntegerLiteral(limit), Project(projectList, Sort(order, true, child))) =>
TakeOrderedAndProjectExec(limit, order, projectList, planLater(child)) :: Nil
case _ => Nil
}
}
/**
* Select the proper physical plan for join based on joining keys and size of logical plan.
*
* At first, uses the [[ExtractEquiJoinKeys]] pattern to find joins where at least some of the
* predicates can be evaluated by matching join keys. If found, Join implementations are chosen
* with the following precedence:
*
* - Broadcast: if one side of the join has an estimated physical size that is smaller than the
* user-configurable [[SQLConf.AUTO_BROADCASTJOIN_THRESHOLD]] threshold
* or if that side has an explicit broadcast hint (e.g. the user applied the
* [[org.apache.spark.sql.functions.broadcast()]] function to a DataFrame), then that side
   *     of the join will be broadcasted and the other side will be streamed, with no shuffling
   *     performed. If both sides of the join are eligible to be broadcasted, the right side is
   *     preferred (see the case ordering in apply() below).
* - Shuffle hash join: if the average size of a single partition is small enough to build a hash
* table.
* - Sort merge: if the matching join keys are sortable.
*
* If there is no joining keys, Join implementations are chosen with the following precedence:
* - BroadcastNestedLoopJoin: if one side of the join could be broadcasted
* - CartesianProduct: for Inner join
* - BroadcastNestedLoopJoin
*/
object JoinSelection extends Strategy with PredicateHelper {
/**
* Matches a plan whose output should be small enough to be used in broadcast join.
*/
private def canBroadcast(plan: LogicalPlan): Boolean = {
plan.stats.hints.broadcast ||
(plan.stats.sizeInBytes >= 0 &&
plan.stats.sizeInBytes <= conf.autoBroadcastJoinThreshold)
}
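    // Illustrative only: the "explicit broadcast hint" mentioned in the scaladoc above comes from
    // user code such as the following (the DataFrame names are hypothetical):
    //   import org.apache.spark.sql.functions.broadcast
    //   largeDf.join(broadcast(smallDf), Seq("id"))
    // This marks smallDf's logical plan with a broadcast hint, so canBroadcast() returns true for
    // that side regardless of its estimated size.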
/**
* Matches a plan whose single partition should be small enough to build a hash table.
*
     * Note: this assumes that the number of partitions is fixed; additional work is required if it
     * is dynamic.
*/
private def canBuildLocalHashMap(plan: LogicalPlan): Boolean = {
plan.stats.sizeInBytes < conf.autoBroadcastJoinThreshold * conf.numShufflePartitions
}
/**
* Returns whether plan a is much smaller (3X) than plan b.
*
     * The cost of building a hash map is higher than sorting, so we should only build a hash map
     * on a table that is much smaller than the other one. Since we do not have statistics for the
     * number of rows, we use the size in bytes here as an estimate.
*/
private def muchSmaller(a: LogicalPlan, b: LogicalPlan): Boolean = {
a.stats.sizeInBytes * 3 <= b.stats.sizeInBytes
}
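    // For example, a plan estimated at 10 MB is "much smaller" than a 40 MB plan (10 * 3 <= 40),
    // but not than a 25 MB plan (10 * 3 > 25), so only the former pair would favour a hash build.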
private def canBuildRight(joinType: JoinType): Boolean = joinType match {
case _: InnerLike | LeftOuter | LeftSemi | LeftAnti => true
case j: ExistenceJoin => true
case _ => false
}
private def canBuildLeft(joinType: JoinType): Boolean = joinType match {
case _: InnerLike | RightOuter => true
case _ => false
}
def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
// --- BroadcastHashJoin --------------------------------------------------------------------
case ExtractEquiJoinKeys(joinType, leftKeys, rightKeys, condition, left, right)
if canBuildRight(joinType) && canBroadcast(right) =>
Seq(joins.BroadcastHashJoinExec(
leftKeys, rightKeys, joinType, BuildRight, condition, planLater(left), planLater(right)))
case ExtractEquiJoinKeys(joinType, leftKeys, rightKeys, condition, left, right)
if canBuildLeft(joinType) && canBroadcast(left) =>
Seq(joins.BroadcastHashJoinExec(
leftKeys, rightKeys, joinType, BuildLeft, condition, planLater(left), planLater(right)))
// --- ShuffledHashJoin ---------------------------------------------------------------------
case ExtractEquiJoinKeys(joinType, leftKeys, rightKeys, condition, left, right)
if !conf.preferSortMergeJoin && canBuildRight(joinType) && canBuildLocalHashMap(right)
&& muchSmaller(right, left) ||
!RowOrdering.isOrderable(leftKeys) =>
Seq(joins.ShuffledHashJoinExec(
leftKeys, rightKeys, joinType, BuildRight, condition, planLater(left), planLater(right)))
case ExtractEquiJoinKeys(joinType, leftKeys, rightKeys, condition, left, right)
if !conf.preferSortMergeJoin && canBuildLeft(joinType) && canBuildLocalHashMap(left)
&& muchSmaller(left, right) ||
!RowOrdering.isOrderable(leftKeys) =>
Seq(joins.ShuffledHashJoinExec(
leftKeys, rightKeys, joinType, BuildLeft, condition, planLater(left), planLater(right)))
// --- SortMergeJoin ------------------------------------------------------------
case ExtractEquiJoinKeys(joinType, leftKeys, rightKeys, condition, left, right)
if RowOrdering.isOrderable(leftKeys) =>
joins.SortMergeJoinExec(
leftKeys, rightKeys, joinType, condition, planLater(left), planLater(right)) :: Nil
// --- Without joining keys ------------------------------------------------------------
// Pick BroadcastNestedLoopJoin if one side could be broadcasted
case j @ logical.Join(left, right, joinType, condition)
if canBuildRight(joinType) && canBroadcast(right) =>
joins.BroadcastNestedLoopJoinExec(
planLater(left), planLater(right), BuildRight, joinType, condition) :: Nil
case j @ logical.Join(left, right, joinType, condition)
if canBuildLeft(joinType) && canBroadcast(left) =>
joins.BroadcastNestedLoopJoinExec(
planLater(left), planLater(right), BuildLeft, joinType, condition) :: Nil
// Pick CartesianProduct for InnerJoin
case logical.Join(left, right, _: InnerLike, condition) =>
joins.CartesianProductExec(planLater(left), planLater(right), condition) :: Nil
case logical.Join(left, right, joinType, condition) =>
val buildSide =
if (right.stats.sizeInBytes <= left.stats.sizeInBytes) {
BuildRight
} else {
BuildLeft
}
// This join could be very slow or OOM
joins.BroadcastNestedLoopJoinExec(
planLater(left), planLater(right), buildSide, joinType, condition) :: Nil
// --- Cases where this strategy does not apply ---------------------------------------------
case _ => Nil
}
}
/**
* Used to plan streaming aggregation queries that are computed incrementally as part of a
* [[StreamingQuery]]. Currently this rule is injected into the planner
* on-demand, only when planning in a [[org.apache.spark.sql.execution.streaming.StreamExecution]]
*/
object StatefulAggregationStrategy extends Strategy {
override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
case _ if !plan.isStreaming => Nil
case EventTimeWatermark(columnName, delay, child) =>
EventTimeWatermarkExec(columnName, delay, planLater(child)) :: Nil
case PhysicalAggregation(
namedGroupingExpressions, aggregateExpressions, rewrittenResultExpressions, child) =>
aggregate.AggUtils.planStreamingAggregation(
namedGroupingExpressions,
aggregateExpressions,
rewrittenResultExpressions,
planLater(child))
case _ => Nil
}
}
/**
* Used to plan the streaming deduplicate operator.
*/
object StreamingDeduplicationStrategy extends Strategy {
override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
case Deduplicate(keys, child) if child.isStreaming =>
StreamingDeduplicateExec(keys, planLater(child)) :: Nil
case _ => Nil
}
}
/**
* Used to plan the aggregate operator for expressions based on the AggregateFunction2 interface.
*/
object Aggregation extends Strategy {
def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
case PhysicalAggregation(
groupingExpressions, aggregateExpressions, resultExpressions, child) =>
val (functionsWithDistinct, functionsWithoutDistinct) =
aggregateExpressions.partition(_.isDistinct)
if (functionsWithDistinct.map(_.aggregateFunction.children).distinct.length > 1) {
// This is a sanity check. We should not reach here when we have multiple distinct
// column sets. Our MultipleDistinctRewriter should take care this case.
sys.error("You hit a query analyzer bug. Please report your query to " +
"Spark user mailing list.")
}
val aggregateOperator =
if (functionsWithDistinct.isEmpty) {
aggregate.AggUtils.planAggregateWithoutDistinct(
groupingExpressions,
aggregateExpressions,
resultExpressions,
planLater(child))
} else {
aggregate.AggUtils.planAggregateWithOneDistinct(
groupingExpressions,
functionsWithDistinct,
functionsWithoutDistinct,
resultExpressions,
planLater(child))
}
aggregateOperator
case _ => Nil
}
}
protected lazy val singleRowRdd = sparkContext.parallelize(Seq(InternalRow()), 1)
object InMemoryScans extends Strategy {
def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
case PhysicalOperation(projectList, filters, mem: InMemoryRelation) =>
pruneFilterProject(
projectList,
filters,
identity[Seq[Expression]], // All filters still need to be evaluated.
InMemoryTableScanExec(_, filters, mem)) :: Nil
case _ => Nil
}
}
/**
* This strategy is just for explaining `Dataset/DataFrame` created by `spark.readStream`.
* It won't affect the execution, because `StreamingRelation` will be replaced with
* `StreamingExecutionRelation` in `StreamingQueryManager` and `StreamingExecutionRelation` will
* be replaced with the real relation using the `Source` in `StreamExecution`.
*/
object StreamingRelationStrategy extends Strategy {
def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
case s: StreamingRelation =>
StreamingRelationExec(s.sourceName, s.output) :: Nil
case s: StreamingExecutionRelation =>
StreamingRelationExec(s.toString, s.output) :: Nil
case _ => Nil
}
}
/**
* Strategy to convert [[FlatMapGroupsWithState]] logical operator to physical operator
* in streaming plans. Conversion for batch plans is handled by [[BasicOperators]].
*/
object FlatMapGroupsWithStateStrategy extends Strategy {
override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
case FlatMapGroupsWithState(
func, keyDeser, valueDeser, groupAttr, dataAttr, outputAttr, stateEnc, outputMode, _,
timeout, child) =>
val execPlan = FlatMapGroupsWithStateExec(
func, keyDeser, valueDeser, groupAttr, dataAttr, outputAttr, None, stateEnc, outputMode,
timeout, batchTimestampMs = None, eventTimeWatermark = None, planLater(child))
execPlan :: Nil
case _ =>
Nil
}
}
// Can we automate these 'pass through' operations?
object BasicOperators extends Strategy {
def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
case r: RunnableCommand => ExecutedCommandExec(r, r.children.map(planLater)) :: Nil
case MemoryPlan(sink, output) =>
val encoder = RowEncoder(sink.schema)
LocalTableScanExec(output, sink.allData.map(r => encoder.toRow(r).copy())) :: Nil
case logical.Distinct(child) =>
throw new IllegalStateException(
"logical distinct operator should have been replaced by aggregate in the optimizer")
case logical.Intersect(left, right) =>
throw new IllegalStateException(
"logical intersect operator should have been replaced by semi-join in the optimizer")
case logical.Except(left, right) =>
throw new IllegalStateException(
"logical except operator should have been replaced by anti-join in the optimizer")
case logical.DeserializeToObject(deserializer, objAttr, child) =>
execution.DeserializeToObjectExec(deserializer, objAttr, planLater(child)) :: Nil
case logical.SerializeFromObject(serializer, child) =>
execution.SerializeFromObjectExec(serializer, planLater(child)) :: Nil
case logical.MapPartitions(f, objAttr, child) =>
execution.MapPartitionsExec(f, objAttr, planLater(child)) :: Nil
case logical.MapPartitionsInR(f, p, b, is, os, objAttr, child) =>
execution.MapPartitionsExec(
execution.r.MapPartitionsRWrapper(f, p, b, is, os), objAttr, planLater(child)) :: Nil
case logical.FlatMapGroupsInR(f, p, b, is, os, key, value, grouping, data, objAttr, child) =>
execution.FlatMapGroupsInRExec(f, p, b, is, os, key, value, grouping,
data, objAttr, planLater(child)) :: Nil
case logical.MapElements(f, _, _, objAttr, child) =>
execution.MapElementsExec(f, objAttr, planLater(child)) :: Nil
case logical.AppendColumns(f, _, _, in, out, child) =>
execution.AppendColumnsExec(f, in, out, planLater(child)) :: Nil
case logical.AppendColumnsWithObject(f, childSer, newSer, child) =>
execution.AppendColumnsWithObjectExec(f, childSer, newSer, planLater(child)) :: Nil
case logical.MapGroups(f, key, value, grouping, data, objAttr, child) =>
execution.MapGroupsExec(f, key, value, grouping, data, objAttr, planLater(child)) :: Nil
case logical.FlatMapGroupsWithState(
f, key, value, grouping, data, output, _, _, _, timeout, child) =>
execution.MapGroupsExec(
f, key, value, grouping, data, output, timeout, planLater(child)) :: Nil
case logical.CoGroup(f, key, lObj, rObj, lGroup, rGroup, lAttr, rAttr, oAttr, left, right) =>
execution.CoGroupExec(
f, key, lObj, rObj, lGroup, rGroup, lAttr, rAttr, oAttr,
planLater(left), planLater(right)) :: Nil
case logical.Repartition(numPartitions, shuffle, child) =>
if (shuffle) {
ShuffleExchange(RoundRobinPartitioning(numPartitions), planLater(child)) :: Nil
} else {
execution.CoalesceExec(numPartitions, planLater(child)) :: Nil
}
case logical.Sort(sortExprs, global, child) =>
execution.SortExec(sortExprs, global, planLater(child)) :: Nil
case logical.Project(projectList, child) =>
execution.ProjectExec(projectList, planLater(child)) :: Nil
case logical.Filter(condition, child) =>
execution.FilterExec(condition, planLater(child)) :: Nil
case f: logical.TypedFilter =>
execution.FilterExec(f.typedCondition(f.deserializer), planLater(f.child)) :: Nil
case e @ logical.Expand(_, _, child) =>
execution.ExpandExec(e.projections, e.output, planLater(child)) :: Nil
case logical.Window(windowExprs, partitionSpec, orderSpec, child) =>
execution.window.WindowExec(windowExprs, partitionSpec, orderSpec, planLater(child)) :: Nil
case logical.Sample(lb, ub, withReplacement, seed, child) =>
execution.SampleExec(lb, ub, withReplacement, seed, planLater(child)) :: Nil
case logical.LocalRelation(output, data, _) =>
LocalTableScanExec(output, data) :: Nil
case logical.LocalLimit(IntegerLiteral(limit), child) =>
execution.LocalLimitExec(limit, planLater(child)) :: Nil
case logical.GlobalLimit(IntegerLiteral(limit), child) =>
execution.GlobalLimitExec(limit, planLater(child)) :: Nil
case logical.Union(unionChildren) =>
execution.UnionExec(unionChildren.map(planLater)) :: Nil
case g @ logical.Generate(generator, join, outer, _, _, child) =>
execution.GenerateExec(
generator, join = join, outer = outer, g.qualifiedGeneratorOutput,
planLater(child)) :: Nil
case _: logical.OneRowRelation =>
execution.RDDScanExec(Nil, singleRowRdd, "OneRowRelation") :: Nil
case r: logical.Range =>
execution.RangeExec(r) :: Nil
case logical.RepartitionByExpression(expressions, child, numPartitions) =>
exchange.ShuffleExchange(HashPartitioning(
expressions, numPartitions), planLater(child)) :: Nil
case ExternalRDD(outputObjAttr, rdd) => ExternalRDDScanExec(outputObjAttr, rdd) :: Nil
case r: LogicalRDD =>
RDDScanExec(r.output, r.rdd, "ExistingRDD", r.outputPartitioning, r.outputOrdering) :: Nil
case h: ResolvedHint => planLater(h.child) :: Nil
case _ => Nil
}
}
}
|
stanzhai/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
|
Scala
|
apache-2.0
| 20,690
|
/*
* Copyright 2009 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.fsq.twitter.ostrich.stats
import java.util.concurrent.ConcurrentHashMap
/**
* Singleton StatsCollector that collects performance data for the application.
*/
object Stats extends StatsCollection {
includeJvmStats = true
private val namedCollections = new ConcurrentHashMap[String, StatsCollection](128, 0.75f, 2)
namedCollections.put("", Stats)
/**
* Return a named StatsCollection as defined in an AdminServiceConfig.
* If the named collection doesn't exist, the global stats object is returned.
*/
def get(name: String): StatsCollection = {
val rv = namedCollections.get(name)
if (rv == null) namedCollections.get("") else rv
}
/**
* Make a named StatsCollection, or return an existing collection if one already exists under
* that name.
*/
def make(name: String): StatsCollection = {
val rv = namedCollections.get(name)
if (rv == null) {
namedCollections.putIfAbsent(name, new StatsCollection())
}
namedCollections.get(name)
}
// helper function for computing deltas over counters
def delta(oldValue: Long, newValue: Long): Long = {
if (oldValue <= newValue) {
newValue - oldValue
} else {
(Long.MaxValue - oldValue) + (newValue - Long.MinValue) + 1
}
}
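  // Worked example (illustrative): a counter at Long.MaxValue - 1 that is incremented four times
  // wraps around to Long.MinValue + 2. Then
  //   delta(Long.MaxValue - 1, Long.MinValue + 2)
  //     == (Long.MaxValue - (Long.MaxValue - 1)) + ((Long.MinValue + 2) - Long.MinValue) + 1
  //     == 1 + 2 + 1 == 4
  // so the wraparound branch recovers the true number of increments.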
/**
* Create a function that returns the delta of a counter each time it's called.
*/
def makeDeltaFunction(counter: Counter): () => Double = {
var lastValue: Long = 0
() => {
val newValue = counter()
val rv = delta(lastValue, newValue)
lastValue = newValue
rv.toDouble
}
}
}
|
foursquare/fsqio
|
src/jvm/io/fsq/twitter/ostrich/stats/Stats.scala
|
Scala
|
apache-2.0
| 2,206
|
package com.twitter.io
import java.io.{ByteArrayOutputStream, InputStream, OutputStream}
import scala.annotation.tailrec
object StreamIO {
/**
* Copy an InputStream to an OutputStream in chunks of the given
* buffer size (default = 1KB).
*/
@tailrec
final def copy(
inputStream: InputStream,
outputStream: OutputStream,
bufferSize: Int = 1024
) {
val buf = new Array[Byte](bufferSize)
inputStream.read(buf, 0, buf.length) match {
case -1 => ()
case n =>
outputStream.write(buf, 0, n)
copy(inputStream, outputStream, bufferSize)
}
}
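  // Illustrative usage (the file names are hypothetical):
  //   val in  = new java.io.FileInputStream("in.bin")
  //   val out = new java.io.FileOutputStream("out.bin")
  //   try StreamIO.copy(in, out, bufferSize = 4096) finally { in.close(); out.close() }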
/**
* Buffer (fully) the given input stream by creating & copying it to
* a ByteArrayOutputStream.
*/
def buffer(inputStream: InputStream): ByteArrayOutputStream = {
val bos = new java.io.ByteArrayOutputStream
copy(inputStream, bos)
bos
}
}
|
travisbrown/util
|
util-core/src/main/scala/com/twitter/io/StreamIO.scala
|
Scala
|
apache-2.0
| 878
|
package com.github.wkennedy.detroitpast.model
import net.liftweb.mongodb.{JsonObject, JsonObjectMeta}
case class ErrorMessage(userMessage:String, developerMessage: String, code: Int, additionalInfo: Option[String]) extends JsonObject[ErrorMessage] with Serializable {
def meta = ErrorMessage
}
object ErrorMessage extends JsonObjectMeta[ErrorMessage] {
}
|
wkennedy/DetroitPast
|
src/main/scala/com/github/wkennedy/detroitpast/model/ErrorMessage.scala
|
Scala
|
mit
| 362
|
// Databricks notebook source exported at Sun, 28 Aug 2016 15:46:03 UTC
// MAGIC %md
// MAGIC
// MAGIC # [Scalable Data Science](http://www.math.canterbury.ac.nz/~r.sainudiin/courses/ScalableDataScience/)
// MAGIC
// MAGIC
// MAGIC ### prepared by [Raazesh Sainudiin](https://nz.linkedin.com/in/raazesh-sainudiin-45955845) and [Sivanand Sivaram](https://www.linkedin.com/in/sivanand)
// MAGIC
// MAGIC *supported by* [](https://databricks.com/)
// MAGIC and
// MAGIC [](https://www.awseducate.com/microsite/CommunitiesEngageHome)
// COMMAND ----------
// MAGIC %md
// MAGIC The [html source url](https://raw.githubusercontent.com/raazesh-sainudiin/scalable-data-science/master/db/week1/01_introduction/003_scalaCrashCourse.html) of this databricks notebook and its recorded Uji :
// MAGIC
// MAGIC [](https://www.youtube.com/v/O8JbxgPpAU8?rel=0&autoplay=1&modestbranding=1&start=4511)
// COMMAND ----------
// MAGIC %md
// MAGIC # **Notebooks**
// MAGIC Write Spark code for processing your data in notebooks.
// MAGIC
// MAGIC **NOTE**: You should have already cloned this notebook and attached it to the ``studentsEnrolled`` or ``studentsObserving1`` clusters by now. If not seek help from Siva by raising your hand.
// COMMAND ----------
// MAGIC %md
// MAGIC ### Notebooks can be written in **Python**, **Scala**, **R**, or **SQL**.
// MAGIC * This is a Scala notebook - which is indicated next to the title above by ``(Scala)``.
// COMMAND ----------
// MAGIC %md
// MAGIC ### **Creating a new Notebook**
// MAGIC
// MAGIC 
// MAGIC
// MAGIC * Click the triangle on the right side of a folder to open the folder menu.
// MAGIC * Select **Create > Notebook**.
// MAGIC * Enter the name of the notebook, the language (Python, Scala, R or SQL) for the notebook, and a cluster to run it on.
// COMMAND ----------
// MAGIC %md
// MAGIC
// MAGIC ### ** Cloning a Notebook**
// MAGIC * You can clone a notebook to create a copy of it, for example if you want to edit or run an Example notebook like this one.
// MAGIC * Click **File > Clone** in the notebook context bar above.
// MAGIC * Enter a new name and location for your notebook. If Access Control is enabled, you can only clone to folders that you have Manage permissions on.
// COMMAND ----------
// MAGIC %md
// MAGIC # **Introduction to Scala through Scala Notebook**
// MAGIC
// MAGIC * This introduction notebook describes how to get started running Scala code in Notebooks.
// COMMAND ----------
// MAGIC %md
// MAGIC ### Clone Or Import This Notebook
// MAGIC * From the **File** menu at the top left of this notebook, choose **Clone** or click **Import Notebook** on the top right. This will allow you to interactively execute code cells as you proceed through the notebook.
// MAGIC
// MAGIC 
// MAGIC * Enter a name and a desired location for your cloned notebook (e.g. clone to your own user directory or the "Shared" directory).
// MAGIC * Navigate to the location you selected (e.g. click Menu > Workspace > `Your cloned location`)
// COMMAND ----------
// MAGIC %md
// MAGIC ### **Attach** the Notebook to a **cluster**
// MAGIC * A **Cluster** is a group of machines which can run commands in cells.
// MAGIC * Check the upper left corner of your notebook to see if it is **Attached** or **Detached**.
// MAGIC * If **Detached**, click on the right arrow and select a cluster to attach your notebook to.
// MAGIC * If there is no running cluster, create one as described in the [Welcome to Databricks](/#workspace/databricks_guide/00 Welcome to Databricks) guide.
// MAGIC
// MAGIC 
// COMMAND ----------
// MAGIC %md
// MAGIC ***
// MAGIC ####  **Cells** are units that make up notebooks
// MAGIC 
// MAGIC
// MAGIC Cells each have a type - including **scala**, **python**, **sql**, **R**, **markdown**, **filesystem**, and **shell**.
// MAGIC * While cells default to the type of the Notebook, other cell types are supported as well.
// MAGIC * This cell is in **markdown** and is used for documentation. [Markdown](http://en.wikipedia.org/wiki/Markdown) is a simple text formatting syntax.
// MAGIC ***
// COMMAND ----------
// MAGIC %md
// MAGIC ***
// MAGIC ### **Create** and **Edit** a New Markdown Cell in this Notebook
// MAGIC * When you mouse between cells, a + sign will pop up in the center that you can click on to create a new cell.
// MAGIC
// MAGIC 
// MAGIC * Type **``%md Hello, world!``** into your new cell (**``%md``** indicates the cell is markdown).
// MAGIC
// MAGIC
// MAGIC
// MAGIC * Click out of the cell to see the cell contents update.
// MAGIC
// MAGIC 
// MAGIC ***
// COMMAND ----------
// MAGIC %md
// MAGIC
// MAGIC ### **Running a cell in your notebook.**
// MAGIC * #### Press **Shift+Enter** when in the cell to **run** it and proceed to the next cell.
// MAGIC * The cell's contents should update.
// MAGIC 
// MAGIC * **NOTE:** Cells are not automatically run each time you open the notebook.
// MAGIC * Instead, previous results from running a cell are saved and displayed.
// MAGIC * #### Alternately, press **Ctrl+Enter** when in a cell to **run** it, but not proceed to the next cell.
// COMMAND ----------
// MAGIC %md **You Try Now!**
// MAGIC Just double-click the cell below, modify the text following ``%md`` and press **Ctrl+Enter** to evaluate it and see its rendered markdown output.
// MAGIC ```
// MAGIC > %md Hello, world!
// MAGIC ```
// COMMAND ----------
// MAGIC %md Hello, world!
// COMMAND ----------
// MAGIC %md
// MAGIC ***
// MAGIC ####  **Markdown Cell Tips**
// MAGIC * To change a non-markdown cell to markdown, add **%md** to the very start of the cell.
// MAGIC * After updating the contents of a markdown cell, click out of the cell to update the formatted contents of a markdown cell.
// MAGIC * To edit an existing markdown cell, **doubleclick** the cell.
// MAGIC ***
// COMMAND ----------
// MAGIC %md
// MAGIC ***
// MAGIC ### Run a **Scala Cell**
// MAGIC * Run the following scala cell.
// MAGIC * Note: There is no need for any special indicator (such as ``%md``) to create a Scala cell in a Scala notebook.
// MAGIC * You know it is a Scala notebook because of the `` (Scala)`` appended to the name of this notebook.
// MAGIC * Make sure the cell's contents update before moving on.
// MAGIC * Press **Shift+Enter** when in the cell to run it and proceed to the next cell.
// MAGIC * The cell's contents should update.
// MAGIC * Alternately, press **Ctrl+Enter** when in a cell to **run** it, but not proceed to the next cell.
// MAGIC * characters following ``//`` are comments in scala.
// MAGIC ***
// COMMAND ----------
println(System.currentTimeMillis) // press Ctrl+Enter to evaluate println that prints its argument as a line
// COMMAND ----------
// MAGIC %md
// MAGIC
// MAGIC ## Scala Resources
// MAGIC
// MAGIC You will not be learning Scala systematically and thoroughly in this course. You will learn *to use* Scala by doing various Spark jobs.
// MAGIC
// MAGIC If you are seriously interested in learning Scala properly, there are various resources, including:
// MAGIC
// MAGIC * [scala-lang.org](http://www.scala-lang.org/) is the **core Scala resource**.
// MAGIC * [tour-of-scala](http://docs.scala-lang.org/tutorials/tour/tour-of-scala)
// MAGIC * MOOC
// MAGIC * [Coursera: Functional Programming Principles in Scala](https://www.coursera.org/course/progfun)
// MAGIC * [Books](http://www.scala-lang.org/documentation/books.html)
// MAGIC * [Programming in Scala, 1st Edition, Free Online Reading](http://www.artima.com/pins1ed/)
// MAGIC
// MAGIC The main sources for the following content are (you are encouraged to read them for more background):
// MAGIC
// MAGIC * [Martin Odersky's Scala by example](http://www.scala-lang.org/docu/files/ScalaByExample.pdf)
// MAGIC * [Scala crash course by Holden Karau](http://lintool.github.io/SparkTutorial/slides/day1_Scala_crash_course.pdf)
// MAGIC * [Darren's brief introduction to scala and breeze for statistical computing](https://darrenjw.wordpress.com/2013/12/30/brief-introduction-to-scala-and-breeze-for-statistical-computing/)
// MAGIC
// MAGIC
// COMMAND ----------
// MAGIC %md
// MAGIC #Introduction to Scala
// MAGIC ## What is Scala?
// MAGIC "Scala smoothly integrates object-oriented and functional programming. It is designed to express common programming patterns in a concise, elegant, and type-safe way." by Matrin Odersky.
// MAGIC
// MAGIC * High-level language for the Java Virtual Machine (JVM)
// MAGIC * Object oriented + functional programming
// MAGIC * Statically typed
// MAGIC * Comparable in speed to Java
// MAGIC * Type inference saves us from having to write explicit types most of the time
// MAGIC * Interoperates with Java
// MAGIC * Can use any Java class (inherit from, etc.)
// MAGIC * Can be called from Java code
// MAGIC
// MAGIC ## Why Scala?
// MAGIC
// MAGIC * Spark was originally written in Scala, which allows concise function syntax and interactive use
// MAGIC * Spark APIs for other languages include:
// MAGIC * Java API for standalone use
// MAGIC * Python API added to reach a wider user community of programmers
// MAGIC * R API added more recently to reach a wider community of data analysts
// MAGIC * Unfortunately, the Python and R APIs generally lag behind Spark's native Scala API (e.g. GraphX is currently only available in Scala).
// MAGIC * See Darren Wilkinson's 11 reasons for [scala as a platform for statistical computing and data science](https://darrenjw.wordpress.com/2013/12/23/scala-as-a-platform-for-statistical-computing-and-data-science/). It is embedded in-place below for your convenience.
// COMMAND ----------
//%run "/scalable-data-science/xtraResources/support/sdsFunctions"
//This allows easy embedding of publicly available information into any other notebook
//when viewing in git-book just ignore this block - you may have to manually chase the URL in frameIt("URL").
//Example usage:
// displayHTML(frameIt("https://en.wikipedia.org/wiki/Latent_Dirichlet_allocation#Topics_in_LDA",250))
def frameIt( u:String, h:Int ) : String = {
"""<iframe
src=""""+ u+""""
width="95%" height="""" + h + """"
sandbox>
<p>
<a href="http://spark.apache.org/docs/latest/index.html">
      Fallback link for browsers that don't support frames
</a>
</p>
</iframe>"""
}
// COMMAND ----------
displayHTML(frameIt("https://darrenjw.wordpress.com/2013/12/23/scala-as-a-platform-for-statistical-computing-and-data-science/",500))
// COMMAND ----------
// MAGIC %md
// MAGIC # Let's get our hands dirty in Scala
// MAGIC
// MAGIC We will go through the following programming concepts and tasks:
// MAGIC * Assignments
// MAGIC * Methods and Tab-completion
// MAGIC * Functions in Scala
// MAGIC * Collections in Scala
// MAGIC * Scala Closures for Functional Programming and MapReduce
// MAGIC
// MAGIC **Remark**: You need to take a computer science course (from Coursera, for example) to properly learn Scala. Here, we will learn to use Scala by example to accomplish our data science tasks at hand.
// COMMAND ----------
// MAGIC %md
// MAGIC ## Assignments
// MAGIC ### value and variable as ``val`` and ``var``
// MAGIC
// MAGIC Let us assign the integer value ``5`` to `x` as follows:
// COMMAND ----------
val x : Int = 5 // <Ctrl+Enter> to declare a value x to be integer 5
// COMMAND ----------
// MAGIC %md
// MAGIC Scala is statically typed, but its built-in type inference machinery can automatically figure out that ``x`` is of type ``Int``, as follows.
// MAGIC Let's declare a value ``x`` to be ``Int`` 5 next, without explicitly using ``Int``.
// COMMAND ----------
val x = 5 // <Ctrl+Enter> to declare a value x as Int 5 (type automatically inferred)
// COMMAND ----------
// MAGIC %md
// MAGIC Let's declare ``x`` as a ``Double`` or double-precision floating-point type using decimal such as ``5.0`` (a digit has to follow the decimal point!)
// COMMAND ----------
val x = 5.0 // <Ctrl+Enter> to declare a value x as Double 5
// COMMAND ----------
// MAGIC %md
// MAGIC Alternatively, we can assign ``x`` as a ``Double`` explicitly. Note that the decimal point is not needed in this case due to explicit typing as ``Double``.
// COMMAND ----------
val x : Double = 5 // <Ctrl+Enter> to declare a value x as Double 5 (type explicitly declared as Double)
// COMMAND ----------
// MAGIC %md
// MAGIC Next, note that names need to be declared (with ``val`` or ``var``) when they are first used. We have declared ``x`` to be a ``val``, which is short for *value*. This makes ``x`` immutable (it cannot be changed).
// MAGIC
// MAGIC Thus, ``x`` cannot simply be reassigned, as the following code illustrates with the resulting error: ``... error: reassignment to val``.
// COMMAND ----------
x = 10 // <Ctrl+Enter> to try to reassign val x to 10
// COMMAND ----------
// MAGIC %md
// MAGIC Scala allows declaration of mutable variables as well using ``var``, as follows:
// COMMAND ----------
var y = 2 // <Shift+Enter> to declare a variable y to be integer 2 and go to next cell
// COMMAND ----------
y = 3 // <Shift+Enter> to change the value of y to 3
// COMMAND ----------
// MAGIC %md
// MAGIC ## Methods and Tab-completion
// COMMAND ----------
val s = "hi" // <Ctrl+Enter> to declare val s to String "hi"
// COMMAND ----------
// MAGIC %md
// MAGIC You can place the cursor after ``.`` following a declared object and find out the methods available for it as shown in the image below.
// MAGIC
// MAGIC 
// MAGIC
// MAGIC **You Try** doing this next.
// COMMAND ----------
s. // place cursor after the '.' and press Tab to see all available methods for s
// COMMAND ----------
// MAGIC %md
// MAGIC For example,
// MAGIC * scroll down to ``contains`` and double-click on it.
// MAGIC * This should lead to ``s.contains`` in your cell.
// MAGIC * Now add a String argument to see whether ``s`` contains it; for example, try:
// MAGIC * ``s.contains("f")``
// MAGIC * ``s.contains("")`` and
// MAGIC * ``s.contains("i")``
// COMMAND ----------
s // <Shift-Enter> recall the value of String s
// COMMAND ----------
s.contains("f") // <Shift-Enter> returns Boolean false since s does not contain the string "f"
// COMMAND ----------
s.contains("") // <Shift-Enter> returns Boolean true since s contains the empty string ""
// COMMAND ----------
s.contains("i") // <Ctrl+Enter> returns Boolean true since s contains the string "i"
// COMMAND ----------
// MAGIC %md
// MAGIC ## Functions
// COMMAND ----------
def square(x: Int): Int = x*x // <Shift+Enter> to define a function named square
// COMMAND ----------
square(5) // <Shift+Enter> to call this function on the argument 5
// COMMAND ----------
y // <Shift+Enter> to recall that val y is Int 3
// COMMAND ----------
square(y) // <Shift+Enter> to call the function on val y, which has the right argument type Int
// COMMAND ----------
x // <Shift+Enter> to recall that x is Double 5.0
// COMMAND ----------
square(x) // <Shift+Enter> calling the function on val x of type Double gives a type mismatch error
// COMMAND ----------
def square(x: Int): Int = { // <Shift+Enter> to define the function with a block body
val answer=x*x
answer // the last line of the function block is returned
}
// COMMAND ----------
square(5000) // <Shift+Enter> to call the function
// COMMAND ----------
// <Shift+Enter> to define a function whose input and output types are String
def announceAndEmit(text: String) =
{
println(text)
text // the last line of the function block is returned
}
// COMMAND ----------
// <Ctrl+Enter> to call the function, which prints its argument as a line and returns it as a String
announceAndEmit("roger roger")
// COMMAND ----------
// MAGIC %md
// MAGIC ## Scala Collections
// MAGIC
// MAGIC See the [overview](http://docs.scala-lang.org/overviews/collections/overview) and [introduction](http://docs.scala-lang.org/overviews/collections/introduction) to Scala collections, the building blocks of Spark.
// COMMAND ----------
// <Ctrl+Enter> to declare (an immutable) val lst as List of Int's 1,2,3
val lst = List(1, 2, 3)
// COMMAND ----------
// MAGIC %md
// MAGIC There are several other Scala collections and we will introduce them as needed. The two other most common ones are ``Array`` and ``Seq``.
// COMMAND ----------
val arr = Array(1,2,3) // <Shift-Enter> to declare an Array
// COMMAND ----------
val seq = Seq(1,2,3) // <Shift-Enter> to declare a Seq
// COMMAND ----------
// MAGIC %md
// MAGIC ## Scala Closures for Functional Programming and MapReduce
// MAGIC
// MAGIC We will use *closures* (anonymous functions that may capture variables from their enclosing scope) to process Scala collections in a functional style.
// MAGIC
// MAGIC ### Five ways of adding 1
// MAGIC
// MAGIC 1. explicit version:
// MAGIC ```%scala
// MAGIC (x: Int) => x + 1
// MAGIC ```
// MAGIC
// MAGIC 2. type-inferred more intuitive version:
// MAGIC ```%scala
// MAGIC x => x + 1
// MAGIC ```
// MAGIC
// MAGIC 3. placeholder syntax (each argument must be used exactly once):
// MAGIC ```%scala
// MAGIC _ + 1
// MAGIC ```
// MAGIC
// MAGIC 4. type-inferred more intuitive version with code-block for larger function body:
// MAGIC ```%scala
// MAGIC x => {
// MAGIC // body is a block of code
// MAGIC val integerToAdd = 1
// MAGIC x + integerToAdd
// MAGIC }
// MAGIC ```
// MAGIC
// MAGIC 5. regular functions using ``def``:
// MAGIC ```%scala
// MAGIC def addOne(x: Int): Int = x + 1
// MAGIC ```
// COMMAND ----------
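// MAGIC %md
// MAGIC As a quick, purely illustrative check, the next cell applies all five forms to the ``lst`` declared earlier (the helper name ``addOne`` is just for illustration); each expression adds 1 to every element and returns ``List(2, 3, 4)``.
// COMMAND ----------
// <Ctrl+Enter> each expression below adds 1 to every element of lst and returns List(2, 3, 4)
def addOne(x: Int): Int = x + 1  // way 5: a regular function using def (defined first so it can be used below)
lst.map((x: Int) => x + 1)       // way 1: explicit version
lst.map(x => x + 1)              // way 2: type-inferred version
lst.map(_ + 1)                   // way 3: placeholder syntax
lst.map(x => {                   // way 4: code-block version for a larger function body
  val integerToAdd = 1
  x + integerToAdd
})
lst.map(addOne)                  // way 5 applied: the named function is passed as an argument
// COMMAND ----------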
// MAGIC %md
// MAGIC Now, let's apply closures for functional programming over a Scala collection (``List``) using ``foreach``, ``map``, ``filter`` and ``reduce``. In the end, we will write our first MapReduce program!
// COMMAND ----------
// <Shift+Enter> to call the foreach method and print each element of the List on its own line using the println function
lst.foreach(x => println(x))
// COMMAND ----------
// <Shift+Enter> for same output as above where println is applied to each element of List lst
lst.foreach(println)
// COMMAND ----------
// <Shift+Enter> to map each value x of lst with x+10 to return a new List(11, 12, 13)
lst.map(x => x + 10)
// COMMAND ----------
// <Shift+Enter> for the same as above using place-holder syntax
lst.map(_ + 10)
// COMMAND ----------
// <Shift+Enter> to return a new List(1, 3) after filtering x's from lst if (x % 2 == 1) is true
lst.filter(x => (x % 2 == 1) )
// COMMAND ----------
// <Shift+Enter> for the same as above using place-holder syntax
lst.filter( _ % 2 == 1 )
// COMMAND ----------
// <Shift+Enter> to use reduce to add elements of lst two at a time to return Int 6
lst.reduce( (x, y) => x + y )
// COMMAND ----------
// <Ctrl+Enter> for the same as above but using place-holder syntax
lst.reduce( _ + _ )
// COMMAND ----------
// MAGIC %md
// MAGIC Let's combine the ``map`` and ``reduce`` programs above to find the sum after 10 has been added to every element of the original List ``lst``, as follows:
// COMMAND ----------
lst.map(x => x+10).reduce((x,y) => x+y) // <Ctrl-Enter> to get Int 36 = sum(1+10,2+10,3+10)
// COMMAND ----------
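// MAGIC %md
// MAGIC To make the MapReduce idea concrete, here is a small, purely illustrative word-count sketch over a made-up list ``words``: ``map`` emits ``(word, 1)`` pairs, ``groupBy`` plays the role of the shuffle, and ``reduce`` sums the counts for each word.
// COMMAND ----------
// <Ctrl+Enter> a tiny word-count; expect the counts spark -> 3, scala -> 2, notebook -> 1
val words = List("spark", "scala", "spark", "notebook", "scala", "spark")
words
  .map(word => (word, 1))                                              // map phase: emit (word, 1) pairs
  .groupBy(_._1)                                                       // "shuffle": group the pairs by word
  .map { case (word, pairs) => (word, pairs.map(_._2).reduce(_ + _)) } // reduce phase: sum the counts per word
// COMMAND ----------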
// MAGIC %md
// MAGIC There are lots of methods in Scala Collections. See for example [API_scala.collection.Seq](http://www.scala-lang.org/api/2.10.4/index.html#scala.collection.Seq).
// COMMAND ----------
// MAGIC %md
// MAGIC ## Spark is written in Scala and the primary language for this course is Scala.
// MAGIC ### However, let us use the best language for the job!
// MAGIC
// MAGIC ### Cells each have a type - **scala**, **python**, **r**, **sql**, **filesystem**, **command line** or **markdown**.
// MAGIC * While cells default to the type of the Notebook, other cell types are supported as well.
// MAGIC * For example, Python Notebooks can contain Python, SQL, markdown, and even Scala cells. This lets you write notebooks that use multiple languages.
// MAGIC * This cell is in **markdown** and is used for documentation purposes.
// COMMAND ----------
// MAGIC %md
// MAGIC ### All types of cells can be created in any notebook, regardless of the language.
// MAGIC
// MAGIC To create a cell of another language, start the cell with:
// MAGIC * `%md` - Markdown
// MAGIC * `%sql` - SQL
// MAGIC * `%scala` - Scala
// MAGIC * `%py` - Python
// MAGIC * `%r` - R
// COMMAND ----------
// MAGIC %md
// MAGIC ### Cross-language cells can be used to mix commands from other languages.
// MAGIC
// MAGIC Examples:
// COMMAND ----------
// MAGIC %py print("For example, this is a scala notebook, but we can use %py to run python commands inline.")
// COMMAND ----------
// MAGIC %r print("We can also access other languages such as R.")
// COMMAND ----------
// MAGIC %md
// MAGIC ### Command line cells can be used to work with local files on the Spark driver node.
// MAGIC * Start a cell with `%sh` to run a command line command
// COMMAND ----------
// MAGIC %sh
// MAGIC # This is a command line cell. Commands you write here will be executed as if they were run on the command line.
// MAGIC # For example, in this cell we access the help pages for the bash shell.
// MAGIC man bash
// COMMAND ----------
// MAGIC %md
// MAGIC ### Filesystem cells allow access to the [Databricks File System](/#workspace/databricks_guide/02 Product Overview/09 DB File System - scala).
// MAGIC * Start a cell with `%fs` to run DBFS commands
// MAGIC * Type `%fs help` for a list of commands
// COMMAND ----------
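// MAGIC %md
// MAGIC For example, the following (purely illustrative) filesystem cell lists the root of the Databricks File System:
// COMMAND ----------
// MAGIC %fs ls /
// COMMAND ----------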
// MAGIC %md
// MAGIC # Further Reference / Homework
// MAGIC
// MAGIC Go through the following notebooks (``Clone`` them from the ``File`` menu into your ``Workspace/Users/...``) to play with them and familiarize yourself with Databricks Cloud:
// MAGIC * [Welcome to Databricks](/#workspace/databricks_guide/00 Welcome to Databricks) and watch the *Getting Started* videos for more details. This worksheet can be loaded below using ``%run "/databricks_guide/00 Welcome to Databricks"``
// MAGIC * [Intro Scala Notebooks](/#workspace/databricks_guide/01 Intro Notebooks/2 Intro Scala Notebooks)
// MAGIC * [Databricks File System](/#workspace/databricks_guide/02 Product Overview/09 DB File System - scala)
// MAGIC * [FileStore](/#workspace/databricks_guide/02 Product Overview/12 FileStore) to organize files for access.
// MAGIC
// MAGIC You may also like to check out:
// MAGIC * [Intro Python Notebooks](/#workspace/databricks_guide/01 Intro Notebooks/1 Intro Python Notebooks)
// MAGIC * [Intro R Notebooks](/#workspace/databricks_guide/01 Intro Notebooks/4 Intro R Notebooks)
// COMMAND ----------
// MAGIC %md
// MAGIC ### Notebooks can be run from other notebooks using **%run**
// MAGIC * Syntax: `%run /full/path/to/notebook`
// MAGIC * This is commonly used to import functions you defined in other notebooks.
// COMMAND ----------
// just see the guide for the introductory notebooks
//%run "/databricks_guide/00 Welcome to Databricks" // running this cell will load databricks_guide/00 Welcome to Databricks notebook here
// COMMAND ----------
// MAGIC %md
// MAGIC
// MAGIC # [Scalable Data Science](http://www.math.canterbury.ac.nz/~r.sainudiin/courses/ScalableDataScience/)
// MAGIC
// MAGIC
// MAGIC ### prepared by [Raazesh Sainudiin](https://nz.linkedin.com/in/raazesh-sainudiin-45955845) and [Sivanand Sivaram](https://www.linkedin.com/in/sivanand)
// MAGIC
// MAGIC *supported by* [](https://databricks.com/)
// MAGIC and
// MAGIC [](https://www.awseducate.com/microsite/CommunitiesEngageHome)
|
raazesh-sainudiin/scalable-data-science
|
db/week1/01_introduction/003_scalaCrashCourse.scala
|
Scala
|
unlicense
| 25,129
|
package com.github.dzhg.tedis.storage
/**
* @author dzhg 8/11/17
*/
case class TedisKeyInfo(name: String, ttl: Option[Long], createdAt: Long)
case class TedisEntry(keyInfo: TedisKeyInfo, value: TedisValue)
|
dzhg/tedis
|
src/main/scala/com/github/dzhg/tedis/storage/TedisModels.scala
|
Scala
|
mit
| 213
|
/*
*
* ____ __ ____________ ______
* / __/______ _/ /__ /_ __/ _/ //_/_ /
* _\\ \\/ __/ _ `/ / _ `// / _/ // ,< / /_
* /___/\\__/\\_,_/_/\\_,_//_/ /___/_/|_| /___/
*
* A PGF/TIKZ plot library for Scala.
*
*/
package scalatikz.pgf.plots.enums
import enumeratum._
import scala.collection.immutable._
sealed abstract class AxisScale(override val entryName: String) extends EnumEntry {
override def toString: String = entryName
}
object AxisScale extends Enum[AxisScale] {
val values: IndexedSeq[AxisScale] = findValues
case object LINEAR extends AxisScale("linear")
case object LOG extends AxisScale("log")
}
|
vagmcs/ScalaTIKZ
|
src/main/scala/scalatikz/pgf/plots/enums/AxisScale.scala
|
Scala
|
lgpl-3.0
| 646
|
/*
* Copyright (c) 2016 JLCM
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package de.beikern.quilltests.daos
object Dao {
case class Foo(field1: String, field2: Int)
case class Bar(field1: String, field2: Int)
}
|
beikern/quilltests
|
src/main/scala/de/beikern/quilltests/daos/Dao.scala
|
Scala
|
mit
| 1,250
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box.{Calculated, CtBoxIdentifier, CtOptionalInteger}
import uk.gov.hmrc.ct.computations.calculations.LossesSetAgainstOtherProfitsCalculator
import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever
case class CP998(value: Option[Int]) extends CtBoxIdentifier(name = "Losses this AP set against other profits this AP") with CtOptionalInteger
object CP998 extends Calculated[CP998, ComputationsBoxRetriever] with LossesSetAgainstOtherProfitsCalculator {
override def calculate(fieldValueRetriever: ComputationsBoxRetriever): CP998 = {
calculateLossesSetAgainstProfits(cato01 = fieldValueRetriever.cato01(),
cp997 = fieldValueRetriever.chooseCp997(),
cp118 = fieldValueRetriever.cp118(),
cpq19 = fieldValueRetriever.cpQ19())
}
}
|
hmrc/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/computations/CP998.scala
|
Scala
|
apache-2.0
| 1,525
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openchai.spark.util
object AddLicense {
def readFile(fpath: String) = {
import java.util.Scanner
import java.nio.file.Paths
val content = new Scanner(Paths.get(fpath)).useDelimiter("\\\\Z").next()
content
}
def write(path: String, data: String): Unit = tools.nsc.io.File(path).writeAll(data)
val header="""/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/""".stripMargin
def main(args: Array[String]) = {
val path = args(0)
println(s"Adding header to $path")
val txt = header + "\\n" + readFile(path)+"\\n"
write(path, txt)
}
}
|
javadba/p2p
|
src/main/scala/org/openchai/spark/util/AddLicense.scala
|
Scala
|
apache-2.0
| 2,137
|
package com.blinkbox.books.auth.server.data
import com.blinkbox.books.auth.server.sso.SsoUserId
import com.blinkbox.books.auth.server.{PasswordHasher, UserRegistration}
import com.blinkbox.books.slick.{TablesSupport, SlickTypes}
import com.blinkbox.books.time.{Clock, TimeSupport}
import scala.slick.driver.JdbcProfile
import scala.slick.profile.BasicProfile
import scala.util.Random
trait UserRepository[Profile <: BasicProfile] extends SlickTypes[Profile] {
def userWithUsername(username: String)(implicit session: Session): Option[User]
def updateUser(user: User)(implicit session: Session): Unit
def createUser(registration: UserRegistration)(implicit session: Session): User
def userWithId(id: UserId)(implicit session: Session): Option[User]
def userWithSsoId(id: SsoUserId)(implicit session: Session): Option[User]
def registerUsernameUpdate(oldUsername: String, updatedUser: User)(implicit session: Session): Unit
def userWithHistoryById(id: UserId)(implicit session: Session): Option[(User, List[PreviousUsername])]
def userWithHistoryByUsername(username: String)(implicit session: Session): List[(User, List[PreviousUsername])]
def userWithHistoryByName(firstName: String, lastName: String)(implicit session: Session): List[(User, List[PreviousUsername])]
}
trait JdbcUserRepository[Profile <: JdbcProfile] extends UserRepository[Profile] with TablesSupport[Profile, ZuulTables[Profile]] {
this: TimeSupport =>
import tables._
import driver.simple._
override def userWithUsername(username: String)(implicit session: Session): Option[User] =
users.filter(_.username === username).firstOption
override def createUser(reg: UserRegistration)(implicit session: Session): User = {
val now = clock.now()
// TODO: This is not needed any more as SSO is handling password checks; after the migration is done remove this field
val passwordHash = Random.alphanumeric.take(12).mkString
val user = User(UserId.Invalid, now, now, reg.username, reg.firstName, reg.lastName, passwordHash, reg.allowMarketing)
val id = (users returning users.map(_.id)) += user
user.copy(id = id)
}
override def userWithId(id: UserId)(implicit session: Session) = users.filter(_.id === id).firstOption
override def updateUser(user: User)(implicit session: Session): Unit = users.filter(_.id === user.id).update(user)
override def userWithSsoId(id: SsoUserId)(implicit session: Session) = users.filter(_.ssoId === id).firstOption
override def registerUsernameUpdate(oldUsername: String, updatedUser: User)(implicit session: Session): Unit = {
previousUsernames += PreviousUsername(PreviousUsernameId.invalid, clock.now(), updatedUser.id, oldUsername)
}
private def groupByUser(l: List[(User, Option[PreviousUsername])]): List[(User, List[PreviousUsername])] =
(l.groupBy(_._1).collect {
case (user, pairs) => (user, pairs.map(_._2).flatten)
}).toList
private def userWithHistoryQuery = (for {
(u, p) <- users leftJoin previousUsernames on(_.id === _.userId)
} yield (u, p)).sortBy(_._2.createdAt.desc).map { case (u, p) => (u, p.?) }
override def userWithHistoryById(id: UserId)(implicit session: Session): Option[(User, List[PreviousUsername])] =
groupByUser(userWithHistoryQuery.filter(_._1.id === id).list).headOption
def userWithHistoryByUsername(username: String)(implicit session: Session): List[(User, List[PreviousUsername])] = {
val q = userWithHistoryQuery
val uq = q.filter(_._1.username === username)
val u = uq.list
groupByUser(u)
}
def userWithHistoryByName(firstName: String, lastName: String)(implicit session: Session): List[(User, List[PreviousUsername])] =
groupByUser(userWithHistoryQuery.filter({
case (u, _) => u.firstName.toLowerCase === firstName.toLowerCase && u.lastName.toLowerCase === lastName.toLowerCase
}).list)
}
class DefaultUserRepository[Profile <: JdbcProfile](val tables: ZuulTables[Profile])(implicit val clock: Clock)
extends TimeSupport with JdbcUserRepository[Profile]
|
blinkboxbooks/auth-service.scala
|
src/main/scala/com/blinkbox/books/auth/server/data/UserRepository.scala
|
Scala
|
mit
| 4,042
|
package com.tribbloids.spookystuff.integration
import com.tribbloids.spookystuff.SpookyEnvFixture
import com.tribbloids.spookystuff.actions.{Trace, Wget}
import com.tribbloids.spookystuff.extractors.{FR, GenExtractor}
import com.tribbloids.spookystuff.rdd.FetchedDataset
import com.tribbloids.spookystuff.utils.CommonConst
/**
* move the entire webscraper.io/test-sites/ into a local dir for integration tests
* may use wayback machine:
* https://web.archive.org/web/20170707111752/http://webscraper.io:80/test-sites
*/
object SnapshotRunner extends SpookyEnvFixture.EnvBase {
val SPLITTER = "/http://webscraper.io(:80)?"
val SPLITTER_MIN = "/http://webscraper.io"
import scala.concurrent.duration._
val coolDown: Some[FiniteDuration] = Some(5.seconds)
implicit class FDSView(fd: FetchedDataset) {
import com.tribbloids.spookystuff.dsl.DSL._
import com.tribbloids.spookystuff.utils.CommonViews.StringView
val pathEncoding: GenExtractor[FR, String] = S.uri
.andFn { uri =>
val base = uri.split(SPLITTER).last
CommonConst.USER_TEMP_DIR \\\\ "test-sites" \\\\ base
}
def save(): FetchedDataset = {
fd.persist()
val originalVersion = fd.wget(
S.uri.andFn(
{ uri =>
try {
val Array(first, last) = uri.split(SPLITTER)
first + "id_" + SPLITTER_MIN + last
} catch {
case e: Exception =>
throw new UnsupportedOperationException(s"malformed URI: $uri", e)
}
}
),
cooldown = coolDown
)
originalVersion
.savePages_!(pathEncoding, overwrite = true)
fd
}
}
def main(args: Array[String]): Unit = {
import com.tribbloids.spookystuff.dsl.DSL._
val spooky = this.spooky
val keyBy: Trace => String = { trace =>
val uri = trace
.collectFirst {
case wget: Wget => wget.uri.value
}
.getOrElse("")
val base = uri.split(SPLITTER).last
base
}
spooky
.wget(
"https://web.archive.org/web/20170707111752/http://webscraper.io:80/test-sites"
)
.save()
.wgetJoin(S"h2.site-heading a", cooldown = coolDown, keyBy = keyBy)
.save()
.wgetExplore(S"div.sidebar-nav a", cooldown = coolDown, keyBy = keyBy)
.save()
.wgetExplore(S"ul.pagination a", cooldown = coolDown, keyBy = keyBy)
.save()
}
}
|
tribbloid/spookystuff
|
integration/src/test/scala/com/tribbloids/spookystuff/integration/SnapshotRunner.scala
|
Scala
|
apache-2.0
| 2,453
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.enablers
import org.scalactic.Requirements._
import scala.collection.JavaConverters._
import org.scalactic.Equality
import org.scalatest.FailureMessages
import scala.annotation.tailrec
import scala.collection.GenTraversable
/**
* Supertrait for typeclasses that enable <code>contain key</code> matcher syntax.
*
* <p>
* A <code>KeyMapping[M]</code> provides access to the "key mapping nature" of type <code>M</code> in such
* a way that <code>contain key</code> matcher syntax can be used with type <code>M</code>. A <code>M</code>
* can be any type for which <code>contain key</code> syntax makes sense. ScalaTest provides implicit implementations
* for <code>scala.collection.GenMap</code> and <code>java.util.Map</code>. You can enable the <code>contain key</code>
* matcher syntax on your own type <code>U</code> by defining a <code>KeyMapping[U]</code> for the type and making it
* available implicitly.
*
* <p>
* ScalaTest provides implicit <code>KeyMapping</code> instances for <code>scala.collection.GenMap</code>,
* and <code>java.util.Map</code> in the <code>KeyMapping</code> companion object.
* </p>
*/
trait KeyMapping[-M] {
/**
* Check if the passed <code>map</code> contains the passed <code>key</code>.
*
* @param map a map about which an assertion is being made
* @param key key of which should be contained in the passed map
* @return true if the passed map contains the passed key
*/
def containsKey(map: M, key: Any): Boolean
}
/**
* Companion object for <code>KeyMapping</code> that provides implicit implementations for <code>scala.collection.GenMap</code> and <code>java.util.Map</code>.
*/
object KeyMapping {
import scala.language.higherKinds
/**
* Enable <code>KeyMapping</code> implementation for <code>scala.collection.GenMap</code>.
*
* @param equality <a href="../../scalactic/Equality.html"><code>Equality</code></a> type class that is used to check equality of key in the <code>scala.collection.GenMap</code>
* @tparam K the type of the key in the <code>scala.collection.GenMap</code>
* @tparam V the type of the value in the <code>scala.collection.GenMap</code>
* @tparam MAP any subtype of <code>scala.collection.GenMap</code>
* @return <code>KeyMapping[MAP[K, V]]</code> that supports <code>scala.collection.GenMap</code> in <code>contain key</code> syntax
*/
implicit def keyMappingNatureOfGenMap[K, V, MAP[k, v] <: scala.collection.GenMap[k, v]](implicit equality: Equality[K]): KeyMapping[MAP[K, V]] =
new KeyMapping[MAP[K, V]] {
def containsKey(map: MAP[K, V], key: Any): Boolean = {
requireNonNull(map)
map.keySet.exists((k: K) => equality.areEqual(k, key))
}
}
import scala.language.implicitConversions
/**
* Implicit conversion that converts an <a href="../../scalactic/Equality.html"><code>Equality</code></a> of type <code>K</code>
* into <code>KeyMapping</code> of type <code>MAP[K, V]</code>, where <code>MAP</code> is a subtype of <code>scala.collection.GenMap</code>.
* This is required to support the explicit <a href="../../scalactic/Equality.html"><code>Equality</code></a> syntax, for example:
*
* <pre class="stHighlight">
* (Map("one" -> 1) should contain key "ONE") (after being lowerCased)
* </pre>
*
 * <code>(after being lowerCased)</code> will return an <a href="../../scalactic/Equality.html"><code>Equality[String]</code></a>
* and this implicit conversion will convert it into <code>KeyMapping[Map[String, Int]]</code>.
*
* @param equality <a href="../../scalactic/Equality.html"><code>Equality</code></a> of type <code>K</code>
* @tparam K the type of the key in the <code>scala.collection.GenMap</code>
* @tparam V the type of the value in the <code>scala.collection.GenMap</code>
* @tparam MAP any subtype of <code>scala.collection.GenMap</code>
* @return <code>KeyMapping</code> of type <code>MAP[K, V]</code>
*/
implicit def convertEqualityToGenMapKeyMapping[K, V, MAP[k, v] <: scala.collection.GenMap[k, v]](equality: Equality[K]): KeyMapping[MAP[K, V]] =
keyMappingNatureOfGenMap(equality)
/**
* Enable <code>KeyMapping</code> implementation for <code>java.util.Map</code>.
*
* @param equality <a href="../../scalactic/Equality.html"><code>Equality</code></a> type class that is used to check equality of key in the <code>java.util.Map</code>
* @tparam K the type of the key in the <code>java.util.Map</code>
* @tparam V the type of the value in the <code>java.util.Map</code>
* @tparam JMAP any subtype of <code>java.util.Map</code>
* @return <code>KeyMapping[JMAP[K, V]]</code> that supports <code>java.util.Map</code> in <code>contain</code> <code>key</code> syntax
*/
implicit def keyMappingNatureOfJavaMap[K, V, JMAP[k, v] <: java.util.Map[k, v]](implicit equality: Equality[K]): KeyMapping[JMAP[K, V]] =
new KeyMapping[JMAP[K, V]] {
def containsKey(jMap: JMAP[K, V], key: Any): Boolean = {
jMap.asScala.keySet.exists((k: K) => equality.areEqual(k, key))
}
}
/**
* Implicit conversion that converts an <a href="../../scalactic/Equality.html"><code>Equality</code></a> of type <code>K</code>
* into <code>KeyMapping</code> of type <code>JMAP[K, V]</code>, where <code>JMAP</code> is a subtype of <code>java.util.Map</code>.
* This is required to support the explicit <a href="../../scalactic/Equality.html"><code>Equality</code></a> syntax, for example:
*
* <pre class="stHighlight">
* val javaMap = new java.util.HashMap[String, Int]()
* javaMap.put("one", 1)
* (javaMap should contain key "ONE") (after being lowerCased)
* </pre>
*
 * <code>(after being lowerCased)</code> will return an <a href="../../scalactic/Equality.html"><code>Equality[String]</code></a>
* and this implicit conversion will convert it into <code>KeyMapping[java.util.HashMap[String, Int]]</code>.
*
* @param equality <a href="../../scalactic/Equality.html"><code>Equality</code></a> of type <code>K</code>
* @tparam K the type of the key in the <code>java.util.Map</code>
* @tparam V the type of the value in the <code>java.util.Map</code>
* @tparam JMAP any subtype of <code>java.util.Map</code>
* @return <code>KeyMapping</code> of type <code>JMAP[K, V]</code>
*/
implicit def convertEqualityToJavaMapKeyMapping[K, V, JMAP[k, v] <: java.util.Map[k, v]](equality: Equality[K]): KeyMapping[JMAP[K, V]] =
keyMappingNatureOfJavaMap(equality)
}
|
dotty-staging/scalatest
|
scalatest/src/main/scala/org/scalatest/enablers/KeyMapping.scala
|
Scala
|
apache-2.0
| 7,116
|
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package java.nio.charset
import java.nio.charset
final class StandardCharsets private ()
object StandardCharsets {
def US_ASCII: Charset = charset.US_ASCII
def ISO_8859_1: Charset = charset.ISO_8859_1
def UTF_8: Charset = charset.UTF_8
def UTF_16BE: Charset = charset.UTF_16BE
def UTF_16LE: Charset = charset.UTF_16LE
def UTF_16: Charset = charset.UTF_16
}
|
scala-js/scala-js
|
javalib/src/main/scala/java/nio/charset/StandardCharsets.scala
|
Scala
|
apache-2.0
| 646
|
package object hephaestus
extends IntInstances
with ListInstances
with MonocleInstances
with CatsInstances
|
to-ithaca/hephaestus
|
core/src/main/scala/hephaestus/package.scala
|
Scala
|
apache-2.0
| 123
|
import sbt._
import Keys._
import play.Project._
object ApplicationBuild extends Build {
val appName = "play-reactive-mongo"
val appVersion = "0.0.6"
val appDependencies = Seq(
"reactive_mongo_plugin" %% "reactive_mongo_plugin" % "0.0.43"
)
val main = play.Project(appName, appVersion, appDependencies).settings(
(Seq(
resolvers += "Sonatype Snapshots" at "http://oss.sonatype.org/content/repositories/snapshots/",
resolvers += Resolver.url("TPTeam Repository", url("http://tpteam.github.io/releases/"))(Resolver.ivyStylePatterns),
resolvers += Opts.resolver.sonatypeReleases,
resolvers += Resolver.sonatypeRepo("snapshots"))): _*)
}
|
TPTeam/reactive_mongo_example
|
project/Build.scala
|
Scala
|
mit
| 697
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.createTable
import java.io.{File, IOException}
import java.sql.Timestamp
import java.util
import org.apache.avro
import org.apache.commons.io.FileUtils
import org.apache.spark.sql.Row
import org.apache.spark.sql.test.util.QueryTest
import org.junit.Assert
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.metadata.datatype.DataTypes
import org.apache.carbondata.core.util.CarbonProperties
import org.apache.carbondata.sdk.file._
import scala.collection.JavaConverters._
class TestNonTransactionalCarbonTableJsonWriter extends QueryTest with BeforeAndAfterAll {
var writerPath = new File(this.getClass.getResource("/").getPath
+ "../."
+ "./target/SparkCarbonFileFormat/WriterOutput/").getCanonicalPath
//getCanonicalPath gives path with \, but the code expects /.
writerPath = writerPath.replace("\\", "/")
var backupdateFormat = CarbonProperties.getInstance().getProperty(
CarbonCommonConstants.CARBON_DATE_FORMAT, CarbonCommonConstants.CARBON_DATE_DEFAULT_FORMAT)
var backupTimeFormat = CarbonProperties.getInstance().getProperty(
CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
override def beforeAll(): Unit = {
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT,
CarbonCommonConstants.CARBON_DATE_DEFAULT_FORMAT)
sql("DROP TABLE IF EXISTS sdkOutputTable")
}
override def afterAll(): Unit = {
sql("DROP TABLE IF EXISTS sdkOutputTable")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
backupTimeFormat)
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT,
backupdateFormat)
}
/**
   * Utility function to read a whole file as a string.
   * Must not use this if the file is very huge, as it may result in memory exhaustion.
*
* @param filePath
* @return
*/
def readFromFile(filePath: String): String = {
val file = new File(filePath)
val uri = file.toURI
try {
val bytes = java.nio.file.Files.readAllBytes(java.nio.file.Paths.get(uri))
new String(bytes, "UTF-8")
} catch {
case e: IOException =>
e.printStackTrace()
return "ERROR loading file " + filePath
}
}
private def writeCarbonFileFromJsonRowInput(jsonRow: String,
carbonSchema: Schema) = {
try {
var options: util.Map[String, String] = Map("bAd_RECords_action" -> "FAIL", "quotechar" -> "\"").asJava
val writer = CarbonWriter.builder
.outputPath(writerPath)
.uniqueIdentifier(System.currentTimeMillis())
.withLoadOptions(options)
.withJsonInput(carbonSchema).writtenBy("TestNonTransactionalCarbonTableJsonWriter").build()
writer.write(jsonRow)
writer.close()
}
catch {
case e: Exception => {
e.printStackTrace()
Assert.fail(e.getMessage)
}
}
}
// test all primitive type
test("Read sdk writer Json output of all primitive type") {
FileUtils.deleteDirectory(new File(writerPath))
var dataPath: String = null
dataPath = resourcesPath + "/jsonFiles/data/allPrimitiveType.json"
val fields = new Array[Field](9)
fields(0) = new Field("stringField", DataTypes.STRING)
fields(1) = new Field("intField", DataTypes.INT)
fields(2) = new Field("shortField", DataTypes.SHORT)
fields(3) = new Field("longField", DataTypes.LONG)
fields(4) = new Field("doubleField", DataTypes.DOUBLE)
fields(5) = new Field("boolField", DataTypes.BOOLEAN)
fields(6) = new Field("dateField", DataTypes.DATE)
fields(7) = new Field("timeField", DataTypes.TIMESTAMP)
fields(8) = new Field("decimalField", DataTypes.createDecimalType(8, 2))
val jsonRow = readFromFile(dataPath)
writeCarbonFileFromJsonRowInput(jsonRow, new Schema(fields))
assert(new File(writerPath).exists())
sql("DROP TABLE IF EXISTS sdkOutputTable")
sql(
s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY 'carbondata' LOCATION
|'$writerPath' """.stripMargin)
checkAnswer(sql("select * from sdkOutputTable"),
Seq(Row("ajantha\"bhat\"",
26,
26,
1234567,
23.3333,
false,
java.sql.Date.valueOf("2019-03-02"),
Timestamp.valueOf("2019-02-12 03:03:34"),
55.35)))
sql("DROP TABLE sdkOutputTable")
// drop table should not delete the files
assert(new File(writerPath).listFiles().length > 0)
FileUtils.deleteDirectory(new File(writerPath))
}
// test all primitive type with bad record
test("Read sdk writer Json output of all primitive type with Bad record") {
FileUtils.deleteDirectory(new File(writerPath))
var dataPath: String = null
dataPath = resourcesPath + "/jsonFiles/data/allPrimitiveTypeBadRecord.json"
val fields = new Array[Field](9)
fields(0) = new Field("stringField", DataTypes.STRING)
fields(1) = new Field("intField", DataTypes.INT)
fields(2) = new Field("shortField", DataTypes.SHORT)
fields(3) = new Field("longField", DataTypes.LONG)
fields(4) = new Field("doubleField", DataTypes.DOUBLE)
fields(5) = new Field("boolField", DataTypes.BOOLEAN)
fields(6) = new Field("dateField", DataTypes.DATE)
fields(7) = new Field("timeField", DataTypes.TIMESTAMP)
fields(8) = new Field("decimalField", DataTypes.createDecimalType(8, 2))
val jsonRow = readFromFile(dataPath)
var exception = intercept[java.lang.AssertionError] {
writeCarbonFileFromJsonRowInput(jsonRow, new Schema(fields))
}
assert(exception.getMessage()
.contains("Data load failed due to bad record"))
FileUtils.deleteDirectory(new File(writerPath))
}
// test array Of array Of array Of Struct
test("Read sdk writer Json output of array Of array Of array Of Struct") {
FileUtils.deleteDirectory(new File(writerPath))
var dataPath = resourcesPath + "/jsonFiles/data/arrayOfarrayOfarrayOfStruct.json"
// for testing purpose get carbonSchema from avro schema.
// Carbon schema will be passed without AVRO in the real scenarios
var schemaPath = resourcesPath + "/jsonFiles/schema/arrayOfarrayOfarrayOfStruct.avsc"
val avroSchema = new avro.Schema.Parser().parse(readFromFile(schemaPath))
val carbonSchema = AvroCarbonWriter.getCarbonSchemaFromAvroSchema(avroSchema)
val jsonRow = readFromFile(dataPath)
writeCarbonFileFromJsonRowInput(jsonRow, carbonSchema)
assert(new File(writerPath).exists())
sql("DROP TABLE IF EXISTS sdkOutputTable")
sql(
s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY 'carbondata' LOCATION
|'$writerPath' """.stripMargin)
sql("select * from sdkOutputTable").show(false)
/*
* +-------+---+-----------------------------------------+
|name |age|BuildNum |
+-------+---+-----------------------------------------+
|ajantha|26 |[WrappedArray(WrappedArray([abc,city1]))]|
+-------+---+-----------------------------------------+
*
*/
sql("DROP TABLE sdkOutputTable")
// drop table should not delete the files
assert(new File(writerPath).listFiles().length > 0)
FileUtils.deleteDirectory(new File(writerPath))
}
// test array Of Struct Of Struct
test("Read sdk writer Json output of array Of Struct Of Struct") {
FileUtils.deleteDirectory(new File(writerPath))
var dataPath = resourcesPath + "/jsonFiles/data/arrayOfStructOfStruct.json"
// for testing purpose get carbonSchema from avro schema.
// Carbon schema will be passed without AVRO in the real scenarios
var schemaPath = resourcesPath + "/jsonFiles/schema/arrayOfStructOfStruct.avsc"
val avroSchema = new avro.Schema.Parser().parse(readFromFile(schemaPath))
val carbonSchema = AvroCarbonWriter.getCarbonSchemaFromAvroSchema(avroSchema)
val jsonRow = readFromFile(dataPath)
writeCarbonFileFromJsonRowInput(jsonRow, carbonSchema)
assert(new File(writerPath).exists())
sql("DROP TABLE IF EXISTS sdkOutputTable")
sql(
s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY 'carbondata' LOCATION
|'$writerPath' """.stripMargin)
sql("select * from sdkOutputTable").show(false)
/*
* +----+---+-------------------+
* |name|age|doorNum |
* +----+---+-------------------+
* |bob |10 |[[abc,city1,[a,1]]]|
* +----+---+-------------------+
* */
sql("DROP TABLE sdkOutputTable")
// drop table should not delete the files
assert(new File(writerPath).listFiles().length > 0)
FileUtils.deleteDirectory(new File(writerPath))
}
// test struct of all types
test("Read sdk writer Json output of Struct of all types") {
FileUtils.deleteDirectory(new File(writerPath))
var dataPath = resourcesPath + "/jsonFiles/data/StructOfAllTypes.json"
// for testing purpose get carbonSchema from avro schema.
// Carbon schema will be passed without AVRO in the real scenarios
var schemaPath = resourcesPath + "/jsonFiles/schema/StructOfAllTypes.avsc"
val avroSchema = new avro.Schema.Parser().parse(readFromFile(schemaPath))
val carbonSchema = AvroCarbonWriter.getCarbonSchemaFromAvroSchema(avroSchema)
val jsonRow = readFromFile(dataPath)
writeCarbonFileFromJsonRowInput(jsonRow, carbonSchema)
assert(new File(writerPath).exists())
sql("DROP TABLE IF EXISTS sdkOutputTable")
sql(
s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY 'carbondata' LOCATION
|'$writerPath' """.stripMargin)
assert(sql("select * from sdkOutputTable").collectAsList().toString.equals(
"[[[bob,10,12345678,123400.78,true,WrappedArray(1, 2, 3, 4, 5, 6),WrappedArray(abc, def)," +
"WrappedArray(1234567, 2345678),WrappedArray(1.0, 2.0, 33.33),WrappedArray(true, false, " +
"false, true)]]]"))
sql("DROP TABLE sdkOutputTable")
// drop table should not delete the files
assert(new File(writerPath).listFiles().length > 0)
FileUtils.deleteDirectory(new File(writerPath))
}
// test : One element as null
test("Read sdk writer Json output of primitive type with one element as null") {
FileUtils.deleteDirectory(new File(writerPath))
var dataPath: String = null
dataPath = resourcesPath + "/jsonFiles/data/PrimitiveTypeWithNull.json"
val fields = new Array[Field](2)
fields(0) = new Field("stringField", DataTypes.STRING)
fields(1) = new Field("intField", DataTypes.INT)
val jsonRow = readFromFile(dataPath)
writeCarbonFileFromJsonRowInput(jsonRow, new Schema(fields))
assert(new File(writerPath).exists())
sql("DROP TABLE IF EXISTS sdkOutputTable")
sql(
s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY 'carbondata' LOCATION
|'$writerPath' """.stripMargin)
checkAnswer(sql("select * from sdkOutputTable"),
Seq(Row(null,
26)))
sql("DROP TABLE sdkOutputTable")
// drop table should not delete the files
assert(new File(writerPath).listFiles().length > 0)
FileUtils.deleteDirectory(new File(writerPath))
}
// test : Schema length is greater than array length
test("Read Json output of primitive type with Schema length is greater than array length") {
FileUtils.deleteDirectory(new File(writerPath))
var dataPath: String = null
dataPath = resourcesPath + "/jsonFiles/data/PrimitiveTypeWithNull.json"
val fields = new Array[Field](5)
fields(0) = new Field("stringField", DataTypes.STRING)
fields(1) = new Field("intField", DataTypes.INT)
fields(2) = new Field("shortField", DataTypes.SHORT)
fields(3) = new Field("longField", DataTypes.LONG)
fields(4) = new Field("doubleField", DataTypes.DOUBLE)
val jsonRow = readFromFile(dataPath)
writeCarbonFileFromJsonRowInput(jsonRow, new Schema(fields))
assert(new File(writerPath).exists())
sql("DROP TABLE IF EXISTS sdkOutputTable")
sql(
s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY 'carbondata' LOCATION
|'$writerPath' """.stripMargin)
checkAnswer(sql("select * from sdkOutputTable"),
Seq(Row(null, 26, null, null, null)))
sql("DROP TABLE sdkOutputTable")
// drop table should not delete the files
assert(new File(writerPath).listFiles().length > 0)
FileUtils.deleteDirectory(new File(writerPath))
}
// test : Schema length is lesser than array length
test("Read Json output of primitive type with Schema length is lesser than array length") {
FileUtils.deleteDirectory(new File(writerPath))
var dataPath: String = null
dataPath = resourcesPath + "/jsonFiles/data/allPrimitiveType.json"
val fields = new Array[Field](2)
fields(0) = new Field("stringField", DataTypes.STRING)
fields(1) = new Field("intField", DataTypes.INT)
val jsonRow = readFromFile(dataPath)
writeCarbonFileFromJsonRowInput(jsonRow, new Schema(fields))
assert(new File(writerPath).exists())
sql("DROP TABLE IF EXISTS sdkOutputTable")
sql(
s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY 'carbondata' LOCATION
|'$writerPath' """.stripMargin)
checkAnswer(sql("select * from sdkOutputTable"),
Seq(Row("ajantha\"bhat\"", 26)))
sql("DROP TABLE sdkOutputTable")
// drop table should not delete the files
assert(new File(writerPath).listFiles().length > 0)
FileUtils.deleteDirectory(new File(writerPath))
}
}
|
manishgupta88/carbondata
|
integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableJsonWriter.scala
|
Scala
|
apache-2.0
| 14,587
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.template.recommendation
import org.apache.predictionio.controller.PPreparator
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD
class Preparator
extends PPreparator[TrainingData, PreparedData] {
def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
new PreparedData(ratings = trainingData.ratings)
}
}
class PreparedData(
val ratings: RDD[Rating]
) extends Serializable
|
PredictionIO/PredictionIO
|
tests/pio_tests/engines/recommendation-engine/src/main/scala/Preparator.scala
|
Scala
|
apache-2.0
| 1,281
|
package nikhil.tcp
/**
* @author Nikhil
*/
package object client {
type Host = String
type Port = Int
type NumberOfClients = Int
}
|
nikhilRP/akka_requests
|
src/main/scala/nikhil/tcp/client/package.scala
|
Scala
|
apache-2.0
| 140
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming.state
import org.apache.spark.sql.internal.SQLConf
/** A class that contains configuration parameters for [[StateStore]]s. */
class StateStoreConf(@transient private val sqlConf: SQLConf)
extends Serializable {
def this() = this(new SQLConf)
/**
* Minimum number of delta files in a chain after which HDFSBackedStateStore will
* consider generating a snapshot.
*/
val minDeltasForSnapshot: Int = sqlConf.stateStoreMinDeltasForSnapshot
/** Minimum versions a State Store implementation should retain to allow rollbacks */
val minVersionsToRetain: Int = sqlConf.minBatchesToRetain
/**
* Optional fully qualified name of the subclass of [[StateStoreProvider]]
* managing state data. That is, the implementation of the State Store to use.
*/
val providerClass: Option[String] = sqlConf.stateStoreProviderClass
/**
* Additional configurations related to state store. This will capture all configs in
* SQLConf that start with `spark.sql.streaming.stateStore.` */
val confs: Map[String, String] =
sqlConf.getAllConfs.filter(_._1.startsWith("spark.sql.streaming.stateStore."))
}
object StateStoreConf {
val empty = new StateStoreConf()
def apply(conf: SQLConf): StateStoreConf = new StateStoreConf(conf)
}
|
saturday-shi/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/StateStoreConf.scala
|
Scala
|
apache-2.0
| 2,111
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.stream.table
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.util.StreamingMultipleProgramsTestBase
import org.apache.flink.table.api.TableEnvironment
import org.apache.flink.table.api.scala._
import org.apache.flink.table.runtime.utils.{StreamITCase, StreamTestData}
import org.apache.flink.types.Row
import org.junit.Assert._
import org.junit.Test
import scala.collection.mutable
class SetOperatorsITCase extends StreamingMultipleProgramsTestBase {
@Test
def testUnion(): Unit = {
val env = StreamExecutionEnvironment.getExecutionEnvironment
val tEnv = TableEnvironment.getTableEnvironment(env)
StreamITCase.testResults = mutable.MutableList()
val ds1 = StreamTestData.getSmall3TupleDataStream(env).toTable(tEnv, 'a, 'b, 'c)
val ds2 = StreamTestData.getSmall3TupleDataStream(env).toTable(tEnv, 'd, 'e, 'f)
val unionDs = ds1.unionAll(ds2).select('c)
val results = unionDs.toAppendStream[Row]
results.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = mutable.MutableList(
"Hi", "Hello", "Hello world", "Hi", "Hello", "Hello world")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testUnionWithFilter(): Unit = {
val env = StreamExecutionEnvironment.getExecutionEnvironment
val tEnv = TableEnvironment.getTableEnvironment(env)
StreamITCase.testResults = mutable.MutableList()
val ds1 = StreamTestData.getSmall3TupleDataStream(env).toTable(tEnv, 'a, 'b, 'c)
val ds2 = StreamTestData.get5TupleDataStream(env).toTable(tEnv, 'a, 'b, 'd, 'c, 'e)
val unionDs = ds1.unionAll(ds2.select('a, 'b, 'c)).filter('b < 2).select('c)
val results = unionDs.toAppendStream[Row]
results.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = mutable.MutableList("Hi", "Hallo")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testUnionWithAnyType(): Unit = {
val env = StreamExecutionEnvironment.getExecutionEnvironment
val tEnv = TableEnvironment.getTableEnvironment(env)
StreamITCase.testResults = mutable.MutableList()
val s1 = env.fromElements((1, new NonPojo), (2, new NonPojo)).toTable(tEnv, 'a, 'b)
val s2 = env.fromElements((3, new NonPojo), (4, new NonPojo)).toTable(tEnv, 'a, 'b)
val result = s1.unionAll(s2).toAppendStream[Row]
result.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = mutable.MutableList("1,{}", "2,{}", "3,{}", "4,{}")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
class NonPojo {
val x = new java.util.HashMap[String, String]()
override def toString: String = x.toString
}
}
|
PangZhi/flink
|
flink-libraries/flink-table/src/test/scala/org/apache/flink/table/runtime/stream/table/SetOperatorsITCase.scala
|
Scala
|
apache-2.0
| 3,639
|
package main
import models.Question
import operations.network.analysis.Metrics
import operations.persistance.Neo4j
import operations.recommendations.Recommender
import operations.stack.exchange.DownloadingProcedures
object Main extends App {
/**
* Example function for showing tag similarity between Java, Scala and Clojure
*/
def similarityBetweenJavaScalaClojure(): Unit = {
val java = "java"
val scala = "scala"
val clojure = "clojure"
DownloadingProcedures.startDownloadingProcess()
DownloadingProcedures.forTagSimilarityMetrics(List(java, clojure, scala))
DownloadingProcedures.finishDownloadingProcess()
Neo4j.openConnection()
println("Tag similarity between:")
print("Java & Scala: ")
println(Metrics.tagSimilarity(java, scala))
print("Clojure & Scala: ")
println(Metrics.tagSimilarity(clojure, scala))
print("Java & Clojure: ")
println(Metrics.tagSimilarity(java, clojure))
Neo4j.closeConnection()
}
def similarityBetweenJavaScalaClojureUsingPMI(): Unit = {
val java = "java"
val scala = "scala"
val clojure = "clojure"
DownloadingProcedures.startDownloadingProcess()
DownloadingProcedures.forPMIMetrics(List(java, clojure, scala))
DownloadingProcedures.finishDownloadingProcess()
Neo4j.openConnection()
println("Point Mutual Information:")
print("Java & Scala: ")
println(Metrics.pointMutualInformation(java, scala))
print("Clojure & Scala: ")
println(Metrics.pointMutualInformation(clojure, scala))
print("Java & Clojure: ")
println(Metrics.pointMutualInformation(java, clojure))
Neo4j.closeConnection()
}
/**
* Example function for showing interesting "random" questions to specific
* @param tagName - name of tag for which you want interesting question. Check if there is tag with that name in
* StackExchange before searching it
*/
def recommendMeQuestionForTag(tagName: String): Unit = {
// DownloadingProcedures.startDownloadingProcess()
// try {
// DownloadingProcedures.downloadRecommenderData(tagName)
// } catch {
// case e: Exception => e.printStackTrace()
// }
// DownloadingProcedures.finishDownloadingProcess()
println("Recommendation for: " + tagName)
val recommendedQuestions: List[Question] = Recommender.recommendQuestionsForTag(tagName, size = 10, depth = 3)
println("Questions can be reached at links:")
for (question <- recommendedQuestions) {
println(question.link)
}
}
//main program
println("Starting")
// recommendMeQuestionForTag("artificial-intelligence")
recommendMeQuestionForTag("game-ai")
println("Completed")
}
|
QuietOne/StackExchangeAnalysis
|
src/main/scala/main/Main.scala
|
Scala
|
artistic-2.0
| 2,703
|
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package docs.scaladsl.services.headerfilters
package compose {
import com.lightbend.lagom.scaladsl.api.transport.HeaderFilter
import com.lightbend.lagom.scaladsl.api.transport.RequestHeader
import com.lightbend.lagom.scaladsl.api.transport.ResponseHeader
import com.lightbend.lagom.scaladsl.api.Service
import com.lightbend.lagom.scaladsl.api.ServiceCall
import org.slf4j.LoggerFactory
//#verbose-filter
class VerboseFilter(name: String) extends HeaderFilter {
private val log = LoggerFactory.getLogger(getClass)
def transformClientRequest(request: RequestHeader) = {
log.debug(name + " - transforming Client Request")
request
}
def transformServerRequest(request: RequestHeader) = {
log.debug(name + " - transforming Server Request")
request
}
def transformServerResponse(response: ResponseHeader, request: RequestHeader) = {
log.debug(name + " - transforming Server Response")
response
}
def transformClientResponse(response: ResponseHeader, request: RequestHeader) = {
log.debug(name + " - transforming Client Response")
response
}
}
//#verbose-filter
trait HelloService extends Service {
def sayHello: ServiceCall[String, String]
//#header-filter-composition
def descriptor = {
import Service._
named("hello")
.withCalls(
call(sayHello)
)
.withHeaderFilter(
HeaderFilter.composite(
new VerboseFilter("Foo"),
new VerboseFilter("Bar")
)
)
}
//#header-filter-composition
}
}
|
lagom/lagom
|
docs/manual/scala/guide/services/code/HeaderFilters.scala
|
Scala
|
apache-2.0
| 1,678
|
/*
* Copyright (C) 2020 MapRoulette contributors (see CONTRIBUTORS.md).
* Licensed under the Apache License, Version 2.0 (see LICENSE).
*/
package org.maproulette.framework.model
import org.apache.commons.lang3.StringUtils
import org.joda.time.DateTime
import org.maproulette.data.{ChallengeType, ItemType}
import org.maproulette.exception.InvalidException
import org.maproulette.framework.psql.CommonField
import org.maproulette.framework.model.{Identifiable, Task}
import org.maproulette.models.BaseObject
import org.maproulette.models.utils.{ChallengeReads, ChallengeWrites}
import play.api.libs.json._
import org.maproulette.utils.Utils
case class PriorityRule(operator: String, key: String, value: String, valueType: String) {
def doesMatch(properties: Map[String, String], task: Task): Boolean = {
// For a "bounds" match we need to see if task location is in given bounds value
if (valueType == "bounds") {
return locationInBounds(operator, value, task.location)
}
properties.find(pair => StringUtils.equalsIgnoreCase(pair._1, key)) match {
case Some(v) =>
valueType match {
case "string" =>
operator match {
case "equal" => StringUtils.equals(v._2, value)
case "not_equal" => !StringUtils.equals(v._2, value)
case "contains" => StringUtils.contains(v._2, value)
case "not_contains" => !StringUtils.contains(v._2, value)
case "is_empty" => StringUtils.isEmpty(v._2)
case "is_not_empty" => StringUtils.isNotEmpty(v._2)
case _ => throw new InvalidException(s"Operator $operator not supported")
}
case "double" =>
operator match {
case "==" => v._2.toDouble == value.toDouble
case "!=" => v._2.toDouble != value.toDouble
case "<" => v._2.toDouble < value.toDouble
case "<=" => v._2.toDouble <= value.toDouble
case ">" => v._2.toDouble > value.toDouble
case ">=" => v._2.toDouble >= value.toDouble
case _ => throw new InvalidException(s"Operator $operator not supported")
}
case "integer" | "long" =>
operator match {
case "==" => v._2.toLong == value.toLong
case "!=" => v._2.toLong != value.toLong
case "<" => v._2.toLong < value.toLong
case "<=" => v._2.toLong <= value.toLong
case ">" => v._2.toLong > value.toLong
case ">=" => v._2.toLong >= value.toLong
case _ => throw new InvalidException(s"Operator $operator not supported")
}
case x => throw new InvalidException(s"Type $x not supported by Priority Rules")
}
case None => false
}
}
private def locationInBounds(
operator: String,
value: String,
location: Option[String]
): Boolean = {
// eg. Some({"type":"Point","coordinates":[-120.18699365,48.47991855]})
location match {
case Some(loc) =>
// MinX,MinY,MaxX,MaxY
val bbox: List[Double] = Utils.toDoubleList(value).getOrElse(List(0, 0, 0, 0))
val coordinates = (Json.parse(loc) \\ "coordinates").as[List[Double]]
if (coordinates.length == 2) {
val x = coordinates(0)
val y = coordinates(1)
val isInBBox = (x > bbox(0) && x < bbox(2) && y > bbox(1) && y < bbox(3))
operator match {
case "contains" => return isInBBox // loc is in bbox
case "not_contains" => return !isInBBox // loc is not in bbox
}
}
case _ => // no location to match against
}
return false
}
}
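// Illustrative use of PriorityRule (a sketch; the key, value and task below are hypothetical examples,
// not taken from the original source):
//   val rule = PriorityRule(operator = "equal", key = "highway", value = "footway", valueType = "string")
//   rule.doesMatch(Map("highway" -> "footway"), someTask)   // => true; the task is only consulted for "bounds" rules
//   rule.doesMatch(Map("highway" -> "primary"), someTask)   // => false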
case class ChallengeGeneral(
owner: Long,
parent: Long,
instruction: String,
difficulty: Int = Challenge.DIFFICULTY_NORMAL,
blurb: Option[String] = None,
enabled: Boolean = false,
featured: Boolean = false,
cooperativeType: Int = 0,
popularity: Option[Int] = None,
checkinComment: String = "",
checkinSource: String = "",
changesetUrl: Boolean = false,
virtualParents: Option[List[Long]] = None,
requiresLocal: Boolean = false
) extends DefaultWrites
case class ChallengeCreation(
overpassQL: Option[String] = None,
remoteGeoJson: Option[String] = None,
overpassTargetType: Option[String] = None
) extends DefaultWrites
case class ChallengePriority(
defaultPriority: Int = Challenge.PRIORITY_HIGH,
highPriorityRule: Option[String] = None,
mediumPriorityRule: Option[String] = None,
lowPriorityRule: Option[String] = None
) extends DefaultWrites
case class ChallengeExtra(
defaultZoom: Int = Challenge.DEFAULT_ZOOM,
minZoom: Int = Challenge.MIN_ZOOM,
maxZoom: Int = Challenge.MAX_ZOOM,
defaultBasemap: Option[Int] = None,
defaultBasemapId: Option[String] = None,
customBasemap: Option[String] = None,
updateTasks: Boolean = false,
exportableProperties: Option[String] = None,
osmIdProperty: Option[String] = None,
preferredTags: Option[String] = None,
preferredReviewTags: Option[String] = None,
limitTags: Boolean = false, // If true, only preferred tags should be used
limitReviewTags: Boolean = false, // If true, only preferred review tags should be used
taskStyles: Option[String] = None,
taskBundleIdProperty: Option[String] = None,
isArchived: Boolean = false,
systemArchivedAt: Option[DateTime] = None,
presets: Option[List[String]] = None
) extends DefaultWrites
case class ChallengeListing(
id: Long,
parent: Long,
name: String,
enabled: Boolean,
virtualParents: Option[Array[Long]] = None,
status: Option[Int],
isArchived: Boolean
)
/**
 * The Challenge case class nests its form objects (general, creation, priority, extra) as there is a limit
 * on the number of elements allowed in the form mapping.
*/
case class Challenge(
override val id: Long,
override val name: String,
override val created: DateTime,
override val modified: DateTime,
override val description: Option[String] = None,
deleted: Boolean = false,
infoLink: Option[String] = None,
general: ChallengeGeneral,
creation: ChallengeCreation,
priority: ChallengePriority,
extra: ChallengeExtra,
status: Option[Int] = Some(0),
statusMessage: Option[String] = None,
lastTaskRefresh: Option[DateTime] = None,
dataOriginDate: Option[DateTime] = None,
location: Option[String] = None,
bounding: Option[String] = None,
completionPercentage: Option[Int] = Some(0),
tasksRemaining: Option[Int] = Some(0)
) extends BaseObject[Long]
with DefaultWrites
with Identifiable {
override val itemType: ItemType = ChallengeType()
def isHighPriority(properties: Map[String, String], task: Task): Boolean =
this.matchesRule(priority.highPriorityRule, properties, task)
def isMediumPriority(properties: Map[String, String], task: Task): Boolean =
this.matchesRule(priority.mediumPriorityRule, properties, task)
def isLowRulePriority(properties: Map[String, String], task: Task): Boolean =
this.matchesRule(priority.lowPriorityRule, properties, task)
private def matchesRule(
rule: Option[String],
properties: Map[String, String],
task: Task
): Boolean = {
rule match {
case Some(r) => matchesJSONRule(Json.parse(r), properties, task)
case None => false
}
}
private def matchesJSONRule(
ruleJSON: JsValue,
properties: Map[String, String],
task: Task
): Boolean = {
val cnf = (ruleJSON \\ "condition").asOpt[String] match {
case Some("OR") => false
case _ => true
}
implicit val reads = Writes
val rules = (ruleJSON \\ "rules").as[List[JsValue]]
val matched = rules.filter(jsValue => {
(jsValue \\ "rules").asOpt[JsValue] match {
case Some(nestedRule) => matchesJSONRule(jsValue, properties, task)
case _ =>
val keyValue = (jsValue \\ "value").as[String].split("\\\\.", 2)
val valueType = (jsValue \\ "type").as[String]
val rule =
PriorityRule((jsValue \\ "operator").as[String], keyValue(0), keyValue(1), valueType)
rule.doesMatch(properties, task)
}
})
if (cnf && matched.size == rules.size) {
true
} else if (!cnf && matched.nonEmpty) {
true
} else {
false
}
}
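  // Note on the matching logic above (descriptive comment, not from the original source): with
  // condition "OR" the rule set matches as soon as one sub-rule matches; any other condition
  // (including the default "AND") requires every sub-rule to match.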
}
object Challenge extends CommonField {
val writes = new Object with ChallengeWrites
val reads = new Object with ChallengeReads
val DIFFICULTY_EASY = 1
val DIFFICULTY_NORMAL = 2
val DIFFICULTY_EXPERT = 3
val PRIORITY_HIGH = 0
val PRIORITY_HIGH_NAME = "High"
val PRIORITY_MEDIUM = 1
val PRIORITY_MEDIUM_NAME = "Medium"
val PRIORITY_LOW = 2
val PRIORITY_LOW_NAME = "Low"
val priorityMap = Map(
PRIORITY_HIGH -> PRIORITY_HIGH_NAME,
PRIORITY_MEDIUM -> PRIORITY_MEDIUM_NAME,
PRIORITY_LOW -> PRIORITY_LOW_NAME
)
val DEFAULT_ZOOM = 13
val MIN_ZOOM = 1
val MAX_ZOOM = 19
val KEY_PARENT = "parent"
val KEY_VIRTUAL_PARENTS = "virtualParents"
val STATUS_NA = 0
val STATUS_BUILDING = 1
val STATUS_FAILED = 2
val STATUS_READY = 3
val STATUS_PARTIALLY_LOADED = 4
val STATUS_FINISHED = 5
val STATUS_DELETING_TASKS = 6
// COOPERATIVE TYPES
val COOPERATIVE_NONE = 0
val COOPERATIVE_TAGS = 1
val COOPERATIVE_CHANGEFILE = 2
// CHALLENGE FIELDS
val TABLE = "challenges"
val FIELD_PARENT_ID = "parent_id"
val FIELD_ENABLED = "enabled"
val FIELD_ARCHIVED = "is_archived"
val FIELD_STATUS = "status"
val FIELD_DELETED = "deleted"
/**
* This will check to make sure that the rule string is fully valid.
*
    * @param rule the optional JSON rule string to validate
    * @return true if the rule string parses and every contained rule is valid
*/
def isValidRule(rule: Option[String]): Boolean = {
rule match {
case Some(r) if StringUtils.isNotEmpty(r) && !StringUtils.equalsIgnoreCase(r, "{}") =>
isValidRuleJSON(Json.parse(r))
case _ => false
}
}
/**
    * This will check to make sure that the json rule is fully valid. The simple check just makes sure
    * that every rule value can be split by "." into two values, with support for nested rules.
    *
    * @param ruleJSON the parsed JSON rule to validate
    * @return true if every rule value splits into a key/value pair, recursing into any nested rules
*/
def isValidRuleJSON(ruleJSON: JsValue): Boolean = {
val rules = (ruleJSON \\ "rules")
.as[List[JsValue]]
.map(jsValue => {
(jsValue \\ "rules").asOpt[JsValue] match {
case Some(nestedRule) => isValidRuleJSON(jsValue)
case _ =>
val keyValue = (jsValue \\ "value").as[String].split("\\\\.", 2)
keyValue.size == 2
}
})
!rules.contains(false)
}
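  // Illustrative rule shape accepted by isValidRule/isValidRuleJSON (the key and value are hypothetical):
  //   {"condition":"AND","rules":[{"value":"highway.footway","type":"string","operator":"equal"}]}
  // Each "value" must split on the first "." into a property key and a comparison value; entries that
  // carry their own "rules" array are validated recursively.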
def emptyChallenge(ownerId: Long, parentId: Long): Challenge = Challenge(
-1,
"",
DateTime.now(),
DateTime.now(),
None,
false,
None,
ChallengeGeneral(-1, -1, ""),
ChallengeCreation(),
ChallengePriority(),
ChallengeExtra()
)
}
case class ArchivableChallenge(
val id: Long,
val created: DateTime,
val name: String = "",
val deleted: Boolean,
val isArchived: Boolean
)
case class ArchivableTask(
val id: Long,
val modified: DateTime,
val status: Long
)
|
mgcuthbert/maproulette2
|
app/org/maproulette/framework/model/Challenge.scala
|
Scala
|
apache-2.0
| 11,433
|
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala
package math
import scala.language.implicitConversions
import java.math.{
BigDecimal => BigDec,
MathContext,
RoundingMode => JRM,
}
import scala.collection.immutable.NumericRange
object BigDecimal {
private final val maximumHashScale = 4934 // Quit maintaining hash identity with BigInt beyond this scale
private final val hashCodeNotComputed = 0x5D50690F // Magic value (happens to be "BigDecimal" old MurmurHash3 value)
private final val deci2binary = 3.3219280948873626 // Ratio of log(10) to log(2)
private[this] val minCached = -512
private[this] val maxCached = 512
val defaultMathContext = MathContext.DECIMAL128
/** Cache only for defaultMathContext using BigDecimals in a small range. */
private[this] lazy val cache = new Array[BigDecimal](maxCached - minCached + 1)
object RoundingMode extends Enumeration {
// Annoying boilerplate to ensure consistency with java.math.RoundingMode
type RoundingMode = Value
val UP = Value(JRM.UP.ordinal)
val DOWN = Value(JRM.DOWN.ordinal)
val CEILING = Value(JRM.CEILING.ordinal)
val FLOOR = Value(JRM.FLOOR.ordinal)
val HALF_UP = Value(JRM.HALF_UP.ordinal)
val HALF_DOWN = Value(JRM.HALF_DOWN.ordinal)
val HALF_EVEN = Value(JRM.HALF_EVEN.ordinal)
val UNNECESSARY = Value(JRM.UNNECESSARY.ordinal)
}
/** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`, rounding if necessary. */
def decimal(d: Double, mc: MathContext): BigDecimal =
new BigDecimal(new BigDec(java.lang.Double.toString(d), mc), mc)
/** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`. */
def decimal(d: Double): BigDecimal = decimal(d, defaultMathContext)
/** Constructs a `BigDecimal` using the decimal text representation of `Float` value `f`, rounding if necessary.
* Note that `BigDecimal.decimal(0.1f) != 0.1f` since equality agrees with the `Double` representation, and
* `0.1 != 0.1f`.
*/
def decimal(f: Float, mc: MathContext): BigDecimal =
new BigDecimal(new BigDec(java.lang.Float.toString(f), mc), mc)
/** Constructs a `BigDecimal` using the decimal text representation of `Float` value `f`.
* Note that `BigDecimal.decimal(0.1f) != 0.1f` since equality agrees with the `Double` representation, and
* `0.1 != 0.1f`.
*/
def decimal(f: Float): BigDecimal = decimal(f, defaultMathContext)
// This exists solely to avoid conversion from Int/Long to Float, screwing everything up.
/** Constructs a `BigDecimal` from a `Long`, rounding if necessary. This is identical to `BigDecimal(l, mc)`. */
def decimal(l: Long, mc: MathContext): BigDecimal = apply(l, mc)
// This exists solely to avoid conversion from Int/Long to Float, screwing everything up.
/** Constructs a `BigDecimal` from a `Long`. This is identical to `BigDecimal(l)`. */
def decimal(l: Long): BigDecimal = apply(l)
/** Constructs a `BigDecimal` using a `java.math.BigDecimal`, rounding if necessary. */
def decimal(bd: BigDec, mc: MathContext): BigDecimal = new BigDecimal(bd.round(mc), mc)
/** Constructs a `BigDecimal` by expanding the binary fraction
* contained by `Double` value `d` into a decimal representation,
* rounding if necessary. When a `Float` is converted to a
* `Double`, the binary fraction is preserved, so this method
* also works for converted `Float`s.
*/
def binary(d: Double, mc: MathContext): BigDecimal = new BigDecimal(new BigDec(d, mc), mc)
/** Constructs a `BigDecimal` by expanding the binary fraction
* contained by `Double` value `d` into a decimal representation.
* Note: this also works correctly on converted `Float`s.
*/
def binary(d: Double): BigDecimal = binary(d, defaultMathContext)
/** Constructs a `BigDecimal` from a `java.math.BigDecimal`. The
* precision is the default for `BigDecimal` or enough to represent
* the `java.math.BigDecimal` exactly, whichever is greater.
*/
def exact(repr: BigDec): BigDecimal = {
val mc =
if (repr.precision <= defaultMathContext.getPrecision) defaultMathContext
else new MathContext(repr.precision, java.math.RoundingMode.HALF_EVEN)
new BigDecimal(repr, mc)
}
/** Constructs a `BigDecimal` by fully expanding the binary fraction
* contained by `Double` value `d`, adjusting the precision as
* necessary. Note: this works correctly on converted `Float`s also.
*/
def exact(d: Double): BigDecimal = exact(new BigDec(d))
/** Constructs a `BigDecimal` that exactly represents a `BigInt`.
*/
def exact(bi: BigInt): BigDecimal = exact(new BigDec(bi.bigInteger))
/** Constructs a `BigDecimal` that exactly represents a `Long`. Note that
* all creation methods for `BigDecimal` that do not take a `MathContext`
* represent a `Long`; this is equivalent to `apply`, `valueOf`, etc..
*/
def exact(l: Long): BigDecimal = apply(l)
/** Constructs a `BigDecimal` that exactly represents the number
* specified in a `String`.
*/
def exact(s: String): BigDecimal = exact(new BigDec(s))
/** Constructs a `BigDecimal` that exactly represents the number
* specified in base 10 in a character array.
*/
def exact(cs: Array[Char]): BigDecimal = exact(new BigDec(cs))
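  // Illustrative contrast of the three construction schemes above (the printed expansions are stated
  // as an assumption for explanation, not asserted by this file):
  //   BigDecimal.decimal(0.1)   // built from the Double's decimal text, so it equals BigDecimal("0.1")
  //   BigDecimal.exact(0.1)     // full binary expansion, e.g. 0.1000000000000000055511151231257827021181583404541015625
  //   BigDecimal.binary(0.1)    // the same expansion rounded to the default 34-digit MathContext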
/** Constructs a `BigDecimal` using the java BigDecimal static
* valueOf constructor. Equivalent to `BigDecimal.decimal`.
*
* @param d the specified double value
* @return the constructed `BigDecimal`
*/
def valueOf(d: Double): BigDecimal = apply(BigDec valueOf d)
/** Constructs a `BigDecimal` using the java BigDecimal static
* valueOf constructor.
*
* @param x the specified `Long` value
* @return the constructed `BigDecimal`
*/
def valueOf(x: Long): BigDecimal = apply(x)
/** Constructs a `BigDecimal` whose value is equal to that of the
* specified `Integer` value.
*
* @param i the specified integer value
* @return the constructed `BigDecimal`
*/
def apply(i: Int): BigDecimal = apply(i, defaultMathContext)
/** Constructs a `BigDecimal` whose value is equal to that of the
* specified `Integer` value, rounding if necessary.
*
* @param i the specified integer value
* @param mc the precision and rounding mode for creation of this value and future operations on it
* @return the constructed `BigDecimal`
*/
def apply(i: Int, mc: MathContext): BigDecimal =
if (mc == defaultMathContext && minCached <= i && i <= maxCached) {
val offset = i - minCached
var n = cache(offset)
if (n eq null) { n = new BigDecimal(BigDec.valueOf(i.toLong), mc); cache(offset) = n }
n
}
else apply(i.toLong, mc)
/** Constructs a `BigDecimal` whose value is equal to that of the
* specified long value.
*
* @param l the specified long value
* @return the constructed `BigDecimal`
*/
def apply(l: Long): BigDecimal =
if (minCached <= l && l <= maxCached) apply(l.toInt)
else new BigDecimal(BigDec.valueOf(l), defaultMathContext)
/** Constructs a `BigDecimal` whose value is equal to that of the
* specified long value, but rounded if necessary.
*
* @param l the specified long value
* @param mc the precision and rounding mode for creation of this value and future operations on it
* @return the constructed `BigDecimal`
*/
def apply(l: Long, mc: MathContext): BigDecimal =
new BigDecimal(new BigDec(l, mc), mc)
/** Constructs a `BigDecimal` whose unscaled value is equal to that
* of the specified long value.
*
* @param unscaledVal the value
* @param scale the scale
* @return the constructed `BigDecimal`
*/
def apply(unscaledVal: Long, scale: Int): BigDecimal =
apply(BigInt(unscaledVal), scale)
/** Constructs a `BigDecimal` whose unscaled value is equal to that
* of the specified long value, but rounded if necessary.
*
* @param unscaledVal the value
* @param scale the scale
* @param mc the precision and rounding mode for creation of this value and future operations on it
* @return the constructed `BigDecimal`
*/
def apply(unscaledVal: Long, scale: Int, mc: MathContext): BigDecimal =
apply(BigInt(unscaledVal), scale, mc)
/** Constructs a `BigDecimal` whose value is equal to that of the
* specified double value. Equivalent to `BigDecimal.decimal`.
*
* @param d the specified `Double` value
* @return the constructed `BigDecimal`
*/
def apply(d: Double): BigDecimal = decimal(d, defaultMathContext)
// note we don't use the static valueOf because it doesn't let us supply
// a MathContext, but we should be duplicating its logic, modulo caching.
/** Constructs a `BigDecimal` whose value is equal to that of the
* specified double value, but rounded if necessary. Equivalent to
* `BigDecimal.decimal`.
*
* @param d the specified `Double` value
* @param mc the precision and rounding mode for creation of this value and future operations on it
* @return the constructed `BigDecimal`
*/
def apply(d: Double, mc: MathContext): BigDecimal = decimal(d, mc)
/** Translates a character array representation of a `BigDecimal`
* into a `BigDecimal`.
*/
def apply(x: Array[Char]): BigDecimal = exact(x)
/** Translates a character array representation of a `BigDecimal`
* into a `BigDecimal`, rounding if necessary.
*/
def apply(x: Array[Char], mc: MathContext): BigDecimal =
new BigDecimal(new BigDec(x, mc), mc)
/** Translates the decimal String representation of a `BigDecimal`
* into a `BigDecimal`.
*/
def apply(x: String): BigDecimal = exact(x)
/** Translates the decimal String representation of a `BigDecimal`
* into a `BigDecimal`, rounding if necessary.
*/
def apply(x: String, mc: MathContext): BigDecimal =
new BigDecimal(new BigDec(x, mc), mc)
/** Constructs a `BigDecimal` whose value is equal to that of the
* specified `BigInt` value.
*
* @param x the specified `BigInt` value
* @return the constructed `BigDecimal`
*/
def apply(x: BigInt): BigDecimal = exact(x)
/** Constructs a `BigDecimal` whose value is equal to that of the
* specified `BigInt` value, rounding if necessary.
*
* @param x the specified `BigInt` value
* @param mc the precision and rounding mode for creation of this value and future operations on it
* @return the constructed `BigDecimal`
*/
def apply(x: BigInt, mc: MathContext): BigDecimal =
new BigDecimal(new BigDec(x.bigInteger, mc), mc)
/** Constructs a `BigDecimal` whose unscaled value is equal to that
* of the specified `BigInt` value.
*
* @param unscaledVal the specified `BigInt` value
* @param scale the scale
* @return the constructed `BigDecimal`
*/
def apply(unscaledVal: BigInt, scale: Int): BigDecimal =
exact(new BigDec(unscaledVal.bigInteger, scale))
/** Constructs a `BigDecimal` whose unscaled value is equal to that
* of the specified `BigInt` value.
*
* @param unscaledVal the specified `BigInt` value
* @param scale the scale
* @param mc the precision and rounding mode for creation of this value and future operations on it
* @return the constructed `BigDecimal`
*/
def apply(unscaledVal: BigInt, scale: Int, mc: MathContext): BigDecimal =
new BigDecimal(new BigDec(unscaledVal.bigInteger, scale, mc), mc)
/** Constructs a `BigDecimal` from a `java.math.BigDecimal`. */
def apply(bd: BigDec): BigDecimal = new BigDecimal(bd, defaultMathContext)
/** Implicit conversion from `Int` to `BigDecimal`. */
implicit def int2bigDecimal(i: Int): BigDecimal = apply(i)
/** Implicit conversion from `Long` to `BigDecimal`. */
implicit def long2bigDecimal(l: Long): BigDecimal = apply(l)
/** Implicit conversion from `Double` to `BigDecimal`. */
implicit def double2bigDecimal(d: Double): BigDecimal = decimal(d)
/** Implicit conversion from `java.math.BigDecimal` to `scala.BigDecimal`. */
implicit def javaBigDecimal2bigDecimal(x: BigDec): BigDecimal = if (x == null) null else apply(x)
}
/**
* `BigDecimal` represents decimal floating-point numbers of arbitrary precision.
* By default, the precision approximately matches that of IEEE 128-bit floating
* point numbers (34 decimal digits, `HALF_EVEN` rounding mode). Within the range
* of IEEE binary128 numbers, `BigDecimal` will agree with `BigInt` for both
* equality and hash codes (and will agree with primitive types as well). Beyond
* that range--numbers with more than 4934 digits when written out in full--the
* `hashCode` of `BigInt` and `BigDecimal` is allowed to diverge due to difficulty
* in efficiently computing both the decimal representation in `BigDecimal` and the
* binary representation in `BigInt`.
*
* When creating a `BigDecimal` from a `Double` or `Float`, care must be taken as
* the binary fraction representation of `Double` and `Float` does not easily
* convert into a decimal representation. Three explicit schemes are available
* for conversion. `BigDecimal.decimal` will convert the floating-point number
* to a decimal text representation, and build a `BigDecimal` based on that.
* `BigDecimal.binary` will expand the binary fraction to the requested or default
* precision. `BigDecimal.exact` will expand the binary fraction to the
* full number of digits, thus producing the exact decimal value corresponding to
* the binary fraction of that floating-point number. `BigDecimal` equality
* matches the decimal expansion of `Double`: `BigDecimal.decimal(0.1) == 0.1`.
* Note that since `0.1f != 0.1`, the same is not true for `Float`. Instead,
* `0.1f == BigDecimal.decimal((0.1f).toDouble)`.
*
* To test whether a `BigDecimal` number can be converted to a `Double` or
* `Float` and then back without loss of information by using one of these
* methods, test with `isDecimalDouble`, `isBinaryDouble`, or `isExactDouble`
* or the corresponding `Float` versions. Note that `BigInt`'s `isValidDouble`
* will agree with `isExactDouble`, not the `isDecimalDouble` used by default.
*
* `BigDecimal` uses the decimal representation of binary floating-point numbers
* to determine equality and hash codes. This yields different answers than
* conversion between `Long` and `Double` values, where the exact form is used.
* As always, since floating-point is a lossy representation, it is advisable to
* take care when assuming identity will be maintained across multiple conversions.
*
* `BigDecimal` maintains a `MathContext` that determines the rounding that
* is applied to certain calculations. In most cases, the value of the
* `BigDecimal` is also rounded to the precision specified by the `MathContext`.
* To create a `BigDecimal` with a different precision than its `MathContext`,
* use `new BigDecimal(new java.math.BigDecimal(...), mc)`. Rounding will
* be applied on those mathematical operations that can dramatically change the
* number of digits in a full representation, namely multiplication, division,
* and powers. The left-hand argument's `MathContext` always determines the
* degree of rounding, if any, and is the one propagated through arithmetic
* operations that do not apply rounding themselves.
*/
final class BigDecimal(val bigDecimal: BigDec, val mc: MathContext)
extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[BigDecimal] {
def this(bigDecimal: BigDec) = this(bigDecimal, BigDecimal.defaultMathContext)
import BigDecimal.RoundingMode._
import BigDecimal.{decimal, binary, exact}
if (bigDecimal eq null) throw new IllegalArgumentException("null value for BigDecimal")
if (mc eq null) throw new IllegalArgumentException("null MathContext for BigDecimal")
// There was an implicit to cut down on the wrapper noise for BigDec -> BigDecimal.
// However, this may mask introduction of surprising behavior (e.g. lack of rounding
// where one might expect it). Wrappers should be applied explicitly with an
// eye to correctness.
// Sane hash code computation (which is surprisingly hard).
// Note--not lazy val because we can't afford the extra space.
private final var computedHashCode: Int = BigDecimal.hashCodeNotComputed
private final def computeHashCode(): Unit = {
computedHashCode =
if (isWhole && (precision - scale) < BigDecimal.maximumHashScale) toBigInt.hashCode
else if (isDecimalDouble) doubleValue.##
else {
val temp = bigDecimal.stripTrailingZeros
scala.util.hashing.MurmurHash3.mixLast( temp.scaleByPowerOfTen(temp.scale).toBigInteger.hashCode, temp.scale )
}
}
/** Returns the hash code for this BigDecimal.
* Note that this does not merely use the underlying java object's
* `hashCode` because we compare `BigDecimal`s with `compareTo`
* which deems 2 == 2.00, whereas in java these are unequal
* with unequal `hashCode`s. These hash codes agree with `BigInt`
   * for whole numbers up to ~4934 digits (the range of IEEE 128 bit floating
* point). Beyond this, hash codes will disagree; this prevents the
* explicit representation of the `BigInt` form for `BigDecimal` values
* with large exponents.
*/
override def hashCode(): Int = {
if (computedHashCode == BigDecimal.hashCodeNotComputed) computeHashCode()
computedHashCode
}
/** Compares this BigDecimal with the specified value for equality. Where `Float` and `Double`
* disagree, `BigDecimal` will agree with the `Double` value
*/
override def equals (that: Any): Boolean = that match {
case that: BigDecimal => this equals that
case that: BigInt =>
that.bitLength > (precision-scale-2)*BigDecimal.deci2binary &&
this.toBigIntExact.exists(that equals _)
case that: Double =>
!that.isInfinity && {
val d = toDouble
!d.isInfinity && d == that && equals(decimal(d))
}
case that: Float =>
!that.isInfinity && {
val f = toFloat
!f.isInfinity && f == that && equals(decimal(f.toDouble))
}
case _ => isValidLong && unifiedPrimitiveEquals(that)
}
override def isValidByte = noArithmeticException(toByteExact)
override def isValidShort = noArithmeticException(toShortExact)
override def isValidChar = isValidInt && toIntExact >= Char.MinValue && toIntExact <= Char.MaxValue
override def isValidInt = noArithmeticException(toIntExact)
def isValidLong = noArithmeticException(toLongExact)
/** Tests whether this `BigDecimal` holds the decimal representation of a `Double`. */
def isDecimalDouble = {
val d = toDouble
!d.isInfinity && equals(decimal(d))
}
/** Tests whether this `BigDecimal` holds the decimal representation of a `Float`. */
def isDecimalFloat = {
val f = toFloat
!f.isInfinity && equals(decimal(f))
}
/** Tests whether this `BigDecimal` holds, to within precision, the binary representation of a `Double`. */
def isBinaryDouble = {
val d = toDouble
!d.isInfinity && equals(binary(d,mc))
}
/** Tests whether this `BigDecimal` holds, to within precision, the binary representation of a `Float`. */
def isBinaryFloat = {
val f = toFloat
!f.isInfinity && equals(binary(f,mc))
}
/** Tests whether this `BigDecimal` holds the exact expansion of a `Double`'s binary fractional form into base 10. */
def isExactDouble = {
val d = toDouble
!d.isInfinity && equals(exact(d))
}
/** Tests whether this `BigDecimal` holds the exact expansion of a `Float`'s binary fractional form into base 10. */
def isExactFloat = {
val f = toFloat
!f.isInfinity && equals(exact(f.toDouble))
}
private def noArithmeticException(body: => Unit): Boolean = {
try { body ; true }
catch { case _: ArithmeticException => false }
}
def isWhole = scale <= 0 || bigDecimal.stripTrailingZeros.scale <= 0
def underlying = bigDecimal
/** Compares this BigDecimal with the specified BigDecimal for equality.
*/
def equals (that: BigDecimal): Boolean = compare(that) == 0
/** Compares this BigDecimal with the specified BigDecimal
*/
def compare (that: BigDecimal): Int = this.bigDecimal compareTo that.bigDecimal
/** Addition of BigDecimals
*/
def + (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal.add(that.bigDecimal, mc), mc)
/** Subtraction of BigDecimals
*/
def - (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal.subtract(that.bigDecimal, mc), mc)
/** Multiplication of BigDecimals
*/
def * (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal.multiply(that.bigDecimal, mc), mc)
/** Division of BigDecimals
*/
def / (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal.divide(that.bigDecimal, mc), mc)
/** Division and Remainder - returns tuple containing the result of
* divideToIntegralValue and the remainder. The computation is exact: no rounding is applied.
*/
def /% (that: BigDecimal): (BigDecimal, BigDecimal) = {
val qr = this.bigDecimal.divideAndRemainder(that.bigDecimal, mc)
(new BigDecimal(qr(0), mc), new BigDecimal(qr(1), mc))
}
/** Divide to Integral value.
*/
def quot (that: BigDecimal): BigDecimal =
new BigDecimal(this.bigDecimal.divideToIntegralValue(that.bigDecimal, mc), mc)
/** Returns the minimum of this and that, or this if the two are equal
*/
def min (that: BigDecimal): BigDecimal = (this compare that) match {
case x if x <= 0 => this
case _ => that
}
/** Returns the maximum of this and that, or this if the two are equal
*/
def max (that: BigDecimal): BigDecimal = (this compare that) match {
case x if x >= 0 => this
case _ => that
}
/** Remainder after dividing this by that.
*/
def remainder (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal.remainder(that.bigDecimal, mc), mc)
/** Remainder after dividing this by that.
*/
def % (that: BigDecimal): BigDecimal = this.remainder(that)
/** Returns a BigDecimal whose value is this ** n.
*/
def pow (n: Int): BigDecimal = new BigDecimal(this.bigDecimal.pow(n, mc), mc)
/** Returns a BigDecimal whose value is the negation of this BigDecimal
*/
def unary_- : BigDecimal = new BigDecimal(this.bigDecimal.negate(mc), mc)
/** Returns the absolute value of this BigDecimal
*/
def abs: BigDecimal = if (signum < 0) unary_- else this
/** Returns the sign of this BigDecimal;
* -1 if it is less than 0,
* +1 if it is greater than 0,
* 0 if it is equal to 0.
*/
def signum: Int = this.bigDecimal.signum()
/** Returns the sign of this BigDecimal;
* -1 if it is less than 0,
* +1 if it is greater than 0,
* 0 if it is equal to 0.
*/
def sign: BigDecimal = signum
/** Returns the precision of this `BigDecimal`.
*/
def precision: Int = this.bigDecimal.precision
/** Returns a BigDecimal rounded according to the supplied MathContext settings, but
* preserving its own MathContext for future operations.
*/
def round(mc: MathContext): BigDecimal = {
val r = this.bigDecimal round mc
if (r eq bigDecimal) this else new BigDecimal(r, this.mc)
}
/** Returns a `BigDecimal` rounded according to its own `MathContext` */
def rounded: BigDecimal = {
val r = bigDecimal round mc
if (r eq bigDecimal) this else new BigDecimal(r, mc)
}
/** Returns the scale of this `BigDecimal`.
*/
def scale: Int = this.bigDecimal.scale
/** Returns the size of an ulp, a unit in the last place, of this BigDecimal.
*/
def ulp: BigDecimal = new BigDecimal(this.bigDecimal.ulp, mc)
/** Returns a new BigDecimal based on the supplied MathContext, rounded as needed.
*/
def apply(mc: MathContext): BigDecimal = new BigDecimal(this.bigDecimal round mc, mc)
/** Returns a `BigDecimal` whose scale is the specified value, and whose value is
* numerically equal to this BigDecimal's.
*/
def setScale(scale: Int): BigDecimal =
if (this.scale == scale) this
else new BigDecimal(this.bigDecimal.setScale(scale), mc)
def setScale(scale: Int, mode: RoundingMode): BigDecimal =
if (this.scale == scale) this
else new BigDecimal(this.bigDecimal.setScale(scale, JRM.valueOf(mode.id)), mc)
/** Converts this BigDecimal to a Byte.
* If the BigDecimal is too big to fit in a Byte, only the low-order 8 bits are returned.
* Note that this conversion can lose information about the overall magnitude of the
* BigDecimal value as well as return a result with the opposite sign.
*/
override def byteValue = intValue.toByte
/** Converts this BigDecimal to a Short.
* If the BigDecimal is too big to fit in a Short, only the low-order 16 bits are returned.
* Note that this conversion can lose information about the overall magnitude of the
* BigDecimal value as well as return a result with the opposite sign.
*/
override def shortValue = intValue.toShort
/** Converts this BigDecimal to a Char.
* If the BigDecimal is too big to fit in a Char, only the low-order 16 bits are returned.
* Note that this conversion can lose information about the overall magnitude of the
* BigDecimal value and that it always returns a positive result.
*/
def charValue = intValue.toChar
/** Converts this BigDecimal to an Int.
* If the BigDecimal is too big to fit in an Int, only the low-order 32 bits
* are returned. Note that this conversion can lose information about the
* overall magnitude of the BigDecimal value as well as return a result with
* the opposite sign.
*/
def intValue = this.bigDecimal.intValue
/** Converts this BigDecimal to a Long.
* If the BigDecimal is too big to fit in a Long, only the low-order 64 bits
* are returned. Note that this conversion can lose information about the
* overall magnitude of the BigDecimal value as well as return a result with
* the opposite sign.
*/
def longValue = this.bigDecimal.longValue
/** Converts this BigDecimal to a Float.
* if this BigDecimal has too great a magnitude to represent as a float,
* it will be converted to `Float.NEGATIVE_INFINITY` or
* `Float.POSITIVE_INFINITY` as appropriate.
*/
def floatValue = this.bigDecimal.floatValue
/** Converts this BigDecimal to a Double.
* if this BigDecimal has too great a magnitude to represent as a double,
* it will be converted to `Double.NEGATIVE_INFINITY` or
* `Double.POSITIVE_INFINITY` as appropriate.
*/
def doubleValue = this.bigDecimal.doubleValue
/** Converts this `BigDecimal` to a [[scala.Byte]], checking for lost information.
* If this `BigDecimal` has a nonzero fractional part, or is out of the possible
* range for a [[scala.Byte]] result, then a `java.lang.ArithmeticException` is
* thrown.
*/
def toByteExact = bigDecimal.byteValueExact
/** Converts this `BigDecimal` to a [[scala.Short]], checking for lost information.
* If this `BigDecimal` has a nonzero fractional part, or is out of the possible
* range for a [[scala.Short]] result, then a `java.lang.ArithmeticException` is
* thrown.
*/
def toShortExact = bigDecimal.shortValueExact
/** Converts this `BigDecimal` to a [[scala.Int]], checking for lost information.
* If this `BigDecimal` has a nonzero fractional part, or is out of the possible
* range for an [[scala.Int]] result, then a `java.lang.ArithmeticException` is
* thrown.
*/
def toIntExact = bigDecimal.intValueExact
/** Converts this `BigDecimal` to a [[scala.Long]], checking for lost information.
* If this `BigDecimal` has a nonzero fractional part, or is out of the possible
* range for a [[scala.Long]] result, then a `java.lang.ArithmeticException` is
* thrown.
*/
def toLongExact = bigDecimal.longValueExact
/** Creates a partially constructed NumericRange[BigDecimal] in range
* `[start;end)`, where start is the target BigDecimal. The step
* must be supplied via the "by" method of the returned object in order
* to receive the fully constructed range. For example:
* {{{
* val partial = BigDecimal(1.0) to 2.0 // not usable yet
* val range = partial by 0.01 // now a NumericRange
* val range2 = BigDecimal(0) to 1.0 by 0.01 // all at once of course is fine too
* }}}
*
* @param end the end value of the range (exclusive)
* @return the partially constructed NumericRange
*/
def until(end: BigDecimal): Range.Partial[BigDecimal, NumericRange.Exclusive[BigDecimal]] =
new Range.Partial(until(end, _))
/** Same as the one-argument `until`, but creates the range immediately. */
def until(end: BigDecimal, step: BigDecimal): NumericRange.Exclusive[BigDecimal] = Range.BigDecimal(this, end, step)
/** Like `until`, but inclusive of the end value. */
def to(end: BigDecimal): Range.Partial[BigDecimal, NumericRange.Inclusive[BigDecimal]] =
new Range.Partial(to(end, _))
/** Like `until`, but inclusive of the end value. */
def to(end: BigDecimal, step: BigDecimal) = Range.BigDecimal.inclusive(this, end, step)
/** Converts this `BigDecimal` to a scala.BigInt.
*/
def toBigInt: BigInt = new BigInt(this.bigDecimal.toBigInteger)
/** Converts this `BigDecimal` to a scala.BigInt if it
* can be done losslessly, returning Some(BigInt) or None.
*/
def toBigIntExact: Option[BigInt] =
if (isWhole) {
try Some(new BigInt(this.bigDecimal.toBigIntegerExact))
catch { case _: ArithmeticException => None }
}
else None
/** Returns the decimal String representation of this BigDecimal.
*/
override def toString: String = this.bigDecimal.toString
}
|
scala/scala
|
src/library/scala/math/BigDecimal.scala
|
Scala
|
apache-2.0
| 30,300
|
/*
* Copyright 2013 Stephan Rehfeld
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package test.scaladelray.optimization
import org.scalatest.FunSpec
import scaladelray.math.Point3
import scaladelray.optimization.Octree
class OctreeSpec extends FunSpec {
describe( "An Octree" ) {
it( "accept a set that contains 0 octants" ) {
val run = Point3( 1, 1, 1 )
val lbf = Point3( -1, -1, -1 )
val root = new Octree[Int]( run, lbf, Set(), 0 )
}
it( "should accept a set that contains 8 octants" ) {
val run = Point3( 1, 1, 1 )
val lbf = Point3( -1, -1, -1 )
val center = lbf + ((run - lbf) / 2.0)
val octants = Set() + new Octree[Int]( run, center, Set(), 0 ) +
new Octree[Int]( Point3( center.x, run.y, run.z ), Point3( lbf.x, center.y, center.z ), Set(), 0 ) +
new Octree[Int]( Point3( run.x, run.y, center.z ), Point3( center.x, center.y, lbf.z ), Set(), 0 ) +
new Octree[Int]( Point3( center.x, run.y, center.z ), Point3( lbf.x, center.y, lbf.z ), Set(), 0 ) +
new Octree[Int]( Point3( run.x, center.y, run.z ), Point3( center.x, lbf.y, center.z ), Set(), 0 ) +
new Octree[Int]( Point3( center.x, center.y, run.z ), Point3( lbf.x, lbf.y, center.z ), Set(), 0 ) +
new Octree[Int]( Point3( run.x, center.y, center.z ), Point3( center.x, lbf.y, lbf.z ), Set(), 0 ) +
new Octree[Int]( center, lbf, Set(), 0 )
val root = new Octree[Int]( run, lbf, octants, 0 )
}
it( "should throw an exception if a different amount than 0 or 8 octants are passed" ) {
val run = Point3( 1, 1, 1 )
val lbf = Point3( -1, -1, -1 )
val center = lbf + ((run - lbf) / 2.0)
val octants = Set() + new Octree[Int]( run, center, Set(), 0 ) +
new Octree[Int]( Point3( center.x, run.y, run.z ), Point3( lbf.x, center.y, center.z ), Set(), 0 ) +
new Octree[Int]( Point3( run.x, run.y, center.z ), Point3( center.x, center.y, lbf.z ), Set(), 0 ) +
new Octree[Int]( Point3( center.x, run.y, center.z ), Point3( lbf.x, center.y, lbf.z ), Set(), 0 ) +
new Octree[Int]( Point3( run.x, center.y, run.z ), Point3( center.x, lbf.y, center.z ), Set(), 0 ) +
new Octree[Int]( Point3( center.x, center.y, run.z ), Point3( lbf.x, lbf.y, center.z ), Set(), 0 ) +
new Octree[Int]( Point3( run.x, center.y, center.z ), Point3( center.x, lbf.y, lbf.z ), Set(), 0 ) /*+
new Octree[Int]( center, lbf, Set(), 0 )*/
intercept[IllegalArgumentException] {
val root = new Octree[Int]( run, lbf, octants, 0 )
}
}
it( "should provide the passed data" ) {
val run = Point3( 1, 1, 1 )
val lbf = Point3( -1, -1, -1 )
val numbers = 4 :: 8 :: 15 :: 16 :: 23 :: 42 :: Nil
val root = new Octree[List[Int]]( run, lbf, Set(), numbers )
assert( root.data( 0 ) == 4 )
assert( root.data( 1 ) == 8 )
assert( root.data( 2 ) == 15 )
assert( root.data( 3 ) == 16 )
assert( root.data( 4 ) == 23 )
assert( root.data( 5 ) == 42 )
}
}
}
|
stephan-rehfeld/scaladelray
|
src/test/scala/test/scaladelray/optimization/OctreeSpec.scala
|
Scala
|
apache-2.0
| 3,586
|
package org.openurp.edu.eams.teach.election.model.constraint
import org.openurp.base.Semester
import org.openurp.edu.base.Student
@SerialVersionUID(-6627564288570998553L)
class StdCreditConstraint extends AbstractCreditConstraint {
var semester: Semester = _
var std: Student = _
var GPA: java.lang.Float = _
}
|
openurp/edu-eams-webapp
|
election/src/main/scala/org/openurp/edu/eams/teach/election/model/constraint/StdCreditConstraint.scala
|
Scala
|
gpl-3.0
| 358
|
package utils
import javax.inject.Inject
import com.mohiva.play.silhouette.api.SecuredErrorHandler
import controllers.routes
import play.api.http.DefaultHttpErrorHandler
import play.api.i18n.Messages
import play.api.mvc.Results._
import play.api.mvc.{ Result, RequestHeader }
import play.api.routing.Router
import play.api.{ OptionalSourceMapper, Configuration }
import scala.concurrent.Future
/**
* A secured error handler.
*/
class ErrorHandler @Inject() (
env: play.api.Environment,
config: Configuration,
sourceMapper: OptionalSourceMapper,
router: javax.inject.Provider[Router])
extends DefaultHttpErrorHandler(env, config, sourceMapper, router)
with SecuredErrorHandler {
/**
* Called when a user is not authenticated.
*
* As defined by RFC 2616, the status code of the response should be 401 Unauthorized.
*
* @param request The request header.
* @param messages The messages for the current language.
* @return The result to send to the client.
*/
override def onNotAuthenticated(request: RequestHeader, messages: Messages): Option[Future[Result]] = {
Some(Future.successful(Redirect(routes.ApplicationController.signIn())))
}
/**
* Called when a user is authenticated but not authorized.
*
* As defined by RFC 2616, the status code of the response should be 403 Forbidden.
*
* @param request The request header.
* @param messages The messages for the current language.
* @return The result to send to the client.
*/
override def onNotAuthorized(request: RequestHeader, messages: Messages): Option[Future[Result]] = {
Some(Future.successful(Redirect(routes.ApplicationController.signIn()).flashing("error" -> Messages("access.denied")(messages))))
}
}
|
leannenorthrop/play-mantra-accumulations
|
app/utils/ErrorHandler.scala
|
Scala
|
apache-2.0
| 1,752
|
package im.mange.flyby
import im.mange.driveby.{BrowserCommand, CommandExecutor}
import im.mange.common.ConditionNotMetException
class RemoteExecutor(space: FlySpace, browserId: Long) extends CommandExecutor {
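  // Descriptive note (not from the original source): execute writes the command into the shared
  // space for the remote browser to pick up, then takes the matching executed entry for the same
  // browserId back out, failing with ConditionNotMetException if the remote side reported an error.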
def execute(command: BrowserCommand): BrowserCommand = {
val toWrite = RemoteCommand(command, browserId, executed = false, succeeded = false)
space.write(toWrite)
val toTakeTemplate = RemoteCommand(null, browserId, executed = true, succeeded = null)
val result = space.take(toTakeTemplate)
//TODO: need to handle the option properly here
//TODO: need to use the config timeout ...
val commandResult = result.get
if (!commandResult.succeeded) { throw new ConditionNotMetException(commandResult.exceptionMessage) }
commandResult.browserCommand
}
}
|
alltonp/driveby
|
src/main/scala/im/mange/flyby/RemoteExecutor.scala
|
Scala
|
apache-2.0
| 800
|
package com.debasish.nlp.posTaggers
import edu.stanford.nlp.pipeline.DefaultPaths
import edu.stanford.nlp.tagger.maxent.MaxentTagger
/**
* Created by Debasish Kaushik on 5/21/16.
*/
private[posTaggers] class StanfordPOSTagger extends POSTagger {
private[this] val tagger = new MaxentTagger(DefaultPaths.DEFAULT_POS_MODEL)
def process(string: String): List[(String, String)] = {
val taggedString = tagger.tagString(string)
taggedString.split(" ").map{ case pair =>
val p = pair.split("_")
(p(0), p(1))
}.toList
}
}
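// Illustrative output (assuming the default English POS model): process("The dog barks") would return
// word/tag pairs such as List(("The", "DT"), ("dog", "NN"), ("barks", "VBZ")).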
|
DEK11/MoreNLP
|
src/main/scala/com/debasish/nlp/posTaggers/StanfordPOSTagger.scala
|
Scala
|
apache-2.0
| 554
|
package org.allenai.common
/** Various convenient utilities for Scala constructs. */
object ScalaUtils {
/** A common use case for groupBy. Takes in a sequence of pairs, groups them by the first
* element, and returns a map from the group identifier to a sequence of second elements
* of the matching pairs. E.g., ((a,1), (b,2), (a,3), (b,4)) turns into {a -> (1,3),
* b -> (2,4)}
*/
def toMapUsingGroupByFirst[T1, T2](x: Seq[(T1, T2)]): Map[T1, Seq[T2]] = {
x.groupBy(_._1).mapValues(_.unzip._2)
}
}
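// Illustrative usage: ScalaUtils.toMapUsingGroupByFirst(Seq("a" -> 1, "b" -> 2, "a" -> 3))
// yields Map("a" -> Seq(1, 3), "b" -> Seq(2)).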
|
jkinkead/common
|
core/src/main/scala/org/allenai/common/ScalaUtils.scala
|
Scala
|
apache-2.0
| 529
|
package mesosphere.marathon.core.flow
import org.rogach.scallop.ScallopConf
trait ReviveOffersConfig extends ScallopConf {
//scalastyle:off magic.number
lazy val minReviveOffersInterval = opt[Long]("min_revive_offers_interval",
descr = "Do not ask for all offers (also already seen ones) more often than this interval (ms).",
default = Some(5000))
}
|
sepiroth887/marathon
|
src/main/scala/mesosphere/marathon/core/flow/ReviveOffersConfig.scala
|
Scala
|
apache-2.0
| 365
|
package io.github.steefh.amorphous
import scala.annotation.implicitNotFound
package object patch {
import shapeless._
import labelled._
import record._
import ops.record._
@implicitNotFound("Cannot patch a field of type ${Value} with a field of type ${ValuePatch}")
trait FieldPatcher[Value, ValuePatch] {
def apply(value: Value, patch: ValuePatch): Value
}
object FieldPatcher {
def apply[Value, ValuePatch](patchFunction: (Value, ValuePatch) => Value) =
new FieldPatcher[Value, ValuePatch] {
override def apply(value: Value, patch: ValuePatch): Value = patchFunction(value, patch)
}
}
implicit def simplePatch[Value]: FieldPatcher[Value, Value] =
FieldPatcher { (_, v) => v }
implicit def patchValueWithValueOption[Value]: FieldPatcher[Value, Option[Value]] =
FieldPatcher { (v, p) => p getOrElse v }
implicit def patchValueOptionWithValueOption[Value]: FieldPatcher[Option[Value], Option[Value]] =
FieldPatcher { (e, u) => u orElse e }
implicit def patchMapWithMapOfOptions[K, V]: FieldPatcher[Map[K, V], Map[K, Option[V]]] =
FieldPatcher { (e, u) => u.foldLeft(e) {
case (acc, (k, Some(v))) => acc + (k -> v)
case (acc, (k, None)) => acc - k
}}
implicit def patchProductOptionWithPatchOption[Obj <: Product, Patch <: Product, ObjRepr <: HList, PatchRepr <: HList](
implicit
sourceGen: LabelledGeneric.Aux[Obj, ObjRepr],
patchGen: LabelledGeneric.Aux[Patch, PatchRepr],
patcher: FieldSubsetPatcher[ObjRepr, PatchRepr]
): FieldPatcher[Option[Obj], Option[Patch]] =
FieldPatcher {
case (Some(e), Some(u)) => Some(e patchedWith u)
case (Some(e), None) => Some(e)
case (None, _) => None
}
implicit def patchProductWithPatchOption[Target <: Product, Patch <: Product, TargetRepr <: HList, PatchRepr <: HList](
implicit
sourceGen: LabelledGeneric.Aux[Target, TargetRepr],
patchGen: LabelledGeneric.Aux[Patch, PatchRepr],
patcher: FieldSubsetPatcher[TargetRepr, PatchRepr]
): FieldPatcher[Target, Option[Patch]] =
FieldPatcher {
case (e, Some(u)) => e patchedWith u
case (e, None) => e
}
implicit def patchProductWithPatch[Target <: Product, Patch <: Product, TargetRepr <: HList, PatchRepr <: HList](
implicit
sourceGen: LabelledGeneric.Aux[Target, TargetRepr],
patchGen: LabelledGeneric.Aux[Patch, PatchRepr],
patcher: FieldSubsetPatcher[TargetRepr, PatchRepr]
): FieldPatcher[Target, Patch] = FieldPatcher {
(e, u) => e patchedWith u
}
implicit def patchProductWithProduct[Target <: Product]: FieldPatcher[Target, Target] =
FieldPatcher {
(_, u) => u
}
implicit def patchProductWithProductOption[Target <: Product]: FieldPatcher[Target, Option[Target]] =
FieldPatcher {
case (_, Some(u)) => u
case (e, None) => e
}
implicit def patchProductOptionWithProductOption[Target <: Product]: FieldPatcher[Option[Target], Option[Target]] =
FieldPatcher {
case (e, None) => e
case (_, u) => u
}
trait FieldSubsetPatcher[TargetRepr <: HList, PatchRepr <: HList] {
def apply(source: TargetRepr, patch: PatchRepr): TargetRepr
}
implicit def deriveHNilPatcher[TargetRepr <: HList]: FieldSubsetPatcher[TargetRepr, HNil] =
new FieldSubsetPatcher[TargetRepr, HNil] {
override def apply(source: TargetRepr, patch: HNil): TargetRepr = source
}
@implicitNotFound("Cannot patch a field of type ${V} with a field of type ${P}")
trait FieldPatchedWithOp[V, P] {
def apply(v: V, p: P): V
}
object FieldPatchedWithOp {
def apply[V, P](implicit patcher: FieldPatcher[V, P]): FieldPatchedWithOp[V, P] = new FieldPatchedWithOp[V, P] {
override def apply(v: V, p: P): V = patcher(v, p)
}
}
implicit class FieldPatchedWithSyntax[V](val v: V) extends AnyVal {
def fieldPatchedWith[P](p: P)(implicit op: FieldPatchedWithOp[V, P]): V = op(v, p)
}
implicit def deriveHListPatcher[TargetRepr <: HList, Value, Key <: Symbol, PatchValue, Tail <: HList](
implicit
key: Witness.Aux[Key],
tailPatcher: FieldSubsetPatcher[TargetRepr, Tail],
selector: Selector.Aux[TargetRepr, Key, Value],
valuePatcher: FieldPatcher[Value, PatchValue],
// valuePatcher: FieldPatchedWithOp[Value, PatchValue],
updater: Updater.Aux[TargetRepr, FieldType[Key, Value], TargetRepr]
): FieldSubsetPatcher[TargetRepr, FieldType[Key, PatchValue] :: Tail] =
new FieldSubsetPatcher[TargetRepr, FieldType[Key, PatchValue] :: Tail] {
override def apply(sourceRepr: TargetRepr, patch: FieldType[Key, PatchValue] :: Tail): TargetRepr = {
val patchedTail = tailPatcher(sourceRepr, patch.tail)
val sourceValue = selector(patchedTail)
val patchedValue = valuePatcher(sourceValue, patch.head)
patchedTail.replace(key, patchedValue)
}
}
@implicitNotFound("Type ${Target} cannot be patched by type ${Patch}")
trait PatchedWithOp[Target, Patch] {
def apply(target: Target, patch: Patch): Target
}
object PatchedWithOp {
implicit def patchedWithOp[Target, TargetRepr <: HList, Patch <: Product, PatchRepr <: HList](
implicit
sourceGen: LabelledGeneric.Aux[Target, TargetRepr],
patchGen: LabelledGeneric.Aux[Patch, PatchRepr],
patcher: FieldSubsetPatcher[TargetRepr, PatchRepr]
): PatchedWithOp[Target, Patch] = new PatchedWithOp[Target, Patch] {
def apply(target: Target, patch: Patch): Target =
sourceGen from patcher(sourceGen to target, patchGen to patch)
}
}
implicit class PatchedWithSyntax[Target <: Product](val target: Target) extends AnyVal {
def patchedWith[Patch <: Product](patch: Patch)(
implicit
patchedWithOp: PatchedWithOp[Target, Patch]
): Target = patchedWithOp(target, patch)
}
}
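// Illustrative usage of `patchedWith` (a sketch; the case classes below are hypothetical and not part
// of this library):
//   case class User(name: String, age: Int)
//   case class UserPatch(name: Option[String] = None, age: Option[Int] = None)
//   User("Ann", 30) patchedWith UserPatch(age = Some(31))   // => User("Ann", 31)
// Patch fields are matched to target fields by name; an Option field left as None keeps the target
// value unchanged (see patchValueWithValueOption above).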
|
SteefH/amorphous
|
src/main/scala/io/github/steefh/amorphous/patch.scala
|
Scala
|
mit
| 5,875
|
package java.nio
object CharBuffer {
private final val HashSeed = -182887236 // "java.nio.CharBuffer".##
def allocate(capacity: Int): CharBuffer =
wrap(new Array[Char](capacity))
def wrap(array: Array[Char], offset: Int, length: Int): CharBuffer =
HeapCharBuffer.wrap(array, 0, array.length, offset, length, false)
def wrap(array: Array[Char]): CharBuffer =
wrap(array, 0, array.length)
def wrap(csq: CharSequence, start: Int, end: Int): CharBuffer =
StringCharBuffer.wrap(csq, 0, csq.length, start, end - start)
def wrap(csq: CharSequence): CharBuffer =
wrap(csq, 0, csq.length)
}
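// Illustrative behavior of the wrap overloads (assumed from the java.nio specification, not asserted
// here): CharBuffer.wrap("hello", 1, 3) yields a buffer with position 1 and limit 3, so its remaining
// content reads as "el".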
abstract class CharBuffer private[nio] (_capacity: Int,
private[nio] val _array: Array[Char],
private[nio] val _arrayOffset: Int)
extends Buffer(_capacity)
with Comparable[CharBuffer]
with CharSequence
with Appendable
with Readable {
private[nio] type ElementType = Char
private[nio] type BufferType = CharBuffer
def this(_capacity: Int) = this(_capacity, null, -1)
def read(target: CharBuffer): Int = {
// Attention: this method must not change this buffer's position
val n = remaining
if (n == 0) -1
else if (_array != null) {
// even if read-only
target.put(_array, _arrayOffset, n)
n
} else {
val savedPos = position
target.put(this)
position(savedPos)
n
}
}
def slice(): CharBuffer
def duplicate(): CharBuffer
def asReadOnlyBuffer(): CharBuffer
def get(): Char
def put(c: Char): CharBuffer
def get(index: Int): Char
def put(index: Int, c: Char): CharBuffer
@noinline
def get(dst: Array[Char], offset: Int, length: Int): CharBuffer =
GenBuffer(this).generic_get(dst, offset, length)
def get(dst: Array[Char]): CharBuffer =
get(dst, 0, dst.length)
@noinline
def put(src: CharBuffer): CharBuffer =
GenBuffer(this).generic_put(src)
@noinline
def put(src: Array[Char], offset: Int, length: Int): CharBuffer =
GenBuffer(this).generic_put(src, offset, length)
final def put(src: Array[Char]): CharBuffer =
put(src, 0, src.length)
def put(src: String, start: Int, end: Int): CharBuffer =
put(CharBuffer.wrap(src, start, end))
final def put(src: String): CharBuffer =
put(src, 0, src.length)
@inline final def hasArray(): Boolean =
GenBuffer(this).generic_hasArray()
@inline final def array(): Array[Char] =
GenBuffer(this).generic_array()
@inline final def arrayOffset(): Int =
GenBuffer(this).generic_arrayOffset()
def compact(): CharBuffer
def isDirect(): Boolean
@noinline
override def hashCode(): Int =
GenBuffer(this).generic_hashCode(CharBuffer.HashSeed)
override def equals(that: Any): Boolean = that match {
case that: CharBuffer => compareTo(that) == 0
case _ => false
}
@noinline
def compareTo(that: CharBuffer): Int =
GenBuffer(this).generic_compareTo(that)(_.compareTo(_))
override def toString(): String = {
if (_array != null) {
// even if read-only
new String(_array, position + _arrayOffset, remaining)
} else {
val chars = new Array[Char](remaining)
val savedPos = position
get(chars)
position(savedPos)
new String(chars)
}
}
final def length(): Int = remaining
final def charAt(index: Int): Char = get(position + index)
def subSequence(start: Int, end: Int): CharSequence
def append(csq: CharSequence): CharBuffer =
put(csq.toString())
def append(csq: CharSequence, start: Int, end: Int): CharBuffer =
put(csq.subSequence(start, end).toString())
def append(c: Char): CharBuffer =
put(c)
def order(): ByteOrder
// Internal API
private[nio] def load(index: Int): Char
private[nio] def store(index: Int, elem: Char): Unit
@inline
private[nio] def load(startIndex: Int,
dst: Array[Char],
offset: Int,
length: Int): Unit =
GenBuffer(this).generic_load(startIndex, dst, offset, length)
@inline
private[nio] def store(startIndex: Int,
src: Array[Char],
offset: Int,
length: Int): Unit =
GenBuffer(this).generic_store(startIndex, src, offset, length)
}
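// Editor's sketch (hypothetical, not part of the original file): a minimal usage
// example exercising only the CharBuffer API defined above. `flip()` comes from
// the Buffer superclass; the object and value names here are illustrative.
private[nio] object CharBufferUsageSketch {
  def demo(): Unit = {
    val buf = CharBuffer.allocate(8) // heap-backed buffer with capacity 8
    buf.put("abc")                   // relative bulk put advances the position to 3
    buf.flip()                       // limit := position, position := 0
    assert(buf.length == 3)          // length() is defined as remaining
    assert(buf.charAt(1) == 'b')     // charAt is relative to the current position
    assert(buf.toString == "abc")    // toString reads remaining chars without moving the position
  }
}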
|
cedricviaccoz/scala-native
|
javalib/src/main/scala/java/nio/CharBuffer.scala
|
Scala
|
bsd-3-clause
| 4,379
|
/*                     __                                               *\
**     ________ ___   / /  ___      __ ____  Scala.js Test Suite        **
**    / __/ __// _ | / /  / _ | __ / // __/ (c) 2013, LAMP/EPFL         **
**  __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \    http://scala-js.org/       **
** /____/\___/_/ |_/____/_/ | |__/ /____/                               **
**                          |/____/                                     **
\*                                                                      */
package org.scalajs.testsuite.javalib.util
import java.{util => ju}
object AbstractListTest extends AbstractListTest(new AbstractListFactory)
abstract class AbstractListTest[F <: AbstractListFactory](listFactory: F)
extends AbstractCollectionTest(listFactory) with ListTest {
override def testApi(): Unit = {
super.testApi()
testListApi(listFactory)
}
}
class AbstractListFactory extends AbstractCollectionFactory with ListFactory {
override def implementationName: String =
"java.util.AbstractList"
override def empty[E]: ju.AbstractList[E] = {
// inefficient but simple for debugging implementation of AbstractList
new ju.AbstractList[E] {
private var inner = scala.collection.immutable.List.empty[E]
override def get(index: Int): E = {
checkIndexInBounds(index)
inner(index)
}
override def size(): Int =
inner.size
override def add(index: Int, element: E): Unit = {
checkIndexOnBounds(index)
val (left, right) = inner.splitAt(index)
inner = left ::: element :: right
}
override def set(index: Int, element: E): E = {
checkIndexInBounds(index)
val (left, right) = inner.splitAt(index)
inner = left ::: element :: right.tail
right.head
}
override def remove(index: Int): E = {
checkIndexInBounds(index)
val (left, right) = inner.splitAt(index)
inner = left ::: right.tail
right.head
}
override def clear(): Unit =
inner = Nil
private def checkIndexInBounds(index: Int): Unit = {
if (index < 0 || index >= size)
throw new IndexOutOfBoundsException(index.toString)
}
private def checkIndexOnBounds(index: Int): Unit = {
if (index < 0 || index > size)
throw new IndexOutOfBoundsException(index.toString)
}
}
}
}
|
CapeSepias/scala-js
|
test-suite/src/test/scala/org/scalajs/testsuite/javalib/util/AbstractListTest.scala
|
Scala
|
bsd-3-clause
| 2,436
|
object Solution {
def isPerfectSquare(n: Long) = {
// http://stackoverflow.com/questions/295579/fastest-way-to-determine-if-an-integers-square-root-is-an-integer
if (n <= 0) false else n & 0x3F match {
case 0x00 | 0x01 | 0x04 | 0x09 | 0x10 | 0x11
| 0x19 | 0x21 | 0x24 | 0x29 | 0x31 | 0x39 =>
math.sqrt(n) % 1 == 0
case _ => false
}
}
def isPerfectSquare(n: Int): Boolean = isPerfectSquare(n.toLong)
def isNgonal(s: Int, n: Int) = {
val x = (math.sqrt(n * (8 * s - 16) +
math.pow(s - 4, 2)) + s - 4) / (2 * s - 4)
x == Math.rint(x)
}
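  // Editor's note: the expression above inverts the s-gonal number formula
  // P(s, k) = ((s - 2) * k * k - (s - 4) * k) / 2; solving for k gives
  // k = (sqrt(8 * (s - 2) * n + (s - 4)^2) + (s - 4)) / (2 * (s - 2)),
  // so n is s-gonal exactly when that k is a whole number.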
def genCyclics(comb: Array[Int]) = {
import scala.collection.mutable.LinkedHashSet
def isPoly(n: Int) = comb.exists(s => isNgonal(s, n))
def slides(n: Int) = comb collect { case s if isNgonal(s, n) => s }
val nums = comb.toList.map({ n =>
(1000 to 9999).filter(isNgonal(n, _)).toList
})
val resultSet = LinkedHashSet.empty[List[Int]]
    def searchNums(result: LinkedHashSet[Int], visitedCategory: Set[Int]) {
      for {
        j <- 0 until comb.size if !visitedCategory(j)
        ele <- nums(j) if ele / 100 == result.last % 100 && !result(ele)
      } {
        val newResult = result + ele
        if (newResult.size == comb.size) {
          if (newResult.head / 100 == newResult.last % 100)
            resultSet.add(newResult.toList)
        }
        else {
          searchNums(newResult, visitedCategory + j)
}
}
}
for (ele <- nums(0)) searchNums(LinkedHashSet(ele), Set(0))
resultSet
}
def main(args: Array[String]) {
val _ = readLine
val comb = readLine.split(" ").map(_.toInt)
val result = genCyclics(comb).map(_.sum).toList.sorted
result foreach println
}
}
|
advancedxy/hackerrank
|
project-euler/problem-61/CyclicalFigurateNumbers.scala
|
Scala
|
mit
| 1,775
|
package com.twitter.finagle.exp.mysql.integration
import com.twitter.finagle.exp.Mysql
import com.twitter.finagle.exp.mysql._
import com.twitter.finagle.param
import com.twitter.finagle.tracing._
import com.twitter.util.Await
import com.twitter.util.Local
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class MysqlBuilderTest extends FunSuite with IntegrationClient {
test("clients have granular tracing") {
Trace.enable()
var annotations: List[Annotation] = Nil
val mockTracer = new Tracer {
def record(record: Record) = {
annotations ::= record.annotation
}
def sampleTrace(traceId: TraceId): Option[Boolean] = Some(true)
}
// if we have a local instance of mysql running.
if (isAvailable) {
val client = Mysql.client
.configured(param.Label("myclient"))
.configured(param.Tracer(mockTracer))
.withDatabase("test")
.newRichClient("localhost:3306")
Await.ready(client.query("query"))
Await.ready(client.prepare("prepare query")(1))
Await.ready(client.ping())
val mysqlTraces = annotations collect {
case Annotation.BinaryAnnotation("mysql.query", "query") => ()
case Annotation.BinaryAnnotation("mysql.prepare", "prepare query") => ()
case Annotation.Message("mysql.PingRequest") => ()
}
assert(mysqlTraces.size === 3, "missing traces")
}
}
}
|
Krasnyanskiy/finagle
|
finagle-mysql/src/test/scala/com/twitter/finagle/mysql/integration/MysqlBuilderTest.scala
|
Scala
|
apache-2.0
| 1,492
|
package stamina.testkit
import org.scalatest._
import events._
class EventRecordingReporter extends Reporter {
private var eventList: List[Event] = List()
def findResultEvent(testName: String): Option[Event] = {
eventList.find {
case evt: TestSucceeded if evt.testName == testName => true
case evt: TestFailed if evt.testName == testName => true
case _ => false
}
}
def eventsReceived = eventList.reverse
  def testSucceededEventsReceived: List[TestSucceeded] =
    eventsReceived collect { case event: TestSucceeded => event }
  def infoProvidedEventsReceived: List[InfoProvided] =
    eventsReceived collect { case event: InfoProvided => event }
  def testPendingEventsReceived: List[TestPending] =
    eventsReceived collect { case event: TestPending => event }
  def testFailedEventsReceived: List[TestFailed] =
    eventsReceived collect { case event: TestFailed => event }
  def testIgnoredEventsReceived: List[TestIgnored] =
    eventsReceived collect { case event: TestIgnored => event }
def apply(event: Event): Unit = {
eventList ::= event
}
}
|
scalapenos/stamina
|
stamina-testkit/src/test/scala/stamina/testkit/EventRecordingReporter.scala
|
Scala
|
mit
| 2,089
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.streaming.eventhubs
import org.apache.spark.eventhubscommon._
import org.apache.spark.sql.streaming.OffsetSuite
import org.apache.spark.sql.test.SharedSQLContext
class EventHubsOffsetSuite extends OffsetSuite with SharedSQLContext {
val eventHubsBatchRecord = EventHubsBatchRecord(0L,
Map(EventHubNameAndPartition("eventhub", 0) -> 0L,
EventHubNameAndPartition("eventhub", 1) -> 100L,
EventHubNameAndPartition("eventhub", 2) -> 200L))
test("basic serialization and deserialization of Eventhubs batch record") {
val deserializedEventhubsBatchRecord = JsonUtils.partitionAndSeqNum(eventHubsBatchRecord.json)
assert(deserializedEventhubsBatchRecord.batchId === eventHubsBatchRecord.batchId)
assert(deserializedEventhubsBatchRecord.targetSeqNums === eventHubsBatchRecord.targetSeqNums)
}
}
|
CodingCat/spark-eventhubs
|
core/src/test/scala/org/apache/spark/sql/streaming/eventhubs/EventHubsOffsetSuite.scala
|
Scala
|
apache-2.0
| 1,693
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.linalg
import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.util.TestingUtils._
import org.apache.spark.mllib.linalg.BLAS._
class BLASSuite extends SparkFunSuite {
test("copy") {
val sx = Vectors.sparse(4, Array(0, 2), Array(1.0, -2.0))
val dx = Vectors.dense(1.0, 0.0, -2.0, 0.0)
val sy = Vectors.sparse(4, Array(0, 1, 3), Array(2.0, 1.0, 1.0))
val dy = Array(2.0, 1.0, 0.0, 1.0)
val dy1 = Vectors.dense(dy.clone())
copy(sx, dy1)
assert(dy1 ~== dx absTol 1e-15)
val dy2 = Vectors.dense(dy.clone())
copy(dx, dy2)
assert(dy2 ~== dx absTol 1e-15)
intercept[IllegalArgumentException] {
copy(sx, sy)
}
intercept[IllegalArgumentException] {
copy(dx, sy)
}
withClue("vector sizes must match") {
intercept[Exception] {
copy(sx, Vectors.dense(0.0, 1.0, 2.0))
}
}
}
test("scal") {
val a = 0.1
val sx = Vectors.sparse(3, Array(0, 2), Array(1.0, -2.0))
val dx = Vectors.dense(1.0, 0.0, -2.0)
scal(a, sx)
assert(sx ~== Vectors.sparse(3, Array(0, 2), Array(0.1, -0.2)) absTol 1e-15)
scal(a, dx)
assert(dx ~== Vectors.dense(0.1, 0.0, -0.2) absTol 1e-15)
}
test("axpy") {
val alpha = 0.1
val sx = Vectors.sparse(3, Array(0, 2), Array(1.0, -2.0))
val dx = Vectors.dense(1.0, 0.0, -2.0)
val dy = Array(2.0, 1.0, 0.0)
val expected = Vectors.dense(2.1, 1.0, -0.2)
val dy1 = Vectors.dense(dy.clone())
axpy(alpha, sx, dy1)
assert(dy1 ~== expected absTol 1e-15)
val dy2 = Vectors.dense(dy.clone())
axpy(alpha, dx, dy2)
assert(dy2 ~== expected absTol 1e-15)
val sy = Vectors.sparse(4, Array(0, 1), Array(2.0, 1.0))
intercept[IllegalArgumentException] {
axpy(alpha, sx, sy)
}
intercept[IllegalArgumentException] {
axpy(alpha, dx, sy)
}
withClue("vector sizes must match") {
intercept[Exception] {
axpy(alpha, sx, Vectors.dense(1.0, 2.0))
}
}
}
test("dot") {
val sx = Vectors.sparse(3, Array(0, 2), Array(1.0, -2.0))
val dx = Vectors.dense(1.0, 0.0, -2.0)
val sy = Vectors.sparse(3, Array(0, 1), Array(2.0, 1.0))
val dy = Vectors.dense(2.0, 1.0, 0.0)
assert(dot(sx, sy) ~== 2.0 absTol 1e-15)
assert(dot(sy, sx) ~== 2.0 absTol 1e-15)
assert(dot(sx, dy) ~== 2.0 absTol 1e-15)
assert(dot(dy, sx) ~== 2.0 absTol 1e-15)
assert(dot(dx, dy) ~== 2.0 absTol 1e-15)
assert(dot(dy, dx) ~== 2.0 absTol 1e-15)
assert(dot(sx, sx) ~== 5.0 absTol 1e-15)
assert(dot(dx, dx) ~== 5.0 absTol 1e-15)
assert(dot(sx, dx) ~== 5.0 absTol 1e-15)
assert(dot(dx, sx) ~== 5.0 absTol 1e-15)
val sx1 = Vectors.sparse(10, Array(0, 3, 5, 7, 8), Array(1.0, 2.0, 3.0, 4.0, 5.0))
val sx2 = Vectors.sparse(10, Array(1, 3, 6, 7, 9), Array(1.0, 2.0, 3.0, 4.0, 5.0))
assert(dot(sx1, sx2) ~== 20.0 absTol 1e-15)
assert(dot(sx2, sx1) ~== 20.0 absTol 1e-15)
withClue("vector sizes must match") {
intercept[Exception] {
dot(sx, Vectors.dense(2.0, 1.0))
}
}
}
test("syr") {
val dA = new DenseMatrix(4, 4,
Array(0.0, 1.2, 2.2, 3.1, 1.2, 3.2, 5.3, 4.6, 2.2, 5.3, 1.8, 3.0, 3.1, 4.6, 3.0, 0.8))
val x = new DenseVector(Array(0.0, 2.7, 3.5, 2.1))
val alpha = 0.15
val expected = new DenseMatrix(4, 4,
Array(0.0, 1.2, 2.2, 3.1, 1.2, 4.2935, 6.7175, 5.4505, 2.2, 6.7175, 3.6375, 4.1025, 3.1,
5.4505, 4.1025, 1.4615))
syr(alpha, x, dA)
assert(dA ~== expected absTol 1e-15)
val dB =
new DenseMatrix(3, 4, Array(0.0, 1.2, 2.2, 3.1, 1.2, 3.2, 5.3, 4.6, 2.2, 5.3, 1.8, 3.0))
withClue("Matrix A must be a symmetric Matrix") {
intercept[Exception] {
syr(alpha, x, dB)
}
}
val dC =
new DenseMatrix(3, 3, Array(0.0, 1.2, 2.2, 1.2, 3.2, 5.3, 2.2, 5.3, 1.8))
withClue("Size of vector must match the rank of matrix") {
intercept[Exception] {
syr(alpha, x, dC)
}
}
val y = new DenseVector(Array(0.0, 2.7, 3.5, 2.1, 1.5))
withClue("Size of vector must match the rank of matrix") {
intercept[Exception] {
syr(alpha, y, dA)
}
}
val xSparse = new SparseVector(4, Array(0, 2, 3), Array(1.0, 3.0, 4.0))
val dD = new DenseMatrix(4, 4,
Array(0.0, 1.2, 2.2, 3.1, 1.2, 3.2, 5.3, 4.6, 2.2, 5.3, 1.8, 3.0, 3.1, 4.6, 3.0, 0.8))
syr(0.1, xSparse, dD)
val expectedSparse = new DenseMatrix(4, 4,
Array(0.1, 1.2, 2.5, 3.5, 1.2, 3.2, 5.3, 4.6, 2.5, 5.3, 2.7, 4.2, 3.5, 4.6, 4.2, 2.4))
assert(dD ~== expectedSparse absTol 1e-15)
}
test("gemm") {
val dA =
new DenseMatrix(4, 3, Array(0.0, 1.0, 0.0, 0.0, 2.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 3.0))
val sA = new SparseMatrix(4, 3, Array(0, 1, 3, 4), Array(1, 0, 2, 3), Array(1.0, 2.0, 1.0, 3.0))
val B = new DenseMatrix(3, 2, Array(1.0, 0.0, 0.0, 0.0, 2.0, 1.0))
val expected = new DenseMatrix(4, 2, Array(0.0, 1.0, 0.0, 0.0, 4.0, 0.0, 2.0, 3.0))
val BTman = new DenseMatrix(2, 3, Array(1.0, 0.0, 0.0, 2.0, 0.0, 1.0))
val BT = B.transpose
assert(dA.multiply(B) ~== expected absTol 1e-15)
assert(sA.multiply(B) ~== expected absTol 1e-15)
val C1 = new DenseMatrix(4, 2, Array(1.0, 0.0, 2.0, 1.0, 0.0, 0.0, 1.0, 0.0))
val C2 = C1.copy
val C3 = C1.copy
val C4 = C1.copy
val C5 = C1.copy
val C6 = C1.copy
val C7 = C1.copy
val C8 = C1.copy
val C9 = C1.copy
val C10 = C1.copy
val C11 = C1.copy
val C12 = C1.copy
val C13 = C1.copy
val C14 = C1.copy
val C15 = C1.copy
val C16 = C1.copy
val C17 = C1.copy
val expected2 = new DenseMatrix(4, 2, Array(2.0, 1.0, 4.0, 2.0, 4.0, 0.0, 4.0, 3.0))
val expected3 = new DenseMatrix(4, 2, Array(2.0, 2.0, 4.0, 2.0, 8.0, 0.0, 6.0, 6.0))
val expected4 = new DenseMatrix(4, 2, Array(5.0, 0.0, 10.0, 5.0, 0.0, 0.0, 5.0, 0.0))
val expected5 = C1.copy
gemm(1.0, dA, B, 2.0, C1)
gemm(1.0, sA, B, 2.0, C2)
gemm(2.0, dA, B, 2.0, C3)
gemm(2.0, sA, B, 2.0, C4)
assert(C1 ~== expected2 absTol 1e-15)
assert(C2 ~== expected2 absTol 1e-15)
assert(C3 ~== expected3 absTol 1e-15)
assert(C4 ~== expected3 absTol 1e-15)
gemm(1.0, dA, B, 0.0, C17)
assert(C17 ~== expected absTol 1e-15)
gemm(1.0, sA, B, 0.0, C17)
assert(C17 ~== expected absTol 1e-15)
withClue("columns of A don't match the rows of B") {
intercept[Exception] {
gemm(1.0, dA.transpose, B, 2.0, C1)
}
}
val dATman =
new DenseMatrix(3, 4, Array(0.0, 2.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 3.0))
val sATman =
new SparseMatrix(3, 4, Array(0, 1, 2, 3, 4), Array(1, 0, 1, 2), Array(2.0, 1.0, 1.0, 3.0))
val dATT = dATman.transpose
val sATT = sATman.transpose
val BTT = BTman.transpose.asInstanceOf[DenseMatrix]
assert(dATT.multiply(B) ~== expected absTol 1e-15)
assert(sATT.multiply(B) ~== expected absTol 1e-15)
assert(dATT.multiply(BTT) ~== expected absTol 1e-15)
assert(sATT.multiply(BTT) ~== expected absTol 1e-15)
gemm(1.0, dATT, BTT, 2.0, C5)
gemm(1.0, sATT, BTT, 2.0, C6)
gemm(2.0, dATT, BTT, 2.0, C7)
gemm(2.0, sATT, BTT, 2.0, C8)
gemm(1.0, dA, BTT, 2.0, C9)
gemm(1.0, sA, BTT, 2.0, C10)
gemm(2.0, dA, BTT, 2.0, C11)
gemm(2.0, sA, BTT, 2.0, C12)
assert(C5 ~== expected2 absTol 1e-15)
assert(C6 ~== expected2 absTol 1e-15)
assert(C7 ~== expected3 absTol 1e-15)
assert(C8 ~== expected3 absTol 1e-15)
assert(C9 ~== expected2 absTol 1e-15)
assert(C10 ~== expected2 absTol 1e-15)
assert(C11 ~== expected3 absTol 1e-15)
assert(C12 ~== expected3 absTol 1e-15)
gemm(0, dA, B, 5, C13)
gemm(0, sA, B, 5, C14)
gemm(0, dA, B, 1, C15)
gemm(0, sA, B, 1, C16)
assert(C13 ~== expected4 absTol 1e-15)
assert(C14 ~== expected4 absTol 1e-15)
assert(C15 ~== expected5 absTol 1e-15)
assert(C16 ~== expected5 absTol 1e-15)
}
test("gemv") {
val dA =
new DenseMatrix(4, 3, Array(0.0, 1.0, 0.0, 0.0, 2.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 3.0))
val sA = new SparseMatrix(4, 3, Array(0, 1, 3, 4), Array(1, 0, 2, 3), Array(1.0, 2.0, 1.0, 3.0))
val dA2 =
new DenseMatrix(4, 3, Array(0.0, 2.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 3.0), true)
val sA2 =
new SparseMatrix(4, 3, Array(0, 1, 2, 3, 4), Array(1, 0, 1, 2), Array(2.0, 1.0, 1.0, 3.0),
true)
val dx = new DenseVector(Array(1.0, 2.0, 3.0))
val sx = dx.toSparse
val expected = new DenseVector(Array(4.0, 1.0, 2.0, 9.0))
assert(dA.multiply(dx) ~== expected absTol 1e-15)
assert(sA.multiply(dx) ~== expected absTol 1e-15)
assert(dA.multiply(sx) ~== expected absTol 1e-15)
assert(sA.multiply(sx) ~== expected absTol 1e-15)
val y1 = new DenseVector(Array(1.0, 3.0, 1.0, 0.0))
val y2 = y1.copy
val y3 = y1.copy
val y4 = y1.copy
val y5 = y1.copy
val y6 = y1.copy
val y7 = y1.copy
val y8 = y1.copy
val y9 = y1.copy
val y10 = y1.copy
val y11 = y1.copy
val y12 = y1.copy
val y13 = y1.copy
val y14 = y1.copy
val y15 = y1.copy
val y16 = y1.copy
val expected2 = new DenseVector(Array(6.0, 7.0, 4.0, 9.0))
val expected3 = new DenseVector(Array(10.0, 8.0, 6.0, 18.0))
gemv(1.0, dA, dx, 2.0, y1)
gemv(1.0, sA, dx, 2.0, y2)
gemv(1.0, dA, sx, 2.0, y3)
gemv(1.0, sA, sx, 2.0, y4)
gemv(1.0, dA2, dx, 2.0, y5)
gemv(1.0, sA2, dx, 2.0, y6)
gemv(1.0, dA2, sx, 2.0, y7)
gemv(1.0, sA2, sx, 2.0, y8)
gemv(2.0, dA, dx, 2.0, y9)
gemv(2.0, sA, dx, 2.0, y10)
gemv(2.0, dA, sx, 2.0, y11)
gemv(2.0, sA, sx, 2.0, y12)
gemv(2.0, dA2, dx, 2.0, y13)
gemv(2.0, sA2, dx, 2.0, y14)
gemv(2.0, dA2, sx, 2.0, y15)
gemv(2.0, sA2, sx, 2.0, y16)
assert(y1 ~== expected2 absTol 1e-15)
assert(y2 ~== expected2 absTol 1e-15)
assert(y3 ~== expected2 absTol 1e-15)
assert(y4 ~== expected2 absTol 1e-15)
assert(y5 ~== expected2 absTol 1e-15)
assert(y6 ~== expected2 absTol 1e-15)
assert(y7 ~== expected2 absTol 1e-15)
assert(y8 ~== expected2 absTol 1e-15)
assert(y9 ~== expected3 absTol 1e-15)
assert(y10 ~== expected3 absTol 1e-15)
assert(y11 ~== expected3 absTol 1e-15)
assert(y12 ~== expected3 absTol 1e-15)
assert(y13 ~== expected3 absTol 1e-15)
assert(y14 ~== expected3 absTol 1e-15)
assert(y15 ~== expected3 absTol 1e-15)
assert(y16 ~== expected3 absTol 1e-15)
withClue("columns of A don't match the rows of B") {
intercept[Exception] {
gemv(1.0, dA.transpose, dx, 2.0, y1)
}
intercept[Exception] {
gemv(1.0, sA.transpose, dx, 2.0, y1)
}
intercept[Exception] {
gemv(1.0, dA.transpose, sx, 2.0, y1)
}
intercept[Exception] {
gemv(1.0, sA.transpose, sx, 2.0, y1)
}
}
val dAT =
new DenseMatrix(3, 4, Array(0.0, 2.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 3.0))
val sAT =
new SparseMatrix(3, 4, Array(0, 1, 2, 3, 4), Array(1, 0, 1, 2), Array(2.0, 1.0, 1.0, 3.0))
val dATT = dAT.transpose
val sATT = sAT.transpose
assert(dATT.multiply(dx) ~== expected absTol 1e-15)
assert(sATT.multiply(dx) ~== expected absTol 1e-15)
assert(dATT.multiply(sx) ~== expected absTol 1e-15)
assert(sATT.multiply(sx) ~== expected absTol 1e-15)
}
}
|
andrewor14/iolap
|
mllib/src/test/scala/org/apache/spark/mllib/linalg/BLASSuite.scala
|
Scala
|
apache-2.0
| 12,234
|
package japgolly.scalajs.react.test
import scala.scalajs.js
/**
* Allows composition and abstraction of `ReactTestUtils.Simulate` procedures.
*/
class Simulation(_run: (() => ReactOrDomNode) => Unit) {
def run(n: => ReactOrDomNode): Unit =
_run(() => n)
def andThen(f: Simulation) =
new Simulation(n => { _run(n); f.run(n()) })
@inline final def >> (f: Simulation) = this andThen f
@inline final def compose(f: Simulation) = f andThen this
final def runN(cs: ReactOrDomNode*): Unit =
cs foreach (run(_))
}
object Simulation {
def apply(run: (=> ReactOrDomNode) => Unit): Simulation =
new Simulation(n => run(n()))
// Don't use default arguments - they force parentheses on to caller.
// Eg. Simulation.blur >> Simulation.focus becomes Simulation.blur() >> Simulation.focus(). Yuk.
def beforeInput = Simulation(Simulate.beforeInput (_))
def blur = Simulation(Simulate.blur (_))
def change = Simulation(Simulate.change (_))
def click = Simulation(Simulate.click (_, SimEvent.Mouse()))
def compositionEnd = Simulation(Simulate.compositionEnd (_))
def compositionStart = Simulation(Simulate.compositionStart (_))
def compositionUpdate= Simulation(Simulate.compositionUpdate(_))
def contextMenu = Simulation(Simulate.contextMenu (_))
def copy = Simulation(Simulate.copy (_))
def cut = Simulation(Simulate.cut (_))
def doubleClick = Simulation(Simulate.doubleClick (_, SimEvent.Mouse()))
def drag = Simulation(Simulate.drag (_, SimEvent.Mouse()))
def dragEnd = Simulation(Simulate.dragEnd (_, SimEvent.Mouse()))
def dragEnter = Simulation(Simulate.dragEnter (_, SimEvent.Mouse()))
def dragExit = Simulation(Simulate.dragExit (_, SimEvent.Mouse()))
def dragLeave = Simulation(Simulate.dragLeave (_, SimEvent.Mouse()))
def dragOver = Simulation(Simulate.dragOver (_, SimEvent.Mouse()))
def dragStart = Simulation(Simulate.dragStart (_, SimEvent.Mouse()))
def drop = Simulation(Simulate.drop (_, SimEvent.Mouse()))
def error = Simulation(Simulate.error (_))
def focus = Simulation(Simulate.focus (_))
def input = Simulation(Simulate.input (_))
def keyDown = Simulation(Simulate.keyDown (_, SimEvent.Keyboard()))
def keyPress = Simulation(Simulate.keyPress (_, SimEvent.Keyboard()))
def keyUp = Simulation(Simulate.keyUp (_, SimEvent.Keyboard()))
def load = Simulation(Simulate.load (_))
def mouseDown = Simulation(Simulate.mouseDown (_, SimEvent.Mouse()))
def mouseEnter = Simulation(Simulate.mouseEnter (_, SimEvent.Mouse()))
def mouseLeave = Simulation(Simulate.mouseLeave (_, SimEvent.Mouse()))
def mouseMove = Simulation(Simulate.mouseMove (_, SimEvent.Mouse()))
def mouseOut = Simulation(Simulate.mouseOut (_, SimEvent.Mouse()))
def mouseOver = Simulation(Simulate.mouseOver (_, SimEvent.Mouse()))
def mouseUp = Simulation(Simulate.mouseUp (_, SimEvent.Mouse()))
def paste = Simulation(Simulate.paste (_))
def reset = Simulation(Simulate.reset (_))
def scroll = Simulation(Simulate.scroll (_))
def select = Simulation(Simulate.select (_))
def submit = Simulation(Simulate.submit (_))
def touchCancel = Simulation(Simulate.touchCancel (_))
def touchEnd = Simulation(Simulate.touchEnd (_))
def touchMove = Simulation(Simulate.touchMove (_))
def touchStart = Simulation(Simulate.touchStart (_))
def wheel = Simulation(Simulate.wheel (_))
def beforeInput (eventData: js.Object) = Simulation(Simulate.beforeInput (_, eventData))
def blur (eventData: js.Object) = Simulation(Simulate.blur (_, eventData))
def change (eventData: js.Object) = Simulation(Simulate.change (_, eventData))
def click (eventData: js.Object) = Simulation(Simulate.click (_, eventData))
def compositionEnd (eventData: js.Object) = Simulation(Simulate.compositionEnd (_, eventData))
def compositionStart (eventData: js.Object) = Simulation(Simulate.compositionStart (_, eventData))
def compositionUpdate(eventData: js.Object) = Simulation(Simulate.compositionUpdate(_, eventData))
def contextMenu (eventData: js.Object) = Simulation(Simulate.contextMenu (_, eventData))
def copy (eventData: js.Object) = Simulation(Simulate.copy (_, eventData))
def cut (eventData: js.Object) = Simulation(Simulate.cut (_, eventData))
def doubleClick (eventData: js.Object) = Simulation(Simulate.doubleClick (_, eventData))
def drag (eventData: js.Object) = Simulation(Simulate.drag (_, eventData))
def dragEnd (eventData: js.Object) = Simulation(Simulate.dragEnd (_, eventData))
def dragEnter (eventData: js.Object) = Simulation(Simulate.dragEnter (_, eventData))
def dragExit (eventData: js.Object) = Simulation(Simulate.dragExit (_, eventData))
def dragLeave (eventData: js.Object) = Simulation(Simulate.dragLeave (_, eventData))
def dragOver (eventData: js.Object) = Simulation(Simulate.dragOver (_, eventData))
def dragStart (eventData: js.Object) = Simulation(Simulate.dragStart (_, eventData))
def drop (eventData: js.Object) = Simulation(Simulate.drop (_, eventData))
def error (eventData: js.Object) = Simulation(Simulate.error (_, eventData))
def focus (eventData: js.Object) = Simulation(Simulate.focus (_, eventData))
def input (eventData: js.Object) = Simulation(Simulate.input (_, eventData))
def keyDown (eventData: js.Object) = Simulation(Simulate.keyDown (_, eventData))
def keyPress (eventData: js.Object) = Simulation(Simulate.keyPress (_, eventData))
def keyUp (eventData: js.Object) = Simulation(Simulate.keyUp (_, eventData))
def load (eventData: js.Object) = Simulation(Simulate.load (_, eventData))
def mouseDown (eventData: js.Object) = Simulation(Simulate.mouseDown (_, eventData))
def mouseEnter (eventData: js.Object) = Simulation(Simulate.mouseEnter (_, eventData))
def mouseLeave (eventData: js.Object) = Simulation(Simulate.mouseLeave (_, eventData))
def mouseMove (eventData: js.Object) = Simulation(Simulate.mouseMove (_, eventData))
def mouseOut (eventData: js.Object) = Simulation(Simulate.mouseOut (_, eventData))
def mouseOver (eventData: js.Object) = Simulation(Simulate.mouseOver (_, eventData))
def mouseUp (eventData: js.Object) = Simulation(Simulate.mouseUp (_, eventData))
def paste (eventData: js.Object) = Simulation(Simulate.paste (_, eventData))
def reset (eventData: js.Object) = Simulation(Simulate.reset (_, eventData))
def scroll (eventData: js.Object) = Simulation(Simulate.scroll (_, eventData))
def select (eventData: js.Object) = Simulation(Simulate.select (_, eventData))
def submit (eventData: js.Object) = Simulation(Simulate.submit (_, eventData))
def touchCancel (eventData: js.Object) = Simulation(Simulate.touchCancel (_, eventData))
def touchEnd (eventData: js.Object) = Simulation(Simulate.touchEnd (_, eventData))
def touchMove (eventData: js.Object) = Simulation(Simulate.touchMove (_, eventData))
def touchStart (eventData: js.Object) = Simulation(Simulate.touchStart (_, eventData))
def wheel (eventData: js.Object) = Simulation(Simulate.wheel (_, eventData))
// Helpers for common scenarios
def focusSimBlur(s: Simulation) =
focus >> s >> blur
def focusChangeBlur(newValue: String) =
focusSimBlur(SimEvent.Change(value = newValue).simulation)
}
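// Editor's sketch (hypothetical, not part of the original file): simulations are
// plain values, so an interaction sequence can be composed once with `>>` and
// replayed against any number of mounted nodes. Only the names introduced below
// are illustrative; everything else is defined above or in SimEvent.
object SimulationUsageSketch {
  // Focus the node, fire a change event with the given value, then blur --
  // the same sequence Simulation.focusChangeBlur builds internally.
  def typeAndCommit(newValue: String): Simulation =
    Simulation.focus >> SimEvent.Change(value = newValue).simulation >> Simulation.blur
  // Replay the composed simulation against several nodes in one call.
  def replayAll(nodes: ReactOrDomNode*): Unit =
    typeAndCommit("hello").runN(nodes: _*)
}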
|
matthughes/scalajs-react
|
test/src/main/scala/japgolly/scalajs/react/test/Simulation.scala
|
Scala
|
apache-2.0
| 8,626
|
package org.jetbrains.plugins.scala
package lang.psi.light
import com.intellij.openapi.project.Project
import com.intellij.psi._
import com.intellij.psi.impl.light.LightField
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScAnnotationsHolder
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTypeDefinition
import org.jetbrains.plugins.scala.lang.psi.types.ScTypeExt
import org.jetbrains.plugins.scala.lang.psi.types.api.ParameterizedType
import _root_.scala.collection.mutable.ArrayBuffer
/**
* @author Alefas
* @since 07.12.12
*/
object LightUtil {
/**
* for Java only
*
* @param holder annotation holder
* @return Java throws section string or empty string
*/
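  // Editor's note: e.g. a member annotated with @throws[java.io.IOException] yields
  // the string " throws java.io.IOException " (with the surrounding spaces supplied
  // by the mkString call below).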
def getThrowsSection(holder: ScAnnotationsHolder): String = {
val throwAnnotations = holder.annotations("scala.throws").foldLeft[ArrayBuffer[String]](ArrayBuffer()) {
case (accumulator, annotation) =>
implicit val elementScope = holder.elementScope
val classes = annotation.constructor.args.map(_.exprs).getOrElse(Seq.empty).flatMap {
_.`type`() match {
case Right(ParameterizedType(des, Seq(arg))) => des.extractClass match {
case Some(clazz) if clazz.qualifiedName == "java.lang.Class" =>
arg.toPsiType match {
case c: PsiClassType =>
c.resolve() match {
case clazz: PsiClass => Seq(clazz.getQualifiedName)
case _ => Seq.empty
}
case _ => Seq.empty
}
case _ => Seq.empty
}
case _ => Seq.empty
}
}
if (classes.isEmpty) {
annotation.constructor.typeArgList match {
case Some(args) =>
val classes = args.typeArgs
.flatMap(_.`type`().toOption)
.flatMap {
_.toPsiType match {
case c: PsiClassType => Option(c.resolve())
case _ => None
}
}.collect {
case c: PsiClass => c.getQualifiedName
}
if (classes.nonEmpty) accumulator :+ classes.mkString(sep = ", ")
else accumulator
case None => accumulator
}
} else accumulator :+ classes.mkString(sep = ", ")
case _ => ArrayBuffer()
}
if (throwAnnotations.isEmpty) ""
else throwAnnotations.mkString(start = " throws ", sep = ", ", end = " ")
}
def createJavaMethod(methodText: String, containingClass: PsiClass, project: Project): PsiMethod = {
val elementFactory = JavaPsiFacade.getInstance(project).getElementFactory
try elementFactory.createMethodFromText(methodText, containingClass)
catch {
case _: Exception => elementFactory.createMethodFromText("public void FAILED_TO_DECOMPILE_METHOD() {}", containingClass)
}
}
def javaTypeElement(tp: PsiType, context: PsiElement, project: Project): PsiTypeElement = {
val elementFactory = JavaPsiFacade.getInstance(project).getElementFactory
elementFactory.createTypeElementFromText(tp.getCanonicalText, context)
}
//see LightElement.setNavigationElement
def originalNavigationElement(elem: PsiElement): PsiElement = {
elem.toOption
.map(_.getNavigationElement)
.getOrElse(elem)
}
def createLightField(fieldText: String, containingClass: ScTypeDefinition): PsiField = {
val factory = JavaPsiFacade.getInstance(containingClass.getProject).getElementFactory
val dummyField = factory.createFieldFromText(fieldText, containingClass)
new LightField(containingClass.getManager, dummyField, containingClass) {
override def getNavigationElement: PsiElement = originalNavigationElement(containingClass)
}
}
}
|
jastice/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/light/LightUtil.scala
|
Scala
|
apache-2.0
| 3,892
|
package com.gu.notificationschedule.notifications
import java.nio.charset.StandardCharsets
import java.time.Instant
import java.util.UUID
import com.amazonaws.services.cloudwatch.model.StandardUnit
import com.gu.notificationschedule.NotificationScheduleConfig
import com.gu.notificationschedule.cloudwatch.{CloudWatchMetrics, Timer}
import com.gu.notificationschedule.dynamo.NotificationsScheduleEntry
import com.gu.notificationschedule.external.SsmConfig
import com.typesafe.config.ConfigFactory
import okhttp3._
import okio.Buffer
import org.specs2.mock.Mockito
import org.specs2.mutable.Specification
import scala.jdk.CollectionConverters._
import scala.util.Success
class RequestNotificationImplSpec extends Specification with Mockito {
"RequestNotificationImpl" should {
val config = ConfigFactory.parseMap(Map(
"schedule.notifications.pushTopicUrl" -> "http://push.topic.invalid",
"schedule.notifications.secretKey" -> "secretkey"
).asJava)
val notificationsScheduleEntry = NotificationsScheduleEntry(UUID.randomUUID().toString, "notification", 1, 1)
val cloudWatchMetrics = new CloudWatchMetrics {
override def queueMetric(metricName: String, value: Double, standardUnit: StandardUnit, instant: Instant): Boolean = ???
override def startTimer(metricName: String): Timer = mock[Timer]
override def meterHttpStatusResponses(metricName: String, code: Int): Unit = ???
}
"send notification request to notifications" in {
val okHttpClient = mock[OkHttpClient]
val mockCall = mock[Call]
okHttpClient.newCall(any[Request]()) answers {
(_: Any) match {
case (request: Request) => {
request.url() must beEqualTo(HttpUrl.parse("http://push.topic.invalid"))
request.header("Authorization") must beEqualTo(s"Bearer secretkey")
request.method().toLowerCase must beEqualTo("post")
val buffer = new Buffer()
request.body().writeTo(buffer)
new String(buffer.readByteArray(), StandardCharsets.UTF_8) must beEqualTo("notification")
request.body().contentType().toString must beEqualTo("application/json; charset=utf-8")
mockCall.execute() returns new Response.Builder().code(200)
.protocol(Protocol.HTTP_2)
.request(request)
.message("status message")
.headers(Headers.of(Map[String, String]().asJava))
.body(ResponseBody.create(MediaType.parse("application/json"), ""))
.build()
mockCall
}
}
}
val requestNotification = new RequestNotificationImpl(
new NotificationScheduleConfig(SsmConfig("app", "stack", "stage", config)), okHttpClient,
cloudWatchMetrics)
requestNotification(1, notificationsScheduleEntry) must beEqualTo(Success(()))
}
"handle bad status" in {
val okHttpClient = mock[OkHttpClient]
val mockCall = mock[Call]
okHttpClient.newCall(any[Request]()) answers {
(_: Any) match {
case (request: Request) => {
mockCall.execute() returns new Response.Builder().code(400)
.protocol(Protocol.HTTP_2)
.request(request)
.message("status message")
.headers(Headers.of(Map[String, String]().asJava))
.build()
mockCall
}
}
}
val requestNotification = new RequestNotificationImpl(
new NotificationScheduleConfig(SsmConfig("app", "stack", "stage", config)), okHttpClient,
cloudWatchMetrics)
requestNotification(1, notificationsScheduleEntry).get must throwA[RequestNotificationException]
}
"handle no response" in {
val okHttpClient = mock[OkHttpClient]
val mockCall = mock[Call]
okHttpClient.newCall(any[Request]()) answers {
(_: Any) match {
case (request: Request) => mockCall
}
}
      val requestNotification = new RequestNotificationImpl(
        new NotificationScheduleConfig(SsmConfig("app", "stack", "stage", config)), okHttpClient,
        cloudWatchMetrics)
      requestNotification(1, notificationsScheduleEntry).get must throwA[RequestNotificationException]
}
"handle error" in {
val okHttpClient = mock[OkHttpClient]
val mockCall = mock[Call]
val exception = new NullPointerException
okHttpClient.newCall(any[Request]()) answers {
(_: Any) match {
case (request: Request) => {
mockCall.execute() throws exception
mockCall
}
}
}
val requestNotification = new RequestNotificationImpl(
new NotificationScheduleConfig(SsmConfig("app", "stack", "stage", config)), okHttpClient,
cloudWatchMetrics)
requestNotification(1, notificationsScheduleEntry).get must throwA(exception)
}
}
}
|
guardian/mobile-n10n
|
schedulelambda/src/test/scala/com/gu/notificationschedule/notifications/RequestNotificationImplSpec.scala
|
Scala
|
apache-2.0
| 4,905
|
package actors
import akka.actor.{ActorRef, Props}
import akka.agent.Agent
import markets.orders.Order
import markets.participants.LiquidityDemander
import markets.tickers.Tick
import markets.tradables.Tradable
import strategies.placement.PoissonOrderPlacementStrategy
import strategies.trading.ZIMarketOrderTradingStrategy
import scala.collection.{immutable, mutable}
import scala.util.Random
case class ZILiquidityDemander(config: RandomLiquidityDemanderConfig,
markets: mutable.Map[Tradable, ActorRef],
prng: Random,
tickers: mutable.Map[Tradable, Agent[immutable.Seq[Tick]]])
extends LiquidityDemander {
val marketOrderTradingStrategy = ZIMarketOrderTradingStrategy(config, prng)
val orderPlacementStrategy = PoissonOrderPlacementStrategy(prng, context.system.scheduler)
val outstandingOrders = mutable.Set.empty[Order]
  // possibly insert this into a post-start life-cycle hook?
import context.dispatcher
val initialDelay = orderPlacementStrategy.waitTime(config.mu)
val marketOrderInterval = orderPlacementStrategy.waitTime(config.mu)
orderPlacementStrategy.schedule(initialDelay, marketOrderInterval, self, SubmitMarketAskOrder)
orderPlacementStrategy.schedule(initialDelay, marketOrderInterval, self, SubmitMarketBidOrder)
}
object ZILiquidityDemander {
def props(config: RandomLiquidityDemanderConfig,
markets: mutable.Map[Tradable, ActorRef],
prng: Random,
tickers: mutable.Map[Tradable, Agent[immutable.Seq[Tick]]]): Props = {
Props(new ZILiquidityDemander(config, markets, prng, tickers))
}
}
|
ScalABM/models-library
|
farmer-patelli-zovko/src/main/scala-2.11/actors/ZILiquidityDemander.scala
|
Scala
|
apache-2.0
| 1,673
|
package io.github.shogowada.scalajs.reactjs.classes
import scala.scalajs.js
@js.native
class ReactClass extends js.Object
|
a1russell/scalajs-reactjs
|
core/src/main/scala/io/github/shogowada/scalajs/reactjs/classes/ReactClass.scala
|
Scala
|
mit
| 124
|
import sbt._
import Keys._
object B extends Build {
lazy val root = Project("root", file("."))
.configs( IntegrationTest )
.settings( Defaults.itSettings : _*)
}
|
colder/scala-smtlib
|
project/Build.scala
|
Scala
|
mit
| 206
|
package org.jetbrains.plugins.scala
package lang.refactoring.changeSignature
import com.intellij.psi.PsiElement
import com.intellij.refactoring.changeSignature.{ChangeInfo, JavaChangeInfo, OverriderUsageInfo}
import com.intellij.util.containers.MultiMap
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.base.ScPrimaryConstructor
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScBindingPattern
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScClassParameter
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScPatternDefinition, ScVariableDefinition}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScMember
import org.jetbrains.plugins.scala.lang.psi.types.api.ValType
import org.jetbrains.plugins.scala.lang.refactoring.changeSignature.changeInfo.ScalaChangeInfo
/**
* Nikolay.Tropin
* 2014-08-13
*/
private[changeSignature] object ConflictsUtil {
type ConflictsMap = MultiMap[PsiElement, String]
def addJavaOverriderConflicts(info: OverriderUsageInfo, change: ChangeInfo, map: ConflictsMap): Unit = {
change match {
case sc: ScalaChangeInfo if sc.newParameters.exists(p => p.isByName && p.scType.isInstanceOf[ValType]) =>
        val message = "This method has Java overriders; by-name parameters of value classes cannot be used."
map.putValue(info.getOverridingMethod, message)
case _ =>
}
}
def addBindingPatternConflicts(bp: ScBindingPattern,
change: ChangeInfo,
result: ConflictsMap): Unit = {
if (change.getNewParameters.nonEmpty) {
val (member: ScMember, kind, isSimple) = bp match {
case ScalaPsiUtil.inNameContext(pd: ScPatternDefinition) => (pd, "pattern definition", pd.isSimple)
case ScalaPsiUtil.inNameContext(vd: ScVariableDefinition) => (vd, "variable definition", vd.isSimple)
case _ => return
}
if (!isSimple) {
val className = member.containingClass.qualifiedName
        val message = s"Method is overridden in a composite $kind in $className. " +
          "Converting it to a function definition is not supported."
result.putValue(bp, message)
}
}
}
def addClassParameterConflicts(cp: ScClassParameter, change: ChangeInfo, result: ConflictsMap): Unit = {
if (change.getNewParameters.nonEmpty) {
val className = cp.containingClass.qualifiedName
      val message = s"Method is overridden by class parameter of $className. " +
"Converting it to a function definition is not supported."
result.putValue(cp, message)
}
}
def addUnapplyUsagesConflicts(p: PatternUsageInfo, change: ChangeInfo, result: ConflictsMap): Unit = {
change match {
case jc: JavaChangeInfo if jc.isParameterSetOrOrderChanged || jc.isParameterTypesChanged =>
jc.getMethod match {
case ScPrimaryConstructor.ofClass(clazz) if clazz.isCase =>
val message = "Updating of usages of generated `unapply` methods is not supported"
result.putValue(p.pattern, message)
case _ =>
}
case _ =>
}
}
}
|
triplequote/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ConflictsUtil.scala
|
Scala
|
apache-2.0
| 3,224
|
import scala.quoted.*
object Foo {
inline def foo(x: Int): Int = ${ bar('{ 'x; x }) } // error
def bar(i: Expr[Int]): Expr[Int] = i
}
|
lampepfl/dotty
|
tests/neg-macros/quote-pcp-in-arg.scala
|
Scala
|
apache-2.0
| 139
|
// This software is Copyright 2012, Mike (stew) O'Connor <stew@vireo.org>
//
// This software is dual licensed under the GPL-3 and the Apache 2.0
// license. Feel free to use, modify, and redistribute this software
// under the terms of either license. Both licenses appear verbatim in
// the file named COPYING which you should have received as part of this
// software.
package turing
/**
* Cell
*
 * The tape is made up of a linear sequence of Cells.
* There are three types of cells, X (empty), One, Zero
*/
sealed trait Cell
trait X extends Cell
trait Zero extends Cell
trait One extends Cell
case object X extends X
case object Zero extends Zero
case object One extends One
/**
* Tape
*
* a tape is made up of a current cell, a list of cells to
* the left of current and a list of cells to the right of
* current
*/
case class Tape[+L <: Cells, +C <: Cell, +R <: Cells](left: L, current: C, right: R)
/**
* TapeOps
*
 * pimp the Tape object to provide the run method which actually
* calculates the result
*/
final class TapeOps[L <: Cells, C <: Cell, R <: Cells](t: Tape[L,C,R]) {
def run[S <: MachineState](s: S)(implicit rs: RunningState[L,C,R,S]) : rs.Out = rs(t,s)
}
object Tape {
implicit def tapeOps[L <: Cells, C <: Cell, R <: Cells](t : Tape[L,C,R]) : TapeOps[L,C,R] = new TapeOps(t)
}
/**
* Cells
*
 * a heterogeneous list of cells which are either X, One, or Zero
*
* This is used to store the list of cells to the right or left
* of the current cell
*/
sealed trait Cells
final case class ::[+H <: Cell, +T <: Cells](head: H, tail: T) extends Cells {
override def toString = head+" :: "+tail.toString
}
/**
* TNil
*
* marks either the left or right end of a tape
*/
trait TNil extends Cells {
def ::[H <: Cell](h: H) = turing.::(h,this)
override def toString = "TapeEnd"
}
case object TNil extends TNil
final class CellsOps[L <: Cells](l : L) {
def ::[H <: Cell](h : H) : H :: L = turing.::(h, l)
}
object Cells {
implicit def cellsOps[L <: Cells](l : L) : CellsOps[L] = new CellsOps(l)
}
/**
* MachineState
*
* A state that the FSM might be in
*/
trait MachineState
trait Halt extends MachineState
case object Halt extends Halt
/**
* LeftState
*
* when the machine transitions to a LeftState, the tape moves one
* cell to the left
*/
trait LeftState extends MachineState
/**
* RightState
*
* when the machine transitions to a RightState, the tape moves one
* cell to the right
*/
trait RightState extends MachineState
/**
* Transition
*
* a transition defines how the FSM moves from one state to another.
* it says, when we are in the FromState, and the current cell on
* the tape matches FromCell, we should write Write to the tape and
* transition to ToState
*
* all possible transition should be in implicit scope
*/
case class Transition[FromState <: MachineState,
ToState <: MachineState,
FromCell <: Cell,
Write <: Cell](fromState: FromState, toState: ToState, write: Write)
/**
* RunningState
*
* running state is a combination of the current Tape and the current
* MachineState
*/
trait RunningState[TL <: Cells, TC <: Cell, TR <: Cells, S <: MachineState] {
type Out
def apply(tape: Tape[TL, TC, TR], state: S): Out
}
object RunningState {
implicit def runningstate[TL <: Cells, TC <: Cell, TR <: Cells, S <: MachineState, Out0](implicit runningstateaux: RunningStateAux[TL, TC, TR, S, Out0]) = new RunningState[TL, TC, TR, S] {
type Out = Out0
def apply(tape: Tape[TL, TC, TR], state: S): Out = runningstateaux(tape, state)
}
}
/**
* RunningStateAux
*
* this is just a rearrangement of the above which has our much desired
* Out type as another type argument
*/
trait RunningStateAux[TL <: Cells, TC <: Cell, TR <: Cells, S <: MachineState, Out] {
def apply(tape: Tape[TL, TC, TR], state: S): Out
}
object RunningStateAux {
/**
* halted
*
* provide an implicit RunningStateAux object for any Tape for a running
* machine in the Halt state. If a machine enters the halt state, we now
* know what the Tape in the Out position looks like
*/
implicit def halted[TL <: Cells,
TC <: Cell,
TR <: Cells] : RunningStateAux[TL, TC, TR, Halt.type, Tape[TL, TC, TR]] =
new RunningStateAux[TL, TC, TR, Halt.type, Tape[TL, TC, TR]] {
def apply(tape: Tape[TL, TC, TR], state: Halt.type): Tape[TL, TC, TR] = tape
}
/**
* previousLeftState
*
* if we have an implicit Transition available that takes us from some
   * LeftState state to a state for which there is already an implicit
   * RunningStateAux, we supply an implicit RunningStateAux for the
   * tape in the state it would be in before the LeftState was reached
*
*/
implicit def previousLeftState[TLH <: Cell,
TLT <: Cells,
TC <: Cell,
TR <: Cells,
UC <: Cell,
S <: LeftState,
N <: MachineState,
Out]
(implicit transition: Transition[S, N, TLH, UC],
nextRunningState: RunningStateAux[TLT,UC,TC :: TR, N, Out]) =
new RunningStateAux[TLH :: TLT, TC, TR, S, Out] {
      // remove the head from the tape on the left of current; the
      // center of the tape becomes whatever the state transition is
      // supposed to write; the right of the tape gets the previous
      // center pushed onto its head
def apply(tape: Tape[TLH :: TLT, TC, TR], state: S) : Out = {
nextRunningState(Tape(tape.left.tail, transition.write, turing.::(tape.current, tape.right)), transition.toState)
}
}
/**
* previousRightState
*
* if we have an implicit Transition available that takes us from some
   * RightState state to a state for which there is already an implicit
   * RunningStateAux, we supply an implicit RunningStateAux for the
   * tape in the state it would be in before the RightState was reached
*
*/
implicit def previousRightState[TL <: Cells,
TC <: Cell,
TRH <: Cell,
TRT <: Cells,
UC <: Cell,
S <: RightState,
N <: MachineState,
Out]
(implicit transition: Transition[S, N, TRH, UC],
nextRunningState: RunningStateAux[TC :: TL, UC, TRT, N, Out]) =
new RunningStateAux[TL, TC, TRH :: TRT, S, Out] {
      // remove the head from the tape on the right of current; the
      // center of the tape becomes whatever the state transition is
      // supposed to write; the left of the tape gets the previous
      // center pushed onto its head
def apply(tape: Tape[TL, TC, TRH :: TRT], state: S) : Out = {
nextRunningState(Tape(turing.::(tape.current, tape.left), transition.write, tape.right.tail), transition.toState)
}
}
}
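// Editor's sketch (hypothetical, not part of the original file): a machine is
// built by declaring states and implicit transitions, then running a Tape.
// Schematically, a one-state machine that moves right and overwrites Zero cells
// with One until it hits a blank could look like this (the singleton-type
// annotations may need adjusting for implicit search to succeed):
//
//   trait FillOnes extends RightState
//   case object FillOnes extends FillOnes
//   implicit val onZero: Transition[FillOnes.type, FillOnes.type, Zero.type, One.type] =
//     Transition(FillOnes, FillOnes, One)
//   implicit val onBlank: Transition[FillOnes.type, Halt.type, X.type, X.type] =
//     Transition(FillOnes, Halt, X)
//
//   // Resolved entirely at compile time through the implicits above; the value
//   // is the final tape: Tape(One :: One :: X :: TNil, X, TNil)
//   val result = Tape(TNil, X, Zero :: Zero :: X :: TNil).run(FillOnes)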
|
stew/scatur
|
src/main/scala/turing.scala
|
Scala
|
gpl-3.0
| 7,187
|
package spark.jobserver.stress
import akka.actor.{ActorSystem, Props}
import akka.pattern.ask
import akka.util.Timeout
import com.typesafe.config.ConfigFactory
import org.joda.time.DateTime
import scala.concurrent.Await
import spark.jobserver._
import spark.jobserver.io.JobFileDAO
/**
* A stress test for launching many jobs within a job context
* Launch using sbt> test:run
* Watch with visualvm to see memory usage
*
* TODO(velvia): Turn this into an actual test. For now it's an app, requires manual testing.
*/
object SingleContextJobStress extends App with TestJarFinder {
import CommonMessages.JobResult
import JobManagerActor._
import scala.collection.JavaConverters._
import scala.concurrent.duration._
val jobDaoPrefix = "target/jobserver/"
val config = ConfigFactory.parseString("""
num-cpu-cores = 4 # Number of cores to allocate. Required.
memory-per-node = 512m # Executor memory per node, -Xmx style eg 512m, 1G, etc.
""")
val system = ActorSystem("test", config)
// Stuff needed for futures and Await
implicit val ec = system
implicit val ShortTimeout = Timeout(3 seconds)
val jobDaoDir = jobDaoPrefix + DateTime.now.toString()
val jobDaoConfig = ConfigFactory.parseMap(Map("spark.jobserver.filedao.rootdir" -> jobDaoDir).asJava)
val dao = new JobFileDAO(jobDaoConfig)
val jobManager = system.actorOf(Props(classOf[JobManagerActor], dao, "c1", "local[4]", config, false))
private def uploadJar(jarFilePath: String, appName: String) {
val bytes = scala.io.Source.fromFile(jarFilePath, "ISO-8859-1").map(_.toByte).toArray
dao.saveJar(appName, DateTime.now, bytes)
}
private val demoJarPath = testJar.getAbsolutePath
private val demoJarClass = "spark.jobserver.WordCountExample"
private val emptyConfig = ConfigFactory.parseString("")
// Create the context
val res1 = Await.result(jobManager ? Initialize, 3 seconds)
assert(res1.getClass == classOf[Initialized])
uploadJar(demoJarPath, "demo1")
// Now keep running this darn test ....
var numJobs = 0
val startTime = System.currentTimeMillis()
while (true) {
val f = jobManager ? StartJob("demo1", demoJarClass, emptyConfig, Set(classOf[JobResult]))
Await.result(f, 3 seconds) match {
case JobResult(info, Some(m)) =>
numJobs += 1
if (numJobs % 100 == 0) {
val elapsed = System.currentTimeMillis() - startTime
println("%d jobs finished in %f seconds".format(numJobs, elapsed / 1000.0))
}
case x =>
println("Some error occurred: " + x)
sys.exit(1)
}
// Thread sleep 1000
}
}
|
nachiketa-shukla/spark-jobserver
|
job-server/test/spark.jobserver/stress/SingleContextJobStress.scala
|
Scala
|
apache-2.0
| 2,637
|
/*
Copyright 2013 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.zipkin.common
import com.twitter.bijection._
import com.twitter.summingbird.batch.BatchID
object Serialization {
implicit def kInj[T: Codec]: Injection[(T, BatchID), Array[Byte]] = {
implicit val buf =
Bufferable.viaInjection[(T, BatchID), (Array[Byte], Array[Byte])]
Bufferable.injectionOf[(T, BatchID)]
}
implicit def vInj[V: Codec]: Injection[(BatchID, V), Array[Byte]] =
Injection.connect[(BatchID, V), (V, BatchID), Array[Byte]]
implicit val mapStrListInj: Injection[Map[String, List[Long]], Array[Byte]] =
Bufferable.injectionOf[Map[String, List[Long]]]
implicit val mapStrListTupleInj: Injection[Map[String, (List[Long], List[Long])], Array[Byte]] =
Bufferable.injectionOf[Map[String, (List[Long], List[Long])]]
}
|
gardleopard/zipkin
|
zipkin-common/src/main/scala/com/twitter/zipkin/storm/Serialization.scala
|
Scala
|
apache-2.0
| 1,350
|
/*
* Copyright (C) 2011 Mathias Doenitz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.backchat.http
package parser
import org.parboiled.scala._
import BasicRules._
// implementation of additional parsing rules required for extensions that are not in the core HTTP standard
private[parser] trait AdditionalRules {
this: Parser ⇒
def Ip: Rule1[HttpIp] = rule (
group(IpNumber ~ ch('.') ~ IpNumber ~ ch('.') ~ IpNumber ~ ch('.') ~ IpNumber)
~> HttpIp.fromString ~ OptWS)
def IpNumber = rule {
Digit ~ optional(Digit ~ optional(Digit))
}
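  // Editor's note: IpNumber accepts any one- to three-digit group (e.g. "999"),
  // so octet range checking, if it happens at all, is deferred to HttpIp.fromString.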
def AuthScheme = rule {
Token ~ OptWS
}
def AuthParam = rule {
Token ~ "=" ~ (Token | QuotedString) ~~> ((_, _))
}
}
|
backchatio/http-parsers
|
src/main/scala/io/backchat/http/parser/AdditionalRules.scala
|
Scala
|
apache-2.0
| 1,221
|
package org.scaladebugger.tool.frontend.history
/**
* Represents the interface for storing and loading terminal history.
*/
trait HistoryManager {
/**
* Adds a new line to the current history and updates the persistent history.
*
* @param line The line to add
*/
def writeLine(line: String): Unit
/**
* Adds multiple lines to the current history and updates the persistent
* history.
*
* @param lines The lines to add
*/
def writeLines(lines: String*): Unit = lines.foreach(writeLine)
/**
* Returns the current number of lines of history held by the manager.
*
* @return The total number of lines
*/
def size: Int = lines.size
/**
* Returns the collection of lines stored in history in order of
* being added (oldest to newest).
*
* @return The collection of lines
*/
def lines: Seq[String]
/**
* Returns the collection of lines stored in history in order of
* most to least recently added.
*
* @return The collection of lines
*/
def linesByMostRecent: Seq[String] = lines.reverse
/**
* Returns the maximum number of lines that will be kept in history.
*
* @return The maximum number of lines, or -1 if there is no limit
*/
def maxLines: Int
/**
* Destroys the internal and persistent history.
*/
def destroy(): Unit
}
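/**
 * Editor's sketch (hypothetical, not part of the original file): a minimal
 * in-memory implementation of the trait above with no persistent backing store
 * and an optional cap on the number of retained lines.
 */
class InMemoryHistoryManager(override val maxLines: Int = -1) extends HistoryManager {
  private var internalLines = Vector.empty[String]
  override def writeLine(line: String): Unit = {
    internalLines :+= line
    // Enforce the cap by dropping the oldest entries, if a limit was given
    if (maxLines >= 0 && internalLines.size > maxLines)
      internalLines = internalLines.takeRight(maxLines)
  }
  override def lines: Seq[String] = internalLines
  override def destroy(): Unit = internalLines = Vector.empty
}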
|
ensime/scala-debugger
|
scala-debugger-tool/src/main/scala/org/scaladebugger/tool/frontend/history/HistoryManager.scala
|
Scala
|
apache-2.0
| 1,343
|
package com.github.caiiiycuk.async4s.test
import org.scalatest.FlatSpec
import org.scalatest.matchers.ShouldMatchers
import com.github.caiiiycuk.async4s.dsl.Async4sDSL
import com.github.caiiiycuk.async4s.impl.Async4sTestClient
import com.github.caiiiycuk.async4s.request.RequestUrl
class GET extends FlatSpec with ShouldMatchers {
import Async4sDSL._
import Async4sTestClient._
behavior of "GET dsl"
it should "GET single url as string" in {
val content = get("http://google.com")
content should include("google")
}
it should "GET single url as bytes" in {
val bytes = get("http://google.com" as BYTES)
    bytes.length should be > (0)
}
it should "GET urls from seq" in {
val responses = get(Seq[RequestUrl[String]](
"http://google.com",
"http://www.bing.com",
"http://www.yahoo.com",
"https://duckduckgo.com/",
"http://www.yandex.ru"))
responses(0) should include("google")
responses(1) should include("bing")
responses(2) should include("yahoo")
responses(3) should include("duckduckgo")
responses(4) should include("yandex")
}
it should "GET with RAW response type" in {
val response = get("http://google.com" as RAW)
response.getContentType() should equal ("text/html; charset=UTF-8")
}
}
|
caiiiycuk/async4s-http-client
|
src/test/scala/com/github/caiiiycuk/async4s/test/GET.scala
|
Scala
|
mit
| 1,296
|
package rewriting.rules
import ir._
import ir.ast._
import lift.arithmetic._
import opencl.ir.pattern._
import rewriting.utils.Utils
object Rules {
import rewriting.utils.Utils.isTranspose
  /* Rules required for 2D stencil rewrite */ // TODO find appropriate names
  /* Slide-promotion */ // TODO not really because of map(join)... find better name
val slidePromotion = Rule("Slide(u,v) o Map(Join()) => Map(Map(Join())) o Slide(u,v)",{
case FunCall(Slide(u,v), FunCall(Map(Lambda(_, FunCall(Join(), _))), arg)) =>
Map(Map(Join())) o Slide(u,v) $ arg
})
val slideSwap = Rule("Slide(u,v) o Map(Map(Slide(n,s))) => Map(Map(Map(Slide(n,s)))) o Slide(u,v)",{
case FunCall(Slide(u,v), FunCall(Map(Lambda(
_, FunCall(Map(Lambda(
_, FunCall(Slide(n,s), _))), _))), arg)) =>
Map(Map(Map(Slide(n,s)))) o Slide(u,v) $ arg
})
val joinSwap = Rule("Join() o Map(Map(f)) => Map(f) o Join()", {
case FunCall(Join(), FunCall(Map(Lambda(_, FunCall(map@Map(_), _))), arg)) =>
map o Join() $ arg
})
// todo reduce on layer of maps and use map fission before applying this rule
val transposeSwap = Rule("Map(Map(Map(Transpose()))) o Map(Transpose()) => Map(Transpose()) o Map(Map(Map(Transpose())))", {
case FunCall(Map(Lambda(_, FunCall(Map(Lambda(_, FunCall(Map(Lambda(_, FunCall(Transpose(),
_))), _))), _))), FunCall(Map(Lambda(
_, FunCall(
Transpose(), _))), arg)) =>
Map(Transpose()) o Map(Map(Map(Transpose()))) $ arg
})
// todo reduce on layer of maps and use map fission before applying this rule
val slideTransposeSwap = Rule("Map(Map(Map(Slide(u,v)))) o Map(Transpose()) => Map(Transpose) o Map(Map(Map(Slide(u,v))))", {
case FunCall(Map(Lambda(
_, FunCall(Map(Lambda(
_, FunCall(Map(Lambda(
_, FunCall(Slide(u,v), _))), _))), _))), FunCall(Map(Lambda(
_, FunCall(
Transpose(), _))), arg)) =>
Map(Transpose()) o Map(Map(Map(Slide(u,v)))) $ arg
})
val slideTransposeReordering = Rule("Map(Slide(u,v)) o Map(Transpose()) => " +
"Map(Map(Transpose())) o Map(Transpose()) o Map(Map(Slide(u,v)))", {
case FunCall(Map(Lambda(
_, FunCall(Slide(u,v), _))), FunCall(Map(Lambda(
_, FunCall(Transpose(), _))), arg)) =>
Map(Map(Transpose())) o Map(Transpose()) o Map(Map(Slide(u,v))) $ arg
})
val transposeMapJoinReordering = Rule("Transpose() o Map(Join()) => Join() o Map(Transpose()) o Transpose()", {
case FunCall(Transpose(), FunCall(Map(Lambda(_, FunCall(Join(), _))), arg)) =>
Join() o Map(Transpose()) o Transpose() $ arg
})
val idTransposeTranspose = Rule("id => Transpose() o Transpose()", {
case FunCall(Id(), arg) =>
Transpose() o Transpose() $ arg
})
/* Reorder commutativity rules */
val gatherToScatter = Rule("Map() o Gather(f) => Scatter(f) o Map()", {
case FunCall(Map(f), FunCall(Gather(g), arg)) =>
Scatter(g) o Map(f) $ arg
})
val scatterToGather = Rule("Scatter(f) o Map() => Map() o Gather(f)", {
case FunCall(Scatter(g), FunCall(Map(f), arg)) =>
Map(f) o Gather(g) $ arg
})
/* Slide rules */
val slideTiling: Rule = slideTiling(?)
def slideTiling(tileStep: ArithExpr) = Rule("Slide(n, s) => Join() o Map(Slide(n, s)) o Slide(u, v)", {
case FunCall(Slide(n,s), arg) if
tileStep == ? || // either we set it to valid value
ArithExpr.isSmaller(s,tileStep).getOrElse(false) || // tile is bigger (valid)
tileStep.equals(s) => // tile is as big as previous slide (creates one sliding window, valid)
val step = Utils.splitVariable(tileStep, arg.t)
val overlap = n-s
Join() o Map(Slide(n, s)) o Slide(step + overlap, step) $ arg
})
val mapSeqSlide = Rule("Map(fun(m => {})) o Slide(n,s) => MapSeqSlide(fun(m => {}), n, s)",
{
case FunCall(Map(lambda), FunCall(Slide(n,s), arg)) =>
MapSeqSlide(lambda,n,s) $ arg
})
val mapSeqSlideSeq = Rule("MapSeq(fun(m => {})) o Slide(n,s) => MapSeqSlide(fun(m => {}), n, s)",
{
case FunCall(MapSeq(lambda), FunCall(Slide(n,s), arg)) =>
MapSeqSlide(lambda,n,s) $ arg
})
/* Split-join rule */
val splitJoin: Rule = splitJoin(?)
def splitJoin(split: ArithExpr) = Rule("Map(f) => Join() o Map(Map(f)) o Split(I)", {
case FunCall(Map(f), arg) =>
val chunkSize = Utils.splitVariable(split, arg.t)
Join() o Map(Map(f)) o Split(chunkSize) $ arg
})
val splitJoinMapSeq: Rule = splitJoinMapSeq(?)
def splitJoinMapSeq(split: ArithExpr) = Rule("MapSeq(f) => Join() o Map(MapSeq(f)) o Split(I)", {
case FunCall(MapSeq(f), arg) =>
val chunkSize = Utils.splitVariable(split, arg.t)
Join() o Map(MapSeq(f)) o Split(chunkSize) $ arg
})
val joinSplit = Rule("Map(Map(f)) => Split(I) o Map(f) o Join()", {
case call @ FunCall(Map(Lambda(Array(p), FunCall(Map(f), mapArg))), arg)
if p == mapArg =>
val length = arg.t match {
case at: ArrayType => Type.getLength(at.elemT)
case ty => throw new TypeException(ty, "Array(_)", call)
}
Split(length) o Map(f) o Join() $ arg
})
// Required for avoiding data races
val splitJoinReduce = Rule("Reduce(f) $ data => Join() o Map(Reduce(f)) o Split(data.length)", {
case FunCall(red: Reduce, init, arg) =>
val length = arg.t match { case ArrayTypeWS(_, n) => n }
Join() o Map(fun(x => red(init, x))) o Split(length) $ arg
case FunCall(red: ReduceSeq, init, arg) =>
val length = arg.t match { case ArrayTypeWS(_, n) => n }
Join() o Map(fun(x => red(init, x))) o Split(length) $ arg
})
/* Stride accesses or normal accesses */
// TODO
/* Other */
val reorderBothSidesWithStride: Rule = reorderBothSidesWithStride(?)
def reorderBothSidesWithStride(stride: ArithExpr): Rule = {
Rule("Map(f) => Reorder(g^{-1}) o Map(f) o Reorder(g)", {
case FunCall(map@Map(_), arg) =>
val s = Utils.splitVariable(stride, arg.t)
Scatter(ReorderWithStride(s)) o map o Gather(ReorderWithStride(s)) $ arg
})
}
val mapSplitTranspose = Rule("Map(Split(n)) o Transpose() => " +
"Transpose() o Map(Transpose()) o Split(n)", {
case FunCall(Map(Lambda(param, FunCall(Split(n), a))), FunCall(t, arg))
if (param.head eq a) && isTranspose(t)
=>
Transpose() o Map(Transpose()) o Split(n) $ arg
})
val mapTransposeSplit = Rule("Map(Transpose()) o Split(n) => " +
"Transpose() o Map(Split(n)) o Transpose()", {
case FunCall(Map(Lambda(param, FunCall(t, a))), FunCall(Split(n), arg))
if (param.head eq a) && isTranspose(t)
=>
Transpose() o Map(Split(n)) o Transpose() $ arg
})
val transposeMapSplit = Rule("Transpose() o Map(Split(n)) => " +
"Map(Transpose()) o Split(n) o Transpose()", {
case FunCall(t, FunCall(Map(Lambda(param, FunCall(Split(n), a))), arg))
if (param.head eq a) && isTranspose(t)
=>
Map(Transpose()) o Split(n) o Transpose() $ arg
})
val splitTranspose = Rule("Split(n) o Transpose() => " +
"Map(Transpose()) o Transpose() o Map(Split(n))", {
case FunCall(Split(n), FunCall(t, arg))
if isTranspose(t)
=>
Map(Transpose()) o Transpose() o Map(Split(n)) $ arg
})
val mapTransposeTransposeMapTranspose =
Rule("Map(Transpose()) o Transpose() o Map(Transpose())) => " +
"Transpose() o Map(Transpose()) o Transpose()", {
case FunCall(Map(Lambda(param1, FunCall(t1, a1))),
FunCall(t2,
FunCall(Map(Lambda(param2, FunCall(t3, a2))), arg)))
if (param1.head eq a1)
&& (param2.head eq a2)
&& isTranspose(t1)
&& isTranspose(t2)
&& isTranspose(t3)
=>
Transpose() o Map(Transpose()) o Transpose() $ arg
})
// TODO: Does it matter there is a Map after the Split? Saves a fusion.
val splitIntoZip = Rule("Map(fun(x => Map())) o Split() $ Zip(...) => " +
"Map(fun(x => Map() $ Zip(Get(n, x), ...))) $ Zip(Split() $ ..., ...)", {
case FunCall(Map(Lambda(lambdaParam, FunCall(Map(mapLambda), mapArg))), FunCall(Split(n), FunCall(Zip(_), zipArgs@_*)))
if lambdaParam.head eq mapArg
=>
val newZipArgs = zipArgs.map(arg => Split(n) $ arg)
val newLambdaParam = Param()
val innerZipArgs = zipArgs.indices.map(Get(_)(newLambdaParam))
Map(Lambda(Array(newLambdaParam), Map(mapLambda) $ Zip(innerZipArgs:_*))) $ Zip(newZipArgs:_*)
})
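// Helpers for joinFromZip: match a Join() call and read the two outermost array lengths of its argument.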
private val joinCall: PartialFunction[Expr, Unit] = { case FunCall(Join(), _) => }
private val getOutermostLengths: PartialFunction[Expr, (ArithExpr, ArithExpr)] =
{ case FunCall(_, arg) => arg.t match {
case ArrayTypeWS(ArrayTypeWS(_, inner), outer) => (inner, outer)
}}
val joinFromZip = Rule(" Zip(Join() $ ..., Join() $ ..., ...) => " +
"Join() o Map(Zip(...)) $ Zip(...)", {
case FunCall(Zip(_), zipArgs@_*)
if zipArgs.forall(joinCall.isDefinedAt) && zipArgs.map(getOutermostLengths).distinct.size == 1
=>
val newArgs = zipArgs.map({ case FunCall(_, arg) => arg })
val p = Param()
val innerArgs = newArgs.indices.map(Get(p, _))
Join() o Map(Lambda(Array(p), Zip(innerArgs:_*))) $ Zip(newArgs:_*)
})
val transposeMapTransposeReorder =
Rule("Map(Gather(f) o Transpose()) o Transpose() => " +
" Map(Transpose()) o Transpose() o Map(Map(Gather(f)))", {
case FunCall(Map(Lambda(p,
FunCall(f:Gather, FunCall(t1, a))
)), FunCall(t2, arg))
if (p.head eq a)
&& isTranspose(t1)
&& isTranspose(t2)
=>
Map(Transpose()) o Transpose() o Map(Map(f)) $ arg
})
val reorderTranspose =
Rule("Transpose() o Scatter(f) => Map(Scatter(f)) o Transpose()", {
case FunCall(t, FunCall(f:Scatter, arg))
if isTranspose(t)
=>
Map(f) o Transpose() $ arg
})
val bSearch = Rule("Search(f) => BSearch(f)", {
case FunCall(Search(f), arg) => BSearch(f) $ arg
})
val lSearch = Rule("Search(f) => LSearch(f)", {
case FunCall(Search(f), arg) => LSearch(f) $ arg
})
val absSearchB = Rule("BSearch(f) => Search(f)", {
case FunCall(BSearch(f), arg) => Search(f) $ arg
})
val absSearchL = Rule("LSearch(f) => Search(f)", {
case FunCall(LSearch(f), arg) => Search(f) $ arg
})
}
|
lift-project/lift
|
src/main/rewriting/rules/Rules.scala
|
Scala
|
mit
| 10,354
|
package org.jetbrains.plugins.scala.codeInspection.implicits
import com.intellij.codeInspection.ProblemsHolder
import com.intellij.openapi.project.Project
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.codeInsight.intention.types.ConvertImplicitBoundsToImplicitParameter._
import org.jetbrains.plugins.scala.codeInspection.implicits.DeprecatedViewBoundInspection._
import org.jetbrains.plugins.scala.codeInspection.{AbstractFixOnPsiElement, AbstractInspection}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypeBoundsOwner
/**
* Nikolay.Tropin
* 2014-11-18
*/
class DeprecatedViewBoundInspection extends AbstractInspection(description) {
override def actionFor(implicit holder: ProblemsHolder): PartialFunction[PsiElement, Any] = {
case boundsOwner: ScTypeBoundsOwner if boundsOwner.viewBound.nonEmpty && canBeConverted(boundsOwner) =>
holder.registerProblem(boundsOwner, description, new ConvertToImplicitParametersQuickFix(boundsOwner))
}
}
class ConvertToImplicitParametersQuickFix(owner: ScTypeBoundsOwner) extends AbstractFixOnPsiElement(fixDescription, owner) {
override protected def doApplyFix(boundOwner: ScTypeBoundsOwner)
(implicit project: Project): Unit = {
val addedParams = doConversion(boundOwner)
runRenamingTemplate(addedParams)
}
}
object DeprecatedViewBoundInspection {
val id = "DeprecatedViewBound"
val description = "View bounds are deprecated"
val fixDescription = "Replace with implicit parameters"
}
|
jastice/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/codeInspection/implicits/DeprecatedViewBoundInspection.scala
|
Scala
|
apache-2.0
| 1,535
|
package com.hunorkovacs.koauth.domain.mapper
import com.hunorkovacs.koauth.domain.KoauthRequest
import scala.concurrent.{ExecutionContext, Future}
trait RequestMapper[RequestType] {
def map(source: RequestType): Future[KoauthRequest]
}
|
kovacshuni/koauth
|
src/main/scala/com/hunorkovacs/koauth/domain/mapper/RequestMapper.scala
|
Scala
|
apache-2.0
| 241
|
package gdg.blaze.godaddy
import com.google.common.io.Resources
import com.google.common.net.HostAndPort
import gdg.blaze._
import gdg.feed.netty.FeedSpec
import gdg.feed.proto.gen.Feed.Envelope
import gdg.feed.proto.gen.Feed.ServiceHeader.Subscribe.Offsets
import gdg.feed.proto.gen.Feed.ServiceHeader.Subscribe.StartingPoint.Edge
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.dstream.DStream
class FeedInput(pc: FeedInputConfig, bc: BlazeContext) extends Input {
override def apply(): DStream[Message] = {
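// Build a websocket subscription to the configured feed (protobuf Envelope messages, starting from the latest edge)
// and wrap it in a FeedClientDStream.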
val spec = FeedSpec.builder()
.`type`(Envelope.getDefaultInstance).websocket()
.sub()
.feed(pc.feed)
.start(Edge.LATEST, Offsets.getDefaultInstance)
.protobuf()
.location(HostAndPort.fromParts("feed-http.databus.prod.int.godaddy.com", 443))
val keystore = Resources.asByteSource(Resources.getResource("parquet-feed-writer-prod.jks")).read()
new FeedClientDStream(bc.sc, StorageLevel.MEMORY_AND_DISK, spec.build().toString, keystore, "parquet-feed-writer-password")
}
}
case class FeedInputConfig(feed: String)
object FeedInput extends PluginFactory[FeedInput] {
override def apply(pc: PluginConfig, bc: BlazeContext): FeedInput = new FeedInput(pc.convert(classOf[FeedInputConfig]), bc)
}
|
micahrupersburg/blaze-of-glory
|
src/main/scala/gdg/blaze/godaddy/FeedInput.scala
|
Scala
|
apache-2.0
| 1,285
|
package scorex.transaction
import scorex.block.{Block, BlockProcessingModule}
trait TransactionModule[TransactionBlockData] extends BlockProcessingModule[TransactionBlockData] {
val blockStorage: BlockStorage
val utxStorage: UnconfirmedTransactionsStorage
def isValid(block: Block): Boolean
/**
* Check whether tx is valid on current state
*/
def isValid(tx: Transaction): Boolean = blockStorage.state.isValid(tx)
def transactions(block: Block): Seq[Transaction]
/**
* Returns all unconfirmed transactions
*/
def unconfirmedTxs(): Seq[Transaction] = utxStorage.all()
def putUnconfirmedIfNew(tx: Transaction): Boolean = utxStorage.putIfNew(tx)
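/**
* Pack unconfirmed transactions into data for a new block
*/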
def packUnconfirmed(): TransactionBlockData
def clearFromUnconfirmed(data: TransactionBlockData): Unit
def onNewOffchainTransaction(transaction: Transaction): Unit
lazy val balancesSupport: Boolean = blockStorage.state match {
case _: State with BalanceSheet => true
case _ => false
}
lazy val accountWatchingSupport: Boolean = blockStorage.state match {
case _: State with AccountTransactionsHistory => true
case _ => false
}
}
|
alexeykiselev/WavesScorex
|
scorex-basics/src/main/scala/scorex/transaction/TransactionModule.scala
|
Scala
|
cc0-1.0
| 1,151
|
// Hello, World
object HelloWorld extends App {
println("Hello, World! in file.")
}
|
wnoguchi/my-workspace
|
2014_summer/scala/src/main/scala/HelloWorld.scala
|
Scala
|
mit
| 86
|
/*
* ResolutionParser.scala
*
* To change this template, choose Tools | Template Manager
* and open the template in the editor.
*/
package at.logic.gapt.formats.simple
import at.logic.gapt.formats.InputParser
import at.logic.gapt.proofs.lk.base._
trait ResolutionParser extends InputParser {
def clauseList: Parser[List[FSequent]]
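// Parses the whole input as a clause list and closes the reader afterwards.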
def getClauseList(): List[FSequent] = {
val reader = getInput()
try {
parseAll( clauseList, reader ).get
} finally {
reader.close()
}
}
}
|
gisellemnr/gapt
|
src/main/scala/at/logic/gapt/formats/simple/ResolutionParser.scala
|
Scala
|
gpl-3.0
| 512
|
package com.scalableminds.webknossos.datastore.helpers
import com.scalableminds.util.geometry.{Vec3Int, Vec3Double}
import com.scalableminds.webknossos.datastore.SkeletonTracing.{Node, SkeletonTracing}
object SkeletonTracingDefaults extends ProtoGeometryImplicits {
private val dataSetName = ""
private val trees = Seq()
private def createdTimestamp = System.currentTimeMillis()
private val boundingBox = None
private val activeNodeId = None
val editPosition: Vec3Int = Vec3Int(0, 0, 0)
val editRotation: Vec3Double = Vec3Double(0, 0, 0)
val zoomLevel: Double = 2.0
private val version = 0
private val userBoundingBox = None
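// Build a SkeletonTracing populated with the default values above.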
def createInstance: SkeletonTracing =
SkeletonTracing(dataSetName,
trees,
createdTimestamp,
boundingBox,
activeNodeId,
editPosition,
editRotation,
zoomLevel,
version,
userBoundingBox)
}
object NodeDefaults extends ProtoGeometryImplicits {
val id: Int = 0
val rotation: Vec3Double = Vec3Double(0, 0, 0)
val position: Vec3Int = Vec3Int(0, 0, 0)
val radius: Float = 1.0f
val viewport: Int = 1
val resolution: Int = 1
val bitDepth: Int = 0
val interpolation: Boolean = false
def createdTimestamp: Long = System.currentTimeMillis()
def createInstance: Node =
Node(id, position, rotation, radius, viewport, resolution, bitDepth, interpolation, createdTimestamp)
}
|
scalableminds/webknossos
|
webknossos-datastore/app/com/scalableminds/webknossos/datastore/helpers/SkeletonElementDefaults.scala
|
Scala
|
agpl-3.0
| 1,518
|
package com.autodesk.tct.utilities
import java.util.{UUID, Date}
import org.joda.time.DateTime
import play.api.libs.json.Json._
import play.api.libs.json._
import scala.language.postfixOps
/**
* Json helper
*
* Converts a Map or String to Json
*/
object AsJson {
/**
* Converts a Map to Json
*
* @param data the data map
* @return the JsValue
*/
def apply(data: Map[String, Any]): JsValue = {
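// Wrap each supported value type in its JsValue counterpart; nested maps recurse,
// and anything unrecognised falls back to its toString representation.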
val wrapped: Map[String, JsValue] = data flatMap {
case (key, value: String) => Some(key, JsString(value))
case (key, value: Int) => Some(key, JsNumber(value))
case (key, value: Long) => Some(key, JsNumber(value))
case (key, value: Float) => Some(key, JsNumber(BigDecimal(value)))
case (key, value: Double) => Some(key, JsNumber(value))
case (key, value: Boolean) => Some(key, JsBoolean(value))
case (key, value: Date) => Some(key, JsString(value.toString))
case (key, value: DateTime) => Some(key, JsString(value.toString))
case (key, value: UUID) => Some(key, JsString(value.toString))
case (key, value: JsValue) => Some(key, value)
case (key, value: Seq[_]) => Some(key, JsArray(value.map(v => JsString(v.toString))))
case (key, value: Set[_]) => Some(key, JsArray(value.toSeq.map(v => JsString(v.toString))))
case (key, value: Map[_, _]) =>
Some(key, AsJson(value map (
e => (e._1.toString, e._2))
))
case (key, value) => Some(key.toString, JsString(value.toString))
}
toJson(wrapped toMap)
}
/**
* Converts a String to Json
*
* @param data the JSON string to convert
* @return a JsValue
*/
def apply(data: String): JsValue = try {
parse(data)
} catch {
case _: Throwable => toJson(Map[String, String]())
}
}
|
adsk-cp-tct/challenger-backend
|
app/com/autodesk/tct/utilities/AsJson.scala
|
Scala
|
gpl-3.0
| 1,806
|
class C1 {
def f {}
}
class C2 extends C1 {
override def f {}
println(/* line: 6 */f)
println(super./* line: 2 */f)
}
|
ilinum/intellij-scala
|
testdata/resolve2/inheritance/super/single/ClashClass.scala
|
Scala
|
apache-2.0
| 127
|
package com.ml.algorithm
import com.ml._
import scala.util.Random
import scala.util.control.Breaks._
class Annealing(
trials: Int,
Tmax: Float)
extends Optimizer{
val rand = new Random()
def apply(p: Problem)(implicit ds: Diagnostic[p.SolutionType]): (p.SolutionType) = {
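// Metropolis-style acceptance: accept a worse solution with probability exp(-d/temp),
// where the temperature decays linearly from Tmax to 0 over the run.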
def bypass(d: Double, i: Int): Boolean = {
val temp = Tmax * (trials - i).toFloat / trials.toFloat
val prob = Math.exp(-d/temp)
rand.nextFloat() < prob // nextFloat is uniform in [0, 1)
}
var sol = p.potential()
breakable {
for(i <- 0 until trials) {
val next = sol.mutate()
val diff = next.fitness - sol.fitness
if(diff < 0 || bypass(diff, i))
sol = next
ds log Seq(sol)
if(ds.finished) break
}
}
sol
}
override def toString() = "Annealing"
}
|
BrettAM/EvCompHW
|
src/main/scala/ml/algorithm/Annealing.scala
|
Scala
|
apache-2.0
| 952
|
/**
* This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt.internal.util.codec
import _root_.sjsonnew.JsonFormat
trait AbstractEntryFormats { self: sjsonnew.BasicJsonProtocol with sbt.internal.util.codec.StringEventFormats with sbt.internal.util.codec.TraceEventFormats with sbt.internal.util.codec.ProgressItemFormats with sbt.internal.util.codec.ProgressEventFormats =>
implicit lazy val AbstractEntryFormat: JsonFormat[sbt.internal.util.AbstractEntry] = flatUnionFormat3[sbt.internal.util.AbstractEntry, sbt.internal.util.StringEvent, sbt.internal.util.TraceEvent, sbt.internal.util.ProgressEvent]("type")
}
|
sbt/sbt
|
internal/util-logging/src/main/contraband-scala/sbt/internal/util/codec/AbstractEntryFormats.scala
|
Scala
|
apache-2.0
| 688
|
/*
* Copyright 2015-2016 IBM Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package whisk.core.loadBalancer
import java.util.concurrent.atomic.AtomicReference
import scala.concurrent.duration.DurationInt
import scala.language.postfixOps
import akka.actor.ActorSystem
import akka.event.Logging.InfoLevel
import spray.json.DefaultJsonProtocol.StringJsonFormat
import spray.json.{ JsObject, JsString, pimpAny, pimpString }
import whisk.common.ConsulClient
import whisk.common.ConsulKV.InvokerKeys
import whisk.common.DateUtil
import whisk.common.Logging
import whisk.core.WhiskConfig
import whisk.core.WhiskConfig.consulServer
import whisk.common.Scheduler
import whisk.common.TransactionId
object InvokerHealth {
val requiredProperties = consulServer
}
/**
* Monitors the health of the invokers. The number of invokers is dynamic.
*
* We are starting to put real load-balancer logic in here too. Should probably be moved out at some point.
*/
class InvokerHealth(
config: WhiskConfig,
instanceChange: Array[Int] => Unit,
getKafkaPostCount: () => Long)(
implicit val system: ActorSystem) extends Logging {
/** We obtain the health of all invokers this often. Although we perform a recursive call,
* the subtree is small and does not contain voluminous information like per-user information.
*/
private val healthCheckInterval = 2 seconds
/** If we do not hear from an invoker for this long, we consider it to be out of commission. */
private val maximumAllowedDelay = 20 seconds
private implicit val executionContext = system.dispatcher
setVerbosity(InfoLevel)
def getInvokerIndices(): Array[Int] = curStatus.get() map { _.index }
def getCurStatus = curStatus.get().clone()
private def getHealth(statuses: Array[InvokerStatus]): Map[Int, Boolean] = {
statuses.map { status => (status.index, status.isUp) }.toMap
}
def getInvokerHealth(): Map[Int, Boolean] = getHealth(curStatus.get())
def getInvokerHealthJson(): JsObject = {
val health = getInvokerHealth().map { case (index, isUp) => s"invoker${index}" -> (if (isUp) "up" else "down").toJson }
JsObject(health toMap)
}
def isFresh(lastDate: String) = {
val lastDateMilli = DateUtil.parseToMilli(lastDate)
val now = System.currentTimeMillis() // We fetch this repeatedly in case KV fetch is slow
(now - lastDateMilli) <= maximumAllowedDelay.toMillis
}
private val consul = new ConsulClient(config.consulServer)
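// Periodically fetch every invoker's heartbeat entries from Consul and recompute up/down status from the recorded timestamps.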
Scheduler.scheduleWaitAtLeast(healthCheckInterval) { () =>
consul.kv.getRecurse(InvokerKeys.allInvokers) map { invokerInfo =>
// keys are like invokers/invokerN/count
val flattened = ConsulClient.dropKeyLevel(invokerInfo)
val nested = ConsulClient.toNestedMap(flattened)
// Get the new status (some entries may correspond to new instances)
val statusMap = nested map {
case (key, inner) =>
val index = InvokerKeys.extractInvokerIndex(key)
val JsString(startDate) = inner(InvokerKeys.startKey).parseJson
val JsString(lastDate) = inner(InvokerKeys.statusKey).parseJson
(index, InvokerStatus(index, startDate, lastDate, isFresh(lastDate)))
}
val newStatus = statusMap.values.toArray.sortBy(_.index)
// Warning is issued only if up/down is changed
if (getInvokerHealth() != getHealth(newStatus)) {
warn(this, s"InvokerHealth status change: ${newStatus.deep.mkString(" ")}")(TransactionId.loadbalancer)
}
// Existing entries that have become stale require recording and a warning
val stale = curStatus.get().filter {
case InvokerStatus(index, startDate, _, _) =>
statusMap.get(index).map(startDate != _.startDate) getOrElse false
}
if (!stale.isEmpty) {
oldStatus.set(oldStatus.get() ++ stale)
warn(this, s"Stale invoker status has changed: ${oldStatus.get().deep.mkString(" ")}")
instanceChange(stale.map(_.index))
}
curStatus.set(newStatus)
}
}
private lazy val curStatus = new AtomicReference(Array(): Array[InvokerStatus])
private lazy val oldStatus = new AtomicReference(Array(): Array[InvokerStatus])
}
/*
* Invoker indices are 0-based.
* curStatus maintains the status of the current instance at a particular index while oldStatus
* tracks instances (potentially many per index) that are no longer fresh (invoker was restarted).
*/
case class InvokerStatus(index: Int, startDate: String, lastDate: String, isUp: Boolean) {
override def toString = s"index: $index, healthy: $isUp, start: $startDate, last: $lastDate"
}
|
nwspeete-ibm/openwhisk
|
core/controller/src/main/scala/whisk/core/loadBalancer/InvokerHealth.scala
|
Scala
|
apache-2.0
| 5,411
|
/** MACHINE-GENERATED FROM AVRO SCHEMA. DO NOT EDIT DIRECTLY */
package example.shanested.foo
import example.shanested.{Prop, Sha256}
final case class HashRecord(my_hash: Sha256, prop: Prop)
|
julianpeeters/avrohugger
|
avrohugger-core/src/test/expected/standard/example/shanested/foo/HashRecord.scala
|
Scala
|
apache-2.0
| 192
|
import sbt._
object Dependencies {
lazy val scalaTest = "org.scalatest" %% "scalatest" % "3.0.8"
object logger {
lazy val scalaLogging = "com.typesafe.scala-logging" %% "scala-logging" % "3.9.2"
lazy val logback = "ch.qos.logback" % "logback-classic" % "1.2.3"
}
}
|
nil2013/LIlib
|
project/Dependencies.scala
|
Scala
|
mit
| 285
|