code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler.cluster
import java.nio.ByteBuffer
import org.apache.spark.TaskState.TaskState
import org.apache.spark.scheduler.TaskDescription
import org.apache.spark.util.{SerializableBuffer, Utils}
// Marker trait for every control-plane message exchanged between the driver and
// coarse-grained executor backends. Serializable so messages can cross the wire.
private[spark] sealed trait CoarseGrainedClusterMessage extends Serializable
// Namespace object grouping all concrete coarse-grained cluster messages.
private[spark] object CoarseGrainedClusterMessages {
// Driver to executors
// Asks an executor to run the given task description.
case class LaunchTask(task: TaskDescription) extends CoarseGrainedClusterMessage
// Asks the named executor to kill a running task, optionally interrupting its thread.
case class KillTask(taskId: Long, executor: String, interruptThread: Boolean)
extends CoarseGrainedClusterMessage
// Driver's positive reply to RegisterExecutor, carrying the Spark config to apply.
case class RegisteredExecutor(sparkProperties: Seq[(String, String)])
extends CoarseGrainedClusterMessage
// Driver's negative reply to RegisterExecutor, with a human-readable reason.
case class RegisterExecutorFailed(message: String) extends CoarseGrainedClusterMessage
// Executors to driver
// Executor announces itself; hostPort is validated eagerly in the constructor body.
case class RegisterExecutor(executorId: String, hostPort: String, cores: Int)
extends CoarseGrainedClusterMessage {
Utils.checkHostPort(hostPort, "Expected host port")
}
// Executor reports a task state transition together with serialized result data.
case class StatusUpdate(executorId: String, taskId: Long, state: TaskState,
data: SerializableBuffer) extends CoarseGrainedClusterMessage
object StatusUpdate {
/** Alternate factory method that takes a ByteBuffer directly for the data field */
def apply(executorId: String, taskId: Long, state: TaskState, data: ByteBuffer)
: StatusUpdate = {
StatusUpdate(executorId, taskId, state, new SerializableBuffer(data))
}
}
// Internal messages in driver
// Triggers a new round of resource offers to the task scheduler.
case object ReviveOffers extends CoarseGrainedClusterMessage
case object StopDriver extends CoarseGrainedClusterMessage
case object StopExecutor extends CoarseGrainedClusterMessage
case object StopExecutors extends CoarseGrainedClusterMessage
// Asks the driver to drop the given executor (e.g. after it was lost), with a reason.
case class RemoveExecutor(executorId: String, reason: String) extends CoarseGrainedClusterMessage
}
| zhangjunfang/eclipse-dir | spark/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala | Scala | bsd-2-clause | 2,643 |
package zzz.akka.avionics
import akka.actor.{Actor, ActorSystem}
import akka.testkit.{TestKit, TestActorRef}
import org.scalatest.{BeforeAndAfterAll, WordSpecLike}
import org.scalatest.MustMatchers
// We can't test a "trait" very easily, so we're going to
// create a specific EventSource derivation that conforms to
// the requirements of the trait so that we can test the
// production code.
// Minimal concrete Actor mixing in ProductionEventSource so the trait's
// receive logic can be exercised via TestActorRef in the spec below.
class TestEventSource extends Actor with ProductionEventSource {
def receive = eventSourceReceive
}
// "class"Spec is a decent convention we'll be following
// Spec for the EventSource register/unregister/send protocol, run against
// the TestEventSource actor defined above.
class EventSourceSpec extends TestKit(ActorSystem("EventSourceSpec")) with WordSpecLike with MustMatchers with BeforeAndAfterAll {
import EventSource._
// Shut the actor system down once all examples have run so the JVM can exit.
override def afterAll() {
system.shutdown()
}
"EventSource" should {
"allow us to register a listener" in {
// Use the underlying actor instance so internal state (the listeners
// list) can be driven and inspected synchronously, without messaging.
val real = TestActorRef[TestEventSource].underlyingActor
real.receive(RegisterListener(testActor))
real.listeners must contain(testActor)
}
"allow us to unregister a listener" in {
val real = TestActorRef[TestEventSource].underlyingActor
real.receive(RegisterListener(testActor))
real.receive(UnregisterListener(testActor))
real.listeners.size must be(0)
}
"send the event to our test actor" in {
// Here the message goes through the ActorRef, then we assert on what the
// registered testActor actually received.
val testA = TestActorRef[TestEventSource]
testA ! RegisterListener(testActor)
testA.underlyingActor.sendEvent("Fibonacci")
expectMsg("Fibonacci")
}
}
}
| kevyin/akka-book-wyatt | src/test/scala/zzz/akka/avionics/EventSourceTest.scala | Scala | mit | 1,473 |
package utils.reflect
import java.lang.reflect.{Method,Field,Constructor,AccessibleObject,Modifier,Type,GenericArrayType,ParameterizedType,TypeVariable,WildcardType}
import scala.reflect.runtime.{currentMirror => cm}
import scala.reflect.runtime.universe.Symbol
import scala.reflect.runtime.universe.newTermName
object Reflect {
/** underlying class for a given type.
* Class => that object
* GenericArrayType => the underlying array class (stripped of genericity)
* ParameterizedType => the underlying class (stripped of genericity)
* TypeVariable => is unexpected
* WildcardType => is unexpected
*/
implicit def findClass[U](gType:Type):Class[_<:U] = (gType match { //exhaustive check
case c:Class[_] => c
// Array.newInstance expects a Class: the component Type is turned into one by
// a recursive implicit application of findClass itself.
case g:GenericArrayType => java.lang.reflect.Array.newInstance(g.getGenericComponentType,0).getClass
// getRawType is declared as Type but its runtime value is always a Class,
// so the final asInstanceOf cast below is safe for this branch.
case p:ParameterizedType => p.getRawType
case t:TypeVariable[_] => throw new IllegalStateException(s"Real types are expected ; found $t")
case w:WildcardType => throw new IllegalStateException(s"Non wilcard types are expected ; found $w")
}).asInstanceOf[Class[_<:U]]
/** Uniform wrapper over the three reflective member kinds (Method, Field,
* Constructor[_]) so they can be inspected and partially ordered generically.
*/
abstract class AccessibleElement {
// The concrete reflective type wrapped by this element.
type Kind>:Null<:AccessibleObject
def obj:Kind
def getModifiers:Int
def isSynthetic:Boolean
def isBridge:Boolean
def getName:String
// Partial ordering against another element of the same kind; None when the
// two elements are not comparable (see AccessibleElement.cmp).
def compareTo(m:Kind):Option[Int]
// Prints name and selected modifiers to stdout; debugging aid only.
def debug = {
val m = getModifiers
print(getName)
if (Modifier.isPublic(m)) print(" public")
if (Modifier.isPrivate(m)) print(" private")
if (Modifier.isProtected(m)) print(" protected")
if (isSynthetic) print(" synthetic")
if (isBridge) print(" bridge")
}
}
object AccessibleElement {
// Pre-built control-flow exceptions; fillInStackTrace is overridden to return
// 'this' so no (costly) stack trace is captured — they are used as signals.
val nonComparable = new Exception { override def fillInStackTrace=this }
val nonUniqueMin = new Exception { override def fillInStackTrace=this }
// Shared Option[Int] results for the partial ordering: equal / greater / smaller.
val s0 = Some(0)
val sp = Some(1)
val sm = Some(-1)
// Partial order on two classes by assignability; None when they are unrelated.
def cmp(c1:Class[_],c2:Class[_]):Option[Int] = {
val b1 = c1.isAssignableFrom(c2)
val b2 = c2.isAssignableFrom(c1)
if (b1) if (b2) s0 else sp
else if (b2) sm else None
}
/** Compares two lists of classes, to determine if they are compatible, and their
* relation order.
* @param start, the expected order if any ; if unknown, use None
* @param r1 the first class list
* @param r2 the second class list
* @return s0 if the lists are equal,
* sp if r1>r2 (i.e. all classes in r2 are derived from classes in r1)
* sm if r1<r2 (i.e. all classes in r1 are derived from classes in r2)
* None if the classes cannot be compared
*/
def cmp(r1:Array[Class[_]],r2:Array[Class[_]],start:Option[Int]):Option[Int] = {
if (r1.length!=r2.length) return None
var c=start
var i:Int=0
while (i<r1.length) {
cmp(r1(i),r2(i)) match {
case None => return None //non comparable classes
case `sp` if c==sm => return None //comparable, but order inverted from what was seen before
case `sm` if c==sp => return None //comparable, but order inverted from what was seen before
case `s0` => if (c==None) c=s0 //equal: do nothing
case r => c=r //order can be selected
}
i += 1
}
c
}
// Unique minimal element of an array, or throws nonComparable/nonUniqueMin.
final def min[E<:AccessibleObject](l:Array[E]):E = min[E,E](l)(identity)
/** Unique minimal element of l under the AccessibleElement partial order of f(x).
* Throws nonComparable when l is empty or two elements cannot be ordered,
* nonUniqueMin when two distinct elements compare as equal.
*/
final def min[E<:AccessibleObject,X](l:Seq[X])(f:X=>E):X = {
if (l.isEmpty) throw nonComparable
var min:X = null.asInstanceOf[X]
var fMin:E = null.asInstanceOf[E]
for (x <- l) {
val fX = f(x)
if (min==null) { min=x; fMin=fX } //first item
else {
// compareTo goes through the implicit apply below, wrapping fX.
fX.compareTo(fMin) match {
case `sm` => min=x; fMin=fX
case `s0` => throw nonUniqueMin
case None => throw nonComparable
case _ =>
}
}
}
min
}
// Implicit wrapper factory: dispatches a raw reflective member to the
// matching AccessibleElement implementation, preserving its Kind.
final implicit def apply[E<:AccessibleObject](obj:E):AccessibleElement { type Kind=E } = (obj match {
case o:Method => new MethodX(o)
case o:Field => new FieldX(o)
case o:Constructor[_] => new ConstructorX(o)
}).asInstanceOf[AccessibleElement { type Kind=E }]
}
/** AccessibleElement view of a java.lang.reflect.Method.
* Ordering is by parameter-type lists (see AccessibleElement.cmp).
*/
implicit final class MethodX(val obj:Method) extends AccessibleElement {
final type Kind = Method
final def getModifiers = obj.getModifiers
final def isSynthetic = obj.isSynthetic
final def isBridge = obj.isBridge
final def getName = obj.getName
final def compareTo(m:Method):Option[Int] = {
import AccessibleElement._
if (m==obj) s0
else cmp(obj.getParameterTypes, m.getParameterTypes, None) //cmp(m.getReturnType,obj.getReturnType)
}
}
/** AccessibleElement view of a java.lang.reflect.Constructor.
* Constructors are never bridges; ordering is by parameter-type lists.
*/
implicit final class ConstructorX(val obj:Constructor[_]) extends AccessibleElement {
final type Kind = Constructor[_]
final def getModifiers = obj.getModifiers
final def isSynthetic = obj.isSynthetic
final def isBridge = false
final def getName = obj.getName
final def compareTo(m:Constructor[_]):Option[Int] = {
import AccessibleElement._
if (m==obj) s0
else cmp(obj.getParameterTypes, m.getParameterTypes, None) //cmp(m.getDeclaringClass,obj.getDeclaringClass)
}
}
/** AccessibleElement view of a java.lang.reflect.Field.
* Ordering compares (type, declaring class) pairs.
* NOTE(review): the arguments to cmp are oriented (m, obj) here whereas
* MethodX/ConstructorX use (obj, m) — confirm the inversion is intentional.
*/
implicit final class FieldX(val obj:Field) extends AccessibleElement {
final type Kind = Field
final def getModifiers = obj.getModifiers
final def isSynthetic = obj.isSynthetic
final def isBridge = false
final def getName = obj.getName
final def compareTo(m:Field):Option[Int] = {
import AccessibleElement._
if (m==obj) s0
else cmp(Array(m.getType,m.getDeclaringClass),Array(obj.getType,obj.getDeclaringClass),None)
}
}
/** Provides some Java reflection utilities.
 * Note that this class doesn't achieve anything close to scala reflection.
 * It doesn't use any compile time info and is not suitable for general use with generics.
 */
implicit final class RichClass[+U](val c: Class[_<:U]) {
  final val isFinal = Modifier.isFinal(c.getModifiers())

  /** Scala singleton associated with this class if appropriate, else null.
   * Looks for the static final MODULE$ field generated by the Scala compiler.
   */
  def asObject: U = (try {
    val f = c.getDeclaredField("MODULE$")
    val m = f.getModifiers
    if (Modifier.isFinal(m) && Modifier.isStatic(m)) f.get(null) else null
  } catch {
    case _: Throwable => null
  }).asInstanceOf[U]

  // Subclass checks.
  // !!! Not for use with generic types.
  // These are JVM (erased) checks, not scala Types checks: two 'unrelated' types
  // such as List[Double] and List[Method] both erase to List and so appear
  // compatible even though they obviously are not.
  final def <(c: Class[_]): Boolean = c.isAssignableFrom(this.c)
  final def >(c: Class[_]): Boolean = this.c.isAssignableFrom(c)
  final def <(c: RichClass[_]): Boolean = this < c.c
  final def >(c: RichClass[_]): Boolean = this > c.c

  /** Finds the constructors matching the expected class list, in whatever order,
   * except for the first 'mandatory' classes which must be in exact order.
   * Generics are out.
   */
  def findConstructor(expected: Array[RichClass[_]], mandatory: Int): Array[_<:(_<:Constructor[_<:U],Array[Int])] =
    Reflect.findConstructor[U](c.getConstructors.asInstanceOf[Array[_<:Constructor[_<:U]]], expected, mandatory)

  /** Finds the unique constructor matching the expected class list, or None when
   * none matches. Generics are out.
   * Fix: an ambiguous result (two or more matching constructors) used to throw a
   * MatchError because only the empty and one-element arrays were matched; it now
   * yields None as well, since no single constructor can be selected.
   */
  def findConstructorN(expected: RichClass[_]*): Option[Constructor[_<:U]] =
    Reflect.findConstructor[U](c.getConstructors.asInstanceOf[Array[_<:Constructor[_<:U]]], expected.toArray, expected.length) match {
      case Array()       => None
      case Array((c, _)) => Some(c)
      case _             => None // ambiguous: several constructors match
    }

  final def printMethods() = methods.foreach(println)
  override def toString = s"RichClass[${c.getCanonicalName}]"
  override def equals(o: Any) = if (o.isInstanceOf[RichClass[_]]) o.asInstanceOf[RichClass[_]].c eq this.c else false
  override def hashCode = c.hashCode

  // Standard way to retrieve useful methods, fields and constructors:
  // all public members from the class and its superclasses, plus the class's own
  // non-public (protected or private) members; each member is present only once
  // and synthetic members are excluded.
  def methods = (c.getDeclaredMethods.filter(m => !Modifier.isPublic(m.getModifiers)) ++ c.getMethods).filter(!_.isSynthetic)
  def fields = (c.getDeclaredFields.filter(m => !Modifier.isPublic(m.getModifiers)) ++ c.getFields).filter(!_.isSynthetic)
  def constructors = (c.getDeclaredConstructors.filter(m => !Modifier.isPublic(m.getModifiers)) ++ c.getConstructors).filter(!_.isSynthetic)
}
//easy factory: wraps a Class into a RichClass while keeping its precise type parameter
def ^[U](c:Class[U]) = new RichClass[U](c)
/** Finds the method in an array that is closest to the parameter/return types given.
 * The parameter checked is the first.
 * The returned method has the minimal parameter type then maximum return type admissible.
 * Note that this is not intended to deal with complex types (generics most notably.)
 * @param a, an array of methods to check
 * @param src, the first parameter expected class. Can be null if that is not to be checked.
 * @param dst, the return class. Can be null if that is not to be checked.
 */
def reduce(a: Array[Method], src: RichClass[_], dst: RichClass[_]): Array[Method] = {
  var s = src
  val l1 = if (src == null) a else {
    // find the minimal admissible class for the first parameter
    for (m <- a if dst == null || dst > m.getReturnType()) { val x = m.getParameterTypes()(0); if (s > x) s = x }
    // build sublist restricted to that minimal parameter class
    for (m <- a if s > m.getParameterTypes()(0)) yield m
  }
  val l = if (l1.length <= 1 && dst != null) l1 else {
    s = null
    // find the maximal class for the return type among the remaining candidates
    for (m <- l1) { val x = m.getReturnType(); if (s == null || s < x) s = x }
    // build sublist with the maximal return type.
    // Fixed: this used to filter the full input 'a' instead of the reduced list
    // 'l1', silently discarding the parameter-type reduction performed above.
    for (m <- l1 if s < m.getReturnType()) yield m
  }
  l
}
/** Finds the methods (static or not) in src that return dst (or a subclass of).
* @param src, the source class in which we are looking for an appropriate method (possibly static)
* @param dst, the class to return
* @param check, a method that adds additional criterion on a method (such as name...)
* @return the list of matching Converter
* @throws NoSuchMethodException if no method or more than one method is found matching
*/
// NOTE(review): the @throws clause above looks stale — this method simply returns
// the reduced array and does not visibly throw NoSuchMethodException; confirm.
def find[U<:AnyRef,V](src:RichClass[U],dst:RichClass[V],check:(Method)=>Boolean):Array[Method] =
reduce(src.methods.filter(m => check(m) && dst>m.getReturnType),src,dst)
////////////////////////////////////////////////////////////////////////////////////////////////
// The following methods are useful to deal with reflexion around variable list of parameters //
////////////////////////////////////////////////////////////////////////////////////////////////
/** Matches an incoming list of classes against an expected list of classes. Classes must be unrelated or the match will be unpredictable.
* @param expected, the list of possible classes
* @param incoming, the list of found classes ; they must all be assignable to at most one of the expected classes.
* it is possible to only partially match the incoming list, but the expected list must be fully met.
* @param mandatory, the number of elements in expected that must be found in the right order in found
* @return an array indicating how indexes in incoming match indexes in expected. null if fails (i.e some incoming elts don't match any expected one)
*/
def checkParams(expected:Array[Class[_]],incoming:Array[RichClass[_]],mandatory:Int):Array[Int] = {
if (incoming.length<expected.length) return null
if (expected.length==0) return if (mandatory==0) new Array[Int](0) else null
val a = new Array[Int](expected.length)
//mandatory arguments must be matched at their exact position
// NOTE(review): assumes mandatory <= expected.length; a larger value would read
// past the end of 'expected' — confirm callers guarantee this invariant.
for (i <- 0 until mandatory) if (!(incoming(i)<expected(i))) return null else a(i)=i
val r = mandatory until incoming.length
//loop on other expected arguments
// NOTE(review): matched incoming indices are not marked as used, so the same
// incoming element can satisfy several expected slots; this is only safe under
// the documented precondition that the classes are mutually unrelated.
for (i <- mandatory until expected.length) {
if (r.find(incoming(_)<expected(i)).map(a(i)=_)==None) //check if an incoming argument matches and if found, record its index
return null //if none found, arguments match fails
}
a
}
/** Builds the actual parameter array from a list of possible parameters, based on
 * the substitution array 'matching' (likely produced by checkParams above):
 * slot i of the result receives possible(matching(i)).
 */
def buildParams(possible: Array[AnyRef], matching: Array[Int]): Array[AnyRef] =
  matching.map(possible(_))
/** restricts 'in' to the Methods that do accept the right kind of parameters,
* pairing each with the checkParams substitution array for its argument slots */
def findMethod(in:Array[Method],incoming:Array[RichClass[_]],mandatory:Int):Array[(Method,Array[Int])] =
for (m <- in; p=checkParams(m.getParameterTypes,incoming,mandatory) if p!=null) yield (m,p)
/** restricts 'in' to the Constructors that do accept the right kind of parameters,
* pairing each with the checkParams substitution array for its argument slots */
def findConstructor[U](in:Array[_<:Constructor[_<:U]],incoming:Array[RichClass[_]],mandatory:Int):Array[(Constructor[_<:U],Array[Int])] =
for (m <- in; p=checkParams(m.getParameterTypes,incoming,mandatory) if p!=null) yield (m,p)
/** Returns true when p1 and p2 represent the same primitive type, whether given
 * as the primitive Class (e.g. Integer.TYPE) or its Java box, or when they are
 * the very same class (reference equality).
 */
final def checkPrimitive(p1: Class[_], p2: Class[_]): Boolean = {
  // primitive Class -> corresponding boxed Class (void deliberately absent,
  // matching the original behavior)
  val boxOf = Map[Class[_], Class[_]](
    java.lang.Integer.TYPE   -> classOf[java.lang.Integer],
    java.lang.Boolean.TYPE   -> classOf[java.lang.Boolean],
    java.lang.Character.TYPE -> classOf[java.lang.Character],
    java.lang.Float.TYPE     -> classOf[java.lang.Float],
    java.lang.Double.TYPE    -> classOf[java.lang.Double],
    java.lang.Short.TYPE     -> classOf[java.lang.Short],
    java.lang.Byte.TYPE      -> classOf[java.lang.Byte],
    java.lang.Long.TYPE      -> classOf[java.lang.Long])
  if (p1 eq p2) true
  else if (p1.isPrimitive) boxOf.get(p1).exists(_ eq p2)
  else if (p2.isPrimitive) boxOf.get(p2).exists(_ eq p1)
  else false
}
/** Analyzes a type to determine if it is a collection, and in that case the relevant information.
* @param t, the type to analyze
* @param cv, the conversion solver in use
* @param n, the depth for the analysis (0 is all the way to the bottom of encapsulated seqs/lists)
* @return the actual depth if less than n, then None if the type can be converted or Some(class found)
*/
def analyzeType(t:java.lang.reflect.Type, cv:ConversionSolver, n:Int):(Int, Option[java.lang.reflect.Type]) =
cv.collectionSolver(t) match {
// collection: recurse to depth n, then test the element type for convertibility
case Some(l) => val x = l.depth(n)
val isConvertible = cv.stringSolver(x._2.czElt)
(x._1, if (isConvertible==None) Some(x._2.czElt) else None)
// not a collection: depth 0, test the type itself
case None => val isConvertible = cv.stringSolver(t)
(0,if (isConvertible==None) Some(t) else None)
}
//XXX for fun... test on the Scala reflective API sho it is very slow for our requirements
/** Copies a case-class-like instance b, replacing the field literally named "p1"
* with the given value, via the runtime-reflection 'copy' method.
* NOTE(review): assumes b's class has a 'copy' method and a parameter called
* exactly "p1", and that each copy parameter has a same-named accessor — a
* mismatch throws at runtime. Kept as an experiment (see XXX above).
*/
def copy[T<:AnyRef:scala.reflect.ClassTag](b:T,p1: String):T = {
import scala.reflect.runtime.{ currentMirror => cm }
import scala.reflect.runtime.universe._
val im = cm.reflect(b)
val ts = im.symbol.typeSignature
val copySym = ts.member(TermName("copy")).asMethod
// reads the current value of field p through its accessor method
def element(p: Symbol): Any = (im reflectMethod ts.member(p.name).asMethod)()
val args = for (ps <- copySym.paramLists; p <- ps) yield {
if (p.name.toString == "p1") p1 else element(p)
}
(im reflectMethod copySym)(args: _*).asInstanceOf[T]
}
} | Y-P-/data-processing-binding | Utils/src/utils/reflect/Reflect.scala | Scala | gpl-3.0 | 16,547 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.filter.function
import java.io.ByteArrayOutputStream
import java.text.SimpleDateFormat
import org.geotools.data.collection.ListFeatureCollection
import org.geotools.feature.simple.SimpleFeatureBuilder
import org.junit.runner.RunWith
import org.locationtech.geomesa.filter.function.BinaryOutputEncoder.{EncodingOptions, LatLonAttributes}
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.text.WKTUtils
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
// Specification for BinaryOutputEncoder: verifies that point and line feature
// collections are encoded into the binary viewer format (16 bytes per record,
// or 24 bytes when a label is included) and that each record round-trips
// through Convert2ViewerFunction.decode with the expected field values.
class BinaryOutputEncoderTest extends Specification {
"BinaryViewerOutputFormat" should {
// NOTE(review): SimpleDateFormat is not thread-safe; presumably fine because
// these examples run sequentially — confirm if execution mode changes.
val dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
"encode a point feature collection" in {
val sft = SimpleFeatureTypes.createType("bintest",
"track:String,label:Long,lat:Double,lon:Double,dtg:Date,geom:Point:srid=4326")
val baseDtg = dateFormat.parse("2014-01-01 08:09:00").getTime
val fc = new ListFeatureCollection(sft)
val builder = new SimpleFeatureBuilder(sft)
// Four features one minute apart (descending dtg), with distinct geometries
// and lat/lon attributes so the encoding source can be distinguished below.
(0 until 4).foreach { i =>
val point = WKTUtils.read(s"POINT (45 5$i)")
val date = dateFormat.parse(s"2014-01-01 08:0${9-i}:00")
builder.addAll(Array(s"1234-$i", java.lang.Long.valueOf(i), 45 + i, 50, date, point).asInstanceOf[Array[AnyRef]])
fc.add(builder.buildFeature(s"$i"))
}
"with label field" >> {
// 24-byte records: the extra 8 bytes carry the encoded label.
val out = new ByteArrayOutputStream()
BinaryOutputEncoder
.encodeFeatureCollection(fc, out, EncodingOptions(None, Some("dtg"), Some("track"), Some("label")))
val encoded = out.toByteArray
(0 until 4).foreach { i =>
val decoded = Convert2ViewerFunction.decode(encoded.slice(i * 24, (i + 1) * 24))
decoded.dtg mustEqual baseDtg - 60 * 1000 * i
decoded.lat mustEqual 45
decoded.lon mustEqual 50 + i
decoded.trackId mustEqual s"1234-$i".hashCode
decoded.asInstanceOf[ExtendedValues].label mustEqual Convert2ViewerFunction.convertToLabel(i.toString)
}
success
}
"without label field" >> {
// 16-byte records, decoded as BasicValues.
val out = new ByteArrayOutputStream()
BinaryOutputEncoder
.encodeFeatureCollection(fc, out, EncodingOptions(None, Some("dtg"), Some("track"), None))
val encoded = out.toByteArray
(0 until 4).foreach { i =>
val decoded = Convert2ViewerFunction.decode(encoded.slice(i * 16, (i + 1) * 16))
decoded.dtg mustEqual baseDtg - 60 * 1000 * i
decoded.lat mustEqual 45
decoded.lon mustEqual 50 + i
decoded.trackId mustEqual s"1234-$i".hashCode
decoded must beAnInstanceOf[BasicValues]
}
success
}
"with id field" >> {
// Using the feature id ("0".."3") as the track identifier.
val out = new ByteArrayOutputStream()
BinaryOutputEncoder
.encodeFeatureCollection(fc, out, EncodingOptions(None, Some("dtg"), Some("id"), None))
val encoded = out.toByteArray
(0 until 4).foreach { i =>
val decoded = Convert2ViewerFunction.decode(encoded.slice(i * 16, (i + 1) * 16))
decoded.dtg mustEqual baseDtg - 60 * 1000 * i
decoded.lat mustEqual 45
decoded.lon mustEqual 50 + i
decoded.trackId mustEqual s"$i".hashCode
decoded must beAnInstanceOf[BasicValues]
}
success
}
"with custom lat/lon" >> {
// Coordinates come from the lat/lon attributes instead of the geometry,
// so the decoded lat varies (45 + i) while lon stays fixed at 50.
val out = new ByteArrayOutputStream()
BinaryOutputEncoder
.encodeFeatureCollection(fc, out, EncodingOptions(Some(LatLonAttributes("lat", "lon")), Some("dtg"), Some("track"), None))
val encoded = out.toByteArray
(0 until 4).foreach { i =>
val decoded = Convert2ViewerFunction.decode(encoded.slice(i * 16, (i + 1) * 16))
decoded.dtg mustEqual baseDtg - 60 * 1000 * i
decoded.lat mustEqual 45 + i
decoded.lon mustEqual 50
decoded.trackId mustEqual s"1234-$i".hashCode
decoded must beAnInstanceOf[BasicValues]
}
success
}
}
"encode a line feature collection" in {
// A single feature whose LineString has four vertices and a matching list of
// four (descending) dates: each vertex becomes one encoded record.
val sft = SimpleFeatureTypes.createType("binlinetest",
"track:String,label:Long,dtg:Date,dates:List[Date],geom:LineString:srid=4326")
val line = WKTUtils.read("LINESTRING(45 50, 46 51, 47 52, 50 55)")
val date = dateFormat.parse("2014-01-01 08:00:00")
val dates = (0 until 4).map(i => dateFormat.parse(s"2014-01-01 08:00:0${9-i}"))
val fc = new ListFeatureCollection(sft)
val builder = new SimpleFeatureBuilder(sft)
(0 until 1).foreach { i =>
builder.addAll(Array[AnyRef](s"1234-$i", java.lang.Long.valueOf(i), date, dates, line))
fc.add(builder.buildFeature(s"$i"))
}
"with label field" >> {
val out = new ByteArrayOutputStream()
BinaryOutputEncoder
.encodeFeatureCollection(fc, out, EncodingOptions(None, Some("dates"), Some("track"), Some("label")))
val encoded = out.toByteArray
(0 until 4).foreach { i =>
val decoded = Convert2ViewerFunction.decode(encoded.slice(i * 24, (i + 1) * 24))
decoded.dtg mustEqual dates(i).getTime
decoded.lat mustEqual line.getCoordinates()(i).x.toFloat
decoded.lon mustEqual line.getCoordinates()(i).y.toFloat
decoded.trackId mustEqual "1234-0".hashCode
decoded.asInstanceOf[ExtendedValues].label mustEqual Convert2ViewerFunction.convertToLabel("0")
}
success
}
"without label field" >> {
val out = new ByteArrayOutputStream()
BinaryOutputEncoder
.encodeFeatureCollection(fc, out, EncodingOptions(None, Some("dates"), Some("track"), None))
val encoded = out.toByteArray
(0 until 4).foreach { i =>
val decoded = Convert2ViewerFunction.decode(encoded.slice(i * 16, (i + 1) * 16))
decoded.dtg mustEqual dates(i).getTime
decoded.lat mustEqual line.getCoordinates()(i).x.toFloat
decoded.lon mustEqual line.getCoordinates()(i).y.toFloat
decoded.trackId mustEqual "1234-0".hashCode
decoded must beAnInstanceOf[BasicValues]
}
success
}
"with sorting" >> {
// sort = true orders records by date ascending; the input dates are
// descending, so decoded records appear in reverse vertex order.
val out = new ByteArrayOutputStream()
BinaryOutputEncoder
.encodeFeatureCollection(fc, out, EncodingOptions(None, Some("dates"), Some("track"), None), sort = true)
val encoded = out.toByteArray
(0 until 4).foreach { i =>
val decoded = Convert2ViewerFunction.decode(encoded.slice(i * 16, (i + 1) * 16))
decoded.dtg mustEqual dates(3 - i).getTime
decoded.lat mustEqual line.getCoordinates()(3 - i).x.toFloat
decoded.lon mustEqual line.getCoordinates()(3 - i).y.toFloat
decoded.trackId mustEqual "1234-0".hashCode
decoded must beAnInstanceOf[BasicValues]
}
success
}
}
}
} | nagavallia/geomesa | geomesa-filter/src/test/scala/org/locationtech/geomesa/filter/function/BinaryOutputEncoderTest.scala | Scala | apache-2.0 | 7,450 |
package almhirt.http
import almhirt.common.{ DomainEvent, DomainCommand }
/** Declares AlmMediaTypesProvider instances for all commonly used payload types:
* primitives and std-lib value types, their sequences, and the almhirt
* event/command/problem hierarchy. All members are abstract; mix in a concrete
* implementation (e.g. VendorBasedCommonAlmMediaTypesProviders) to supply them.
*/
trait HasCommonAlmMediaTypesProviders {
// Single values
implicit def booleanAlmMediaTypesProvider: AlmMediaTypesProvider[Boolean]
implicit def stringAlmMediaTypesProvider: AlmMediaTypesProvider[String]
implicit def byteAlmMediaTypesProvider: AlmMediaTypesProvider[Byte]
implicit def shortAlmMediaTypesProvider: AlmMediaTypesProvider[Short]
implicit def intAlmMediaTypesProvider: AlmMediaTypesProvider[Int]
implicit def longAlmMediaTypesProvider: AlmMediaTypesProvider[Long]
implicit def bigIntAlmMediaTypesProvider: AlmMediaTypesProvider[BigInt]
implicit def floatAlmMediaTypesProvider: AlmMediaTypesProvider[Float]
implicit def doubleAlmMediaTypesProvider: AlmMediaTypesProvider[Double]
implicit def bigDecimalAlmMediaTypesProvider: AlmMediaTypesProvider[BigDecimal]
implicit def uriAlmMediaTypesProvider: AlmMediaTypesProvider[java.net.URI]
implicit def uuidAlmMediaTypesProvider: AlmMediaTypesProvider[java.util.UUID]
implicit def localDateTimeAlmMediaTypesProvider: AlmMediaTypesProvider[java.time.LocalDateTime]
implicit def dateTimeAlmMediaTypesProvider: AlmMediaTypesProvider[java.time.ZonedDateTime]
implicit def finiteDurationAlmMediaTypesProvider: AlmMediaTypesProvider[scala.concurrent.duration.FiniteDuration]
// Sequences of single values
implicit def booleansAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[Boolean]]
implicit def stringsAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[String]]
implicit def bytesAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[Byte]]
implicit def shortsAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[Short]]
implicit def intsAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[Int]]
implicit def longsAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[Long]]
implicit def bigIntsAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[BigInt]]
implicit def floatsAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[Float]]
implicit def doublesAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[Double]]
implicit def bigDecimalsAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[BigDecimal]]
implicit def urisAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[java.net.URI]]
implicit def uuidsAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[java.util.UUID]]
implicit def localDateTimesAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[java.time.LocalDateTime]]
implicit def dateTimesAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[java.time.ZonedDateTime]]
implicit def finiteDurationsAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[scala.concurrent.duration.FiniteDuration]]
// almhirt messaging types
implicit def eventAlmMediaTypesProvider: AlmMediaTypesProvider[almhirt.common.Event]
// NOTE(review): the next three are deliberately NOT implicit — presumably to
// avoid implicit ambiguity with eventAlmMediaTypesProvider for subtypes; confirm.
def systemEventAlmMediaTypesProvider: AlmMediaTypesProvider[almhirt.common.SystemEvent]
def domainEventAlmMediaTypesProvider: AlmMediaTypesProvider[almhirt.common.DomainEvent]
def aggregateRootEventAlmMediaTypesProvider: AlmMediaTypesProvider[almhirt.common.AggregateRootEvent]
implicit def commandAlmMediaTypesProvider: AlmMediaTypesProvider[almhirt.common.Command]
implicit def problemAlmMediaTypesProvider: AlmMediaTypesProvider[almhirt.common.Problem]
implicit def commandResponseAlmMediaTypesProvider: AlmMediaTypesProvider[almhirt.tracking.CommandResponse]
implicit def eventsAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[almhirt.common.Event]]
implicit def systemEventsAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[almhirt.common.SystemEvent]]
implicit def domainEventsAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[almhirt.common.DomainEvent]]
implicit def aggregateRootEventsAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[almhirt.common.AggregateRootEvent]]
implicit def commandsAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[almhirt.common.Command]]
implicit def problemsAlmMediaTypesProvider: AlmMediaTypesProvider[Seq[almhirt.common.Problem]]
}
/** Concrete implementation of HasCommonAlmMediaTypesProviders that derives every
* provider from the mixed-in MediaTypeVendorProvider: each provider is built via
* AlmMediaTypesProvider.registeredDefaults with a per-type name and then widened
* with generic targets. All members are lazy so the abstract vendorProvider is
* resolved only on first use.
*/
trait VendorBasedCommonAlmMediaTypesProviders { self: HasCommonAlmMediaTypesProviders ⇒
// Vendor prefix/registry used by registeredDefaults to build the media types.
implicit def vendorProvider: MediaTypeVendorProvider
override lazy val booleanAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Boolean]("Boolean").withGenericTargets
override lazy val stringAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[String]("String").withGenericTargets
override lazy val byteAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Byte]("Byte").withGenericTargets
override lazy val shortAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Short]("Short").withGenericTargets
override lazy val intAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Int]("Int").withGenericTargets
override lazy val longAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Long]("Long").withGenericTargets
override lazy val bigIntAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[BigInt]("BigInt").withGenericTargets
override lazy val floatAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Float]("Float").withGenericTargets
override lazy val doubleAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Double]("Double").withGenericTargets
override lazy val bigDecimalAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[BigDecimal]("BigDecimal").withGenericTargets
override lazy val uriAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[java.net.URI]("Uri").withGenericTargets
override lazy val uuidAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[java.util.UUID]("Uuid").withGenericTargets
override lazy val localDateTimeAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[java.time.LocalDateTime]("LocalDateTime").withGenericTargets
override lazy val dateTimeAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[java.time.ZonedDateTime]("DateTime").withGenericTargets
override lazy val finiteDurationAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[scala.concurrent.duration.FiniteDuration]("FiniteDuration").withGenericTargets
override lazy val booleansAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[Boolean]]("Booleans").withGenericTargets
override lazy val stringsAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[String]]("Strings").withGenericTargets
override lazy val bytesAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[Byte]]("Bytes").withGenericTargets
override lazy val shortsAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[Short]]("Shorts").withGenericTargets
override lazy val intsAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[Int]]("Ints").withGenericTargets
override lazy val longsAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[Long]]("Longs").withGenericTargets
override lazy val bigIntsAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[BigInt]]("BigInts").withGenericTargets
override lazy val floatsAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[Float]]("Floats").withGenericTargets
override lazy val doublesAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[Double]]("Doubles").withGenericTargets
override lazy val bigDecimalsAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[BigDecimal]]("BigDecimals").withGenericTargets
override lazy val urisAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[java.net.URI]]("Uris").withGenericTargets
override lazy val uuidsAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[java.util.UUID]]("Uuids").withGenericTargets
override lazy val localDateTimesAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[java.time.LocalDateTime]]("LocalDateTimes").withGenericTargets
override lazy val dateTimesAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[java.time.ZonedDateTime]]("DateTimes").withGenericTargets
override lazy val finiteDurationsAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[scala.concurrent.duration.FiniteDuration]]("FiniteDurations").withGenericTargets
override lazy val eventAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[almhirt.common.Event]("Event").withGenericTargets
override lazy val systemEventAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[almhirt.common.SystemEvent]("SystemEvent").withGenericTargets
override lazy val domainEventAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[almhirt.common.DomainEvent]("DomainEvent").withGenericTargets
override lazy val aggregateRootEventAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[almhirt.common.AggregateRootEvent]("AggregateRootEvent").withGenericTargets
override lazy val commandAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[almhirt.common.Command]("Command").withGenericTargets
override lazy val problemAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[almhirt.common.Problem]("Problem").withGenericTargets
override lazy val commandResponseAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[almhirt.tracking.CommandResponse]("CommandResponse").withGenericTargets
override lazy val eventsAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[almhirt.common.Event]]("Events").withGenericTargets
override lazy val systemEventsAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[almhirt.common.SystemEvent]]("SystemEvents").withGenericTargets
override lazy val domainEventsAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[almhirt.common.DomainEvent]]("DomainEvents").withGenericTargets
override lazy val aggregateRootEventsAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[almhirt.common.AggregateRootEvent]]("AggregateRootEvents").withGenericTargets
override lazy val commandsAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[almhirt.common.Command]]("Commands").withGenericTargets
override lazy val problemsAlmMediaTypesProvider = AlmMediaTypesProvider.registeredDefaults[Seq[almhirt.common.Problem]]("Problems").withGenericTargets
} | chridou/almhirt | almhirt-common/src/main/scala/almhirt/http/HasCommonAlmMediaTypesProviders.scala | Scala | apache-2.0 | 10,337 |
package net.javachallenge.util.misc
import java.util.Calendar
object DateUtils {

  /**
   * Returns the current local date-time as an underscore-separated string
   * (year_month_day_hour_minute_second), suitable for use in file names.
   *
   * Fields are not zero-padded, so e.g. January 5th yields "..._1_5_...".
   */
  def dateStringForFileName = {
    val now = Calendar.getInstance()
    val parts = List(
      now.get(Calendar.YEAR),
      now.get(Calendar.MONTH) + 1, // Calendar months are 0-based
      now.get(Calendar.DAY_OF_MONTH),
      now.get(Calendar.HOUR_OF_DAY),
      now.get(Calendar.MINUTE),
      now.get(Calendar.SECOND))
    parts.mkString("_")
  }
}
package sbtmeow
// Adapted from https://gist.github.com/MicahElliott/719710
object Colors {

  // The 16 "system" colors (ids 0-7 and their bright variants 8-15) do not
  // follow any formula, so they stay literal. Keys are zero-padded to two
  // digits, exactly as in the original hand-written table.
  private val systemColors: List[(String, (Int, Int, Int))] = List(
    // Primary 3-bit (8 colors). Unique representation!
    ("00", (0, 0, 0)),
    ("01", (128, 0, 0)),
    ("02", (0, 128, 0)),
    ("03", (128, 128, 0)),
    ("04", (0, 0, 128)),
    ("05", (128, 0, 128)),
    ("06", (0, 128, 128)),
    ("07", (192, 192, 192)),
    // Equivalent "bright" versions of original 8 colors.
    ("08", (128, 128, 128)),
    ("09", (255, 0, 0)),
    ("10", (0, 255, 0)),
    ("11", (255, 255, 0)),
    ("12", (0, 0, 255)),
    ("13", (255, 0, 255)),
    ("14", (0, 255, 255)),
    ("15", (255, 255, 255)))

  // Channel intensities used by the xterm 6x6x6 color cube (ids 16-231).
  private val cubeLevels = Vector(0, 95, 135, 175, 215, 255)

  // Color cube, strictly ascending ids: id = 16 + 36*r + 6*g + b where each
  // of r, g, b indexes into `cubeLevels`.
  private val colorCube: List[(String, (Int, Int, Int))] =
    (for {
      r <- 0 to 5
      g <- 0 to 5
      b <- 0 to 5
    } yield ((16 + 36 * r + 6 * g + b).toString,
             (cubeLevels(r), cubeLevels(g), cubeLevels(b)))).toList

  // Gray-scale ramp (ids 232-255): intensities 8, 18, 28, ..., 238.
  private val grayRamp: List[(String, (Int, Int, Int))] =
    (232 to 255).toList.map { id =>
      val v = 8 + 10 * (id - 232)
      (id.toString, (v, v, v))
    }

  // Color lookup table, 8 bit id (as string) to tupled RGB.
  // Same 256 entries, in the same order, as the original literal table —
  // now generated from the xterm palette formulas instead of hand-written.
  val lookup: List[(String, (Int, Int, Int))] = systemColors ++ colorCube ++ grayRamp
}
/*
* Copyright (c) 2013, Hidekatsu Hirose
* Copyright (c) 2013, Hirose-Zouen
 * This file is subject to the terms and conditions defined in
* file 'LICENSE.txt', which is part of this source code package.
*/
package org.hirosezouen.sine_wave
import java.awt.Color
import scala.collection.mutable.Buffer
import scala.swing.Component
import scala.swing.Dimension
import scala.swing.Graphics2D
import scala.swing.MainFrame
import scala.swing.Panel
import scala.swing.SimpleSwingApplication
// Minimal scala.swing application that opens a frame and paints a green oval.
// NOTE(review): despite the name, no sine wave is rendered here yet —
// presumably this is scaffolding for the eventual wave display.
object SineWaveGenerator extends SimpleSwingApplication {
  def top = new MainFrame {
    title = "Graphic Sample"
    minimumSize = new Dimension(300, 300)
    contents = new Panel {
      // Panel is treated as a leaf: it reports no child components.
      override def contents: Buffer[Component] = Buffer.empty
      override def paint(g: Graphics2D) = super.paint(g)
      override protected def paintComponent(g: Graphics2D) {
        // NOTE(review): calling super.paint (not super.paintComponent) from
        // inside paintComponent looks suspicious — confirm this is intended
        // and does not re-enter the paint cycle.
        super.paint(g)
        g.setColor(Color.GREEN)
        // Oval is sized to twice the panel, so only its top-left quarter shows.
        g.fillOval(0,0,size.width*2,size.height*2)
      }
    }
  }
}
| chokopapashi/SineWaveGenerator | src/main/scala/org/hirosezouen/sine_wave/SineWaveGenerator.scala | Scala | bsd-3-clause | 1,153 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.niflheim
import quasar.precog.common._
import quasar.precog.BitSet
import quasar.precog.util.BitSetUtil.Implicits._
import java.io.IOException
import java.nio.channels.{ ReadableByteChannel, WritableByteChannel }
import java.nio.ByteBuffer
import scala.{ specialized => spec }
import scalaz.{Validation, Success, Failure}
import scalaz.syntax.monad._
/**
 * Version-1 on-disk format for segments. Every logical record is framed as a
 * "chunk": a 4-byte big-endian payload length followed by the payload bytes
 * (see writeChunk/readChunk below).
 */
object V1SegmentFormat extends SegmentFormat {
  // NOTE(review): `checksum` is never read anywhere in this object — it looks
  // like a leftover toggle for a planned checksum feature; confirm before use.
  private val checksum = true

  object reader extends SegmentReader {
    // Evaluates `f`, converting any thrown Exception into Failure(IOException).
    private def wrapException[A](f: => A): Validation[IOException, A] = try {
      Success(f)
    } catch { case e: Exception =>
      Failure(new IOException(e))
    }

    // Decodes a segment header from one framed chunk:
    // blockId (Long), UTF-8 encoded CPath, then the CType flag bytes.
    def readSegmentId(channel: ReadableByteChannel): Validation[IOException, SegmentId] = for {
      buffer <- readChunk(channel)
      blockId <- wrapException(buffer.getLong())
      cpath <- wrapException(CPath(Codec.Utf8Codec.read(buffer)))
      ctype <- CTypeFlags.readCType(buffer)
    } yield SegmentId(blockId, cpath, ctype)

    def readSegment(channel: ReadableByteChannel): Validation[IOException, Segment] = {
      // Value-array payload chunk: row count (Int), defined-row BitSet, then
      // one encoded value per defined row (undefined rows are left at the
      // array's default element).
      def readArray[A](ctype: CValueType[A]): Validation[IOException, (BitSet, Array[A])] = for {
        buffer <- readChunk(channel)
      } yield {
        val length = buffer.getInt()
        val defined = Codec.BitSetCodec.read(buffer)
        val codec = getCodecFor(ctype)
        val values = ctype.classTag.newArray(length)
        defined.foreach { row =>
          values(row) = codec.read(buffer)
        }
        (defined, values)
      }

      // Null-type payload chunk: no values, just row count and defined BitSet.
      def readNull(ctype: CNullType): Validation[IOException, (BitSet, Int)] = for {
        buffer <- readChunk(channel)
      } yield {
        val length = buffer.getInt()
        val defined = Codec.BitSetCodec.read(buffer)
        (defined, length)
      }

      // Boolean payload chunk: the values themselves are a second BitSet.
      def readBoolean(): Validation[IOException, (BitSet, Int, BitSet)] = for {
        buffer <- readChunk(channel)
      } yield {
        val length = buffer.getInt()
        val defined = Codec.BitSetCodec.read(buffer)
        val values = Codec.BitSetCodec.read(buffer)
        (defined, length, values)
      }

      // Dispatch on the header's CType to pick the matching payload decoder.
      // Order matters: CBoolean must be matched before the general
      // CValueType case, since it has its own compact representation.
      for {
        header <- readSegmentId(channel)
        segment <- header match {
          case SegmentId(blockid, cpath, CBoolean) =>
            readBoolean() map { case (defined, length, values) =>
              BooleanSegment(blockid, cpath, defined, values, length)
            }
          case SegmentId(blockid, cpath, ctype: CValueType[a]) =>
            readArray(ctype) map { case (defined, values) =>
              ArraySegment(blockid, cpath, ctype, defined, values)
            }
          case SegmentId(blockid, cpath, ctype: CNullType) =>
            readNull(ctype) map { case (defined, length) =>
              NullSegment(blockid, cpath, ctype, defined, length)
            }
        }
      } yield segment
    }
  }

  object writer extends SegmentWriter {
    // Writes the header chunk followed by the type-specific payload chunk.
    def writeSegment(channel: WritableByteChannel, segment: Segment): Validation[IOException, Unit] = {
      for {
        _ <- writeSegmentId(channel, segment)
        _ <- segment match {
          case seg: ArraySegment[a] =>
            writeArraySegment(channel, seg, getCodecFor(seg.ctype))
          case seg: BooleanSegment =>
            writeBooleanSegment(channel, seg)
          case seg: NullSegment =>
            writeNullSegment(channel, seg)
        }
      } yield ()
    }

    // Header chunk: blockid (8 bytes) + UTF-8 path + ctype flag bytes.
    private def writeSegmentId(channel: WritableByteChannel, segment: Segment): Validation[IOException, Unit] = {
      val tpeFlag = CTypeFlags.getFlagFor(segment.ctype)
      val strPath = segment.cpath.toString
      val maxSize = Codec.Utf8Codec.maxSize(strPath) + tpeFlag.length + 8
      writeChunk(channel, maxSize) { buffer =>
        buffer.putLong(segment.blockid)
        Codec.Utf8Codec.writeUnsafe(strPath, buffer)
        buffer.put(tpeFlag)
      }
    }

    // Mirrors reader.readArray: length, defined BitSet, then defined values.
    // maxSize is computed up-front because writeChunk allocates one buffer.
    private def writeArraySegment[@spec(Boolean,Long,Double) A](channel: WritableByteChannel,
        segment: ArraySegment[A], codec: Codec[A]): Validation[IOException, Unit] = {
      var maxSize = Codec.BitSetCodec.maxSize(segment.defined) + 4
      segment.defined.foreach { row =>
        maxSize += codec.maxSize(segment.values(row))
      }
      writeChunk(channel, maxSize) { buffer =>
        buffer.putInt(segment.values.length)
        Codec.BitSetCodec.writeUnsafe(segment.defined, buffer)
        segment.defined foreach { row =>
          codec.writeUnsafe(segment.values(row), buffer)
        }
      }
    }

    // Mirrors reader.readBoolean: length, defined BitSet, values BitSet.
    private def writeBooleanSegment(channel: WritableByteChannel, segment: BooleanSegment) = {
      val maxSize = Codec.BitSetCodec.maxSize(segment.defined) + Codec.BitSetCodec.maxSize(segment.values) + 4
      writeChunk(channel, maxSize) { buffer =>
        buffer.putInt(segment.length)
        Codec.BitSetCodec.writeUnsafe(segment.defined, buffer)
        Codec.BitSetCodec.writeUnsafe(segment.values, buffer)
      }
    }

    // Mirrors reader.readNull: length and defined BitSet only.
    private def writeNullSegment(channel: WritableByteChannel, segment: NullSegment) = {
      val maxSize = Codec.BitSetCodec.maxSize(segment.defined) + 4
      writeChunk(channel, maxSize) { buffer =>
        buffer.putInt(segment.length)
        Codec.BitSetCodec.writeUnsafe(segment.defined, buffer)
      }
    }
  }

  private def allocate(size: Int): ByteBuffer = ByteBuffer.allocate(size)

  /**
   * Frames one chunk: reserves 4 bytes, lets `f` fill the payload, then
   * back-patches the payload length at offset 0 and writes the whole buffer.
   * `maxSize` must be an upper bound on the bytes `f` writes.
   */
  def writeChunk[A](channel: WritableByteChannel, maxSize: Int)(f: ByteBuffer => A): Validation[IOException, A] = {
    val buffer = allocate(maxSize + 4)
    buffer.position(4)
    val result = f(buffer)
    buffer.flip()
    buffer.putInt(0, buffer.limit() - 4)
    try {
      while (buffer.remaining() > 0) {
        channel.write(buffer)
      }
      Success(result)
    } catch {
      case ex: IOException =>
        Failure(ex)
    }
  }

  /**
   * Reads one framed chunk: a 4-byte length prefix, then that many payload
   * bytes, returned as a flipped buffer ready for reading.
   *
   * NOTE(review): the return value of channel.read is ignored here; if the
   * channel reaches EOF mid-chunk, read() returns -1 and these loops would
   * spin forever. Presumably callers only supply complete files — confirm.
   */
  def readChunk(channel: ReadableByteChannel): Validation[IOException, ByteBuffer] = {
    try {
      val buffer0 = allocate(4)
      while (buffer0.remaining() > 0) {
        channel.read(buffer0)
      }
      buffer0.flip()
      val length = buffer0.getInt()
      val buffer = allocate(length)
      while (buffer.remaining() > 0) {
        channel.read(buffer)
      }
      buffer.flip()
      Success(buffer)
    } catch {
      case ioe: IOException =>
        Failure(ioe)
    }
  }

  // Maps a CValueType to the codec used for its individual values.
  private def getCodecFor[A](ctype: CValueType[A]): Codec[A] = ctype match {
    case CPeriod => ???
    // there doesn't appear to be a sane way to handle this
    // Codec.LongCodec.as[Period](_.toStandardDuration.getMillis, new Period(_))
    case CBoolean => Codec.BooleanCodec
    case CString => Codec.Utf8Codec
    case CLong => Codec.PackedLongCodec
    case CDouble => Codec.DoubleCodec
    case CNum => Codec.BigDecimalCodec
    case CDate => Codec.ZonedDateTimeCodec
    case CArrayType(elemType) =>
      Codec.ArrayCodec(getCodecFor(elemType))(elemType.classTag)
  }
}
| drostron/quasar | niflheim/src/main/scala/quasar/niflheim/V1SegmentFormat.scala | Scala | apache-2.0 | 7,467 |
package haru.util
object RedisKeyGenerator {

  /** Redis key under which the class list of an application is stored. */
  def getClass(appid: String): String =
    s"rs:classes:$appid"

  /** Redis key under which the schema of a class within an application is stored. */
  def getSchema(appid: String, classes: String): String =
    s"rs:schema:$classes:$appid"
}
package org.scalatest.examples.featurespec.getfixture
import org.scalatest.FeatureSpec
import collection.mutable.ListBuffer
class ExampleSpec extends FeatureSpec {

  // The classic ScalaTest "get-fixture" pattern: every call builds a fresh
  // fixture instance, so mutations cannot leak between scenarios.
  def fixture =
    new {
      val builder = new StringBuilder("ScalaTest is designed to ")
      val buffer = new ListBuffer[String]
    }

  feature("Simplicity") {

    scenario("User needs to read test code written by others") {
      val fix = fixture
      fix.builder.append("encourage clear code!")
      assert(fix.builder.toString === "ScalaTest is designed to encourage clear code!")
      assert(fix.buffer.isEmpty)
      // Mutating this fixture is safe: the next scenario gets its own copy.
      fix.buffer += "sweet"
    }

    scenario("User needs to understand what the tests are doing") {
      val fix = fixture
      fix.builder.append("be easy to reason about!")
      assert(fix.builder.toString === "ScalaTest is designed to be easy to reason about!")
      assert(fix.buffer.isEmpty)
    }
  }
}
// Fixture input for the block-constant-propagation analysis tests: exercises
// `||` with constant and propagated-constant boolean operands.
// NOTE(review): this class is consumed as test data by an analysis — the
// unused locals below are intentional; do not simplify or remove them.
class BCPBooleanOrBooleanTest {
  def m(): Unit = {
    val a = true
    val b = a || true
    val c = a || false
    val d = false
    val e = d || true
    val f = d || false
  }
}
package pin
import bin.A
import bin.B
import bin.C
import bin.D
import tin._
import bon.G
class FixImport extends G {
val x = new /*ref*/E
}
/*
package pin
import bin.{bon => _, _}
import tin._
import bon.G
class FixImport extends G {
val x = new E
}
*/ | whorbowicz/intellij-scala | testdata/autoImport/fixingImport/FixImport.scala | Scala | apache-2.0 | 261 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import scala.collection.mutable.ArrayBuilder
import org.apache.spark.SparkException
import org.apache.spark.annotation.Experimental
import org.apache.spark.ml.Transformer
import org.apache.spark.ml.attribute.{Attribute, AttributeGroup, NumericAttribute, UnresolvedAttribute}
import org.apache.spark.ml.param.ParamMap
import org.apache.spark.ml.param.shared._
import org.apache.spark.ml.util.Identifiable
import org.apache.spark.mllib.linalg.{Vector, VectorUDT, Vectors}
import org.apache.spark.sql.{DataFrame, Row}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types._
/**
* :: Experimental ::
* A feature transformer that merges multiple columns into a vector column.
*/
@Experimental
class VectorAssembler(override val uid: String)
  extends Transformer with HasInputCols with HasOutputCol {

  def this() = this(Identifiable.randomUID("vecAssembler"))

  /** @group setParam */
  def setInputCols(value: Array[String]): this.type = set(inputCols, value)

  /** @group setParam */
  def setOutputCol(value: String): this.type = set(outputCol, value)

  override def transform(dataset: DataFrame): DataFrame = {
    // Schema transformation.
    val schema = dataset.schema
    // Lazily fetch the first row: it is only needed when a vector column's
    // length cannot be determined from its metadata.
    lazy val first = dataset.first()
    // Build the ML attributes of the assembled output column, one (or more,
    // for vector inputs) per input column.
    val attrs = $(inputCols).flatMap { c =>
      val field = schema(c)
      val index = schema.fieldIndex(c)
      field.dataType match {
        case DoubleType =>
          val attr = Attribute.fromStructField(field)
          // If the input column doesn't have ML attribute, assume numeric.
          if (attr == UnresolvedAttribute) {
            Some(NumericAttribute.defaultAttr.withName(c))
          } else {
            Some(attr.withName(c))
          }
        case _: NumericType | BooleanType =>
          // If the input column type is a compatible scalar type, assume numeric.
          Some(NumericAttribute.defaultAttr.withName(c))
        case _: VectorUDT =>
          val group = AttributeGroup.fromStructField(field)
          if (group.attributes.isDefined) {
            // If attributes are defined, copy them with updated names.
            group.attributes.get.map { attr =>
              if (attr.name.isDefined) {
                // TODO: Define a rigorous naming scheme.
                attr.withName(c + "_" + attr.name.get)
              } else {
                attr
              }
            }
          } else {
            // Otherwise, treat all attributes as numeric. If we cannot get the number of attributes
            // from metadata, check the first row.
            val numAttrs = group.numAttributes.getOrElse(first.getAs[Vector](index).size)
            Array.fill(numAttrs)(NumericAttribute.defaultAttr)
          }
      }
    }
    val metadata = new AttributeGroup($(outputCol), attrs).toMetadata()
    // Data transformation.
    val assembleFunc = udf { r: Row =>
      VectorAssembler.assemble(r.toSeq: _*)
    }
    // Non-double scalars are cast to Double under a fresh alias (suffixed
    // with this transformer's uid) so the original columns stay untouched.
    val args = $(inputCols).map { c =>
      schema(c).dataType match {
        case DoubleType => dataset(c)
        case _: VectorUDT => dataset(c)
        case _: NumericType | BooleanType => dataset(c).cast(DoubleType).as(s"${c}_double_$uid")
      }
    }
    dataset.select(col("*"), assembleFunc(struct(args : _*)).as($(outputCol), metadata))
  }

  override def transformSchema(schema: StructType): StructType = {
    val inputColNames = $(inputCols)
    val outputColName = $(outputCol)
    val inputDataTypes = inputColNames.map(name => schema(name).dataType)
    // Only numeric, boolean, and vector input columns are supported.
    inputDataTypes.foreach {
      case _: NumericType | BooleanType =>
      case t if t.isInstanceOf[VectorUDT] =>
      case other =>
        throw new IllegalArgumentException(s"Data type $other is not supported.")
    }
    if (schema.fieldNames.contains(outputColName)) {
      throw new IllegalArgumentException(s"Output column $outputColName already exists.")
    }
    StructType(schema.fields :+ new StructField(outputColName, new VectorUDT, true))
  }

  override def copy(extra: ParamMap): VectorAssembler = defaultCopy(extra)
}
private object VectorAssembler {

  /**
   * Concatenates the given values — each a Double or a Vector — into a single
   * vector, built sparsely (only non-zero entries stored) and then compressed
   * to whichever of sparse/dense is smaller. Null values are rejected.
   */
  private[feature] def assemble(vv: Any*): Vector = {
    val indices = ArrayBuilder.make[Int]
    val values = ArrayBuilder.make[Double]
    // `cur` tracks the output-vector index where the next input value starts.
    var cur = 0
    vv.foreach {
      case v: Double =>
        if (v != 0.0) {
          indices += cur
          values += v
        }
        cur += 1
      case vec: Vector =>
        // foreachActive visits only stored entries, so zero entries of a
        // sparse input are skipped for free.
        vec.foreachActive { case (i, v) =>
          if (v != 0.0) {
            indices += cur + i
            values += v
          }
        }
        cur += vec.size
      case null =>
        // TODO: output Double.NaN?
        throw new SparkException("Values to assemble cannot be null.")
      case o =>
        throw new SparkException(s"$o of type ${o.getClass.getName} is not supported.")
    }
    Vectors.sparse(cur, indices.result(), values.result()).compressed
  }
}
| practice-vishnoi/dev-spark-1 | mllib/src/main/scala/org/apache/spark/ml/feature/VectorAssembler.scala | Scala | apache-2.0 | 5,698 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.std
import scala.Predef.$conforms
import slamdata.Predef._
import quasar._
import org.specs2.scalaz._
import scalaz._, Scalaz._
/**
 * Specs for the structural (array/map) standard-library functions, covering
 * forward type inference (`tpe`), reverse type inference (`untpe`), and
 * constant folding of fully-known arguments.
 */
class StructuralSpecs extends quasar.Qspec with ValidationMatchers {
  import quasar.Type._
  import StructuralLib._

  import DataGenerators._
  import TypeGenerators._

  "ConcatOp" should {
    // NB: the func's domain is the type assigned to any argument if nothing
    // else is known about it.
    val unknown = AnyArray ⨿ Str

    // A constant string concatenated with an array is treated as an array of
    // its single-character strings.
    def stringToList(s: String): List[Data] =
      s.map(c => Data.Str(c.toString)).toList

    "type combination of arbitrary strs as str" >> prop { (st1: Type, st2: Type) =>
      ConcatOp.tpe(Func.Input2(st1, st2)).map(Str contains _) must beSuccessful(true)
      ConcatOp.tpe(Func.Input2(st2, st1)).map(Str contains _) must beSuccessful(true)
    }.setArbitrary1(arbStrType).setArbitrary2(arbStrType)

    "type arbitrary str || unknown as Str" >> prop { (st: Type) =>
      ConcatOp.tpe(Func.Input2(st, unknown)) must beSuccessful(Str)
      ConcatOp.tpe(Func.Input2(unknown, st)) must beSuccessful(Str)
    }.setArbitrary(arbStrType)

    "fold constant Strings" >> prop { (s1: String, s2: String) =>
      ConcatOp.tpe(Func.Input2(Const(Data.Str(s1)), Const(Data.Str(s2)))) must
        beSuccessful(Const(Data.Str(s1 + s2)))
    }

    "type combination of arbitrary arrays as array" >> prop { (at1: Type, at2: Type) =>
      ConcatOp.tpe(Func.Input2(at1, at2)).map(_.arrayLike) must beSuccessful(true)
      ConcatOp.tpe(Func.Input2(at2, at1)).map(_.arrayLike) must beSuccessful(true)
    }.setArbitrary1(arbArrayType).setArbitrary2(arbArrayType)

    "type arbitrary array || unknown as array" >> prop { (at: Type) =>
      ConcatOp.tpe(Func.Input2(at, unknown)).map(_.arrayLike) must beSuccessful(true)
      ConcatOp.tpe(Func.Input2(unknown, at)).map(_.arrayLike) must beSuccessful(true)
    }.setArbitrary(arbArrayType)

    "fold constant arrays" >> prop { (ds1: List[Data], ds2: List[Data]) =>
      ConcatOp.tpe(Func.Input2(Const(Data.Arr(ds1)), Const(Data.Arr(ds2)))) must
        beSuccessful(Const(Data.Arr(ds1 ++ ds2)))
    }

    "type mixed Str and array args as array" >> prop { (st: Type, at: Type) =>
      ConcatOp.tpe(Func.Input2(st, at)).map(_.arrayLike) must beSuccessful(true)
      ConcatOp.tpe(Func.Input2(at, st)).map(_.arrayLike) must beSuccessful(true)
    }.setArbitrary1(arbStrType).setArbitrary2(arbArrayType)

    "fold constant string || array" >> prop { (s: String, xs: List[Data]) =>
      ConcatOp.tpe(Func.Input2(Const(Data.Str(s)), Const(Data.Arr(xs)))) must
        beSuccessful(Const(Data.Arr(stringToList(s) ::: xs)))
    }

    "fold constant array || string" >> prop { (s: String, xs: List[Data]) =>
      ConcatOp.tpe(Func.Input2(Const(Data.Arr(xs)), Const(Data.Str(s)))) must
        beSuccessful(Const(Data.Arr(xs ::: stringToList(s))))
    }

    "propagate unknown types" in {
      ConcatOp.tpe(Func.Input2(unknown, unknown)) must beSuccessful(unknown)
    }
  }

  // The Flatten*/Shift* specs below follow the same shape: reject the wrong
  // container kind, map the container type to its element (or index/key)
  // type via `tpe`, and invert that mapping via `untpe`.

  "FlattenMap" should {
    "only accept maps" >> prop { (nonMap: Type) =>
      FlattenMap.tpe(Func.Input1(nonMap)) must beFailing
    }.setArbitrary(arbArrayType)

    "convert from a map type to the type of its values" in {
      FlattenMap.tpe(Func.Input1(Obj(Map(), Str.some))) must beSuccessful(Str)
    }

    "untype to a map type from some value type" in {
      FlattenMap.untpe(Str).map(_.unsized) must beSuccessful(List(Obj(Map(), Str.some)))
    }
  }

  "FlattenMapKeys" should {
    "only accept maps" >> prop { (nonMap: Type) =>
      FlattenMapKeys.tpe(Func.Input1(nonMap)) must beFailing
    }.setArbitrary(arbArrayType)

    "convert from a map type to the type of its keys" in {
      FlattenMapKeys.tpe(Func.Input1(Obj(Map(), Int.some))) must beSuccessful(Str)
    }

    "untype to a map type from some key type" in {
      FlattenMapKeys.untpe(Str).map(_.unsized) must beSuccessful(List(Obj(Map(), Top.some)))
    }
  }

  "FlattenArray" should {
    "only accept arrays" >> prop { (nonArr: Type) =>
      FlattenArray.tpe(Func.Input1(nonArr)) must beFailing
    }.setArbitrary(arbStrType)

    "convert from an array type to the type of its values" in {
      FlattenArray.tpe(Func.Input1(FlexArr(0, None, Str))) must beSuccessful(Str)
    }

    "untype to an array type from some value type" in {
      FlattenArray.untpe(Str).map(_.unsized) must beSuccessful(List(FlexArr(0, None, Str)))
    }
  }

  "FlattenArrayIndices" should {
    "only accept arrays" >> prop { (nonArr: Type) =>
      FlattenArrayIndices.tpe(Func.Input1(nonArr)) must beFailing
    }.setArbitrary(arbStrType)

    "convert from an array type to int" in {
      FlattenArrayIndices.tpe(Func.Input1(FlexArr(0, None, Str))) must beSuccessful(Int)
    }

    "untype to an array type from int" in {
      FlattenArrayIndices.untpe(Int).map(_.unsized) must
        beSuccessful(List(FlexArr(0, None, Top)))
    }
  }

  "ShiftMap" should {
    "only accept maps" >> prop { (nonMap: Type) =>
      ShiftMap.tpe(Func.Input1(nonMap)) must beFailing
    }.setArbitrary(arbArrayType)

    "convert from a map type to the type of its values" in {
      ShiftMap.tpe(Func.Input1(Obj(Map(), Str.some))) must beSuccessful(Str)
    }

    "untype to a map type from some value type" in {
      ShiftMap.untpe(Str).map(_.unsized) must beSuccessful(List(Obj(Map(), Str.some)))
    }
  }

  "ShiftMapKeys" should {
    "only accept maps" >> prop { (nonMap: Type) =>
      ShiftMapKeys.tpe(Func.Input1(nonMap)) must beFailing
    }.setArbitrary(arbArrayType)

    "convert from a map type to the type of its keys" in {
      ShiftMapKeys.tpe(Func.Input1(Obj(Map(), Int.some))) must beSuccessful(Str)
    }

    "untype to a map type from some key type" in {
      ShiftMapKeys.untpe(Str).map(_.unsized) must beSuccessful(List(Obj(Map(), Top.some)))
    }
  }

  "ShiftArray" should {
    "only accept arrays" >> prop { (nonArr: Type) =>
      ShiftArray.tpe(Func.Input1(nonArr)) must beFailing
    }.setArbitrary(arbStrType)

    "convert from an array type to the type of its values" in {
      ShiftArray.tpe(Func.Input1(FlexArr(0, None, Str))) must beSuccessful(Str)
    }

    "untype to an array type from some value type" in {
      ShiftArray.untpe(Str).map(_.unsized) must beSuccessful(List(FlexArr(0, None, Str)))
    }
  }

  "ShiftArrayIndices" should {
    "only accept arrays" >> prop { (nonArr: Type) =>
      ShiftArrayIndices.tpe(Func.Input1(nonArr)) must beFailing
    }.setArbitrary(arbStrType)

    "convert from an array type to int" in {
      ShiftArrayIndices.tpe(Func.Input1(FlexArr(0, None, Str))) must beSuccessful(Int)
    }

    "untype to an array type from int" in {
      ShiftArrayIndices.untpe(Int).map(_.unsized) must
        beSuccessful(List(FlexArr(0, None, Top)))
    }
  }

  "UnshiftMap" should {
    "convert to a map type from some value type" in {
      UnshiftMap.tpe(Func.Input2(Top, Str)) must beSuccessful(Obj(Map(), Str.some))
    }

    "untype from a map type to the type of its values" in {
      UnshiftMap.untpe(Obj(Map(), Str.some)).map(_.unsized) must beSuccessful(List(Top, Str))
    }
  }

  "UnshiftArray" should {
    "convert to an array type from some value type" in {
      UnshiftArray.tpe(Func.Input1(Str)) must beSuccessful(FlexArr(0, None, Str))
    }

    "untype from an array type to the type of its values" in {
      UnshiftArray.untpe(FlexArr(0, None, Str)).map(_.unsized) must beSuccessful(List(Str))
    }
  }

  import org.scalacheck.Gen, Gen._
  import org.scalacheck.Arbitrary, Arbitrary._

  // Generator of string-like types: the Str type itself or a constant string.
  lazy val arbStrType: Arbitrary[Type] = Arbitrary(Gen.oneOf(
    const(Str),
    arbitrary[String].map(s => Const(Data.Str(s)))))

  lazy val arbArrayType: Arbitrary[Type] = Arbitrary(simpleArrayGen)

  // Generator of array-like types: flexible arrays, fixed-shape arrays, and
  // constant arrays (the Data list is kept small to bound spec runtime).
  lazy val simpleArrayGen = Gen.oneOf(
    for {
      i <- arbitrary[Int]
      n <- arbitrary[Option[Int]]
      t <- arbitrary[Type]
    } yield FlexArr(i.abs, n.map(i.abs max _.abs), t),
    for {
      t <- arbitrary[List[Type]]
    } yield Arr(t),
    for {
      ds <- resize(5, arbitrary[List[Data]])
    } yield Const(Data.Arr(ds)))
}
| jedesah/Quasar | frontend/src/test/scala/quasar/std/structural.scala | Scala | apache-2.0 | 8,706 |
package keystoneml.workflow
/**
* TransformerGraphs are similar to [[Graph]]s, but unlike normal Graphs they may only contain
* [[TransformerOperator]]s as operators, and as a result are guaranteed to be serializable.
*
* @param sources The set of all [[SourceId]]s of sources in the graph
* @param sinkDependencies A map of [[SinkId]] to the id of the node or source the sink depends on
* @param operators A map of [[NodeId]] to the operator contained within that node
* @param dependencies A map of [[NodeId]] to the node's ordered dependencies
*/
private[workflow] case class TransformerGraph(
  sources: Set[SourceId],
  sinkDependencies: Map[SinkId, NodeOrSourceId],
  operators: Map[NodeId, TransformerOperator],
  dependencies: Map[NodeId, Seq[NodeOrSourceId]]
) {

  /**
   * Convert this TransformerGraph into a standard [[Graph]].
   *
   * This is a pure widening: the same sources, sink dependencies, operators,
   * and dependency maps are reused unchanged.
   */
  private[workflow] def toGraph: Graph = {
    Graph(
      sources = sources,
      sinkDependencies = sinkDependencies,
      operators = operators,
      dependencies = dependencies)
  }
}
| amplab/keystone | src/main/scala/keystoneml/workflow/TransformerGraph.scala | Scala | apache-2.0 | 1,051 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import scala.util.Random
import breeze.linalg.normalize
import org.apache.hadoop.fs.Path
import org.apache.spark.annotation.{Experimental, Since}
import org.apache.spark.ml.linalg._
import org.apache.spark.ml.param._
import org.apache.spark.ml.param.shared.HasSeed
import org.apache.spark.ml.util._
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.StructType
/**
* :: Experimental ::
*
* Params for [[BucketedRandomProjectionLSH]].
*/
private[ml] trait BucketedRandomProjectionLSHParams extends Params {
  /**
   * The length of each hash bucket, a larger bucket lowers the false negative rate. The number of
   * buckets will be `(max L2 norm of input vectors) / bucketLength`.
   *
   * If input vectors are normalized, 1-10 times of pow(numRecords, -1/inputDim) would be a
   * reasonable value.
   * @group param
   */
  val bucketLength: DoubleParam = new DoubleParam(this, "bucketLength",
    "the length of each hash bucket, a larger bucket lowers the false negative rate.",
    ParamValidators.gt(0))
  /** @group getParam */
  final def getBucketLength: Double = $(bucketLength)
}
/**
* :: Experimental ::
*
* Model produced by [[BucketedRandomProjectionLSH]], where multiple random vectors are stored. The
* vectors are normalized to be unit vectors and each vector is used in a hash function:
* `h_i(x) = floor(r_i.dot(x) / bucketLength)`
* where `r_i` is the i-th random unit vector. The number of buckets will be `(max L2 norm of input
* vectors) / bucketLength`.
*
* @param randUnitVectors An array of random unit vectors. Each vector represents a hash function.
*/
@Experimental
@Since("2.1.0")
class BucketedRandomProjectionLSHModel private[ml](
    override val uid: String,
    private[ml] val randUnitVectors: Array[Vector])
  extends LSHModel[BucketedRandomProjectionLSHModel] with BucketedRandomProjectionLSHParams {
  /**
   * Maps an input vector to one hash value per stored random unit vector:
   * `h_i(x) = floor(r_i . x / bucketLength)`.  Each value is wrapped in a
   * one-element dense vector.
   */
  @Since("2.1.0")
  override protected[ml] val hashFunction: Vector => Array[Vector] = { key: Vector =>
    val length = $(bucketLength)
    val hashValues = randUnitVectors.map { unitVector =>
      Math.floor(BLAS.dot(key, unitVector) / length)
    }
    // TODO: Output vectors of dimension numHashFunctions in SPARK-18450
    hashValues.map(Vectors.dense(_))
  }
  /** Euclidean distance between two points in the original feature space. */
  @Since("2.1.0")
  override protected[ml] def keyDistance(x: Vector, y: Vector): Double =
    math.sqrt(Vectors.sqdist(x, y))
  /**
   * Distance between two hash sequences: the minimum squared distance over
   * corresponding hash vectors.  Both arguments come from `hashFunction`, so
   * each element is a dense one-value vector.
   */
  @Since("2.1.0")
  override protected[ml] def hashDistance(x: Seq[Vector], y: Seq[Vector]): Double = {
    val pairwise = x.zip(y).map { case (hx, hy) => Vectors.sqdist(hx, hy) }
    pairwise.min
  }
  @Since("2.1.0")
  override def copy(extra: ParamMap): this.type = defaultCopy(extra)
  @Since("2.1.0")
  override def write: MLWriter =
    new BucketedRandomProjectionLSHModel.BucketedRandomProjectionLSHModelWriter(this)
}
/**
* :: Experimental ::
*
* This [[BucketedRandomProjectionLSH]] implements Locality Sensitive Hashing functions for
* Euclidean distance metrics.
*
* The input is dense or sparse vectors, each of which represents a point in the Euclidean
* distance space. The output will be vectors of configurable dimension. Hash values in the
* same dimension are calculated by the same hash function.
*
* References:
*
* 1. <a href="https://en.wikipedia.org/wiki/Locality-sensitive_hashing#Stable_distributions">
* Wikipedia on Stable Distributions</a>
*
* 2. Wang, Jingdong et al. "Hashing for similarity search: A survey." arXiv preprint
* arXiv:1408.2927 (2014).
*/
@Experimental
@Since("2.1.0")
class BucketedRandomProjectionLSH(override val uid: String)
  extends LSH[BucketedRandomProjectionLSHModel]
    with BucketedRandomProjectionLSHParams with HasSeed {
  @Since("2.1.0")
  def this() = this(Identifiable.randomUID("brp-lsh"))
  @Since("2.1.0")
  override def setInputCol(value: String): this.type = super.setInputCol(value)
  @Since("2.1.0")
  override def setOutputCol(value: String): this.type = super.setOutputCol(value)
  @Since("2.1.0")
  override def setNumHashTables(value: Int): this.type = super.setNumHashTables(value)
  /** @group setParam */
  @Since("2.1.0")
  def setBucketLength(value: Double): this.type = set(bucketLength, value)
  /** @group setParam */
  @Since("2.1.0")
  def setSeed(value: Long): this.type = set(seed, value)
  /**
   * Draws `numHashTables` random unit vectors of dimension `inputDim`, seeded
   * by the `seed` param, and wraps them in a model.  Each vector is a standard
   * Gaussian draw scaled to unit L2 norm.
   */
  @Since("2.1.0")
  override protected[this] def createRawLSHModel(
      inputDim: Int): BucketedRandomProjectionLSHModel = {
    val rand = new Random($(seed))
    def nextUnitVector(): Vector = {
      val gaussians = Array.fill(inputDim)(rand.nextGaussian())
      Vectors.fromBreeze(normalize(breeze.linalg.Vector(gaussians)))
    }
    val randUnitVectors = Array.fill($(numHashTables))(nextUnitVector())
    new BucketedRandomProjectionLSHModel(uid, randUnitVectors)
  }
  /** Requires the input column to hold vectors before the shared schema check. */
  @Since("2.1.0")
  override def transformSchema(schema: StructType): StructType = {
    SchemaUtils.checkColumnType(schema, $(inputCol), new VectorUDT)
    validateAndTransformSchema(schema)
  }
  @Since("2.1.0")
  override def copy(extra: ParamMap): this.type = defaultCopy(extra)
}
@Since("2.1.0")
object BucketedRandomProjectionLSH extends DefaultParamsReadable[BucketedRandomProjectionLSH] {
  // Companion: provides `load` for estimators persisted via DefaultParamsWritable.
  @Since("2.1.0")
  override def load(path: String): BucketedRandomProjectionLSH = super.load(path)
}
@Since("2.1.0")
object BucketedRandomProjectionLSHModel extends MLReadable[BucketedRandomProjectionLSHModel] {
  @Since("2.1.0")
  override def read: MLReader[BucketedRandomProjectionLSHModel] = {
    new BucketedRandomProjectionLSHModelReader
  }
  @Since("2.1.0")
  override def load(path: String): BucketedRandomProjectionLSHModel = super.load(path)
  /** Persists params metadata plus the random unit vectors, packed into one Matrix. */
  private[BucketedRandomProjectionLSHModel] class BucketedRandomProjectionLSHModelWriter(
      instance: BucketedRandomProjectionLSHModel) extends MLWriter {
    // TODO: Save using the existing format of Array[Vector] once SPARK-12878 is resolved.
    private case class Data(randUnitVectors: Matrix)
    override protected def saveImpl(path: String): Unit = {
      DefaultParamsWriter.saveMetadata(instance, path, sc)
      val numRows = instance.randUnitVectors.length
      require(numRows > 0)
      val numCols = instance.randUnitVectors.head.size
      // Flatten the unit vectors end-to-end, row by row.
      val values = instance.randUnitVectors.map(_.toArray).reduce(Array.concat(_, _))
      // NOTE(review): Matrices.dense interprets `values` in column-major order,
      // but `values` concatenates the vectors row-by-row — verify that the
      // reader's `rowIter` actually round-trips the original vectors.
      val randMatrix = Matrices.dense(numRows, numCols, values)
      val data = Data(randMatrix)
      val dataPath = new Path(path, "data").toString
      sparkSession.createDataFrame(Seq(data)).repartition(1).write.parquet(dataPath)
    }
  }
  /** Reads the matrix written by the writer above and unpacks its rows into vectors. */
  private class BucketedRandomProjectionLSHModelReader
    extends MLReader[BucketedRandomProjectionLSHModel] {
    /** Checked against metadata when loading model */
    private val className = classOf[BucketedRandomProjectionLSHModel].getName
    override def load(path: String): BucketedRandomProjectionLSHModel = {
      val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
      val dataPath = new Path(path, "data").toString
      val data = sparkSession.read.parquet(dataPath)
      val Row(randUnitVectors: Matrix) = MLUtils.convertMatrixColumnsToML(data, "randUnitVectors")
        .select("randUnitVectors")
        .head()
      val model = new BucketedRandomProjectionLSHModel(metadata.uid,
        randUnitVectors.rowIter.toArray)
      DefaultParamsReader.getAndSetParams(model, metadata)
      model
    }
  }
}
| Panos-Bletsos/spark-cost-model-optimizer | mllib/src/main/scala/org/apache/spark/ml/feature/BucketedRandomProjectionLSH.scala | Scala | apache-2.0 | 8,325 |
package es.weso.computex.entities
// Enumerates the ways input can be supplied: by uploaded file, by direct
// text input, or by URI.
package object Action extends Enumeration {
  // Alias so callers can use `Action.Action` as the value type.
  type Action = Value
  val ByFile = Value
  val ByDirectInput = Value
  val ByURI = Value
}
| weso/computex | app/es/weso/computex/entities/Action.scala | Scala | apache-2.0 | 151 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.kinesis
import java.util.concurrent.{ExecutorService, TimeoutException}
import scala.concurrent.{Await, ExecutionContext, Future}
import scala.concurrent.duration._
import scala.language.postfixOps
import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessorCheckpointer
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.scalatest.{BeforeAndAfterEach, PrivateMethodTester}
import org.scalatest.concurrent.Eventually
import org.scalatest.concurrent.Eventually._
import org.scalatest.mock.MockitoSugar
import org.apache.spark.streaming.{Duration, TestSuiteBase}
import org.apache.spark.util.ManualClock
/**
 * Unit tests for KinesisCheckpointer, driven by a ManualClock and Mockito
 * mocks of the receiver and the KCL IRecordProcessorCheckpointer.
 */
class KinesisCheckpointerSuite extends TestSuiteBase
  with MockitoSugar
  with BeforeAndAfterEach
  with PrivateMethodTester
  with Eventually {
  private val workerId = "dummyWorkerId"
  private val shardId = "dummyShardId"
  private val seqNum = "123"
  private val otherSeqNum = "245"
  private val checkpointInterval = Duration(10)
  private val someSeqNum = Some(seqNum)
  private val someOtherSeqNum = Some(otherSeqNum)
  // Mocks and the checkpointer under test are recreated before every test; see beforeEach.
  private var receiverMock: KinesisReceiver[Array[Byte]] = _
  private var checkpointerMock: IRecordProcessorCheckpointer = _
  private var kinesisCheckpointer: KinesisCheckpointer = _
  private var clock: ManualClock = _
  // Handle on KinesisCheckpointer's private `checkpoint(shardId, checkpointer)` method.
  private val checkpoint = PrivateMethod[Unit]('checkpoint)
  override def beforeEach(): Unit = {
    receiverMock = mock[KinesisReceiver[Array[Byte]]]
    checkpointerMock = mock[IRecordProcessorCheckpointer]
    clock = new ManualClock()
    kinesisCheckpointer = new KinesisCheckpointer(receiverMock, checkpointInterval, workerId, clock)
  }
  test("checkpoint is not called twice for the same sequence number") {
    when(receiverMock.getLatestSeqNumToCheckpoint(shardId)).thenReturn(someSeqNum)
    kinesisCheckpointer.invokePrivate(checkpoint(shardId, checkpointerMock))
    kinesisCheckpointer.invokePrivate(checkpoint(shardId, checkpointerMock))
    verify(checkpointerMock, times(1)).checkpoint(anyString())
  }
  test("checkpoint is called after sequence number increases") {
    // First call sees seqNum, second call sees otherSeqNum; both must be checkpointed.
    when(receiverMock.getLatestSeqNumToCheckpoint(shardId))
      .thenReturn(someSeqNum).thenReturn(someOtherSeqNum)
    kinesisCheckpointer.invokePrivate(checkpoint(shardId, checkpointerMock))
    kinesisCheckpointer.invokePrivate(checkpoint(shardId, checkpointerMock))
    verify(checkpointerMock, times(1)).checkpoint(seqNum)
    verify(checkpointerMock, times(1)).checkpoint(otherSeqNum)
  }
  test("should checkpoint if we have exceeded the checkpoint interval") {
    when(receiverMock.getLatestSeqNumToCheckpoint(shardId))
      .thenReturn(someSeqNum).thenReturn(someOtherSeqNum)
    kinesisCheckpointer.setCheckpointer(shardId, checkpointerMock)
    // Advance well past the interval so the periodic task fires.
    clock.advance(5 * checkpointInterval.milliseconds)
    eventually(timeout(1 second)) {
      verify(checkpointerMock, times(1)).checkpoint(seqNum)
      verify(checkpointerMock, times(1)).checkpoint(otherSeqNum)
    }
  }
  test("shouldn't checkpoint if we have not exceeded the checkpoint interval") {
    when(receiverMock.getLatestSeqNumToCheckpoint(shardId)).thenReturn(someSeqNum)
    kinesisCheckpointer.setCheckpointer(shardId, checkpointerMock)
    // Only half the interval has elapsed, so no checkpoint should occur.
    clock.advance(checkpointInterval.milliseconds / 2)
    verify(checkpointerMock, never()).checkpoint(anyString())
  }
  test("should not checkpoint for the same sequence number") {
    // The sequence number never changes, so at most the first tick checkpoints.
    when(receiverMock.getLatestSeqNumToCheckpoint(shardId)).thenReturn(someSeqNum)
    kinesisCheckpointer.setCheckpointer(shardId, checkpointerMock)
    clock.advance(checkpointInterval.milliseconds * 5)
    eventually(timeout(1 second)) {
      verify(checkpointerMock, atMost(1)).checkpoint(anyString())
    }
  }
  test("removing checkpointer checkpoints one last time") {
    when(receiverMock.getLatestSeqNumToCheckpoint(shardId)).thenReturn(someSeqNum)
    kinesisCheckpointer.removeCheckpointer(shardId, checkpointerMock)
    verify(checkpointerMock, times(1)).checkpoint(anyString())
  }
  test("if checkpointing is going on, wait until finished before removing and checkpointing") {
    when(receiverMock.getLatestSeqNumToCheckpoint(shardId))
      .thenReturn(someSeqNum).thenReturn(someOtherSeqNum)
    // Make the first checkpoint block on the manual clock so removal must wait for it.
    when(checkpointerMock.checkpoint(anyString)).thenAnswer(new Answer[Unit] {
      override def answer(invocations: InvocationOnMock): Unit = {
        clock.waitTillTime(clock.getTimeMillis() + checkpointInterval.milliseconds / 2)
      }
    })
    kinesisCheckpointer.setCheckpointer(shardId, checkpointerMock)
    clock.advance(checkpointInterval.milliseconds)
    eventually(timeout(1 second)) {
      verify(checkpointerMock, times(1)).checkpoint(anyString())
    }
    // don't block test thread
    val f = Future(kinesisCheckpointer.removeCheckpointer(shardId, checkpointerMock))(
      ExecutionContext.global)
    // Removal cannot finish while the in-flight checkpoint is still blocked on the clock.
    intercept[TimeoutException] {
      Await.ready(f, 50 millis)
    }
    // Unblock the in-flight checkpoint; removal then performs its final checkpoint.
    clock.advance(checkpointInterval.milliseconds / 2)
    eventually(timeout(1 second)) {
      verify(checkpointerMock, times(2)).checkpoint(anyString())
    }
  }
}
| Panos-Bletsos/spark-cost-model-optimizer | external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisCheckpointerSuite.scala | Scala | apache-2.0 | 5,993 |
package net.matthaynes.juicer.service
import com.gravity.goose.{ Article => GooseArticle, _ }
import com.gravity.goose.extractors.AdditionalDataExtractor
import com.gravity.goose.images.{ Image => GooseImage }
import org.jsoup.nodes.Element
import scala.util.matching.Regex
// An image extracted from an article page: its source URL and pixel dimensions.
case class Image(src: String, width: Int, height: Int)
// The extracted article: canonical URL, domain, Goose link hash, title, meta
// description, cleaned body text, optional top image, optional site-specific
// metadata, and the named entities classified from title + body.
case class Article(url: String, domain: String, hash: String, title: String, description: String,
  body: String, image: Option[Image], additionalData: Option[scala.collection.Map[String, String]], entities: List[NamedEntity])
/**
 * Extracts article content from a URL using Goose, then runs named-entity
 * classification over the title and body text.
 */
class ArticleExtractorService {

  // Goose configuration; built once per service instance.  Image fetching is
  // enabled only when ImageMagick's convert/identify binaries are on the PATH
  // (probed via `which` at construction time).
  val config = new Configuration {
    setLocalStoragePath("/tmp/goose")
    setAdditionalDataExtractor(new BbcNewsDataExtractor())
    import scala.sys.process._
    if ("which convert".! == 0 && "which identify".! == 0) {
      setEnableImageFetching(true)
      setImagemagickConvertPath("which convert".!!.trim)
      setImagemagickIdentifyPath("which identify".!!.trim)
    } else {
      setEnableImageFetching(false)
    }
    // Don't want this disabled thanks
    com.gravity.goose.network.HtmlFetcher.getHttpClient.getParams.setParameter("http.connection.stalecheck", true)
  }

  val goose = new Goose(config)
  val entities = new NamedEntityService

  /**
   * Fetches and extracts the article at `url`, returning its metadata, body,
   * optional top image, optional site-specific data, and named entities.
   */
  def extract(url: String): Article = {
    val article = goose.extractContent(url)
    // Classify entities over title + body, skipping whichever Goose left null.
    val text = List(article.title, article.cleanedArticleText).filter(_ != null).mkString(" ")
    val image = article.topImage match {
      case i: GooseImage =>
        if (i.imageSrc.isEmpty) None else Option(new Image(i.imageSrc, i.width, i.height))
      case _ => None
    }
    // Only expose additionalData when the extractor actually found something.
    val additionalData =
      if (article.additionalData.isEmpty) None else Option(article.additionalData)
    new Article(article.canonicalLink, article.domain, article.linkhash, article.title,
      article.metaDescription, article.cleanedArticleText, image, additionalData, entities.classify(text))
  }
}
/**
 * Pulls BBC News specific metadata out of a page's meta tags.  Produces a map
 * with any of the keys bbc.news.section, bbc.news.asset_type, bbc.news.id and
 * bbc.news.published that could be extracted; missing tags are simply omitted.
 */
class BbcNewsDataExtractor extends AdditionalDataExtractor {

  // BBC publication timestamps arrive as "YYYY/MM/DD HH:MM:SS".
  private val publicationDate =
    new Regex("""(\d{4})\/(\d{2})\/(\d{2}) (\d{2}):(\d{2}):(\d{2})""", "Y", "m", "d", "H", "M", "S")

  // Meta tag name -> output attribute key, for tags copied through verbatim.
  private val simpleTags = Seq(
    "Section" -> "bbc.news.section",
    "CPS_ASSET_TYPE" -> "bbc.news.asset_type",
    "CPS_ID" -> "bbc.news.id")

  override def extract(rootElement: Element): Map[String, String] = {
    val simple = simpleTags.flatMap { case (metaName, key) =>
      getMetaTagValue(rootElement, metaName).map(value => key -> value)
    }
    // Convert the publication date to ISO-8601.  Unlike the previous
    // implementation (which called .get on findFirstMatchIn), an unparseable
    // date is skipped instead of throwing.
    val published = for {
      raw <- getMetaTagValue(rootElement, "OriginalPublicationDate")
      m <- publicationDate.findFirstMatchIn(raw)
    } yield "bbc.news.published" -> (m.group("Y") + "-" +
      m.group("m") + "-" +
      m.group("d") + "T" +
      m.group("H") + ":" +
      m.group("M") + ":" +
      m.group("S"))
    (simple ++ published).toMap
  }

  /** Returns the content attribute of the meta tag named `key`, if present. */
  def getMetaTagValue(rootElement: Element, key: String): Option[String] = {
    val metaTags = rootElement.getElementsByAttributeValue("name", key)
    if (metaTags.isEmpty()) {
      None
    } else {
      val metaTag = metaTags.get(0)
      if (metaTag.tagName() != "meta") None
      else Some(metaTag.attr("content"))
    }
  }
}
| matth/juicer | juicer-service/src/main/scala/net/matthaynes/juicer/service/ArticleExtractorService.scala | Scala | mit | 3,757 |
// Copyright 2012 Brennan Saeta
//
// This file is part of Axess
//
// Axess is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Axess is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with Axess. If not, see <http://www.gnu.org/licenses/>.
package axess.checkers
import org.openqa.selenium.WebDriver
import org.openqa.selenium.By
import scala.collection.JavaConversions._
/**
 * Accessibility checker that flags anchor elements whose link text conveys
 * nothing out of context: bare numbers, "here", or "click here".
 */
class LinkText extends Checker {
  // `(?i)` makes the patterns case-insensitive so capitalised variants such as
  // "Here" or "Click here" are flagged too — the original case-sensitive
  // patterns missed those common forms.  Surrounding punctuation/whitespace
  // (\W*) is tolerated, and any run of whitespace between "click" and "here".
  val digits = "(?i)^\\W*\\d+\\W*$".r
  val here = "(?i)^\\W*here\\W*$".r
  val click_here = "(?i)^\\W*click\\s+here\\W*$".r

  /**
   * Scans every anchor with an href and returns one message per element whose
   * visible text matches an inaccessible pattern.
   */
  def checkPage(browser: WebDriver): List[String] = {
    for {
      elem <- browser.findElements(By.tagName("a")).toList
      if elem.getAttribute("href") != null
      txt = elem.getText()
      if digits.findFirstIn(txt).isDefined ||
        here.findFirstIn(txt).isDefined ||
        click_here.findFirstIn(txt).isDefined
    } yield "Inaccessible link text: \"" + txt + "\" (" + elem + ")"
  }

  def category = "Accessibility"
}
| saeta/axess | app/axess/checkers/LinkText.scala | Scala | agpl-3.0 | 1,411 |
package com.github.mdr.mash.screen
import com.github.mdr.mash.screen.Style._
import com.github.mdr.mash.utils.{ Dimensions, Point, Utils }
import com.github.mdr.mash.utils.Utils._
case class ScreenDraw(drawString: String, swappedOutScreenOpt: Option[Screen])
/**
 * Computes the minimal terminal output needed to go from a previously drawn
 * screen to a new one, including switching to/from the alternate screen.
 */
class ScreenDrawer(terminalSize: Dimensions) {
  /**
   * Produce the string that redraws the terminal as `newScreen`.
   *
   * @param previousScreenOpt  what is currently displayed, if known; used to
   *                           emit only the differences
   * @param swappedOutScreenOpt the main screen saved when the alternate screen
   *                            was entered, restored on return
   */
  def draw(newScreen: Screen,
           previousScreenOpt: Option[Screen] = None,
           swappedOutScreenOpt: Option[Screen] = None): ScreenDraw = {
    val Screen(lines, cursorPosOpt, title, alternateScreen) = newScreen
    val cursorPos = cursorPosOpt.getOrElse(Point(0, 0))
    val oldAlternateScreen = previousScreenOpt.exists(_.isAlternateScreen)
    // Detect transitions into and out of the alternate screen buffer.
    val swappingOut = alternateScreen && !oldAlternateScreen
    val swappingBackIn = !alternateScreen && oldAlternateScreen
    // When returning from the alternate screen, diff against what was swapped out.
    val actualPreviousScreenOpt = if (swappingBackIn) swappedOutScreenOpt else previousScreenOpt
    val currentPos = actualPreviousScreenOpt.flatMap(_.cursorPosOpt).getOrElse(Point(0, 0))
    val drawState = new DrawState(terminalSize, currentPos.row, currentPos.column, Style.Default)
    if (swappingOut) {
      /**
       * Os X's Terminal.app has a bug that, if there is a character in the very bottom right of the
       * window when coming back from the alternate screen, a newline is inserted.
       *
       * As a workaround, we delete any bottom right character of a Screen before going to the alternate screen
       * (even if this might not be the bottom right of the Terminal window, we can't tell because have no idea
       * about scrollback). The character is restored when we return from the alternate screen.
       */
      for ((point, _) ← getBottomRightChar(actualPreviousScreenOpt)) {
        drawState.moveCursor(point)
        drawState.eraseLineFromCursor()
      }
      drawState.switchToAlternateScreen()
    } else if (swappingBackIn) {
      drawState.returnFromAlternateScreen(currentPos)
      // Restore the bottom-right character deleted when we swapped out (see above).
      for ((point, c) ← getBottomRightChar(actualPreviousScreenOpt)) {
        drawState.moveCursor(point)
        drawState.addChar(c)
      }
    }
    drawLines(drawState, lines, actualPreviousScreenOpt)
    drawState.moveCursor(cursorPos)
    if (previousScreenOpt.map(_.title) != Some(title))
      drawState.setTitle(title)
    val output = drawState.complete(showCursor = cursorPosOpt.isDefined)
    // Track which screen is held aside for the next call.
    val newSwappedOutScreenOpt =
      if (swappingOut)
        previousScreenOpt
      else if (swappingBackIn)
        None
      else
        swappedOutScreenOpt
    ScreenDraw(output, swappedOutScreenOpt = newSwappedOutScreenOpt)
  }
  /**
   * The character in the terminal's bottom-right cell of the previous screen,
   * with its position — only when the last line spans the full width.
   */
  private def getBottomRightChar(actualPreviousScreenOpt: Option[Screen]) = {
    for {
      screen ← actualPreviousScreenOpt
      lastLine ← screen.lines.lastOption
      if lastLine.length == terminalSize.columns
      c ← lastLine.string.chars.lastOption
      size = screen.size
      bottomRight = Point(size.rows - 1, size.columns - 1)
    } yield (bottomRight, c)
  }
  /** Diffs new lines against previous ones, drawing/erasing only what changed. */
  private def drawLines(drawState: DrawState, lines: Seq[Line], actualPreviousScreenOpt: Option[Screen]) {
    val previousLines = actualPreviousScreenOpt.map(_.lines).getOrElse(Seq())
    val newAndPreviousLines: Seq[(Option[Line], Option[Line])] =
      Utils.zipPad(lines.map(Some(_)), previousLines.map(Some(_)), None)
    for (((newLineOpt, previousLineOpt), row) ← newAndPreviousLines.zipWithIndex)
      drawLine(drawState, lines, newLineOpt, previousLineOpt, row)
  }
  /** Dispatch: skip unchanged rows, draw changed/new rows, erase removed rows. */
  private def drawLine(drawState: DrawState,
                       lines: Seq[Line],
                       newLineOpt: Option[Line],
                       previousLineOpt: Option[Line],
                       row: Int): Unit =
    (newLineOpt, previousLineOpt) match {
      case (_, _) if newLineOpt == previousLineOpt ⇒ // nothing needed
      case (Some(newLine), _) ⇒ drawLine(drawState, lines, newLine, previousLineOpt, row)
      case (None, Some(_)) ⇒ eraseLine(drawState, row)
    }
  /**
   * Draw one changed row: positions the cursor (possibly forcing a line wrap
   * from the row above when it has no trailing newline), then emits only the
   * characters that differ from the previous content of that row.
   */
  private def drawLine(drawState: DrawState, lines: Seq[Line], newLine: Line, previousLineOpt: Option[Line], row: Int) {
    drawState.navigateUpToRowOrDownToJustAbove(row)
    var firstCharDrawn = false
    if (drawState.getCurrentRow == row - 1) {
      // We ended up on the line above
      val aboveLine = lines(row - 1)
      if (aboveLine.endsInNewline)
        drawState.crlf()
      else {
        // Rewrite the last character of the previous line and the first character of the new line to force a wrap
        if (drawState.getCurrentColumn < terminalSize.columns) {
          val lastCharOfPreviousLine = aboveLine.string.lastOption getOrElse ' '.style
          drawState.moveCursorToColumn(aboveLine.string.size - 1)
          drawState.addChar(lastCharOfPreviousLine)
        }
        val firstCharOfNewLine = newLine.string.headOption getOrElse ' '.style
        drawState.addChar(firstCharOfNewLine)
        firstCharDrawn = true
      }
    }
    previousLineOpt match {
      case Some(previousLine) ⇒
        // Emit only characters that differ from the previous row contents.
        val previousChars = previousLine.string.chars
        val newChars = newLine.string.chars
        val previousAndNewChars = previousChars.map(Some(_)).padTo(newChars.length, None).zip(newChars)
        for (((previousCharOpt, newChar), col) ← previousAndNewChars.zipWithIndex.when(firstCharDrawn, _ drop 1))
          if (previousCharOpt != Some(newChar)) {
            drawState.moveCursorToColumn(col)
            drawState.addChar(newChar)
          }
        // Clear any leftover tail from a longer previous line.
        if (previousLine.length > newLine.length) {
          drawState.moveCursorToColumn(newLine.length)
          drawState.eraseLineFromCursor()
        }
      case None ⇒
        // No previous content: write the whole line (minus any wrap-forcing char).
        if (firstCharDrawn) {
          drawState.moveCursorToColumn(1)
          drawState.addChars(newLine.string drop 1)
        } else {
          drawState.moveCursorToColumn(0)
          drawState.addChars(newLine.string)
        }
    }
  }
  /** Clears the given row entirely. */
  private def eraseLine(drawState: DrawState, row: Int) {
    drawState.moveCursorToRow(row)
    drawState.cr()
    drawState.eraseLineFromCursor()
  }
}
| mdr/mash | src/main/scala/com/github/mdr/mash/screen/ScreenDrawer.scala | Scala | mit | 6,078 |
import scala.reflect.ClassManifest
// Checks that a user-defined alias of ClassManifest still resolves implicitly,
// and prints whether the summoned manifest is reference-equal to the cached
// Manifest.Int.  ClassManifest is deprecated, hence the suppression annotation.
@deprecated("Suppress warnings", since="2.11")
object Test extends App {
  type CM[T] = ClassManifest[T]
  println(implicitly[CM[Int]])
  println(implicitly[CM[Int]] eq Manifest.Int)
}
| martijnhoekstra/scala | test/files/run/classmanifests_new_alias.scala | Scala | apache-2.0 | 221 |
package com.cave.metrics.data
import org.joda.time.DateTime
import org.joda.time.format.ISODateTimeFormat
import play.api.libs.json._
case class Check(schedule: Schedule, timestamp: DateTime)
object Check {
  // Timestamps are (de)serialized as ISO-8601 without milliseconds.
  final val FMT = ISODateTimeFormat.dateTimeNoMillis()

  /**
   * JSON reader: expects an object with a "schedule" field and an ISO-8601
   * "timestamp" string.  Any parse failure (missing field, bad date) surfaces
   * as a JsError instead of an exception.
   */
  implicit val checkReads = new Reads[Check] {
    def reads(value: JsValue) = try {
      JsSuccess(new Check(
        // Direct field lookup with `\` — the recursive-search operator `\\`
        // yields a Seq[JsValue], which cannot be converted with `.as`.
        (value \ "schedule").as[Schedule],
        FMT.parseDateTime((value \ "timestamp").as[String])
      ))
    } catch {
      case e: Exception => JsError(e.getMessage)
    }
  }

  /** JSON writer mirroring the reader's field layout. */
  implicit val checkWrites = new Writes[Check] {
    def writes(check: Check): JsValue = {
      Json.obj(
        "schedule" -> Json.toJson(check.schedule),
        "timestamp" -> JsString(FMT.print(check.timestamp))
      )
    }
  }
}
| gilt/cave | core/src/main/scala/com/cave/metrics/data/Check.scala | Scala | mit | 803 |
/*
Copyright (c) 2016, Rice University
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of Rice University
nor the names of its contributors may be used to endorse or
promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.apache.spark.rdd.cl.tests
import java.util.LinkedList
import com.amd.aparapi.internal.writer.ScalaArrayParameter
import org.apache.spark.rdd.cl.SyncCodeGenTest
import org.apache.spark.rdd.cl.CodeGenTest
import org.apache.spark.rdd.cl.CodeGenTests
import com.amd.aparapi.internal.model.HardCodedClassModels
object ExtensionTest extends SyncCodeGenTest[Int, ExtRet] {
  // This code-gen test expects no exception.
  def getExpectedException() : String = { return null }
  // Expected kernel text is loaded from a resource keyed by this class.
  def getExpectedKernel() : String = { getExpectedKernelHelper(getClass) }
  // The generated kernel takes a single input.
  def getExpectedNumInputs() : Int = {
    1
  }
  // No hard-coded class models are required.
  def init() : HardCodedClassModels = { new HardCodedClassModels() }
  def complete(params : LinkedList[ScalaArrayParameter]) { }
def getFunction() : Function1[Int, ExtRet] = {
val max_band_try : Int = 0
val fpgaExtTasks_leftQs: Array[Byte] = null
val fpgaExtTasks_leftQlen: Array[Int] = null
val fpgaExtTasks_leftQoffset: Array[Int] = null
val fpgaExtTasks_leftRs: Array[Byte] = null
val fpgaExtTasks_leftRlen: Array[Int] = null
val fpgaExtTasks_leftRoffset: Array[Int] = null
val fpgaExtTasks_rightQs: Array[Byte] = null
val fpgaExtTasks_rightQlen: Array[Int] = null
val fpgaExtTasks_rightQoffset: Array[Int] = null
val fpgaExtTasks_rightRs: Array[Byte] = null
val fpgaExtTasks_rightRlen: Array[Int] = null
val fpgaExtTasks_rightRoffset: Array[Int] = null
val fpgaExtTasks_w: Array[Int] = null
val fpgaExtTasks_mat: Array[Byte] = null // same for each
val matlen : Int = 0
val fpgaExtTasks_oDel: Array[Int] = null
val fpgaExtTasks_eDel: Array[Int] = null
val fpgaExtTasks_oIns: Array[Int] = null
val fpgaExtTasks_eIns: Array[Int] = null
val fpgaExtTasks_penClip5: Array[Int] = null
val fpgaExtTasks_penClip3: Array[Int] = null
val fpgaExtTasks_zdrop: Array[Int] = null
val fpgaExtTasks_h0: Array[Int] = null
val fpgaExtTasks_regScore: Array[Int] = null
val fpgaExtTasks_qBeg: Array[Int] = null
val fpgaExtTasks_idx: Array[Int] = null
new Function[Int, ExtRet] {
      override def apply(i : Int) : ExtRet = {
        // Runs one extension task: element i of each captured parallel array
        // supplies the i-th task's query/reference slices and scoring params.
        extension(fpgaExtTasks_w(i), fpgaExtTasks_regScore(i),
          fpgaExtTasks_rightQlen(i), fpgaExtTasks_rightQoffset(i), fpgaExtTasks_rightQs,
          fpgaExtTasks_leftQlen(i), fpgaExtTasks_leftQoffset(i), fpgaExtTasks_leftQs,
          fpgaExtTasks_rightRlen(i), fpgaExtTasks_rightRoffset(i), fpgaExtTasks_rightRs,
          fpgaExtTasks_idx(i), fpgaExtTasks_penClip5(i),
          fpgaExtTasks_penClip3(i), fpgaExtTasks_qBeg(i),
          fpgaExtTasks_leftRlen(i), fpgaExtTasks_leftRoffset(i),
          fpgaExtTasks_leftRs, fpgaExtTasks_mat, matlen, fpgaExtTasks_oDel(i),
          fpgaExtTasks_eDel(i), fpgaExtTasks_oIns(i),
          fpgaExtTasks_eIns(i), fpgaExtTasks_zdrop(i), fpgaExtTasks_h0(i),
          max_band_try)
      }
      /**
       * Banded Smith-Waterman-style extension (appears to follow BWA-MEM's
       * ksw_extend — TODO confirm).  Extends an alignment of `query` against
       * `target` starting from initial score `h0`, within band width `w_i`,
       * using scoring matrix `mat` (m x m) and affine gap penalties
       * (oDel/eDel, oIns/eIns).  Returns a 6-element array:
       * 0 = best local score, 1 = query end (exclusive), 2 = target end
       * (exclusive), 3 = target end for a to-query-end alignment, 4 = score of
       * the to-query-end alignment, 5 = max band offset observed.
       */
      private def SWExtend(
        qLen: Int, qOffset : Int, query: Array[Byte], tLen: Int, tOffset: Int, target: Array[Byte], m: Int, mat: Array[Byte], matlen : Int,
        oDel: Int, eDel: Int, oIns: Int, eIns: Int, w_i: Int, endBonus: Int, zdrop: Int, h0: Int): Array[Int] =
      {
        var retArray: Array[Int] = new Array[Int](6)
        var eh_e : Array[Int] = new Array[Int](qLen + 1) // score array
        var eh_h : Array[Int] = new Array[Int](qLen + 1) // score array
        var qp: Array[Byte] = new Array[Byte](qLen * m) // query profile
        var oeDel = oDel + eDel
        var oeIns = oIns + eIns
        var i = 0
        var j = 0
        var k = 0
        var w = w_i
        while(i < (qLen + 1)) {
          eh_e(i) = 0
          eh_h(i) = 0
          i += 1
        }
        // generate the query profile
        i = 0
        k = 0
        while(k < m) {
          val p = k * m
          j = 0
          while(j < qLen) {
            qp(i) = mat(p + query(qOffset + j))
            i += 1
            j += 1
          }
          k += 1
        }
        // fill the first row
        eh_h(0) = h0
        if(h0 > oeIns) eh_h(1) = h0 - oeIns
        else eh_h(1) = 0
        j = 2
        while(j <= qLen && eh_h(j-1) > eIns) {
          eh_h(j) = eh_h(j-1) - eIns
          j += 1
        }
        // adjust $w if it is too large
        k = m * m
        var max = 0
        i = 0
        while (i < matlen) {
          if (mat(i) > max) {
            max = mat(i)
          }
          i += 1
        }
        var maxIns = ((qLen * max + endBonus - oIns).toDouble / eIns + 1.0).toInt
        if(maxIns < 1) maxIns = 1
        if(w > maxIns) w = maxIns // TODO: is this necessary? (in original C implementation)
        var maxDel = ((qLen * max + endBonus - oDel).toDouble / eDel + 1.0).toInt
        if(maxDel < 1) maxDel = 1
        if(w > maxDel) w = maxDel // TODO: is this necessary? (in original C implementation)
        // DP loop
        max = h0
        var max_i = -1
        var max_j = -1
        var max_ie = -1
        var gscore = -1
        var max_off = 0
        var beg = 0
        var end = qLen
        var isBreak = false
        i = 0
        while(i < tLen && !isBreak) {
          var t = 0
          var f = 0
          var h1 = 0
          var m = 0
          var mj = -1
          var qPtr = target(tOffset + i) * qLen
          // compute the first column
          h1 = h0 - (oDel + eDel * (i + 1))
          if(h1 < 0) h1 = 0
          // apply the band and the constraint (if provided)
          if (beg < i - w) beg = i - w
          if (end > i + w + 1) end = i + w + 1
          if (end > qLen) end = qLen
          j = beg
          while(j < end) {
            // At the beginning of the loop: eh[j] = { H(i-1,j-1), E(i,j) }, f = F(i,j) and h1 = H(i,j-1)
            // Similar to SSE2-SW, cells are computed in the following order:
            // H(i,j) = max{H(i-1,j-1)+S(i,j), E(i,j), F(i,j)}
            // E(i+1,j) = max{H(i,j)-gapo, E(i,j)} - gape
            // F(i,j+1) = max{H(i,j)-gapo, F(i,j)} - gape
            var h = eh_h(j)
            var e = eh_e(j) // get H(i-1,j-1) and E(i-1,j)
            eh_h(j) = h1
            h += qp(qPtr + j)
            if(h < e) h = e
            if(h < f) h = f
            h1 = h // save H(i,j) to h1 for the next column
            if(m <= h) {
              mj = j // record the position where max score is achieved
              m = h // m is stored at eh[mj+1]
            }
            t = h - oeDel
            if(t < 0) t = 0
            e -= eDel
            if(e < t) e = t // computed E(i+1,j)
            eh_e(j) = e // save E(i+1,j) for the next row
            t = h - oeIns
            if(t < 0) t = 0
            f -= eIns
            if(f < t) f = t
            j += 1
          }
          eh_h(end) = h1
          eh_e(end) = 0
          // end == j after the previous loop
          if(j == qLen) {
            if(gscore <= h1) {
              max_ie = i
              gscore = h1
            }
          }
          if(m == 0)
            isBreak = true
          else {
            if(m > max) {
              max = m
              max_i = i
              max_j = mj
              if(max_off < scala.math.abs(mj - i)) max_off = scala.math.abs(mj - i)
            }
            // NOTE(review): in Scala the `else` below binds to the INNER `if`,
            // not to `if((i - max_i) > (mj - max_j))` as the indentation
            // suggests — so when the outer condition is false no z-drop check
            // runs at all, and when it is true but the eDel check fails the
            // eIns check runs instead.  Verify against the intended BWA-MEM
            // z-drop logic (likely needs braces around the inner `if`s).
            else if(zdrop > 0) {
              if((i - max_i) > (mj - max_j))
                if(max - m - ((i - max_i) - (mj - max_j)) * eDel > zdrop) isBreak = true
              else
                if(max - m - ((mj - max_j) - (i - max_i)) * eIns > zdrop) isBreak = true
            }
            // update beg and end for the next round
            if(!isBreak) {
              j = mj
              while(j >= beg && eh_h(j) > 0) {
                j -= 1
              }
              beg = j + 1
              j = mj + 2
              while(j <= end && eh_h(j) > 0) {
                j += 1
              }
              end = j
            }
          }
          //println(i + " " + max_ie + " " + gscore) // testing
          i += 1
        }
        retArray(0) = max
        retArray(1) = max_j + 1
        retArray(2) = max_i + 1
        retArray(3) = max_ie + 1
        retArray(4) = gscore
        retArray(5) = max_off
        retArray
      }
    /**
     * Extends a seed alignment to the left and to the right using banded
     * Smith-Waterman (SWExtend), retrying each side with a doubled band width
     * (up to MAX_BAND_TRY attempts) until the score converges or the best cell
     * stays comfortably inside the band. Mirrors the left/right extension loop
     * of bwa-mem's seed-to-alignment step.
     *
     * Returns an ExtRet carrying the query/reference begin and end of the
     * extended alignment, the local and "true" scores, the final band width
     * and the caller-supplied index.
     */
    private def extension(extParam_w : Int, extParam_regScore : Int,
      extParam_rightQlen : Int, extParam_rightQoffset : Int, extParam_rightQs : Array[Byte],
      extParam_leftQlen : Int, extParam_leftQoffset : Int, extParam_leftQs : Array[Byte],
      extParam_rightRlen : Int, extParam_rightRoffset : Int, extParam_rightRs : Array[Byte],
      extParam_idx : Int, extParam_penClip5 : Int, extParam_penClip3 : Int,
      extParam_qBeg : Int, extParam_leftRlen : Int,
      extParam_leftRoffset : Int, extParam_leftRs : Array[Byte],
      extParam_mat : Array[Byte], matlen : Int, extParam_oDel : Int, extParam_eDel : Int,
      extParam_oIns : Int, extParam_elns : Int, extParam_zdrop : Int,
      extParam_h0 : Int, MAX_BAND_TRY : Int): ExtRet = {
      // Band widths actually used on the left (aw0) and right (aw1) side.
      var aw0 = extParam_w
      var aw1 = extParam_w
      // Per-call outputs of SWExtend: query end, target end, to-query-end target
      // end, to-query-end score and max offset from the diagonal.
      var qle = -1
      var tle = -1
      var gtle = -1
      var gscore = -1
      var maxoff = -1
      var i = 0
      var isBreak = false
      // Score from the previous band attempt, used for the convergence check.
      var prev: Int = -1
      var regScore: Int = extParam_regScore
      // NOTE(review): never reassigned after construction; could be a val.
      var extRet = new ExtRet
      extRet.qBeg = 0
      extRet.rBeg = 0
      extRet.qEnd = extParam_rightQlen
      extRet.rEnd = 0
      extRet.trueScore = extParam_regScore
      // ---- left extension (toward the 5' end) ----
      if (extParam_leftQlen > 0) {
        while(i < MAX_BAND_TRY && !isBreak) {
          prev = regScore
          // Double the band width on each retry.
          aw0 = extParam_w << i
          val results = SWExtend(extParam_leftQlen, extParam_leftQoffset, extParam_leftQs,
            extParam_leftRlen, extParam_leftRoffset, extParam_leftRs, 5, extParam_mat, matlen,
            extParam_oDel, extParam_eDel, extParam_oIns, extParam_elns,
            aw0, extParam_penClip5, extParam_zdrop, extParam_h0)
          regScore = results(0)
          qle = results(1)
          tle = results(2)
          gtle = results(3)
          gscore = results(4)
          maxoff = results(5)
          // Stop retrying once the score stopped improving, or the optimum
          // never approached the band boundary (widening cannot help).
          if (regScore == prev || ( maxoff < (aw0 >> 1) + (aw0 >> 2) ) ) isBreak = true
          i += 1
        }
        extRet.score = regScore
        // check whether we prefer to reach the end of the query
        // local extension
        if(gscore <= 0 || gscore <= (regScore - extParam_penClip5)) {
          // Clipped local alignment scores better than paying the clip penalty.
          extRet.qBeg = extParam_qBeg - qle
          extRet.rBeg = -tle
          extRet.trueScore = regScore
        } else {
          // Extension reaches the query start: keep the to-end coordinates.
          extRet.qBeg = 0
          extRet.rBeg = -gtle
          extRet.trueScore = gscore
        }
      }
      // ---- right extension (toward the 3' end) ----
      if (extParam_rightQlen > 0) {
        i = 0
        isBreak = false
        // Seed the right extension with the score accumulated so far.
        var sc0 = regScore
        while(i < MAX_BAND_TRY && !isBreak) {
          prev = regScore
          aw1 = extParam_w << i
          val results = SWExtend(extParam_rightQlen, extParam_rightQoffset, extParam_rightQs,
            extParam_rightRlen, extParam_rightRoffset, extParam_rightRs, 5, extParam_mat, matlen,
            extParam_oDel, extParam_eDel, extParam_oIns, extParam_elns,
            aw1, extParam_penClip3, extParam_zdrop, sc0)
          regScore = results(0)
          qle = results(1)
          tle = results(2)
          gtle = results(3)
          gscore = results(4)
          maxoff = results(5)
          if(regScore == prev || ( maxoff < (aw1 >> 1) + (aw1 >> 2) ) ) isBreak = true
          i += 1
        }
        extRet.score = regScore
        // check whether we prefer to reach the end of the query
        // local extension
        if(gscore <= 0 || gscore <= (regScore - extParam_penClip3)) {
          extRet.qEnd = qle
          extRet.rEnd = tle
          // Only the score gained by this side is added to the true score.
          extRet.trueScore += regScore - sc0
        }
        else {
          extRet.qEnd = extParam_rightQlen
          extRet.rEnd = gtle
          extRet.trueScore += gscore - sc0
        }
      }
      // Report the wider of the two band widths actually used.
      if (aw0 > aw1) extRet.width = aw0
      else extRet.width = aw1
      extRet.idx = extParam_idx
      extRet
    }
}
}
}
/**
 * Mutable (E, H) cell of the banded Smith-Waterman dynamic-programming row:
 * `e` holds the gap score E(i,j) and `h` the alignment score H(i,j).
 */
class EHType(e_i: Int, h_i: Int) {
  var e = e_i // current E score
  var h = h_i // current H score
}
/**
 * Mutable result record for a seed extension. Every field starts at -1,
 * meaning "not yet set"; the extension routine fills them in.
 */
class ExtRet() {
  /** Query begin/end, best local score, true score, band width and index. */
  var qBeg, qEnd, score, trueScore, width, idx: Int = -1
  /** Reference begin/end coordinates. */
  var rBeg, rEnd: Long = -1
}
| agrippa/spark-swat | swat/src/test/scala/org/apache/spark/rdd/cl/tests/ExtensionTest.scala | Scala | bsd-3-clause | 14,112 |
package com.twitter.scalding
import cascading.flow.FlowProcess
import cascading.stats.CascadingStats
import java.util.{Collections, WeakHashMap}
import org.slf4j.{Logger, LoggerFactory}
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.ref.WeakReference
/**
 * A named custom counter that can be incremented from running job code.
 * The implicit UniqueID ties this Stat to its job; only the extracted id
 * string travels with the task (the container itself is @transient), and the
 * live FlowProcess is looked up lazily through RuntimeStats on first use.
 */
case class Stat(name: String, group: String = Stats.ScaldingGroup)(@transient implicit val uniqueIdCont: UniqueID) {
  @transient private lazy val logger: Logger = LoggerFactory.getLogger(this.getClass)
  // Captured eagerly so serialization only needs the id, not uniqueIdCont.
  val uniqueId = uniqueIdCont.get
  // Resolved lazily, after RuntimeStats.addFlowProcess has registered the process.
  lazy val flowProcess: FlowProcess[_] = RuntimeStats.getFlowProcessForUniqueId(uniqueId)
  /** Adds `amount` to this counter. */
  def incBy(amount: Long) = flowProcess.increment(group, name, amount)
  /** Adds one to this counter. */
  def inc = incBy(1L)
}
/**
* Wrapper around a FlowProcess useful, for e.g. incrementing counters.
*/
/**
 * Wrapper around a FlowProcess useful, for e.g. incrementing counters.
 * Keeps a process-wide registry from unique job id to the (weakly held)
 * FlowProcess so counters can reach the live process at runtime.
 */
object RuntimeStats extends java.io.Serializable {
  @transient private lazy val logger: Logger = LoggerFactory.getLogger(this.getClass)

  // Weak references on both sides so finished flows can be garbage collected.
  private val flowMappingStore: mutable.Map[String, WeakReference[FlowProcess[_]]] =
    Collections.synchronizedMap(new WeakHashMap[String, WeakReference[FlowProcess[_]]])

  /**
   * Returns the FlowProcess registered under `uniqueId`, failing loudly if it
   * was never registered or has already been collected.
   */
  def getFlowProcessForUniqueId(uniqueId: String): FlowProcess[_] =
    flowMappingStore
      .get(uniqueId)
      .flatMap(_.get)
      .getOrElse(
        sys.error("Error in job deployment, the FlowProcess for unique id %s isn't available".format(uniqueId)))

  /** Registers `fp` under its unique job id, if one has been assigned. */
  def addFlowProcess(fp: FlowProcess[_]): Unit = {
    Option(fp.getProperty(Job.UNIQUE_JOB_ID)).foreach { idObj =>
      val uniqueId = idObj.asInstanceOf[String]
      logger.debug("Adding flow process id: " + uniqueId)
      flowMappingStore.put(uniqueId, new WeakReference(fp))
    }
  }
}
object Stats {
// This is the group that we assign all custom counters to
val ScaldingGroup = "Scalding Custom"
// When getting a counter value, cascadeStats takes precedence (if set) and
// flowStats is used after that. Returns None if neither is defined.
  /** Returns the value of `counter` within `group` from the implicit stats. */
  def getCounterValue(counter: String, group: String = ScaldingGroup)
    (implicit cascadingStats: CascadingStats): Long =
    cascadingStats.getCounterValue(group, counter)
// Returns a map of all custom counter names and their counts.
def getAllCustomCounters()(implicit cascadingStats: CascadingStats): Map[String, Long] = {
val counts = for {
counter <- cascadingStats.getCountersFor(ScaldingGroup).asScala
value = getCounterValue(counter)
} yield (counter, value)
counts.toMap
}
} | afsalthaj/scalding | scalding-core/src/main/scala/com/twitter/scalding/Stats.scala | Scala | apache-2.0 | 2,627 |
package com.imaginea.activegrid.core.models
import com.imaginea.activegrid.core.utils.ActiveGridUtils
import org.neo4j.graphdb.Node
import org.slf4j.LoggerFactory
/**
* Created by nagulmeeras on 31/10/16.
*/
/**
 * Immutable view of a reserved-instance purchase (instance type, reservation
 * id, zone, tenancy, offering, description and count), persisted as a Neo4j
 * node; `id` is the backing node id once saved.
 */
case class ReservedInstanceDetails(override val id: Option[Long],
                                   instanceType: Option[String],
                                   reservedInstancesId: Option[String],
                                   availabilityZone: Option[String],
                                   tenancy: Option[String],
                                   offeringType: Option[String],
                                   productDescription: Option[String],
                                   count: Option[Int]) extends BaseEntity
object ReservedInstanceDetails {
  val reservedInstanceDetailsLabel = "ReservedInstanceDetails"
  val logger = LoggerFactory.getLogger(getClass)

  /**
   * Rehydrates a ReservedInstanceDetails from the node with the given id.
   * Returns None when the node does not exist or lacks the expected label.
   */
  def fromNeo4jGraph(nodeId: Long): Option[ReservedInstanceDetails] =
    Neo4jRepository.findNodeById(nodeId) match {
      case Some(node) if Neo4jRepository.hasLabel(node, reservedInstanceDetailsLabel) =>
        val props = Neo4jRepository.getProperties(node, "instanceType", "reservedInstancesId", "availabilityZone", "tenancy",
          "offeringType", "productDescription", "count")
        Some(ReservedInstanceDetails(
          Some(nodeId),
          ActiveGridUtils.getValueFromMapAs[String](props, "instanceType"),
          ActiveGridUtils.getValueFromMapAs[String](props, "reservedInstancesId"),
          ActiveGridUtils.getValueFromMapAs[String](props, "availabilityZone"),
          ActiveGridUtils.getValueFromMapAs[String](props, "tenancy"),
          ActiveGridUtils.getValueFromMapAs[String](props, "offeringType"),
          ActiveGridUtils.getValueFromMapAs[String](props, "productDescription"),
          ActiveGridUtils.getValueFromMapAs[Int](props, "count")))
      case _ =>
        None
    }

  implicit class ReservedInstanceDetailsImpl(reservedInstanceDetails: ReservedInstanceDetails)
    extends Neo4jRep[ReservedInstanceDetails] {

    /** Saves `entity` as a labelled node and returns the created/updated node. */
    override def toNeo4jGraph(entity: ReservedInstanceDetails): Node = {
      val props = Map(
        "instanceType" -> entity.instanceType,
        "reservedInstancesId" -> entity.reservedInstancesId,
        "availabilityZone" -> entity.availabilityZone,
        "tenancy" -> entity.tenancy,
        "offeringType" -> entity.offeringType,
        "productDescription" -> entity.productDescription,
        "count" -> entity.count)
      Neo4jRepository.saveEntity[ReservedInstanceDetails](reservedInstanceDetailsLabel, entity.id, props)
    }

    /** Delegates to the companion's loader. */
    override def fromNeo4jGraph(nodeId: Long): Option[ReservedInstanceDetails] =
      ReservedInstanceDetails.fromNeo4jGraph(nodeId)
  }
}
| eklavya/activeGrid | src/main/scala/com/imaginea/activegrid/core/models/ReservedInstanceDetails.scala | Scala | apache-2.0 | 2,834 |
/*
* The MIT License
*
* Copyright (c) 2017 Fulcrum Genomics LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
package com.fulcrumgenomics.str.vcf
import com.fulcrumgenomics.FgBioDef._
import com.fulcrumgenomics.commons.CommonsDef.PathToIntervals
import com.fulcrumgenomics.commons.util.LazyLogging
import com.fulcrumgenomics.str.vcf.StrInterval.StrAllele
import htsjdk.samtools.util.{Interval, IntervalList}
import htsjdk.variant.variantcontext.{Allele, VariantContext}
object StrInterval {
  object StrAllele extends LazyLogging {
    // Sentinel for an uncalled allele; length and count are zero.
    val NoCall: StrAllele = StrAllele(Allele.NO_CALL, 0, 0, 0)
    /**
     * Pairs the variant context's alleles with their observation counts and
     * converts those with at least `minDepth` observations into StrAlleles,
     * sorted by descending count. When `warn` is set, logs a mismatch between
     * the interval list's reference repeat length and the VCF's reference
     * allele length (and asserts the interval-list length is not longer).
     */
    def toCalls(str: StrInterval, ctx: VariantContext, counts: Seq[Int], minDepth: Int = 1, warn: Boolean = true): Seq[StrAllele] = {
      val alleles = ctx.getAlleles.toSeq
      require(alleles.length == counts.length, s"# of alleles '${alleles.length}' and # of counts '${counts.length}' did not match")
      val refAlleleLength = ctx.getReference.length()
      if (warn && str.unitLength * str.refLength != refAlleleLength) {
        logger.warning(s"Mismatch between reference repeat length in the interval list '${str.unitLength * str.refLength}' and vcf '$refAlleleLength' length")
        // NB: this is important for when we trim the allele
        require(str.unitLength * str.refLength < refAlleleLength, "Bug: allele length was longer in the interval list than in the vcf")
      }
      alleles.zip(counts)
        .filter(_._2 >= minDepth) // ignore zero counts
        .map { case (allele, count) =>
          // NB: the VCF's reference allele may not have the same length as the STR "reference" length, so we calculate the #
          // of bases different between the VCF's reference allele and this allele, then add that to the STR "reference"
          // length found in the interval list (unit-length times ref-length).
          val baseDiff = allele.length() - refAlleleLength
          val alleleLength = baseDiff + (str.refLength * str.unitLength)
          StrAllele(allele, alleleLength, count, str.unitLength)
        }.sortBy(-_.count) // most frequently observed alleles first
    }
    /**
     * Builds a StrAllele whose repeat allele is the last `alleleLength` bases
     * of `allele` (leading flank trimmed off a left-aligned allele).
     */
    def apply(allele: Allele, alleleLength: Int, count: Int, unitLength: Int): StrAllele = {
      // Assume the allele is left-aligned, so that we trim any leading bases
      val baseDiff = allele.length() - alleleLength
      require(baseDiff >= 0, "Bug: base difference should be greater than or equal to zero")
      val repeatAllele = Allele.create(allele.getBaseString.substring(baseDiff))
      StrAllele(allele, repeatAllele, count, unitLength)
    }
  }
  /** A single STR allele, including the number of repeats and count.
   *
   * NB: the `allele` member is the original allele from the VCF, and may contain flanking sequence, whereas the
   * `normalizedAllele` is just the repeat sequence
   * */
  case class StrAllele(allele: Allele, repeatAllele: Allele, count: Int, unitLength: Int) {
    // Length in bases of the repeat-only allele; zero for a no-call.
    def alleleLength: Int = {
      if (Allele.NO_CALL == allele) 0 else repeatAllele.length
    }
    // Number of repeat units (possibly fractional); zero for a no-call.
    def repeatLength: Double = {
      if (Allele.NO_CALL == allele) 0 else this.alleleLength / this.unitLength.toDouble
    }
    /** Renders this allele as a genotype string: "0" for no-call, an integer
     * repeat count when whole, otherwise the fractional count to 2 decimals. */
    def toGenotype: String = {
      if (Allele.NO_CALL == allele) {
        "0"
      }
      else if (alleleLength % unitLength == 0) {
        f"${alleleLength / unitLength}%d"
      }
      else {
        f"$repeatLength%.2f"
      }
    }
  }
  /** Reads in the interval list with extra STR info. */
  def loadIntervals(path: PathToIntervals): Iterator[StrInterval] = {
    val intervals = IntervalList.fromFile(path.toFile)
    intervals.map { interval =>
      val strInfo = StrInterval(
        chrom = interval.getContig,
        start = interval.getStart,
        end = interval.getEnd,
        // these will be set below
        unitLength = 0,
        refLength = 0,
        name = ""
      )
      // The interval name encodes "unitLength,refLength,name[,truthCall...]".
      interval.getName.split(",").toList match {
        case unitLength :: refLength :: name :: Nil =>
          strInfo.copy(unitLength=unitLength.toInt, refLength=refLength.toInt, name=name)
        case unitLength :: refLength :: name :: truthCalls =>
          strInfo.copy(unitLength=unitLength.toInt, refLength=refLength.toInt, name=name, truthCalls=truthCalls.map(_.toFloat))
        case _ =>
          throw new IllegalArgumentException(s"Interval name improperly formatted for interval: $interval")
      }
    }
  }
}
/**
 * An STR locus: genomic span plus repeat unit length, reference repeat count,
 * a display name and optional truth calls. Extends htsjdk's Interval so it
 * can be used directly with interval utilities.
 */
case class StrInterval(chrom: String,
                       start: Int,
                       end: Int,
                       unitLength: Int,
                       refLength: Int,
                       name: String,
                       truthCalls: Seq[Float] = Seq.empty)
  extends Interval(chrom, start, end, true, name) {
  // Separator-joined rendering of the case-class fields: Options are
  // flattened, empty Seqs omitted, non-empty Seqs comma-joined.
  override def toString: String = productIterator.flatMap {
    case x: Option[_] => x
    case x: Seq[_] => if (x.isEmpty) None else Some(x.mkString(","))
    case x => Some(x)
  }.mkString("\\t")
  /** Outputs a formatted string with the given set of (called) alleles */
  def toLongString(alleles: Seq[StrAllele]): String = {
    // Each allele renders as "<genotype>:<count>".
    val genotypes = alleles.map { allele =>
      s"${allele.toGenotype}:${allele.count}"
    }.mkString(",")
    s"$this\\t$genotypes"
  }
} | fulcrumgenomics/fgstr | tools/src/main/scala/com/fulcrumgenomics/str/vcf/StrInterval.scala | Scala | mit | 6,223 |
package mesosphere.marathon
package core.deployment.impl
import akka.testkit.{ TestActorRef, TestProbe }
import mesosphere.AkkaUnitTest
import mesosphere.marathon.core.condition.Condition
import mesosphere.marathon.core.condition.Condition.{ Failed, Running }
import mesosphere.marathon.core.event.{ DeploymentStatus, _ }
import mesosphere.marathon.core.health.MesosCommandHealthCheck
import mesosphere.marathon.core.instance.update.InstanceUpdateOperation
import mesosphere.marathon.core.instance.{ Instance, TestInstanceBuilder }
import mesosphere.marathon.core.launcher.impl.LaunchQueueTestHelper
import mesosphere.marathon.core.launchqueue.LaunchQueue
import mesosphere.marathon.core.leadership.AlwaysElectedLeadershipModule
import mesosphere.marathon.core.readiness.ReadinessCheckExecutor
import mesosphere.marathon.core.task.tracker.{ InstanceCreationHandler, InstanceTracker }
import mesosphere.marathon.state.PathId._
import mesosphere.marathon.state.{ AppDefinition, Command, Timestamp }
import mesosphere.marathon.test.MarathonTestHelper
import org.mockito.Mockito.{ spy, when }
import scala.concurrent.Promise
/**
 * Behavioural tests for TaskStartActor: the actor must queue the correct
 * number of instances, react to InstanceChanged / InstanceHealthChanged events
 * from the event stream, complete its promise once the target count is reached
 * (or immediately when nothing needs starting), and reschedule instances that
 * fail while starting.
 */
class TaskStartActorTest extends AkkaUnitTest {
  "TaskStartActor" should {
    // Both an absent queue entry and an all-zero entry mean "nothing launched yet".
    for (
      (counts, description) <- Seq(
        None -> "with no item in queue",
        Some(LaunchQueueTestHelper.zeroCounts) -> "with zero count queue item"
      )
    ) {
      s"Start success $description" in {
        val f = new Fixture
        val promise = Promise[Unit]()
        val app = AppDefinition("/myApp".toPath, instances = 5)
        when(f.launchQueue.get(app.id)).thenReturn(counts)
        val ref = f.startActor(app, app.instances, promise)
        watch(ref)
        verify(f.launchQueue, timeout(3000)).add(app, app.instances)
        for (i <- 0 until app.instances)
          system.eventStream.publish(f.instanceChange(app, Instance.Id.forRunSpec(app.id), Running))
        promise.future.futureValue should be(())
        expectTerminated(ref)
      }
    }
    "Start success with one task left to launch" in {
      val f = new Fixture
      val counts = Some(LaunchQueueTestHelper.zeroCounts.copy(instancesLeftToLaunch = 1, finalInstanceCount = 1))
      val promise = Promise[Unit]()
      val app = AppDefinition("/myApp".toPath, instances = 5)
      when(f.launchQueue.get(app.id)).thenReturn(counts)
      val ref = f.startActor(app, app.instances, promise)
      watch(ref)
      // One instance is already queued, so only instances - 1 are added.
      verify(f.launchQueue, timeout(3000)).add(app, app.instances - 1)
      for (i <- 0 until (app.instances - 1))
        system.eventStream.publish(f.instanceChange(app, Instance.Id(s"task-$i"), Running))
      promise.future.futureValue should be(())
      expectTerminated(ref)
    }
    "Start success with existing task in launch queue" in {
      val f = new Fixture
      val promise = Promise[Unit]()
      val app = AppDefinition("/myApp".toPath, instances = 5)
      when(f.launchQueue.get(app.id)).thenReturn(None)
      // Pre-existing tracked instance also reduces the number to add.
      val instance = TestInstanceBuilder.newBuilder(app.id, version = Timestamp(1024)).addTaskStarting().getInstance()
      f.taskCreationHandler.created(InstanceUpdateOperation.LaunchEphemeral(instance)).futureValue
      val ref = f.startActor(app, app.instances, promise)
      watch(ref)
      verify(f.launchQueue, timeout(3000)).add(app, app.instances - 1)
      for (i <- 0 until (app.instances - 1))
        system.eventStream.publish(f.instanceChange(app, Instance.Id(s"task-$i"), Running))
      promise.future.futureValue should be(())
      expectTerminated(ref)
    }
    "Start success with no instances to start" in {
      val f = new Fixture
      val promise = Promise[Unit]()
      val app = AppDefinition("/myApp".toPath, instances = 0)
      when(f.launchQueue.get(app.id)).thenReturn(None)
      val ref = f.startActor(app, app.instances, promise)
      watch(ref)
      promise.future.futureValue should be(())
      expectTerminated(ref)
    }
    "Start with health checks" in {
      val f = new Fixture
      val promise = Promise[Unit]()
      val app = AppDefinition(
        "/myApp".toPath,
        instances = 5,
        healthChecks = Set(MesosCommandHealthCheck(command = Command("true")))
      )
      when(f.launchQueue.get(app.id)).thenReturn(None)
      val ref = f.startActor(app, app.instances, promise)
      watch(ref)
      verify(f.launchQueue, timeout(3000)).add(app, app.instances)
      // With health checks configured, health events (not running events) count.
      for (i <- 0 until app.instances)
        system.eventStream.publish(f.healthChange(app, Instance.Id(s"task_$i"), healthy = true))
      promise.future.futureValue should be(())
      expectTerminated(ref)
    }
    "Start with health checks with no instances to start" in {
      val f = new Fixture
      val promise = Promise[Unit]()
      val app = AppDefinition(
        "/myApp".toPath,
        instances = 0,
        healthChecks = Set(MesosCommandHealthCheck(command = Command("true")))
      )
      when(f.launchQueue.get(app.id)).thenReturn(None)
      val ref = f.startActor(app, app.instances, promise)
      watch(ref)
      promise.future.futureValue should be(())
      expectTerminated(ref)
    }
    "Task fails to start" in {
      val f = new Fixture
      val promise = Promise[Unit]()
      val app = AppDefinition("/myApp".toPath, instances = 1)
      when(f.launchQueue.get(app.id)).thenReturn(None)
      val ref = f.startActor(app, app.instances, promise)
      watch(ref)
      verify(f.launchQueue, timeout(3000)).add(app, app.instances)
      // A failed instance must be re-added to the launch queue.
      system.eventStream.publish(f.instanceChange(app, Instance.Id.forRunSpec(app.id), Failed))
      verify(f.launchQueue, timeout(3000)).add(app, 1)
      for (i <- 0 until app.instances)
        system.eventStream.publish(f.instanceChange(app, Instance.Id.forRunSpec(app.id), Running))
      promise.future.futureValue should be(())
      expectTerminated(ref)
    }
    "Start success with dying existing task, reschedules, but finishes early" in {
      val f = new Fixture
      val promise = Promise[Unit]()
      val app = AppDefinition("/myApp".toPath, instances = 5)
      when(f.launchQueue.get(app.id)).thenReturn(None)
      val outdatedInstance = TestInstanceBuilder.newBuilder(app.id, version = Timestamp(1024)).addTaskStaged().getInstance()
      val instanceId = outdatedInstance.instanceId
      f.taskCreationHandler.created(InstanceUpdateOperation.LaunchEphemeral(outdatedInstance)).futureValue
      val ref = f.startActor(app, app.instances, promise)
      watch(ref)
      // wait for initial sync
      verify(f.launchQueue, timeout(3000)).get(app.id)
      verify(f.launchQueue, timeout(3000)).add(app, app.instances - 1)
      noMoreInteractions(f.launchQueue)
      reset(f.launchQueue)
      // let existing task die
      when(f.taskTracker.countLaunchedSpecInstancesSync(app.id)).thenReturn(0)
      when(f.launchQueue.get(app.id)).thenReturn(Some(LaunchQueueTestHelper.zeroCounts.copy(instancesLeftToLaunch = 4, finalInstanceCount = 4)))
      // The version does not match the app.version so that it is filtered in StartingBehavior.
      // does that make sense?
      val (_, outdatedTask) = outdatedInstance.tasksMap.head
      system.eventStream.publish(f.instanceChange(app, instanceId, Condition.Error).copy(runSpecVersion = outdatedTask.runSpecVersion))
      // sync will reschedule task
      ref ! StartingBehavior.Sync
      verify(f.launchQueue, timeout(3000)).get(app.id)
      verify(f.launchQueue, timeout(3000)).add(app, 1)
      noMoreInteractions(f.launchQueue)
      reset(f.launchQueue)
      // launch 4 of the tasks
      when(f.launchQueue.get(app.id)).thenReturn(Some(LaunchQueueTestHelper.zeroCounts.copy(instancesLeftToLaunch = app.instances, finalInstanceCount = 4)))
      when(f.taskTracker.countLaunchedSpecInstancesSync(app.id)).thenReturn(4)
      List(0, 1, 2, 3) foreach { i =>
        system.eventStream.publish(f.instanceChange(app, Instance.Id(s"task-$i"), Running))
      }
      // it finished early
      promise.future.futureValue should be(())
      noMoreInteractions(f.launchQueue)
      expectTerminated(ref)
    }
  }
  // Mocks and helpers shared by the tests; a fresh Fixture is built per test.
  class Fixture {
    val scheduler: SchedulerActions = mock[SchedulerActions]
    val launchQueue: LaunchQueue = mock[LaunchQueue]
    val leadershipModule = AlwaysElectedLeadershipModule.forRefFactory(system)
    val taskTrackerModule = MarathonTestHelper.createTaskTrackerModule(leadershipModule)
    val taskTracker: InstanceTracker = spy(taskTrackerModule.instanceTracker)
    val taskCreationHandler: InstanceCreationHandler = taskTrackerModule.instanceCreationHandler
    val deploymentManager = TestProbe()
    val status: DeploymentStatus = mock[DeploymentStatus]
    val readinessCheckExecutor: ReadinessCheckExecutor = mock[ReadinessCheckExecutor]
    // Builds an InstanceChanged event for the given app/instance/condition.
    def instanceChange(app: AppDefinition, id: Instance.Id, condition: Condition): InstanceChanged = {
      val instance: Instance = mock[Instance]
      instance.instanceId returns id
      InstanceChanged(id, app.version, app.id, condition, instance)
    }
    // Builds an InstanceHealthChanged event carrying the given health flag.
    def healthChange(app: AppDefinition, id: Instance.Id, healthy: Boolean): InstanceHealthChanged = {
      InstanceHealthChanged(id, app.version, app.id, Some(healthy))
    }
    // Spawns the actor under test, targeting `scaleTo` instances.
    def startActor(app: AppDefinition, scaleTo: Int, promise: Promise[Unit]): TestActorRef[TaskStartActor] =
      TestActorRef(TaskStartActor.props(
        deploymentManager.ref, status, scheduler, launchQueue, taskTracker, system.eventStream, readinessCheckExecutor,
        app, scaleTo, promise))
  }
}
| natemurthy/marathon | src/test/scala/mesosphere/marathon/core/deployment/impl/TaskStartActorTest.scala | Scala | apache-2.0 | 9,505 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.trees
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
import org.apache.spark.sql.types.{IntegerType, StringType, NullType}
/**
 * Test-only expression with an optional single child; always resolved,
 * always nullable, and evaluates to null.
 */
case class Dummy(optKey: Option[Expression]) extends Expression with CodegenFallback {
  override def children: Seq[Expression] = optKey.toSeq
  override def nullable: Boolean = true
  override def dataType: NullType = NullType
  override lazy val resolved = true
  override def eval(input: InternalRow): Any = null.asInstanceOf[Any]
}
/**
 * Test-only leaf plan whose expressions are nested inside a Seq[Seq[...]],
 * used to exercise transformExpressions over nested sequences.
 */
case class ComplexPlan(exprs: Seq[Seq[Expression]])
  extends org.apache.spark.sql.catalyst.plans.logical.LeafNode {
  override def output: Seq[Attribute] = Nil
}
/**
 * Test-only expression whose children live in a Map's values, used to
 * exercise transform/withNewChildren over map-valued fields.
 */
case class ExpressionInMap(map: Map[String, Expression]) extends Expression with Unevaluable {
  // NOTE(review): `children` order follows map iteration order — stable here
  // for small immutable maps, but worth confirming if the map grows.
  override def children: Seq[Expression] = map.values.toSeq
  override def nullable: Boolean = true
  override def dataType: NullType = NullType
  override lazy val resolved = true
}
/**
 * Unit tests for TreeNode traversal/transformation primitives (transform,
 * transformDown/Up, collect, find, foreachUp, withNewChildren, ...), using
 * small expression trees built from Literal/Add/Multiply/Subtract.
 */
class TreeNodeSuite extends SparkFunSuite {
  test("top node changed") {
    val after = Literal(1) transform { case Literal(1, _) => Literal(2) }
    assert(after === Literal(2))
  }
  test("one child changed") {
    val before = Add(Literal(1), Literal(2))
    val after = before transform { case Literal(2, _) => Literal(1) }
    assert(after === Add(Literal(1), Literal(1)))
  }
  test("no change") {
    val before = Add(Literal(1), Add(Literal(2), Add(Literal(3), Literal(4))))
    val after = before transform { case Literal(5, _) => Literal(1)}
    assert(before === after)
    // Ensure that the objects after are the same objects before the transformation.
    before.map(identity[Expression]).zip(after.map(identity[Expression])).foreach {
      case (b, a) => assert(b eq a)
    }
  }
  test("collect") {
    val tree = Add(Literal(1), Add(Literal(2), Add(Literal(3), Literal(4))))
    val literals = tree collect {case l: Literal => l}
    assert(literals.size === 4)
    (1 to 4).foreach(i => assert(literals contains Literal(i)))
  }
  test("pre-order transform") {
    val actual = new ArrayBuffer[String]()
    // Pre-order: parent is visited before its children.
    val expected = Seq("+", "1", "*", "2", "-", "3", "4")
    val expression = Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4))))
    expression transformDown {
      case b: BinaryOperator => actual.append(b.symbol); b
      case l: Literal => actual.append(l.toString); l
    }
    assert(expected === actual)
  }
  test("post-order transform") {
    val actual = new ArrayBuffer[String]()
    // Post-order: children are visited before their parent.
    val expected = Seq("1", "2", "3", "4", "-", "*", "+")
    val expression = Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4))))
    expression transformUp {
      case b: BinaryOperator => actual.append(b.symbol); b
      case l: Literal => actual.append(l.toString); l
    }
    assert(expected === actual)
  }
  test("transform works on nodes with Option children") {
    val dummy1 = Dummy(Some(Literal.create("1", StringType)))
    val dummy2 = Dummy(None)
    val toZero: PartialFunction[Expression, Expression] = { case Literal(_, _) => Literal(0) }
    var actual = dummy1 transformDown toZero
    assert(actual === Dummy(Some(Literal(0))))
    actual = dummy1 transformUp toZero
    assert(actual === Dummy(Some(Literal(0))))
    actual = dummy2 transform toZero
    assert(actual === Dummy(None))
  }
  test("preserves origin") {
    CurrentOrigin.setPosition(1, 1)
    val add = Add(Literal(1), Literal(1))
    CurrentOrigin.reset()
    // Transformed nodes must keep the origin recorded at construction time.
    val transformed = add transform {
      case Literal(1, _) => Literal(2)
    }
    assert(transformed.origin.line.isDefined)
    assert(transformed.origin.startPosition.isDefined)
  }
  test("foreach up") {
    val actual = new ArrayBuffer[String]()
    val expected = Seq("1", "2", "3", "4", "-", "*", "+")
    val expression = Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4))))
    expression foreachUp {
      case b: BinaryOperator => actual.append(b.symbol);
      case l: Literal => actual.append(l.toString);
    }
    assert(expected === actual)
  }
  test("find") {
    val expression = Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4))))
    // Find the top node.
    var actual: Option[Expression] = expression.find {
      case add: Add => true
      case other => false
    }
    var expected: Option[Expression] =
      Some(Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4)))))
    assert(expected === actual)
    // Find the first children.
    actual = expression.find {
      case Literal(1, IntegerType) => true
      case other => false
    }
    expected = Some(Literal(1))
    assert(expected === actual)
    // Find an internal node (Subtract).
    actual = expression.find {
      case sub: Subtract => true
      case other => false
    }
    expected = Some(Subtract(Literal(3), Literal(4)))
    assert(expected === actual)
    // Find a leaf node.
    actual = expression.find {
      case Literal(3, IntegerType) => true
      case other => false
    }
    expected = Some(Literal(3))
    assert(expected === actual)
    // Find nothing.
    actual = expression.find {
      case Literal(100, IntegerType) => true
      case other => false
    }
    expected = None
    assert(expected === actual)
  }
  test("collectFirst") {
    val expression = Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4))))
    // Collect the top node.
    {
      val actual = expression.collectFirst {
        case add: Add => add
      }
      val expected =
        Some(Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4)))))
      assert(expected === actual)
    }
    // Collect the first children.
    {
      val actual = expression.collectFirst {
        case l @ Literal(1, IntegerType) => l
      }
      val expected = Some(Literal(1))
      assert(expected === actual)
    }
    // Collect an internal node (Subtract).
    {
      val actual = expression.collectFirst {
        case sub: Subtract => sub
      }
      val expected = Some(Subtract(Literal(3), Literal(4)))
      assert(expected === actual)
    }
    // Collect a leaf node.
    {
      val actual = expression.collectFirst {
        case l @ Literal(3, IntegerType) => l
      }
      val expected = Some(Literal(3))
      assert(expected === actual)
    }
    // Collect nothing.
    {
      val actual = expression.collectFirst {
        case l @ Literal(100, IntegerType) => l
      }
      val expected = None
      assert(expected === actual)
    }
  }
  test("transformExpressions on nested expression sequence") {
    val plan = ComplexPlan(Seq(Seq(Literal(1)), Seq(Literal(2))))
    val actual = plan.transformExpressions {
      case Literal(value, _) => Literal(value.toString)
    }
    val expected = ComplexPlan(Seq(Seq(Literal("1")), Seq(Literal("2"))))
    assert(expected === actual)
  }
  test("expressions inside a map") {
    val expression = ExpressionInMap(Map("1" -> Literal(1), "2" -> Literal(2)))
    {
      val actual = expression.transform {
        case Literal(i: Int, _) => Literal(i + 1)
      }
      val expected = ExpressionInMap(Map("1" -> Literal(2), "2" -> Literal(3)))
      assert(actual === expected)
    }
    {
      val actual = expression.withNewChildren(Seq(Literal(2), Literal(3)))
      val expected = ExpressionInMap(Map("1" -> Literal(2), "2" -> Literal(3)))
      assert(actual === expected)
    }
  }
}
| chenc10/Spark-PAF | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala | Scala | apache-2.0 | 8,430 |
import scala.language.experimental.macros
// NOTE(review): this file lives under a "neg" test path — the macro impl
// reference (Impls.foo2, defined in a companion source file not shown here)
// is expected to be untypeable, making compilation fail deliberately.
object Macros {
  def foo(x: Any) = macro Impls.foo2
}
// Entry point that exercises the macro above; part of the same negative
// compilation fixture.
object Test extends App {
  import Macros._
  foo(42)
}
| lrytz/scala | test/files/neg/macro-nontypeablebody/Macros_Test_2.scala | Scala | apache-2.0 | 154 |
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// Licence: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.core
import org.ensime.fixture._
import org.ensime.api._
import org.ensime.util.EnsimeSpec
import scala.collection.mutable.ListBuffer
/**
 * Tests for the structure view: the presentation compiler is asked for a
 * file's member tree and the result is flattened to "(keyword)Outer.inner"
 * strings for easy comparison.
 */
class StructureViewBuilderSpec extends EnsimeSpec
    with IsolatedRichPresentationCompilerFixture
    with RichPresentationCompilerTestUtils
    with ReallyRichPresentationCompilerFixture {

  def original = EnsimeConfigFixture.EmptyTestProject

  /**
   * Compiles `content` as abc.scala and flattens the structure view to a
   * list of "(declaredAs)Dotted.path" strings, depth first.
   */
  def getStructure(
    config: EnsimeConfig,
    cc: RichCompilerControl,
    content: String
  ): List[String] = {
    val result = ListBuffer[String]()
    // Depth-first walk accumulating the dotted path of each member.
    def collect(parent: Option[String], x: StructureViewMember): Unit = {
      val par = parent.map(_ + ".").getOrElse("")
      x match {
        case StructureViewMember(key, name, _, Nil) =>
          result.append(s"($key)${par}$name")
        case StructureViewMember(key, name, _, xs) =>
          result.append(s"($key)${par}$name")
          xs.foreach(collect(Some(s"${par}$name"), _))
      }
    }
    val file = srcFile(config, "abc.scala", contents(content))
    cc.askLoadedTyped(file)
    cc.askStructure(file).foreach(collect(None, _))
    result.toList
  }

  "StructureViewBuilder" should "show top level classes and objects" in {
    withPresCompiler { (config, cc) =>
      val structure = getStructure(
        config, cc, """
          package com.example
          import org.scalatest._
          class Test {
            def fun(u: Int, v: Int) { u + v }
          }
          object Test {
            def apply(x: String) { new Test(x) }
          }
        """
      )
      structure shouldBe List(
        "(class)Test",
        "(def)Test.fun",
        "(object)Test",
        "(def)Test.apply"
      )
    }
  }

  it should "show nested members" in withPresCompiler { (config, cc) =>
    val structure = getStructure(
      config, cc, """
          package com.example
          object Test {
            type TestType = Int
            class Nested {
              def fun(u: Int, v: Int) { u + v }
            }
            object Nested {
              def apply(x: String) { new Nested(x) }
            }
          }
        """
    )
    structure shouldBe List(
      "(object)Test",
      "(type)Test.TestType",
      "(class)Test.Nested",
      "(def)Test.Nested.fun",
      "(object)Test.Nested",
      "(def)Test.Nested.apply"
    )
  }

  it should "skip accessors" in withPresCompiler { (config, cc) =>
    // Constructor-parameter accessors must not appear as members.
    val structure = getStructure(
      config, cc, """
          package com.example
          class Test(val accessor: String)
          class CaseTest(x: String, y: Int)
          object Test {
            class Nested(val accessor: String)
            case class NestedCase(x: String, y:Int)
          }
        """
    )
    structure shouldBe List(
      "(class)Test",
      "(class)CaseTest",
      "(object)Test",
      "(class)Test.Nested",
      "(class)Test.NestedCase"
    )
  }
}
| d1egoaz/ensime-sbt | src/sbt-test/sbt-ensime/ensime-server/core/src/it/scala/org/ensime/core/StructureViewBuilderSpec.scala | Scala | apache-2.0 | 3,093 |
package pl.touk.nussknacker.engine.util.service
import pl.touk.nussknacker.engine.util.metrics.{Histogram, RateMeter}
/**
 * Pairs a rate meter with a histogram so one call records both the throughput
 * and the latency of a finished operation.
 */
case class EspTimer(rateMeter: RateMeter, histogram: Histogram) {

  /**
   * Records one completed invocation: marks the rate meter and stores the
   * elapsed time (in nanoseconds) since `nanoTimeStart` in the histogram.
   */
  def update(nanoTimeStart: Long): Unit = {
    // Capture the elapsed time first so the meter call does not inflate it.
    val elapsedNanos = System.nanoTime() - nanoTimeStart
    rateMeter.mark()
    histogram.update(elapsedNanos)
  }
}
}
object EspTimer {
  // Metric-name suffixes used when registering the two underlying metrics.
  val histogramSuffix = "histogram"
  val instantRateSuffix = "instantRate"
}
| TouK/nussknacker | components-api/src/main/scala/pl/touk/nussknacker/engine/util/service/EspTimer.scala | Scala | apache-2.0 | 437 |
/*
,i::,
:;;;;;;;
;:,,::;.
1ft1;::;1tL
t1;::;1,
:;::; _____ __ ___ __
fCLff ;:: tfLLC / ___/ / |/ /____ _ _____ / /_
CLft11 :,, i1tffLi \__ \ ____ / /|_/ // __ `// ___// __ \
1t1i .;; .1tf ___/ //___// / / // /_/ // /__ / / / /
CLt1i :,: .1tfL. /____/ /_/ /_/ \__,_/ \___//_/ /_/
Lft1,:;: , 1tfL:
;it1i ,,,:::;;;::1tti s_mach.concurrent
.t1i .,::;;; ;1tt Copyright (c) 2017 S-Mach, Inc.
Lft11ii;::;ii1tfL: Author: lance.gatlin@gmail.com
.L1 1tt1ttt,,Li
...1LLLL...
*/
package s_mach.concurrent.util
import scala.concurrent.{Future, ExecutionContext}
import s_mach.concurrent.impl.SequencerImpl
import s_mach.concurrent.DeferredFuture
/**
 * A trait used to guarantee a series of unordered tasks occur sequentially.
 * By associating a sequence number with each task, the sequencer can determine
 * whether to immediately run a task or queue the task for later. When a task
 * has been queued and the sequence number for that task has been reached, the
 * task is removed from the queue and executed. The sequence number is only
 * advanced after the task completes.
 *
 * Note: it is assumed each task has a unique sequence number. A request to
 * execute a task with a sequence number that is less than the current sequence
 * number causes an IllegalArgumentException to be thrown.
 */
trait Sequencer {
  /** @return the next sequence number to be executed */
  def next : Int

  /**
   * Runs `task` when `sequenceNumber` is reached, queueing it until then.
   * @param sequenceNumber position in the sequence at which `task` may run
   * @param task by-name computation, evaluated only once its turn arrives
   * @throws java.lang.IllegalArgumentException if sequenceNumber is less than next
   * @return a Future that completes once the sequence number has been reached
   * and the task has completed
   * */
  def when[X](
    sequenceNumber: Int
  )(task: => Future[X])(implicit ec:ExecutionContext) : DeferredFuture[X]
}
object Sequencer {
  /** Builds the default [[Sequencer]] implementation, starting at `next` (0 unless given). */
  def apply(next: Int = 0): Sequencer = {
    new SequencerImpl(next)
  }
}
| S-Mach/s_mach.concurrent | src/main/scala/s_mach/concurrent/util/Sequencer.scala | Scala | mit | 2,085 |
package dmtest.writeboost
import dmtest._
import dmtest.stack._
// Regression test for issue #144: re-creating a writeboost device after the log has
// rotated with superblock recording enabled must not crash the kernel.
class REPRO_144 extends DMTestSuite {
  test("log rotated and superblock record is enabled") {
    slowDevice(Sector.M(128)) { backing =>
      fastDevice(Sector.M(32)) { caching =>
        Writeboost.sweepCaches(caching)
        Writeboost.Table(backing, caching).create { s =>
          // Write 64MB — larger than the 32MB cache, so the log wraps around.
          s.bdev.write(Sector(0), DataBuffer.random(Sector.M(64).toB.toInt))
          s.dropTransient()
          s.dropCaches()
          // After the drops, everything flushed should also be written back.
          assert(s.status.lastFlushedId === s.status.lastWritebackId)
          // Enable superblock recording with a 1-second interval.
          s.dm.message("update_sb_record_interval 1")
          Thread.sleep(5000) // wait for updating the sb record
        }
        // this should not cause kernel panic
        Writeboost.Table(backing, caching).create { s =>
        }
      }
    }
  }
}
| akiradeveloper/dmtest | src/test/scala/dmtest/writeboost/REPRO_144.scala | Scala | apache-2.0 | 806 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.util.Collections
import org.apache.kafka.common.{TopicPartition, Uuid}
import scala.jdk.CollectionConverters._
import kafka.api.LeaderAndIsr
import org.apache.kafka.common.requests._
import org.junit.jupiter.api.Assertions._
import kafka.utils.TestUtils
import kafka.cluster.Broker
import kafka.controller.{ControllerChannelManager, ControllerContext, StateChangeLogger}
import kafka.utils.TestUtils._
import kafka.zk.ZooKeeperTestHarness
import org.apache.kafka.common.message.LeaderAndIsrRequestData.LeaderAndIsrPartitionState
import org.apache.kafka.common.metrics.Metrics
import org.apache.kafka.common.network.ListenerName
import org.apache.kafka.common.protocol.{ApiKeys, Errors}
import org.apache.kafka.common.security.auth.SecurityProtocol
import org.apache.kafka.common.utils.Time
import org.junit.jupiter.api.{AfterEach, BeforeEach, Test}
/**
 * Integration tests for ZooKeeper-based leader election: verifies that leadership and
 * the leader epoch move correctly across broker restarts, and that a LeaderAndIsr
 * request carrying a stale controller epoch is rejected by the broker.
 */
class LeaderElectionTest extends ZooKeeperTestHarness {
  val brokerId1 = 0
  val brokerId2 = 1

  var servers: Seq[KafkaServer] = Seq.empty[KafkaServer]

  // Set by staleControllerEpochCallback once a broker answers STALE_CONTROLLER_EPOCH.
  var staleControllerEpochDetected = false

  @BeforeEach
  override def setUp(): Unit = {
    super.setUp()

    val configProps1 = TestUtils.createBrokerConfig(brokerId1, zkConnect, enableControlledShutdown = false)
    val configProps2 = TestUtils.createBrokerConfig(brokerId2, zkConnect, enableControlledShutdown = false)

    // Allow an out-of-ISR replica to become leader so shutdown cycles always elect someone.
    configProps1.put("unclean.leader.election.enable", "true")
    configProps2.put("unclean.leader.election.enable", "true")

    // start both servers
    val server1 = TestUtils.createServer(KafkaConfig.fromProps(configProps1))
    val server2 = TestUtils.createServer(KafkaConfig.fromProps(configProps2))
    servers ++= List(server1, server2)
  }

  @AfterEach
  override def tearDown(): Unit = {
    TestUtils.shutdownServers(servers)
    super.tearDown()
  }

  /**
   * Bounces each broker in turn and checks the partition leader and leader-epoch
   * transitions: 0 -> 1 -> 0, with the epoch advancing by 2 on each failover.
   */
  @Test
  def testLeaderElectionAndEpoch(): Unit = {
    // start 2 brokers
    val topic = "new-topic"
    val partitionId = 0

    TestUtils.waitUntilBrokerMetadataIsPropagated(servers)

    // create topic with 1 partition, 2 replicas, one on each broker
    val leader1 = createTopic(zkClient, topic, partitionReplicaAssignment = Map(0 -> Seq(0, 1)), servers = servers)(0)
    val leaderEpoch1 = zkClient.getEpochForPartition(new TopicPartition(topic, partitionId)).get
    assertTrue(leader1 == 0, "Leader should be broker 0")
    assertEquals(0, leaderEpoch1, "First epoch value should be 0")

    // kill the server hosting the preferred replica/initial leader
    servers.head.shutdown()
    // check if leader moves to the other server
    val leader2 = waitUntilLeaderIsElectedOrChanged(zkClient, topic, partitionId, oldLeaderOpt = Some(leader1))
    val leaderEpoch2 = zkClient.getEpochForPartition(new TopicPartition(topic, partitionId)).get
    assertEquals(1, leader2, "Leader must move to broker 1")
    // new leaderEpoch will be leaderEpoch1+2, one increment during ReplicaStateMachine.startup()-> handleStateChanges
    // for offline replica and one increment during PartitionStateMachine.triggerOnlinePartitionStateChange()
    assertEquals(leaderEpoch1 + 2, leaderEpoch2, "Second epoch value should be %d".format(leaderEpoch1 + 2))

    servers.head.startup()
    //make sure second server joins the ISR
    TestUtils.waitUntilTrue(() => {
      servers.last.metadataCache.getPartitionInfo(topic, partitionId).exists(_.isr.size == 2)
    }, "Inconsistent metadata after second broker startup")

    servers.last.shutdown()

    Thread.sleep(zookeeper.tickTime)

    val leader3 = waitUntilLeaderIsElectedOrChanged(zkClient, topic, partitionId, oldLeaderOpt = Some(leader2))
    val leaderEpoch3 = zkClient.getEpochForPartition(new TopicPartition(topic, partitionId)).get
    assertEquals(0, leader3, "Leader must return to 0")
    // Fixed copy-paste failure message: this asserts the *third* epoch value.
    assertEquals(leaderEpoch2 + 2, leaderEpoch3, "Third epoch value should be %d".format(leaderEpoch2 + 2))
  }

  /**
   * Sends a LeaderAndIsr request from a fake controller whose controller epoch (0) is
   * stale, and verifies the receiving broker reports STALE_CONTROLLER_EPOCH.
   */
  @Test
  def testLeaderElectionWithStaleControllerEpoch(): Unit = {
    // start 2 brokers
    val topic = "new-topic"
    val partitionId = 0

    // create topic with 1 partition, 2 replicas, one on each broker
    val leader1 = createTopic(zkClient, topic, partitionReplicaAssignment = Map(0 -> Seq(0, 1)), servers = servers)(0)
    val leaderEpoch1 = zkClient.getEpochForPartition(new TopicPartition(topic, partitionId)).get
    debug("leader Epoch: " + leaderEpoch1)
    debug("Leader is elected to be: %s".format(leader1))
    // NOTE: this is to avoid transient test failures
    assertTrue(leader1 == 0 || leader1 == 1, "Leader could be broker 0 or broker 1")
    assertEquals(0, leaderEpoch1, "First epoch value should be 0")

    // start another controller
    val controllerId = 2

    val controllerConfig = KafkaConfig.fromProps(TestUtils.createBrokerConfig(controllerId, zkConnect))
    val securityProtocol = SecurityProtocol.PLAINTEXT
    val listenerName = ListenerName.forSecurityProtocol(securityProtocol)
    val brokerAndEpochs = servers.map(s =>
      (new Broker(s.config.brokerId, "localhost", TestUtils.boundPort(s), listenerName, securityProtocol),
        s.kafkaController.brokerEpoch)).toMap
    val nodes = brokerAndEpochs.keys.map(_.node(listenerName))

    val controllerContext = new ControllerContext
    controllerContext.setLiveBrokers(brokerAndEpochs)
    val metrics = new Metrics
    val controllerChannelManager = new ControllerChannelManager(controllerContext, controllerConfig, Time.SYSTEM,
      metrics, new StateChangeLogger(controllerId, inControllerContext = true, None))
    controllerChannelManager.startup()
    try {
      // Deliberately stale: the real controller's epoch has already advanced past 0.
      val staleControllerEpoch = 0
      val partitionStates = Seq(
        new LeaderAndIsrPartitionState()
          .setTopicName(topic)
          .setPartitionIndex(partitionId)
          .setControllerEpoch(2)
          .setLeader(brokerId2)
          .setLeaderEpoch(LeaderAndIsr.initialLeaderEpoch)
          .setIsr(Seq(brokerId1, brokerId2).map(Integer.valueOf).asJava)
          .setZkVersion(LeaderAndIsr.initialZKVersion)
          .setReplicas(Seq(0, 1).map(Integer.valueOf).asJava)
          .setIsNew(false)
      )
      val requestBuilder = new LeaderAndIsrRequest.Builder(
        ApiKeys.LEADER_AND_ISR.latestVersion, controllerId, staleControllerEpoch,
        servers(brokerId2).kafkaController.brokerEpoch, partitionStates.asJava,
        Collections.singletonMap(topic, Uuid.randomUuid()), nodes.toSet.asJava)

      controllerChannelManager.sendRequest(brokerId2, requestBuilder, staleControllerEpochCallback)

      TestUtils.waitUntilTrue(() => staleControllerEpochDetected, "Controller epoch should be stale")
      assertTrue(staleControllerEpochDetected, "Stale controller epoch not detected by the broker")
    } finally {
      controllerChannelManager.shutdown()
      metrics.close()
    }
  }

  // Records whether the broker's LeaderAndIsr response signalled a stale controller epoch.
  private def staleControllerEpochCallback(response: AbstractResponse): Unit = {
    val leaderAndIsrResponse = response.asInstanceOf[LeaderAndIsrResponse]
    staleControllerEpochDetected = leaderAndIsrResponse.error match {
      case Errors.STALE_CONTROLLER_EPOCH => true
      case _ => false
    }
  }
}
| lindong28/kafka | core/src/test/scala/unit/kafka/server/LeaderElectionTest.scala | Scala | apache-2.0 | 7,909 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600j.v3
import org.joda.time.LocalDate
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.ct600.v3.retriever.CT600BoxRetriever
/** CT600J v3 box J40A (a scheme date), validated against the related J35/J35A/J40 boxes. */
case class J40A(value: Option[LocalDate]) extends SchemeDateBox {

  override def validate(boxRetriever: CT600BoxRetriever): Set[CtValidation] = {
    // Retrieve the related boxes in the same order the validation helper expects them.
    val j35 = boxRetriever.retrieveJ35()
    val j35a = boxRetriever.retrieveJ35A()
    val j40 = boxRetriever.retrieveJ40()
    validateSchemeDate(j35, j35a, j40)
  }
}
| scottcutts/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600j/v3/J40A.scala | Scala | apache-2.0 | 1,013 |
package com.teambytes.inflatable.raft
import com.teambytes.inflatable.raft.protocol._
import akka.testkit.{ImplicitSender, TestKit, TestProbe, TestFSMRef}
import org.scalatest._
import akka.actor.ActorSystem
import com.teambytes.inflatable.raft.example.WordConcatRaftActor
import com.teambytes.inflatable.raft.model.{Term, LogIndexMap}
import com.teambytes.inflatable.raft.example.protocol._
// Unit tests for the Leader state of the raft FSM: commit-index advancement and
// configuration queries.
class LeaderTest extends TestKit(ActorSystem("test-system")) with FlatSpecLike with Matchers
  with ImplicitSender
  with BeforeAndAfter with BeforeAndAfterAll {

  behavior of "Leader"

  // FSM under test; underlyingActor is accessed directly in the commit test.
  val leader = TestFSMRef(new SnapshottingWordConcatRaftActor)

  var data: LeaderMeta = _

  before {
    // Fresh leader metadata for every test: term 1, single-node config containing `leader`.
    data = Meta.initial(leader)
      .copy(
        currentTerm = Term(1),
        config = ClusterConfiguration(isLocal = false, leader)
      ).forNewElection.forLeader
  }

  it should "commit an entry once it has been written by the majority of the Followers" in {
    // given: a 3-member cluster where entries 1..2 are on a majority, entry 3 is not
    val probe1, probe2, probe3 = TestProbe().ref
    data = data.copy(config = ClusterConfiguration(isLocal = false, probe1, probe2, probe3))
    leader.setState(Leader, data)
    val actor = leader.underlyingActor

    // matchIndex: two members have replicated up to index 2, one only up to 1.
    val matchIndex = LogIndexMap.initialize(Set.empty, -1)
    matchIndex.put(probe1, 2)
    matchIndex.put(probe2, 2)
    matchIndex.put(probe3, 1)

    var replicatedLog = actor.replicatedLog
    replicatedLog += model.Entry(AppendWord("a"), Term(1), 1)
    replicatedLog += model.Entry(AppendWord("b"), Term(1), 2)
    replicatedLog += model.Entry(AppendWord("c"), Term(1), 3)

    // when
    val committedLog = actor.maybeCommitEntry(data, matchIndex, replicatedLog)

    // then: the original log is untouched; the returned log commits up to the majority index
    actor.replicatedLog.committedIndex should equal (-1)
    committedLog.committedIndex should equal (2)
  }

  it should "reply with it's current configuration when asked to" in {
    // note: this is used when an actor has died and starts again in Init state
    // given
    leader.setState(Leader, data)

    // when
    leader ! RequestConfiguration

    // then
    expectMsg(ChangeConfiguration(StableClusterConfiguration(0, Set(leader), singleNodeCluster = false)))
  }
}
} | grahamar/inflatable | src/test/scala/com/teambytes/inflatable/raft/LeaderTest.scala | Scala | apache-2.0 | 2,151 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.orc
import java.net.URI
import java.util.Properties
import scala.collection.JavaConverters._
import scala.util.control.NonFatal
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.Output
import org.apache.commons.codec.binary.Base64
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileStatus, Path}
import org.apache.hadoop.hive.conf.HiveConf.ConfVars
import org.apache.hadoop.hive.ql.io.orc._
import org.apache.hadoop.hive.ql.io.sarg.SearchArgument
import org.apache.hadoop.hive.serde2.objectinspector
import org.apache.hadoop.hive.serde2.objectinspector.{SettableStructObjectInspector, StructObjectInspector}
import org.apache.hadoop.hive.serde2.typeinfo.{StructTypeInfo, TypeInfoUtils}
import org.apache.hadoop.io.{NullWritable, Writable}
import org.apache.hadoop.mapred.{JobConf, OutputFormat => MapRedOutputFormat, RecordWriter, Reporter}
import org.apache.hadoop.mapreduce._
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat
import org.apache.orc.OrcConf
import org.apache.orc.OrcConf.COMPRESS
import org.apache.spark.TaskContext
import org.apache.spark.internal.Logging
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.execution.datasources._
import org.apache.spark.sql.execution.datasources.orc.{OrcFilters, OrcOptions, OrcUtils}
import org.apache.spark.sql.hive.{HiveInspectors, HiveShim}
import org.apache.spark.sql.sources._
import org.apache.spark.sql.types._
import org.apache.spark.util.SerializableConfiguration
/**
* `FileFormat` for reading ORC files. If this is moved or renamed, please update
* `DataSource`'s backwardCompatibilityMap.
*/
class OrcFileFormat extends FileFormat with DataSourceRegister with Serializable {

  override def shortName(): String = "orc"

  override def toString: String = "ORC"

  // Reads the schema either by merging all files' schemas in parallel (when the
  // mergeSchema option is set) or from the first readable file's footer.
  override def inferSchema(
      sparkSession: SparkSession,
      options: Map[String, String],
      files: Seq[FileStatus]): Option[StructType] = {
    val orcOptions = new OrcOptions(options, sparkSession.sessionState.conf)
    if (orcOptions.mergeSchema) {
      SchemaMergeUtils.mergeSchemasInParallel(
        sparkSession, options, files, OrcFileOperator.readOrcSchemasInParallel)
    } else {
      val ignoreCorruptFiles = sparkSession.sessionState.conf.ignoreCorruptFiles
      OrcFileOperator.readSchema(
        files.map(_.getPath.toString),
        Some(sparkSession.sessionState.newHadoopConfWithOptions(options)),
        ignoreCorruptFiles
      )
    }
  }

  // Configures the Hadoop job for writing ORC (compression codec, output format)
  // and returns the factory that creates per-task OrcOutputWriters.
  override def prepareWrite(
      sparkSession: SparkSession,
      job: Job,
      options: Map[String, String],
      dataSchema: StructType): OutputWriterFactory = {
    val orcOptions = new OrcOptions(options, sparkSession.sessionState.conf)

    val configuration = job.getConfiguration

    configuration.set(COMPRESS.getAttribute, orcOptions.compressionCodec)
    configuration match {
      case conf: JobConf =>
        conf.setOutputFormat(classOf[OrcOutputFormat])
      case conf =>
        // Not a JobConf: fall back to setting the output format by property name.
        conf.setClass(
          "mapred.output.format.class",
          classOf[OrcOutputFormat],
          classOf[MapRedOutputFormat[_, _]])
    }

    new OutputWriterFactory {
      override def newInstance(
          path: String,
          dataSchema: StructType,
          context: TaskAttemptContext): OutputWriter = {
        new OrcOutputWriter(path, dataSchema, context)
      }

      override def getFileExtension(context: TaskAttemptContext): String = {
        // e.g. ".snappy.orc"; empty compression extension for unknown codecs.
        val compressionExtension: String = {
          val name = context.getConfiguration.get(COMPRESS.getAttribute)
          OrcUtils.extensionsForCompressionCodecNames.getOrElse(name, "")
        }

        compressionExtension + ".orc"
      }
    }
  }

  override def isSplitable(
      sparkSession: SparkSession,
      options: Map[String, String],
      path: Path): Boolean = {
    true
  }

  // Builds the per-partition read function. Pushes filters down to ORC (when enabled),
  // prunes columns, and unwraps Hive OrcStructs into Spark InternalRows.
  override def buildReader(
      sparkSession: SparkSession,
      dataSchema: StructType,
      partitionSchema: StructType,
      requiredSchema: StructType,
      filters: Seq[Filter],
      options: Map[String, String],
      hadoopConf: Configuration): (PartitionedFile) => Iterator[InternalRow] = {
    if (sparkSession.sessionState.conf.orcFilterPushDown) {
      // Sets pushed predicates
      OrcFilters.createFilter(requiredSchema, filters).foreach { f =>
        // The SearchArgument must be Kryo-serialized into the Hadoop conf (see toKryo).
        hadoopConf.set(OrcFileFormat.SARG_PUSHDOWN, toKryo(f))
        hadoopConf.setBoolean(ConfVars.HIVEOPTINDEXFILTER.varname, true)
      }
    }

    val broadcastedHadoopConf =
      sparkSession.sparkContext.broadcast(new SerializableConfiguration(hadoopConf))
    val ignoreCorruptFiles = sparkSession.sessionState.conf.ignoreCorruptFiles

    (file: PartitionedFile) => {
      val conf = broadcastedHadoopConf.value.value

      val filePath = new Path(new URI(file.filePath))

      // SPARK-8501: Empty ORC files always have an empty schema stored in their footer. In this
      // case, `OrcFileOperator.readSchema` returns `None`, and we can't read the underlying file
      // using the given physical schema. Instead, we simply return an empty iterator.
      val isEmptyFile =
        OrcFileOperator.readSchema(Seq(filePath.toString), Some(conf), ignoreCorruptFiles).isEmpty
      if (isEmptyFile) {
        Iterator.empty
      } else {
        OrcFileFormat.setRequiredColumns(conf, dataSchema, requiredSchema)

        val orcRecordReader = {
          val job = Job.getInstance(conf)
          FileInputFormat.setInputPaths(job, file.filePath)

          // Custom OrcRecordReader is used to get
          // ObjectInspector during recordReader creation itself and can
          // avoid NameNode call in unwrapOrcStructs per file.
          // Specifically would be helpful for partitioned datasets.
          val orcReader = OrcFile.createReader(filePath, OrcFile.readerOptions(conf))
          new SparkOrcNewRecordReader(orcReader, conf, file.start, file.length)
        }

        val recordsIterator = new RecordReaderIterator[OrcStruct](orcRecordReader)
        // Ensure the reader is closed even when the task finishes early.
        Option(TaskContext.get())
          .foreach(_.addTaskCompletionListener[Unit](_ => recordsIterator.close()))

        // Unwraps `OrcStruct`s to `UnsafeRow`s
        OrcFileFormat.unwrapOrcStructs(
          conf,
          dataSchema,
          requiredSchema,
          Some(orcRecordReader.getObjectInspector.asInstanceOf[StructObjectInspector]),
          recordsIterator)
      }
    }
  }

  // Whitelist of Spark SQL data types this (Hive-based) ORC reader/writer supports.
  override def supportDataType(dataType: DataType): Boolean = dataType match {
    case _: AnsiIntervalType => false

    case _: AtomicType => true

    case st: StructType => st.forall { f => supportDataType(f.dataType) }

    case ArrayType(elementType, _) => supportDataType(elementType)

    case MapType(keyType, valueType, _) =>
      supportDataType(keyType) && supportDataType(valueType)

    case udt: UserDefinedType[_] => supportDataType(udt.sqlType)

    case _ => false
  }

  // HIVE-11253 moved `toKryo` from `SearchArgument` to `storage-api` module.
  // This is copied from Hive 1.2's SearchArgumentImpl.toKryo().
  private def toKryo(sarg: SearchArgument): String = {
    val kryo = new Kryo()
    val out = new Output(4 * 1024, 10 * 1024 * 1024)
    kryo.writeObject(out, sarg)
    out.close()
    Base64.encodeBase64String(out.toBytes)
  }
}
// Serializes Spark InternalRows into Hive Writables for the ORC writer.
// NOTE(review): val initialization order matters here — `structOI` must be
// initialized before `cachedOrcStruct` and `wrappers`, which both read it.
private[orc] class OrcSerializer(dataSchema: StructType, conf: Configuration)
  extends HiveInspectors {

  // Reuses a single cached OrcStruct per serializer instance to avoid per-row allocation.
  def serialize(row: InternalRow): Writable = {
    wrapOrcStruct(cachedOrcStruct, structOI, row)
    serializer.serialize(cachedOrcStruct, structOI)
  }

  // Hive OrcSerde initialized with the column names/types of the data schema.
  private[this] val serializer = {
    val table = new Properties()
    table.setProperty("columns", dataSchema.fieldNames.mkString(","))
    table.setProperty("columns.types", dataSchema.map(_.dataType.catalogString).mkString(":"))

    val serde = new OrcSerde
    serde.initialize(conf, table)
    serde
  }

  // Object inspector converted from the schema of the relation to be serialized.
  val structOI = {
    val typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(dataSchema.catalogString)
    OrcStruct.createObjectInspector(typeInfo.asInstanceOf[StructTypeInfo])
      .asInstanceOf[SettableStructObjectInspector]
  }

  private[this] val cachedOrcStruct = structOI.create().asInstanceOf[OrcStruct]

  // Wrapper functions used to wrap Spark SQL input arguments into Hive specific format
  private[this] val wrappers = dataSchema.zip(structOI.getAllStructFieldRefs().asScala.toSeq).map {
    case (f, i) => wrapperFor(i.getFieldObjectInspector, f.dataType)
  }

  // Copies every field of `row` into `struct`, converting each value with its wrapper.
  private[this] def wrapOrcStruct(
      struct: OrcStruct,
      oi: SettableStructObjectInspector,
      row: InternalRow): Unit = {
    val fieldRefs = oi.getAllStructFieldRefs
    var i = 0
    val size = fieldRefs.size
    while (i < size) {

      oi.setStructFieldData(
        struct,
        fieldRefs.get(i),
        wrappers(i)(row.get(i, dataSchema(i).dataType))
      )
      i += 1
    }
  }
}
// Writes InternalRows to a single ORC file via Hive's OrcOutputFormat.
private[orc] class OrcOutputWriter(
    val path: String,
    dataSchema: StructType,
    context: TaskAttemptContext)
  extends OutputWriter with Logging {

  private[this] val serializer = new OrcSerializer(dataSchema, context.getConfiguration)

  private val recordWriter: RecordWriter[NullWritable, Writable] = {
    new OrcOutputFormat().getRecordWriter(
      new Path(path).getFileSystem(context.getConfiguration),
      context.getConfiguration.asInstanceOf[JobConf],
      path,
      Reporter.NULL
    ).asInstanceOf[RecordWriter[NullWritable, Writable]]
  }

  override def write(row: InternalRow): Unit = {
    recordWriter.write(NullWritable.get(), serializer.serialize(row))
  }

  override def close(): Unit = {
    try {
      // Stamp the Spark version into the file metadata; best-effort only.
      OrcUtils.addSparkVersionMetadata(getOrCreateInternalWriter())
    } catch {
      case NonFatal(e) => log.warn(e.toString, e)
    }
    recordWriter.close(Reporter.NULL)
  }

  // Reflectively obtains Hive's internal ORC Writer (private field of the record writer),
  // creating it for empty tasks so the version metadata is still recorded on close.
  private def getOrCreateInternalWriter(): Writer = {
    val writerField = recordWriter.getClass.getDeclaredField("writer")
    writerField.setAccessible(true)
    var writer = writerField.get(recordWriter).asInstanceOf[Writer]
    if (writer == null) {
      // Hive ORC initializes its private `writer` field at the first write.
      // For empty write task, we need to create it manually to record our meta.
      val options = OrcFile.writerOptions(context.getConfiguration)
      options.inspector(serializer.structOI)
      writer = OrcFile.createWriter(new Path(path), options)
      // set the writer to make it flush meta on close
      writerField.set(recordWriter, writer)
    }
    writer
  }
}
private[orc] object OrcFileFormat extends HiveInspectors with Logging {
  // This constant duplicates `OrcInputFormat.SARG_PUSHDOWN`, which is unfortunately not public.
  private[orc] val SARG_PUSHDOWN = "sarg.pushdown"

  // Converts the Writables produced by the ORC record reader into UnsafeRows,
  // projecting only the columns in `requiredSchema`.
  def unwrapOrcStructs(
      conf: Configuration,
      dataSchema: StructType,
      requiredSchema: StructType,
      maybeStructOI: Option[StructObjectInspector],
      iterator: Iterator[Writable]): Iterator[InternalRow] = {
    val deserializer = new OrcSerde
    // Single mutable row reused across all input records.
    val mutableRow = new SpecificInternalRow(requiredSchema.map(_.dataType))
    val unsafeProjection = UnsafeProjection.create(requiredSchema)
    val forcePositionalEvolution = OrcConf.FORCE_POSITIONAL_EVOLUTION.getBoolean(conf)

    def unwrap(oi: StructObjectInspector): Iterator[InternalRow] = {
      // Resolve each required field to a struct-field ref: by position when forced,
      // otherwise by name, falling back to the "_colN" names used by old Hive writers.
      val (fieldRefs, fieldOrdinals) = requiredSchema.zipWithIndex.map {
        case (field, ordinal) =>
          var ref: objectinspector.StructField = null
          if (forcePositionalEvolution) {
            ref = oi.getAllStructFieldRefs.get(dataSchema.fieldIndex(field.name))
          } else {
            ref = oi.getStructFieldRef(field.name)
            if (ref == null) {
              ref = oi.getStructFieldRef("_col" + dataSchema.fieldIndex(field.name))
            }
          }
          ref -> ordinal
      }.unzip

      val unwrappers = fieldRefs.map(r => if (r == null) null else unwrapperFor(r))

      iterator.map { value =>
        val raw = deserializer.deserialize(value)
        var i = 0
        val length = fieldRefs.length
        while (i < length) {
          val fieldRef = fieldRefs(i)
          // A null field ref means the column is absent in this file; emit null.
          val fieldValue = if (fieldRef == null) null else oi.getStructFieldData(raw, fieldRef)
          if (fieldValue == null) {
            mutableRow.setNullAt(fieldOrdinals(i))
          } else {
            unwrappers(i)(fieldValue, mutableRow, fieldOrdinals(i))
          }
          i += 1
        }
        unsafeProjection(mutableRow)
      }
    }

    maybeStructOI.map(unwrap).getOrElse(Iterator.empty)
  }

  // Registers the requested column ids/names (sorted by id) so Hive prunes the rest.
  def setRequiredColumns(
      conf: Configuration, dataSchema: StructType, requestedSchema: StructType): Unit = {
    val ids = requestedSchema.map(a => dataSchema.fieldIndex(a.name): Integer)
    val (sortedIDs, sortedNames) = ids.zip(requestedSchema.fieldNames).sorted.unzip
    HiveShim.appendReadColumns(conf, sortedIDs, sortedNames)
  }
}
| ueshin/apache-spark | sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFileFormat.scala | Scala | apache-2.0 | 13,868 |
package com.lonelyplanet.akka.http.extensions.fixtures
import spray.json._
// Test fixtures: JSON-API style "resource not found" error payloads, pre-parsed
// into spray-json values for comparison in assertions.
object ErrorResponses {

  // Error body whose id is the literal "undefined" (no request token available).
  val defaultErrorResponse =
    """
      |{
      |  "errors": [
      |    {
      |      "id": "undefined",
      |      "title": "Requested resource not found"
      |    }
      |  ]
      |}
    """.stripMargin.parseJson

  // Same payload but with the given request token as the error id.
  def errorResponseWithToken(token: String) =
    s"""
      |{
      |  "errors": [
      |    {
      |      "id": "$token",
      |      "title": "Requested resource not found"
      |    }
      |  ]
      |}
    """.stripMargin.parseJson
}
| lonelyplanet/akka-http-extensions | src/test/scala/com/lonelyplanet/akka/http/extensions/fixtures/ErrorResponses.scala | Scala | apache-2.0 | 574 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.network.netty
import java.nio.ByteBuffer
import scala.collection.JavaConverters._
import scala.reflect.ClassTag
import org.apache.spark.internal.Logging
import org.apache.spark.network.BlockDataManager
import org.apache.spark.network.buffer.NioManagedBuffer
import org.apache.spark.network.client.{RpcResponseCallback, StreamCallbackWithID, TransportClient}
import org.apache.spark.network.server.{OneForOneStreamManager, RpcHandler, StreamManager}
import org.apache.spark.network.shuffle.protocol._
import org.apache.spark.serializer.Serializer
import org.apache.spark.storage.{BlockId, BlockManager, ShuffleBlockBatchId, ShuffleBlockId, StorageLevel}
/**
* Serves requests to open blocks by simply registering one chunk per block requested.
* Handles opening and uploading arbitrary BlockManager blocks.
*
* Opened blocks are registered with the "one-for-one" strategy, meaning each Transport-layer Chunk
* is equivalent to one Spark-level shuffle block.
*/
class NettyBlockRpcServer(
appId: String,
serializer: Serializer,
blockManager: BlockDataManager)
extends RpcHandler with Logging {
private val streamManager = new OneForOneStreamManager()
  /**
   * Handles one-shot RPCs from block-transfer clients.
   *
   * The raw buffer is decoded into a [[BlockTransferMessage]] and dispatched on its
   * concrete type; every branch replies through `responseContext` (success payload or
   * failure exception) rather than via a return value.
   */
  override def receive(
      client: TransportClient,
      rpcMessage: ByteBuffer,
      responseContext: RpcResponseCallback): Unit = {
    val message = BlockTransferMessage.Decoder.fromByteBuffer(rpcMessage)
    logTrace(s"Received request: $message")
    message match {
      // Legacy fetch protocol: the client ships fully formed block-id strings.
      case openBlocks: OpenBlocks =>
        val blocksNum = openBlocks.blockIds.length
        val blocks = (0 until blocksNum).map { i =>
          val blockId = BlockId.apply(openBlocks.blockIds(i))
          // Batch ids can only be produced by the new protocol (FetchShuffleBlocks below).
          assert(!blockId.isInstanceOf[ShuffleBlockBatchId],
            "Continuous shuffle block fetching only works for new fetch protocol.")
          blockManager.getLocalBlockData(blockId)
        }
        val streamId = streamManager.registerStream(appId, blocks.iterator.asJava,
          client.getChannel)
        logTrace(s"Registered streamId $streamId with $blocksNum buffers")
        // The handle tells the client which stream to open and how many chunks to expect.
        responseContext.onSuccess(new StreamHandle(streamId, blocksNum).toByteBuffer)
      // New fetch protocol: shuffle/map/reduce ids are sent structurally. Per map id,
      // reduceIds holds either individual reduce ids, or (batch mode) a [start, end] pair.
      case fetchShuffleBlocks: FetchShuffleBlocks =>
        val blocks = fetchShuffleBlocks.mapIds.zipWithIndex.flatMap { case (mapId, index) =>
          if (!fetchShuffleBlocks.batchFetchEnabled) {
            fetchShuffleBlocks.reduceIds(index).map { reduceId =>
              blockManager.getLocalBlockData(
                ShuffleBlockId(fetchShuffleBlocks.shuffleId, mapId, reduceId))
            }
          } else {
            // Batch mode: exactly two entries per map id — the start and end reduce id.
            val startAndEndId = fetchShuffleBlocks.reduceIds(index)
            if (startAndEndId.length != 2) {
              throw new IllegalStateException(s"Invalid shuffle fetch request when batch mode " +
                s"is enabled: $fetchShuffleBlocks")
            }
            Array(blockManager.getLocalBlockData(
              ShuffleBlockBatchId(
                fetchShuffleBlocks.shuffleId, mapId, startAndEndId(0), startAndEndId(1))))
          }
        }
        // In batch mode one buffer is produced per map id; otherwise one per reduce id.
        val numBlockIds = if (fetchShuffleBlocks.batchFetchEnabled) {
          fetchShuffleBlocks.mapIds.length
        } else {
          fetchShuffleBlocks.reduceIds.map(_.length).sum
        }
        val streamId = streamManager.registerStream(appId, blocks.iterator.asJava,
          client.getChannel)
        logTrace(s"Registered streamId $streamId with $numBlockIds buffers")
        responseContext.onSuccess(
          new StreamHandle(streamId, numBlockIds).toByteBuffer)
      case uploadBlock: UploadBlock =>
        // StorageLevel and ClassTag are serialized as bytes using our JavaSerializer.
        val (level, classTag) = deserializeMetadata(uploadBlock.metadata)
        val data = new NioManagedBuffer(ByteBuffer.wrap(uploadBlock.blockData))
        val blockId = BlockId(uploadBlock.blockId)
        logDebug(s"Receiving replicated block $blockId with level ${level} " +
          s"from ${client.getSocketAddress}")
        val blockStored = blockManager.putBlockData(blockId, data, level, classTag)
        if (blockStored) {
          // An empty payload is the agreed-upon ack for a successful upload.
          responseContext.onSuccess(ByteBuffer.allocate(0))
        } else {
          val exception = new Exception(s"Upload block for $blockId failed. This mostly happens " +
            s"when there is not sufficient space available to store the block.")
          responseContext.onFailure(exception)
        }
      // Only valid when this server fronts exactly one local executor.
      case getLocalDirs: GetLocalDirsForExecutors =>
        val isIncorrectAppId = getLocalDirs.appId != appId
        val execNum = getLocalDirs.execIds.length
        if (isIncorrectAppId || execNum != 1) {
          // NOTE(review): each interpolated `if` below has no `else`, so the clause that
          // does NOT apply renders as "()" in the message — confirm whether intended.
          val errorMsg = "Invalid GetLocalDirsForExecutors request: " +
            s"${if (isIncorrectAppId) s"incorrect application id: ${getLocalDirs.appId};"}" +
            s"${if (execNum != 1) s"incorrect executor number: $execNum (expected 1);"}"
          responseContext.onFailure(new IllegalStateException(errorMsg))
        } else {
          val expectedExecId = blockManager.asInstanceOf[BlockManager].executorId
          val actualExecId = getLocalDirs.execIds.head
          if (actualExecId != expectedExecId) {
            responseContext.onFailure(new IllegalStateException(
              s"Invalid executor id: $actualExecId, expected $expectedExecId."))
          } else {
            responseContext.onSuccess(new LocalDirsForExecutors(
              Map(actualExecId -> blockManager.getLocalDiskDirs).asJava).toByteBuffer)
          }
        }
    }
  }
override def receiveStream(
client: TransportClient,
messageHeader: ByteBuffer,
responseContext: RpcResponseCallback): StreamCallbackWithID = {
val message =
BlockTransferMessage.Decoder.fromByteBuffer(messageHeader).asInstanceOf[UploadBlockStream]
val (level, classTag) = deserializeMetadata(message.metadata)
val blockId = BlockId(message.blockId)
logDebug(s"Receiving replicated block $blockId with level ${level} as stream " +
s"from ${client.getSocketAddress}")
// This will return immediately, but will setup a callback on streamData which will still
// do all the processing in the netty thread.
blockManager.putBlockDataAsStream(blockId, level, classTag)
}
private def deserializeMetadata[T](metadata: Array[Byte]): (StorageLevel, ClassTag[T]) = {
serializer
.newInstance()
.deserialize(ByteBuffer.wrap(metadata))
.asInstanceOf[(StorageLevel, ClassTag[T])]
}
override def getStreamManager(): StreamManager = streamManager
}
| maropu/spark | core/src/main/scala/org/apache/spark/network/netty/NettyBlockRpcServer.scala | Scala | apache-2.0 | 7,348 |
// scalac: -Xsource:3.0
// Regression test (pos) for implicit resolution with contravariant type classes
// (ticket 2509): the implicit view `f` must resolve for both A and B, choosing
// the most specific instance (za: X[A] for `a`, xb: Z[B] for `b`) under
// -Xsource:3.0. The trailing comment block records the error this guards against.
class A
class B extends A
trait Y {
  def value: String
}
trait X[-T] {
  def y(t: T): Y
}
trait Z[-T] extends X[T]
object XA extends X[A] {
  def y(a: A) = new Y { def value = s"${a.getClass}: AValue" }
}
object ZB extends Z[B] {
  def y(b: B) = new Y { def value = s"${b.getClass}: BValue" }
}
object Test {
  // `f` turns any T with an X[T] instance into a Y, giving `value` on A and B.
  implicit def f[T](t: T)(implicit x: X[T]): Y = x.y(t)
  implicit val za: X[A] = XA
  implicit val xb: Z[B] = ZB
  def main(argv: Array[String]): Unit = {
    val a = new A
    val b = new B
    println("A: " + a.value)
    println("B: " + b.value)
  }
}
/*
t2509-6.scala:31: error: value value is not a member of B
println("B: " + b.value)
^
one error found
*/
| lrytz/scala | test/files/pos/t2509-6.scala | Scala | apache-2.0 | 732 |
package nars.storage
import nars.entity.Task
import nars.main.Parameters
//remove if not needed
import scala.collection.JavaConversions._
/**
 * Buffer for newly created tasks whose content term has not been seen before.
 *
 * Behaves as a plain [[Bag]] of [[Task]] with a fixed capacity and a fixed
 * forgetting rate, both taken from the global [[Parameters]].
 */
class NovelTaskBag(memory: Memory) extends Bag[Task](memory) {

  /** Fixed capacity of this bag (Parameters.TASK_BUFFER_SIZE). */
  protected def capacity(): Int =
    Parameters.TASK_BUFFER_SIZE

  /** Fixed forgetting cycle applied to buffered tasks (Parameters.NEW_TASK_FORGETTING_CYCLE). */
  protected def forgetRate(): Int =
    Parameters.NEW_TASK_FORGETTING_CYCLE
}
| automenta/opennars | nars_scala/src/main/scala/nars/storage/NovelTaskBag.scala | Scala | gpl-2.0 | 598 |
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.zoo.pipeline.api.keras.layers.utils
import com.intel.analytics.bigdl.nn.{Graph, MklInt8Convertible}
import com.intel.analytics.bigdl.nn.Graph.ModuleNode
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity}
import com.intel.analytics.bigdl.nn.keras.KerasLayer
import com.intel.analytics.bigdl.optim.SGD
import com.intel.analytics.bigdl.utils._
import com.intel.analytics.zoo.pipeline.api.keras.optimizers.{Adam, AdamWeightDecay}
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
/** Factory for [[KerasLayerRef]] reflection wrappers. */
object KerasLayerRef {
  def apply[T: ClassTag](instance: KerasLayer[_, _, T]): KerasLayerRef[T] =
    new KerasLayerRef(instance)
}
/** Reflection bridge exposing private members of a wrapped [[KerasLayer]]. */
class KerasLayerRef[T: ClassTag](instance: KerasLayer[_, _, T]) {
  // NOTE(review): the method-level [T: ClassTag] below shadows the class's T, and the
  // ClassTag passed is for this wrapper's runtime class — confirm this matches what
  // the reflected "excludeInvalidLayers" overload expects.
  def excludeInvalidLayers[T: ClassTag]
  (modules : Seq[AbstractModule[_, _, T]]): Unit = {
    KerasUtils.invokeMethod(instance, "excludeInvalidLayers", modules, ClassTag(this.getClass))
  }
  /** Writes the layer's private `_inputShapeValue` field via its generated setter. */
  def setInputShape(value: Shape): Unit = {
    KerasUtils.invokeMethod(instance, "_inputShapeValue_$eq", value)
  }
  /** Writes the layer's private `_outputShapeValue` field via its generated setter. */
  def setOutShape(value: Shape): Unit = {
    KerasUtils.invokeMethod(instance, "_outputShapeValue_$eq", value)
  }
  def checkWithCurrentInputShape(calcInputShape: Shape): Unit = {
    KerasUtils.invokeMethod(instance, "checkWithCurrentInputShape", calcInputShape)
  }
  // NOTE(review): same shadowing/ClassTag caveat as excludeInvalidLayers above.
  def validateInput[T: ClassTag](modules : Seq[AbstractModule[_, _, T]]): Unit = {
    KerasUtils.invokeMethod(instance, "validateInput", modules, ClassTag(this.getClass))
  }
  def checkDuplicate(
      record: mutable.HashSet[Int] = mutable.HashSet()
  ): Unit = {
    KerasUtils.invokeMethod(instance, "checkDuplicate", record)
  }
}
/** Reflection bridge exposing the protected `build` method of an [[AbstractModule]]. */
class AbstractModuleRef[T: ClassTag](instance: AbstractModule[Activity, Activity, T]) {

  /** Invokes `build(inputShape)` reflectively and returns the computed output shape. */
  def build(inputShape: Shape): Shape =
    KerasUtils.invokeMethod(instance, "build", inputShape).asInstanceOf[Shape]
}
/** Reflection bridge exposing the private `outputs` member of a [[Graph]]. */
class GraphRef[T: ClassTag](instance: Graph[T]) {
  def getOutputs(): Seq[ModuleNode[T]] = {
    // Unchecked cast of a reflectively fetched value; fails at the use site if the
    // underlying member is not a Seq[ModuleNode[T]].
    KerasUtils.invokeMethod(instance, "outputs").asInstanceOf[Seq[ModuleNode[T]]]
  }
}
/** Reflective accessors for BigDL's private [[Engine]] configuration. */
object EngineRef {
  def getCoreNumber(): Int = {
    KerasUtils.invokeMethod(Engine, "coreNumber").asInstanceOf[Int]
  }
  def getNodeNumber(): Int = {
    KerasUtils.invokeMethod(Engine, "nodeNumber").asInstanceOf[Int]
  }
  def getDefaultThreadPool(): ThreadPool = {
    KerasUtils.invokeMethod(Engine, "default").asInstanceOf[ThreadPool]
  }
  def getEngineType(): EngineType = {
    KerasUtils.invokeMethod(Engine, "getEngineType").asInstanceOf[EngineType]
  }
  def getOptimizerVersion(): OptimizerVersion = {
    KerasUtils.invokeMethod(Engine, "getOptimizerVersion").asInstanceOf[OptimizerVersion]
  }
  def setOptimizerVersion(optimizerVersion : OptimizerVersion): Unit = {
    // NOTE(review): the cast below is applied to the result of a setter invocation and
    // then discarded — it looks like a leftover from the getter above; harmless but confusing.
    KerasUtils.invokeMethod(Engine, "setOptimizerVersion",
      optimizerVersion).asInstanceOf[OptimizerVersion]
  }
  def setCoreNumber(num: Int): Unit = {
    // Writes Engine's private "physicalCoreNumber" field directly via Java reflection,
    // bypassing any validation a setter might perform.
    val field = Engine.getClass.getDeclaredField("physicalCoreNumber")
    field.setAccessible(true)
    field.setInt(Engine, num)
  }
}
/** Reflective access to the private optimizer `state` table of several optim methods. */
object SGDRef {
  def getstate[T: ClassTag](instance: Adam[T]): Table = {
    KerasUtils.invokeMethod(instance, "state").asInstanceOf[Table]
  }
  def getstate[T: ClassTag](instance: AdamWeightDecay[T]): Table = {
    KerasUtils.invokeMethod(instance, "state").asInstanceOf[Table]
  }
  def getstate[T](instance: SGD[T]): Table = {
    KerasUtils.invokeMethod(instance, "state").asInstanceOf[Table]
  }
}
/** Reflection bridge for [[MklInt8Convertible]] internals. */
object MklInt8ConvertibleRef {

  /** Reflectively reads the private weight-scales buffer of the given instance. */
  def getWeightScalesBuffer(instance: MklInt8Convertible): ArrayBuffer[Array[Float]] =
    KerasUtils.invokeMethod(instance, "weightScalesBuffer").asInstanceOf[ArrayBuffer[Array[Float]]]
}
| intel-analytics/analytics-zoo | zoo/src/main/scala/com/intel/analytics/zoo/pipeline/api/keras/layers/utils/Reflection.scala | Scala | apache-2.0 | 4,350 |
package org.canve.githubCruncher
import scala.concurrent.{Future, Promise}
import scala.util.{Success, Failure}
import scala.concurrent.ExecutionContext.Implicits.global
import scalaj.http._
import com.github.nscala_time.time.Imports._
import scala.concurrent.Await
import play.api.libs.json._
import play.api.libs.functional.syntax._
/*
* A rate limiting api caller
*/
object LimitingApiCaller {
  // Latest known rate-limit state. NOTE(review): a plain var mutated from several
  // future callbacks without synchronization — racy under concurrent callers.
  private var applicableRateState: Future[SearchRateState] = safeRateLimitCheck
  /*
   * get rate limit status without counting as part of quota
   */
  private def safeRateLimitCheck: Future[SearchRateState] =
    performApiCall(Http("https://api.github.com/rate_limit")) map { response =>
      if (!response.isSuccess) throw new Exception(s"github api bad or unexpected response: \n$response")
      new SearchRateState(response)
    }
  /* Class representing the established github rate limit */
  private case class Rate(windowLimit: Int, windowRemaining: Int, windowEnd: DateTime) {
    /*
     * We use this function to always reserve some api quota (10% of the window limit),
     * so that the api can always be manually examined outside the run of the application
     */
    def windowQuotaReserveLeft = windowRemaining > (0.1 * windowLimit)
  }
  /*
   * Class that gets the github rate limit applicable to search queries, both from a github api response,
   * and from a rate limit api query (the latter does not count towards quota)
   */
  private case class SearchRateState(response: HttpResponse[String]) {
    private val asJson: JsObject = Json.parse(response.body).as[JsObject]
    // The rate-limit endpoint nests its data under "resources"; regular query
    // responses instead carry the same data in X-RateLimit-* headers.
    val rate = asJson.keys.contains("resources") match {
      /* handles response coming from a query api call */
      case false =>
        Rate(
          windowLimit = response.headers("X-RateLimit-Limit").head.toInt,
          windowRemaining = response.headers("X-RateLimit-Remaining").head.toInt,
          windowEnd = (response.headers("X-RateLimit-Reset").head.toLong * 1000).toDateTime)
      /* handles response coming from a rate limit api call */
      case true =>
        val jsonSearchObj = (asJson \ "resources" \ "search")
        Rate(
          windowLimit = (jsonSearchObj \ "limit").as[Int],
          windowRemaining = (jsonSearchObj \ "remaining").as[Int],
          windowEnd = ((jsonSearchObj \ "reset").as[Long] * 1000).toDateTime)
    }
  }
  /** Public entry point: rate-limited, non-blocking github search api call. */
  def nonBlockingHttp(apiCall: HttpRequest) = maybeApiCall(apiCall)
  private val slack = 5.seconds // time to linger after the new rate limit window start time
  private def maybeApiCall(apiCall: HttpRequest) = {
    if (!apiCall.url.contains("//api.github.com/search/"))
      throw new Exception("rate limiting for non-search github api is not yet supported here")
    /* pass the api call through, or elegantly reject if rate limit protection is needed */
    applicableRateState.flatMap { s =>
      // Refresh the cached state once the current rate window has rolled over.
      if (DateTime.now > s.rate.windowEnd) applicableRateState = safeRateLimitCheck
      applicableRateState.flatMap { _.rate.windowQuotaReserveLeft match {
        case true => performApiCall(apiCall)
        case false =>
          // Out of reserve: fail fast with a hint of when the next window opens.
          applicableRateState flatMap { currentRateState =>
            Future.failed[HttpResponse[String]](
              RateLimitHint(currentRateState.rate.windowEnd + slack))
          }
      }}
    }
  }
  private def performApiCall(apiCall: HttpRequest): Future[HttpResponse[String]] = {
    val response = Future { apiCall.asString }
    response.onComplete {
      case Success(response) => applicableRateState = Future.successful(SearchRateState(response))
      // NOTE(review): throwing inside onComplete only reports to the ExecutionContext;
      // it does not fail `response` — callers still observe the original failure.
      case Failure(f) => throw new Exception(s"failed completing github api call: \n$f")
    }
    response
  }
}
/** Exception used as a control signal back to the caller: carries the earliest time
  * at which retrying the github api call is expected to succeed. */
case class RateLimitHint(safeRetryTime: DateTime) extends Throwable
| CANVE/github-cruncher | github-cruncher/src/main/scala/org/canve/githubCruncher/RateLimit.scala | Scala | apache-2.0 | 3,928 |
package loader.core
/**
 * Factory for parser builders configured from key/value parameters.
 * Concrete spawners fix the `Parser` type member to the builder they produce.
 */
abstract class ParserSpawner {
  type Parser <: ParserBuilder
  /** Builds a parser from an already-wrapped parameter reader. */
  def apply(pr: utils.ParamReader):Parser
  /** Convenience overload: wraps the raw parameter map into a ParamReader first. */
  def apply(params:scala.collection.Map[String,String]):Parser = apply(new utils.ParamReader(params))
}
/*
* Window.scala
* (Desktop)
*
* Copyright (c) 2013-2021 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Lesser General Public License v2.1+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss.desktop
import de.sciss.desktop.impl.WindowImpl
import java.awt
import java.awt.event.WindowEvent
import java.awt.{Dimension, GraphicsEnvironment, Point, Rectangle, Toolkit}
import javax.swing.event.InternalFrameEvent
import javax.swing.{JFrame, RootPaneContainer, SwingUtilities, WindowConstants}
import scala.annotation.switch
import scala.swing.{Action, Reactions, RootPanel, UIElement}
/** Companion: window styles, close operations, peer-event translation, and helpers. */
object Window {
  sealed trait Style
  /** Regular full-fledged window. */
  case object Regular extends Style
  /** Supplementary window which for example might not need menu bar. */
  case object Auxiliary extends Style
  /** Supplementary window which is a (possibly floating) palette. */
  case object Palette extends Style
  // Resolves the AWT frame behind a Window, falling back to the handler's main window.
  // NOTE(review): returns null when neither component is a JFrame.
  private[desktop] def peer(w: Window): awt.Frame = w.component match {
    case j: JFrame => j
    case _ => w.handler.mainWindow.component match {
      case j: JFrame => j
      case _ => null
    }
  }
  object CloseOperation {
    // Maps a Swing WindowConstants id back to the corresponding case object;
    // throws MatchError for unknown ids.
    def apply(id: Int): CloseOperation = (id: @switch) match {
      case CloseIgnore .id => CloseIgnore
      case CloseExit .id => CloseExit
      case CloseHide .id => CloseHide
      case CloseDispose.id => CloseDispose
    }
  }
  sealed trait CloseOperation { def id: Int }
  case object CloseIgnore extends CloseOperation { final val id = WindowConstants.DO_NOTHING_ON_CLOSE }
  case object CloseExit extends CloseOperation { final val id = WindowConstants.EXIT_ON_CLOSE }
  case object CloseHide extends CloseOperation { final val id = WindowConstants.HIDE_ON_CLOSE }
  case object CloseDispose extends CloseOperation { final val id = WindowConstants.DISPOSE_ON_CLOSE }
  object Event {
    /** Translates an AWT window event into the corresponding desktop Event. */
    def apply(window: Window, peer: WindowEvent): Event = {
      import WindowEvent._
      (peer.getID: @switch) match {
        case WINDOW_ACTIVATED => Activated (window)
        case WINDOW_CLOSED => Closed (window)
        case WINDOW_CLOSING => Closing (window)
        case WINDOW_DEACTIVATED => Deactivated(window)
        case WINDOW_DEICONIFIED => Deiconified(window)
        case WINDOW_ICONIFIED => Iconified (window)
        case WINDOW_OPENED => Opened (window)
      }
    }
    /** Translates a Swing internal-frame event into the corresponding desktop Event. */
    def apply(window: Window, peer: InternalFrameEvent): Event = {
      import InternalFrameEvent._
      (peer.getID: @switch) match {
        case INTERNAL_FRAME_ACTIVATED => Activated (window)
        case INTERNAL_FRAME_CLOSED => Closed (window)
        case INTERNAL_FRAME_CLOSING => Closing (window)
        case INTERNAL_FRAME_DEACTIVATED => Deactivated(window)
        case INTERNAL_FRAME_DEICONIFIED => Deiconified(window)
        case INTERNAL_FRAME_ICONIFIED => Iconified (window)
        case INTERNAL_FRAME_OPENED => Opened (window)
      }
    }
  }
  sealed trait Event extends swing.event.Event {
    def source: Window
  }
  final case class Activated (source: Window) extends Event
  final case class Closed (source: Window) extends Event
  final case class Closing (source: Window) extends Event
  final case class Deactivated(source: Window) extends Event
  final case class Deiconified(source: Window) extends Event
  final case class Iconified (source: Window) extends Event
  final case class Opened (source: Window) extends Event
  /** Finds the Window owning `component`, via a client property on its root pane. */
  def find(component: UIElement): Option[Window] = {
    val rp = SwingUtilities.getAncestorOfClass(classOf[RootPaneContainer], component.peer)
    if (rp == null) return None
    val w = rp.asInstanceOf[RootPaneContainer].getRootPane.getClientProperty(WindowImpl.Property)
    if (w == null) return None
    Some(w.asInstanceOf[Window])
  }
  /** Shows a dialog attached to the window containing `parent`, if one can be found. */
  def showDialog[A](parent: UIElement, source: DialogSource[A]): A = {
    find(parent) match {
      case some @ Some(w) => w.handler.showDialog(some, source)
      case _ => showDialog(source)
    }
  }
  /** Shows an unattached (window-less) dialog. */
  def showDialog[A](source: DialogSource[A]): A = {
    source.show(None)
  }
  // NOTE(review): getMenuShortcutKeyMask is deprecated since Java 10 in favour of
  // getMenuShortcutKeyMaskEx — confirm the target JDK before changing.
  def menuShortcut: Int = Toolkit.getDefaultToolkit.getMenuShortcutKeyMask
  /** Maximum usable window bounds of the local graphics environment. */
  def availableSpace: Rectangle = GraphicsEnvironment.getLocalGraphicsEnvironment.getMaximumWindowBounds
  object Actions {
    /** Action that makes the given window visible and brings it to the front. */
    def show(window: Window): Action = new ShowAction(window)
  }
  private final class ShowAction(window: Window) extends Action(window.title) {
    window.reactions += {
      case Window.Activated(_) =>
      // if( !disposed ) {
      // ((BasicApplication) AbstractApplication.getApplication()).getMenuFactory().setSelectedWindow( ShowWindowAction.this );
      // XXX TODO
      // }
    }
    def apply(): Unit = {
      window.visible = true
      window.front()
    }
    // def dispose(): Unit =
    // w.reactions -= ...
  }
}
/** Interface that unites functionality
  * from inhomogeneous classes such as JFrame, JDialog, JInternalFrame.
  */
trait Window {
  /** The window handler managing this window. */
  def handler: WindowHandler
  def title: String
  var visible: Boolean
  /** The underlying scala-swing root panel (frame, dialog, ...). */
  def component: RootPanel
  def dispose(): Unit
  /** Brings the window to the front. */
  def front(): Unit
  def floating: Boolean
  /** Whether the window is currently active (focused). */
  def active: Boolean
  def resizable: Boolean
  var alwaysOnTop: Boolean
  def size: Dimension
  def bounds: Rectangle
  var location: Point
  /** Event bus for window events (Activated, Closing, ...). */
  def reactions: Reactions
}
package sorm.reflection
import sext._
// or InstanceReflection
/** Couples a concrete value with its [[Reflection]] so that properties and
  * methods can be accessed reflectively by name. */
class Reflected
  ( val instance : Any,
    val reflection : Reflection )
  {
    /** Map of every declared property name to its current value on `instance`. */
    def propertyValues
      : Map[String, Any]
      = reflection.properties.view.unzip._1.zipBy(propertyValue).toMap
    /** Value of the named property, read reflectively from `instance`. */
    def propertyValue
      ( name: String )
      : Any
      = reflection.propertyValue(name, instance.asInstanceOf[AnyRef])
    /** Reflective method invocation — not implemented; always throws NotImplementedError. */
    def methodResult
      ( name: String,
        args: List[Any] = Nil )
      : Any
      = throw new NotImplementedError
  }
| cllu/sorm2 | src/main/scala/sorm/reflection/Reflected.scala | Scala | mit | 523 |
// Copyright 2014 Foursquare Labs Inc. All Rights Reserved.
package io.fsq.twofishes.indexer.scalding
import com.twitter.scalding._
import com.twitter.scalding.typed.TypedSink
import io.fsq.twofishes.gen._
import io.fsq.twofishes.indexer.util.SpindleSequenceFileSource
import java.nio.ByteBuffer
import org.apache.hadoop.io.LongWritable
/**
 * Scalding job that extracts WKB polygon geometries from previously built
 * serving-feature outputs and writes them out as an intermediate
 * (featureId -> IntermediateDataContainer) sequence file.
 */
class BasePolygonIndexBuildIntermediateJob(
  name: String,
  sources: Seq[String],
  args: Args
) extends TwofishesIntermediateJob(name, args) {
  val features = getJobOutputsAsTypedPipe[LongWritable, GeocodeServingFeature](sources).group
  (for {
    (featureId, servingFeature) <- features
    // Keep only features that actually carry a WKB geometry payload.
    if servingFeature.feature.geometryOrThrow.wkbGeometryIsSet
    wkbGeometryByteBuffer = ByteBuffer.wrap(servingFeature.feature.geometryOrThrow.wkbGeometryByteArray)
  } yield {
    (featureId -> IntermediateDataContainer.newBuilder.bytes(wkbGeometryByteBuffer).result)
  }).group
    // Single reducer; `head` keeps one container per feature id if duplicates occur.
    // NOTE(review): one reducer presumably yields a single output file — confirm intent.
    .withReducers(1)
    .head
    .write(
      TypedSink[(LongWritable, IntermediateDataContainer)](
        SpindleSequenceFileSource[LongWritable, IntermediateDataContainer](outputPath)
      )
    )
}
| foursquare/fsqio | src/jvm/io/fsq/twofishes/indexer/scalding/BasePolygonIndexBuildIntermediateJob.scala | Scala | apache-2.0 | 1,123 |
package com.eevolution.context.dictionary.domain.model
import ai.x.play.json.Jsonx
import com.eevolution.context.dictionary.api.{ActiveEnabled, DomainModel, Identifiable, Traceable}
import org.joda.time.DateTime
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: emeris.hernandez@e-evolution.com, http://www.e-evolution.com , http://github.com/e-Evolution
* Created by emeris.hernandez@e-evolution.com , www.e-evolution.com
*/
/**
* Workflow Activity Result Entity
* @param workflowActivityResultId Wf Activity Result ID
* @param tenantId Tenant ID
* @param organizationId Organization ID
* @param isActive Is Active
* @param created Created
* @param createdBy Created By
* @param updated Updated
* @param updatedBy Updated By
* @param workflowActivityId Wf Activity ID
* @param attributeName Attribute Name
* @param attributeValue Attribute Value
* @param description Description
* @param help Help
* @param uuid UUID
*/
case class WorkflowActivityResult(workflowActivityResultId: Int,
                                  tenantId: Int,
                                  organizationId: Int,
                                  isActive: Boolean = true,
                                  created: DateTime = DateTime.now,
                                  createdBy: Int,
                                  updated: DateTime = DateTime.now,
                                  updatedBy: Int,
                                  workflowActivityId: Int,
                                  attributeName: String,
                                  attributeValue: Option[String],
                                  description: Option[String],
                                  help: Option[String],
                                  uuid: String
                                 ) extends DomainModel
  with ActiveEnabled
  with Identifiable
  with Traceable {
  // Self-type members required by the ActiveEnabled/Identifiable/Traceable mixins.
  override type ActiveEnabled = this.type
  override type Identifiable = this.type
  override type Traceable = this.type
  // Primary key plus the backing table and key-column names used by the persistence layer.
  override def Id: Int = workflowActivityResultId
  override val entityName: String = "AD_Wf_ActivityResult"
  override val identifier: String = "AD_Wf_ActivityResult_ID"
}
object WorkflowActivityResult {
  implicit lazy val jsonFormat = Jsonx.formatCaseClass[WorkflowActivityResult]

  /**
   * Factory accepting plain (possibly null) strings for the optional text fields.
   *
   * Fix: `attributeValue`, `description` and `help` were previously ignored and the
   * instance was always created with `None` for all three; they are now wrapped
   * null-safely with `Option(_)` and propagated.
   */
  def create(workflowActivityResultId: Int,
             tenantId: Int,
             organizationId: Int,
             isActive: Boolean,
             created: DateTime,
             createdBy: Int,
             updated: DateTime,
             updatedBy: Int,
             workflowActivityId: Int,
             attributeName: String,
             attributeValue: String,
             description: String,
             help: String,
             uuid: String) = WorkflowActivityResult(workflowActivityResultId, tenantId, organizationId, isActive,
    created, createdBy, updated, updatedBy, workflowActivityId, attributeName, Option(attributeValue),
    Option(description), Option(help), uuid)
}
| adempiere/ADReactiveSystem | dictionary-api/src/main/scala/com/eevolution/context/dictionary/domain/model/WorkflowActivityResult.scala | Scala | gpl-3.0 | 3,647 |
package org.shade.common.file
import java.io.File
import java.nio.file.{Path, Paths}
import org.shade.common.types.{Wrapped, WrappedString}
/** A single path segment: a non-empty string that contains no '/' separator. */
case class Filename(value: String) extends WrappedString with Ordered[Filename] {
  require(value.nonEmpty, "Filename must not be empty")
  require(!value.contains("/"), "Filename must not contain a forward slash")

  /** Plain lexicographic ordering on the underlying string. */
  override def compare(that: Filename) = value compare that.value
}
/** A path guaranteed to be relative; wraps a java.nio [[Path]]. */
case class RelativePath(value: Path) extends Wrapped[Path] {
  require(!value.isAbsolute, s"path is not relative: $value")

  /** Appends another relative path below this one. */
  def ++(other: RelativePath): RelativePath = RelativePath(value.resolve(other.value))

  /** Appends a single filename segment below this one. */
  def ++(other: Filename): RelativePath = RelativePath(value.resolve(other.value))

  lazy val file: File = value.toFile

  /** Last segment of the path, or None when the path is empty. */
  lazy val getName: Option[Filename] =
    Option(value.getFileName).map(_.toString).filter(_.nonEmpty).map(Filename(_))

  /** Last segment of the path; throws IllegalStateException when the path is empty. */
  lazy val name: Filename = getName.getOrElse {
    throw new IllegalStateException("Cannot construct a Filename from an empty path")
  }

  lazy val isEmpty: Boolean = getName.isEmpty
  lazy val nonEmpty: Boolean = !isEmpty

  /** Parent path; the parent of a single segment is the empty path. */
  lazy val parent: RelativePath =
    if (isEmpty) throw new IllegalStateException(s"Path has no parent: $this")
    else Option(value.getParent).fold(RelativePath.empty)(RelativePath(_))
}
object RelativePath {

  /** The empty relative path (""). */
  val empty: RelativePath = RelativePath(Paths.get(""))

  /**
   * Safely wraps a [[Path]], returning Left with an error message when the path is
   * absolute. `Either.cond` evaluates its branches by name, so the (require-guarded)
   * constructor only runs when the path really is relative.
   */
  def of(path: Path): Either[String, RelativePath] =
    Either.cond(!path.isAbsolute, RelativePath(path), s"Expected a relative path, got: $path")
}
/** A path guaranteed to be absolute; wraps a java.nio [[Path]]. */
case class AbsolutePath(value: Path) extends Wrapped[Path] {
  require(value.isAbsolute, s"path is not absolute: $value")

  /** Resolves a relative path below this one. */
  def ++(other: RelativePath): AbsolutePath = AbsolutePath(value.resolve(other.value))

  /** Resolves a single filename segment below this one. */
  def ++(other: Filename): AbsolutePath = AbsolutePath(value.resolve(other.value))

  lazy val file: File = value.toFile

  /** Last segment of the path, or None for the filesystem root. */
  lazy val getName: Option[Filename] =
    Option(value.getFileName).map(_.toString).filter(_.nonEmpty).map(Filename(_))

  /** Last segment of the path; throws IllegalStateException at the root. */
  lazy val name: Filename = getName.getOrElse {
    throw new IllegalStateException("Cannot construct a Filename from the root path")
  }

  lazy val isRoot: Boolean = getName.isEmpty
  lazy val nonRoot: Boolean = !isRoot

  /** Parent path; throws IllegalStateException at the root. */
  lazy val parent: AbsolutePath =
    if (isRoot) throw new IllegalStateException(s"Root path has no parent: $this")
    else Option(value.getParent).fold(AbsolutePath.root)(AbsolutePath(_))

  /** Expresses the given path relative to this one. */
  def relativise(other: Path): RelativePath = RelativePath(value.relativize(other))
  def relativise(other: File): RelativePath = relativise(other.toPath)
  def relativise(other: AbsolutePath): RelativePath = relativise(other.value)
}
object AbsolutePath {

  /** The filesystem root ("/"). */
  val root: AbsolutePath = AbsolutePath(Paths.get("/"))

  /**
   * Safely wraps a [[Path]], returning Left with a descriptive message when the path
   * is relative (with a dedicated message for the empty path). Rewritten from a
   * `Boolean match` to a plain conditional chain; the pointless `s` interpolator on
   * the constant message was dropped.
   */
  def of(path: Path): Either[String, AbsolutePath] =
    if (path.isAbsolute) Right(AbsolutePath(path))
    else if (path.toString.isEmpty) Left("Expected an absolute path, got an empty path")
    else Left(s"Expected an absolute path, got: $path")
}
| jamesshade/common | src/main/scala/org/shade/common/file/paths.scala | Scala | apache-2.0 | 3,364 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.utils.timer
import org.junit.jupiter.api.Assertions._
import java.util.concurrent.atomic._
import org.junit.jupiter.api.Test
class TimerTaskListTest {

  // Minimal TimerTask: only the delay matters for these tests; run() is a no-op.
  private class TestTask(val delayMs: Long) extends TimerTask {
    def run(): Unit = { }
  }

  // Counts entries by traversal; the lists themselves only track the shared counter.
  private def size(list: TimerTaskList): Int = {
    var count = 0
    list.foreach(_ => count += 1)
    count
  }

  @Test
  def testAll(): Unit = {
    // All three lists share one counter, so it reflects the total number of live
    // (non-cancelled) task entries across every list.
    val sharedCounter = new AtomicInteger(0)
    val list1 = new TimerTaskList(sharedCounter)
    val list2 = new TimerTaskList(sharedCounter)
    val list3 = new TimerTaskList(sharedCounter)

    val tasks = (1 to 10).map { i =>
      val task = new TestTask(0L)
      list1.add(new TimerTaskEntry(task, 10L))
      assertEquals(i, sharedCounter.get)
      task
    }
    assertEquals(tasks.size, sharedCounter.get)

    // reinserting the existing tasks shouldn't change the task count
    tasks.take(4).foreach { task =>
      val prevCount = sharedCounter.get
      // new TimerTaskEntry(task) will remove the existing entry from the list
      list2.add(new TimerTaskEntry(task, 10L))
      assertEquals(prevCount, sharedCounter.get)
    }
    assertEquals(10 - 4, size(list1))
    assertEquals(4, size(list2))
    assertEquals(tasks.size, sharedCounter.get)

    // reinserting the existing tasks shouldn't change the task count
    tasks.drop(4).foreach { task =>
      val prevCount = sharedCounter.get
      // new TimerTaskEntry(task) will remove the existing entry from the list
      list3.add(new TimerTaskEntry(task, 10L))
      assertEquals(prevCount, sharedCounter.get)
    }
    assertEquals(0, size(list1))
    assertEquals(4, size(list2))
    assertEquals(6, size(list3))
    assertEquals(tasks.size, sharedCounter.get)

    // cancel tasks in lists: cancellation removes entries from their list but the
    // shared counter is not asserted here (foreach iterates live entries only)
    list1.foreach { _.cancel() }
    assertEquals(0, size(list1))
    assertEquals(4, size(list2))
    assertEquals(6, size(list3))

    list2.foreach { _.cancel() }
    assertEquals(0, size(list1))
    assertEquals(0, size(list2))
    assertEquals(6, size(list3))

    list3.foreach { _.cancel() }
    assertEquals(0, size(list1))
    assertEquals(0, size(list2))
    assertEquals(0, size(list3))
  }
}
| guozhangwang/kafka | core/src/test/scala/unit/kafka/utils/timer/TimerTaskListTest.scala | Scala | apache-2.0 | 2,992 |
// Copyright (c) 2013-2020 Rob Norris and Contributors
// This software is licensed under the MIT License (MIT).
// For more information see LICENSE or https://opensource.org/licenses/MIT
package doobie.util
import java.{util => ju}
import scala.collection.JavaConverters._
import scala.collection.immutable.Map
/** Cross-version compatibility shims. */
package object compat {
  // Alias so shared sources can refer to compat.=:= uniformly — presumably kept for
  // cross-building against newer Scala versions; verify against the 2.13+ twin file.
  type =:=[From, To] = scala.Predef.=:=[From, To]

  /** Snapshot of a java.util.Properties as an immutable Scala Map. */
  def propertiesToScala(p: ju.Properties): Map[String, String] =
    p.asScala.iterator.toMap
}
| tpolecat/doobie | modules/core/src/main/scala-2.13-/doobie/util/compat/package.scala | Scala | mit | 473 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.arrow.tools.stats
import com.beust.jcommander.Parameters
import org.locationtech.geomesa.arrow.data.ArrowDataStore
import org.locationtech.geomesa.arrow.tools.ArrowDataStoreCommand
import org.locationtech.geomesa.arrow.tools.UrlParam
import org.locationtech.geomesa.tools.stats.{StatsTopKCommand, StatsTopKParams}
class ArrowStatsTopKCommand extends StatsTopKCommand[ArrowDataStore] with ArrowDataStoreCommand {
  override val params = new ArrowStatsTopKParams
  override def execute(): Unit = {
    // Force exact (scan-based) computation before delegating — presumably because
    // Arrow stores carry no precomputed statistics; TODO confirm against the
    // estimate path in StatsTopKCommand.
    params.exact = true
    super.execute()
  }
}
/** JCommander parameter bean for the Arrow top-k command: generic top-k params plus [[UrlParam]]. */
@Parameters(commandDescription = "Enumerate the most frequent values in a GeoMesa feature type")
class ArrowStatsTopKParams extends StatsTopKParams with UrlParam
| jahhulbert-ccri/geomesa | geomesa-arrow/geomesa-arrow-tools/src/main/scala/org/locationtech/geomesa/arrow/tools/stats/ArrowStatsTopKCommand.scala | Scala | apache-2.0 | 1,211 |
package controllers.s_employment
import org.specs2.mutable._
import utils.WithBrowser
import controllers.ClaimScenarioFactory
import utils.pageobjects.s_employment._
import utils.pageobjects.PageObjects
class GLastWageIntegrationSpec extends Specification {
  section("integration",models.domain.LastWage.id)
  "Last wage" should {
    // Smoke test: the last-wage page is reachable.
    "be presented" in new WithBrowser with PageObjects{
      val page = GLastWagePage(context)
      page goToThePage()
    }
    // When "have finished job" is answered 'no', the employer-owes-money question
    // must not be rendered on the last-wage page.
    "employer owes you money is not visible when 'have finished job is 'no'" in new WithBrowser with PageObjects {
      val jobDetailsPage = GJobDetailsPage(context)
      jobDetailsPage goToThePage()
      val claim = ClaimScenarioFactory s7EmploymentWhenFinishedJobNo()
      jobDetailsPage fillPageWith claim
      val lastWagePage = jobDetailsPage submitPage()
      context.browser.find("#employerOwesYouMoney").size() mustEqual 0
    }
    // Browser "back" from the submitted page must land on the job-details page.
    "be able to navigate back" in new WithBrowser with PageObjects {
      val page = GJobDetailsPage(context)
      val claim = ClaimScenarioFactory s7Employment()
      page goToThePage()
      page fillPageWith claim
      val submitted = page submitPage()
      val backPage = submitted goBack ()
      backPage must beAnInstanceOf[GJobDetailsPage]
    }
  }
  section("integration",models.domain.LastWage.id)
}
| Department-for-Work-and-Pensions/ClaimCapture | c3/test/controllers/s_employment/GLastWageIntegrationSpec.scala | Scala | mit | 1,318 |
/**
* Sequence transcription and complement. Delegated by the Sequence object.
* Note we take a functional approach (no reference to self).
*/
package bio
import org.biojava.bio.symbol._
import org.biojava.bio.seq._
package DNA {
/**
 * DNA transcription and complement over strongly typed nucleotide lists.
 * Purely functional: each operation maps an input list to a fresh list
 * without touching shared state (no reference to self).
 */
object SequenceTranscription {

  /**
   * Convert DNA to RNA - replacing DNA.T with RNA.U. The 5'-3' order
   * is maintained.
   */
  def toRNA(nucleotides: List[Nucleotide]): List[RNA.Nucleotide] =
    nucleotides map {
      case A => RNA.A
      case C => RNA.C
      case G => RNA.G
      case T => RNA.U
      case other =>
        throw new IllegalArgumentException(
          "non DNA nucleotide " + other + " type " + other.getClass.getName)
    }

  /** Transcribe DNA to RNA; the 5'-3' order is maintained (unlike BioJAVA). */
  def transcribe(nucleotides: List[Nucleotide]): List[RNA.Nucleotide] = toRNA(nucleotides)

  /**
   * Complement nucleotides (A<->T, C<->G) - note: no support for
   * Ambiguous symbols.
   */
  def complement(nucleotides: List[Nucleotide]): List[Nucleotide] =
    nucleotides map {
      case A => T
      case T => A
      case C => G
      case G => C
      case other =>
        throw new IllegalArgumentException(
          "non DNA nucleotide " + other + " type " + other.getClass.getName)
    }
}
/**
 * Transcription and complement over NTSymbol lists, which may contain
 * IUPAC ambiguity codes and gaps in addition to the four plain bases.
 */
object SymbolSequenceTranscription {

  /**
   * Convert DNA to RNA - replacing DNA.T with RNA.U and carrying
   * ambiguity codes and gaps across unchanged. The 5'-3' order is
   * maintained.
   */
  def toRNA(nucleotides: List[NTSymbol]): List[RNA.NTSymbol] =
    nucleotides map {
      case A => RNA.A
      case C => RNA.C
      case G => RNA.G
      case T => RNA.U
      // Ambiguous codes map one-to-one onto their RNA counterparts:
      case M => RNA.M
      case R => RNA.R
      case W => RNA.W
      case S => RNA.S
      case Y => RNA.Y
      case K => RNA.K
      case V => RNA.V
      case H => RNA.H
      case D => RNA.D
      case B => RNA.B
      case N => RNA.N
      case Gap => RNA.Gap
      case other =>
        throw new IllegalArgumentException(
          "non DNA nucleotide " + other + " type " + other.getClass.getName)
    }

  /** Transcribe DNA to RNA; the 5'-3' order is maintained (unlike BioJAVA). */
  def transcribe(nucleotides: List[NTSymbol]): List[RNA.NTSymbol] = toRNA(nucleotides)

  /**
   * Complement the four plain bases - note: no support for Ambiguous
   * symbols.
   */
  def complement(nucleotides: List[NTSymbol]): List[NTSymbol] =
    nucleotides map {
      case A => T
      case T => A
      case C => G
      case G => C
      case other =>
        throw new IllegalArgumentException(
          "non DNA nucleotide " + other + " type " + other.getClass.getName)
    }
}
}
| bioscala/bioscala | src/main/scala/bio/sequence/actions/transcribe.scala | Scala | bsd-2-clause | 2,986 |
package satisfaction
import org.specs2.mutable._
import util.parsing.input.CharSequenceReader
import org.specs2.runner.JUnitRunner
import org.junit.runner.RunWith
@RunWith(classOf[JUnitRunner])
class WitnessSpec extends Specification {
// Exercises Substituter: ${var} discovery, substitution into strings,
// property-file loading, and implicit conversion to java.util.Properties.
"WitnessUtils" should {

  // Intentionally empty placeholder group.
  "checkPath" should {
  }

  // Both ${...} variables must be discovered.
  "find variables in string " in {
    val str = " select * from my_view_${networkAbbr} where dt= ${dateString} "
    val vars = Substituter.findVariablesInString(str)
    vars.foreach(str => println(str))
    vars must contain("networkAbbr")
    vars must contain("dateString")
  }

  // Dotted names are a single variable, not two.
  "find variables with dots in them in string " in {
    val str = " use ${ramblas.db}; set dir=${pinkman.home} "
    val vars = Substituter.findVariablesInString(str)
    vars.foreach(str => println(str))
    vars must contain("ramblas.db")
    vars must contain("pinkman.home")
  }

  // Underscores inside names must not split the variable.
  "handle underscores correctly" in {
    val str = "hdfs://dhdp2/data/ramblas/event_log/${event_type}/${dt}/${hour}"
    val vars = Substituter.findVariablesInString(str)
    vars.foreach(str => println(str))
    vars must contain("dt")
    vars must contain("hour")
    vars must contain("event_type")
  }

  // Happy path: every variable present in the witness is substituted.
  "substitute variables in string " in {
    val tempStr = " select * from my_view_${networkAbbr} where dt= ${dateString}"
    val varMap = Map(("networkAbbr" -> "tw"), ("dateString" -> "20130813"))
    val str = Substituter.substitute(new CharSequenceReader(tempStr), Witness(varMap))
    str.isRight must be
    println(" Substr string is " + str)
    str match {
      case Right(substituted) =>
        substituted mustEqual " select * from my_view_tw where dt= 20130813"
    }
  }

  // A variable whose closing brace is missing is reported as missing.
  "handle missing curly brace " in {
    val tempStr = " select * from my_view_${networkAbbr where dt= ${dateString}"
    val varMap = Map(("networkAbbr" -> "tw"), ("dateString" -> "20130813"))
    val str = Substituter.substitute(new CharSequenceReader(tempStr), Witness(varMap))
    str.isLeft must be
    str match {
      case Left(missingVars) =>
        missingVars.foreach(v => println(" Missing var " + v))
        missingVars.size mustEqual 1
        missingVars must contain("networkAbbr")
    }
  }

  // A bare $name without braces is passed through untouched.
  " handle dollar signs without curly " in {
    val tempStr = " select $nonCurly from my_view_${networkAbbr} where dt= ${dateString}"
    val varMap = Map(("networkAbbr" -> "tw"), ("dateString" -> "20130813"))
    val str = Substituter.substitute(new CharSequenceReader(tempStr), Witness(varMap))
    str.isRight must be
    println(" Substr string is " + str)
    str match {
      case Right(substituted) =>
        substituted mustEqual " select $nonCurly from my_view_tw where dt= 20130813"
    }
  }

  // All variables absent from the witness are collected on the Left.
  "detect unsubstituted variables in string " in {
    val tempStr = " select * from my_view_${networkAbbr} where dt= ${dateString}" +
      " and ks_uid = ${ksUid} and actor_id = ${actorId} "
    val varMap = Map(("networkAbbr" -> "tw"), ("dateString" -> "20130813"))
    val str = Substituter.substitute(new CharSequenceReader(tempStr), Witness(varMap))
    str.isLeft must be
    str match {
      case Left(missingVars) =>
        missingVars.foreach(v => println(" Missing var " + v))
        missingVars.size mustEqual 2
        missingVars must contain("ksUid")
        missingVars must contain("actorId")
    }
  }

  // Plain key=value property files load into a Map.
  "Read Property file" in {
    val goodProps = Substituter.readProperties("modules/core/src/test/resources/goodset.properties")
    goodProps.keySet must contain("myProp")
    goodProps.keySet must contain("theBigProp")
    goodProps.get("myProp").get mustEqual "myVal"
    goodProps.get("theBigProp").get mustEqual "12244"
  }

  // Property files may reference other properties; just check the keys load.
  "Subst vars in Property file" in {
    val goodProps = Substituter.readProperties("modules/core/src/test/resources/subst_var.properties")
    goodProps.keySet must contain("nameNode")
    goodProps.keySet must contain("dataRoot")
    ///goodProps.keySet must contain("myTablePath")
    println(" NameNode is " + goodProps.get("nameNode").get)
    println(" DataRoot is " + goodProps.get("dataRoot").get)
    ///println(" MyTablePath is " + goodProps.get("myTablePath").get)
    //goodProps.get("myProp").get mustEqual "myVal"
    //goodProps.get("theBigProp").get mustEqual "12244"
  }

  // Implicit Witness -> java.util.Properties conversion keeps every entry.
  "implicitly convert to java.util.Properties" in {
    val goodProps = Substituter.readProperties("modules/core/src/test/resources/goodset.properties")
    val subst = Witness( goodProps)
    val javaProps : java.util.Properties = subst
    subst.raw foreach { case (k, v) => {
      val lookup = javaProps.getProperty( k)
      println(s" Lookup for key $k is $lookup ")
      ( lookup must not beNull )
      lookup must be(v)
    }
    }
  }

  /**
  "parse dauDB" in {
  val pageViewQuery = " use ${dauDB}; "
  val props= new java.util.Properties
  props.put("dauDB", "ramblas")
  val parsed = Substituter.substitute( pageViewQuery , props)
  parsed match {
  case Left[missingVariables]
  }
  }
  *
  */
}
// Covers Witness construction, typed variable assignments, and the Goal
// helper functions that rewrite witnesses.
"Witness creation" should {

  // Variables used to build the witness are all retrievable from it.
  "create witness from variables" in {
    val dtVar = new Variable("dt", classOf[Int])
    val netVar = new Variable("network_abbr", classOf[String])
    val ass1 = VariableAssignment[Int](dtVar, 2323)
    val ass2 = VariableAssignment[String](new Variable("network_abbr", classOf[String]), "twitter")
    //val witness = Witness( VariableAssignment("network", "twitter"),
    //VariableAssignment[Int]("service_id", 1) )
    val witness = Witness(ass1, ass2)
    val vars = witness.variables
    println("Witness is " + witness)
    vars.foreach(s => println(" Var is " + s))
    vars must contain(dtVar)
    vars must contain(netVar)
  }

  // The assignment's variable type is inferred from the value's type.
  "create typed Variable assignments" in {
    val intAss = VariableAssignment("IntProperty", 1)
    intAss.variable.clazz mustEqual classOf[Int]
    intAss.variable.name mustEqual "IntProperty"
    intAss.value mustEqual 1
    val boolAss = VariableAssignment("BooleanProperty", true)
    boolAss.variable.clazz mustEqual classOf[Boolean]
    boolAss.variable.name mustEqual "BooleanProperty"
    boolAss.value mustEqual true
  }

  // get() looks up a value by its Variable.
  "Substituion should get and update" in {
    val subst1 = Witness(VariableAssignment("FirstProp", "FirstVal"),
      VariableAssignment("NumericVal", 3.14159)
    )
    val checkLookup = subst1.get(Variable("FirstProp")).get
    println(" Value is " + checkLookup)
    checkLookup mustEqual "FirstVal"
  }

  /// XXX JDB FIX ME
  // qualifyWitness adds one constant assignment to the witness.
  "Qualify witness function" in {
    val subst1 = Witness(VariableAssignment("dt", "20130917"))
    val mapFunc = Goal.qualifyWitness(Variable("tableAlias"), "friends")
    val subst2 = mapFunc(subst1)
    println(" qualified witness is " + subst2)
    subst2.assignments.foreach(println)
    subst2.assignments.size mustEqual 2
  }

  // Composed qualifiers add both assignments.
  "compose Qualify witness function" in {
    val subst1 = Witness(VariableAssignment("dt", "20130917"))
    val mapFunc1 = Goal.qualifyWitness(Variable("tableAlias"), "friends")
    val mapFunc2 = Goal.qualifyWitness(Variable("graphType"), "TWITTER_FRIENDS")
    val mapFunc = mapFunc1 compose mapFunc2
    val subst2 = mapFunc(subst1)
    println(" qualified witness is " + subst2)
    subst2.assignments.foreach{ ass => println("COMPOSED " + ass) }
    subst2.assignments.size mustEqual 3
  }

  // mapVariables renames one variable, keeping its value and the rest.
  "mapVariables function" in {
    val subst1 = new Witness(Set( VariableAssignment("dt" , "20140512"),
      VariableAssignment("hour" , "03" ),
      VariableAssignment("minute" , "43" )))
    val mapVarFunc = Goal.mapVariables( Variable("dt"), Variable("date"))_
    val subst2 = mapVarFunc(subst1)
    println(s" Witness with mapped variable is $subst2")
    subst2.assignments.size mustEqual 3
    subst2.variables must contain( Variable("date") )
    subst2.variables must not contain( Variable("dt") )
    subst2.variables must contain( Variable("hour") )
    subst2.variables must contain( Variable("minute") )
  }

  // toString must be insensitive to assignment ordering and duplicates.
  "toString function" in {
    val subst1 = new Witness(Set(
      VariableAssignment("alpha" , "first"),
      VariableAssignment("dt" , "20140512"),
      VariableAssignment("hour" , "03" ),
      VariableAssignment("minute" , "43" ),
      VariableAssignment("zed" , "last")))
    val toS1 = subst1.toString
    val subst2 = new Witness(Set(
      VariableAssignment("zed" , "last"),
      VariableAssignment("alpha" , "first"),
      VariableAssignment("dt" , "20140512"),
      VariableAssignment("hour" , "03" ),
      VariableAssignment("minute" , "43" ),
      VariableAssignment("alpha" , "first")))
    val toS2 = subst2.toString
    println(s"Witness 1 = $toS1 ")
    println(s"Witness 2 = $toS2 ")
    toS1 mustEqual toS2
  }
}
} | jeromebanks/satisfaction | modules/core/src/test/scala/satisfaction/SubstitutionSpec.scala | Scala | apache-2.0 | 10,705 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactiveTests
import monix.execution.Ack
import monix.execution.Scheduler.Implicits.global
import monix.execution.Ack.Continue
import monix.reactive.Observer
import monix.reactiveTests.SubscriberWhiteBoxAsyncTest.Value
import org.reactivestreams.tck.SubscriberWhiteboxVerification.WhiteboxSubscriberProbe
import org.reactivestreams.tck.SubscriberWhiteboxVerification
import org.reactivestreams.{Subscriber, Subscription}
import org.scalatestplus.testng.TestNGSuiteLike
import scala.concurrent.Future
import scala.util.Random
/**
 * Reactive Streams TCK white-box verification of the `Subscriber` produced
 * by `Observer.toReactiveSubscriber`. The wrapped observer randomly mixes
 * synchronous and asynchronous `Continue` acks to exercise both
 * back-pressure paths.
 */
class SubscriberWhiteBoxAsyncTest extends SubscriberWhiteboxVerification[Value](env())
  with TestNGSuiteLike {

  /** Builds the subscriber under test, forwarding every signal to the probe. */
  def createSubscriber(probe: WhiteboxSubscriberProbe[Value]): Subscriber[Value] = {
    val underlying = Observer.toReactiveSubscriber(new Observer[Value] {
      def onNext(elem: Value): Future[Ack] = {
        probe.registerOnNext(elem)
        // Roughly one in four acks is returned asynchronously.
        if (Random.nextInt() % 4 == 0)
          Future(Continue)
        else
          Continue
      }
      def onError(ex: Throwable): Unit = {
        probe.registerOnError(ex)
      }
      def onComplete(): Unit = {
        probe.registerOnComplete()
      }
    })
    // Wrap `underlying` so the probe also observes subscription events and
    // can drive demand/cancellation through the puppet.
    new Subscriber[Value] {
      def onError(t: Throwable): Unit =
        underlying.onError(t)
      def onSubscribe(s: Subscription): Unit = {
        underlying.onSubscribe(s)
        probe.registerOnSubscribe(new SubscriberWhiteboxVerification.SubscriberPuppet {
          def triggerRequest(elements: Long): Unit = s.request(elements)
          def signalCancel(): Unit = s.cancel()
        })
      }
      def onComplete(): Unit =
        underlying.onComplete()
      def onNext(t: Value): Unit =
        underlying.onNext(t)
    }
  }

  /** Element factory required by the TCK. */
  def createElement(element: Int): Value = {
    Value(element)
  }
}
object SubscriberWhiteBoxAsyncTest {
  /** Element type streamed through the TCK verification. */
  case class Value(nr: Int)
}
| monix/monix | reactiveTests/src/test/scala/monix/reactiveTests/SubscriberWhiteBoxAsyncTest.scala | Scala | apache-2.0 | 2,535 |
/*
* Copyright (c) 2016, Innoave.com
* All rights reserved.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL INNOAVE.COM OR ITS CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.innoave.soda.l10n
trait DefineLocalized { thisdefine =>

  /** Resource bundle name, derived from this definition's type name. */
  val bundleName: BundleName = new BundleName(toString)

  /** Strategy for deriving lookup keys; defaults to using names directly. */
  val keyNamingStrategy: KeyNamingStrategy = NamesAsKeys

  override def toString: String =
    KeyNamingStrategy.simpleTypeName(getClass)

  // Derive the bundle key from the runtime class name of the value.
  private def keyFor[T](value: T): String = {
    val typeName = KeyNamingStrategy.simpleTypeName(value.getClass)
    keyNamingStrategy.keyFor(0, typeName)
  }

  /** Wrap a value as a localized resource bound to this bundle. */
  final protected def localized[T](value: T): Localized[T] =
    new LocalizedValue(value, keyFor(value), bundleName)

  /** Wrap a value plus message arguments as a localized resource. */
  final protected def localized[T, A <: Product](value: T, args: A): LocalizedP[T, A] =
    new LocalizedPValue(value, args, keyFor(value), bundleName)
}
| innoave/soda | l10n/src/main/scala/com/innoave/soda/l10n/DefineLocalized.scala | Scala | apache-2.0 | 1,513 |
import scala.annotation.tailrec
/*
Reference: http://aperiodic.net/phil/scala/s-99
P09 (**) Pack consecutive duplicates of list elements into sublists.
If a list contains repeated elements they should be placed in separate sublists.
Example:
scala> pack(List('a, 'a, 'a, 'a, 'b, 'c, 'c, 'a, 'a, 'd, 'e, 'e, 'e, 'e))
res0: List[List[Symbol]] = List(List('a, 'a, 'a, 'a), List('b), List('c, 'c), List('a, 'a), List('d), List('e, 'e, 'e, 'e))
*/
/**
 * P09: Pack consecutive duplicates of list elements into sublists.
 * Example: pack(List(1,1,2)) == List(List(1,1), List(2)).
 *
 * Uses `span` to split off each leading run in a single traversal;
 * the original `takeWhile` + `dropWhile` pair scanned every run twice.
 */
def pack[T](l: List[T]): List[List[T]] = {
  // Accumulate runs front-to-back, reversing once at the end (tail-recursive).
  @tailrec
  def helper(rest: List[T], acc: List[List[T]]): List[List[T]] = {
    rest match {
      case Nil => acc.reverse
      case h :: _ =>
        val (run, tail) = rest.span(_ == h)
        helper(tail, run :: acc)
    }
  }
  helper(l, Nil)
}
println(pack(List('a, 'a, 'a, 'a, 'b, 'c, 'c, 'a, 'a, 'd, 'e, 'e, 'e, 'e)))
| mcamou/s-99 | src/P09.scala | Scala | apache-2.0 | 793 |
package delta.hazelcast
import java.util.concurrent.ScheduledExecutorService
import scala.concurrent.{ Future, Promise, ExecutionContext }
import scala.concurrent.duration._
import com.hazelcast.core.{ EntryEvent, ExecutionCallback, IMap }
import com.hazelcast.map.listener.{ EntryAddedListener, EntryUpdatedListener, EntryMergedListener }
import delta.read._
import delta.process.UpdateCodec
import scuff.Subscription
/**
 * Factory methods for [[IMapEntryStateReadModel]] covering the common case
 * where the IMap key type equals the read-model id type and the view type
 * equals the stored entry-state type.
 */
object IMapEntryStateReadModel {
  import Predef.{ implicitly => ? }

  // Supplies the default `toView: (ID, ES) => S` conversion when S == ES:
  // it simply returns its second argument (the state itself).
  private implicit def toSecond[B](a: Any, b: B): B = b

  /** Implicit update codec, default read timeout. */
  def apply[ID, S, U](
    imap: IMap[ID, _ <: EntryState[S, _]],
    failureReporter: Throwable => Unit)(
    implicit
    scheduler: ScheduledExecutorService,
    updateCodec: UpdateCodec[S, U]) =
    new IMapEntryStateReadModel[ID, S, ID, S, U](
      imap, failureReporter, DefaultReadTimeout)(
      ?, updateCodec, ?, ?, Option(_))

  /** Explicit update codec, default read timeout. */
  def apply[ID, S, U](
    updateCodec: UpdateCodec[S, U],
    imap: IMap[ID, _ <: EntryState[S, _]],
    failureReporter: Throwable => Unit)(
    implicit
    scheduler: ScheduledExecutorService) =
    new IMapEntryStateReadModel[ID, S, ID, S, U](
      imap, failureReporter, DefaultReadTimeout)(
      ?, updateCodec, ?, ?, Option(_))

  /** Implicit update codec, explicit read timeout. */
  def apply[ID, S, U](
    imap: IMap[ID, _ <: EntryState[S, _]],
    failureReporter: Throwable => Unit,
    defaultReadTimeout: FiniteDuration)(
    implicit
    scheduler: ScheduledExecutorService,
    updateCodec: UpdateCodec[S, U]) =
    new IMapEntryStateReadModel[ID, S, ID, S, U](
      imap, failureReporter, defaultReadTimeout)(
      ?, updateCodec, ?, ?, Option(_))

  /** Explicit update codec and read timeout. */
  def apply[ID, S, U](
    updateCodec: UpdateCodec[S, U],
    imap: IMap[ID, _ <: EntryState[S, _]],
    failureReporter: Throwable => Unit,
    defaultReadTimeout: FiniteDuration)(
    implicit
    scheduler: ScheduledExecutorService) =
    new IMapEntryStateReadModel[ID, S, ID, S, U](
      imap, failureReporter, defaultReadTimeout)(
      ?, updateCodec, ?, ?, Option(_))

  /** No separate update type: updates are full snapshots (U == S). */
  def apply[ID, S](
    imap: IMap[ID, _ <: EntryState[S, _]],
    failureReporter: Throwable => Unit,
    defaultReadTimeout: FiniteDuration = DefaultReadTimeout)(
    implicit
    scheduler: ScheduledExecutorService) =
    new IMapEntryStateReadModel[ID, S, ID, S, S](
      imap, failureReporter, defaultReadTimeout)(
      ?, ?, ?, ?, Option(_))
}
/**
 * Hazelcast-backed read model over an `IMap` of [[delta.hazelcast.EntryState]]
 * values: snapshot reads go through an entry processor on the owning member,
 * and subscriptions are per-key map entry listeners.
 *
 * @tparam ID  external (read model) id type
 * @tparam S   view type exposed to readers
 * @tparam MID IMap key type
 * @tparam ES  entry-state content type stored in the map
 * @tparam U   update type produced/consumed by the codec
 */
class IMapEntryStateReadModel[ID, S, MID, ES, U](
  protected val imap: IMap[MID, _ <: EntryState[ES, _]],
  failureReporter: Throwable => Unit,
  protected val defaultReadTimeout: FiniteDuration = DefaultReadTimeout)(
  implicit
  protected val scheduler: ScheduledExecutorService,
  updateCodec: UpdateCodec[ES, U],
  toMapKey: ID => MID,
  toView: (ID, ES) => S,
  fromView: S => Option[ES])
extends ReadModel[ID, S]
with SubscriptionSupport[ID, S, U] {

  // Read-model name (for diagnostics) is the backing map's name.
  protected def name: String = imap.getName

  protected type StreamId = MID
  protected def StreamId(id: ID) = toMapKey(id)

  // Local alias to disambiguate from the generic type parameters.
  private type EntryState = delta.hazelcast.EntryState[ES, _]

  protected def reportFailure(th: Throwable) =
    failureReporter(th)

  // Folds an incoming update into the previous view (if any) through the
  // codec, converting back to the view type at the end.
  protected def updateState(id: ID, prev: Option[S], update: U): Option[S] = {
    val updated = prev match {
      case None => updateCodec.asSnapshot(None, update)
      case Some(prev) => fromView(prev) match {
        case None => None
        case prev => updateCodec.asSnapshot(prev, update)
      }
    }
    updated.map(toView(id, _))
  }

  // Reads the current snapshot by submitting an entry processor to the key's
  // owner, avoiding transfer of the full entry state over the wire.
  protected def readSnapshot(id: ID)(
    implicit ec: ExecutionContext): Future[Option[Snapshot]] = {
    val promise = Promise[Option[Snapshot]]()
    val callback = new ExecutionCallback[Snapshot] {
      def onResponse(snapshot: Snapshot): Unit = promise success Option(snapshot)
      def onFailure(t: Throwable): Unit = promise failure t
    }
    val reader = new EntryStateSnapshotReader[ES, S](toView(id, _))
    imap.submitToKey(id, reader, callback)
    promise.future
  }

  // Registers a per-key entry listener: added/updated/merged events are
  // decoded into updates and passed to the callback until cancelled.
  protected def subscribe(id: ID)(callback: Update => Unit): Subscription = {
    val entryListener =
      new EntryAddedListener[ID, EntryState]
      with EntryUpdatedListener[ID, EntryState]
      with EntryMergedListener[ID, EntryState] {
        def entryAdded(event: EntryEvent[ID, EntryState]): Unit = onUpsert(None, event.getValue)
        def entryUpdated(event: EntryEvent[ID, EntryState]): Unit = onUpsert(Option(event.getOldValue), event.getValue)
        def entryMerged(event: EntryEvent[ID,EntryState]): Unit = onUpsert(Option(event.getOldValue), event.getValue)
        // Diffs the previous snapshot (if any) against the new one and
        // emits the resulting update; entries without a snapshot yet are
        // ignored.
        private def onUpsert(prevState: Option[EntryState], entryState: EntryState): Unit = {
          if (entryState != null) entryState.snapshot match {
            case null => // Ignore
            case currSnapshot =>
              val prevSnapshot = prevState match {
                case Some(EntryState(snapshot, _, _)) => Option(snapshot)
                case _ => None
              }
              val update = updateCodec.asUpdate(prevSnapshot, currSnapshot, entryState.contentUpdated)
              callback(update)
          }
        }
      }
    val regId = imap.addEntryListener(entryListener, id, /* includeValue */ true)
    new Subscription {
      def cancel() = imap.removeEntryListener(regId)
    }
  }
}
| nilskp/delta | delta-hazelcast/src/main/scala/delta/hazelcast/IMapEntryStateReadModel.scala | Scala | mit | 5,260 |
import _root_.io.gatling.core.scenario.Simulation
import ch.qos.logback.classic.{Level, LoggerContext}
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
/**
* Performance test for the SocialPicture entity.
*/
/**
 * Gatling performance test for the SocialPicture entity: logs in (capturing
 * the CSRF token), lists socialPictures, then repeatedly creates, reads and
 * deletes one entity. Ramps 100 virtual users over one minute.
 */
class SocialPictureGatlingTest extends Simulation {

  val context: LoggerContext = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
  // Log all HTTP requests
  //context.getLogger("io.gatling.http").setLevel(Level.valueOf("TRACE"))
  // Log failed HTTP requests
  //context.getLogger("io.gatling.http").setLevel(Level.valueOf("DEBUG"))

  // Target host; override with -DbaseURL=...
  val baseURL = Option(System.getProperty("baseURL")) getOrElse """http://127.0.0.1:8080"""

  val httpConf = http
    .baseURL(baseURL)
    .inferHtmlResources()
    .acceptHeader("*/*")
    .acceptEncodingHeader("gzip, deflate")
    .acceptLanguageHeader("fr,fr-fr;q=0.8,en-us;q=0.5,en;q=0.3")
    .connection("keep-alive")
    .userAgentHeader("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:33.0) Gecko/20100101 Firefox/33.0")

  val headers_http = Map(
    "Accept" -> """application/json"""
  )

  // Authenticated requests must echo the CSRF token captured at login;
  // "${csrf_token}" is Gatling session-expression syntax, not Scala interpolation.
  val headers_http_authenticated = Map(
    "Accept" -> """application/json""",
    "X-CSRF-TOKEN" -> "${csrf_token}"
  )

  val scn = scenario("Test the SocialPicture entity")
    // Unauthenticated probe: expect 401 and capture the CSRF cookie.
    .exec(http("First unauthenticated request")
    .get("/api/account")
    .headers(headers_http)
    .check(status.is(401))
    .check(headerRegex("Set-Cookie", "CSRF-TOKEN=(.*); [P,p]ath=/").saveAs("csrf_token")))
    .pause(10)
    .exec(http("Authentication")
    .post("/api/authentication")
    .headers(headers_http_authenticated)
    .formParam("j_username", "admin")
    .formParam("j_password", "admin")
    .formParam("remember-me", "true")
    .formParam("submit", "Login"))
    .pause(1)
    // Re-capture the token after login; it rotates on authentication.
    .exec(http("Authenticated request")
    .get("/api/account")
    .headers(headers_http_authenticated)
    .check(status.is(200))
    .check(headerRegex("Set-Cookie", "CSRF-TOKEN=(.*); [P,p]ath=/").saveAs("csrf_token")))
    .pause(10)
    .repeat(2) {
      exec(http("Get all socialPictures")
      .get("/api/socialPictures")
      .headers(headers_http_authenticated)
      .check(status.is(200)))
      .pause(10 seconds, 20 seconds)
      // Create, then read five times, then delete the created entity.
      .exec(http("Create new socialPicture")
      .post("/api/socialPictures")
      .headers(headers_http_authenticated)
      .body(StringBody("""{"id":null, "title":"SAMPLE_TEXT", "src":null, "properties":"SAMPLE_TEXT"}""")).asJSON
      .check(status.is(201))
      .check(headerRegex("Location", "(.*)").saveAs("new_socialPicture_url")))
      .pause(10)
      .repeat(5) {
        exec(http("Get created socialPicture")
        .get("${new_socialPicture_url}")
        .headers(headers_http_authenticated))
        .pause(10)
      }
      .exec(http("Delete created socialPicture")
      .delete("${new_socialPicture_url}")
      .headers(headers_http_authenticated))
      .pause(10)
    }

  val users = scenario("Users").exec(scn)

  setUp(
    users.inject(rampUsers(100) over (1 minutes))
  ).protocols(httpConf)
}
| JuanLSanchez/Chefs | project/src/test/gatling/simulations/SocialPictureGatlingTest.scala | Scala | gpl-2.0 | 3,399 |
/*
* Copyright 2014-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.core.stacklang
/**
 * Stack-language suite for the ReverseRot word: the same expected
 * input/output pairs as before, expressed as explicit tuples.
 */
class ReverseRotSuite extends BaseWordSuite {

  def interpreter: Interpreter = Interpreter(StandardVocabulary.allWords)

  def word: Word = StandardVocabulary.ReverseRot

  // (input program, expected resulting stack)
  def shouldMatch: List[(String, List[Any])] = List(
    ("a", List("a")),
    ("a,b", List("a", "b")),
    ("a,b,c,d,e", List("d", "c", "b", "a", "e"))
  )

  // An empty program must not match.
  def shouldNotMatch: List[String] = List("")
}
| copperlight/atlas | atlas-core/src/test/scala/com/netflix/atlas/core/stacklang/ReverseRotSuite.scala | Scala | apache-2.0 | 1,036 |
package util
/** AST node for a parsed search query. */
sealed trait Expr
/** Boolean connectives over sub-expressions. */
sealed trait Operation extends Expr
case class OR(left: Expr, right: Expr) extends Operation
case class AND(left: Expr, right: Expr) extends Operation
case class NOT(op: Expr) extends Operation
/** Leaf search terms. */
sealed trait Terminal extends Expr
// Bracketed phrase, bare word, and quoted "exact" phrase respectively.
case class PhraseTerm(str: String) extends Terminal
case class WordTerm(str: String) extends Terminal
case class ExactTerm(str: String) extends Terminal
/**
 * Parses a free-text search query into an [[Expr]] AST, normalizes it to
 * negation normal form, and flattens it to disjunctive normal form
 * (a list of conjunction clauses).
 */
object SearchQueryParser{
  import scala.util.{Try, Success, Failure}
  import org.parboiled2._

  // parboiled2 grammar: or-expressions of and-expressions of (possibly
  // negated) atoms; atoms are terms or parenthesized sub-expressions.
  sealed class QueryParser(val input: ParserInput) extends Parser {
    val whiteSpaceChar = CharPredicate(" \\n\\r\\t\\f")
    // Every literal string rule also consumes trailing whitespace.
    implicit def wspStr(s: String): Rule0 = rule{ str(s) ~ zeroOrMore(whiteSpaceChar)}
    def whiteSpace = rule{ zeroOrMore(whiteSpaceChar) }
    def expr = or
    def or = rule{ oneOrMore(and).separatedBy("""|""") ~> (_.reduceLeft(OR)) }
    // Conjunction is either whitespace-separated or '&'-separated.
    def and = rule{ oneOrMore(not).separatedBy(whiteSpace) ~> (_.reduceLeft(AND)) | oneOrMore(not).separatedBy("""&""") ~> (_.reduceLeft(AND))}
    def not: Rule1[Expr] = rule{ optional(neg) ~ atom ~> (((a: Option[String], b: Expr) => if (a.isDefined) NOT(b) else b))}
    def neg = rule{ capture(("!")) ~> (_.toString) }
    def atom: Rule1[Expr] = rule{ ((optional(neg) ~ term ~> (((a: Option[String], t: Expr) => if (a.isDefined) NOT(t) else t))) | "(" ~ or ~ (")" | EOI)) }
    def term: Rule1[Terminal] = rule{ notExact | exact | word }
    def notExact: Rule1[Terminal] = rule{ "[" ~ capture(phraseBody) ~ "]" ~ whiteSpace ~> PhraseTerm}
    def exact: Rule1[Terminal] = rule{ "\\"" ~ capture(phraseBody) ~ "\\"" ~ whiteSpace ~> ExactTerm}
    def phraseBody = rule{ zeroOrMore( noneOf("]\\"\\\\") | ("\\\\" ~ "\\"" ~ "]")) ~ whiteSpace}
    def word: Rule1[Terminal] = rule{ capture(oneOrMore(alphanum)) ~ whiteSpace ~> WordTerm }
    // Letters, digits, plus accented/extended Latin and Greek ranges.
    def alphanum = CharPredicate.AlphaNum ++ CharPredicate('\\u00c0' to '\\u1fff')
  }

  // Collapses double negations produced one level up.
  def pSimplifyHelper(ex: Expr): Expr = {
    ex match {
      case NOT(NOT(op)) => op
      case _ => ex
    }
  }

  // Recursively removes all double negations from the tree.
  def pSimplify(ex: Expr): Expr = {
    ex match {
      case NOT(op) => pSimplifyHelper(NOT(pSimplify(op)))
      case AND(l, r) => pSimplifyHelper(AND(pSimplify(l), pSimplify(r)))
      case OR (l, r) => pSimplifyHelper(OR(pSimplify(l), pSimplify(r)))
      case _ => ex
    }
  }

  // Pushes negations down to the leaves (De Morgan) after simplification.
  def pNNFHelper(ex: Expr): Expr = {
    ex match {
      case AND(l, r) => AND(pNNFHelper(l), pNNFHelper(r))
      case OR(l, r) => OR(pNNFHelper(l), pNNFHelper(r))
      case NOT(NOT(op)) => pNNFHelper(op)
      case NOT(AND(l, r)) => OR(pNNFHelper(NOT(l)), pNNFHelper(NOT(r)))
      case NOT(OR(l, r)) => AND(pNNFHelper(NOT(l)), pNNFHelper(NOT(r)))
      case _ => ex
    }
  }

  /** Negation normal form: NOT appears only directly above terminals. */
  def pNNF(ex: Expr): Expr = pNNFHelper(pSimplify(ex))

  // Cross-product of two clause sets (distributes AND over OR).
  def pDistribute(s1: List[List[Expr]], s2: List[List[Expr]]) = {for (x ← s1; y ← s2) yield x.union(y)}

  /** Disjunctive normal form: a list of AND-clauses, assuming NNF input. */
  def pDNF(ex: Expr): List[List[Expr]] = {
    val t: List[List[Expr]] = List()
    ex match {
      case AND(l, r) ⇒ pDistribute(pDNF(l), pDNF(r))
      case OR(l, r) ⇒ pDNF(l).union(pDNF(r))
      case _ ⇒ List(ex) :: t
    }
  }

  /** Parse a raw query string to DNF clauses (Failure on parse error). */
  def parse(str: String):Try[List[List[Expr]]] = {
    // NOTE(review): debug println left in; consider removing for production.
    println(str)
    val parser = new QueryParser(str)
    parser.expr.run()
  }.map(pNNF).map(pDNF)

  /** Java-serialize the DNF clause list to a byte array. */
  def serialize(disjunction: List[List[Expr]]) = {
    val baos: java.io.ByteArrayOutputStream = new java.io.ByteArrayOutputStream()
    val oos: java.io.ObjectOutputStream = new java.io.ObjectOutputStream(baos)
    oos.writeObject(disjunction)
    oos.close();
    baos.toByteArray()
  }
}
| gafiatulin/scala-search-engine | Frontend/src/main/scala/util/Query.scala | Scala | mit | 3,576 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testsuite.compiler
import org.junit.Test
import org.junit.Assert._
/** Conversions to `Byte` must always wrap values into the signed byte range. */
class ByteTest {

  @Test
  def `should_always_be_in_their_range`(): Unit = {
    // Int -> Byte keeps the low 8 bits, reinterpreted as signed.
    val intToByte = List(
      0 -> 0, 127 -> 127, 128 -> -128, -128 -> -128, -500 -> 12,
      -90000 -> 112, 123456789 -> 21, -40000 -> -64, 65536 -> 0, 32768 -> 0)
    for ((input, expected) <- intToByte)
      assertEquals(expected.toByte, input.toByte)

    // Char -> Byte behaves the same way on the character's code point.
    val charToByte = List(-1.toChar -> -1, 200.toChar -> -56)
    for ((input, expected) <- charToByte)
      assertEquals(expected.toByte, input.toByte)
  }
}
| SebsLittleHelpers/scala-js | test-suite/shared/src/test/scala/org/scalajs/testsuite/compiler/ByteTest.scala | Scala | apache-2.0 | 844 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.storage
import java.io.{DataOutputStream, File, FileOutputStream, IOException}
import java.net.{InetAddress, UnknownHostException}
import java.nio.file.Files
import scala.concurrent.duration._
import org.apache.hadoop.conf.Configuration
import org.mockito.{ArgumentMatchers => mc}
import org.mockito.Mockito.{mock, times, verify, when}
import org.scalatest.concurrent.Eventually.{eventually, interval, timeout}
import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite, TestUtils}
import org.apache.spark.LocalSparkContext.withSpark
import org.apache.spark.internal.config._
import org.apache.spark.launcher.SparkLauncher.{EXECUTOR_MEMORY, SPARK_MASTER}
import org.apache.spark.network.BlockTransferService
import org.apache.spark.network.buffer.ManagedBuffer
import org.apache.spark.scheduler.ExecutorDecommissionInfo
import org.apache.spark.scheduler.cluster.StandaloneSchedulerBackend
import org.apache.spark.shuffle.{IndexShuffleBlockResolver, ShuffleBlockInfo}
import org.apache.spark.shuffle.IndexShuffleBlockResolver.NOOP_REDUCE_ID
import org.apache.spark.util.Utils.tryWithResource
class FallbackStorageSuite extends SparkFunSuite with LocalSparkContext {
/**
 * Builds a SparkConf for a local-cluster app with decommissioning and
 * fallback shuffle storage enabled; skips the suite (assume) when the
 * synthetic fallback hostname unexpectedly resolves.
 */
def getSparkConf(initialExecutor: Int = 1, minExecutor: Int = 1): SparkConf = {
  // Some DNS always replies for all hostnames including unknown host names
  try {
    InetAddress.getByName(FallbackStorage.FALLBACK_BLOCK_MANAGER_ID.host)
    assume(false)
  } catch {
    case _: UnknownHostException =>
  }
  new SparkConf(false)
    .setAppName(getClass.getName)
    .set(SPARK_MASTER, s"local-cluster[$initialExecutor,1,1024]")
    .set(EXECUTOR_MEMORY, "1g")
    .set(UI.UI_ENABLED, false)
    .set(DYN_ALLOCATION_ENABLED, true)
    .set(DYN_ALLOCATION_SHUFFLE_TRACKING_ENABLED, true)
    .set(DYN_ALLOCATION_INITIAL_EXECUTORS, initialExecutor)
    .set(DYN_ALLOCATION_MIN_EXECUTORS, minExecutor)
    .set(DECOMMISSION_ENABLED, true)
    .set(STORAGE_DECOMMISSION_ENABLED, true)
    .set(STORAGE_DECOMMISSION_SHUFFLE_BLOCKS_ENABLED, true)
    .set(STORAGE_DECOMMISSION_FALLBACK_STORAGE_PATH,
      Files.createTempDirectory("tmp").toFile.getAbsolutePath + "/")
}
// copy() must upload index+data files, exists() must see them, and read()
// must decode a well-formed (index, data) pair while failing on empty files.
test("fallback storage APIs - copy/exists") {
  val conf = new SparkConf(false)
    .set("spark.app.id", "testId")
    .set(SHUFFLE_COMPRESS, false)
    .set(STORAGE_DECOMMISSION_SHUFFLE_BLOCKS_ENABLED, true)
    .set(STORAGE_DECOMMISSION_FALLBACK_STORAGE_PATH,
      Files.createTempDirectory("tmp").toFile.getAbsolutePath + "/")
  val fallbackStorage = new FallbackStorage(conf)
  val bmm = new BlockManagerMaster(new NoopRpcEndpointRef(conf), null, conf, false)
  val bm = mock(classOf[BlockManager])
  val dbm = new DiskBlockManager(conf, deleteFilesOnStop = false, isDriver = false)
  when(bm.diskBlockManager).thenReturn(dbm)
  when(bm.master).thenReturn(bmm)
  val resolver = new IndexShuffleBlockResolver(conf, bm)
  when(bm.migratableResolver).thenReturn(resolver)

  // Shuffle 1 / map 1: deliberately empty files (invalid index).
  resolver.getIndexFile(1, 1L).createNewFile()
  resolver.getDataFile(1, 1L).createNewFile()
  // Shuffle 1 / map 2: minimal valid index (offsets 0 and 4) plus data.
  val indexFile = resolver.getIndexFile(1, 2L)
  tryWithResource(new FileOutputStream(indexFile)) { fos =>
    tryWithResource(new DataOutputStream(fos)) { dos =>
      dos.writeLong(0)
      dos.writeLong(4)
    }
  }
  val dataFile = resolver.getDataFile(1, 2L)
  tryWithResource(new FileOutputStream(dataFile)) { fos =>
    tryWithResource(new DataOutputStream(fos)) { dos =>
      dos.writeLong(0)
    }
  }

  fallbackStorage.copy(ShuffleBlockInfo(1, 1L), bm)
  fallbackStorage.copy(ShuffleBlockInfo(1, 2L), bm)

  assert(fallbackStorage.exists(1, ShuffleIndexBlockId(1, 1L, NOOP_REDUCE_ID).name))
  assert(fallbackStorage.exists(1, ShuffleDataBlockId(1, 1L, NOOP_REDUCE_ID).name))
  assert(fallbackStorage.exists(1, ShuffleIndexBlockId(1, 2L, NOOP_REDUCE_ID).name))
  assert(fallbackStorage.exists(1, ShuffleDataBlockId(1, 2L, NOOP_REDUCE_ID).name))

  // The files for shuffle 1 and map 1 are empty intentionally.
  intercept[java.io.EOFException] {
    FallbackStorage.read(conf, ShuffleBlockId(1, 1L, 0))
  }
  FallbackStorage.read(conf, ShuffleBlockId(1, 2L, 0))
}
test("SPARK-34142: fallback storage API - cleanUp") {
  // The app directory under the fallback path must be removed if and only if
  // STORAGE_DECOMMISSION_FALLBACK_STORAGE_CLEANUP is enabled.
  withTempDir { dir =>
    Seq(true, false).foreach { cleanUp =>
      val appId = s"test$cleanUp"
      val conf = new SparkConf(false)
        .set("spark.app.id", appId)
        .set(STORAGE_DECOMMISSION_FALLBACK_STORAGE_PATH, dir.getAbsolutePath + "/")
        .set(STORAGE_DECOMMISSION_FALLBACK_STORAGE_CLEANUP, cleanUp)

      val location = new File(dir, appId)
      assert(location.mkdir())
      assert(location.exists())
      FallbackStorage.cleanUp(conf, new Configuration())
      // Directory survives exactly when cleanup is disabled.
      assert(location.exists() != cleanUp)
    }
  }
}
test("migrate shuffle data to fallback storage") {
  // Verifies that BlockManagerDecommissioner falls back to FallbackStorage when
  // the regular peer upload path (uploadBlockSync) fails.
  val conf = new SparkConf(false)
    .set("spark.app.id", "testId")
    .set(STORAGE_DECOMMISSION_SHUFFLE_BLOCKS_ENABLED, true)
    .set(STORAGE_DECOMMISSION_FALLBACK_STORAGE_PATH,
      Files.createTempDirectory("tmp").toFile.getAbsolutePath + "/")
  val ids = Set((1, 1L, 1))
  val bm = mock(classOf[BlockManager])
  val dbm = new DiskBlockManager(conf, deleteFilesOnStop = false, isDriver = false)
  when(bm.diskBlockManager).thenReturn(dbm)
  val indexShuffleBlockResolver = new IndexShuffleBlockResolver(conf, bm)
  val indexFile = indexShuffleBlockResolver.getIndexFile(1, 1L)
  val dataFile = indexShuffleBlockResolver.getDataFile(1, 1L)
  indexFile.createNewFile()
  dataFile.createNewFile()

  // Mocked resolver reports a single stored shuffle (1, 1L) with its two blocks.
  val resolver = mock(classOf[IndexShuffleBlockResolver])
  when(resolver.getStoredShuffles())
    .thenReturn(ids.map(triple => ShuffleBlockInfo(triple._1, triple._2)).toSeq)
  ids.foreach { case (shuffleId: Int, mapId: Long, reduceId: Int) =>
    when(resolver.getMigrationBlocks(mc.any()))
      .thenReturn(List(
        (ShuffleIndexBlockId(shuffleId, mapId, reduceId), mock(classOf[ManagedBuffer])),
        (ShuffleDataBlockId(shuffleId, mapId, reduceId), mock(classOf[ManagedBuffer]))))
    when(resolver.getIndexFile(shuffleId, mapId)).thenReturn(indexFile)
    when(resolver.getDataFile(shuffleId, mapId)).thenReturn(dataFile)
  }

  // The only peer is the sentinel fallback block manager id.
  when(bm.getPeers(mc.any()))
    .thenReturn(Seq(FallbackStorage.FALLBACK_BLOCK_MANAGER_ID))
  val bmm = new BlockManagerMaster(new NoopRpcEndpointRef(conf), null, conf, false)
  when(bm.master).thenReturn(bmm)
  // Every uploadBlockSync attempt fails, forcing the fallback-storage path.
  val blockTransferService = mock(classOf[BlockTransferService])
  when(blockTransferService.uploadBlockSync(mc.any(), mc.any(), mc.any(), mc.any(), mc.any(),
    mc.any(), mc.any())).thenThrow(new IOException)
  when(bm.blockTransferService).thenReturn(blockTransferService)
  when(bm.migratableResolver).thenReturn(resolver)
  when(bm.getMigratableRDDBlocks()).thenReturn(Seq())

  val decommissioner = new BlockManagerDecommissioner(conf, bm)

  try {
    decommissioner.start()
    val fallbackStorage = new FallbackStorage(conf)
    eventually(timeout(10.second), interval(1.seconds)) {
      // uploadBlockSync is attempted exactly once and throws IOException (stubbed
      // above); after that failure the blocks must land in fallback storage.
      verify(blockTransferService, times(1))
        .uploadBlockSync(mc.any(), mc.any(), mc.any(), mc.any(), mc.any(), mc.any(), mc.any())

      Seq("shuffle_1_1_0.index", "shuffle_1_1_0.data").foreach { filename =>
        assert(fallbackStorage.exists(shuffleId = 1, filename))
      }
    }
  } finally {
    decommissioner.stop()
  }
}
test("Upload from all decommissioned executors") {
  // End-to-end: decommissioning every executor must push shuffle files to the
  // fallback storage while the executors themselves stay alive.
  sc = new SparkContext(getSparkConf(2, 2))
  withSpark(sc) { sc =>
    TestUtils.waitUntilExecutorsUp(sc, 2, 60000)
    val rdd1 = sc.parallelize(1 to 10, 10)
    val rdd2 = rdd1.map(x => (x % 2, 1))
    val rdd3 = rdd2.reduceByKey(_ + _)
    assert(rdd3.count() === 2)

    // Decommission all
    val sched = sc.schedulerBackend.asInstanceOf[StandaloneSchedulerBackend]
    sc.getExecutorIds().foreach {
      sched.decommissionExecutor(_, ExecutorDecommissionInfo(""), false)
    }

    val files = Seq("shuffle_0_0_0.index", "shuffle_0_0_0.data")
    val fallbackStorage = new FallbackStorage(sc.getConf)
    // Uploading is not started yet.
    files.foreach { file => assert(!fallbackStorage.exists(0, file)) }

    // Uploading is completed on decommissioned executors
    eventually(timeout(20.seconds), interval(1.seconds)) {
      files.foreach { file => assert(fallbackStorage.exists(0, file)) }
    }

    // All executors are still alive.
    assert(sc.getExecutorIds().size == 2)
  }
}
test("Upload multi stages") {
  // A two-shuffle job (reduceByKey then sortByKey): after decommissioning, the
  // outputs of both shuffle stages must appear in fallback storage.
  sc = new SparkContext(getSparkConf())
  withSpark(sc) { sc =>
    TestUtils.waitUntilExecutorsUp(sc, 1, 60000)
    val rdd1 = sc.parallelize(1 to 10, 2)
    val rdd2 = rdd1.map(x => (x % 2, 1))
    val rdd3 = rdd2.reduceByKey(_ + _)
    val rdd4 = rdd3.sortByKey()
    assert(rdd4.count() === 2)

    val shuffle0_files = Seq(
      "shuffle_0_0_0.index", "shuffle_0_0_0.data",
      "shuffle_0_1_0.index", "shuffle_0_1_0.data")
    val shuffle1_files = Seq(
      "shuffle_1_4_0.index", "shuffle_1_4_0.data",
      "shuffle_1_5_0.index", "shuffle_1_5_0.data")
    val fallbackStorage = new FallbackStorage(sc.getConf)
    // Nothing has been uploaded before decommissioning starts.
    shuffle0_files.foreach { file => assert(!fallbackStorage.exists(0, file)) }
    shuffle1_files.foreach { file => assert(!fallbackStorage.exists(1, file)) }

    // Decommission all
    val sched = sc.schedulerBackend.asInstanceOf[StandaloneSchedulerBackend]
    sc.getExecutorIds().foreach {
      sched.decommissionExecutor(_, ExecutorDecommissionInfo(""), false)
    }

    eventually(timeout(20.seconds), interval(1.seconds)) {
      shuffle0_files.foreach { file => assert(fallbackStorage.exists(0, file)) }
      shuffle1_files.foreach { file => assert(fallbackStorage.exists(1, file)) }
    }
  }
}
// For each supported compression codec: after all original executors are
// decommissioned and killed, newly started executors must still be able to
// read the old shuffle data from the fallback (remote) storage.
Seq("lz4", "lzf", "snappy", "zstd").foreach { codec =>
  test(s"$codec - Newly added executors should access old data from remote storage") {
    sc = new SparkContext(getSparkConf(2, 0).set(IO_COMPRESSION_CODEC, codec))
    withSpark(sc) { sc =>
      TestUtils.waitUntilExecutorsUp(sc, 2, 60000)
      val rdd1 = sc.parallelize(1 to 10, 2)
      val rdd2 = rdd1.map(x => (x % 2, 1))
      val rdd3 = rdd2.reduceByKey(_ + _)
      assert(rdd3.collect() === Array((0, 5), (1, 5)))

      // Decommission all
      val sched = sc.schedulerBackend.asInstanceOf[StandaloneSchedulerBackend]
      sc.getExecutorIds().foreach {
        sched.decommissionExecutor(_, ExecutorDecommissionInfo(""), false)
      }

      // Make it sure that fallback storage are ready
      val fallbackStorage = new FallbackStorage(sc.getConf)
      eventually(timeout(20.seconds), interval(1.seconds)) {
        Seq(
          "shuffle_0_0_0.index", "shuffle_0_0_0.data",
          "shuffle_0_1_0.index", "shuffle_0_1_0.data").foreach { file =>
          assert(fallbackStorage.exists(0, file))
        }
      }

      // Since the data is safe, force to shrink down to zero executor
      sc.getExecutorIds().foreach { id =>
        sched.killExecutor(id)
      }
      eventually(timeout(20.seconds), interval(1.seconds)) {
        assert(sc.getExecutorIds().isEmpty)
      }

      // Dynamic allocation will start new executors
      assert(rdd3.collect() === Array((0, 5), (1, 5)))
      assert(rdd3.sortByKey().count() == 2)
      assert(sc.getExecutorIds().nonEmpty)
    }
  }
}
}
| shaneknapp/spark | core/src/test/scala/org/apache/spark/storage/FallbackStorageSuite.scala | Scala | apache-2.0 | 12,445 |
package userstest
import json.JSON
import play.api.libs.json.{JsValue, Json}
import play.api.test._
import scala.concurrent._
import scala.concurrent.duration._
/**
* Created by ka-son on 6/6/15.
*/
object Post extends PlaySpecification with JSON {
val timeout: FiniteDuration = 10.seconds
// Happy path: a new login/email/password must create the user (201) and echo
// back the default profile fields with matching created/updated timestamps.
"POST /api/v0.1/users " +
  """{"login": "y",
    "email": "y@y.com",
    "password": "12345678"} """ +
  "must be 201 Created" in {
  running(FakeApplication()) {
    val request = FakeRequest(POST, "/api/v0.1/users")
      .withJsonBody(Json.parse( """{ "login": "y",
                                  | "email": "y@y.com",
                                  | "password": "12345678" }""".stripMargin))
    val response = route(request)

    Thread.sleep(5000)

    response.isDefined mustEqual true
    val result = Await.result(response.get, timeout)

    val json = Json.parse(contentAsString(response.get))

    (json \\ "login").as[String] mustEqual "y"
    (json \\ "avatar_url").as[String] mustEqual ""
    (json \\ "type").as[String] mustEqual "user"
    (json \\ "email").as[String] mustEqual "y@y.com"
    (json \\ "location").as[String] mustEqual ""
    (json \\ "confirmed").as[Boolean] mustEqual false
    // A freshly created user has never been updated.
    (json \\ "created_at").as[Long] mustEqual (json \\ "updated_at").as[Long]
    result.header.status mustEqual 201
  }
}
// Duplicate registration: both the login and the email conflicts must be reported.
"POST /api/v0.1/users " +
  """{"login": "a",
    "email": "a@a.com",
    "password": "12345678"} """ +
  "must be 400 Bad request Login email are registered" in {
  running(FakeApplication()) {
    val request = FakeRequest(POST, "/api/v0.1/users")
      .withJsonBody(Json.parse( """{ "login": "a",
                                  | "email": "a@a.com",
                                  | "password": "12345678" }""".stripMargin))
    val response = route(request)

    Thread.sleep(5000)

    response.isDefined mustEqual true
    val result = Await.result(response.get, timeout)

    val expectedResponse: JsValue =
      Json.obj("messages" -> Json.arr("Login is already registered",
        "Email is already registered"))
    contentAsString(response.get) mustEqual prettify(expectedResponse)
    result.header.status mustEqual 400
  }
}

// Missing "password" field: the payload is rejected as invalid JSON.
"POST /api/v0.1/users " +
  """{"login": "a",
    "email": "a@a.com" } """ +
  "must be 400 Bad request Invalid Json" in {
  running(FakeApplication()) {
    val request = FakeRequest(POST, "/api/v0.1/users")
      .withJsonBody(Json.parse( """{ "login": "a",
                                  | "email": "a@a.com" }""".stripMargin))
    val response = route(request)

    Thread.sleep(5000)

    response.isDefined mustEqual true
    val result = Await.result(response.get, timeout)

    val expectedResponse: JsValue =
      Json.obj("messages" -> Json.arr("Invalid Json"))
    contentAsString(response.get) mustEqual prettify(expectedResponse)
    result.header.status mustEqual 400
  }
}

// Missing "login" field: the payload is rejected as invalid JSON.
"POST /api/v0.1/users " +
  """{"email": "a@a.com",
    "password": "12345678" } """ +
  "must be 400 Bad request Invalid Json" in {
  running(FakeApplication()) {
    val request = FakeRequest(POST, "/api/v0.1/users")
      .withJsonBody(Json.parse( """{ "email": "a@a.com",
                                  | "password": "12345678" }""".stripMargin))
    val response = route(request)

    Thread.sleep(5000)

    response.isDefined mustEqual true
    val result = Await.result(response.get, timeout)

    val expectedResponse: JsValue =
      Json.obj("messages" -> Json.arr("Invalid Json"))
    contentAsString(response.get) mustEqual prettify(expectedResponse)
    result.header.status mustEqual 400
  }
}

// Empty JSON object: rejected as invalid JSON.
"POST /api/v0.1/users " +
  """{} """ +
  "must be 400 Bad request Invalid Json" in {
  running(FakeApplication()) {
    val request = FakeRequest(POST, "/api/v0.1/users")
      .withJsonBody(Json.parse( """{}""".stripMargin))
    val response = route(request)

    Thread.sleep(5000)

    response.isDefined mustEqual true
    val result = Await.result(response.get, timeout)

    val expectedResponse: JsValue =
      Json.obj("messages" -> Json.arr("Invalid Json"))
    contentAsString(response.get) mustEqual prettify(expectedResponse)
    result.header.status mustEqual 400
  }
}

// No body at all: the API must ask for a JSON body.
"POST /api/v0.1/users " +
  "must be 400 Bad request Expecting json" in {
  running(FakeApplication()) {
    val request = FakeRequest(POST, "/api/v0.1/users")
    val response = route(request)

    Thread.sleep(5000)

    response.isDefined mustEqual true
    val result = Await.result(response.get, timeout)

    val expectedResponse: JsValue =
      Json.obj("messages" -> Json.arr("Expecting text/json or application/json body"))
    contentAsString(response.get) mustEqual prettify(expectedResponse)
    result.header.status mustEqual 400
  }
}
// Empty login only: a single username-length validation message is returned.
"POST /api/v0.1/users " +
  """{"login": "",
    "email": "a@a.com",
    "password": "12345678" } """ +
  "must be 400 Bad request Invalid login" in {
  running(FakeApplication()) {
    val request = FakeRequest(POST, "/api/v0.1/users")
      .withJsonBody(Json.parse( """{ "login": "",
                                  | "email": "a@a.com",
                                  | "password": "12345678" }""".stripMargin))
    val response = route(request)

    Thread.sleep(5000)

    response.isDefined mustEqual true
    val result = Await.result(response.get, timeout)

    val expectedResponse: JsValue =
      Json.obj("messages" ->
        Json.arr("Username must be at least 1 character and at most 50 characters"))
    contentAsString(response.get) mustEqual prettify(expectedResponse)
    result.header.status mustEqual 400
  }
}

// Empty login and email: both validation messages are returned, in order.
"POST /api/v0.1/users " +
  """{"login": "",
    "email": "",
    "password": "12345678" } """ +
  "must be 400 Bad request Invalid login email" in {
  running(FakeApplication()) {
    val request = FakeRequest(POST, "/api/v0.1/users")
      .withJsonBody(Json.parse( """{ "login": "",
                                  | "email": "",
                                  | "password": "12345678" }""".stripMargin))
    val response = route(request)

    Thread.sleep(5000)

    response.isDefined mustEqual true
    val result = Await.result(response.get, timeout)

    val expectedResponse: JsValue =
      Json.obj("messages" ->
        Json.arr("Username must be at least 1 character and at most 50 characters",
          "Doesn't look like a valid email"))
    contentAsString(response.get) mustEqual prettify(expectedResponse)
    result.header.status mustEqual 400
  }
}

// Empty login and password: username and password validation messages are returned.
"POST /api/v0.1/users " +
  """{"login": "",
    "email": "a@a.com",
    "password": "" } """ +
  "must be 400 Bad request Invalid Login password" in {
  running(FakeApplication()) {
    val request = FakeRequest(POST, "/api/v0.1/users")
      .withJsonBody(Json.parse( """{ "login": "",
                                  | "email": "a@a.com",
                                  | "password": "" }""".stripMargin))
    val response = route(request)

    Thread.sleep(5000)

    response.isDefined mustEqual true
    val result = Await.result(response.get, timeout)

    val expectedResponse: JsValue =
      Json.obj("messages" ->
        Json.arr("Username must be at least 1 character and at most 50 characters",
          "Password must be at least 8 characters and at most 50 characters"))
    contentAsString(response.get) mustEqual prettify(expectedResponse)
    result.header.status mustEqual 400
  }
}

// All three fields empty: all three validation messages are returned.
"POST /api/v0.1/users " +
  """{"login": "",
    "email": "",
    "password": "" } """ +
  "must be 400 Bad request Invalid Login email password" in {
  running(FakeApplication()) {
    val request = FakeRequest(POST, "/api/v0.1/users")
      .withJsonBody(Json.parse( """{ "login": "",
                                  | "email": "",
                                  | "password": "" }""".stripMargin))
    val response = route(request)

    Thread.sleep(5000)

    response.isDefined mustEqual true
    val result = Await.result(response.get, timeout)

    val expectedResponse: JsValue =
      Json.obj("messages" ->
        Json.arr("Username must be at least 1 character and at most 50 characters",
          "Doesn't look like a valid email",
          "Password must be at least 8 characters and at most 50 characters"))
    contentAsString(response.get) mustEqual prettify(expectedResponse)
    result.header.status mustEqual 400
  }
}
// Malformed email only: a single email validation message is returned.
"POST /api/v0.1/users " +
  """{"login": "a",
    "email": "a",
    "password": "12345678" } """ +
  "must be 400 Bad request Invalid email" in {
  running(FakeApplication()) {
    val request = FakeRequest(POST, "/api/v0.1/users")
      .withJsonBody(Json.parse( """{ "login": "a",
                                  | "email": "a",
                                  | "password": "12345678" }""".stripMargin))
    val response = route(request)

    Thread.sleep(5000)

    response.isDefined mustEqual true
    val result = Await.result(response.get, timeout)

    val expectedResponse: JsValue =
      Json.obj("messages" ->
        Json.arr("Doesn't look like a valid email"))
    contentAsString(response.get) mustEqual prettify(expectedResponse)
    result.header.status mustEqual 400
  }
}

// Malformed email and too-short (7-char) password: both messages are returned.
"POST /api/v0.1/users " +
  """{"login": "a",
    "email": "a",
    "password": "1234567" } """ +
  "must be 400 Bad request Invalid email password" in {
  running(FakeApplication()) {
    val request = FakeRequest(POST, "/api/v0.1/users")
      .withJsonBody(Json.parse( """{ "login": "a",
                                  | "email": "a",
                                  | "password": "1234567" }""".stripMargin))
    val response = route(request)

    Thread.sleep(5000)

    response.isDefined mustEqual true
    val result = Await.result(response.get, timeout)

    val expectedResponse: JsValue =
      Json.obj("messages" ->
        Json.arr("Doesn't look like a valid email",
          "Password must be at least 8 characters and at most 50 characters"))
    contentAsString(response.get) mustEqual prettify(expectedResponse)
    result.header.status mustEqual 400
  }
}

// Empty password only: a single password-length validation message is returned.
"POST /api/v0.1/users " +
  """{"login": "a",
    "email": "a@a.com",
    "password": "" } """ +
  "must be 400 Bad request Invalid password" in {
  running(FakeApplication()) {
    val request = FakeRequest(POST, "/api/v0.1/users")
      .withJsonBody(Json.parse( """{ "login": "a",
                                  | "email": "a@a.com",
                                  | "password": "" }""".stripMargin))
    val response = route(request)

    Thread.sleep(5000)

    response.isDefined mustEqual true
    val result = Await.result(response.get, timeout)

    val expectedResponse: JsValue =
      Json.obj("messages" ->
        Json.arr("Password must be at least 8 characters and at most 50 characters"))
    contentAsString(response.get) mustEqual prettify(expectedResponse)
    result.header.status mustEqual 400
  }
}
// Over-long (51-character) login must trigger the username-length validation.
"POST /api/v0.1/users " +
  """{"login": "123456789012345678901234567890123456789012345678901",
    "email": "a@a.com",
    "password": "12345678" } """ +
  "must be 400 Bad request Invalid login" in {
  running(FakeApplication()) {
    // Fix: the request body previously posted an empty login (copy-paste from the
    // empty-login test) even though this test's title and intent cover the
    // 51-character, over-the-limit login. Send the over-long login for real.
    val request = FakeRequest(POST, "/api/v0.1/users")
      .withJsonBody(Json.parse( """{ "login": "123456789012345678901234567890123456789012345678901",
                                  | "email": "a@a.com",
                                  | "password": "12345678" }""".stripMargin))
    val response = route(request)

    Thread.sleep(5000)

    response.isDefined mustEqual true
    val result = Await.result(response.get, timeout)

    // Same expected message as before: length must be between 1 and 50 characters.
    val expectedResponse: JsValue =
      Json.obj("messages" ->
        Json.arr("Username must be at least 1 character and at most 50 characters"))
    contentAsString(response.get) mustEqual prettify(expectedResponse)
    result.header.status mustEqual 400
  }
}
// Empty login plus too-short (7-char) password: both messages are returned.
"POST /api/v0.1/users " +
  """{"login": "",
    "email": "a@a.com",
    "password": "1234567" } """ +
  "must be 400 Bad request Invalid login password" in {
  running(FakeApplication()) {
    val request = FakeRequest(POST, "/api/v0.1/users")
      .withJsonBody(Json.parse( """{ "login": "",
                                  | "email": "a@a.com",
                                  | "password": "1234567" }""".stripMargin))
    val response = route(request)

    Thread.sleep(5000)

    response.isDefined mustEqual true
    val result = Await.result(response.get, timeout)

    val expectedResponse: JsValue =
      Json.obj("messages" ->
        Json.arr("Username must be at least 1 character and at most 50 characters",
          "Password must be at least 8 characters and at most 50 characters"))
    contentAsString(response.get) mustEqual prettify(expectedResponse)
    result.header.status mustEqual 400
  }
}

// Empty login plus over-long (51-char) password: both messages are returned.
"POST /api/v0.1/users " +
  """{"login": "",
    "email": "a@a.com",
    "password": "123456789012345678901234567890123456789012345678901" } """ +
  "must be 400 Bad request Invalid login password" in {
  running(FakeApplication()) {
    val request = FakeRequest(POST, "/api/v0.1/users")
      .withJsonBody(Json.parse( """{ "login": "",
                                  | "email": "a@a.com",
                                  | "password": "123456789012345678901234567890123456789012345678901" }""".stripMargin))
    val response = route(request)

    Thread.sleep(5000)

    response.isDefined mustEqual true
    val result = Await.result(response.get, timeout)

    val expectedResponse: JsValue =
      Json.obj("messages" ->
        Json.arr("Username must be at least 1 character and at most 50 characters",
          "Password must be at least 8 characters and at most 50 characters"))
    contentAsString(response.get) mustEqual prettify(expectedResponse)
    result.header.status mustEqual 400
  }
}

// Too-short (7-char) password only: a single password message is returned.
"POST /api/v0.1/users " +
  """{"login": "a",
    "email": "a@a.com",
    "password": "1234567" } """ +
  "must be 400 Bad request Invalid password" in {
  running(FakeApplication()) {
    val request = FakeRequest(POST, "/api/v0.1/users")
      .withJsonBody(Json.parse( """{ "login": "a",
                                  | "email": "a@a.com",
                                  | "password": "1234567" }""".stripMargin))
    val response = route(request)

    Thread.sleep(5000)

    response.isDefined mustEqual true
    val result = Await.result(response.get, timeout)

    val expectedResponse: JsValue =
      Json.obj("messages" ->
        Json.arr("Password must be at least 8 characters and at most 50 characters"))
    contentAsString(response.get) mustEqual prettify(expectedResponse)
    result.header.status mustEqual 400
  }
}

// Over-long (51-char) password only: a single password message is returned.
"POST /api/v0.1/users " +
  """{"login": "a",
    "email": "a@a.com",
    "password": "123456789012345678901234567890123456789012345678901" } """ +
  "must be 400 Bad request Invalid password" in {
  running(FakeApplication()) {
    val request = FakeRequest(POST, "/api/v0.1/users")
      .withJsonBody(Json.parse( """{ "login": "a",
                                  | "email": "a@a.com",
                                  | "password": "123456789012345678901234567890123456789012345678901" }""".stripMargin))
    val response = route(request)

    Thread.sleep(5000)

    response.isDefined mustEqual true
    val result = Await.result(response.get, timeout)

    val expectedResponse: JsValue =
      Json.obj("messages" ->
        Json.arr("Password must be at least 8 characters and at most 50 characters"))
    contentAsString(response.get) mustEqual prettify(expectedResponse)
    result.header.status mustEqual 400
  }
}
}
| KasonChan/play-chat-api | test/userstest/Post.scala | Scala | mit | 16,139 |
package com.lyrx.markdown
import com.lyrx.text.{Context, SimpleStringCollector}
/** Generates the project's README from markdown resources using the MarkdownGenerator DSL. */
object Readme extends MarkdownGenerator {

  def main(args:Array[String]):Unit={
    import scala.language.postfixOps;

    // Read markdown sources from the resources folder, write output to the repo root.
    implicit val ctx = Context().withInputDir("src/main/resources/readme").withOutputDir(".")
    implicit val collector = new SimpleStringCollector();

    // Emit a title, a section heading, then the contents of the history file.
    // NOTE(review): .h1/.h2/mdFile presumably append to the implicit collector,
    // so statement order determines document order — confirm in MarkdownGenerator.
    "The lyrx Generator: A DSL for document generation in various formats".h1
    "History".h2
    mdFile("history.text")
  }
}
| lyrx/lyrxgenerator | src/main/scala/com/lyrx/markdown/Readme.scala | Scala | gpl-3.0 | 504 |
package com.sksamuel.avro4s.record.encoder
import com.sksamuel.avro4s.{AvroSchema, DefaultNamingStrategy, Encoder, ImmutableRecord}
import org.apache.avro.util.Utf8
import org.scalatest.{Matchers, WordSpec}
/** Verifies that the avro4s Encoder writes Option fields as the value for Some and null for None. */
class OptionEncoderTest extends WordSpec with Matchers {

  "Encoder" should {
    "support String options" in {
      case class Test(s: Option[String])
      val schema = AvroSchema[Test]
      // Strings are encoded as Avro Utf8; None becomes null.
      Encoder[Test].encode(Test(Option("qwe")), schema, DefaultNamingStrategy) shouldBe ImmutableRecord(schema, Vector(new Utf8("qwe")))
      Encoder[Test].encode(Test(None), schema, DefaultNamingStrategy) shouldBe ImmutableRecord(schema, Vector(null))
    }
    "support boolean options" in {
      case class Test(b: Option[Boolean])
      val schema = AvroSchema[Test]
      // Booleans are boxed to java.lang.Boolean; None becomes null.
      Encoder[Test].encode(Test(Option(true)), schema, DefaultNamingStrategy) shouldBe ImmutableRecord(schema, Vector(java.lang.Boolean.valueOf(true)))
      Encoder[Test].encode(Test(None), schema, DefaultNamingStrategy) shouldBe ImmutableRecord(schema, Vector(null))
    }
    "support options of case classes" in {
      case class Foo(s: String)
      case class Test(b: Option[Foo])
      val schema = AvroSchema[Test]
      val fooSchema = AvroSchema[Foo]
      // A nested case class is encoded as a nested record; None becomes null.
      Encoder[Test].encode(Test(Option(Foo("hello"))), schema, DefaultNamingStrategy) shouldBe ImmutableRecord(schema, Vector(ImmutableRecord(fooSchema, Vector(new Utf8("hello")))))
      Encoder[Test].encode(Test(None), schema, DefaultNamingStrategy) shouldBe ImmutableRecord(schema, Vector(null))
    }
  }
}
| 51zero/avro4s | avro4s-core/src/test/scala/com/sksamuel/avro4s/record/encoder/OptionEncoderTest.scala | Scala | mit | 1,546 |
package techex.cases
import org.http4s.Response
import scalaz._, Scalaz._
import org.http4s.dsl._
import _root_.argonaut._
import Argonaut._
import org.http4s.argonaut._
import techex._
import techex.data.{codecJson, Storage}
import techex.domain._
import scalaz.concurrent.Task
object listPersonalAchievements {

  import codecJson._

  /** Nicknames of every player that has earned the given achievement. */
  def acheivedBy(badge: Achievement, ctx: Storage) =
    ctx.players
      .filter(_.achievements.exists(_ === badge))
      .map(_.player.nick)

  /** GET /achievements/player/:playerId — badge progress for one player. */
  val restApi: WebHandler = {
    case req@GET -> Root / "achievements" / "player" / playerId => {
      // Run a read against the Storage state monad, yielding the HTTP response
      // to produce; then flatten the nested Task.
      val achievements: Task[Task[Response]] =
        Storage.run[Task[Response]](State {
          storage: Storage =>
            storage.playerData.get(PlayerId(playerId)) match {
              case None =>
                // Unknown player id.
                (storage, NotFound())
              case Some(playerData) =>
                // Only badges from the player's private quests are visible here.
                val visibleBadges =
                  playerData.player.privateQuests.flatMap(_.badges)

                // Mark each visible badge as achieved or not.
                val progress =
                  visibleBadges.map(badge =>
                    PlayerBadgeProgress(badge.id.value, badge.name, badge.desc,
                      playerData.achievements.contains(badge)))

                (storage, Ok(progress.asJson))
            }
        })

      achievements.flatMap(identity)
    }
  }
}
| kantega/tech-ex-2015 | backend/src/main/scala/techex/cases/listPersonalAchievements.scala | Scala | mit | 1,385 |
package test_data.v21
import scala.xml.Elem
/** View over the EvidenceList section of a DWP claim XML document. */
case class SectionEvidenceList(xml: Elem) {

  // Root of the evidence section inside the transaction/claim envelope.
  val rootPath = xml \ "DWPCATransaction" \ "DWPCAClaim" \ "EvidenceList"

  // Trimmed text of every child node of each recipient-address Line element,
  // flattened into one sequence.
  val address: Seq[String] =
    (rootPath \ "RecipientAddress" \ "Answer" \ "Line")
      .flatMap(_.child)
      .map(_.text.trim)

  val postCode = rootPath \ "RecipientAddress" \ "Answer" \ "PostCode"

  // Trimmed text of every child node of each Evidence element (searched at any depth).
  val evidenceList: Seq[String] =
    (rootPath \\ "Evidence")
      .flatMap(_.child)
      .map(_.text.trim)
}
| Department-for-Work-and-Pensions/RenderingService | test/test_data/v21/SectionEvidenceList.scala | Scala | mit | 688 |
package com.nutomic.ensichat.core.messages.header
import com.nutomic.ensichat.core.messages
import com.nutomic.ensichat.core.messages.header
import com.nutomic.ensichat.core.messages.header.MessageHeaderTest._
import com.nutomic.ensichat.core.routing.{Address, AddressTest}
import junit.framework.TestCase
import org.junit.Assert._
object MessageHeaderTest {

  // Fixture headers covering different address/sequence-number combinations,
  // including the Null and Broadcast addresses and the maximum sequence number.
  // NOTE(review): h1/h3 use a 5-argument MessageHeader constructor while h2 uses
  // a 6-argument one — presumably an overload with an extra optional field;
  // confirm against MessageHeader's definition.
  val h1 = new header.MessageHeader(header.ContentHeader.ContentMessageType, AddressTest.a1, AddressTest.a2, 3,
    0)

  val h2 = new header.MessageHeader(header.ContentHeader.ContentMessageType, Address.Null, Address.Broadcast,
    header.ContentHeader.SeqNumRange.last, 6, 3)

  val h3 = new header.MessageHeader(header.ContentHeader.ContentMessageType, Address.Broadcast, Address.Null, 0, 3)

  val headers = Set(h1, h2, h3)

}
class MessageHeaderTest extends TestCase {

  /** Round-trips each fixture header through write/read, checking equality and length. */
  def testSerialize(): Unit = {
    for (original <- headers) {
      val serialized = original.write(0)
      val (parsed, bytesRead) = messages.header.MessageHeader.read(serialized)
      assertEquals(original, parsed)
      assertEquals(messages.header.MessageHeader.Length, bytesRead)
    }
  }

}
| Nutomic/ensichat | core/src/test/scala/com/nutomic/ensichat/core/messages/header/MessageHeaderTest.scala | Scala | mpl-2.0 | 1,104 |
package com.sksamuel.elastic4s.requests.searches.aggs
/**
 * Elasticsearch "nested" aggregation: aggregates over documents inside the
 * given nested object path, optionally with sub-aggregations and metadata.
 */
case class NestedAggregation(name: String,
                             path: String,
                             subaggs: Seq[AbstractAggregation] = Nil,
                             metadata: Map[String, AnyRef] = Map.empty)
  extends Aggregation {

  type T = NestedAggregation

  // Builder-style copies: replace sub-aggregations / metadata, returning a new instance.
  override def subAggregations(aggs: Iterable[AbstractAggregation]): T = copy(subaggs = aggs.toSeq)
  override def metadata(map: Map[String, AnyRef]): T = copy(metadata = map)
}
| stringbean/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/searches/aggs/NestedAggregation.scala | Scala | apache-2.0 | 536 |
package rtmp.header
import akka.util.ByteIterator
import java.nio.ByteOrder
/**
 * Decodes an RTMP basic header with a timestamp delta (format 0x02).
 * Fields decoded: chunk stream id, 3-byte time delta, and — when the delta is
 * the sentinel 0xFFFFFF — a 4-byte extended timestamp that follows.
 */
class ExtBasicHeaderDecoder extends HeaderDecoder {

  def decode(firstByte:Int, bufferItr: ByteIterator): Header = {

    val sid = decodeSID(firstByte, bufferItr)
    val timeDelta = decodeTime(bufferItr)

    // Per the RTMP chunking format, a 3-byte delta of 0xFFFFFF means the real
    // delta is carried in a 4-byte extended timestamp field.
    if (timeDelta == 0xffffff) {
      val extendedTimeDelta = decodeExtendedTime(bufferItr)
      ExtendedBasicHeader(sid, timeDelta, extendedTimeDelta)
    } else {
      ExtendedBasicHeader(sid, timeDelta, 0)
    }
  }

  /**
   * Reads a 3-byte big-endian unsigned time delta.
   *
   * Fix: each byte must be masked with 0xff before shifting. getByte returns a
   * signed Byte, so without the mask any byte >= 0x80 sign-extends and corrupts
   * the value — and the 0xFFFFFF extended-timestamp check above could never match.
   */
  protected def decodeTime(bufferItr: ByteIterator):Int = {
    val b1 = bufferItr.getByte & 0xff
    val b2 = bufferItr.getByte & 0xff
    val b3 = bufferItr.getByte & 0xff
    (b1 << 16) | (b2 << 8) | b3
  }

  /** Reads the 4-byte extended timestamp, clearing the sign bit to keep it non-negative. */
  protected def decodeExtendedTime(bufferItr: ByteIterator):Int = {
    bufferItr.getInt(ByteOrder.BIG_ENDIAN) & Integer.MAX_VALUE
  }
}
| vimvim/AkkaTest | src/main/scala/rtmp/header/ExtBasicHeaderDecoder.scala | Scala | agpl-3.0 | 936 |
package com.angos.slicknspray
import com.angos.slicknspray.Tables.PeopleRow
import spray.json.DefaultJsonProtocol
import scala.language.implicitConversions
import scalaz.Lens._
import scalaz.PLensFamily
// Geographic coordinates in decimal degrees.
case class Location(latitude: Double, longitude: Double)

// A person record; id is absent before the row is persisted.
case class Person(id: Option[Int], firstName: Option[String], lastName: String, age: Option[Int], location: Option[Location])

// A person paired with a computed distance (units defined by the caller).
case class PersonWithDistance(person: Person, distance: Double)
/** Scalaz lenses for reading/updating the fields of [[Location]]. */
object LocationLens {
  val γLatitude = lensu[Location, Double]((l, v) => l.copy(latitude = v), _.latitude)
  val γLongitude = lensu[Location, Double]((l, v) => l.copy(longitude = v), _.longitude)
}
/** Scalaz lenses for [[Person]], including partial lenses through the optional location. */
object PersonLens {
  val γLocation = lensg[Person, Option[Location]](p => l => p.copy(location = l), _.location)
  // Partial lenses: compose through the Option with somePLens, so they only
  // read/write when a location is present.
  val γLatitude = γLocation.partial >=> PLensFamily.somePLens >=> LocationLens.γLatitude.partial
  val γLongitude = γLocation.partial >=> PLensFamily.somePLens >=> LocationLens.γLongitude.partial
}
/** spray-json formats for the person model, derived from the case class fields. */
object PersonToJsonProtocol extends DefaultJsonProtocol {
  implicit val LocationFormat = jsonFormat2(Location)
  implicit val PersonFormat = jsonFormat5(Person)
  implicit val PersonWithDistanceFormat = jsonFormat2(PersonWithDistance)
}
/** Implicit conversion from a Slick PeopleRow to the domain [[Person]]. */
object PeopleToPersonConverter {

  class PeopleRowToPerson(val row: PeopleRow) {
    /** Maps the database row onto the domain model. */
    def toPerson: Person =
      Person(Some(row.id), row.firstname, row.lastname, row.age.map(_.toInt),
        asOptionalLocation(row.latitude, row.longitude))
  }

  // A Location exists only when both coordinates are present.
  private[this] def asOptionalLocation(lat: Option[Double], lng: Option[Double]): Option[Location] =
    (lat, lng) match {
      case (Some(la), Some(lo)) => Some(Location(la, lo))
      case _ => None
    }

  implicit def addPeopleRowConversions(row: PeopleRow): PeopleRowToPerson = new PeopleRowToPerson(row)
}
| jdkendall/angos-slicknspray | src/main/scala/com/angos/slicknspray/Person.scala | Scala | mit | 1,727 |
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.zoo.feature.image
import com.intel.analytics.bigdl.transform.vision.image.{ImageFeature, PixelBytesToMat, augmentation}
import org.apache.logging.log4j.LogManager
/**
 * Transforms the raw pixel byte array stored in an [[ImageFeature]] into an
 * OpenCV Mat by delegating to BigDL's [[PixelBytesToMat]] transformer.
 *
 * @param byteKey key under which the pixel byte array is stored
 *                (defaults to ImageFeature.bytes)
 */
class ImagePixelBytesToMat(
      byteKey: String = ImageFeature.bytes) extends ImageProcessing {

  // Underlying BigDL transformer that performs the actual conversion.
  private val internalTransformer = new PixelBytesToMat(byteKey)

  // Batched path: forward the whole iterator to the underlying transformer.
  override def apply(prev: Iterator[ImageFeature]): Iterator[ImageFeature] = {
    internalTransformer.apply(prev)
  }

  // Single-feature path: forward one feature to the underlying transformer.
  override def transform(prev: ImageFeature): ImageFeature = {
    internalTransformer.transform(prev)
  }
}
/** Factory for [[ImagePixelBytesToMat]]. */
object ImagePixelBytesToMat {
  val logger = LogManager.getLogger(getClass)

  def apply(byteKey: String = ImageFeature.bytes): ImagePixelBytesToMat = {
    new ImagePixelBytesToMat(byteKey)
  }
}
| intel-analytics/analytics-zoo | zoo/src/main/scala/com/intel/analytics/zoo/feature/image/ImagePixelBytesToMat.scala | Scala | apache-2.0 | 1,496 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rdd
import scala.reflect.ClassTag
import org.apache.spark._
import org.apache.carbondata.spark.rdd.CarbonRDD
// Pairs a parent RDD with one of its partitions, so a downstream task can
// compute the parent partition itself.
case class DataLoadPartitionWrap[T: ClassTag](rdd: RDD[T], partition: Partition)

/**
 * Coalesces the partitions of `prev` onto the given node list for data loading,
 * emitting one [[DataLoadPartitionWrap]] per parent partition instead of the
 * parent's rows. Partition-to-node assignment is done by DataLoadPartitionCoalescer.
 */
class DataLoadCoalescedRDD[T: ClassTag](
  @transient var prev: RDD[T],
  nodeList: Array[String])
    extends CarbonRDD[DataLoadPartitionWrap[T]](prev.context, Nil) {

  override def getPartitions: Array[Partition] = {
    new DataLoadPartitionCoalescer(prev, nodeList).run
  }

  override def internalCompute(split: Partition,
      context: TaskContext): Iterator[DataLoadPartitionWrap[T]] = {
    new Iterator[DataLoadPartitionWrap[T]] {
      // Iterate over the parent partitions grouped into this coalesced partition.
      val iter = split.asInstanceOf[CoalescedRDDPartition].parents.iterator
      def hasNext = iter.hasNext
      def next: DataLoadPartitionWrap[T] = {
        DataLoadPartitionWrap(firstParent[T], iter.next())
      }
    }
  }

  override def getDependencies: Seq[Dependency[_]] = {
    // Narrow dependency: each coalesced partition depends on a fixed set of
    // parent partition indices.
    Seq(new NarrowDependency(prev) {
      def getParents(id: Int): Seq[Int] =
        partitions(id).asInstanceOf[CoalescedRDDPartition].parentsIndices
    })
  }

  override def clearDependencies() {
    super.clearDependencies()
    // Drop the parent reference so it can be garbage collected after checkpointing.
    prev = null
  }

  /**
   * Returns the preferred machine for the partition. If split is of type CoalescedRDDPartition,
   * then the preferred machine will be one which most parent splits prefer too.
   * @param partition
   * @return the machine most preferred by split
   */
  override def getPreferredLocations(partition: Partition): Seq[String] = {
    partition.asInstanceOf[CoalescedRDDPartition].preferredLocation.toSeq
  }
}
| HuaweiBigData/carbondata | integration/spark-common/src/main/scala/org/apache/spark/rdd/DataLoadCoalescedRDD.scala | Scala | apache-2.0 | 2,424 |
package spark.util
/**
 * An extractor object for parsing strings into integers.
 *
 * Enables pattern matches such as `case IntParam(n) => ...` against a
 * string; yields `None` when the string is not a valid signed integer.
 */
private[spark] object IntParam {
  def unapply(str: String): Option[Int] =
    try Some(str.toInt)
    catch { case _: NumberFormatException => None }
}
| koeninger/spark | core/src/main/scala/spark/util/IntParam.scala | Scala | bsd-3-clause | 265 |
package kornell.server.api
import javax.ws.rs._
import kornell.core.entity.{CertificateDetails, CourseDetailsEntityType}
import kornell.core.error.exception.EntityNotFoundException
import kornell.server.jdbc.repository.CertificatesDetailsRepo
import kornell.server.util.AccessDeniedErr
import kornell.server.util.Conditional.toConditional
/**
 * JAX-RS resource exposing endpoints for certificate details.
 *
 * Every mutating/reading endpoint is guarded by the `requiring`/`or`
 * conditional combinators so that only platform admins, institution admins
 * or publishers may use it.
 *
 * NOTE(review): `isPlatformAdmin` / `isInstitutionAdmin` / `isPublisher`
 * are not imported here — presumably provided by the enclosing package
 * scope; confirm.
 */
@Path("certificatesDetails")
class CertificatesDetailsResource {

  // Sub-resource locator: delegates /certificatesDetails/{uuid} to the
  // single-entity resource.
  @Path("{uuid}")
  def get(@PathParam("uuid") uuid: String) = CertificateDetailsResource(uuid)

  // Creates a new certificate-details record; returns the persisted entity.
  @POST
  @Consumes(Array(CertificateDetails.TYPE))
  @Produces(Array(CertificateDetails.TYPE))
  def create(certificateDetails: CertificateDetails): CertificateDetails = {
    CertificatesDetailsRepo.create(certificateDetails)
  }.requiring(isPlatformAdmin, AccessDeniedErr())
    .or(isInstitutionAdmin, AccessDeniedErr())
    .or(isPublisher, AccessDeniedErr())
    .get

  // Looks up the certificate details attached to a given entity
  // (entityType is parsed into a CourseDetailsEntityType).
  // Throws EntityNotFoundException when no record exists.
  @GET
  @Path("/{entityType}/{entityUUID}")
  @Produces(Array(CertificateDetails.TYPE))
  def getByEntityTypeAndUUID(@PathParam("entityType") entityType: String,
    @PathParam("entityUUID") entityUUID: String): CertificateDetails = {
    val certificatesDetailsRepo = CertificatesDetailsRepo.getForEntity(entityUUID, CourseDetailsEntityType.valueOf(entityType))
    certificatesDetailsRepo match {
      case Some(x) => x
      case _ => throw new EntityNotFoundException("notFound")
    }
  }.requiring(isPlatformAdmin, AccessDeniedErr())
    .or(isInstitutionAdmin, AccessDeniedErr())
    .or(isPublisher, AccessDeniedErr())
    .get
}
// Factory for the single-entity sub-resource.
// NOTE(review): the object name (plural) differs from the class it builds
// (CertificateDetailsResource, singular) — looks intentional; confirm.
object CertificatesDetailsResource {
  def apply(uuid: String) = new CertificateDetailsResource(uuid)
}
| Craftware/Kornell | kornell-api/src/main/scala/kornell/server/api/CertificatesDetailsResource.scala | Scala | apache-2.0 | 1,625 |
package controllers
import com.typesafe.plugin._
import play.Play
import play.api.Logger
import play.api.mvc._
import play.api.Play.current
/**
 * Play controller that toggles and sends server email alerts.
 *
 * NOTE(review): `alertsEnabled` and `numberEmailsSent` are mutable state
 * shared across requests without synchronisation — confirm that single-node,
 * low-contention use is acceptable.
 */
object EmailAlertsController extends Controller {

  // Whether sendAlert() actually dispatches mail; toggled via the actions below.
  var alertsEnabled = false
  // Count of alert emails dispatched since this object was loaded.
  var numberEmailsSent = 0

  // From/to addresses are read once from application configuration.
  val emailerFromAddress = Play.application.configuration.getString("emailerfromaddress")
  val emailerToAddress = Play.application.configuration.getString("emailertoaddress")

  // Reports "true"/"false" so callers can poll the current alert state.
  def isStarted = Action {
    Ok(alertsEnabled.toString)
  }

  def turnOnEmailAlerts = Action {
    Logger.info("Email Alerts On")
    alertsEnabled = true
    Ok("started")
  }

  def turnOffEmailAlerts = Action {
    Logger.info("Email Alerts Off")
    alertsEnabled = false
    Ok("stopped")
  }

  // Sends an alert only while alerts are enabled; otherwise a silent no-op.
  def sendAlert(alertText:String) = {
    if (alertsEnabled) {
      Logger.info("Email alert being sent")
      sendMesage(alertText)
      numberEmailsSent += 1
    }
  }

  // Builds and sends the email via MailerPlugin.
  // NOTE(review): method name is misspelled ("sendMesage"); renaming would
  // break any external callers, so it is documented rather than fixed here.
  def sendMesage(emailText:String) = {
    val mail = use[MailerPlugin].email
    mail.setSubject("BBKProject - Server Alert")
    mail.setRecipient(emailerToAddress)
    mail.setFrom(emailerFromAddress)
    //or use a list
    //mail.setBcc(List("Dummy <example@example.org>", "Dummy2 <example@example.org>"):_*)
    //mail.setFrom("Miles Davenport <miles.davenport@anotheremail.com>")
    //adds attachment
    //mail.addAttachment("attachment.pdf", new File("/some/path/attachment.pdf"))
    //adds inline attachment from byte array
    //val data: Array[Byte] = "data".getBytes
    //mail.addAttachment("data.txt", data, "text/plain", "A simple file", EmailAttachment.INLINE)
    //sends html
    //mail.sendHtml("<html>html</html>" )
    //sends text/text
    mail.send(emailText)
    //sends both text and html
    //mail.send( "text", "<html>html</html>")
    Logger.info("Server Email sent :" + emailText)
  }
}
} | chrischivers/London-Bus-Tracker-Play-Framework | app/controllers/EmailAlertsController.scala | Scala | mit | 1,823 |
package fr.caladan.slickgraph.demoscala
/**
 * Generate a timeseries that follows the Poisson distribution.
 *
 * Event times are produced by accumulating exponentially distributed
 * inter-arrival gaps (the standard construction of a Poisson process).
 */
object DataGenerator {

  /**
   * Draws one inter-arrival gap from an exponential distribution with the
   * given rate, via inverse-transform sampling of `Math.random`.
   * Always non-negative.
   */
  protected def poisson(rate: Double): Double =
    -Math.log(1.0 - Math.random) / rate

  /**
   * Generate a timeseries of a given size
   *
   * @param size Number of time points in the timeseries
   * @return Timeseries of monotonically non-decreasing event times
   */
  def generateTimeseries(size: Integer): Seq[Double] = {
    // Cumulative sum of the gaps; drop the leading 0.0 seed so exactly
    // `size` event times are returned. No `var`/`return` needed.
    (1 to size).scanLeft(0.0)((acc, _) => acc + poisson(.01)).tail
  }
}
| remyd/slickgraph | slickgraph-demo-scala/src/main/scala/fr/caladan/slickgraph/demoscala/DataGenerator.scala | Scala | mit | 575 |
package org.infinispan.spark.suites
import org.infinispan.spark.test.{Spark, WordCache}
import org.scalatest.{FunSuite, Matchers}
/**
 * Exercises the standard Spark RDD operators (count, sortByKey, filter,
 * cartesian, subtract, word-count map/reduce, ...) against an
 * Infinispan-backed RDD populated by the WordCache fixture.
 *
 * Self-type: concrete suites must mix in WordCache (provides `wordsCache`
 * and `getNumEntries`) and Spark (provides `sc`/`createInfinispanRDD`).
 */
abstract class RDDRetrievalTest extends FunSuite with Matchers {
  self: WordCache with Spark =>
  test("RDD Operators") {
    val infinispanRDD = createInfinispanRDD[Int, String]
    // Count
    infinispanRDD.count() shouldBe getNumEntries
    // Sort By Key
    val first = infinispanRDD.sortByKey().first()
    first._1 shouldBe 1
    // Count by Key — fixture stores exactly one value per key
    val map = infinispanRDD.countByKey()
    map.forall { case (_, v) => v == 1 } shouldBe true
    // Filter
    val filteredRDD = infinispanRDD.filter { case (_, v) => v.startsWith("a") }
    filteredRDD.values.collect().forall(_.startsWith("a")) shouldBe true
    // Collect and Sort
    val arrayOfTuple = infinispanRDD.collect().sorted
    arrayOfTuple.length shouldBe getNumEntries
    arrayOfTuple.head shouldBe((1, wordsCache.get(1)))
    // Max/Min — keys run 1..getNumEntries, so extremes are known
    val maxEntry = infinispanRDD.max()
    val minEntry = infinispanRDD.min()
    minEntry shouldBe((1, wordsCache get 1))
    maxEntry shouldBe((getNumEntries, wordsCache get getNumEntries))
    // RDD combination operations
    val data = Array(1, 2, 3, 4, 5)
    val aRDD = sc.parallelize(data)
    val cartesianRDD = aRDD.cartesian(infinispanRDD)
    cartesianRDD.count shouldBe getNumEntries * data.length
    val first5 = (1 to 5).map(i => (i, wordsCache.get(i)))
    val otherRDD = sc.makeRDD(first5)
    val subtractedRDD = infinispanRDD.subtract(otherRDD, numPartitions = 2)
    subtractedRDD.count shouldBe (getNumEntries - otherRDD.count)
    // Word count map reduce
    val resultRDD = infinispanRDD.map { case (_, v) => v }.flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _)
    val firstWordCount = resultRDD.first()
    // Cross-check the reduceByKey result against countByValue for one word
    val count = infinispanRDD.values.flatMap(_.split(" ")).countByValue().get(firstWordCount._1).get
    count shouldBe firstWordCount._2
  }
}
| infinispan/infinispan-spark | src/test/scala/org/infinispan/spark/suites/RDDRetrievalTest.scala | Scala | apache-2.0 | 2,008 |
package org.bitcoins.core.protocol.ln
import org.bitcoins.core.number.{UInt32, UInt64, UInt8}
import org.bitcoins.core.protocol.ln.LnParams.{
LnBitcoinMainNet,
LnBitcoinTestNet
}
import org.bitcoins.core.protocol.ln.channel.ShortChannelId
import org.bitcoins.core.protocol.ln.currency.{
MicroBitcoins,
MilliBitcoins,
MilliSatoshis
}
import org.bitcoins.core.protocol.ln.fee.{
FeeBaseMSat,
FeeProportionalMillionths
}
import org.bitcoins.core.protocol.ln.node.NodeId
import org.bitcoins.core.protocol.ln.routing.LnRoute
import org.bitcoins.core.protocol.{Bech32Address, P2PKHAddress, P2SHAddress}
import org.bitcoins.core.util.Bech32
import org.bitcoins.crypto._
import org.bitcoins.testkitcore.gen.ln.LnInvoiceGen
import org.bitcoins.testkitcore.util.BitcoinSUnitTest
import scodec.bits.ByteVector
/**
 * Tests for BOLT11 Lightning Network invoice encoding/decoding.
 *
 * Most cases mirror the worked examples from the BOLT11 specification:
 * an invoice is built from its parts (HRP, timestamp, tagged fields,
 * signature), serialized, compared against the expected bech32 string, and
 * then round-tripped through LnInvoice.fromStringT. Additional cases cover
 * signature validity, unknown tags, public-key recovery and regressions.
 */
class LnInvoiceUnitTest extends BitcoinSUnitTest {
  behavior of "LnInvoice"

  implicit override val generatorDrivenConfig: PropertyCheckConfiguration =
    generatorDrivenConfigNewCode

  // Shared fixtures: human-readable parts for the various networks/amounts,
  // the BOLT11 example timestamp and payment hash, and a description hash.
  val hrpEmpty = LnHumanReadablePart(LnBitcoinMainNet)

  val hrpMicro =
    LnHumanReadablePart(LnBitcoinMainNet, Some(MicroBitcoins(2500)))

  val hrpMilli = LnHumanReadablePart(LnBitcoinMainNet, Some(MilliBitcoins(20)))

  val hrpTestNetMilli =
    LnHumanReadablePart(LnBitcoinTestNet, Some(MilliBitcoins(20)))

  val time = UInt64(1496314658)

  val paymentHash = Sha256Digest.fromHex(
    "0001020304050607080900010203040506070809000102030405060708090102")

  val paymentTag = LnTag.PaymentHashTag(paymentHash)

  val description = {
    ("One piece of chocolate cake, one icecream cone, one pickle, one slice of swiss cheese, " +
      "one slice of salami, one lollypop, one piece of cherry pie, one sausage, one cupcake, " +
      "and one slice of watermelon").getBytes()
  }

  val descriptionHash = CryptoUtil.sha256(ByteVector(description))

  // NOTE(review): name is misspelled ("descpription") but referenced by
  // several tests below; renaming is a mechanical follow-up.
  val descpriptionHashTag = Right(LnTag.DescriptionHashTag(descriptionHash))

  it must "parse BOLT11 example 1" in {
    //BOLT11 Example #1

    val descriptionTagE =
      Left(LnTag.DescriptionTag("Please consider supporting this project"))

    val lnTags = LnTaggedFields(paymentHash = paymentTag,
                                descriptionOrHash = descriptionTagE)

    val sigData =
      "6c6e62630b25fe64410d00004080c1014181c20240004080c1014181c20240004080c1014181c202404081a1fa83632b0b9b29031b7b739b4b232b91039bab83837b93a34b733903a3434b990383937b532b1ba0"

    /* val hashSigData = Sha256Digest.fromHex(
      "c3d4e83f646fa79a393d75277b1d858db1d1f7ab7137dcb7835db2ecd518e1c9")*/

    val signature = ECDigitalSignature.fromRS(
      "38ec6891345e204145be8a3a99de38e98a39d6a569434e1845c8af7205afcfcc7f425fcd1463e93c32881ead0d6e356d467ec8c02553f9aab15e5738b11f127f")

    val version = UInt8.zero
    val lnSig = LnInvoiceSignature(version, signature)

    val invoice = LnInvoice(hrpEmpty, time, lnTags, lnSig)

    invoice.signatureData.toHex must be(sigData)

    val serialized = invoice.toString
    serialized must be(
      "lnbc1pvjluezpp5qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypqdpl2pkx2ctnv5sxxmmwwd5kgetjypeh2ursdae8g6twvus8g6rfwvs8qun0dfjkxaq8rkx3yf5tcsyz3d73gafnh3cax9rn449d9p5uxz9ezhhypd0elx87sjle52x86fux2ypatgddc6k63n7erqz25le42c4u4ecky03ylcqca784w")

    val deserialized = LnInvoice.fromStringT(serialized)
    deserialized.get.toString must be(invoice.toString)
  }

  it must "parse BOLT11 example 2" in {
    //BOLT11 Example #2
    val descriptionTagE = Left(LnTag.DescriptionTag("1 cup coffee"))
    val expiryTimeTag = LnTag.ExpiryTimeTag(UInt32(60))
    val lnTags = LnTaggedFields(paymentTag,
                                descriptionOrHash = descriptionTagE,
                                expiryTime = Some(expiryTimeTag))

    val signature = ECDigitalSignature.fromRS(
      "e89639ba6814e36689d4b91bf125f10351b55da057b00647a8dabaeb8a90c95f160f9d5a6e0f79d1fc2b964238b944e2fa4aa677c6f020d466472ab842bd750e")
    val version = UInt8.one
    val lnSig = LnInvoiceSignature(version, signature)

    val invoice = LnInvoice(hrpMicro, time, lnTags, lnSig)

    val serialized = invoice.toString
    serialized must be(
      "lnbc2500u1pvjluezpp5qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypqdq5xysxxatsyp3k7enxv4jsxqzpuaztrnwngzn3kdzw5hydlzf03qdgm2hdq27cqv3agm2awhz5se903vruatfhq77w3ls4evs3ch9zw97j25emudupq63nyw24cg27h2rspfj9srp")

    val deserialized = LnInvoice.fromStringT(serialized)
    deserialized.get.toString must be(invoice.toString)
  }

  it must "parse BOLT11 example 3" in {
    //BOLT11 Example #3 - Description field does not encode correctly due to Japanese letters
    val descriptionTagE = Left(LnTag.DescriptionTag("ナンセンス 1杯"))
    val expiryTag = LnTag.ExpiryTimeTag(UInt32(60))
    val lnTags = LnTaggedFields(
      paymentHash = paymentTag,
      secret = None,
      descriptionOrHash = descriptionTagE,
      nodeId = None,
      expiryTime = Some(expiryTag),
      cltvExpiry = None,
      fallbackAddress = None,
      routingInfo = None,
      features = None
    )

    val signature = ECDigitalSignature.fromRS(
      "259f04511e7ef2aa77f6ff04d51b4ae9209504843e5ab9672ce32a153681f687515b73ce57ee309db588a10eb8e41b5a2d2bc17144ddf398033faa49ffe95ae6")
    val version = UInt8.zero
    val lnSig = LnInvoiceSignature(version, signature)

    val invoice = LnInvoice(hrpMicro, time, lnTags, lnSig)

    val serialized = invoice.toString
    invoice.toString must be(
      "lnbc2500u1pvjluezpp5qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypqdpquwpc4curk03c9wlrswe78q4eyqc7d8d0xqzpuyk0sg5g70me25alkluzd2x62aysf2pyy8edtjeevuv4p2d5p76r4zkmneet7uvyakky2zr4cusd45tftc9c5fh0nnqpnl2jfll544esqchsrny")

    val deserialized = LnInvoice.fromStringT(serialized)
    deserialized.get must be(invoice)
  }

  it must "parse BOLT11 example 4" in {
    //BOLT11 Example #4
    val descriptionHash = Sha256Digest.fromHex(
      "3925b6f67e2c340036ed12093dd44e0368df1b6ea26c53dbe4811f58fd5db8c1")
    val descriptionHashTagE = Right(LnTag.DescriptionHashTag(descriptionHash))
    val lnTags = LnTaggedFields(paymentHash = paymentTag,
                                descriptionOrHash = descriptionHashTagE,
                                None,
                                None,
                                None,
                                None,
                                None)

    val signature = ECDigitalSignature.fromRS(
      "c63486e81f8c878a105bc9d959af1973854c4dc552c4f0e0e0c7389603d6bdc67707bf6be992a8ce7bf50016bb41d8a9b5358652c4960445a170d049ced4558c")
    val version = UInt8.zero
    val lnSig = LnInvoiceSignature(version, signature)

    val invoice = LnInvoice(hrpMilli, time, lnTags, lnSig)

    val serialized = invoice.toString
    invoice.toString must be(
      "lnbc20m1pvjluezpp5qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypqhp58yjmdan79s6qqdhdzgynm4zwqd5d7xmw5fk98klysy043l2ahrqscc6gd6ql3jrc5yzme8v4ntcewwz5cnw92tz0pc8qcuufvq7khhr8wpald05e92xw006sq94mg8v2ndf4sefvf9sygkshp5zfem29trqq2yxxz7")

    val deserialized = LnInvoice.fromStringT(serialized)
    deserialized.get must be(invoice)
  }

  it must "parse BOLT11 example 5" in {
    //BOLT11 Example #5
    val descriptionHash = Sha256Digest.fromHex(
      "3925b6f67e2c340036ed12093dd44e0368df1b6ea26c53dbe4811f58fd5db8c1")
    val descriptionHashTagE = Right(LnTag.DescriptionHashTag(descriptionHash))

    val fallbackAddr = LnTag.FallbackAddressTag(
      P2PKHAddress.fromString("mk2QpYatsKicvFVuTAQLBryyccRXMUaGHP"))

    val lnTags = LnTaggedFields(paymentHash = paymentTag,
                                descriptionOrHash = descriptionHashTagE,
                                fallbackAddress = Some(fallbackAddr))

    val signature = ECDigitalSignature.fromRS(
      "b6c42b8a61e0dc5823ea63e76ff148ab5f6c86f45f9722af0069c7934daff70d5e315893300774c897995e3a7476c8193693d144a36e2645a0851e6ebafc9d0a")
    val version = UInt8.one
    val lnSig = LnInvoiceSignature(version, signature)

    val invoice = LnInvoice(hrpTestNetMilli, time, lnTags, lnSig)

    val serialized = invoice.toString
    serialized must be(
      "lntb20m1pvjluezpp5qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypqhp58yjmdan79s6qqdhdzgynm4zwqd5d7xmw5fk98klysy043l2ahrqsfpp3x9et2e20v6pu37c5d9vax37wxq72un98kmzzhznpurw9sgl2v0nklu2g4d0keph5t7tj9tcqd8rexnd07ux4uv2cjvcqwaxgj7v4uwn5wmypjd5n69z2xm3xgksg28nwht7f6zsp2mh7qm")

    //In example #5, the order in which tags are encoded in the invoice has been changed to demonstrate the ability to move tags as needed.
    //For that reason, the example #5 output we are matching against has been modified to fit the order in which we encode our invoices.
    //TODO: Add checksum data to check

    val deserialized = LnInvoice.fromStringT(serialized)

    deserialized.get.toString must be(serialized)
  }

  it must "parse BOLT11 example 6" in {
    val expected =
      "lnbc20m1pvjluezpp5qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypqhp58yjmdan79s6qqdhdzgynm4zwqd5d7xmw5fk98klysy043l2ahrqsfpp3qjmp7lwpagxun9pygexvgpjdc4jdj85fr9yq20q82gphp2nflc7jtzrcazrra7wwgzxqc8u7754cdlpfrmccae92qgzqvzq2ps8pqqqqqqpqqqqq9qqqvpeuqafqxu92d8lr6fvg0r5gv0heeeqgcrqlnm6jhphu9y00rrhy4grqszsvpcgpy9qqqqqqgqqqqq7qqzqj9n4evl6mr5aj9f58zp6fyjzup6ywn3x6sk8akg5v4tgn2q8g4fhx05wf6juaxu9760yp46454gpg5mtzgerlzezqcqvjnhjh8z3g2qqdhhwkj"

    val fallbackAddr = LnTag.FallbackAddressTag(
      P2PKHAddress.fromString("1RustyRX2oai4EYYDpQGWvEL62BBGqN9T"))

    val signature = ECDigitalSignature.fromRS(
      "91675cb3fad8e9d915343883a49242e074474e26d42c7ed914655689a8074553733e8e4ea5ce9b85f69e40d755a55014536b12323f8b220600c94ef2b9c51428")
    val lnInvoiceSig =
      LnInvoiceSignature(recoverId = UInt8.zero, signature = signature)

    // Two-hop routing hint, as given in the spec example.
    val route1 = LnRoute(
      pubkey = ECPublicKey.fromHex(
        "029e03a901b85534ff1e92c43c74431f7ce72046060fcf7a95c37e148f78c77255"),
      shortChannelID = ShortChannelId.fromHex("0102030405060708"),
      feeBaseMsat = FeeBaseMSat(MilliSatoshis.one),
      feePropMilli = FeeProportionalMillionths(UInt32(20)),
      cltvExpiryDelta = 3
    )

    val route2 = LnRoute(
      pubkey = ECPublicKey.fromHex(
        "039e03a901b85534ff1e92c43c74431f7ce72046060fcf7a95c37e148f78c77255"),
      shortChannelID = ShortChannelId.fromHex("030405060708090a"),
      feeBaseMsat = FeeBaseMSat(MilliSatoshis(2)),
      feePropMilli = FeeProportionalMillionths(UInt32(30)),
      cltvExpiryDelta = 4
    )

    val route = LnTag.RoutingInfo(Vector(route1, route2))

    val lnTags = LnTaggedFields(paymentHash = paymentTag,
                                descriptionOrHash = descpriptionHashTag,
                                fallbackAddress = Some(fallbackAddr),
                                routingInfo = Some(route))

    val lnInvoice = LnInvoice(hrp = hrpMilli,
                              timestamp = time,
                              lnTags = lnTags,
                              signature = lnInvoiceSig)

    val serialized = lnInvoice.toString
    serialized must be(expected)

    val deserialized = LnInvoice.fromStringT(serialized)

    deserialized.get.toString must be(serialized)
  }

  it must "parse BOLT11 example 7 (p2sh fallback addr)" in {
    val expected =
      "lnbc20m1pvjluezpp5qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypq" +
        "hp58yjmdan79s6qqdhdzgynm4zwqd5d7xmw5fk98klysy043l2ahrqsfppj3a24vwu6r8ejrss3axul8rxl" +
        "dph2q7z9kmrgvr7xlaqm47apw3d48zm203kzcq357a4ls9al2ea73r8jcceyjtya6fu5wzzpe50zrge6ulk" +
        "4nvjcpxlekvmxl6qcs9j3tz0469gqsjurz5"

    val fallbackAddr = LnTag.FallbackAddressTag(
      P2SHAddress.fromString("3EktnHQD7RiAE6uzMj2ZifT9YgRrkSgzQX"))

    val lnTags = LnTaggedFields(paymentHash = paymentTag,
                                descriptionOrHash = descpriptionHashTag,
                                fallbackAddress = Some(fallbackAddr))

    val signature = ECDigitalSignature.fromRS(
      "b6c6860fc6ff41bafba1745b538b6a7c6c2c0234f76bf817bf567be88cf2c632492c9dd279470841cd1e21a33ae7ed59b25809bf9b3366fe81881651589f5d15")
    val lnInvoiceSig =
      LnInvoiceSignature(signature = signature, recoverId = UInt8.zero)

    val lnInvoice = LnInvoice(hrp = hrpMilli,
                              timestamp = time,
                              lnTags = lnTags,
                              signature = lnInvoiceSig)

    val serialized = lnInvoice.toString
    lnInvoice.toString must be(expected)

    val deserialized = LnInvoice.fromStringT(serialized)
    deserialized.get.toString must be(serialized)
  }

  it must "parse BOLT11 example 7 (p2wpkh fallback addr)" in {
    //this test does not pass because bitcoin-s does not support p2wpkh currently
    val expected = "lnbc20m1pvjluez" +
      "pp5qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypq" +
      "hp58yjmdan79s6qqdhdzgynm4zwqd5d7xmw5fk98klysy043l2ahrqs" +
      "fppqw508d6qejxtdg4y5r3zarvary0c5xw7kepvrhrm9s57hejg0p66" +
      "2ur5j5cr03890fa7k2pypgttmh4897d3raaq85a293e9jpuqwl0rnfu" +
      "wzam7yr8e690nd2ypcq9hlkdwdvycqe4x4ch"

    val fallbackAddr = LnTag.FallbackAddressTag(
      Bech32Address
        .fromString("bc1qw508d6qejxtdg4y5r3zarvary0c5xw7kv8f3t4"))

    val lnTags = LnTaggedFields(paymentHash = paymentTag,
                                descriptionOrHash = descpriptionHashTag,
                                fallbackAddress = Some(fallbackAddr))

    val signature = ECDigitalSignature.fromRS(
      "c8583b8f65853d7cc90f0eb4ae0e92a606f89caf4f7d65048142d7bbd4e5f3623ef407a75458e4b20f00efbc734f1c2eefc419f3a2be6d51038016ffb35cd613")
    val lnInvoiceSig =
      LnInvoiceSignature(signature = signature, recoverId = UInt8.zero)

    val lnInvoice = LnInvoice(hrp = hrpMilli,
                              timestamp = time,
                              lnTags = lnTags,
                              signature = lnInvoiceSig)

    val serialized = lnInvoice.toString
    serialized must be(expected)

    val deserialized = LnInvoice.fromStringT(serialized)
    deserialized.get must be(lnInvoice)
  }

  it must "parse BOLT11 example 8 (p2wsh fallback addr)" in {
    val expected = "lnbc20m1pvjluez" +
      "pp5qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypq" +
      "hp58yjmdan79s6qqdhdzgynm4zwqd5d7xmw5fk98klysy043l2ahrqs" +
      "fp4qrp33g0q5c5txsp9arysrx4k6zdkfs4nce4xj0gdcccefvpysxf3" +
      "q28j0v3rwgy9pvjnd48ee2pl8xrpxysd5g44td63g6xcjcu003j3qe8" +
      "878hluqlvl3km8rm92f5stamd3jw763n3hck0ct7p8wwj463cqm8cxgy"

    val fallbackAddr = LnTag.FallbackAddressTag(
      Bech32Address
        .fromString(
          "bc1qrp33g0q5c5txsp9arysrx4k6zdkfs4nce4xj0gdcccefvpysxf3qccfmv3"))

    val lnTags = LnTaggedFields(paymentHash = paymentTag,
                                descriptionOrHash = descpriptionHashTag,
                                fallbackAddress = Some(fallbackAddr))

    val signature = ECDigitalSignature.fromRS(
      "51e4f6446e410a164a6da9f39507e730c26241b4456ab6ea28d1b12c71ef8ca20c9cfe3dffc07d9f8db671ecaa4d20beedb193bda8ce37c59f85f82773a55d47")
    val lnInvoiceSig =
      LnInvoiceSignature(signature = signature, recoverId = UInt8.zero)

    val lnInvoice = LnInvoice(hrp = hrpMilli,
                              timestamp = time,
                              lnTags = lnTags,
                              signature = lnInvoiceSig)

    val serialized = lnInvoice.toString
    lnInvoice.toString must be(expected)

    val deserialized = LnInvoice.fromStringT(serialized)
    deserialized.get must be(lnInvoice)
  }

  it must "parse BOLT11 example 10" in {
    val expected =
      "lnbc25m1pvjluezpp5qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypqdq5vdhkven9v5sxyetpdeessp5zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zygs9q5sqqqqqqqqqqqqqqqpqqq4u9s93jtgysm3mrwll70zr697y3mf902hvxwej0v7c62rsltw83ng0pu8w3j230sluc5gxkdmm9dvpy9y6ggtjd2w544mzdrcs42t7sqdkcy8h"

    val signature = ECDigitalSignature.fromHex(
      "3045022100af0b02c64b4121b8ec6efffcf10f45f123b495eabb0cecc9ecf634a1c3eb71e30220343c3c3ba32545f0ff31441acddecad60485269085c9aa752b5d89a3c42aa5fa")
    val lnInvoiceSig =
      LnInvoiceSignature(recoverId = UInt8.zero, signature = signature)

    val descriptionTag = LnTag.DescriptionTag("coffee beans")

    val paymentSecret = Some(
      LnTag.SecretTag(PaymentSecret.fromHex(
        "1111111111111111111111111111111111111111111111111111111111111111")))

    val features = Some(
      LnTag.FeaturesTag(ByteVector.fromValidHex("800000000000000000000800")))

    val lnTags = LnTaggedFields(
      Vector(paymentTag, descriptionTag, paymentSecret.get, features.get))

    val hrpMilli =
      LnHumanReadablePart(LnBitcoinMainNet, Some(MilliBitcoins(25)))

    val lnInvoice = LnInvoice(hrp = hrpMilli,
                              timestamp = time,
                              lnTags = lnTags,
                              signature = lnInvoiceSig)

    val serialized = lnInvoice.toString

    // TODO uncomment when https://github.com/bitcoin-s/bitcoin-s/issues/1064 is fixed
    // serialized must be(expected)
    assert(serialized == expected)

    val deserialized = LnInvoice.fromStringT(serialized)
    deserialized.get.toString must be(serialized)
  }

  it must "deserialize and reserialize a invoice with a explicity expiry time" in {
    //from eclair
    val bech32 =
      "lnbcrt1m1pd6ssf3pp5mqcepx6yzx7uu0uagw5x3c7kqhnpwr3mfn844hjux8tlza6ztr7sdqqxqrrss0rl3gzer9gfc54fs84rd4xk6g8nf0syharnnyljc9za933memdzxrjz0v2v94ntuhdxduk3z0nlmpmznryvvvl4gzgu28kjkm4ey98gpmyhjfa"

    val invoiceT = LnInvoice.fromStringT(bech32)

    val deserialized = invoiceT.get.toString

    deserialized must be(bech32)
  }

  // Property-based round-trip checks using the project's generators.
  it must "have serialization symmetry for LnHrps" in {
    forAll(LnInvoiceGen.lnHrp) { hrp =>
      LnHumanReadablePart.fromString(hrp.toString) == hrp
    }
  }

  it must "have serialization symmetry for the invoices" in {
    forAll(LnInvoiceGen.lnInvoice) { invoice =>
      LnInvoice.fromStringT(invoice.toString).get == invoice
    }
  }

  it must "fail to create an invoice if the digital signature is invalid" in {
    intercept[IllegalArgumentException] {
      val sig = EmptyDigitalSignature
      val tags =
        LnTaggedFields(paymentHash = paymentTag,
                       descriptionOrHash =
                         Right(LnTag.DescriptionHashTag(descriptionHash)))
      val lnSig = LnInvoiceSignature(recoverId = UInt8.zero, signature = sig)
      LnInvoice(hrp = hrpEmpty,
                timestamp = UInt64.zero,
                lnTags = tags,
                signature = lnSig)
    }
  }

  it must "create a valid digital signature for an invoice" in {
    val privKey = ECPrivateKey.freshPrivateKey

    val tags =
      LnTaggedFields(paymentHash = paymentTag,
                     descriptionOrHash =
                       Right(LnTag.DescriptionHashTag(descriptionHash)))

    val invoice =
      LnInvoice.build(hrp = hrpEmpty, lnTags = tags, privateKey = privKey)

    assert(invoice.isValidSignature)
  }

  it must "handle the weird case if sigdata being exactly on a byte boundary, which means we need to pad the sigdata with a zero byte" in {
    //https://github.com/bitcoin-s/bitcoin-s-core/issues/277
    val expected =
      "03fad6c016f998e85d03ce0b7358b3b6a38ebc7fd60030340d0245fea0d95c8c12"
    val expectedHash =
      "6b80b9b7320bc1203534e78f86b6a32945c35ab464e475ed00e92c7b98755f9d"
    val str =
      "lntb100n1pwz34mzpp5dwqtndejp0qjqdf5u78cdd4r99zuxk45vnj8tmgqayk8hxr4t7wsd890v3xgatjv96xjmmwygarzvpsxqczcgnrdpskumn9ds3r5gn5wfskgetnygkzyetkv4h8gg36yfeh2cnnvdexjcn9ygkzyat4d9jzyw3zxqcrzvfjxgenxtf5xs6n2tfkxcmnwtfc8qunjttpv93xycmrv3jx2etxvc3zcgn90p3ksctwvajjyw3zvf5hgenfdejhsg3vyfehjmtzdakzyw3zgf2yx42ngs386xqrrssqr6xn7dtkyxk0rhl98k3esksst578uwhud5glp9svq24ddwlgqwz6v9uf7mqljrj07xl87ufrn4yfplrsz2vpmc9xwv44634h54dq3sq257hh4"

    val invoice = LnInvoice.fromStringT(str).get

    invoice.lnTags.paymentHash.hash.hex must be(expectedHash)

    invoice.signature.hex must be(
      "00f469f9abb10d678eff29ed1cc2d082e9e3f1d7e3688f84b0601556b5df401c2d30bc4fb60fc8727f8df3fb891cea4487e38094c0ef0533995aea35bd2ad04600")

    invoice.signature.signature.hex must be(
      "3044022000f469f9abb10d678eff29ed1cc2d082e9e3f1d7e3688f84b0601556b5df401c02202d30bc4fb60fc8727f8df3fb891cea4487e38094c0ef0533995aea35bd2ad046")

    invoice.lnTags.description.get.string must be(
      "{\\"duration\\":10000,\\"channel\\":\\"trades\\",\\"event\\":\\"subscribe\\",\\"uuid\\":\\"00112233-4455-6677-8899-aabbccddeeff\\",\\"exchange\\":\\"bitfinex\\",\\"symbol\\":\\"BTCUSD\\"}")

    invoice.timestamp must be(UInt64(1546180450))

    invoice.amount.get.toMSat must be(MilliSatoshis(10000))

    invoice.lnTags.expiryTime.get.u32 must be(UInt32(3600))

    invoice.isValidSignature must be(true)

    invoice.signatureData.toHex must be(
      "6c6e74623130306e0b851aec410d1ae02e6dcc82f0480d4d39e3e1ada8ca5170d6ad19391d7b403a4b1ee61d57e741a72bd91323ab930ba34b7b7111d1898181818161131b430b73732b6111d113a3930b232b991161132bb32b73a111d1139bab139b1b934b1329116113abab4b2111d111818189899191999969a1a1a9a969b1b1b9b969c1c1c9c96b0b0b13131b1b23232b2b33311161132bc31b430b733b2911d113134ba3334b732bc11161139bcb6b137b6111d11212a21aaa9a2113e8c018e100")

    invoice.toString must be(str)

    invoice.nodeId.hex must be(expected)
  }

  it must "parse secret and features tags" in {
    // generated by Eclair 3.3.0-SNAPSHOT
    val serialized =
      "lnbcrt10n1p0px7lfpp5ghc2y7ttnwy58jx0dfcsdxy7ey0qfryn0wcmm04ckud0qw73kt9sdq9vehk7xqrrss9qypqqqsp5qlf6efygd26y03y66jdqqfmlxthplnu5cc8648fgn88twhpyvmgqg9k5kd0k8vv3xvvqpkhkt9chdl579maq45gvck4g0yd0eggmvfkzgvjmwn29r99p57tgyl3l3s82hlc4e97at55xl5lyzpfk6n36yyqqxeem8q"

    val invoice = LnInvoice.fromStringT(serialized).get

    invoice.lnTags.secret must be(
      Some(LnTag.SecretTag(PaymentSecret.fromHex(
        "07d3aca4886ab447c49ad49a00277f32ee1fcf94c60faa9d2899ceb75c2466d0"))))

    invoice.lnTags.features must be(
      Some(LnTag.FeaturesTag(ByteVector.fromValidHex("0800"))))

    invoice.toString must be(serialized)
  }

  it must "ensure that the malleability of the checksum in bech32 strings cannot cause a signature to become valid" in {
    val strWithError =
      "lnbc2500u1pvjluezpp5qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypqdq5xysxxatsyp3k7enxv4jsxqzpuaztrnwngzn3kdzw5hydlzf03qdgm2hdq27cqv3agm2awhz5se903vruatfhq77w3ls4evs3ch9zw97j25emudupq63nyw24cg27h2rspfj9srqqqqqp"

    assert(LnInvoice.fromStringT(strWithError).isFailure)
  }

  it must "parse unknown tags" in {
    val privateKeyHex =
      "180cb41c7c600be951b5d3d0a7334acc7506173875834f7a6c4c786a28fcbb19"
    val key: ECPrivateKey = ECPrivateKey(privateKeyHex)

    val unknownTag = LnTag.UnknownTag(
      LnTagPrefix.Unknown('z'),
      Bech32.from8bitTo5bit(ByteVector.fromValidHex("cafebabe")))

    val descriptionTag =
      LnTag.DescriptionTag("Please consider supporting this project")

    val tags =
      LnTaggedFields(Vector(paymentTag, descriptionTag, unknownTag))

    val expected = LnInvoice(hrpTestNetMilli, time, tags, key)

    val serialized = expected.toString

    val deserialized = LnInvoice.fromStringT(serialized).get

    deserialized must be(expected)
    deserialized.toString must be(serialized)
    deserialized.lnTags.tags.size must be(3)
    deserialized.lnTags.tags.last must be(unknownTag)
    deserialized.lnTags must be(expected.lnTags)
    deserialized.nodeId.bytes must be(key.publicKey.bytes)
  }

  it must "recover public keys" in {
    // Verifies that the node's public key can be recovered from the
    // invoice's recoverable signature.
    def testInvoice(str: String, nodeId: String): Unit = {
      val i = LnInvoice.fromStringT(str).get
      i.toString must be(str)
      i.nodeId must be(NodeId.fromHex(nodeId))
      ()
    }

    testInvoice(
      "lnbcrt500p1p0zk8umpp5wyc4s0h4jtu5lapsr4p2nevlpck7l5xec6rpjdv2a7r992vx0ctqdq9vehk7xqrrssfs6t6nyfutf4j8wzq6mf82lxefj5zadvw8fnjw6ev38y4578734zl94jfwnsfqdyt67da7g8shvhej0rkysymy260xyjtdv2dvhmvmgpdg6qjw",
      "03033ced5a027b2d1d0224f94cbf6983243f4ccbe07001c20b9ef2db3f116f82dc"
    )

    testInvoice(
      "lnbcrt1p0zk0pepp5f86agc2ue0lt5wvx96fczj9fhzy3swlassdrru7w23n7xq8zsnfqdq8w3jhxaqxqrrss2znyruaauwel7qu5ndrrydfpl9nrwk2lry8k898xguenakge0yrrdk37jcmvanv2dccmmkzhe9ncj0v84chpftrrravp52hyna8dm8qpegw8f8",
      "039c14dd6dbea913d3fa21b8aaa328cbacb9d6f1f967c3ead9a895c857958ed38a"
    )
  }

  it must "parse a signet invoice" in {
    val str =
      "lntbs1ps5um52pp562zjpdyec3hjga5sdeh90v09km7ugasretujf3wwj3ueutyujz3sdqqcqzpgxqyz5vqsp5an8lqngrz6w3vd449eqqtvwu2x4v9ltdf9r6hpwxf4x404fhv6zs9qyyssqfdmmsuldkyy7v29kwuuc9egwkthtf3aaf79p3w93ddffq65fs5zs6vys9et89u0yv5kearpnuyttsvufzjnsnup2ehp4nteelz39exqpgd78w8"

    assert(LnInvoice.fromStringT(str).isSuccess)
  }

  it must "create an invoice above the old limit" in {
    val str =
      "lntbs42949672960n1p3qavpxpp59mdvu0tw0mzf2kl96ddfcm0597nuxwp42tzsehrn66jrmlg7nu8sdqqcqzpgxqyz5vqsp5y70pg45suf546mses9cp54fk4uke2rmppk9dy4926prhe5v54g4s9qyyssqmtpasu8map7hegdxfzmgusuqnkrssy6m34wcup3lmu0mae3xtht5d49204a3wm9wpklalx49g33cer5gqfractwt79vrc8p7wlpsdlqp296km0"

    assert(LnInvoice.fromStringT(str).isSuccess)
  }
}
| bitcoin-s/bitcoin-s | core-test/src/test/scala/org/bitcoins/core/protocol/ln/LnInvoiceUnitTest.scala | Scala | mit | 24,975 |
package com.kifi.franz
import com.amazonaws.services.sqs.AmazonSQSAsync
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.Future
import scala.language.implicitConversions
/**
 * In-memory stub of [[SQSQueue]] for tests: performs no AWS calls.
 *
 * The SQS client and codec implicits are deliberately `null`/no-ops — they
 * are never exercised because every queue operation is overridden to
 * short-circuit.
 */
trait FakeSQSQueue[T] extends SQSQueue[T] {
  // Never dereferenced; all operations below bypass the client.
  protected val sqs: AmazonSQSAsync = null
  protected val createIfNotExists: Boolean = false
  val queue: QueueName = QueueName("fake")
  // Stub codecs; nothing is ever (de)serialized in the fake.
  protected implicit def asString(obj: T): String = null
  protected implicit def fromString(s: String): T = null.asInstanceOf[T]
  override def initQueueUrl(): String = ""
  // Pretends the message was sent; attributes and delay are ignored.
  override def send(msg: T, messageAttributes: Option[Map[String,String]], delay: Option[Int] = None): Future[MessageId] = Future.successful(MessageId(""))
  // Always yields an empty batch, i.e. the queue appears permanently empty.
  override protected def nextBatchRequestWithLock(maxBatchSize: Int, lockTimeout: FiniteDuration): Future[Seq[SQSMessage[T]]] = Future.successful(Seq.empty)
}
| stkem/franz | src/main/scala/com/kifi/franz/FakeSQSQueue.scala | Scala | mit | 871 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.consumer
import scala.collection._
import kafka.utils.Logging
import kafka.serializer._
/**
* Main interface for consumer
*/
/**
 * Main interface for consumer.
 *
 * Implementations manage the connection to the brokers and hand out
 * [[KafkaStream]]s from which messages are consumed.
 */
trait ConsumerConnector {

  /**
   *  Create a list of MessageStreams for each topic.
   *
   *  @param topicCountMap  a map of (topic, #streams) pair
   *  @return a map of (topic, list of  KafkaStream) pairs.
   *          The number of items in the list is #streams. Each stream supports
   *          an iterator over message/metadata pairs.
   */
  def createMessageStreams(topicCountMap: Map[String,Int]): Map[String, List[KafkaStream[Array[Byte],Array[Byte]]]]

  /**
   *  Create a list of MessageStreams for each topic.
   *
   *  @param topicCountMap  a map of (topic, #streams) pair
   *  @param keyDecoder Decoder to decode the key portion of the message
   *  @param valueDecoder Decoder to decode the value portion of the message
   *  @return a map of (topic, list of  KafkaStream) pairs.
   *          The number of items in the list is #streams. Each stream supports
   *          an iterator over message/metadata pairs.
   */
  def createMessageStreams[K,V](topicCountMap: Map[String,Int],
                                keyDecoder: Decoder[K],
                                valueDecoder: Decoder[V])
    : Map[String,List[KafkaStream[K,V]]]

  /**
   *  Create a list of message streams for all topics that match a given filter.
   *
   *  @param topicFilter Either a Whitelist or Blacklist TopicFilter object.
   *  @param numStreams Number of streams to return
   *  @param keyDecoder Decoder to decode the key portion of the message
   *  @param valueDecoder Decoder to decode the value portion of the message
   *  @return a list of KafkaStream each of which provides an
   *          iterator over message/metadata pairs over allowed topics.
   */
  def createMessageStreamsByFilter[K,V](topicFilter: TopicFilter,
                                        numStreams: Int = 1,
                                        keyDecoder: Decoder[K] = new DefaultDecoder(),
                                        valueDecoder: Decoder[V] = new DefaultDecoder())
    : Seq[KafkaStream[K,V]]

  /**
   *  Commit the offsets of all broker partitions connected by this connector.
   *
   *  @param retryOnFailure whether a failed commit attempt should be retried
   */
  def commitOffsets(retryOnFailure: Boolean = true)

  /**
   *  Shut down the connector
   */
  def shutdown()
}
/**
 * Factory for high-level consumer connectors.
 */
object Consumer extends Logging {

  /**
   * Create a ConsumerConnector for the Scala API.
   *
   * @param config at the minimum, need to specify the groupid of the consumer and the zookeeper
   *               connection string zookeeper.connect.
   */
  def create(config: ConsumerConfig): ConsumerConnector =
    new ZookeeperConsumerConnector(config)

  /**
   * Create a ConsumerConnector for the Java API.
   *
   * @param config at the minimum, need to specify the groupid of the consumer and the zookeeper
   *               connection string zookeeper.connect.
   */
  def createJavaConsumerConnector(config: ConsumerConfig): kafka.javaapi.consumer.ConsumerConnector =
    new kafka.javaapi.consumer.ZookeeperConsumerConnector(config)
}
| stealthly/kafka | core/src/main/scala/kafka/consumer/ConsumerConnector.scala | Scala | apache-2.0 | 3,987 |
// haiku.scala (c) 2014, Benoit Marcot
// a random Heroku-like name generator
import scala.util.Random.nextInt
// Adjective pool: first component of a generated name. Entries are intended
// to be unique (see the commented-out checkUniqueness helper at the bottom
// of this script).
val adjs = List("autumn", "hidden", "bitter", "misty", "silent",
  "empty", "dry", "dark", "summer", "icy", "delicate", "quiet", "white", "cool",
  "spring", "winter", "patient", "twilight", "dawn", "crimson", "wispy",
  "weathered", "blue", "billowing", "broken", "cold", "damp", "falling",
  "frosty", "green", "long", "late", "lingering", "bold", "little", "morning",
  "muddy", "old", "red", "rough", "still", "small", "sparkling", "throbbing",
  "shy", "wandering", "withered", "wild", "black", "holy", "solitary",
  "fragrant", "aged", "snowy", "proud", "floral", "restless", "divine",
  "polished", "purple", "lively", "nameless", "puffy", "fluffy",
  "calm", "young", "golden", "avenging", "ancestral", "ancient", "argent",
  "reckless", "daunting", "short", "rising", "strong", "timber", "tumbling",
  "silver", "dusty", "celestial", "cosmic", "crescent", "double", "far", "half",
  "inner", "milky", "northern", "southern", "eastern", "western", "outer",
  "terrestrial", "huge", "deep", "epic", "titanic", "mighty", "powerful")
// Noun pool: second component of a generated name. Same uniqueness intent
// as `adjs` above.
val nouns = List("waterfall", "river", "breeze", "moon", "rain",
  "wind", "sea", "morning", "snow", "lake", "sunset", "pine", "shadow", "leaf",
  "dawn", "glitter", "forest", "hill", "cloud", "meadow", "glade",
  "bird", "brook", "butterfly", "bush", "dew", "dust", "field",
  "flower", "firefly", "feather", "grass", "haze", "mountain", "night", "pond",
  "darkness", "snowflake", "silence", "sound", "sky", "shape", "surf",
  "thunder", "violet", "wildflower", "wave", "water", "resonance",
  "sun", "wood", "dream", "cherry", "tree", "fog", "frost", "voice", "paper",
  "frog", "smoke", "star", "sierra", "castle", "fortress", "tiger", "day",
  "sequoia", "cedar", "wrath", "blessing", "spirit", "nova", "storm", "burst",
  "protector", "drake", "dragon", "knight", "fire", "king", "jungle", "queen",
  "giant", "elemental", "throne", "game", "weed", "stone", "apogee", "bang",
  "cluster", "corona", "cosmos", "equinox", "horizon", "light", "nebula",
  "solstice", "spectrum", "universe", "magnitude", "parallax")
/** Returns a uniformly random element of `xs`; throws if `xs` is empty. */
def getRandElt[A](xs: List[A]): A = xs(nextInt(xs.length))
/**
 * Returns a uniformly random element of `ra`, rendered as a string.
 *
 * Fix: the previous `ra.head + nextInt(ra.end - ra.head)` could never produce
 * the last element of an inclusive range (e.g. `1000 to 9999` only yielded
 * 1000..9998) and crashed with `nextInt(0)` on a single-element inclusive
 * range. Indexing the Range itself covers every element and also respects
 * non-unit steps. Still throws on an empty range, as before.
 */
def getRandNumber(ra: Range): String = {
  ra(nextInt(ra.size)).toString
}
/** Generates a Heroku-like name of the form "adjective-noun-NNNN". */
def haiku: String = {
  // Keep the original draw order: number first, then noun, then adjective.
  val number = getRandNumber(1000 to 9999)
  val noun = getRandElt(nouns)
  val adj = getRandElt(adjs)
  s"$adj-$noun-$number"
}
// Emit one generated name (no trailing newline).
print(haiku)
// Ad-hoc manual checks, intentionally left disabled.
/*
println("")
for (i <- 1 to 25) println(haiku)
def checkUniqueness(xs: List[_]): Boolean = {
  if (xs.size - xs.distinct.size > 0) { Console.err.println(xs.diff(xs.distinct)); false }
  else true
}
checkUniqueness(adjs)
checkUniqueness(nouns)
*/
| bmarcot/haiku | haiku.scala | Scala | mit | 2,723 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.std
import slamdata.Predef._
import quasar._
import quasar.DataDateTimeExtractors._
import quasar.SemanticError._
import quasar.fp.ski._
import quasar.time.{OffsetDate => QOffsetDate, _}
import java.time.{
Instant,
LocalDate => JLocalDate,
LocalDateTime => JLocalDateTime,
LocalTime => JLocalTime,
OffsetDateTime => JOffsetDateTime,
OffsetTime => JOffsetTime,
ZoneOffset
}
import scalaz._
import scalaz.Validation.success
import scalaz.syntax.either._
import shapeless.{Data => _, _}
/**
 * Standard-library functions over dates, times, datetimes (with and without
 * timezone offsets) and intervals, plus the string-parsing helpers they use.
 */
trait DateLib extends Library with Serializable {

  // legacy function for parsing Instants
  def parseTimestamp(str: String): SemanticError \\/ Data.OffsetDateTime =
    \\/.fromTryCatchNonFatal(Instant.parse(str).atOffset(ZoneOffset.UTC)).bimap(
      κ(DateFormatError(OffsetDateTime, str, None)),
      Data.OffsetDateTime.apply)

  def parseOffsetDateTime(str: String): SemanticError \\/ Data.OffsetDateTime =
    \\/.fromTryCatchNonFatal(JOffsetDateTime.parse(str)).bimap(
      κ(DateFormatError(OffsetDateTime, str, None)),
      Data.OffsetDateTime.apply)

  def parseOffsetTime(str: String): SemanticError \\/ Data.OffsetTime =
    \\/.fromTryCatchNonFatal(JOffsetTime.parse(str)).bimap(
      κ(DateFormatError(OffsetTime, str, None)),
      Data.OffsetTime.apply)

  def parseOffsetDate(str: String): SemanticError \\/ Data.OffsetDate =
    \\/.fromTryCatchNonFatal(QOffsetDate.parse(str)).bimap(
      κ(DateFormatError(OffsetDate, str, None)),
      Data.OffsetDate.apply)

  def parseLocalDateTime(str: String): SemanticError \\/ Data.LocalDateTime =
    \\/.fromTryCatchNonFatal(JLocalDateTime.parse(str)).bimap(
      // Fixed: previously reported the error against the `OffsetDate` func
      // (copy-paste from parseOffsetDate), misattributing parse failures.
      κ(DateFormatError(LocalDateTime, str, None)),
      Data.LocalDateTime.apply)

  def parseLocalTime(str: String): SemanticError \\/ Data.LocalTime =
    \\/.fromTryCatchNonFatal(JLocalTime.parse(str)).bimap(
      κ(DateFormatError(LocalTime, str, None)),
      Data.LocalTime.apply)

  def parseLocalDate(str: String): SemanticError \\/ Data.LocalDate =
    \\/.fromTryCatchNonFatal(JLocalDate.parse(str)).bimap(
      κ(DateFormatError(LocalDate, str, None)),
      Data.LocalDate.apply)

  def parseInterval(str: String): SemanticError \\/ Data.Interval =
    DateTimeInterval.parse(str) match {
      case Some(i) => Data.Interval(i).right
      case None => DateFormatError(Interval, str, Some("expected, e.g. P3DT12H30M15.0S")).left
    }

  def startOfDayInstant(date: JLocalDate): Instant =
    date.atStartOfDay.atZone(ZoneOffset.UTC).toInstant

  def startOfDay[I, O](in: I)(setter: SetTime[I, O]): O =
    setter(in, JLocalTime.MIN)

  def startOfNextDay[I, O](in: I)(dateLens: LensDate[I], setTime: SetTime[I, O]): O = {
    val updateDate = dateLens.modify(_.plusDays(1))(in)
    val updateTime = startOfDay[I, O](updateDate)(setTime)
    updateTime
  }

  // NB: SQL specifies a function called `extract`, but that doesn't have comma-
  // separated arguments. `date_part` is Postgres’ name for the same thing
  // with commas.

  // Builds a unary extraction func over any value that has a date component.
  private def dateFunc(help: String, outputType: Type, f: JLocalDate => Data) =
    UnaryFunc(
      Mapping, help,
      outputType,
      Func.Input1(Type.HasDate),
      noSimplification,
      partialTyper[nat._1] {
        case Sized(Type.Const(DataDateTimeExtractors.CanLensDate(i))) => Type.Const(f(i.pos))
        case Sized(t) if Type.HasDate contains t => Type.Numeric
      },
      basicUntyper)

  // Builds a unary extraction func over any value that has a time component.
  private def timeFunc(help: String, outputType: Type, f: JLocalTime => Data) =
    UnaryFunc(
      Mapping, help,
      outputType,
      Func.Input1(Type.HasTime),
      noSimplification,
      partialTyper[nat._1] {
        case Sized(Type.Const(DataDateTimeExtractors.CanLensTime(i))) => Type.Const(f(i.pos))
        case Sized(t) if Type.HasTime contains t => Type.Numeric
      },
      basicUntyper)

  // Builds a unary extraction func over any value that carries a timezone offset.
  private def timeZoneFunc(help: String, f: ZoneOffset => Data) =
    UnaryFunc(
      Mapping, help,
      Type.Int,
      Func.Input1(Type.HasOffset),
      noSimplification,
      partialTyper[nat._1] {
        case Sized(Type.Const(DataDateTimeExtractors.CanLensTimeZone(i))) => Type.Const(f(i.pos))
        case Sized(t) if Type.HasOffset contains t => Type.Numeric
      },
      basicUntyper)

  val ExtractCentury = dateFunc(
    "Pulls out the century subfield from a date/time value (currently (year - 1)/100 + 1).",
    Type.Int,
    (extractCentury _).andThen(Data.Int(_)))

  val ExtractDayOfMonth = dateFunc(
    "Pulls out the day of month (`day`) subfield from a date/time value (1-31).",
    Type.Int,
    (extractDayOfMonth _).andThen(Data.Int(_)))

  val ExtractDecade = dateFunc(
    "Pulls out the decade subfield from a date/time value (year/10).",
    Type.Int,
    (extractDecade _).andThen(Data.Int(_)))

  val ExtractDayOfWeek = dateFunc(
    "Pulls out the day of week (`dow`) subfield from a date/time value " + "(Sunday: 0 to Saturday: 6).",
    Type.Int,
    (extractDayOfWeek _).andThen(Data.Int(_)))

  val ExtractDayOfYear = dateFunc(
    "Pulls out the day of year (`doy`) subfield from a date/time value (1-365 or -366).",
    Type.Int,
    (extractDayOfYear _).andThen(Data.Int(_)))

  val ExtractEpoch = UnaryFunc(
    Mapping,
    "Pulls out the epoch subfield from a datetime value with timezone offset. " +
    "This is the number of seconds since midnight, 1970-01-01.",
    Type.Dec,
    Func.Input1(Type.OffsetDateTime),
    noSimplification,
    partialTyper[nat._1] {
      case Sized(Type.Const(Data.OffsetDateTime(k))) => Type.Const(Data.Dec(time.extractEpoch(k)))
      case Sized(Type.OffsetDateTime) => Type.Dec
    },
    basicUntyper)

  val ExtractHour = timeFunc(
    "Pulls out the hour subfield from a date/time value (0-23).",
    Type.Int,
    (extractHour _).andThen(Data.Int(_)))

  val ExtractIsoDayOfWeek = dateFunc(
    "Pulls out the ISO day of week (`isodow`) subfield from a date/time value (Monday: 1 to Sunday: 7).",
    Type.Int,
    (extractIsoDayOfWeek _).andThen(Data.Int(_)))

  val ExtractIsoYear = dateFunc(
    "Pulls out the ISO year (`isoyear`) subfield from a date/time value (based on the first week (Monday is the first day of the week) containing Jan. 4).",
    Type.Int,
    (extractIsoYear _).andThen(Data.Int(_)))

  val ExtractMicrosecond = timeFunc(
    "Computes the microseconds of a date/time value (including seconds).",
    Type.Int, (extractMicrosecond _).andThen(Data.Int(_)))

  val ExtractMillennium = dateFunc(
    "Pulls out the millennium subfield from a date/time value (currently (year - 1)/1000 + 1).",
    Type.Int,
    (extractMillennium _).andThen(Data.Int(_)))

  val ExtractMillisecond = timeFunc(
    "Computes the milliseconds of a date/time value (including seconds).",
    Type.Int, (extractMillisecond _).andThen(Data.Int(_)))

  val ExtractMinute = timeFunc(
    "Pulls out the minute subfield from a date/time value (0-59).",
    Type.Int, (extractMinute _).andThen(Data.Int(_)))

  val ExtractMonth = dateFunc(
    "Pulls out the month subfield from a date/time value (1-12).",
    Type.Int,
    (extractMonth _).andThen(Data.Int(_)))

  val ExtractQuarter = dateFunc(
    "Pulls out the quarter subfield from a date/time value (1-4).",
    Type.Int,
    (extractQuarter _).andThen(Data.Int(_)))

  val ExtractSecond = timeFunc(
    "Pulls out the second subfield from a date/time value (0-59, with fractional parts).",
    Type.Dec,
    (extractSecond _).andThen(Data.Dec(_)))

  val ExtractTimeZone = timeZoneFunc(
    "Pulls out the timezone subfield from a date/time value (in seconds east of UTC).",
    (extractTimeZone _).andThen(Data.Int(_)))

  val ExtractTimeZoneHour = timeZoneFunc(
    "Pulls out the hour component of the timezone subfield from a date/time value.",
    (extractTimeZoneHour _).andThen(Data.Int(_)))

  val ExtractTimeZoneMinute = timeZoneFunc(
    "Pulls out the minute component of the timezone subfield from a date/time value.",
    (extractTimeZoneMinute _).andThen(Data.Int(_)))

  val ExtractWeek = dateFunc(
    "Pulls out the week subfield from a date/time value (1-53).",
    Type.Int,
    (extractWeek _).andThen(Data.Int(_)))

  val ExtractYear = dateFunc(
    "Pulls out the year subfield from a date/time value.",
    Type.Int,
    (extractYear _).andThen(Data.Int(_)))

  // Builds a binary func that sets (part of) the timezone offset: `out` builds
  // a fresh offset when none exists, `outTimeZone` updates an existing one.
  private def setTimeZone(help: String, out: Int => ZoneOffset, outTimeZone: (Int, ZoneOffset) => ZoneOffset) =
    BinaryFunc(
      Mapping, help,
      Type.Temporal,
      Func.Input2(Type.Temporal, Type.Int),
      noSimplification,
      partialTyper[nat._2] {
        case Sized(Type.OffsetDate | Type.OffsetDateTime | Type.OffsetTime, Type.Numeric) => Type.Numeric
        case Sized(Type.Const(DataDateTimeExtractors.CanLensTimeZone(i)), Type.Const(Data.Int(input))) =>
          Type.Const(i.peeks(outTimeZone(input.toInt, _)))
        case Sized(Type.Const(DataDateTimeExtractors.CanSetTimeZone(k)), Type.Const(Data.Int(input))) =>
          Type.Const(k(out(input.toInt)))
      },
      basicUntyper)

  // FIXME `ZoneOffset.ofTotalSeconds` throws an exception if the integer
  // input is not in the range [-64800, 64800]
  val SetTimeZone = setTimeZone(
    "Sets the timezone subfield in a date/time value (in seconds east of UTC).",
    ZoneOffset.ofTotalSeconds,
    (i, _) => ZoneOffset.ofTotalSeconds(i))

  val SetTimeZoneMinute = setTimeZone(
    "Sets the minute component of the timezone subfield in a date/time value.",
    ZoneOffset.ofHoursMinutes(0, _),
    (i, zo) => setTimeZoneMinute(zo, i))

  val SetTimeZoneHour = setTimeZone(
    "Sets the hour component of the timezone subfield in a date/time value.",
    ZoneOffset.ofHours,
    (i, zo) => setTimeZoneHour(zo, i))

  val Now = NullaryFunc(
    Mapping,
    "Returns the current datetime in the current time zone – this must always return the same value within the same execution of a query.",
    Type.OffsetDateTime,
    noSimplification)

  val NowTime = NullaryFunc(
    Mapping,
    "Returns the current time in the current time zone – this must always return the same value within the same execution of a query.",
    Type.OffsetTime,
    noSimplification)

  val NowDate = NullaryFunc(
    Mapping,
    "Returns the current date in the current time zone – this must always return the same value within the same execution of a query.",
    Type.OffsetDate,
    noSimplification)

  val CurrentTimeZone = NullaryFunc(
    Mapping,
    "Returns the current time zone offset in total seconds - this must always return the same value within the same execution of a query.",
    Type.Int,
    noSimplification)

  val OffsetDateTime = UnaryFunc(
    Mapping,
    "Converts a string in the format (YYYY-MM-DDTHH:MM:SS((+/-)HH[:MM[:SS]])/Z) to a timestamp value with a time zone offset. This is a partial function – arguments that don’t satisify the constraint have undefined results.",
    Type.OffsetDateTime,
    Func.Input1(Type.Str),
    noSimplification,
    partialTyperV[nat._1] {
      case Sized(Type.Const(Data.Str(str))) => parseOffsetDateTime(str).map(Type.Const(_)).validation.toValidationNel
      case Sized(Type.Str) => success(Type.OffsetDateTime)
    },
    basicUntyper)

  val OffsetTime = UnaryFunc(
    Mapping,
    "Converts a string in the format (HH:MM:SS[.SSS]((+/-)HH:MM:SS)/Z) to a time value with a time zone offset. This is a partial function – arguments that don’t satisify the constraint have undefined results.",
    Type.OffsetTime,
    Func.Input1(Type.Str),
    noSimplification,
    partialTyperV[nat._1] {
      case Sized(Type.Const(Data.Str(str))) => parseOffsetTime(str).map(Type.Const(_)).validation.toValidationNel
      case Sized(Type.Str) => success(Type.OffsetTime)
    },
    basicUntyper)

  val OffsetDate = UnaryFunc(
    Mapping,
    "Converts a string in the format (YYYY-MM-DD((+/-)HH:MM:SS)/Z) to a date value with a time zone offset. This is a partial function – arguments that don’t satisify the constraint have undefined results.",
    Type.OffsetDate,
    Func.Input1(Type.Str),
    noSimplification,
    partialTyperV[nat._1] {
      case Sized(Type.Const(Data.Str(str))) => parseOffsetDate(str).map(Type.Const(_)).validation.toValidationNel
      case Sized(Type.Str) => success(Type.OffsetDate)
    },
    basicUntyper)

  val LocalDateTime = UnaryFunc(
    Mapping,
    "Converts a string in the format (YYYY-MM-DDTHH:MM:SS) to a date value paired with a time. This is a partial function – arguments that don’t satisify the constraint have undefined results.",
    Type.LocalDateTime,
    Func.Input1(Type.Str),
    noSimplification,
    partialTyperV[nat._1] {
      case Sized(Type.Const(Data.Str(str))) => parseLocalDateTime(str).map(Type.Const(_)).validation.toValidationNel
      case Sized(Type.Str) => success(Type.LocalDateTime)
    },
    basicUntyper)

  val LocalTime = UnaryFunc(
    Mapping,
    "Converts a string in the format (HH:MM:SS[.SSS]) to a time value. This is a partial function – arguments that don’t satisify the constraint have undefined results.",
    Type.LocalTime,
    Func.Input1(Type.Str),
    noSimplification,
    partialTyperV[nat._1] {
      case Sized(Type.Const(Data.Str(str))) => parseLocalTime(str).map(Type.Const(_)).validation.toValidationNel
      case Sized(Type.Str) => success(Type.LocalTime)
    },
    basicUntyper)

  val LocalDate = UnaryFunc(
    Mapping,
    "Converts a string in the format (YYYY-MM-DD) to a date value. This is a partial function – arguments that don’t satisify the constraint have undefined results.",
    Type.LocalDate,
    Func.Input1(Type.Str),
    noSimplification,
    partialTyperV[nat._1] {
      case Sized(Type.Const(Data.Str(str))) => parseLocalDate(str).map(Type.Const(_)).validation.toValidationNel
      case Sized(Type.Str) => success(Type.LocalDate)
    },
    basicUntyper)

  /**
   * TODO: document behavior change, now that years and months work
   */
  val Interval = UnaryFunc(
    Mapping,
    "Converts a string in the format (ISO 8601, e.g. P3DT12H30M15.0S) to an interval value. This is a partial function – arguments that don’t satisify the constraint have undefined results.",
    Type.Interval,
    Func.Input1(Type.Str),
    noSimplification,
    partialTyperV[nat._1] {
      case Sized(Type.Const(Data.Str(str))) => parseInterval(str).map(Type.Const(_)).validation.toValidationNel
      case Sized(Type.Str) => success(Type.Interval)
    },
    basicUntyper)

  /**
   * TODO: document behavior change, `StartOfDay` only makes `OffsetDateTime`s out of other `OffsetDateTime`s.
   */
  val StartOfDay = UnaryFunc(
    Mapping,
    "Converts a DateTime or Date to a DateTime at the start of that day.",
    Type.LocalDateTime ⨿ Type.OffsetDateTime,
    Func.Input1(Type.HasDate),
    noSimplification,
    partialTyperV[nat._1] {
      case Sized(Type.Const(CanAddTime(f))) =>
        success(Type.Const(f(JLocalTime.MIN)))
      case Sized(Type.LocalDate | Type.LocalDateTime) =>
        success(Type.LocalDateTime)
      case Sized(Type.OffsetDate | Type.OffsetDateTime) =>
        success(Type.OffsetDateTime)
    },
    partialUntyper[nat._1] {
      case Type.OffsetDateTime => Func.Input1(Type.OffsetDate ⨿ Type.OffsetDateTime)
      case Type.LocalDateTime => Func.Input1(Type.LocalDate ⨿ Type.LocalDateTime)
    })

  val TimeOfDay = UnaryFunc(
    Mapping,
    "Extracts the time of day from a datetime value. Preserves time zone information.",
    Type.LocalTime ⨿ Type.OffsetTime,
    Func.Input1(Type.LocalDateTime ⨿ Type.OffsetDateTime),
    noSimplification,
    partialTyper[nat._1] {
      case Sized(Type.Const(Data.OffsetDateTime(odt))) => Type.Const(Data.OffsetTime(odt.toOffsetTime))
      case Sized(Type.Const(Data.LocalDateTime(ldt))) => Type.Const(Data.LocalTime(ldt.toLocalTime))
      case Sized(Type.LocalDateTime) => Type.LocalTime
      case Sized(Type.OffsetDateTime) => Type.OffsetTime
    },
    partialUntyper[nat._1] {
      case Type.OffsetTime => Func.Input1(Type.OffsetDateTime)
      case Type.LocalTime => Func.Input1(Type.LocalDateTime)
    })

  val ToTimestamp = UnaryFunc(
    Mapping,
    "Converts an integer epoch time value (i.e. milliseconds since 1 Jan. 1970, UTC) to a timestamp constant.",
    Type.OffsetDateTime,
    Func.Input1(Type.Int),
    noSimplification,
    partialTyper[nat._1] {
      case Sized(Type.Const(Data.Int(millis))) => Type.Const(Data.OffsetDateTime(JOffsetDateTime.ofInstant(Instant.ofEpochMilli(millis.toLong), ZoneOffset.UTC)))
      case Sized(Type.Int) => Type.OffsetDateTime
    },
    partialUntyperV[nat._1] {
      case Type.OffsetDateTime => success(Func.Input1(Type.Int))
    })

  val ToLocal = UnaryFunc(
    Mapping,
    "Removes the time zone offset from a date, time, or datetime value.",
    Type.LocalDateTime ⨿ Type.LocalTime ⨿ Type.LocalDate,
    Func.Input1(Type.OffsetDateTime ⨿ Type.OffsetTime ⨿ Type.OffsetDate),
    noSimplification,
    partialTyper[nat._1] {
      case Sized(Type.Const(Data.OffsetDate(od))) => Type.Const(Data.LocalDate(od.date))
      case Sized(Type.Const(Data.OffsetTime(ot))) => Type.Const(Data.LocalTime(ot.toLocalTime))
      case Sized(Type.Const(Data.OffsetDateTime(odt))) => Type.Const(Data.LocalDateTime(odt.toLocalDateTime))
      case Sized(Type.OffsetDateTime) => Type.LocalDateTime
      case Sized(Type.OffsetTime) => Type.LocalTime
      case Sized(Type.OffsetDate) => Type.LocalDate
    },
    partialUntyperV[nat._1] {
      case Type.LocalDateTime => success(Func.Input1(Type.OffsetDateTime))
      case Type.LocalTime => success(Func.Input1(Type.OffsetTime))
      case Type.LocalDate => success(Func.Input1(Type.OffsetDate))
    })
}

object DateLib extends DateLib
| jedesah/Quasar | frontend/src/main/scala/quasar/std/date.scala | Scala | apache-2.0 | 18,363 |
package verizon.build
import sbt._, Keys._
object CrossLibraryPlugin extends AutoPlugin {

  object autoImport {
    val scalazStreamVersion = settingKey[String]("scalaz-stream version")
  }

  import autoImport._

  override def trigger = allRequirements
  override def requires = RigPlugin

  override lazy val projectSettings = Seq(
    // Known values include "0.8.1a" and "0.7.3a"; default to the 0.7 line
    // unless SCALAZ_STREAM_VERSION overrides it.
    scalazStreamVersion := sys.env.getOrElse("SCALAZ_STREAM_VERSION", "0.7.3a"),
    // Add the source tree matching the major.minor of the stream version,
    // e.g. src/main/scalaz-stream-0.8.
    unmanagedSourceDirectories in Compile += (sourceDirectory in Compile).value / s"scalaz-stream-${scalazStreamVersion.value.take(3)}",
    // Non-0.7 builds get an "a" suffix on the artifact version, spliced in
    // ahead of any -SNAPSHOT qualifier.
    version := {
      val suffix = if (scalazStreamVersion.value.startsWith("0.7")) "" else "a"
      val v = version.value
      if (v.endsWith("-SNAPSHOT")) v.replaceAll("-SNAPSHOT", s"$suffix-SNAPSHOT")
      else s"$v$suffix"
    }
  )
}
| timperrett/knobs | project/CrossLibraryPlugin.scala | Scala | apache-2.0 | 893 |
package se.gigurra.leavu3.gfx
import com.badlogic.gdx.graphics.Color
/** Named accessors for the libgdx [[Color]] constants, for mixing into drawing code. */
trait Colors {
  // Transparency and grayscale
  def CLEAR = Color.CLEAR
  def BLACK = Color.BLACK
  def WHITE = Color.WHITE
  def LIGHT_GRAY = Color.LIGHT_GRAY
  def GRAY = Color.GRAY
  def DARK_GRAY = Color.DARK_GRAY
  // Blues and cyans
  def BLUE = Color.BLUE
  def NAVY = Color.NAVY
  def ROYAL = Color.ROYAL
  def SLATE = Color.SLATE
  def SKY = Color.SKY
  def CYAN = Color.CYAN
  def TEAL = Color.TEAL
  // Greens
  def GREEN = Color.GREEN
  def CHARTREUSE = Color.CHARTREUSE
  def LIME = Color.LIME
  def FOREST = Color.FOREST
  def OLIVE = Color.OLIVE
  // Yellows, oranges and browns
  def YELLOW = Color.YELLOW
  def GOLD = Color.GOLD
  def GOLDENROD = Color.GOLDENROD
  def ORANGE = Color.ORANGE
  def BROWN = Color.BROWN
  def TAN = Color.TAN
  // Reds, pinks and purples
  def FIREBRICK = Color.FIREBRICK
  def RED = Color.RED
  def SCARLET = Color.SCARLET
  def CORAL = Color.CORAL
  def SALMON = Color.SALMON
  def PINK = Color.PINK
  def MAGENTA = Color.MAGENTA
  def PURPLE = Color.PURPLE
  def VIOLET = Color.VIOLET
  def MAROON = Color.MAROON
}
| GiGurra/leavu3 | src/main/scala/se/gigurra/leavu3/gfx/Colors.scala | Scala | mit | 1,050 |
package spire
package math
import org.scalatest.FunSuite
import spire.std.int._
class SortingTest extends FunSuite {

  /**
   * Sorts copies of `before` with spire's mergeSort and quickSort and checks
   * both results element-by-element against scala.util.Sorting's output.
   */
  def testSort(before: Array[Int]): Unit = {
    val expected = before.clone()
    scala.util.Sorting.quickSort(expected)

    val viaMerge = before.clone()
    Sorting.mergeSort(viaMerge)

    val viaQuick = before.clone()
    Sorting.quickSort(viaQuick)

    // make sure our result is ok
    before.indices.foreach { i =>
      assert(viaMerge(i) === expected(i))
      assert(viaQuick(i) === expected(i))
    }
  }

  test("sort empty array") { testSort(Array[Int]()) }

  test("sort singleton") { testSort(Array[Int](1)) }

  test("trivial sort") { testSort(Array(2, 1)) }

  test("sort 3 decreasing") { testSort(Array(3, 2, 1)) }

  test("sort()") { testSort(Array(23, 1, 52, 64, 234, 623, 124, 421, 421)) }

  test("sort 5 decreasing") { testSort(Array(5, 4, 3, 2, 1)) }
}
| tixxit/spire | tests/src/test/scala/spire/math/SortingTest.scala | Scala | mit | 931 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui.jobs
import java.util.Properties
import org.scalatest.Matchers
import org.apache.spark._
import org.apache.spark.{LocalSparkContext, SparkConf, Success}
import org.apache.spark.executor._
import org.apache.spark.scheduler._
import org.apache.spark.ui.jobs.UIData.TaskUIData
import org.apache.spark.util.{AccumulatorContext, Utils}
class JobProgressListenerSuite extends SparkFunSuite with LocalSparkContext with Matchers {
val jobSubmissionTime = 1421191042750L
val jobCompletionTime = 1421191296660L
private def createStageStartEvent(stageId: Int) = {
val stageInfo = new StageInfo(stageId, 0, stageId.toString, 0, null, null, "")
SparkListenerStageSubmitted(stageInfo)
}
private def createStageEndEvent(stageId: Int, failed: Boolean = false) = {
val stageInfo = new StageInfo(stageId, 0, stageId.toString, 0, null, null, "")
if (failed) {
stageInfo.failureReason = Some("Failed!")
}
SparkListenerStageCompleted(stageInfo)
}
private def createJobStartEvent(
jobId: Int,
stageIds: Seq[Int],
jobGroup: Option[String] = None): SparkListenerJobStart = {
val stageInfos = stageIds.map { stageId =>
new StageInfo(stageId, 0, stageId.toString, 0, null, null, "")
}
val properties: Option[Properties] = jobGroup.map { groupId =>
val props = new Properties()
props.setProperty(SparkContext.SPARK_JOB_GROUP_ID, groupId)
props
}
SparkListenerJobStart(jobId, jobSubmissionTime, stageInfos, properties.orNull)
}
private def createJobEndEvent(jobId: Int, failed: Boolean = false) = {
val result = if (failed) JobFailed(new Exception("dummy failure")) else JobSucceeded
SparkListenerJobEnd(jobId, jobCompletionTime, result)
}
private def runJob(listener: SparkListener, jobId: Int, shouldFail: Boolean = false) {
val stagesThatWontBeRun = jobId * 200 to jobId * 200 + 10
val stageIds = jobId * 100 to jobId * 100 + 50
listener.onJobStart(createJobStartEvent(jobId, stageIds ++ stagesThatWontBeRun))
for (stageId <- stageIds) {
listener.onStageSubmitted(createStageStartEvent(stageId))
listener.onStageCompleted(createStageEndEvent(stageId, failed = stageId % 2 == 0))
}
listener.onJobEnd(createJobEndEvent(jobId, shouldFail))
}
private def assertActiveJobsStateIsEmpty(listener: JobProgressListener) {
listener.getSizesOfActiveStateTrackingCollections.foreach { case (fieldName, size) =>
assert(size === 0, s"$fieldName was not empty")
}
}
test("test LRU eviction of stages") {
def runWithListener(listener: JobProgressListener) : Unit = {
for (i <- 1 to 50) {
listener.onStageSubmitted(createStageStartEvent(i))
listener.onStageCompleted(createStageEndEvent(i))
}
assertActiveJobsStateIsEmpty(listener)
}
val conf = new SparkConf()
conf.set("spark.ui.retainedStages", 5.toString)
var listener = new JobProgressListener(conf)
// Test with 5 retainedStages
runWithListener(listener)
listener.completedStages.size should be (5)
listener.completedStages.map(_.stageId).toSet should be (Set(50, 49, 48, 47, 46))
// Test with 0 retainedStages
conf.set("spark.ui.retainedStages", 0.toString)
listener = new JobProgressListener(conf)
runWithListener(listener)
listener.completedStages.size should be (0)
}
test("test clearing of stageIdToActiveJobs") {
val conf = new SparkConf()
conf.set("spark.ui.retainedStages", 5.toString)
val listener = new JobProgressListener(conf)
val jobId = 0
val stageIds = 1 to 50
// Start a job with 50 stages
listener.onJobStart(createJobStartEvent(jobId, stageIds))
for (stageId <- stageIds) {
listener.onStageSubmitted(createStageStartEvent(stageId))
}
listener.stageIdToActiveJobIds.size should be > 0
// Complete the stages and job
for (stageId <- stageIds) {
listener.onStageCompleted(createStageEndEvent(stageId, failed = false))
}
listener.onJobEnd(createJobEndEvent(jobId, false))
assertActiveJobsStateIsEmpty(listener)
listener.stageIdToActiveJobIds.size should be (0)
}
test("test clearing of jobGroupToJobIds") {
def runWithListener(listener: JobProgressListener): Unit = {
// Run 50 jobs, each with one stage
for (jobId <- 0 to 50) {
listener.onJobStart(createJobStartEvent(jobId, Seq(0), jobGroup = Some(jobId.toString)))
listener.onStageSubmitted(createStageStartEvent(0))
listener.onStageCompleted(createStageEndEvent(0, failed = false))
listener.onJobEnd(createJobEndEvent(jobId, false))
}
assertActiveJobsStateIsEmpty(listener)
}
val conf = new SparkConf()
conf.set("spark.ui.retainedJobs", 5.toString)
var listener = new JobProgressListener(conf)
runWithListener(listener)
// This collection won't become empty, but it should be bounded by spark.ui.retainedJobs
listener.jobGroupToJobIds.size should be (5)
// Test with 0 jobs
conf.set("spark.ui.retainedJobs", 0.toString)
listener = new JobProgressListener(conf)
runWithListener(listener)
listener.jobGroupToJobIds.size should be (0)
}
test("test LRU eviction of jobs") {
val conf = new SparkConf()
conf.set("spark.ui.retainedStages", 5.toString)
conf.set("spark.ui.retainedJobs", 5.toString)
val listener = new JobProgressListener(conf)
// Run a bunch of jobs to get the listener into a state where we've exceeded both the
// job and stage retention limits:
for (jobId <- 1 to 10) {
runJob(listener, jobId, shouldFail = false)
}
for (jobId <- 200 to 210) {
runJob(listener, jobId, shouldFail = true)
}
assertActiveJobsStateIsEmpty(listener)
// Snapshot the sizes of various soft- and hard-size-limited collections:
val softLimitSizes = listener.getSizesOfSoftSizeLimitedCollections
val hardLimitSizes = listener.getSizesOfHardSizeLimitedCollections
// Run some more jobs:
for (jobId <- 11 to 50) {
runJob(listener, jobId, shouldFail = false)
// We shouldn't exceed the hard / soft limit sizes after the jobs have finished:
listener.getSizesOfSoftSizeLimitedCollections should be (softLimitSizes)
listener.getSizesOfHardSizeLimitedCollections should be (hardLimitSizes)
}
listener.completedJobs.size should be (5)
listener.completedJobs.map(_.jobId).toSet should be (Set(50, 49, 48, 47, 46))
for (jobId <- 51 to 100) {
runJob(listener, jobId, shouldFail = true)
// We shouldn't exceed the hard / soft limit sizes after the jobs have finished:
listener.getSizesOfSoftSizeLimitedCollections should be (softLimitSizes)
listener.getSizesOfHardSizeLimitedCollections should be (hardLimitSizes)
}
assertActiveJobsStateIsEmpty(listener)
// Completed and failed jobs each their own size limits, so this should still be the same:
listener.completedJobs.size should be (5)
listener.completedJobs.map(_.jobId).toSet should be (Set(50, 49, 48, 47, 46))
listener.failedJobs.size should be (5)
listener.failedJobs.map(_.jobId).toSet should be (Set(100, 99, 98, 97, 96))
}
// Verifies that JobProgressListener aggregates shuffle-read bytes per executor:
// metrics from finished tasks are summed into stageIdToData(...).executorSummary,
// keyed by executor id, and unknown stages/executors create entries lazily.
test("test executor id to summary") {
val conf = new SparkConf()
val listener = new JobProgressListener(conf)
val taskMetrics = TaskMetrics.empty
val shuffleReadMetrics = taskMetrics.createTempShuffleReadMetrics()
assert(listener.stageIdToData.size === 0)
// finish this task, should get updated shuffleRead
shuffleReadMetrics.incRemoteBytesRead(1000)
taskMetrics.mergeShuffleReadMetrics()
var taskInfo = new TaskInfo(1234L, 0, 1, 0L, "exe-1", "host1", TaskLocality.NODE_LOCAL, false)
taskInfo.finishTime = 1
var task = new ShuffleMapTask(0)
val taskType = Utils.getFormattedClassName(task)
listener.onTaskEnd(
SparkListenerTaskEnd(task.stageId, 0, taskType, Success, taskInfo, taskMetrics))
// "exe-1" has contributed the 1000 remote bytes read above.
assert(listener.stageIdToData.getOrElse((0, 0), fail())
.executorSummary.getOrElse("exe-1", fail()).shuffleRead === 1000)
// finish a task with unknown executor-id, nothing should happen
taskInfo =
new TaskInfo(1234L, 0, 1, 1000L, "exe-unknown", "host1", TaskLocality.NODE_LOCAL, true)
taskInfo.finishTime = 1
task = new ShuffleMapTask(0)
listener.onTaskEnd(
SparkListenerTaskEnd(task.stageId, 0, taskType, Success, taskInfo, taskMetrics))
// Still only one stage tracked; the unknown executor did not add a new stage entry.
assert(listener.stageIdToData.size === 1)
// finish another task on "exe-1"; its shuffleRead total should accumulate to 2000
taskInfo = new TaskInfo(1235L, 0, 1, 0L, "exe-1", "host1", TaskLocality.NODE_LOCAL, false)
taskInfo.finishTime = 1
task = new ShuffleMapTask(0)
listener.onTaskEnd(
SparkListenerTaskEnd(task.stageId, 0, taskType, Success, taskInfo, taskMetrics))
assert(listener.stageIdToData.getOrElse((0, 0), fail())
.executorSummary.getOrElse("exe-1", fail()).shuffleRead === 2000)
// finish a task on a different executor ("exe-2"); it gets its own summary entry
taskInfo = new TaskInfo(1236L, 0, 2, 0L, "exe-2", "host1", TaskLocality.NODE_LOCAL, false)
taskInfo.finishTime = 1
task = new ShuffleMapTask(0)
listener.onTaskEnd(
SparkListenerTaskEnd(task.stageId, 0, taskType, Success, taskInfo, taskMetrics))
assert(listener.stageIdToData.getOrElse((0, 0), fail())
.executorSummary.getOrElse("exe-2", fail()).shuffleRead === 1000)
}
// Verifies that every TaskEndReason is bucketed correctly: all failure reasons
// increment numFailedTasks, TaskKilled increments numKilledTasks, and only
// Success increments numCompleteTasks.
test("test task success vs failure counting for different task end reasons") {
val conf = new SparkConf()
val listener = new JobProgressListener(conf)
val metrics = TaskMetrics.empty
val taskInfo = new TaskInfo(1234L, 0, 3, 0L, "exe-1", "host1", TaskLocality.NODE_LOCAL, false)
taskInfo.finishTime = 1
val task = new ShuffleMapTask(0)
val taskType = Utils.getFormattedClassName(task)
// Go through all the failure cases to make sure we are counting them as failures.
val taskFailedReasons = Seq(
Resubmitted,
new FetchFailed(null, 0, 0, 0, "ignored"),
ExceptionFailure("Exception", "description", null, null, None),
TaskResultLost,
ExecutorLostFailure("0", true, Some("Induced failure")),
UnknownReason)
var failCount = 0
for (reason <- taskFailedReasons) {
listener.onTaskEnd(
SparkListenerTaskEnd(task.stageId, 0, taskType, reason, taskInfo, metrics))
failCount += 1
// Failures must never be counted as completions.
assert(listener.stageIdToData((task.stageId, 0)).numCompleteTasks === 0)
assert(listener.stageIdToData((task.stageId, 0)).numFailedTasks === failCount)
}
// Make sure killed tasks are accounted for correctly.
listener.onTaskEnd(
SparkListenerTaskEnd(task.stageId, 0, taskType, TaskKilled, taskInfo, metrics))
assert(listener.stageIdToData((task.stageId, 0)).numKilledTasks === 1)
// Make sure we count success as success. Note the success is reported against
// stage attempt 1, so the attempt-0 failure counter below must be unchanged.
listener.onTaskEnd(
SparkListenerTaskEnd(task.stageId, 1, taskType, Success, taskInfo, metrics))
assert(listener.stageIdToData((task.stageId, 1)).numCompleteTasks === 1)
assert(listener.stageIdToData((task.stageId, 0)).numFailedTasks === failCount)
}
// Verifies that stage-level aggregates are updated both from executor heartbeats
// (SparkListenerExecutorMetricsUpdate) and from task-end events, and that a
// task-end REPLACES the partial metrics previously reported via heartbeat for
// the same task instead of double-counting them.
test("test update metrics") {
val conf = new SparkConf()
val listener = new JobProgressListener(conf)
val taskType = Utils.getFormattedClassName(new ShuffleMapTask(0))
val execId = "exe-1"
// Builds TaskMetrics whose individual fields are `base + k` for distinct small
// k, so each aggregate below can be traced back to its contributing tasks.
def makeTaskMetrics(base: Int): TaskMetrics = {
val taskMetrics = TaskMetrics.empty
val shuffleReadMetrics = taskMetrics.createTempShuffleReadMetrics()
val shuffleWriteMetrics = taskMetrics.shuffleWriteMetrics
val inputMetrics = taskMetrics.inputMetrics
val outputMetrics = taskMetrics.outputMetrics
shuffleReadMetrics.incRemoteBytesRead(base + 1)
shuffleReadMetrics.incLocalBytesRead(base + 9)
shuffleReadMetrics.incRemoteBlocksFetched(base + 2)
taskMetrics.mergeShuffleReadMetrics()
shuffleWriteMetrics.incBytesWritten(base + 3)
taskMetrics.setExecutorRunTime(base + 4)
taskMetrics.incDiskBytesSpilled(base + 5)
taskMetrics.incMemoryBytesSpilled(base + 6)
inputMetrics.setBytesRead(base + 7)
outputMetrics.setBytesWritten(base + 8)
taskMetrics
}
def makeTaskInfo(taskId: Long, finishTime: Int = 0): TaskInfo = {
val taskInfo = new TaskInfo(taskId, 0, 1, 0L, execId, "host1", TaskLocality.NODE_LOCAL,
false)
taskInfo.finishTime = finishTime
taskInfo
}
// Tasks 1234/1235 run in stage 0, tasks 1236/1237 in stage 1.
listener.onTaskStart(SparkListenerTaskStart(0, 0, makeTaskInfo(1234L)))
listener.onTaskStart(SparkListenerTaskStart(0, 0, makeTaskInfo(1235L)))
listener.onTaskStart(SparkListenerTaskStart(1, 0, makeTaskInfo(1236L)))
listener.onTaskStart(SparkListenerTaskStart(1, 0, makeTaskInfo(1237L)))
// Heartbeat covers 1234 (base 0), 1235 (base 100), 1236 (base 200); 1237 is absent.
listener.onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate(execId, Array(
(1234L, 0, 0, makeTaskMetrics(0).accumulators().map(AccumulatorSuite.makeInfo)),
(1235L, 0, 0, makeTaskMetrics(100).accumulators().map(AccumulatorSuite.makeInfo)),
(1236L, 1, 0, makeTaskMetrics(200).accumulators().map(AccumulatorSuite.makeInfo)))))
var stage0Data = listener.stageIdToData.get((0, 0)).get
var stage1Data = listener.stageIdToData.get((1, 0)).get
// Shuffle read = (base+1) remote + (base+9) local per task:
// stage 0: (1+9) + (101+109) = 220; stage 1: 201+209 = 410.
assert(stage0Data.shuffleReadTotalBytes == 220)
assert(stage1Data.shuffleReadTotalBytes == 410)
assert(stage0Data.shuffleWriteBytes == 106)
assert(stage1Data.shuffleWriteBytes == 203)
assert(stage0Data.executorRunTime == 108)
assert(stage1Data.executorRunTime == 204)
assert(stage0Data.diskBytesSpilled == 110)
assert(stage1Data.diskBytesSpilled == 205)
assert(stage0Data.memoryBytesSpilled == 112)
assert(stage1Data.memoryBytesSpilled == 206)
assert(stage0Data.inputBytes == 114)
assert(stage1Data.inputBytes == 207)
assert(stage0Data.outputBytes == 116)
assert(stage1Data.outputBytes == 208)
assert(
stage0Data.taskData.get(1234L).get.metrics.get.shuffleReadMetrics.totalBlocksFetched == 2)
assert(
stage0Data.taskData.get(1235L).get.metrics.get.shuffleReadMetrics.totalBlocksFetched == 102)
assert(
stage1Data.taskData.get(1236L).get.metrics.get.shuffleReadMetrics.totalBlocksFetched == 202)
// task that was included in a heartbeat
listener.onTaskEnd(SparkListenerTaskEnd(0, 0, taskType, Success, makeTaskInfo(1234L, 1),
makeTaskMetrics(300)))
// task that wasn't included in a heartbeat
listener.onTaskEnd(SparkListenerTaskEnd(1, 0, taskType, Success, makeTaskInfo(1237L, 1),
makeTaskMetrics(400)))
stage0Data = listener.stageIdToData.get((0, 0)).get
stage1Data = listener.stageIdToData.get((1, 0)).get
// Task 1235 contributed (100+1)+(100+9) = 210 shuffle bytes, and task 1234 contributed
// (300+1)+(300+9) = 610 total shuffle bytes, so the total for the stage is 820.
// (1234's heartbeat contribution of 10 bytes was replaced, not added to.)
assert(stage0Data.shuffleReadTotalBytes == 820)
// Task 1236 contributed 410 shuffle bytes, and task 1237 contributed 810 shuffle bytes.
assert(stage1Data.shuffleReadTotalBytes == 1220)
assert(stage0Data.shuffleWriteBytes == 406)
assert(stage1Data.shuffleWriteBytes == 606)
assert(stage0Data.executorRunTime == 408)
assert(stage1Data.executorRunTime == 608)
assert(stage0Data.diskBytesSpilled == 410)
assert(stage1Data.diskBytesSpilled == 610)
assert(stage0Data.memoryBytesSpilled == 412)
assert(stage1Data.memoryBytesSpilled == 612)
assert(stage0Data.inputBytes == 414)
assert(stage1Data.inputBytes == 614)
assert(stage0Data.outputBytes == 416)
assert(stage1Data.outputBytes == 616)
// Per-task metrics reflect the latest report: task-end values for 1234/1237.
assert(
stage0Data.taskData.get(1234L).get.metrics.get.shuffleReadMetrics.totalBlocksFetched == 302)
assert(
stage1Data.taskData.get(1237L).get.metrics.get.shuffleReadMetrics.totalBlocksFetched == 402)
}
// The UI should hide bookkeeping accumulators: both internal ones and those
// carrying the SQL metrics identifier. Only user-defined accumulators survive
// TaskUIData.dropInternalAndSQLAccumulables.
test("drop internal and sql accumulators") {
val info = new TaskInfo(0, 0, 0, 0, "", "", TaskLocality.ANY, false)
// Internal accumulator: must be dropped.
val internalAccum = AccumulableInfo(
id = 1,
name = Some("internal"),
update = None,
value = None,
internal = true,
countFailedValues = false,
metadata = None)
// SQL metric accumulator: must be dropped because of its metadata tag.
val sqlAccum = AccumulableInfo(
id = 2,
name = Some("sql"),
update = None,
value = None,
internal = false,
countFailedValues = false,
metadata = Some(AccumulatorContext.SQL_ACCUM_IDENTIFIER))
// Plain user accumulator: the only one that should remain.
val userAccum = AccumulableInfo(
id = 3,
name = Some("user"),
update = None,
value = None,
internal = false,
countFailedValues = false,
metadata = None)
info.setAccumulables(internalAccum :: sqlAccum :: userAccum :: Nil)
val cleaned = TaskUIData.dropInternalAndSQLAccumulables(info)
assert(cleaned.accumulables === Seq(userAccum))
}
// SPARK-19146: once taskData exceeds spark.ui.retainedTasks, the listener
// drops max(retainedTasks / 10, excess) entries in one go, so trimming is not
// re-triggered on every subsequent task completion.
test("SPARK-19146 drop more elements when stageData.taskData.size > retainedTasks") {
val conf = new SparkConf().set("spark.ui.retainedTasks", "100")
val taskMetrics = TaskMetrics.empty
taskMetrics.mergeShuffleReadMetrics()
val task = new ShuffleMapTask(0)
val taskType = Utils.getFormattedClassName(task)
// Feeds `count` successful task completions into the given listener.
def completeTasks(listener: JobProgressListener, count: Int): Unit = {
(1 to count).foreach { t =>
val info = new TaskInfo(t, 0, 1, 0L, "exe-1", "host1", TaskLocality.NODE_LOCAL, false)
info.finishTime = 1
listener.onTaskEnd(
SparkListenerTaskEnd(task.stageId, 0, taskType, Success, info, taskMetrics))
}
}
val listener1 = new JobProgressListener(conf)
completeTasks(listener1, 101)
// 101 - math.max(100 / 10, 101 - 100) = 91
assert(listener1.stageIdToData((task.stageId, task.stageAttemptId)).taskData.size === 91)
val listener2 = new JobProgressListener(conf)
completeTasks(listener2, 150)
// 150 - math.max(100 / 10, 150 - 100) = 100
assert(listener2.stageIdToData((task.stageId, task.stageAttemptId)).taskData.size === 100)
}
}
| sachintyagi22/spark | core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala | Scala | apache-2.0 | 18,750 |
package com.droelf.gpx.gpxtype
/**
 * Demo entry point: parses a GPX file and prints the ascent of every track
 * that carries track-statistics extension data. Tracks without a stats
 * extension are silently skipped.
 */
object Main extends App {
  val gpx: GPX = GPXDecoder.decodeFromFile("res/demo2.gpx")
  gpx.tracks
    .flatMap(_.trackStatsExtension)
    .foreach(stats => println(stats.ascent))
}
| dr03lf/gpx-parser | src/main/scala/com/droelf/gpxparser/Main.scala | Scala | apache-2.0 | 266 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.torch
import com.intel.analytics.bigdl.nn.{GradientChecker, SpatialCrossMapLRN}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.RandomGenerator._
import com.intel.analytics.bigdl._
import scala.util.Random
@com.intel.analytics.bigdl.tags.Serial
// Compares BigDL's SpatialCrossMapLRN layer against the reference Torch/Lua
// implementation by running the same seeded inputs through both and comparing
// outputs and gradients. Requires a local Torch install (torchCheck() skips
// otherwise).
//
// NOTE(review): `output should be equals luaOutput` almost certainly does NOT
// assert anything — it parses as a chained `.equals` call whose Boolean result
// is discarded. The intended check is likely `output should be (luaOutput)` or
// an element-wise almost-equal comparison; confirm before relying on these
// tests for correctness.
class SpatialCrossMapLRNSpec extends TorchSpec {
"A SpatialCrossMapLRN Layer" should "generate correct output" in {
torchCheck()
// Fixed seed so the BigDL and Lua sides see identical random input.
val seed = 100
RNG.setSeed(seed)
val layer = new SpatialCrossMapLRN[Double](5, 1.0, 0.75, 1.0)
val input = Tensor[Double](16, 3, 224, 224).rand()
val output = layer.updateOutput(input)
val code = "torch.manualSeed(" + seed + ")\\n" +
"layer = nn.SpatialCrossMapLRN(5, 1.0, 0.75, 1.0)\\n" +
"output = layer:forward(input) "
val torchResult = TH.run(code, Map("input" -> input), Array("output"))._2
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
output should be equals luaOutput
}
// Same forward check, but with 32 feature maps to exercise the wide-channel path.
it should "generate correct output when feature map number is large" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val layer = new SpatialCrossMapLRN[Double](5, 1.0, 0.75, 1.0)
val input = Tensor[Double](16, 32, 128, 128).rand()
val output = layer.updateOutput(input)
val code = "torch.manualSeed(" + seed + ")\\n" +
"layer = nn.SpatialCrossMapLRN(5, 1.0, 0.75, 1.0)\\n" +
"output = layer:forward(input) "
val torchResult = TH.run(code, Map("input" -> input), Array("output"))._2
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
output should be equals luaOutput
}
// Backward pass: gradInput must match Torch's updateGradInput.
it should "generate correct gradInput" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val layer = new SpatialCrossMapLRN[Double](5, 1.0, 0.75, 1.0)
val input = Tensor[Double](16, 3, 224, 224).rand()
val gradOutput = Tensor[Double](16, 3, 224, 224).rand()
// Forward must run first so the layer caches the state backward needs.
layer.updateOutput(input)
val output = layer.updateGradInput(input, gradOutput)
val code = "torch.manualSeed(" + seed + ")\\n" +
"layer = nn.SpatialCrossMapLRN(5, 1.0, 0.75, 1.0)\\n" +
"layer:forward(input) " +
"gradInput = layer:updateGradInput(input, gradOutput) "
val torchResult = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
Array("gradInput"))._2
val luaOutput = torchResult("gradInput").asInstanceOf[Tensor[Double]]
output should be equals luaOutput
}
it should "generate correct gradInput when feature map number is large" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val layer = new SpatialCrossMapLRN[Double](5, 1.0, 0.75, 1.0)
val input = Tensor[Double](16, 32, 128, 128).rand()
val gradOutput = Tensor[Double](16, 32, 128, 128).rand()
layer.updateOutput(input)
val output = layer.updateGradInput(input, gradOutput)
val code = "torch.manualSeed(" + seed + ")\\n" +
"layer = nn.SpatialCrossMapLRN(5, 1.0, 0.75, 1.0)\\n" +
"layer:forward(input) " +
"gradInput = layer:updateGradInput(input, gradOutput) "
val torchResult = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
Array("gradInput"))._2
val luaOutput = torchResult("gradInput").asInstanceOf[Tensor[Double]]
output should be equals luaOutput
}
// Numerical gradient check of the layer's backward implementation.
"SpatialCrossMapLRN module" should "be good in gradient check for input" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val layer = new SpatialCrossMapLRN[Double](5, 1.0, 0.75, 1.0)
val input = Tensor[Double](4, 8, 32, 32).apply1(e => Random.nextDouble())
val checker = new GradientChecker(1e-3)
checker.checkLayer[Double](layer, input, 1e-3) should be(true)
}
}
| psyyz10/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/torch/SpatialCrossMapLRNSpec.scala | Scala | apache-2.0 | 4,328 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.pmml.export
import scala.{Array => SArray}
import org.dmg.pmml._
import org.apache.spark.mllib.regression.GeneralizedLinearModel
/**
* PMML Model Export for GeneralizedLinearModel class with binary ClassificationModel
*/
private[mllib] class BinaryClassificationPMMLModelExport(
model : GeneralizedLinearModel,
description : String,
normalizationMethod : RegressionNormalizationMethodType,
threshold: Double)
extends PMMLModelExport {
populateBinaryClassificationPMML()
/**
* Export the input LogisticRegressionModel or SVMModel to PMML format.
*
* Builds a binary RegressionModel with two regression tables: category "1"
* carries the model's weights and intercept, category "0" carries only an
* intercept derived from `threshold` so that PMML scoring reproduces the
* model's decision boundary. Models with no weights produce no PMML model.
*/
private def populateBinaryClassificationPMML(): Unit = {
pmml.getHeader.setDescription(description)
if (model.weights.size > 0) {
val fields = new SArray[FieldName](model.weights.size)
val dataDictionary = new DataDictionary
val miningSchema = new MiningSchema
val regressionTableYES = new RegressionTable(model.intercept).withTargetCategory("1")
var interceptNO = threshold
if (RegressionNormalizationMethodType.LOGIT == normalizationMethod) {
// For logistic regression the threshold is a probability; convert it to
// the equivalent margin via the logit function, clamping the degenerate
// 0/1 cases so category "0" always/never wins respectively.
if (threshold <= 0) {
interceptNO = Double.MinValue
} else if (threshold >= 1) {
interceptNO = Double.MaxValue
} else {
interceptNO = -math.log(1 / threshold - 1)
}
}
val regressionTableNO = new RegressionTable(interceptNO).withTargetCategory("0")
val regressionModel = new RegressionModel()
.withFunctionName(MiningFunctionType.CLASSIFICATION)
.withMiningSchema(miningSchema)
.withModelName(description)
.withNormalizationMethod(normalizationMethod)
.withRegressionTables(regressionTableYES, regressionTableNO)
// One continuous double field per weight, registered in the data
// dictionary, mining schema, and the "1" regression table.
for (i <- 0 until model.weights.size) {
fields(i) = FieldName.create("field_" + i)
dataDictionary.withDataFields(new DataField(fields(i), OpType.CONTINUOUS, DataType.DOUBLE))
miningSchema
.withMiningFields(new MiningField(fields(i))
.withUsageType(FieldUsageType.ACTIVE))
regressionTableYES.withNumericPredictors(new NumericPredictor(fields(i), model.weights(i)))
}
// add target field
val targetField = FieldName.create("target")
dataDictionary
.withDataFields(new DataField(targetField, OpType.CATEGORICAL, DataType.STRING))
miningSchema
.withMiningFields(new MiningField(targetField)
.withUsageType(FieldUsageType.TARGET))
dataDictionary.withNumberOfFields(dataDictionary.getDataFields.size)
pmml.setDataDictionary(dataDictionary)
pmml.withModels(regressionModel)
}
}
}
| pronix/spark | mllib/src/main/scala/org/apache/spark/mllib/pmml/export/BinaryClassificationPMMLModelExport.scala | Scala | apache-2.0 | 3,496 |
package frameless
/**
* Spark's variance and stddev functions always return Double
*/
/**
 * Type-class evidence that Spark's variance and stddev aggregations accept
 * values of type T. Spark always returns Double for these functions,
 * regardless of the input type.
 */
trait CatalystVariance[T]

object CatalystVariance {
  /** All instances are stateless markers, so share one anonymous factory. */
  private[this] def instance[T]: CatalystVariance[T] = new CatalystVariance[T] {}

  implicit val intVariance: CatalystVariance[Int] = instance
  implicit val longVariance: CatalystVariance[Long] = instance
  implicit val shortVariance: CatalystVariance[Short] = instance
  implicit val bigDecimalVariance: CatalystVariance[BigDecimal] = instance
  implicit val doubleVariance: CatalystVariance[Double] = instance
}
| OlivierBlanvillain/frameless | core/src/main/scala/frameless/CatalystVariance.scala | Scala | apache-2.0 | 590 |
package org.scalameter.picklers
import java.nio.ByteBuffer
/**
 * Base pickler for fixed-width primitive values backed by a ByteBuffer.
 * Subclasses supply the value's width in bits and how to read it back.
 */
abstract class PrimitivePickler[T] extends Pickler[T] {
// Width of the encoded value in bits (e.g. java.lang.Integer.SIZE).
protected def bits: Int
// Reads one value of type T from the buffer's current position.
protected def unwrap(from: ByteBuffer): T
// Fresh buffer exactly big enough to hold one encoded value.
final protected def byteBuffer: ByteBuffer = ByteBuffer.allocate(numBytes)
final def numBytes: Int = bits / java.lang.Byte.SIZE
// Decodes one value starting at `from`; the second element of the result is
// the offset of the next value, or -1 when the array has been fully consumed.
final def unpickle(a: Array[Byte], from: Int): (T, Int) = {
val newFrom = if (from + numBytes == a.length) -1 else from + numBytes
(unwrap(ByteBuffer.wrap(a, from, numBytes)), newFrom)
}
}
object UnitPickler extends PrimitivePickler[Unit] {
protected def bits: Int = 0
protected def unwrap(from: ByteBuffer) = ()
def pickle(x: Unit): Array[Byte] = Array.empty[Byte]
}
/** Pickler for a single Byte value. */
object BytePickler extends PrimitivePickler[Byte] {
  protected def bits: Int = java.lang.Byte.SIZE
  protected def unwrap(from: ByteBuffer): Byte = from.get
  def pickle(x: Byte): Array[Byte] = {
    val buf = byteBuffer
    buf.put(x)
    buf.array()
  }
}
/** Pickler for Boolean, encoded as one byte: 1 for true, 0 for false. */
object BooleanPickler extends PrimitivePickler[Boolean] {
  protected def bits: Int = java.lang.Byte.SIZE
  protected def unwrap(from: ByteBuffer): Boolean = {
    val raw = from.get
    if (raw == 1.toByte) true
    else if (raw == 0.toByte) false
    else sys.error(s"Corrupted stream. Expected 0 or 1. Got $raw")
  }
  def pickle(x: Boolean): Array[Byte] = {
    val encoded = if (x) 1.toByte else 0.toByte
    byteBuffer.put(encoded).array()
  }
}
/** Pickler for Char (two bytes, buffer default byte order). */
object CharPickler extends PrimitivePickler[Char] {
  protected def bits: Int = java.lang.Character.SIZE
  protected def unwrap(from: ByteBuffer): Char = from.getChar
  def pickle(x: Char): Array[Byte] = {
    val buf = byteBuffer
    buf.putChar(x)
    buf.array()
  }
}
/** Pickler for Short values. */
object ShortPickler extends PrimitivePickler[Short] {
  protected def bits: Int = java.lang.Short.SIZE
  protected def unwrap(from: ByteBuffer): Short = from.getShort
  def pickle(x: Short): Array[Byte] = {
    val buf = byteBuffer
    buf.putShort(x)
    buf.array()
  }
}
/** Pickler for Int values. */
object IntPickler extends PrimitivePickler[Int] {
  protected def bits: Int = java.lang.Integer.SIZE
  protected def unwrap(from: ByteBuffer): Int = from.getInt
  def pickle(x: Int): Array[Byte] = {
    val buf = byteBuffer
    buf.putInt(x)
    buf.array()
  }
}
/** Pickler for Long values. */
object LongPickler extends PrimitivePickler[Long] {
  protected def bits: Int = java.lang.Long.SIZE
  protected def unwrap(from: ByteBuffer): Long = from.getLong
  def pickle(x: Long): Array[Byte] = {
    val buf = byteBuffer
    buf.putLong(x)
    buf.array()
  }
}
/** Pickler for Float values (IEEE 754 single precision). */
object FloatPickler extends PrimitivePickler[Float] {
  protected def bits: Int = java.lang.Float.SIZE
  protected def unwrap(from: ByteBuffer): Float = from.getFloat
  def pickle(x: Float): Array[Byte] = {
    val buf = byteBuffer
    buf.putFloat(x)
    buf.array()
  }
}
/** Pickler for Double values (IEEE 754 double precision). */
object DoublePickler extends PrimitivePickler[Double] {
  protected def bits: Int = java.lang.Double.SIZE
  protected def unwrap(from: ByteBuffer): Double = from.getDouble
  def pickle(x: Double): Array[Byte] = {
    val buf = byteBuffer
    buf.putDouble(x)
    buf.array()
  }
}
| kjanosz/scalameter | scalameter-core/src/main/scala/org/scalameter/picklers/primitives.scala | Scala | bsd-3-clause | 2,722 |
package com.flowy.fomoapi.routes
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Directives.{pathPrefix, _}
import com.flowy.common.database.TheEverythingBagelDao
import com.flowy.common.models.UserDevice
import com.flowy.fomoapi.services.UserDeviceService
import com.softwaremill.bootzooka.common.api.RoutesSupport
import com.softwaremill.bootzooka.user.api.SessionSupport
import com.typesafe.scalalogging.StrictLogging
import io.circe.Json
import io.circe.generic.auto._
import io.circe.syntax._
import redis.RedisClient
/**
 * HTTP routes for managing a user's registered devices under `/devices`.
 * Every route requires an authenticated session (`userFromSession`).
 * Routes are attempted in concatenation order (delete, get-by-id, update,
 * post, list), so the ordering below is significant.
 */
trait DeviceRoutes extends RoutesSupport with StrictLogging with SessionSupport {
def bagel: TheEverythingBagelDao
def redis: RedisClient
def deviceService: UserDeviceService
// TODO
// when a trade does not execute successfully you need an error log to tell you why
val deviceRoutes = logRequestResult("DeviceRoutes") {
pathPrefix("devices") {
deleteDevice ~
getUserDevice ~
updateDevice ~
postDevice ~
listDevices
}
}
// DELETE /devices/{uuid} — removes the device if it belongs to the session user.
def deleteDevice = {
path(JavaUUID) { deviceId =>
delete {
userFromSession { user =>
onSuccess(deviceService.remove(user.id, deviceId)) {
case Some(device) =>
complete(StatusCodes.OK, JSendResponse(JsonStatus.Success, "", device.asJson))
case None =>
complete(StatusCodes.NotFound, JSendResponse(JsonStatus.Fail, "cannot find device", Json.Null))
}
}
}
}
}
// GET /devices — lists all devices registered to the session user.
def listDevices = {
get {
userFromSession { user =>
onSuccess(deviceService.getUserDevices(user.id)) { devices =>
complete(StatusCodes.OK, JSendResponse(JsonStatus.Success, "", devices.asJson))
}
}
}
}
// GET /devices/{uuid} — returns one device, 404 if absent or not the user's.
def getUserDevice = {
path(JavaUUID) { deviceId =>
get {
userFromSession { user =>
onSuccess(deviceService.getUserDevice(user.id, deviceId)) {
case Some(device) =>
complete(StatusCodes.OK, JSendResponse(JsonStatus.Success, "", device.asJson))
case None =>
complete(StatusCodes.NotFound, JSendResponse(JsonStatus.Fail, "not found", Json.Null))
}
}
}
}
}
// POST /devices — registers a new device; 409 Conflict if it already exists.
def postDevice = {
post {
userFromSession { user =>
entity(as[UserDeviceRequest]) { deviceReq =>
onSuccess(deviceService.addUserDevice(UserDevice(user.id, deviceReq.deviceType, deviceReq.deviceId, deviceReq.deviceToken))) {
case Some(device) =>
complete(StatusCodes.OK, JSendResponse(JsonStatus.Success, "", device.asJson))
case None =>
complete(StatusCodes.Conflict, JSendResponse(JsonStatus.Fail, "", Json.Null))
}
}
}
}
}
// PUT /devices/{uuid} — replaces the device record; 409 Conflict on failure.
def updateDevice = {
path(JavaUUID) { deviceId =>
put {
userFromSession { user =>
entity(as[UserDeviceRequest]) { deviceReq =>
onSuccess(deviceService.update(UserDevice(deviceId, user.id, deviceReq.deviceType, deviceReq.deviceId, deviceReq.deviceToken))) {
case Some(device) =>
complete(StatusCodes.OK, JSendResponse(JsonStatus.Success, "", device.asJson))
case None =>
complete(StatusCodes.Conflict, JSendResponse(JsonStatus.Fail, "", Json.Null))
}
}
}
}
}
}
}
// JSON request payload for registering or updating a device (see DeviceRoutes).
case class UserDeviceRequest(deviceType: String, deviceId: String, deviceToken: String)
| asciiu/fomo | api/src/main/scala/com/flowy/fomoapi/routes/DeviceRoutes.scala | Scala | apache-2.0 | 3,452 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.maven
import java.net.URLClassLoader
import javax.inject.Inject
import javax.inject.Singleton
import org.eclipse.aether.artifact.Artifact
/**
* Implements sharing of Scala classloaders, to save on memory
*/
@Singleton
class ScalaClassLoaderManager @Inject()(logger: MavenLoggerProxy) {

  /**
   * The list of Scala libraries. None of these libraries may have a dependency
   * outside of this list, otherwise there will be classloading issues.
   *
   * Note that while adding more libraries to this list will allow more to be
   * shared, it may also mean that classloaders can be shared in less cases,
   * since it becomes less likely that there will be an exact match between two
   * projects in what can be shared.
   */
  private val sharedScalaModules = Set(
    "org.scala-lang" -> "scala-library",
    "org.scala-lang" -> "scala-reflect",
    "org.scala-lang.modules" -> "scala-xml",
    "org.scala-lang.modules" -> "scala-parser-combinators",
    "org.scala-lang.modules" -> "scala-java8-compat"
  )

  // Matches the cross-build suffix, e.g. "_2.12" in "scala-xml_2.12".
  private val scalaSuffixRegex = "_\\\\d+\\\\.\\\\d+.*$".r

  private def withoutScalaSuffix(artifactId: String): String =
    scalaSuffixRegex.replaceFirstIn(artifactId, "")

  // Order-independent key: sorted group:artifact:version coordinates.
  private def cacheKeyFor(artifacts: Seq[Artifact]): String =
    artifacts
      .map(a => s"${a.getGroupId}:${a.getArtifactId}:${a.getVersion}")
      .sorted
      .mkString(",")

  private var loaderCache = Map.empty[String, ClassLoader]

  /**
   * Extract a Scala ClassLoader from the given classpath, reusing a cached
   * loader when the same set of Scala artifacts has been seen before.
   */
  def extractScalaClassLoader(artifacts: Seq[Artifact]): ClassLoader = synchronized {
    val scalaArtifacts = artifacts.filter { artifact =>
      sharedScalaModules.contains(artifact.getGroupId -> withoutScalaSuffix(artifact.getArtifactId))
    }
    val cacheKey = cacheKeyFor(scalaArtifacts)
    loaderCache.get(cacheKey) match {
      case Some(cached) =>
        logger.debug(s"ScalaClassLoader cache hit - $cacheKey")
        cached
      case None =>
        logger.debug(s"ScalaClassLoader cache miss - $cacheKey")
        // Use System classloader parent as documented here:
        // https://svn.apache.org/repos/infra/websites/production/maven/content/reference/maven-classloading.html#Maven_API_classloader
        // Keep in mind this does not contain any application or javaagent
        // classes, which will be added in the classLoader below.
        //
        // This behaves a little different depending on the Java version used:
        // - For Java 8: the parent is the boostrap class loader (or null),
        //   which in the end means the boostrap class loader is used.
        // - For Java9+: the parent is the platform class loader, a parent or
        //   ancestor of the system class loader that all platform classes are
        //   visible to.
        val parent = ClassLoader.getSystemClassLoader().getParent()
        val loader = new URLClassLoader(scalaArtifacts.map(_.getFile.toURI.toURL).toArray, parent)
        loaderCache += (cacheKey -> loader)
        loader
    }
  }
}
| rcavalcanti/lagom | dev/maven-plugin/src/main/scala/com/lightbend/lagom/maven/ScalaClassLoaderManager.scala | Scala | apache-2.0 | 3,188 |
package at.forsyte.apalache.tla.bmcmt.rules
import at.forsyte.apalache.tla.bmcmt._
import at.forsyte.apalache.tla.lir.OperEx
import at.forsyte.apalache.tla.lir.oper.TlaOper
/**
 * Process a labelled expression by simply stripping the label and rewriting
 * the underlying subexpression.
 *
 * @author Igor Konnov
 */
class LabelRule(rewriter: SymbStateRewriter) extends RewritingRule {
  override def isApplicable(symbState: SymbState): Boolean =
    symbState.ex match {
      case OperEx(TlaOper.label, _*) => true
      case _ => false
    }

  override def apply(state: SymbState): SymbState =
    state.ex match {
      case OperEx(TlaOper.label, labelledEx, _*) =>
        // Rewrite only the labelled subexpression; the label itself is dropped.
        rewriter.rewriteUntilDone(state.setRex(labelledEx))
      case _ =>
        throw new RewriterException("%s is not applicable".format(getClass.getSimpleName), state.ex)
    }
}
| konnov/dach | tla-bmcmt/src/main/scala/at/forsyte/apalache/tla/bmcmt/rules/LabelRule.scala | Scala | apache-2.0 | 818 |
package restapi.http.routes
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.{Directives, Route}
import core.db.users.UsersDao
import core.entities.UserInformationEntityConverters._
import restapi.http.JsonSupport
import restapi.http.routes.support.SecuredAccessSupport
import scala.concurrent.ExecutionContext
import scala.util.Try
/**
 * HTTP routes under `/users`: list all users, look one up by name, and delete
 * the caller's own account. All routes require an authenticated session.
 */
class UsersRoute(usersDao: UsersDao)(implicit ec: ExecutionContext, ac: ApiContext)
extends Directives with SecuredAccessSupport with JsonSupport {
val route: Route =
pathPrefix("users") {
securedAccess { ctx =>
pathEndOrSingleSlash {
// GET /users — all users, mapped to their public entity form.
get {
complete(usersDao.findUsers().map(toUserInformationEntity))
}
} ~
path("name" / Segment) { name =>
// GET /users/name/{name} — 404 when no such user exists.
get {
complete {
val maybeUser = usersDao.findUserByName(name)
maybeUser match {
case Some(user) => toUserInformationEntity(user)
case None => StatusCodes.NotFound
}
}
} ~
// DELETE /users/name/{name} — guarded by privateResourceAccess, so
// presumably `name` must resolve to the session user; the delete then
// targets ctx.userId. TODO confirm name/ctx.userId always coincide.
delete {
privateResourceAccess(ctx, name) {
complete {
// NOTE(review): Try(...) discards both the result and any failure,
// so this endpoint answers 200 OK even if the delete throws —
// confirm this best-effort behavior is intentional.
Try(usersDao.deleteUser(ctx.userId))
StatusCodes.OK
}
}
}
}
}
}
}
| lymr/fun-chat | fun-chat-server/src/main/scala/restapi/http/routes/UsersRoute.scala | Scala | mit | 1,371 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.