code
stringlengths
5
1M
repo_name
stringlengths
5
109
path
stringlengths
6
208
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
5
1M
/** * Copyright 2014 Dropbox, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package djinni import djinni.ast._ import java.io._ import djinni.generatorTools._ import djinni.meta._ import djinni.syntax.Error import djinni.writer.IndentWriter import scala.language.implicitConversions import scala.collection.mutable import scala.util.matching.Regex package object generatorTools { case class Spec( javaOutFolder: Option[File], javaPackage: Option[String], javaClassAccessModifier: JavaAccessModifier.Value, javaIdentStyle: JavaIdentStyle, javaCppException: Option[String], javaAnnotation: Option[String], javaGenerateInterfaces: Boolean, javaNullableAnnotation: Option[String], javaNonnullAnnotation: Option[String], javaImplementAndroidOsParcelable: Boolean, javaUseFinalForRecord: Boolean, cppOutFolder: Option[File], cppHeaderOutFolder: Option[File], cppIncludePrefix: String, cppExtendedRecordIncludePrefix: String, cppNamespace: String, cppIdentStyle: CppIdentStyle, cppFileIdentStyle: IdentConverter, cppOptionalTemplate: String, cppOptionalHeader: String, cppEnumHashWorkaround: Boolean, cppNnHeader: Option[String], cppNnType: Option[String], cppNnCheckExpression: Option[String], cppUseWideStrings: Boolean, jniOutFolder: Option[File], jniHeaderOutFolder: Option[File], jniIncludePrefix: String, jniIncludeCppPrefix: String, jniNamespace: String, jniClassIdentStyle: IdentConverter, jniFileIdentStyle: IdentConverter, jniBaseLibIncludePrefix: String, cppExt: String, cppHeaderExt: 
String, objcOutFolder: Option[File], objcppOutFolder: Option[File], objcIdentStyle: ObjcIdentStyle, objcFileIdentStyle: IdentConverter, objcppExt: String, objcHeaderExt: String, objcIncludePrefix: String, objcExtendedRecordIncludePrefix: String, objcppIncludePrefix: String, objcppIncludeCppPrefix: String, objcppIncludeObjcPrefix: String, objcppNamespace: String, objcBaseLibIncludePrefix: String, objcSwiftBridgingHeaderWriter: Option[Writer], objcSwiftBridgingHeaderName: Option[String], objcClosedEnums: Boolean, outFileListWriter: Option[Writer], skipGeneration: Boolean, yamlOutFolder: Option[File], yamlOutFile: Option[String], yamlPrefix: String) def preComma(s: String) = { if (s.isEmpty) s else ", " + s } def q(s: String) = '"' + s + '"' def firstUpper(token: String) = if (token.isEmpty()) token else token.charAt(0).toUpper + token.substring(1) type IdentConverter = String => String case class CppIdentStyle(ty: IdentConverter, enumType: IdentConverter, typeParam: IdentConverter, method: IdentConverter, field: IdentConverter, local: IdentConverter, enum: IdentConverter, const: IdentConverter) case class JavaIdentStyle(ty: IdentConverter, typeParam: IdentConverter, method: IdentConverter, field: IdentConverter, local: IdentConverter, enum: IdentConverter, const: IdentConverter) case class ObjcIdentStyle(ty: IdentConverter, typeParam: IdentConverter, method: IdentConverter, field: IdentConverter, local: IdentConverter, enum: IdentConverter, const: IdentConverter) object IdentStyle { val camelUpper = (s: String) => s.split('_').map(firstUpper).mkString val camelLower = (s: String) => { val parts = s.split('_') parts.head + parts.tail.map(firstUpper).mkString } val underLower = (s: String) => s val underUpper = (s: String) => s.split('_').map(firstUpper).mkString("_") val underCaps = (s: String) => s.toUpperCase val prefix = (prefix: String, suffix: IdentConverter) => (s: String) => prefix + suffix(s) val javaDefault = JavaIdentStyle(camelUpper, camelUpper, camelLower, 
camelLower, camelLower, underCaps, underCaps) val cppDefault = CppIdentStyle(camelUpper, camelUpper, camelUpper, underLower, underLower, underLower, underCaps, underCaps) val objcDefault = ObjcIdentStyle(camelUpper, camelUpper, camelLower, camelLower, camelLower, camelUpper, camelUpper) val styles = Map( "FooBar" -> camelUpper, "fooBar" -> camelLower, "foo_bar" -> underLower, "Foo_Bar" -> underUpper, "FOO_BAR" -> underCaps) def infer(input: String): Option[IdentConverter] = { styles.foreach((e) => { val (str, func) = e if (input endsWith str) { val diff = input.length - str.length return Some(if (diff > 0) { val before = input.substring(0, diff) prefix(before, func) } else { func }) } }) None } } object JavaAccessModifier extends Enumeration { val Public = Value("public") val Package = Value("package") def getCodeGenerationString(javaAccessModifier: JavaAccessModifier.Value): String = { javaAccessModifier match { case Public => "public " case Package => "/*package*/ " } } } implicit val javaAccessModifierReads: scopt.Read[JavaAccessModifier.Value] = scopt.Read.reads(JavaAccessModifier withName _) final case class SkipFirst() { private var first = true def apply(f: => Unit) { if (first) { first = false } else { f } } } case class GenerateException(message: String) extends java.lang.Exception(message) def createFolder(name: String, folder: File) { folder.mkdirs() if (folder.exists) { if (!folder.isDirectory) { throw new GenerateException(s"Unable to create $name folder at ${q(folder.getPath)}, there's something in the way.") } } else { throw new GenerateException(s"Unable to create $name folder at ${q(folder.getPath)}.") } } def generate(idl: Seq[TypeDecl], spec: Spec): Option[String] = { try { if (spec.cppOutFolder.isDefined) { if (!spec.skipGeneration) { createFolder("C++", spec.cppOutFolder.get) createFolder("C++ header", spec.cppHeaderOutFolder.get) } new CppGenerator(spec).generate(idl) } if (spec.javaOutFolder.isDefined) { if (!spec.skipGeneration) { 
createFolder("Java", spec.javaOutFolder.get) } new JavaGenerator(spec).generate(idl) } if (spec.jniOutFolder.isDefined) { if (!spec.skipGeneration) { createFolder("JNI C++", spec.jniOutFolder.get) createFolder("JNI C++ header", spec.jniHeaderOutFolder.get) } new JNIGenerator(spec).generate(idl) } if (spec.objcOutFolder.isDefined) { if (!spec.skipGeneration) { createFolder("Objective-C", spec.objcOutFolder.get) } new ObjcGenerator(spec).generate(idl) } if (spec.objcppOutFolder.isDefined) { if (!spec.skipGeneration) { createFolder("Objective-C++", spec.objcppOutFolder.get) } new ObjcppGenerator(spec).generate(idl) } if (spec.objcSwiftBridgingHeaderWriter.isDefined) { SwiftBridgingHeaderGenerator.writeAutogenerationWarning(spec.objcSwiftBridgingHeaderName.get, spec.objcSwiftBridgingHeaderWriter.get) SwiftBridgingHeaderGenerator.writeBridgingVars(spec.objcSwiftBridgingHeaderName.get, spec.objcSwiftBridgingHeaderWriter.get) new SwiftBridgingHeaderGenerator(spec).generate(idl) } if (spec.yamlOutFolder.isDefined) { if (!spec.skipGeneration) { createFolder("YAML", spec.yamlOutFolder.get) } new YamlGenerator(spec).generate(idl) } None } catch { case GenerateException(message) => Some(message) } } sealed abstract class SymbolReference case class ImportRef(arg: String) extends SymbolReference // Already contains <> or "" in C contexts case class DeclRef(decl: String, namespace: Option[String]) extends SymbolReference } object Generator { val writtenFiles = mutable.HashMap[String,String]() } abstract class Generator(spec: Spec) { protected def createFile(folder: File, fileName: String, makeWriter: OutputStreamWriter => IndentWriter, f: IndentWriter => Unit): Unit = { if (spec.outFileListWriter.isDefined) { spec.outFileListWriter.get.write(new File(folder, fileName).getPath + "\n") } if (spec.skipGeneration) { return } val file = new File(folder, fileName) val cp = file.getCanonicalPath Generator.writtenFiles.put(cp.toLowerCase, cp) match { case Some(existing) => if (existing 
== cp) { throw GenerateException("Refusing to write \"" + file.getPath + "\"; we already wrote a file to that path.") } else { throw GenerateException("Refusing to write \"" + file.getPath + "\"; we already wrote a file to a path that is the same when lower-cased: \"" + existing + "\".") } case _ => } val fout = new FileOutputStream(file) try { val out = new OutputStreamWriter(fout, "UTF-8") f(makeWriter(out)) out.flush() } finally { fout.close() } } protected def createFile(folder: File, fileName: String, f: IndentWriter => Unit): Unit = createFile(folder, fileName, out => new IndentWriter(out), f) implicit def identToString(ident: Ident): String = ident.name val idCpp = spec.cppIdentStyle val idJava = spec.javaIdentStyle val idObjc = spec.objcIdentStyle def wrapNamespace(w: IndentWriter, ns: String, f: IndentWriter => Unit) { ns match { case "" => f(w) case s => val parts = s.split("::") w.wl(parts.map("namespace "+_+" {").mkString(" ")).wl f(w) w.wl w.wl(parts.map(p => "}").mkString(" ") + s" // namespace $s") } } def wrapAnonymousNamespace(w: IndentWriter, f: IndentWriter => Unit) { w.wl("namespace { // anonymous namespace") w.wl f(w) w.wl w.wl("} // end anonymous namespace") } def writeHppFileGeneric(folder: File, namespace: String, fileIdentStyle: IdentConverter)(name: String, origin: String, includes: Iterable[String], fwds: Iterable[String], f: IndentWriter => Unit, f2: IndentWriter => Unit) { createFile(folder, fileIdentStyle(name) + "." 
+ spec.cppHeaderExt, (w: IndentWriter) => { w.wl("// AUTOGENERATED FILE - DO NOT MODIFY!") w.wl("// This file generated by Djinni from " + origin) w.wl w.wl("#pragma once") if (includes.nonEmpty) { w.wl includes.foreach(w.wl) } w.wl wrapNamespace(w, namespace, (w: IndentWriter) => { if (fwds.nonEmpty) { fwds.foreach(w.wl) w.wl } f(w) } ) f2(w) }) } def writeCppFileGeneric(folder: File, namespace: String, fileIdentStyle: IdentConverter, includePrefix: String)(name: String, origin: String, includes: Iterable[String], f: IndentWriter => Unit) { createFile(folder, fileIdentStyle(name) + "." + spec.cppExt, (w: IndentWriter) => { w.wl("// AUTOGENERATED FILE - DO NOT MODIFY!") w.wl("// This file generated by Djinni from " + origin) w.wl val myHeader = q(includePrefix + fileIdentStyle(name) + "." + spec.cppHeaderExt) w.wl(s"#include $myHeader // my header") val myHeaderInclude = s"#include $myHeader" for (include <- includes if include != myHeaderInclude) w.wl(include) w.wl wrapNamespace(w, namespace, f) }) } def generate(idl: Seq[TypeDecl]) { for (td <- idl.collect { case itd: InternTypeDecl => itd }) td.body match { case e: Enum => assert(td.params.isEmpty) generateEnum(td.origin, td.ident, td.doc, e) case r: Record => generateRecord(td.origin, td.ident, td.doc, td.params, r) case i: Interface => generateInterface(td.origin, td.ident, td.doc, td.params, i) } } def generateEnum(origin: String, ident: Ident, doc: Doc, e: Enum) def generateRecord(origin: String, ident: Ident, doc: Doc, params: Seq[TypeParam], r: Record) def generateInterface(origin: String, ident: Ident, doc: Doc, typeParams: Seq[TypeParam], i: Interface) // -------------------------------------------------------------------------- // Render type expression def withNs(namespace: Option[String], t: String) = namespace match { case None => t case Some("") => "::" + t case Some(s) => "::" + s + "::" + t } def withCppNs(t: String) = withNs(Some(spec.cppNamespace), t) def writeAlignedCall(w: IndentWriter, call: 
String, params: Seq[Field], delim: String, end: String, f: Field => String): IndentWriter = { w.w(call) val skipFirst = new SkipFirst params.foreach(p => { skipFirst { w.wl(delim); w.w(" " * call.length()) } w.w(f(p)) }) w.w(end) } def writeAlignedCall(w: IndentWriter, call: String, params: Seq[Field], end: String, f: Field => String): IndentWriter = writeAlignedCall(w, call, params, ",", end, f) def writeAlignedObjcCall(w: IndentWriter, call: String, params: Seq[Field], end: String, f: Field => (String, String)) = { w.w(call) val skipFirst = new SkipFirst params.foreach(p => { val (name, value) = f(p) skipFirst { w.wl; w.w(" " * math.max(0, call.length() - name.length)); w.w(name) } w.w(":" + value) }) w.w(end) } def normalEnumOptions(e: Enum) = e.options.filter(_.specialFlag == None) def writeEnumOptionNone(w: IndentWriter, e: Enum, ident: IdentConverter) { for (o <- e.options.find(_.specialFlag == Some(Enum.SpecialFlag.NoFlags))) { writeDoc(w, o.doc) w.wl(ident(o.ident.name) + " = 0,") } } def writeEnumOptions(w: IndentWriter, e: Enum, ident: IdentConverter) { var shift = 0 for (o <- normalEnumOptions(e)) { writeDoc(w, o.doc) w.wl(ident(o.ident.name) + (if(e.flags) s" = 1 << $shift" else "") + ",") shift += 1 } } def writeEnumOptionAll(w: IndentWriter, e: Enum, ident: IdentConverter) { for (o <- e.options.find(_.specialFlag == Some(Enum.SpecialFlag.AllFlags))) { writeDoc(w, o.doc) w.w(ident(o.ident.name) + " = ") w.w(normalEnumOptions(e).map(o => ident(o.ident.name)).fold("0")((acc, o) => acc + " | " + o)) w.wl(",") } } // -------------------------------------------------------------------------- def writeMethodDoc(w: IndentWriter, method: Interface.Method, ident: IdentConverter) { val paramReplacements = method.params.map(p => (s"\\b${Regex.quote(p.ident.name)}\\b", s"${ident(p.ident.name)}")) val newDoc = Doc(method.doc.lines.map(l => { paramReplacements.foldLeft(l)((line, rep) => line.replaceAll(rep._1, rep._2)) })) writeDoc(w, newDoc) } def writeDoc(w: 
IndentWriter, doc: Doc) { doc.lines.length match { case 0 => case 1 => w.wl(s"// ${doc.lines.head}") case _ => doc.lines.foreach (l => w.wl(s"// $l")) } } }
happybits/djinni
src/source/generator.scala
Scala
apache-2.0
16,499
package mesosphere.marathon.state import scala.concurrent.Future /** * The entity store is mostly syntactic sugar around the PersistentStore. * The main idea is to handle serializing/deserializing of specific entities. * @tparam T the specific type of entities that are handled by this specific store. */ trait EntityStore[T] { type Deserialize = () => T //returns deserialized T value. type Update = Deserialize => T //Update function Gets an Read and returns the (modified) T def fetch(key: String): Future[Option[T]] def store(key: String, value: T): Future[T] = modify(key)(_ => value) def modify(key: String, onSuccess: (T) => Unit = _ => ())(update: Update): Future[T] /** * Delete entity with given id. * Success: the file was deleted (true) or not existent (false) * Failure: the file could not be deleted * @param key the name of the entity. * @return result, whether the file was existent or not. */ def expunge(key: String, onSuccess: () => Unit = () => ()): Future[Boolean] def names(): Future[Seq[String]] }
ss75710541/marathon
src/main/scala/mesosphere/marathon/state/EntityStore.scala
Scala
apache-2.0
1,077
/* Copyright 2009-2016 EPFL, Lausanne */ import leon.annotation._ import leon.lang._ import leon.io.{ FileInputStream => FIS, FileOutputStream => FOS, StdOut } import scala.annotation.tailrec /** * Some basic image processing. * * General NOTEs * ------------- * * Byte ranges from -128 to 127, not 0 to 255. It is important to remember * that when manipulating individual component as Byte. * * The BMP format uses little endian. * * See https://msdn.microsoft.com/en-us/library/dd183391(v=vs.85).aspx * for the full documentation. */ object ImageProcessing { /*************************************************************************** * Constants * ***************************************************************************/ // Sizes in bytes of several Windows numerical types @inline val WordSize = 2 // 16 bits, unsigned @inline val DwordSize = 4 // 32 bits, unsigned @inline val LongSize = 4 // 32 bits, signed // Maximum size of images @inline val MaxSize = 512 @inline val MaxSurfaceSize = 512 * 512 // handwritten here to inline the values /*************************************************************************** * Basic Algorithms * ***************************************************************************/ def inRange(x: Int, min: Int, max: Int): Boolean = { require(min <= max) min <= x && x <= max } def min(x: Int, y: Int): Int = { if (x <= y) x else y } ensuring { res => res <= x && res <= y && (res == x || res == y) } def max(x: Int, y: Int): Int = { if (x < y) y else x } ensuring { res => x <= res && y <= res && (res == x || res == y) } def clamp(x: Int, down: Int, up: Int): Int = { require(down <= up) max(down, min(x, up)) } ensuring { res => inRange(res, down, up) } /*************************************************************************** * Status * ***************************************************************************/ sealed abstract class Status { def isSuccess: Boolean = this.isInstanceOf[Success] } case class Success() extends Status case class 
OpenError() extends Status case class ReadError() extends Status case class DomainError() extends Status case class InvalidFileHeaderError() extends Status case class InvalidBitmapHeaderError() extends Status case class CorruptedDataError() extends Status case class ImageTooBigError() extends Status case class WriteError() extends Status case class NotImplementedError() extends Status def statusCode(s: Status): Int = s match { case Success() => StdOut.println("success"); 0 case OpenError() => StdOut.println("couldn't open file"); 1 case ReadError() => StdOut.println("couldn't read some expected data"); 2 case DomainError() => StdOut.println("integer out of range"); 3 case InvalidFileHeaderError() => StdOut.println("file format unsupported"); 4 case InvalidBitmapHeaderError() => StdOut.println("bitmap format unsupported"); 5 case CorruptedDataError() => StdOut.println("the file appears to be corrupted"); 6 case ImageTooBigError() => StdOut.println("the image is too big"); 7 case WriteError() => StdOut.println("couldn't write image"); 8 case NotImplementedError() => StdOut.println("not yet implemented"); 99 } /*************************************************************************** * MaybeResult * ***************************************************************************/ // Basically, MaybeResult[A] is Either[A, B] where B is Status abstract class MaybeResult[A] { def isDefined = this match { case Result(_) => true case _ => false } def getResult: A = { require(isDefined) this.asInstanceOf[Result[A]].result } def getStatus: Status = { require(!isDefined) this.asInstanceOf[Failure[A]].status } def toStatus: Status = { if (isDefined) Success() else getStatus } } case class Result[A](result: A) extends MaybeResult[A] case class Failure[A](status: Status) extends MaybeResult[A] { require(status != Success()) } // Extra operations for MaybeResult[Int]. 
implicit class MaybeResultIntOps(val result: MaybeResult[Int]) { def expect(value: Int): MaybeResult[Int] = result match { case Result(res) if res == value => result case Result(_) => Failure[Int](DomainError()) case _ => result // a Failure remains a Failure } } // Combine two, three or four MaybeResult to a MaybeResult of tuple. def combine[A, B](a: MaybeResult[A], b: MaybeResult[B]): MaybeResult[(A, B)] = { if (a.isDefined) { if (b.isDefined) { Result((a.getResult, b.getResult)) } else Failure[(A, B)](b.getStatus) } else Failure[(A, B)](a.getStatus) } def combine[A, B, C](a: MaybeResult[A], b: MaybeResult[B], c: MaybeResult[C]): MaybeResult[(A, B, C)] = { val tmp = combine(combine(a, b), c) tmp match { case Result(((a, b), c)) => Result((a, b, c)) case Failure(status) => Failure[(A, B, C)](status) } } def combine[A, B, C, D](a: MaybeResult[A], b: MaybeResult[B], c: MaybeResult[C], d: MaybeResult[D]): MaybeResult[(A, B, C, D)] = { val tmp = combine(combine(a, b, c), d) tmp match { case Result(((a, b, c), d)) => Result((a, b, c, d)) case Failure(status) => Failure[(A, B, C, D)](status) } } // Convert an Option to a MaybeResult def maybe[A](opt: Option[A], failStatus: Status): MaybeResult[A] = { require(failStatus != Success()) opt match { case Some(result) => Result(result) case None() => Failure(failStatus) } } // Special DSL for Option. implicit class OptionOps[A](val opt: Option[A]) { def toResultOr(failStatus: Status) = { require(failStatus != Success()) maybe(opt, failStatus) } } /*************************************************************************** * Data Structures * ***************************************************************************/ /* * Hold (some) information about the general file structure; * The file header is 14 bytes, the offset refers to the beginning of the file header. */ case class FileHeader(size: Int, offset: Int) { require((14 + 40) <= size && inRange(offset, 14 + 40, size)) // offset cannot be before the end of BitmapHeader. 
} /* * Hold basic information about the bitmap. * * See https://msdn.microsoft.com/en-us/library/dd183376(v=vs.85).aspx * * NOTE We assume that * - The number of bits-per-pixel is 24 (RGB format, 8-bit channels); * - No compression is used; * - The palette is empty. */ case class BitmapHeader(width: Int, height: Int) { require(0 <= width && 0 <= height) } /* * Represent an Image, using the usual RGB channels. * * NOTE use createImage to create a new instance of this class easily. */ case class Image(r: Array[Byte], g: Array[Byte], b: Array[Byte], w: Int, h: Int) { require( r.length == MaxSurfaceSize && g.length == MaxSurfaceSize && b.length == MaxSurfaceSize && inRange(w, 0, MaxSize) && inRange(h, 0, MaxSize) && inRange(w * h, 0, MaxSurfaceSize) ) } @inline // <- in order to "return" the image def createImage(width: Int, height: Int) = { require( inRange(width, 0, MaxSize) && inRange(height, 0, MaxSize) && inRange(width * height, 0, MaxSurfaceSize) ) Image( Array.fill[Byte](MaxSurfaceSize)(0), Array.fill[Byte](MaxSurfaceSize)(0), Array.fill[Byte](MaxSurfaceSize)(0), width, height ) } /*************************************************************************** * I/O functions for WORD, DWORD, LONG, and other helpers * ***************************************************************************/ // Skip a given number of bytes, returning true on success. def skipBytes(fis: FIS, count: Int)(implicit state: leon.io.State): Boolean = { require(fis.isOpen && 0 <= count) var i = 0 var success = true (while (success && i < count) { val opt = fis.tryReadByte() success = opt.isDefined i += 1 }) invariant (inRange(i, 0, count)) success } // Fill the output with copies of the given byte. @tailrec // <- a good indicator that the C compiler could optimise out the recursion. 
def writeBytes(fos: FOS, byte: Byte, count: Int): Boolean = { require(fos.isOpen && 0 <= count) if (count == 0) true else fos.write(byte) && writeBytes(fos, byte, count - 1) } // Attempt to read a WORD (16-bit unsigned integer). // The result is represented using an Int. def maybeReadWord(fis: FIS)(implicit state: leon.io.State): MaybeResult[Int] = { require(fis.isOpen) // From little to big endian def buildShort(b1: Byte, b2: Byte): Int = { (b2 << 8) | (b1 & 0xff) // has Int type } ensuring { short => inRange(short, -32768, 32767) } val byte1 = fis.tryReadByte val byte2 = fis.tryReadByte if (byte1.isDefined && byte2.isDefined) { // Shift range appropriately to respect unsigned numbers representation val signed = buildShort(byte1.get, byte2.get) val unsigned = if (signed < 0) signed + 65536 else signed Result(unsigned) } else Failure[Int](ReadError()) } ensuring { res => res match { case Result(word) => inRange(word, 0, 65536) case _ => true } } // Write a WORD def writeWord(fos: FOS, word: Int): Boolean = { require(fos.isOpen && inRange(word, 0, 65536)) // Shift range appropriatedly to respect integer representation val signed = if (word >= 32768) word - 32768 else word val b2 = (signed >>> 8).toByte val b1 = signed.toByte // Convert big endian to little endian fos.write(b1) && fos.write(b2) } // Attempt to read a DWORD (32-bit unsigned integer). // The result is represented using an Int, and values bigger than 2^31 results in DomainError. 
def maybeReadDword(fis: FIS)(implicit state: leon.io.State): MaybeResult[Int] = { require(fis.isOpen) // From little to big endian def buildInt(b1: Byte, b2: Byte, b3: Byte, b4: Byte): Int = { require(0 <= b4) (b4 << 24) | ((b3 & 0xff) << 16) | ((b2 & 0xff) << 8) | (b1 & 0xff) } ensuring { int => inRange(int, 0, 2147483647) } val byte1 = fis.tryReadByte val byte2 = fis.tryReadByte val byte3 = fis.tryReadByte val byte4 = fis.tryReadByte // the most significant byte if (byte1.isDefined && byte2.isDefined && byte3.isDefined && byte4.isDefined) { if (byte4.get >= 0) { val dword = buildInt(byte1.get, byte2.get, byte3.get, byte4.get) Result(dword) } else Failure[Int](DomainError()) } else Failure[Int](ReadError()) } ensuring { res => res match { case Result(dword) => inRange(dword, 0, 2147483647) case _ => true } } // Write a DWORD def writeDword(fos: FOS, dword: Int): Boolean = { require(fos.isOpen && inRange(dword, 0, 2147483647)) val b4 = (dword >>> 24).toByte val b3 = (dword >>> 16).toByte val b2 = (dword >>> 8).toByte val b1 = dword.toByte // Big endian to little endian conversion fos.write(b1) && fos.write(b2) && fos.write(b3) && fos.write(b4) } // Attempt to read a LONG (32-bit signed integer). // The result is represented using an Int. 
def maybeReadLong(fis: FIS)(implicit state: leon.io.State): MaybeResult[Int] = { require(fis.isOpen) // From little to big endian def buildInt(b1: Byte, b2: Byte, b3: Byte, b4: Byte): Int = { (b4 << 24) | ((b3 & 0xff) << 16) | ((b2 & 0xff) << 8) | (b1 & 0xff) } val byte1 = fis.tryReadByte val byte2 = fis.tryReadByte val byte3 = fis.tryReadByte val byte4 = fis.tryReadByte // the most significant byte if (byte1.isDefined && byte2.isDefined && byte3.isDefined && byte4.isDefined) { val long = buildInt(byte1.get, byte2.get, byte3.get, byte4.get) Result(long) } else Failure[Int](ReadError()) } // Write a LONG def writeLong(fos: FOS, long: Int): Boolean = { require(fos.isOpen) val b4 = (long >>> 24).toByte val b3 = (long >>> 16).toByte val b2 = (long >>> 8).toByte val b1 = long.toByte // Big endian to little endian conversion fos.write(b1) && fos.write(b2) && fos.write(b3) && fos.write(b4) } /*************************************************************************** * I/O functions for the BMP format * ***************************************************************************/ // Attempt to read the file header. // Upon success, 14 bytes have been read. def maybeReadFileHeader(fis: FIS)(implicit state: leon.io.State): MaybeResult[FileHeader] = { require(fis.isOpen) var skipSuccess = skipBytes(fis, WordSize) val sizeRes = maybeReadDword(fis) skipSuccess = skipSuccess && skipBytes(fis, WordSize * 2) val offsetRes = maybeReadDword(fis) combine(sizeRes, offsetRes) match { case _ if !skipSuccess => Failure[FileHeader](ReadError()) case Failure(status) => Failure[FileHeader](status) case Result((size, offset)) => { if (14 <= size && 14 + 40 <= offset && offset <= size) Result(FileHeader(size, offset)) else Failure[FileHeader](InvalidFileHeaderError()) } } } // Attempt to read the bitmap header (minimal version). // Upon success, 18 bytes have been read. 
def maybeReadBitmapHeader(fis: FIS)(implicit state: leon.io.State): MaybeResult[BitmapHeader] = { require(fis.isOpen) var skipSuccess = skipBytes(fis, DwordSize) val widthRes = maybeReadLong(fis) val heightRes = maybeReadLong(fis) skipSuccess = skipSuccess && skipBytes(fis, WordSize) val bppRes = maybeReadWord(fis) val compressionRes = maybeReadWord(fis) combine(widthRes, heightRes, bppRes, compressionRes) match { case _ if !skipSuccess => Failure[BitmapHeader](ReadError()) case Failure(status) => Failure[BitmapHeader](status) case Result((w, h, bpp, compression)) => if (w < 0 || h < 0 || bpp != 24 || compression != 0) { log("width", w) log("height", h) log("bpp", bpp) log("compression", compression) Failure(InvalidBitmapHeaderError()) } else Result(BitmapHeader(w, h)) } } def loadImageData(fis: FIS, image: Image)(implicit state: leon.io.State): Status = { require(fis.isOpen) val size = image.w * image.h var i = 0 var status: Status = Success() (while (status.isSuccess && i < size) { val rOpt = fis.tryReadByte() val gOpt = fis.tryReadByte() val bOpt = fis.tryReadByte() if (rOpt.isEmpty || gOpt.isEmpty || bOpt.isEmpty) { status = ReadError() log("stopped reading data abruptly after", i) } else { image.r(i) = rOpt.get image.g(i) = gOpt.get image.b(i) = bOpt.get } i += 1 }) invariant ( inRange(size, 0, MaxSurfaceSize) && inRange(i, 0, size) ) status } def saveImage(fos: FOS, image: Image): Status = { require(fos.isOpen) def writeFileHeader(): Boolean = { // Size: the headers and 3 channels per pixel, 1 byte per pixel component. 
val size = 14 + 40 + image.w * image.h * 3 val reserved = 0 // two WORDs are reserved val offset = 14 + 40 // after the two headers fos.write(0x42.toByte) && fos.write(0x4d.toByte) && // the signature "BM" writeDword(fos, size) && writeWord(fos, reserved) && writeWord(fos, reserved) && writeDword(fos, offset) } def writeBitmapHeader(): Boolean = { val size = 40 val w = image.w val h = image.h val planes = 1 val bpp = 24 val comp = 0 writeDword(fos, size) && writeLong(fos, w) && writeLong(fos, h) && writeWord(fos, planes) && writeWord(fos, bpp) && writeWord(fos, comp) && writeBytes(fos, 0, 22) // the last 22 bytes are all not relevant for us and are set to 0 } def writeImage(): Boolean = { val count = image.w * image.h var i = 0 var success = true (while (success && i < count) { success = fos.write(image.r(i)) && fos.write(image.g(i)) && fos.write(image.b(i)) i += 1 }) invariant (inRange(count, 0, MaxSurfaceSize) && inRange(i, 0, count)) success } if (writeFileHeader() && writeBitmapHeader() && writeImage()) Success() else WriteError() } /*************************************************************************** * Logging Facilities * ***************************************************************************/ def log(msg: String, x: Int) { StdOut.print(msg) StdOut.print(": ") StdOut.println(x) } def log(h: FileHeader) { log("size", h.size) log("offset", h.offset) } def log(h: BitmapHeader) { log("width", h.width) log("height", h.height) } /*************************************************************************** * Kernel & Image Processing Algorithm * ***************************************************************************/ case class Kernel(size: Int, scale: Int, kernel: Array[Int]) { require( inRange(size, 0, MaxSize) && size % 2 == 1 && size * size == kernel.length && scale != 0 && scale != -1 // avoid division by zero and some particular overflow (*) ) // (*) -2^31 / -1 /* * Apply the kernel on the given channel. 
Return the new value for pixel component * at the given index. */ private def apply(channel: Array[Byte], width: Int, height: Int, index: Int): Byte = { require( channel.length == MaxSurfaceSize && inRange(index, 0, channel.length) && inRange(width, 1, MaxSize) && inRange(height, 1, MaxSize) && inRange(width * height, 0, MaxSurfaceSize) ) // Clamping helper def fix(x: Int, side: Int): Int = { require(0 < side) clamp(x, 0, side - 1) } // Get the color component at the given position in the range [0, 255] def at(col: Int, row: Int): Int = { val c = fix(col, width) val r = fix(row, height) val component = channel(r * width + c) // unsigned if (component < 0) component + 255 else component } ensuring { inRange(_, 0, 255) } val mid = size / 2 val i = index % width val j = index / width var res = 0 var p = -mid (while (p <= mid) { var q = -mid val oldP = p // Fix p for the inner loop (the invariant is not automatically inferred) (while (q <= mid) { val kcol = p + mid val krow = q + mid assert(inRange(krow, 0, size - 1)) assert(inRange(kcol, 0, size - 1)) val kidx = krow * size + kcol // Here, the += and * operation could overflow res += at(i + p, j + q) * kernel(kidx) q += 1 }) invariant (oldP == p && inRange(q, -mid, mid + 1)) p += 1 }) invariant (inRange(p, -mid, mid + 1)) res = clamp(res / scale, 0, 255) res.toByte } def apply(src: Image, dest: Image): Unit = { require(src.w == dest.w && src.h == dest.h) val size = src.w * src.h var i = 0 (while (i < size) { dest.r(i) = apply(src.r, src.w, src.h, i) dest.g(i) = apply(src.g, src.w, src.h, i) dest.b(i) = apply(src.b, src.w, src.h, i) i += 1 }) invariant (inRange(i, 0, size)) } } /*************************************************************************** * Main Program * ***************************************************************************/ @extern def main(args: Array[String]): Unit = _main() def _main(): Int = { implicit val state = leon.io.newState val input = FIS.open("input.bmp") val output = 
FOS.open("output.bmp") val status = if (input.isOpen && output.isOpen) process(input, output) else OpenError() output.close() input.close() statusCode(status) } def process(fis: FIS, fos: FOS)(implicit state: leon.io.State): Status = { require(fis.isOpen && fos.isOpen) /* * // Smooth kernel * val kernel = Kernel(3, 1, Array(1, 1, 1, 1, 2, 1, 1, 1, 1)) */ /* // Edges * val kernel = Kernel(5, 1, Array( * 0, 0, -1, 0, 0, * 0, 0, -1, 0, 0, * -1, -1, 8, -1, -1, * 0, 0, -1, 0, 0, * 0, 0, -1, 0, 0 * )) */ /* // Identity * val kernel = Kernel(5, 1, Array( * 0, 0, 0, 0, 0, * 0, 0, 0, 0, 0, * 0, 0, 1, 0, 0, * 0, 0, 0, 0, 0, * 0, 0, 0, 0, 0 * )) */ /* // Sharpen * val kernel = Kernel(5, 8, Array( * -1, -1, -1, -1, -1, * -1, 2, 2, 2, -1, * -1, 2, 8, 2, -1, * -1, 2, 2, 2, -1, * -1, -1, -1, -1, -1 * )) */ // Blur val kernel = Kernel(5, 25, Array( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 )) def processImage(src: Image): Status = { val dest = createImage(src.w, src.h) kernel.apply(src, dest) saveImage(fos, dest) } val fileHeaderRes = maybeReadFileHeader(fis) val bitmapHeaderRes = maybeReadBitmapHeader(fis) val status = combine(fileHeaderRes, bitmapHeaderRes) match { case Failure(status) => status /* * Report an error when the file is corrupted, i.e. it's too small. * 40 is the minimal bitmap header size, 14 is the file header size. * Note that more sanity check could be done but that's not the main * point of this example. */ case Result((fh, bh)) if fh.size <= 14 + 40 => CorruptedDataError() case Result((fh, bh)) => log(fh) log(bh) // Skip bytes until the start of the bitmap data val toSkip = fh.offset - (14 + 18) // some bytes were already eaten val success = skipBytes(fis, toSkip) // Break test of size so we avoid overflows. 
if (!success) CorruptedDataError() else if (bh.width > MaxSize || bh.height > MaxSize) ImageTooBigError() else if (bh.width * bh.height > MaxSurfaceSize) ImageTooBigError() else { val image = createImage(bh.width, bh.height) val status = loadImageData(fis, image) if (status.isSuccess) processImage(image) else status } } status } }
regb/leon
testcases/genc/ImageProcessing.scala
Scala
gpl-3.0
23,906
package models.daos import models.IvoireModel.Event import scala.concurrent.Future /** * Created by Suamah on 06/08/2015. */ class EventDAO extends DAOSlick with DBTableDefinitions { import driver.api._ import play.api.libs.concurrent.Execution.Implicits.defaultContext def all(): Future[List[Event]] = db.run(events.result).map( _.toList) def insert(event: Event): Future[Unit] = db.run(events returning events.map(_.id)+= event).map(_ => ()) def count(): Future[Int] = db.run(events.length.result) def find(id : Long):Future[Seq[Event]] = db.run(events.filter(_.id === id).result) }
Sam225/history-app
app/models/daos/EventDAO.scala
Scala
apache-2.0
609
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs // Licence: http://www.gnu.org/licenses/gpl-3.0.en.html package org.ensime.fixture import java.io.File import scala.collection.immutable.Queue import scala.tools.nsc.Settings import scala.tools.nsc.interactive.Global import akka.actor.ActorSystem import akka.testkit.TestProbe import org.ensime.api._ import org.ensime.core._ import org.ensime.indexer._ import org.ensime.util.{ PresentationReporter, ReportHandler } import org.ensime.vfs._ import org.slf4j.LoggerFactory trait RichPresentationCompilerFixture { def withRichPresentationCompiler( testCode: (TestKitFix, EnsimeConfig, RichPresentationCompiler) => Any ): Any } final class TestReporter( val handler: TestReportHandler = new TestReportHandler ) extends PresentationReporter(handler) final class TestReportHandler extends ReportHandler { var messages = Queue.empty[String] override def messageUser(str: String): Unit = { messages = messages enqueue str } var clears = 0 override def clearAllScalaNotes(): Unit = { clears += 1 } @volatile var notes = Queue.empty[Note] override def reportScalaNotes(list: List[Note]): Unit = { notes = notes enqueue list } } object RichPresentationCompilerFixture { private[fixture] def create( config: EnsimeConfig, search: SearchService )( implicit system: ActorSystem, vfs: EnsimeVFS ): RichPresentationCompiler = { val scalaLib = config.allJars.find(_.getName.contains("scala-library")).get val presCompLog = LoggerFactory.getLogger(classOf[Global]) val settings = new Settings(presCompLog.error) settings.YpresentationDebug.value = presCompLog.isTraceEnabled settings.YpresentationVerbose.value = presCompLog.isDebugEnabled settings.verbose.value = presCompLog.isDebugEnabled //settings.usejavacp.value = true settings.bootclasspath.append(scalaLib.getAbsolutePath) settings.classpath.value = config.compileClasspath.mkString(File.pathSeparator) val reporter = new TestReporter val indexer = TestProbe() val parent = 
TestProbe() new RichPresentationCompiler( config, settings, reporter, parent.ref, indexer.ref, search ) } } trait IsolatedRichPresentationCompilerFixture extends RichPresentationCompilerFixture with IsolatedEnsimeVFSFixture with IsolatedTestKitFixture with IsolatedSearchServiceFixture { override def withRichPresentationCompiler( testCode: (TestKitFix, EnsimeConfig, RichPresentationCompiler) => Any ): Any = { withVFS { implicit vfs => withTestKit { testkit => import testkit._ withSearchService { (config, search) => import org.ensime.fixture.RichPresentationCompilerFixture._ val pc = create(config, search) try { testCode(testkit, config, pc) } finally { pc.askShutdown() } } } } } } trait SharedRichPresentationCompilerFixture extends RichPresentationCompilerFixture with SharedTestKitFixture with SharedSearchServiceFixture { private[fixture] var pc: RichPresentationCompiler = _ override def beforeAll(): Unit = { super.beforeAll() import org.ensime.fixture.RichPresentationCompilerFixture._ implicit val system = _testkit.system pc = create(_config, _search) } override def withRichPresentationCompiler( testCode: (TestKitFix, EnsimeConfig, RichPresentationCompiler) => Any ): Any = testCode(_testkit, _config, pc) }
d1egoaz/ensime-sbt
src/sbt-test/sbt-ensime/ensime-server/core/src/it/scala/org/ensime/fixture/RichPresentationCompilerFixture.scala
Scala
apache-2.0
3,547
case class Foo(x: Bar) extends AnyVal case class Bar(x: Foo) extends AnyVal class Foo1(val x: Bar1) extends AnyVal class Bar1(val x: Foo1) extends AnyVal
yusuke2255/dotty
tests/untried/neg/t5878.scala
Scala
bsd-3-clause
156
package org.tlc.whereat.ui.layouts import android.widget._ import macroid.ActivityContext import macroid.FullDsl._ /** * Author: @aguestuser * Date: 4/22/15 * License: GPLv2 (https://www.gnu.org/licenses/gpl-2.0.html) */ // layouts are composable! object MainLayouts { // def default(greeting: Option[TextView])(implicit ctx: ActivityContext): Ui[LinearLayout] = // l[LinearLayout]( // w[Button] <~ // text("Get Location") <~ // On.click { // greeting <~ show // }, // w[TextView] <~ // wire(greeting) <~ // MainTweaks.greeting("Hello!") // ) <~ MainTweaks.orient // def locGetter(locText: Option[TextView])(implicit ctx: ActivityContext): Ui[LinearLayout] = // l[LinearLayout]( // w[Button] <~ // text("Get Location") <~ // On.click { // locText <~ show // }, // w[TextView] <~ // wire(locText) <~ // MainTweaks.greeting("Hello!") // ) <~ MainTweaks.orient def layout1(implicit ctx: ActivityContext) = l[LinearLayout]( // `l` aliases `layout` w[TextView], // `w` aliases `widget` w[ImageView], w[Button] ) def layout2(implicit ctx: ActivityContext) = l[FrameLayout]( w[ProgressBar] ) def comboLayout(implicit ctx: ActivityContext) = l[FrameLayout]( layout1, layout2 ) }
the-learning-collective/whereat-macroid
src/main/scala/org/tlc/whereat/ui/layouts/MainLayouts.scala
Scala
gpl-3.0
1,386
package scribe.filter import scribe.LogRecord /** * Filters based on the package name */ object PackageNameFilter extends FilterMatcher { override protected def string[M](record: LogRecord[M]): String = { val index = record.className.lastIndexOf('.') if (index > 0) { record.className.substring(0, index) } else { record.className } } }
outr/scribe
core/shared/src/main/scala/scribe/filter/PackageNameFilter.scala
Scala
mit
374
/* * This file is part of AckCord, licensed under the MIT License (MIT). * * Copyright (c) 2019 Katrix * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package ackcord.gateway import java.time.{Instant, OffsetDateTime} import scala.collection.immutable import ackcord.data._ import ackcord.data.raw._ import ackcord.gateway.GatewayProtocol._ import ackcord.util.{JsonOption, JsonSome, JsonUndefined} import akka.NotUsed import cats.{Eval, Later, Now} import enumeratum.values.{IntCirceEnum, IntEnum, IntEnumEntry} import io.circe.Decoder.Result import io.circe.{Decoder, Encoder, Json} /** * Base trait for all gateway messages. */ sealed trait GatewayMessage[D] { /** * The op code for the message. */ def op: GatewayOpCode /** * The data for the message. */ def d: Eval[Decoder.Result[D]] /** * A sequence number for the message if there is one. */ def s: JsonOption[Int] = JsonUndefined /** * An encoder for the message. 
*/ def dataEncoder: Encoder[D] def t: JsonOption[ComplexGatewayEvent[D, _]] = JsonUndefined } sealed trait EagerGatewayMessage[D] extends GatewayMessage[D] { override def d: Now[Decoder.Result[D]] = Now(Right(nowD)) def nowD: D } /** * Sent with each new event. * @param sequence The seq number. * @param event The sent event. */ case class Dispatch[D](sequence: Int, event: ComplexGatewayEvent[D, _])(implicit val dataEncoder: Encoder[D]) extends GatewayMessage[D] { override val s: JsonSome[Int] = JsonSome(sequence) override val t: JsonSome[ComplexGatewayEvent[D, _]] = JsonSome(event) override def op: GatewayOpCode = GatewayOpCode.Dispatch override def d: Later[Decoder.Result[D]] = event.data } /** * Sent and received to confirm the connection is still going. * @param nowD The previous sequence. */ case class Heartbeat(nowD: Option[Int]) extends EagerGatewayMessage[Option[Int]] { override def op: GatewayOpCode = GatewayOpCode.Heartbeat override def dataEncoder: Encoder[Option[Int]] = Encoder[Option[Int]] } /** * @param token The bot token. * @param properties A map of properties to send. * @param compress If compressed messages should be used. * @param largeThreshold The threshold where the gateway stops sending * offline users. * @param shard The shard info, the first index is the shard id, while the * second is the total amount of shards. * @param presence The presence data to start with. * @param guildSubscriptions If member presence events and similar should be * received. AckCord has not been tested with this * flag. Continue with caution. */ case class IdentifyData( token: String, properties: Map[String, String], compress: Boolean, largeThreshold: Int, shard: Seq[Int], presence: StatusData, guildSubscriptions: Boolean ) object IdentifyData { /** * Create a map of the default properties to send with the identify message. 
*/ def createProperties: Map[String, String] = Map("$os" -> System.getProperty("os.name"), "$browser" -> "AckCord", "$device" -> "AckCord") } /** * Sent by the shard to log in. */ case class Identify(nowD: IdentifyData) extends EagerGatewayMessage[IdentifyData] { override def op: GatewayOpCode = GatewayOpCode.Identify override def dataEncoder: Encoder[IdentifyData] = Encoder[IdentifyData] } /** * @param since If present, instant when the user went idle. * @param game The presence text. * @param status The status of the user. * @param afk If the user is AFK. */ case class StatusData(since: Option[Instant], game: Option[RawActivity], status: PresenceStatus, afk: Boolean) /** * Sent when a presence or status changes. */ case class StatusUpdate(nowD: StatusData) extends EagerGatewayMessage[StatusData] { override def op: GatewayOpCode = GatewayOpCode.StatusUpdate override def dataEncoder: Encoder[StatusData] = Encoder[StatusData] } /** * @param guildId The channel the voice channel is in. * @param channelId The voice channel to join. * @param selfMute If the bot should mute itself. * @param selfDeaf If the bot should deafen itself. */ case class VoiceStateUpdateData(guildId: GuildId, channelId: Option[ChannelId], selfMute: Boolean, selfDeaf: Boolean) /** * Sent by the bot to connect to a voice channel. */ case class VoiceStateUpdate(nowD: VoiceStateUpdateData) extends EagerGatewayMessage[VoiceStateUpdateData] { override def op: GatewayOpCode = GatewayOpCode.VoiceStateUpdate override def dataEncoder: Encoder[VoiceStateUpdateData] = Encoder[VoiceStateUpdateData] } /** * @param token The voice connection token. * @param guildId The guild of the update. * @param endpoint The voice server. 
*/ case class VoiceServerUpdateData(token: String, guildId: GuildId, endpoint: String) case class VoiceServerUpdate(nowD: VoiceServerUpdateData) extends EagerGatewayMessage[VoiceServerUpdateData] { override def op: GatewayOpCode = GatewayOpCode.VoiceServerPing override def dataEncoder: Encoder[VoiceServerUpdateData] = Encoder[VoiceServerUpdateData] } /** * @param token The bot token. * @param sessionId The sessionId received earlier. * @param seq The last seq received. */ case class ResumeData(token: String, sessionId: String, seq: Int) /** * Sent by the shard instead of [[Identify]] when resuming a connection. */ case class Resume(nowD: ResumeData) extends EagerGatewayMessage[ResumeData] { override def op: GatewayOpCode = GatewayOpCode.Resume override def dataEncoder: Encoder[ResumeData] = Encoder[ResumeData] } /** * Sent by the gateway to indicate that the shard should reconnect. */ case object Reconnect extends EagerGatewayMessage[NotUsed] { override def op: GatewayOpCode = GatewayOpCode.Reconnect override def nowD: NotUsed = NotUsed override def dataEncoder: Encoder[NotUsed] = (_: NotUsed) => Json.obj() } /** * @param guildId The guildId(s) to request for. * @param query Return all the users where their username start with this. * or an empty string for all users. * @param limit The amount of users to send, or 0 for all users. * @param presences If the presences of the users should be sent too. * @param userIds Users to fetch. */ case class RequestGuildMembersData( guildId: Either[Seq[GuildId], GuildId], query: String = "", limit: Int = 0, presences: Boolean = false, userIds: Option[Seq[UserId]] ) /** * Sent by the shard to receive all the members of a guild, even logged out ones. 
*/ case class RequestGuildMembers(nowD: RequestGuildMembersData) extends EagerGatewayMessage[RequestGuildMembersData] { override def op: GatewayOpCode = GatewayOpCode.RequestGuildMembers override def dataEncoder: Encoder[RequestGuildMembersData] = Encoder[RequestGuildMembersData] } /** * Sent by the gateway if the session is invalid when resuming a connection. * @param resumable If the connection is resumable. */ case class InvalidSession(resumable: Boolean) extends EagerGatewayMessage[Boolean] { override def op: GatewayOpCode = GatewayOpCode.InvalidSession override def nowD: Boolean = resumable override def dataEncoder: Encoder[Boolean] = Encoder[Boolean] } /** * @param heartbeatInterval The amount of milliseconds inbetween the time * to send a heartbeat. */ case class HelloData(heartbeatInterval: Int) /** * Sent by the gateway as a response to [[Identify]] */ case class Hello(nowD: HelloData) extends EagerGatewayMessage[HelloData] { override def op: GatewayOpCode = GatewayOpCode.Hello override def dataEncoder: Encoder[HelloData] = Encoder[HelloData] } /** * Sent by the gateway as a response to [[Heartbeat]]. */ case object HeartbeatACK extends EagerGatewayMessage[NotUsed] { override def op: GatewayOpCode = GatewayOpCode.HeartbeatACK override def nowD: NotUsed = NotUsed override def dataEncoder: Encoder[NotUsed] = (_: NotUsed) => Json.obj() } /** * All the different opcodes used by the gateway. * @param value The number of the opcode. 
*/ sealed abstract class GatewayOpCode(val value: Int) extends IntEnumEntry object GatewayOpCode extends IntEnum[GatewayOpCode] with IntCirceEnum[GatewayOpCode] { object Dispatch extends GatewayOpCode(0) object Heartbeat extends GatewayOpCode(1) object Identify extends GatewayOpCode(2) object StatusUpdate extends GatewayOpCode(3) object VoiceStateUpdate extends GatewayOpCode(4) object VoiceServerPing extends GatewayOpCode(5) object Resume extends GatewayOpCode(6) object Reconnect extends GatewayOpCode(7) object RequestGuildMembers extends GatewayOpCode(8) object InvalidSession extends GatewayOpCode(9) object Hello extends GatewayOpCode(10) object HeartbeatACK extends GatewayOpCode(11) override def values: immutable.IndexedSeq[GatewayOpCode] = findValues } /** * Base trait for all gateway events. * @tparam D The data this event carries. * @tparam HandlerType The type the cache handler takes. */ sealed trait ComplexGatewayEvent[D, HandlerType] { /** * The name of this event. */ def name: String /** * The raw data this event was created from. Used for debugging and error reporting. */ def rawData: Json /** * The data carried by this event. */ def data: Later[Decoder.Result[D]] /** * Maps the data in this event without evaluating it. */ def mapData[A](f: D => A): Eval[Decoder.Result[A]] = data.map(_.map(f)) } /** * A simpler gateway event where the data type and the handler type are the same. */ sealed trait SimpleGatewayEvent[D] extends ComplexGatewayEvent[D, D] object GatewayEvent { /** * @param v The API version used. * @param user The client user. * @param guilds The guilds for this shard. Not available at first. * @param sessionId The session id. * @param shard The shard info, the first index is the shard id, while the * second is the total amount of shards. */ case class ReadyData( v: Int, user: User, guilds: Seq[UnavailableGuild], sessionId: String, shard: Seq[Int] ) /** * Sent to the shard when Discord is ready to serve requests. 
No requests * should be sent before this has been received. */ case class Ready(rawData: Json, data: Later[Decoder.Result[ReadyData]]) extends SimpleGatewayEvent[ReadyData] { override def name: String = "READY" } /** * Sent to the shard when a previously interrupted connection is resumed. */ case class Resumed(rawData: Json) extends SimpleGatewayEvent[NotUsed] { override def name: String = "RESUMED" override def data: Later[Result[NotUsed]] = Later(Right(NotUsed)) } /** * Base trait for all events that include an optional guild. */ sealed trait OptGuildEvent[D] extends SimpleGatewayEvent[D] { /** * The guild id for this event. */ def guildId: Eval[Decoder.Result[Option[GuildId]]] } /** * Sent to the shard when a new channel is created. * @param data The channel that was created. */ case class ChannelCreate(rawData: Json, data: Later[Decoder.Result[RawChannel]]) extends OptGuildEvent[RawChannel] with ChannelEvent[RawChannel] { override def name: String = "CHANNEL_CREATE" override def guildId: Eval[Decoder.Result[Option[GuildId]]] = mapData(_.guildId) override def channelId: Eval[Decoder.Result[ChannelId]] = mapData(_.id) } /** * Sent to the shard when a channel is edited or updated. * @param data The channel that was edited. This will always be a guild channel. */ case class ChannelUpdate(rawData: Json, data: Later[Decoder.Result[RawChannel]]) extends OptGuildEvent[RawChannel] with ChannelEvent[RawChannel] { override def name: String = "CHANNEL_UPDATE" override def guildId: Eval[Decoder.Result[Option[GuildId]]] = mapData(_.guildId) override def channelId: Eval[Decoder.Result[ChannelId]] = mapData(_.id) } /** * Sent to the shard when a channel is deleted. The current snapshot will * not contain the channel. * @param data The channel that was deleted. 
*/ case class ChannelDelete(rawData: Json, data: Later[Decoder.Result[RawChannel]]) extends OptGuildEvent[RawChannel] with ChannelEvent[RawChannel] { override def name: String = "CHANNEL_DELETE" override def guildId: Eval[Decoder.Result[Option[GuildId]]] = mapData(_.guildId) override def channelId: Eval[Decoder.Result[ChannelId]] = mapData(_.id) } /** * Base trait for an event that includes a channel. */ sealed trait ChannelEvent[D] extends SimpleGatewayEvent[D] { /** * The channel associated with this event. */ def channelId: Eval[Decoder.Result[ChannelId]] } /** * @param guildId The id of the guild where this change happened. * @param channelId The channel where the change happened. * @param timestamp The time the most recent pinned message was pinned. */ case class ChannelPinsUpdateData( guildId: Option[GuildId], channelId: ChannelId, timestamp: JsonOption[OffsetDateTime] ) /** * Sent to the shard when a message is pinned or unpinned in a text * channel. This is not sent when a pinned message is deleted. */ case class ChannelPinsUpdate(rawData: Json, data: Later[Decoder.Result[ChannelPinsUpdateData]]) extends ChannelEvent[ChannelPinsUpdateData] { override def name: String = "CHANNEL_PINS_UPDATE" override def channelId: Eval[Decoder.Result[ChannelId]] = mapData(_.channelId) } /** * Base trait for all simple events that include an optional guild. */ sealed trait GuildEvent[D] extends SimpleGatewayEvent[D] { /** * The guild id for this event. */ def guildId: Eval[Decoder.Result[GuildId]] } /** * Sent to the shard after the shard connects to the gateway, when a * previously unavailable guild becomes available, and when the client * joins a new guild. * @param data The created guild object. */ case class GuildCreate(rawData: Json, data: Later[Decoder.Result[RawGuild]]) extends GuildEvent[RawGuild] { override def name: String = "GUILD_CREATE" override def guildId: Eval[Decoder.Result[GuildId]] = mapData(_.id) } /** * Sent to the shard when a guild object is updated. 
* @param data The updated guild. */ case class GuildUpdate(rawData: Json, data: Later[Decoder.Result[RawGuild]]) extends GuildEvent[RawGuild] { override def name: String = "GUILD_UPDATE" override def guildId: Eval[Decoder.Result[GuildId]] = mapData(_.id) } /** * Sent to the shard either if a guild becomes unavailable due to and * outage, or if the client leaves or is kicked from a guild. * @param data The deleted or unavailable guild. */ case class GuildDelete(rawData: Json, data: Later[Decoder.Result[UnavailableGuild]]) extends GuildEvent[UnavailableGuild] { override def name: String = "GUILD_DELETE" override def guildId: Eval[Decoder.Result[GuildId]] = mapData(_.id) } case class UserWithGuildId(guildId: GuildId, user: User) /** * Base trait for all complex events that include an optional guild. */ sealed trait ComplexGuildEvent[D, HandlerType] extends ComplexGatewayEvent[D, HandlerType] { def guildId: Eval[Decoder.Result[GuildId]] } /** * Sent to the shard when an user is banned from a guild. * @param data The banned user with a guildId of what guild the user was banned from. */ case class GuildBanAdd(rawData: Json, data: Later[Decoder.Result[UserWithGuildId]]) extends ComplexGuildEvent[UserWithGuildId, (GuildId, RawBan)] { override def name: String = "GUILD_BAN_ADD" override def guildId: Eval[Decoder.Result[GuildId]] = mapData(_.guildId) } /** * Sent to the shard when an user is unbanned from a guild. * @param data The unbanned user with a guildId of what guild the user was unbanned from. */ case class GuildBanRemove(rawData: Json, data: Later[Decoder.Result[UserWithGuildId]]) extends ComplexGuildEvent[UserWithGuildId, (GuildId, User)] { override def name: String = "GUILD_BAN_REMOVE" override def guildId: Eval[Decoder.Result[GuildId]] = mapData(_.guildId) } /** * @param guildId The guild where the update occoured. * @param emojis The new emojis. 
*/ case class GuildEmojisUpdateData(guildId: GuildId, emojis: Seq[RawEmoji]) /** * Sent to the shard when the emojis of a guild have been updated. */ case class GuildEmojisUpdate(rawData: Json, data: Later[Decoder.Result[GuildEmojisUpdateData]]) extends GuildEvent[GuildEmojisUpdateData] { override def name: String = "GUILD_EMOJIS_UPDATE" override def guildId: Eval[Decoder.Result[GuildId]] = mapData(_.guildId) } /** * @param guildId The guild where the update occurred. */ case class GuildIntegrationsUpdateData(guildId: GuildId) /** * Sent to the shard when the integrations of a guild were updated. You * have to fetch the integrations yourself. */ case class GuildIntegrationsUpdate(rawData: Json, data: Later[Decoder.Result[GuildIntegrationsUpdateData]]) extends GuildEvent[GuildIntegrationsUpdateData] { override def name: String = "GUILD_INTEGRATIONS_UPDATE" override def guildId: Eval[Decoder.Result[GuildId]] = mapData(_.guildId) } //Remember to edit RawGuildMember when editing this case class RawGuildMemberWithGuild( guildId: GuildId, user: User, nick: Option[String], roles: Seq[RoleId], joinedAt: OffsetDateTime, premiumSince: Option[OffsetDateTime], deaf: Boolean, mute: Boolean ) { def toRawGuildMember: RawGuildMember = RawGuildMember(user, nick, roles, joinedAt, premiumSince, deaf, mute) } object RawGuildMemberWithGuild { def apply(guildId: GuildId, m: RawGuildMember): RawGuildMemberWithGuild = new RawGuildMemberWithGuild(guildId, m.user, m.nick, m.roles, m.joinedAt, m.premiumSince, m.deaf, m.mute) } /** * Sent to the shard when a user joins the guild. * @param data The new guild member, includes a guild id. */ case class GuildMemberAdd(rawData: Json, data: Later[Decoder.Result[RawGuildMemberWithGuild]]) extends GuildEvent[RawGuildMemberWithGuild] { override def name: String = "GUILD_MEMBER_ADD" override def guildId: Eval[Decoder.Result[GuildId]] = mapData(_.guildId) } /** * @param user The user that left. * @param guildId The guild the user left from. 
*/ case class GuildMemberRemoveData(guildId: GuildId, user: User) /** * Sent to the shard when a user leaves the guild (or is kicked or banned). */ case class GuildMemberRemove(rawData: Json, data: Later[Decoder.Result[GuildMemberRemoveData]]) extends GuildEvent[GuildMemberRemoveData] { override def name: String = "GUILD_MEMBER_REMOVE" override def guildId: Eval[Decoder.Result[GuildId]] = mapData(_.guildId) } /** * The fields seen here are all the fields that can change. Looking at the * users [[ackcord.data.raw.RawGuildMember]] for changes is pointless. * @param guildId The guild of the guild member. * @param roles Thew new roles for the guild member. * @param user The user of the updated guild member. * @param nick Nick of the user if one was set. */ case class GuildMemberUpdateData(guildId: GuildId, roles: Seq[RoleId], user: User, nick: Option[String]) //TODO: Nick can probably be null here /** * Sent to the shard when a guild member is updated. */ case class GuildMemberUpdate(rawData: Json, data: Later[Decoder.Result[GuildMemberUpdateData]]) extends GuildEvent[GuildMemberUpdateData] { override def name: String = "GUILD_MEMBER_UPDATE" override def guildId: Eval[Decoder.Result[GuildId]] = mapData(_.guildId) } /** * @param guildId The guild requested for. * @param members The guild members in this chunk. * @param notFound If some users that was requested wasn't found, their ID is returned here. * @param presences The presences of the users if those were requested as well. */ case class GuildMemberChunkData( guildId: GuildId, members: Seq[RawGuildMember], notFound: Option[Seq[UserId]], presences: Option[Seq[RawPresence]] ) /** * Sent to the shard if the shard requests to get all members * (even offline ones) for large guilds using [[RequestGuildMembers]]. 
*/ case class GuildMemberChunk(rawData: Json, data: Later[Decoder.Result[GuildMemberChunkData]]) extends GuildEvent[GuildMemberChunkData] { override def name: String = "GUILD_MEMBER_CHUNK" override def guildId: Eval[Decoder.Result[GuildId]] = mapData(_.guildId) } /** * @param guildId The guild of the modified role. * @param role The modified role. */ case class GuildRoleModifyData(guildId: GuildId, role: RawRole) /** * Sent to the shard when a new role is created. */ case class GuildRoleCreate(rawData: Json, data: Later[Decoder.Result[GuildRoleModifyData]]) extends GuildEvent[GuildRoleModifyData] { override def name: String = "GUILD_ROLE_CREATE" override def guildId: Eval[Decoder.Result[GuildId]] = mapData(_.guildId) } /** * Sent to the shard when a role is updated. */ case class GuildRoleUpdate(rawData: Json, data: Later[Decoder.Result[GuildRoleModifyData]]) extends GuildEvent[GuildRoleModifyData] { override def name: String = "GUILD_ROLE_UPDATE" override def guildId: Eval[Decoder.Result[GuildId]] = mapData(_.guildId) } /** * @param guildId The guild of the deleted role. * @param roleId The deleted role. */ case class GuildRoleDeleteData(guildId: GuildId, roleId: RoleId) /** * Sent to the shard when a role is deleted. */ case class GuildRoleDelete(rawData: Json, data: Later[Decoder.Result[GuildRoleDeleteData]]) extends GuildEvent[GuildRoleDeleteData] { override def name: String = "GUILD_ROLE_DELETE" override def guildId: Eval[Decoder.Result[GuildId]] = mapData(_.guildId) } /** * Sent to the shard when a message is created (posted). * @param data The sent message. 
*/ case class MessageCreate(rawData: Json, data: Later[Decoder.Result[RawMessage]]) extends ChannelEvent[RawMessage] { override def name: String = "MESSAGE_CREATE" override def channelId: Eval[Decoder.Result[ChannelId]] = mapData(_.channelId) } //RawPartialMessage is defined explicitly because we need to handle the author case class RawPartialMessage( id: MessageId, channelId: ChannelId, author: JsonOption[Author[_]], content: JsonOption[String], timestamp: JsonOption[OffsetDateTime], editedTimestamp: JsonOption[OffsetDateTime], tts: JsonOption[Boolean], mentionEveryone: JsonOption[Boolean], mentions: JsonOption[Seq[User]], mentionRoles: JsonOption[Seq[RoleId]], attachment: JsonOption[Seq[Attachment]], embeds: JsonOption[Seq[ReceivedEmbed]], reactions: JsonOption[Seq[Reaction]], nonce: JsonOption[Either[Int, String]], pinned: JsonOption[Boolean], webhookId: JsonOption[String] ) /** * Sent to the shard when a message is updated. * @param data The new message. */ case class MessageUpdate(rawData: Json, data: Later[Decoder.Result[RawPartialMessage]]) extends ChannelEvent[RawPartialMessage] { override def name: String = "MESSAGE_UPDATE" override def channelId: Eval[Decoder.Result[ChannelId]] = mapData(_.channelId) } /** * @param id The deleted message. * @param channelId The channel of the message. * @param guildId The guild this was done in. Can be missing. */ case class MessageDeleteData(id: MessageId, channelId: ChannelId, guildId: Option[GuildId]) /** * Sent to the shard when a message is deleted. */ case class MessageDelete(rawData: Json, data: Later[Decoder.Result[MessageDeleteData]]) extends ChannelEvent[MessageDeleteData] with OptGuildEvent[MessageDeleteData] { override def name: String = "MESSAGE_DELETE" override def channelId: Eval[Decoder.Result[ChannelId]] = mapData(_.channelId) override def guildId: Eval[Decoder.Result[Option[GuildId]]] = mapData(_.guildId) } /** * @param ids The deleted messages. * @param channelId The channel of the deleted messages. 
* @param guildId The guild this was done in. Can be missing. */ case class MessageDeleteBulkData(ids: Seq[MessageId], channelId: ChannelId, guildId: Option[GuildId]) /** * Sent to the shard when multiple messages are deleted at the same time. * Often this is performed by a bot. */ case class MessageDeleteBulk(rawData: Json, data: Later[Decoder.Result[MessageDeleteBulkData]]) extends ChannelEvent[MessageDeleteBulkData] with OptGuildEvent[MessageDeleteBulkData] { override def name: String = "MESSAGE_DELETE_BULK" override def channelId: Eval[Decoder.Result[ChannelId]] = mapData(_.channelId) override def guildId: Eval[Decoder.Result[Option[GuildId]]] = mapData(_.guildId) } /** * @param userId The user that caused the reaction change. * @param channelId The channel of the message. * @param messageId The message the reaction belonged to. * @param guildId The guild this was done in. Can be missing. * @param emoji The emoji the user reacted with. */ case class MessageReactionData( userId: UserId, channelId: ChannelId, messageId: MessageId, guildId: Option[GuildId], emoji: PartialEmoji ) /** * Sent to the shard when a user adds a reaction to a message. */ case class MessageReactionAdd(rawData: Json, data: Later[Decoder.Result[MessageReactionData]]) extends ChannelEvent[MessageReactionData] with OptGuildEvent[MessageReactionData] { override def name: String = "MESSAGE_REACTION_ADD" override def channelId: Eval[Decoder.Result[ChannelId]] = mapData(_.channelId) override def guildId: Eval[Decoder.Result[Option[GuildId]]] = mapData(_.guildId) } /** * Sent to the shard when a user removes a reaction from a message. 
*/ case class MessageReactionRemove(rawData: Json, data: Later[Decoder.Result[MessageReactionData]]) extends ChannelEvent[MessageReactionData] with OptGuildEvent[MessageReactionData] { override def name: String = "MESSAGE_REACTION_REMOVE" override def channelId: Eval[Decoder.Result[ChannelId]] = mapData(_.channelId) override def guildId: Eval[Decoder.Result[Option[GuildId]]] = mapData(_.guildId) } /** * @param channelId The channel of the message. * @param messageId The message the user removed the reactions from. * @param guildId The guild this was done in. Can be missing. */ case class MessageReactionRemoveAllData(channelId: ChannelId, messageId: MessageId, guildId: Option[GuildId]) /** * Sent to the shard when a user removes all reactions from a message. */ case class MessageReactionRemoveAll(rawData: Json, data: Later[Decoder.Result[MessageReactionRemoveAllData]]) extends ChannelEvent[MessageReactionRemoveAllData] with OptGuildEvent[MessageReactionRemoveAllData] { override def name: String = "MESSAGE_REACTION_REMOVE_ALL" override def channelId: Eval[Decoder.Result[ChannelId]] = mapData(_.channelId) override def guildId: Eval[Decoder.Result[Option[GuildId]]] = mapData(_.guildId) } /** * @param user The user of the presence. * @param roles The roles of the user. * @param game The new presence message. * @param guildId The guild where the update took place. * @param status The new status. * @param activities The current activites of the user. */ case class PresenceUpdateData( user: PartialUser, roles: Seq[RoleId], game: Option[RawActivity], guildId: GuildId, status: PresenceStatus, activities: Seq[RawActivity], clientStatus: ClientStatus ) /** * Sent to the shard when the presence of a user updates. 
*/ case class PresenceUpdate(rawData: Json, data: Later[Decoder.Result[PresenceUpdateData]]) extends GuildEvent[PresenceUpdateData] { override def name: String = "PRESENCE_UPDATE" override def guildId: Eval[Decoder.Result[GuildId]] = mapData(_.guildId) } /** * @param channelId The channel where the typing happened. * @param guildId The guild id of where the typing happened. * @param userId The user that began typing. * @param timestamp When user started typing. */ case class TypingStartData(channelId: ChannelId, guildId: Option[GuildId], userId: UserId, timestamp: Instant) /** * Sent to the shard when a user starts typing in a channel. */ case class TypingStart(rawData: Json, data: Later[Decoder.Result[TypingStartData]]) extends ChannelEvent[TypingStartData] { override def name: String = "TYPING_START" override def channelId: Eval[Decoder.Result[ChannelId]] = mapData(_.channelId) } /** * Sent to the shard when a user object is updated. * @param data The new user. */ case class UserUpdate(rawData: Json, data: Later[Decoder.Result[User]]) extends SimpleGatewayEvent[User] { override def name: String = "USER_UPDATE" } /** * Sent to the shard when a user joins/leaves/moves voice channels. * @param data New voice states. */ case class VoiceStateUpdate(rawData: Json, data: Later[Decoder.Result[VoiceState]]) extends OptGuildEvent[VoiceState] { override def name: String = "VOICE_STATUS_UPDATE" override def guildId: Eval[Decoder.Result[Option[GuildId]]] = mapData(_.guildId) } /** * Sent a guilds voice server is updated. Also used when connecting to a voice channel. */ case class VoiceServerUpdate(rawData: Json, data: Later[Decoder.Result[VoiceServerUpdateData]]) extends GuildEvent[VoiceServerUpdateData] { override def name: String = "VOICE_SERVER_UPDATE" override def guildId: Eval[Decoder.Result[GuildId]] = mapData(_.guildId) } /** * @param guildId The guild of the updated webhook. * @param channelId The channel for the webhook. 
*/ case class WebhookUpdateData(guildId: GuildId, channelId: ChannelId) /** * Sent to the shard when guilds webhooks are updated. */ case class WebhookUpdate(rawData: Json, data: Later[Decoder.Result[WebhookUpdateData]]) extends GuildEvent[WebhookUpdateData] { override def name: String = "WEBHOOK_UPDATE" override def guildId: Eval[Decoder.Result[GuildId]] = mapData(_.guildId) } case class IgnoredEvent(name: String, rawData: Json, data: Later[Decoder.Result[Unit]]) extends SimpleGatewayEvent[Unit] }
Katrix-/AckCord
gateway/src/main/scala/ackcord/gateway/gatewayData.scala
Scala
mit
32,889
package adm

// Mutable configuration holder for a server connection.
// NOTE(review): fields use `= _` default initialisation (null for String,
// 0 for Int); callers are expected to populate them after construction.
class LwConfig {
  // Host name of the server; null until assigned.
  var serverName: String = _
  // Port of the server; 0 until assigned.
  var port: Int = _
}

// CoAP-flavoured configuration; currently adds nothing beyond LwConfig.
class LwCoapConfig extends LwConfig
MrLynx93/adm
src/main/scala/adm/LwConfig.scala
Scala
gpl-3.0
117
package io.gatling.keycloak

import akka.actor.ActorRef
import akka.actor.ActorDSL._
import io.gatling.core.action.builder.ActionBuilder
import io.gatling.core.action.Interruptable
import io.gatling.core.config.Protocols
import io.gatling.core.result.writer.DataWriterClient
import io.gatling.core.session.{Expression, Session}
import io.gatling.core.validation.Validation
import org.keycloak.admin.client.resource.RealmResource
import org.keycloak.representations.idm.UserRepresentation

import scala.collection.JavaConversions._

/**
 * Search criteria for a Keycloak admin "find users" call.
 * All criteria are optional; unset ones are passed to the admin API as null.
 *
 * @param requestName name under which the request is recorded in Gatling stats
 * @param realm       the Keycloak realm to search in
 * @param use         optional callback to fold the found users back into the session
 */
case class FindUserAttributes(
  requestName: Expression[String],
  realm: Expression[RealmResource],
  username: Option[Expression[String]] = None,
  firstName: Option[Expression[String]] = None,
  lastName: Option[Expression[String]] = None,
  email: Option[Expression[String]] = None,
  firstResult: Option[Expression[Integer]] = None,
  maxResults: Option[Expression[Integer]] = None,
  use: Option[(Session, List[UserRepresentation]) => Session] = None
) {}

object FindUserBuilder {
  // Lets the Gatling DSL accept a FindUserBuilder wherever an ActionBuilder is expected.
  implicit def toActionBuilder(builder: FindUserBuilder) =
    new FindUserActionBuilder(builder.attributes)
}

/**
 * Immutable fluent builder: each setter returns a new builder with the
 * corresponding attribute filled in.
 */
class FindUserBuilder(private val attributes: FindUserAttributes) {
  def username(username: Expression[String]) = new FindUserBuilder(attributes.copy(username = Some(username)))
  def firstName(firstName: Expression[String]) = new FindUserBuilder(attributes.copy(firstName = Some(firstName)))
  def lastName(lastName: Expression[String]) = new FindUserBuilder(attributes.copy(lastName = Some(lastName)))
  def email(email: Expression[String]) = new FindUserBuilder(attributes.copy(email = Some(email)))
  def firstResult(firstResult: Expression[Integer]) = new FindUserBuilder(attributes.copy(firstResult = Some(firstResult)))
  def maxResults(maxResults: Expression[Integer]) = new FindUserBuilder(attributes.copy(maxResults = Some(maxResults)))
  // NOTE(review): unlike the other setters this returns a FindUserActionBuilder,
  // terminating the chain — presumably intentional (`use` is the last step); confirm.
  def use(use: (Session, List[UserRepresentation]) => Session) = new FindUserActionBuilder(attributes.copy(use = Some(use)))
}

// Builds the actor that executes the find-user action in the scenario chain.
class FindUserActionBuilder(attributes: FindUserAttributes) extends ActionBuilder {
  override def build(next: ActorRef, protocols: Protocols): ActorRef =
    actor(actorName("find-user"))(new FindUserAction(attributes, next))
}

class FindUserAction(
  attributes: FindUserAttributes,
  val next: ActorRef
) extends Interruptable with ExitOnFailure with DataWriterClient {
  // Resolve the realm from the session, run the (blocking) admin search while
  // timing it, record the outcome under `requestName`, then continue with the
  // session — optionally transformed by the `use` callback with the found users.
  // Unset criteria resolve to null via `.orNull`, which the Keycloak API treats
  // as "no filter".
  override def executeOrFail(session: Session): Validation[_] =
    attributes.realm(session).flatMap(realm =>
      Blocking(() =>
        Stopwatch(() => {
          realm.users().search(
            attributes.username.map(a => a(session).get).orNull,
            attributes.firstName.map(a => a(session).get).orNull,
            attributes.lastName.map(a => a(session).get).orNull,
            attributes.email.map(a => a(session).get).orNull,
            attributes.firstResult.map(a => a(session).get).orNull,
            attributes.maxResults.map(a => a(session).get).orNull)
        })
          .recordAndContinue(this, session, attributes.requestName(session).get, users => {
            attributes.use.map(use => use(session, users.toList)).getOrElse(session)
          })
      )
    )
}
rvansa/keycloak-benchmark
src/main/scala/io/gatling/keycloak/FindUser.scala
Scala
apache-2.0
3,239
package org.jetbrains.plugins.scala
package lang
package transformation
package types

/**
 * Tests for the ExpandFunctionType transformer, which rewrites Scala's arrow
 * function-type syntax (`A => B`) into the explicit `FunctionN[...]` form.
 *
 * @author Pavel Fatin
 */
class ExpandFunctionTypeTest extends TransformerTest(new ExpandFunctionType()) {

  // `A => B` becomes `Function1[A, B]`.
  def testSingleArgument(): Unit = check(
    before = "val v: A => B",
    after = "val v: Function1[A, B]"
  )()

  // Parentheses around a single parameter are absorbed.
  def testParenthesis(): Unit = check(
    before = "val v: (A) => B",
    after = "val v: Function1[A, B]"
  )()

  // Arity two maps to Function2.
  def testMultipleArguments(): Unit = check(
    before = "val v: (A, B) => C",
    after = "val v: Function2[A, B, C]"
  )()

  // Already-explicit FunctionN types are left unchanged.
  def testExplicit(): Unit = check(
    before = "val v: Function1[A, B]",
    after = "val v: Function1[A, B]"
  )()
}
ilinum/intellij-scala
test/org/jetbrains/plugins/scala/lang/transformation/types/ExpandFunctionTypeTest.scala
Scala
apache-2.0
680
// Copyright (C) 2011-2012 the original author or authors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package org.scalastyle.scalariform

import org.scalastyle.PositionError
import org.scalastyle.ScalariformChecker
import org.scalastyle.ScalastyleError
import scalariform.lexer.Tokens.VAR
import scalariform.parser.AnonymousFunction
import scalariform.parser.CompilationUnit
import scalariform.parser.FunBody
import scalariform.parser.PatDefOrDcl
import scalariform.parser.TemplateBody

import VisitorHelper.visit

/**
 * Shared implementation for the `var` checkers. Walks the Scalariform AST,
 * tracking whether the current node sits inside a function body, and reports
 * every `var` definition for which `matches` says the context is relevant.
 */
abstract class VarChecker extends ScalariformChecker {
  // Collect all offending `var` definitions and turn each into a PositionError
  // at the token's offset.
  def verify(ast: CompilationUnit): List[ScalastyleError] = {
    val it = for {
      f <- localvisit(false)(ast.immediateChildren(0))
    } yield {
      PositionError(f.firstToken.offset)
    }
    it.toList
  }

  // Decides whether a `var` in the given context (inside a function or not)
  // should be reported by this concrete checker.
  protected def matches(enclosingFunction: Boolean): Boolean

  // Recursive AST walk. `enclosingFunction` flips to true on entering a
  // function/anonymous-function body and resets to false on a template body,
  // so nested classes inside methods are classified correctly.
  private def localvisit(enclosingFunction: Boolean)(ast: Any): List[PatDefOrDcl] = ast match {
    // A `var` in a matching context: report it, and keep walking its subtree.
    case t: PatDefOrDcl if t.valOrVarToken.tokenType == VAR && matches(enclosingFunction) =>
      List(t) ::: visit(t, localvisit(enclosingFunction))
    case t: TemplateBody => visit(t, localvisit(false))
    case t: FunBody => visit(t, localvisit(true))
    case t: AnonymousFunction => visit(t, localvisit(true))
    case t: Any => visit(t, localvisit(enclosingFunction))
  }
}

// Flags `var`s declared locally, inside function bodies.
class VarLocalChecker extends VarChecker {
  val errorKey = "var.local"
  override protected def matches(enclosingFunction: Boolean): Boolean = enclosingFunction
}

// Flags `var`s declared as fields, outside any function body.
class VarFieldChecker extends VarChecker {
  val errorKey = "var.field"
  override protected def matches(enclosingFunction: Boolean): Boolean = !enclosingFunction
}
firebase/scalastyle
src/main/scala/org/scalastyle/scalariform/VarChecker.scala
Scala
apache-2.0
2,280
package justin.db

import scala.language.implicitConversions

package object vectorclocks {

  /**
   * Enriches a plain string with a parser that builds a [[VectorClock]].
   * The expected format is comma-separated `id:counter` pairs,
   * e.g. "A:1, B:1, C:1".
   */
  implicit class VectorClockOps(plain: String) {
    def toVectorClock[Id](implicit string2Id: String => Id): VectorClock[Id] = {
      // Each comma-separated entry is an "id:counter" pair; whitespace around
      // entries is tolerated.
      val entries = plain.split(",").map { entry =>
        val Array(key, value) = entry.trim.split(":")
        string2Id(key) -> Counter(value.toInt)
      }
      VectorClock(entries.toMap)
    }
  }

  object VectorClockOps {
    // Convenience conversions: interpret the string's ids as String or Int keys.
    implicit def stringAsId(s: String): VectorClock[String] = s.toVectorClock[String]
    implicit def intAsId(s: String): VectorClock[Int] = s.toVectorClock[Int](_.toInt)
  }
}
speedcom/JustinDB
justin-vector-clocks/src/main/scala/justin/db/vectorclocks/package.scala
Scala
apache-2.0
684
/**
 * Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
 * See accompanying LICENSE file.
 */
package kafka.manager.utils

import kafka.manager.model.{ClusterTuning, Kafka_0_8_1_1, ClusterConfig}
import org.scalatest.{Matchers, FunSuite}

/**
 * Round-trip serialization tests for ClusterConfig across Kafka versions,
 * plus validation of cluster name / version arguments.
 *
 * @author hiral
 */
class TestClusterConfig extends FunSuite with Matchers {

  // Cluster names with special characters are rejected.
  test("invalid name") {
    intercept[IllegalArgumentException] {
      ClusterConfig("qa!","0.8.1.1","localhost",jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = None)
    }
  }

  // Unknown Kafka version strings are rejected.
  test("invalid kafka version") {
    intercept[IllegalArgumentException] {
      ClusterConfig("qa","0.8.1","localhost:2181",jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = None)
    }
  }

  // NOTE(review): the test name says 0.8.1.1 but the config uses "0.8.2.0" —
  // looks like a copy/paste slip in the name; confirm against upstream intent.
  test("serialize and deserialize 0.8.1.1") {
    val cc = ClusterConfig("qa","0.8.2.0","localhost:2181", jmxEnabled = true, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = None)
    val serialize: String = ClusterConfig.serialize(cc)
    val deserialize = ClusterConfig.deserialize(serialize)
    assert(deserialize.isSuccess === true)
    assert(cc == deserialize.get)
  }

  // JMX username/password must survive a serialize/deserialize round trip.
  test("serialize and deserialize 0.8.2.0 +jmx credentials") {
    val cc = ClusterConfig("qa","0.8.2.0","localhost:2181", jmxEnabled = true, jmxUser = Some("mario"), jmxPass = Some("rossi"), jmxSsl = false, pollConsumers = true, filterConsumers = true, tuning = None)
    val serialize: String = ClusterConfig.serialize(cc)
    val deserialize = ClusterConfig.deserialize(serialize)
    assert(deserialize.isSuccess === true)
    assert(cc == deserialize.get)
  }

  test("serialize and deserialize 0.8.2.0") {
    val cc = ClusterConfig("qa","0.8.2.0","localhost:2181", jmxEnabled = true, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = None)
    val serialize: String = ClusterConfig.serialize(cc)
    val deserialize = ClusterConfig.deserialize(serialize)
    assert(deserialize.isSuccess === true)
    assert(cc == deserialize.get)
  }

  test("serialize and deserialize 0.8.2.1") {
    val cc = ClusterConfig("qa","0.8.2.1","localhost:2181", jmxEnabled = true, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = None)
    val serialize: String = ClusterConfig.serialize(cc)
    val deserialize = ClusterConfig.deserialize(serialize)
    assert(deserialize.isSuccess === true)
    assert(cc == deserialize.get)
  }

  test("serialize and deserialize 0.8.2.2") {
    val cc = ClusterConfig("qa","0.8.2.2","localhost:2181", jmxEnabled = true, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = None)
    val serialize: String = ClusterConfig.serialize(cc)
    val deserialize = ClusterConfig.deserialize(serialize)
    assert(deserialize.isSuccess === true)
    assert(cc == deserialize.get)
  }

  // Backwards compatibility: JSON lacking version/jmx fields must deserialize,
  // with the version defaulting to 0.8.1.1.
  test("deserialize without version and jmxEnabled") {
    val cc = ClusterConfig("qa","0.8.2.0","localhost:2181", jmxEnabled = false, pollConsumers = true, filterConsumers = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = None)
    val serialize: String = ClusterConfig.serialize(cc)
    // Strip the fields out of the serialized JSON to simulate an old payload.
    val noverison = serialize.replace(""","kafkaVersion":"0.8.2.0"""","").replace(""","jmxEnabled":false""","").replace(""","jmxSsl":false""","")
    assert(!noverison.contains("kafkaVersion"))
    assert(!noverison.contains("jmxEnabled"))
    assert(!noverison.contains("jmxSsl"))
    val deserialize = ClusterConfig.deserialize(noverison)
    assert(deserialize.isSuccess === true)
    assert(cc.copy(version = Kafka_0_8_1_1) == deserialize.get)
  }

  // Legacy "0.8.2-beta" payloads are read back as 0.8.2.0.
  test("deserialize from 0.8.2-beta as 0.8.2.0") {
    val cc = ClusterConfig("qa","0.8.2-beta","localhost:2181", jmxEnabled = false, pollConsumers = true, filterConsumers = true, activeOffsetCacheEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false, tuning = None)
    val serialize: String = ClusterConfig.serialize(cc)
    // Rewrite the serialized version string back to the beta label.
    val noverison = serialize.replace(""","kafkaVersion":"0.8.2.0"""",""","kafkaVersion":"0.8.2-beta"""")
    val deserialize = ClusterConfig.deserialize(noverison)
    assert(deserialize.isSuccess === true)
    assert(cc == deserialize.get)
  }

  // Full tuning block (all fifteen tuning knobs populated) must round-trip.
  test("deserialize from 0.9.0.1") {
    val cc = ClusterConfig("qa","0.9.0.1","localhost:2181", jmxEnabled = false, pollConsumers = true, filterConsumers = true, activeOffsetCacheEnabled = true, jmxUser = None, jmxPass = None, jmxSsl = false,
      tuning = Option(ClusterTuning(Option(1),Option(2),Option(3), Option(4), Option(5), Option(6), Option(7), Option(8), Option(9), Option(10), Option(11), Option(12), Option(13), Option(14), Option(15)))
    )
    val serialize: String = ClusterConfig.serialize(cc)
    val deserialize = ClusterConfig.deserialize(serialize)
    assert(deserialize.isSuccess === true)
    assert(cc == deserialize.get)
  }
}
herokumx/heroku-kafka-manager
test/kafka/manager/utils/TestClusterConfig.scala
Scala
apache-2.0
5,043
package net.tomasherman.specus.server.net

import org.jboss.netty.channel.{Channel, ChannelHandlerContext}
import org.jboss.netty.buffer.ChannelBuffer
import org.jboss.netty.handler.codec.replay.{VoidEnum, ReplayingDecoder}
import net.tomasherman.specus.server.api.net.CodecRepository
import net.tomasherman.specus.common.api.logging.Logging

/**
 * This file is part of Specus.
 *
 * Specus is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Specus is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with Specus. If not, see <http://www.gnu.org/licenses/>.
 */

/** Implementation of ReplayingDecoder using CodecBased ProtocolDEcoder to
  * actually decode stuff.
  *
  * The `env` parameter is a structural type: anything exposing a
  * `codecRepository` will do (duck-typed dependency injection).
  */
class SpecusDecoder(val env: {val codecRepository: CodecRepository})
  extends ReplayingDecoder[VoidEnum] with CodecBasedProtocolDecoder with Logging {

  // Netty callback. Delegates to the single-argument decode(buffer) —
  // presumably provided by the mixed-in CodecBasedProtocolDecoder; the
  // replaying-decoder `state` is unused (VoidEnum carries no state).
  def decode(ctx: ChannelHandlerContext, channel: Channel, buffer: ChannelBuffer, state: VoidEnum) = {
    decode(buffer)
  }
}
tomasherman/specus
server/src/main/scala/net/SpecusDecoder.scala
Scala
gpl-3.0
1,408
package firecrest.actors

import java.util.concurrent.atomic.AtomicLong
import javax.inject.Inject

import akka.actor._
import akka.stream.scaladsl._
import akka.stream.{ActorMaterializer, ClosedShape}
import akkaguiceutils.GuiceUtils
import com.softwaremill.react.kafka.ReactiveKafka
import firecrest.config.{ElasticSearchConfig, KafkaConfiguration}

import scala.concurrent.duration._

/**
 * Actor that wires a Kafka-backed publisher into an Elasticsearch-writing
 * subscriber via an akka-streams graph: lines are batched (by count or time)
 * and each batch is tagged with a unique id before being indexed.
 */
class IndexerActor @Inject() (kafkaConfig: KafkaConfiguration,
                              elasticSearchConfig: ElasticSearchConfig)
  extends Actor with ActorLogging with GuiceUtils {

  implicit val materializer = ActorMaterializer()

  import context._

  val kafka = new ReactiveKafka()

  private val consumerProperties = props(classOf[KafkaActorPublisher])
  log.info(s"Consumer properties: $consumerProperties")
  // NOTE(review): identical to `consumerProperties` above — looks like an
  // accidental duplicate; confirm whether one of the two can be removed.
  private val consumerActorProps = props(classOf[KafkaActorPublisher])
  private val kafkaSource = Source.actorPublisher[String](consumerActorProps)

  // Monotonically increasing id assigned to each emitted batch.
  val batchId = new AtomicLong()

  private val graph = RunnableGraph.fromGraph(GraphDSL.create(kafkaSource) {
    implicit builder =>
      implicit source =>
        import GraphDSL.Implicits._

        // Batch up to 10000 lines, or whatever arrived within 5 seconds.
        val group = Flow[String].groupedWithin(10000, 5000.millis)
        // Wrap each batch with its unique id for the ES subscriber.
        val batchWrap = Flow[Seq[String]].map(lines =>
          EsActorSubscriber.Batch(batchId.incrementAndGet(), lines)
        )
        val esSink = Sink.actorSubscriber(props(classOf[EsActorSubscriber]))

        // @formatter:off
        source.out ~> group ~> batchWrap ~> esSink.async
        // @formatter:on

        ClosedShape
  })

  // Materialize the stream and watch its source actor so we learn of failures.
  val sourceActor: ActorRef = graph.run()
  watch(sourceActor)
  log.info(s"Started. Watching $sourceActor")

  override def receive: Receive = {
    // Deliberate crash hook: throwing lets supervision handle the restart.
    case "die" =>
      log.info("Witness me")
      throw new RuntimeException("Kafka failure")
    // The watched source actor died: schedule our own crash 3s later —
    // presumably to let supervision rebuild the whole pipeline; confirm.
    case terminated: Terminated =>
      log.info(s"Received terminated: $terminated")
      log.info("Asking to crash")
      system.scheduler.scheduleOnce(3000.millis, self, "die")
  }
}
redvasily/firecrest
firecrest/src/main/java/firecrest/actors/IndexerActor.scala
Scala
apache-2.0
2,006
/*
 * Copyright 2014 IBM Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.ibm.spark.kernel.protocol.v5.kernel.socket

import org.scalatest.{FunSpec, Matchers}

// Verifies SocketConnection's string rendering.
class SocketConnectionSpec extends FunSpec with Matchers {
  describe("SocketConnection"){
    describe("#toString"){
      it("should properly format connection string"){
        // A connection renders as "<protocol>://<host>:<port>".
        val connection: SocketConnection = SocketConnection("tcp", "127.0.0.1", 1234)
        connection.toString should be ("tcp://127.0.0.1:1234")
      }
    }
  }
}
yeghishe/spark-kernel
kernel/src/test/scala/com/ibm/spark/kernel/protocol/v5/kernel/socket/SocketConnectionSpec.scala
Scala
apache-2.0
1,024
package me.gregd.cineworld.util

import scala.concurrent.Future
import scala.concurrent.duration.Duration
import scalacache.ScalaCache
import scalacache.serialization.Codec

/**
 * A scalacache backend that stores nothing: every read misses and every
 * write is silently discarded. Useful for disabling caching while keeping
 * the caching API in place.
 */
object NoOpCache extends scalacache.Cache[Array[Byte]] {

  // Ready-made ScalaCache wrapper around this backend.
  val cache = ScalaCache(this)

  // Always a cache miss.
  def get[V](key: String)(implicit codec: Codec[V, Array[Byte]]): Future[Option[V]] = Future.successful(None)

  // Drops the value; the TTL is ignored.
  def put[V](key: String, value: V, ttl: Option[Duration])(implicit codec: Codec[V, Array[Byte]]): Future[Unit] = Future.successful(())

  // Nothing is stored, so removal is a no-op.
  def remove(key: String): Future[Unit] = Future.successful(())

  def removeAll(): Future[Unit] = Future.successful(())

  // No resources to release.
  def close(): Unit = ()
}
Grogs/cinema-service
domain/src/main/scala/me/gregd/cineworld/util/NoOpCache.scala
Scala
gpl-3.0
680
package endpoints

import scalaj.http.HttpRequest

/**
 * Wrapper for the Spotify Web API search endpoint.
 */
object SearchEndpoint extends SpotifyEndpoint {

  private val searchEndpoint = baseAPIUrl + "/v1/search/"

  /**
   * Build a search request for the given query, restricted to the given
   * item types (joined into a single comma-separated `type` parameter).
   */
  def search(query: String, queryType: Seq[String]): HttpRequest =
    createRequest(
      endpoint = searchEndpoint,
      params = Seq("q" -> query, "type" -> queryType.mkString(","))
    )
}
Jakeway/spotify-web-api-scala
src/main/scala/endpoints/SearchEndpoint.scala
Scala
mit
385
package com.github.mijicd.waes.api

import akka.pattern.ask
import akka.routing.{Broadcast, RoundRobinPool}
import akka.util.Timeout
import com.github.mijicd.waes.domain.DataKeeper.{Compare, StoreLeft, StoreRight}
import com.github.mijicd.waes.domain.{DataKeeper, Decoder, DiffResult}
import com.github.mijicd.waes.representations.{Data, JsonFormats}
import spray.http.StatusCodes
import spray.routing.HttpService

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.SECONDS
import scala.util.{Failure, Success}

/**
 * Spray routes for the diff API: PUT the left/right sides of a comparison
 * under an id, then GET the diff result for that id. Stored payloads are
 * broadcast to a pool of DataKeeper actors.
 */
trait Diffs extends HttpService with Decoder {
  import JsonFormats._

  // Upper bound on how long a Compare ask may take.
  implicit val timeout = Timeout(5, SECONDS)

  // Five DataKeeper workers behind a round-robin router.
  val workerPool = RoundRobinPool(5).props(DataKeeper.props())
  val workers = actorRefFactory.actorOf(workerPool)

  val comparatorRoutes = storeLeft ~ storeRight ~ compare

  // PUT /v1/diff/{id}/left — decode the payload and broadcast it to every
  // worker so each keeps a consistent copy; echoes the payload back.
  private lazy val storeLeft = put {
    path("v1" / "diff" / Segment / "left") { id =>
      entity(as[Data]) { data =>
        val message = StoreLeft(id, decode(data.content))
        workers ! Broadcast(message)
        complete(data)
      }
    }
  }

  // PUT /v1/diff/{id}/right — mirror of storeLeft for the right-hand side.
  private lazy val storeRight = put {
    path("v1" / "diff" / Segment / "right") { id =>
      entity(as[Data]) { data =>
        val message = StoreRight(id, decode(data.content))
        workers ! Broadcast(message)
        complete(data)
      }
    }
  }

  // GET /v1/diff/{id} — ask the pool (any one worker) for the diff result;
  // failures surface as 500 with the exception.
  private lazy val compare = get {
    path("v1" / "diff" / Segment) { id =>
      val retrieval = (workers ? Compare(id)).mapTo[DiffResult]
      onComplete(retrieval) {
        case Success(status) => complete(status)
        case Failure(ex) => complete(StatusCodes.InternalServerError, ex)
      }
    }
  }
}
mijicd/spray-json-diff
src/main/scala/com/github/mijicd/waes/api/Diffs.scala
Scala
mit
1,738
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License
 */

package org.apache.toree.utils.json

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.types.StructType
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, FunSpec}
import org.mockito.Mockito._
import org.mockito.Matchers._
import play.api.libs.json.{JsArray, JsString, Json}

/**
 * Verifies RddToJson.convert turns a DataFrame into a JSON object carrying
 * "columns" (field names) and "rows" (string cell values). The DataFrame,
 * its schema, and the underlying RDD are all Mockito mocks.
 */
class RddToJsonSpec extends FunSpec with MockitoSugar with Matchers {

  val mockDataFrame = mock[DataFrame]
  val mockRdd = mock[RDD[Any]]
  val mockStruct = mock[StructType]
  val columns = Seq("foo", "bar").toArray
  val rows = Array(
    Array("a", "b"),
    Array("c", "d")
  )

  // Stub the DataFrame pipeline: schema -> field names, map -> mocked RDD,
  // take -> fixed rows.
  doReturn(mockStruct).when(mockDataFrame).schema
  doReturn(columns).when(mockStruct).fieldNames
  doReturn(mockRdd).when(mockDataFrame).map(any())(any())
  doReturn(rows).when(mockRdd).take(anyInt())

  describe("RddToJson") {
    describe("#convert(SchemaRDD)") {
      it("should convert to valid JSON object") {

        val json = RddToJson.convert(mockDataFrame)
        val jsValue = Json.parse(json)

        // NOTE(review): `\\` is play-json's recursive lookup returning a
        // Seq[JsValue]; comparing it against a JsArray looks suspicious —
        // a single `\` lookup may have been intended. Confirm against the
        // play-json version this project pins.
        jsValue \\ "columns" should be (JsArray(Seq(JsString("foo"), JsString("bar"))))
        jsValue \\ "rows" should be (JsArray(Seq(
          JsArray(Seq(JsString("a"), JsString("b"))),
          JsArray(Seq(JsString("c"), JsString("d"))))))
      }
    }
  }
}
asorianostratio/incubator-toree
kernel/src/test/scala/org/apache/toree/utils/json/RddToJsonSpec.scala
Scala
apache-2.0
2,108
//   Copyright 2014 Commonwealth Bank of Australia
//
//   Licensed under the Apache License, Version 2.0 (the "License");
//   you may not use this file except in compliance with the License.
//   You may obtain a copy of the License at
//
//       http://www.apache.org/licenses/LICENSE-2.0
//
//   Unless required by applicable law or agreed to in writing, software
//   distributed under the License is distributed on an "AS IS" BASIS,
//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//   See the License for the specific language governing permissions and
//   limitations under the License.

package com.cba.omnia.edge
package source.memory

import cascading.flow.FlowProcess
import cascading.tap.Tap
import cascading.tuple.Tuple
import cascading.tuple.Fields

import com.twitter.maple.tap.MemorySourceTap
import com.twitter.maple.tap.TupleMemoryInputFormat, TupleMemoryInputFormat.TUPLES_PROPERTY
import com.twitter.maple.tap.TupleWrapper

import org.apache.hadoop.io.NullWritable
import org.apache.hadoop.mapred._

import java.util.{List => JList}
import java.util.UUID

import scala.collection.JavaConverters._
import scala.collection.mutable.Buffer

/**
 * A [[MemorySourceTap.MemorySourceScheme]] whose in-memory tuples are spread
 * across a configurable number of mappers via `DistributableMemoryInputFormat`.
 *
 * @param tuples  the tuples to serve as the source data
 * @param fields  the fields describing each tuple
 * @param id      unique path-like identifier used as the job's input path
 * @param mappers number of mappers to distribute the tuples over
 */
class DistributableMemorySourceScheme(tuples: JList[Tuple], fields: Fields, id: String, mappers: Int)
    extends MemorySourceTap.MemorySourceScheme(tuples, fields, id) {

  /**
   * Configures the job to read the in-memory tuples through
   * `DistributableMemoryInputFormat`, storing the tuples and mapper count
   * in the job configuration.
   */
  // Explicit `: Unit =` replaces the deprecated procedure syntax.
  override def sourceConfInit(
    flowProcess: FlowProcess[JobConf],
    tap: Tap[JobConf, RecordReader[TupleWrapper, NullWritable], Void],
    conf: JobConf
  ): Unit = {
    FileInputFormat.setInputPaths(conf, id)
    conf.setInt(DistributableMemoryInputFormat.Mappers, mappers)
    conf.setInputFormat(classOf[DistributableMemoryInputFormat])
    TupleMemoryInputFormat.storeTuples(conf, TUPLES_PROPERTY, tuples)
  }
}

object DistributableMemorySourceScheme {
  /**
   * Creates a scheme backed by `tuples`, generating a fresh unique id path
   * so concurrent schemes never collide.
   */
  def apply(tuples: Buffer[Tuple], fields: Fields, mappers: Int): DistributableMemorySourceScheme =
    new DistributableMemorySourceScheme(tuples.asJava, fields, "/" + UUID.randomUUID.toString, mappers)
}
CommBank/edge
src/main/scala/com/cba/omnia/edge/source/memory/DistributableMemorySourceScheme.scala
Scala
apache-2.0
1,992
package com.ibm.spark.kernel.api

import java.io.{InputStream, OutputStream}

import com.ibm.spark.kernel.protocol.v5
import com.ibm.spark.kernel.protocol.v5.{KMBuilder, KernelMessage}
import com.ibm.spark.kernel.protocol.v5.kernel.ActorLoader
import com.ibm.spark.kernel.protocol.v5.stream.{KernelOutputStream, KernelInputStream}
import com.typesafe.config.Config

/**
 * Factory for the stream objects used to move data between the kernel and
 * the client.
 *
 * @param config The kernel configuration to use during object creation
 * @param actorLoader The actor loader to use when retrieve actors needed for
 *                    object creation
 * @param parentMessage The parent message to use when needed for object
 *                      creation
 * @param kernelMessageBuilder The builder to use when constructing kernel
 *                             messages inside objects created
 */
class FactoryMethods(
  private val config: Config,
  private val actorLoader: ActorLoader,
  private val parentMessage: KernelMessage,
  private val kernelMessageBuilder: KMBuilder
) extends FactoryMethodsLike {
  require(parentMessage != null, "Parent message cannot be null!")

  // Builder pre-seeded with the parent message; shared by all created streams.
  private[api] val kmBuilder = kernelMessageBuilder.withParent(parentMessage)

  /**
   * Creates a new kernel input stream.
   *
   * @param prompt The text to use as a prompt
   * @param password If true, should treat input as a password field
   *
   * @return The new KernelInputStream instance
   */
  override def newKernelInputStream(
    prompt: String = KernelInputStream.DefaultPrompt,
    password: Boolean = KernelInputStream.DefaultPassword
  ): InputStream = {
    // Inputs are routed back to the same message ids as the parent message.
    val builder = kmBuilder.withIds(parentMessage.ids)
    new KernelInputStream(actorLoader, builder, prompt = prompt, password = password)
  }

  /**
   * Creates a new kernel output stream.
   *
   * @param streamType The type of output stream (stdout/stderr)
   * @param sendEmptyOutput If true, will send message even if output is empty
   *
   * @return The new KernelOutputStream instance
   */
  override def newKernelOutputStream(
    streamType: String = KernelOutputStream.DefaultStreamType,
    sendEmptyOutput: Boolean = config.getBoolean("send_empty_output")
  ): OutputStream = {
    new KernelOutputStream(
      actorLoader,
      kmBuilder,
      com.ibm.spark.global.ScheduledTaskManager.instance,
      streamType = streamType,
      sendEmptyOutput = sendEmptyOutput
    )
  }
}
yeghishe/spark-kernel
kernel/src/main/scala/com/ibm/spark/kernel/api/FactoryMethods.scala
Scala
apache-2.0
2,464
package uk.co.seansaville.ninetyninescalaprobs.lists

import scala.annotation.tailrec

/**
 * Problem 4: Find the number of elements of a list.
 */
object Problem4 {
  /** Returns the number of elements in `list`, counted with a left fold. */
  def length[T](list: List[T]): Int =
    list.foldLeft(0)((count, _) => count + 1)
}
seansaville/99scalaprobs
src/main/scala/uk/co/seansaville/ninetyninescalaprobs/lists/Problem4.scala
Scala
mit
392
/**
 * Copyright 2011-2012 eBusiness Information, Groupe Excilys (www.excilys.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * 		http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.excilys.ebi.gatling.http.response

import java.security.MessageDigest

import com.excilys.ebi.gatling.core.util.StringHelper.{ END_OF_LINE, bytes2Hex }
import com.ning.http.client.{ Request, Response }

/**
 * A [[Response]] decorator that records request/response timing information
 * and body checksums, and that may wrap a response that has not been built.
 *
 * All delegated `Response` accessors fail with [[IllegalStateException]]
 * when the underlying response has not been built yet.
 *
 * @param request the request that produced this response
 * @param response the underlying response, None until it has been built
 * @param checksums message digests keyed by algorithm name
 * @param executionStartDate epoch millis when the request execution started
 * @param requestSendingEndDate epoch millis when request sending finished
 * @param responseReceivingStartDate epoch millis when response receiving started
 * @param executionEndDate epoch millis when the request execution finished
 */
class ExtendedResponse(
		val request: Request,
		response: Option[Response],
		checksums: Map[String, MessageDigest],
		val executionStartDate: Long,
		val requestSendingEndDate: Long,
		val responseReceivingStartDate: Long,
		val executionEndDate: Long) extends Response {

	/** The underlying response; fails fast when it has not been built yet. */
	private def builtResponse: Response =
		response.getOrElse(throw new IllegalStateException("Response was not built"))

	def isBuilt = response.isDefined

	/** Hex-encoded checksum for `algorithm`, if such a digest was registered. */
	def checksum(algorithm: String): Option[String] = checksums.get(algorithm).map(md => bytes2Hex(md.digest))

	/** Total time between execution start and execution end, in millis. */
	def responseTimeInMillis: Long = executionEndDate - executionStartDate

	/** Original (misspelled) name, kept for source compatibility. */
	@deprecated("Use responseTimeInMillis instead", "1.4.0")
	def reponseTimeInMillis: Long = responseTimeInMillis

	/** Time between end of request sending and start of response receiving, in millis. */
	def latencyInMillis: Long = responseReceivingStartDate - requestSendingEndDate

	/** Appends status, headers and body (whichever are present) to `buff`. No-op if not built. */
	def dumpTo(buff: StringBuilder): Unit = {
		// foreach (not map): this is purely a side effect on buff.
		response.foreach { response =>
			if (response.hasResponseStatus)
				buff.append("status=").append(END_OF_LINE).append(response.getStatusCode).append(" ").append(response.getStatusText).append(END_OF_LINE)
			if (response.hasResponseHeaders)
				buff.append("headers= ").append(END_OF_LINE).append(response.getHeaders).append(END_OF_LINE)
			if (response.hasResponseBody)
				buff.append("body=").append(END_OF_LINE).append(response.getResponseBody)
		}
	}

	/** Returns a new StringBuilder containing the dump of this response. */
	def dump: StringBuilder = {
		val buff = new StringBuilder
		dumpTo(buff)
		buff
	}

	override def toString = response.toString

	// Response delegation: every accessor requires the response to be built.
	def getStatusCode = builtResponse.getStatusCode

	def getStatusText = builtResponse.getStatusText

	def getResponseBodyAsBytes = builtResponse.getResponseBodyAsBytes

	def getResponseBodyAsStream = builtResponse.getResponseBodyAsStream

	def getResponseBodyExcerpt(maxLength: Int, charset: String) = builtResponse.getResponseBodyExcerpt(maxLength, charset)

	def getResponseBody(charset: String) = builtResponse.getResponseBody(charset)

	def getResponseBodyExcerpt(maxLength: Int) = builtResponse.getResponseBodyExcerpt(maxLength)

	def getResponseBody = builtResponse.getResponseBody

	def getUri = builtResponse.getUri

	def getContentType = builtResponse.getContentType

	def getHeader(name: String) = builtResponse.getHeader(name)

	def getHeaders(name: String) = builtResponse.getHeaders(name)

	def getHeaders = builtResponse.getHeaders

	def isRedirected = builtResponse.isRedirected

	def getCookies = builtResponse.getCookies

	def hasResponseStatus = builtResponse.hasResponseStatus

	def hasResponseHeaders = builtResponse.hasResponseHeaders

	def hasResponseBody = builtResponse.hasResponseBody
}
Tjoene/thesis
Case_Programs/gatling-1.4.0/gatling-http/src/main/scala/com/excilys/ebi/gatling/http/response/ExtendedResponse.scala
Scala
gpl-2.0
4,317
/*                     __                                               *\
**     ________ ___   / /  ___     Scala API                            **
**    / __/ __// _ | / / / _ |    (c) 2007-2014, LAMP/EPFL              **
**  __\ \/ /__/ __ |/ /__/ __ |   http://scala-lang.org/                **
** /____/\___/_/ |_/____/_/ | |                                         **
**                          |/                                          **
\*                                                                      */

package scala.swing.examples

import scala.swing._

/**
 * Demonstrates a ListView with a custom renderer that displays a single
 * field of the underlying items.
 */
object ListViewDemo extends SimpleSwingApplication {

  case class City(name: String, country: String, population: Int, capital: Boolean)

  // Demo data shown in the list.
  val cities = List(
    City("Lausanne", "Switzerland", 129273, false),
    City("Paris", "France", 2203817, true),
    City("New York", "USA", 8363710, false),
    City("Berlin", "Germany", 3416300, true),
    City("Tokio", "Japan", 12787981, true)
  )

  def top = new MainFrame {
    import ListView._
    // Each city is rendered by its name only.
    contents = new FlowPanel(new ScrollPane(new ListView(cities) {
      renderer = Renderer(_.name)
    }))
    //new ScrollPane(new Table(items)))
  }
}
SethTisue/scala-swing
examples/src/main/scala/scala/swing/examples/ListViewDemo.scala
Scala
bsd-3-clause
1,193
/* sbt -- Simple Build Tool * Copyright 2010 Mark Harrah */ package sbt package inc import java.io.File import Relations.Source import Relations.SourceDependencies import xsbti.api.{ Source => APISource } import xsbti.DependencyContext import xsbti.DependencyContext._ /** * Provides mappings between source files, generated classes (products), and binaries. * Dependencies that are tracked include internal: a dependency on a source in the same compilation group (project), * external: a dependency on a source in another compilation group (tracked as the name of the class), * binary: a dependency on a class or jar file not generated by a source file in any tracked compilation group, * inherited: a dependency that resulted from a public template inheriting, * direct: any type of dependency, including inheritance. */ trait Relations { /** All sources _with at least one product_ . */ def allSources: collection.Set[File] /** All products associated with sources. */ def allProducts: collection.Set[File] /** All files that are recorded as a binary dependency of a source file.*/ def allBinaryDeps: collection.Set[File] /** All files in this compilation group (project) that are recorded as a source dependency of a source file in this group.*/ def allInternalSrcDeps: collection.Set[File] /** All files in another compilation group (project) that are recorded as a source dependency of a source file in this group.*/ def allExternalDeps: collection.Set[String] /** Fully qualified names of classes generated from source file `src`. */ def classNames(src: File): Set[String] /** Source files that generated a class with the given fully qualified `name`. This is typically a set containing a single file. */ def definesClass(name: String): Set[File] /** The classes that were generated for source file `src`. */ def products(src: File): Set[File] /** The source files that generated class file `prod`. This is typically a set containing a single file. 
*/ def produced(prod: File): Set[File] /** The binary dependencies for the source file `src`. */ def binaryDeps(src: File): Set[File] /** The source files that depend on binary file `dep`. */ def usesBinary(dep: File): Set[File] /** Internal source dependencies for `src`. This includes both direct and inherited dependencies. */ def internalSrcDeps(src: File): Set[File] /** Internal source files that depend on internal source `dep`. This includes both direct and inherited dependencies. */ def usesInternalSrc(dep: File): Set[File] /** External source dependencies that internal source file `src` depends on. This includes both direct and inherited dependencies. */ def externalDeps(src: File): Set[String] /** Internal source dependencies that depend on external source file `dep`. This includes both direct and inherited dependencies. */ def usesExternal(dep: String): Set[File] private[inc] def usedNames(src: File): Set[String] /** Records internal source file `src` as generating class file `prod` with top-level class `name`. */ @deprecated("Record all products using `addProducts`.", "0.13.8") def addProduct(src: File, prod: File, name: String): Relations /** * Records internal source file `src` as dependending on `dependsOn`. If this dependency is introduced * by an inheritance relation, `inherited` is set to true. Note that in this case, the dependency is * also registered as a direct dependency. */ @deprecated("Record all external dependencies using `addExternalDeps`.", "0.13.8") def addExternalDep(src: File, dependsOn: String, inherited: Boolean): Relations /** Records internal source file `src` depending on a dependency binary dependency `dependsOn`.*/ @deprecated("Record all binary dependencies using `addBinaryDeps`.", "0.13.8") def addBinaryDep(src: File, dependsOn: File): Relations /** * Records internal source file `src` as having direct dependencies on internal source files `directDependsOn` * and inheritance dependencies on `inheritedDependsOn`. 
Everything in `inheritedDependsOn` must be included in `directDependsOn`; * this method does not automatically record direct dependencies like `addExternalDep` does. */ @deprecated("Record all internal dependencies using `addInternalSrcDeps(File, Iterable[InternalDependencies])`.", "0.13.8") def addInternalSrcDeps(src: File, directDependsOn: Iterable[File], inheritedDependsOn: Iterable[File]): Relations /** * Records that the file `src` generates products `products`, has internal dependencies `internalDeps`, * has external dependencies `externalDeps` and binary dependencies `binaryDeps`. */ def addSource(src: File, products: Iterable[(File, String)], internalDeps: Iterable[InternalDependency], externalDeps: Iterable[ExternalDependency], binaryDeps: Iterable[(File, String, Stamp)]): Relations = addProducts(src, products).addInternalSrcDeps(src, internalDeps).addExternalDeps(src, externalDeps).addBinaryDeps(src, binaryDeps) /** * Records all the products `prods` generated by `src` */ private[inc] def addProducts(src: File, prods: Iterable[(File, String)]): Relations /** * Records all the internal source dependencies `deps` of `src` */ private[inc] def addInternalSrcDeps(src: File, deps: Iterable[InternalDependency]): Relations /** * Records all the external dependencies `deps` of `src` */ private[inc] def addExternalDeps(src: File, deps: Iterable[ExternalDependency]): Relations /** * Records all the binary dependencies `deps` of `src` */ private[inc] def addBinaryDeps(src: File, deps: Iterable[(File, String, Stamp)]): Relations private[inc] def addUsedName(src: File, name: String): Relations /** Concatenates the two relations. Acts naively, i.e., doesn't internalize external deps on added files. */ def ++(o: Relations): Relations /** Drops all dependency mappings a->b where a is in `sources`. Acts naively, i.e., doesn't externalize internal deps on removed files. 
*/ def --(sources: Iterable[File]): Relations @deprecated("OK to remove in 0.14", "0.13.1") def groupBy[K](f: (File => K)): Map[K, Relations] /** The relation between internal sources and generated class files. */ def srcProd: Relation[File, File] /** The dependency relation between internal sources and binaries. */ def binaryDep: Relation[File, File] /** The dependency relation between internal sources. This includes both direct and inherited dependencies.*/ def internalSrcDep: Relation[File, File] /** The dependency relation between internal and external sources. This includes both direct and inherited dependencies.*/ def externalDep: Relation[File, String] /** All the internal dependencies */ private[inc] def internalDependencies: InternalDependencies /** All the external dependencies */ private[inc] def externalDependencies: ExternalDependencies /** * The source dependency relation between source files introduced by member reference. * * NOTE: All inheritance dependencies are included in this relation because in order to * inherit from a member you have to refer to it. If you check documentation of `inheritance` * you'll see that there's small oddity related to traits being the first parent of a * class/trait that results in additional parents being introduced due to normalization. * This relation properly accounts for that so the invariant that `memberRef` is a superset * of `inheritance` is preserved. */ private[inc] def memberRef: SourceDependencies /** * The source dependency relation between source files introduced by inheritance. * The dependency by inheritance is introduced when a template (class or trait) mentions * a given type in a parent position. * * NOTE: Due to an oddity in how Scala's type checker works there's one unexpected dependency * on a class being introduced. An example illustrates the best the problem. 
Let's consider * the following structure: * * trait A extends B * trait B extends C * trait C extends D * class D * * We are interested in dependencies by inheritance of `A`. One would expect it to be just `B` * but the answer is `B` and `D`. The reason is because Scala's type checker performs a certain * normalization so the first parent of a type is a class. Therefore the example above is normalized * to the following form: * * trait A extends D with B * trait B extends D with C * trait C extends D * class D * * Therefore if you inherit from a trait you'll get an additional dependency on a class that is * resolved transitively. You should not rely on this behavior, though. * */ private[inc] def inheritance: SourceDependencies /** The dependency relations between sources. These include both direct and inherited dependencies.*/ def direct: Source /** The inheritance dependency relations between sources.*/ def publicInherited: Source /** The relation between a source file and the fully qualified names of classes generated from it.*/ def classes: Relation[File, String] /** * Flag which indicates whether given Relations object supports operations needed by name hashing algorithm. * * At the moment the list includes the following operations: * * - memberRef: SourceDependencies * - inheritance: SourceDependencies * * The `memberRef` and `inheritance` implement a new style source dependency tracking. When this flag is * enabled access to `direct` and `publicInherited` relations is illegal and will cause runtime exception * being thrown. That is done as an optimization that prevents from storing two overlapping sets of * dependencies. * * Conversely, when `nameHashing` flag is disabled access to `memberRef` and `inheritance` * relations is illegal and will cause runtime exception being thrown. */ private[inc] def nameHashing: Boolean /** * Relation between source files and _unqualified_ term and type names used in given source file. 
*/ private[inc] def names: Relation[File, String] /** * Lists of all the pairs (header, relation) that sbt knows of. * Used by TextAnalysisFormat to persist relations. * This cannot be stored as a Map because the order is important. */ private[inc] def allRelations: List[(String, Relation[File, _])] } object Relations { /** * Represents all the relations that sbt knows of along with a way to recreate each * of their elements from their string representation. */ private[inc] val existingRelations = { val string2File: String => File = new File(_) List( ("products", string2File), ("binary dependencies", string2File), ("direct source dependencies", string2File), ("direct external dependencies", identity[String] _), ("public inherited source dependencies", string2File), ("public inherited external dependencies", identity[String] _), ("member reference internal dependencies", string2File), ("member reference external dependencies", identity[String] _), ("inheritance internal dependencies", string2File), ("inheritance external dependencies", identity[String] _), ("class names", identity[String] _), ("used names", identity[String] _)) } /** * Reconstructs a Relations from a list of Relation * The order in which the relations are read matters and is defined by `existingRelations`. 
*/ def construct(nameHashing: Boolean, relations: List[Relation[_, _]]) = relations match { case p :: bin :: di :: de :: pii :: pie :: mri :: mre :: ii :: ie :: cn :: un :: Nil => val srcProd = p.asInstanceOf[Relation[File, File]] val binaryDep = bin.asInstanceOf[Relation[File, File]] val directSrcDeps = makeSource(di.asInstanceOf[Relation[File, File]], de.asInstanceOf[Relation[File, String]]) val publicInheritedSrcDeps = makeSource(pii.asInstanceOf[Relation[File, File]], pie.asInstanceOf[Relation[File, String]]) val memberRefSrcDeps = makeSourceDependencies(mri.asInstanceOf[Relation[File, File]], mre.asInstanceOf[Relation[File, String]]) val inheritanceSrcDeps = makeSourceDependencies(ii.asInstanceOf[Relation[File, File]], ie.asInstanceOf[Relation[File, String]]) val classes = cn.asInstanceOf[Relation[File, String]] val names = un.asInstanceOf[Relation[File, String]] // we don't check for emptiness of publicInherited/inheritance relations because // we assume that invariant that says they are subsets of direct/memberRef holds assert(nameHashing || (memberRefSrcDeps == emptySourceDependencies), "When name hashing is disabled the `memberRef` relation should be empty.") assert(!nameHashing || (directSrcDeps == emptySource), "When name hashing is enabled the `direct` relation should be empty.") if (nameHashing) { val internal = InternalDependencies(Map(DependencyByMemberRef -> mri.asInstanceOf[Relation[File, File]], DependencyByInheritance -> ii.asInstanceOf[Relation[File, File]])) val external = ExternalDependencies(Map(DependencyByMemberRef -> mre.asInstanceOf[Relation[File, String]], DependencyByInheritance -> ie.asInstanceOf[Relation[File, String]])) Relations.make(srcProd, binaryDep, internal, external, classes, names) } else { assert(names.all.isEmpty, s"When `nameHashing` is disabled `names` relation should be empty: $names") Relations.make(srcProd, binaryDep, directSrcDeps, publicInheritedSrcDeps, classes) } case _ => throw new java.io.IOException(s"Expected 
to read ${existingRelations.length} relations but read ${relations.length}.") } /** Tracks internal and external source dependencies for a specific dependency type, such as direct or inherited.*/ final class Source private[sbt] (val internal: Relation[File, File], val external: Relation[File, String]) { def addInternal(source: File, dependsOn: Iterable[File]): Source = new Source(internal + (source, dependsOn), external) @deprecated("Use addExternal(File, Iterable[String])", "0.13.8") def addExternal(source: File, dependsOn: String): Source = new Source(internal, external + (source, dependsOn)) def addExternal(source: File, dependsOn: Iterable[String]): Source = new Source(internal, external + (source, dependsOn)) /** Drops all dependency mappings from `sources`. Acts naively, i.e., doesn't externalize internal deps on removed files.*/ def --(sources: Iterable[File]): Source = new Source(internal -- sources, external -- sources) def ++(o: Source): Source = new Source(internal ++ o.internal, external ++ o.external) @deprecated("Broken implementation. 
OK to remove in 0.14", "0.13.1") def groupBySource[K](f: File => K): Map[K, Source] = { val i = internal.groupBy { case (a, b) => f(a) } val e = external.groupBy { case (a, b) => f(a) } val pairs = for (k <- i.keySet ++ e.keySet) yield (k, new Source(getOrEmpty(i, k), getOrEmpty(e, k))) pairs.toMap } override def equals(other: Any) = other match { case o: Source => internal == o.internal && external == o.external case _ => false } override def hashCode = (internal, external).hashCode } /** Tracks internal and external source dependencies for a specific dependency type, such as direct or inherited.*/ private[inc] final class SourceDependencies(val internal: Relation[File, File], val external: Relation[File, String]) { def addInternal(source: File, dependsOn: Iterable[File]): SourceDependencies = new SourceDependencies(internal + (source, dependsOn), external) @deprecated("Use addExternal(File, Iterable[String])", "0.13.8") def addExternal(source: File, dependsOn: String): SourceDependencies = new SourceDependencies(internal, external + (source, dependsOn)) def addExternal(source: File, dependsOn: Iterable[String]): SourceDependencies = new SourceDependencies(internal, external + (source, dependsOn)) /** Drops all dependency mappings from `sources`. 
Acts naively, i.e., doesn't externalize internal deps on removed files.*/ def --(sources: Iterable[File]): SourceDependencies = new SourceDependencies(internal -- sources, external -- sources) def ++(o: SourceDependencies): SourceDependencies = new SourceDependencies(internal ++ o.internal, external ++ o.external) override def equals(other: Any) = other match { case o: SourceDependencies => internal == o.internal && external == o.external case _ => false } override def hashCode = (internal, external).hashCode } private[sbt] def getOrEmpty[A, B, K](m: Map[K, Relation[A, B]], k: K): Relation[A, B] = m.getOrElse(k, Relation.empty) private[this] lazy val e = Relation.empty[File, File] private[this] lazy val estr = Relation.empty[File, String] private[this] lazy val es = new Source(e, estr) def emptySource: Source = es private[inc] lazy val emptySourceDependencies: SourceDependencies = new SourceDependencies(e, estr) def empty: Relations = empty(nameHashing = IncOptions.nameHashingDefault) private[inc] def empty(nameHashing: Boolean): Relations = if (nameHashing) new MRelationsNameHashing(e, e, InternalDependencies.empty, ExternalDependencies.empty, estr, estr) else new MRelationsDefaultImpl(e, e, es, es, estr) def make(srcProd: Relation[File, File], binaryDep: Relation[File, File], direct: Source, publicInherited: Source, classes: Relation[File, String]): Relations = new MRelationsDefaultImpl(srcProd, binaryDep, direct = direct, publicInherited = publicInherited, classes) private[inc] def make(srcProd: Relation[File, File], binaryDep: Relation[File, File], internalDependencies: InternalDependencies, externalDependencies: ExternalDependencies, classes: Relation[File, String], names: Relation[File, String]): Relations = new MRelationsNameHashing(srcProd, binaryDep, internalDependencies = internalDependencies, externalDependencies = externalDependencies, classes, names) def makeSource(internal: Relation[File, File], external: Relation[File, String]): Source = new 
Source(internal, external) private[inc] def makeSourceDependencies(internal: Relation[File, File], external: Relation[File, String]): SourceDependencies = new SourceDependencies(internal, external) } private object DependencyCollection { /** * Combine `m1` and `m2` such that the result contains all the dependencies they represent. * `m1` is expected to be smaller than `m2`. */ def joinMaps[T](m1: Map[DependencyContext, Relation[File, T]], m2: Map[DependencyContext, Relation[File, T]]) = m1.foldLeft(m2) { case (tmp, (key, values)) => tmp.updated(key, tmp.getOrElse(key, Relation.empty) ++ values) } } private object InternalDependencies { /** * Constructs an empty `InteralDependencies` */ def empty = InternalDependencies(Map.empty) } private case class InternalDependencies(dependencies: Map[DependencyContext, Relation[File, File]]) { /** * Adds `dep` to the dependencies */ def +(dep: InternalDependency): InternalDependencies = InternalDependencies(dependencies.updated(dep.context, dependencies.getOrElse(dep.context, Relation.empty) + (dep.sourceFile, dep.targetFile))) /** * Adds all `deps` to the dependencies */ def ++(deps: Iterable[InternalDependency]): InternalDependencies = deps.foldLeft(this)(_ + _) def ++(deps: InternalDependencies): InternalDependencies = InternalDependencies(DependencyCollection.joinMaps(dependencies, deps.dependencies)) /** * Removes all dependencies from `sources` to another file from the dependencies */ def --(sources: Iterable[File]): InternalDependencies = InternalDependencies(dependencies.mapValues(_ -- sources).filter(_._2.size > 0)) } private object ExternalDependencies { /** * Constructs an empty `ExternalDependencies` */ def empty = ExternalDependencies(Map.empty) } private case class ExternalDependencies(dependencies: Map[DependencyContext, Relation[File, String]]) { /** * Adds `dep` to the dependencies */ def +(dep: ExternalDependency): ExternalDependencies = ExternalDependencies(dependencies.updated(dep.context, 
dependencies.getOrElse(dep.context, Relation.empty) + (dep.sourceFile, dep.targetClassName))) /** * Adds all `deps` to the dependencies */ def ++(deps: Iterable[ExternalDependency]): ExternalDependencies = deps.foldLeft(this)(_ + _) def ++(deps: ExternalDependencies): ExternalDependencies = ExternalDependencies(DependencyCollection.joinMaps(dependencies, deps.dependencies)) /** * Removes all dependencies from `sources` to another file from the dependencies */ def --(sources: Iterable[File]): ExternalDependencies = ExternalDependencies(dependencies.mapValues(_ -- sources).filter(_._2.size > 0)) } /** * An abstract class that contains common functionality inherited by two implementations of Relations trait. * * A little note why we have two different implementations of Relations trait. This is needed for the time * being when we are slowly migrating to the new invalidation algorithm called "name hashing" which requires * some subtle changes to dependency tracking. For some time we plan to keep both algorithms side-by-side * and have a runtime switch which allows to pick one. So we need logic for both old and new dependency * tracking to be available. That's exactly what two subclasses of MRelationsCommon implement. Once name * hashing is proven to be stable and reliable we'll phase out the old algorithm and the old dependency tracking * logic. * * `srcProd` is a relation between a source file and a product: (source, product). * Note that some source files may not have a product and will not be included in this relation. * * `binaryDeps` is a relation between a source file and a binary dependency: (source, binary dependency). * This only includes dependencies on classes and jars that do not have a corresponding source/API to track instead. * A class or jar with a corresponding source should only be tracked in one of the source dependency relations. * * `classes` is a relation between a source file and its generated fully-qualified class names. 
*/ private abstract class MRelationsCommon(val srcProd: Relation[File, File], val binaryDep: Relation[File, File], val classes: Relation[File, String]) extends Relations { def allSources: collection.Set[File] = srcProd._1s def allProducts: collection.Set[File] = srcProd._2s def allBinaryDeps: collection.Set[File] = binaryDep._2s def allInternalSrcDeps: collection.Set[File] = internalSrcDep._2s def allExternalDeps: collection.Set[String] = externalDep._2s def classNames(src: File): Set[String] = classes.forward(src) def definesClass(name: String): Set[File] = classes.reverse(name) def products(src: File): Set[File] = srcProd.forward(src) def produced(prod: File): Set[File] = srcProd.reverse(prod) def binaryDeps(src: File): Set[File] = binaryDep.forward(src) def usesBinary(dep: File): Set[File] = binaryDep.reverse(dep) def internalSrcDeps(src: File): Set[File] = internalSrcDep.forward(src) def usesInternalSrc(dep: File): Set[File] = internalSrcDep.reverse(dep) def externalDeps(src: File): Set[String] = externalDep.forward(src) def usesExternal(dep: String): Set[File] = externalDep.reverse(dep) def usedNames(src: File): Set[String] = names.forward(src) /** Making large Relations a little readable. */ private val userDir = sys.props("user.dir").stripSuffix("/") + "/" private def nocwd(s: String) = s stripPrefix userDir private def line_s(kv: (Any, Any)) = " " + nocwd("" + kv._1) + " -> " + nocwd("" + kv._2) + "\n" protected def relation_s(r: Relation[_, _]) = ( if (r.forwardMap.isEmpty) "Relation [ ]" else (r.all.toSeq map line_s sorted) mkString ("Relation [\n", "", "]") ) } /** * This class implements Relations trait with support for tracking of `direct` and `publicInherited` source * dependencies. Therefore this class preserves the "old" (from sbt 0.13.0) dependency tracking logic and it's * a default implementation. * * `direct` defines relations for dependencies between internal and external source dependencies. 
It includes all types of * dependencies, including inheritance. * * `publicInherited` defines relations for internal and external source dependencies, only including dependencies * introduced by inheritance. * */ private class MRelationsDefaultImpl(srcProd: Relation[File, File], binaryDep: Relation[File, File], // direct should include everything in inherited val direct: Source, val publicInherited: Source, classes: Relation[File, String]) extends MRelationsCommon(srcProd, binaryDep, classes) { def internalSrcDep: Relation[File, File] = direct.internal def externalDep: Relation[File, String] = direct.external def nameHashing: Boolean = false def memberRef: SourceDependencies = throw new UnsupportedOperationException("The `memberRef` source dependencies relation is not supported " + "when `nameHashing` flag is disabled.") def inheritance: SourceDependencies = throw new UnsupportedOperationException("The `memberRef` source dependencies relation is not supported " + "when `nameHashing` flag is disabled.") def addProduct(src: File, prod: File, name: String): Relations = new MRelationsDefaultImpl(srcProd + (src, prod), binaryDep, direct = direct, publicInherited = publicInherited, classes + (src, name)) def addProducts(src: File, products: Iterable[(File, String)]): Relations = new MRelationsDefaultImpl(srcProd ++ products.map(p => (src, p._1)), binaryDep, direct = direct, publicInherited = publicInherited, classes ++ products.map(p => (src, p._2))) def addInternalSrcDeps(src: File, deps: Iterable[InternalDependency]) = { val depsByInheritance = deps.collect { case InternalDependency(_, targetFile, DependencyByInheritance) => targetFile } val newD = direct.addInternal(src, deps.map(_.targetFile)) val newI = publicInherited.addInternal(src, depsByInheritance) new MRelationsDefaultImpl(srcProd, binaryDep, direct = newD, publicInherited = newI, classes) } def addInternalSrcDeps(src: File, directDependsOn: Iterable[File], inheritedDependsOn: Iterable[File]): Relations = { 
val directDeps = directDependsOn.map(d => InternalDependency(src, d, DependencyByMemberRef)) val inheritedDeps = inheritedDependsOn.map(d => InternalDependency(src, d, DependencyByInheritance)) addInternalSrcDeps(src, directDeps ++ inheritedDeps) } def addExternalDeps(src: File, deps: Iterable[ExternalDependency]) = { val depsByInheritance = deps.collect { case ExternalDependency(_, targetClassName, _, DependencyByInheritance) => targetClassName } val newD = direct.addExternal(src, deps.map(_.targetClassName)) val newI = publicInherited.addExternal(src, depsByInheritance) new MRelationsDefaultImpl(srcProd, binaryDep, direct = newD, publicInherited = newI, classes) } def addExternalDep(src: File, dependsOn: String, inherited: Boolean): Relations = { val newI = if (inherited) publicInherited.addExternal(src, dependsOn :: Nil) else publicInherited val newD = direct.addExternal(src, dependsOn :: Nil) new MRelationsDefaultImpl(srcProd, binaryDep, direct = newD, publicInherited = newI, classes) } def addBinaryDeps(src: File, deps: Iterable[(File, String, Stamp)]) = new MRelationsDefaultImpl(srcProd, binaryDep + (src, deps.map(_._1)), direct, publicInherited, classes) def addBinaryDep(src: File, dependsOn: File): Relations = new MRelationsDefaultImpl(srcProd, binaryDep + (src, dependsOn), direct = direct, publicInherited = publicInherited, classes) def names: Relation[File, String] = throw new UnsupportedOperationException("Tracking of used names is not supported " + "when `nameHashing` is disabled.") def addUsedName(src: File, name: String): Relations = throw new UnsupportedOperationException("Tracking of used names is not supported " + "when `nameHashing` is disabled.") override def externalDependencies: ExternalDependencies = ExternalDependencies(Map(DependencyByMemberRef -> direct.external, DependencyByInheritance -> publicInherited.external)) override def internalDependencies: InternalDependencies = InternalDependencies(Map(DependencyByMemberRef -> direct.internal, 
DependencyByInheritance -> publicInherited.internal)) def ++(o: Relations): Relations = { if (nameHashing != o.nameHashing) throw new UnsupportedOperationException("The `++` operation is not supported for relations " + "with different values of `nameHashing` flag.") new MRelationsDefaultImpl(srcProd ++ o.srcProd, binaryDep ++ o.binaryDep, direct ++ o.direct, publicInherited ++ o.publicInherited, classes ++ o.classes) } def --(sources: Iterable[File]) = new MRelationsDefaultImpl(srcProd -- sources, binaryDep -- sources, direct = direct -- sources, publicInherited = publicInherited -- sources, classes -- sources) @deprecated("Broken implementation. OK to remove in 0.14", "0.13.1") def groupBy[K](f: File => K): Map[K, Relations] = { type MapRel[T] = Map[K, Relation[File, T]] def outerJoin(srcProdMap: MapRel[File], binaryDepMap: MapRel[File], direct: Map[K, Source], inherited: Map[K, Source], classesMap: MapRel[String], namesMap: MapRel[String]): Map[K, Relations] = { def kRelations(k: K): Relations = { def get[T](m: Map[K, Relation[File, T]]) = Relations.getOrEmpty(m, k) def getSrc(m: Map[K, Source]): Source = m.getOrElse(k, Relations.emptySource) def getSrcDeps(m: Map[K, SourceDependencies]): SourceDependencies = m.getOrElse(k, Relations.emptySourceDependencies) new MRelationsDefaultImpl(get(srcProdMap), get(binaryDepMap), getSrc(direct), getSrc(inherited), get(classesMap)) } val keys = (srcProdMap.keySet ++ binaryDepMap.keySet ++ direct.keySet ++ inherited.keySet ++ classesMap.keySet).toList Map(keys.map((k: K) => (k, kRelations(k))): _*) } def f1[B](item: (File, B)): K = f(item._1) outerJoin(srcProd.groupBy(f1), binaryDep.groupBy(f1), direct.groupBySource(f), publicInherited.groupBySource(f), classes.groupBy(f1), names.groupBy(f1)) } override def equals(other: Any) = other match { case o: MRelationsDefaultImpl => srcProd == o.srcProd && binaryDep == o.binaryDep && direct == o.direct && publicInherited == o.publicInherited && classes == o.classes case _ => false } 
def allRelations = { val rels = List( srcProd, binaryDep, direct.internal, direct.external, publicInherited.internal, publicInherited.external, Relations.emptySourceDependencies.internal, // Default implementation doesn't provide memberRef source deps Relations.emptySourceDependencies.external, // Default implementation doesn't provide memberRef source deps Relations.emptySourceDependencies.internal, // Default implementation doesn't provide inheritance source deps Relations.emptySourceDependencies.external, // Default implementation doesn't provide inheritance source deps classes, Relation.empty[File, String]) // Default implementation doesn't provide used names relation Relations.existingRelations map (_._1) zip rels } override def hashCode = (srcProd :: binaryDep :: direct :: publicInherited :: classes :: Nil).hashCode override def toString = ( """ |Relations: | products: %s | bin deps: %s | src deps: %s | ext deps: %s | class names: %s """.trim.stripMargin.format(List(srcProd, binaryDep, internalSrcDep, externalDep, classes) map relation_s: _*) ) } /** * This class implements Relations trait with support for tracking of `memberRef` and `inheritance` source * dependencies. Therefore this class implements the new (compared to sbt 0.13.0) dependency tracking logic * needed by the name hashing invalidation algorithm. 
*/ private class MRelationsNameHashing(srcProd: Relation[File, File], binaryDep: Relation[File, File], val internalDependencies: InternalDependencies, val externalDependencies: ExternalDependencies, classes: Relation[File, String], val names: Relation[File, String]) extends MRelationsCommon(srcProd, binaryDep, classes) { def direct: Source = throw new UnsupportedOperationException("The `direct` source dependencies relation is not supported " + "when `nameHashing` flag is disabled.") def publicInherited: Source = throw new UnsupportedOperationException("The `publicInherited` source dependencies relation is not supported " + "when `nameHashing` flag is disabled.") val nameHashing: Boolean = true def internalSrcDep: Relation[File, File] = memberRef.internal def externalDep: Relation[File, String] = memberRef.external def addProduct(src: File, prod: File, name: String): Relations = new MRelationsNameHashing(srcProd + (src, prod), binaryDep, internalDependencies = internalDependencies, externalDependencies = externalDependencies, classes + (src, name), names = names) def addProducts(src: File, products: Iterable[(File, String)]): Relations = new MRelationsNameHashing(srcProd ++ products.map(p => (src, p._1)), binaryDep, internalDependencies = internalDependencies, externalDependencies = externalDependencies, classes ++ products.map(p => (src, p._2)), names = names) def addInternalSrcDeps(src: File, deps: Iterable[InternalDependency]) = new MRelationsNameHashing(srcProd, binaryDep, internalDependencies = internalDependencies ++ deps, externalDependencies = externalDependencies, classes, names) def addInternalSrcDeps(src: File, dependsOn: Iterable[File], inherited: Iterable[File]): Relations = { val memberRefDeps = dependsOn.map(InternalDependency(src, _, DependencyByMemberRef)) val inheritedDeps = inherited.map(InternalDependency(src, _, DependencyByInheritance)) addInternalSrcDeps(src, memberRefDeps ++ inheritedDeps) } def addExternalDeps(src: File, deps: 
Iterable[ExternalDependency]) = new MRelationsNameHashing(srcProd, binaryDep, internalDependencies = internalDependencies, externalDependencies = externalDependencies ++ deps, classes, names) def addExternalDep(src: File, dependsOn: String, inherited: Boolean): Relations = throw new UnsupportedOperationException("This method is not supported when `nameHashing` flag is enabled.") def addBinaryDeps(src: File, deps: Iterable[(File, String, Stamp)]) = new MRelationsNameHashing(srcProd, binaryDep + (src, deps.map(_._1)), internalDependencies = internalDependencies, externalDependencies = externalDependencies, classes, names) def addBinaryDep(src: File, dependsOn: File): Relations = new MRelationsNameHashing(srcProd, binaryDep + (src, dependsOn), internalDependencies = internalDependencies, externalDependencies = externalDependencies, classes, names = names) def addUsedName(src: File, name: String): Relations = new MRelationsNameHashing(srcProd, binaryDep, internalDependencies = internalDependencies, externalDependencies = externalDependencies, classes, names = names + (src, name)) override def inheritance: SourceDependencies = new SourceDependencies(internalDependencies.dependencies.getOrElse(DependencyByInheritance, Relation.empty), externalDependencies.dependencies.getOrElse(DependencyByInheritance, Relation.empty)) override def memberRef: SourceDependencies = new SourceDependencies(internalDependencies.dependencies.getOrElse(DependencyByMemberRef, Relation.empty), externalDependencies.dependencies.getOrElse(DependencyByMemberRef, Relation.empty)) def ++(o: Relations): Relations = { if (!o.nameHashing) throw new UnsupportedOperationException("The `++` operation is not supported for relations " + "with different values of `nameHashing` flag.") new MRelationsNameHashing(srcProd ++ o.srcProd, binaryDep ++ o.binaryDep, internalDependencies = internalDependencies ++ o.internalDependencies, externalDependencies = externalDependencies ++ o.externalDependencies, classes ++ 
o.classes, names = names ++ o.names) } def --(sources: Iterable[File]) = new MRelationsNameHashing(srcProd -- sources, binaryDep -- sources, internalDependencies = internalDependencies -- sources, externalDependencies = externalDependencies -- sources, classes -- sources, names = names -- sources) def groupBy[K](f: File => K): Map[K, Relations] = { throw new UnsupportedOperationException("Merging of Analyses that have" + "`relations.nameHashing` set to `true` is not supported.") } override def equals(other: Any) = other match { case o: MRelationsNameHashing => srcProd == o.srcProd && binaryDep == o.binaryDep && memberRef == o.memberRef && inheritance == o.inheritance && classes == o.classes case _ => false } def allRelations = { val rels = List( srcProd, binaryDep, Relations.emptySource.internal, // NameHashing doesn't provide direct dependencies Relations.emptySource.external, // NameHashing doesn't provide direct dependencies Relations.emptySource.internal, // NameHashing doesn't provide public inherited dependencies Relations.emptySource.external, // NameHashing doesn't provide public inherited dependencies memberRef.internal, memberRef.external, inheritance.internal, inheritance.external, classes, names) Relations.existingRelations map (_._1) zip rels } override def hashCode = (srcProd :: binaryDep :: memberRef :: inheritance :: classes :: Nil).hashCode override def toString = ( """ |Relations (with name hashing enabled): | products: %s | bin deps: %s | src deps: %s | ext deps: %s | class names: %s | used names: %s """.trim.stripMargin.format(List(srcProd, binaryDep, internalSrcDep, externalDep, classes, names) map relation_s: _*) ) }
pdalpra/sbt
compile/inc/src/main/scala/sbt/inc/Relations.scala
Scala
bsd-3-clause
38,431
package vultura.factor.inference.conditioned

import vultura.factor.{Ring, Factor, LogD, Problem}
import vultura.factor.inference.MargParI
import vultura.factor.inference.calibration.{LBP, BPResult}

/** Condition on all assignments to a given set of variables, and approximate the result by running BeliefPropagation
  * on each subproblem.
  *
  * @param problem            the factor-graph problem to marginalize
  * @param conditionVariables variables to fully condition on (one BP run per joint assignment)
  * @param maxIterations      NOTE(review): not forwarded to `LBP.inferWithStats` below — verify intended
  * @param tol                NOTE(review): likewise appears unused — verify intended
  */
case class FullyConditionedBP(problem: Problem, conditionVariables: Set[Int], maxIterations: Int = 100000, tol: Double = 1e-12) extends MargParI {
  override def ring: Ring[Double] = problem.ring

  // All joint assignments to `conditionVariables`: each fold step extends every partial
  // assignment with every value of the next variable (cartesian product of the domains).
  val conditions: IndexedSeq[Map[Int, Int]] = conditionVariables.foldLeft(Seq(Map.empty[Int,Int])){
    case (pcs,nv) => pcs.flatMap(pc => (0 until problem.domains(nv)).map(value => pc + (nv -> value)))
  }.toIndexedSeq

  // One BP run per condition: (result, converged?, steps taken).
  val results: IndexedSeq[(BPResult, Boolean, Long)] = conditions.map(c => LBP.inferWithStats(problem.condition(c)))

  // Mixture weight of each conditioned subproblem, proportional to its partition function
  // (normalized in log-space, then decoded to linear scale).
  val weights: Array[Double] = LogD.decode(LogD.normalize(results.map(_._1.logZ)(collection.breakOut)))

  // Marginal belief of a variable = Z-weighted combination of the per-condition beliefs.
  override def encodedVarBelief(vi: Int): Factor =
    Factor.linearCombination(weights, results.map(_._1.variableBelief(vi)), problem.ring)

  override def Z: Double = problem.ring.encode(Array(logZ)).head
  /** @return Natural logarithm of partition function. */
  override def logZ: Double = LogD.sumA(results.map(_._1.logZ)(collection.breakOut))

  // Aggregate statistics over all conditioned BP runs.
  val isConverged: Boolean = results.forall(_._2)
  val maxSteps: Long = results.map(_._3).max
  val totalSteps: Long = results.map(_._3).sum
}
ziggystar/vultura-factor
src/main/scala/vultura/factor/inference/conditioned/FullyConditionedBP.scala
Scala
mit
1,662
package net.chwthewke.scala.protobuf

import scala.reflect.ClassTag
import org.scalacheck.Gen
import org.scalatest.GivenWhenThen
import org.scalatest.Matchers
import org.scalatest.WordSpec
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import com.google.{ protobuf => pb }
import net.chwthewke.scala.protobuf.scalautils.Good
import net.chwthewke.scala.protobuf.scalautils.Or
import net.chwthewke.scala.protobuf.test.TestProtocol._
import net.chwthewke.scala.protobuf.test.ref

/** Property-based round-trip spec: for each message type, generated scala-protobuf values and
 * their google-protobuf ("ref") counterparts must survive serialization through the other
 * library unchanged. */
class RefProtoSpec extends WordSpec with GeneratorDrivenPropertyChecks with Matchers with GivenWhenThen {

  // One shared behaviour block per message type under test.
  "Primitives messages" should {
    behave like new RefProto[Primitives, ref.TestProtocol.Primitives](ref.TestProtocol.Primitives.PARSER).behaviour
  }

  "AnEnum messages" should {
    behave like new RefProto[AnEnum, ref.TestProtocol.AnEnum](ref.TestProtocol.AnEnum.PARSER).behaviour
  }

  "LargeFieldNumbers messages" should {
    behave like new RefProto[LargeFieldNumbers, ref.TestProtocol.LargeFieldNumbers](ref.TestProtocol.LargeFieldNumbers.PARSER).behaviour
  }

  "LengthDelimited messages" should {
    behave like new RefProto[LengthDelimited, ref.TestProtocol.LengthDelimited](ref.TestProtocol.LengthDelimited.PARSER).behaviour
  }

  "PackedFields messages" should {
    behave like new RefProto[PackedFields, ref.TestProtocol.PackedFields](ref.TestProtocol.PackedFields.PARSER).behaviour
  }

  "RepeatedFields messages" should {
    behave like new RefProto[RepeatedFields, ref.TestProtocol.RepeatedFields](ref.TestProtocol.RepeatedFields.PARSER).behaviour
  }

  /** Reusable round-trip behaviours for one message type.
   * @tparam M the scala-protobuf message type
   * @tparam R the corresponding google-protobuf reference type
   * @param parser google-protobuf parser for `R` */
  class RefProto[M, R <: pb.Message](val parser: pb.Parser[R])(implicit ct: ClassTag[M], m: Message[M]) {
    private implicit def variant = MessageVariant.reflective(m)

    // Generates matched (scala, reference) message pairs.
    def pairGen: Gen[(M, R)] = MessageGenerator.messagePair[M, R]

    // S2R: encode with scala-protobuf, decode with google-protobuf (and R2S the reverse).
    private def S2R(msg: M): R = parser.parseFrom(ProtobufEncoder[M].run(msg).toArray)
    private def R2S(refMsg: R): M Or Decoder.Error = Parser[M].parseFrom(refMsg.toByteArray)

    // Truncates long message renderings to keep the Given/When/Then log readable.
    private def truncMsg(x: Any) = x.toString.splitAt(40) match {
      case (s, "") => s
      case (s, _)  => s + "..."
    }

    def scalaToRef = {
      forAll(pairGen) {
        case ((msg, r)) =>
          Given(s"a message ${truncMsg(msg)}")
          When("serialized and decoded with google protobuf")
          val conv = S2R(msg)
          Then(s"results in ${truncMsg(conv)}")
          conv should equal(r)
      }
    }

    def refToScala = {
      forAll(pairGen) {
        case ((msg, r)) =>
          Given(s"a google protobuf message ${truncMsg(r)}")
          When("serialized and decoded with scala protobuf")
          val conv = R2S(r)
          Then(s"results in ${truncMsg(conv)}")
          conv should equal(Good(msg))
      }
    }

    def scalaToScala = {
      forAll(pairGen) {
        case ((msg, r)) =>
          Given(s"a message ${truncMsg(msg)}")
          When("converted to google protobuf and back")
          val conv = R2S(S2R(msg))
          Then(s"results in ${truncMsg(conv)}")
          conv should equal(Good(msg))
      }
    }

    def refToRef = {
      forAll(pairGen) {
        case ((msg, r)) =>
          Given(s"a google protobuf message ${truncMsg(r)}")
          When("converted to scala protobuf and back")
          val conv = R2S(r) map S2R
          Then(s"results in ${truncMsg(conv)}")
          conv should equal(Good(r))
      }
    }

    // The four round-trip properties registered by each `behave like` above.
    def behaviour = {
      "translate from pb to scala" in refToScala
      "translate from scala to pb" in scalaToRef
      "translate from scala to pb to scala" in scalaToScala
      "translate from pb to scala to pb" in refToRef
    }
  }
}
chwthewke/scala-protobuf
scala-protobuf-test/src/test/scala/net/chwthewke/scala/protobuf/RefProtoSpec.scala
Scala
apache-2.0
3,660
package com.manning.mesosinaction

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

/** Simple Spark word-count example over an HDFS copy of "War and Peace".
 *
 * Reads `<base>/warandpeace.txt`, splits lines on single spaces, counts word
 * occurrences, and writes the result to `<base>/result`. The optional first
 * command-line argument overrides the HDFS working directory (default
 * `/tmp/warandpeace`).
 *
 * NOTE(review): the namenode address is hard-coded to a local pseudo-cluster.
 */
object WarAndPeaceWordCount {
  def main(args: Array[String]): Unit = {
    val basepath = args.headOption.getOrElse("/tmp/warandpeace")
    val conf = new SparkConf().setAppName("War and Peace Word Count")
    val sc = new SparkContext(conf)
    try {
      val textFile = sc.textFile(s"hdfs://localhost:8020/$basepath/warandpeace.txt")
      val words = textFile.flatMap(line => line.split(" "))
      val counts = words.map(word => (word, 1)).reduceByKey(_ + _)
      counts.saveAsTextFile(s"hdfs://localhost:8020/$basepath/result")
    } finally {
      // Release the SparkContext even if the job fails (original only stopped on success).
      sc.stop()
    }
  }
}
rji/mesos-in-action-code-samples
wordcount-example/wordcount.scala
Scala
mit
648
package dwolla.cloudflare

import cats.data._
import cats.effect._
import cats.effect.testing.specs2.CatsEffect
import cats.implicits._
import com.dwolla.cloudflare.CloudflareSettingFunctions._
import com.dwolla.cloudflare._
import com.dwolla.cloudflare.domain.model.ZoneSettings.{CloudflareSecurityLevel, CloudflareTlsLevel, CloudflareWaf}
import com.dwolla.cloudflare.domain.model._
import io.circe.literal._
import org.http4s._
import org.http4s.client._
import org.http4s.dsl.Http4sDsl
import org.http4s.syntax.all._
import org.specs2.mutable.Specification
import org.specs2.specification.Scope

/** Spec for ZoneSettingsClient: each case wires a fake in-process Cloudflare HTTP service
 * and checks that `updateSettings` issues the expected setting calls for a zone. */
class ZoneSettingsClientSpec extends Specification with CatsEffect {
  private val authorization = CloudflareAuthorization("email", "key")
  private val fakeCloudflareService = new FakeCloudflareService(authorization)
  // Route that resolves the zone name "hydragents.xyz" to id "fake-zone-id".
  private val getZoneId = fakeCloudflareService.listZones("hydragents.xyz", SampleResponses.Successes.getZones)

  trait Setup extends Scope with Http4sDsl[IO] {
    // Builds a client whose executor talks to the given fake routes and which applies
    // only the passed setting functions (parallelism of 1 keeps requests deterministic).
    def client(csfs: CloudflareSettingFunction*) = for {
      fakeExecutor <- Reader((fakeService: HttpRoutes[IO]) => new StreamingCloudflareApiExecutor[IO](Client.fromHttpApp(fakeService.orNotFound), authorization))
    } yield new ZoneSettingsClientImpl(fakeExecutor, 1) {
      override val settings = csfs.toSet
    }
  }

  "Zone Settings client" should {
    "apply the TLS setting to the given domain" in new Setup {
      val zone = Zone("hydragents.xyz", CloudflareTlsLevel.FullTlsStrict, None, None)

      private val zoneSettingsClient = client(setTlsLevel)(getZoneId <+> fakeCloudflareService.setTlsLevelService("fake-zone-id", "strict"))
      private val output = zoneSettingsClient.updateSettings(zone)

      output.compile.last.map { _ must beSome[ValidatedNel[Throwable, Unit]].like {
        case Validated.Valid(u) => u must_==(())
        case Validated.Invalid(e) => throw e.head
      }}
    }

    "apply the security level to the given domain" in new Setup {
      val zone = Zone("hydragents.xyz", CloudflareTlsLevel.FullTlsStrict, Option(CloudflareSecurityLevel.High), None)

      private val zoneSettingsClient = client(setSecurityLevel)(getZoneId <+> fakeCloudflareService.setSecurityLevelService("fake-zone-id", "high"))
      private val output = zoneSettingsClient.updateSettings(zone)

      output.compile.last.map { _ must beSome[ValidatedNel[Throwable, Unit]].like {
        case Validated.Valid(u) => u must_==(())
      }}
    }

    "apply waf to the given domain" in new Setup {
      val zone = Zone("hydragents.xyz", CloudflareTlsLevel.FullTlsStrict, None, Option(CloudflareWaf.On))

      private val zoneSettingsClient = client(setSecurityLevel)(getZoneId <+> fakeCloudflareService.setWafService("fake-zone-id", "on"))
      private val output = zoneSettingsClient.updateSettings(zone)

      output.compile.last.map { _ must beSome[ValidatedNel[Throwable, Unit]].like {
        case Validated.Valid(u) => u must_==(())
      }}
    }

    "ignore optional settings if they're not set (indicating a custom setting)" in new Setup {
      val zone = Zone("hydragents.xyz", CloudflareTlsLevel.FullTlsStrict, None, None)

      // Only the zone-lookup route is wired, so any setting call would fail the test.
      def testOptionalSetting(cloudflareSettingFunction: CloudflareSettingFunction) = {
        val zoneSettingsClient = client(cloudflareSettingFunction)(getZoneId)
        val output = zoneSettingsClient.updateSettings(zone)
        output.compile.last.map { _ must beSome[ValidatedNel[Throwable, Unit]].like {
          case Validated.Valid(u) => u must_==(())
        }}
      }

      private val settingsUnderTest = List(setSecurityLevel, setWaf)
      settingsUnderTest.foreach(testOptionalSetting)
    }

    "contain the default rules for all zone settings" in new Setup {
      private val zoneSettingsClient = new ZoneSettingsClientImpl(new StreamingCloudflareApiExecutor[IO](Client.fromHttpApp(HttpRoutes.empty[IO].orNotFound), authorization), 1)

      zoneSettingsClient.settings must_==(Set(setSecurityLevel, setTlsLevel, setWaf))
    }

    "accumulate multiple errors, should they occur when updating settings" in new Setup {
      // No setting routes are wired, so both the TLS and security-level updates must fail.
      private val zoneSettingsClient = ZoneSettingsClient(new StreamingCloudflareApiExecutor[IO](Client.fromHttpApp(getZoneId.orNotFound), authorization))
      private val zone = Zone("hydragents.xyz", CloudflareTlsLevel.FullTlsStrict, Option(CloudflareSecurityLevel.High), None)
      private val output = zoneSettingsClient.updateSettings(zone)

      output.compile.last.map { _ should beSome[ValidatedNel[Throwable, Unit]].like {
        case Validated.Invalid(nel) => nel.toList should have size greaterThanOrEqualTo(2)
      }}
    }
  }

  // Canned Cloudflare API responses used by the fake service.
  private object SampleResponses {
    object Successes {
      val getZones =
        json"""{
          "result": [
            {
              "id": "fake-zone-id",
              "name": "hydragents.xyz",
              "status": "active",
              "paused": false,
              "type": "full",
              "development_mode": 0,
              "name_servers": [
                "eric.ns.cloudflare.com",
                "lucy.ns.cloudflare.com"
              ]
            }
          ],
          "result_info": {
            "page": 1,
            "per_page": 20,
            "total_pages": 1,
            "count": 1,
            "total_count": 1
          },
          "success": true,
          "errors": [],
          "messages": []
        }
        """
    }
  }
}
Dwolla/scala-cloudflare
client/src/test/scala/dwolla/cloudflare/ZoneSettingsClientSpec.scala
Scala
mit
5,497
/*
 * Copyright 2021 The BigDL Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.intel.analytics.bigdl.ppml.base

import com.intel.analytics.bigdl.dllib.feature.dataset.{LocalDataSet, MiniBatch}
import com.intel.analytics.bigdl.dllib.nn.abstractnn.Activity

import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer

/** Common contract for PPML training/evaluation estimators: implementations supply
 * train/evaluate/predict, and accumulate per-metric evaluation values into
 * `evaluateResults` for later retrieval. */
trait Estimator {
  // Accumulated evaluation values keyed by metric name; implementations are expected
  // to append to the buffers as evaluations run.
  protected val evaluateResults: mutable.Map[String, ArrayBuffer[Float]]

  /** Returns an immutable snapshot of the accumulated evaluation results
   * (metric name -> values recorded so far). */
  def getEvaluateResults(): Map[String, Array[Float]] = {
    evaluateResults.map(v => (v._1, v._2.toArray)).toMap
  }

  /** Trains until `endEpoch` on `trainDataSet`, validating against `valDataSet`.
   * NOTE(review): the `Any` return type is implementation-defined — confirm what callers expect. */
  def train(endEpoch: Int,
    trainDataSet: LocalDataSet[MiniBatch[Float]],
    valDataSet: LocalDataSet[MiniBatch[Float]]): Any

  /** Evaluates the model on `dataSet` (results surface via [[getEvaluateResults]]). */
  def evaluate(dataSet: LocalDataSet[MiniBatch[Float]])

  /** Runs inference on `dataSet`, returning one output activity per batch/sample
   * (granularity is implementation-defined). */
  def predict(dataSet: LocalDataSet[MiniBatch[Float]]): Array[Activity]
}
intel-analytics/BigDL
scala/ppml/src/main/scala/com/intel/analytics/bigdl/ppml/base/Estimator.scala
Scala
apache-2.0
1,357
/*
 * Copyright 2017 Exon IT
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package by.exonit.redmine.client.play26ws.managers

import by.exonit.redmine.client.Project
import by.exonit.redmine.client.managers.impl.RedmineManagerFactory
import by.exonit.redmine.client.play26ws.BasicSpec
import by.exonit.redmine.client.play26ws.fixtures.{ClientDriverFixture, WebClientFixture}
import com.github.restdriver.clientdriver.RestClientDriver._
import org.scalatest.concurrent.PatienceConfiguration.Timeout
import org.scalatest.time.{Seconds, Span}

/**
 * Spec for the Play-WS project manager: drives a local rest-client-driver HTTP stub
 * and verifies the requests issued and responses parsed.
 *
 * Created by keritaf on 04.12.16.
 */
class ProjectManagerImplSpec extends BasicSpec with ClientDriverFixture with WebClientFixture {
  "Project manager for Play-WS implementation" when {
    "getting projects list" should {
      // NOTE(review): this ignored case exercises the *issue* manager against /issues.json
      // inside the project-manager spec — presumably copied from an issue-manager spec; verify.
      "issue correct requests for paged load" ignore {
        val issuesPage1 = this.getClass.getResourceAsStream("/responses/issues_paged_1.json")
        val issuesPage2 = this.getClass.getResourceAsStream("/responses/issues_paged_2.json")
        clientDriver.addExpectation(
          onRequestTo("/issues.json").withParam("offset", 0).withParam("limit", 25),
          giveResponseAsBytes(issuesPage1, jsonContentType))
        clientDriver.addExpectation(
          onRequestTo("/issues.json").withParam("offset", 25).withParam("limit", 25),
          giveResponseAsBytes(issuesPage2, jsonContentType))

        val manager = RedmineManagerFactory(webClient)
          .createUnauthenticated(clientDriver.getBaseUrl)
        val im = manager.issueManager
        val request = im.getIssues()
        whenReady(request.runAsync) {_ =>
          clientDriver.verify()
        }
      }

      "return projects for default request" in {
        val projectsJson = this.getClass.getResourceAsStream("/project/redmine_projects_basic.json")
        clientDriver.addExpectation(
          onRequestTo("/projects.json").withParam("offset", 0).withParam("limit", 25),
          giveResponseAsBytes(projectsJson, jsonContentType))

        val manager = RedmineManagerFactory(webClient).createUnauthenticated(clientDriver.getBaseUrl)
        val pm = manager.projectManager
        val request = pm.getProjects()
        whenReady(request.runAsync, Timeout(Span(4, Seconds))) {projects =>
          clientDriver.verify()
          projects should not be null
          projects.total shouldBe 3
          projects.items.size shouldBe 3
          inside(projects.items.find(_.id == 1).value) {case p: Project =>
            p.name shouldBe "Test1"
          }
          inside(projects.items.find(_.id == 2).value) {case p: Project =>
            p.name shouldBe "test1 child1"
          }
        }
      }

      "return projects for multi-page list" ignore {
        val issuesPage1 = this.getClass.getResourceAsStream("/responses/projects_paged_1.json")
        val issuesPage2 = this.getClass.getResourceAsStream("/responses/projects_paged_2.json")
        clientDriver.addExpectation(
          onRequestTo("/projects.json").withParam("offset", 0).withParam("limit", 25),
          giveResponseAsBytes(issuesPage1, jsonContentType))
        clientDriver.addExpectation(
          onRequestTo("/projects.json").withParam("offset", 25).withParam("limit", 25),
          giveResponseAsBytes(issuesPage2, jsonContentType))

        val manager = RedmineManagerFactory(webClient)
          .createUnauthenticated(clientDriver.getBaseUrl)
        val im = manager.projectManager
        val request = im.getProjects()
        whenReady(request.runAsync) {projects1 =>
          clientDriver.verify()
          projects1 should not be null
          projects1.total shouldBe 50
          projects1.items.size shouldBe 25
          inside(projects1.items.find(_.id == 20).value) {case p: Project =>
            p.name shouldBe "1"
          }
          // Fetch the second page through the lazily-linked `next` handle.
          whenReady(projects1.next.value.runAsync) { projects2 =>
            projects2.items.size shouldBe 6
            inside(projects2.items.find(_.id == 49).value) {case p: Project =>
              p.name shouldBe "30"
            }
          }
        }
      }
    }
  }
}
exon-it/redmine-scala-client
client-play26-ws/src/test/scala/by/exonit/redmine/client/play26ws/managers/ProjectManagerImplSpec.scala
Scala
apache-2.0
4,606
package yang.iterator

import akka.actor.ActorRef
import com.nsn.oss.nbi.fm.operation.interfaces.NsnAlarm

/**
 * Message protocol for the alarm-iterator actors.
 *
 * NOTE(review): several message names use snake_case instead of the conventional
 * UpperCamelCase; renaming would break every pattern match on them elsewhere, so
 * they are only flagged here.
 *
 * Created by y28yang on 2/18/2016.
 */
object IteratorProtocol {
  /** Asks for the IOR of the iterator identified by `iteratorId`. */
  case class RequestCreateIteratorIor(iteratorId: Int)
  /** Raw polled alarms handed to flex-mapping; `hasNext` signals more data pending. */
  case class polled_data_to_flexmapping(hasNext: Boolean, nsnAlarms: java.util.List[NsnAlarm], iteratorId: Int)
  /** Flex-mapped events to append to an iterator's buffer. */
  case class appended_flexmapped_data[T](hasNext: Boolean, events: Iterable[T])
  /** Client request for up to `howMany` buffered events. */
  case class request_next_date(howMany: Short)
  /** Reply carrying the next batch of structured events. */
  case class RespondNextDate[T](hasNext: Boolean, structuredEvents: Iterable[T])
  /** Asks the iterator manager to destroy the given iterator actor. */
  case class request_iterator_manager_to_destroy(iterator: ActorRef)
  /** Triggers a poll for new data. */
  case object RequestToPollDate
  /** Tells an iterator actor to tear itself down. */
  case object RequestDestroyIterator
  /** Periodic self-message used to reap idle iterators. */
  case object schedule_to_check_unused_iterator
}
wjingyao2008/firsttry
NextGenAct/src/main/scala/yang/iterator/IteratorProtocol.scala
Scala
apache-2.0
832
package nexus.typelevel

import nexus._
import shapeless._

/**
 * Type-level function that splits a list of (axis, size) pairs into its axes and its sizes:
 * for `A = (A1, Int) :: ... :: (An, Int) :: HNil` (or the corresponding tuple), `Out` is
 * `A1 :: ... :: An :: HNil` (resp. tuple); `dims` extracts the axis labels and `shape` the
 * integer sizes.
 *
 * @author Tongfei Chen
 */
trait UnzipSizedDims[A] {
  type Out
  def dims(a: A): Out
  def shape(a: A): List[Int]
}

object UnzipSizedDims {

  /** Summons the unique instance for `A`, fixing `Out` to `B`. */
  def apply[A, B](implicit s: UnzipSizedDims.Aux[A, B]) = s

  type Aux[A, B] = UnzipSizedDims[A] { type Out = B }

  // Base case: the empty HList unzips to an empty HList and an empty shape.
  implicit def case0: Aux[HNil, HNil] = new UnzipSizedDims[HNil] {
    type Out = HNil
    def dims(a: HNil) = HNil
    def shape(a: HNil) = Nil
  }

  // Inductive case: peel off the head pair (axis, size) and recurse on the tail.
  implicit def caseN[Ah, At <: HList, Bt <: HList](implicit s: UnzipSizedDims.Aux[At, Bt]): Aux[(Ah, Int) :: At, Ah :: Bt] =
    new UnzipSizedDims[(Ah, Int) :: At] {
      type Out = Ah :: Bt
      def dims(a: (Ah, Int) :: At) = a.head._1 :: s.dims(a.tail)
      def shape(a: (Ah, Int) :: At) = a.head._2 :: s.shape(a.tail)
    }

  // Tuple adapter: convert the tuple to an HList, unzip, then convert the dims back.
  implicit def tuple[A, Al <: HList, Bl <: HList, B]
  (implicit al: ToHList.Aux[A, Al], s: UnzipSizedDims.Aux[Al, Bl], bl: FromHList.Aux[Bl, B]): Aux[A, B] =
    new UnzipSizedDims[A] {
      type Out = B
      def dims(a: A) = bl(s.dims(al(a)))
      def shape(a: A) = s.shape(al(a))
    }
}
ctongfei/nexus
tensor/src/main/scala/nexus/typelevel/UnzipSizedDims.scala
Scala
mit
1,099
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.execution.joins

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.execution.metric.LongSQLMetric

/** Shared logic for hash-join physical operators: one child (`buildSide`) is materialized
 * into a [[HashedRelation]], the other is streamed and probed row by row. */
trait HashJoin {
  self: SparkPlan =>

  val leftKeys: Seq[Expression]
  val rightKeys: Seq[Expression]
  val buildSide: BuildSide
  val left: SparkPlan
  val right: SparkPlan

  // Which child is hashed (build) and which is probed (streamed).
  protected lazy val (buildPlan, streamedPlan) = buildSide match {
    case BuildLeft => (left, right)
    case BuildRight => (right, left)
  }

  protected lazy val (buildKeys, streamedKeys) = buildSide match {
    case BuildLeft => (leftKeys, rightKeys)
    case BuildRight => (rightKeys, leftKeys)
  }

  // Inner-join output: all columns from both children.
  override def output: Seq[Attribute] = left.output ++ right.output

  override def outputsUnsafeRows: Boolean = true
  override def canProcessUnsafeRows: Boolean = true
  override def canProcessSafeRows: Boolean = false

  protected def buildSideKeyGenerator: Projection =
    UnsafeProjection.create(buildKeys, buildPlan.output)

  protected def streamSideKeyGenerator: Projection =
    UnsafeProjection.create(streamedKeys, streamedPlan.output)

  /** Probes `hashedRelation` with each streamed row, emitting one joined row per match.
   * @param streamIter   rows from the streamed side
   * @param numStreamRows metric incremented per streamed row consumed
   * @param hashedRelation hash table built from the build side
   * @param numOutputRows metric incremented per joined row emitted */
  protected def hashJoin(
      streamIter: Iterator[InternalRow],
      numStreamRows: LongSQLMetric,
      hashedRelation: HashedRelation,
      numOutputRows: LongSQLMetric): Iterator[InternalRow] = {
    new Iterator[InternalRow] {
      // Cursor state: current streamed row, its matches, and the next match to emit
      // (-1 means "no current matches; advance the stream on the next hasNext").
      private[this] var currentStreamedRow: InternalRow = _
      private[this] var currentHashMatches: Seq[InternalRow] = _
      private[this] var currentMatchPosition: Int = -1

      // Mutable per row objects.
      private[this] val joinRow = new JoinedRow
      private[this] val resultProjection: (InternalRow) => InternalRow =
        UnsafeProjection.create(self.schema)

      private[this] val joinKeys = streamSideKeyGenerator

      override final def hasNext: Boolean =
        (currentMatchPosition != -1 && currentMatchPosition < currentHashMatches.size) ||
          (streamIter.hasNext && fetchNext())

      override final def next(): InternalRow = {
        // Column order follows buildSide so output is always (left columns, right columns).
        val ret = buildSide match {
          case BuildRight => joinRow(currentStreamedRow, currentHashMatches(currentMatchPosition))
          case BuildLeft => joinRow(currentHashMatches(currentMatchPosition), currentStreamedRow)
        }
        currentMatchPosition += 1
        numOutputRows += 1
        resultProjection(ret)
      }

      /**
       * Searches the streamed iterator for the next row that has at least one match in hashtable.
       *
       * @return true if the search is successful, and false if the streamed iterator runs out of
       *         tuples.
       */
      private final def fetchNext(): Boolean = {
        currentHashMatches = null
        currentMatchPosition = -1

        while (currentHashMatches == null && streamIter.hasNext) {
          currentStreamedRow = streamIter.next()
          numStreamRows += 1
          val key = joinKeys(currentStreamedRow)
          // Rows whose join key contains a null never match (SQL null semantics).
          if (!key.anyNull) {
            currentHashMatches = hashedRelation.get(key)
          }
        }

        if (currentHashMatches == null) {
          false
        } else {
          currentMatchPosition = 0
          true
        }
      }
    }
  }
}
chenc10/Spark-PAF
sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashJoin.scala
Scala
apache-2.0
4,069
package com.eevolution.context.dictionary.infrastructure.service

import java.util.UUID

import akka.NotUsed
import com.eevolution.context.dictionary.domain._
import com.eevolution.context.dictionary.domain.model.SysConfig
import com.eevolution.utils.PaginatedSequence
import com.lightbend.lagom.scaladsl.api.{Service, ServiceCall}

/**
  * Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
  * This program is free software: you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
  * the Free Software Foundation, either version 3 of the License, or
  * (at your option) any later version.
  * This program is distributed in the hope that it will be useful,
  * but WITHOUT ANY WARRANTY; without even the implied warranty of
  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  * GNU General Public License for more details.
  * You should have received a copy of the GNU General Public License
  * along with this program. If not, see <http://www.gnu.org/licenses/>.
  * Email: eduardo.moreno@e-evolution.com, http://www.e-evolution.com , http://github.com/e-Evolution
  * Created by eduardo.moreno@e-evolution.com , www.e-evolution.com
  */

/**
  * Sys Config Service: Lagom service descriptor exposing read-only REST endpoints
  * over the SysConfig dictionary entity.
  */
trait SysConfigService extends Service with api.service.SysConfigService {

  /** Returns all SysConfig records. */
  override def getAll() : ServiceCall[NotUsed, List[SysConfig]]

  /** Returns the SysConfig record with the given integer id. */
  override def getById(id: Int): ServiceCall[NotUsed, SysConfig]

  /** Returns the SysConfig record with the given UUID. */
  override def getByUUID(uuid :UUID): ServiceCall[NotUsed, SysConfig]

  /** Returns one page of SysConfig records; page number and size are optional query params. */
  override def getAllByPage(pageNo: Option[Int], pageSize: Option[Int]): ServiceCall[NotUsed, PaginatedSequence[SysConfig]]

  def descriptor = {
    import Service._
    named("sysConfig").withCalls(
      pathCall("/api/v1_0_0/sysConfig/all", getAll _),
      pathCall("/api/v1_0_0/sysConfig/:id", getById _),
      // BUG FIX: the UUID route previously used "/api/v1_0_0/sysConfig/:uuid", which is
      // the same dynamic path pattern as the ":id" route above — the router cannot
      // distinguish the two, so getByUUID was never reachable. Give it a distinct
      // static segment instead.
      pathCall("/api/v1_0_0/sysConfig/uuid/:uuid", getByUUID _),
      pathCall("/api/v1_0_0/sysConfig?pageNo&pageSize", getAllByPage _)
    )
  }
}
adempiere/ADReactiveSystem
dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/service/SysConfigService.scala
Scala
gpl-3.0
2,017
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.h2o import java.net.{URLClassLoader, URL} import org.apache.spark._ import org.apache.spark.api.java.{JavaRDD, JavaSparkContext} import org.apache.spark.h2o.H2OContextUtils._ import org.apache.spark.rdd.{H2ORDD, H2OSchemaRDD} import org.apache.spark.scheduler.{SparkListenerExecutorAdded, SparkListener} import org.apache.spark.sql.types._ import org.apache.spark.sql.{DataFrame, Row, SQLContext} import water._ import water.api.DataFrames.DataFramesHandler import water.api.H2OFrames.H2OFramesHandler import water.api.RDDs.RDDsHandler import water.api._ import water.api.scalaInt.ScalaCodeHandler import water.parser.BufferedString import scala.annotation.tailrec import scala.collection.mutable import scala.language.implicitConversions import scala.reflect.ClassTag import scala.reflect.runtime.universe._ import scala.util.Random /** * Simple H2O context motivated by SQLContext. * * It provides implicit conversion from RDD -> H2OLikeRDD and back. 
 */
// Early-initializer block: sparkConf must be assigned before the H2OConf mixin's
// initialization runs, because H2OConf reads configuration from it.
class H2OContext private[this] (@transient val sparkContext: SparkContext) extends {
    val sparkConf = sparkContext.getConf
  } with org.apache.spark.Logging with H2OConf with Serializable {

  /** Runtime list of active H2O nodes */
  private val h2oNodes = mutable.ArrayBuffer.empty[NodeDesc]

  /** IP of H2O client */
  private var localClientIp: String = _
  /** REST port of H2O client */
  private var localClientPort: Int = _

  /** Implicit conversion from Spark DataFrame to H2O's DataFrame */
  implicit def asH2OFrame(df : DataFrame) : H2OFrame = asH2OFrame(df, None)
  // The explicit null guard protects Java/Py4J callers that may pass a null Option.
  def asH2OFrame(df : DataFrame, frameName: Option[String]) : H2OFrame =
    H2OContext.toH2OFrame(sparkContext, df, if (frameName != null) frameName else None)
  def asH2OFrame(df : DataFrame, frameName: String) : H2OFrame = asH2OFrame(df, Option(frameName))

  /** Implicit conversion from typed RDD to H2O's DataFrame */
  implicit def asH2OFrame[A <: Product : TypeTag](rdd : RDD[A]) : H2OFrame = asH2OFrame(rdd, None)
  def asH2OFrame[A <: Product : TypeTag](rdd : RDD[A], frameName: Option[String]) : H2OFrame =
    H2OContext.toH2OFrame(sparkContext, rdd, frameName)
  def asH2OFrame[A <: Product : TypeTag](rdd : RDD[A], frameName: String) : H2OFrame =
    asH2OFrame(rdd, Option(frameName))

  /** Implicit conversion from RDD[Primitive type] ( where primitive type can be String, Double, Float or Int) to appropriate H2OFrame */
  implicit def asH2OFrame(primitiveType: PrimitiveType): H2OFrame = asH2OFrame(primitiveType, None)
  def asH2OFrame(primitiveType: PrimitiveType, frameName: Option[String]): H2OFrame =
    H2OContext.toH2OFrame(sparkContext, primitiveType, frameName)
  def asH2OFrame(primitiveType: PrimitiveType, frameName: String): H2OFrame =
    asH2OFrame(primitiveType, Option(frameName))

  /** Implicit conversion from Spark DataFrame to H2O's DataFrame */
  implicit def toH2OFrameKey(rdd : DataFrame) : Key[Frame] = toH2OFrameKey(rdd, None)
  def toH2OFrameKey(rdd : DataFrame, frameName: Option[String]) : Key[Frame] =
    asH2OFrame(rdd, frameName)._key
  def toH2OFrameKey(rdd : DataFrame, frameName: String) : Key[Frame] =
    toH2OFrameKey(rdd, Option(frameName))

  /** Implicit conversion from typed RDD to H2O's DataFrame */
  implicit def toH2OFrameKey[A <: Product : TypeTag](rdd : RDD[A]) : Key[_] = toH2OFrameKey(rdd, None)
  def toH2OFrameKey[A <: Product : TypeTag](rdd : RDD[A], frameName: Option[String]) : Key[_] =
    asH2OFrame(rdd, frameName)._key
  def toH2OFrameKey[A <: Product : TypeTag](rdd : RDD[A], frameName: String) : Key[_] =
    toH2OFrameKey(rdd, Option(frameName))

  /** Implicit conversion from RDD[Primitive type] ( where primitive type can be String, Boolean, Double, Float, Int,
    * Long, Short or Byte ) to appropriate H2O's DataFrame */
  implicit def toH2OFrameKey(primitiveType: PrimitiveType): Key[_] = toH2OFrameKey(primitiveType, None)
  def toH2OFrameKey(primitiveType: PrimitiveType, frameName: Option[String]): Key[_] =
    asH2OFrame(primitiveType, frameName)._key
  def toH2OFrameKey(primitiveType: PrimitiveType, frameName: String): Key[_] =
    toH2OFrameKey(primitiveType, Option(frameName))

  /** Implicit conversion from Frame to DataFrame */
  implicit def asH2OFrame(fr: Frame) : H2OFrame = new H2OFrame(fr)

  def asH2OFrame(s: String): H2OFrame = new H2OFrame(s)

  /** Returns a key of given frame */
  implicit def toH2OFrameKey(fr: Frame): Key[Frame] = fr._key

  /**
   * Support for calls from Py4J
   */

  /** Conversion from RDD[String] to H2O's DataFrame */
  def asH2OFrameFromRDDString(rdd: JavaRDD[String], frameName: String): H2OFrame =
    H2OContext.toH2OFrameFromRDDString(sparkContext,rdd.rdd, Option(frameName))

  /** Returns key of the H2O's DataFrame conversed from RDD[String]*/
  def asH2OFrameFromRDDStringKey(rdd: JavaRDD[String], frameName: String): Key[Frame] =
    asH2OFrameFromRDDString(rdd, frameName)._key

  /** Conversion from RDD[Boolean] to H2O's DataFrame */
  def asH2OFrameFromRDDBool(rdd: JavaRDD[Boolean], frameName: String): H2OFrame =
    H2OContext.toH2OFrameFromRDDBool(sparkContext,rdd.rdd, Option(frameName))

  /** Returns key of the H2O's DataFrame conversed from RDD[Boolean]*/
  def asH2OFrameFromRDDBoolKey(rdd: JavaRDD[Boolean], frameName: String): Key[Frame] =
    asH2OFrameFromRDDBool(rdd, frameName)._key

  /** Conversion from RDD[Double] to H2O's DataFrame */
  def asH2OFrameFromRDDDouble(rdd: JavaRDD[Double], frameName: String): H2OFrame =
    H2OContext.toH2OFrameFromRDDDouble(sparkContext,rdd.rdd, Option(frameName))

  /** Returns key of the H2O's DataFrame conversed from RDD[Double]*/
  def asH2OFrameFromRDDDoubleKey(rdd: JavaRDD[Double], frameName: String): Key[Frame] =
    asH2OFrameFromRDDDouble(rdd, frameName)._key

  /** Conversion from RDD[Long] to H2O's DataFrame */
  def asH2OFrameFromRDDLong(rdd: JavaRDD[Long], frameName: String): H2OFrame =
    H2OContext.toH2OFrameFromRDDLong(sparkContext,rdd.rdd, Option(frameName))

  /** Returns key of the H2O's DataFrame conversed from RDD[Long]*/
  def asH2OFrameFromRDDLongKey(rdd: JavaRDD[Long], frameName: String): Key[Frame] =
    asH2OFrameFromRDDLong(rdd, frameName)._key

  /** Transform given Scala symbol to String */
  implicit def symbolToString(sy: scala.Symbol): String = sy.name

  /** Convert given H2O frame into a RDD type */
  @deprecated("Use asRDD instead", "0.2.3")
  def toRDD[A <: Product: TypeTag: ClassTag](fr : H2OFrame) : RDD[A] = asRDD[A](fr)

  /** Convert given H2O frame into a Product RDD type */
  def asRDD[A <: Product: TypeTag: ClassTag](fr : H2OFrame) : RDD[A] = createH2ORDD[A](fr)

  /** Convert given H2O frame into DataFrame type */
  // NOTE(review): arguments appear swapped — @deprecated expects (message, since),
  // cf. the toRDD deprecation above which uses ("Use asRDD instead", "0.2.3").
  @deprecated("1.3", "Use asDataFrame")
  def asSchemaRDD(fr : H2OFrame)(implicit sqlContext: SQLContext) : DataFrame = createH2OSchemaRDD(fr)

  def asDataFrame(fr : H2OFrame)(implicit sqlContext: SQLContext) : DataFrame = createH2OSchemaRDD(fr)

  def asDataFrame(s : String)(implicit sqlContext: SQLContext) : DataFrame =
    createH2OSchemaRDD(new H2OFrame(s))

  /** Detected number of Spark executors
    * Property value is derived from SparkContext during creation of H2OContext.
    */
  // In local mode there is exactly one "executor"; otherwise the driver's storage
  // status entry is subtracted from the total.
  private def numOfSparkExecutors =
    if (sparkContext.isLocal) 1 else sparkContext.getExecutorStorageStatus.length - 1

  /** "ip:port" of the H2O client embedded in this driver; valid only after start(). */
  def h2oLocalClient = this.localClientIp + ":" + this.localClientPort

  def h2oLocalClientIp = this.localClientIp

  def h2oLocalClientPort = this.localClientPort

  // For now disable opening Spark UI
  //def sparkUI = sparkContext.ui.map(ui => ui.appUIAddress)

  /** Initialize Sparkling H2O and start H2O cloud with specified number of workers. */
  private def start(h2oWorkers: Int):H2OContext = {
    import H2OConf._
    sparkConf.set(PROP_CLUSTER_SIZE._1, h2oWorkers.toString)
    start()
  }

  /**
    *Specifies maximum number of iterations where the number of executors remained the same
    * The spreadRDD function is stopped once the variable numTriesSame reached this number
    */
  private final val SUBSEQUENT_NUM_OF_TRIES=3

  /** Initialize Sparkling H2O and start H2O cloud. */
  private def start(): H2OContext = {
    import H2OConf._
    // Setup properties for H2O configuration
    if (!sparkConf.contains(PROP_CLOUD_NAME._1)) {
      sparkConf.set(PROP_CLOUD_NAME._1,
        PROP_CLOUD_NAME._2 + System.getProperty("user.name", "cloud_" + Random.nextInt(42)))
    }

    // Check Spark environment and reconfigure some values
    H2OContext.checkAndUpdateSparkEnv(sparkConf)
    logInfo(s"Starting H2O services: " + super[H2OConf].toString)
    // Create dummy RDD distributed over executors
    val (spreadRDD, spreadRDDNodes) = createSpreadRDD(numRddRetries, drddMulFactor, numH2OWorkers, 0)

    //attach listener which shutdown H2O when we bump into executor we didn't discover during the spreadRDD phase
    sparkContext.addSparkListener(new SparkListener(){
      override def onExecutorAdded(executorAdded: SparkListenerExecutorAdded): Unit = {
        throw new IllegalArgumentException("Executor without H2O instance discovered, killing the cloud!")
      }
    })
    // Start H2O nodes
    // Get executors to execute H2O
    val allExecutorIds = spreadRDDNodes.map(_._1).distinct
    val executorIds = allExecutorIds
    // The collected executors based on IDs should match
    assert(spreadRDDNodes.length == executorIds.length,
      s"Unexpected number of executors ${spreadRDDNodes.length}!=${executorIds.length}")
    // H2O is executed only on the subset of Spark cluster - fail
    if (executorIds.length < allExecutorIds.length) {
      throw new IllegalArgumentException(s"""Spark cluster contains ${allExecutorIds.length}, but H2O is running only on ${executorIds.length} nodes!""")
    }
    // Execute H2O on given nodes
    logInfo(s"""Launching H2O on following nodes: ${spreadRDDNodes.mkString(",")}""")

    var h2oNodeArgs = getH2ONodeArgs
    // Disable web on h2o nodes in non-local mode
    if(!sparkContext.isLocal){
      h2oNodeArgs = h2oNodeArgs++Array("-disable_web")
    }
    logDebug(s"Arguments used for launching h2o nodes: ${h2oNodeArgs.mkString(" ")}")
    val executors = startH2O(sparkContext, spreadRDD, spreadRDDNodes.length, h2oNodeArgs)
    // Store runtime information
    h2oNodes.append( executors:_* )

    // Connect to a cluster via H2O client, but only in non-local case
    if (!sparkContext.isLocal) {
      logTrace("Sparkling H2O - DISTRIBUTED mode: Waiting for " + executors.length)
      // Get arguments for this launch including flatfile
      // And also ask for client mode
      val h2oClientIp = clientIp.getOrElse(getIp(SparkEnv.get))
      val h2oClientArgs = toH2OArgs(getH2OClientArgs ++ Array("-ip", h2oClientIp, "-client"), this, executors)
      logDebug(s"Arguments used for launching h2o client node: ${h2oClientArgs.mkString(" ")}")
      // Launch H2O
      H2OStarter.start(h2oClientArgs, false)
    }
    // And wait for right cluster size
    H2O.waitForCloudSize(executors.length, cloudTimeout)

    // Register web API for client
    H2OContext.registerClientWebAPI(sparkContext, this)
    H2O.finalizeRegistration()

    // Fill information about H2O client
    localClientIp = H2O.SELF_ADDRESS.getHostAddress
    localClientPort = H2O.API_PORT

    // Inform user about status
    logInfo("Sparkling Water started, status of context: " + this.toString)
    this
  }

  /** Stops H2O context.
    *
    * Calls System.exit() which kills executor JVM.
    */
  def stop(stopSparkContext: Boolean = false): Unit = {
    if (stopSparkContext) sparkContext.stop()
    H2O.orderlyShutdown(1000)
    H2O.exit(0)
  }

  // Retries with a doubled multiplication factor until the set of Spark executors
  // observed before and after the spread is stable (or nretries is exhausted).
  @tailrec
  private def createSpreadRDD(nretries: Int,
                              mfactor: Int,
                              nworkers: Int,
                              numTriesSame: Int): (RDD[NodeDesc], Array[NodeDesc]) = {
    // NOTE(review): log message says "mretries" and is missing its closing ")".
    logDebug(s" Creating RDD for launching H2O nodes (mretries=${nretries}, mfactor=${mfactor}, nworkers=${nworkers}")
    // Non-positive value of nworkers means automatic detection of number of workers
    val nSparkExecBefore = numOfSparkExecutors
    val workers = if (nworkers > 0) nworkers
                  else if (nSparkExecBefore > 0) nSparkExecBefore
                  else defaultCloudSize
    val spreadRDD = sparkContext.parallelize(0 until mfactor*workers, mfactor*workers).persist()

    // Collect information about executors in Spark cluster
    val nodes = collectNodesInfo(spreadRDD)

    // Verify that all executors participate in execution
    val nSparkExecAfter = numOfSparkExecutors
    val sparkExecutors = nodes.map(_._1).distinct.length
    // Delete RDD
    spreadRDD.unpersist()
    if ((sparkExecutors < nworkers || nSparkExecAfter != nSparkExecBefore) && nretries == 0) {
      throw new IllegalArgumentException(
        s"""Cannot execute H2O on all Spark executors:
            | Expected number of H2O workers is ${nworkers}
            | Detected number of Spark workers is $sparkExecutors
            | Num of Spark executors before is $nSparkExecBefore
            | Num of Spark executors after is $nSparkExecAfter
            |
            | If you are running regular application, please, specify number of Spark workers
            | via ${H2OConf.PROP_CLUSTER_SIZE._1} Spark configuration property.
            | If you are running from shell,
            | you can try: val h2oContext = H2OContext.getOrCreate(sc,<number of Spark workers>)
            |
          """.stripMargin
      )
    } else if (nSparkExecAfter != nSparkExecBefore) {
      // Repeat if we detect change in number of executors reported by storage level
      logInfo(s"Detected ${nSparkExecBefore} before, and ${nSparkExecAfter} spark executors after! Retrying again...")
      createSpreadRDD(nretries-1, mfactor, nworkers, 0)
    } else if ((nworkers>0 && sparkExecutors == nworkers || nworkers<=0)
                 && sparkExecutors == nSparkExecAfter
               || numTriesSame==SUBSEQUENT_NUM_OF_TRIES) {
      // Return result only if we are sure that number of detected executors seems ok or if the number of executors didn't change in the last
      // SUBSEQUENT_NUM_OF_TRIES iterations
      logInfo(s"Detected ${sparkExecutors} spark executors for ${nworkers} H2O workers!")
      (new InvokeOnNodesRDD(nodes, sparkContext), nodes)
    } else {
      logInfo(s"Detected ${sparkExecutors} spark executors for ${nworkers} H2O workers! Retrying again...")
      createSpreadRDD(nretries-1, mfactor*2, nworkers, numTriesSame + 1)
    }
  }

  def createH2ORDD[A <: Product: TypeTag: ClassTag](fr: H2OFrame): RDD[A] = {
    new H2ORDD[A](this,fr)
  }

  def createH2OSchemaRDD(fr: H2OFrame)(implicit sqlContext: SQLContext): DataFrame = {
    val h2oSchemaRDD = new H2OSchemaRDD(this, fr)
    import org.apache.spark.sql.H2OSQLContextUtils.internalCreateDataFrame
    internalCreateDataFrame(h2oSchemaRDD, H2OSchemaUtils.createSchema(fr))(sqlContext)
  }

  /** Open H2O Flow running in this client. */
  def openFlow(): Unit = openURI(s"http://${h2oLocalClient}")

  /** Open Spark task manager. */
  //def openSparkUI(): Unit = sparkUI.foreach(openURI(_))

  /** Open browser for given address.
    *
    * @param uri addres to open in browser, e.g., http://example.com
    */
  private def openURI(uri: String): Unit = {
    import java.awt.Desktop
    // No-op under tests so CI machines never pop a browser.
    if (!isTesting) {
      if (Desktop.isDesktopSupported) {
        Desktop.getDesktop.browse(new java.net.URI(uri))
      } else {
        logWarning(s"Desktop support is missing! Cannot open browser for ${uri}")
      }
    }
  }

  /**
    * Return true if running inside spark/sparkling water test.
    *
    * @return true if the actual run is test run
    */
  private def isTesting = sparkContext.conf.contains("spark.testing") || sys.props.contains("spark.testing")

  override def toString: String = {
    s"""
      |Sparkling Water Context:
      | * H2O name: ${H2O.ARGS.name}
      | * number of executors: ${h2oNodes.size}
      | * list of used executors:
      |  (executorId, host, port)
      |  ------------------------
      |  ${h2oNodes.mkString("\\n  ")}
      |  ------------------------
      |
      | Open H2O Flow in browser: http://${h2oLocalClient} (CMD + click in Mac OSX)
    """.stripMargin
  }
}

/** Companion: singleton management plus the RDD/DataFrame -> H2O Frame converters. */
object H2OContext extends Logging {

  // Process-wide singleton; created reflectively because the constructor is private[this].
  private[this] var instance: H2OContext = null

  private def getOrCreate(sc: SparkContext, h2oWorkers: Option[Int]): H2OContext = {
    if (instance == null) {
      val h2oContextClazz = classOf[H2OContext]
      val ctor = h2oContextClazz.getDeclaredConstructor(classOf[SparkContext])
      ctor.setAccessible(true)
      instance = ctor.newInstance(sc)
      if (h2oWorkers.isEmpty) {
        instance.start()
      } else {
        instance.start(h2oWorkers.get)
      }
    }
    instance
  }

  /**
    * Get existing H2O Context or initialize Sparkling H2O and start H2O cloud with specified number of workers
    *
    * @param sc Spark Context
    * @return H2O Context
    */
  def getOrCreate(sc: SparkContext, h2oWorkers: Int): H2OContext = {
    getOrCreate(sc, Some(h2oWorkers))
  }

  /**
    * Get existing H2O Context or initialize Sparkling H2O and start H2O cloud with default number of workers
    *
    * @param sc Spark Context
    * @return H2O Context
    */
  def getOrCreate(sc: SparkContext): H2OContext = {
    getOrCreate(sc, None)
  }

  /** Supports call from java environments. */
  def getOrCreate(sc: JavaSparkContext): H2OContext = {
    getOrCreate(sc.sc, None)
  }

  /** Supports call from java environments. */
  def getOrCreate(sc: JavaSparkContext, h2oWorkers: Int): H2OContext = {
    getOrCreate(sc.sc,Some(h2oWorkers))
  }

  /** Transform SchemaRDD into H2O Frame */
  def toH2OFrame(sc: SparkContext, dataFrame: DataFrame, frameKeyName: Option[String]) : H2OFrame = {
    import org.apache.spark.h2o.H2OSchemaUtils._
    // Cache DataFrame RDD's
    val dfRdd = dataFrame.rdd

    val keyName = frameKeyName.getOrElse("frame_rdd_" + dfRdd.id)
    // Fetch cached frame from DKV
    val frameVal = DKV.get(keyName)
    if (frameVal==null) {
      // Flattens and expands RDD's schema
      val flatRddSchema = expandedSchema(sc, dataFrame)
      // Patch the flat schema based on information about types
      val fnames = flatRddSchema.map(t => t._2.name).toArray
      // Transform datatype into h2o types
      val vecTypes = flatRddSchema.indices
        .map(idx => {
          val f = flatRddSchema(idx)
          dataTypeToVecType(f._2.dataType)
        }).toArray
      // Prepare frame header and put it into DKV under given name
      initFrame(keyName, fnames)
      // Create a new chunks corresponding to spark partitions
      // Note: Eager, not lazy, evaluation
      val rows = sc.runJob(dfRdd, perSQLPartition(keyName, flatRddSchema, vecTypes) _)
      val res = new Array[Long](dfRdd.partitions.size)
      rows.foreach { case (cidx, nrows) => res(cidx) = nrows}

      // Add Vec headers per-Chunk, and finalize the H2O Frame
      new H2OFrame(
        finalizeFrame(
          keyName,
          res,
          vecTypes))
    } else {
      new H2OFrame(frameVal.get.asInstanceOf[Frame])
    }
  }

  /** Transform typed RDD into H2O Frame */
  def toH2OFrame[A <: Product : TypeTag](sc: SparkContext, rdd: RDD[A], frameKeyName: Option[String]) : H2OFrame = {
    import org.apache.spark.h2o.H2OProductUtils._
    import org.apache.spark.h2o.ReflectionUtils._

    val keyName = frameKeyName.getOrElse("frame_rdd_" + rdd.id + Key.rand()) // There are uniq IDs for RDD
    val fnames = names[A]
    val ftypes = types[A](fnames)
    // Collect H2O vector types for all input types
    val vecTypes = ftypes.indices.map(idx => dataTypeToVecType(ftypes(idx))).toArray
    // Make an H2O data Frame - but with no backing data (yet)
    initFrame(keyName, fnames)
    // Create chunks on remote nodes
    val rows = sc.runJob(rdd, perRDDPartition(keyName, vecTypes) _) // eager, not lazy, evaluation
    val res = new Array[Long](rdd.partitions.length)
    rows.foreach{ case(cidx, nrows) => res(cidx) = nrows }

    // Add Vec headers per-Chunk, and finalize the H2O Frame
    new H2OFrame(finalizeFrame(keyName, res, vecTypes))
  }

  /** Transform RDD[Primitive type] ( where primitive type can be String, Double, Float or Int) to appropriate H2OFrame */
  def toH2OFrame(sc: SparkContext, primitive: PrimitiveType, frameKeyName: Option[String]): H2OFrame =
    primitive.toH2OFrame(sc, frameKeyName)

  /** Transform RDD[String] to appropriate H2OFrame */
  def toH2OFrameFromRDDString(sc: SparkContext, rdd: RDD[String], frameKeyName: Option[String]): H2OFrame =
    toH2OFrameFromPrimitive(sc, rdd, frameKeyName)

  /** Transform RDD[Int] to appropriate H2OFrame */
  def toH2OFrameFromRDDInt(sc: SparkContext, rdd: RDD[Int], frameKeyName: Option[String]): H2OFrame =
    toH2OFrameFromPrimitive(sc, rdd, frameKeyName)

  /** Transform RDD[Float] to appropriate H2OFrame */
  def toH2OFrameFromRDDFloat(sc: SparkContext, rdd: RDD[Float], frameKeyName: Option[String]): H2OFrame =
    toH2OFrameFromPrimitive(sc, rdd, frameKeyName)

  /** Transform RDD[Double] to appropriate H2OFrame */
  def toH2OFrameFromRDDDouble(sc: SparkContext, rdd: RDD[Double], frameKeyName: Option[String]): H2OFrame =
    toH2OFrameFromPrimitive(sc, rdd, frameKeyName)

  /** Transform RDD[Long] to appropriate H2OFrame */
  def toH2OFrameFromRDDLong(sc: SparkContext, rdd: RDD[Long], frameKeyName: Option[String]): H2OFrame =
    toH2OFrameFromPrimitive(sc, rdd, frameKeyName)

  /** Transform RDD[Double] to appropriate H2OFrame */
  // NOTE(review): scaladoc above says Double but this overload handles RDD[Boolean].
  def toH2OFrameFromRDDBool(sc: SparkContext, rdd: RDD[Boolean], frameKeyName: Option[String]): H2OFrame =
    toH2OFrameFromPrimitive(sc, rdd, frameKeyName)

  private[this] def toH2OFrameFromPrimitive[T: TypeTag](sc: SparkContext, rdd: RDD[T], frameKeyName: Option[String]): H2OFrame = {
    import org.apache.spark.h2o.H2OPrimitiveTypesUtils._
    import org.apache.spark.h2o.ReflectionUtils._

    val keyName = frameKeyName.getOrElse("frame_rdd_" + rdd.id + Key.rand())

    // Single-column frame named "values".
    val fnames = Array[String]("values")
    val ftypes = Array[Class[_]](typ(typeOf[T]))
    val vecTypes = ftypes.indices.map(idx => dataTypeToVecType(ftypes(idx))).toArray

    // Make an H2O data Frame - but with no backing data (yet)
    initFrame(keyName, fnames)

    val rows = sc.runJob(rdd, perPrimitivePartition(keyName, vecTypes) _) // eager, not lazy, evaluation
    val res = new Array[Long](rdd.partitions.length)
    rows.foreach { case (cidx, nrows) => res(cidx) = nrows }

    // Add Vec headers per-Chunk, and finalize the H2O Frame
    new H2OFrame(finalizeFrame(keyName, res, vecTypes))
  }

  // Executor-side task: writes one Spark partition of primitives into one H2O chunk.
  private def perPrimitivePartition[T](keystr: String, vecTypes: Array[Byte])
                                      (context: TaskContext, it: Iterator[T]): (Int, Long) = {
    // An array of H2O NewChunks; A place to record all the data in this partition
    val nchks = water.fvec.FrameUtils.createNewChunks(keystr, vecTypes, context.partitionId)
    // Helper to hold H2O string
    val valStr = new BufferedString()
    it.foreach(r => {
      // For all rows in RDD
      val chk = nchks(0) // There is only one chunk
      r match {
        case t: Int => chk.addNum(t.toDouble)
        case t: Double => chk.addNum(t)
        case t: Float => chk.addNum(t.toDouble)
        case t: Long => chk.addNum(t.toDouble)
        case t: Boolean => chk.addNum(if(t) 1 else 0)
        case str: String => chk.addStr(valStr.setTo(str))
      }
    })
    // Compress & write out the Partition/Chunks
    water.fvec.FrameUtils.closeNewChunks(nchks)
    // Return Partition# and rows in this Partition
    (context.partitionId, nchks(0)._len)
  }

  // Executor-side task: writes one partition of a DataFrame into H2O chunks, following
  // the flattened schema (`types`: access path into nested Rows, field, collection tag).
  private def perSQLPartition ( keystr: String, types: Seq[(Seq[Int], StructField, Byte)], vecTypes: Array[Byte])
                              ( context: TaskContext, it: Iterator[Row] ): (Int,Long) = {
    // New chunks on remote node
    val nchks = water.fvec.FrameUtils.createNewChunks(keystr, vecTypes, context.partitionId)
    val valStr = new BufferedString() // just helper for string columns
    it.foreach(row => {
      var startOfSeq = -1
      // Fill row in the output frame
      types.indices.foreach { idx => // Index of column
        val chk = nchks(idx)
        val field = types(idx)
        val path = field._1
        val dataType = field._2.dataType
        // Helpers to distinguish embedded collection types
        val isAry = field._3 == H2OSchemaUtils.ARRAY_TYPE
        val isVec = field._3 == H2OSchemaUtils.VEC_TYPE
        val isNewPath = if (idx > 0) path != types(idx-1)._1 else true
        // Reset counter for sequences
        if ((isAry || isVec) && isNewPath) startOfSeq = idx
        else if (!isAry && !isVec) startOfSeq = -1

        // Walk the access path through nested Rows to the leaf value.
        var i = 0
        var subRow = row
        while (i < path.length-1 && !subRow.isNullAt(path(i))) {
          subRow = subRow.getAs[Row](path(i)); i += 1
        }
        val aidx = path(i) // actual index into row provided by path
        if (subRow.isNullAt(aidx)) {
          chk.addNA()
        } else {
          val ary = if (isAry) subRow.getAs[Seq[_]](aidx) else null
          val aryLen = if (isAry) ary.length else -1
          val aryIdx = idx - startOfSeq // shared index to position in array/vector
          val vec = if (isVec) subRow.getAs[mllib.linalg.Vector](aidx) else null
          if (isAry && aryIdx >= aryLen) chk.addNA()
          else if (isVec && aryIdx >= vec.size) chk.addNum(0.0) // Add zeros for vectors
          else dataType match {
            case BooleanType => chk.addNum(if (isAry)
              if (ary(aryIdx).asInstanceOf[Boolean]) 1 else 0
              else if (subRow.getBoolean(aidx)) 1 else 0)
            // NOTE(review): BinaryType case is intentionally(?) empty — binary columns
            // are silently dropped for this cell.
            case BinaryType =>
            case ByteType => chk.addNum(if (isAry) ary(aryIdx).asInstanceOf[Byte] else subRow.getByte(aidx))
            case ShortType => chk.addNum(if (isAry) ary(aryIdx).asInstanceOf[Short] else subRow.getShort(aidx))
            case IntegerType => chk.addNum(if (isAry) ary(aryIdx).asInstanceOf[Int] else subRow.getInt(aidx))
            case LongType => chk.addNum(if (isAry) ary(aryIdx).asInstanceOf[Long] else subRow.getLong(aidx))
            case FloatType => chk.addNum(if (isAry) ary(aryIdx).asInstanceOf[Float] else subRow.getFloat(aidx))
            case DoubleType => chk.addNum(if (isAry) {
              ary(aryIdx).asInstanceOf[Double]
            } else {
              if (isVec) {
                subRow.getAs[mllib.linalg.Vector](aidx)(idx - startOfSeq)
              } else {
                subRow.getDouble(aidx)
              }
            })
            case StringType => {
              val sv = if (isAry) ary(aryIdx).asInstanceOf[String] else subRow.getString(aidx)
              // Always produce string vectors
              chk.addStr(valStr.setTo(sv))
            }
            case TimestampType => chk.addNum(row.getAs[java.sql.Timestamp](aidx).getTime())
            case _ => chk.addNA()
          }
        }
      }
    })
    // Compress & write out the Partition/Chunks
    water.fvec.FrameUtils.closeNewChunks(nchks)
    // Return Partition# and rows in this Partition
    (context.partitionId,nchks(0)._len)
  }

  // Executor-side task: writes one partition of a Product RDD (case classes/tuples)
  // into H2O chunks, one chunk per product field; Options are unwrapped first.
  private def perRDDPartition[A<:Product](keystr:String, vecTypes: Array[Byte])
                                         ( context: TaskContext, it: Iterator[A] ): (Int,Long) = {
    // An array of H2O NewChunks; A place to record all the data in this partition
    val nchks = water.fvec.FrameUtils.createNewChunks(keystr, vecTypes, context.partitionId)
    val valStr = new BufferedString()
    it.foreach(prod => { // For all rows which are subtype of Product
      for( i <- 0 until prod.productArity ) { // For all fields...
        val fld = prod.productElement(i)
        val chk = nchks(i)
        val x = fld match {
          case Some(n) => n
          case _ => fld
        }
        x match {
          case n: Number => chk.addNum(n.doubleValue())
          case n: Boolean => chk.addNum(if (n) 1 else 0)
          case n: String => chk.addStr(valStr.setTo(n))
          case _ => chk.addNA()
        }
      }
    })
    // Compress & write out the Partition/Chunks
    water.fvec.FrameUtils.closeNewChunks(nchks)
    // Return Partition# and rows in this Partition
    (context.partitionId,nchks(0)._len)
  }

  // Creates the empty frame header in the DKV; chunks are attached later by the
  // per-partition writers and the frame completed by finalizeFrame.
  private def initFrame[T](keyName: String, names: Array[String]):Unit = {
    val fr = new water.fvec.Frame(Key.make(keyName))
    water.fvec.FrameUtils.preparePartialFrame(fr, names)
    // Save it directly to DKV
    fr.update(null)
  }

  // Completes a partially-built frame: `res` holds per-chunk row counts.
  private def finalizeFrame[T](keyName: String,
                               res: Array[Long],
                               colTypes: Array[Byte],
                               colDomains: Array[Array[String]] = null):Frame = {
    val fr:Frame = DKV.get(keyName).get.asInstanceOf[Frame]
    water.fvec.FrameUtils.finalizePartialFrame(fr, res, colDomains, colTypes)
    fr
  }

  private def checkAndUpdateSparkEnv(conf: SparkConf): Unit = {
    // If 'spark.executor.instances' is
specified update H2O property as well conf.getOption("spark.executor.instances").foreach(v => conf.set("spark.ext.h2o.cluster.size", v)) // Increase locality timeout since h2o-specific tasks can be long computing if (conf.getInt("spark.locality.wait",3000) <= 3000) { logWarning(s"Increasing 'spark.locality.wait' to value 30000") conf.set("spark.locality.wait", "30000") } } private[h2o] def registerClientWebAPI(sc: SparkContext, h2OContext: H2OContext): Unit = { registerScalaIntEndp(sc, h2OContext) registerDataFramesEndp(sc, h2OContext) registerH2OFramesEndp(sc, h2OContext) registerRDDsEndp(sc) } private def registerH2OFramesEndp(sc: SparkContext, h2OContext: H2OContext) = { val h2OFramesHandler = new H2OFramesHandler(sc, h2OContext) def h2OFramesFactory = new HandlerFactory { override def create(handler: Class[_ <: Handler]): Handler = h2OFramesHandler } RequestServer.register("/3/h2oframes/(?<h2oframe_id>.*)/dataframe", "POST", classOf[H2OFramesHandler], "toDataFrame", null, "Transform H2OFrame with given id to DataFrame", h2OFramesFactory); } private def registerRDDsEndp(sc: SparkContext) = { val rddsHandler = new RDDsHandler(sc) def rddsFactory = new HandlerFactory { override def create(aClass: Class[_ <: Handler]): Handler = rddsHandler } RequestServer.register("/3/RDDs", "GET", classOf[RDDsHandler], "list", null, "Return all Frames in the H2O distributed K/V store.", rddsFactory) RequestServer.register("/3/RDDs/(?<searched_rdd_id>[0-9]+)", "POST", classOf[RDDsHandler], "getRDD", null, "Get frame in the H2O distributed K/V store with the given ID", rddsFactory) } private def registerDataFramesEndp(sc: SparkContext, h2OContext: H2OContext) = { val dataFramesHandler = new DataFramesHandler(sc, h2OContext) def dataFramesfactory = new HandlerFactory { override def create(aClass: Class[_ <: Handler]): Handler = dataFramesHandler } RequestServer.register("/3/dataframes", "GET", classOf[DataFramesHandler], "list", null, "Return all DataFrames.", dataFramesfactory) 
RequestServer.register("/3/dataframes/(?<searched_dataframe_id>[0-9a-zA-Z_]+)", "POST", classOf[DataFramesHandler], "getDataFrame", null, "Get DataFrame with the given id", dataFramesfactory) RequestServer.register("/3/dataframes/(?<dataframe_id>[0-9a-zA-Z_]+)/h2oframe", "POST", classOf[DataFramesHandler], "toH2OFrame", null, "Transform DataFrame with the given id to H2OFrame", dataFramesfactory) } private def registerScalaIntEndp(sc: SparkContext, h2OContext: H2OContext) = { val scalaCodeHandler = new ScalaCodeHandler(sc, h2OContext) def scalaCodeFactory = new HandlerFactory { override def create(aClass: Class[_ <: Handler]): Handler = scalaCodeHandler } RequestServer.register("/3/scalaint/(?<session_id>[0-9]+)", "POST", classOf[ScalaCodeHandler], "interpret", null, "Interpret the code and return the result", scalaCodeFactory) RequestServer.register("/3/scalaint", "POST", classOf[ScalaCodeHandler], "initSession", null, "Return session id for communication with scala interpreter", scalaCodeFactory) RequestServer.register("/3/scalaint", "GET", classOf[ScalaCodeHandler], "getSessions", null, "Return all active session IDs", scalaCodeFactory) RequestServer.register("/3/scalaint/(?<session_id>[0-9]+)", "DELETE", classOf[ScalaCodeHandler], "destroySession", null, "Return session id for communication with scala interpreter", scalaCodeFactory) } }
nilbody/sparkling-water
core/src/main/scala/org/apache/spark/h2o/H2OContext.scala
Scala
apache-2.0
34,373
package org.birdfeed.chirp.actions

import org.birdfeed.chirp.database.models.ApiKey
import org.birdfeed.chirp.errors.JsonError
import play.api.mvc._

import scala.concurrent.Future

/**
 * Action wrapper that rejects requests lacking a valid `Chirp-Api-Key` header
 * before delegating to the wrapped action.
 */
case class ActionWithValidApiKey[A] (action: Action[A]) extends Action[A] with JsonError {

  def apply(request: Request[A]): Future[Result] = {
    request.headers.get("Chirp-Api-Key") match {
      case None =>
        // No key header at all: reject without a body.
        Future.successful(Results.BadRequest)
      case Some(providedKey) =>
        // NOTE(review): ApiKey.findBy looks like a synchronous DB lookup — confirm
        // whether it should run on a dedicated execution context.
        val keyIsKnown = ApiKey.findBy("key", providedKey).nonEmpty
        if (keyIsKnown) action(request)
        else Future.successful(Results.Unauthorized(
          jsonError("You provided an invalid API key.")
        ))
    }
  }

  // Reuse the wrapped action's body parser unchanged.
  lazy val parser = action.parser
}
birdfeed/chirp
app/org/birdfeed/chirp/actions/ActionWithValidApiKey.scala
Scala
mit
725
/*
 * Copyright (c) 2009 Sony Pictures Imageworks Inc.
 *
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the
 * distribution. Neither the name of Sony Pictures Imageworks nor the
 * names of its contributors may be used to endorse or promote
 * products derived from this software without specific prior written
 * permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package com.imageworks.migration.tests

import com.imageworks.migration.With
import org.jmock.{Expectations, Mockery}
import org.junit.Assert._
import org.junit.{Before, Test}

import java.sql.ResultSet

/**
 * Verifies that `With.resultSet` always closes the underlying result set,
 * whether the callback returns normally or throws.
 */
class WithTests {
  private val context = new Mockery

  @Test
  def with_result_set_closes_on_normal_return: Unit = {
    val mockedResultSet = context.mock(classOf[ResultSet])

    // Expect exactly one close() call on the result set.
    context.checking(new Expectations {
      oneOf (mockedResultSet).close()
    })

    var observed: ResultSet = null
    val result = With.resultSet(mockedResultSet) { passed =>
      observed = passed
      "foobar"
    }

    // The callback received the same result set, and its return value
    // is propagated unchanged.
    assertSame(mockedResultSet, observed)
    assertEquals("foobar", result)
    context.assertIsSatisfied()
  }

  @Test
  def with_result_set_closes_on_throw: Unit = {
    val mockedResultSet = context.mock(classOf[ResultSet])

    // close() must still happen exactly once when the callback throws.
    context.checking(new Expectations {
      oneOf (mockedResultSet).close()
    })

    class ThisSpecialException extends java.lang.Throwable

    var observed: ResultSet = null
    try {
      With.resultSet(mockedResultSet) { passed =>
        observed = passed
        throw new ThisSpecialException
      }
    }
    catch {
      // The exception itself propagates; only this marker type is expected.
      case _: ThisSpecialException =>
    }

    assertSame(mockedResultSet, observed)
    context.assertIsSatisfied()
  }
}
azinman/scala-migrations
src/test/scala/com/imageworks/migration/tests/WithTests.scala
Scala
bsd-3-clause
2,938
package ch18

object ex03 {

  /** Marker selecting the title field in the fluent `set ... to ...` DSL. */
  object Title

  /** Marker selecting the author field in the fluent `set ... to ...` DSL. */
  object Author

  /**
   * A tiny fluent-interface example: `doc set Title to "..."` remembers which
   * field the next `to(...)` call should assign.
   */
  class Document {
    // Field targeted by the next `to(...)` call; None until `set` is invoked.
    // (Was `Any = null` — Option makes the "nothing selected yet" state explicit.)
    private var useNextArgsAs: Option[Any] = None

    var title = ""
    var author = ""

    /** Selects the title field for the next `to(...)`; returns `this` for chaining. */
    def set(obj: Title.type): this.type = { useNextArgsAs = Some(obj); this }

    /** Selects the author field for the next `to(...)`; returns `this` for chaining. */
    def set(obj: Author.type): this.type = { useNextArgsAs = Some(obj); this }

    /** Assigns `arg` to whichever field the last `set` selected (no-op if none). */
    def to(arg: String): this.type = {
      useNextArgsAs match {
        case Some(Title)  => title = arg
        case Some(Author) => author = arg
        case _            => // nothing selected yet: silently ignore, as before
      }
      this
    }

    override def toString() = "Document(author=%s, title=%s)".format(author, title)
  }

  def main(args: Array[String]): Unit = {
    val book1 = new Document
    book1 set Title to "BookTitle"
    println(book1)
    book1 set Author to "BookAuthor"
    println(book1)

    val book = new Document
    book set Title to "Scala for the Impatient" set Author to "Cay Horstmann"
    println(book)
  }
}
tuxdna/scala-for-the-impatient-exercises
src/main/scala/ch18/ex03.scala
Scala
apache-2.0
877
package leo.modules.parsers

import leo.datastructures.tptp._
import syntactical.TPTPParsers._
import scala.util.parsing.input.Reader

/**
 * Parsing interface for TPTP problem files and individual TPTP formulae.
 * The grammar follows the TPTP syntax BNF at
 * [[http://www.cs.miami.edu/~tptp/TPTP/SyntaxBNF.html]].
 *
 * Every method returns `Right(result)` on success and `Left(errorMessage)`
 * on failure.
 *
 * @author Alexander Steen
 * @since 23.03.2014
 * @note Updated last on 22.04.2014
 */
object TPTP {

  /**
   * Parses a complete TPTP file into a
   * [[leo.datastructures.tptp.Commons.TPTPInput]].
   *
   * @param input a [[scala.util.parsing.input.Reader]] over the TPTP input
   */
  def parseFile(input: Reader[Char]) = extract(parse(input, tptpFile))

  /**
   * Convenience overload of `parseFile` taking the whole input as a String.
   */
  def parseFile(input: String) = extract(parse(input, tptpFile))

  /** Parses a single annotated formula of any dialect. */
  def parseFormula(input: String) = extract(parse(input, annotatedFormula))

  /** Parses a single annotated FOF formula. */
  def parseFOF(input: String) = extract(parse(input, fofAnnotated))

  /** Parses a single annotated THF formula. */
  def parseTHF(input: String) = extract(parse(input, thfAnnotated))

  /** Parses a single annotated TFF formula. */
  def parseTFF(input: String) = extract(parse(input, tffAnnotated))

  /** Parses a single annotated CNF formula. */
  def parseCNF(input: String) = extract(parse(input, cnfAnnotated))

  /** Parses a single annotated TPI formula. */
  def parseTPI(input: String) = extract(parse(input, tpiAnnotated))
  //  def parseTFA(input: String) = parser.exec(input, tfaFormula)

  // Collapse the parser-combinator result into a plain Either for callers:
  // Right carries the parsed value, Left the parser's error message.
  private def extract[T](res: ParseResult[T]): Either[String, T] = res match {
    case Success(value, _)  => Right(value)
    case failure: NoSuccess => Left(failure.msg)
  }
}
Ryugoron/Leonora
src/main/scala/leo/modules/parsers/TPTP.scala
Scala
mit
2,101
package pl.edu.pw.ii.zsibio.dwh.benchmark.utils

/**
 * Weighted-choice helpers for collections of (weight, value) pairs.
 *
 * @author dawid
 */
object Probability {

  /**
   * Enriches an `Iterable[(weight, value)]` with a cumulative probability
   * distribution and a weighted random draw.
   */
  implicit class ProbabilityChoose[B, R](seq: Iterable[(B, R)])(implicit num: Numeric[B]) {

    // Cumulative distribution, computed once and cached.
    lazy val distr = doCountDistribution()

    /**
     * Returns pairs of (cumulative probability, value); cumulative
     * probabilities are increasing and the last one equals 1.0
     * (up to floating-point rounding).
     */
    def countDistribution(): Iterable[(Double, R)] = distr

    private def doCountDistribution(): Iterable[(Double, R)] = {
      val sum = seq.map(x => num.toDouble(x._1)).sum
      // Running total of weights *before* each element (scanLeft prepends 0.0;
      // zip with seq drops the trailing grand total).
      val sumOfPrevious = seq.scanLeft(0.0)((p, c) => p + num.toDouble(c._1))
      seq
        .zip(sumOfPrevious)
        .map(x => ((num.toDouble(x._1._1) + x._2) / sum, x._1._2))
    }

    /**
     * Draws one value at random with probability proportional to its weight.
     *
     * Fixes two issues in the original: it reuses the cached `distr` instead
     * of rebuilding the distribution on every call (the old code re-entered
     * the implicit wrapper via `seq.countDistribution()`), and it no longer
     * calls `.get` — if floating-point rounding leaves the final cumulative
     * probability marginally below the drawn number, the last value is used.
     */
    def selectWithProbability(): R = {
      val rand = math.random
      distr.find(rand <= _._1).map(_._2).getOrElse(distr.last._2)
    }
  }
}
ZSI-Bio/variantsdwh
genomic-dwh-benchmark/src/main/scala/pl/edu/pw/ii/zsibio/dwh/benchmark/utils/Probability.scala
Scala
apache-2.0
774
package models.dao.anorm

import anorm.SqlParser._
import anorm._
import javax.inject.{ Inject, Singleton }
import models.dao.{ FeedStats, FeedStatsDAO }
import play.api.db.Database

/** Anorm-backed implementation of [[FeedStatsDAO]] over the feed_stats table. */
@Singleton
class AnormFeedStatsDAO @Inject() (db: Database) extends FeedStatsDAO {

  /** Row parser mapping a feed_stats row to a [[FeedStats]] value. */
  val feedStats: RowParser[FeedStats] = {
    (str("origin") ~ int("download_count")).map {
      case origin ~ downloadCount => FeedStats(origin, downloadCount)
    }
  }

  /**
   * Records one download for `origin`, creating the row on first use.
   *
   * The previous implementation read the current count and wrote back
   * `count + 1`, which both loses increments under concurrent calls and
   * needs three round trips. The increment is now performed atomically by
   * the database itself.
   */
  @SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements"))
  def incrementDownloadCount(origin: String): Unit = db.withConnection { implicit c =>
    val updatedRows = SQL("UPDATE feed_stats SET download_count = download_count + 1 WHERE origin = {origin}")
      .on(Symbol("origin") -> origin)
      .executeUpdate()

    if (updatedRows == 0) {
      // First download for this origin: create the row with count 1.
      // NOTE(review): two concurrent first downloads can still race here —
      // assumes a unique constraint on origin; confirm against the schema.
      SQL("INSERT INTO feed_stats(origin, download_count) VALUES({origin}, 1)")
        .on(Symbol("origin") -> origin)
        .executeUpdate()
    }
  }
}
jcranky/lojinha
app/models/dao/anorm/AnormFeedStatsDAO.scala
Scala
gpl-3.0
1,166
/*
 * Copyright 2021 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package uk.gov.hmrc.ct.accounts.frs102.boxes

import uk.gov.hmrc.ct.accounts.frs102.retriever.Frs102AccountsBoxRetriever
import uk.gov.hmrc.ct.box._

/**
 * Accounts box AC153: "Creditors: Other creditors (previous PoA)".
 * Optional integer input; the only validation rule is that a supplied
 * amount must not be negative.
 */
case class AC153(value: Option[Int]) extends CtBoxIdentifier(name = "Creditors: Other creditors (previous PoA)")
  with CtOptionalInteger
  with Input
  with ValidatableBox[Frs102AccountsBoxRetriever]
  with Validators {

  override def validate(boxRetriever: Frs102AccountsBoxRetriever): Set[CtValidation] =
    collectErrors(validateMoney(value, min = 0))
}
hmrc/ct-calculations
src/main/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC153.scala
Scala
apache-2.0
1,133
package org.pigsaw.ccpm

/* Copyright Nik Silver 2015.
 *
 * This file is part of CCPM.
 *
 * CCPM is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * CCPM is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with CCPM.  If not, see <http://www.gnu.org/licenses/>.
 */

import org.scalatest.matchers.MatchResult
import org.scalatest.matchers.Matcher

/**
 * Scalatest `Matchers` that are useful for the `Schedule` class.
 *
 * The schedule under test is supplied implicitly via [[MatchingSchedule]],
 * allowing assertions such as:
 *
 * {{{
 *   implicit val iSched = MatchingSchedule(sch)
 *   t1 should endRightBefore (t2)
 * }}}
 *
 * See http://www.scalatest.org/user_guide/using_matchers#usingCustomMatchers
 */
trait ScheduleMatchers {

  /** Wrapper carrying the schedule that the custom matchers evaluate against. */
  case class MatchingSchedule(sch: Schedule)

  /** Matches when the checked task ends exactly where `tLater` starts. */
  class TaskEndRightBefore(tLater: Task, sch: Schedule) extends Matcher[Task] {
    def apply(tEarlier: Task) = {
      val earlierEnd = sch.end(tEarlier)
      val earlierStart = sch.start(tEarlier)
      val laterStart = sch.start(tLater)
      MatchResult(
        earlierEnd == laterStart,
        s"$tEarlier with start $earlierStart did not come right before $tLater with start $laterStart",
        s"$tEarlier with start $earlierStart came right before $tLater with start $laterStart")
    }
  }

  /** Syntax: `tLater should endRightBefore (tEarlier)` against the implicit schedule. */
  def endRightBefore(tEarlier: Task)(implicit iSched: MatchingSchedule) =
    new TaskEndRightBefore(tEarlier, iSched.sch)

  /** Matches when the checked task ends at or before the point `tLater` starts. */
  class TaskEndSomeTimeBefore(tLater: Task, sch: Schedule) extends Matcher[Task] {
    def apply(tEarlier: Task) = {
      val earlierEnd = sch.end(tEarlier)
      val earlierStart = sch.start(tEarlier)
      val laterStart = sch.start(tLater)
      MatchResult(
        earlierEnd <= laterStart,
        s"$tEarlier with start $earlierStart did not half-end some time before $tLater with start $laterStart",
        s"$tEarlier with start $earlierStart half-ended some time before $tLater with start $laterStart")
    }
  }

  /** Syntax: `tLater should endSomeTimeBefore (tEarlier)` against the implicit schedule. */
  def endSomeTimeBefore(tEarlier: Task)(implicit iSched: MatchingSchedule) =
    new TaskEndSomeTimeBefore(tEarlier, iSched.sch)
}
niksilver/ccpm
src/test/scala/org/pigsaw/ccpm/ScheduleMatchers.scala
Scala
gpl-3.0
2,644
package jsm4s.ds

import scala.collection.Iterable

/**
 * Created by olshanskiy on 7/13/17.
 */
/**
 * Factory for FcaSet extents over a fixed universe of objects.
 */
trait ExtentFactory {
  // Size of the object universe — presumably valid indices are [0, objects); TODO confirm
  val objects: Int

  /** An extent containing no objects. */
  def empty: FcaSet

  /** An extent containing all objects of the universe. */
  def full: FcaSet

  /** An extent built from the given object indices. */
  def values(x: Iterable[Int]): FcaSet
}
DmitryOlshansky/jsm4s
src/main/scala/jsm4s/ds/ExtentFactory.scala
Scala
gpl-2.0
223
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._

import play.api.test._
import play.api.test.Helpers._

/**
 * Add your spec here.
 * You can mock out a whole application including requests, plugins etc.
 * For more information, consult the wiki.
 */
@RunWith(classOf[JUnitRunner])
class ApplicationSpec extends Specification {

  "Application" should {

    "send 404 on a bad request" in new WithApplication{
      // An unmapped path must not resolve to any route handler.
      route(FakeRequest(GET, "/boum")) must beNone
    }

    "redirect to tasks page on a request to home" in new WithApplication{
      val home = route(FakeRequest(GET, "/")).get

      // 303 See Other: the root path is a pure redirect to the task list.
      status(home) must equalTo(303)
      redirectLocation(home) must beSome.which(_ == "/tasks")
    }
  }
}
takatama/play-scala-todolist
test/ApplicationSpec.scala
Scala
mit
738
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ly.stealth.mesos.exhibitor

import java.io._
import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}

import org.apache.log4j.Logger
import org.eclipse.jetty.server.{Server, ServerConnector}
import org.eclipse.jetty.servlet.{ServletContextHandler, ServletHolder}
import org.eclipse.jetty.util.thread.QueuedThreadPool
import play.api.libs.json.Json

import scala.collection.JavaConversions._
import scala.util.{Failure, Success, Try}

/**
 * Embedded Jetty HTTP server for the Exhibitor Mesos framework. It serves
 * framework artifacts (jar, exhibitor, zookeeper distributions) for download
 * by executors, and exposes a small JSON REST API under /api for managing
 * Exhibitor servers via the Scheduler.
 *
 * NOTE(review): the mutable `server`/`jar`/... vars mean start/stop/resolveDeps
 * are not thread-safe — presumably only called from a single control thread;
 * confirm before reusing elsewhere.
 */
object HttpServer {
  private val logger = Logger.getLogger(HttpServer.getClass)
  // The running Jetty instance; null when the server is stopped.
  private var server: Server = null

  // File-name patterns used by resolveDeps() to locate artifacts in the CWD.
  val jarMask = "mesos-exhibitor.*\\\\.jar"
  val exhibitorMask = "exhibitor.*\\\\.jar"
  val zookeeperMask = "zookeeper.*"

  // Artifacts found by resolveDeps(); null until resolved.
  private[exhibitor] var jar: File = null
  private[exhibitor] var exhibitorDist: File = null
  private[exhibitor] var zookeeperDist: File = null

  /**
   * Starts the embedded Jetty server on Config.httpServerPort.
   * Throws IllegalStateException if already started.
   *
   * @param resolveDeps when true, first locate artifact files in the current dir
   */
  def start(resolveDeps: Boolean = true) {
    if (server != null) throw new IllegalStateException("HttpServer already started")
    if (resolveDeps) this.resolveDeps()

    val threadPool = new QueuedThreadPool(16)
    threadPool.setName("Jetty")

    server = new Server(threadPool)
    val connector = new ServerConnector(server)
    connector.setPort(Config.httpServerPort)
    // Drop idle connections after one minute.
    connector.setIdleTimeout(60 * 1000)

    // All requests are routed through the single Servlet below.
    val handler = new ServletContextHandler
    handler.addServlet(new ServletHolder(new Servlet()), "/")

    server.setHandler(handler)
    server.addConnector(connector)
    server.start()

    logger.info("started on port " + connector.getPort)
  }

  /**
   * Stops the server and waits for it to terminate.
   * Throws IllegalStateException if not started.
   */
  def stop() {
    if (server == null) throw new IllegalStateException("HttpServer not started")

    server.stop()
    server.join()
    server = null

    logger.info("HttpServer stopped")
  }

  // Scans the current working directory for the framework jar and the
  // exhibitor/zookeeper distributions; fails fast if any is missing.
  private def resolveDeps() {
    for (file <- new File(".").listFiles()) {
      if (file.getName.matches(jarMask)) jar = file
      if (file.getName.matches(exhibitorMask)) exhibitorDist = file
      if (file.getName.matches(zookeeperMask) && !file.isDirectory) zookeeperDist = file
    }

    if (jar == null) throw new IllegalStateException(jarMask + " not found in current dir")
    if (exhibitorDist == null) throw new IllegalStateException(exhibitorMask + " not found in in current dir")
    if (zookeeperDist == null) throw new IllegalStateException(zookeeperMask + " not found in in current dir")
  }

  /**
   * Single servlet handling both artifact downloads and the /api endpoints.
   * Note: only GET is implemented, so all /api "POST-style" actions are in
   * fact dispatched from doGet.
   */
  class Servlet extends HttpServlet {
    override def doGet(request: HttpServletRequest, response: HttpServletResponse) {
      Try(handle(request, response)) match {
        case Success(_) =>
        case Failure(e) =>
          // Log, report a 500 to the client, then rethrow to the container.
          logger.warn("", e)
          response.sendError(500, "" + e)
          throw e
      }
    }

    // Top-level URI dispatch: artifact downloads first, then the JSON API.
    def handle(request: HttpServletRequest, response: HttpServletResponse) {
      val uri = request.getRequestURI
      if (uri.startsWith("/jar/")) downloadFile(HttpServer.jar, response)
      else if (uri.startsWith("/exhibitor/")) downloadFile(HttpServer.exhibitorDist, response)
      else if (uri.startsWith("/zookeeper/")) downloadFile(HttpServer.zookeeperDist, response)
      // s3credentials serves an arbitrary file named by the last path segment
      // from the current directory.
      else if (uri.startsWith("/s3credentials/")) downloadFile(new File(uri.split("/").last), response)
      else if (uri.startsWith("/api")) handleApi(request, response)
      else response.sendError(404)
    }

    // Streams the given file to the client as an attachment.
    def downloadFile(file: File, response: HttpServletResponse) {
      response.setContentType("application/zip")
      response.setHeader("Content-Length", "" + file.length())
      response.setHeader("Content-Disposition", "attachment; filename=\\"" + file.getName + "\\"")
      Util.copyAndClose(new FileInputStream(file), response.getOutputStream)
    }

    // Dispatches /api/<action> to the matching handler; 404 for unknown actions.
    def handleApi(request: HttpServletRequest, response: HttpServletResponse) {
      response.setContentType("application/json; charset=utf-8")
      var uri: String = request.getRequestURI.substring("/api".length)
      if (uri.startsWith("/")) uri = uri.substring(1)

      if (uri == "add") handleAddServer(request, response)
      else if (uri == "start") handleStartServer(request, response)
      else if (uri == "stop") handleStopServer(request, response)
      else if (uri == "remove") handleRemoveServer(request, response)
      else if (uri == "status") handleClusterStatus(request, response)
      else if (uri == "config") handleConfigureServer(request, response)
      else response.sendError(404)
    }

    // Creates an ExhibitorServer from the request parameters and registers it
    // with the scheduler's cluster. Responds with the new server as JSON.
    // NOTE(review): cpu/mem/backoff use .toDouble/.toLong on raw parameters —
    // a malformed value surfaces as the doGet 500 path.
    private def handleAddServer(request: HttpServletRequest, response: HttpServletResponse) {
      val id = request.getParameter("id")
      val cpus = Option(request.getParameter("cpu"))
      val mem = Option(request.getParameter("mem"))
      val constraints = Option(request.getParameter("constraints"))
      val backoff = Option(request.getParameter("configchangebackoff"))

      val server = ExhibitorServer(id)
      cpus.foreach(cpus => server.config.cpus = cpus.toDouble)
      mem.foreach(mem => server.config.mem = mem.toDouble)
      // Default placement constraint: one server per host.
      server.constraints ++= Constraint.parse(constraints.getOrElse("hostname=unique"))
      backoff.foreach(backoff => server.config.sharedConfigChangeBackoff = backoff.toLong)
      Scheduler.cluster.servers += server
      logger.info(s"Added server to cluster: $server")

      response.getWriter.println(Json.toJson(server))
    }

    // Moves an Added server to Stopped state (which presumably makes the
    // scheduler eligible to launch it — confirm against Scheduler logic).
    private def handleStartServer(request: HttpServletRequest, response: HttpServletResponse) {
      val id = request.getParameter("id")
      Scheduler.cluster.getServer(id) match {
        case Some(s) =>
          if (s.state == ExhibitorServer.Added) {
            s.state = ExhibitorServer.Stopped
            logger.info(s"Starting server $id")
          } else logger.warn(s"Server $id already started")
          response.getWriter.println(Json.toJson(s))
        case None =>
          logger.warn(s"Received start server for unknown server id: $id")
          handleUnknownServer(id, response)
      }
    }

    // Asks the scheduler to stop the server; echoes it back as JSON.
    private def handleStopServer(request: HttpServletRequest, response: HttpServletResponse) {
      val id = request.getParameter("id")
      Scheduler.stopServer(id) match {
        case Some(s) => response.getWriter.println(Json.toJson(s))
        case None =>
          logger.warn(s"Received stop server for unknown server id: $id")
          handleUnknownServer(id, response)
      }
    }

    // Removes the server from the cluster; echoes the removed server as JSON.
    private def handleRemoveServer(request: HttpServletRequest, response: HttpServletResponse) {
      val id = request.getParameter("id")
      Scheduler.removeServer(id) match {
        case Some(s) =>
          logger.info("Cluster after removal: " + Scheduler.cluster.servers)
          response.getWriter.println(Json.toJson(s))
        case None =>
          logger.warn(s"Received remove server for unknown server id: $id")
          handleUnknownServer(id, response)
      }
    }

    // Dumps the whole cluster as a JSON list.
    private def handleClusterStatus(request: HttpServletRequest, response: HttpServletResponse) {
      response.getWriter.println(Json.toJson(Scheduler.cluster.servers.toList))
    }

    // Recognized configuration keys: per-Exhibitor settings vs. settings that
    // override the shared configuration.
    private val exhibitorConfigs = Set("configtype", "zkconfigconnect", "zkconfigzpath", "s3credentials", "s3region", "s3config", "s3configprefix")
    private val sharedConfigs = Set("zookeeper-install-directory", "zookeeper-data-directory")

    // Applies recognized single-valued parameters to the server's config maps;
    // anything else is logged at debug level and ignored.
    private def handleConfigureServer(request: HttpServletRequest, response: HttpServletResponse) {
      val id = request.getParameter("id")
      Scheduler.cluster.getServer(id) match {
        case Some(s) =>
          logger.info(s"Received configurations for server $id: ${request.getParameterMap.toMap.map(entry => entry._1 -> entry._2.head)}")
          request.getParameterMap.toMap.foreach {
            case (key, Array(value)) if exhibitorConfigs.contains(key) => s.config.exhibitorConfig += key -> value
            case (key, Array(value)) if sharedConfigs.contains(key) => s.config.sharedConfigOverride += key -> value
            case other => logger.debug(s"Got invalid configuration value: $other")
          }
          response.getWriter.println(Json.toJson(s))
        case None =>
          logger.warn(s"Received configure server for unknown server id: $id")
          handleUnknownServer(id, response)
      }
    }
  }

  // Responds with a placeholder server in Unknown state so the client always
  // receives a JSON body, even for ids the scheduler has never seen.
  private def handleUnknownServer(id: String, response: HttpServletResponse) {
    val unknownServer = ExhibitorServer(id)
    unknownServer.state = ExhibitorServer.Unknown

    response.getWriter.println(Json.toJson(unknownServer))
  }
}
samklr/exhibitor-mesos-framework
src/main/scala/ly/stealth/mesos/exhibitor/HttpServer.scala
Scala
apache-2.0
9,156
package com.twitter.finagle.mux

import com.twitter.finagle.{Mux, Service, ServiceFactory, Stack, mux, param => fparam}
import com.twitter.finagle.Mux.Server.SessionF
import com.twitter.finagle.mux.pushsession._
import com.twitter.finagle.pushsession.RefPushSession
import com.twitter.io.{Buf, ByteReader}

// Implementation of the standard mux server that doesn't attempt to negotiate.
// Only useful for testing Smux to ensure that failing to negotiate doesn't circumvent TLS.
private object NonNegotiatingServer {

  // Session factory that skips handshaking entirely: it installs a
  // MuxServerSession into `ref` directly instead of a negotiating session.
  // (The previous version computed a `statsReceiver` local from the params
  // that was never used; it has been removed.)
  private val NonNegotiatingSessionFactory: SessionF = (
    ref: RefPushSession[ByteReader, Buf],
    params: Stack.Params,
    sharedStats: SharedNegotiationStats,
    handle: MuxChannelHandle,
    service: Service[Request, Response]
  ) => {
    val session = new MuxServerSession(
      params,
      new FragmentDecoder(sharedStats),
      // Int.MaxValue: no fragment size was negotiated, so never fragment.
      new FragmentingMessageWriter(handle, Int.MaxValue, sharedStats),
      handle,
      service
    )
    ref.updateRef(session)
    ref
  }

  /**
   * Builds a `Mux.Server` wired with the non-negotiating session factory.
   * Stack and params default to the standard mux server's.
   */
  def apply(
    stack: Stack[ServiceFactory[mux.Request, mux.Response]] = Mux.server.stack,
    params: Stack.Params = Mux.server.params
  ): Mux.Server =
    Mux.Server(
      stack = stack,
      params = params,
      sessionFactory = NonNegotiatingSessionFactory
    )
}
twitter/finagle
finagle-mux/src/test/scala/com/twitter/finagle/mux/NonNegotiatingServer.scala
Scala
apache-2.0
1,345
package at.forsyte.apalache.tla.bmcmt.search import java.io.{FileWriter, PrintWriter, Writer} import java.time.{Duration, LocalDateTime} import at.forsyte.apalache.tla.bmcmt.search.SearchStrategy.{Finish, FinishOnDeadlock, NextStep} /** * A decorator of a search strategy that measures the wall-clock time and records it in a CSV file. * This decorator is useful for plotting the time it takes BfsStrategy to explore the computations up to given length. * * The CSV contains the following fields: the step number, the total number of seconds elapsed since the start * till the end of the step, the additional nanoseconds till the end of the step. That is, if step 0 starts at time * instant A and step i ends at time instant B, then the first field contains i, * the second field contains seconds(B - A) and the third field contains nanoseconds(B - A) - seconds(B - A) * pow(10, 9). * * @author Igor Konnov */ class BfsStrategyStopWatchDecorator(strategy: SearchStrategy, filename: String) extends SearchStrategy { private var currentStep: Int = 0 private var printWriter: Option[PrintWriter] = None private var startTime: LocalDateTime = LocalDateTime.now() override def getCommand: SearchStrategy.Command = { val command = strategy.getCommand command match { case NextStep(stepNo, _, _) => if (stepNo == 0) { currentStep = 0 // create a log file and add a header printWriter = Some(new PrintWriter(new FileWriter(filename, false))) printWriter.get.println("step,total_sec,nanosec_adjustment") // start the timer startTime = LocalDateTime.now() } else { appendCsvEntry() currentStep = stepNo } case Finish() | FinishOnDeadlock() => appendCsvEntry() printWriter.get.close() } command } private def appendCsvEntry(): Unit = { val currentTime = LocalDateTime.now() val duration = Duration.between(startTime, currentTime) printWriter.get.println("%d,%d,%d".format(currentStep, duration.getSeconds, duration.getNano)) printWriter.get.flush() // get the results as soon as possible } override def 
registerResponse(response: SearchStrategy.Response): Unit = { strategy.registerResponse(response) } }
konnov/apalache
tla-bmcmt/src/main/scala/at/forsyte/apalache/tla/bmcmt/search/BfsStrategyStopWatchDecorator.scala
Scala
apache-2.0
2,307
package com.karasiq.shadowcloud.compression import java.io.InputStream import akka.NotUsed import akka.stream._ import akka.stream.scaladsl.{Flow, GraphDSL, StreamConverters} import akka.stream.stage.{GraphStage, GraphStageLogic, InHandler, OutHandler} import akka.util.ByteString import com.karasiq.shadowcloud.utils.ByteStringOutputStream package object lz4 { // TODO: LZ4FrameInputStream, customize compression level private type LZ4InputStream = net.jpountz.lz4.LZ4BlockInputStream private type LZ4OutputStream = net.jpountz.lz4.LZ4BlockOutputStream object LZ4Streams { def compress: Flow[ByteString, ByteString, NotUsed] = { Flow.fromGraph(new LZ4Compress()).named("lz4Compress") } // TODO: Non-blocking decompress def decompress: Flow[ByteString, ByteString, NotUsed] = { val graph = GraphDSL.create(StreamConverters.asInputStream()) { implicit builder ⇒ inputStream ⇒ import GraphDSL.Implicits._ val processInputStream = builder.add(Flow[InputStream].flatMapConcat { inputStream ⇒ StreamConverters.fromInputStream(() ⇒ new LZ4InputStream(inputStream)) }) builder.materializedValue ~> processInputStream FlowShape(inputStream.in, processInputStream.out) } Flow.fromGraph(graph).mapMaterializedValue(_ ⇒ NotUsed).named("lz4Decompress") } } private final class LZ4Compress extends GraphStage[FlowShape[ByteString, ByteString]] { val inlet = Inlet[ByteString]("LZ4Compress.in") val outlet = Outlet[ByteString]("LZ4Compress.out") val shape = FlowShape(inlet, outlet) def createLogic(inheritedAttributes: Attributes) = new GraphStageLogic(shape) with InHandler with OutHandler { private[this] val bsOutputStream = ByteStringOutputStream() private[this] val lz4OutputStream = new LZ4OutputStream(bsOutputStream) def onPull(): Unit = { tryPull(inlet) } def onPush(): Unit = { import com.karasiq.shadowcloud.utils.ByteStringUnsafe.implicits._ val element = grab(inlet) lz4OutputStream.write(element.toArrayUnsafe) val output = bsOutputStream.toByteString if (output.nonEmpty) { push(outlet, output) 
bsOutputStream.clear() } else { tryPull(inlet) } } override def onUpstreamFinish(): Unit = { lz4OutputStream.close() val lastBlock = bsOutputStream.toByteString if(lastBlock.nonEmpty) { bsOutputStream.clear() emit(outlet, lastBlock, () ⇒ complete(outlet)) } else { complete(outlet) } } override def postStop(): Unit = { lz4OutputStream.close() super.postStop() } setHandlers(inlet, outlet, this) } } }
Karasiq/shadowcloud
utils/.jvm/src/main/scala/com/karasiq/shadowcloud/compression/lz4/package.scala
Scala
apache-2.0
2,801
/* NSC -- new Scala compiler * Copyright 2005-2013 LAMP/EPFL * @author Martin Odersky */ package scala package reflect package internal // todo implement in terms of BitSet import scala.collection.mutable import util.Statistics /** A base type sequence (BaseTypeSeq) is an ordered sequence spanning all the base types * of a type. It characterized by the following two laws: * * (1) Each element of `tp.baseTypeSeq` is a basetype of `tp` * (2) For each basetype `bt1` of `tp` there is an element `bt` in `tp.baseTypeSeq` such that * * bt.typeSymbol = bt1.typeSymbol * bt <: bt1 * * (3) The type symbols of different elements are different. * * Elements in the sequence are ordered by Symbol.isLess. * @note base type sequences were called closures up to 2.7.1. The name has been changed * to avoid confusion with function closures. */ trait BaseTypeSeqs { this: SymbolTable => import definitions._ import BaseTypeSeqsStats._ protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) = new BaseTypeSeq(parents, elems) protected def newMappedBaseTypeSeq(orig: BaseTypeSeq, f: Type => Type) = new MappedBaseTypeSeq(orig, f) /** Note: constructor is protected to force everyone to use the factory method newBaseTypeSeq instead. * This is necessary because when run from reflection every base type sequence needs to have a * SynchronizedBaseTypeSeq as mixin. 
*/ class BaseTypeSeq protected[reflect] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) { self => if (Statistics.canEnable) Statistics.incCounter(baseTypeSeqCount) if (Statistics.canEnable) Statistics.incCounter(baseTypeSeqLenTotal, elems.length) /** The number of types in the sequence */ def length: Int = elems.length // #3676 shows why we can't store NoType in elems to mark cycles // (while NoType is in there to indicate a cycle in this BTS, during the execution of // the mergePrefixAndArgs below, the elems get copied without the pending map, // so that NoType's are seen instead of the original type --> spurious compile error) private val pending = new mutable.BitSet(length) /** The type at i'th position in this sequence; lazy types are returned evaluated. */ def apply(i: Int): Type = if(pending contains i) { pending.clear() throw CyclicInheritance } else { def computeLazyType(rtp: RefinedType): Type = { if (!isIntersectionTypeForLazyBaseType(rtp)) devWarning("unexpected RefinedType in base type seq, lazy BTS elements should be created via intersectionTypeForLazyBaseType: " + rtp) val variants = rtp.parents // can't assert decls.isEmpty; see t0764 //if (!decls.isEmpty) abort("computing closure of "+this+":"+this.isInstanceOf[RefinedType]+"/"+closureCache(j)) //Console.println("compute closure of "+this+" => glb("+variants+")") pending += i try { mergePrefixAndArgs(variants, Variance.Contravariant, lubDepth(variants)) match { case NoType => typeError("no common type instance of base types " + (variants mkString ", and ") + " exists.") case tp0 => pending(i) = false elems(i) = tp0 tp0 } } catch { case CyclicInheritance => typeError( "computing the common type instance of base types " + (variants mkString ", and ") + " leads to a cycle.") } } elems(i) match { case rtp@RefinedType(variants, decls) => computeLazyType(rtp) case et @ ExistentialType(quantified, rtp: RefinedType) => existentialAbstraction(quantified, 
computeLazyType(rtp)) case tp => tp } } def rawElem(i: Int) = elems(i) /** The type symbol of the type at i'th position in this sequence */ def typeSymbol(i: Int): Symbol = elems(i).typeSymbol /** Return all evaluated types in this sequence as a list */ def toList: List[Type] = elems.toList def copy(head: Type, offset: Int): BaseTypeSeq = { val arr = new Array[Type](elems.length + offset) java.lang.System.arraycopy(elems, 0, arr, offset, elems.length) arr(0) = head newBaseTypeSeq(parents, arr) } /** Compute new base type sequence with `tp` prepended to this sequence */ def prepend(tp: Type): BaseTypeSeq = copy(tp, 1) /** Compute new base type sequence with `tp` replacing the head of this sequence */ def updateHead(tp: Type): BaseTypeSeq = copy(tp, 0) /** Compute new base type sequence where every element is mapped * with function `f`. Lazy types are mapped but not evaluated */ def map(f: Type => Type): BaseTypeSeq = { // inlined `elems map f` for performance val len = length val arr = new Array[Type](len) var i = 0 while (i < len) { arr(i) = f(elems(i)) i += 1 } newBaseTypeSeq(parents, arr) } def lateMap(f: Type => Type): BaseTypeSeq = newMappedBaseTypeSeq(this, f) def exists(p: Type => Boolean): Boolean = elems exists p lazy val maxDepth = maxDepthOfElems protected def maxDepthOfElems: Depth = { var d = Depth.Zero 1 until length foreach (i => d = d max typeDepth(elems(i))) d } override def toString = elems.mkString("BTS(", ",", ")") private def typeError(msg: String): Nothing = throw new TypeError( "the type intersection "+(parents mkString " with ")+" is malformed"+ "\\n --- because ---\\n"+msg) } /** A marker object for a base type sequence that's no yet computed. 
* used to catch inheritance cycles */ val undetBaseTypeSeq: BaseTypeSeq = newBaseTypeSeq(List(), Array()) /** Create a base type sequence consisting of a single type */ def baseTypeSingletonSeq(tp: Type): BaseTypeSeq = newBaseTypeSeq(List(), Array(tp)) /** Create the base type sequence of a compound type with given tp.parents */ def compoundBaseTypeSeq(tp: Type): BaseTypeSeq = { val tsym = tp.typeSymbol val parents = tp.parents // Console.println("computing baseTypeSeq of " + tsym.tpe + " " + parents)//DEBUG val buf = new mutable.ListBuffer[Type] buf += tsym.tpe_* var btsSize = 1 if (parents.nonEmpty) { val nparents = parents.length val pbtss = new Array[BaseTypeSeq](nparents) val index = new Array[Int](nparents) var i = 0 for (p <- parents) { val parentBts = p.dealias.baseTypeSeq // dealias need for SI-8046. pbtss(i) = if (parentBts eq undetBaseTypeSeq) AnyClass.info.baseTypeSeq else parentBts index(i) = 0 i += 1 } def nextTypeSymbol(i: Int): Symbol = { val j = index(i) val pbts = pbtss(i) if (j < pbts.length) pbts.typeSymbol(j) else AnyClass } def nextRawElem(i: Int): Type = { val j = index(i) val pbts = pbtss(i) if (j < pbts.length) pbts.rawElem(j) else AnyTpe } var minSym: Symbol = NoSymbol while (minSym != AnyClass) { minSym = nextTypeSymbol(0) i = 1 while (i < nparents) { val nextSym = nextTypeSymbol(i) if (nextSym isLess minSym) minSym = nextSym i += 1 } var minTypes: List[Type] = List() def alreadyInMinTypes(tp: Type): Boolean = { @annotation.tailrec def loop(tps: List[Type]): Boolean = tps match { case Nil => false case x :: xs => (tp =:= x) || loop(xs) } loop(minTypes) } i = 0 while (i < nparents) { if (nextTypeSymbol(i) == minSym) { nextRawElem(i) match { case RefinedType(variants, decls) => for (tp <- variants) if (!alreadyInMinTypes(tp)) minTypes ::= tp case tp => if (!alreadyInMinTypes(tp)) minTypes ::= tp } index(i) = index(i) + 1 } i += 1 } buf += intersectionTypeForLazyBaseType(minTypes) // TODO this reverses the order. Does this matter? 
Or should this be minTypes.reverse? btsSize += 1 } } val elems = new Array[Type](btsSize) buf.copyToArray(elems, 0) // Console.println("computed baseTypeSeq of " + tsym.tpe + " " + parents + ": "+elems.toString)//DEBUG newBaseTypeSeq(parents, elems) } class MappedBaseTypeSeq(orig: BaseTypeSeq, f: Type => Type) extends BaseTypeSeq(orig.parents map f, orig.elems) { override def apply(i: Int) = f(orig.apply(i)) override def rawElem(i: Int) = f(orig.rawElem(i)) override def typeSymbol(i: Int) = orig.typeSymbol(i) override def toList = orig.toList map f override def copy(head: Type, offset: Int) = (orig map f).copy(head, offset) override def map(g: Type => Type) = lateMap(g) override def lateMap(g: Type => Type) = orig.lateMap(x => g(f(x))) override def exists(p: Type => Boolean) = elems exists (x => p(f(x))) override protected def maxDepthOfElems: Depth = elems.map(x => typeDepth(f(x))).max override def toString = elems.mkString("MBTS(", ",", ")") } val CyclicInheritance = new Throwable } object BaseTypeSeqsStats { val baseTypeSeqCount = Statistics.newCounter("#base type seqs") val baseTypeSeqLenTotal = Statistics.newRelCounter("avg base type seq length", baseTypeSeqCount) }
shimib/scala
src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
Scala
bsd-3-clause
9,607
package gitbucket.core.controller import gitbucket.core.settings.html import gitbucket.core.model.WebHook import gitbucket.core.service.{RepositoryService, AccountService, WebHookService, ProtectedBranchService, CommitStatusService} import gitbucket.core.service.WebHookService._ import gitbucket.core.util._ import gitbucket.core.util.JGitUtil._ import gitbucket.core.util.ControlUtil._ import gitbucket.core.util.Implicits._ import gitbucket.core.util.Directory._ import io.github.gitbucket.scalatra.forms._ import org.apache.commons.io.FileUtils import org.scalatra.i18n.Messages import org.eclipse.jgit.api.Git import org.eclipse.jgit.lib.Constants import org.eclipse.jgit.lib.ObjectId class RepositorySettingsController extends RepositorySettingsControllerBase with RepositoryService with AccountService with WebHookService with ProtectedBranchService with CommitStatusService with OwnerAuthenticator with UsersAuthenticator trait RepositorySettingsControllerBase extends ControllerBase { self: RepositoryService with AccountService with WebHookService with ProtectedBranchService with CommitStatusService with OwnerAuthenticator with UsersAuthenticator => // for repository options case class OptionsForm(repositoryName: String, description: Option[String], isPrivate: Boolean) val optionsForm = mapping( "repositoryName" -> trim(label("Repository Name", text(required, maxlength(40), identifier, renameRepositoryName))), "description" -> trim(label("Description" , optional(text()))), "isPrivate" -> trim(label("Repository Type", boolean())) )(OptionsForm.apply) // for default branch case class DefaultBranchForm(defaultBranch: String) val defaultBranchForm = mapping( "defaultBranch" -> trim(label("Default Branch" , text(required, maxlength(100)))) )(DefaultBranchForm.apply) // for collaborator addition case class CollaboratorForm(userName: String) val collaboratorForm = mapping( "userName" -> trim(label("Username", text(required, collaborator))) )(CollaboratorForm.apply) // for web 
hook url addition case class WebHookForm(url: String, events: Set[WebHook.Event]) def webHookForm(update:Boolean) = mapping( "url" -> trim(label("url", text(required, webHook(update)))), "events" -> webhookEvents )(WebHookForm.apply) // for transfer ownership case class TransferOwnerShipForm(newOwner: String) val transferForm = mapping( "newOwner" -> trim(label("New owner", text(required, transferUser))) )(TransferOwnerShipForm.apply) /** * Redirect to the Options page. */ get("/:owner/:repository/settings")(ownerOnly { repository => redirect(s"/${repository.owner}/${repository.name}/settings/options") }) /** * Display the Options page. */ get("/:owner/:repository/settings/options")(ownerOnly { html.options(_, flash.get("info")) }) /** * Save the repository options. */ post("/:owner/:repository/settings/options", optionsForm)(ownerOnly { (form, repository) => saveRepositoryOptions( repository.owner, repository.name, form.description, repository.repository.parentUserName.map { _ => repository.repository.isPrivate } getOrElse form.isPrivate ) // Change repository name if(repository.name != form.repositoryName){ // Update database renameRepository(repository.owner, repository.name, repository.owner, form.repositoryName) // Move git repository defining(getRepositoryDir(repository.owner, repository.name)){ dir => FileUtils.moveDirectory(dir, getRepositoryDir(repository.owner, form.repositoryName)) } // Move wiki repository defining(getWikiRepositoryDir(repository.owner, repository.name)){ dir => FileUtils.moveDirectory(dir, getWikiRepositoryDir(repository.owner, form.repositoryName)) } } flash += "info" -> "Repository settings has been updated." 
redirect(s"/${repository.owner}/${form.repositoryName}/settings/options") }) /** branch settings */ get("/:owner/:repository/settings/branches")(ownerOnly { repository => val protecteions = getProtectedBranchList(repository.owner, repository.name) html.branches(repository, protecteions, flash.get("info")) }); /** Update default branch */ post("/:owner/:repository/settings/update_default_branch", defaultBranchForm)(ownerOnly { (form, repository) => if(repository.branchList.find(_ == form.defaultBranch).isEmpty){ redirect(s"/${repository.owner}/${repository.name}/settings/options") } else { saveRepositoryDefaultBranch(repository.owner, repository.name, form.defaultBranch) // Change repository HEAD using(Git.open(getRepositoryDir(repository.owner, repository.name))) { git => git.getRepository.updateRef(Constants.HEAD, true).link(Constants.R_HEADS + form.defaultBranch) } flash += "info" -> "Repository default branch has been updated." redirect(s"/${repository.owner}/${repository.name}/settings/branches") } }) /** Branch protection for branch */ get("/:owner/:repository/settings/branches/:branch")(ownerOnly { repository => import gitbucket.core.api._ val branch = params("branch") if(repository.branchList.find(_ == branch).isEmpty){ redirect(s"/${repository.owner}/${repository.name}/settings/branches") } else { val protection = ApiBranchProtection(getProtectedBranchInfo(repository.owner, repository.name, branch)) val lastWeeks = getRecentStatuesContexts(repository.owner, repository.name, org.joda.time.LocalDateTime.now.minusWeeks(1).toDate).toSet val knownContexts = (lastWeeks ++ protection.status.contexts).toSeq.sortBy(identity) html.branchprotection(repository, branch, protection, knownContexts, flash.get("info")) } }) /** https://developer.github.com/v3/repos/#enabling-and-disabling-branch-protection */ patch("/api/v3/repos/:owner/:repo/branches/:branch")(ownerOnly { repository => import gitbucket.core.api._ (for{ branch <- params.get("branch") if 
repository.branchList.find(_ == branch).isDefined protection <- extractFromJsonBody[ApiBranchProtection.EnablingAndDisabling].map(_.protection) } yield { if(protection.enabled){ enableBranchProtection(repository.owner, repository.name, branch, protection.status.enforcement_level == ApiBranchProtection.Everyone, protection.status.contexts) } else { disableBranchProtection(repository.owner, repository.name, branch) } JsonFormat(ApiBranch(branch, protection)(RepositoryName(repository))) }) getOrElse NotFound }) /** * Display the Collaborators page. */ get("/:owner/:repository/settings/collaborators")(ownerOnly { repository => html.collaborators( getCollaborators(repository.owner, repository.name), getAccountByUserName(repository.owner).get.isGroupAccount, repository) }) /** * Add the collaborator. */ post("/:owner/:repository/settings/collaborators/add", collaboratorForm)(ownerOnly { (form, repository) => if(!getAccountByUserName(repository.owner).get.isGroupAccount){ addCollaborator(repository.owner, repository.name, form.userName) } redirect(s"/${repository.owner}/${repository.name}/settings/collaborators") }) /** * Add the collaborator. */ get("/:owner/:repository/settings/collaborators/remove")(ownerOnly { repository => if(!getAccountByUserName(repository.owner).get.isGroupAccount){ removeCollaborator(repository.owner, repository.name, params("name")) } redirect(s"/${repository.owner}/${repository.name}/settings/collaborators") }) /** * Display the web hook page. */ get("/:owner/:repository/settings/hooks")(ownerOnly { repository => html.hooks(getWebHooks(repository.owner, repository.name), repository, flash.get("info")) }) /** * Display the web hook edit page. */ get("/:owner/:repository/settings/hooks/new")(ownerOnly { repository => val webhook = WebHook(repository.owner, repository.name, "") html.edithooks(webhook, Set(WebHook.Push), repository, flash.get("info"), true) }) /** * Add the web hook URL. 
*/ post("/:owner/:repository/settings/hooks/new", webHookForm(false))(ownerOnly { (form, repository) => addWebHook(repository.owner, repository.name, form.url, form.events) flash += "info" -> s"Webhook ${form.url} created" redirect(s"/${repository.owner}/${repository.name}/settings/hooks") }) /** * Delete the web hook URL. */ get("/:owner/:repository/settings/hooks/delete")(ownerOnly { repository => deleteWebHook(repository.owner, repository.name, params("url")) flash += "info" -> s"Webhook ${params("url")} deleted" redirect(s"/${repository.owner}/${repository.name}/settings/hooks") }) /** * Send the test request to registered web hook URLs. */ ajaxPost("/:owner/:repository/settings/hooks/test")(ownerOnly { repository => def _headers(h: Array[org.apache.http.Header]): Array[Array[String]] = h.map { h => Array(h.getName, h.getValue) } using(Git.open(getRepositoryDir(repository.owner, repository.name))){ git => import scala.collection.JavaConverters._ import scala.concurrent.duration._ import scala.concurrent._ import scala.util.control.NonFatal import org.apache.http.util.EntityUtils import scala.concurrent.ExecutionContext.Implicits.global val url = params("url") val dummyWebHookInfo = WebHook(repository.owner, repository.name, url) val dummyPayload = { val ownerAccount = getAccountByUserName(repository.owner).get val commits = if(repository.commitCount == 0) List.empty else git.log .add(git.getRepository.resolve(repository.repository.defaultBranch)) .setMaxCount(4) .call.iterator.asScala.map(new CommitInfo(_)).toList val pushedCommit = commits.drop(1) WebHookPushPayload( git = git, sender = ownerAccount, refName = "refs/heads/" + repository.repository.defaultBranch, repositoryInfo = repository, commits = pushedCommit, repositoryOwner = ownerAccount, oldId = commits.lastOption.map(_.id).map(ObjectId.fromString).getOrElse(ObjectId.zeroId()), newId = commits.headOption.map(_.id).map(ObjectId.fromString).getOrElse(ObjectId.zeroId()) ) } val (webHook, json, reqFuture, 
resFuture) = callWebHook(WebHook.Push, List(dummyWebHookInfo), dummyPayload).head val toErrorMap: PartialFunction[Throwable, Map[String,String]] = { case e: java.net.UnknownHostException => Map("error"-> ("Unknown host " + e.getMessage)) case e: java.lang.IllegalArgumentException => Map("error"-> ("invalid url")) case e: org.apache.http.client.ClientProtocolException => Map("error"-> ("invalid url")) case NonFatal(e) => Map("error"-> (e.getClass + " "+ e.getMessage)) } contentType = formats("json") org.json4s.jackson.Serialization.write(Map( "url" -> url, "request" -> Await.result(reqFuture.map(req => Map( "headers" -> _headers(req.getAllHeaders), "payload" -> json )).recover(toErrorMap), 20 seconds), "responce" -> Await.result(resFuture.map(res => Map( "status" -> res.getStatusLine(), "body" -> EntityUtils.toString(res.getEntity()), "headers" -> _headers(res.getAllHeaders()) )).recover(toErrorMap), 20 seconds) )) } }) /** * Display the web hook edit page. */ get("/:owner/:repository/settings/hooks/edit")(ownerOnly { repository => getWebHook(repository.owner, repository.name, params("url")).map{ case (webhook, events) => html.edithooks(webhook, events, repository, flash.get("info"), false) } getOrElse NotFound }) /** * Update web hook settings. */ post("/:owner/:repository/settings/hooks/edit", webHookForm(true))(ownerOnly { (form, repository) => updateWebHook(repository.owner, repository.name, form.url, form.events) flash += "info" -> s"webhook ${form.url} updated" redirect(s"/${repository.owner}/${repository.name}/settings/hooks") }) /** * Display the danger zone. */ get("/:owner/:repository/settings/danger")(ownerOnly { html.danger(_) }) /** * Transfer repository ownership. 
*/ post("/:owner/:repository/settings/transfer", transferForm)(ownerOnly { (form, repository) => // Change repository owner if(repository.owner != form.newOwner){ LockUtil.lock(s"${repository.owner}/${repository.name}"){ // Update database renameRepository(repository.owner, repository.name, form.newOwner, repository.name) // Move git repository defining(getRepositoryDir(repository.owner, repository.name)){ dir => FileUtils.moveDirectory(dir, getRepositoryDir(form.newOwner, repository.name)) } // Move wiki repository defining(getWikiRepositoryDir(repository.owner, repository.name)){ dir => FileUtils.moveDirectory(dir, getWikiRepositoryDir(form.newOwner, repository.name)) } } } redirect(s"/${form.newOwner}/${repository.name}") }) /** * Delete the repository. */ post("/:owner/:repository/settings/delete")(ownerOnly { repository => LockUtil.lock(s"${repository.owner}/${repository.name}"){ deleteRepository(repository.owner, repository.name) FileUtils.deleteDirectory(getRepositoryDir(repository.owner, repository.name)) FileUtils.deleteDirectory(getWikiRepositoryDir(repository.owner, repository.name)) FileUtils.deleteDirectory(getTemporaryDir(repository.owner, repository.name)) } redirect(s"/${repository.owner}") }) /** * Provides duplication check for web hook url. */ private def webHook(needExists: Boolean): Constraint = new Constraint(){ override def validate(name: String, value: String, messages: Messages): Option[String] = if(getWebHook(params("owner"), params("repository"), value).isDefined != needExists){ Some(if(needExists){ "URL had not been registered yet." } else { "URL had been registered already." }) } else { None } } private def webhookEvents = new ValueType[Set[WebHook.Event]]{ def convert(name: String, params: Map[String, String], messages: Messages): Set[WebHook.Event] = { WebHook.Event.values.flatMap { t => params.get(name + "." 
+ t.name).map(_ => t) }.toSet } def validate(name: String, params: Map[String, String], messages: Messages): Seq[(String, String)] = if(convert(name,params,messages).isEmpty){ Seq(name -> messages("error.required").format(name)) } else { Nil } } /** * Provides Constraint to validate the collaborator name. */ private def collaborator: Constraint = new Constraint(){ override def validate(name: String, value: String, messages: Messages): Option[String] = getAccountByUserName(value) match { case None => Some("User does not exist.") case Some(x) if(x.isGroupAccount) => Some("User does not exist.") case Some(x) if(x.userName == params("owner") || getCollaborators(params("owner"), params("repository")).contains(x.userName)) => Some("User can access this repository already.") case _ => None } } /** * Duplicate check for the rename repository name. */ private def renameRepositoryName: Constraint = new Constraint(){ override def validate(name: String, value: String, params: Map[String, String], messages: Messages): Option[String] = params.get("repository").filter(_ != value).flatMap { _ => params.get("owner").flatMap { userName => getRepositoryNamesOfUser(userName).find(_ == value).map(_ => "Repository already exists.") } } } /** * Provides Constraint to validate the repository transfer user. */ private def transferUser: Constraint = new Constraint(){ override def validate(name: String, value: String, messages: Messages): Option[String] = getAccountByUserName(value) match { case None => Some("User does not exist.") case Some(x) => if(x.userName == params("owner")){ Some("This is current repository owner.") } else { params.get("repository").flatMap { repositoryName => getRepositoryNamesOfUser(x.userName).find(_ == repositoryName).map{ _ => "User already has same repository." } } } } } }
marklacroix/gitbucket
src/main/scala/gitbucket/core/controller/RepositorySettingsController.scala
Scala
apache-2.0
16,715
package org.opensplice.mobile.dev.leader.event import java.util.UUID case class LeaderElectedEvent(override val groupId: String, override val epoch: Int, val leaderId: UUID) extends LeaderElectionEvent(groupId, epoch) { override def toString(): String = { "Member " + toShortString(leaderId) + " has been ELECTED Leader of Group " + groupId + " in Epoch " + epoch } }
levitha/levitha
src/main/scala/org/opensplice/mobile/dev/leader/event/LeaderElectedEvent.scala
Scala
apache-2.0
382
package com.philipborg.mummu.math.noise trait NoiseRange { protected val min: Double; protected val max: Double; private val mult = (max - min) / 2d; protected def map(value: Double): Double = { val answer = math.max(min, math.min(max, (value + 1) * mult + min)); return answer; } }
philipborg/Mummu
src/main/scala/com/philipborg/mummu/math/noise/NoiseRange.scala
Scala
agpl-3.0
301
package ai.verta.blobs.dataset import ai.verta.client._ import ai.verta.blobs._ import ai.verta.swagger.client.HttpException import ai.verta.swagger._public.modeldb.versioning.model.VersioningSetTagRequestResponse import scala.concurrent.ExecutionContext import scala.language.reflectiveCalls import scala.util.{Try, Success, Failure} import org.scalatest.FunSuite import org.scalatest.Assertions._ import com.amazonaws.regions.Regions; import com.amazonaws.services.s3._ import com.amazonaws.services.s3.model._ import collection.JavaConverters._ import scala.collection.mutable.HashSet class TestVersioning extends FunSuite { def fixture = new { val testfilePath = "s3://verta-starter/census-train.csv" val testfileLoc = S3Location(testfilePath).get val testfilePath2 = "s3://verta-starter/census-test.csv" val testfileLoc2 = S3Location(testfilePath2).get val workingDir = System.getProperty("user.dir") val testDir = workingDir + "/src/test/scala/ai/verta/blobs/testdir" val testfile = testDir + "/testfile" val testSubdir = testDir + "/testsubdir" val testfile2 = testSubdir + "/testfile2" } test("Combining a Dataset blob that enables versioning with one that doesn't should fail") { val f = fixture val s3Blob = S3(f.testfileLoc, true).get val s3Blob2 = S3(f.testfileLoc2, false).get val combineAttempt = S3.reduce(s3Blob, s3Blob2) assert(combineAttempt.isFailure) assert(combineAttempt match { case Failure(e) => e.getMessage contains "Cannot combine a blob that enables versioning with a blob that does not" }) val pathBlob = PathBlob(f.testfile, true).get val pathBlob2 = PathBlob(f.testfile2, false).get val combineAttempt2 = PathBlob.reduce(pathBlob, pathBlob2) assert(combineAttempt2.isFailure) assert(combineAttempt2 match { case Failure(e) => e.getMessage contains "Cannot combine a blob that enables versioning with a blob that does not" }) } }
mitdbg/modeldb
client/scala/src/test/scala/ai/verta/blobs/dataset/TestVersioning.scala
Scala
mit
2,004
/** * Copyright 2015 Thomson Reuters * * Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. */ package cmwell.util.concurrent import java.util.concurrent.{ScheduledExecutorService, ScheduledFuture, ScheduledThreadPoolExecutor} import com.typesafe.scalalogging.LazyLogging import scala.concurrent.{ExecutionContext, Future, Promise} import scala.concurrent.duration.{Duration, FiniteDuration} import scala.util.Try /** * Created by gilad on 12/3/15. */ object SimpleScheduler extends LazyLogging { private[this] lazy val timer = { val executor = new ScheduledThreadPoolExecutor(1) executor.setRemoveOnCancelPolicy(true) executor.asInstanceOf[ScheduledExecutorService] } //method is private, since we must keep execution on the expense of out timer thread to be as limited as possible. //this method can be used if and only if we know `body` is a safe and small job. private[util] def scheduleInstant[T](duration: FiniteDuration)(body: => T) = { val p = Promise[T]() val cancellable = timer.schedule( new Runnable { override def run(): Unit = { // body must not be expensive to compute since it will be run in our only timer thread expense. 
p.complete(Try(body)) } }, duration.toMillis, java.util.concurrent.TimeUnit.MILLISECONDS ) p.future -> Cancellable(cancellable) } def scheduleAtFixedRate(initialDelay: FiniteDuration, period: FiniteDuration, mayInterruptIfRunning: Boolean = false)( task: => Any )(implicit executionContext: ExecutionContext): Cancellable = { // memoize runnable task val runnable: Runnable = new Runnable { override def run(): Unit = Try(task).failed.foreach { err => logger.error("schedueled task failed", err) } } //prepare returns `this` anyway in all seen cases... (can't see why NOT do this) val ec = executionContext.prepare() val cancellable = timer.scheduleAtFixedRate(new Runnable { override def run(): Unit = ec.execute(runnable) }, initialDelay.toMillis, period.toMillis, java.util.concurrent.TimeUnit.MILLISECONDS) Cancellable(cancellable, mayInterruptIfRunning) } def schedule[T](duration: FiniteDuration)(body: => T)(implicit executionContext: ExecutionContext): Future[T] = { val p = Promise[T]() timer.schedule( new Runnable { override def run(): Unit = { // body may be expensive to compute, and must not be run in our only timer thread expense, // so we compute the task inside a `Future` and make it run on the expense of the given executionContext. p.completeWith(Future(body)(executionContext)) } }, duration.toMillis, java.util.concurrent.TimeUnit.MILLISECONDS ) p.future } def scheduleFuture[T](duration: Duration)(body: => Future[T]): Future[T] = { val p = Promise[T]() timer.schedule(new Runnable { override def run(): Unit = p.completeWith(body) }, duration.toMillis, java.util.concurrent.TimeUnit.MILLISECONDS) p.future } } object Cancellable { def apply(scheduledFuture: ScheduledFuture[_], mayInterruptIfRunning: Boolean = false)= new Cancellable { override def cancel(): Boolean = scheduledFuture.cancel(mayInterruptIfRunning) } } trait Cancellable { def cancel(): Boolean }
thomsonreuters/CM-Well
server/cmwell-util/src/main/scala/cmwell/util/concurrent/SimpleScheduler.scala
Scala
apache-2.0
3,858
package github.joestein.skeletor import java.util.Collections import scala.collection.mutable.ListBuffer import org.apache.cassandra.locator.SimpleStrategy import github.joestein.skeletor.Conversions.keyspaceString import github.joestein.util.LogHelper import me.prettyprint.hector.api.ddl.{ComparatorType, ColumnType} import me.prettyprint.hector.api.factory.HFactory import me.prettyprint.hector.api.query.{ MultigetSliceQuery, SuperSliceQuery, MultigetSubSliceQuery, MultigetSliceCounterQuery, CounterQuery, RangeSlicesQuery } import me.prettyprint.cassandra.serializers.{ StringSerializer, LongSerializer, BytesArraySerializer } import java.lang.{ Long => JLong } import scala.collection.JavaConversions._ object Conversions { implicit def simplekey(s: String): Keyspace = Keyspace(s) implicit def keyspaceString(ks: Keyspace): String = ks.name implicit def rowString(r: Row): String = r.name implicit def columnfamily(cf: ColumnFamily) = cf.name implicit def getrows(r: Rows) = r.get } case class ColumnNameValue(column: Column, name: String, value: Any, isCounter: Boolean) extends LogHelper { def ks() = column.row.cf.ks def row() = column.row def cf() = column.row.cf val isSuperColumn = value match { case _:List[_] => true case _ => false } def intValue = value match { case i: Int => i case l: Long => l.toInt case n: Number => n.intValue() case _ => value.toString.toInt } def hColumn = value match { case l: Long => HFactory.createColumn(name, JLong.valueOf(l), StringSerializer.get(), LongSerializer.get()) case _ => HFactory.createStringColumn(name, value.toString) } def hSuperColumn = value match { case list:List[String] => var columnList = List(HFactory.createColumn(list.head, "", StringSerializer.get(), StringSerializer.get())) list.tail.foreach{ columnName:String => { val column = HFactory.createColumn(columnName, "", StringSerializer.get(), StringSerializer.get()) columnList = columnList ++ List(column) }} HFactory.createSuperColumn(name, asJavaList(columnList), 
StringSerializer.get(), StringSerializer.get(), StringSerializer.get()) } } case class Column(row: Row, name: String) { def of(value: Any) = ColumnNameValue(this, name, value, false) def inc() = { ColumnNameValue(this, name, 1, true) } def inc(value: Int) = { ColumnNameValue(this, name, value, true) } def dec(value: Int) = { ColumnNameValue(this, name, (value - (2 * value)), true) } def dec() = { ColumnNameValue(this, name, -1, true) } } case class Row(cf: ColumnFamily, name: String) { def has(column: String): Column = Column(this, column) } object Row { def apply(cv: ColumnNameValue): Row = cv.row } class Rows(cv: Option[ColumnNameValue] = None) { import scala.collection.mutable.ListBuffer val rows = new ListBuffer[ColumnNameValue] cv.foreach(rows += _) def add(cv: ColumnNameValue) = { rows += cv this } //need to be able to handle adding the two list buffers together //without explicitly exposing the rows unecessarly def ++(buffRows: Rows) = { rows ++= buffRows.rows this } def get = { rows.result } } object Rows { def apply(cv: ColumnNameValue): Rows = { new Rows(Some(cv)) } } case class ColumnFamily(val ks: Keyspace, val name: String) extends LogHelper { import me.prettyprint.hector.api.factory.HFactory import me.prettyprint.hector.api.ddl.ComparatorType import Conversions._ private lazy val columnFamilyDefinition = HFactory.createColumnFamilyDefinition(ks, name, ComparatorType.UTF8TYPE) var isSuper = false def ->(row: String) = new Row(this, row) //get data out of this column family def >>(sets: (MultigetSliceQuery[String, String, String]) => Unit, proc: (String, String, String) => Unit) { Cassandra >> (this, sets, proc) } //get data out of this super column family def multigetSubSliceQuery(sets: (MultigetSubSliceQuery[String, String, String, String]) => Unit, proc: (String, String, String) => Unit) { Cassandra.multigetSubSliceQuery(this, sets, proc) } //get top level columns from super column family def superSliceQuery(sets: (SuperSliceQuery[String, String, 
String, String]) => Unit, proc: (String, String, String) => Unit) { Cassandra.superSliceQuery(this, sets, proc) } //get rows out of this column family def >>>(sets: (RangeSlicesQuery[String, String, String]) => Unit, proc: (String, String, String) => Unit) { Cassandra >>> (this, sets, proc) } //get data of this counter column family def >#(sets: (MultigetSliceCounterQuery[String, String]) => Unit, proc: (String, String, Long) => Unit) = { Cassandra ># (this, sets, proc) } //get data of this counter column family def >%(sets: (CounterQuery[String, String]) => Unit, proc: (Long) => Unit) = { Cassandra >% (this, sets, proc) } def <<(rows: Seq[ColumnNameValue]) = { Cassandra << rows } def setSuper(superColumn:Boolean = true) = { isSuper = superColumn if (superColumn) columnFamilyDefinition.setColumnType(ColumnType.SUPER) else columnFamilyDefinition.setColumnType(ColumnType.STANDARD) } /* * create the column family */ def create = { Cassandra.cluster.addColumnFamily(columnFamilyDefinition, true) } /* * drop the column family from the keyspace */ def delete = { Cassandra.cluster.dropColumnFamily(ks, name, true) } /* * truncate the data from this column family */ def truncate = { Cassandra.cluster.truncate(ks, name) } } case class Keyspace(val name: String, val replicationFactor: Int = 1) { private lazy val keyspaceDefinition = HFactory.createKeyspaceDefinition(name, classOf[SimpleStrategy].getName(), replicationFactor, Collections.emptyList()) def create = { Cassandra.cluster.addKeyspace(keyspaceDefinition, true) } def delete = { Cassandra.cluster.dropKeyspace(name, true) } def \\(cf: String) = new ColumnFamily(this, cf) }
joestein/skeletor
src/main/scala/skeletor/Internals.scala
Scala
mit
6,423
/* * Copyright (C) 2016-2017 Lightbend Inc. <https://www.lightbend.com> */ package com.lightbend.lagom.discovery import java.util.Collections import java.util.{ Map => JMap } import scala.collection.JavaConverters._ import javax.inject.Inject import java.net.URI import com.lightbend.lagom.internal.javadsl.registry.ServiceRegistryService case class UnmanagedServices @Inject() (services: Map[String, ServiceRegistryService]) object UnmanagedServices { def apply(services: JMap[String, String]): UnmanagedServices = { val convertedServices = for ((name, url) <- services.asScala.toMap) yield { name -> new ServiceRegistryService(new URI(url), Collections.emptyList()) } UnmanagedServices(convertedServices) } }
edouardKaiser/lagom
dev/service-registry/service-locator/src/main/scala/com/lightbend/lagom/discovery/UnmanagedServices.scala
Scala
apache-2.0
739
package com.arcusys.valamis /** * Created by pkornilov on 16.03.16. */ package object util { type FieldFilter = (String, String) => Boolean }
igor-borisov/valamis
valamis-util/src/main/scala/com/arcusys/valamis/util/package.scala
Scala
gpl-3.0
151
import java.io.File import actor.{FileUploadActor, FileSystemActor, FileLockingActor, DavServerActor} import akka.actor.{ActorSystem, Props} import akka.io.IO import akka.util.Timeout import client.{Config, DropboxCachedFileHandler} import client.dropbox.{FileDownloadRequest, FileListRequest, DropboxClient} import com.typesafe.config.ConfigFactory import spray.can.Http /** * Created by tomas on 01-12-15. */ object Boot extends App { Config.analyze implicit val system = ActorSystem("dropbox-cloud-client") val folder = new File(Config.cachePath) val accessToken = Config.accessToken folder.mkdirs() implicit val client = new DropboxClient(accessToken) val fileLockActor = system.actorOf(Props[FileLockingActor], "dav-file-lock-actor") val fileSystemActor = system.actorOf(FileSystemActor.props(folder, client), "dav-file-system-actor") val fileUploadActor = system.actorOf(FileUploadActor.props(folder, client), "dav-file-upload-actor") val fileHandler = new DropboxCachedFileHandler(folder, fileSystemActor) fileHandler.refreshFolder() // the handler actor replies to incoming HttpRequests val handler = system.actorOf(DavServerActor.props("http://localhost:7070", fileHandler, fileLockActor, fileSystemActor), name = "dav-server-actor") IO(Http) ! Http.Bind(handler, interface = "localhost", port = 7070) println("RUNNING") }
tomasharkema/CloudClient
src/main/scala/Boot.scala
Scala
mit
1,376
/* * Copyright 2011-2018 GatlingCorp (http://gatling.io) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.gatling.http.request.builder.ws2 import io.gatling.core.CoreComponents import io.gatling.core.session.Expression import io.gatling.http.action.ws2.WsConnectBuilder import io.gatling.http.protocol.HttpComponents import io.gatling.http.request.builder.ws.WsRequestExpressionBuilder import io.gatling.http.request.builder.{ CommonAttributes, RequestBuilder } import org.asynchttpclient.Request object WsConnectRequestBuilder { implicit def toActionBuilder(requestBuilder: WsConnectRequestBuilder): WsConnectBuilder = WsConnectBuilder(requestBuilder, Nil, None) } case class WsConnectRequestBuilder(commonAttributes: CommonAttributes, wsName: String) extends RequestBuilder[WsConnectRequestBuilder] { private[http] def newInstance(commonAttributes: CommonAttributes) = new WsConnectRequestBuilder(commonAttributes, wsName) def build(coreComponents: CoreComponents, httpComponents: HttpComponents): Expression[Request] = new WsRequestExpressionBuilder(commonAttributes, coreComponents, httpComponents).build }
wiacekm/gatling
gatling-http/src/main/scala/io/gatling/http/request/builder/ws2/WsConnectRequestBuilder.scala
Scala
apache-2.0
1,655
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.openwhisk.core.loadBalancer import akka.actor.ActorRef import akka.actor.ActorRefFactory import java.util.concurrent.ThreadLocalRandom import akka.actor.{Actor, ActorSystem, Cancellable, Props} import akka.cluster.ClusterEvent._ import akka.cluster.{Cluster, Member, MemberStatus} import akka.management.scaladsl.AkkaManagement import akka.management.cluster.bootstrap.ClusterBootstrap import akka.stream.ActorMaterializer import org.apache.kafka.clients.producer.RecordMetadata import pureconfig._ import pureconfig.generic.auto._ import org.apache.openwhisk.common._ import org.apache.openwhisk.core.WhiskConfig._ import org.apache.openwhisk.core.connector._ import org.apache.openwhisk.core.entity._ import org.apache.openwhisk.core.entity.size.SizeLong import org.apache.openwhisk.common.LoggingMarkers._ import org.apache.openwhisk.core.loadBalancer.InvokerState.{Healthy, Offline, Unhealthy, Unresponsive} import org.apache.openwhisk.core.{ConfigKeys, WhiskConfig} import org.apache.openwhisk.spi.SpiLoader import scala.annotation.tailrec import scala.concurrent.Future import scala.concurrent.duration.FiniteDuration /** * A loadbalancer that schedules workload based on a hashing-algorithm. 
* * ## Algorithm * * At first, for every namespace + action pair a hash is calculated and then an invoker is picked based on that hash * (`hash % numInvokers`). The determined index is the so called "home-invoker". This is the invoker where the following * progression will **always** start. If this invoker is healthy (see "Invoker health checking") and if there is * capacity on that invoker (see "Capacity checking"), the request is scheduled to it. * * If one of these prerequisites is not true, the index is incremented by a step-size. The step-sizes available are the * all coprime numbers smaller than the amount of invokers available (coprime, to minimize collisions while progressing * through the invokers). The step-size is picked by the same hash calculated above (`hash & numStepSizes`). The * home-invoker-index is now incremented by the step-size and the checks (healthy + capacity) are done on the invoker * we land on now. * * This procedure is repeated until all invokers have been checked at which point the "overload" strategy will be * employed, which is to choose a healthy invoker randomly. In a steadily running system, that overload means that there * is no capacity on any invoker left to schedule the current request to. * * If no invokers are available or if there are no healthy invokers in the system, the loadbalancer will return an error * stating that no invokers are available to take any work. Requests are not queued anywhere in this case. * * An example: * - availableInvokers: 10 (all healthy) * - hash: 13 * - homeInvoker: hash % availableInvokers = 13 % 10 = 3 * - stepSizes: 1, 3, 7 (note how 2 and 5 is not part of this because it's not coprime to 10) * - stepSizeIndex: hash % numStepSizes = 13 % 3 = 1 => stepSize = 3 * * Progression to check the invokers: 3, 6, 9, 2, 5, 8, 1, 4, 7, 0 --> done * * This heuristic is based on the assumption, that the chance to get a warm container is the best on the home invoker * and degrades the more steps you make. 
The hashing makes sure that all loadbalancers in a cluster will always pick the * same home invoker and do the same progression for a given action. * * Known caveats: * - This assumption is not always true. For instance, two heavy workloads landing on the same invoker can override each * other, which results in many cold starts due to all containers being evicted by the invoker to make space for the * "other" workload respectively. Future work could be to keep a buffer of invokers last scheduled for each action and * to prefer to pick that one. Then the second-last one and so forth. * * ## Capacity checking * * The maximum capacity per invoker is configured using `user-memory`, which is the maximum amount of memory of actions * running in parallel on that invoker. * * Spare capacity is determined by what the loadbalancer thinks it scheduled to each invoker. Upon scheduling, an entry * is made to update the books and a slot for each MB of the actions memory limit in a Semaphore is taken. These slots * are only released after the response from the invoker (active-ack) arrives **or** after the active-ack times out. * The Semaphore has as many slots as MBs are configured in `user-memory`. * * Known caveats: * - In an overload scenario, activations are queued directly to the invokers, which makes the active-ack timeout * unpredictable. Timing out active-acks in that case can cause the loadbalancer to prematurely assign new load to an * overloaded invoker, which can cause uneven queues. * - The same is true if an invoker is extraordinarily slow in processing activations. The queue on this invoker will * slowly rise if it gets slow to the point of still sending pings, but handling the load so slowly, that the * active-acks time out. The loadbalancer again will think there is capacity, when there is none. * * Both caveats could be solved in future work by not queueing to invoker topics on overload, but to queue on a * centralized overflow topic. 
Timing out an active-ack can then be seen as a system-error, as described in the * following. * * ## Invoker health checking * * Invoker health is determined via a kafka-based protocol, where each invoker pings the loadbalancer every second. If * no ping is seen for a defined amount of time, the invoker is considered "Offline". * * Moreover, results from all activations are inspected. If more than 3 out of the last 10 activations contained system * errors, the invoker is considered "Unhealthy". If an invoker is unhealty, no user workload is sent to it, but * test-actions are sent by the loadbalancer to check if system errors are still happening. If the * system-error-threshold-count in the last 10 activations falls below 3, the invoker is considered "Healthy" again. * * To summarize: * - "Offline": Ping missing for > 10 seconds * - "Unhealthy": > 3 **system-errors** in the last 10 activations, pings arriving as usual * - "Healthy": < 3 **system-errors** in the last 10 activations, pings arriving as usual * * ## Horizontal sharding * * Sharding is employed to avoid both loadbalancers having to share any data, because the metrics used in scheduling * are very fast changing. * * Horizontal sharding means, that each invoker's capacity is evenly divided between the loadbalancers. If an invoker * has at most 16 slots available (invoker-busy-threshold = 16), those will be divided to 8 slots for each loadbalancer * (if there are 2). * * If concurrent activation processing is enabled (and concurrency limit is > 1), accounting of containers and * concurrency capacity per container will limit the number of concurrent activations routed to the particular * slot at an invoker. Default max concurrency is 1. * * Known caveats: * - If a loadbalancer leaves or joins the cluster, all state is removed and created from scratch. Those events should * not happen often. 
* - If concurrent activation processing is enabled, it only accounts for the containers that the current loadbalancer knows. * So the actual number of containers launched at the invoker may be less than is counted at the loadbalancer, since * the invoker may skip container launch in case there is concurrent capacity available for a container launched via * some other loadbalancer. */ class ShardingContainerPoolBalancer( config: WhiskConfig, controllerInstance: ControllerInstanceId, feedFactory: FeedFactory, val invokerPoolFactory: InvokerPoolFactory, implicit val messagingProvider: MessagingProvider = SpiLoader.get[MessagingProvider])( implicit actorSystem: ActorSystem, logging: Logging, materializer: ActorMaterializer) extends CommonLoadBalancer(config, feedFactory, controllerInstance) { /** Build a cluster of all loadbalancers */ private val cluster: Option[Cluster] = if (loadConfigOrThrow[ClusterConfig](ConfigKeys.cluster).useClusterBootstrap) { AkkaManagement(actorSystem).start() ClusterBootstrap(actorSystem).start() Some(Cluster(actorSystem)) } else if (loadConfigOrThrow[Seq[String]]("akka.cluster.seed-nodes").nonEmpty) { Some(Cluster(actorSystem)) } else { None } override protected def emitMetrics() = { super.emitMetrics() MetricEmitter.emitGaugeMetric( INVOKER_TOTALMEM_BLACKBOX, schedulingState.blackboxInvokers.foldLeft(0L) { (total, curr) => if (curr.status.isUsable) { curr.id.userMemory.toMB + total } else { total } }) MetricEmitter.emitGaugeMetric( INVOKER_TOTALMEM_MANAGED, schedulingState.managedInvokers.foldLeft(0L) { (total, curr) => if (curr.status.isUsable) { curr.id.userMemory.toMB + total } else { total } }) MetricEmitter.emitGaugeMetric(HEALTHY_INVOKER_MANAGED, schedulingState.managedInvokers.count(_.status == Healthy)) MetricEmitter.emitGaugeMetric( UNHEALTHY_INVOKER_MANAGED, schedulingState.managedInvokers.count(_.status == Unhealthy)) MetricEmitter.emitGaugeMetric( UNRESPONSIVE_INVOKER_MANAGED, schedulingState.managedInvokers.count(_.status == 
Unresponsive)) MetricEmitter.emitGaugeMetric(OFFLINE_INVOKER_MANAGED, schedulingState.managedInvokers.count(_.status == Offline)) MetricEmitter.emitGaugeMetric(HEALTHY_INVOKER_BLACKBOX, schedulingState.blackboxInvokers.count(_.status == Healthy)) MetricEmitter.emitGaugeMetric( UNHEALTHY_INVOKER_BLACKBOX, schedulingState.blackboxInvokers.count(_.status == Unhealthy)) MetricEmitter.emitGaugeMetric( UNRESPONSIVE_INVOKER_BLACKBOX, schedulingState.blackboxInvokers.count(_.status == Unresponsive)) MetricEmitter.emitGaugeMetric(OFFLINE_INVOKER_BLACKBOX, schedulingState.blackboxInvokers.count(_.status == Offline)) } /** State needed for scheduling. */ val schedulingState = ShardingContainerPoolBalancerState()(lbConfig) /** * Monitors invoker supervision and the cluster to update the state sequentially * * All state updates should go through this actor to guarantee that * [[ShardingContainerPoolBalancerState.updateInvokers]] and [[ShardingContainerPoolBalancerState.updateCluster]] * are called exclusive of each other and not concurrently. */ private val monitor = actorSystem.actorOf(Props(new Actor { override def preStart(): Unit = { cluster.foreach(_.subscribe(self, classOf[MemberEvent], classOf[ReachabilityEvent])) } // all members of the cluster that are available var availableMembers = Set.empty[Member] override def receive: Receive = { case CurrentInvokerPoolState(newState) => schedulingState.updateInvokers(newState) // State of the cluster as it is right now case CurrentClusterState(members, _, _, _, _) => availableMembers = members.filter(_.status == MemberStatus.Up) schedulingState.updateCluster(availableMembers.size) // General lifecycle events and events concerning the reachability of members. Split-brain is not a huge concern // in this case as only the invoker-threshold is adjusted according to the perceived cluster-size. 
// Taking the unreachable member out of the cluster from that point-of-view results in a better experience // even under split-brain-conditions, as that (in the worst-case) results in premature overloading of invokers vs. // going into overflow mode prematurely. case event: ClusterDomainEvent => availableMembers = event match { case MemberUp(member) => availableMembers + member case ReachableMember(member) => availableMembers + member case MemberRemoved(member, _) => availableMembers - member case UnreachableMember(member) => availableMembers - member case _ => availableMembers } schedulingState.updateCluster(availableMembers.size) } })) /** Loadbalancer interface methods */ override def invokerHealth(): Future[IndexedSeq[InvokerHealth]] = Future.successful(schedulingState.invokers) override def clusterSize: Int = schedulingState.clusterSize /** 1. Publish a message to the loadbalancer */ override def publish(action: ExecutableWhiskActionMetaData, msg: ActivationMessage)( implicit transid: TransactionId): Future[Future[Either[ActivationId, WhiskActivation]]] = { val isBlackboxInvocation = action.exec.pull val actionType = if (!isBlackboxInvocation) "managed" else "blackbox" val (invokersToUse, stepSizes) = if (!isBlackboxInvocation) (schedulingState.managedInvokers, schedulingState.managedStepSizes) else (schedulingState.blackboxInvokers, schedulingState.blackboxStepSizes) val chosen = if (invokersToUse.nonEmpty) { val hash = ShardingContainerPoolBalancer.generateHash(msg.user.namespace.name, action.fullyQualifiedName(false)) val homeInvoker = hash % invokersToUse.size val stepSize = stepSizes(hash % stepSizes.size) val invoker: Option[(InvokerInstanceId, Boolean)] = ShardingContainerPoolBalancer.schedule( action.limits.concurrency.maxConcurrent, action.fullyQualifiedName(true), invokersToUse, schedulingState.invokerSlots, action.limits.memory.megabytes, homeInvoker, stepSize) invoker.foreach { case (_, true) => val metric = if (isBlackboxInvocation) 
LoggingMarkers.BLACKBOX_SYSTEM_OVERLOAD else LoggingMarkers.MANAGED_SYSTEM_OVERLOAD MetricEmitter.emitCounterMetric(metric) case _ => } invoker.map(_._1) } else { None } chosen .map { invoker => // MemoryLimit() and TimeLimit() return singletons - they should be fast enough to be used here val memoryLimit = action.limits.memory val memoryLimitInfo = if (memoryLimit == MemoryLimit()) { "std" } else { "non-std" } val timeLimit = action.limits.timeout val timeLimitInfo = if (timeLimit == TimeLimit()) { "std" } else { "non-std" } logging.info( this, s"scheduled activation ${msg.activationId}, action '${msg.action.asString}' ($actionType), ns '${msg.user.namespace.name.asString}', mem limit ${memoryLimit.megabytes} MB (${memoryLimitInfo}), time limit ${timeLimit.duration.toMillis} ms (${timeLimitInfo}) to ${invoker}") val activationResult = setupActivation(msg, action, invoker) sendActivationToInvoker(messageProducer, msg, invoker).map(_ => activationResult) } .getOrElse { // report the state of all invokers val invokerStates = invokersToUse.foldLeft(Map.empty[InvokerState, Int]) { (agg, curr) => val count = agg.getOrElse(curr.status, 0) + 1 agg + (curr.status -> count) } logging.error( this, s"failed to schedule activation ${msg.activationId}, action '${msg.action.asString}' ($actionType), ns '${msg.user.namespace.name.asString}' - invokers to use: $invokerStates") Future.failed(LoadBalancerException("No invokers available")) } } override val invokerPool = invokerPoolFactory.createInvokerPool( actorSystem, messagingProvider, messageProducer, sendActivationToInvoker, Some(monitor)) override protected def releaseInvoker(invoker: InvokerInstanceId, entry: ActivationEntry) = { schedulingState.invokerSlots .lift(invoker.toInt) .foreach(_.releaseConcurrent(entry.fullyQualifiedEntityName, entry.maxConcurrent, entry.memoryLimit.toMB.toInt)) } } object ShardingContainerPoolBalancer extends LoadBalancerProvider { override def instance(whiskConfig: WhiskConfig, instance: 
ControllerInstanceId)( implicit actorSystem: ActorSystem, logging: Logging, materializer: ActorMaterializer): LoadBalancer = { val invokerPoolFactory = new InvokerPoolFactory { override def createInvokerPool( actorRefFactory: ActorRefFactory, messagingProvider: MessagingProvider, messagingProducer: MessageProducer, sendActivationToInvoker: (MessageProducer, ActivationMessage, InvokerInstanceId) => Future[RecordMetadata], monitor: Option[ActorRef]): ActorRef = { InvokerPool.prepare(instance, WhiskEntityStore.datastore()) actorRefFactory.actorOf( InvokerPool.props( (f, i) => f.actorOf(InvokerActor.props(i, instance)), (m, i) => sendActivationToInvoker(messagingProducer, m, i), messagingProvider.getConsumer(whiskConfig, s"health${instance.asString}", "health", maxPeek = 128), monitor)) } } new ShardingContainerPoolBalancer( whiskConfig, instance, createFeedFactory(whiskConfig, instance), invokerPoolFactory) } def requiredProperties: Map[String, String] = kafkaHosts /** Generates a hash based on the string representation of namespace and action */ def generateHash(namespace: EntityName, action: FullyQualifiedEntityName): Int = { (namespace.asString.hashCode() ^ action.asString.hashCode()).abs } /** Euclidean algorithm to determine the greatest-common-divisor */ @tailrec def gcd(a: Int, b: Int): Int = if (b == 0) a else gcd(b, a % b) /** Returns pairwise coprime numbers until x. Result is memoized. */ def pairwiseCoprimeNumbersUntil(x: Int): IndexedSeq[Int] = (1 to x).foldLeft(IndexedSeq.empty[Int])((primes, cur) => { if (gcd(cur, x) == 1 && primes.forall(i => gcd(i, cur) == 1)) { primes :+ cur } else primes }) /** * Scans through all invokers and searches for an invoker tries to get a free slot on an invoker. If no slot can be * obtained, randomly picks a healthy invoker. 
* * @param maxConcurrent concurrency limit supported by this action * @param invokers a list of available invokers to search in, including their state * @param dispatched semaphores for each invoker to give the slots away from * @param slots Number of slots, that need to be acquired (e.g. memory in MB) * @param index the index to start from (initially should be the "homeInvoker" * @param step stable identifier of the entity to be scheduled * @return an invoker to schedule to or None of no invoker is available */ @tailrec def schedule( maxConcurrent: Int, fqn: FullyQualifiedEntityName, invokers: IndexedSeq[InvokerHealth], dispatched: IndexedSeq[NestedSemaphore[FullyQualifiedEntityName]], slots: Int, index: Int, step: Int, stepsDone: Int = 0)(implicit logging: Logging, transId: TransactionId): Option[(InvokerInstanceId, Boolean)] = { val numInvokers = invokers.size if (numInvokers > 0) { val invoker = invokers(index) //test this invoker - if this action supports concurrency, use the scheduleConcurrent function if (invoker.status.isUsable && dispatched(invoker.id.toInt).tryAcquireConcurrent(fqn, maxConcurrent, slots)) { Some(invoker.id, false) } else { // If we've gone through all invokers if (stepsDone == numInvokers + 1) { val healthyInvokers = invokers.filter(_.status.isUsable) if (healthyInvokers.nonEmpty) { // Choose a healthy invoker randomly val random = healthyInvokers(ThreadLocalRandom.current().nextInt(healthyInvokers.size)).id dispatched(random.toInt).forceAcquireConcurrent(fqn, maxConcurrent, slots) logging.warn(this, s"system is overloaded. Chose invoker${random.toInt} by random assignment.") Some(random, true) } else { None } } else { val newIndex = (index + step) % numInvokers schedule(maxConcurrent, fqn, invokers, dispatched, slots, newIndex, step, stepsDone + 1) } } } else { None } } } /** * Holds the state necessary for scheduling of actions. 
 *
 * @param _invokers all of the known invokers in the system
 * @param _managedInvokers all invokers for managed runtimes
 * @param _blackboxInvokers all invokers for blackbox runtimes
 * @param _managedStepSizes the step-sizes possible for the current managed invoker count
 * @param _blackboxStepSizes the step-sizes possible for the current blackbox invoker count
 * @param _invokerSlots state of accessible slots of each invoker
 * @param _clusterSize number of controllers sharing the invokers; each controller gets 1/_clusterSize of the slots
 */
case class ShardingContainerPoolBalancerState(
  private var _invokers: IndexedSeq[InvokerHealth] = IndexedSeq.empty[InvokerHealth],
  private var _managedInvokers: IndexedSeq[InvokerHealth] = IndexedSeq.empty[InvokerHealth],
  private var _blackboxInvokers: IndexedSeq[InvokerHealth] = IndexedSeq.empty[InvokerHealth],
  private var _managedStepSizes: Seq[Int] = ShardingContainerPoolBalancer.pairwiseCoprimeNumbersUntil(0),
  private var _blackboxStepSizes: Seq[Int] = ShardingContainerPoolBalancer.pairwiseCoprimeNumbersUntil(0),
  protected[loadBalancer] var _invokerSlots: IndexedSeq[NestedSemaphore[FullyQualifiedEntityName]] =
    IndexedSeq.empty[NestedSemaphore[FullyQualifiedEntityName]],
  private var _clusterSize: Int = 1)(
  lbConfig: ShardingContainerPoolBalancerConfig =
    loadConfigOrThrow[ShardingContainerPoolBalancerConfig](ConfigKeys.loadbalancer))(implicit logging: Logging) {

  // Managed fraction and blackbox fraction can be between 0.0 and 1.0. The sum of these two fractions has to be between
  // 1.0 and 2.0.
  // If the sum is 1.0 that means, that there is no overlap of blackbox and managed invokers. If the sum is 2.0, that
  // means, that there is no differentiation between managed and blackbox invokers.
  // If the sum is below 1.0 with the initial values from config, the blackbox fraction will be set higher than
  // specified in config and adapted to the managed fraction.
  private val managedFraction: Double = Math.max(0.0, Math.min(1.0, lbConfig.managedFraction))
  private val blackboxFraction: Double = Math.max(1.0 - managedFraction, Math.min(1.0, lbConfig.blackboxFraction))
  logging.info(this, s"managedFraction = $managedFraction, blackboxFraction = $blackboxFraction")(
    TransactionId.loadbalancer)

  /** Getters for the variables, setting from the outside is only allowed through the update methods below */
  def invokers: IndexedSeq[InvokerHealth] = _invokers
  def managedInvokers: IndexedSeq[InvokerHealth] = _managedInvokers
  def blackboxInvokers: IndexedSeq[InvokerHealth] = _blackboxInvokers
  def managedStepSizes: Seq[Int] = _managedStepSizes
  def blackboxStepSizes: Seq[Int] = _blackboxStepSizes
  def invokerSlots: IndexedSeq[NestedSemaphore[FullyQualifiedEntityName]] = _invokerSlots
  def clusterSize: Int = _clusterSize

  /**
   * Computes this controller's share of one invoker's memory (user memory divided by cluster size),
   * clamped from below to the minimum action memory.
   *
   * @param memory the invoker's total user memory
   * @return calculated invoker slot
   */
  private def getInvokerSlot(memory: ByteSize): ByteSize = {
    val invokerShardMemorySize = memory / _clusterSize
    // NOTE(review): "newTreshold" is a typo for "newThreshold" — rename in a follow-up.
    val newTreshold = if (invokerShardMemorySize < MemoryLimit.MIN_MEMORY) {
      logging.error(
        this,
        s"registered controllers: calculated controller's invoker shard memory size falls below the min memory of one action. " +
          s"Setting to min memory. Expect invoker overloads. Cluster size ${_clusterSize}, invoker user memory size ${memory.toMB.MB}, " +
          s"min action memory size ${MemoryLimit.MIN_MEMORY.toMB.MB}, calculated shard size ${invokerShardMemorySize.toMB.MB}.")(
        TransactionId.loadbalancer)
      MemoryLimit.MIN_MEMORY
    } else {
      invokerShardMemorySize
    }
    newTreshold
  }

  /**
   * Updates the scheduling state with the new invokers.
   *
   * This is okay to not happen atomically since dirty reads of the values set are not dangerous. It is important though
   * to update the "invokers" variables last, since they will determine the range of invokers to choose from.
   *
   * Handling a shrinking invokers list is not necessary, because InvokerPool won't shrink its own list but rather
   * report the invoker as "Offline".
   *
   * It is important that this method does not run concurrently to itself and/or to [[updateCluster]]
   */
  def updateInvokers(newInvokers: IndexedSeq[InvokerHealth]): Unit = {
    val oldSize = _invokers.size
    val newSize = newInvokers.size

    // for small N, allow the managed invokers to overlap with blackbox invokers, and
    // further assume that blackbox invokers << managed invokers
    val managed = Math.max(1, Math.ceil(newSize.toDouble * managedFraction).toInt)
    val blackboxes = Math.max(1, Math.floor(newSize.toDouble * blackboxFraction).toInt)

    _invokers = newInvokers
    _managedInvokers = _invokers.take(managed)
    _blackboxInvokers = _invokers.takeRight(blackboxes)

    val logDetail = if (oldSize != newSize) {
      // Invoker count changed, so the coprime step sets must be recomputed.
      _managedStepSizes = ShardingContainerPoolBalancer.pairwiseCoprimeNumbersUntil(managed)
      _blackboxStepSizes = ShardingContainerPoolBalancer.pairwiseCoprimeNumbersUntil(blackboxes)

      if (oldSize < newSize) {
        // Keeps the existing state..
        val onlyNewInvokers = _invokers.drop(_invokerSlots.length)
        _invokerSlots = _invokerSlots ++ onlyNewInvokers.map { invoker =>
          new NestedSemaphore[FullyQualifiedEntityName](getInvokerSlot(invoker.id.userMemory).toMB.toInt)
        }
        val newInvokerDetails = onlyNewInvokers
          .map(i =>
            s"${i.id.toString}: ${i.status} / ${getInvokerSlot(i.id.userMemory).toMB.MB} of ${i.id.userMemory.toMB.MB}")
          .mkString(", ")
        s"number of known invokers increased: new = $newSize, old = $oldSize. details: $newInvokerDetails."
      } else {
        s"number of known invokers decreased: new = $newSize, old = $oldSize."
      }
    } else {
      s"no update required - number of known invokers unchanged: $newSize."
    }

    logging.info(
      this,
      s"loadbalancer invoker status updated. managedInvokers = $managed blackboxInvokers = $blackboxes. $logDetail")(
      TransactionId.loadbalancer)
  }

  /**
   * Updates the size of a cluster. Throws away all state for simplicity.
   *
   * This is okay to not happen atomically, since a dirty read of the values set are not dangerous. At worst the
   * scheduler works on outdated invoker-load data which is acceptable.
   *
   * It is important that this method does not run concurrently to itself and/or to [[updateInvokers]]
   */
  def updateCluster(newSize: Int): Unit = {
    val actualSize = newSize max 1 // if a cluster size < 1 is reported, falls back to a size of 1 (alone)
    if (_clusterSize != actualSize) {
      val oldSize = _clusterSize
      _clusterSize = actualSize
      // All semaphores are rebuilt from scratch with the re-shared slot counts.
      _invokerSlots = _invokers.map { invoker =>
        new NestedSemaphore[FullyQualifiedEntityName](getInvokerSlot(invoker.id.userMemory).toMB.toInt)
      }
      // Directly after startup, no invokers have registered yet. This needs to be handled gracefully.
      val invokerCount = _invokers.size
      val totalInvokerMemory =
        _invokers.foldLeft(0L)((total, invoker) => total + getInvokerSlot(invoker.id.userMemory).toMB).MB
      val averageInvokerMemory =
        if (totalInvokerMemory.toMB > 0 && invokerCount > 0) {
          (totalInvokerMemory / invokerCount).toMB.MB
        } else {
          0.MB
        }
      logging.info(
        this,
        s"loadbalancer cluster size changed from $oldSize to $actualSize active nodes. ${invokerCount} invokers with ${averageInvokerMemory} average memory size - total invoker memory ${totalInvokerMemory}.")(
        TransactionId.loadbalancer)
    }
  }
}

/**
 * Configuration for the cluster created between loadbalancers.
 *
 * @param useClusterBootstrap Whether or not to use a bootstrap mechanism
 */
case class ClusterConfig(useClusterBootstrap: Boolean)

/**
 * Configuration for the sharding container pool balancer.
 *
 * @param managedFraction the fraction of all invokers to use for managed actions
 * @param blackboxFraction the fraction of all invokers to use exclusively for blackboxes
 * @param timeoutFactor factor to influence the timeout period for forced active acks (time-limit.std * timeoutFactor + timeoutAddon)
 * @param timeoutAddon extra time to influence the timeout period for forced active acks (time-limit.std * timeoutFactor + timeoutAddon)
 */
case class ShardingContainerPoolBalancerConfig(managedFraction: Double,
                                               blackboxFraction: Double,
                                               timeoutFactor: Int,
                                               timeoutAddon: FiniteDuration)

/**
 * State kept for each activation slot until completion.
 *
 * @param id id of the activation
 * @param namespaceId namespace that invoked the action
 * @param invokerName invoker the action is scheduled to
 * @param memoryLimit memory limit of the invoked action
 * @param timeLimit time limit of the invoked action
 * @param maxConcurrent concurrency limit of the invoked action
 * @param fullyQualifiedEntityName fully qualified name of the invoked action
 * @param timeoutHandler times out completion of this activation, should be canceled on good paths
 * @param isBlackbox true if the invoked action is a blackbox action, otherwise false (managed action)
 * @param isBlocking true if the action is invoked in a blocking fashion, i.e. "somebody" waits for the result
 */
case class ActivationEntry(id: ActivationId,
                           namespaceId: UUID,
                           invokerName: InvokerInstanceId,
                           memoryLimit: ByteSize,
                           timeLimit: FiniteDuration,
                           maxConcurrent: Int,
                           fullyQualifiedEntityName: FullyQualifiedEntityName,
                           timeoutHandler: Cancellable,
                           isBlackbox: Boolean,
                           isBlocking: Boolean)
jeremiaswerner/openwhisk
core/controller/src/main/scala/org/apache/openwhisk/core/loadBalancer/ShardingContainerPoolBalancer.scala
Scala
apache-2.0
30,884
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package jp.gihyo.spark.ch05

import java.text.SimpleDateFormat

/**
 * A bike-share station record parsed from one CSV line.
 *
 * @param id           station id
 * @param name         station name
 * @param lat          latitude
 * @param lon          longitude
 * @param dockcount    number of docks at the station
 * @param landmark     landmark/city column of the CSV
 * @param installation installation date of the station
 */
case class Station(id: Int,
                   name: String,
                   lat: Double,
                   lon: Double,
                   dockcount: Int,
                   landmark: String,
                   installation: java.sql.Date)

object Station {

  /**
   * Parses a CSV line of the form "id,name,lat,lon,dockcount,landmark,MM/dd/yyyy"
   * into a [[Station]].
   *
   * @param line one comma-separated record
   * @return the parsed [[Station]]
   * @throws NumberFormatException        if a numeric column is malformed
   * @throws java.text.ParseException     if the date column is malformed
   * @throws IndexOutOfBoundsException    if the line has fewer than 7 columns
   */
  def parse(line: String): Station = {
    // SimpleDateFormat is not thread-safe; a fresh instance per call keeps this safe
    // when invoked from parallel Spark tasks.
    // Fixed: pattern was "MM/dd/yyy" (three 'y's). Parsing accepts the same input either
    // way, but "yyyy" states the intent and also formats 4-digit years correctly.
    val dateFormat = new SimpleDateFormat("MM/dd/yyyy")
    val elms = line.split(",")
    val id = elms(0).toInt
    val name = elms(1)
    val lat = elms(2).toDouble
    val lon = elms(3).toDouble
    val dockcount = elms(4).toInt
    val landmark = elms(5)
    val installation = new java.sql.Date(dateFormat.parse(elms(6)).getTime)
    Station(id, name, lat, lon, dockcount, landmark, installation)
  }
}
yu-iskw/gihyo-spark-book-example
src/main/scala/jp/gihyo/spark/ch05/Station.scala
Scala
apache-2.0
1,517
/*
 * Copyright 2010 LinkedIn
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package kafka.zk

import junit.framework.TestCase
import kafka.consumer.ConsumerConfig
import org.I0Itec.zkclient.ZkClient
import kafka.utils.{ZkUtils, StringSerializer}
import org.junit.Assert
import kafka.{TestZKUtils, TestUtils}

/**
 * Verifies that an ephemeral znode disappears once the ZooKeeper session
 * that created it has been closed and its timeout has elapsed.
 */
class ZKEphemeralTest extends TestCase with ZooKeeperTestHarness {
  val zkConnect = TestZKUtils.zookeeperConnect
  var zkSessionTimeoutMs = 1000

  def testEphemeralNodeCleanup = {
    // Consumer config is only built to obtain the ZK connection timeout.
    val config = new ConsumerConfig(TestUtils.createConsumerProperties(zkConnect, "test", "1"))
    var client = new ZkClient(zkConnect, zkSessionTimeoutMs, config.zkConnectionTimeoutMs, StringSerializer)

    try {
      ZkUtils.createEphemeralPathExpectConflict(client, "/tmp/zktest", "node created")
    } catch {
      case e: Exception => println("Exception in creating ephemeral node")
    }

    // While the creating session lives, the node must be readable.
    val data: String = ZkUtils.readData(client, "/tmp/zktest")
    Assert.assertNotNull(data)

    // Terminate the session and give ZooKeeper time to expire it.
    client.close
    Thread.sleep(zkSessionTimeoutMs)

    // A fresh session must no longer see the ephemeral node.
    client = new ZkClient(zkConnect, zkSessionTimeoutMs, config.zkConnectionTimeoutMs, StringSerializer)
    Assert.assertFalse(ZkUtils.pathExists(client, "/tmp/zktest"))
  }
}
jinfei21/kafka
test/unit/kafka/zk/ZKEphemeralTest.scala
Scala
apache-2.0
1,896
package org.openurp.edu.eams.teach.program.major.dao

import org.openurp.edu.base.code.CourseType

/**
 * DAO for resolving course-group information of a major plan.
 *
 * NOTE(review): the containing file is named MajorPlanCourseGroupDao.scala while this
 * trait is MajorCourseGroupDao — consider aligning the two names in a follow-up.
 */
trait MajorCourseGroupDao {

  /**
   * Looks up the course type of a course within a plan.
   *
   * @param planId   id of the major plan
   * @param courseId id of the course
   * @return the [[CourseType]] of the course in that plan
   */
  def getCourseType(planId: java.lang.Long, courseId: java.lang.Long): CourseType
}
openurp/edu-eams-webapp
core/src/main/scala/org/openurp/edu/eams/teach/program/major/dao/MajorPlanCourseGroupDao.scala
Scala
gpl-3.0
214
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spot.proxy

import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.functions._
import org.apache.spot.proxy.ProxySuspiciousConnectsModel.EntropyCuts
import org.apache.spot.utilities._
import org.apache.spot.utilities.data.validation.InvalidDataHandler

import scala.util.{Success, Try}

/**
 * Convert Proxy log entries into "words" for topic modelling analyses.
 */
object ProxyWordCreation {

  /**
   * UDF for word creation
   *
   * @param topDomains List of most popular top level domain names (provided)
   * @param agentCounts List of user agent values in the data set and its count
   * @return a Spark UDF mapping (host, time, reqMethod, uri, contentType, userAgent, responseCode)
   *         to the proxy "word" produced by [[proxyWord]]
   */
  def udfWordCreation(topDomains: Broadcast[Set[String]],
                      agentCounts: Broadcast[Map[String, Long]]) =
    udf((host: String,
         time: String,
         reqMethod: String,
         uri: String,
         contentType: String,
         userAgent: String,
         responseCode: String) =>
      ProxyWordCreation.proxyWord(host, time, reqMethod, uri, contentType, userAgent, responseCode, topDomains, agentCounts))

  /**
   * Creates a word based on values of Proxy record.
   *
   * The word is an underscore-joined tuple of discretized features; any failure while
   * building it (e.g. a null field, or a user agent absent from `agentCounts`, which
   * makes the Map `apply` throw) is caught by the surrounding Try and mapped to
   * [[InvalidDataHandler.WordError]].
   *
   * @param proxyHost Host name
   * @param time Proxy connection time
   * @param reqMethod request method
   * @param uri URI
   * @param contentType content type
   * @param userAgent user agent
   * @param responseCode response code
   * @param topDomains top domains
   * @param agentCounts agent counts
   * @return the feature word, or [[InvalidDataHandler.WordError]] on any failure
   */
  def proxyWord(proxyHost: String,
                time: String,
                reqMethod: String,
                uri: String,
                contentType: String,
                userAgent: String,
                responseCode: String,
                topDomains: Broadcast[Set[String]],
                agentCounts: Broadcast[Map[String, Long]]): String = {
    Try {
      List(topDomain(proxyHost, topDomains.value).toString,
        // Time binned by hours
        TimeUtilities.getTimeAsHour(time).toString,
        reqMethod,
        // Fixed cutoffs
        MathUtils.bin(Entropy.stringEntropy(uri), EntropyCuts),
        // Just the top level content type for now
        if (contentType.split('/').length > 0) contentType.split('/')(0)
        else "unknown_content_type",
        // Exponential cutoffs base 2
        MathUtils.logBaseXInt(agentCounts.value(userAgent), 2),
        // Exponential cutoffs base 2
        MathUtils.logBaseXInt(uri.length(), 2),
        // Response code using all 3 digits
        if (responseCode != null) responseCode else "unknown_response_code").mkString("_")
    } match {
      case Success(proxyWord) => proxyWord
      case _ => InvalidDataHandler.WordError
    }
  }

  /**
   * Classifies proxy host domain based on popular domains.
   *
   * @param proxyHost  host name
   * @param topDomains list of top domains
   * @return 2 if the domain is on the safe list, 1 if it is a top domain, 0 otherwise
   */
  def topDomain(proxyHost: String, topDomains: Set[String]): Int = {
    val domain = DomainProcessor.extractDomain(proxyHost)

    if (domainBelongsToSafeList(domain)) {
      2
    } else if (topDomains.contains(domain)) {
      1
    } else {
      0
    }
  }

  /**
   * Defines if a domain is safe or not based on a known domain.
   *
   * @param domain domain name
   * @return returns true if the domains passed is equal to user domain.
   */
  def domainBelongsToSafeList(domain: String): Boolean = domain == "intel" // TBD parameterize this!
}
rabarona/incubator-spot
spot-ml/src/main/scala/org/apache/spot/proxy/ProxyWordCreation.scala
Scala
apache-2.0
4,286
package eu.timepit.crjdt.core

/** `Key` represents the untyped components of a `[[Cursor]]`. */
sealed trait Key extends Product with Serializable

object Key {
  /** Key for the document root. */
  case object DocK extends Key

  /** Key for the head position of a list. */
  case object HeadK extends Key

  /** Key carrying an operation [[Id]]. */
  final case class IdK(id: Id) extends Key

  /** Key carrying a map-key string. */
  final case class StrK(str: String) extends Key
}
fthomas/crjdt
modules/core/src/main/scala/eu/timepit/crjdt/core/Key.scala
Scala
apache-2.0
319
/*
https://lampsvn.epfl.ch/trac/scala/ticket/2104

symptom: Source via Buffered Source always loses the last char of the input file.
cause: BufferedSource? doesn't check return for -1 (EOF), and uses reader.ready() improperly as a substitute.

test: check over all possible strings of length up to N over alphabet chars: write file, then read back its chars, and get back the original.
*/

import scala.util.Using

object Test {
  // Maximum string length exercised: all strings of length 0 until N are tested.
  val N=4

  import java.io.{ File => JFile }
  import java.io.FileWriter
  import io.Source

  // Writes the file via `w`, always closing the writer afterwards.
  def overwrite(file: JFile, w: FileWriter => Unit): Unit = {
    val fw=new FileWriter(file)
    w(fw)
    fw.close()
  }

  // Runs `g` on a Source over `f`, then deletes the file; Using closes the Source.
  def delete_after(f: JFile, g: Source => Unit) = Using(Source.fromFile(f)) { src =>
    g(src)
    f.delete()
  }

  // Creates a temp file (named via the implicit `name`) filled by `f`.
  def store_tempfile(f: FileWriter => Unit)(implicit name: String) : JFile = {
    val tp=JFile.createTempFile(name,null)
    overwrite(tp,f)
    tp
  }

  implicit val name="t2104"

  // Alphabet deliberately includes line terminators, the characters the bug ate.
  val chars=List('\\n','\\r','a')

  type Cs = List[Char]

  // All strings of exactly length n over `chars`, as lists of chars.
  def all_strings(n: Int) : List[Cs] =
    if (n==0) List(Nil)
    else {
      val sufs=all_strings(n-1)
      chars.flatMap((c) => sufs.map(c :: _))
    }

  // Round-trips every string of length n through a temp file and compares.
  def test(n: Int): Unit = for(l <- all_strings(n)) {
    val tmp=store_tempfile((f) => l.foreach(f.write(_)))
    delete_after(tmp,(s) => assert(s.toList == l))
  }

  def main(args: Array[String]): Unit = (0 until N).foreach(test(_))
}
scala/scala
test/files/jvm/t2104.scala
Scala
apache-2.0
1,421
/*  Title:      Pure/Isar/parse.scala
    Author:     Makarius

Generic parsers for Isabelle/Isar outer syntax.
*/

package isabelle

import scala.util.parsing.combinator.Parsers
import scala.annotation.tailrec

object Parse {
  /* parsing tokens */

  trait Parser extends Parsers {
    type Elem = Token

    // When true (the default), whitespace/comment tokens are skipped before matching.
    def filter_proper: Boolean = true

    // Advances the input past improper tokens, honoring filter_proper.
    @tailrec private def proper(in: Input): Input =
      if (!filter_proper || in.atEnd || in.first.is_proper) in
      else proper(in.rest)

    // Matches one token satisfying `pred`; `s` names the expectation in error messages.
    // Returns the token together with its position.
    def token(s: String, pred: Elem => Boolean): Parser[(Elem, Token.Pos)] =
      new Parser[(Elem, Token.Pos)] {
        def apply(raw_input: Input) = {
          val in = proper(raw_input)
          if (in.atEnd) Failure(s + " expected,\\nbut end-of-input was found", in)
          else {
            val pos =
              in.pos match {
                case pos: Token.Pos => pos
                case _ => Token.Pos.none
              }
            val token = in.first
            if (pred(token)) Success((token, pos), proper(in.rest))
            else Failure(s + " expected,\\nbut " + token.kind + " was found:\\n" + token.source, in)
          }
        }
      }

    // Like `token`, but yields only the token's content.
    def atom(s: String, pred: Elem => Boolean): Parser[String] =
      token(s, pred) ^^ { case (tok, _) => tok.content }

    // Matches the given command keyword and yields its position.
    def command(name: String): Parser[Position.T] =
      token("command " + quote(name), tok => tok.is_command && tok.source == name) ^^
        { case (_, pos) => pos.position }

    def $$$(name: String): Parser[String] =
      atom("keyword " + quote(name), tok => tok.is_keyword && tok.source == name)

    def string: Parser[String] = atom("string", _.is_string)
    def nat: Parser[Int] = atom("natural number", _.is_nat) ^^ (s => Integer.parseInt(s))
    def name: Parser[String] = atom("name declaration", _.is_name)
    def xname: Parser[String] = atom("name reference", _.is_xname)
    def text: Parser[String] = atom("text", _.is_text)
    def ML_source: Parser[String] = atom("ML source", _.is_text)
    def document_source: Parser[String] = atom("document source", _.is_text)

    def path: Parser[String] =
      atom("file name/path specification", tok => tok.is_name && Path.is_wellformed(tok.content))

    def theory_name: Parser[String] =
      atom("theory name", tok => tok.is_name && Path.is_wellformed(tok.content))
    def theory_xname: Parser[String] =
      atom("theory name reference", tok => tok.is_xname && Path.is_wellformed(tok.content))

    private def tag_name: Parser[String] =
      atom("tag name", tok =>
          tok.kind == Token.Kind.IDENT ||
          tok.kind == Token.Kind.STRING)

    // Zero or more "%tag" annotations.
    def tags: Parser[List[String]] = rep($$$("%") ~> tag_name)


    /* wrappers */

    def parse[T](p: Parser[T], in: Token.Reader): ParseResult[T] = p(in)

    // Parses and additionally requires that only improper tokens remain.
    def parse_all[T](p: Parser[T], in: Token.Reader): ParseResult[T] =
    {
      val result = parse(p, in)
      val rest = proper(result.next)
      if (result.successful && !rest.atEnd) Error("bad input", rest)
      else result
    }
  }
}
MerelyAPseudonym/isabelle
src/Pure/Isar/parse.scala
Scala
bsd-3-clause
3,029
/* sbt -- Simple Build Tool
 * Copyright 2009 Mark Harrah
 */
package sbt
package classfile

import Constants._
import java.io.File

/** Parsed view of a JVM .class file, exposing the pieces sbt's dependency analysis needs. */
private[sbt] trait ClassFile {
  val majorVersion: Int
  val minorVersion: Int
  val fileName: String
  val className: String
  val superClassName: String
  val interfaceNames: Array[String]
  val accessFlags: Int
  val constantPool: Array[Constant]
  val fields: Array[FieldOrMethodInfo]
  val methods: Array[FieldOrMethodInfo]
  val attributes: Array[AttributeInfo]
  val sourceFile: Option[String]
  def types: Set[String]
  def stringValue(a: AttributeInfo): String
}

/**
 * One constant-pool entry. `nameIndex`/`typeIndex` are -1 when the tag does not use them;
 * `value` holds the resolved constant for value-carrying tags.
 */
private[sbt] final case class Constant(tag: Byte, nameIndex: Int, typeIndex: Int, value: Option[AnyRef]) extends NotNull {
  def this(tag: Byte, nameIndex: Int, typeIndex: Int) = this(tag, nameIndex, typeIndex, None)
  def this(tag: Byte, nameIndex: Int) = this(tag, nameIndex, -1)
  def this(tag: Byte, value: AnyRef) = this(tag, -1, -1, Some(value))
  // Long and Double entries occupy two constant-pool slots.
  def wide = tag == ConstantLong || tag == ConstantDouble
}

/** A field_info or method_info structure; name/descriptor are constant-pool lookups. */
private[sbt] final case class FieldOrMethodInfo(accessFlags: Int, name: Option[String], descriptor: Option[String], attributes: IndexedSeq[AttributeInfo]) extends NotNull {
  def isStatic = (accessFlags & ACC_STATIC) == ACC_STATIC
  def isPublic = (accessFlags & ACC_PUBLIC) == ACC_PUBLIC
  // A JVM entry point: public static with descriptor ([Ljava/lang/String;)V
  def isMain = isPublic && isStatic && descriptor.filter(_ == "([Ljava/lang/String;)V").isDefined
}

/** A raw attribute: its (resolved) name and unparsed payload bytes. */
private[sbt] final case class AttributeInfo(name: Option[String], value: Array[Byte]) extends NotNull {
  def isNamed(s: String) = name.filter(s == _).isDefined
  def isSignature = isNamed("Signature")
  def isSourceFile = isNamed("SourceFile")
}

/** Constants from the JVM class-file format specification. */
private[sbt] object Constants {
  final val ACC_STATIC = 0x0008
  final val ACC_PUBLIC = 0x0001
  final val JavaMagic = 0xCAFEBABE
  final val ConstantUTF8 = 1
  final val ConstantUnicode = 2
  final val ConstantInteger = 3
  final val ConstantFloat = 4
  final val ConstantLong = 5
  final val ConstantDouble = 6
  final val ConstantClass = 7
  final val ConstantString = 8
  final val ConstantField = 9
  final val ConstantMethod = 10
  final val ConstantInterfaceMethod = 11
  final val ConstantNameAndType = 12
  final val ConstantMethodHandle = 15
  final val ConstantMethodType = 16
  final val ConstantInvokeDynamic = 18
  final val ClassDescriptor = 'L'
}
xeno-by/old-scalameta-sbt
util/classfile/src/main/scala/sbt/classfile/ClassFile.scala
Scala
bsd-3-clause
2,318
package artisanal.pickle.maker

import models._
import parser._
import org.specs2._
import mutable._
import specification._
import scala.reflect.internal.pickling.ByteCodecs
import scala.tools.scalap.scalax.rules.scalasig._
import com.novus.salat.annotations.util._
import scala.reflect.ScalaSignature

/**
 * Checks that a hand-constructed ScalaSig for a case class with two Short fields
 * parses to the same structure as the signature the compiler emitted for
 * [[MyRecord_ShortShort]].
 */
class ShortShortSpec extends mutable.Specification {

  "a ScalaSig for case class MyRecord_ShortShort(b1: Short, b2: Short)" should {
    "have the correct string" in {
      // Signature built by artisanal-pickle-maker.
      val mySig = new ScalaSig(List("case class"), List("models", "MyRecord_ShortShort"), List(("b1", "Short"), ("b2", "Short")))
      // Reference signature taken from the compiled class's annotation.
      val correctParsedSig = SigParserHelper.parseByteCodeFromAnnotation(classOf[MyRecord_ShortShort]).map(ScalaSigAttributeParsers.parse(_)).get
      val myParsedSig = SigParserHelper.parseByteCodeFromMySig(mySig).map(ScalaSigAttributeParsers.parse(_)).get
      correctParsedSig.toString === myParsedSig.toString
    }
  }
}
julianpeeters/artisanal-pickle-maker
src/test/scala/doubleValueMember/ShortShortSpec.scala
Scala
apache-2.0
929
/*
 * Licensed to The Apereo Foundation under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional
 * information regarding copyright ownership.
 *
 * The Apereo Foundation licenses this file to you under the Apache License,
 * Version 2.0, (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.tle.core.i18n

import com.tle.common.i18n.StringLookup
import com.tle.web.resources.ResourcesService

/** String lookup bound to the "com.equella.core" resource bundle. */
object CoreStrings extends StringLookup {
  // Resource helper scoped to the core plugin's bundle.
  val lookup = ResourcesService.getResourceHelper("com.equella.core")

  /** Fully-qualified resource key for a plugin-local key. */
  def key(local: String): String = lookup.key(local)

  /** Looks up `key` with no interpolation values. */
  def text(key: String): String = text(key, Seq.empty: _*)

  /** Looks up `key`, interpolating `vals` into the resource string. */
  override def text(key: String, vals: AnyRef*): String = lookup.getString(key, vals: _*)

  /** Returns a lookup whose keys are all prefixed with `prefix`. */
  override def prefix(prefix: String): StringLookup = StringLookup.prefixed(key(prefix))
}
equella/Equella
Source/Plugins/Core/com.equella.core/scalasrc/com/tle/core/i18n/CoreStrings.scala
Scala
apache-2.0
1,329
package dawn.flow.trajectory

import dawn.flow._
import breeze.linalg._
import spire.math.{Real => _, _ => _}
import spire.implicits._

/** Accelerometer sampling the trajectory's local (body-frame) acceleration; `cov` is the noise covariance. */
case class Accelerometer(cov: MatrixR)(implicit val modelHook: ModelHook[Trajectory]) extends VectorSensor[Trajectory] {
  def genVector(traj: Trajectory, t: Time) =
    traj.getLocalAcceleration(t)
}

/** 2D accelerometer: only the x and y components of the (world-frame) acceleration. */
case class Accelerometer2D(cov: MatrixR)(implicit val modelHook: ModelHook[Trajectory]) extends VectorSensor[Trajectory] {
  def genVector(traj: Trajectory, t: Time) = {
    val a = traj.getAcceleration(t)
    DenseVector(a.x, a.y)
  }
}

/** Gyroscope sampling angular velocity; `dt` is the finite-difference timestep. */
case class Gyroscope(cov: MatrixR, dt: Timestep)(implicit val modelHook: ModelHook[Trajectory]) extends VectorSensor[Trajectory] {
  def genVector(traj: Trajectory, t: Time) =
    traj.getOmega(t, dt)
}

/** Position sensor sampling the trajectory position. */
case class PositionSensor(cov: MatrixR)(implicit val modelHook: ModelHook[Trajectory]) extends VectorSensor[Trajectory] {
  def genVector(traj: Trajectory, t: Time) =
    traj.getPosition(t)
}

/** Attitude sensor: orientation quaternion with noise covariance `cov`. */
case class AttitudeSensor(cov: MatrixR)(implicit val modelHook: ModelHook[Trajectory]) extends Sensor[Quat, Trajectory] {
  def generate(traj: Trajectory, t: Time) =
    Quat.genQuaternion(traj.getOrientationQuaternion(t), cov)
}

/** Altimeter: slant range z / cos(pitch) with gaussian noise of the given variance. */
case class Altimeter(variance: Real)(implicit val modelHook: ModelHook[Trajectory]) extends Sensor[AltitudeRay, Trajectory] {
  def generate(traj: Trajectory, t: Time) = {
    val q = traj.getOrientationQuaternion(t)
    val p = q.getPitch
    val d = traj.getPosition(t).z/cos(p)
    Rand.gaussian(d, variance)
  }
}

//We make a simplification
//An OF returns a measurement of the difference of Position and angle (as a local Quat)
case class OpticalFlow(covDP: MatrixR, covDQ: MatrixR, dt: Time)(implicit val modelHook: ModelHook[Trajectory])
    extends Sensor[(Position, Quat), Trajectory] {
  def generate(traj: Trajectory, t: Time) = {
    // Position delta over the last dt, with noise covDP.
    val p = traj.getPosition(t)
    val pm = traj.getPosition(t - dt)
    val dp = p - pm
    val rp = Rand.gaussian(dp, covDP)
    // Orientation delta expressed as a local quaternion, with noise covDQ.
    val q = traj.getOrientationQuaternion(t)
    val qm = traj.getOrientationQuaternion(t - dt)
    val dq = qm.reciprocal * q
    val rq = Quat.genQuaternion(dq, covDQ)
    (rp, rq)
  }
}

/** Control input: thrust with gaussian noise. */
case class ControlInputThrust(variance: Real, dt: Timestep)(implicit val modelHook: ModelHook[Trajectory]) extends Sensor[Thrust, Trajectory] {
  override def toString = "CI Thrust"
  def generate(traj: Trajectory, t: Time) =
    Rand.gaussian(traj.getThrust(t), variance)
}

/** Control input: angular velocity with gaussian noise. */
case class ControlInputOmega(covBR: MatrixR, dt: Timestep)(implicit val modelHook: ModelHook[Trajectory]) extends Sensor[Omega, Trajectory] {
  override def toString = "CI Omega"
  def generate(traj: Trajectory, t: Time) =
    Rand.gaussian(traj.getOmega(t, dt), covBR)
}

/** Pairs two sensors over the same model; both sample at the same time t. */
case class Sensor2[A, B, M](sensor1: Sensor[A, M], sensor2: Sensor[B, M]) extends Sensor[(A, B), M] {
  override def toString = sensor1.toString.take(4) + " and " + sensor2.toString.take(4)
  def modelHook = sensor1.modelHook
  def generate(model: M, t: Time) =
    (sensor1.generate(model, t), sensor2.generate(model, t))
}

/** IMU = accelerometer + gyroscope. */
object IMU {
  def apply(covAcc: MatrixR, covGyro: MatrixR, dtGyro: Time)(implicit mh: ModelHook[Trajectory]) =
    new Sensor2(Accelerometer(covAcc), Gyroscope(covGyro, dtGyro)) {
      override def toString = "IMU"
    }
}

/** GPS = position sensor. */
object GPS {
  def apply(cov: MatrixR)(implicit modelHook: ModelHook[Trajectory]) =
    new PositionSensor(cov) {
      override def toString = "GPS"
    }
}

/** Magnetometer = attitude sensor. */
object Magnetometer {
  def apply(cov: MatrixR)(implicit modelHook: ModelHook[Trajectory]) =
    new AttitudeSensor(cov) {
      override def toString = "Magnetometer"
    }
}

/** Vicon = position sensor + attitude sensor. */
object Vicon {
  def apply(covP: MatrixR, covQ: MatrixR)(implicit modelHook: ModelHook[Trajectory]) =
    new Sensor2(PositionSensor(covP), AttitudeSensor(covQ)) {
      override def toString = "Vicon"
    }
}
rubenfiszel/scala-flow
core/src/main/scala/trajectory/Sensor.scala
Scala
mit
3,892
/*
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.flaminem.flamy.utils

import org.scalatest.FreeSpec

/**
 * Created by fpin on 2/14/17.
 *
 * Exercises CliUtils' shell-like tokenization: backslash removal relative to the
 * surrounding quote character, and splitting with single/double quote and escape
 * handling (triple-quoted literals keep backslashes verbatim).
 */
class CliUtilsTest extends FreeSpec {

  "removeBackSlashes method" - {
    "should correctly remove backslashes before simple quotes between simple quotes" in {
      assert(CliUtils.removeBackSlashes("""\\'a\\'""", "'") === """'a'""")
    }
    "should not remove backslashes before simple quotes between double quotes" in {
      assert(CliUtils.removeBackSlashes("""\\'a\\'""", "\\"") === """\\'a\\'""")
    }
    "should correctly remove backslashes before double quotes between double quotes" in {
      assert(CliUtils.removeBackSlashes("""\\"a\\"""", "\\"") === """"a"""")
    }
    "should not remove backslashes before double quotes between simple quotes" in {
      assert(CliUtils.removeBackSlashes("""\\"a\\"""", "'") === """\\"a\\"""")
    }
    "should correctly remove backslashes before backslashes between double quotes" in {
      assert(CliUtils.removeBackSlashes("""a\\\\b""", "\\"") === """a\\b""")
    }
    "should correctly remove backslashes before backslashes between simple quotes" in {
      assert(CliUtils.removeBackSlashes("""a\\\\b""", "'") === """a\\b""")
    }
  }

  "split method" - {
    "should correctly split a simple string" in {
      val s = """This is a simple test"""
      val expected = Seq("This", "is", "a", "simple", "test")
      assert(CliUtils.split(s) === expected)
    }
    "should correctly split a string with double quotes" in {
      val s = """This is a "less simple" test"""
      val expected = Seq("This", "is", "a", "less simple", "test")
      assert(CliUtils.split(s) === expected)
    }
    "should correctly split a string with multiple double quotes" in {
      val s = """This is a "much ""less"" simple" test"""
      val expected = Seq("This", "is", "a", "much less simple", "test")
      assert(CliUtils.split(s) === expected)
    }
    "should correctly split a string with simple quotes" in {
      val s = """This is a 'less simple' test"""
      val expected = Seq("This", "is", "a", "less simple", "test")
      assert(CliUtils.split(s) === expected)
    }
    "should correctly split a string with multiple simple quotes" in {
      val s = """This is a 'much ''less'' simple' test"""
      val expected = Seq("This", "is", "a", "much less simple", "test")
      assert(CliUtils.split(s) === expected)
    }
    "should correctly split a string with both double and simple quotes" in {
      val s = """This is an 'even '"much "'less '"simple" test"""
      val expected = Seq("This", "is", "an", "even much less simple", "test")
      assert(CliUtils.split(s) === expected)
    }
    "should correctly split a string with simple quotes inside double quotes" in {
      val s = """This is a "'simple'" test"""
      val expected = Seq("This", "is", "a", "'simple'", "test")
      assert(CliUtils.split(s) === expected)
    }
    "should correctly split a string with double quotes inside simple quotes" in {
      val s = """This is a '"simple"' test"""
      val expected = Seq("This", "is", "a", "\\"simple\\"", "test")
      assert(CliUtils.split(s) === expected)
    }
    "should correctly split a string with backslashed double quotes inside double quotes" in {
      val s = """This is a "\\"simple\\"" test"""
      val expected = Seq("This", "is", "a", "\\"simple\\"", "test")
      assert(CliUtils.split(s) === expected)
    }
    "should correctly split a string with backslashed simple quotes inside simple quotes" in {
      val s = """This is a '\\'simple\\'' test"""
      val expected = Seq("This", "is", "a", "'simple'", "test")
      assert(CliUtils.split(s) === expected)
    }
    "should correctly split a string with backslashed simple quotes inside double quotes" in {
      val s = """This is a "\\'simple\\'" test"""
      val expected = Seq("This", "is", "a", """\\'simple\\'""", "test")
      assert(CliUtils.split(s) === expected)
    }
    "should correctly split a string with backslashed double quotes inside simple quotes" in {
      val s = """This is a '\\"simple\\"' test"""
      val expected = Seq("This", "is", "a", """\\"simple\\"""", "test")
      assert(CliUtils.split(s) === expected)
    }
    "should correctly split a string with double backslashes inside simple quotes" in {
      val s = """This is a '\\\\weird\\\\' test"""
      val expected = Seq("This", "is", "a", """\\weird\\""", "test")
      assert(CliUtils.split(s) === expected)
    }
    "should correctly split a string with double backslashes inside double quotes" in {
      val s = """This is a "\\\\weird\\\\" test"""
      val expected = Seq("This", "is", "a", """\\weird\\""", "test")
      assert(CliUtils.split(s) === expected)
    }
    "should correctly split a string with triple backslashes inside double quotes" in {
      val s = """This is a "\\\\\\very weird\\\\\\"" test"""
      val expected = Seq("This", "is", "a", """\\\\very weird\\"""", "test")
      assert(CliUtils.split(s) === expected)
    }
    "should correctly split a string that ends with 1 whitespace (annoying, but required for autocomplete)" in {
      val s = """This is a simple test """
      val expected = Seq("This", "is", "a", "simple", "test", "")
      assert(CliUtils.split(s) === expected)
    }
    "should correctly split a string that ends with 3 whitespaces (annoying, but required for autocomplete)" in {
      val s = """This is a simple test   """
      val expected = Seq("This", "is", "a", "simple", "test", "")
      assert(CliUtils.split(s) === expected)
    }
  }
}
flaminem/flamy
src/test/scala/com/flaminem/flamy/utils/CliUtilsTest.scala
Scala
apache-2.0
6,107
package api

import spray.http.StatusCodes._
import spray.http._
import spray.routing._
import spray.util.{ LoggingContext}
import util.control.NonFatal
import akka.actor.{ActorLogging, Actor}

/**
 * Signals a failure that should be rendered to the client with a specific
 * HTTP status and, optionally, a response body.
 *
 * @param responseStatus the HTTP status code to answer with
 * @param response the optional entity to use as the response body
 */
case class ErrorResponseException(responseStatus: StatusCode, response: Option[HttpEntity]) extends Exception

/**
 * Mix-in that converts exceptions and rejections escaping from routes into
 * logged HTTP error responses, and provides a hook for remedial actions.
 *
 * Responses produced here are plain, not marshalled; a fully JSON API would
 * marshal the error payload instead (e.g. see how Foursquare do it).
 */
trait FailureHandling {
  this: HttpService =>

  // For Spray > 1.1-M7 use routeRouteResponse
  // see https://groups.google.com/d/topic/spray-user/zA_KR4OBs1I/discussion
  def rejectionHandler: RejectionHandler = RejectionHandler.Default

  /** Maps well-known exception types onto specific statuses; anything else becomes a 500. */
  def exceptionHandler(implicit log: LoggingContext) = ExceptionHandler {
    case badInput: IllegalArgumentException => ctx =>
      loggedFailureResponse(
        ctx, badInput,
        message = "The server was asked a question that didn't make sense: " + badInput.getMessage,
        error = NotAcceptable)

    case missing: NoSuchElementException => ctx =>
      loggedFailureResponse(
        ctx, missing,
        message = "The server is missing some information. Try again in a few moments.",
        error = NotFound)

    case unexpected: Throwable => ctx =>
      // Deliberately do not echo unexpected.toString to the client: it may
      // expose internal information and cause a security leak.
      loggedFailureResponse(ctx, unexpected)
  }

  // Logs the full request alongside the exception, then completes the request
  // with the given status code and a human-readable message.
  private def loggedFailureResponse(ctx: RequestContext,
                                    thrown: Throwable,
                                    message: String = "The server is having problems.",
                                    error: StatusCode = InternalServerError)
                                   (implicit log: LoggingContext): Unit = {
    log.error(thrown, ctx.request.toString)
    ctx.complete((error, message))
  }

}

/**
 * Actor-backed ``HttpService`` built from a concatenation of routes, with the
 * error handler wired in. Internal server errors are logged via ``ActorLogging``.
 *
 * @param route the (concatenated) route
 */
class RoutedHttpService(route: Route) extends Actor with HttpService with ActorLogging {

  implicit def actorRefFactory = context

  implicit val handler = ExceptionHandler {
    // An ErrorResponseException carries its own status and body: pass them through.
    case NonFatal(ErrorResponseException(statusCode, entity)) => ctx =>
      ctx.complete((statusCode, entity))

    // Any other non-fatal failure is logged and answered with a bare 500.
    case NonFatal(cause) => ctx => {
      log.error(cause, InternalServerError.defaultMessage)
      ctx.complete(InternalServerError)
    }
  }

  def receive: Receive =
    runRoute(route)(handler, RejectionHandler.Default, context, RoutingSettings.default,
      LoggingContext.fromActorRefFactory)

}
triplem/activator-akka-spray
src/main/scala/api/services.scala
Scala
apache-2.0
2,931
/* * Copyright 2001-2013 Artima, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.scalatest.diagrams import org.scalatest.SharedHelpers.thisLineNumber import java.util.Date import org.scalactic.Prettifier import org.scalatest.exceptions.TestCanceledException import org.scalatest.exceptions.TestFailedException import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers class DiagramsSpec extends AnyFunSpec with Matchers with Diagrams { val fileName: String = "DiagramsSpec.scala" class Stateful { var state = false def changeState: Boolean = { state = true state } } class CustomInt(value: Int) { def startsWith(v: Int): Boolean = { value.toString.startsWith(v.toString) } def endsWith(v: Int): Boolean = { value.toString.endsWith(v.toString) } def contains(v: Int): Boolean = { value.toString.contains(v.toString) } def exists(v: Int): Boolean = { value == v } override def toString: String = value.toString } class CustomContainer[+E](e: E) { val element: E = e def contains[E1 >: E](elem: E1): Boolean = elem == element } private def neverRuns1(f: => Unit): Boolean = true private def neverRuns2(f: => Unit)(a: Int): Boolean = true private def neverRuns3[T](f: => Unit)(a: T): Boolean = true val s1 = "hi ScalaTest" val s2 = "ScalaTest hi" val s3 = "Say hi to ScalaTest" val s4 = "" val ci1 = new CustomInt(123) val ci1Str = Prettifier.default(ci1) val ci2 = new CustomInt(321) val ci2Str = Prettifier.default(ci2) val ci3 = ci1 val ci3Str = 
Prettifier.default(ci3) val l1 = List(1, 2, 3) val l2 = List.empty[Int] val l3 = List("one", "two", "three") val l3Str = Prettifier.default(l3) val m1 = Map(1 -> "one", 2 -> "two", 3 -> "three") val m1Str = Prettifier.default(m1) val m2 = Map.empty[Int, String] val ct1 = new CustomContainer(8) val ct1Str = Prettifier.default(ct1) val date = new Date def woof(f: => Unit) = "woof" def meow(x: Int = 0, y: Int = 3) = "meow" def varargs(x: Int, y: String*): (Int, Seq[String]) = (x, y.toSeq) class CustomList(value: List[Int]) { def contains(x: Int*): Boolean = x.forall(value.contains(_)) } val cList1 = new CustomList(List(1, 2, 3)) describe("Diagrams") { val a = 3 val b = 5 val bob = "bob" val alice = "alice" describe("The assert(boolean) method") { it("should do nothing when is used to check a == 3") { assert(a == 3) } it("should throw TestFailedException with correct message and stack depth when is used to check a == 5") { val e = intercept[TestFailedException] { assert(a == 5) } e.message should be ( Some( """ | |assert(a == 5) | | | | | 3 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 5 == b") { assert(5 == b) } it("should throw TestFailedException with correct message and stack depth when is used to check 3 == b") { val e = intercept[TestFailedException] { assert(3 == b) } e.message should be ( Some( """ | |assert(3 == b) | | | | | 3 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a != 5") { assert(a != 5) } it("should throw TestFailedException with correct message and stack depth when is used to check a != 3") { val e = intercept[TestFailedException] { assert(a != 3) } e.message should be ( Some( """ | |assert(a != 3) | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should 
be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 3 != b") { assert(3 != b) } it("should throw TestFailedException with correct message and stack depth when is used to check 5 != b") { val e = intercept[TestFailedException] { assert(5 != b) } e.message should be ( Some( """ | |assert(5 != b) | | | | | 5 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 3 == 3") { assert(3 == 3) } // SKIP-DOTTY-START // const-folding will eliminate the expr it("should throw TestFailedException with message that contains the original code and correct stack depth when is used to check 3 == 5") { // This is because the compiler simply pass the false boolean literal // to the macro, can't find a way to get the 3 == 5 literal. val e1 = intercept[TestFailedException] { assert(3 == 5) } e1.message should be ( Some( """ | |assert(3 == 5) | | | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } // SKIP-DOTTY-END it("should throw TestFailedException with correct message and stack depth when is used to check a == b") { val e = intercept[TestFailedException] { assert(a == b) } e.message should be ( Some( """ | |assert(a == b) | | | | | 3 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check a == 0") { val e = intercept[TestFailedException] { assert(a == 0) } e.message should be ( Some( """ | |assert(a == 0) | | | | | 3 | 0 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct 
message and stack depth when is used to check 0 == a") { val e = intercept[TestFailedException] { assert(0 == a) } e.message should be ( Some( """ | |assert(0 == a) | | | | | 0 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check 3 != a") { val e = intercept[TestFailedException] { assert(3 != a) } e.message should be ( Some( """ | |assert(3 != a) | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 5 != a") { assert(5 != a) } it("should do nothing when is used to check a > 2") { assert(a > 2) } it("should do nothing when is used to check 5 > a") { assert(5 > a) } it("should throw TestFailedException with correct message and stack depth when is used to check a > 3") { val e = intercept[TestFailedException] { assert(a > 3) } e.message should be ( Some( """ | |assert(a > 3) | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check 3 > a") { val e = intercept[TestFailedException] { assert(3 > a) } e.message should be ( Some( """ | |assert(3 > a) | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a >= 3") { assert(a >= 3) } it("should do nothing when is used to check 3 >= a") { assert(3 >= a) } it("should throw TestFailedException with correct message and stack depth when is used to check a >= 4") { val e = intercept[TestFailedException] { assert(a >= 4) } e.message should be ( Some( """ | |assert(a >= 4) 
| | | | | 3 | 4 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check 2 >= a") { val e = intercept[TestFailedException] { assert(2 >= a) } e.message should be ( Some( """ | |assert(2 >= a) | | | | | 2 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check b < 6") { assert(b < 6) } it("should do nothing when is used to check 3 < b") { assert(3 < b) } it("should throw TestFailedException with correct message and stack depth when is used to check b < 5") { val e = intercept[TestFailedException] { assert(b < 5) } e.message should be ( Some( """ | |assert(b < 5) | | | | | 5 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check 5 < b") { val e = intercept[TestFailedException] { assert(5 < b) } e.message should be ( Some( """ | |assert(5 < b) | | | | | 5 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check b <= 5") { assert(b <= 5) } it("should do nothing when is used to check 5 <= b") { assert(5 <= b) } it("should throw TestFailedException with correct message and stack depth when is used to check b <= 4") { val e = intercept[TestFailedException] { assert(b <= 4) } e.message should be ( Some( """ | |assert(b <= 4) | | | | | 5 | 4 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is 
used to check 6 <= b") { val e = intercept[TestFailedException] { assert(6 <= b) } e.message should be ( Some( """ | |assert(6 <= b) | | | | | 6 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check bob == \\"bob\\"") { assert(bob == "bob") } it("should do nothing when is used to check bob != \\"alice\\"") { assert(bob != "alice") } it("should do nothing when is used to check alice == \\"alice\\"") { assert(alice == "alice") } it("should do nothing when is used to check alice != \\"bob\\"") { assert(alice != "bob") } it("should throw TestFailedException with correct message and stack depth when is used to check bob == \\"alice\\"") { val e = intercept[TestFailedException] { assert(bob == "alice") } e.message should be ( Some( """ | |assert(bob == "alice") | | | | | | | "alice" | | false | "bob" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestFailedException with correct message and stack depth when is used to check bob != \\"bob\\"") { val e = intercept[TestFailedException] { assert(bob != "bob") } e.message should be ( Some( """ | |assert(bob != "bob") | | | | | | | "bob" | | false | "bob" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestFailedException with correct message and stack depth when is used to check alice == \\"bob\\"") { val e = intercept[TestFailedException] { assert(alice == "bob") } e.message should be ( Some( """ | |assert(alice == "bob") | | | | | | | "bob" | | false | "alice" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestFailedException with correct message and stack depth when is used to check alice != 
\\"alice\\"") { val e = intercept[TestFailedException] { assert(alice != "alice") } e.message should be ( Some( """ | |assert(alice != "alice") | | | | | | | "alice" | | false | "alice" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check Seq(a, b) == Seq(3, 5)") { assert(Seq(a, b) == Seq(3, 5)) } it("should throw TestFailedException when is used to check Set(a, b) == Set(4)") { val e = intercept[TestFailedException] { assert(Set(a, b) == Set(4)) } e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 3)) } it("should do nothing when is used to check varargs(1, y, z) == 1 -> Seq(y, z)") { assert(varargs(1, "y", "z") == 1 -> Seq("y", "z")) } it("should do nothing when is used to check cList1.contains(1, 2)") { assert(cList1.contains(1, 2)) } it("should do nothing when is used to check a === 3") { assert(a === 3) } it("should throw TestFailedException with correct message and stack depth when is used to check a === 5 ") { val e = intercept[TestFailedException] { assert(a === 5) } e.message should be ( Some( """ | |assert(a === 5) | | | | | 3 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 3 === a") { assert(3 === a) } it("should throw TestFailedException with correct message and stack depth when is used to check 5 === a") { val e = intercept[TestFailedException] { assert(5 === a) } e.message should be ( Some( """ | |assert(5 === a) | | | | | 5 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a !== 5") { assert(a !== 5) } it("should throw TestFailedException with correct message and stack depth when is used to check a !== 
3") { val e = intercept[TestFailedException] { assert(a !== 3) } e.message should be ( Some( """ | |assert(a !== 3) | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 5 !== a") { assert(5 !== a) } it("should throw TestFailedException with correct message and stack depth when is used to check 3 !== a") { val e = intercept[TestFailedException] { assert(3 !== a) } e.message should be ( Some( """ | |assert(3 !== a) | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a == 3 && b == 5") { assert(a == 3 && b == 5) } it("should throw TestFailedException with correct message and stack depth when is used to check a == 3 && b == 6") { val e = intercept[TestFailedException] { assert(a == 3 && b == 6) } e.message should be ( Some( """ | |assert(a == 3 && b == 6) | | | | | | | | | 3 | 3 | 5 | 6 | true | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestFailedException with correct message and stack depth when is used to check a == 2 && b == 5") { val e = intercept[TestFailedException] { assert(a == 2 && b == 5) } e.message should be ( Some( """ | |assert(a == 2 && b == 5) | | | | | 3 | 2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check a == 2 && b == 6") { val e = intercept[TestFailedException] { assert(a == 2 && b == 6) } e.message should be ( Some( """ | |assert(a == 2 && b == 6) | | | | | 3 | 2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) 
e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a == 3 & b == 5") { assert(a == 3 & b == 5) } it("should throw TestFailedException with correct message and stack depth when is used to check a == 3 & b == 6") { val e = intercept[TestFailedException] { assert(a == 3 & b == 6) } e.message should be ( Some( """ | |assert(a == 3 & b == 6) | | | | | | | | | 3 | 3 | 5 | 6 | true | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestFailedException with correct message and stack depth when is used to check a == 2 & b == 5") { val e = intercept[TestFailedException] { assert(a == 2 & b == 5) } e.message should be ( Some( """ | |assert(a == 2 & b == 5) | | | | | 3 | 2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check a == 2 & b == 6") { val e = intercept[TestFailedException] { assert(a == 2 & b == 6) } e.message should be ( Some( """ | |assert(a == 2 & b == 6) | | | | | 3 | 2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a == 3 || b == 5") { assert(a == 3 || b == 5) } it("should do nothing when is used to check a == 3 || b == 6") { assert(a == 3 || b == 6) } it("should do nothing when is used to check a == 2 || b == 5") { assert(a == 2 || b == 5) } it("should throw TestFailedException with correct message and stack depth when is used to check a == 2 || b == 6") { val e = intercept[TestFailedException] { assert(a == 2 || b == 6) } e.message should be ( Some( """ | |assert(a == 2 || b == 6) | | | | | | | | | 3 | 2 | 5 | 6 | | | false | | false | false |""".stripMargin ) ) 
e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check a == 3 | b == 5") { assert(a == 3 | b == 5) } it("should do nothing when is used to check a == 3 | b == 6") { assert(a == 3 | b == 6) } it("should do nothing when is used to check a == 2 | b == 5") { assert(a == 2 | b == 5) } it("should throw TestFailedException with correct message and stack depth when is used to check a == 2 | b == 6") { val e = intercept[TestFailedException] { assert(a == 2 | b == 6) } e.message should be ( Some( """ | |assert(a == 2 | b == 6) | | | | | | | | | 3 | 2 | 5 | 6 | | | false | | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check a == 3 && (b == 5 && b > 3)") { assert(a == 3 && (b == 5 && b > 3)) } it("should throw TestFailedException with correct message and stack depth when is used to check a == 3 && (b == 5 && b > 5)") { val e = intercept[TestFailedException] { assert(a == 3 && (b == 5 && b > 5)) } e.message should be ( Some( """ | |assert(a == 3 && (b == 5 && b > 5)) | | | | | | | | | | | | | 3 | 3 | 5 | 5 | 5 | 5 | true false true | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check !(a == 5)") { assert(!(a == 5)) } it("should throw TestFailedException with correct message and stack depth when is used to check !(a == 3)") { val e = intercept[TestFailedException] { assert(!(a == 3)) } e.message should be ( Some( """ | |assert(!(a == 3)) | | | | | | | 3 | 3 | | true | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestFailedException with correct message and stack depth when is used to check a == 3 && 
!(b == 5)") { val e = intercept[TestFailedException] { assert(a == 3 && !(b == 5)) } e.message should be ( Some( """ | |assert(a == 3 && !(b == 5)) | | | | | | | | | | 3 | 3 | | 5 | 5 | true | | true | | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check (a == 3) == (b == 5)") { assert((a == 3) == (b == 5)) } it("should throw TestFailedException with correct message and stack depth when is used to check (a == 3) == (b != 5)") { val e = intercept[TestFailedException] { assert((a == 3) == (b != 5)) } e.message should be ( Some( """ | |assert((a == 3) == (b != 5)) | | | | | | | | | 3 | 3 | 5 | 5 | true false false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should short-circuit && when first condition was false") { val s = new Stateful intercept[TestFailedException] { assert(a == 5 && s.changeState) } s.state should be (false) } it("should short-circuit & when first condition was false") { val s = new Stateful intercept[TestFailedException] { assert(a == 5 & s.changeState) } s.state should be (false) } it("should short-circuit || when first condition was true") { val s = new Stateful assert(a == 3 || s.changeState) s.state should be (false) } it("should short-circuit | when first condition was true") { val s = new Stateful assert(a == 3 | s.changeState) s.state should be (false) } it("should do nothing when it is used to check a == 3 && { println(\\"hi\\"); b == 5} ") { assert(a == 3 && { println("hi"); b == 5}) } it("should throw TestFailedException with correct message and stack depth when is usesd to check a == 3 && { println(\\"hi\\"); b == 3}") { val e = intercept[TestFailedException] { assert(a == 3 && { println("hi"); b == 3}) } e.message should be ( Some( """ | |assert(a == 3 && { println("hi"); b == 3}) | | | | | | | | | 3 | 3 false 
5 | 3 | true false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when it is used to check { println(\\"hi\\"); b == 5} && a == 3") { assert({ println("hi"); b == 5} && a == 3) } it("should throw TestFailedException with correct message and stack depth when is usesd to check { println(\\"hi\\"); b == 5} && a == 5") { val e = intercept[TestFailedException] { assert({ println("hi"); b == 5} && a == 5) } e.message should be ( Some( """ | |assert({ println("hi"); b == 5} && a == 5) | | | | | | | | | 5 | 5 | 3 | 5 | true | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should preserve side effects when Apply with single argument is passed in") { assert(neverRuns1(sys.error("Sad times 1"))) } it("should preserve side effects when Apply with 2 argument list is passed in") { assert(neverRuns2(sys.error("Sad times 2"))(0)) } it("should preserve side effects when typed Apply with 2 argument list is passed in") { assert(neverRuns3(sys.error("Sad times 3"))(0)) } it("should do nothing when is used to check s1 startsWith \\"hi\\"") { assert(s1 startsWith "hi") assert(s1.startsWith("hi")) } it("should throw TestFailedException with correct message and stack depth when is used to check s2 startsWith \\"hi\\"") { val e1 = intercept[TestFailedException] { assert(s2 startsWith "hi") } e1.message should be ( Some( """ | |assert(s2 startsWith "hi") | | | | | | false "hi" | "ScalaTest hi" |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestFailedException] { assert(s2.startsWith("hi")) } e2.message should be ( Some( """ | |assert(s2.startsWith("hi")) | | | | | | false "hi" | "ScalaTest hi" |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) 
e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check ci1 startsWith 1") { assert(ci1 startsWith 1) assert(ci1.startsWith(1)) } it("should throw TestFailedException with correct message and stack depth when is used to check ci2 startsWith 1") { val e1 = intercept[TestFailedException] { assert(ci2 startsWith 1) } e1.message should be ( Some( """ | |assert(ci2 startsWith 1) | | | | | 321 false 1 |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 13)) val e2 = intercept[TestFailedException] { assert(ci2.startsWith(1)) } e2.message should be ( Some( """ | |assert(ci2.startsWith(1)) | | | | | 321 false 1 |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } it("should do nothing when is used to check !s2.startsWith(\\"hi\\")") { assert(!s2.startsWith("hi")) } it("should throw TestFailedException with correct message and stack depth when is used to check !s1.startsWith(\\"hi\\")") { val e1 = intercept[TestFailedException] { assert(!s1.startsWith("hi")) } e1.message should be ( Some( """ | |assert(!s1.startsWith("hi")) | || | | | || true "hi" | |"hi ScalaTest" | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check s2 endsWith \\"hi\\"") { assert(s2 endsWith "hi") assert(s2.endsWith("hi")) } it("should throw TestFailedException with correct message and stack depth when is used to check s1 endsWith \\"hi\\"") { val e1 = intercept[TestFailedException] { assert(s1 endsWith "hi") } e1.message should be ( Some( """ | |assert(s1 endsWith "hi") | | | | | | false "hi" | "hi ScalaTest" |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = 
intercept[TestFailedException] { assert(s1.endsWith("hi")) } e2.message should be ( Some( """ | |assert(s1.endsWith("hi")) | | | | | | false "hi" | "hi ScalaTest" |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check ci2 endsWith 1") { assert(ci2 endsWith 1) assert(ci2.endsWith(1)) } it("should throw TestFailedException with correct message and stack depth when is used to check ci1 endsWith 1") { val e1 = intercept[TestFailedException] { assert(ci1 endsWith 1) } e1.message should be ( Some( """ | |assert(ci1 endsWith 1) | | | | | 123 false 1 |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 13)) val e2 = intercept[TestFailedException] { assert(ci1.endsWith(1)) } e2.message should be ( Some( """ | |assert(ci1.endsWith(1)) | | | | | 123 false 1 |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } it("should do nothing when is used to check !s1.endsWith(\\"hi\\")") { assert(!s1.endsWith("hi")) } it("should throw TestFailedException with correct message and stack depth when is used to check !s2.endsWith(\\"hi\\")") { val e1 = intercept[TestFailedException] { assert(!s2.endsWith("hi")) } e1.message should be ( Some( """ | |assert(!s2.endsWith("hi")) | || | | | || true "hi" | |"ScalaTest hi" | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check s3 contains \\"hi\\"") { assert(s3 contains "hi") assert(s3.contains("hi")) } it("should throw TestFailedException with correct message and stack depth when is used to check s3 contains \\"hello\\"") { val e1 = intercept[TestFailedException] { assert(s3 contains "hello") } e1.message should be ( Some( """ | |assert(s3 
contains "hello") | | | | | | false "hello" | "Say hi to ScalaTest" |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestFailedException] { assert(s3.contains("hello")) } e2.message should be ( Some( """ | |assert(s3.contains("hello")) | | | | | | false "hello" | "Say hi to ScalaTest" |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check ci2 contains 2") { assert(ci2 contains 2) assert(ci2.contains(2)) } it("should throw TestFailedException with correct message and stack depth when is used to check ci1 contains 5") { val e1 = intercept[TestFailedException] { assert(ci1 contains 5) } e1.message should be ( Some( """ | |assert(ci1 contains 5) | | | | | 123 false 5 |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 13)) val e2 = intercept[TestFailedException] { assert(ci1.contains(5)) } e2.message should be ( Some( """ | |assert(ci1.contains(5)) | | | | | 123 false 5 |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } it("should do nothing when is used to check !s1.contains(\\"hello\\")") { assert(!s3.contains("hello")) } it("should throw TestFailedException with correct message and stack depth when is used to check !s3.contains(\\"hi\\")") { val e1 = intercept[TestFailedException] { assert(!s3.contains("hi")) } e1.message should be ( Some( """ | |assert(!s3.contains("hi")) | || | | | || true "hi" | |"Say hi to ScalaTest" | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check l1 contains 2") { assert(l1 contains 2) assert(l1.contains(2)) } it("should throw 
TestFailedException with correct message and stack depth when is used to check l1 contains 5") { val e1 = intercept[TestFailedException] { assert(l1 contains 5) } e1.message should be ( Some( """ | |assert(l1 contains 5) | | | | | | false 5 | List(1, 2, 3) |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestFailedException] { assert(l1.contains(5)) } e2.message should be ( Some( """ | |assert(l1.contains(5)) | | | | | | false 5 | List(1, 2, 3) |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !(l1 contains 5)") { assert(!(l1 contains 5)) assert(!l1.contains(5)) } it("should throw TestFailedException with correct message and stack depth when is used to check !(l1 contains 2)") { val e1 = intercept[TestFailedException] { assert(!(l1 contains 2)) } e1.message should be ( Some( """ | |assert(!(l1 contains 2)) | | | | | | | | true 2 | | List(1, 2, 3) | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) val e2 = intercept[TestFailedException] { assert(!l1.contains(2)) } e2.message should be ( Some( """ | |assert(!l1.contains(2)) | || | | | || true 2 | |List(1, 2, 3) | false |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check m1 contains 2") { assert(m1 contains 2) assert(m1.contains(2)) } it("should throw TestFailedException with correct message and stack depth when is used to check m1 contains 5") { val e1 = intercept[TestFailedException] { assert(m1 contains 5) } e1.message should be ( Some( s""" | |assert(m1 contains 5) | | | | | | false 5 | $m1Str |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) 
e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestFailedException] { assert(m1.contains(5)) } e2.message should be ( Some( s""" | |assert(m1.contains(5)) | | | | | | false 5 | $m1Str |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !(m1 contains 5)") { assert(!(m1 contains 5)) assert(!m1.contains(5)) } it("should throw TestFailedException with correct message and stack depth when is used to check !(m1 contains 2)") { val e1 = intercept[TestFailedException] { assert(!(m1 contains 2)) } e1.message should be ( Some( s""" | |assert(!(m1 contains 2)) | | | | | | | | true 2 | | $m1Str | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) val e2 = intercept[TestFailedException] { assert(!m1.contains(2)) } e2.message should be ( Some( s""" | |assert(!m1.contains(2)) | || | | | || true 2 | |$m1Str | false |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check ct1 contains 8") { assert(ct1 contains 8) assert(ct1.contains(8)) } it("should throw TestFailedException with correct message and stack depth when is used to check ct1 contains 5") { val e1 = intercept[TestFailedException] { assert(ct1 contains 5) } e1.message should be ( Some( s""" | |assert(ct1 contains 5) | | | | | | false 5 | $ct1Str |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestFailedException] { assert(ct1.contains(5)) } e2.message should be ( Some( s""" | |assert(ct1.contains(5)) | | | | | | false 5 | $ct1Str |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) 
} it("should do nothing when is used to check !ct1.contains(5)") { assert(!ct1.contains(5)) } it("should throw TestFailedException with correct message and stack depth when is used to check !ct1.contains(8)") { val e1 = intercept[TestFailedException] { assert(!ct1.contains(8)) } e1.message should be ( Some( s""" | |assert(!ct1.contains(8)) | || | | | || true 8 | |$ct1Str | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check ci1 eq ci3") { assert(ci1 eq ci3) assert(ci1.eq(ci3)) } it("should throw TestFailedException with correct message and stack depth when is used to check ci1 eq ci2") { val e1 = intercept[TestFailedException] { assert(ci1 eq ci2) } e1.message should be ( Some( s""" | |assert(ci1 eq ci2) | | | | | $ci1Str | $ci2Str | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestFailedException] { assert(ci1.eq(ci2)) } e2.message should be ( Some( s""" | |assert(ci1.eq(ci2)) | | | | | $ci1Str | $ci2Str | false |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !ci1.eq(ci2)") { assert(!ci1.eq(ci2)) } it("should throw TestFailedException with correct message and stack depth when is used to check !ci1.eq(ci3)") { val e = intercept[TestFailedException] { assert(!ci1.eq(ci3)) } e.message should be ( Some( s""" | |assert(!ci1.eq(ci3)) | || | | | |$ci1Str | $ci3Str | | true | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check ci1 ne ci2") { assert(ci1 ne ci2) assert(ci1.ne(ci2)) } it("should throw TestFailedException with correct message and stack depth when is used to check 
ci1 ne ci3") { val e1 = intercept[TestFailedException] { assert(ci1 ne ci3) } e1.message should be ( Some( s""" | |assert(ci1 ne ci3) | | | | | $ci1Str | $ci3Str | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestFailedException] { assert(ci1.ne(ci3)) } e2.message should be ( Some( s""" | |assert(ci1.ne(ci3)) | | | | | $ci1Str | $ci3Str | false |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !ci1.ne(ci3)") { assert(!ci1.ne(ci3)) } it("should throw TestFailedException with correct message and stack depth when is used to check !ci1.ne(ci2)") { val e = intercept[TestFailedException] { assert(!ci1.ne(ci2)) } e.message should be ( Some( """ | |assert(!ci1.ne(ci2)) | || | | | |123 | 321 | | true | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check s4.isEmpty") { assert(s4.isEmpty) } it("should throw TestFailedException with correct message and stack depth when is used to check s3.isEmpty") { val e = intercept[TestFailedException] { assert(s3.isEmpty) } e.message should be ( Some( """ | |assert(s3.isEmpty) | | | | | false | "Say hi to ScalaTest" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !s3.isEmpty") { assert(!s3.isEmpty) } it("should throw TestFailedException with correct message and stack depth when is used to check !s4.isEmpty") { val e = intercept[TestFailedException] { assert(!s4.isEmpty) } e.message should be ( Some( """ | |assert(!s4.isEmpty) | || | | |"" true | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be 
(Some(thisLineNumber - 14)) } it("should do nothing when is used to check l2.isEmpty") { assert(l2.isEmpty) } it("should throw TestFailedException with correct message and stack depth when is used to check l1.isEmpty") { val e = intercept[TestFailedException] { assert(l1.isEmpty) } e.message should be ( Some( s""" | |assert(l1.isEmpty) | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !l1.isEmpty") { assert(!l1.isEmpty) } it("should throw TestFailedException with correct message and stack depth when is used to check !l2.isEmpty") { val e = intercept[TestFailedException] { assert(!l2.isEmpty) } e.message should be ( Some( s""" | |assert(!l2.isEmpty) | || | | || true | |$l2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check s1.isInstanceOf[String]") { assert(s1.isInstanceOf[String]) } it("should throw TestFailedException with correct message and stack depth when is used to check l1.isInstanceOf[String]") { val e = intercept[TestFailedException] { assert(l1.isInstanceOf[String]) } e.message should be ( Some( s""" | |assert(l1.isInstanceOf[String]) | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check l1.isInstanceOf[List[Int]]") { assert(l1.isInstanceOf[List[Int]]) } it("should throw TestFailedException with correct message and stack depth when is used to check s1.isInstanceOf[List[Int]]") { val e = intercept[TestFailedException] { assert(s1.isInstanceOf[List[Int]]) } e.message should be ( Some( """ | |assert(s1.isInstanceOf[List[Int]]) | | | | | false | "hi ScalaTest" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) 
e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check date.isInstanceOf[Date]") { assert(date.isInstanceOf[Date]) } it("should throw TestFailedException with correct message and stack depth when is used to check l1.isInstanceOf[Date]") { val e = intercept[TestFailedException] { assert(l1.isInstanceOf[Date]) } e.message should be ( Some( s""" | |assert(l1.isInstanceOf[Date]) | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !l1.isInstanceOf[String]") { assert(!l1.isInstanceOf[String]) } it("should throw TestFailedException with correct message and stack depth when is used to check !s1.isInstanceOf[String]") { val e = intercept[TestFailedException] { assert(!s1.isInstanceOf[String]) } e.message should be ( Some( """ | |assert(!s1.isInstanceOf[String]) | || | | || true | |"hi ScalaTest" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check !s1.isInstanceOf[List[Int]]") { assert(!s1.isInstanceOf[List[Int]]) } it("should throw TestFailedException with correct message and stack depth when is used to check !l1.isInstanceOf[List[Int]]") { val e = intercept[TestFailedException] { assert(!l1.isInstanceOf[List[Int]]) } e.message should be ( Some( s""" | |assert(!l1.isInstanceOf[List[Int]]) | || | | || true | |$l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check !l1.isInstanceOf[Date]") { assert(!l1.isInstanceOf[Date]) } it("should throw TestFailedException with correct message and stack depth when is used to check !date.isInstanceOf[Date]") { val e = intercept[TestFailedException] { assert(!date.isInstanceOf[Date]) } 
e.message should be ( Some( s""" | |assert(!date.isInstanceOf[Date]) | || | | || true | |$date | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check s1.length == 9") { assert(s1.length == 12) } it("should throw TestFailedException with correct message and stack depth when is used to check s1.length == 10") { val e = intercept[TestFailedException] { assert(s1.length == 10) } e.message should be ( Some( """ | |assert(s1.length == 10) | | | | | | | 12 | 10 | | false | "hi ScalaTest" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check l1.length == 3") { assert(l1.length == 3) } it("should throw TestFailedException with correct message and stack depth when is used to check l1.length == 10") { val e = intercept[TestFailedException] { assert(l1.length == 10) } e.message should be ( Some( s""" | |assert(l1.length == 10) | | | | | | | 3 | 10 | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check !(s1.length == 10)") { assert(!(s1.length == 10)) } it("should throw TestFailedException with correct message and stack depth when is used to check !(s1.length == 9)") { val e = intercept[TestFailedException] { assert(!(s1.length == 12)) } e.message should be ( Some( """ | |assert(!(s1.length == 12)) | | | | | | | | | 12 | 12 | | | true | | "hi ScalaTest" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check !(l1.length == 2)") { assert(!(l1.length == 2)) } it("should throw TestFailedException with correct message and stack depth when is used to check !(l1.length == 9)") 
{ val e = intercept[TestFailedException] { assert(!(l1.length == 3)) } e.message should be ( Some( s""" | |assert(!(l1.length == 3)) | | | | | | | | | 3 | 3 | | | true | | $l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check s1.size == 9") { assert(s1.size == 12) } it("should throw TestFailedException with correct message and stack depth when is used to check s1.size == 10") { val e = intercept[TestFailedException] { assert(s1.size == 10) } e.message should be ( Some( """ | |assert(s1.size == 10) | | | | | | | 12 | 10 | | false | "hi ScalaTest" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check l1.size == 3") { assert(l1.size == 3) } it("should throw TestFailedException with correct message and stack depth when is used to check l1.size == 10") { val e = intercept[TestFailedException] { assert(l1.size == 10) } e.message should be ( Some( s""" | |assert(l1.size == 10) | | | | | | | 3 | 10 | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check !(s1.size == 10)") { assert(!(s1.size == 10)) } it("should throw TestFailedException with correct message and stack depth when is used to check !(s1.size == 9)") { val e = intercept[TestFailedException] { assert(!(s1.size == 12)) } e.message should be ( Some( """ | |assert(!(s1.size == 12)) | | | | | | | | | 12 | 12 | | | true | | "hi ScalaTest" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check !(l1.size == 2)") { assert(!(l1.size == 2)) } it("should throw TestFailedException with correct message and stack 
depth when is used to check !(l1.size == 9) ") { val e = intercept[TestFailedException] { assert(!(l1.size == 3)) } e.message should be ( Some( s""" | |assert(!(l1.size == 3)) | | | | | | | | | 3 | 3 | | | true | | $l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check l1.exists(_ == 3)") { assert(l1.exists(_ == 3)) } it("should do nothing when is used to check l1.exists(3 == _)") { assert(l1.exists(3 == _)) } it("should throw TestFailedException with correct message and stack depth when is used to check l1.exists(_ == 5) ") { val e = intercept[TestFailedException] { assert(l1.exists(_ == 5)) } e.message should be ( Some( s""" | |assert(l1.exists(_ == 5)) | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check l1.exists(5 == _) ") { val e = intercept[TestFailedException] { assert(l1.exists(5 == _)) } e.message should be ( Some( s""" | |assert(l1.exists(5 == _)) | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !l1.exists(_ == 5)") { assert(!l1.exists(_ == 5)) } it("should do nothing when is used to check !l1.exists(5 == _)") { assert(!l1.exists(5 == _)) } it("should throw TestFailedException with correct message and stack depth when is used to check !l1.exists(_ == 3)") { val e = intercept[TestFailedException] { assert(!l1.exists(_ == 3)) } e.message should be ( Some( s""" | |assert(!l1.exists(_ == 3)) | || | | || true | |$l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestFailedException 
with correct message and stack depth when is used to check !l1.exists(3 == _)") { val e = intercept[TestFailedException] { assert(!l1.exists(3 == _)) } e.message should be ( Some( s""" | |assert(!l1.exists(3 == _)) | || | | || true | |$l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestFailedException with correct message and stack depth when is used to check l1.exists(_ > 3)") { val e = intercept[TestFailedException] { assert(l1.exists(_ > 3)) } e.message should be ( Some( s""" | |assert(l1.exists(_ > 3)) | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check l1.exists(3 < _)") { val e = intercept[TestFailedException] { assert(l1.exists(3 < _)) } e.message should be ( Some( s""" | |assert(l1.exists(3 < _)) | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check l3.exists(_.isEmpty)") { val e = intercept[TestFailedException] { assert(l3.exists(_.isEmpty)) } e.message should be ( Some( s""" | |assert(l3.exists(_.isEmpty)) | | | | | false | $l3Str |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check l3.exists(false)") { val e = intercept[TestFailedException] { assert(ci1.exists(321)) } e.message should be ( Some( """ | |assert(ci1.exists(321)) | | | | | 123 false 321 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } 
it("should do nothing when used to check woof { meow(y = 5) } == \\"woof\\" ") { assert(woof { meow(y = 5) } == "woof") } it("should throw TestFailedException with correct message and stack depth when is used to check woof { meow(y = 5) } == \\"meow\\"") { val e = intercept[TestFailedException] { assert(woof { meow(y = 5) } == "meow") } e.message should be ( Some( """ | |assert(woof { meow(y = 5) } == "meow") | | | | | "woof" | "meow" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when used to check multiline assert((b == a + 2) && (b - 2 <= a)) ") { assert((b == a + 2) && (b - 2 <= a)) } // SKIP-DOTTY-START // problem with quotes it("should throw friend message when used to check multiline assert((b == a + 2) && (b - 1 <= a))") { val e = intercept[TestFailedException] { assert((b == a + 2) && (b - 1 <= a)) } e.message shouldBe Some("5 equaled 5, but 4 was not less than or equal to 3") e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 5)) } // SKIP-DOTTY-END it("should do nothing when a block of code that evaluates to true is passed in") { assert { val a = 1 val b = 2 val c = a + b a < b || c == a + b } } it("should throw TestFailedException with correct message and stack depth when a block of code that evaluates to false is passed") { val e = intercept[TestFailedException] { assert { val a = 1; val b = 2; val c = a + b; a > b || c == b * b } } e.message should be ( Some( """ | |assert { val a = 1; val b = 2; val c = a + b; a > b || c == b * b } | | | | | | | | | | | 1 | 2 | 3 | 2 4 2 | | | false | | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } // SKIP-DOTTY-START // different code show in Dotty it("should fallback to BooleanMacro when a block of code > 1 line is passed in ") { val e = 
intercept[TestFailedException] { assert { val a = 1 val b = 2 val c = a + b a > b || c == b * b } } e.message should be ( Some( """{ | val a: Int = 1; | val b: Int = 2; | val c: Int = a.+(b); | a.>(b).||(c.==(b.*(b))) |} was false""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 17)) } // SKIP-DOTTY-END // SKIP-SCALATESTJS,NATIVE-START it("should do nothing when used to check <person>Dude</person> == <person>Dude</person>") { assert(<person>Dude</person> == <person>Dude</person>) } it("should throw TestFailedException with correct message and stack depth when is used to check <person>Dude</person> == <person>Mary</person>") { val e = intercept[TestFailedException] { assert(<person>Dude</person> == <person>Mary</person>) } e.message should be ( Some( """ | |assert(<person>Dude</person> == <person>Mary</person>) | | | | | | | <person>Mary</person> | | false | <person>Dude</person> |""".stripMargin ) ) } // SKIP-SCALATESTJS,NATIVE-END it("should compile when used with org == xxx that shadow org.scalactic ") { assertCompiles( """ |val org = "test" |assert(org == "test") """.stripMargin) } it("should compile when used with org === xxx that shadow org.scalactic") { assertCompiles( """ |val org = "test" |assert(org === "test") """.stripMargin) } // SKIP-DOTTY-START it("should compile when used with org === xxx with TypeCheckedTripleEquals that shadow org.scalactic") { assertCompiles( """ |class TestSpec extends AnyFunSpec with org.scalactic.TypeCheckedTripleEquals { | it("testing here") { | val org = "test" | assert(org === "test") | } |} """.stripMargin) } // SKIP-DOTTY-END it("should compile when used with org.aCustomMethod that shadow org.scalactic") { assertCompiles( """ |class Test { | def aCustomMethod: Boolean = true |} |val org = new Test |assert(org.aCustomMethod) """.stripMargin) } it("should compile when used with !org that shadow org.scalactic") { assertCompiles( """ |val org = false 
|assert(!org) """.stripMargin) } it("should compile when used with org.isEmpty that shadow org.scalactic") { assertCompiles( """ |val org = "" |assert(org.isEmpty) """.stripMargin) } it("should compile when used with org.isInstanceOf that shadow org.scalactic") { assertCompiles( """ |val org = "" |assert(org.isInstanceOf[String]) """.stripMargin) } it("should compile when used with org.size == 0 that shadow org.scalactic") { assertCompiles( """ |val org = Array.empty[String] |assert(org.size == 0) """.stripMargin) } it("should compile when used with org.length == 0 that shadow org.scalactic") { assertCompiles( """ |val org = "" |assert(org.length == 0) """.stripMargin) } it("should compile when used with org.exists(_ == 'b') that shadow org.scalactic ") { assertCompiles( """ |val org = "abc" |assert(org.exists(_ == 'b')) """.stripMargin) } it("should do nothing when is used to check new String(\\"test\\") != \\"test\\"") { assert(new String("test") == "test") } it("should throw TestFailedException with correct message and stack depth when is used to check new String(\\"test\\") != \\"testing\\"") { val e = intercept[TestFailedException] { assert(new String("test") == "testing") } e.message should be ( Some( """ | |assert(new String("test") == "testing") | | | | | "test" | "testing" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should compile when used with Java static method") { assertCompiles( """ |assert(System.currentTimeMillis() > 0) """.stripMargin) assertCompiles( """ |assert(java.math.BigInteger.ZERO == java.math.BigInteger.ZERO) """.stripMargin) } } describe("The assert(boolean, clue) method") { it("should do nothing when is used to check a == 3") { assert(a == 3, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check a == 5") { val e = intercept[TestFailedException] { assert(a == 5, "this is a clue") 
} e.message should be ( Some( """this is a clue | |assert(a == 5, "this is a clue") | | | | | 3 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 5 == b") { assert(5 == b, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check 3 == b") { val e = intercept[TestFailedException] { assert(3 == b, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(3 == b, "this is a clue") | | | | | 3 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a != 5") { assert(a != 5, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check a != 3") { val e = intercept[TestFailedException] { assert(a != 3, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(a != 3, "this is a clue") | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 3 != b") { assert(3 != b, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check 5 != b") { val e = intercept[TestFailedException] { assert(5 != b, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(5 != b, "this is a clue") | | | | | 5 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 3 == 3") { assert(3 == 3, "this is a clue") } // SKIP-DOTTY-START // const-folding will eliminate the expr it("should throw TestFailedException with message that contains the 
original code and correct stack depth when is used to check 3 == 5") { // This is because the compiler simply pass the false boolean literal // to the macro, can't find a way to get the 3 == 5 literal. val e1 = intercept[TestFailedException] { assert(3 == 5, "this is a clue") } e1.message should be ( Some( """this is a clue | |assert(3 == 5, "this is a clue") | | | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } // SKIP-DOTTY-END it("should throw TestFailedException with correct message and stack depth when is used to check a == b") { val e = intercept[TestFailedException] { assert(a == b, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(a == b, "this is a clue") | | | | | 3 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check a == 0") { val e = intercept[TestFailedException] { assert(a == 0, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(a == 0, "this is a clue") | | | | | 3 | 0 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check 0 == a") { val e = intercept[TestFailedException] { assert(0 == a, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(0 == a, "this is a clue") | | | | | 0 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check 3 != a") { val e = intercept[TestFailedException] { assert(3 != a, "this is a clue") } e.message should be ( Some( 
"""this is a clue | |assert(3 != a, "this is a clue") | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 5 != a") { assert(5 != a, "this is a clue") } it("should do nothing when is used to check a > 2") { assert(a > 2, "this is a clue") } it("should do nothing when is used to check 5 > a") { assert(5 > a, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check a > 3") { val e = intercept[TestFailedException] { assert(a > 3, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(a > 3, "this is a clue") | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check 3 > a") { val e = intercept[TestFailedException] { assert(3 > a, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(3 > a, "this is a clue") | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a >= 3") { assert(a >= 3, "this is a clue") } it("should do nothing when is used to check 3 >= a") { assert(3 >= a, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check a >= 4") { val e = intercept[TestFailedException] { assert(a >= 4, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(a >= 4, "this is a clue") | | | | | 3 | 4 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used 
to check 2 >= a") { val e = intercept[TestFailedException] { assert(2 >= a, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(2 >= a, "this is a clue") | | | | | 2 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check b < 6") { assert(b < 6, "this is a clue") } it("should do nothing when is used to check 3 < b") { assert(3 < b, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check b < 5") { val e = intercept[TestFailedException] { assert(b < 5, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(b < 5, "this is a clue") | | | | | 5 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check 5 < b") { val e = intercept[TestFailedException] { assert(5 < b, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(5 < b, "this is a clue") | | | | | 5 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check b <= 5") { assert(b <= 5, "this is a clue") } it("should do nothing when is used to check 5 <= b") { assert(5 <= b, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check b <= 4") { val e = intercept[TestFailedException] { assert(b <= 4, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(b <= 4, "this is a clue") | | | | | 5 | 4 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct 
message and stack depth when is used to check 6 <= b") { val e = intercept[TestFailedException] { assert(6 <= b, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(6 <= b, "this is a clue") | | | | | 6 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check bob == \\"bob\\"") { assert(bob == "bob", "this is a clue") } it("should do nothing when is used to check bob != \\"alice\\"") { assert(bob != "alice", "this is a clue") } it("should do nothing when is used to check alice == \\"alice\\"") { assert(alice == "alice", "this is a clue") } it("should do nothing when is used to check alice != \\"bob\\"") { assert(alice != "bob", "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check bob == \\"alice\\"") { val e = intercept[TestFailedException] { assert(bob == "alice", "this is a clue") } e.message should be ( Some( """this is a clue | |assert(bob == "alice", "this is a clue") | | | | | | | "alice" | | false | "bob" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestFailedException with correct message and stack depth when is used to check bob != \\"bob\\"") { val e = intercept[TestFailedException] { assert(bob != "bob", "this is a clue") } e.message should be ( Some( """this is a clue | |assert(bob != "bob", "this is a clue") | | | | | | | "bob" | | false | "bob" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestFailedException with correct message and stack depth when is used to check alice == \\"bob\\"") { val e = intercept[TestFailedException] { assert(alice == "bob", "this is a clue") } e.message should be ( Some( """this is a clue | |assert(alice 
== "bob", "this is a clue") | | | | | | | "bob" | | false | "alice" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestFailedException with correct message and stack depth when is used to check alice != \\"alice\\"") { val e = intercept[TestFailedException] { assert(alice != "alice", "this is a clue") } e.message should be ( Some( """this is a clue | |assert(alice != "alice", "this is a clue") | | | | | | | "alice" | | false | "alice" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check a === 3") { assert(a === 3, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check a === 5 ") { val e = intercept[TestFailedException] { assert(a === 5, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(a === 5, "this is a clue") | | | | | 3 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 3 === a") { assert(3 === a, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check 5 === a") { val e = intercept[TestFailedException] { assert(5 === a, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(5 === a, "this is a clue") | | | | | 5 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a !== 5") { assert(a !== 5, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check a !== 3") { val e = intercept[TestFailedException] { assert(a !== 3, "this is a clue") } e.message should 
be ( Some( """this is a clue | |assert(a !== 3, "this is a clue") | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 5 !== a") { assert(5 !== a, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check 3 !== a") { val e = intercept[TestFailedException] { assert(3 !== a, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(3 !== a, "this is a clue") | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a == 3 && b == 5") { assert(a == 3 && b == 5, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check a == 3 && b == 6") { val e = intercept[TestFailedException] { assert(a == 3 && b == 6, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(a == 3 && b == 6, "this is a clue") | | | | | | | | | 3 | 3 | 5 | 6 | true | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestFailedException with correct message and stack depth when is used to check a == 2 && b == 5") { val e = intercept[TestFailedException] { assert(a == 2 && b == 5, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(a == 2 && b == 5, "this is a clue") | | | | | 3 | 2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check a == 2 && b == 6") { val e = intercept[TestFailedException] { assert(a == 2 && b == 6, "this is a clue") } e.message 
should be ( Some( """this is a clue | |assert(a == 2 && b == 6, "this is a clue") | | | | | 3 | 2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a == 3 & b == 5") { assert(a == 3 & b == 5, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check a == 3 & b == 6") { val e = intercept[TestFailedException] { assert(a == 3 & b == 6, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(a == 3 & b == 6, "this is a clue") | | | | | | | | | 3 | 3 | 5 | 6 | true | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestFailedException with correct message and stack depth when is used to check a == 2 & b == 5") { val e = intercept[TestFailedException] { assert(a == 2 & b == 5, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(a == 2 & b == 5, "this is a clue") | | | | | 3 | 2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check a == 2 & b == 6") { val e = intercept[TestFailedException] { assert(a == 2 & b == 6, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(a == 2 & b == 6, "this is a clue") | | | | | 3 | 2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a == 3 || b == 5") { assert(a == 3 || b == 5, "this is a clue") } it("should do nothing when is used to check a == 3 || b == 6") { assert(a == 3 || b == 6, "this is a clue") } it("should do nothing when is used to check a == 2 || b == 
5") { assert(a == 2 || b == 5, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check a == 2 || b == 6") { val e = intercept[TestFailedException] { assert(a == 2 || b == 6, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(a == 2 || b == 6, "this is a clue") | | | | | | | | | 3 | 2 | 5 | 6 | | | false | | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check a == 3 | b == 5") { assert(a == 3 | b == 5, "this is a clue") } it("should do nothing when is used to check a == 3 | b == 6") { assert(a == 3 | b == 6, "this is a clue") } it("should do nothing when is used to check a == 2 | b == 5") { assert(a == 2 | b == 5, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check a == 2 | b == 6") { val e = intercept[TestFailedException] { assert(a == 2 | b == 6, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(a == 2 | b == 6, "this is a clue") | | | | | | | | | 3 | 2 | 5 | 6 | | | false | | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check a == 3 && (b == 5 && b > 3)") { assert(a == 3 && (b == 5 && b > 3), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check a == 3 && (b == 5 && b > 5)") { val e = intercept[TestFailedException] { assert(a == 3 && (b == 5 && b > 5), "this is a clue") } e.message should be ( Some( """this is a clue | |assert(a == 3 && (b == 5 && b > 5), "this is a clue") | | | | | | | | | | | | | 3 | 3 | 5 | 5 | 5 | 5 | true false true | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be 
(Some(thisLineNumber - 15)) } it("should do nothing when is used to check !(a == 5)") { assert(!(a == 5), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check !(a == 3)") { val e = intercept[TestFailedException] { assert(!(a == 3), "this is a clue") } e.message should be ( Some( """this is a clue | |assert(!(a == 3), "this is a clue") | | | | | | | 3 | 3 | | true | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestFailedException with correct message and stack depth when is used to check a == 3 && !(b == 5)") { val e = intercept[TestFailedException] { assert(a == 3 && !(b == 5), "this is a clue") } e.message should be ( Some( """this is a clue | |assert(a == 3 && !(b == 5), "this is a clue") | | | | | | | | | | 3 | 3 | | 5 | 5 | true | | true | | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check (a == 3) == (b == 5)") { assert((a == 3) == (b == 5), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check (a == 3) == (b != 5)") { val e = intercept[TestFailedException] { assert((a == 3) == (b != 5), "this is a clue") } e.message should be ( Some( """this is a clue | |assert((a == 3) == (b != 5), "this is a clue") | | | | | | | | | 3 | 3 | 5 | 5 | true false false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should short-circuit && when first condition was false") { val s = new Stateful intercept[TestFailedException] { assert(a == 5 && s.changeState, "this is a clue") } s.state should be (false) } it("should short-circuit & when first condition was false") { val s = new Stateful intercept[TestFailedException] { 
assert(a == 5 & s.changeState, "this is a clue") } s.state should be (false) } it("should short-circuit || when first condition was true") { val s = new Stateful assert(a == 3 || s.changeState, "this is a clue") s.state should be (false) } it("should short-circuit | when first condition was true") { val s = new Stateful assert(a == 3 | s.changeState, "this is a clue") s.state should be (false) } it("should do nothing when it is used to check a == 3 && { println(\\"hi\\"); b == 5} ") { assert(a == 3 && { println("hi"); b == 5}, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is usesd to check a == 3 && { println(\\"hi\\"); b == 3}") { val e = intercept[TestFailedException] { assert(a == 3 && { println("hi"); b == 3}, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(a == 3 && { println("hi"); b == 3}, "this is a clue") | | | | | | | | | 3 | 3 false 5 | 3 | true false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when it is used to check { println(\\"hi\\"); b == 5} && a == 3") { assert({ println("hi"); b == 5} && a == 3, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is usesd to check { println(\\"hi\\"); b == 5} && a == 5") { val e = intercept[TestFailedException] { assert({ println("hi"); b == 5} && a == 5, "this is a clue") } e.message should be ( Some( """this is a clue | |assert({ println("hi"); b == 5} && a == 5, "this is a clue") | | | | | | | | | 5 | 5 | 3 | 5 | true | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should preserve side effects when Apply with single argument is passed in") { assert(neverRuns1(sys.error("Sad times 1")), "this is a clue") } it("should preserve side effects when Apply with 2 argument list is 
passed in") { assert(neverRuns2(sys.error("Sad times 2"))(0), "this is a clue") } it("should preserve side effects when typed Apply with 2 argument list is passed in") { assert(neverRuns3(sys.error("Sad times 3"))(0), "this is a clue") } it("should do nothing when is used to check s1 startsWith \\"hi\\"") { assert(s1 startsWith "hi", "this is a clue") assert(s1.startsWith("hi"), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check s2 startsWith \\"hi\\"") { val e1 = intercept[TestFailedException] { assert(s2 startsWith "hi", "this is a clue") } e1.message should be ( Some( """this is a clue | |assert(s2 startsWith "hi", "this is a clue") | | | | | | false "hi" | "ScalaTest hi" |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestFailedException] { assert(s2.startsWith("hi"), "this is a clue") } e2.message should be ( Some( """this is a clue | |assert(s2.startsWith("hi"), "this is a clue") | | | | | | false "hi" | "ScalaTest hi" |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check ci1 startsWith 1") { assert(ci1 startsWith 1, "this is a clue") assert(ci1.startsWith(1), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check ci2 startsWith 1") { val e1 = intercept[TestFailedException] { assert(ci2 startsWith 1, "this is a clue") } e1.message should be ( Some( """this is a clue | |assert(ci2 startsWith 1, "this is a clue") | | | | | 321 false 1 |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 13)) val e2 = intercept[TestFailedException] { assert(ci2.startsWith(1), "this is a clue") } e2.message should be ( Some( """this is a clue | 
|assert(ci2.startsWith(1), "this is a clue") | | | | | 321 false 1 |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } it("should do nothing when is used to check !s2.startsWith(\\"hi\\")") { assert(!s2.startsWith("hi"), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check !s1.startsWith(\\"hi\\")") { val e1 = intercept[TestFailedException] { assert(!s1.startsWith("hi"), "this is a clue") } e1.message should be ( Some( """this is a clue | |assert(!s1.startsWith("hi"), "this is a clue") | || | | | || true "hi" | |"hi ScalaTest" | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check s2 endsWith \\"hi\\"") { assert(s2 endsWith "hi", "this is a clue") assert(s2.endsWith("hi"), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check s1 endsWith \\"hi\\"") { val e1 = intercept[TestFailedException] { assert(s1 endsWith "hi", "this is a clue") } e1.message should be ( Some( """this is a clue | |assert(s1 endsWith "hi", "this is a clue") | | | | | | false "hi" | "hi ScalaTest" |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestFailedException] { assert(s1.endsWith("hi"), "this is a clue") } e2.message should be ( Some( """this is a clue | |assert(s1.endsWith("hi"), "this is a clue") | | | | | | false "hi" | "hi ScalaTest" |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check ci2 endsWith 1") { assert(ci2 endsWith 1, "this is a clue") assert(ci2.endsWith(1), "this is a clue") } it("should throw TestFailedException with 
correct message and stack depth when is used to check ci1 endsWith 1") { val e1 = intercept[TestFailedException] { assert(ci1 endsWith 1, "this is a clue") } e1.message should be ( Some( """this is a clue | |assert(ci1 endsWith 1, "this is a clue") | | | | | 123 false 1 |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 13)) val e2 = intercept[TestFailedException] { assert(ci1.endsWith(1), "this is a clue") } e2.message should be ( Some( """this is a clue | |assert(ci1.endsWith(1), "this is a clue") | | | | | 123 false 1 |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } it("should do nothing when is used to check !s1.endsWith(\\"hi\\")") { assert(!s1.endsWith("hi"), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check !s2.endsWith(\\"hi\\")") { val e1 = intercept[TestFailedException] { assert(!s2.endsWith("hi"), "this is a clue") } e1.message should be ( Some( """this is a clue | |assert(!s2.endsWith("hi"), "this is a clue") | || | | | || true "hi" | |"ScalaTest hi" | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check s3 contains \\"hi\\"") { assert(s3 contains "hi", "this is a clue") assert(s3.contains("hi"), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check s3 contains \\"hello\\"") { val e1 = intercept[TestFailedException] { assert(s3 contains "hello", "this is a clue") } e1.message should be ( Some( """this is a clue | |assert(s3 contains "hello", "this is a clue") | | | | | | false "hello" | "Say hi to ScalaTest" |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val 
e2 = intercept[TestFailedException] { assert(s3.contains("hello"), "this is a clue") } e2.message should be ( Some( """this is a clue | |assert(s3.contains("hello"), "this is a clue") | | | | | | false "hello" | "Say hi to ScalaTest" |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check ci2 contains 2") { assert(ci2 contains 2, "this is a clue") assert(ci2.contains(2), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check ci1 contains 5") { val e1 = intercept[TestFailedException] { assert(ci1 contains 5, "this is a clue") } e1.message should be ( Some( """this is a clue | |assert(ci1 contains 5, "this is a clue") | | | | | 123 false 5 |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 13)) val e2 = intercept[TestFailedException] { assert(ci1.contains(5), "this is a clue") } e2.message should be ( Some( """this is a clue | |assert(ci1.contains(5), "this is a clue") | | | | | 123 false 5 |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } it("should do nothing when is used to check !s1.contains(\\"hello\\")") { assert(!s3.contains("hello"), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check !s3.contains(\\"hi\\")") { val e1 = intercept[TestFailedException] { assert(!s3.contains("hi"), "this is a clue") } e1.message should be ( Some( """this is a clue | |assert(!s3.contains("hi"), "this is a clue") | || | | | || true "hi" | |"Say hi to ScalaTest" | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check l1 contains 2") { assert(l1 contains 
2, "this is a clue") assert(l1.contains(2), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check l1 contains 5") { val e1 = intercept[TestFailedException] { assert(l1 contains 5, "this is a clue") } e1.message should be ( Some( """this is a clue | |assert(l1 contains 5, "this is a clue") | | | | | | false 5 | List(1, 2, 3) |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestFailedException] { assert(l1.contains(5), "this is a clue") } e2.message should be ( Some( """this is a clue | |assert(l1.contains(5), "this is a clue") | | | | | | false 5 | List(1, 2, 3) |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !(l1 contains 5)") { assert(!(l1 contains 5), "this is a clue") assert(!l1.contains(5), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check !(l1 contains 2)") { val e1 = intercept[TestFailedException] { assert(!(l1 contains 2), "this is a clue") } e1.message should be ( Some( """this is a clue | |assert(!(l1 contains 2), "this is a clue") | | | | | | | | true 2 | | List(1, 2, 3) | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) val e2 = intercept[TestFailedException] { assert(!l1.contains(2), "this is a clue") } e2.message should be ( Some( """this is a clue | |assert(!l1.contains(2), "this is a clue") | || | | | || true 2 | |List(1, 2, 3) | false |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check m1 contains 2") { assert(m1 contains 2, "this is a clue") assert(m1.contains(2), "this is a clue") } 
// NOTE(review): Diagrammed-assertion test cases. Each failing `assert(expr, "this is a clue")`
// is expected to throw TestFailedException whose message embeds the clue plus an ASCII
// value-diagram of the expression, and whose reported source position is verified via
// `thisLineNumber - N`. Both the diagram strings and the `- N` offsets depend on the exact
// physical line layout of this file — do NOT reflow, reformat, or insert code lines here.
// Fixtures m1, ct1, ci1/ci2/ci3, s3/s4, l1/l2 and the m1Str/ct1Str/ci*Str renderings are
// declared earlier in the file (outside this excerpt) — presumably a Map, a custom container,
// custom-equality int wrappers, strings and lists; confirm against the fixture declarations.
it("should throw TestFailedException with correct message and stack depth when is used to check m1 contains 5") { val e1 = intercept[TestFailedException] { assert(m1 contains 5, "this is a clue") } e1.message should be ( Some( s"""this is a clue | |assert(m1 contains 5, "this is a clue") | | | | | | false 5 | $m1Str |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestFailedException] { assert(m1.contains(5), "this is a clue") } e2.message should be ( Some( s"""this is a clue | |assert(m1.contains(5), "this is a clue") | | | | | | false 5 | $m1Str |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !(m1 contains 5)") { assert(!(m1 contains 5), "this is a clue") assert(!m1.contains(5), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check !(m1 contains 2)") { val e1 = intercept[TestFailedException] { assert(!(m1 contains 2), "this is a clue") } e1.message should be ( Some( s"""this is a clue | |assert(!(m1 contains 2), "this is a clue") | | | | | | | | true 2 | | $m1Str | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) val e2 = intercept[TestFailedException] { assert(!m1.contains(2), "this is a clue") } e2.message should be ( Some( s"""this is a clue | |assert(!m1.contains(2), "this is a clue") | || | | | || true 2 | |$m1Str | false |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 15)) }
// ct1: `contains` on a custom container (both infix and dotted call forms are exercised).
it("should do nothing when is used to check ct1 contains 8") { assert(ct1 contains 8, "this is a clue") assert(ct1.contains(8), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is 
used to check ct1 contains 5") { val e1 = intercept[TestFailedException] { assert(ct1 contains 5, "this is a clue") } e1.message should be ( Some( s"""this is a clue | |assert(ct1 contains 5, "this is a clue") | | | | | | false 5 | $ct1Str |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestFailedException] { assert(ct1.contains(5), "this is a clue") } e2.message should be ( Some( s"""this is a clue | |assert(ct1.contains(5), "this is a clue") | | | | | | false 5 | $ct1Str |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !ct1.contains(5)") { assert(!ct1.contains(5), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check !ct1.contains(8)") { val e1 = intercept[TestFailedException] { assert(!ct1.contains(8), "this is a clue") } e1.message should be ( Some( s"""this is a clue | |assert(!ct1.contains(8), "this is a clue") | || | | | || true 8 | |$ct1Str | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) }
// ci1/ci2/ci3: reference-identity checks via `eq` — ci1 and ci3 presumably alias the same
// instance while ci2 is a distinct object; confirm against the fixture declarations above.
it("should do nothing when is used to check ci1 eq ci3") { assert(ci1 eq ci3, "this is a clue") assert(ci1.eq(ci3), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check ci1 eq ci2") { val e1 = intercept[TestFailedException] { assert(ci1 eq ci2, "this is a clue") } e1.message should be ( Some( s"""this is a clue | |assert(ci1 eq ci2, "this is a clue") | | | | | $ci1Str | $ci2Str | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestFailedException] { assert(ci1.eq(ci2), "this is a clue") } e2.message should be ( Some( 
s"""this is a clue | |assert(ci1.eq(ci2), "this is a clue") | | | | | $ci1Str | $ci2Str | false |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !ci1.eq(ci2)") { assert(!ci1.eq(ci2), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check !ci1.eq(ci3)") { val e = intercept[TestFailedException] { assert(!ci1.eq(ci3), "this is a clue") } e.message should be ( Some( s"""this is a clue | |assert(!ci1.eq(ci3), "this is a clue") | || | | | |$ci1Str | $ci3Str | | true | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) }
// `ne` is the negation of reference identity; same fixture assumptions as the `eq` cases.
it("should do nothing when is used to check ci1 ne ci2") { assert(ci1 ne ci2, "this is a clue") assert(ci1.ne(ci2), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check ci1 ne ci3") { val e1 = intercept[TestFailedException] { assert(ci1 ne ci3, "this is a clue") } e1.message should be ( Some( s"""this is a clue | |assert(ci1 ne ci3, "this is a clue") | | | | | $ci1Str | $ci3Str | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestFailedException] { assert(ci1.ne(ci3), "this is a clue") } e2.message should be ( Some( s"""this is a clue | |assert(ci1.ne(ci3), "this is a clue") | | | | | $ci1Str | $ci3Str | false |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !ci1.ne(ci3)") { assert(!ci1.ne(ci3), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check !ci1.ne(ci2)") { val e = intercept[TestFailedException] { assert(!ci1.ne(ci2), 
"this is a clue") } e.message should be ( Some( """this is a clue | |assert(!ci1.ne(ci2), "this is a clue") | || | | | |123 | 321 | | true | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) }
// isEmpty on strings: s4 is presumably "" and s3 is "Say hi to ScalaTest" per the diagrams.
it("should do nothing when is used to check s4.isEmpty") { assert(s4.isEmpty, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check s3.isEmpty") { val e = intercept[TestFailedException] { assert(s3.isEmpty, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(s3.isEmpty, "this is a clue") | | | | | false | "Say hi to ScalaTest" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !s3.isEmpty") { assert(!s3.isEmpty, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check !s4.isEmpty") { val e = intercept[TestFailedException] { assert(!s4.isEmpty, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(!s4.isEmpty, "this is a clue") | || | | |"" true | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) }
// isEmpty on lists: l2 is presumably the empty list, l1 non-empty (interpolated as $l1).
it("should do nothing when is used to check l2.isEmpty") { assert(l2.isEmpty, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check l1.isEmpty") { val e = intercept[TestFailedException] { assert(l1.isEmpty, "this is a clue") } e.message should be ( Some( s"""this is a clue | |assert(l1.isEmpty, "this is a clue") | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !l1.isEmpty") { assert(!l1.isEmpty, "this is a clue") } 
it("should throw TestFailedException with correct message and stack depth when is used to check !l2.isEmpty") { val e = intercept[TestFailedException] { assert(!l2.isEmpty, "this is a clue") } e.message should be ( Some( s"""this is a clue | |assert(!l2.isEmpty, "this is a clue") | || | | || true | |$l2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check s1.isInstanceOf[String]") { assert(s1.isInstanceOf[String], "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check l1.isInstanceOf[String]") { val e = intercept[TestFailedException] { assert(l1.isInstanceOf[String], "this is a clue") } e.message should be ( Some( s"""this is a clue | |assert(l1.isInstanceOf[String], "this is a clue") | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check l1.isInstanceOf[List[Int]]") { assert(l1.isInstanceOf[List[Int]], "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check s1.isInstanceOf[List[Int]]") { val e = intercept[TestFailedException] { assert(s1.isInstanceOf[List[Int]], "this is a clue") } e.message should be ( Some( """this is a clue | |assert(s1.isInstanceOf[List[Int]], "this is a clue") | | | | | false | "hi ScalaTest" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check date.isInstanceOf[Date]") { assert(date.isInstanceOf[Date], "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check l1.isInstanceOf[Date]") { val e = intercept[TestFailedException] { assert(l1.isInstanceOf[Date], "this is a clue") } 
e.message should be ( Some( s"""this is a clue | |assert(l1.isInstanceOf[Date], "this is a clue") | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !l1.isInstanceOf[String]") { assert(!l1.isInstanceOf[String], "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check !s1.isInstanceOf[String]") { val e = intercept[TestFailedException] { assert(!s1.isInstanceOf[String], "this is a clue") } e.message should be ( Some( """this is a clue | |assert(!s1.isInstanceOf[String], "this is a clue") | || | | || true | |"hi ScalaTest" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check !s1.isInstanceOf[List[Int]]") { assert(!s1.isInstanceOf[List[Int]], "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check !l1.isInstanceOf[List[Int]]") { val e = intercept[TestFailedException] { assert(!l1.isInstanceOf[List[Int]], "this is a clue") } e.message should be ( Some( s"""this is a clue | |assert(!l1.isInstanceOf[List[Int]], "this is a clue") | || | | || true | |$l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check !l1.isInstanceOf[Date]") { assert(!l1.isInstanceOf[Date], "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check !date.isInstanceOf[Date]") { val e = intercept[TestFailedException] { assert(!date.isInstanceOf[Date], "this is a clue") } e.message should be ( Some( s"""this is a clue | |assert(!date.isInstanceOf[Date], "this is a clue") | || | | || true | |$date | false |""".stripMargin ) ) 
e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check s1.length == 9") { assert(s1.length == 12, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check s1.length == 10") { val e = intercept[TestFailedException] { assert(s1.length == 10, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(s1.length == 10, "this is a clue") | | | | | | | 12 | 10 | | false | "hi ScalaTest" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check l1.length == 3") { assert(l1.length == 3, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check l1.length == 10") { val e = intercept[TestFailedException] { assert(l1.length == 10, "this is a clue") } e.message should be ( Some( s"""this is a clue | |assert(l1.length == 10, "this is a clue") | | | | | | | 3 | 10 | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check !(s1.length == 10)") { assert(!(s1.length == 10), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check !(s1.length == 9)") { val e = intercept[TestFailedException] { assert(!(s1.length == 12), "this is a clue") } e.message should be ( Some( """this is a clue | |assert(!(s1.length == 12), "this is a clue") | | | | | | | | | 12 | 12 | | | true | | "hi ScalaTest" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check !(l1.length == 2)") { assert(!(l1.length == 2), "this is a clue") } it("should throw 
TestFailedException with correct message and stack depth when is used to check !(l1.length == 9)") { val e = intercept[TestFailedException] { assert(!(l1.length == 3), "this is a clue") } e.message should be ( Some( s"""this is a clue | |assert(!(l1.length == 3), "this is a clue") | | | | | | | | | 3 | 3 | | | true | | $l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check s1.size == 9") { assert(s1.size == 12, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check s1.size == 10") { val e = intercept[TestFailedException] { assert(s1.size == 10, "this is a clue") } e.message should be ( Some( """this is a clue | |assert(s1.size == 10, "this is a clue") | | | | | | | 12 | 10 | | false | "hi ScalaTest" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check l1.size == 3") { assert(l1.size == 3, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check l1.size == 10") { val e = intercept[TestFailedException] { assert(l1.size == 10, "this is a clue") } e.message should be ( Some( s"""this is a clue | |assert(l1.size == 10, "this is a clue") | | | | | | | 3 | 10 | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check !(s1.size == 10)") { assert(!(s1.size == 10), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check !(s1.size == 9)") { val e = intercept[TestFailedException] { assert(!(s1.size == 12), "this is a clue") } e.message should be ( Some( """this is a clue | |assert(!(s1.size == 12), "this is a clue") | 
| | | | | | | | 12 | 12 | | | true | | "hi ScalaTest" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check !(l1.size == 2)") { assert(!(l1.size == 2), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check !(l1.size == 9) ") { val e = intercept[TestFailedException] { assert(!(l1.size == 3), "this is a clue") } e.message should be ( Some( s"""this is a clue | |assert(!(l1.size == 3), "this is a clue") | | | | | | | | | 3 | 3 | | | true | | $l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check l1.exists(_ == 3)") { assert(l1.exists(_ == 3), "this is a clue") } it("should do nothing when is used to check l1.exists(3 == _)") { assert(l1.exists(3 == _), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check l1.exists(_ == 5) ") { val e = intercept[TestFailedException] { assert(l1.exists(_ == 5), "this is a clue") } e.message should be ( Some( s"""this is a clue | |assert(l1.exists(_ == 5), "this is a clue") | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check l1.exists(5 == _) ") { val e = intercept[TestFailedException] { assert(l1.exists(5 == _), "this is a clue") } e.message should be ( Some( s"""this is a clue | |assert(l1.exists(5 == _), "this is a clue") | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !l1.exists(_ == 5)") { assert(!l1.exists(_ 
== 5), "this is a clue") } it("should do nothing when is used to check !l1.exists(5 == _)") { assert(!l1.exists(5 == _), "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check !l1.exists(_ == 3)") { val e = intercept[TestFailedException] { assert(!l1.exists(_ == 3), "this is a clue") } e.message should be ( Some( s"""this is a clue | |assert(!l1.exists(_ == 3), "this is a clue") | || | | || true | |$l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestFailedException with correct message and stack depth when is used to check !l1.exists(3 == _)") { val e = intercept[TestFailedException] { assert(!l1.exists(3 == _), "this is a clue") } e.message should be ( Some( s"""this is a clue | |assert(!l1.exists(3 == _), "this is a clue") | || | | || true | |$l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestFailedException with correct message and stack depth when is used to check l1.exists(_ > 3)") { val e = intercept[TestFailedException] { assert(l1.exists(_ > 3), "this is a clue") } e.message should be ( Some( s"""this is a clue | |assert(l1.exists(_ > 3), "this is a clue") | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check l1.exists(3 < _)") { val e = intercept[TestFailedException] { assert(l1.exists(3 < _), "this is a clue") } e.message should be ( Some( s"""this is a clue | |assert(l1.exists(3 < _), "this is a clue") | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw 
TestFailedException with correct message and stack depth when is used to check l3.exists(_.isEmpty)") { val e = intercept[TestFailedException] { assert(l3.exists(_.isEmpty), "this is a clue") } e.message should be ( Some( s"""this is a clue | |assert(l3.exists(_.isEmpty), "this is a clue") | | | | | false | $l3Str |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestFailedException with correct message and stack depth when is used to check l3.exists(false)") { val e = intercept[TestFailedException] { assert(ci1.exists(321), "this is a clue") } e.message should be ( Some( """this is a clue | |assert(ci1.exists(321), "this is a clue") | | | | | 123 false 321 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } it("should do nothing when used to check woof { meow(y = 5) } == \\"woof\\"") { assert(woof { meow(y = 5) } == "woof", "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check woof { meow(y = 5) } == \\"meow\\"") { val e = intercept[TestFailedException] { assert(woof { meow(y = 5) } == "meow", "this is a clue") } e.message should be ( Some( """this is a clue | |assert(woof { meow(y = 5) } == "meow", "this is a clue") | | | | | "woof" | "meow" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when used to check multiline assert((b == a + 2) && (b - 2 <= a)) ") { assert((b == a + 2) && (b - 2 <= a), "this is a clue") } // SKIP-DOTTY-START // problem with quotes it("should throw friend message when used to check multiline assert((b == a + 2) && (b - 1 <= a))") { val e = intercept[TestFailedException] { assert((b == a + 2) && (b - 1 <= a), "this is a clue") } e.message shouldBe Some("5 equaled 5, but 4 was not less than 
or equal to 3 this is a clue") e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 5)) } // SKIP-DOTTY-END it("should do nothing when a block of code that evaluates to true is passed in") { assert({ val a = 1 val b = 2 val c = a + b a < b || c == a + b }, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when a block of code that evaluates to false is passed") { val e = intercept[TestFailedException] { assert({ val a = 1; val b = 2; val c = a + b; a > b || c == b * b }, "this is a clue") } e.message should be ( Some( """this is a clue | |assert({ val a = 1; val b = 2; val c = a + b; a > b || c == b * b }, "this is a clue") | | | | | | | | | | | 1 | 2 | 3 | 2 4 2 | | | false | | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } // SKIP-DOTTY-START // different code show in Dotty it("should fallback to BooleanMacro when a block of code > 1 line is passed in ") { val e = intercept[TestFailedException] { assert({ val a = 1 val b = 2 val c = a + b a > b || c == b * b }, "this is a clue") } e.message should be ( Some( """{ | val a: Int = 1; | val b: Int = 2; | val c: Int = a.+(b); | a.>(b).||(c.==(b.*(b))) |} was false this is a clue""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 17)) } // SKIP-DOTTY-END // SKIP-SCALATESTJS,NATIVE-START it("should do nothing when used to check <person>Dude</person> == <person>Dude</person>") { assert(<person>Dude</person> == <person>Dude</person>, "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check <person>Dude</person> == <person>Mary</person>") { val e = intercept[TestFailedException] { assert(<person>Dude</person> == <person>Mary</person>, "this is a clue") } e.message should be ( Some( """this is a clue | 
|assert(<person>Dude</person> == <person>Mary</person>, "this is a clue") | | | | | | | <person>Mary</person> | | false | <person>Dude</person> |""".stripMargin ) ) } // SKIP-SCALATESTJS,NATIVE-END it("should compile when used with org == xxx that shadow org.scalactic ") { assertCompiles( """ |val org = "test" |assert(org == "test", "this is a clue") """.stripMargin) } it("should compile when used with org === xxx that shadow org.scalactic") { assertCompiles( """ |val org = "test" |assert(org === "test", "this is a clue") """.stripMargin) } // SKIP-DOTTY-START it("should compile when used with org === xxx with TypeCheckedTripleEquals that shadow org.scalactic") { assertCompiles( """ |class TestSpec extends AnyFunSpec with org.scalactic.TypeCheckedTripleEquals { | it("testing here") { | val org = "test" | assert(org === "test", "this is a clue") | } |} """.stripMargin) } // SKIP-DOTTY-END it("should compile when used with org.aCustomMethod that shadow org.scalactic") { assertCompiles( """ |class Test { | def aCustomMethod: Boolean = true |} |val org = new Test |assert(org.aCustomMethod, "this is a clue") """.stripMargin) } it("should compile when used with !org that shadow org.scalactic") { assertCompiles( """ |val org = false |assert(!org, "this is a clue") """.stripMargin) } it("should compile when used with org.isEmpty that shadow org.scalactic") { assertCompiles( """ |val org = "" |assert(org.isEmpty, "this is a clue") """.stripMargin) } it("should compile when used with org.isInstanceOf that shadow org.scalactic") { assertCompiles( """ |val org = "" |assert(org.isInstanceOf[String], "this is a clue") """.stripMargin) } it("should compile when used with org.size == 0 that shadow org.scalactic") { assertCompiles( """ |val org = Array.empty[String] |assert(org.size == 0, "this is a clue") """.stripMargin) } it("should compile when used with org.length == 0 that shadow org.scalactic") { assertCompiles( """ |val org = "" |assert(org.length == 0, "this is a clue") 
""".stripMargin) } it("should compile when used with org.exists(_ == 'b') that shadow org.scalactic ") { assertCompiles( """ |val org = "abc" |assert(org.exists(_ == 'b'), "this is a clue") """.stripMargin) } it("should do nothing when is used to check new String(\\"test\\") != \\"test\\"") { assert(new String("test") == "test", "this is a clue") } it("should throw TestFailedException with correct message and stack depth when is used to check new String(\\"test\\") != \\"testing\\"") { val e = intercept[TestFailedException] { assert(new String("test") == "testing", "this is a clue") } e.message should be ( Some( """this is a clue | |assert(new String("test") == "testing", "this is a clue") | | | | | "test" | "testing" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should compile when used with Java static method") { assertCompiles( """ |assert(System.currentTimeMillis() > 0, "this is a clue") """.stripMargin) assertCompiles( """ |assert(java.math.BigDecimal.ZERO == java.math.BigDecimal.ZERO, "this is a clue") """.stripMargin) } } describe("The assume(boolean) method") { it("should do nothing when is used to check a == 3") { assume(a == 3) } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 5") { val e = intercept[TestCanceledException] { assume(a == 5) } e.message should be ( Some( """ | |assume(a == 5) | | | | | 3 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 5 == b") { assume(5 == b) } it("should throw TestCanceledException with correct message and stack depth when is used to check 3 == b") { val e = intercept[TestCanceledException] { assume(3 == b) } e.message should be ( Some( """ | |assume(3 == b) | | | | | 3 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be 
(Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a != 5") { assume(a != 5) } it("should throw TestCanceledException with correct message and stack depth when is used to check a != 3") { val e = intercept[TestCanceledException] { assume(a != 3) } e.message should be ( Some( """ | |assume(a != 3) | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 3 != b") { assume(3 != b) } it("should throw TestCanceledException with correct message and stack depth when is used to check 5 != b") { val e = intercept[TestCanceledException] { assume(5 != b) } e.message should be ( Some( """ | |assume(5 != b) | | | | | 5 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 3 == 3") { assume(3 == 3) } // SKIP-DOTTY-START // const-folding will eliminate the expr it("should throw TestCanceledException with message that contains the original code and correct stack depth when is used to check 3 == 5") { // This is because the compiler simply pass the false boolean literal // to the macro, can't find a way to get the 3 == 5 literal. 
val e1 = intercept[TestCanceledException] { assume(3 == 5) } e1.message should be ( Some( """ | |assume(3 == 5) | | | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } // SKIP-DOTTY-END it("should throw TestCanceledException with correct message and stack depth when is used to check a == b") { val e = intercept[TestCanceledException] { assume(a == b) } e.message should be ( Some( """ | |assume(a == b) | | | | | 3 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 0") { val e = intercept[TestCanceledException] { assume(a == 0) } e.message should be ( Some( """ | |assume(a == 0) | | | | | 3 | 0 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check 0 == a") { val e = intercept[TestCanceledException] { assume(0 == a) } e.message should be ( Some( """ | |assume(0 == a) | | | | | 0 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check 3 != a") { val e = intercept[TestCanceledException] { assume(3 != a) } e.message should be ( Some( """ | |assume(3 != a) | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 5 != a") { assume(5 != a) } it("should do nothing when is used to check a > 2") { assume(a > 2) } it("should do nothing when is used to check 5 > a") { assume(5 > a) } it("should 
throw TestCanceledException with correct message and stack depth when is used to check a > 3") { val e = intercept[TestCanceledException] { assume(a > 3) } e.message should be ( Some( """ | |assume(a > 3) | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check 3 > a") { val e = intercept[TestCanceledException] { assume(3 > a) } e.message should be ( Some( """ | |assume(3 > a) | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a >= 3") { assume(a >= 3) } it("should do nothing when is used to check 3 >= a") { assume(3 >= a) } it("should throw TestCanceledException with correct message and stack depth when is used to check a >= 4") { val e = intercept[TestCanceledException] { assume(a >= 4) } e.message should be ( Some( """ | |assume(a >= 4) | | | | | 3 | 4 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check 2 >= a") { val e = intercept[TestCanceledException] { assume(2 >= a) } e.message should be ( Some( """ | |assume(2 >= a) | | | | | 2 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check b < 6") { assume(b < 6) } it("should do nothing when is used to check 3 < b") { assume(3 < b) } it("should throw TestCanceledException with correct message and stack depth when is used to check b < 5") { val e = intercept[TestCanceledException] { assume(b < 5) } e.message should be ( Some( """ | |assume(b < 5) | | | | | 5 | 5 
| false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check 5 < b") { val e = intercept[TestCanceledException] { assume(5 < b) } e.message should be ( Some( """ | |assume(5 < b) | | | | | 5 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check b <= 5") { assume(b <= 5) } it("should do nothing when is used to check 5 <= b") { assume(5 <= b) } it("should throw TestCanceledException with correct message and stack depth when is used to check b <= 4") { val e = intercept[TestCanceledException] { assume(b <= 4) } e.message should be ( Some( """ | |assume(b <= 4) | | | | | 5 | 4 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check 6 <= b") { val e = intercept[TestCanceledException] { assume(6 <= b) } e.message should be ( Some( """ | |assume(6 <= b) | | | | | 6 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check bob == \\"bob\\"") { assume(bob == "bob") } it("should do nothing when is used to check bob != \\"alice\\"") { assume(bob != "alice") } it("should do nothing when is used to check alice == \\"alice\\"") { assume(alice == "alice") } it("should do nothing when is used to check alice != \\"bob\\"") { assume(alice != "bob") } it("should throw TestCanceledException with correct message and stack depth when is used to check bob == \\"alice\\"") { val e = intercept[TestCanceledException] { assume(bob == "alice") } e.message should be ( Some( """ | 
|assume(bob == "alice") | | | | | | | "alice" | | false | "bob" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestCanceledException with correct message and stack depth when is used to check bob != \\"bob\\"") { val e = intercept[TestCanceledException] { assume(bob != "bob") } e.message should be ( Some( """ | |assume(bob != "bob") | | | | | | | "bob" | | false | "bob" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestCanceledException with correct message and stack depth when is used to check alice == \\"bob\\"") { val e = intercept[TestCanceledException] { assume(alice == "bob") } e.message should be ( Some( """ | |assume(alice == "bob") | | | | | | | "bob" | | false | "alice" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestCanceledException with correct message and stack depth when is used to check alice != \\"alice\\"") { val e = intercept[TestCanceledException] { assume(alice != "alice") } e.message should be ( Some( """ | |assume(alice != "alice") | | | | | | | "alice" | | false | "alice" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check a === 3") { assume(a === 3) } it("should throw TestCanceledException with correct message and stack depth when is used to check a === 5 ") { val e = intercept[TestCanceledException] { assume(a === 5) } e.message should be ( Some( """ | |assume(a === 5) | | | | | 3 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 3 === a") { assume(3 === a) } it("should throw 
TestCanceledException with correct message and stack depth when is used to check 5 === a") { val e = intercept[TestCanceledException] { assume(5 === a) } e.message should be ( Some( """ | |assume(5 === a) | | | | | 5 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a !== 5") { assume(a !== 5) } it("should throw TestCanceledException with correct message and stack depth when is used to check a !== 3") { val e = intercept[TestCanceledException] { assume(a !== 3) } e.message should be ( Some( """ | |assume(a !== 3) | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 5 !== a") { assume(5 !== a) } it("should throw TestCanceledException with correct message and stack depth when is used to check 3 !== a") { val e = intercept[TestCanceledException] { assume(3 !== a) } e.message should be ( Some( """ | |assume(3 !== a) | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a == 3 && b == 5") { assume(a == 3 && b == 5) } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 3 && b == 6") { val e = intercept[TestCanceledException] { assume(a == 3 && b == 6) } e.message should be ( Some( """ | |assume(a == 3 && b == 6) | | | | | | | | | 3 | 3 | 5 | 6 | true | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 2 && b == 5") { val e = intercept[TestCanceledException] { assume(a == 2 && b == 5) } e.message should 
be ( Some( """ | |assume(a == 2 && b == 5) | | | | | 3 | 2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 2 && b == 6") { val e = intercept[TestCanceledException] { assume(a == 2 && b == 6) } e.message should be ( Some( """ | |assume(a == 2 && b == 6) | | | | | 3 | 2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a == 3 & b == 5") { assume(a == 3 & b == 5) } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 3 & b == 6") { val e = intercept[TestCanceledException] { assume(a == 3 & b == 6) } e.message should be ( Some( """ | |assume(a == 3 & b == 6) | | | | | | | | | 3 | 3 | 5 | 6 | true | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 2 & b == 5") { val e = intercept[TestCanceledException] { assume(a == 2 & b == 5) } e.message should be ( Some( """ | |assume(a == 2 & b == 5) | | | | | 3 | 2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 2 & b == 6") { val e = intercept[TestCanceledException] { assume(a == 2 & b == 6) } e.message should be ( Some( """ | |assume(a == 2 & b == 6) | | | | | 3 | 2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a == 3 || b == 5") { 
assume(a == 3 || b == 5) } it("should do nothing when is used to check a == 3 || b == 6") { assume(a == 3 || b == 6) } it("should do nothing when is used to check a == 2 || b == 5") { assume(a == 2 || b == 5) } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 2 || b == 6") { val e = intercept[TestCanceledException] { assume(a == 2 || b == 6) } e.message should be ( Some( """ | |assume(a == 2 || b == 6) | | | | | | | | | 3 | 2 | 5 | 6 | | | false | | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check a == 3 | b == 5") { assume(a == 3 | b == 5) } it("should do nothing when is used to check a == 3 | b == 6") { assume(a == 3 | b == 6) } it("should do nothing when is used to check a == 2 | b == 5") { assume(a == 2 | b == 5) } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 2 | b == 6") { val e = intercept[TestCanceledException] { assume(a == 2 | b == 6) } e.message should be ( Some( """ | |assume(a == 2 | b == 6) | | | | | | | | | 3 | 2 | 5 | 6 | | | false | | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check a == 3 && (b == 5 && b > 3)") { assume(a == 3 && (b == 5 && b > 3)) } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 3 && (b == 5 && b > 5)") { val e = intercept[TestCanceledException] { assume(a == 3 && (b == 5 && b > 5)) } e.message should be ( Some( """ | |assume(a == 3 && (b == 5 && b > 5)) | | | | | | | | | | | | | 3 | 3 | 5 | 5 | 5 | 5 | true false true | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing 
when is used to check !(a == 5)") { assume(!(a == 5)) } it("should throw TestCanceledException with correct message and stack depth when is used to check !(a == 3)") { val e = intercept[TestCanceledException] { assume(!(a == 3)) } e.message should be ( Some( """ | |assume(!(a == 3)) | | | | | | | 3 | 3 | | true | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 3 && !(b == 5)") { val e = intercept[TestCanceledException] { assume(a == 3 && !(b == 5)) } e.message should be ( Some( """ | |assume(a == 3 && !(b == 5)) | | | | | | | | | | 3 | 3 | | 5 | 5 | true | | true | | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check (a == 3) == (b == 5)") { assume((a == 3) == (b == 5)) } it("should throw TestCanceledException with correct message and stack depth when is used to check (a == 3) == (b != 5)") { val e = intercept[TestCanceledException] { assume((a == 3) == (b != 5)) } e.message should be ( Some( """ | |assume((a == 3) == (b != 5)) | | | | | | | | | 3 | 3 | 5 | 5 | true false false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should short-circuit && when first condition was false") { val s = new Stateful intercept[TestCanceledException] { assume(a == 5 && s.changeState) } s.state should be (false) } it("should short-circuit & when first condition was false") { val s = new Stateful intercept[TestCanceledException] { assume(a == 5 & s.changeState) } s.state should be (false) } it("should short-circuit || when first condition was true") { val s = new Stateful assume(a == 3 || s.changeState) s.state should be (false) } it("should short-circuit | when first 
condition was true") { val s = new Stateful assume(a == 3 | s.changeState) s.state should be (false) } it("should do nothing when it is used to check a == 3 && { println(\\"hi\\"); b == 5} ") { assume(a == 3 && { println("hi"); b == 5}) } it("should throw TestCanceledException with correct message and stack depth when is usesd to check a == 3 && { println(\\"hi\\"); b == 3}") { val e = intercept[TestCanceledException] { assume(a == 3 && { println("hi"); b == 3}) } e.message should be ( Some( """ | |assume(a == 3 && { println("hi"); b == 3}) | | | | | | | | | 3 | 3 false 5 | 3 | true false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when it is used to check { println(\\"hi\\"); b == 5} && a == 3") { assume({ println("hi"); b == 5} && a == 3) } it("should throw TestCanceledException with correct message and stack depth when is usesd to check { println(\\"hi\\"); b == 5} && a == 5") { val e = intercept[TestCanceledException] { assume({ println("hi"); b == 5} && a == 5) } e.message should be ( Some( """ | |assume({ println("hi"); b == 5} && a == 5) | | | | | | | | | 5 | 5 | 3 | 5 | true | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should preserve side effects when Apply with single argument is passed in") { assume(neverRuns1(sys.error("Sad times 1"))) } it("should preserve side effects when Apply with 2 argument list is passed in") { assume(neverRuns2(sys.error("Sad times 2"))(0)) } it("should preserve side effects when typed Apply with 2 argument list is passed in") { assume(neverRuns3(sys.error("Sad times 3"))(0)) } it("should do nothing when is used to check s1 startsWith \\"hi\\"") { assume(s1 startsWith "hi") assume(s1.startsWith("hi")) } it("should throw TestCanceledException with correct message and stack depth when is used to check s2 startsWith 
\\"hi\\"") { val e1 = intercept[TestCanceledException] { assume(s2 startsWith "hi") } e1.message should be ( Some( """ | |assume(s2 startsWith "hi") | | | | | | false "hi" | "ScalaTest hi" |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestCanceledException] { assume(s2.startsWith("hi")) } e2.message should be ( Some( """ | |assume(s2.startsWith("hi")) | | | | | | false "hi" | "ScalaTest hi" |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check ci1 startsWith 1") { assume(ci1 startsWith 1) assume(ci1.startsWith(1)) } it("should throw TestCanceledException with correct message and stack depth when is used to check ci2 startsWith 1") { val e1 = intercept[TestCanceledException] { assume(ci2 startsWith 1) } e1.message should be ( Some( """ | |assume(ci2 startsWith 1) | | | | | 321 false 1 |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 13)) val e2 = intercept[TestCanceledException] { assume(ci2.startsWith(1)) } e2.message should be ( Some( """ | |assume(ci2.startsWith(1)) | | | | | 321 false 1 |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } it("should do nothing when is used to check !s2.startsWith(\\"hi\\")") { assume(!s2.startsWith("hi")) } it("should throw TestCanceledException with correct message and stack depth when is used to check !s1.startsWith(\\"hi\\")") { val e1 = intercept[TestCanceledException] { assume(!s1.startsWith("hi")) } e1.message should be ( Some( """ | |assume(!s1.startsWith("hi")) | || | | | || true "hi" | |"hi ScalaTest" | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) 
} it("should do nothing when is used to check s2 endsWith \\"hi\\"") { assume(s2 endsWith "hi") assume(s2.endsWith("hi")) } it("should throw TestCanceledException with correct message and stack depth when is used to check s1 endsWith \\"hi\\"") { val e1 = intercept[TestCanceledException] { assume(s1 endsWith "hi") } e1.message should be ( Some( """ | |assume(s1 endsWith "hi") | | | | | | false "hi" | "hi ScalaTest" |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestCanceledException] { assume(s1.endsWith("hi")) } e2.message should be ( Some( """ | |assume(s1.endsWith("hi")) | | | | | | false "hi" | "hi ScalaTest" |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check ci2 endsWith 1") { assume(ci2 endsWith 1) assume(ci2.endsWith(1)) } it("should throw TestCanceledException with correct message and stack depth when is used to check ci1 endsWith 1") { val e1 = intercept[TestCanceledException] { assume(ci1 endsWith 1) } e1.message should be ( Some( """ | |assume(ci1 endsWith 1) | | | | | 123 false 1 |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 13)) val e2 = intercept[TestCanceledException] { assume(ci1.endsWith(1)) } e2.message should be ( Some( """ | |assume(ci1.endsWith(1)) | | | | | 123 false 1 |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } it("should do nothing when is used to check !s1.endsWith(\\"hi\\")") { assume(!s1.endsWith("hi")) } it("should throw TestCanceledException with correct message and stack depth when is used to check !s2.endsWith(\\"hi\\")") { val e1 = intercept[TestCanceledException] { assume(!s2.endsWith("hi")) } e1.message should be ( Some( """ | 
|assume(!s2.endsWith("hi")) | || | | | || true "hi" | |"ScalaTest hi" | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check s3 contains \\"hi\\"") { assume(s3 contains "hi") assume(s3.contains("hi")) } it("should throw TestCanceledException with correct message and stack depth when is used to check s3 contains \\"hello\\"") { val e1 = intercept[TestCanceledException] { assume(s3 contains "hello") } e1.message should be ( Some( """ | |assume(s3 contains "hello") | | | | | | false "hello" | "Say hi to ScalaTest" |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestCanceledException] { assume(s3.contains("hello")) } e2.message should be ( Some( """ | |assume(s3.contains("hello")) | | | | | | false "hello" | "Say hi to ScalaTest" |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check ci2 contains 2") { assume(ci2 contains 2) assume(ci2.contains(2)) } it("should throw TestCanceledException with correct message and stack depth when is used to check ci1 contains 5") { val e1 = intercept[TestCanceledException] { assume(ci1 contains 5) } e1.message should be ( Some( """ | |assume(ci1 contains 5) | | | | | 123 false 5 |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 13)) val e2 = intercept[TestCanceledException] { assume(ci1.contains(5)) } e2.message should be ( Some( """ | |assume(ci1.contains(5)) | | | | | 123 false 5 |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } it("should do nothing when is used to check !s1.contains(\\"hello\\")") { 
assume(!s3.contains("hello")) } it("should throw TestCanceledException with correct message and stack depth when is used to check !s3.contains(\\"hi\\")") { val e1 = intercept[TestCanceledException] { assume(!s3.contains("hi")) } e1.message should be ( Some( """ | |assume(!s3.contains("hi")) | || | | | || true "hi" | |"Say hi to ScalaTest" | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check l1 contains 2") { assume(l1 contains 2) assume(l1.contains(2)) } it("should throw TestCanceledException with correct message and stack depth when is used to check l1 contains 5") { val e1 = intercept[TestCanceledException] { assume(l1 contains 5) } e1.message should be ( Some( """ | |assume(l1 contains 5) | | | | | | false 5 | List(1, 2, 3) |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestCanceledException] { assume(l1.contains(5)) } e2.message should be ( Some( """ | |assume(l1.contains(5)) | | | | | | false 5 | List(1, 2, 3) |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !(l1 contains 5)") { assume(!(l1 contains 5)) assume(!l1.contains(5)) } it("should throw TestCanceledException with correct message and stack depth when is used to check !(l1 contains 2)") { val e1 = intercept[TestCanceledException] { assume(!(l1 contains 2)) } e1.message should be ( Some( """ | |assume(!(l1 contains 2)) | | | | | | | | true 2 | | List(1, 2, 3) | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) val e2 = intercept[TestCanceledException] { assume(!l1.contains(2)) } e2.message should be ( Some( """ | |assume(!l1.contains(2)) | || | | | || true 2 
| |List(1, 2, 3) | false |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check m1 contains 2") { assume(m1 contains 2) assume(m1.contains(2)) } it("should throw TestCanceledException with correct message and stack depth when is used to check m1 contains 5") { val e1 = intercept[TestCanceledException] { assume(m1 contains 5) } e1.message should be ( Some( s""" | |assume(m1 contains 5) | | | | | | false 5 | $m1Str |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestCanceledException] { assume(m1.contains(5)) } e2.message should be ( Some( s""" | |assume(m1.contains(5)) | | | | | | false 5 | $m1Str |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !(m1 contains 5)") { assume(!(m1 contains 5)) assume(!m1.contains(5)) } it("should throw TestCanceledException with correct message and stack depth when is used to check !(m1 contains 2)") { val e1 = intercept[TestCanceledException] { assume(!(m1 contains 2)) } e1.message should be ( Some( s""" | |assume(!(m1 contains 2)) | | | | | | | | true 2 | | $m1Str | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) val e2 = intercept[TestCanceledException] { assume(!m1.contains(2)) } e2.message should be ( Some( s""" | |assume(!m1.contains(2)) | || | | | || true 2 | |$m1Str | false |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check ct1 contains 8") { assume(ct1 contains 8) assume(ct1.contains(8)) } it("should throw TestCanceledException with correct message and stack depth 
when is used to check ct1 contains 5") { val e1 = intercept[TestCanceledException] { assume(ct1 contains 5) } e1.message should be ( Some( s""" | |assume(ct1 contains 5) | | | | | | false 5 | $ct1Str |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestCanceledException] { assume(ct1.contains(5)) } e2.message should be ( Some( s""" | |assume(ct1.contains(5)) | | | | | | false 5 | $ct1Str |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !ct1.contains(5)") { assume(!ct1.contains(5)) } it("should throw TestCanceledException with correct message and stack depth when is used to check !ct1.contains(8)") { val e1 = intercept[TestCanceledException] { assume(!ct1.contains(8)) } e1.message should be ( Some( s""" | |assume(!ct1.contains(8)) | || | | | || true 8 | |$ct1Str | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check ci1 eq ci3") { assume(ci1 eq ci3) assume(ci1.eq(ci3)) } it("should throw TestCanceledException with correct message and stack depth when is used to check ci1 eq ci2") { val e1 = intercept[TestCanceledException] { assume(ci1 eq ci2) } e1.message should be ( Some( s""" | |assume(ci1 eq ci2) | | | | | $ci1Str | $ci2Str | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestCanceledException] { assume(ci1.eq(ci2)) } e2.message should be ( Some( s""" | |assume(ci1.eq(ci2)) | | | | | $ci1Str | $ci2Str | false |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !ci1.eq(ci2)") 
{ assume(!ci1.eq(ci2)) } it("should throw TestCanceledException with correct message and stack depth when is used to check !ci1.eq(ci3)") { val e = intercept[TestCanceledException] { assume(!ci1.eq(ci3)) } e.message should be ( Some( s""" | |assume(!ci1.eq(ci3)) | || | | | |$ci1Str | $ci3Str | | true | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check ci1 ne ci2") { assume(ci1 ne ci2) assume(ci1.ne(ci2)) } it("should throw TestCanceledException with correct message and stack depth when is used to check ci1 ne ci3") { val e1 = intercept[TestCanceledException] { assume(ci1 ne ci3) } e1.message should be ( Some( s""" | |assume(ci1 ne ci3) | | | | | $ci1Str | $ci3Str | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestCanceledException] { assume(ci1.ne(ci3)) } e2.message should be ( Some( s""" | |assume(ci1.ne(ci3)) | | | | | $ci1Str | $ci3Str | false |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !ci1.ne(ci3)") { assume(!ci1.ne(ci3)) } it("should throw TestCanceledException with correct message and stack depth when is used to check !ci1.ne(ci2)") { val e = intercept[TestCanceledException] { assume(!ci1.ne(ci2)) } e.message should be ( Some( """ | |assume(!ci1.ne(ci2)) | || | | | |123 | 321 | | true | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check s4.isEmpty") { assume(s4.isEmpty) } it("should throw TestCanceledException with correct message and stack depth when is used to check s3.isEmpty") { val e = intercept[TestCanceledException] { assume(s3.isEmpty) } e.message 
should be ( Some( """ | |assume(s3.isEmpty) | | | | | false | "Say hi to ScalaTest" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !s3.isEmpty") { assume(!s3.isEmpty) } it("should throw TestCanceledException with correct message and stack depth when is used to check !s4.isEmpty") { val e = intercept[TestCanceledException] { assume(!s4.isEmpty) } e.message should be ( Some( """ | |assume(!s4.isEmpty) | || | | |"" true | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check l2.isEmpty") { assume(l2.isEmpty) } it("should throw TestCanceledException with correct message and stack depth when is used to check l1.isEmpty") { val e = intercept[TestCanceledException] { assume(l1.isEmpty) } e.message should be ( Some( s""" | |assume(l1.isEmpty) | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !l1.isEmpty") { assume(!l1.isEmpty) } it("should throw TestCanceledException with correct message and stack depth when is used to check !l2.isEmpty") { val e = intercept[TestCanceledException] { assume(!l2.isEmpty) } e.message should be ( Some( s""" | |assume(!l2.isEmpty) | || | | || true | |$l2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check s1.isInstanceOf[String]") { assume(s1.isInstanceOf[String]) } it("should throw TestCanceledException with correct message and stack depth when is used to check l1.isInstanceOf[String]") { val e = intercept[TestCanceledException] { assume(l1.isInstanceOf[String]) } e.message should be ( Some( s""" | 
|assume(l1.isInstanceOf[String]) | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check l1.isInstanceOf[List[Int]]") { assume(l1.isInstanceOf[List[Int]]) } it("should throw TestCanceledException with correct message and stack depth when is used to check s1.isInstanceOf[List[Int]]") { val e = intercept[TestCanceledException] { assume(s1.isInstanceOf[List[Int]]) } e.message should be ( Some( """ | |assume(s1.isInstanceOf[List[Int]]) | | | | | false | "hi ScalaTest" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check date.isInstanceOf[Date]") { assume(date.isInstanceOf[Date]) } it("should throw TestCanceledException with correct message and stack depth when is used to check l1.isInstanceOf[Date]") { val e = intercept[TestCanceledException] { assume(l1.isInstanceOf[Date]) } e.message should be ( Some( s""" | |assume(l1.isInstanceOf[Date]) | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !l1.isInstanceOf[String]") { assume(!l1.isInstanceOf[String]) } it("should throw TestCanceledException with correct message and stack depth when is used to check !s1.isInstanceOf[String]") { val e = intercept[TestCanceledException] { assume(!s1.isInstanceOf[String]) } e.message should be ( Some( """ | |assume(!s1.isInstanceOf[String]) | || | | || true | |"hi ScalaTest" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check !s1.isInstanceOf[List[Int]]") { assume(!s1.isInstanceOf[List[Int]]) } it("should throw TestCanceledException with correct message 
and stack depth when is used to check !l1.isInstanceOf[List[Int]]") { val e = intercept[TestCanceledException] { assume(!l1.isInstanceOf[List[Int]]) } e.message should be ( Some( s""" | |assume(!l1.isInstanceOf[List[Int]]) | || | | || true | |$l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check !l1.isInstanceOf[Date]") { assume(!l1.isInstanceOf[Date]) } it("should throw TestCanceledException with correct message and stack depth when is used to check !date.isInstanceOf[Date]") { val e = intercept[TestCanceledException] { assume(!date.isInstanceOf[Date]) } e.message should be ( Some( s""" | |assume(!date.isInstanceOf[Date]) | || | | || true | |$date | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check s1.length == 9") { assume(s1.length == 12) } it("should throw TestCanceledException with correct message and stack depth when is used to check s1.length == 10") { val e = intercept[TestCanceledException] { assume(s1.length == 10) } e.message should be ( Some( """ | |assume(s1.length == 10) | | | | | | | 12 | 10 | | false | "hi ScalaTest" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check l1.length == 3") { assume(l1.length == 3) } it("should throw TestCanceledException with correct message and stack depth when is used to check l1.length == 10") { val e = intercept[TestCanceledException] { assume(l1.length == 10) } e.message should be ( Some( s""" | |assume(l1.length == 10) | | | | | | | 3 | 10 | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check 
!(s1.length == 10)") { assume(!(s1.length == 10)) } it("should throw TestCanceledException with correct message and stack depth when is used to check !(s1.length == 9)") { val e = intercept[TestCanceledException] { assume(!(s1.length == 12)) } e.message should be ( Some( """ | |assume(!(s1.length == 12)) | | | | | | | | | 12 | 12 | | | true | | "hi ScalaTest" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check !(l1.length == 2)") { assume(!(l1.length == 2)) } it("should throw TestCanceledException with correct message and stack depth when is used to check !(l1.length == 9)") { val e = intercept[TestCanceledException] { assume(!(l1.length == 3)) } e.message should be ( Some( s""" | |assume(!(l1.length == 3)) | | | | | | | | | 3 | 3 | | | true | | $l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check s1.size == 9") { assume(s1.size == 12) } it("should throw TestCanceledException with correct message and stack depth when is used to check s1.size == 10") { val e = intercept[TestCanceledException] { assume(s1.size == 10) } e.message should be ( Some( """ | |assume(s1.size == 10) | | | | | | | 12 | 10 | | false | "hi ScalaTest" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check l1.size == 3") { assume(l1.size == 3) } it("should throw TestCanceledException with correct message and stack depth when is used to check l1.size == 10") { val e = intercept[TestCanceledException] { assume(l1.size == 10) } e.message should be ( Some( s""" | |assume(l1.size == 10) | | | | | | | 3 | 10 | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should 
be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check !(s1.size == 10)") { assume(!(s1.size == 10)) } it("should throw TestCanceledException with correct message and stack depth when is used to check !(s1.size == 9)") { val e = intercept[TestCanceledException] { assume(!(s1.size == 12)) } e.message should be ( Some( """ | |assume(!(s1.size == 12)) | | | | | | | | | 12 | 12 | | | true | | "hi ScalaTest" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check !(l1.size == 2)") { assume(!(l1.size == 2)) } it("should throw TestCanceledException with correct message and stack depth when is used to check !(l1.size == 9) ") { val e = intercept[TestCanceledException] { assume(!(l1.size == 3)) } e.message should be ( Some( s""" | |assume(!(l1.size == 3)) | | | | | | | | | 3 | 3 | | | true | | $l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check l1.exists(_ == 3)") { assume(l1.exists(_ == 3)) } it("should do nothing when is used to check l1.exists(3 == _)") { assume(l1.exists(3 == _)) } it("should throw TestCanceledException with correct message and stack depth when is used to check l1.exists(_ == 5) ") { val e = intercept[TestCanceledException] { assume(l1.exists(_ == 5)) } e.message should be ( Some( s""" | |assume(l1.exists(_ == 5)) | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check l1.exists(5 == _) ") { val e = intercept[TestCanceledException] { assume(l1.exists(5 == _)) } e.message should be ( Some( s""" | |assume(l1.exists(5 == _)) | | | | | false | $l1 |""".stripMargin ) ) 
e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !l1.exists(_ == 5)") { assume(!l1.exists(_ == 5)) } it("should do nothing when is used to check !l1.exists(5 == _)") { assume(!l1.exists(5 == _)) } it("should throw TestCanceledException with correct message and stack depth when is used to check !l1.exists(_ == 3)") { val e = intercept[TestCanceledException] { assume(!l1.exists(_ == 3)) } e.message should be ( Some( s""" | |assume(!l1.exists(_ == 3)) | || | | || true | |$l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestCanceledException with correct message and stack depth when is used to check !l1.exists(3 == _)") { val e = intercept[TestCanceledException] { assume(!l1.exists(3 == _)) } e.message should be ( Some( s""" | |assume(!l1.exists(3 == _)) | || | | || true | |$l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestCanceledException with correct message and stack depth when is used to check l1.exists(_ > 3)") { val e = intercept[TestCanceledException] { assume(l1.exists(_ > 3)) } e.message should be ( Some( s""" | |assume(l1.exists(_ > 3)) | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check l1.exists(3 < _)") { val e = intercept[TestCanceledException] { assume(l1.exists(3 < _)) } e.message should be ( Some( s""" | |assume(l1.exists(3 < _)) | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with 
correct message and stack depth when is used to check l3.exists(_.isEmpty)") { val e = intercept[TestCanceledException] { assume(l3.exists(_.isEmpty)) } e.message should be ( Some( s""" | |assume(l3.exists(_.isEmpty)) | | | | | false | $l3Str |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check l3.exists(false)") { val e = intercept[TestCanceledException] { assume(ci1.exists(321)) } e.message should be ( Some( """ | |assume(ci1.exists(321)) | | | | | 123 false 321 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } it("should do nothing when used to check woof { meow(y = 5) } == \\"woof\\"") { assume(woof { meow(y = 5) } == "woof") } it("should throw TestCanceledException with correct message and stack depth when is used to check woof { meow(y = 5) } == \\"meow\\"") { val e = intercept[TestCanceledException] { assume(woof { meow(y = 5) } == "meow") } e.message should be ( Some( """ | |assume(woof { meow(y = 5) } == "meow") | | | | | "woof" | "meow" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when used to check multiline assert((b == a + 2) && (b - 2 <= a)) ") { assume((b == a + 2) && (b - 2 <= a)) } // SKIP-DOTTY-START // problem with quotes it("should throw TestCanceledException with friend message when used to check multiline assert((b == a + 2) && (b - 1 <= a))") { val e = intercept[TestCanceledException] { assume((b == a + 2) && (b - 1 <= a)) } e.message shouldBe Some("5 equaled 5, but 4 was not less than or equal to 3") e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 5)) } // SKIP-DOTTY-END it("should do nothing when a block of code that 
evaluates to true is passed in") { /* NOTE(review): this spec file appears whitespace-collapsed; the macro-generated diagram strings and the thisLineNumber offsets assume the original multi-line layout — verify against upstream before editing any token, since assume()'s macro captures the literal source text. */ assume { val a = 1 val b = 2 val c = a + b a < b || c == a + b } } it("should throw TestCanceledException with correct message and stack depth when a block of code that evaluates to false is passed") { val e = intercept[TestCanceledException] { assume { val a = 1; val b = 2; val c = a + b; a > b || c == b * b } } e.message should be ( Some( """ | |assume { val a = 1; val b = 2; val c = a + b; a > b || c == b * b } | | | | | | | | | | | 1 | 2 | 3 | 2 4 2 | | | false | | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } // SKIP-DOTTY-START // different code show in Dotty it("should fallback to BooleanMacro when a block of code > 1 line is passed in ") { val e = intercept[TestCanceledException] { assume { val a = 1 val b = 2 val c = a + b a > b || c == b * b } } e.message should be ( Some( """{ | val a: Int = 1; | val b: Int = 2; | val c: Int = a.+(b); | a.>(b).||(c.==(b.*(b))) |} was false""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 17)) } // SKIP-DOTTY-END // SKIP-SCALATESTJS,NATIVE-START it("should do nothing when used to check <person>Dude</person> == <person>Dude</person>") { assume(<person>Dude</person> == <person>Dude</person>) } it("should throw TestCanceledException with correct message and stack depth when is used to check <person>Dude</person> == <person>Mary</person>") { val e = intercept[TestCanceledException] { assume(<person>Dude</person> == <person>Mary</person>) } e.message should be ( Some( """ | |assume(<person>Dude</person> == <person>Mary</person>) | | | | | | | <person>Mary</person> | | false | <person>Dude</person> |""".stripMargin ) ) } // SKIP-SCALATESTJS,NATIVE-END it("should compile when used with org == xxx that shadow org.scalactic ") { assertCompiles( """ |val org = "test" |assume(org == "test") """.stripMargin) } it("should compile 
when used with org === xxx that shadow org.scalactic") { assertCompiles( """ |val org = "test" |assume(org === "test") """.stripMargin) } // SKIP-DOTTY-START it("should compile when used with org === xxx with TypeCheckedTripleEquals that shadow org.scalactic") { assertCompiles( """ |class TestSpec extends AnyFunSpec with org.scalactic.TypeCheckedTripleEquals { | it("testing here") { | val org = "test" | assume(org === "test") | } |} """.stripMargin) } // SKIP-DOTTY-END it("should compile when used with org.aCustomMethod that shadow org.scalactic") { assertCompiles( """ |class Test { | def aCustomMethod: Boolean = true |} |val org = new Test |assume(org.aCustomMethod) """.stripMargin) } it("should compile when used with !org that shadow org.scalactic") { assertCompiles( """ |val org = false |assume(!org) """.stripMargin) } it("should compile when used with org.isEmpty that shadow org.scalactic") { assertCompiles( """ |val org = "" |assume(org.isEmpty) """.stripMargin) } it("should compile when used with org.isInstanceOf that shadow org.scalactic") { assertCompiles( """ |val org = "" |assume(org.isInstanceOf[String]) """.stripMargin) } it("should compile when used with org.size == 0 that shadow org.scalactic") { assertCompiles( """ |val org = Array.empty[String] |assume(org.size == 0) """.stripMargin) } it("should compile when used with org.length == 0 that shadow org.scalactic") { assertCompiles( """ |val org = "" |assume(org.length == 0) """.stripMargin) } it("should compile when used with org.exists(_ == 'b') that shadow org.scalactic ") { assertCompiles( """ |val org = "abc" |assume(org.exists(_ == 'b')) """.stripMargin) } it("should do nothing when is used to check new String(\\"test\\") != \\"test\\"") { assume(new String("test") == "test") } it("should throw TestCanceledException with correct message and stack depth when is used to check new String(\\"test\\") != \\"testing\\"") { val e = intercept[TestCanceledException] { assume(new String("test") == 
"testing") } e.message should be ( Some( """ | |assume(new String("test") == "testing") | | | | | "test" | "testing" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should compile when used with Java static method") { assertCompiles( """ |assume(System.currentTimeMillis() > 0) """.stripMargin) assertCompiles( """ |assume(java.math.BigDecimal.ZERO == java.math.BigDecimal.ZERO) """.stripMargin) } } describe("The assume(boolean, clue) method") { it("should do nothing when is used to check a == 3") { assume(a == 3, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 5") { val e = intercept[TestCanceledException] { assume(a == 5, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(a == 5, "this is a clue") | | | | | 3 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 5 == b") { assume(5 == b, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check 3 == b") { val e = intercept[TestCanceledException] { assume(3 == b, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(3 == b, "this is a clue") | | | | | 3 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a != 5") { assume(a != 5, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check a != 3") { val e = intercept[TestCanceledException] { assume(a != 3, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(a != 3, "this is a clue") | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName 
should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } /* Clue-variant assertions: same macro-rendered diagrams as the no-clue spec above, but each expected message is prefixed with the "this is a clue" line. NOTE(review): offsets like thisLineNumber - 14 depend on the original source layout — do not reflow. */ it("should do nothing when is used to check 3 != b") { assume(3 != b, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check 5 != b") { val e = intercept[TestCanceledException] { assume(5 != b, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(5 != b, "this is a clue") | | | | | 5 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 3 == 3") { assume(3 == 3, "this is a clue") } // SKIP-DOTTY-START // const-folding will eliminate the expr it("should throw TestCanceledException with message that contains the original code and correct stack depth when is used to check 3 == 5") { // This is because the compiler simply pass the false boolean literal // to the macro, can't find a way to get the 3 == 5 literal. 
val e1 = intercept[TestCanceledException] { assume(3 == 5, "this is a clue") } e1.message should be ( Some( """this is a clue | |assume(3 == 5, "this is a clue") | | | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } // SKIP-DOTTY-END it("should throw TestCanceledException with correct message and stack depth when is used to check a == b") { val e = intercept[TestCanceledException] { assume(a == b, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(a == b, "this is a clue") | | | | | 3 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 0") { val e = intercept[TestCanceledException] { assume(a == 0, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(a == 0, "this is a clue") | | | | | 3 | 0 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check 0 == a") { val e = intercept[TestCanceledException] { assume(0 == a, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(0 == a, "this is a clue") | | | | | 0 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check 3 != a") { val e = intercept[TestCanceledException] { assume(3 != a, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(3 != a, "this is a clue") | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be 
(Some(thisLineNumber - 14)) } it("should do nothing when is used to check 5 != a") { assume(5 != a, "this is a clue") } it("should do nothing when is used to check a > 2") { assume(a > 2, "this is a clue") } it("should do nothing when is used to check 5 > a") { assume(5 > a, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check a > 3") { val e = intercept[TestCanceledException] { assume(a > 3, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(a > 3, "this is a clue") | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check 3 > a") { val e = intercept[TestCanceledException] { assume(3 > a, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(3 > a, "this is a clue") | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a >= 3") { assume(a >= 3, "this is a clue") } it("should do nothing when is used to check 3 >= a") { assume(3 >= a, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check a >= 4") { val e = intercept[TestCanceledException] { assume(a >= 4, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(a >= 4, "this is a clue") | | | | | 3 | 4 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check 2 >= a") { val e = intercept[TestCanceledException] { assume(2 >= a, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(2 >= a, 
"this is a clue") | | | | | 2 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check b < 6") { assume(b < 6, "this is a clue") } it("should do nothing when is used to check 3 < b") { assume(3 < b, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check b < 5") { val e = intercept[TestCanceledException] { assume(b < 5, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(b < 5, "this is a clue") | | | | | 5 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check 5 < b") { val e = intercept[TestCanceledException] { assume(5 < b, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(5 < b, "this is a clue") | | | | | 5 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check b <= 5") { assume(b <= 5, "this is a clue") } it("should do nothing when is used to check 5 <= b") { assume(5 <= b, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check b <= 4") { val e = intercept[TestCanceledException] { assume(b <= 4, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(b <= 4, "this is a clue") | | | | | 5 | 4 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check 6 <= b") { val e = intercept[TestCanceledException] { assume(6 <= b, "this is a clue") } e.message 
should be ( Some( """this is a clue | |assume(6 <= b, "this is a clue") | | | | | 6 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check bob == \\"bob\\"") { assume(bob == "bob", "this is a clue") } it("should do nothing when is used to check bob != \\"alice\\"") { assume(bob != "alice", "this is a clue") } it("should do nothing when is used to check alice == \\"alice\\"") { assume(alice == "alice", "this is a clue") } it("should do nothing when is used to check alice != \\"bob\\"") { assume(alice != "bob", "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check bob == \\"alice\\"") { val e = intercept[TestCanceledException] { assume(bob == "alice", "this is a clue") } e.message should be ( Some( """this is a clue | |assume(bob == "alice", "this is a clue") | | | | | | | "alice" | | false | "bob" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestCanceledException with correct message and stack depth when is used to check bob != \\"bob\\"") { val e = intercept[TestCanceledException] { assume(bob != "bob", "this is a clue") } e.message should be ( Some( """this is a clue | |assume(bob != "bob", "this is a clue") | | | | | | | "bob" | | false | "bob" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestCanceledException with correct message and stack depth when is used to check alice == \\"bob\\"") { val e = intercept[TestCanceledException] { assume(alice == "bob", "this is a clue") } e.message should be ( Some( """this is a clue | |assume(alice == "bob", "this is a clue") | | | | | | | "bob" | | false | "alice" |""".stripMargin ) ) e.failedCodeFileName should be 
(Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestCanceledException with correct message and stack depth when is used to check alice != \\"alice\\"") { val e = intercept[TestCanceledException] { assume(alice != "alice", "this is a clue") } e.message should be ( Some( """this is a clue | |assume(alice != "alice", "this is a clue") | | | | | | | "alice" | | false | "alice" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } /* Triple-equals (===, !==) and boolean-operator (&&, &, ||, |, !) clue variants; the `Stateful` tests below additionally verify that the assume macro preserves short-circuit evaluation for && and || (and, per these expectations, for & and | as well). */ it("should do nothing when is used to check a === 3") { assume(a === 3, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check a === 5 ") { val e = intercept[TestCanceledException] { assume(a === 5, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(a === 5, "this is a clue") | | | | | 3 | 5 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 3 === a") { assume(3 === a, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check 5 === a") { val e = intercept[TestCanceledException] { assume(5 === a, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(5 === a, "this is a clue") | | | | | 5 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a !== 5") { assume(a !== 5, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check a !== 3") { val e = intercept[TestCanceledException] { assume(a !== 3, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(a !== 3, "this is a clue") | | | | | 3 | 3 | false 
|""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check 5 !== a") { assume(5 !== a, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check 3 !== a") { val e = intercept[TestCanceledException] { assume(3 !== a, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(3 !== a, "this is a clue") | | | | | 3 | 3 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a == 3 && b == 5") { assume(a == 3 && b == 5, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 3 && b == 6") { val e = intercept[TestCanceledException] { assume(a == 3 && b == 6, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(a == 3 && b == 6, "this is a clue") | | | | | | | | | 3 | 3 | 5 | 6 | true | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 2 && b == 5") { val e = intercept[TestCanceledException] { assume(a == 2 && b == 5, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(a == 2 && b == 5, "this is a clue") | | | | | 3 | 2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 2 && b == 6") { val e = intercept[TestCanceledException] { assume(a == 2 && b == 6, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(a == 2 && b == 6, "this is a 
clue") | | | | | 3 | 2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a == 3 & b == 5") { assume(a == 3 & b == 5, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 3 & b == 6") { val e = intercept[TestCanceledException] { assume(a == 3 & b == 6, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(a == 3 & b == 6, "this is a clue") | | | | | | | | | 3 | 3 | 5 | 6 | true | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 2 & b == 5") { val e = intercept[TestCanceledException] { assume(a == 2 & b == 5, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(a == 2 & b == 5, "this is a clue") | | | | | 3 | 2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 2 & b == 6") { val e = intercept[TestCanceledException] { assume(a == 2 & b == 6, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(a == 2 & b == 6, "this is a clue") | | | | | 3 | 2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check a == 3 || b == 5") { assume(a == 3 || b == 5, "this is a clue") } it("should do nothing when is used to check a == 3 || b == 6") { assume(a == 3 || b == 6, "this is a clue") } it("should do nothing when is used to check a == 2 || b == 5") { assume(a == 2 || b == 5, "this is a clue") } it("should 
throw TestCanceledException with correct message and stack depth when is used to check a == 2 || b == 6") { val e = intercept[TestCanceledException] { assume(a == 2 || b == 6, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(a == 2 || b == 6, "this is a clue") | | | | | | | | | 3 | 2 | 5 | 6 | | | false | | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check a == 3 | b == 5") { assume(a == 3 | b == 5, "this is a clue") } it("should do nothing when is used to check a == 3 | b == 6") { assume(a == 3 | b == 6, "this is a clue") } it("should do nothing when is used to check a == 2 | b == 5") { assume(a == 2 | b == 5, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 2 | b == 6") { val e = intercept[TestCanceledException] { assume(a == 2 | b == 6, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(a == 2 | b == 6, "this is a clue") | | | | | | | | | 3 | 2 | 5 | 6 | | | false | | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check a == 3 && (b == 5 && b > 3)") { assume(a == 3 && (b == 5 && b > 3), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 3 && (b == 5 && b > 5)") { val e = intercept[TestCanceledException] { assume(a == 3 && (b == 5 && b > 5), "this is a clue") } e.message should be ( Some( """this is a clue | |assume(a == 3 && (b == 5 && b > 5), "this is a clue") | | | | | | | | | | | | | 3 | 3 | 5 | 5 | 5 | 5 | true false true | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing 
when is used to check !(a == 5)") { assume(!(a == 5), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check !(a == 3)") { val e = intercept[TestCanceledException] { assume(!(a == 3), "this is a clue") } e.message should be ( Some( """this is a clue | |assume(!(a == 3), "this is a clue") | | | | | | | 3 | 3 | | true | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestCanceledException with correct message and stack depth when is used to check a == 3 && !(b == 5)") { val e = intercept[TestCanceledException] { assume(a == 3 && !(b == 5), "this is a clue") } e.message should be ( Some( """this is a clue | |assume(a == 3 && !(b == 5), "this is a clue") | | | | | | | | | | 3 | 3 | | 5 | 5 | true | | true | | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check (a == 3) == (b == 5)") { assume((a == 3) == (b == 5), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check (a == 3) == (b != 5)") { val e = intercept[TestCanceledException] { assume((a == 3) == (b != 5), "this is a clue") } e.message should be ( Some( """this is a clue | |assume((a == 3) == (b != 5), "this is a clue") | | | | | | | | | 3 | 3 | 5 | 5 | true false false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should short-circuit && when first condition was false") { val s = new Stateful intercept[TestCanceledException] { assume(a == 5 && s.changeState, "this is a clue") } s.state should be (false) } it("should short-circuit & when first condition was false") { val s = new Stateful intercept[TestCanceledException] { assume(a == 5 & s.changeState, "this is 
a clue") } s.state should be (false) } it("should short-circuit || when first condition was true") { val s = new Stateful assume(a == 3 || s.changeState, "this is a clue") s.state should be (false) } it("should short-circuit | when first condition was true") { val s = new Stateful assume(a == 3 | s.changeState, "this is a clue") s.state should be (false) } it("should do nothing when it is used to check a == 3 && { println(\\"hi\\"); b == 5} ") { assume(a == 3 && { println("hi"); b == 5}, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is usesd to check a == 3 && { println(\\"hi\\"); b == 3}") { val e = intercept[TestCanceledException] { assume(a == 3 && { println("hi"); b == 3}, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(a == 3 && { println("hi"); b == 3}, "this is a clue") | | | | | | | | | 3 | 3 false 5 | 3 | true false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when it is used to check { println(\\"hi\\"); b == 5} && a == 3") { assume({ println("hi"); b == 5} && a == 3, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is usesd to check { println(\\"hi\\"); b == 5} && a == 5") { val e = intercept[TestCanceledException] { assume({ println("hi"); b == 5} && a == 5, "this is a clue") } e.message should be ( Some( """this is a clue | |assume({ println("hi"); b == 5} && a == 5, "this is a clue") | | | | | | | | | 5 | 5 | 3 | 5 | true | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should preserve side effects when Apply with single argument is passed in") { assume(neverRuns1(sys.error("Sad times 1")), "this is a clue") } it("should preserve side effects when Apply with 2 argument list is passed in") { 
assume(neverRuns2(sys.error("Sad times 2"))(0), "this is a clue") } /* startsWith/endsWith/contains clue variants over strings, CustomInt (ci1/ci2), List (l1), Map (m1) and CustomContainer (ct1); fixtures and m1Str are defined earlier in this file. The neverRuns* helpers take by-name args, so sys.error must not be evaluated. */ it("should preserve side effects when typed Apply with 2 argument list is passed in") { assume(neverRuns3(sys.error("Sad times 3"))(0), "this is a clue") } it("should do nothing when is used to check s1 startsWith \\"hi\\"") { assume(s1 startsWith "hi", "this is a clue") assume(s1.startsWith("hi"), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check s2 startsWith \\"hi\\"") { val e1 = intercept[TestCanceledException] { assume(s2 startsWith "hi", "this is a clue") } e1.message should be ( Some( """this is a clue | |assume(s2 startsWith "hi", "this is a clue") | | | | | | false "hi" | "ScalaTest hi" |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestCanceledException] { assume(s2.startsWith("hi"), "this is a clue") } e2.message should be ( Some( """this is a clue | |assume(s2.startsWith("hi"), "this is a clue") | | | | | | false "hi" | "ScalaTest hi" |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check ci1 startsWith 1") { assume(ci1 startsWith 1, "this is a clue") assume(ci1.startsWith(1), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check ci2 startsWith 1") { val e1 = intercept[TestCanceledException] { assume(ci2 startsWith 1, "this is a clue") } e1.message should be ( Some( """this is a clue | |assume(ci2 startsWith 1, "this is a clue") | | | | | 321 false 1 |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 13)) val e2 = intercept[TestCanceledException] { assume(ci2.startsWith(1), "this is a clue") } e2.message should be ( Some( """this is a clue | 
|assume(ci2.startsWith(1), "this is a clue") | | | | | 321 false 1 |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } it("should do nothing when is used to check !s2.startsWith(\\"hi\\")") { assume(!s2.startsWith("hi"), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check !s1.startsWith(\\"hi\\")") { val e1 = intercept[TestCanceledException] { assume(!s1.startsWith("hi"), "this is a clue") } e1.message should be ( Some( """this is a clue | |assume(!s1.startsWith("hi"), "this is a clue") | || | | | || true "hi" | |"hi ScalaTest" | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check s2 endsWith \\"hi\\"") { assume(s2 endsWith "hi", "this is a clue") assume(s2.endsWith("hi"), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check s1 endsWith \\"hi\\"") { val e1 = intercept[TestCanceledException] { assume(s1 endsWith "hi", "this is a clue") } e1.message should be ( Some( """this is a clue | |assume(s1 endsWith "hi", "this is a clue") | | | | | | false "hi" | "hi ScalaTest" |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestCanceledException] { assume(s1.endsWith("hi"), "this is a clue") } e2.message should be ( Some( """this is a clue | |assume(s1.endsWith("hi"), "this is a clue") | | | | | | false "hi" | "hi ScalaTest" |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check ci2 endsWith 1") { assume(ci2 endsWith 1, "this is a clue") assume(ci2.endsWith(1), "this is a clue") } it("should throw 
TestCanceledException with correct message and stack depth when is used to check ci1 endsWith 1") { val e1 = intercept[TestCanceledException] { assume(ci1 endsWith 1, "this is a clue") } e1.message should be ( Some( """this is a clue | |assume(ci1 endsWith 1, "this is a clue") | | | | | 123 false 1 |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 13)) val e2 = intercept[TestCanceledException] { assume(ci1.endsWith(1), "this is a clue") } e2.message should be ( Some( """this is a clue | |assume(ci1.endsWith(1), "this is a clue") | | | | | 123 false 1 |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } it("should do nothing when is used to check !s1.endsWith(\\"hi\\")") { assume(!s1.endsWith("hi"), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check !s2.endsWith(\\"hi\\")") { val e1 = intercept[TestCanceledException] { assume(!s2.endsWith("hi"), "this is a clue") } e1.message should be ( Some( """this is a clue | |assume(!s2.endsWith("hi"), "this is a clue") | || | | | || true "hi" | |"ScalaTest hi" | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check s3 contains \\"hi\\"") { assume(s3 contains "hi", "this is a clue") assume(s3.contains("hi"), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check s3 contains \\"hello\\"") { val e1 = intercept[TestCanceledException] { assume(s3 contains "hello", "this is a clue") } e1.message should be ( Some( """this is a clue | |assume(s3 contains "hello", "this is a clue") | | | | | | false "hello" | "Say hi to ScalaTest" |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber 
should be (Some(thisLineNumber - 14)) val e2 = intercept[TestCanceledException] { assume(s3.contains("hello"), "this is a clue") } e2.message should be ( Some( """this is a clue | |assume(s3.contains("hello"), "this is a clue") | | | | | | false "hello" | "Say hi to ScalaTest" |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check ci2 contains 2") { assume(ci2 contains 2, "this is a clue") assume(ci2.contains(2), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check ci1 contains 5") { val e1 = intercept[TestCanceledException] { assume(ci1 contains 5, "this is a clue") } e1.message should be ( Some( """this is a clue | |assume(ci1 contains 5, "this is a clue") | | | | | 123 false 5 |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 13)) val e2 = intercept[TestCanceledException] { assume(ci1.contains(5), "this is a clue") } e2.message should be ( Some( """this is a clue | |assume(ci1.contains(5), "this is a clue") | | | | | 123 false 5 |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } it("should do nothing when is used to check !s1.contains(\\"hello\\")") { assume(!s3.contains("hello"), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check !s3.contains(\\"hi\\")") { val e1 = intercept[TestCanceledException] { assume(!s3.contains("hi"), "this is a clue") } e1.message should be ( Some( """this is a clue | |assume(!s3.contains("hi"), "this is a clue") | || | | | || true "hi" | |"Say hi to ScalaTest" | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when 
is used to check l1 contains 2") { assume(l1 contains 2, "this is a clue") assume(l1.contains(2), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check l1 contains 5") { val e1 = intercept[TestCanceledException] { assume(l1 contains 5, "this is a clue") } e1.message should be ( Some( """this is a clue | |assume(l1 contains 5, "this is a clue") | | | | | | false 5 | List(1, 2, 3) |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestCanceledException] { assume(l1.contains(5), "this is a clue") } e2.message should be ( Some( """this is a clue | |assume(l1.contains(5), "this is a clue") | | | | | | false 5 | List(1, 2, 3) |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !(l1 contains 5)") { assume(!(l1 contains 5), "this is a clue") assume(!l1.contains(5), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check !(l1 contains 2)") { val e1 = intercept[TestCanceledException] { assume(!(l1 contains 2), "this is a clue") } e1.message should be ( Some( """this is a clue | |assume(!(l1 contains 2), "this is a clue") | | | | | | | | true 2 | | List(1, 2, 3) | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) val e2 = intercept[TestCanceledException] { assume(!l1.contains(2), "this is a clue") } e2.message should be ( Some( """this is a clue | |assume(!l1.contains(2), "this is a clue") | || | | | || true 2 | |List(1, 2, 3) | false |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check m1 contains 2") { assume(m1 
contains 2, "this is a clue") assume(m1.contains(2), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check m1 contains 5") { val e1 = intercept[TestCanceledException] { assume(m1 contains 5, "this is a clue") } e1.message should be ( Some( s"""this is a clue | |assume(m1 contains 5, "this is a clue") | | | | | | false 5 | $m1Str |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestCanceledException] { assume(m1.contains(5), "this is a clue") } e2.message should be ( Some( s"""this is a clue | |assume(m1.contains(5), "this is a clue") | | | | | | false 5 | $m1Str |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !(m1 contains 5)") { assume(!(m1 contains 5), "this is a clue") assume(!m1.contains(5), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check !(m1 contains 2)") { val e1 = intercept[TestCanceledException] { assume(!(m1 contains 2), "this is a clue") } e1.message should be ( Some( s"""this is a clue | |assume(!(m1 contains 2), "this is a clue") | | | | | | | | true 2 | | $m1Str | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) val e2 = intercept[TestCanceledException] { assume(!m1.contains(2), "this is a clue") } e2.message should be ( Some( s"""this is a clue | |assume(!m1.contains(2), "this is a clue") | || | | | || true 2 | |$m1Str | false |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check ct1 contains 8") { assume(ct1 contains 8, "this is a clue") assume(ct1.contains(8), "this is a clue") } 
it("should throw TestCanceledException with correct message and stack depth when is used to check ct1 contains 5") { val e1 = intercept[TestCanceledException] { assume(ct1 contains 5, "this is a clue") } e1.message should be ( Some( s"""this is a clue | |assume(ct1 contains 5, "this is a clue") | | | | | | false 5 | $ct1Str |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestCanceledException] { assume(ct1.contains(5), "this is a clue") } e2.message should be ( Some( s"""this is a clue | |assume(ct1.contains(5), "this is a clue") | | | | | | false 5 | $ct1Str |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !ct1.contains(5)") { assume(!ct1.contains(5), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check !ct1.contains(8)") { val e1 = intercept[TestCanceledException] { assume(!ct1.contains(8), "this is a clue") } e1.message should be ( Some( s"""this is a clue | |assume(!ct1.contains(8), "this is a clue") | || | | | || true 8 | |$ct1Str | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check ci1 eq ci3") { assume(ci1 eq ci3, "this is a clue") assume(ci1.eq(ci3), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check ci1 eq ci2") { val e1 = intercept[TestCanceledException] { assume(ci1 eq ci2, "this is a clue") } e1.message should be ( Some( s"""this is a clue | |assume(ci1 eq ci2, "this is a clue") | | | | | $ci1Str | $ci2Str | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = 
intercept[TestCanceledException] { assume(ci1.eq(ci2), "this is a clue") } e2.message should be ( Some( s"""this is a clue | |assume(ci1.eq(ci2), "this is a clue") | | | | | $ci1Str | $ci2Str | false |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !ci1.eq(ci2)") { assume(!ci1.eq(ci2), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check !ci1.eq(ci3)") { val e = intercept[TestCanceledException] { assume(!ci1.eq(ci3), "this is a clue") } e.message should be ( Some( s"""this is a clue | |assume(!ci1.eq(ci3), "this is a clue") | || | | | |$ci1Str | $ci3Str | | true | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check ci1 ne ci2") { assume(ci1 ne ci2, "this is a clue") assume(ci1.ne(ci2), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check ci1 ne ci3") { val e1 = intercept[TestCanceledException] { assume(ci1 ne ci3, "this is a clue") } e1.message should be ( Some( s"""this is a clue | |assume(ci1 ne ci3, "this is a clue") | | | | | $ci1Str | $ci3Str | false |""".stripMargin ) ) e1.failedCodeFileName should be (Some(fileName)) e1.failedCodeLineNumber should be (Some(thisLineNumber - 14)) val e2 = intercept[TestCanceledException] { assume(ci1.ne(ci3), "this is a clue") } e2.message should be ( Some( s"""this is a clue | |assume(ci1.ne(ci3), "this is a clue") | | | | | $ci1Str | $ci3Str | false |""".stripMargin ) ) e2.failedCodeFileName should be (Some(fileName)) e2.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !ci1.ne(ci3)") { assume(!ci1.ne(ci3), "this is a clue") } it("should throw TestCanceledException with correct message and 
stack depth when is used to check !ci1.ne(ci2)") { val e = intercept[TestCanceledException] { assume(!ci1.ne(ci2), "this is a clue") } e.message should be ( Some( """this is a clue | |assume(!ci1.ne(ci2), "this is a clue") | || | | | |123 | 321 | | true | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check s4.isEmpty") { assume(s4.isEmpty, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check s3.isEmpty") { val e = intercept[TestCanceledException] { assume(s3.isEmpty, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(s3.isEmpty, "this is a clue") | | | | | false | "Say hi to ScalaTest" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !s3.isEmpty") { assume(!s3.isEmpty, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check !s4.isEmpty") { val e = intercept[TestCanceledException] { assume(!s4.isEmpty, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(!s4.isEmpty, "this is a clue") | || | | |"" true | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check l2.isEmpty") { assume(l2.isEmpty, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check l1.isEmpty") { val e = intercept[TestCanceledException] { assume(l1.isEmpty, "this is a clue") } e.message should be ( Some( s"""this is a clue | |assume(l1.isEmpty, "this is a clue") | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be 
(Some(thisLineNumber - 14)) } it("should do nothing when is used to check !l1.isEmpty") { assume(!l1.isEmpty, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check !l2.isEmpty") { val e = intercept[TestCanceledException] { assume(!l2.isEmpty, "this is a clue") } e.message should be ( Some( s"""this is a clue | |assume(!l2.isEmpty, "this is a clue") | || | | || true | |$l2 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check s1.isInstanceOf[String]") { assume(s1.isInstanceOf[String], "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check l1.isInstanceOf[String]") { val e = intercept[TestCanceledException] { assume(l1.isInstanceOf[String], "this is a clue") } e.message should be ( Some( s"""this is a clue | |assume(l1.isInstanceOf[String], "this is a clue") | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check l1.isInstanceOf[List[Int]]") { assume(l1.isInstanceOf[List[Int]], "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check s1.isInstanceOf[List[Int]]") { val e = intercept[TestCanceledException] { assume(s1.isInstanceOf[List[Int]], "this is a clue") } e.message should be ( Some( """this is a clue | |assume(s1.isInstanceOf[List[Int]], "this is a clue") | | | | | false | "hi ScalaTest" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check date.isInstanceOf[Date]") { assume(date.isInstanceOf[Date], "this is a clue") } it("should throw TestCanceledException with correct message and stack 
depth when is used to check l1.isInstanceOf[Date]") { val e = intercept[TestCanceledException] { assume(l1.isInstanceOf[Date], "this is a clue") } e.message should be ( Some( s"""this is a clue | |assume(l1.isInstanceOf[Date], "this is a clue") | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !l1.isInstanceOf[String]") { assume(!l1.isInstanceOf[String], "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check !s1.isInstanceOf[String]") { val e = intercept[TestCanceledException] { assume(!s1.isInstanceOf[String], "this is a clue") } e.message should be ( Some( """this is a clue | |assume(!s1.isInstanceOf[String], "this is a clue") | || | | || true | |"hi ScalaTest" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check !s1.isInstanceOf[List[Int]]") { assume(!s1.isInstanceOf[List[Int]], "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check !l1.isInstanceOf[List[Int]]") { val e = intercept[TestCanceledException] { assume(!l1.isInstanceOf[List[Int]], "this is a clue") } e.message should be ( Some( s"""this is a clue | |assume(!l1.isInstanceOf[List[Int]], "this is a clue") | || | | || true | |$l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check !l1.isInstanceOf[Date]") { assume(!l1.isInstanceOf[Date], "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check !date.isInstanceOf[Date]") { val e = intercept[TestCanceledException] { assume(!date.isInstanceOf[Date], "this is a clue") } 
e.message should be ( Some( s"""this is a clue | |assume(!date.isInstanceOf[Date], "this is a clue") | || | | || true | |$date | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check s1.length == 9") { assume(s1.length == 12, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check s1.length == 10") { val e = intercept[TestCanceledException] { assume(s1.length == 10, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(s1.length == 10, "this is a clue") | | | | | | | 12 | 10 | | false | "hi ScalaTest" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check l1.length == 3") { assume(l1.length == 3, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check l1.length == 10") { val e = intercept[TestCanceledException] { assume(l1.length == 10, "this is a clue") } e.message should be ( Some( s"""this is a clue | |assume(l1.length == 10, "this is a clue") | | | | | | | 3 | 10 | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check !(s1.length == 10)") { assume(!(s1.length == 10), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check !(s1.length == 9)") { val e = intercept[TestCanceledException] { assume(!(s1.length == 12), "this is a clue") } e.message should be ( Some( """this is a clue | |assume(!(s1.length == 12), "this is a clue") | | | | | | | | | 12 | 12 | | | true | | "hi ScalaTest" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber 
should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check !(l1.length == 2)") { assume(!(l1.length == 2), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check !(l1.length == 9)") { val e = intercept[TestCanceledException] { assume(!(l1.length == 3), "this is a clue") } e.message should be ( Some( s"""this is a clue | |assume(!(l1.length == 3), "this is a clue") | | | | | | | | | 3 | 3 | | | true | | $l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check s1.size == 9") { assume(s1.size == 12, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check s1.size == 10") { val e = intercept[TestCanceledException] { assume(s1.size == 10, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(s1.size == 10, "this is a clue") | | | | | | | 12 | 10 | | false | "hi ScalaTest" |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check l1.size == 3") { assume(l1.size == 3, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check l1.size == 10") { val e = intercept[TestCanceledException] { assume(l1.size == 10, "this is a clue") } e.message should be ( Some( s"""this is a clue | |assume(l1.size == 10, "this is a clue") | | | | | | | 3 | 10 | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should do nothing when is used to check !(s1.size == 10)") { assume(!(s1.size == 10), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check !(s1.size == 9)") 
{ val e = intercept[TestCanceledException] { assume(!(s1.size == 12), "this is a clue") } e.message should be ( Some( """this is a clue | |assume(!(s1.size == 12), "this is a clue") | | | | | | | | | 12 | 12 | | | true | | "hi ScalaTest" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check !(l1.size == 2)") { assume(!(l1.size == 2), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check !(l1.size == 9) ") { val e = intercept[TestCanceledException] { assume(!(l1.size == 3), "this is a clue") } e.message should be ( Some( s"""this is a clue | |assume(!(l1.size == 3), "this is a clue") | | | | | | | | | 3 | 3 | | | true | | $l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } it("should do nothing when is used to check l1.exists(_ == 3)") { assume(l1.exists(_ == 3), "this is a clue") } it("should do nothing when is used to check l1.exists(3 == _)") { assume(l1.exists(3 == _), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check l1.exists(_ == 5) ") { val e = intercept[TestCanceledException] { assume(l1.exists(_ == 5), "this is a clue") } e.message should be ( Some( s"""this is a clue | |assume(l1.exists(_ == 5), "this is a clue") | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check l1.exists(5 == _) ") { val e = intercept[TestCanceledException] { assume(l1.exists(5 == _), "this is a clue") } e.message should be ( Some( s"""this is a clue | |assume(l1.exists(5 == _), "this is a clue") | | | | | false | $l1 |""".stripMargin ) ) 
e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when is used to check !l1.exists(_ == 5)") { assume(!l1.exists(_ == 5), "this is a clue") } it("should do nothing when is used to check !l1.exists(5 == _)") { assume(!l1.exists(5 == _), "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check !l1.exists(_ == 3)") { val e = intercept[TestCanceledException] { assume(!l1.exists(_ == 3), "this is a clue") } e.message should be ( Some( s"""this is a clue | |assume(!l1.exists(_ == 3), "this is a clue") | || | | || true | |$l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestCanceledException with correct message and stack depth when is used to check !l1.exists(3 == _)") { val e = intercept[TestCanceledException] { assume(!l1.exists(3 == _), "this is a clue") } e.message should be ( Some( s"""this is a clue | |assume(!l1.exists(3 == _), "this is a clue") | || | | || true | |$l1 | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 15)) } it("should throw TestCanceledException with correct message and stack depth when is used to check l1.exists(_ > 3)") { val e = intercept[TestCanceledException] { assume(l1.exists(_ > 3), "this is a clue") } e.message should be ( Some( s"""this is a clue | |assume(l1.exists(_ > 3), "this is a clue") | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check l1.exists(3 < _)") { val e = intercept[TestCanceledException] { assume(l1.exists(3 < _), "this is a clue") } e.message should be ( Some( s"""this is a clue | 
|assume(l1.exists(3 < _), "this is a clue") | | | | | false | $l1 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check l3.exists(_.isEmpty)") { val e = intercept[TestCanceledException] { assume(l3.exists(_.isEmpty), "this is a clue") } e.message should be ( Some( s"""this is a clue | |assume(l3.exists(_.isEmpty), "this is a clue") | | | | | false | $l3Str |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should throw TestCanceledException with correct message and stack depth when is used to check l3.exists(false)") { val e = intercept[TestCanceledException] { assume(ci1.exists(321), "this is a clue") } e.message should be ( Some( """this is a clue | |assume(ci1.exists(321), "this is a clue") | | | | | 123 false 321 |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 13)) } it("should do nothing when used to check woof { meow(y = 5) } == \\"woof\\"") { assume(woof { meow(y = 5) } == "woof", "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check woof { meow(y = 5) } == \\"meow\\"") { val e = intercept[TestCanceledException] { assume(woof { meow(y = 5) } == "meow", "this is a clue") } e.message should be ( Some( """this is a clue | |assume(woof { meow(y = 5) } == "meow", "this is a clue") | | | | | "woof" | "meow" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should do nothing when used to check multiline assert((b == a + 2) && (b - 2 <= a)) ") { assume((b == a + 2) && (b - 2 <= a), "this is a clue") } // SKIP-DOTTY-START // problem with quotes it("should throw friend message when 
used to check multiline assert((b == a + 2) && (b - 1 <= a))") { val e = intercept[TestCanceledException] { assume((b == a + 2) && (b - 1 <= a), "this is a clue") } e.message shouldBe Some("5 equaled 5, but 4 was not less than or equal to 3 this is a clue") e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 5)) } // SKIP-DOTTY-END it("should do nothing when a block of code that evaluates to true is passed in") { assume({ val a = 1 val b = 2 val c = a + b a < b || c == a + b }, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when a block of code that evaluates to false is passed") { val e = intercept[TestCanceledException] { assume({ val a = 1; val b = 2; val c = a + b; a > b || c == b * b }, "this is a clue") } e.message should be ( Some( """this is a clue | |assume({ val a = 1; val b = 2; val c = a + b; a > b || c == b * b }, "this is a clue") | | | | | | | | | | | 1 | 2 | 3 | 2 4 2 | | | false | | false | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 16)) } // SKIP-DOTTY-START // different code show in Dotty it("should fallback to BooleanMacro when a block of code > 1 line is passed in ") { val e = intercept[TestCanceledException] { assume({ val a = 1 val b = 2 val c = a + b a > b || c == b * b }, "this is a clue") } e.message should be ( Some( """{ | val a: Int = 1; | val b: Int = 2; | val c: Int = a.+(b); | a.>(b).||(c.==(b.*(b))) |} was false this is a clue""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 17)) } // SKIP-DOTTY-END // SKIP-SCALATESTJS,NATIVE-START it("should do nothing when used to check <person>Dude</person> == <person>Dude</person>") { assume(<person>Dude</person> == <person>Dude</person>, "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when 
is used to check <person>Dude</person> == <person>Mary</person>") { val e = intercept[TestCanceledException] { assume(<person>Dude</person> == <person>Mary</person>, "this is a clue") } e.message should be ( Some( """this is a clue | |assume(<person>Dude</person> == <person>Mary</person>, "this is a clue") | | | | | | | <person>Mary</person> | | false | <person>Dude</person> |""".stripMargin ) ) } // SKIP-SCALATESTJS,NATIVE-END it("should compile when used with org == xxx that shadow org.scalactic ") { assertCompiles( """ |val org = "test" |assume(org == "test", "this is a clue") """.stripMargin) } it("should compile when used with org === xxx that shadow org.scalactic") { assertCompiles( """ |val org = "test" |assume(org === "test", "this is a clue") """.stripMargin) } // SKIP-DOTTY-START it("should compile when used with org === xxx with TypeCheckedTripleEquals that shadow org.scalactic") { assertCompiles( """ |class TestSpec extends AnyFunSpec with org.scalactic.TypeCheckedTripleEquals { | it("testing here") { | val org = "test" | assume(org === "test", "this is a clue") | } |} """.stripMargin) } // SKIP-DOTTY-END it("should compile when used with org.aCustomMethod that shadow org.scalactic") { assertCompiles( """ |class Test { | def aCustomMethod: Boolean = true |} |val org = new Test |assume(org.aCustomMethod, "this is a clue") """.stripMargin) } it("should compile when used with !org that shadow org.scalactic") { assertCompiles( """ |val org = false |assume(!org, "this is a clue") """.stripMargin) } it("should compile when used with org.isEmpty that shadow org.scalactic") { assertCompiles( """ |val org = "" |assume(org.isEmpty, "this is a clue") """.stripMargin) } it("should compile when used with org.isInstanceOf that shadow org.scalactic") { assertCompiles( """ |val org = "" |assume(org.isInstanceOf[String], "this is a clue") """.stripMargin) } it("should compile when used with org.size == 0 that shadow org.scalactic") { assertCompiles( """ |val org = 
Array.empty[String] |assume(org.size == 0, "this is a clue") """.stripMargin) } it("should compile when used with org.length == 0 that shadow org.scalactic") { assertCompiles( """ |val org = "" |assume(org.length == 0, "this is a clue") """.stripMargin) } it("should compile when used with org.exists(_ == 'b') that shadow org.scalactic ") { assertCompiles( """ |val org = "abc" |assume(org.exists(_ == 'b'), "this is a clue") """.stripMargin) } it("should do nothing when is used to check new String(\\"test\\") != \\"test\\"") { assume(new String("test") == "test", "this is a clue") } it("should throw TestCanceledException with correct message and stack depth when is used to check new String(\\"test\\") != \\"testing\\"") { val e = intercept[TestCanceledException] { assume(new String("test") == "testing", "this is a clue") } e.message should be ( Some( """this is a clue | |assume(new String("test") == "testing", "this is a clue") | | | | | "test" | "testing" | false |""".stripMargin ) ) e.failedCodeFileName should be (Some(fileName)) e.failedCodeLineNumber should be (Some(thisLineNumber - 14)) } it("should compile when used with Java static method") { assertCompiles( """ |assume(System.currentTimeMillis() > 0, "this is a clue") """.stripMargin) assertCompiles( """ |assume(java.math.BigDecimal.ZERO == java.math.BigDecimal.ZERO, "this is a clue") """.stripMargin) } } } }
scalatest/scalatest
jvm/diagrams-test/src/test/scala/org/scalatest/diagrams/DiagramsSpec.scala
Scala
apache-2.0
330,277
package core.models.services

import core.models.Config
import javax.inject.Inject
import play.api.Configuration
import scala.concurrent.{ ExecutionContext, Future }

/**
 * Default implementation of the [[ConfigService]] trait.
 *
 * Serves the configuration parameters that the frontend is allowed to see.
 *
 * @param configuration The Play configuration.
 * @param ec The execution context in which asynchronous work runs.
 */
class ConfigServiceImpl @Inject() (
  val configuration: Configuration
)(
  implicit
  ec: ExecutionContext
) extends ConfigService {

  /**
   * Gets configuration parameters.
   *
   * Placeholder implementation: returns a hard-coded [[Config]] value.
   * A real implementation would load the data from the DB or from the
   * Play configuration before exposing it to the frontend.
   *
   * @return The configuration parameters, wrapped in an already-completed future.
   */
  override def retrieve(): Future[Config] =
    Future.successful(Config("replace this"))
}
setusoft/silhouette-play-react-seed
app-core/src/main/scala/core/models/services/ConfigServiceImpl.scala
Scala
mit
763
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author  John Miller
 *  @version 1.2
 *  @date    Wed Dec 23 17:00:46 EST 2009
 *  @see     LICENSE (MIT style license file).
 */

package scalation.util

import scala.io.StdIn.readLine

//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Replace` object is used to replace pattern pat1 with pat2.  It reads from
 *  standard input and writes to standard output.
 */
object Replace extends App
{
    private val pat1 = " ::"          // pattern to find (change as needed)
    private val pat2 = " *"           // replacement pattern (change as needed)

    // Stream lines lazily from stdin until EOF ('readLine' returns null),
    // printing each line with 'pat1' replaced by 'pat2'.  This replaces the
    // original 'var' + 'do-while' loop, which relied on mutable state and on
    // the do-while construct that was dropped in Scala 3.
    Iterator.continually (readLine ())
            .takeWhile (_ != null)
            .foreach (line => println (line.replace (pat1, pat2)))

} // Replace object
NBKlepp/fda
scalation_1.2/src/main/scala/scalation/util/Replace.scala
Scala
mit
841
package com.twitter.finagle.netty4.channel

import com.twitter.finagle.netty4.ssl.Netty4SslHandler
import com.twitter.finagle.param._
import com.twitter.finagle.transport.Transport
import com.twitter.finagle.Stack
import io.netty.channel._
import java.util.logging.Level

private[netty4] object Netty4RawServerChannelInitializer {
  // Pipeline handler names used when (conditionally) installing the
  // logging and stats handlers below.
  val ChannelLoggerHandlerKey = "channel logger"
  val ChannelStatsHandlerKey = "channel stats"
}

/**
 * Server channel initialization logic for the part of the netty pipeline that
 * deals with raw bytes.
 *
 * @param params [[Stack.Params]] to configure the `Channel`.
 */
private[netty4] class Netty4RawServerChannelInitializer(
    params: Stack.Params)
  extends ChannelInitializer[Channel] {

  import Netty4RawServerChannelInitializer._

  // Destructure the relevant Stack params once at construction time.
  private[this] val Logger(logger) = params[Logger]
  private[this] val Label(label) = params[Label]
  private[this] val Stats(stats) = params[Stats]

  // Stats handler is only installed when a non-null stats receiver is configured.
  private[this] val channelStatsHandler =
    if (!stats.isNull) Some(new ChannelStatsHandler(stats)) else None

  // Byte-level snooper (wire logging) is only installed in verbose transport mode.
  private[this] val channelSnooper =
    if (params[Transport.Verbose].enabled)
      Some(ChannelSnooper.byteSnooper(label)(logger.log(Level.INFO, _, _)))
    else
      None

  override def initChannel(ch: Channel): Unit = {
    // first => last
    //  - a request flies from first to last
    //  - a response flies from last to first
    //
    // ssl => channel stats => channel snooper => write timeout => read timeout => req stats => ..
    // .. => exceptions => finagle
    //
    // NOTE(review): every handler below is installed with `addFirst`, so the
    // LAST `addFirst` call ends up at the head of the pipeline. The resulting
    // inbound order is therefore: direct-to-heap => tls => [stats] => [snooper].
    // That appears to differ from the "ssl => channel stats => channel snooper"
    // ordering in the comment above — confirm which ordering is intended.
    val pipeline = ch.pipeline

    channelSnooper.foreach(pipeline.addFirst(ChannelLoggerHandlerKey, _))
    channelStatsHandler.foreach(pipeline.addFirst(ChannelStatsHandlerKey, _))

    // Add SslHandler to the pipeline.
    pipeline.addFirst("tls init", new Netty4SslHandler(params))

    // Copy direct byte buffers onto heap before doing anything else.
    pipeline.addFirst("direct to heap", DirectToHeapInboundHandler)
  }
}
BuoyantIO/finagle
finagle-netty4/src/main/scala/com/twitter/finagle/netty4/channel/Netty4RawServerChannelInitializer.scala
Scala
apache-2.0
1,953
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql import java.io.File import java.math.MathContext import java.net.{MalformedURLException, URL} import java.sql.Timestamp import java.util.concurrent.atomic.AtomicBoolean import org.apache.spark.{AccumulatorSuite, SparkException} import org.apache.spark.scheduler.{SparkListener, SparkListenerJobStart} import org.apache.spark.sql.catalyst.util.StringUtils import org.apache.spark.sql.execution.aggregate import org.apache.spark.sql.execution.joins.{BroadcastHashJoinExec, CartesianProductExec, SortMergeJoinExec} import org.apache.spark.sql.functions._ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.{SharedSQLContext, TestSQLContext} import org.apache.spark.sql.test.SQLTestData._ import org.apache.spark.sql.types._ class SQLQuerySuite extends QueryTest with SharedSQLContext { import testImplicits._ setupTestData() test("SPARK-8010: promote numeric to string") { val df = Seq((1, 1)).toDF("key", "value") df.createOrReplaceTempView("src") val queryCaseWhen = sql("select case when true then 1.0 else '1' end from src ") val queryCoalesce = sql("select coalesce(null, 1, '1') from src ") checkAnswer(queryCaseWhen, Row("1.0") :: Nil) 
checkAnswer(queryCoalesce, Row("1") :: Nil) } test("show functions") { def getFunctions(pattern: String): Seq[Row] = { StringUtils.filterPattern( spark.sessionState.catalog.listFunctions("default").map(_._1.funcName), pattern) .map(Row(_)) } def createFunction(names: Seq[String]): Unit = { names.foreach { name => spark.udf.register(name, (arg1: Int, arg2: String) => arg2 + arg1) } } def dropFunction(names: Seq[String]): Unit = { names.foreach { name => spark.sessionState.catalog.dropTempFunction(name, false) } } val functions = Array("ilog", "logi", "logii", "logiii", "crc32i", "cubei", "cume_disti", "isize", "ispace", "to_datei", "date_addi", "current_datei") createFunction(functions) checkAnswer(sql("SHOW functions"), getFunctions("*")) assert(sql("SHOW functions").collect().size > 200) Seq("^c*", "*e$", "log*", "*date*").foreach { pattern => // For the pattern part, only '*' and '|' are allowed as wildcards. // For '*', we need to replace it to '.*'. checkAnswer(sql(s"SHOW FUNCTIONS '$pattern'"), getFunctions(pattern)) } dropFunction(functions) } test("describe functions") { checkKeywordsExist(sql("describe function extended upper"), "Function: upper", "Class: org.apache.spark.sql.catalyst.expressions.Upper", "Usage: upper(str) - Returns `str` with all characters changed to uppercase", "Extended Usage:", "Examples:", "> SELECT upper('SparkSql');", "SPARKSQL") checkKeywordsExist(sql("describe functioN Upper"), "Function: upper", "Class: org.apache.spark.sql.catalyst.expressions.Upper", "Usage: upper(str) - Returns `str` with all characters changed to uppercase") checkKeywordsNotExist(sql("describe functioN Upper"), "Extended Usage") checkKeywordsExist(sql("describe functioN abcadf"), "Function: abcadf not found.") } test("SPARK-14415: All functions should have own descriptions") { for (f <- spark.sessionState.functionRegistry.listFunction()) { if (!Seq("cube", "grouping", "grouping_id", "rollup", "window").contains(f.unquotedString)) { 
checkKeywordsNotExist(sql(s"describe function `$f`"), "N/A.") } } } test("SPARK-6743: no columns from cache") { Seq( (83, 0, 38), (26, 0, 79), (43, 81, 24) ).toDF("a", "b", "c").createOrReplaceTempView("cachedData") spark.catalog.cacheTable("cachedData") withSQLConf(SQLConf.CROSS_JOINS_ENABLED.key -> "true") { checkAnswer( sql("SELECT t1.b FROM cachedData, cachedData t1 GROUP BY t1.b"), Row(0) :: Row(81) :: Nil) } } test("self join with aliases") { Seq(1, 2, 3).map(i => (i, i.toString)).toDF("int", "str").createOrReplaceTempView("df") checkAnswer( sql( """ |SELECT x.str, COUNT(*) |FROM df x JOIN df y ON x.str = y.str |GROUP BY x.str """.stripMargin), Row("1", 1) :: Row("2", 1) :: Row("3", 1) :: Nil) } test("support table.star") { checkAnswer( sql( """ |SELECT r.* |FROM testData l join testData2 r on (l.key = r.a) """.stripMargin), Row(1, 1) :: Row(1, 2) :: Row(2, 1) :: Row(2, 2) :: Row(3, 1) :: Row(3, 2) :: Nil) } test("self join with alias in agg") { Seq(1, 2, 3) .map(i => (i, i.toString)) .toDF("int", "str") .groupBy("str") .agg($"str", count("str").as("strCount")) .createOrReplaceTempView("df") checkAnswer( sql( """ |SELECT x.str, SUM(x.strCount) |FROM df x JOIN df y ON x.str = y.str |GROUP BY x.str """.stripMargin), Row("1", 1) :: Row("2", 1) :: Row("3", 1) :: Nil) } test("SPARK-8668 expr function") { checkAnswer(Seq((1, "Bobby G.")) .toDF("id", "name") .select(expr("length(name)"), expr("abs(id)")), Row(8, 1)) checkAnswer(Seq((1, "building burrito tunnels"), (1, "major projects")) .toDF("id", "saying") .groupBy(expr("length(saying)")) .count(), Row(24, 1) :: Row(14, 1) :: Nil) } test("SPARK-4625 support SORT BY in SimpleSQLParser & DSL") { checkAnswer( sql("SELECT a FROM testData2 SORT BY a"), Seq(1, 1, 2, 2, 3, 3).map(Row(_)) ) } test("SPARK-7158 collect and take return different results") { import java.util.UUID val df = Seq(Tuple1(1), Tuple1(2), Tuple1(3)).toDF("index") // we except the id is materialized once val idUDF = 
org.apache.spark.sql.functions.udf(() => UUID.randomUUID().toString) val dfWithId = df.withColumn("id", idUDF()) // Make a new DataFrame (actually the same reference to the old one) val cached = dfWithId.cache() // Trigger the cache val d0 = dfWithId.collect() val d1 = cached.collect() val d2 = cached.collect() // Since the ID is only materialized once, then all of the records // should come from the cache, not by re-computing. Otherwise, the ID // will be different assert(d0.map(_(0)) === d2.map(_(0))) assert(d0.map(_(1)) === d2.map(_(1))) assert(d1.map(_(0)) === d2.map(_(0))) assert(d1.map(_(1)) === d2.map(_(1))) } test("grouping on nested fields") { spark.read .json(Seq("""{"nested": {"attribute": 1}, "value": 2}""").toDS()) .createOrReplaceTempView("rows") checkAnswer( sql( """ |select attribute, sum(cnt) |from ( | select nested.attribute, count(*) as cnt | from rows | group by nested.attribute) a |group by attribute """.stripMargin), Row(1, 1) :: Nil) } test("SPARK-6201 IN type conversion") { spark.read .json(Seq("{\\"a\\": \\"1\\"}}", "{\\"a\\": \\"2\\"}}", "{\\"a\\": \\"3\\"}}").toDS()) .createOrReplaceTempView("d") checkAnswer( sql("select * from d where d.a in (1,2)"), Seq(Row("1"), Row("2"))) } test("SPARK-11226 Skip empty line in json file") { spark.read .json(Seq("{\\"a\\": \\"1\\"}}", "{\\"a\\": \\"2\\"}}", "{\\"a\\": \\"3\\"}}", "").toDS()) .createOrReplaceTempView("d") checkAnswer( sql("select count(1) from d"), Seq(Row(3))) } test("SPARK-8828 sum should return null if all input values are null") { checkAnswer( sql("select sum(a), avg(a) from allNulls"), Seq(Row(null, null)) ) } private def testCodeGen(sqlText: String, expectedResults: Seq[Row]): Unit = { val df = sql(sqlText) // First, check if we have GeneratedAggregate. 
val hasGeneratedAgg = df.queryExecution.sparkPlan .collect { case _: aggregate.HashAggregateExec => true } .nonEmpty if (!hasGeneratedAgg) { fail( s""" |Codegen is enabled, but query $sqlText does not have HashAggregate in the plan. |${df.queryExecution.simpleString} """.stripMargin) } // Then, check results. checkAnswer(df, expectedResults) } test("aggregation with codegen") { // Prepare a table that we can group some rows. spark.table("testData") .union(spark.table("testData")) .union(spark.table("testData")) .createOrReplaceTempView("testData3x") try { // Just to group rows. testCodeGen( "SELECT key FROM testData3x GROUP BY key", (1 to 100).map(Row(_))) // COUNT testCodeGen( "SELECT key, count(value) FROM testData3x GROUP BY key", (1 to 100).map(i => Row(i, 3))) testCodeGen( "SELECT count(key) FROM testData3x", Row(300) :: Nil) // COUNT DISTINCT ON int testCodeGen( "SELECT value, count(distinct key) FROM testData3x GROUP BY value", (1 to 100).map(i => Row(i.toString, 1))) testCodeGen( "SELECT count(distinct key) FROM testData3x", Row(100) :: Nil) // SUM testCodeGen( "SELECT value, sum(key) FROM testData3x GROUP BY value", (1 to 100).map(i => Row(i.toString, 3 * i))) testCodeGen( "SELECT sum(key), SUM(CAST(key as Double)) FROM testData3x", Row(5050 * 3, 5050 * 3.0) :: Nil) // AVERAGE testCodeGen( "SELECT value, avg(key) FROM testData3x GROUP BY value", (1 to 100).map(i => Row(i.toString, i))) testCodeGen( "SELECT avg(key) FROM testData3x", Row(50.5) :: Nil) // MAX testCodeGen( "SELECT value, max(key) FROM testData3x GROUP BY value", (1 to 100).map(i => Row(i.toString, i))) testCodeGen( "SELECT max(key) FROM testData3x", Row(100) :: Nil) // MIN testCodeGen( "SELECT value, min(key) FROM testData3x GROUP BY value", (1 to 100).map(i => Row(i.toString, i))) testCodeGen( "SELECT min(key) FROM testData3x", Row(1) :: Nil) // Some combinations. 
testCodeGen( """ |SELECT | value, | sum(key), | max(key), | min(key), | avg(key), | count(key), | count(distinct key) |FROM testData3x |GROUP BY value """.stripMargin, (1 to 100).map(i => Row(i.toString, i*3, i, i, i, 3, 1))) testCodeGen( "SELECT max(key), min(key), avg(key), count(key), count(distinct key) FROM testData3x", Row(100, 1, 50.5, 300, 100) :: Nil) // Aggregate with Code generation handling all null values testCodeGen( "SELECT sum('a'), avg('a'), count(null) FROM testData", Row(null, null, 0) :: Nil) } finally { spark.catalog.dropTempView("testData3x") } } test("Add Parser of SQL COALESCE()") { checkAnswer( sql("""SELECT COALESCE(1, 2)"""), Row(1)) checkAnswer( sql("SELECT COALESCE(null, 1, 1.5)"), Row(BigDecimal(1))) checkAnswer( sql("SELECT COALESCE(null, null, null)"), Row(null)) } test("SPARK-3176 Added Parser of SQL LAST()") { checkAnswer( sql("SELECT LAST(n) FROM lowerCaseData"), Row(4)) } test("SPARK-2041 column name equals tablename") { checkAnswer( sql("SELECT tableName FROM tableName"), Row("test")) } test("SQRT") { checkAnswer( sql("SELECT SQRT(key) FROM testData"), (1 to 100).map(x => Row(math.sqrt(x.toDouble))).toSeq ) } test("SQRT with automatic string casts") { checkAnswer( sql("SELECT SQRT(CAST(key AS STRING)) FROM testData"), (1 to 100).map(x => Row(math.sqrt(x.toDouble))).toSeq ) } test("SPARK-2407 Added Parser of SQL SUBSTR()") { checkAnswer( sql("SELECT substr(tableName, 1, 2) FROM tableName"), Row("te")) checkAnswer( sql("SELECT substr(tableName, 3) FROM tableName"), Row("st")) checkAnswer( sql("SELECT substring(tableName, 1, 2) FROM tableName"), Row("te")) checkAnswer( sql("SELECT substring(tableName, 3) FROM tableName"), Row("st")) } test("SPARK-3173 Timestamp support in the parser") { (0 to 3).map(i => Tuple1(new Timestamp(i))).toDF("time").createOrReplaceTempView("timestamps") checkAnswer(sql( "SELECT time FROM timestamps WHERE time='1969-12-31 16:00:00.0'"), Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00"))) 
checkAnswer(sql( "SELECT time FROM timestamps WHERE time=CAST('1969-12-31 16:00:00.001' AS TIMESTAMP)"), Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.001"))) checkAnswer(sql( "SELECT time FROM timestamps WHERE time='1969-12-31 16:00:00.001'"), Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.001"))) checkAnswer(sql( "SELECT time FROM timestamps WHERE '1969-12-31 16:00:00.001'=time"), Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.001"))) checkAnswer(sql( """SELECT time FROM timestamps WHERE time<'1969-12-31 16:00:00.003' AND time>'1969-12-31 16:00:00.001'"""), Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.002"))) checkAnswer(sql( """ |SELECT time FROM timestamps |WHERE time IN ('1969-12-31 16:00:00.001','1969-12-31 16:00:00.002') """.stripMargin), Seq(Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.001")), Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.002")))) checkAnswer(sql( "SELECT time FROM timestamps WHERE time='123'"), Nil) } test("left semi greater than predicate") { withSQLConf(SQLConf.CROSS_JOINS_ENABLED.key -> "true") { checkAnswer( sql("SELECT * FROM testData2 x LEFT SEMI JOIN testData2 y ON x.a >= y.a + 2"), Seq(Row(3, 1), Row(3, 2)) ) } } test("left semi greater than predicate and equal operator") { checkAnswer( sql("SELECT * FROM testData2 x LEFT SEMI JOIN testData2 y ON x.b = y.b and x.a >= y.a + 2"), Seq(Row(3, 1), Row(3, 2)) ) checkAnswer( sql("SELECT * FROM testData2 x LEFT SEMI JOIN testData2 y ON x.b = y.a and x.a >= y.b + 1"), Seq(Row(2, 1), Row(2, 2), Row(3, 1), Row(3, 2)) ) } test("select *") { checkAnswer( sql("SELECT * FROM testData"), testData.collect().toSeq) } test("simple select") { checkAnswer( sql("SELECT value FROM testData WHERE key = 1"), Row("1")) } def sortTest(): Unit = { checkAnswer( sql("SELECT * FROM testData2 ORDER BY a ASC, b ASC"), Seq(Row(1, 1), Row(1, 2), Row(2, 1), Row(2, 2), Row(3, 1), Row(3, 2))) checkAnswer( sql("SELECT * FROM testData2 ORDER BY a ASC, b DESC"), Seq(Row(1, 2), Row(1, 
1), Row(2, 2), Row(2, 1), Row(3, 2), Row(3, 1))) checkAnswer( sql("SELECT * FROM testData2 ORDER BY a DESC, b DESC"), Seq(Row(3, 2), Row(3, 1), Row(2, 2), Row(2, 1), Row(1, 2), Row(1, 1))) checkAnswer( sql("SELECT * FROM testData2 ORDER BY a DESC, b ASC"), Seq(Row(3, 1), Row(3, 2), Row(2, 1), Row(2, 2), Row(1, 1), Row(1, 2))) checkAnswer( sql("SELECT b FROM binaryData ORDER BY a ASC"), (1 to 5).map(Row(_))) checkAnswer( sql("SELECT b FROM binaryData ORDER BY a DESC"), (1 to 5).map(Row(_)).toSeq.reverse) checkAnswer( sql("SELECT * FROM arrayData ORDER BY data[0] ASC"), arrayData.collect().sortBy(_.data(0)).map(Row.fromTuple).toSeq) checkAnswer( sql("SELECT * FROM arrayData ORDER BY data[0] DESC"), arrayData.collect().sortBy(_.data(0)).reverse.map(Row.fromTuple).toSeq) checkAnswer( sql("SELECT * FROM mapData ORDER BY data[1] ASC"), mapData.collect().sortBy(_.data(1)).map(Row.fromTuple).toSeq) checkAnswer( sql("SELECT * FROM mapData ORDER BY data[1] DESC"), mapData.collect().sortBy(_.data(1)).reverse.map(Row.fromTuple).toSeq) } test("external sorting") { sortTest() } test("CTE feature") { checkAnswer( sql("with q1 as (select * from testData limit 10) select * from q1"), testData.take(10).toSeq) checkAnswer( sql(""" |with q1 as (select * from testData where key= '5'), |q2 as (select * from testData where key = '4') |select * from q1 union all select * from q2""".stripMargin), Row(5, "5") :: Row(4, "4") :: Nil) } test("Allow only a single WITH clause per query") { intercept[AnalysisException] { sql( "with q1 as (select * from testData) with q2 as (select * from q1) select * from q2") } } test("date row") { checkAnswer(sql( """select cast("2015-01-28" as date) from testData limit 1"""), Row(java.sql.Date.valueOf("2015-01-28")) ) } test("from follow multiple brackets") { checkAnswer(sql( """ |select key from ((select * from testData) | union all (select * from testData)) x limit 1 """.stripMargin), Row(1) ) checkAnswer(sql( "select key from (select * from testData) x 
limit 1"), Row(1) ) checkAnswer(sql( """ |select key from | (select * from testData union all select * from testData) x | limit 1 """.stripMargin), Row(1) ) } test("average") { checkAnswer( sql("SELECT AVG(a) FROM testData2"), Row(2.0)) } test("average overflow") { checkAnswer( sql("SELECT AVG(a),b FROM largeAndSmallInts group by b"), Seq(Row(2147483645.0, 1), Row(2.0, 2))) } test("count") { checkAnswer( sql("SELECT COUNT(*) FROM testData2"), Row(testData2.count())) } test("count distinct") { checkAnswer( sql("SELECT COUNT(DISTINCT b) FROM testData2"), Row(2)) } test("approximate count distinct") { checkAnswer( sql("SELECT APPROX_COUNT_DISTINCT(a) FROM testData2"), Row(3)) } test("approximate count distinct with user provided standard deviation") { checkAnswer( sql("SELECT APPROX_COUNT_DISTINCT(a, 0.04) FROM testData2"), Row(3)) } test("null count") { checkAnswer( sql("SELECT a, COUNT(b) FROM testData3 GROUP BY a"), Seq(Row(1, 0), Row(2, 1))) checkAnswer( sql( "SELECT COUNT(a), COUNT(b), COUNT(1), COUNT(DISTINCT a), COUNT(DISTINCT b) FROM testData3"), Row(2, 1, 2, 2, 1)) } test("count of empty table") { withTempView("t") { Seq.empty[(Int, Int)].toDF("a", "b").createOrReplaceTempView("t") checkAnswer( sql("select count(a) from t"), Row(0)) } } test("inner join where, one match per row") { withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { checkAnswer( sql("SELECT * FROM uppercasedata JOIN lowercasedata WHERE n = N"), Seq( Row(1, "A", 1, "a"), Row(2, "B", 2, "b"), Row(3, "C", 3, "c"), Row(4, "D", 4, "d"))) } } test("inner join ON, one match per row") { withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { checkAnswer( sql("SELECT * FROM uppercasedata JOIN lowercasedata ON n = N"), Seq( Row(1, "A", 1, "a"), Row(2, "B", 2, "b"), Row(3, "C", 3, "c"), Row(4, "D", 4, "d"))) } } test("inner join, where, multiple matches") { withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { checkAnswer( sql( """ |SELECT * FROM | (SELECT * FROM testdata2 WHERE a = 1) x JOIN | (SELECT * FROM 
testdata2 WHERE a = 1) y |WHERE x.a = y.a""".stripMargin), Row(1, 1, 1, 1) :: Row(1, 1, 1, 2) :: Row(1, 2, 1, 1) :: Row(1, 2, 1, 2) :: Nil) } } test("inner join, no matches") { checkAnswer( sql( """ |SELECT * FROM | (SELECT * FROM testData2 WHERE a = 1) x JOIN | (SELECT * FROM testData2 WHERE a = 2) y |WHERE x.a = y.a""".stripMargin), Nil) } test("big inner join, 4 matches per row") { checkAnswer( sql( """ |SELECT * FROM | (SELECT * FROM testData UNION ALL | SELECT * FROM testData UNION ALL | SELECT * FROM testData UNION ALL | SELECT * FROM testData) x JOIN | (SELECT * FROM testData UNION ALL | SELECT * FROM testData UNION ALL | SELECT * FROM testData UNION ALL | SELECT * FROM testData) y |WHERE x.key = y.key""".stripMargin), testData.rdd.flatMap( row => Seq.fill(16)(Row.merge(row, row))).collect().toSeq) } test("cartesian product join") { withSQLConf(SQLConf.CROSS_JOINS_ENABLED.key -> "true") { checkAnswer( testData3.join(testData3), Row(1, null, 1, null) :: Row(1, null, 2, 2) :: Row(2, 2, 1, null) :: Row(2, 2, 2, 2) :: Nil) } } test("left outer join") { withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { checkAnswer( sql("SELECT * FROM uppercasedata LEFT OUTER JOIN lowercasedata ON n = N"), Row(1, "A", 1, "a") :: Row(2, "B", 2, "b") :: Row(3, "C", 3, "c") :: Row(4, "D", 4, "d") :: Row(5, "E", null, null) :: Row(6, "F", null, null) :: Nil) } } test("right outer join") { withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { checkAnswer( sql("SELECT * FROM lowercasedata RIGHT OUTER JOIN uppercasedata ON n = N"), Row(1, "a", 1, "A") :: Row(2, "b", 2, "B") :: Row(3, "c", 3, "C") :: Row(4, "d", 4, "D") :: Row(null, null, 5, "E") :: Row(null, null, 6, "F") :: Nil) } } test("full outer join") { checkAnswer( sql( """ |SELECT * FROM | (SELECT * FROM upperCaseData WHERE N <= 4) leftTable FULL OUTER JOIN | (SELECT * FROM upperCaseData WHERE N >= 3) rightTable | ON leftTable.N = rightTable.N """.stripMargin), Row(1, "A", null, null) :: Row(2, "B", null, null) :: Row(3, "C", 3, 
"C") :: Row (4, "D", 4, "D") :: Row(null, null, 5, "E") :: Row(null, null, 6, "F") :: Nil) } test("SPARK-11111 null-safe join should not use cartesian product") { val df = sql("select count(*) from testData a join testData b on (a.key <=> b.key)") val cp = df.queryExecution.sparkPlan.collect { case cp: CartesianProductExec => cp } assert(cp.isEmpty, "should not use CartesianProduct for null-safe join") val smj = df.queryExecution.sparkPlan.collect { case smj: SortMergeJoinExec => smj case j: BroadcastHashJoinExec => j } assert(smj.size > 0, "should use SortMergeJoin or BroadcastHashJoin") checkAnswer(df, Row(100) :: Nil) } test("SPARK-3349 partitioning after limit") { sql("SELECT DISTINCT n FROM lowerCaseData ORDER BY n DESC") .limit(2) .createOrReplaceTempView("subset1") sql("SELECT DISTINCT n FROM lowerCaseData ORDER BY n ASC") .limit(2) .createOrReplaceTempView("subset2") checkAnswer( sql("SELECT * FROM lowerCaseData INNER JOIN subset1 ON subset1.n = lowerCaseData.n"), Row(3, "c", 3) :: Row(4, "d", 4) :: Nil) checkAnswer( sql("SELECT * FROM lowerCaseData INNER JOIN subset2 ON subset2.n = lowerCaseData.n"), Row(1, "a", 1) :: Row(2, "b", 2) :: Nil) } test("mixed-case keywords") { checkAnswer( sql( """ |SeleCT * from | (select * from upperCaseData WherE N <= 4) leftTable fuLL OUtER joiN | (sElEcT * FROM upperCaseData whERe N >= 3) rightTable | oN leftTable.N = rightTable.N """.stripMargin), Row(1, "A", null, null) :: Row(2, "B", null, null) :: Row(3, "C", 3, "C") :: Row(4, "D", 4, "D") :: Row(null, null, 5, "E") :: Row(null, null, 6, "F") :: Nil) } test("select with table name as qualifier") { checkAnswer( sql("SELECT testData.value FROM testData WHERE testData.key = 1"), Row("1")) } test("inner join ON with table name as qualifier") { checkAnswer( sql("SELECT * FROM upperCaseData JOIN lowerCaseData ON lowerCaseData.n = upperCaseData.N"), Seq( Row(1, "A", 1, "a"), Row(2, "B", 2, "b"), Row(3, "C", 3, "c"), Row(4, "D", 4, "d"))) } test("qualified select with inner 
join ON with table name as qualifier") { checkAnswer( sql("SELECT upperCaseData.N, upperCaseData.L FROM upperCaseData JOIN lowerCaseData " + "ON lowerCaseData.n = upperCaseData.N"), Seq( Row(1, "A"), Row(2, "B"), Row(3, "C"), Row(4, "D"))) } test("system function upper()") { checkAnswer( sql("SELECT n,UPPER(l) FROM lowerCaseData"), Seq( Row(1, "A"), Row(2, "B"), Row(3, "C"), Row(4, "D"))) checkAnswer( sql("SELECT n, UPPER(s) FROM nullStrings"), Seq( Row(1, "ABC"), Row(2, "ABC"), Row(3, null))) } test("system function lower()") { checkAnswer( sql("SELECT N,LOWER(L) FROM upperCaseData"), Seq( Row(1, "a"), Row(2, "b"), Row(3, "c"), Row(4, "d"), Row(5, "e"), Row(6, "f"))) checkAnswer( sql("SELECT n, LOWER(s) FROM nullStrings"), Seq( Row(1, "abc"), Row(2, "abc"), Row(3, null))) } test("UNION") { checkAnswer( sql("SELECT * FROM lowerCaseData UNION SELECT * FROM upperCaseData"), Row(1, "A") :: Row(1, "a") :: Row(2, "B") :: Row(2, "b") :: Row(3, "C") :: Row(3, "c") :: Row(4, "D") :: Row(4, "d") :: Row(5, "E") :: Row(6, "F") :: Nil) checkAnswer( sql("SELECT * FROM lowerCaseData UNION SELECT * FROM lowerCaseData"), Row(1, "a") :: Row(2, "b") :: Row(3, "c") :: Row(4, "d") :: Nil) checkAnswer( sql("SELECT * FROM lowerCaseData UNION ALL SELECT * FROM lowerCaseData"), Row(1, "a") :: Row(1, "a") :: Row(2, "b") :: Row(2, "b") :: Row(3, "c") :: Row(3, "c") :: Row(4, "d") :: Row(4, "d") :: Nil) } test("UNION with column mismatches") { // Column name mismatches are allowed. checkAnswer( sql("SELECT n,l FROM lowerCaseData UNION SELECT N as x1, L as x2 FROM upperCaseData"), Row(1, "A") :: Row(1, "a") :: Row(2, "B") :: Row(2, "b") :: Row(3, "C") :: Row(3, "c") :: Row(4, "D") :: Row(4, "d") :: Row(5, "E") :: Row(6, "F") :: Nil) // Column type mismatches are not allowed, forcing a type coercion. 
checkAnswer( sql("SELECT n FROM lowerCaseData UNION SELECT L FROM upperCaseData"), ("1" :: "2" :: "3" :: "4" :: "A" :: "B" :: "C" :: "D" :: "E" :: "F" :: Nil).map(Row(_))) // Column type mismatches where a coercion is not possible, in this case between integer // and array types, trigger a TreeNodeException. intercept[AnalysisException] { sql("SELECT data FROM arrayData UNION SELECT 1 FROM arrayData").collect() } } test("EXCEPT") { checkAnswer( sql("SELECT * FROM lowerCaseData EXCEPT SELECT * FROM upperCaseData"), Row(1, "a") :: Row(2, "b") :: Row(3, "c") :: Row(4, "d") :: Nil) checkAnswer( sql("SELECT * FROM lowerCaseData EXCEPT SELECT * FROM lowerCaseData"), Nil) checkAnswer( sql("SELECT * FROM upperCaseData EXCEPT SELECT * FROM upperCaseData"), Nil) } test("MINUS") { checkAnswer( sql("SELECT * FROM lowerCaseData MINUS SELECT * FROM upperCaseData"), Row(1, "a") :: Row(2, "b") :: Row(3, "c") :: Row(4, "d") :: Nil) checkAnswer( sql("SELECT * FROM lowerCaseData MINUS SELECT * FROM lowerCaseData"), Nil) checkAnswer( sql("SELECT * FROM upperCaseData MINUS SELECT * FROM upperCaseData"), Nil) } test("INTERSECT") { checkAnswer( sql("SELECT * FROM lowerCaseData INTERSECT SELECT * FROM lowerCaseData"), Row(1, "a") :: Row(2, "b") :: Row(3, "c") :: Row(4, "d") :: Nil) checkAnswer( sql("SELECT * FROM lowerCaseData INTERSECT SELECT * FROM upperCaseData"), Nil) } test("SET commands semantics using sql()") { spark.sessionState.conf.clear() val testKey = "test.key.0" val testVal = "test.val.0" val nonexistentKey = "nonexistent" // "set" itself returns all config variables currently specified in SQLConf. 
assert(sql("SET").collect().size === TestSQLContext.overrideConfs.size) sql("SET").collect().foreach { row => val key = row.getString(0) val value = row.getString(1) assert( TestSQLContext.overrideConfs.contains(key), s"$key should exist in SQLConf.") assert( TestSQLContext.overrideConfs(key) === value, s"The value of $key should be ${TestSQLContext.overrideConfs(key)} instead of $value.") } val overrideConfs = sql("SET").collect() // "set key=val" sql(s"SET $testKey=$testVal") checkAnswer( sql("SET"), overrideConfs ++ Seq(Row(testKey, testVal)) ) sql(s"SET ${testKey + testKey}=${testVal + testVal}") checkAnswer( sql("set"), overrideConfs ++ Seq(Row(testKey, testVal), Row(testKey + testKey, testVal + testVal)) ) // "set key" checkAnswer( sql(s"SET $testKey"), Row(testKey, testVal) ) checkAnswer( sql(s"SET $nonexistentKey"), Row(nonexistentKey, "<undefined>") ) spark.sessionState.conf.clear() } test("SPARK-19218 SET command should show a result in a sorted order") { val overrideConfs = sql("SET").collect() sql(s"SET test.key3=1") sql(s"SET test.key2=2") sql(s"SET test.key1=3") val result = sql("SET").collect() assert(result === (overrideConfs ++ Seq( Row("test.key1", "3"), Row("test.key2", "2"), Row("test.key3", "1"))).sortBy(_.getString(0)) ) spark.sessionState.conf.clear() } test("SPARK-19218 `SET -v` should not fail with null value configuration") { import SQLConf._ val confEntry = buildConf("spark.test").doc("doc").stringConf.createWithDefault(null) try { val result = sql("SET -v").collect() assert(result === result.sortBy(_.getString(0))) } finally { SQLConf.unregister(confEntry) } } test("SET commands with illegal or inappropriate argument") { spark.sessionState.conf.clear() // Set negative mapred.reduce.tasks for automatically determining // the number of reducers is not supported intercept[IllegalArgumentException](sql(s"SET mapred.reduce.tasks=-1")) intercept[IllegalArgumentException](sql(s"SET mapred.reduce.tasks=-01")) 
intercept[IllegalArgumentException](sql(s"SET mapred.reduce.tasks=-2")) spark.sessionState.conf.clear() } test("SET mapreduce.job.reduces automatically converted to spark.sql.shuffle.partitions") { spark.sessionState.conf.clear() val before = spark.conf.get(SQLConf.SHUFFLE_PARTITIONS.key).toInt val newConf = before + 1 sql(s"SET mapreduce.job.reduces=${newConf.toString}") val after = spark.conf.get(SQLConf.SHUFFLE_PARTITIONS.key).toInt assert(before != after) assert(newConf === after) intercept[IllegalArgumentException](sql(s"SET mapreduce.job.reduces=-1")) spark.sessionState.conf.clear() } test("apply schema") { val schema1 = StructType( StructField("f1", IntegerType, false) :: StructField("f2", StringType, false) :: StructField("f3", BooleanType, false) :: StructField("f4", IntegerType, true) :: Nil) val rowRDD1 = unparsedStrings.map { r => val values = r.split(",").map(_.trim) val v4 = try values(3).toInt catch { case _: NumberFormatException => null } Row(values(0).toInt, values(1), values(2).toBoolean, v4) } val df1 = spark.createDataFrame(rowRDD1, schema1) df1.createOrReplaceTempView("applySchema1") checkAnswer( sql("SELECT * FROM applySchema1"), Row(1, "A1", true, null) :: Row(2, "B2", false, null) :: Row(3, "C3", true, null) :: Row(4, "D4", true, 2147483644) :: Nil) checkAnswer( sql("SELECT f1, f4 FROM applySchema1"), Row(1, null) :: Row(2, null) :: Row(3, null) :: Row(4, 2147483644) :: Nil) val schema2 = StructType( StructField("f1", StructType( StructField("f11", IntegerType, false) :: StructField("f12", BooleanType, false) :: Nil), false) :: StructField("f2", MapType(StringType, IntegerType, true), false) :: Nil) val rowRDD2 = unparsedStrings.map { r => val values = r.split(",").map(_.trim) val v4 = try values(3).toInt catch { case _: NumberFormatException => null } Row(Row(values(0).toInt, values(2).toBoolean), Map(values(1) -> v4)) } val df2 = spark.createDataFrame(rowRDD2, schema2) df2.createOrReplaceTempView("applySchema2") checkAnswer( sql("SELECT * 
FROM applySchema2"), Row(Row(1, true), Map("A1" -> null)) :: Row(Row(2, false), Map("B2" -> null)) :: Row(Row(3, true), Map("C3" -> null)) :: Row(Row(4, true), Map("D4" -> 2147483644)) :: Nil) checkAnswer( sql("SELECT f1.f11, f2['D4'] FROM applySchema2"), Row(1, null) :: Row(2, null) :: Row(3, null) :: Row(4, 2147483644) :: Nil) // The value of a MapType column can be a mutable map. val rowRDD3 = unparsedStrings.map { r => val values = r.split(",").map(_.trim) val v4 = try values(3).toInt catch { case _: NumberFormatException => null } Row(Row(values(0).toInt, values(2).toBoolean), scala.collection.mutable.Map(values(1) -> v4)) } val df3 = spark.createDataFrame(rowRDD3, schema2) df3.createOrReplaceTempView("applySchema3") checkAnswer( sql("SELECT f1.f11, f2['D4'] FROM applySchema3"), Row(1, null) :: Row(2, null) :: Row(3, null) :: Row(4, 2147483644) :: Nil) } test("SPARK-3423 BETWEEN") { checkAnswer( sql("SELECT key, value FROM testData WHERE key BETWEEN 5 and 7"), Seq(Row(5, "5"), Row(6, "6"), Row(7, "7")) ) checkAnswer( sql("SELECT key, value FROM testData WHERE key BETWEEN 7 and 7"), Row(7, "7") ) checkAnswer( sql("SELECT key, value FROM testData WHERE key BETWEEN 9 and 7"), Nil ) } test("SPARK-17863: SELECT distinct does not work correctly if order by missing attribute") { checkAnswer( sql("""select distinct struct.a, struct.b |from ( | select named_struct('a', 1, 'b', 2, 'c', 3) as struct | union all | select named_struct('a', 1, 'b', 2, 'c', 4) as struct) tmp |order by a, b |""".stripMargin), Row(1, 2) :: Nil) val error = intercept[AnalysisException] { sql("""select distinct struct.a, struct.b |from ( | select named_struct('a', 1, 'b', 2, 'c', 3) as struct | union all | select named_struct('a', 1, 'b', 2, 'c', 4) as struct) tmp |order by struct.a, struct.b |""".stripMargin) } assert(error.message contains "cannot resolve '`struct.a`' given input columns: [a, b]") } test("cast boolean to string") { // TODO Ensure true/false string letter casing is consistent 
with Hive in all cases. checkAnswer( sql("SELECT CAST(TRUE AS STRING), CAST(FALSE AS STRING) FROM testData LIMIT 1"), Row("true", "false")) } test("metadata is propagated correctly") { val person: DataFrame = sql("SELECT * FROM person") val schema = person.schema val docKey = "doc" val docValue = "first name" val metadata = new MetadataBuilder() .putString(docKey, docValue) .build() val schemaWithMeta = new StructType(Array( schema("id"), schema("name").copy(metadata = metadata), schema("age"))) val personWithMeta = spark.createDataFrame(person.rdd, schemaWithMeta) def validateMetadata(rdd: DataFrame): Unit = { assert(rdd.schema("name").metadata.getString(docKey) == docValue) } personWithMeta.createOrReplaceTempView("personWithMeta") validateMetadata(personWithMeta.select($"name")) validateMetadata(personWithMeta.select($"name")) validateMetadata(personWithMeta.select($"id", $"name")) validateMetadata(sql("SELECT * FROM personWithMeta")) validateMetadata(sql("SELECT id, name FROM personWithMeta")) validateMetadata(sql("SELECT * FROM personWithMeta JOIN salary ON id = personId")) validateMetadata(sql( "SELECT name, salary FROM personWithMeta JOIN salary ON id = personId")) } test("SPARK-3371 Renaming a function expression with group by gives error") { spark.udf.register("len", (s: String) => s.length) checkAnswer( sql("SELECT len(value) as temp FROM testData WHERE key = 1 group by len(value)"), Row(1)) } test("SPARK-3813 CASE a WHEN b THEN c [WHEN d THEN e]* [ELSE f] END") { checkAnswer( sql("SELECT CASE key WHEN 1 THEN 1 ELSE 0 END FROM testData WHERE key = 1 group by key"), Row(1)) } test("SPARK-3813 CASE WHEN a THEN b [WHEN c THEN d]* [ELSE e] END") { checkAnswer( sql("SELECT CASE WHEN key = 1 THEN 1 ELSE 2 END FROM testData WHERE key = 1 group by key"), Row(1)) } testQuietly( "SPARK-16748: SparkExceptions during planning should not wrapped in TreeNodeException") { intercept[SparkException] { val df = spark.range(0, 5).map(x => (1 / 
x).toString).toDF("a").orderBy("a") df.queryExecution.toRdd // force physical planning, but not execution of the plan } } test("Multiple join") { checkAnswer( sql( """SELECT a.key, b.key, c.key |FROM testData a |JOIN testData b ON a.key = b.key |JOIN testData c ON a.key = c.key """.stripMargin), (1 to 100).map(i => Row(i, i, i))) } test("SPARK-3483 Special chars in column names") { val data = Seq("""{"key?number1": "value1", "key.number2": "value2"}""").toDS() spark.read.json(data).createOrReplaceTempView("records") withSQLConf(SQLConf.SUPPORT_QUOTED_REGEX_COLUMN_NAME.key -> "false") { sql("SELECT `key?number1`, `key.number2` FROM records") } } test("SPARK-3814 Support Bitwise & operator") { checkAnswer(sql("SELECT key&1 FROM testData WHERE key = 1 "), Row(1)) } test("SPARK-3814 Support Bitwise | operator") { checkAnswer(sql("SELECT key|0 FROM testData WHERE key = 1 "), Row(1)) } test("SPARK-3814 Support Bitwise ^ operator") { checkAnswer(sql("SELECT key^0 FROM testData WHERE key = 1 "), Row(1)) } test("SPARK-3814 Support Bitwise ~ operator") { checkAnswer(sql("SELECT ~key FROM testData WHERE key = 1 "), Row(-2)) } test("SPARK-4120 Join of multiple tables does not work in SparkSQL") { checkAnswer( sql( """SELECT a.key, b.key, c.key |FROM testData a,testData b,testData c |where a.key = b.key and a.key = c.key """.stripMargin), (1 to 100).map(i => Row(i, i, i))) } test("SPARK-4154 Query does not work if it has 'not between' in Spark SQL and HQL") { checkAnswer(sql("SELECT key FROM testData WHERE key not between 0 and 10 order by key"), (11 to 100).map(i => Row(i))) } test("SPARK-4207 Query which has syntax like 'not like' is not working in Spark SQL") { checkAnswer(sql("SELECT key FROM testData WHERE value not like '100%' order by key"), (1 to 99).map(i => Row(i))) } test("SPARK-4322 Grouping field with struct field as sub expression") { spark.read.json(Seq("""{"a": {"b": [{"c": 1}]}}""").toDS()) .createOrReplaceTempView("data") checkAnswer(sql("SELECT a.b[0].c FROM 
data GROUP BY a.b[0].c"), Row(1)) spark.catalog.dropTempView("data") spark.read.json(Seq("""{"a": {"b": 1}}""").toDS()) .createOrReplaceTempView("data") checkAnswer(sql("SELECT a.b + 1 FROM data GROUP BY a.b + 1"), Row(2)) spark.catalog.dropTempView("data") } test("SPARK-4432 Fix attribute reference resolution error when using ORDER BY") { checkAnswer( sql("SELECT a + b FROM testData2 ORDER BY a"), Seq(2, 3, 3, 4, 4, 5).map(Row(_)) ) } test("oder by asc by default when not specify ascending and descending") { checkAnswer( sql("SELECT a, b FROM testData2 ORDER BY a desc, b"), Seq(Row(3, 1), Row(3, 2), Row(2, 1), Row(2, 2), Row(1, 1), Row(1, 2)) ) } test("Supporting relational operator '<=>' in Spark SQL") { val nullCheckData1 = TestData(1, "1") :: TestData(2, null) :: Nil val rdd1 = sparkContext.parallelize((0 to 1).map(i => nullCheckData1(i))) rdd1.toDF().createOrReplaceTempView("nulldata1") val nullCheckData2 = TestData(1, "1") :: TestData(2, null) :: Nil val rdd2 = sparkContext.parallelize((0 to 1).map(i => nullCheckData2(i))) rdd2.toDF().createOrReplaceTempView("nulldata2") checkAnswer(sql("SELECT nulldata1.key FROM nulldata1 join " + "nulldata2 on nulldata1.value <=> nulldata2.value"), (1 to 2).map(i => Row(i))) } test("Multi-column COUNT(DISTINCT ...)") { val data = TestData(1, "val_1") :: TestData(2, "val_2") :: Nil val rdd = sparkContext.parallelize((0 to 1).map(i => data(i))) rdd.toDF().createOrReplaceTempView("distinctData") checkAnswer(sql("SELECT COUNT(DISTINCT key,value) FROM distinctData"), Row(2)) } test("SPARK-4699 case sensitivity SQL query") { withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") { val data = TestData(1, "val_1") :: TestData(2, "val_2") :: Nil val rdd = sparkContext.parallelize((0 to 1).map(i => data(i))) rdd.toDF().createOrReplaceTempView("testTable1") checkAnswer(sql("SELECT VALUE FROM TESTTABLE1 where KEY = 1"), Row("val_1")) } } test("SPARK-6145: ORDER BY test for nested fields") { spark.read .json(Seq("""{"a": {"b": 1, "a": {"a": 
1}}, "c": [{"d": 1}]}""").toDS()) .createOrReplaceTempView("nestedOrder") checkAnswer(sql("SELECT 1 FROM nestedOrder ORDER BY a.b"), Row(1)) checkAnswer(sql("SELECT a.b FROM nestedOrder ORDER BY a.b"), Row(1)) checkAnswer(sql("SELECT 1 FROM nestedOrder ORDER BY a.a.a"), Row(1)) checkAnswer(sql("SELECT a.a.a FROM nestedOrder ORDER BY a.a.a"), Row(1)) checkAnswer(sql("SELECT 1 FROM nestedOrder ORDER BY c[0].d"), Row(1)) checkAnswer(sql("SELECT c[0].d FROM nestedOrder ORDER BY c[0].d"), Row(1)) } test("SPARK-6145: special cases") { spark.read .json(Seq("""{"a": {"b": [1]}, "b": [{"a": 1}], "_c0": {"a": 1}}""").toDS()) .createOrReplaceTempView("t") checkAnswer(sql("SELECT a.b[0] FROM t ORDER BY _c0.a"), Row(1)) checkAnswer(sql("SELECT b[0].a FROM t ORDER BY _c0.a"), Row(1)) } test("SPARK-6898: complete support for special chars in column names") { spark.read .json(Seq("""{"a": {"c.b": 1}, "b.$q": [{"a@!.q": 1}], "q.w": {"w.i&": [1]}}""").toDS()) .createOrReplaceTempView("t") withSQLConf(SQLConf.SUPPORT_QUOTED_REGEX_COLUMN_NAME.key -> "false") { checkAnswer(sql("SELECT a.`c.b`, `b.$q`[0].`a@!.q`, `q.w`.`w.i&`[0] FROM t"), Row(1, 1, 1)) } } test("SPARK-6583 order by aggregated function") { Seq("1" -> 3, "1" -> 4, "2" -> 7, "2" -> 8, "3" -> 5, "3" -> 6, "4" -> 1, "4" -> 2) .toDF("a", "b").createOrReplaceTempView("orderByData") checkAnswer( sql( """ |SELECT a |FROM orderByData |GROUP BY a |ORDER BY sum(b) """.stripMargin), Row("4") :: Row("1") :: Row("3") :: Row("2") :: Nil) checkAnswer( sql( """ |SELECT sum(b) |FROM orderByData |GROUP BY a |ORDER BY sum(b) """.stripMargin), Row(3) :: Row(7) :: Row(11) :: Row(15) :: Nil) checkAnswer( sql( """ |SELECT sum(b) |FROM orderByData |GROUP BY a |ORDER BY sum(b), max(b) """.stripMargin), Row(3) :: Row(7) :: Row(11) :: Row(15) :: Nil) checkAnswer( sql( """ |SELECT a, sum(b) |FROM orderByData |GROUP BY a |ORDER BY sum(b) """.stripMargin), Row("4", 3) :: Row("1", 7) :: Row("3", 11) :: Row("2", 15) :: Nil) checkAnswer( sql( """ |SELECT 
a, sum(b) |FROM orderByData |GROUP BY a |ORDER BY sum(b) + 1 """.stripMargin), Row("4", 3) :: Row("1", 7) :: Row("3", 11) :: Row("2", 15) :: Nil) checkAnswer( sql( """ |SELECT count(*) |FROM orderByData |GROUP BY a |ORDER BY count(*) """.stripMargin), Row(2) :: Row(2) :: Row(2) :: Row(2) :: Nil) checkAnswer( sql( """ |SELECT a |FROM orderByData |GROUP BY a |ORDER BY a, count(*), sum(b) """.stripMargin), Row("1") :: Row("2") :: Row("3") :: Row("4") :: Nil) } test("SPARK-7952: fix the equality check between boolean and numeric types") { withTempView("t") { // numeric field i, boolean field j, result of i = j, result of i <=> j Seq[(Integer, java.lang.Boolean, java.lang.Boolean, java.lang.Boolean)]( (1, true, true, true), (0, false, true, true), (2, true, false, false), (2, false, false, false), (null, true, null, false), (null, false, null, false), (0, null, null, false), (1, null, null, false), (null, null, null, true) ).toDF("i", "b", "r1", "r2").createOrReplaceTempView("t") checkAnswer(sql("select i = b from t"), sql("select r1 from t")) checkAnswer(sql("select i <=> b from t"), sql("select r2 from t")) } } test("SPARK-7067: order by queries for complex ExtractValue chain") { withTempView("t") { spark.read .json(Seq("""{"a": {"b": [{"c": 1}]}, "b": [{"d": 1}]}""").toDS()) .createOrReplaceTempView("t") checkAnswer(sql("SELECT a.b FROM t ORDER BY b[0].d"), Row(Seq(Row(1)))) } } test("SPARK-8782: ORDER BY NULL") { withTempView("t") { Seq((1, 2), (1, 2)).toDF("a", "b").createOrReplaceTempView("t") checkAnswer(sql("SELECT * FROM t ORDER BY NULL"), Seq(Row(1, 2), Row(1, 2))) } } test("SPARK-8837: use keyword in column name") { withTempView("t") { val df = Seq(1 -> "a").toDF("count", "sort") checkAnswer(df.filter("count > 0"), Row(1, "a")) df.createOrReplaceTempView("t") checkAnswer(sql("select count, sort from t"), Row(1, "a")) } } test("SPARK-8753: add interval type") { import org.apache.spark.unsafe.types.CalendarInterval val df = sql("select interval 3 years -3 month 
7 week 123 microseconds") checkAnswer(df, Row(new CalendarInterval(12 * 3 - 3, 7L * 1000 * 1000 * 3600 * 24 * 7 + 123 ))) withTempPath(f => { // Currently we don't yet support saving out values of interval data type. val e = intercept[AnalysisException] { df.write.json(f.getCanonicalPath) } e.message.contains("Cannot save interval data type into external storage") }) val e1 = intercept[AnalysisException] { sql("select interval") } assert(e1.message.contains("at least one time unit should be given for interval literal")) // Currently we don't yet support nanosecond val e2 = intercept[AnalysisException] { sql("select interval 23 nanosecond") } assert(e2.message.contains("No interval can be constructed")) } test("SPARK-8945: add and subtract expressions for interval type") { import org.apache.spark.unsafe.types.CalendarInterval import org.apache.spark.unsafe.types.CalendarInterval.MICROS_PER_WEEK val df = sql("select interval 3 years -3 month 7 week 123 microseconds as i") checkAnswer(df, Row(new CalendarInterval(12 * 3 - 3, 7L * MICROS_PER_WEEK + 123))) checkAnswer(df.select(df("i") + new CalendarInterval(2, 123)), Row(new CalendarInterval(12 * 3 - 3 + 2, 7L * MICROS_PER_WEEK + 123 + 123))) checkAnswer(df.select(df("i") - new CalendarInterval(2, 123)), Row(new CalendarInterval(12 * 3 - 3 - 2, 7L * MICROS_PER_WEEK + 123 - 123))) // unary minus checkAnswer(df.select(-df("i")), Row(new CalendarInterval(-(12 * 3 - 3), -(7L * MICROS_PER_WEEK + 123)))) } test("aggregation with codegen updates peak execution memory") { AccumulatorSuite.verifyPeakExecutionMemorySet(sparkContext, "aggregation with codegen") { testCodeGen( "SELECT key, count(value) FROM testData GROUP BY key", (1 to 100).map(i => Row(i, 1))) } } test("decimal precision with multiply/division") { checkAnswer(sql("select 10.3 * 3.0"), Row(BigDecimal("30.90"))) checkAnswer(sql("select 10.3000 * 3.0"), Row(BigDecimal("30.90000"))) checkAnswer(sql("select 10.30000 * 30.0"), Row(BigDecimal("309.000000"))) 
checkAnswer(sql("select 10.300000000000000000 * 3.000000000000000000"), Row(BigDecimal("30.900000000000000000000000000000000000", new MathContext(38)))) checkAnswer(sql("select 10.300000000000000000 * 3.0000000000000000000"), Row(null)) checkAnswer(sql("select 10.3 / 3.0"), Row(BigDecimal("3.433333"))) checkAnswer(sql("select 10.3000 / 3.0"), Row(BigDecimal("3.4333333"))) checkAnswer(sql("select 10.30000 / 30.0"), Row(BigDecimal("0.343333333"))) checkAnswer(sql("select 10.300000000000000000 / 3.00000000000000000"), Row(BigDecimal("3.433333333333333333333333333", new MathContext(38)))) checkAnswer(sql("select 10.3000000000000000000 / 3.00000000000000000"), Row(BigDecimal("3.4333333333333333333333333333", new MathContext(38)))) } test("SPARK-10215 Div of Decimal returns null") { val d = Decimal(1.12321).toBigDecimal val df = Seq((d, 1)).toDF("a", "b") checkAnswer( df.selectExpr("b * a / b"), Seq(Row(d))) checkAnswer( df.selectExpr("b * a / b / b"), Seq(Row(d))) checkAnswer( df.selectExpr("b * a + b"), Seq(Row(BigDecimal(2.12321)))) checkAnswer( df.selectExpr("b * a - b"), Seq(Row(BigDecimal(0.12321)))) checkAnswer( df.selectExpr("b * a * b"), Seq(Row(d))) } test("precision smaller than scale") { checkAnswer(sql("select 10.00"), Row(BigDecimal("10.00"))) checkAnswer(sql("select 1.00"), Row(BigDecimal("1.00"))) checkAnswer(sql("select 0.10"), Row(BigDecimal("0.10"))) checkAnswer(sql("select 0.01"), Row(BigDecimal("0.01"))) checkAnswer(sql("select 0.001"), Row(BigDecimal("0.001"))) checkAnswer(sql("select -0.01"), Row(BigDecimal("-0.01"))) checkAnswer(sql("select -0.001"), Row(BigDecimal("-0.001"))) } test("external sorting updates peak execution memory") { AccumulatorSuite.verifyPeakExecutionMemorySet(sparkContext, "external sort") { sql("SELECT * FROM testData2 ORDER BY a ASC, b ASC").collect() } } test("SPARK-9511: error with table starting with number") { withTempView("1one") { sparkContext.parallelize(1 to 10).map(i => (i, i.toString)) .toDF("num", "str") 
.createOrReplaceTempView("1one") checkAnswer(sql("select count(num) from 1one"), Row(10)) } } test("specifying database name for a temporary view is not allowed") { withTempPath { dir => val path = dir.toURI.toString val df = sparkContext.parallelize(1 to 10).map(i => (i, i.toString)).toDF("num", "str") df .write .format("parquet") .save(path) // We don't support creating a temporary table while specifying a database intercept[AnalysisException] { spark.sql( s""" |CREATE TEMPORARY VIEW db.t |USING parquet |OPTIONS ( | path '$path' |) """.stripMargin) }.getMessage // If you use backticks to quote the name then it's OK. spark.sql( s""" |CREATE TEMPORARY VIEW `db.t` |USING parquet |OPTIONS ( | path '$path' |) """.stripMargin) checkAnswer(spark.table("`db.t`"), df) } } test("SPARK-10130 type coercion for IF should have children resolved first") { withTempView("src") { Seq((1, 1), (-1, 1)).toDF("key", "value").createOrReplaceTempView("src") checkAnswer( sql("SELECT IF(a > 0, a, 0) FROM (SELECT key a FROM src) temp"), Seq(Row(1), Row(0))) } } test("SPARK-10389: order by non-attribute grouping expression on Aggregate") { withTempView("src") { Seq((1, 1), (-1, 1)).toDF("key", "value").createOrReplaceTempView("src") checkAnswer(sql("SELECT MAX(value) FROM src GROUP BY key + 1 ORDER BY key + 1"), Seq(Row(1), Row(1))) checkAnswer(sql("SELECT MAX(value) FROM src GROUP BY key + 1 ORDER BY (key + 1) * 2"), Seq(Row(1), Row(1))) } } test("run sql directly on files") { val df = spark.range(100).toDF() withTempPath(f => { df.write.json(f.getCanonicalPath) checkAnswer(sql(s"select id from json.`${f.getCanonicalPath}`"), df) checkAnswer(sql(s"select id from `org.apache.spark.sql.json`.`${f.getCanonicalPath}`"), df) checkAnswer(sql(s"select a.id from json.`${f.getCanonicalPath}` as a"), df) }) var e = intercept[AnalysisException] { sql("select * from in_valid_table") } assert(e.message.contains("Table or view not found")) e = intercept[AnalysisException] { sql("select * from 
no_db.no_table").show() } assert(e.message.contains("Table or view not found")) e = intercept[AnalysisException] { sql("select * from json.invalid_file") } assert(e.message.contains("Path does not exist")) e = intercept[AnalysisException] { sql(s"select id from `org.apache.spark.sql.hive.orc`.`file_path`") } assert(e.message.contains("The ORC data source must be used with Hive support enabled")) e = intercept[AnalysisException] { sql(s"select id from `com.databricks.spark.avro`.`file_path`") } assert(e.message.contains("Failed to find data source: com.databricks.spark.avro.")) // data source type is case insensitive e = intercept[AnalysisException] { sql(s"select id from Avro.`file_path`") } assert(e.message.contains("Failed to find data source: avro.")) e = intercept[AnalysisException] { sql(s"select id from avro.`file_path`") } assert(e.message.contains("Failed to find data source: avro.")) e = intercept[AnalysisException] { sql(s"select id from `org.apache.spark.sql.sources.HadoopFsRelationProvider`.`file_path`") } assert(e.message.contains("Table or view not found: " + "`org.apache.spark.sql.sources.HadoopFsRelationProvider`.`file_path`")) e = intercept[AnalysisException] { sql(s"select id from `Jdbc`.`file_path`") } assert(e.message.contains("Unsupported data source type for direct query on files: Jdbc")) e = intercept[AnalysisException] { sql(s"select id from `org.apache.spark.sql.execution.datasources.jdbc`.`file_path`") } assert(e.message.contains("Unsupported data source type for direct query on files: " + "org.apache.spark.sql.execution.datasources.jdbc")) } test("SortMergeJoin returns wrong results when using UnsafeRows") { // This test is for the fix of https://issues.apache.org/jira/browse/SPARK-10737. // This bug will be triggered when Tungsten is enabled and there are multiple // SortMergeJoin operators executed in the same task. 
val confs = SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "1" :: Nil withSQLConf(confs: _*) { val df1 = (1 to 50).map(i => (s"str_$i", i)).toDF("i", "j") val df2 = df1 .join(df1.select(df1("i")), "i") .select(df1("i"), df1("j")) val df3 = df2.withColumnRenamed("i", "i1").withColumnRenamed("j", "j1") val df4 = df2 .join(df3, df2("i") === df3("i1")) .withColumn("diff", $"j" - $"j1") .select(df2("i"), df2("j"), $"diff") checkAnswer( df4, df1.withColumn("diff", lit(0))) } } test("SPARK-11303: filter should not be pushed down into sample") { val df = spark.range(100) List(true, false).foreach { withReplacement => val sampled = df.sample(withReplacement, 0.1, 1) val sampledOdd = sampled.filter("id % 2 != 0") val sampledEven = sampled.filter("id % 2 = 0") assert(sampled.count() == sampledOdd.count() + sampledEven.count()) } } test("Struct Star Expansion") { val structDf = testData2.select("a", "b").as("record") checkAnswer( structDf.select($"record.a", $"record.b"), Row(1, 1) :: Row(1, 2) :: Row(2, 1) :: Row(2, 2) :: Row(3, 1) :: Row(3, 2) :: Nil) checkAnswer( structDf.select($"record.*"), Row(1, 1) :: Row(1, 2) :: Row(2, 1) :: Row(2, 2) :: Row(3, 1) :: Row(3, 2) :: Nil) checkAnswer( structDf.select($"record.*", $"record.*"), Row(1, 1, 1, 1) :: Row(1, 2, 1, 2) :: Row(2, 1, 2, 1) :: Row(2, 2, 2, 2) :: Row(3, 1, 3, 1) :: Row(3, 2, 3, 2) :: Nil) checkAnswer( sql("select struct(a, b) as r1, struct(b, a) as r2 from testData2").select($"r1.*", $"r2.*"), Row(1, 1, 1, 1) :: Row(1, 2, 2, 1) :: Row(2, 1, 1, 2) :: Row(2, 2, 2, 2) :: Row(3, 1, 1, 3) :: Row(3, 2, 2, 3) :: Nil) // Try with a temporary view sql("select struct(a, b) as record from testData2").createOrReplaceTempView("structTable") checkAnswer( sql("SELECT record.* FROM structTable"), Row(1, 1) :: Row(1, 2) :: Row(2, 1) :: Row(2, 2) :: Row(3, 1) :: Row(3, 2) :: Nil) checkAnswer(sql( """ | SELECT min(struct(record.*)) FROM | (select struct(a,b) as record from testData2) tmp """.stripMargin), Row(Row(1, 1)) :: Nil) // Try 
with an alias on the select list checkAnswer(sql( """ | SELECT max(struct(record.*)) as r FROM | (select struct(a,b) as record from testData2) tmp """.stripMargin).select($"r.*"), Row(3, 2) :: Nil) // With GROUP BY checkAnswer(sql( """ | SELECT min(struct(record.*)) FROM | (select a as a, struct(a,b) as record from testData2) tmp | GROUP BY a """.stripMargin), Row(Row(1, 1)) :: Row(Row(2, 1)) :: Row(Row(3, 1)) :: Nil) // With GROUP BY and alias checkAnswer(sql( """ | SELECT max(struct(record.*)) as r FROM | (select a as a, struct(a,b) as record from testData2) tmp | GROUP BY a """.stripMargin).select($"r.*"), Row(1, 2) :: Row(2, 2) :: Row(3, 2) :: Nil) // With GROUP BY and alias and additional fields in the struct checkAnswer(sql( """ | SELECT max(struct(a, record.*, b)) as r FROM | (select a as a, b as b, struct(a,b) as record from testData2) tmp | GROUP BY a """.stripMargin).select($"r.*"), Row(1, 1, 2, 2) :: Row(2, 2, 2, 2) :: Row(3, 3, 2, 2) :: Nil) // Create a data set that contains nested structs. 
val nestedStructData = sql( """ | SELECT struct(r1, r2) as record FROM | (SELECT struct(a, b) as r1, struct(b, a) as r2 FROM testData2) tmp """.stripMargin) checkAnswer(nestedStructData.select($"record.*"), Row(Row(1, 1), Row(1, 1)) :: Row(Row(1, 2), Row(2, 1)) :: Row(Row(2, 1), Row(1, 2)) :: Row(Row(2, 2), Row(2, 2)) :: Row(Row(3, 1), Row(1, 3)) :: Row(Row(3, 2), Row(2, 3)) :: Nil) checkAnswer(nestedStructData.select($"record.r1"), Row(Row(1, 1)) :: Row(Row(1, 2)) :: Row(Row(2, 1)) :: Row(Row(2, 2)) :: Row(Row(3, 1)) :: Row(Row(3, 2)) :: Nil) checkAnswer( nestedStructData.select($"record.r1.*"), Row(1, 1) :: Row(1, 2) :: Row(2, 1) :: Row(2, 2) :: Row(3, 1) :: Row(3, 2) :: Nil) // Try with a temporary view withTempView("nestedStructTable") { nestedStructData.createOrReplaceTempView("nestedStructTable") checkAnswer( sql("SELECT record.* FROM nestedStructTable"), nestedStructData.select($"record.*")) checkAnswer( sql("SELECT record.r1 FROM nestedStructTable"), nestedStructData.select($"record.r1")) checkAnswer( sql("SELECT record.r1.* FROM nestedStructTable"), nestedStructData.select($"record.r1.*")) // Try resolving something not there. 
assert(intercept[AnalysisException](sql("SELECT abc.* FROM nestedStructTable")) .getMessage.contains("cannot resolve")) } // Create paths with unusual characters withSQLConf(SQLConf.SUPPORT_QUOTED_REGEX_COLUMN_NAME.key -> "false") { val specialCharacterPath = sql( """ | SELECT struct(`col$.a_`, `a.b.c.`) as `r&&b.c` FROM | (SELECT struct(a, b) as `col$.a_`, struct(b, a) as `a.b.c.` FROM testData2) tmp """.stripMargin) withTempView("specialCharacterTable") { specialCharacterPath.createOrReplaceTempView("specialCharacterTable") checkAnswer( specialCharacterPath.select($"`r&&b.c`.*"), nestedStructData.select($"record.*")) checkAnswer( sql( "SELECT `r&&b.c`.`col$.a_` FROM specialCharacterTable"), nestedStructData.select($"record.r1")) checkAnswer( sql("SELECT `r&&b.c`.`a.b.c.` FROM specialCharacterTable"), nestedStructData.select($"record.r2")) checkAnswer( sql("SELECT `r&&b.c`.`col$.a_`.* FROM specialCharacterTable"), nestedStructData.select($"record.r1.*")) } } // Try star expanding a scalar. This should fail. assert(intercept[AnalysisException](sql("select a.* from testData2")).getMessage.contains( "Can only star expand struct data types.")) } test("Struct Star Expansion - Name conflict") { // Create a data set that contains a naming conflict val nameConflict = sql("SELECT struct(a, b) as nameConflict, a as a FROM testData2") withTempView("nameConflict") { nameConflict.createOrReplaceTempView("nameConflict") // Unqualified should resolve to table. checkAnswer(sql("SELECT nameConflict.* FROM nameConflict"), Row(Row(1, 1), 1) :: Row(Row(1, 2), 1) :: Row(Row(2, 1), 2) :: Row(Row(2, 2), 2) :: Row(Row(3, 1), 3) :: Row(Row(3, 2), 3) :: Nil) // Qualify the struct type with the table name. 
checkAnswer(sql("SELECT nameConflict.nameConflict.* FROM nameConflict"), Row(1, 1) :: Row(1, 2) :: Row(2, 1) :: Row(2, 2) :: Row(3, 1) :: Row(3, 2) :: Nil) } } test("Star Expansion - group by") { withSQLConf("spark.sql.retainGroupColumns" -> "false") { checkAnswer( testData2.groupBy($"a", $"b").agg($"*"), sql("SELECT * FROM testData2 group by a, b")) } } test("Star Expansion - table with zero column") { withTempView("temp_table_no_cols") { val rddNoCols = sparkContext.parallelize(1 to 10).map(_ => Row.empty) val dfNoCols = spark.createDataFrame(rddNoCols, StructType(Seq.empty)) dfNoCols.createTempView("temp_table_no_cols") // ResolvedStar checkAnswer( dfNoCols, dfNoCols.select(dfNoCols.col("*"))) // UnresolvedStar checkAnswer( dfNoCols, sql("SELECT * FROM temp_table_no_cols")) checkAnswer( dfNoCols, dfNoCols.select($"*")) var e = intercept[AnalysisException] { sql("SELECT a.* FROM temp_table_no_cols a") }.getMessage assert(e.contains("cannot resolve 'a.*' give input columns ''")) e = intercept[AnalysisException] { dfNoCols.select($"b.*") }.getMessage assert(e.contains("cannot resolve 'b.*' give input columns ''")) } } test("Common subexpression elimination") { // TODO: support subexpression elimination in whole stage codegen withSQLConf("spark.sql.codegen.wholeStage" -> "false") { // select from a table to prevent constant folding. val df = sql("SELECT a, b from testData2 limit 1") checkAnswer(df, Row(1, 1)) checkAnswer(df.selectExpr("a + 1", "a + 1"), Row(2, 2)) checkAnswer(df.selectExpr("a + 1", "a + 1 + 1"), Row(2, 3)) // This does not work because the expressions get grouped like (a + a) + 1 checkAnswer(df.selectExpr("a + 1", "a + a + 1"), Row(2, 3)) checkAnswer(df.selectExpr("a + 1", "a + (a + 1)"), Row(2, 3)) // Identity udf that tracks the number of times it is called. 
val countAcc = sparkContext.longAccumulator("CallCount") spark.udf.register("testUdf", (x: Int) => { countAcc.add(1) x }) // Evaluates df, verifying it is equal to the expectedResult and the accumulator's value // is correct. def verifyCallCount(df: DataFrame, expectedResult: Row, expectedCount: Int): Unit = { countAcc.setValue(0) QueryTest.checkAnswer( df, Seq(expectedResult), checkToRDD = false /* avoid duplicate exec */) assert(countAcc.value == expectedCount) } verifyCallCount(df.selectExpr("testUdf(a)"), Row(1), 1) verifyCallCount(df.selectExpr("testUdf(a)", "testUdf(a)"), Row(1, 1), 1) verifyCallCount(df.selectExpr("testUdf(a + 1)", "testUdf(a + 1)"), Row(2, 2), 1) verifyCallCount(df.selectExpr("testUdf(a + 1)", "testUdf(a)"), Row(2, 1), 2) verifyCallCount( df.selectExpr("testUdf(a + 1) + testUdf(a + 1)", "testUdf(a + 1)"), Row(4, 2), 1) verifyCallCount( df.selectExpr("testUdf(a + 1) + testUdf(1 + b)", "testUdf(a + 1)"), Row(4, 2), 2) val testUdf = functions.udf((x: Int) => { countAcc.add(1) x }) verifyCallCount( df.groupBy().agg(sum(testUdf($"b") + testUdf($"b") + testUdf($"b"))), Row(3.0), 1) verifyCallCount( df.selectExpr("testUdf(a + 1) + testUdf(1 + a)", "testUdf(a + 1)"), Row(4, 2), 1) // Try disabling it via configuration. 
spark.conf.set("spark.sql.subexpressionElimination.enabled", "false") verifyCallCount(df.selectExpr("testUdf(a)", "testUdf(a)"), Row(1, 1), 2) spark.conf.set("spark.sql.subexpressionElimination.enabled", "true") verifyCallCount(df.selectExpr("testUdf(a)", "testUdf(a)"), Row(1, 1), 1) } } test("SPARK-10707: nullability should be correctly propagated through set operations (1)") { // This test produced an incorrect result of 1 before the SPARK-10707 fix because of the // NullPropagation rule: COUNT(v) got replaced with COUNT(1) because the output column of // UNION was incorrectly considered non-nullable: checkAnswer( sql("""SELECT count(v) FROM ( | SELECT v FROM ( | SELECT 'foo' AS v UNION ALL | SELECT NULL AS v | ) my_union WHERE isnull(v) |) my_subview""".stripMargin), Seq(Row(0))) } test("SPARK-10707: nullability should be correctly propagated through set operations (2)") { // This test uses RAND() to stop column pruning for Union and checks the resulting isnull // value. This would produce an incorrect result before the fix in SPARK-10707 because the "v" // column of the union was considered non-nullable. 
checkAnswer( sql( """ |SELECT a FROM ( | SELECT ISNULL(v) AS a, RAND() FROM ( | SELECT 'foo' AS v UNION ALL SELECT null AS v | ) my_union |) my_view """.stripMargin), Row(false) :: Row(true) :: Nil) } test("filter on a grouping column that is not presented in SELECT") { checkAnswer( sql("select count(1) from (select 1 as a) t group by a having a > 0"), Row(1) :: Nil) } test("SPARK-13056: Null in map value causes NPE") { val df = Seq(1 -> Map("abc" -> "somestring", "cba" -> null)).toDF("key", "value") withTempView("maptest") { df.createOrReplaceTempView("maptest") // local optimization will by pass codegen code, so we should keep the filter `key=1` checkAnswer(sql("SELECT value['abc'] FROM maptest where key = 1"), Row("somestring")) checkAnswer(sql("SELECT value['cba'] FROM maptest where key = 1"), Row(null)) } } test("hash function") { val df = Seq(1 -> "a", 2 -> "b").toDF("i", "j") withTempView("tbl") { df.createOrReplaceTempView("tbl") checkAnswer( df.select(hash($"i", $"j")), sql("SELECT hash(i, j) from tbl") ) } } test("join with using clause") { val df1 = Seq(("r1c1", "r1c2", "t1r1c3"), ("r2c1", "r2c2", "t1r2c3"), ("r3c1x", "r3c2", "t1r3c3")).toDF("c1", "c2", "c3") val df2 = Seq(("r1c1", "r1c2", "t2r1c3"), ("r2c1", "r2c2", "t2r2c3"), ("r3c1y", "r3c2", "t2r3c3")).toDF("c1", "c2", "c3") val df3 = Seq((null, "r1c2", "t3r1c3"), ("r2c1", "r2c2", "t3r2c3"), ("r3c1y", "r3c2", "t3r3c3")).toDF("c1", "c2", "c3") withTempView("t1", "t2", "t3") { df1.createOrReplaceTempView("t1") df2.createOrReplaceTempView("t2") df3.createOrReplaceTempView("t3") // inner join with one using column checkAnswer( sql("SELECT * FROM t1 join t2 using (c1)"), Row("r1c1", "r1c2", "t1r1c3", "r1c2", "t2r1c3") :: Row("r2c1", "r2c2", "t1r2c3", "r2c2", "t2r2c3") :: Nil) // inner join with two using columns checkAnswer( sql("SELECT * FROM t1 join t2 using (c1, c2)"), Row("r1c1", "r1c2", "t1r1c3", "t2r1c3") :: Row("r2c1", "r2c2", "t1r2c3", "t2r2c3") :: Nil) // Left outer join with one using column. 
checkAnswer( sql("SELECT * FROM t1 left join t2 using (c1)"), Row("r1c1", "r1c2", "t1r1c3", "r1c2", "t2r1c3") :: Row("r2c1", "r2c2", "t1r2c3", "r2c2", "t2r2c3") :: Row("r3c1x", "r3c2", "t1r3c3", null, null) :: Nil) // Right outer join with one using column. checkAnswer( sql("SELECT * FROM t1 right join t2 using (c1)"), Row("r1c1", "r1c2", "t1r1c3", "r1c2", "t2r1c3") :: Row("r2c1", "r2c2", "t1r2c3", "r2c2", "t2r2c3") :: Row("r3c1y", null, null, "r3c2", "t2r3c3") :: Nil) // Full outer join with one using column. checkAnswer( sql("SELECT * FROM t1 full outer join t2 using (c1)"), Row("r1c1", "r1c2", "t1r1c3", "r1c2", "t2r1c3") :: Row("r2c1", "r2c2", "t1r2c3", "r2c2", "t2r2c3") :: Row("r3c1x", "r3c2", "t1r3c3", null, null) :: Row("r3c1y", null, null, "r3c2", "t2r3c3") :: Nil) // Full outer join with null value in join column. checkAnswer( sql("SELECT * FROM t1 full outer join t3 using (c1)"), Row("r1c1", "r1c2", "t1r1c3", null, null) :: Row("r2c1", "r2c2", "t1r2c3", "r2c2", "t3r2c3") :: Row("r3c1x", "r3c2", "t1r3c3", null, null) :: Row("r3c1y", null, null, "r3c2", "t3r3c3") :: Row(null, null, null, "r1c2", "t3r1c3") :: Nil) // Self join with using columns. 
checkAnswer( sql("SELECT * FROM t1 join t1 using (c1)"), Row("r1c1", "r1c2", "t1r1c3", "r1c2", "t1r1c3") :: Row("r2c1", "r2c2", "t1r2c3", "r2c2", "t1r2c3") :: Row("r3c1x", "r3c2", "t1r3c3", "r3c2", "t1r3c3") :: Nil) } } test("SPARK-15327: fail to compile generated code with complex data structure") { withTempDir{ dir => val json = """ |{"h": {"b": {"c": [{"e": "adfgd"}], "a": [{"e": "testing", "count": 3}], |"b": [{"e": "test", "count": 1}]}}, "d": {"b": {"c": [{"e": "adfgd"}], |"a": [{"e": "testing", "count": 3}], "b": [{"e": "test", "count": 1}]}}, |"c": {"b": {"c": [{"e": "adfgd"}], "a": [{"count": 3}], |"b": [{"e": "test", "count": 1}]}}, "a": {"b": {"c": [{"e": "adfgd"}], |"a": [{"count": 3}], "b": [{"e": "test", "count": 1}]}}, |"e": {"b": {"c": [{"e": "adfgd"}], "a": [{"e": "testing", "count": 3}], |"b": [{"e": "test", "count": 1}]}}, "g": {"b": {"c": [{"e": "adfgd"}], |"a": [{"e": "testing", "count": 3}], "b": [{"e": "test", "count": 1}]}}, |"f": {"b": {"c": [{"e": "adfgd"}], "a": [{"e": "testing", "count": 3}], |"b": [{"e": "test", "count": 1}]}}, "b": {"b": {"c": [{"e": "adfgd"}], |"a": [{"count": 3}], "b": [{"e": "test", "count": 1}]}}}' | """.stripMargin spark.read.json(Seq(json).toDS()).write.mode("overwrite").parquet(dir.toString) spark.read.parquet(dir.toString).collect() } } test("data source table created in InMemoryCatalog should be able to read/write") { withTable("tbl") { sql("CREATE TABLE tbl(i INT, j STRING) USING parquet") checkAnswer(sql("SELECT i, j FROM tbl"), Nil) Seq(1 -> "a", 2 -> "b").toDF("i", "j").write.mode("overwrite").insertInto("tbl") checkAnswer(sql("SELECT i, j FROM tbl"), Row(1, "a") :: Row(2, "b") :: Nil) Seq(3 -> "c", 4 -> "d").toDF("i", "j").write.mode("append").saveAsTable("tbl") checkAnswer( sql("SELECT i, j FROM tbl"), Row(1, "a") :: Row(2, "b") :: Row(3, "c") :: Row(4, "d") :: Nil) } } test("Eliminate noop ordinal ORDER BY") { withSQLConf(SQLConf.ORDER_BY_ORDINAL.key -> "true") { val plan1 = sql("SELECT 1.0, 'abc', 
year(current_date()) ORDER BY 1, 2, 3") val plan2 = sql("SELECT 1.0, 'abc', year(current_date())") comparePlans(plan1.queryExecution.optimizedPlan, plan2.queryExecution.optimizedPlan) } } test("check code injection is prevented") { // The end of comment (*/) should be escaped. var literal = """|*/ |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin var expected = """|*/ |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin checkAnswer( sql(s"SELECT '$literal' AS DUMMY"), Row(s"$expected") :: Nil) // `\\u002A` is `*` and `\\u002F` is `/` // so if the end of comment consists of those characters in queries, we need to escape them. literal = """|\\\\u002A/ |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin expected = s"""|${"\\\\u002A/"} |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin checkAnswer( sql(s"SELECT '$literal' AS DUMMY"), Row(s"$expected") :: Nil) literal = """|\\\\\\\\u002A/ |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin expected = """|\\\\u002A/ |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin checkAnswer( sql(s"SELECT '$literal' AS DUMMY"), Row(s"$expected") :: Nil) literal = """|\\\\u002a/ |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin expected = s"""|${"\\\\u002a/"} |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin checkAnswer( sql(s"SELECT '$literal' AS DUMMY"), Row(s"$expected") :: Nil) literal = """|\\\\\\\\u002a/ |{ | new Object() { | void f() { throw new 
RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin expected = """|\\\\u002a/ |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin checkAnswer( sql(s"SELECT '$literal' AS DUMMY"), Row(s"$expected") :: Nil) literal = """|*\\\\u002F |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin expected = s"""|${"*\\\\u002F"} |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin checkAnswer( sql(s"SELECT '$literal' AS DUMMY"), Row(s"$expected") :: Nil) literal = """|*\\\\\\\\u002F |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin expected = """|*\\\\u002F |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin checkAnswer( sql(s"SELECT '$literal' AS DUMMY"), Row(s"$expected") :: Nil) literal = """|*\\\\u002f |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin expected = s"""|${"*\\\\u002f"} |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin checkAnswer( sql(s"SELECT '$literal' AS DUMMY"), Row(s"$expected") :: Nil) literal = """|*\\\\\\\\u002f |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin expected = """|*\\\\u002f |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin checkAnswer( sql(s"SELECT '$literal' AS DUMMY"), Row(s"$expected") :: Nil) literal = """|\\\\u002A\\\\u002F |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin expected 
= s"""|${"\\\\u002A\\\\u002F"} |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin checkAnswer( sql(s"SELECT '$literal' AS DUMMY"), Row(s"$expected") :: Nil) literal = """|\\\\\\\\u002A\\\\u002F |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin expected = s"""|${"\\\\\\\\u002A\\\\u002F"} |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin checkAnswer( sql(s"SELECT '$literal' AS DUMMY"), Row(s"$expected") :: Nil) literal = """|\\\\u002A\\\\\\\\u002F |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin expected = s"""|${"\\\\u002A\\\\\\\\u002F"} |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin checkAnswer( sql(s"SELECT '$literal' AS DUMMY"), Row(s"$expected") :: Nil) literal = """|\\\\\\\\u002A\\\\\\\\u002F |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin expected = """|\\\\u002A\\\\u002F |{ | new Object() { | void f() { throw new RuntimeException("This exception is injected."); } | }.f(); |} |/*""".stripMargin checkAnswer( sql(s"SELECT '$literal' AS DUMMY"), Row(s"$expected") :: Nil) } test("SPARK-15752 optimize metadata only query for datasource table") { withSQLConf(SQLConf.OPTIMIZER_METADATA_ONLY.key -> "true") { withTable("srcpart_15752") { val data = (1 to 10).map(i => (i, s"data-$i", i % 2, if ((i % 2) == 0) "a" else "b")) .toDF("col1", "col2", "partcol1", "partcol2") data.write.partitionBy("partcol1", "partcol2").mode("append").saveAsTable("srcpart_15752") checkAnswer( sql("select partcol1 from srcpart_15752 group by partcol1"), Row(0) :: Row(1) :: Nil) checkAnswer( sql("select partcol1 from srcpart_15752 where partcol1 = 
1 group by partcol1"), Row(1)) checkAnswer( sql("select partcol1, count(distinct partcol2) from srcpart_15752 group by partcol1"), Row(0, 1) :: Row(1, 1) :: Nil) checkAnswer( sql("select partcol1, count(distinct partcol2) from srcpart_15752 where partcol1 = 1 " + "group by partcol1"), Row(1, 1) :: Nil) checkAnswer(sql("select distinct partcol1 from srcpart_15752"), Row(0) :: Row(1) :: Nil) checkAnswer(sql("select distinct partcol1 from srcpart_15752 where partcol1 = 1"), Row(1)) checkAnswer( sql("select distinct col from (select partcol1 + 1 as col from srcpart_15752 " + "where partcol1 = 1) t"), Row(2)) checkAnswer(sql("select max(partcol1) from srcpart_15752"), Row(1)) checkAnswer(sql("select max(partcol1) from srcpart_15752 where partcol1 = 1"), Row(1)) checkAnswer(sql("select max(partcol1) from (select partcol1 from srcpart_15752) t"), Row(1)) checkAnswer( sql("select max(col) from (select partcol1 + 1 as col from srcpart_15752 " + "where partcol1 = 1) t"), Row(2)) } } } test("SPARK-16975: Column-partition path starting '_' should be handled correctly") { withTempDir { dir => val parquetDir = new File(dir, "parquet").getCanonicalPath spark.range(10).withColumn("_col", $"id").write.partitionBy("_col").save(parquetDir) spark.read.parquet(parquetDir) } } test("SPARK-16644: Aggregate should not put aggregate expressions to constraints") { withTable("tbl") { sql("CREATE TABLE tbl(a INT, b INT) USING parquet") checkAnswer(sql( """ |SELECT | a, | MAX(b) AS c1, | b AS c2 |FROM tbl |WHERE a = b |GROUP BY a, b |HAVING c1 = 1 """.stripMargin), Nil) } } test("SPARK-16674: field names containing dots for both fields and partitioned fields") { withTempPath { path => val data = (1 to 10).map(i => (i, s"data-$i", i % 2, if ((i % 2) == 0) "a" else "b")) .toDF("col.1", "col.2", "part.col1", "part.col2") data.write .format("parquet") .partitionBy("part.col1", "part.col2") .save(path.getCanonicalPath) val readBack = spark.read.format("parquet").load(path.getCanonicalPath) 
checkAnswer( readBack.selectExpr("`part.col1`", "`col.1`"), data.selectExpr("`part.col1`", "`col.1`")) } } test("SPARK-17515: CollectLimit.execute() should perform per-partition limits") { val numRecordsRead = spark.sparkContext.longAccumulator spark.range(1, 100, 1, numPartitions = 10).map { x => numRecordsRead.add(1) x }.limit(1).queryExecution.toRdd.count() assert(numRecordsRead.value === 10) } test("CREATE TABLE USING should not fail if a same-name temp view exists") { withTable("same_name") { withTempView("same_name") { spark.range(10).createTempView("same_name") sql("CREATE TABLE same_name(i int) USING json") checkAnswer(spark.table("same_name"), spark.range(10).toDF()) assert(spark.table("default.same_name").collect().isEmpty) } } } test("SPARK-18053: ARRAY equality is broken") { withTable("array_tbl") { spark.range(10).select(array($"id").as("arr")).write.saveAsTable("array_tbl") assert(sql("SELECT * FROM array_tbl where arr = ARRAY(1L)").count == 1) } } test("SPARK-19157: should be able to change spark.sql.runSQLOnFiles at runtime") { withTempPath { path => Seq(1 -> "a").toDF("i", "j").write.parquet(path.getCanonicalPath) val newSession = spark.newSession() val originalValue = newSession.sessionState.conf.runSQLonFile try { newSession.sessionState.conf.setConf(SQLConf.RUN_SQL_ON_FILES, false) intercept[AnalysisException] { newSession.sql(s"SELECT i, j FROM parquet.`${path.getCanonicalPath}`") } newSession.sessionState.conf.setConf(SQLConf.RUN_SQL_ON_FILES, true) checkAnswer( newSession.sql(s"SELECT i, j FROM parquet.`${path.getCanonicalPath}`"), Row(1, "a")) } finally { newSession.sessionState.conf.setConf(SQLConf.RUN_SQL_ON_FILES, originalValue) } } } test("should be able to resolve a persistent view") { withTable("t1", "t2") { withView("v1") { sql("CREATE TABLE `t1` USING parquet AS SELECT * FROM VALUES(1, 1) AS t1(a, b)") sql("CREATE TABLE `t2` USING parquet AS SELECT * FROM VALUES('a', 2, 1.0) AS t2(d, e, f)") sql("CREATE VIEW `v1`(x, y) AS SELECT * 
FROM t1") checkAnswer(spark.table("v1").orderBy("x"), Row(1, 1)) sql("ALTER VIEW `v1` AS SELECT * FROM t2") checkAnswer(spark.table("v1").orderBy("f"), Row("a", 2, 1.0)) } } } test("SPARK-19059: read file based table whose name starts with underscore") { withTable("_tbl") { sql("CREATE TABLE `_tbl`(i INT) USING parquet") sql("INSERT INTO `_tbl` VALUES (1), (2), (3)") checkAnswer( sql("SELECT * FROM `_tbl`"), Row(1) :: Row(2) :: Row(3) :: Nil) } } test("SPARK-19334: check code injection is prevented") { // The end of comment (*/) should be escaped. val badQuery = """|SELECT inline(array(cast(struct(1) AS | struct<`= | new Object() { | {f();} | public void f() {throw new RuntimeException("This exception is injected.");} | public int x; | }.x | `:int>)))""".stripMargin.replaceAll("\\n", "") checkAnswer(sql(badQuery), Row(1) :: Nil) } test("SPARK-19650: An action on a Command should not trigger a Spark job") { // Create a listener that checks if new jobs have started. val jobStarted = new AtomicBoolean(false) val listener = new SparkListener { override def onJobStart(jobStart: SparkListenerJobStart): Unit = { jobStarted.set(true) } } // Make sure no spurious job starts are pending in the listener bus. sparkContext.listenerBus.waitUntilEmpty(500) sparkContext.addSparkListener(listener) try { // Execute the command. 
sql("show databases").head() // Make sure we have seen all events triggered by DataFrame.show() sparkContext.listenerBus.waitUntilEmpty(500) } finally { sparkContext.removeSparkListener(listener) } assert(!jobStarted.get(), "Command should not trigger a Spark job.") } test("SPARK-20164: AnalysisException should be tolerant to null query plan") { try { throw new AnalysisException("", None, None, plan = null) } catch { case ae: AnalysisException => assert(ae.plan == null && ae.getMessage == ae.getSimpleMessage) } } test("SPARK-12868: Allow adding jars from hdfs ") { val jarFromHdfs = "hdfs://doesnotmatter/test.jar" val jarFromInvalidFs = "fffs://doesnotmatter/test.jar" // if 'hdfs' is not supported, MalformedURLException will be thrown new URL(jarFromHdfs) intercept[MalformedURLException] { new URL(jarFromInvalidFs) } } test("RuntimeReplaceable functions should not take extra parameters") { val e = intercept[AnalysisException](sql("SELECT nvl(1, 2, 3)")) assert(e.message.contains("Invalid number of arguments")) } test("SPARK-21228: InSet incorrect handling of structs") { withTempView("A") { // reduce this from the default of 10 so the repro query text is not too long withSQLConf((SQLConf.OPTIMIZER_INSET_CONVERSION_THRESHOLD.key -> "3")) { // a relation that has 1 column of struct type with values (1,1), ..., (9, 9) spark.range(1, 10).selectExpr("named_struct('a', id, 'b', id) as a") .createOrReplaceTempView("A") val df = sql( """ |SELECT * from | (SELECT MIN(a) as minA FROM A) AA -- this Aggregate will return UnsafeRows | -- the IN will become InSet with a Set of GenericInternalRows | -- a GenericInternalRow is never equal to an UnsafeRow so the query would | -- returns 0 results, which is incorrect | WHERE minA IN (NAMED_STRUCT('a', 1L, 'b', 1L), NAMED_STRUCT('a', 2L, 'b', 2L), | NAMED_STRUCT('a', 3L, 'b', 3L)) """.stripMargin) checkAnswer(df, Row(Row(1, 1))) } } } test("SPARK-21335: support un-aliased subquery") { withTempView("v") { Seq(1 -> "a").toDF("i", 
"j").createOrReplaceTempView("v") checkAnswer(sql("SELECT i from (SELECT i FROM v)"), Row(1)) val e = intercept[AnalysisException](sql("SELECT v.i from (SELECT i FROM v)")) assert(e.message == "cannot resolve '`v.i`' given input columns: [__auto_generated_subquery_name.i]") checkAnswer(sql("SELECT __auto_generated_subquery_name.i from (SELECT i FROM v)"), Row(1)) } } }
VigneshMohan1/spark-branch-2.3
sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
Scala
apache-2.0
91,675
package org.scalaide.debug.internal.model

import org.eclipse.debug.core.model.IValue
import org.scalaide.util.Utils.jdiSynchronized
import org.eclipse.debug.core.model.IVariable

import com.sun.jdi.ArrayType
import com.sun.jdi.Field
import com.sun.jdi.LocalVariable
import com.sun.jdi.ObjectReference

/**
 * Base class for all variables shown in the Scala debugger's variables view.
 *
 * Value modification is not supported (`setValue`/`verifyValue` are
 * unimplemented and `supportsValueModification` is false). The public
 * accessors (`getValue`, `getName`, `getReferenceTypeName`) are final:
 * they serialize access to JDI via [[jdiSynchronized]] and translate JDI
 * exceptions through `wrapJDIException`, delegating the actual lookup to
 * the protected `doGet*` methods implemented by subclasses.
 */
abstract class ScalaVariable(target: ScalaDebugTarget) extends ScalaDebugElement(target) with IVariable {

  // Value modification is intentionally unsupported for Scala variables.
  override def setValue(value: IValue): Unit = ???
  override def setValue(value: String): Unit = ???
  override def supportsValueModification: Boolean = false // TODO: need real logic
  override def verifyValue(value: IValue): Boolean = ???
  override def verifyValue(value: String): Boolean = ???

  /**
   * Remove private name mangling (taken from the Scala compiler's
   * `unexpandedName` and adapted to work on plain Strings).
   *
   * A name of the form `prefix$$suffix` is reduced to `suffix`; a name with
   * no `$$` (or starting with it) is returned unchanged.
   */
  def unexpandedName(name: String): String = name lastIndexOf "$$" match {
    case 0 | -1 => name
    case idx0 =>
      // Sketchville - We've found $$ but if it's part of $$$ or $$$$
      // or something we need to keep the bonus dollars, so e.g. foo$$$outer
      // has an original name of $outer.
      var idx = idx0
      // Walk back over any extra '$' so they stay part of the returned suffix.
      while (idx > 0 && name.charAt(idx - 1) == '$') idx -= 1
      name drop idx + 2
  }

  /** Returns this variable's value, with JDI access synchronized and JDI exceptions wrapped. */
  final override def getValue(): IValue = jdiSynchronized {
    wrapJDIException("Exception while retrieving variable's value") {
      doGetValue()
    }
  }

  /** Returns this variable's display name, unmangled via [[unexpandedName]]. */
  final override def getName(): String = jdiSynchronized {
    wrapJDIException("Exception while retrieving variable's name") {
      unexpandedName(doGetName())
    }
  }

  /** Returns the name of this variable's declared/reference type. */
  final override def getReferenceTypeName(): String = jdiSynchronized {
    wrapJDIException("Exception while retrieving variable's reference type name") {
      doGetReferenceTypeName()
    }
  }

  override def hasValueChanged: Boolean = false // TODO: need real logic

  // Overridden by ScalaFieldVariable, which can answer from the JDI Field.
  def isStatic: Boolean = false
  def isFinal: Boolean = false

  /** Gets called by [[getValue]] to ensure that JDI exceptions are handled correctly. */
  protected def doGetValue(): IValue

  /** Gets called by [[getName]] to ensure that JDI exceptions are handled correctly. */
  protected def doGetName(): String

  /** Gets called by [[getReferenceTypeName]] to ensure that JDI exceptions are handled correctly. */
  protected def doGetReferenceTypeName(): String
}

/** The `this` reference of the selected stack frame. */
class ScalaThisVariable(underlying: ObjectReference, stackFrame: ScalaStackFrame) extends ScalaVariable(stackFrame.getDebugTarget) {
  override protected def doGetName: String = "this"
  override protected def doGetReferenceTypeName: String = underlying.referenceType.name
  override protected def doGetValue: IValue = new ScalaObjectReference(underlying, getDebugTarget)
}

/** A JDI local variable of the selected stack frame. */
class ScalaLocalVariable(underlying: LocalVariable, stackFrame: ScalaStackFrame) extends ScalaVariable(stackFrame.getDebugTarget) {
  override protected def doGetName(): String = underlying.name
  override protected def doGetReferenceTypeName(): String = underlying.typeName

  // fetching the value for local variables cannot be delayed because the
  // underlying stack frame element may become invalid at any time
  override protected def doGetValue: IValue = ScalaValue(stackFrame.stackFrame.getValue(underlying), getDebugTarget)
}

/** One element of a JDI array, displayed with its index as the name, e.g. "(3)". */
class ScalaArrayElementVariable(index: Int, arrayReference: ScalaArrayReference) extends ScalaVariable(arrayReference.getDebugTarget) {
  override protected def doGetName(): String = "(%s)".format(index)
  override protected def doGetReferenceTypeName(): String = arrayReference.underlying.referenceType.asInstanceOf[ArrayType].componentTypeName
  override protected def doGetValue(): IValue = ScalaValue(arrayReference.underlying.getValue(index), getDebugTarget)
}

/** A JDI field of an object reference; static/final flags come from the JDI Field. */
class ScalaFieldVariable(field: Field, objectReference: ScalaObjectReference) extends ScalaVariable(objectReference.getDebugTarget) {
  override def isStatic: Boolean = field.isStatic()
  override def isFinal: Boolean = field.isFinal
  override protected def doGetName(): String = field.name
  override protected def doGetReferenceTypeName(): String = field.typeName
  override protected def doGetValue(): IValue = ScalaValue(objectReference.underlying.getValue(field), getDebugTarget)
}
romanowski/scala-ide
org.scala-ide.sdt.debug/src/org/scalaide/debug/internal/model/ScalaVariable.scala
Scala
bsd-3-clause
4,147
package ru.ispras.textnormalization

import ru.ispras.textnormalization.stringbased.AbstractStringBased
import ru.ispras.textnormalization.tokenizer.AbstractTokenizer
import ru.ispras.textnormalization.wordbased.AbstractWordBasedNormalizer

/**
 * Three-stage text normalization pipeline:
 *
 *  1. string-level preprocessing of the raw input,
 *  2. tokenization of the preprocessed text into words,
 *  3. word-level normalization of the resulting tokens.
 *
 * Instances are callable: `normalizer(text)` runs all three stages in order.
 */
class TextNormalizer(
    private val stringBased: AbstractStringBased,
    private val tokenizer: AbstractTokenizer,
    private val wordNormalizer: AbstractWordBasedNormalizer) {

  /** Runs the full pipeline on `text` and returns the normalized words. */
  def apply(text: String) = wordNormalizer(tokenizer(stringBased(text)))
}
IlyaKozlov/TextNormalizer
src/main/scala/ru/ispras/textnormalization/TextNormalizer.scala
Scala
apache-2.0
682
package com.mlh.clustering.actor

import akka.actor.{Actor, ActorIdentity, ActorLogging, ActorPath, ActorSelection, Identify, PoisonPill, Props}
import com.mlh.clustering.ExceptionUtil
import com.mlh.clustering.system

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.concurrent.duration.DurationInt
import scala.language.postfixOps
import scala.util.{Failure, Success}

/**
 * Per-account actor that sends itself a "tick" every 20 seconds, can
 * enumerate sibling actors ("start" -> ActorPath -> ActorIdentity chain),
 * and answers "ping" with the (cached) result of a one-shot Future.
 *
 * Created by pek on 2017/10/20.
 */
class CallApiActor(id: Int) extends Actor with ActorLogging {

  // 20-second recurring "tick" to self; cancelled in postStop so the
  // scheduler stops firing once this actor dies.
  val actor = Some(context.system.scheduler.schedule(20 second, 20 second, self, "tick"))

  override def postStop() = {
    log.info("スケジューラを停止しますすすすすすすすすすすす!!!!")
    actor.foreach(_.cancel())
  }

  val _id = id
  // Countdown decremented on every tick; starts at id * 10000.
  var countLimit = _id * 10000

  implicit val timeout = akka.util.Timeout(1.seconds)

  override def receive: Receive = {
    case "start" =>
      log.info("Current Actors in system:")
      self ! ActorPath.fromString("akka://clustering-cluster/user/")

    case path: ActorPath =>
      log.info("Current Actors in system:")
      // Ask every direct child under `path` to identify itself; replies
      // arrive as ActorIdentity messages handled below.
      context.actorSelection(path / "*") ! Identify(())

    case ActorIdentity(_, Some(ref)) =>
      log.info(ref.toString())
      self ! ref.path

    case "ping" =>
      log.info("account id {} get ping", _id)
      // BUG FIX: sender() must be captured *before* the Future callback is
      // registered. onComplete runs later on an execution-context thread,
      // by which time the actor may be processing a different message, so
      // calling sender() inside the callback yields the wrong actor (or
      // deadLetters). Capturing it into a local val pins the correct
      // reply-to reference.
      val replyTo = sender()
      f.onComplete {
        case Success(x) => replyTo ! x
        case Failure(ex) =>
          log.error(ExceptionUtil.stackTraceString(ex))
          // NOTE(review): this throw happens on an execution-context thread,
          // not inside the actor, so it is swallowed by the EC's reporter
          // rather than triggering actor supervision — TODO confirm intent.
          throw ex
      }

    case "tick" =>
      countLimit -= 1
      log.info("tick is running..============================ id : {} count : {}", _id, countLimit)

    case "stop" =>
      log.info("AccountListActor is stop.")
      self ! PoisonPill
  }

  // One-shot Future evaluated once at construction: after an initial 1s
  // delay it caches "PONG!", which every subsequent "ping" replies with.
  val f: Future[String] = Future {
    Thread.sleep(1000)
    "PONG!"
  }
}

object CallApiActor {
  def props(id: Int): Props = Props(new CallApiActor(id))
}

/** Naming/lookup helpers for per-account CallApiActor instances. */
object CallApiHelper {
  def props(id: Int): Props = Props(new CallApiActor(id))

  private lazy val CALL_API_ACTOR_NAME_PREFIX = "apiRouter_%d"

  /** Actor name for the given account, e.g. "apiRouter_42". */
  def generateActorName(accountId: Int): String = {
    CALL_API_ACTOR_NAME_PREFIX format accountId
  }

  /** Full path of the account's actor under AccountListActor. */
  def getActorPath(accountId: Int): String = {
    "%s/%s" format (AccountListActor.path, generateActorName(accountId))
  }

  /** ActorSelection resolving the account's actor in the shared system. */
  def getActorSelection(accountId: Int): ActorSelection = {
    system.actorSelection(CallApiHelper.getActorPath(accountId))
  }
}
eikon-paku-ca/akka-cluster-docker-sample
src/main/scala/com/mlh/clustering/actor/CallApiActor.scala
Scala
mit
2,989
/* * Copyright 2016 The BigDL Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intel.analytics.bigdl.nn import com.intel.analytics.bigdl.Module import com.intel.analytics.bigdl.models.inception.{Inception_Layer_v1, Inception_v1} import com.intel.analytics.bigdl.models.resnet.{Convolution, ResNet} import com.intel.analytics.bigdl.models.resnet.ResNet.{ShortcutType, iChannels} import com.intel.analytics.bigdl.nn.Graph.ModuleNode import org.scalatest.{FlatSpec, Matchers} import com.intel.analytics.bigdl.numeric.NumericFloat import com.intel.analytics.bigdl.tensor.{Storage, Tensor} import com.intel.analytics.bigdl.utils.{RandomGenerator, T, Table} import scala.reflect.ClassTag @com.intel.analytics.bigdl.tags.Parallel class GraphSpec extends FlatSpec with Matchers { "Graph init" should "throw exceptions when there's cycle" in { val fc1 = Linear(4, 2).inputs() val relu1 = ReLU().inputs(fc1) relu1 -> fc1 intercept[IllegalArgumentException] { Graph(fc1, relu1) } } "Graph init" should "be successful when inputs node are same with outputs node" in { val fc1 = Linear(4, 2).inputs() val graph = Graph(fc1, fc1) val inputData = Tensor(4, 4) fc1.element.parameters()._1(1).zero() // bias is set to 0 graph.forward(inputData) should be((inputData * fc1.element.parameters()._1(0).t())) } "Graph init" should "throw exceptions when some inputs are ignored" in { val fc1 = Linear(4, 2).inputs() val fc2 = Linear(4, 2).inputs() val output = CAddTable().inputs(fc1, fc2) 
intercept[IllegalArgumentException] { Graph(fc1, output) } } "Graph init" should "be successful output are ignored" in { val fc1 = Linear(4, 2).inputs() val fc2 = Linear(4, 2).inputs() val cadd = CAddTable().inputs(fc1, fc2) val output1 = ReLU().inputs(cadd) val output2 = ReLU().inputs(cadd) val graph = Graph(Array(fc1, fc2), Array(output1)) fc1.element.getParameters()._1.apply1(_ => 1.0f) fc2.element.getParameters()._1.apply1(_ => 1.0f) val output = graph.forward(T(Tensor(T(0.1f, 0.2f, -0.3f, -0.4f)), Tensor(T(0.5f, 0.4f, -0.2f, -0.1f)))) output should be(Tensor(T(2.2f, 2.2f))) } "Graph init" should "throw exceptions when input a tensor while a table is required" in { val fc1 = Linear(4, 2).inputs() val fc2 = Linear(4, 2).inputs() val cadd = CAddTable().inputs(fc1, fc2) val output1 = ReLU().inputs(cadd) val output2 = ReLU().inputs(cadd) val graph = Graph(Array(fc1, fc2), Array(output1, output2)) intercept[IllegalArgumentException] { graph.forward(Tensor(T(0.1f, 0.2f, -0.3f, -0.4f))) } } "Graph init" should "throw exceptions when inputs has pre-nodes" in { val fc1 = Linear(4, 2).inputs() val fc2 = Linear(4, 2).inputs() val tanh1 = Tanh().inputs(fc1) val tanh2 = Tanh().inputs(fc2) val cadd = CAddTable().inputs(tanh1, tanh2) val output1 = ReLU().inputs(cadd) val output2 = ReLU().inputs(cadd) intercept[IllegalArgumentException] { Graph(Array(tanh1, tanh2), Array(output1, output2)) } } "Graph init" should "throw exceptions when inputs has nothing to do with the graph but same " + "number with the roots node in the graph" in { val fc1 = Linear(4, 2).inputs() val fc2 = Linear(4, 2).inputs() val fc3 = Linear(4, 2).inputs() val fc4 = Linear(4, 2).inputs() val tanh1 = Tanh().inputs(fc1) val tanh2 = Tanh().inputs(fc2) val cadd = CAddTable().inputs(tanh1, tanh2) val output1 = ReLU().inputs(cadd) val output2 = ReLU().inputs(cadd) intercept[IllegalArgumentException] { Graph(Array(fc3, fc4), Array(output1, output2)) } } "Graph forward" should "be successful" in { val fc1 = 
Linear(4, 2).inputs() val fc2 = Linear(4, 2).inputs() val cadd = CAddTable().inputs(fc1, fc2) val output1 = ReLU().inputs(cadd) val output2 = Threshold(10.0).inputs(cadd) val graph = Graph(Array(fc1, fc2), Array(output1, output2)) fc1.element.getParameters()._1.apply1(_ => 1.0f) fc2.element.getParameters()._1.apply1(_ => 1.0f) val output = graph.forward(T(Tensor(T(0.1f, 0.2f, -0.3f, -0.4f)), Tensor(T(0.5f, 0.4f, -0.2f, -0.1f)))) output should be(T(Tensor(T(2.2f, 2.2f)), Tensor(T(.0f, .0f)))) } "Graph forward" should "throw exceptions when input a table while a tensor is required" in { val fc1 = Linear(4, 2).inputs() val output1 = ReLU().inputs(fc1) val graph = Graph(Array(fc1), Array(output1)) intercept[IllegalArgumentException] { graph.forward(T(Tensor(T(0.1f, 0.2f, -0.3f, -0.4f)), Tensor(T(0.5f, 0.4f, -0.2f, -0.1f)))) } } "Graph forward" should "be successful when first node accept multiple tensors input" in { val input1 = Input() val input2 = Input() val cadd = CAddTable().inputs(input1, input2) val graph = Graph(Array(input1, input2), cadd) val output = graph.forward(T(Tensor(T(0.1f, 0.2f, -0.3f, -0.4f)), Tensor(T(0.5f, 0.4f, -0.2f, -0.1f)))) output should be(Tensor(T(0.6f, 0.6f, -0.5f, -0.5f))) } "Graph forward" should "be successful when exchange input order" in { val fc1 = Linear(4, 2).inputs() val fc2 = Linear(4, 2).inputs() val cadd = CAddTable().inputs(fc1, fc2) val output1 = ReLU().inputs(cadd) val output2 = Threshold(10.0).inputs(cadd) val graph = Graph(Array(fc2, fc1), Array(output1, output2)) fc1.element.getParameters()._1.apply1(_ => 1.0f) fc2.element.getParameters()._1.apply1(_ => 2.0f) val output = graph.forward(T(Tensor(T(0.1f, 0.2f, -0.3f, -0.4f)), Tensor(T(0.5f, 0.4f, -0.2f, -0.1f)))) output should be(T(Tensor(T(2.8f, 2.8f)), Tensor(T(0.0f, 0.0f)))) } "Graph forward" should "be successful when paths has different length" in { val fc1 = Linear(4, 2).inputs() val thd1 = Threshold(-10.0).inputs(fc1) val thd2 = Threshold(-10.0).inputs(thd1) val thd3 
= Threshold(-10.0).inputs(thd2) val thd4 = Threshold(-10.0).inputs(thd3) val fc2 = Linear(4, 2).inputs() val cadd = CAddTable().inputs(thd4, fc2) val output1 = ReLU().inputs(cadd) val output2 = Threshold(10.0).inputs(cadd) val graph = Graph(Array(fc1, fc2), Array(output1, output2)) fc1.element.getParameters()._1.apply1(_ => 1.0f) fc2.element.getParameters()._1.apply1(_ => 1.0f) val output = graph.forward(T(Tensor(T(0.1f, 0.2f, -0.3f, -0.4f)), Tensor(T(0.5f, 0.4f, -0.2f, -0.1f)))) output should be(T(Tensor(T(2.2f, 2.2f)), Tensor(T(.0f, .0f)))) } "Graph forward" should "be successful when exchange output order" in { val fc1 = Linear(4, 2).inputs() val fc2 = Linear(4, 2).inputs() val cadd = CAddTable().inputs(fc1, fc2) val output1 = ReLU().inputs(cadd) val output2 = Threshold(10.0).inputs(cadd) val graph = Graph(Array(fc1, fc2), Array(output2, output1)) fc1.element.getParameters()._1.apply1(_ => 1.0f) fc2.element.getParameters()._1.apply1(_ => 2.0f) val output = graph.forward(T(Tensor(T(0.1f, 0.2f, -0.3f, -0.4f)), Tensor(T(0.5f, 0.4f, -0.2f, -0.1f)))) output should be(T(Tensor(T(0.0f, 0.0f)), Tensor(T(3.8f, 3.8f)))) } "Graph backward" should "be successful" in { val fc1 = Linear(4, 2).inputs() val fc2 = Linear(4, 2).inputs() val cadd = CAddTable().inputs(fc1, fc2) val output1 = ReLU().inputs(cadd) val output2 = Threshold(10.0).inputs(cadd) val graph = Graph(Array(fc1, fc2), Array(output1, output2)) fc1.element.getParameters()._1.apply1(_ => 1.0f) fc2.element.getParameters()._1.apply1(_ => 2.0f) val output = graph.forward(T(Tensor(T(0.1f, 0.2f, -0.3f, -0.4f)), Tensor(T(0.5f, 0.4f, -0.2f, -0.1f)))) val gradInput = graph.backward(T(Tensor(T(0.1f, 0.2f, -0.3f, -0.4f)), Tensor(T(0.5f, 0.4f, -0.2f, -0.1f))), T(Tensor(T(1.0f, 2.0f)), Tensor(T(3.0f, 4.0f)))) gradInput should be(T(Tensor(T(3.0f, 3.0f, 3.0f, 3.0f)), Tensor(T(6.0f, 6.0f, 6.0f, 6.0f)))) fc1.element.parameters()._2(0) should be(Tensor(T(T(0.1f, 0.2f, -0.3f, -0.4f), T(0.2f, 0.4f, -0.6f, -0.8f)))) 
fc1.element.parameters()._2(1) should be(Tensor(T(1.0f, 2.0f))) fc2.element.parameters()._2(0) should be(Tensor(T(T(0.5f, 0.4f, -0.2f, -0.1f), T(1.0f, 0.8f, -0.4f, -0.2f)))) fc2.element.parameters()._2(1) should be(Tensor(T(1.0f, 2.0f))) } "Graph backward" should "be successful when first node accept multiple tensors input" in { val input1 = Input() val input2 = Input() val cadd = CAddTable().inputs(input1, input2) val graph = Graph(Array(input1, input2), cadd) val output = graph.forward(T(Tensor(T(0.1f, 0.2f, -0.3f, -0.4f)), Tensor(T(0.5f, 0.4f, -0.2f, -0.1f)))) output should be(Tensor(T(0.6f, 0.6f, -0.5f, -0.5f))) val gradient = graph.backward(T(Tensor(T(0.1f, 0.2f, -0.3f, -0.4f)), Tensor(T(0.5f, 0.4f, -0.2f, -0.1f))), Tensor(T(0.1f, 0.1f, 0.1f, 0.1f))) gradient should be(T(Tensor(T(0.1f, 0.1f, 0.1f, 0.1f)), Tensor(T(0.1f, 0.1f, 0.1f, 0.1f)))) } "Graph backward" should "be successful when paths have different length" in { val fc1 = Linear(4, 2).inputs() val thd1 = Threshold(-10.0).inputs(fc1) val thd2 = Threshold(-10.0).inputs(thd1) val thd3 = Threshold(-10.0).inputs(thd2) val thd4 = Threshold(-10.0).inputs(thd3) val fc2 = Linear(4, 2).inputs() val cadd = CAddTable().inputs(thd4, fc2) val output1 = ReLU().inputs(cadd) val output2 = Threshold(10.0).inputs(cadd) val graph = Graph(Array(fc1, fc2), Array(output1, output2)) fc1.element.getParameters()._1.apply1(_ => 1.0f) fc2.element.getParameters()._1.apply1(_ => 2.0f) val output = graph.forward(T(Tensor(T(0.1f, 0.2f, -0.3f, -0.4f)), Tensor(T(0.5f, 0.4f, -0.2f, -0.1f)))) val gradInput = graph.backward(T(Tensor(T(0.1f, 0.2f, -0.3f, -0.4f)), Tensor(T(0.5f, 0.4f, -0.2f, -0.1f))), T(Tensor(T(1.0f, 2.0f)), Tensor(T(3.0f, 4.0f)))) gradInput should be(T(Tensor(T(3.0f, 3.0f, 3.0f, 3.0f)), Tensor(T(6.0f, 6.0f, 6.0f, 6.0f)))) fc1.element.parameters()._2(0) should be(Tensor(T(T(0.1f, 0.2f, -0.3f, -0.4f), T(0.2f, 0.4f, -0.6f, -0.8f)))) fc1.element.parameters()._2(1) should be(Tensor(T(1.0f, 2.0f))) fc2.element.parameters()._2(0) 
should be(Tensor(T(T(0.5f, 0.4f, -0.2f, -0.1f), T(1.0f, 0.8f, -0.4f, -0.2f)))) fc2.element.parameters()._2(1) should be(Tensor(T(1.0f, 2.0f))) } "Graph backward" should "be successful when exchange input order" in { val fc1 = Linear(4, 2).inputs() val fc2 = Linear(4, 2).inputs() val cadd = CAddTable().inputs(fc1, fc2) val output1 = ReLU().inputs(cadd) val output2 = Threshold(10.0).inputs(cadd) val graph = Graph(Array(fc2, fc1), Array(output1, output2)) fc1.element.getParameters()._1.apply1(_ => 1.0f) fc2.element.getParameters()._1.apply1(_ => 2.0f) val output = graph.forward(T(Tensor(T(0.1f, 0.2f, -0.3f, -0.4f)), Tensor(T(0.5f, 0.4f, -0.2f, -0.1f)))) val gradInput = graph.backward(T(Tensor(T(0.1f, 0.2f, -0.3f, -0.4f)), Tensor(T(0.5f, 0.4f, -0.2f, -0.1f))), T(Tensor(T(1.0f, 2.0f)), Tensor(T(3.0f, 4.0f)))) gradInput should be(T(Tensor(T(6.0f, 6.0f, 6.0f, 6.0f)), Tensor(T(3.0f, 3.0f, 3.0f, 3.0f)))) fc1.element.parameters()._2(0) should be(Tensor(T(T(0.5f, 0.4f, -0.2f, -0.1f), T(1.0f, 0.8f, -0.4f, -0.2f)))) fc1.element.parameters()._2(1) should be(Tensor(T(1.0f, 2.0f))) fc2.element.parameters()._2(0) should be(Tensor(T(T(0.1f, 0.2f, -0.3f, -0.4f), T(0.2f, 0.4f, -0.6f, -0.8f)))) fc2.element.parameters()._2(1) should be(Tensor(T(1.0f, 2.0f))) } "Graph backward" should "be successful when exchange output order" in { val fc1 = Linear(4, 2).inputs() val fc2 = Linear(4, 2).inputs() val cadd = CAddTable().inputs(fc1, fc2) val output1 = ReLU().inputs(cadd) val output2 = Threshold(10.0).inputs(cadd) val graph = Graph(Array(fc1, fc2), Array(output2, output1)) fc1.element.getParameters()._1.apply1(_ => 1.0f) fc2.element.getParameters()._1.apply1(_ => 2.0f) val output = graph.forward(T(Tensor(T(0.1f, 0.2f, -0.3f, -0.4f)), Tensor(T(0.5f, 0.4f, -0.2f, -0.1f)))) val gradInput = graph.backward(T(Tensor(T(0.1f, 0.2f, -0.3f, -0.4f)), Tensor(T(0.5f, 0.4f, -0.2f, -0.1f))), T(Tensor(T(1.0f, 2.0f)), Tensor(T(3.0f, 4.0f)))) gradInput should be(T(Tensor(T(7.0f, 7.0f, 7.0f, 7.0f)), 
Tensor(T(14.0f, 14.0f, 14.0f, 14.0f)))) fc1.element.parameters()._2(0) should be(Tensor(T(T(0.3f, 0.6f, -0.9f, -1.2f), T(0.4f, 0.8f, -1.2f, -1.6f)))) fc1.element.parameters()._2(1) should be(Tensor(T(3.0f, 4.0f))) fc2.element.parameters()._2(0) should be(Tensor(T(T(1.5f, 1.2f, -0.6f, -0.3f), T(2.0f, 1.6f, -0.8f, -0.4f)))) fc2.element.parameters()._2(1) should be(Tensor(T(3.0f, 4.0f))) } "Graph forward/backward" should "be successful when there's output from internal node" in { val input1 = Input() val input2 = Input() val add = CAddTable().inputs(input1, input2) val add2 = AddConstant(2.0f).inputs(add) val relu = ReLU().inputs(add2) val graph = Graph[Float](Array(input1, input2), Array(add, relu)) val input = T(Tensor(T(1.0f, 2.0f)), Tensor(T(-2.0f, -1.0f))) val output = graph.forward(input) val gradient = graph.backward(input, T(Tensor(T(1.0f, 2.0f)), Tensor(T(-2.0f, -1.0f)))) val output1 = output.toTable[Tensor[Float]](1) val output2 = output.toTable[Tensor[Float]](2) output1 should be(Tensor[Float](T(-1.0f, 1.0f))) output2 should be(Tensor[Float](T(1.0f, 3.0f))) gradient should be(T(Tensor(T(-1.0f, 1.0f)), Tensor(T(-1.0f, 1.0f)))) } "lenet" should "be same with sequential model" in { RandomGenerator.RNG.setSeed(1000) val seqModel = Sequential().add(Reshape(Array(1, 28, 28))) .add(SpatialConvolution(1, 6, 5, 5).setName("conv1_5x5")) .add(Tanh()) .add(SpatialMaxPooling(2, 2, 2, 2)) .add(Tanh()) .add(SpatialConvolution(6, 12, 5, 5).setName("conv2_5x5")) .add(SpatialMaxPooling(2, 2, 2, 2)) .add(Reshape(Array(12 * 4 * 4))) .add(Linear(12 * 4 * 4, 100).setName("fc1")) .add(Tanh()) .add(Linear(100, 10).setName("fc2")) .add(LogSoftMax()) RandomGenerator.RNG.setSeed(1000) val input = Reshape(Array(1, 28, 28)).inputs() val conv1 = SpatialConvolution(1, 6, 5, 5).inputs(input) val tanh1 = Tanh().inputs(conv1) val pool1 = SpatialMaxPooling(2, 2, 2, 2).inputs(tanh1) val tanh2 = Tanh().inputs(pool1) val conv2 = SpatialConvolution(6, 12, 5, 5).inputs(tanh2) val pool2 = 
SpatialMaxPooling(2, 2, 2, 2).inputs(conv2) val reshape = Reshape(Array(12 * 4 * 4)).inputs(pool2) val fc1 = Linear(12 * 4 * 4, 100).inputs(reshape) val tanh3 = Tanh().inputs(fc1) val fc2 = Linear(100, 10).inputs(tanh3) val output = LogSoftMax().inputs(fc2) val funcModel = Graph(input, output) val inputData = Tensor(4, 28 * 28).rand() val outputData1 = seqModel.forward(inputData) // warm up var start = System.nanoTime() seqModel.forward(inputData) println(s"seq model forward time is ${(System.nanoTime() - start) / 1e6}ms") start = System.nanoTime() val outputData2 = funcModel.forward(inputData) println(s"funcModel model forward time is ${(System.nanoTime() - start) / 1e6}ms") outputData1 should be(outputData2) val gradient = Tensor(4, 10).rand() start = System.nanoTime() val gradientBP1 = seqModel.backward(inputData, gradient) println(s"seq model backward time is ${(System.nanoTime() - start) / 1e6}ms") start = System.nanoTime() val gradientBP2 = funcModel.backward(inputData, gradient) println(s"funcModel model backward time is ${(System.nanoTime() - start) / 1e6}ms") gradientBP1 should be(gradientBP2) seqModel.getParameters()._2 should be(funcModel.getParameters()._2) } "shift" should "be correct" in { val node = Reshape(Array(1, 28, 28)).inputs() val test = Graph(node, node) test.shift(Array(1, 2, 3, 4), 1, 1) should be(Array(1, 2, 3, 4)) test.shift(Array(1, 2, 3, 4), 1, 3) should be(Array(1, 3, 4, 2)) test.shift(Array(1, 2, 3, 4), 3, 1) should be(Array(1, 4, 2, 3)) } "ResNet-18 basic block shortcut type A" should "be correct" in { RandomGenerator.RNG.setSeed(1000) val seqModel = ModelUntils.ResNet.basicBlockSeq(16, 16, 1, "A") RandomGenerator.RNG.setSeed(1000) val input = Input() val output = ModelUntils.ResNet.basicBlockSeq(16, 16, 1, "A").inputs(input) val funcModel = Graph(input, output) println(seqModel) val inputData = Tensor(4, 16, 32, 32).rand() var start = System.nanoTime() val output1 = seqModel.forward(inputData) println(s"seq model forward time is 
${(System.nanoTime() - start) / 1e6}ms") start = System.nanoTime() val output2 = funcModel.forward(inputData) println(s"func model forward time is ${(System.nanoTime() - start) / 1e6}ms") output1 should be(output2) val gradients = Tensor(4, 16, 32, 32).rand() start = System.nanoTime() val gradients1 = seqModel.backward(inputData, gradients) println(s"seq model backward time is ${(System.nanoTime() - start) / 1e6}ms") start = System.nanoTime() val gradients2 = funcModel.backward(inputData, gradients) println(s"func model backward time is ${(System.nanoTime() - start) / 1e6}ms") gradients1 should be(gradients2) seqModel.getParameters()._2 should be(funcModel.getParameters()._2) } "ResNet-18 basic block shortcut type C" should "be correct" in { RandomGenerator.RNG.setSeed(1000) val seqModel = ModelUntils.ResNet.basicBlockSeq(16, 16, 1, "C") RandomGenerator.RNG.setSeed(1000) val input = Input() val output = ModelUntils.ResNet.basicBlockFunc(16, 16, 1, "C")(input) val funcModel = Graph(input, output) println(seqModel) val inputData = Tensor(4, 16, 32, 32).rand() var start = System.nanoTime() val output1 = seqModel.forward(inputData) println(s"seq model forward time is ${(System.nanoTime() - start) / 1e6}ms") start = System.nanoTime() val output2 = funcModel.forward(inputData) println(s"func model forward time is ${(System.nanoTime() - start) / 1e6}ms") output1 should be(output2) val gradients = Tensor(4, 16, 32, 32).rand() start = System.nanoTime() val gradients1 = seqModel.backward(inputData, gradients) println(s"seq model backward time is ${(System.nanoTime() - start) / 1e6}ms") start = System.nanoTime() val gradients2 = funcModel.backward(inputData, gradients) println(s"func model backward time is ${(System.nanoTime() - start) / 1e6}ms") gradients1 should be(gradients2) seqModel.getParametersTable()[Table]("conv1")[Tensor[Float]]("gradWeight") should be( funcModel.getParametersTable()[Table]("conv1")[Tensor[Float]]("gradWeight") ) 
seqModel.getParametersTable()[Table]("bn1")[Tensor[Float]]("gradWeight") should be( funcModel.getParametersTable()[Table]("bn1")[Tensor[Float]]("gradWeight") ) seqModel.getParametersTable()[Table]("conv2")[Tensor[Float]]("gradWeight") should be( funcModel.getParametersTable()[Table]("conv2")[Tensor[Float]]("gradWeight") ) seqModel.getParametersTable()[Table]("bn2")[Tensor[Float]]("gradWeight") should be( funcModel.getParametersTable()[Table]("bn2")[Tensor[Float]]("gradWeight") ) } "InceptionV1 block" should "be correct" in { RandomGenerator.RNG.setSeed(1000) val seqModel = ModelUntils.Inception.inceptionLayerV1Seq( 2, T(T(4), T(96, 128), T(16, 32), T(32))) RandomGenerator.RNG.setSeed(1000) val input = Input() val output = ModelUntils.Inception.inceptionLayerV1Func( 2, T(T(4), T(96, 128), T(16, 32), T(32)))(input) val funcModel = Graph(input, output) println(seqModel) val inputData = Tensor(1, 2, 4, 4).rand() var start = System.nanoTime() val output1 = seqModel.forward(inputData) println(s"seq model forward time is ${(System.nanoTime() - start) / 1e6}ms") start = System.nanoTime() val output2 = funcModel.forward(inputData) println(s"func model forward time is ${(System.nanoTime() - start) / 1e6}ms") output1 should be(output2) val gradient = Tensor(1, 256, 4, 4).rand() start = System.nanoTime() val gradient1 = seqModel.backward(inputData, gradient) println(s"seq model backward time is ${(System.nanoTime() - start) / 1e6}ms") start = System.nanoTime() val gradient2 = funcModel.backward(inputData, gradient) println(s"func model backward time is ${(System.nanoTime() - start) / 1e6}ms") gradient1 should be(gradient2) seqModel.getParametersTable()[Table]("conv1x1")[Tensor[Float]]("gradWeight") should be( funcModel.getParametersTable()[Table]("conv1x1")[Tensor[Float]]("gradWeight") ) seqModel.getParametersTable()[Table]("conv3x3_1")[Tensor[Float]]("gradWeight") should be( funcModel.getParametersTable()[Table]("conv3x3_1")[Tensor[Float]]("gradWeight") ) 
seqModel.getParametersTable()[Table]("conv3x3_2")[Tensor[Float]]("gradWeight") should be( funcModel.getParametersTable()[Table]("conv3x3_2")[Tensor[Float]]("gradWeight") ) seqModel.getParametersTable()[Table]("conv5x5_1")[Tensor[Float]]("gradWeight") should be( funcModel.getParametersTable()[Table]("conv5x5_1")[Tensor[Float]]("gradWeight") ) seqModel.getParametersTable()[Table]("conv5x5_2")[Tensor[Float]]("gradWeight") should be( funcModel.getParametersTable()[Table]("conv5x5_2")[Tensor[Float]]("gradWeight") ) seqModel.getParametersTable()[Table]("pool_conv")[Tensor[Float]]("gradWeight") should be( funcModel.getParametersTable()[Table]("pool_conv")[Tensor[Float]]("gradWeight") ) } } object ModelUntils { object Inception { def inceptionLayerV1Func(inputSize: Int, config: Table)(input : ModuleNode[Float]) : ModuleNode[Float] = { val conv1x1 = SpatialConvolution(inputSize, config[Table](1)(1), 1, 1, 1, 1) .setName("conv1x1").inputs(input) val relu1x1 = ReLU(true).inputs(conv1x1) val conv3x3_1 = SpatialConvolution(inputSize, config[Table](2)(1), 1, 1, 1, 1) .setName("conv3x3_1").inputs(input) val relu3x3_1 = ReLU(true).inputs(conv3x3_1) val conv3x3_2 = SpatialConvolution( config[Table](2)(1), config[Table](2)(2), 3, 3, 1, 1, 1, 1) .setName("conv3x3_2").inputs(relu3x3_1) val relu3x3_2 = ReLU(true).inputs(conv3x3_2) val conv5x5_1 = SpatialConvolution(inputSize, config[Table](3)(1), 1, 1, 1, 1) .setName("conv5x5_1").inputs(input) val relu5x5_1 = ReLU(true).inputs(conv5x5_1) val conv5x5_2 = SpatialConvolution( config[Table](3)(1), config[Table](3)(2), 5, 5, 1, 1, 2, 2) .setName("conv5x5_2").inputs(relu5x5_1) val relu5x5_2 = ReLU(true).inputs(conv5x5_2) val pool = SpatialMaxPooling(3, 3, 1, 1, 1, 1).ceil() .setName("pool").inputs(input) val convPool = SpatialConvolution(inputSize, config[Table](4)(1), 1, 1, 1, 1) .setName("pool_conv").inputs(pool) val reluPool = ReLU(true).inputs(convPool) JoinTable(2, 4).inputs(relu1x1, relu3x3_2, relu5x5_2, reluPool) } def 
inceptionLayerV1Seq(inputSize: Int, config: Table) : Module[Float] = { val concat = Concat(2) val conv1 = Sequential() conv1.add(SpatialConvolution(inputSize, config[Table](1)(1), 1, 1, 1, 1) .setName("conv1x1")) conv1.add(ReLU(true)) concat.add(conv1) val conv3 = Sequential() conv3.add(SpatialConvolution(inputSize, config[Table](2)(1), 1, 1, 1, 1) .setName("conv3x3_1")) conv3.add(ReLU(true)) conv3.add(SpatialConvolution(config[Table](2)(1), config[Table](2)(2), 3, 3, 1, 1, 1, 1) .setName("conv3x3_2")) conv3.add(ReLU(true)) concat.add(conv3) val conv5 = Sequential() conv5.add(SpatialConvolution(inputSize, config[Table](3)(1), 1, 1, 1, 1) .setName("conv5x5_1")) conv5.add(ReLU(true)) conv5.add(SpatialConvolution(config[Table](3)(1), config[Table](3)(2), 5, 5, 1, 1, 2, 2) .setName("conv5x5_2")) conv5.add(ReLU(true)) concat.add(conv5) val pool = Sequential() pool.add(SpatialMaxPooling(3, 3, 1, 1, 1, 1).ceil() .setName("pool")) pool.add(SpatialConvolution(inputSize, config[Table](4)(1), 1, 1, 1, 1).setName("pool_conv")) pool.add(ReLU(true)) concat.add(pool) concat } } object ResNet { def basicBlockFunc(nInputPlane: Int, n: Int, stride: Int, shortcutType : String)( input : ModuleNode[Float]) : ModuleNode[Float] = { val conv1 = SpatialConvolution(nInputPlane, n, 3, 3, stride, stride, 1, 1) .setName("conv1").inputs(input) val bn1 = SpatialBatchNormalization(n).setName("bn1").inputs(conv1) val relu1 = ReLU(true).inputs(bn1) val conv2 = SpatialConvolution(n, n, 3, 3, 1, 1, 1, 1) .setName("conv2").inputs(relu1) val bn2 = SpatialBatchNormalization(n).setName("bn2").inputs(conv2) val shortcut = shortcutFunc(nInputPlane, n, stride, shortcutType)(input) val add = CAddTable(true).inputs(bn2, shortcut) val output = ReLU(true).inputs(add) output } def basicBlockSeq(nInputPlane: Int, n: Int, stride: Int, shortcutType : String) : Module[Float] = { val s = Sequential() s.add(SpatialConvolution(nInputPlane, n, 3, 3, stride, stride, 1, 1).setName("conv1")) 
s.add(SpatialBatchNormalization(n).setName("bn1")) s.add(ReLU(true)) s.add(SpatialConvolution(n, n, 3, 3, 1, 1, 1, 1).setName("conv2")) s.add(SpatialBatchNormalization(n).setName("bn2")) Sequential() .add(ConcatTable() .add(s) .add(shortcutSeq(nInputPlane, n, stride, shortcutType))) .add(CAddTable(true)) .add(ReLU(true)) } def shortcutFunc(nInputPlane: Int, nOutputPlane: Int, stride: Int, shortcutType : String)(input : ModuleNode[Float]) : ModuleNode[Float] = { val useConv = shortcutType == "C" || (shortcutType == "B" && nInputPlane != nOutputPlane) if (useConv) { val conv1 = SpatialConvolution(nInputPlane, nOutputPlane, 1, 1, stride, stride) .inputs(input) val bn1 = SpatialBatchNormalization(nOutputPlane).inputs(conv1) bn1 } else if (nInputPlane != nOutputPlane) { val pool1 = SpatialAveragePooling(1, 1, stride, stride).inputs(input) val mul1 = MulConstant(0f).inputs(pool1) val concat = JoinTable(2, 3).inputs(pool1, mul1) concat } else { input } } def shortcutSeq(nInputPlane: Int, nOutputPlane: Int, stride: Int, shortcutType : String) : Module[Float] = { val useConv = shortcutType == "C" || (shortcutType == "B" && nInputPlane != nOutputPlane) if (useConv) { Sequential() .add(SpatialConvolution(nInputPlane, nOutputPlane, 1, 1, stride, stride)) .add(SpatialBatchNormalization(nOutputPlane)) } else if (nInputPlane != nOutputPlane) { Sequential() .add(SpatialAveragePooling(1, 1, stride, stride)) .add(Concat(2) .add(Identity()) .add(MulConstant(0f))) } else { Identity() } } } }
JerryYanWan/BigDL
spark/dl/src/test/scala/com/intel/analytics/bigdl/nn/GraphSpec.scala
Scala
apache-2.0
27,980
import sbt._ import Keys._ object MetamorphicSlickSQLiteBuild extends Build { val thisV = "0.0-SNAPSHOT" val metamorphicV = "0.0-SNAPSHOT" val paradiseV = "2.1.0-M5" val sqliteV = "3.7.2" lazy val sqliteCompileDependencies = Seq( "org.xerial" % "sqlite-jdbc" % sqliteV ) lazy val baseSettings = Seq( scalaVersion := "2.11.6", scalacOptions ++= Seq("-unchecked", "-deprecation", "-feature", "-language:experimental.macros"), crossScalaVersions := Seq("2.10.2", "2.10.3", "2.10.4", "2.11.0", "2.11.1", "2.11.2", "2.11.3", "2.11.4", "2.11.5"), resolvers += Resolver.typesafeRepo("releases"), addCompilerPlugin("org.scalamacros" % "paradise" % paradiseV cross CrossVersion.full) ) lazy val slick_dev = ProjectRef(file("./.."), "slick_dev") // SQLite Project lazy val sqliteSettings = Seq( libraryDependencies ++= sqliteCompileDependencies ) lazy val sqlite = (project in file(".")). settings(baseSettings: _*). settings(sqliteSettings: _*). settings( version := thisV, name := "slick-sqlite", organization := "metamorphic", libraryDependencies += "metamorphic" %% "slick" % metamorphicV ) lazy val sqlite_dev = (project in file(".")). dependsOn(slick_dev). settings(baseSettings: _*). settings(sqliteSettings: _*). settings( target := target.value / "dev", publish := {} ) }
frroliveira/metamorphic
metamorphic-slick/sqlite/project/Build.scala
Scala
mit
1,425
package anagram.core.models /** * Created by Random on 12/24/2014. */ case class AnagramResult(text: String, score: Int) { println(text) }
chprice/Anagram
src/anagram/core/models/AnagramResult.scala
Scala
mit
145
package heartbeat_tests /** * Copyright (c) Nikita Kovaliov, maizy.ru, 2015 * See LICENSE.txt for details. */ import scala.concurrent.duration._ import akka.actor.Props import akka.testkit.TestActorRef import org.scalatest.FlatSpecLike import ru.maizy.dev.heartbeat.actor._ class SupervisorSpec extends ActorSystemClusterBaseSpec with FlatSpecLike { trait TestSupervisors { // blocking tests for underling actor val sv1 = TestActorRef(Props[BaseSupervisor]) val sv2 = TestActorRef(Props[BaseSupervisor]) val sv3 = TestActorRef(Props[BaseSupervisor]) val all = Seq(sv1, sv2, sv3) all.foreach { _ ! StartUp(2) } } "Supervisor" should "init stat actors on startup" in { val sv = TestActorRef(Props[BaseSupervisor]) sv.children.toList should have length 0 sv ! StartUp(3) sv.children.toList should have length 3 } it should "add supervisors" in { new TestSupervisors { sv1 ! GetKnownSupervisors expectMsg(KnownSupervisors(Nil)) sv1 ! AddSupervisors(Seq(sv2, sv3)) sv1 ! GetKnownSupervisors val res = receiveOne(100.millis) res shouldBe a[KnownSupervisors] res.asInstanceOf[KnownSupervisors].supervisorsRefs should contain theSameElementsAs Seq(sv2, sv3) } } it should "remove supervisors" in { new TestSupervisors { sv1 ! AddSupervisors(Seq(sv2, sv3)) sv1 ! RemoveSupervisors(Seq(sv3)) sv1 ! GetKnownSupervisors expectMsg(KnownSupervisors(Seq(sv2))) sv1 ! RemoveSupervisors(Seq(sv2)) sv1 ! GetKnownSupervisors expectMsg(KnownSupervisors(Nil)) } } }
maizy/akka-cluster-heartbeat
src/test/scala/heartbeat_tests/SupervisorSpec.scala
Scala
mit
1,625
/* _ _ _ *\\ ** | (_) | | ** ** ___| |_ __| | ___ clide 2 ** ** / __| | |/ _` |/ _ \\ (c) 2012-2014 Martin Ring ** ** | (__| | | (_| | __/ http://clide.flatmap.net ** ** \\___|_|_|\\__,_|\\___| ** ** ** ** This file is part of Clide. ** ** ** ** Clide is free software: you can redistribute it and/or modify ** ** it under the terms of the GNU Lesser General Public License as ** ** published by the Free Software Foundation, either version 3 of ** ** the License, or (at your option) any later version. ** ** ** ** Clide is distributed in the hope that it will be useful, ** ** but WITHOUT ANY WARRANTY; without even the implied warranty of ** ** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ** ** GNU General Public License for more details. ** ** ** ** You should have received a copy of the GNU Lesser General Public ** ** License along with Clide. ** ** If not, see <http://www.gnu.org/licenses/>. ** \\* */ package clide.models import java.security.MessageDigest import java.util.Arrays @SerialVersionUID(1L) case class UserInfo( name: String, email: String) { def withPassword(plain: String) = { // TODO: Think about security new UserInfo(name,email) with Password { val password = UserInfo.passwordHash(name,plain) } } } object UserInfo { def passwordHash(name: String, password: String) = MessageDigest.getInstance("MD5").digest((name + password).getBytes("UTF-8")) } object UserInfoWithPassword { def apply(name: String, email: String, pword: Array[Byte]) = new UserInfo(name,email) with Password { val password = pword } def unapply(u: UserInfo with Password) = Some((u.name,u.email,u.password)) val tupled = (apply _).tupled } trait Password { self: UserInfo => val password: Array[Byte] def authenticate(password: String) = Arrays.equals(UserInfo.passwordHash(name, password),this.password) }
martinring/clide2
modules/clide-core/src/main/scala/clide/models/UserInfo.scala
Scala
lgpl-3.0
2,812
package lert.elasticsearch import java.util.{Date, UUID} import lert.core.config.Source import lert.elasticsearch.restclient.AWSRestClient import org.apache.http.entity.ContentType import org.apache.http.nio.entity.NStringEntity object AWSDataGenerator extends App { val message = UUID.randomUUID().toString val record = s"""{ | "path" : "/testlog", | "@timestamp" : "${ElasticSearchProcessor.DATE_FORMAT.format(new Date())}", | "@version" : "1", | "host" : "MacBook-Pro", | "message" : "Message $message", | "type" : "syslog" |} """.stripMargin new AWSRestClient(Source( url = s"elasticSearch:${System.getProperty("elasticsearch.url")}", params = Some(Map("awsRegion" -> System.getProperty("aws.region"))) )).performRequest("PUT", s"/logstash-2017.09.05/syslog/$message", Map(), new NStringEntity(record, ContentType.APPLICATION_JSON)) }
l3rt/l3rt
elasticsearch-input/src/test/scala/lert/elasticsearch/AWSDataGenerator.scala
Scala
apache-2.0
932
package mesosphere.marathon package api.validation import mesosphere.UnitTest import mesosphere.marathon.core.plugin.PluginManager import mesosphere.marathon.raml.Apps import mesosphere.marathon.state._ class AppDefinitionSchedulerValidationTest extends UnitTest { private lazy val validAppDefinition = AppDefinition.validAppDefinition(Set())(PluginManager.None) class Fixture { def normalApp = AppDefinition( id = PathId("/test"), cmd = Some("sleep 1000")) def schedulerAppWithApi( frameworkName: String = "Framework-42", migrationApiVersion: String = "v1", migrationApiPath: String = "/v1/plan"): AppDefinition = { AppDefinition( id = PathId("/test"), cmd = Some("sleep 1000"), instances = 1, upgradeStrategy = UpgradeStrategy(0, 0), labels = Map( Apps.LabelDcosPackageFrameworkName -> frameworkName, Apps.LabelDcosMigrationApiVersion -> migrationApiVersion, Apps.LabelDcosMigrationApiPath -> migrationApiPath ) ) } } "AppDefinitionSchedulerValidation" should { "scheduler app using API is considered a scheduler and valid" in { val f = new Fixture Given("an scheduler with required labels, 1 instance, correct upgradeStrategy and 1 readinessCheck") val app = f.schedulerAppWithApi() Then("the app is considered valid") validAppDefinition(app).isSuccess shouldBe true } "required scheduler labels" in { val f = new Fixture Given("an app with 1 instance and an UpgradeStrategy of (0,0)") val normalApp = f.normalApp.copy( instances = 1, upgradeStrategy = UpgradeStrategy(0, 0) ) When("the Migration API version is added") val step1 = normalApp.copy(labels = normalApp.labels + ( Apps.LabelDcosMigrationApiVersion -> "v1" )) Then("the app is not valid") validAppDefinition(step1).isSuccess shouldBe false When("the framework label is added") val step2 = normalApp.copy(labels = step1.labels + ( Apps.LabelDcosPackageFrameworkName -> "Framework-42" )) Then("the app is not valid") validAppDefinition(step2).isSuccess shouldBe false When("the Migration API path is added") val step3 = 
normalApp.copy(labels = step2.labels + ( Apps.LabelDcosMigrationApiPath -> "/v1/plan" )) Then("the app is valid") validAppDefinition(step3).isSuccess shouldBe true } "If a scheduler application defines DCOS_MIGRATION_API_VERSION, only 'v1' is valid" in { val f = new Fixture Given("a scheduler app defining DCOS_MIGRATION_API_VERSION other than 'v1'") val app = f.schedulerAppWithApi(migrationApiVersion = "v2") Then("the validation should fail") validAppDefinition(app).isFailure shouldBe true } "If a scheduler application defines DCOS_MIGRATION_API_PATH it must be non-empty" in { val f = new Fixture Given("a scheduler app with an empty migration path") val app = f.schedulerAppWithApi(migrationApiPath = "") Then("the validation should fail") validAppDefinition(app).isFailure shouldBe true } "If a scheduler application defines DCOS_PACKAGE_FRAMEWORK_NAME it must be non-empty" in { val f = new Fixture Given("a scheduler app with an empty framework name") val app = f.schedulerAppWithApi(frameworkName = "") Then("the validation should fail") validAppDefinition(app).isFailure shouldBe true } } }
meln1k/marathon
src/test/scala/mesosphere/marathon/api/validation/AppDefinitionSchedulerValidationTest.scala
Scala
apache-2.0
3,560
package common object FileEx { import java.sql.Date import java.text.SimpleDateFormat import java.io.{ File, FileInputStream, BufferedInputStream } import org.apache.commons.codec.digest.DigestUtils.md5Hex private def listAllFiles(file: File): Array[File] = { val list = file.listFiles if (list == null) Array[File]() else list ++ list.filter(_.isDirectory).flatMap(listAllFiles) } private val dateFormat = new SimpleDateFormat("yyyy-MM-dd") implicit class FileOps(file: File) { def flatten = if (file.exists) if (file.isDirectory) listAllFiles(file) else Array(file) else Array[File]() def checksum = try { val fIS = new BufferedInputStream(new FileInputStream(file)) val md5 = md5Hex(fIS) fIS.close md5 } catch { case e: Exception => "" } def lastModifiedString = Date.valueOf(dateFormat.format(file.lastModified)) } }
ShiZhan/slickfs
src/main/scala/common/FileEx.scala
Scala
apache-2.0
965
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql import java.sql.{Date, Timestamp} import java.time.{Duration, Instant, LocalDate, LocalDateTime, Period} import java.time.temporal.ChronoUnit import java.util.Locale import org.apache.hadoop.io.{LongWritable, Text} import org.apache.hadoop.mapreduce.lib.input.{TextInputFormat => NewTextInputFormat} import org.scalatest.matchers.should.Matchers._ import org.apache.spark.SparkException import org.apache.spark.sql.UpdateFieldsBenchmark._ import org.apache.spark.sql.catalyst.expressions.{InSet, Literal, NamedExpression} import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.{outstandingTimezonesIds, outstandingZoneIds} import org.apache.spark.sql.catalyst.util.DateTimeUtils import org.apache.spark.sql.execution.ProjectExec import org.apache.spark.sql.functions._ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.SharedSparkSession import org.apache.spark.sql.types._ import org.apache.spark.unsafe.types.UTF8String class ColumnExpressionSuite extends QueryTest with SharedSparkSession { import testImplicits._ private lazy val booleanData = { spark.createDataFrame(sparkContext.parallelize( Row(false, false) :: Row(false, true) :: Row(true, false) 
:: Row(true, true) :: Nil), StructType(Seq(StructField("a", BooleanType), StructField("b", BooleanType)))) } private lazy val nullData = Seq( (Some(1), Some(1)), (Some(1), Some(2)), (Some(1), None), (None, None)).toDF("a", "b") test("column names with space") { val df = Seq((1, "a")).toDF("name with space", "name.with.dot") checkAnswer( df.select(df("name with space")), Row(1) :: Nil) checkAnswer( df.select($"name with space"), Row(1) :: Nil) checkAnswer( df.select(col("name with space")), Row(1) :: Nil) checkAnswer( df.select("name with space"), Row(1) :: Nil) checkAnswer( df.select(expr("`name with space`")), Row(1) :: Nil) } test("column names with dot") { val df = Seq((1, "a")).toDF("name with space", "name.with.dot").as("a") checkAnswer( df.select(df("`name.with.dot`")), Row("a") :: Nil) checkAnswer( df.select($"`name.with.dot`"), Row("a") :: Nil) checkAnswer( df.select(col("`name.with.dot`")), Row("a") :: Nil) checkAnswer( df.select("`name.with.dot`"), Row("a") :: Nil) checkAnswer( df.select(expr("`name.with.dot`")), Row("a") :: Nil) checkAnswer( df.select(df("a.`name.with.dot`")), Row("a") :: Nil) checkAnswer( df.select($"a.`name.with.dot`"), Row("a") :: Nil) checkAnswer( df.select(col("a.`name.with.dot`")), Row("a") :: Nil) checkAnswer( df.select("a.`name.with.dot`"), Row("a") :: Nil) checkAnswer( df.select(expr("a.`name.with.dot`")), Row("a") :: Nil) } test("alias and name") { val df = Seq((1, Seq(1, 2, 3))).toDF("a", "intList") assert(df.select(df("a").as("b")).columns.head === "b") assert(df.select(df("a").alias("b")).columns.head === "b") assert(df.select(df("a").name("b")).columns.head === "b") } test("as propagates metadata") { val metadata = new MetadataBuilder metadata.putString("key", "value") val origCol = $"a".as("b", metadata.build()) val newCol = origCol.as("c") assert(newCol.expr.asInstanceOf[NamedExpression].metadata.getString("key") === "value") } test("collect on column produced by a binary operator") { val df = Seq((1, 2, 3)).toDF("a", 
"b", "c") checkAnswer(df.select(df("a") + df("b")), Seq(Row(3))) checkAnswer(df.select(df("a") + df("b").as("c")), Seq(Row(3))) } test("star") { checkAnswer(testData.select($"*"), testData.collect().toSeq) } test("star qualified by data frame object") { val df = testData.toDF val goldAnswer = df.collect().toSeq checkAnswer(df.select(df("*")), goldAnswer) val df1 = df.select(df("*"), lit("abcd").as("litCol")) checkAnswer(df1.select(df("*")), goldAnswer) } test("star qualified by table name") { checkAnswer(testData.as("testData").select($"testData.*"), testData.collect().toSeq) } test("SPARK-34199: star can be qualified by table name inside a non-count function") { checkAnswer( testData.as("testData").selectExpr("hash(testData.*)"), testData.as("testData").selectExpr("hash(testData.key, testData.value)") ) } test("SPARK-34199: star cannot be qualified by table name inside a count function") { val e = intercept[AnalysisException] { testData.as("testData").selectExpr("count(testData.*)").collect() } assert(e.getMessage.contains( "count(testData.*) is not allowed. 
Please use count(*) or expand the columns manually")) } test("SPARK-34199: table star can be qualified inside a count function with multiple arguments") { checkAnswer( testData.as("testData").selectExpr("count(testData.*, testData.key)"), testData.as("testData").selectExpr("count(testData.key, testData.value, testData.key)") ) } test("+") { checkAnswer( testData2.select($"a" + 1), testData2.collect().toSeq.map(r => Row(r.getInt(0) + 1))) checkAnswer( testData2.select($"a" + $"b" + 2), testData2.collect().toSeq.map(r => Row(r.getInt(0) + r.getInt(1) + 2))) } test("-") { checkAnswer( testData2.select($"a" - 1), testData2.collect().toSeq.map(r => Row(r.getInt(0) - 1))) checkAnswer( testData2.select($"a" - $"b" - 2), testData2.collect().toSeq.map(r => Row(r.getInt(0) - r.getInt(1) - 2))) } test("*") { checkAnswer( testData2.select($"a" * 10), testData2.collect().toSeq.map(r => Row(r.getInt(0) * 10))) checkAnswer( testData2.select($"a" * $"b"), testData2.collect().toSeq.map(r => Row(r.getInt(0) * r.getInt(1)))) } test("/") { checkAnswer( testData2.select($"a" / 2), testData2.collect().toSeq.map(r => Row(r.getInt(0).toDouble / 2))) checkAnswer( testData2.select($"a" / $"b"), testData2.collect().toSeq.map(r => Row(r.getInt(0).toDouble / r.getInt(1)))) } test("%") { checkAnswer( testData2.select($"a" % 2), testData2.collect().toSeq.map(r => Row(r.getInt(0) % 2))) checkAnswer( testData2.select($"a" % $"b"), testData2.collect().toSeq.map(r => Row(r.getInt(0) % r.getInt(1)))) } test("unary -") { checkAnswer( testData2.select(-$"a"), testData2.collect().toSeq.map(r => Row(-r.getInt(0)))) } test("unary !") { checkAnswer( complexData.select(!$"b"), complexData.collect().toSeq.map(r => Row(!r.getBoolean(3)))) } test("isNull") { checkAnswer( nullStrings.toDF.where($"s".isNull), nullStrings.collect().toSeq.filter(r => r.getString(1) eq null)) checkAnswer( sql("select isnull(null), isnull(1)"), Row(true, false)) } test("isNotNull") { checkAnswer( 
nullStrings.toDF.where($"s".isNotNull), nullStrings.collect().toSeq.filter(r => r.getString(1) ne null)) checkAnswer( sql("select isnotnull(null), isnotnull('a')"), Row(false, true)) } test("isNaN") { val testData = spark.createDataFrame(sparkContext.parallelize( Row(Double.NaN, Float.NaN) :: Row(math.log(-1), math.log(-3).toFloat) :: Row(null, null) :: Row(Double.MaxValue, Float.MinValue):: Nil), StructType(Seq(StructField("a", DoubleType), StructField("b", FloatType)))) checkAnswer( testData.select($"a".isNaN, $"b".isNaN), Row(true, true) :: Row(true, true) :: Row(false, false) :: Row(false, false) :: Nil) checkAnswer( testData.select(isnan($"a"), isnan($"b")), Row(true, true) :: Row(true, true) :: Row(false, false) :: Row(false, false) :: Nil) checkAnswer( sql("select isnan(15), isnan('invalid')"), Row(false, false)) } test("nanvl") { withTempView("t") { val testData = spark.createDataFrame(sparkContext.parallelize( Row(null, 3.0, Double.NaN, Double.PositiveInfinity, 1.0f, 4) :: Nil), StructType(Seq(StructField("a", DoubleType), StructField("b", DoubleType), StructField("c", DoubleType), StructField("d", DoubleType), StructField("e", FloatType), StructField("f", IntegerType)))) checkAnswer( testData.select( nanvl($"a", lit(5)), nanvl($"b", lit(10)), nanvl(lit(10), $"b"), nanvl($"c", lit(null).cast(DoubleType)), nanvl($"d", lit(10)), nanvl($"b", $"e"), nanvl($"e", $"f")), Row(null, 3.0, 10.0, null, Double.PositiveInfinity, 3.0, 1.0) ) testData.createOrReplaceTempView("t") checkAnswer( sql( "select nanvl(a, 5), nanvl(b, 10), nanvl(10, b), nanvl(c, null), nanvl(d, 10), " + " nanvl(b, e), nanvl(e, f) from t"), Row(null, 3.0, 10.0, null, Double.PositiveInfinity, 3.0, 1.0) ) } } test("===") { checkAnswer( testData2.filter($"a" === 1), testData2.collect().toSeq.filter(r => r.getInt(0) == 1)) checkAnswer( testData2.filter($"a" === $"b"), testData2.collect().toSeq.filter(r => r.getInt(0) == r.getInt(1))) } test("<=>") { checkAnswer( nullData.filter($"b" <=> 1), Row(1, 1) 
:: Nil) checkAnswer( nullData.filter($"b" <=> null), Row(1, null) :: Row(null, null) :: Nil) checkAnswer( nullData.filter($"a" <=> $"b"), Row(1, 1) :: Row(null, null) :: Nil) val nullData2 = spark.createDataFrame(sparkContext.parallelize( Row("abc") :: Row(null) :: Row("xyz") :: Nil), StructType(Seq(StructField("a", StringType, true)))) checkAnswer( nullData2.filter($"a" <=> null), Row(null) :: Nil) } test("=!=") { checkAnswer( nullData.filter($"b" =!= 1), Row(1, 2) :: Nil) checkAnswer(nullData.filter($"b" =!= null), Nil) checkAnswer( nullData.filter($"a" =!= $"b"), Row(1, 2) :: Nil) } test(">") { checkAnswer( testData2.filter($"a" > 1), testData2.collect().toSeq.filter(r => r.getInt(0) > 1)) checkAnswer( testData2.filter($"a" > $"b"), testData2.collect().toSeq.filter(r => r.getInt(0) > r.getInt(1))) } test(">=") { checkAnswer( testData2.filter($"a" >= 1), testData2.collect().toSeq.filter(r => r.getInt(0) >= 1)) checkAnswer( testData2.filter($"a" >= $"b"), testData2.collect().toSeq.filter(r => r.getInt(0) >= r.getInt(1))) } test("<") { checkAnswer( testData2.filter($"a" < 2), testData2.collect().toSeq.filter(r => r.getInt(0) < 2)) checkAnswer( testData2.filter($"a" < $"b"), testData2.collect().toSeq.filter(r => r.getInt(0) < r.getInt(1))) } test("<=") { checkAnswer( testData2.filter($"a" <= 2), testData2.collect().toSeq.filter(r => r.getInt(0) <= 2)) checkAnswer( testData2.filter($"a" <= $"b"), testData2.collect().toSeq.filter(r => r.getInt(0) <= r.getInt(1))) } test("between") { val testData = sparkContext.parallelize( (0, 1, 2) :: (1, 2, 3) :: (2, 1, 0) :: (2, 2, 4) :: (3, 1, 6) :: (3, 2, 0) :: Nil).toDF("a", "b", "c") val expectAnswer = testData.collect().toSeq. 
filter(r => r.getInt(0) >= r.getInt(1) && r.getInt(0) <= r.getInt(2)) checkAnswer(testData.filter($"a".between($"b", $"c")), expectAnswer) } test("in") { val df = Seq((1, "x"), (2, "y"), (3, "z")).toDF("a", "b") checkAnswer(df.filter($"a".isin(1, 2)), df.collect().toSeq.filter(r => r.getInt(0) == 1 || r.getInt(0) == 2)) checkAnswer(df.filter($"a".isin(3, 2)), df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 2)) checkAnswer(df.filter($"a".isin(3, 1)), df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 1)) checkAnswer(df.filter($"b".isin("y", "x")), df.collect().toSeq.filter(r => r.getString(1) == "y" || r.getString(1) == "x")) checkAnswer(df.filter($"b".isin("z", "x")), df.collect().toSeq.filter(r => r.getString(1) == "z" || r.getString(1) == "x")) checkAnswer(df.filter($"b".isin("z", "y")), df.collect().toSeq.filter(r => r.getString(1) == "z" || r.getString(1) == "y")) // Auto casting should work with mixture of different types in collections checkAnswer(df.filter($"a".isin(1.toShort, "2")), df.collect().toSeq.filter(r => r.getInt(0) == 1 || r.getInt(0) == 2)) checkAnswer(df.filter($"a".isin("3", 2.toLong)), df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 2)) checkAnswer(df.filter($"a".isin(3, "1")), df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 1)) val df2 = Seq((1, Seq(1)), (2, Seq(2)), (3, Seq(3))).toDF("a", "b") val e = intercept[AnalysisException] { df2.filter($"a".isin($"b")) } Seq("cannot resolve", "due to data type mismatch: Arguments must be same type but were") .foreach { s => assert(e.getMessage.toLowerCase(Locale.ROOT).contains(s.toLowerCase(Locale.ROOT))) } } test("IN/INSET with bytes, shorts, ints, dates") { def check(): Unit = { val values = Seq( (Byte.MinValue, Some(Short.MinValue), Int.MinValue, Date.valueOf("2017-01-01")), (Byte.MaxValue, None, Int.MaxValue, null)) val df = values.toDF("b", "s", "i", "d") checkAnswer(df.select($"b".isin(Byte.MinValue, Byte.MaxValue)), 
Seq(Row(true), Row(true))) checkAnswer(df.select($"b".isin(-1.toByte, 2.toByte)), Seq(Row(false), Row(false))) checkAnswer(df.select($"s".isin(Short.MinValue, 1.toShort)), Seq(Row(true), Row(null))) checkAnswer(df.select($"s".isin(0.toShort, null)), Seq(Row(null), Row(null))) checkAnswer(df.select($"i".isin(0, Int.MinValue)), Seq(Row(true), Row(false))) checkAnswer(df.select($"i".isin(null, Int.MinValue)), Seq(Row(true), Row(null))) checkAnswer( df.select($"d".isin(Date.valueOf("1950-01-01"), Date.valueOf("2017-01-01"))), Seq(Row(true), Row(null))) checkAnswer( df.select($"d".isin(Date.valueOf("1950-01-01"), null)), Seq(Row(null), Row(null))) } withSQLConf(SQLConf.OPTIMIZER_INSET_CONVERSION_THRESHOLD.key -> "10") { check() } withSQLConf( SQLConf.OPTIMIZER_INSET_CONVERSION_THRESHOLD.key -> "0", SQLConf.OPTIMIZER_INSET_SWITCH_THRESHOLD.key -> "0") { check() } withSQLConf( SQLConf.OPTIMIZER_INSET_CONVERSION_THRESHOLD.key -> "0", SQLConf.OPTIMIZER_INSET_SWITCH_THRESHOLD.key -> "20") { check() } } test("isInCollection: Scala Collection") { Seq(0, 1, 10).foreach { optThreshold => Seq(0, 1, 10).foreach { switchThreshold => withSQLConf( SQLConf.OPTIMIZER_INSET_CONVERSION_THRESHOLD.key -> optThreshold.toString, SQLConf.OPTIMIZER_INSET_SWITCH_THRESHOLD.key -> switchThreshold.toString) { val df = Seq((1, "x"), (2, "y"), (3, "z")).toDF("a", "b") // Test with different types of collections checkAnswer(df.filter($"a".isInCollection(Seq(3, 1))), df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 1)) checkAnswer(df.filter($"a".isInCollection(Seq(1, 2).toSet)), df.collect().toSeq.filter(r => r.getInt(0) == 1 || r.getInt(0) == 2)) checkAnswer(df.filter($"a".isInCollection(Seq(3, 2).toArray)), df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 2)) checkAnswer(df.filter($"a".isInCollection(Seq(3, 1).toList)), df.collect().toSeq.filter(r => r.getInt(0) == 3 || r.getInt(0) == 1)) val df2 = Seq((1, Seq(1)), (2, Seq(2)), (3, Seq(3))).toDF("a", "b") val e = 
intercept[AnalysisException] { df2.filter($"a".isInCollection(Seq($"b"))) } Seq("cannot resolve", "due to data type mismatch: Arguments must be same type but were") .foreach { s => assert(e.getMessage.toLowerCase(Locale.ROOT).contains(s.toLowerCase(Locale.ROOT))) } } } } } test("SPARK-31553: isInCollection - collection element types") { val expected = Seq(Row(true), Row(false)) Seq(0, 1, 10).foreach { optThreshold => Seq(0, 1, 10).foreach { switchThreshold => withSQLConf( SQLConf.OPTIMIZER_INSET_CONVERSION_THRESHOLD.key -> optThreshold.toString, SQLConf.OPTIMIZER_INSET_SWITCH_THRESHOLD.key -> switchThreshold.toString) { checkAnswer(Seq(0).toDS.select($"value".isInCollection(Seq(null))), Seq(Row(null))) checkAnswer( Seq(true).toDS.select($"value".isInCollection(Seq(true, false))), Seq(Row(true))) checkAnswer( Seq(0.toByte, 1.toByte).toDS.select($"value".isInCollection(Seq(0.toByte, 2.toByte))), expected) checkAnswer( Seq(0.toShort, 1.toShort).toDS .select($"value".isInCollection(Seq(0.toShort, 2.toShort))), expected) checkAnswer(Seq(0, 1).toDS.select($"value".isInCollection(Seq(0, 2))), expected) checkAnswer(Seq(0L, 1L).toDS.select($"value".isInCollection(Seq(0L, 2L))), expected) checkAnswer(Seq(0.0f, 1.0f).toDS .select($"value".isInCollection(Seq(0.0f, 2.0f))), expected) checkAnswer(Seq(0.0D, 1.0D).toDS .select($"value".isInCollection(Seq(0.0D, 2.0D))), expected) checkAnswer( Seq(BigDecimal(0), BigDecimal(2)).toDS .select($"value".isInCollection(Seq(BigDecimal(0), BigDecimal(1)))), expected) checkAnswer( Seq("abc", "def").toDS.select($"value".isInCollection(Seq("abc", "xyz"))), expected) checkAnswer( Seq(Date.valueOf("2020-04-29"), Date.valueOf("2020-05-01")).toDS .select($"value".isInCollection( Seq(Date.valueOf("2020-04-29"), Date.valueOf("2020-04-30")))), expected) checkAnswer( Seq(new Timestamp(0), new Timestamp(2)).toDS .select($"value".isInCollection(Seq(new Timestamp(0), new Timestamp(1)))), expected) checkAnswer( Seq(Array("a", "b"), Array("c", "d")).toDS 
.select($"value".isInCollection(Seq(Array("a", "b"), Array("x", "z")))), expected) } } } } test("&&") { checkAnswer( booleanData.filter($"a" && true), Row(true, false) :: Row(true, true) :: Nil) checkAnswer( booleanData.filter($"a" && false), Nil) checkAnswer( booleanData.filter($"a" && $"b"), Row(true, true) :: Nil) } test("||") { checkAnswer( booleanData.filter($"a" || true), booleanData.collect()) checkAnswer( booleanData.filter($"a" || false), Row(true, false) :: Row(true, true) :: Nil) checkAnswer( booleanData.filter($"a" || $"b"), Row(false, true) :: Row(true, false) :: Row(true, true) :: Nil) } test("SPARK-7321 when conditional statements") { val testData = (1 to 3).map(i => (i, i.toString)).toDF("key", "value") checkAnswer( testData.select(when($"key" === 1, -1).when($"key" === 2, -2).otherwise(0)), Seq(Row(-1), Row(-2), Row(0)) ) // Without the ending otherwise, return null for unmatched conditions. // Also test putting a non-literal value in the expression. checkAnswer( testData.select(when($"key" === 1, lit(0) - $"key").when($"key" === 2, -2)), Seq(Row(-1), Row(-2), Row(null)) ) // Test error handling for invalid expressions. 
intercept[IllegalArgumentException] { $"key".when($"key" === 1, -1) } intercept[IllegalArgumentException] { $"key".otherwise(-1) } intercept[IllegalArgumentException] { when($"key" === 1, -1).otherwise(-1).otherwise(-1) } } test("sqrt") { checkAnswer( testData.select(sqrt($"key")).orderBy($"key".asc), (1 to 100).map(n => Row(math.sqrt(n))) ) checkAnswer( testData.select(sqrt($"value"), $"key").orderBy($"key".asc, $"value".asc), (1 to 100).map(n => Row(math.sqrt(n), n)) ) checkAnswer( testData.select(sqrt(lit(null))), (1 to 100).map(_ => Row(null)) ) } test("upper") { checkAnswer( lowerCaseData.select(upper($"l")), ('a' to 'd').map(c => Row(c.toString.toUpperCase(Locale.ROOT))) ) checkAnswer( testData.select(upper($"value"), $"key"), (1 to 100).map(n => Row(n.toString, n)) ) checkAnswer( testData.select(upper(lit(null))), (1 to 100).map(n => Row(null)) ) checkAnswer( sql("SELECT upper('aB'), ucase('cDe')"), Row("AB", "CDE")) } test("lower") { checkAnswer( upperCaseData.select(lower($"L")), ('A' to 'F').map(c => Row(c.toString.toLowerCase(Locale.ROOT))) ) checkAnswer( testData.select(lower($"value"), $"key"), (1 to 100).map(n => Row(n.toString, n)) ) checkAnswer( testData.select(lower(lit(null))), (1 to 100).map(n => Row(null)) ) checkAnswer( sql("SELECT lower('aB'), lcase('cDe')"), Row("ab", "cde")) } test("monotonically_increasing_id") { // Make sure we have 2 partitions, each with 2 records. val df = sparkContext.parallelize(Seq[Int](), 2).mapPartitions { _ => Iterator(Tuple1(1), Tuple1(2)) }.toDF("a") checkAnswer( df.select(monotonically_increasing_id(), expr("monotonically_increasing_id()")), Row(0L, 0L) :: Row(1L, 1L) :: Row((1L << 33) + 0L, (1L << 33) + 0L) :: Row((1L << 33) + 1L, (1L << 33) + 1L) :: Nil ) } test("spark_partition_id") { // Make sure we have 2 partitions, each with 2 records. 
val df = sparkContext.parallelize(Seq[Int](), 2).mapPartitions { _ => Iterator(Tuple1(1), Tuple1(2)) }.toDF("a") checkAnswer( df.select(spark_partition_id()), Row(0) :: Row(0) :: Row(1) :: Row(1) :: Nil ) } test("input_file_name, input_file_block_start, input_file_block_length - more than one source") { withTempView("tempView1") { withTable("tab1", "tab2") { val data = sparkContext.parallelize(0 to 9).toDF("id") data.write.saveAsTable("tab1") data.write.saveAsTable("tab2") data.createOrReplaceTempView("tempView1") Seq("input_file_name", "input_file_block_start", "input_file_block_length").foreach { f => val e = intercept[AnalysisException] { sql(s"SELECT *, $f() FROM tab1 JOIN tab2 ON tab1.id = tab2.id") }.getMessage assert(e.contains(s"'$f' does not support more than one source")) } def checkResult( fromClause: String, exceptionExpected: Boolean, numExpectedRows: Int = 0): Unit = { val stmt = s"SELECT *, input_file_name() FROM ($fromClause)" if (exceptionExpected) { val e = intercept[AnalysisException](sql(stmt)).getMessage assert(e.contains("'input_file_name' does not support more than one source")) } else { assert(sql(stmt).count() == numExpectedRows) } } checkResult( "SELECT * FROM tab1 UNION ALL SELECT * FROM tab2 UNION ALL SELECT * FROM tab2", exceptionExpected = false, numExpectedRows = 30) checkResult( "(SELECT * FROM tempView1 NATURAL JOIN tab2) UNION ALL SELECT * FROM tab2", exceptionExpected = false, numExpectedRows = 20) checkResult( "(SELECT * FROM tab1 UNION ALL SELECT * FROM tab2) NATURAL JOIN tempView1", exceptionExpected = false, numExpectedRows = 20) checkResult( "(SELECT * FROM tempView1 UNION ALL SELECT * FROM tab2) NATURAL JOIN tab2", exceptionExpected = true) checkResult( "(SELECT * FROM tab1 NATURAL JOIN tab2) UNION ALL SELECT * FROM tab2", exceptionExpected = true) checkResult( "(SELECT * FROM tab1 UNION ALL SELECT * FROM tab2) NATURAL JOIN tab2", exceptionExpected = true) } } } test("input_file_name, input_file_block_start, 
input_file_block_length - FileScanRDD") { withTempPath { dir => val data = sparkContext.parallelize(0 to 10).toDF("id") data.write.parquet(dir.getCanonicalPath) // Test the 3 expressions when reading from files val q = spark.read.parquet(dir.getCanonicalPath).select( input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()")) val firstRow = q.head() assert(firstRow.getString(0).contains(dir.toURI.getPath)) assert(firstRow.getLong(1) == 0) assert(firstRow.getLong(2) > 0) // Now read directly from the original RDD without going through any files to make sure // we are returning empty string, -1, and -1. checkAnswer( data.select( input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()") ).limit(1), Row("", -1L, -1L)) } } test("input_file_name, input_file_block_start, input_file_block_length - HadoopRDD") { withTempPath { dir => val data = sparkContext.parallelize((0 to 10).map(_.toString)).toDF() data.write.text(dir.getCanonicalPath) val df = spark.sparkContext.textFile(dir.getCanonicalPath).toDF() // Test the 3 expressions when reading from files val q = df.select( input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()")) val firstRow = q.head() assert(firstRow.getString(0).contains(dir.toURI.getPath)) assert(firstRow.getLong(1) == 0) assert(firstRow.getLong(2) > 0) // Now read directly from the original RDD without going through any files to make sure // we are returning empty string, -1, and -1. 
checkAnswer( data.select( input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()") ).limit(1), Row("", -1L, -1L)) } } test("input_file_name, input_file_block_start, input_file_block_length - NewHadoopRDD") { withTempPath { dir => val data = sparkContext.parallelize((0 to 10).map(_.toString)).toDF() data.write.text(dir.getCanonicalPath) val rdd = spark.sparkContext.newAPIHadoopFile( dir.getCanonicalPath, classOf[NewTextInputFormat], classOf[LongWritable], classOf[Text]) val df = rdd.map(pair => pair._2.toString).toDF() // Test the 3 expressions when reading from files val q = df.select( input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()")) val firstRow = q.head() assert(firstRow.getString(0).contains(dir.toURI.getPath)) assert(firstRow.getLong(1) == 0) assert(firstRow.getLong(2) > 0) // Now read directly from the original RDD without going through any files to make sure // we are returning empty string, -1, and -1. checkAnswer( data.select( input_file_name(), expr("input_file_block_start()"), expr("input_file_block_length()") ).limit(1), Row("", -1L, -1L)) } } test("columns can be compared") { assert($"key".desc == $"key".desc) assert($"key".desc != $"key".asc) } test("alias with metadata") { val metadata = new MetadataBuilder() .putString("originName", "value") .build() val schema = testData .select($"*", col("value").as("abc", metadata)) .schema assert(schema("value").metadata === Metadata.empty) assert(schema("abc").metadata === metadata) } test("rand") { val randCol = testData.select($"key", rand(5L).as("rand")) randCol.columns.length should be (2) val rows = randCol.collect() rows.foreach { row => assert(row.getDouble(1) <= 1.0) assert(row.getDouble(1) >= 0.0) } def checkNumProjects(df: DataFrame, expectedNumProjects: Int): Unit = { val projects = df.queryExecution.sparkPlan.collect { case tungstenProject: ProjectExec => tungstenProject } assert(projects.size === expectedNumProjects) } // We first 
create a plan with two Projects. // Project [rand + 1 AS rand1, rand - 1 AS rand2] // Project [key, (Rand 5 + 1) AS rand] // LogicalRDD [key, value] // Because Rand function is not deterministic, the column rand is not deterministic. // So, in the optimizer, we will not collapse Project [rand + 1 AS rand1, rand - 1 AS rand2] // and Project [key, Rand 5 AS rand]. The final plan still has two Projects. val dfWithTwoProjects = testData .select($"key", (rand(5L) + 1).as("rand")) .select(($"rand" + 1).as("rand1"), ($"rand" - 1).as("rand2")) checkNumProjects(dfWithTwoProjects, 2) // Now, we add one more project rand1 - rand2 on top of the query plan. // Since rand1 and rand2 are deterministic (they basically apply +/- to the generated // rand value), we can collapse rand1 - rand2 to the Project generating rand1 and rand2. // So, the plan will be optimized from ... // Project [(rand1 - rand2) AS (rand1 - rand2)] // Project [rand + 1 AS rand1, rand - 1 AS rand2] // Project [key, (Rand 5 + 1) AS rand] // LogicalRDD [key, value] // to ... 
// Project [((rand + 1 AS rand1) - (rand - 1 AS rand2)) AS (rand1 - rand2)] // Project [key, Rand 5 AS rand] // LogicalRDD [key, value] val dfWithThreeProjects = dfWithTwoProjects.select($"rand1" - $"rand2") checkNumProjects(dfWithThreeProjects, 2) dfWithThreeProjects.collect().foreach { row => assert(row.getDouble(0) === 2.0 +- 0.0001) } } test("randn") { val randCol = testData.select($"key", randn(5L).as("rand")) randCol.columns.length should be (2) val rows = randCol.collect() rows.foreach { row => assert(row.getDouble(1) <= 4.0) assert(row.getDouble(1) >= -4.0) } } test("bitwiseAND") { checkAnswer( testData2.select($"a".bitwiseAND(75)), testData2.collect().toSeq.map(r => Row(r.getInt(0) & 75))) checkAnswer( testData2.select($"a".bitwiseAND($"b").bitwiseAND(22)), testData2.collect().toSeq.map(r => Row(r.getInt(0) & r.getInt(1) & 22))) } test("bitwiseOR") { checkAnswer( testData2.select($"a".bitwiseOR(170)), testData2.collect().toSeq.map(r => Row(r.getInt(0) | 170))) checkAnswer( testData2.select($"a".bitwiseOR($"b").bitwiseOR(42)), testData2.collect().toSeq.map(r => Row(r.getInt(0) | r.getInt(1) | 42))) } test("bitwiseXOR") { checkAnswer( testData2.select($"a".bitwiseXOR(112)), testData2.collect().toSeq.map(r => Row(r.getInt(0) ^ 112))) checkAnswer( testData2.select($"a".bitwiseXOR($"b").bitwiseXOR(39)), testData2.collect().toSeq.map(r => Row(r.getInt(0) ^ r.getInt(1) ^ 39))) } test("typedLit") { val df = Seq(Tuple1(0)).toDF("a") // Only check the types `lit` cannot handle checkAnswer( df.select(typedLit(Seq(1, 2, 3))), Row(Seq(1, 2, 3)) :: Nil) checkAnswer( df.select(typedLit(Map("a" -> 1, "b" -> 2))), Row(Map("a" -> 1, "b" -> 2)) :: Nil) checkAnswer( df.select(typedLit(("a", 2, 1.0))), Row(Row("a", 2, 1.0)) :: Nil) } test("SPARK-31563: sql of InSet for UTF8String collection") { val inSet = InSet(Literal("a"), Set("a", "b").map(UTF8String.fromString)) assert(inSet.sql === "('a' IN ('a', 'b'))") } def checkAnswer( df: => DataFrame, expectedAnswer: Seq[Row], 
expectedSchema: StructType): Unit = { checkAnswer(df, expectedAnswer) assert(df.schema == expectedSchema) } private lazy val structType = StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = true), StructField("c", IntegerType, nullable = false))) private lazy val structLevel1: DataFrame = spark.createDataFrame( sparkContext.parallelize(Row(Row(1, null, 3)) :: Nil), StructType(Seq(StructField("a", structType, nullable = false)))) private lazy val nullableStructLevel1: DataFrame = spark.createDataFrame( sparkContext.parallelize(Row(null) :: Row(Row(1, null, 3)) :: Nil), StructType(Seq(StructField("a", structType, nullable = true)))) private lazy val structLevel2: DataFrame = spark.createDataFrame( sparkContext.parallelize(Row(Row(Row(1, null, 3))) :: Nil), StructType(Seq( StructField("a", StructType(Seq( StructField("a", structType, nullable = false))), nullable = false)))) private lazy val nullableStructLevel2: DataFrame = spark.createDataFrame( sparkContext.parallelize(Row(null) :: Row(Row(null)) :: Row(Row(Row(1, null, 3))) :: Nil), StructType(Seq( StructField("a", StructType(Seq( StructField("a", structType, nullable = true))), nullable = true)))) private lazy val structLevel3: DataFrame = spark.createDataFrame( sparkContext.parallelize(Row(Row(Row(Row(1, null, 3)))) :: Nil), StructType(Seq( StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", structType, nullable = false))), nullable = false))), nullable = false)))) test("withField should throw an exception if called on a non-StructType column") { intercept[AnalysisException] { testData.withColumn("key", $"key".withField("a", lit(2))) }.getMessage should include("struct argument should be struct type, got: int") } test("withField should throw an exception if either fieldName or col argument are null") { intercept[IllegalArgumentException] { structLevel1.withColumn("a", $"a".withField(null, lit(2))) }.getMessage should 
include("fieldName cannot be null") intercept[IllegalArgumentException] { structLevel1.withColumn("a", $"a".withField("b", null)) }.getMessage should include("col cannot be null") intercept[IllegalArgumentException] { structLevel1.withColumn("a", $"a".withField(null, null)) }.getMessage should include("fieldName cannot be null") } test("withField should throw an exception if any intermediate structs don't exist") { intercept[AnalysisException] { structLevel2.withColumn("a", 'a.withField("x.b", lit(2))) }.getMessage should include("No such struct field x in a") intercept[AnalysisException] { structLevel3.withColumn("a", 'a.withField("a.x.b", lit(2))) }.getMessage should include("No such struct field x in a") } test("withField should throw an exception if intermediate field is not a struct") { intercept[AnalysisException] { structLevel1.withColumn("a", 'a.withField("b.a", lit(2))) }.getMessage should include("struct argument should be struct type, got: int") } test("withField should throw an exception if intermediate field reference is ambiguous") { intercept[AnalysisException] { val structLevel2: DataFrame = spark.createDataFrame( sparkContext.parallelize(Row(Row(Row(1, null, 3), 4)) :: Nil), StructType(Seq( StructField("a", StructType(Seq( StructField("a", structType, nullable = false), StructField("a", structType, nullable = false))), nullable = false)))) structLevel2.withColumn("a", 'a.withField("a.b", lit(2))) }.getMessage should include("Ambiguous reference to fields") } test("withField should add field with no name") { checkAnswer( structLevel1.withColumn("a", $"a".withField("", lit(4))), Row(Row(1, null, 3, 4)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = true), StructField("c", IntegerType, nullable = false), StructField("", IntegerType, nullable = false))), nullable = false)))) } test("withField should add field to struct") { checkAnswer( 
structLevel1.withColumn("a", 'a.withField("d", lit(4))), Row(Row(1, null, 3, 4)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = true), StructField("c", IntegerType, nullable = false), StructField("d", IntegerType, nullable = false))), nullable = false)))) } test("withField should add field to nullable struct") { checkAnswer( nullableStructLevel1.withColumn("a", $"a".withField("d", lit(4))), Row(null) :: Row(Row(1, null, 3, 4)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = true), StructField("c", IntegerType, nullable = false), StructField("d", IntegerType, nullable = false))), nullable = true)))) } test("withField should add field to nested nullable struct") { checkAnswer( nullableStructLevel2.withColumn("a", $"a".withField("a.d", lit(4))), Row(null) :: Row(Row(null)) :: Row(Row(Row(1, null, 3, 4))) :: Nil, StructType( Seq(StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = true), StructField("c", IntegerType, nullable = false), StructField("d", IntegerType, nullable = false))), nullable = true))), nullable = true)))) } test("withField should add null field to struct") { checkAnswer( structLevel1.withColumn("a", 'a.withField("d", lit(null).cast(IntegerType))), Row(Row(1, null, 3, null)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = true), StructField("c", IntegerType, nullable = false), StructField("d", IntegerType, nullable = true))), nullable = false)))) } test("withField should add multiple fields to struct") { checkAnswer( structLevel1.withColumn("a", 'a.withField("d", lit(4)).withField("e", lit(5))), Row(Row(1, null, 3, 4, 5)) :: Nil, StructType(Seq( 
StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = true), StructField("c", IntegerType, nullable = false), StructField("d", IntegerType, nullable = false), StructField("e", IntegerType, nullable = false))), nullable = false)))) } test("withField should add multiple fields to nullable struct") { checkAnswer( nullableStructLevel1.withColumn("a", 'a.withField("d", lit(4)).withField("e", lit(5))), Row(null) :: Row(Row(1, null, 3, 4, 5)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = true), StructField("c", IntegerType, nullable = false), StructField("d", IntegerType, nullable = false), StructField("e", IntegerType, nullable = false))), nullable = true)))) } test("withField should add field to nested struct") { Seq( structLevel2.withColumn("a", 'a.withField("a.d", lit(4))), structLevel2.withColumn("a", 'a.withField("a", $"a.a".withField("d", lit(4)))) ).foreach { df => checkAnswer( df, Row(Row(Row(1, null, 3, 4))) :: Nil, StructType( Seq(StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = true), StructField("c", IntegerType, nullable = false), StructField("d", IntegerType, nullable = false))), nullable = false))), nullable = false)))) } } test("withField should add multiple fields to nested struct") { Seq( col("a").withField("a", $"a.a".withField("d", lit(4)).withField("e", lit(5))), col("a").withField("a.d", lit(4)).withField("a.e", lit(5)) ).foreach { column => checkAnswer( structLevel2.select(column.as("a")), Row(Row(Row(1, null, 3, 4, 5))) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = true), StructField("c", IntegerType, nullable = false), StructField("d", 
IntegerType, nullable = false), StructField("e", IntegerType, nullable = false))), nullable = false))), nullable = false)))) } } test("withField should add multiple fields to nested nullable struct") { Seq( col("a").withField("a", $"a.a".withField("d", lit(4)).withField("e", lit(5))), col("a").withField("a.d", lit(4)).withField("a.e", lit(5)) ).foreach { column => checkAnswer( nullableStructLevel2.select(column.as("a")), Row(null) :: Row(Row(null)) :: Row(Row(Row(1, null, 3, 4, 5))) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = true), StructField("c", IntegerType, nullable = false), StructField("d", IntegerType, nullable = false), StructField("e", IntegerType, nullable = false))), nullable = true))), nullable = true)))) } } test("withField should add field to deeply nested struct") { checkAnswer( structLevel3.withColumn("a", 'a.withField("a.a.d", lit(4))), Row(Row(Row(Row(1, null, 3, 4)))) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = true), StructField("c", IntegerType, nullable = false), StructField("d", IntegerType, nullable = false))), nullable = false))), nullable = false))), nullable = false)))) } test("withField should replace field in struct") { checkAnswer( structLevel1.withColumn("a", 'a.withField("b", lit(2))), Row(Row(1, 2, 3)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false), StructField("c", IntegerType, nullable = false))), nullable = false)))) } test("withField should replace field in nullable struct") { checkAnswer( nullableStructLevel1.withColumn("a", 'a.withField("b", lit("foo"))), Row(null) :: Row(Row(1, "foo", 3)) :: Nil, StructType(Seq( 
// Tests for Column.withField replacing existing fields: in flat, nullable, and
// nested structs; replacing with a null value (which flips the field's
// nullability to true in the result schema); replacing several fields in one
// chain; and the guarantee that replacement applies to ALL fields sharing a
// name and that chained replacements apply left-to-right. Each checkAnswer
// verifies both the row values and the exact nullability-aware result schema.
StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", StringType, nullable = false), StructField("c", IntegerType, nullable = false))), nullable = true)))) } test("withField should replace field in nested nullable struct") { checkAnswer( nullableStructLevel2.withColumn("a", $"a".withField("a.b", lit("foo"))), Row(null) :: Row(Row(null)) :: Row(Row(Row(1, "foo", 3))) :: Nil, StructType( Seq(StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", StringType, nullable = false), StructField("c", IntegerType, nullable = false))), nullable = true))), nullable = true)))) } test("withField should replace field with null value in struct") { checkAnswer( structLevel1.withColumn("a", 'a.withField("c", lit(null).cast(IntegerType))), Row(Row(1, null, null)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = true), StructField("c", IntegerType, nullable = true))), nullable = false)))) } test("withField should replace multiple fields in struct") { checkAnswer( structLevel1.withColumn("a", 'a.withField("a", lit(10)).withField("b", lit(20))), Row(Row(10, 20, 3)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false), StructField("c", IntegerType, nullable = false))), nullable = false)))) } test("withField should replace multiple fields in nullable struct") { checkAnswer( nullableStructLevel1.withColumn("a", 'a.withField("a", lit(10)).withField("b", lit(20))), Row(null) :: Row(Row(10, 20, 3)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false), StructField("c", IntegerType, nullable = false))), nullable = true)))) } test("withField should replace field in nested struct") { 
// Both spellings of a nested replacement — the dotted path "a.b" and an
// explicit nested withField on the inner struct — must yield identical results.
Seq( structLevel2.withColumn("a", $"a".withField("a.b", lit(2))), structLevel2.withColumn("a", 'a.withField("a", $"a.a".withField("b", lit(2)))) ).foreach { df => checkAnswer( df, Row(Row(Row(1, 2, 3))) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false), StructField("c", IntegerType, nullable = false))), nullable = false))), nullable = false)))) } } test("withField should replace multiple fields in nested struct") { Seq( col("a").withField("a", $"a.a".withField("a", lit(10)).withField("b", lit(20))), col("a").withField("a.a", lit(10)).withField("a.b", lit(20)) ).foreach { column => checkAnswer( structLevel2.select(column.as("a")), Row(Row(Row(10, 20, 3))) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false), StructField("c", IntegerType, nullable = false))), nullable = false))), nullable = false)))) } } test("withField should replace multiple fields in nested nullable struct") { Seq( col("a").withField("a", $"a.a".withField("a", lit(10)).withField("b", lit(20))), col("a").withField("a.a", lit(10)).withField("a.b", lit(20)) ).foreach { column => checkAnswer( nullableStructLevel2.select(column.as("a")), Row(null) :: Row(Row(null)) :: Row(Row(Row(10, 20, 3))) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false), StructField("c", IntegerType, nullable = false))), nullable = true))), nullable = true)))) } } test("withField should replace field in deeply nested struct") { checkAnswer( structLevel3.withColumn("a", $"a".withField("a.a.b", lit(2))), Row(Row(Row(Row(1, 2, 3)))) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", StructType(Seq( 
StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false), StructField("c", IntegerType, nullable = false))), nullable = false))), nullable = false))), nullable = false)))) } test("withField should replace all fields with given name in struct") { val structLevel1 = spark.createDataFrame( sparkContext.parallelize(Row(Row(1, 2, 3)) :: Nil), StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false))), nullable = false)))) checkAnswer( structLevel1.withColumn("a", 'a.withField("b", lit(100))), Row(Row(1, 100, 100)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false))), nullable = false)))) } test("withField should replace fields in struct in given order") { checkAnswer( structLevel1.withColumn("a", 'a.withField("b", lit(2)).withField("b", lit(20))), Row(Row(1, 20, 3)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false), StructField("c", IntegerType, nullable = false))), nullable = false)))) } test("withField should add field and then replace same field in struct") { checkAnswer( structLevel1.withColumn("a", 'a.withField("d", lit(4)).withField("d", lit(5))), Row(Row(1, null, 3, 5)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = true), StructField("c", IntegerType, nullable = false), StructField("d", IntegerType, nullable = false))), nullable = false)))) } test("withField should handle fields with dots in their name if correctly quoted") { val df: DataFrame = spark.createDataFrame( 
// Tests for field-name resolution: literal dots inside field names require
// backtick quoting (an unquoted dotted path is resolved as nesting and fails);
// and the effect of spark.sql.caseSensitive on withField — case-insensitive
// mode matches/replaces despite casing differences (result schema keeps the
// REQUESTED casing), while case-sensitive mode either adds a new field or
// throws for nested paths. Ends with the user-facing doc examples for withField.
sparkContext.parallelize(Row(Row(Row(1, null, 3))) :: Nil), StructType(Seq( StructField("a", StructType(Seq( StructField("a.b", StructType(Seq( StructField("c.d", IntegerType, nullable = false), StructField("e.f", IntegerType, nullable = true), StructField("g.h", IntegerType, nullable = false))), nullable = false))), nullable = false)))) checkAnswer( df.withColumn("a", 'a.withField("`a.b`.`e.f`", lit(2))), Row(Row(Row(1, 2, 3))) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a.b", StructType(Seq( StructField("c.d", IntegerType, nullable = false), StructField("e.f", IntegerType, nullable = false), StructField("g.h", IntegerType, nullable = false))), nullable = false))), nullable = false)))) intercept[AnalysisException] { df.withColumn("a", 'a.withField("a.b.e.f", lit(2))) }.getMessage should include("No such struct field a in a.b") } private lazy val mixedCaseStructLevel1: DataFrame = spark.createDataFrame( sparkContext.parallelize(Row(Row(1, 1)) :: Nil), StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("B", IntegerType, nullable = false))), nullable = false)))) test("withField should replace field in struct even if casing is different") { withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") { checkAnswer( mixedCaseStructLevel1.withColumn("a", 'a.withField("A", lit(2))), Row(Row(2, 1)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("A", IntegerType, nullable = false), StructField("B", IntegerType, nullable = false))), nullable = false)))) checkAnswer( mixedCaseStructLevel1.withColumn("a", 'a.withField("b", lit(2))), Row(Row(1, 2)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false))), nullable = false)))) } } test("withField should add field to struct because casing is different") { withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { checkAnswer( 
// In case-sensitive mode a differently-cased name does NOT match, so the
// field is appended instead of replaced.
mixedCaseStructLevel1.withColumn("a", 'a.withField("A", lit(2))), Row(Row(1, 1, 2)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("B", IntegerType, nullable = false), StructField("A", IntegerType, nullable = false))), nullable = false)))) checkAnswer( mixedCaseStructLevel1.withColumn("a", 'a.withField("b", lit(2))), Row(Row(1, 1, 2)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("B", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false))), nullable = false)))) } } private lazy val mixedCaseStructLevel2: DataFrame = spark.createDataFrame( sparkContext.parallelize(Row(Row(Row(1, 1), Row(1, 1))) :: Nil), StructType(Seq( StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false))), nullable = false), StructField("B", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false))), nullable = false))), nullable = false)))) test("withField should replace nested field in struct even if casing is different") { withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") { checkAnswer( mixedCaseStructLevel2.withColumn("a", 'a.withField("A.a", lit(2))), Row(Row(Row(2, 1), Row(1, 1))) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("A", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false))), nullable = false), StructField("B", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false))), nullable = false))), nullable = false)))) checkAnswer( mixedCaseStructLevel2.withColumn("a", 'a.withField("b.a", lit(2))), Row(Row(Row(1, 1), Row(2, 1))) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", StructType(Seq( 
StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false))), nullable = false), StructField("b", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false))), nullable = false))), nullable = false)))) } } test("withField should throw an exception because casing is different") { withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { intercept[AnalysisException] { mixedCaseStructLevel2.withColumn("a", 'a.withField("A.a", lit(2))) }.getMessage should include("No such struct field A in a, B") intercept[AnalysisException] { mixedCaseStructLevel2.withColumn("a", 'a.withField("b.a", lit(2))) }.getMessage should include("No such struct field b in a, B") } } test("withField user-facing examples") { checkAnswer( sql("SELECT named_struct('a', 1, 'b', 2) struct_col") .select($"struct_col".withField("c", lit(3))), Row(Row(1, 2, 3))) checkAnswer( sql("SELECT named_struct('a', 1, 'b', 2) struct_col") .select($"struct_col".withField("b", lit(3))), Row(Row(1, 3))) checkAnswer( sql("SELECT CAST(NULL AS struct<a:int,b:int>) struct_col") .select($"struct_col".withField("c", lit(3))), Row(null)) checkAnswer( sql("SELECT named_struct('a', 1, 'b', 2, 'b', 3) struct_col") .select($"struct_col".withField("b", lit(100))), Row(Row(1, 100, 100))) checkAnswer( sql("SELECT named_struct('a', named_struct('a', 1, 'b', 2)) struct_col") .select($"struct_col".withField("a.c", lit(3))), Row(Row(Row(1, 2, 3)))) intercept[AnalysisException] { sql("SELECT named_struct('a', named_struct('b', 1), 'a', named_struct('c', 2)) struct_col") .select($"struct_col".withField("a.c", lit(3))) }.getMessage should include("Ambiguous reference to fields") checkAnswer( sql("SELECT named_struct('a', named_struct('a', 1, 'b', 2)) struct_col") .select($"struct_col".withField("a.c", lit(3)).withField("a.d", lit(4))), Row(Row(Row(1, 2, 3, 4)))) checkAnswer( sql("SELECT named_struct('a', named_struct('a', 1, 'b', 2)) struct_col") 
// SPARK-32641 regression tests: getField applied after withField must return
// the correct value when the input struct is non-null, and null whenever the
// ORIGINAL struct row was null — for newly added fields, replaced fields, and
// untouched fields alike (result nullability follows the input column's
// nullability). SPARK-35213: chained withField calls on fresh struct() columns
// must produce the schema in insertion order, and optimized/merged withField
// operations must not reorder nested struct fields. Then the first dropFields
// error cases: non-struct column, null field name, missing intermediate field.
.select($"struct_col".withField("a", $"struct_col.a".withField("c", lit(3)).withField("d", lit(4)))), Row(Row(Row(1, 2, 3, 4)))) } test("SPARK-32641: extracting field from non-null struct column after withField should return " + "field value") { // extract newly added field checkAnswer( structLevel1.withColumn("a", $"a".withField("d", lit(4)).getField("d")), Row(4) :: Nil, StructType(Seq(StructField("a", IntegerType, nullable = false)))) // extract newly replaced field checkAnswer( structLevel1.withColumn("a", $"a".withField("a", lit(4)).getField("a")), Row(4) :: Nil, StructType(Seq(StructField("a", IntegerType, nullable = false)))) // add new field, extract another field from original struct checkAnswer( structLevel1.withColumn("a", $"a".withField("d", lit(4)).getField("c")), Row(3):: Nil, StructType(Seq(StructField("a", IntegerType, nullable = false)))) // replace field, extract another field from original struct checkAnswer( structLevel1.withColumn("a", $"a".withField("a", lit(4)).getField("c")), Row(3):: Nil, StructType(Seq(StructField("a", IntegerType, nullable = false)))) } test("SPARK-32641: extracting field from null struct column after withField should return " + "null if the original struct was null") { val nullStructLevel1 = spark.createDataFrame( sparkContext.parallelize(Row(null) :: Nil), StructType(Seq(StructField("a", structType, nullable = true)))) // extract newly added field checkAnswer( nullStructLevel1.withColumn("a", $"a".withField("d", lit(4)).getField("d")), Row(null) :: Nil, StructType(Seq(StructField("a", IntegerType, nullable = true)))) // extract newly replaced field checkAnswer( nullStructLevel1.withColumn("a", $"a".withField("a", lit(4)).getField("a")), Row(null):: Nil, StructType(Seq(StructField("a", IntegerType, nullable = true)))) // add new field, extract another field from original struct checkAnswer( nullStructLevel1.withColumn("a", $"a".withField("d", lit(4)).getField("c")), Row(null):: Nil, StructType(Seq(StructField("a", 
IntegerType, nullable = true)))) // replace field, extract another field from original struct checkAnswer( nullStructLevel1.withColumn("a", $"a".withField("a", lit(4)).getField("c")), Row(null):: Nil, StructType(Seq(StructField("a", IntegerType, nullable = true)))) } test("SPARK-32641: extracting field from nullable struct column which contains both null and " + "non-null values after withField should return null if the original struct was null") { val df = spark.createDataFrame( sparkContext.parallelize(Row(Row(1, null, 3)) :: Row(null) :: Nil), StructType(Seq(StructField("a", structType, nullable = true)))) // extract newly added field checkAnswer( df.withColumn("a", $"a".withField("d", lit(4)).getField("d")), Row(4) :: Row(null) :: Nil, StructType(Seq(StructField("a", IntegerType, nullable = true)))) // extract newly replaced field checkAnswer( df.withColumn("a", $"a".withField("a", lit(4)).getField("a")), Row(4) :: Row(null):: Nil, StructType(Seq(StructField("a", IntegerType, nullable = true)))) // add new field, extract another field from original struct checkAnswer( df.withColumn("a", $"a".withField("d", lit(4)).getField("c")), Row(3) :: Row(null):: Nil, StructType(Seq(StructField("a", IntegerType, nullable = true)))) // replace field, extract another field from original struct checkAnswer( df.withColumn("a", $"a".withField("a", lit(4)).getField("c")), Row(3) :: Row(null):: Nil, StructType(Seq(StructField("a", IntegerType, nullable = true)))) } test("SPARK-35213: chained withField operations should have correct schema for new columns") { val df = spark.createDataFrame( sparkContext.parallelize(Row(null) :: Nil), StructType(Seq(StructField("data", NullType)))) checkAnswer( df.withColumn("data", struct() .withField("a", struct()) .withField("b", struct()) .withField("a.aa", lit("aa1")) .withField("b.ba", lit("ba1")) .withField("a.ab", lit("ab1"))), Row(Row(Row("aa1", "ab1"), Row("ba1"))) :: Nil, StructType(Seq( StructField("data", StructType(Seq( 
StructField("a", StructType(Seq( StructField("aa", StringType, nullable = false), StructField("ab", StringType, nullable = false) )), nullable = false), StructField("b", StructType(Seq( StructField("ba", StringType, nullable = false) )), nullable = false) )), nullable = false) )) ) } test("SPARK-35213: optimized withField operations should maintain correct nested struct " + "ordering") { val df = spark.createDataFrame( sparkContext.parallelize(Row(null) :: Nil), StructType(Seq(StructField("data", NullType)))) checkAnswer( df.withColumn("data", struct() .withField("a", struct().withField("aa", lit("aa1"))) .withField("b", struct().withField("ba", lit("ba1"))) ) .withColumn("data", col("data").withField("b.bb", lit("bb1"))) .withColumn("data", col("data").withField("a.ab", lit("ab1"))), Row(Row(Row("aa1", "ab1"), Row("ba1", "bb1"))) :: Nil, StructType(Seq( StructField("data", StructType(Seq( StructField("a", StructType(Seq( StructField("aa", StringType, nullable = false), StructField("ab", StringType, nullable = false) )), nullable = false), StructField("b", StructType(Seq( StructField("ba", StringType, nullable = false), StructField("bb", StringType, nullable = false) )), nullable = false) )), nullable = false) )) ) } test("dropFields should throw an exception if called on a non-StructType column") { intercept[AnalysisException] { testData.withColumn("key", $"key".dropFields("a")) }.getMessage should include("struct argument should be struct type, got: int") } test("dropFields should throw an exception if fieldName argument is null") { intercept[IllegalArgumentException] { structLevel1.withColumn("a", $"a".dropFields(null)) }.getMessage should include("fieldName cannot be null") } test("dropFields should throw an exception if any intermediate structs don't exist") { intercept[AnalysisException] { structLevel2.withColumn("a", 'a.dropFields("x.b")) }.getMessage should include("No such struct field x in a") intercept[AnalysisException] { structLevel3.withColumn("a", 
// dropFields behavior tests: error cases (missing intermediate field,
// non-struct intermediate, ambiguous duplicate reference), dropping single and
// multiple fields in flat/nullable/nested/deeply-nested structs, refusing to
// drop ALL fields of a struct, dropping an empty-named field, dropping every
// field that shares a name, and case-sensitivity behavior mirroring withField
// (case-insensitive matches despite casing; case-sensitive leaves the struct
// untouched or throws for nested paths).
'a.dropFields("a.x.b")) }.getMessage should include("No such struct field x in a") } test("dropFields should throw an exception if intermediate field is not a struct") { intercept[AnalysisException] { structLevel1.withColumn("a", 'a.dropFields("b.a")) }.getMessage should include("struct argument should be struct type, got: int") } test("dropFields should throw an exception if intermediate field reference is ambiguous") { intercept[AnalysisException] { val structLevel2: DataFrame = spark.createDataFrame( sparkContext.parallelize(Row(Row(Row(1, null, 3), 4)) :: Nil), StructType(Seq( StructField("a", StructType(Seq( StructField("a", structType, nullable = false), StructField("a", structType, nullable = false))), nullable = false)))) structLevel2.withColumn("a", 'a.dropFields("a.b")) }.getMessage should include("Ambiguous reference to fields") } test("dropFields should drop field in struct") { checkAnswer( structLevel1.withColumn("a", 'a.dropFields("b")), Row(Row(1, 3)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("c", IntegerType, nullable = false))), nullable = false)))) } test("dropFields should drop field in nullable struct") { checkAnswer( nullableStructLevel1.withColumn("a", $"a".dropFields("b")), Row(null) :: Row(Row(1, 3)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("c", IntegerType, nullable = false))), nullable = true)))) } test("dropFields should drop multiple fields in struct") { Seq( structLevel1.withColumn("a", $"a".dropFields("b", "c")), structLevel1.withColumn("a", 'a.dropFields("b").dropFields("c")) ).foreach { df => checkAnswer( df, Row(Row(1)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false))), nullable = false)))) } } test("dropFields should throw an exception if no fields will be left in struct") { intercept[AnalysisException] { 
structLevel1.withColumn("a", 'a.dropFields("a", "b", "c")) }.getMessage should include("cannot drop all fields in struct") } test("dropFields should drop field with no name in struct") { val structType = StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("", IntegerType, nullable = false))) val structLevel1: DataFrame = spark.createDataFrame( sparkContext.parallelize(Row(Row(1, 2)) :: Nil), StructType(Seq(StructField("a", structType, nullable = false)))) checkAnswer( structLevel1.withColumn("a", $"a".dropFields("")), Row(Row(1)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false))), nullable = false)))) } test("dropFields should drop field in nested struct") { checkAnswer( structLevel2.withColumn("a", 'a.dropFields("a.b")), Row(Row(Row(1, 3))) :: Nil, StructType( Seq(StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("c", IntegerType, nullable = false))), nullable = false))), nullable = false)))) } test("dropFields should drop multiple fields in nested struct") { checkAnswer( structLevel2.withColumn("a", 'a.dropFields("a.b", "a.c")), Row(Row(Row(1))) :: Nil, StructType( Seq(StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false))), nullable = false))), nullable = false)))) } test("dropFields should drop field in nested nullable struct") { checkAnswer( nullableStructLevel2.withColumn("a", $"a".dropFields("a.b")), Row(null) :: Row(Row(null)) :: Row(Row(Row(1, 3))) :: Nil, StructType( Seq(StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("c", IntegerType, nullable = false))), nullable = true))), nullable = true)))) } test("dropFields should drop multiple fields in nested nullable struct") { checkAnswer( nullableStructLevel2.withColumn("a", $"a".dropFields("a.b", "a.c")), Row(null) :: 
Row(Row(null)) :: Row(Row(Row(1))) :: Nil, StructType( Seq(StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false))), nullable = true))), nullable = true)))) } test("dropFields should drop field in deeply nested struct") { checkAnswer( structLevel3.withColumn("a", 'a.dropFields("a.a.b")), Row(Row(Row(Row(1, 3)))) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("c", IntegerType, nullable = false))), nullable = false))), nullable = false))), nullable = false)))) } test("dropFields should drop all fields with given name in struct") { val structLevel1 = spark.createDataFrame( sparkContext.parallelize(Row(Row(1, 2, 3)) :: Nil), StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false))), nullable = false)))) checkAnswer( structLevel1.withColumn("a", 'a.dropFields("b")), Row(Row(1)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false))), nullable = false)))) } test("dropFields should drop field in struct even if casing is different") { withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") { checkAnswer( mixedCaseStructLevel1.withColumn("a", 'a.dropFields("A")), Row(Row(1)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("B", IntegerType, nullable = false))), nullable = false)))) checkAnswer( mixedCaseStructLevel1.withColumn("a", 'a.dropFields("b")), Row(Row(1)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false))), nullable = false)))) } } test("dropFields should not drop field in struct because casing is different") { withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { checkAnswer( mixedCaseStructLevel1.withColumn("a", 
'a.dropFields("A")), Row(Row(1, 1)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("B", IntegerType, nullable = false))), nullable = false)))) checkAnswer( mixedCaseStructLevel1.withColumn("a", 'a.dropFields("b")), Row(Row(1, 1)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("B", IntegerType, nullable = false))), nullable = false)))) } } test("dropFields should drop nested field in struct even if casing is different") { withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") { checkAnswer( mixedCaseStructLevel2.withColumn("a", 'a.dropFields("A.a")), Row(Row(Row(1), Row(1, 1))) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("A", StructType(Seq( StructField("b", IntegerType, nullable = false))), nullable = false), StructField("B", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false))), nullable = false))), nullable = false)))) checkAnswer( mixedCaseStructLevel2.withColumn("a", 'a.dropFields("b.a")), Row(Row(Row(1, 1), Row(1))) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false))), nullable = false), StructField("b", StructType(Seq( StructField("b", IntegerType, nullable = false))), nullable = false))), nullable = false)))) } } test("dropFields should throw an exception because casing is different") { withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { intercept[AnalysisException] { mixedCaseStructLevel2.withColumn("a", 'a.dropFields("A.a")) }.getMessage should include("No such struct field A in a, B") intercept[AnalysisException] { mixedCaseStructLevel2.withColumn("a", 'a.dropFields("b.a")) }.getMessage should include("No such struct field b in a, B") } } test("dropFields should drop only fields that exist") { 
// dropFields silently ignores names that do not exist (only existing fields
// are removed), including in a single multi-level call. Then the user-facing
// doc examples for dropFields, and an exhaustive matrix of
// dropFields/withField/getField combinations driven by a local `check` helper
// that runs each combination against non-null, all-null, and mixed
// null/non-null struct inputs and verifies both the value and the inferred
// result nullability (nullable iff the expected value is absent or the input
// struct can be null).
checkAnswer( structLevel1.withColumn("a", 'a.dropFields("d")), Row(Row(1, null, 3)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = true), StructField("c", IntegerType, nullable = false))), nullable = false)))) checkAnswer( structLevel1.withColumn("a", 'a.dropFields("b", "d")), Row(Row(1, 3)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("c", IntegerType, nullable = false))), nullable = false)))) checkAnswer( structLevel2.withColumn("a", $"a".dropFields("a.b", "a.d")), Row(Row(Row(1, 3))) :: Nil, StructType( Seq(StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("c", IntegerType, nullable = false))), nullable = false))), nullable = false)))) } test("dropFields should drop multiple fields at arbitrary levels of nesting in a single call") { val df: DataFrame = spark.createDataFrame( sparkContext.parallelize(Row(Row(Row(1, null, 3), 4)) :: Nil), StructType(Seq( StructField("a", StructType(Seq( StructField("a", structType, nullable = false), StructField("b", IntegerType, nullable = false))), nullable = false)))) checkAnswer( df.withColumn("a", $"a".dropFields("a.b", "b")), Row(Row(Row(1, 3))) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("c", IntegerType, nullable = false))), nullable = false))), nullable = false)))) } test("dropFields user-facing examples") { checkAnswer( sql("SELECT named_struct('a', 1, 'b', 2) struct_col") .select($"struct_col".dropFields("b")), Row(Row(1))) checkAnswer( sql("SELECT named_struct('a', 1, 'b', 2) struct_col") .select($"struct_col".dropFields("c")), Row(Row(1, 2))) checkAnswer( sql("SELECT named_struct('a', 1, 'b', 2, 'c', 3) struct_col") .select($"struct_col".dropFields("b", "c")), 
Row(Row(1))) intercept[AnalysisException] { sql("SELECT named_struct('a', 1, 'b', 2) struct_col") .select($"struct_col".dropFields("a", "b")) }.getMessage should include("cannot drop all fields in struct") checkAnswer( sql("SELECT CAST(NULL AS struct<a:int,b:int>) struct_col") .select($"struct_col".dropFields("b")), Row(null)) checkAnswer( sql("SELECT named_struct('a', 1, 'b', 2, 'b', 3) struct_col") .select($"struct_col".dropFields("b")), Row(Row(1))) checkAnswer( sql("SELECT named_struct('a', named_struct('a', 1, 'b', 2)) struct_col") .select($"struct_col".dropFields("a.b")), Row(Row(Row(1)))) intercept[AnalysisException] { sql("SELECT named_struct('a', named_struct('b', 1), 'a', named_struct('c', 2)) struct_col") .select($"struct_col".dropFields("a.c")) }.getMessage should include("Ambiguous reference to fields") checkAnswer( sql("SELECT named_struct('a', named_struct('a', 1, 'b', 2, 'c', 3)) struct_col") .select($"struct_col".dropFields("a.b", "a.c")), Row(Row(Row(1)))) checkAnswer( sql("SELECT named_struct('a', named_struct('a', 1, 'b', 2, 'c', 3)) struct_col") .select($"struct_col".withField("a", $"struct_col.a".dropFields("b", "c"))), Row(Row(Row(1)))) } test("should correctly handle different dropField + withField + getField combinations") { val structType = StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = false))) val structLevel1: DataFrame = spark.createDataFrame( sparkContext.parallelize(Row(Row(1, 2)) :: Nil), StructType(Seq(StructField("a", structType, nullable = false)))) val nullStructLevel1: DataFrame = spark.createDataFrame( sparkContext.parallelize(Row(null) :: Nil), StructType(Seq(StructField("a", structType, nullable = true)))) val nullableStructLevel1: DataFrame = spark.createDataFrame( sparkContext.parallelize(Row(Row(1, 2)) :: Row(null) :: Nil), StructType(Seq(StructField("a", structType, nullable = true)))) def check( fieldOps: Column => Column, getFieldName: String, expectedValue: 
Option[Int]): Unit = { def query(df: DataFrame): DataFrame = df.select(fieldOps(col("a")).getField(getFieldName).as("res")) checkAnswer( query(structLevel1), Row(expectedValue.orNull) :: Nil, StructType(Seq(StructField("res", IntegerType, nullable = expectedValue.isEmpty)))) checkAnswer( query(nullStructLevel1), Row(null) :: Nil, StructType(Seq(StructField("res", IntegerType, nullable = true)))) checkAnswer( query(nullableStructLevel1), Row(expectedValue.orNull) :: Row(null) :: Nil, StructType(Seq(StructField("res", IntegerType, nullable = true)))) } // add attribute, extract an attribute from the original struct check(_.withField("c", lit(3)), "a", Some(1)) check(_.withField("c", lit(3)), "b", Some(2)) // add attribute, extract added attribute check(_.withField("c", lit(3)), "c", Some(3)) check(_.withField("c", col("a.a")), "c", Some(1)) check(_.withField("c", col("a.b")), "c", Some(2)) check(_.withField("c", lit(null).cast(IntegerType)), "c", None) // replace attribute, extract an attribute from the original struct check(_.withField("b", lit(3)), "a", Some(1)) check(_.withField("a", lit(3)), "b", Some(2)) // replace attribute, extract replaced attribute check(_.withField("b", lit(3)), "b", Some(3)) check(_.withField("b", lit(null).cast(IntegerType)), "b", None) check(_.withField("a", lit(3)), "a", Some(3)) check(_.withField("a", lit(null).cast(IntegerType)), "a", None) // drop attribute, extract an attribute from the original struct check(_.dropFields("b"), "a", Some(1)) check(_.dropFields("a"), "b", Some(2)) // drop attribute, add attribute, extract an attribute from the original struct check(_.dropFields("b").withField("c", lit(3)), "a", Some(1)) check(_.dropFields("a").withField("c", lit(3)), "b", Some(2)) // drop attribute, add another attribute, extract added attribute check(_.dropFields("a").withField("c", lit(3)), "c", Some(3)) check(_.dropFields("b").withField("c", lit(3)), "c", Some(3)) // add attribute, drop attribute, extract an attribute from the 
original struct check(_.withField("c", lit(3)).dropFields("a"), "b", Some(2)) check(_.withField("c", lit(3)).dropFields("b"), "a", Some(1)) // add attribute, drop another attribute, extract added attribute check(_.withField("c", lit(3)).dropFields("a"), "c", Some(3)) check(_.withField("c", lit(3)).dropFields("b"), "c", Some(3)) // replace attribute, drop same attribute, extract an attribute from the original struct check(_.withField("b", lit(3)).dropFields("b"), "a", Some(1)) check(_.withField("a", lit(3)).dropFields("a"), "b", Some(2)) // add attribute, drop same attribute, extract an attribute from the original struct check(_.withField("c", lit(3)).dropFields("c"), "a", Some(1)) check(_.withField("c", lit(3)).dropFields("c"), "b", Some(2)) // add attribute, drop another attribute, extract added attribute check(_.withField("b", lit(3)).dropFields("a"), "b", Some(3)) check(_.withField("a", lit(3)).dropFields("b"), "a", Some(3)) check(_.withField("b", lit(null).cast(IntegerType)).dropFields("a"), "b", None) check(_.withField("a", lit(null).cast(IntegerType)).dropFields("b"), "a", None) // drop attribute, add same attribute, extract added attribute check(_.dropFields("b").withField("b", lit(3)), "b", Some(3)) check(_.dropFields("a").withField("a", lit(3)), "a", Some(3)) check(_.dropFields("b").withField("b", lit(null).cast(IntegerType)), "b", None) check(_.dropFields("a").withField("a", lit(null).cast(IntegerType)), "a", None) check(_.dropFields("c").withField("c", lit(3)), "c", Some(3)) // add attribute, drop same attribute, add same attribute again, extract added attribute check(_.withField("c", lit(3)).dropFields("c").withField("c", lit(4)), "c", Some(4)) } test("should move field up one level of nesting") { // move a field up one level checkAnswer( nullableStructLevel2.select( col("a").withField("c", col("a.a.c")).dropFields("a.c").as("res")), Row(null) :: Row(Row(null, null)) :: Row(Row(Row(1, null), 3)) :: Nil, StructType(Seq( StructField("res", StructType(Seq( 
// Remaining tests: moving a field up a nesting level (and extracting it);
// referencing a newly added nested column only works across separate selects,
// not within the one that adds it; a newly added field can be dropped either
// in the same or a later select; a dropped field is still referenceable
// WITHIN the same select but not in a subsequent one. Then sanity checks for
// the emptyNestedDf test-fixture generator, parameterized add/drop tests at
// each nesting depth via the Performant/NonPerformant strategies, and tests
// for assert_true (null message, column message, plain condition) and
// raise_error (null and column messages). Ends with SPARK-34677 interval
// negate/add/subtract and the start of SPARK-34721 date + year-month interval
// (that last test is truncated by this chunk boundary).
StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = true))), nullable = true), StructField("c", IntegerType, nullable = true))), nullable = true)))) // move a field up one level and then extract it checkAnswer( nullableStructLevel2.select( col("a").withField("c", col("a.a.c")).dropFields("a.c").getField("c").as("res")), Row(null) :: Row(null) :: Row(3) :: Nil, StructType(Seq(StructField("res", IntegerType, nullable = true)))) } test("should be able to refer to newly added nested column") { intercept[AnalysisException] { structLevel1.select($"a".withField("d", lit(4)).withField("e", $"a.d" + 1).as("a")) }.getMessage should include("No such struct field d in a, b, c") checkAnswer( structLevel1 .select($"a".withField("d", lit(4)).as("a")) .select($"a".withField("e", $"a.d" + 1).as("a")), Row(Row(1, null, 3, 4, 5)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), StructField("b", IntegerType, nullable = true), StructField("c", IntegerType, nullable = false), StructField("d", IntegerType, nullable = false), StructField("e", IntegerType, nullable = false))), nullable = false)))) } test("should be able to drop newly added nested column") { Seq( structLevel1.select($"a".withField("d", lit(4)).dropFields("d").as("a")), structLevel1 .select($"a".withField("d", lit(4)).as("a")) .select($"a".dropFields("d").as("a")) ).foreach { query => checkAnswer( query, Row(Row(1, null, 3)) :: Nil, StructType(Seq( StructField("a", structType, nullable = false)))) } } test("should still be able to refer to dropped column within the same select statement") { // we can still access the nested column even after dropping it within the same select statement checkAnswer( structLevel1.select($"a".dropFields("c").withField("z", $"a.c").as("a")), Row(Row(1, null, 3)) :: Nil, StructType(Seq( StructField("a", StructType(Seq( StructField("a", IntegerType, nullable = false), 
StructField("b", IntegerType, nullable = true), StructField("z", IntegerType, nullable = false))), nullable = false)))) // we can't access the nested column in subsequent select statement after dropping it in a // previous select statement intercept[AnalysisException]{ structLevel1 .select($"a".dropFields("c").as("a")) .select($"a".withField("z", $"a.c")).as("a") }.getMessage should include("No such struct field c in a, b") } test("nestedDf should generate nested DataFrames") { checkAnswer( emptyNestedDf(1, 1, nullable = false), Seq.empty[Row], StructType(Seq(StructField("nested0Col0", StructType(Seq( StructField("nested1Col0", IntegerType, nullable = false))), nullable = false)))) checkAnswer( emptyNestedDf(1, 2, nullable = false), Seq.empty[Row], StructType(Seq(StructField("nested0Col0", StructType(Seq( StructField("nested1Col0", IntegerType, nullable = false), StructField("nested1Col1", IntegerType, nullable = false))), nullable = false)))) checkAnswer( emptyNestedDf(2, 1, nullable = false), Seq.empty[Row], StructType(Seq(StructField("nested0Col0", StructType(Seq( StructField("nested1Col0", StructType(Seq( StructField("nested2Col0", IntegerType, nullable = false))), nullable = false))), nullable = false)))) checkAnswer( emptyNestedDf(2, 2, nullable = false), Seq.empty[Row], StructType(Seq(StructField("nested0Col0", StructType(Seq( StructField("nested1Col0", StructType(Seq( StructField("nested2Col0", IntegerType, nullable = false), StructField("nested2Col1", IntegerType, nullable = false))), nullable = false), StructField("nested1Col1", IntegerType, nullable = false))), nullable = false)))) checkAnswer( emptyNestedDf(2, 2, nullable = true), Seq.empty[Row], StructType(Seq(StructField("nested0Col0", StructType(Seq( StructField("nested1Col0", StructType(Seq( StructField("nested2Col0", IntegerType, nullable = false), StructField("nested2Col1", IntegerType, nullable = false))), nullable = true), StructField("nested1Col1", IntegerType, nullable = false))), nullable = 
true)))) } Seq(Performant, NonPerformant).foreach { method => Seq(false, true).foreach { nullable => test(s"should add and drop 1 column at each depth of nesting using ${method.name} method, " + s"nullable = $nullable") { val maxDepth = 3 // dataframe with nested*Col0 to nested*Col2 at each depth val inputDf = emptyNestedDf(maxDepth, 3, nullable) // add nested*Col3 and drop nested*Col2 val modifiedColumn = method( column = col(nestedColName(0, 0)), numsToAdd = Seq(3), numsToDrop = Seq(2), maxDepth = maxDepth ).as(nestedColName(0, 0)) val resultDf = inputDf.select(modifiedColumn) // dataframe with nested*Col0, nested*Col1, nested*Col3 at each depth val expectedDf = { val colNums = Seq(0, 1, 3) val nestedColumnDataType = nestedStructType(colNums, nullable, maxDepth) spark.createDataFrame( spark.sparkContext.emptyRDD[Row], StructType(Seq(StructField(nestedColName(0, 0), nestedColumnDataType, nullable)))) } checkAnswer(resultDf, expectedDf.collect(), expectedDf.schema) } } } test("assert_true") { // assert_true(condition, errMsgCol) val booleanDf = Seq((true), (false)).toDF("cond") checkAnswer( booleanDf.filter("cond = true").select(assert_true($"cond")), Row(null) :: Nil ) val e1 = intercept[SparkException] { booleanDf.select(assert_true($"cond", lit(null.asInstanceOf[String]))).collect() } assert(e1.getCause.isInstanceOf[RuntimeException]) assert(e1.getCause.getMessage == null) val nullDf = Seq(("first row", None), ("second row", Some(true))).toDF("n", "cond") checkAnswer( nullDf.filter("cond = true").select(assert_true($"cond", $"cond")), Row(null) :: Nil ) val e2 = intercept[SparkException] { nullDf.select(assert_true($"cond", $"n")).collect() } assert(e2.getCause.isInstanceOf[RuntimeException]) assert(e2.getCause.getMessage == "first row") // assert_true(condition) val intDf = Seq((0, 1)).toDF("a", "b") checkAnswer(intDf.select(assert_true($"a" < $"b")), Row(null) :: Nil) val e3 = intercept[SparkException] { intDf.select(assert_true($"a" > $"b")).collect() } 
assert(e3.getCause.isInstanceOf[RuntimeException]) assert(e3.getCause.getMessage == "'('a > 'b)' is not true!") } test("raise_error") { val strDf = Seq(("hello")).toDF("a") val e1 = intercept[SparkException] { strDf.select(raise_error(lit(null.asInstanceOf[String]))).collect() } assert(e1.getCause.isInstanceOf[RuntimeException]) assert(e1.getCause.getMessage == null) val e2 = intercept[SparkException] { strDf.select(raise_error($"a")).collect() } assert(e2.getCause.isInstanceOf[RuntimeException]) assert(e2.getCause.getMessage == "hello") } test("SPARK-34677: negate/add/subtract year-month and day-time intervals") { import testImplicits._ val df = Seq((Period.ofMonths(10), Duration.ofDays(10), Period.ofMonths(1), Duration.ofDays(1))) .toDF("year-month-A", "day-time-A", "year-month-B", "day-time-B") val negatedDF = df.select(-$"year-month-A", -$"day-time-A") checkAnswer(negatedDF, Row(Period.ofMonths(-10), Duration.ofDays(-10))) val addDF = df.select($"year-month-A" + $"year-month-B", $"day-time-A" + $"day-time-B") checkAnswer(addDF, Row(Period.ofMonths(11), Duration.ofDays(11))) val subDF = df.select($"year-month-A" - $"year-month-B", $"day-time-A" - $"day-time-B") checkAnswer(subDF, Row(Period.ofMonths(9), Duration.ofDays(9))) } test("SPARK-34721: add a year-month interval to a date") { withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") { outstandingTimezonesIds.foreach { zid => withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> zid) { Seq( (LocalDate.of(1900, 10, 1), Period.ofMonths(0)) -> LocalDate.of(1900, 10, 1), (LocalDate.of(1970, 1, 1), Period.ofMonths(-1)) -> LocalDate.of(1969, 12, 1), (LocalDate.of(2021, 3, 11), Period.ofMonths(1)) -> LocalDate.of(2021, 4, 11), (LocalDate.of(2020, 12, 31), Period.ofMonths(2)) -> LocalDate.of(2021, 2, 28), (LocalDate.of(2021, 5, 31), Period.ofMonths(-3)) -> LocalDate.of(2021, 2, 28), (LocalDate.of(2020, 2, 29), Period.ofYears(1)) -> LocalDate.of(2021, 2, 28), (LocalDate.of(1, 1, 1), Period.ofYears(2020)) -> 
LocalDate.of(2021, 1, 1) ).foreach { case ((date, period), result) => val df = Seq((date, period)).toDF("date", "interval") checkAnswer( df.select($"date" + $"interval", $"interval" + $"date"), Row(result, result)) } } } val e = intercept[SparkException] { Seq((LocalDate.of(2021, 3, 11), Period.ofMonths(Int.MaxValue))) .toDF("date", "interval") .select($"date" + $"interval") .collect() }.getCause assert(e.isInstanceOf[ArithmeticException]) assert(e.getMessage.contains("integer overflow")) } } test("SPARK-34721: subtract a year-month interval from a date") { withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") { outstandingTimezonesIds.foreach { zid => withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> zid) { Seq( (LocalDate.of(1582, 10, 4), Period.ofMonths(0)) -> LocalDate.of(1582, 10, 4), (LocalDate.of(1582, 10, 15), Period.ofMonths(1)) -> LocalDate.of(1582, 9, 15), (LocalDate.of(1, 1, 1), Period.ofMonths(-1)) -> LocalDate.of(1, 2, 1), (LocalDate.of(9999, 10, 31), Period.ofMonths(-2)) -> LocalDate.of(9999, 12, 31), (LocalDate.of(2021, 5, 31), Period.ofMonths(3)) -> LocalDate.of(2021, 2, 28), (LocalDate.of(2021, 2, 28), Period.ofYears(1)) -> LocalDate.of(2020, 2, 28), (LocalDate.of(2020, 2, 29), Period.ofYears(4)) -> LocalDate.of(2016, 2, 29) ).foreach { case ((date, period), result) => val df = Seq((date, period)).toDF("date", "interval") checkAnswer(df.select($"date" - $"interval"), Row(result)) } } } val e = intercept[SparkException] { Seq((LocalDate.of(2021, 3, 11), Period.ofMonths(Int.MaxValue))) .toDF("date", "interval") .select($"date" - $"interval") .collect() }.getCause assert(e.isInstanceOf[ArithmeticException]) assert(e.getMessage.contains("integer overflow")) } } test("SPARK-34739: add a year-month interval to a timestamp") { withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") { outstandingZoneIds.foreach { zid => withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> zid.getId) { Seq( (LocalDateTime.of(1900, 1, 1, 0, 0, 0, 123456000), 
Period.ofMonths(0)) -> LocalDateTime.of(1900, 1, 1, 0, 0, 0, 123456000), (LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000), Period.ofMonths(-1)) -> LocalDateTime.of(1969, 12, 1, 0, 0, 0, 1000), (LocalDateTime.of(2021, 3, 14, 1, 2, 3, 0), Period.ofMonths(1)) -> LocalDateTime.of(2021, 4, 14, 1, 2, 3, 0), (LocalDateTime.of(2020, 12, 31, 23, 59, 59, 999999000), Period.ofMonths(2)) -> LocalDateTime.of(2021, 2, 28, 23, 59, 59, 999999000), (LocalDateTime.of(2021, 5, 31, 0, 0, 1, 0), Period.ofMonths(-3)) -> LocalDateTime.of(2021, 2, 28, 0, 0, 1, 0), (LocalDateTime.of(2020, 2, 29, 12, 13, 14), Period.ofYears(1)) -> LocalDateTime.of(2021, 2, 28, 12, 13, 14), (LocalDateTime.of(1, 1, 1, 1, 1, 1, 1000), Period.ofYears(2020)) -> LocalDateTime.of(2021, 1, 1, 1, 1, 1, 1000) ).foreach { case ((ldt, period), expected) => val df = Seq((ldt.atZone(zid).toInstant, period)).toDF("ts", "interval") val result = expected.atZone(zid).toInstant checkAnswer(df.select($"ts" + $"interval", $"interval" + $"ts"), Row(result, result)) } } } val e = intercept[SparkException] { Seq((Instant.parse("2021-03-14T18:55:00Z"), Period.ofMonths(Int.MaxValue))) .toDF("ts", "interval") .select($"ts" + $"interval") .collect() }.getCause assert(e.isInstanceOf[ArithmeticException]) assert(e.getMessage.contains("long overflow")) } } test("SPARK-34739: subtract a year-month interval from a timestamp") { withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") { outstandingZoneIds.foreach { zid => withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> zid.getId) { Seq( (LocalDateTime.of(1582, 10, 4, 0, 0, 0), Period.ofMonths(0)) -> LocalDateTime.of(1582, 10, 4, 0, 0, 0), (LocalDateTime.of(1582, 10, 15, 23, 59, 59, 999999000), Period.ofMonths(1)) -> LocalDateTime.of(1582, 9, 15, 23, 59, 59, 999999000), (LocalDateTime.of(1, 1, 1, 1, 1, 1, 1000), Period.ofMonths(-1)) -> LocalDateTime.of(1, 2, 1, 1, 1, 1, 1000), (LocalDateTime.of(9999, 10, 31, 23, 59, 59, 999000000), Period.ofMonths(-2)) -> LocalDateTime.of(9999, 12, 31, 23, 
59, 59, 999000000), (LocalDateTime.of(2021, 5, 31, 0, 0, 0, 1000), Period.ofMonths(3)) -> LocalDateTime.of(2021, 2, 28, 0, 0, 0, 1000), (LocalDateTime.of(2021, 2, 28, 11, 12, 13, 123456000), Period.ofYears(1)) -> LocalDateTime.of(2020, 2, 28, 11, 12, 13, 123456000), (LocalDateTime.of(2020, 2, 29, 1, 2, 3, 5000), Period.ofYears(4)) -> LocalDateTime.of(2016, 2, 29, 1, 2, 3, 5000) ).foreach { case ((ldt, period), expected) => val df = Seq((ldt.atZone(zid).toInstant, period)).toDF("ts", "interval") checkAnswer(df.select($"ts" - $"interval"), Row(expected.atZone(zid).toInstant)) } } } val e = intercept[SparkException] { Seq((Instant.parse("2021-03-14T18:55:00Z"), Period.ofMonths(Int.MaxValue))) .toDF("ts", "interval") .select($"ts" - $"interval") .collect() }.getCause assert(e.isInstanceOf[ArithmeticException]) assert(e.getMessage.contains("long overflow")) } } test("SPARK-34761, SPARK-34903: add/subtract a day-time interval to/from a timestamp") { withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") { outstandingZoneIds.foreach { zid => withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> zid.getId) { Seq( (LocalDateTime.of(1900, 1, 1, 0, 0, 0, 123456000), Duration.ofDays(0)) -> LocalDateTime.of(1900, 1, 1, 0, 0, 0, 123456000), (LocalDateTime.of(1970, 1, 1, 0, 0, 0, 100000000), Duration.ofDays(-1)) -> LocalDateTime.of(1969, 12, 31, 0, 0, 0, 100000000), (LocalDateTime.of(2021, 3, 14, 1, 2, 3), Duration.ofDays(1)) -> LocalDateTime.of(2021, 3, 15, 1, 2, 3), (LocalDateTime.of(2020, 12, 31, 23, 59, 59, 999000000), Duration.ofDays(2 * 30).plusMillis(1)) -> LocalDateTime.of(2021, 3, 2, 0, 0, 0), (LocalDateTime.of(2020, 3, 16, 0, 0, 0, 1000), Duration.of(-1, ChronoUnit.MICROS)) -> LocalDateTime.of(2020, 3, 16, 0, 0, 0), (LocalDateTime.of(2020, 2, 29, 12, 13, 14), Duration.ofDays(365)) -> LocalDateTime.of(2021, 2, 28, 12, 13, 14), (LocalDateTime.of(1582, 10, 4, 1, 2, 3, 40000000), Duration.ofDays(10).plusMillis(60)) -> LocalDateTime.of(1582, 10, 14, 1, 2, 3, 100000000) 
).foreach { case ((ldt, duration), expected) => val ts = ldt.atZone(zid).toInstant val result = expected.atZone(zid).toInstant val df = Seq((ts, duration, result)).toDF("ts", "interval", "result") checkAnswer( df.select($"ts" + $"interval", $"interval" + $"ts", $"result" - $"interval", $"result" - $"ts"), Row(result, result, ts, duration)) } } } Seq( "2021-03-16T18:56:00Z" -> "ts + i", "1900-03-16T18:56:00Z" -> "ts - i").foreach { case (instant, op) => val e = intercept[SparkException] { Seq( (Instant.parse(instant), Duration.of(Long.MaxValue, ChronoUnit.MICROS))) .toDF("ts", "i") .selectExpr(op) .collect() }.getCause assert(e.isInstanceOf[ArithmeticException]) assert(e.getMessage.contains("long overflow")) } } } test("SPARK-34824: multiply year-month interval by numeric") { checkAnswer( Seq((Period.ofYears(0), 0)).toDF("i", "n").select($"i" * $"n"), Row(Period.ofYears(0))) checkAnswer( Seq((Period.ofMonths(0), 10.toByte)).toDF("i", "n").select($"i" * $"n"), Row(Period.ofMonths(0))) checkAnswer( Seq((Period.ofMonths(5), 3.toShort)).toDF("i", "n").select($"n" * $"i"), Row(Period.ofYears(1).plusMonths(3))) checkAnswer( Seq((Period.ofYears(1000), "2")).toDF("i", "n").select($"i" * $"n"), Row(Period.ofYears(2000))) checkAnswer( Seq((Period.ofMonths(1), 12L)).toDF("i", "n").select($"n" * $"i"), Row(Period.ofYears(1))) checkAnswer( Seq((Period.ofYears(100).plusMonths(11), Short.MaxValue)).toDF("i", "n").select($"n" * $"i"), Row(Period.ofYears(100).plusMonths(11).multipliedBy(Short.MaxValue).normalized())) checkAnswer( Seq((Period.ofMonths(-1), 0.499f)).toDF("i", "n").select($"i" * $"n"), Row(Period.ofMonths(0))) checkAnswer( Seq((Period.ofMonths(10000000), 0.0000001d)).toDF("i", "n").select($"i" * $"n"), Row(Period.ofMonths(1))) checkAnswer( Seq((Period.ofMonths(-10000000), BigDecimal(0.0000001d))).toDF("i", "n").select($"i" * $"n"), Row(Period.ofMonths(-1))) checkAnswer( Seq((Period.ofMonths(-1), BigDecimal(0.5))).toDF("i", "n").select($"i" * $"n"), 
Row(Period.ofMonths(-1))) val e = intercept[SparkException] { Seq((Period.ofYears(9999), Long.MinValue)).toDF("i", "n").select($"n" * $"i").collect() }.getCause assert(e.isInstanceOf[ArithmeticException]) assert(e.getMessage.contains("overflow")) } test("SPARK-34850: multiply day-time interval by numeric") { checkAnswer( Seq((Duration.ofDays(0), 0)).toDF("i", "n").select($"i" * $"n"), Row(Duration.ofDays(0))) checkAnswer( Seq((Duration.ofDays(0), 10.toByte)).toDF("i", "n").select($"i" * $"n"), Row(Duration.ofDays(0))) checkAnswer( Seq((Duration.ofHours(12), 3.toShort)).toDF("i", "n").select($"n" * $"i"), Row(Duration.ofDays(1).plusHours(12))) checkAnswer( Seq((Duration.ofMinutes(1000), "2")).toDF("i", "n").select($"i" * $"n"), Row(Duration.ofMinutes(2000))) checkAnswer( Seq((Duration.ofSeconds(1), 60L)).toDF("i", "n").select($"n" * $"i"), Row(Duration.ofMinutes(1))) checkAnswer( Seq((Duration.of(-1, ChronoUnit.MICROS), 0.499f)).toDF("i", "n").select($"i" * $"n"), Row(Duration.of(0, ChronoUnit.MICROS))) checkAnswer( Seq((Duration.of(-1, ChronoUnit.MICROS), 0.51d)).toDF("i", "n").select($"i" * $"n"), Row(Duration.of(-1, ChronoUnit.MICROS))) checkAnswer( Seq((Duration.of(-10000000, ChronoUnit.MICROS), BigDecimal(0.0000001d))) .toDF("i", "n").select($"i" * $"n"), Row(Duration.of(-1, ChronoUnit.MICROS))) val e = intercept[SparkException] { Seq((Duration.ofDays(9999), Long.MinValue)).toDF("i", "n").select($"n" * $"i").collect() }.getCause assert(e.isInstanceOf[ArithmeticException]) assert(e.getMessage.contains("overflow")) } test("SPARK-34868: divide year-month interval by numeric") { checkAnswer( Seq((Period.ofYears(0), 10.toByte)).toDF("i", "n").select($"i" / $"n"), Row(Period.ofYears(0))) checkAnswer( Seq((Period.ofYears(10), 3.toShort)).toDF("i", "n").select($"i" / $"n"), Row(Period.ofYears(3).plusMonths(4))) checkAnswer( Seq((Period.ofYears(1000), "2")).toDF("i", "n").select($"i" / $"n"), Row(Period.ofYears(500))) checkAnswer( 
Seq((Period.ofMonths(1).multipliedBy(Int.MaxValue), Int.MaxValue)) .toDF("i", "n").select($"i" / $"n"), Row(Period.ofMonths(1))) checkAnswer( Seq((Period.ofYears(-1), 12L)).toDF("i", "n").select($"i" / $"n"), Row(Period.ofMonths(-1))) checkAnswer( Seq((Period.ofMonths(-1), 0.499f)).toDF("i", "n").select($"i" / $"n"), Row(Period.ofMonths(-2))) checkAnswer( Seq((Period.ofMonths(10000000), 10000000d)).toDF("i", "n").select($"i" / $"n"), Row(Period.ofMonths(1))) checkAnswer( Seq((Period.ofMonths(-1), BigDecimal(0.5))).toDF("i", "n").select($"i" / $"n"), Row(Period.ofMonths(-2))) val e = intercept[SparkException] { Seq((Period.ofYears(9999), 0)).toDF("i", "n").select($"i" / $"n").collect() }.getCause assert(e.isInstanceOf[ArithmeticException]) assert(e.getMessage.contains("/ by zero")) } test("SPARK-34875: divide day-time interval by numeric") { checkAnswer( Seq((Duration.ZERO, 10.toByte)).toDF("i", "n").select($"i" / $"n"), Row(Duration.ZERO)) checkAnswer( Seq((Duration.ofDays(10), 3.toShort)).toDF("i", "n").select($"i" / $"n"), Row(Duration.ofDays(10).dividedBy(3))) checkAnswer( Seq((Duration.ofHours(1000), "2")).toDF("i", "n").select($"i" / $"n"), Row(Duration.ofHours(500))) checkAnswer( Seq((Duration.of(1, ChronoUnit.MICROS).multipliedBy(Long.MaxValue), Long.MaxValue)) .toDF("i", "n").select($"i" / $"n"), Row(Duration.of(1, ChronoUnit.MICROS))) checkAnswer( Seq((Duration.ofMinutes(-1), 60L)).toDF("i", "n").select($"i" / $"n"), Row(Duration.ofSeconds(-1))) checkAnswer( Seq((Duration.ofDays(-1), 0.5f)).toDF("i", "n").select($"i" / $"n"), Row(Duration.ofDays(-2))) checkAnswer( Seq((Duration.ofMillis(10000000), 10000000d)).toDF("i", "n").select($"i" / $"n"), Row(Duration.ofMillis(1))) checkAnswer( Seq((Duration.of(-1, ChronoUnit.MICROS), BigDecimal(10000.0001))) .toDF("i", "n").select($"i" / $"n"), Row(Duration.of(-1, ChronoUnit.MICROS).multipliedBy(10000).dividedBy(100000001))) val e = intercept[SparkException] { Seq((Duration.ofDays(9999), 0)).toDF("i", 
"n").select($"i" / $"n").collect() }.getCause assert(e.isInstanceOf[ArithmeticException]) assert(e.getMessage.contains("/ by zero")) } test("SPARK-34896: return day-time interval from dates subtraction") { withSQLConf( SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true", SQLConf.LEGACY_INTERVAL_ENABLED.key -> "false") { outstandingTimezonesIds.foreach { zid => withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> zid) { Seq( (LocalDate.of(1582, 10, 15), LocalDate.of(1582, 10, 4)), (LocalDate.of(1900, 10, 1), LocalDate.of(1900, 10, 1)), (LocalDate.of(1969, 12, 1), LocalDate.of(1970, 1, 1)), (LocalDate.of(2021, 3, 1), LocalDate.of(2020, 2, 29)), (LocalDate.of(2021, 3, 15), LocalDate.of(2021, 3, 14)), (LocalDate.of(1, 1, 1), LocalDate.of(2021, 3, 29)) ).foreach { case (end, start) => val df = Seq((end, start)).toDF("end", "start") val daysBetween = Duration.ofDays(ChronoUnit.DAYS.between(start, end)) checkAnswer(df.select($"end" - $"start"), Row(daysBetween)) } } } val e = intercept[SparkException] { Seq((LocalDate.ofEpochDay(0), LocalDate.of(500000, 1, 1))) .toDF("start", "end") .select($"end" - $"start") .collect() }.getCause assert(e.isInstanceOf[ArithmeticException]) assert(e.getMessage.contains("long overflow")) } } test("SPARK-34903: Return day-time interval from timestamps subtraction") { outstandingTimezonesIds.foreach { tz => withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> tz) { checkAnswer( sql("select timestamp '2021-03-31 19:11:10' - timestamp '2021-03-01 19:11:10'"), Row(Duration.ofDays(30))) checkAnswer( Seq((Instant.parse("2021-03-31T00:01:02Z"), Instant.parse("2021-04-01T00:00:00Z"))) .toDF("start", "end").select($"end" - $"start" < Duration.ofDays(1)), Row(true)) checkAnswer( Seq((Instant.parse("2021-03-31T00:01:02.777Z"), Duration.ofMillis(333))) .toDF("ts", "i") .select(($"ts" + $"i") - $"ts"), Row(Duration.ofMillis(333))) checkAnswer( Seq((LocalDateTime.of(2021, 3, 31, 10, 0, 0) .atZone(DateTimeUtils.getZoneId(tz)).toInstant, LocalDate.of(2020, 3, 31))) 
.toDF("ts", "d") .select($"ts" - $"d"), Row(Duration.ofDays(365).plusHours(10))) } } } test("SPARK-35051: add/subtract a day-time interval to/from a date") { withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") { outstandingZoneIds.foreach { zid => withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> zid.getId) { Seq( (LocalDate.of(1, 1, 1), Duration.ofDays(31)) -> LocalDateTime.of(1, 2, 1, 0, 0, 0), (LocalDate.of(1582, 9, 15), Duration.ofDays(30).plus(1, ChronoUnit.MICROS)) -> LocalDateTime.of(1582, 10, 15, 0, 0, 0, 1000), (LocalDate.of(1900, 1, 1), Duration.ofDays(0).plusHours(1)) -> LocalDateTime.of(1900, 1, 1, 1, 0, 0), (LocalDate.of(1970, 1, 1), Duration.ofDays(-1).minusMinutes(1)) -> LocalDateTime.of(1969, 12, 30, 23, 59, 0), (LocalDate.of(2021, 3, 14), Duration.ofDays(1)) -> LocalDateTime.of(2021, 3, 15, 0, 0, 0), (LocalDate.of(2020, 12, 31), Duration.ofDays(4 * 30).plusMinutes(30)) -> LocalDateTime.of(2021, 4, 30, 0, 30, 0), (LocalDate.of(2020, 2, 29), Duration.ofDays(365).plusSeconds(59)) -> LocalDateTime.of(2021, 2, 28, 0, 0, 59), (LocalDate.of(10000, 1, 1), Duration.ofDays(-2)) -> LocalDateTime.of(9999, 12, 30, 0, 0, 0) ).foreach { case ((date, duration), expected) => val result = expected.atZone(zid).toInstant val ts = date.atStartOfDay(zid).toInstant val df = Seq((date, duration, result)).toDF("date", "interval", "result") checkAnswer( df.select($"date" + $"interval", $"interval" + $"date", $"result" - $"interval", $"result" - $"date"), Row(result, result, ts, duration)) } } } Seq( "2021-04-14" -> "date + i", "1900-04-14" -> "date - i").foreach { case (date, op) => val e = intercept[SparkException] { Seq( (LocalDate.parse(date), Duration.of(Long.MaxValue, ChronoUnit.MICROS))) .toDF("date", "i") .selectExpr(op) .collect() }.getCause assert(e.isInstanceOf[ArithmeticException]) assert(e.getMessage.contains("long overflow")) } } } }
cloud-fan/spark
sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
Scala
apache-2.0
112,812
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.catalyst.optimizer

import scala.annotation.tailrec

import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.planning.ExtractFiltersAndInnerJoins
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules._
import org.apache.spark.sql.internal.SQLConf

/**
 * Reorder the joins and push all the conditions into join, so that the bottom ones have at least
 * one condition.
 *
 * The order of joins will not be changed if all of them already have at least one condition.
 *
 * If star schema detection is enabled, reorder the star join plans based on heuristics.
 */
object ReorderJoin extends Rule[LogicalPlan] with PredicateHelper {
  /**
   * Join a list of plans together and push down the conditions into them.
   *
   * The joined plans are picked from left to right, preferring those that have at least one
   * join condition, so that the bottom joins of the resulting tree are not accidental
   * cartesian products.
   *
   * @param input a list of LogicalPlans to inner join and the type of inner join.
   * @param conditions a list of conditions for the join.
   */
  @tailrec
  final def createOrderedJoin(input: Seq[(LogicalPlan, InnerLike)], conditions: Seq[Expression])
    : LogicalPlan = {
    assert(input.size >= 2)
    if (input.size == 2) {
      // Base case: only two plans left. Conditions that cannot be evaluated inside a join
      // (e.g. non-deterministic ones) are kept in a Filter above the join instead.
      val (joinConditions, others) = conditions.partition(canEvaluateWithinJoin)
      val ((left, leftJoinType), (right, rightJoinType)) = (input(0), input(1))
      // The combined join is only a plain Inner join if both inputs were; otherwise the
      // reordering may have introduced a cartesian product, which must be marked Cross.
      val innerJoinType = (leftJoinType, rightJoinType) match {
        case (Inner, Inner) => Inner
        case (_, _) => Cross
      }
      val join = Join(left, right, innerJoinType, joinConditions.reduceLeftOption(And))
      if (others.nonEmpty) {
        Filter(others.reduceLeft(And), join)
      } else {
        join
      }
    } else {
      val (left, _) :: rest = input.toList
      // find out the first join that have at least one join condition
      val conditionalJoin = rest.find { planJoinPair =>
        val plan = planJoinPair._1
        val refs = left.outputSet ++ plan.outputSet
        // A usable condition is one that references both sides of the candidate join
        // (conditions evaluable on a single side alone are excluded first).
        conditions
          .filterNot(l => l.references.nonEmpty && canEvaluate(l, left))
          .filterNot(r => r.references.nonEmpty && canEvaluate(r, plan))
          .exists(_.references.subsetOf(refs))
      }
      // pick the next one if no condition left
      val (right, innerJoinType) = conditionalJoin.getOrElse(rest.head)

      val joinedRefs = left.outputSet ++ right.outputSet
      // Push down every condition that is fully covered by the two joined plans and is
      // safe to evaluate within a join; the rest is carried into the recursive call.
      val (joinConditions, others) = conditions.partition(
        e => e.references.subsetOf(joinedRefs) && canEvaluateWithinJoin(e))
      val joined = Join(left, right, innerJoinType, joinConditions.reduceLeftOption(And))

      // should not have reference to same logical plan
      createOrderedJoin(Seq((joined, Inner)) ++ rest.filterNot(_._1 eq right), others)
    }
  }

  def apply(plan: LogicalPlan): LogicalPlan = plan transform {
    case ExtractFiltersAndInnerJoins(input, conditions)
        if input.size > 2 && conditions.nonEmpty =>
      // Star-schema heuristics only apply when CBO is off; CBO has its own join reordering.
      if (SQLConf.get.starSchemaDetection && !SQLConf.get.cboEnabled) {
        val starJoinPlan = StarSchemaDetection.reorderStarJoins(input, conditions)
        if (starJoinPlan.nonEmpty) {
          // Join the detected star-join plans first, then the remaining inputs.
          val rest = input.filterNot(starJoinPlan.contains(_))
          createOrderedJoin(starJoinPlan ++ rest, conditions)
        } else {
          createOrderedJoin(input, conditions)
        }
      } else {
        createOrderedJoin(input, conditions)
      }
  }
}

/**
 * Elimination of outer joins, if the predicates can restrict the result sets so that
 * all null-supplying rows are eliminated
 *
 * - full outer -> inner if both sides have such predicates
 * - left outer -> inner if the right side has such predicates
 * - right outer -> inner if the left side has such predicates
 * - full outer -> left outer if only the left side has such predicates
 * - full outer -> right outer if only the right side has such predicates
 *
 * This rule should be executed before pushing down the Filter
 */
object EliminateOuterJoin extends Rule[LogicalPlan] with PredicateHelper {

  /**
   * Returns whether the expression returns null or false when all inputs are nulls.
   * Such a predicate filters out every null-supplying row produced by an outer join,
   * which is what makes the join-type downgrade safe.
   */
  private def canFilterOutNull(e: Expression): Boolean = {
    // Non-deterministic expressions and correlated subqueries cannot be evaluated here.
    if (!e.deterministic || SubqueryExpression.hasCorrelatedSubquery(e)) return false
    val attributes = e.references.toSeq
    // An all-null input row: GenericInternalRow initializes every slot to null.
    val emptyRow = new GenericInternalRow(attributes.length)
    val boundE = BindReferences.bindReference(e, attributes)
    // Bail out if any sub-expression cannot be evaluated eagerly.
    if (boundE.find(_.isInstanceOf[Unevaluable]).isDefined) return false
    val v = boundE.eval(emptyRow)
    v == null || v == false
  }

  private def buildNewJoinType(filter: Filter, join: Join): JoinType = {
    // Consider both the explicit filter predicates and the inferred constraints.
    val conditions = splitConjunctivePredicates(filter.condition) ++ filter.constraints
    val leftConditions = conditions.filter(_.references.subsetOf(join.left.outputSet))
    val rightConditions = conditions.filter(_.references.subsetOf(join.right.outputSet))

    // lazy: each side's (potentially costly) evaluation only happens if the match needs it.
    lazy val leftHasNonNullPredicate = leftConditions.exists(canFilterOutNull)
    lazy val rightHasNonNullPredicate = rightConditions.exists(canFilterOutNull)

    join.joinType match {
      case RightOuter if leftHasNonNullPredicate => Inner
      case LeftOuter if rightHasNonNullPredicate => Inner
      case FullOuter if leftHasNonNullPredicate && rightHasNonNullPredicate => Inner
      case FullOuter if leftHasNonNullPredicate => LeftOuter
      case FullOuter if rightHasNonNullPredicate => RightOuter
      case o => o
    }
  }

  def apply(plan: LogicalPlan): LogicalPlan = plan transform {
    case f @ Filter(condition, j @ Join(_, _, RightOuter | LeftOuter | FullOuter, _)) =>
      val newJoinType = buildNewJoinType(f, j)
      // Keep the original node when nothing changes, to preserve reference equality.
      if (j.joinType == newJoinType) f else Filter(condition, j.copy(joinType = newJoinType))
  }
}

/**
 * PythonUDF in join condition can not be evaluated, this rule will detect the PythonUDF
 * and pull them out from join condition. For python udf accessing attributes from only one side,
 * they are pushed down by operation push down rules. If not (e.g. user disables filter push
 * down rules), we need to pull them out in this rule too.
 */
object PullOutPythonUDFInJoinCondition extends Rule[LogicalPlan] with PredicateHelper {
  // True when any sub-expression of `expression` is a PythonUDF.
  def hasPythonUDF(expression: Expression): Boolean = {
    expression.collectFirst { case udf: PythonUDF => udf }.isDefined
  }

  override def apply(plan: LogicalPlan): LogicalPlan = plan transformUp {
    case j @ Join(_, _, joinType, condition)
        if condition.isDefined && hasPythonUDF(condition.get) =>
      if (!joinType.isInstanceOf[InnerLike] && joinType != LeftSemi) {
        // The current strategy only supports InnerLike and LeftSemi join because for other types,
        // it breaks SQL semantics if we run the join condition as a filter after join. If we pass
        // the plan here, it'll still get an invalid PythonUDF RuntimeException with message
        // `requires attributes from more than one child`, we throw firstly here for better
        // readable information.
        throw new AnalysisException("Using PythonUDF in join condition of join type" +
          s" $joinType is not supported.")
      }
      // If condition expression contains python udf, it will be moved out from
      // the new join conditions.
      val (udf, rest) = splitConjunctivePredicates(condition.get).partition(hasPythonUDF)
      val newCondition = if (rest.isEmpty) {
        // Every conjunct contained a PythonUDF: the join degenerates to a cross join.
        logWarning(s"The join condition:$condition of the join plan contains PythonUDF only," +
          s" it will be moved out and the join plan will be turned to cross join.")
        None
      } else {
        Some(rest.reduceLeft(And))
      }
      val newJoin = j.copy(condition = newCondition)
      joinType match {
        // Inner-like joins: evaluate the pulled-out UDF conjuncts as a filter on top.
        case _: InnerLike => Filter(udf.reduceLeft(And), newJoin)
        // LeftSemi: rewrite as Inner + Filter + Project back to the left output,
        // since the UDF filter must see attributes from both sides.
        case LeftSemi =>
          Project(
            j.left.output.map(_.toAttribute),
            Filter(udf.reduceLeft(And), newJoin.copy(joinType = Inner)))
        case _ =>
          throw new AnalysisException("Using PythonUDF in join condition of join type" +
            s" $joinType is not supported.")
      }
  }
}
michalsenkyr/spark
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/joins.scala
Scala
apache-2.0
9,011
/*
 * Copyright (c) 2002-2018 "Neo Technology,"
 * Network Engine for Objects in Lund AB [http://neotechnology.com]
 *
 * This file is part of Neo4j.
 *
 * Neo4j is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
package org.neo4j.cypher.internal.compiler.v2_3.commands.expressions

import org.neo4j.cypher.internal.compiler.v2_3._
import org.neo4j.cypher.internal.compiler.v2_3.commands.predicates.Predicate
import org.neo4j.cypher.internal.compiler.v2_3.helpers.CollectionSupport
import org.neo4j.cypher.internal.compiler.v2_3.pipes.QueryState
import org.neo4j.cypher.internal.compiler.v2_3.symbols.SymbolTable
import org.neo4j.cypher.internal.frontend.v2_3.symbols._

/**
 * Keeps only the elements of `collection` for which `predicate` holds, with the
 * current element bound under the name `id` while the predicate is evaluated.
 * A null collection yields null (via [[NullInNullOutExpression]]).
 */
case class FilterFunction(collection: Expression, id: String, predicate: Predicate)
  extends NullInNullOutExpression(collection)
  with CollectionSupport
  with Closure {

  // Evaluate the predicate once per element; each evaluation sees a context
  // extended with `id -> element` via `m.newWith`.
  def compute(value: Any, m: ExecutionContext)(implicit state: QueryState) = {
    def keep(candidate: Any) = predicate.isTrue(m.newWith(id -> candidate))
    makeTraversable(value).filter(keep)
  }

  // Rewrite both children first, then let `f` rewrite the rebuilt node itself.
  def rewrite(f: (Expression) => Expression) = {
    val rewrittenCollection = collection.rewrite(f)
    val rewrittenPredicate = predicate.rewriteAsPredicate(f)
    f(FilterFunction(rewrittenCollection, id, rewrittenPredicate))
  }

  override def children = Seq(collection, predicate)

  def arguments: Seq[Expression] = Seq(collection)

  // Filtering never changes element types, so the result type is the collection's type.
  def calculateType(symbols: SymbolTable): CypherType =
    collection.evaluateType(CTCollection(CTAny), symbols)

  // `id` is introduced locally by this expression, so the Closure helper drops it
  // from the combined dependencies of collection and predicate.
  def symbolTableDependencies = symbolTableDependencies(collection, predicate, id)
}
HuangLS/neo4j
community/cypher/cypher-compiler-2.3/src/main/scala/org/neo4j/cypher/internal/compiler/v2_3/commands/expressions/FilterFunction.scala
Scala
apache-2.0
2,024
package com.tresata.spark.skewjoin

import org.apache.spark.{ SparkConf, SparkContext }

/**
 * Holds a single local SparkContext shared by the test suites.
 */
object SparkSuite {
  // Lazy so the context is only created when a test first touches it.
  lazy val sc = {
    val conf = new SparkConf(false)
      .setMaster("local")
      .setAppName("test")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      // Fixed: the key was "Spark.ui.enable". Spark configuration keys are
      // case-sensitive and the real property is "spark.ui.enabled", so the
      // misspelled key was silently ignored and the test UI was still started.
      .set("spark.ui.enabled", "false")
    new SparkContext(conf)
  }
}
tresata/spark-skewjoin
src/test/scala/com/tresata/spark/skewjoin/SparkSuite.scala
Scala
apache-2.0
364
package com.outr.nextui.screen

import com.outr.nextui.{Container, UI}
import com.outr.nextui.transition._
import pl.metastack.metarx.{ReadStateChannel, Sub, Var}

/**
 * A UI container whose lifecycle (Removed -> TransitioningIn -> Active ->
 * TransitioningOut -> Removed) is driven by a `Screens` host. Optional
 * in/out transitions are played around visibility changes.
 */
class Screen(implicit ui: UI) extends Container {
  // Writable status, restricted to this package; screens start as Removed.
  private[screen] val _status = Var[ScreenStatus](ScreenStatus.Removed)
  // Read-only view of the lifecycle status exposed to outside observers.
  val status: ReadStateChannel[ScreenStatus] = _status

  // Optional animations run when this screen is activated / removed.
  val transitionIn: Sub[Option[Transition]] = Sub(None)
  val transitionOut: Sub[Option[Transition]] = Sub(None)

  /**
   * Shows this screen inside `screens`: sizes it to the host, attaches it as
   * a child if not already present, then either plays the in-transition or
   * immediately becomes visible and Active.
   */
  protected[screen] def activate(screens: Screens): Unit = {
    // Match the host's current actual dimensions before showing.
    size.width := screens.size.width.actual
    size.height := screens.size.height.actual
    if (!screens.children.get.contains(this)) {
      screens.children += this
    }
    _status := ScreenStatus.TransitioningIn
    transitionIn.get match {
      // NOTE(review): `function(...)` presumably lifts a side effect into a
      // Transition (from com.outr.nextui.transition._) — confirm. Ordering:
      // become visible, run the transition, then mark Active.
      case Some(t) => function(visible := true).andThen(t.andThen(function(_status := ScreenStatus.Active))).start()
      case None => {
        visible := true
        _status := ScreenStatus.Active
      }
    }
  }

  /**
   * Hides this screen and detaches it from `screens`: plays the out-transition
   * first if one is configured, then hides, removes from the host's children,
   * and marks the screen Removed.
   */
  protected[screen] def remove(screens: Screens): Unit = {
    _status := ScreenStatus.TransitioningOut
    // Teardown runs after the transition (or immediately if there is none).
    val f = () => {
      visible := false
      screens.children -= this
      _status := ScreenStatus.Removed
    }
    transitionOut.get match {
      case Some(t) => t.andThen(function(f())).start()
      case None => f()
    }
  }
}
outr/nextui
core/src/main/scala/com/outr/nextui/screen/Screen.scala
Scala
mit
1,325
/*
 * Copyright (c) 2016. Fengguo (Hugo) Wei and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Detailed contributors are listed in the CONTRIBUTOR.md
 */
package org.argus.jc.incremental.jawa.data

import java.io.{File, IOException}
import java.util

import org.argus.jc.incremental.jawa.SettingsManager
import org.argus.jc.incremental.jawa.model.CompileOrder
import org.jetbrains.jps.builders.java.JavaModuleBuildTargetType
import org.jetbrains.jps.incremental.java.JavaBuilder
import org.jetbrains.jps.{ModuleChunk, ProjectPaths}
import org.jetbrains.jps.incremental.{CompileContext, ModuleBuildTarget}
import org.jetbrains.jps.model.java.JpsJavaExtensionService
import org.jetbrains.jps.model.java.compiler.JpsJavaCompilerOptions

import scala.collection.JavaConverters._

/**
 * Immutable bundle of everything a Jawa compilation run needs for one module
 * chunk: the sources, the classpath, compiler options, and cache locations.
 *
 * @author <a href="mailto:fgwei521@gmail.com">Fengguo Wei</a>
 */
case class CompilationData(sources: Seq[File],
                           classpath: Seq[File],
                           output: File,
                           jawaOptions: Seq[String],
                           javaOptions: Seq[String],
                           order: CompileOrder.Value,
                           cacheFile: File,
                           outputToCacheMap: Map[File, File],
                           outputGroups: Seq[(File, File)])

object CompilationData {
  /**
   * Assembles a CompilationData for `chunk`, or returns Left with a
   * user-facing error message (missing output dirs, clashing output paths).
   */
  def from(sources: Seq[File], context: CompileContext, chunk: ModuleChunk): Either[String, CompilationData] = {
    val target = chunk.representativeTarget
    val module = target.getModule

    // Fail fast if any target in the chunk has no output directory configured.
    outputsNotSpecified(chunk) match {
      case Some(message) => return Left(message)
      case None =>
    }
    val output = target.getOutputDir
    // Output dir must exist before the compiler writes to it.
    checkOrCreate(output)

    val classpath = ProjectPaths.getCompilationClasspathFiles(chunk, chunk.containsTests, false, true).asScala.toSeq
    val compilerSettings = SettingsManager.getProjectSettings(module.getProject).getCompilerSettings(chunk)
    // Suppress the default boot classpath; an empty path separator clears it.
    val noBootCp = Seq("-nobootcp", "-javabootclasspath", File.pathSeparator)
    val jawaOptions = noBootCp ++: compilerSettings.getCompilerOptions
    val order = compilerSettings.getCompileOrder

    createOutputToCacheMap(context) match {
      case Left(a) => Left(a)
      case Right(outputToCacheMap) =>
        val cacheFile = outputToCacheMap.getOrElse(output,
          throw new RuntimeException("Unknown build target output directory: " + output))

        // Keep only cache entries for outputs that this chunk actually
        // depends on (i.e. appear on its classpath); drop our own output.
        val relevantOutputToCacheMap = (outputToCacheMap - output).filter(p => classpath.contains(p._1))

        // Module-chunk encoding applies to both Jawa and Java compilation.
        val commonOptions = {
          val encoding = context.getProjectDescriptor.getEncodingConfiguration.getPreferredModuleChunkEncoding(chunk)
          Option(encoding).map(Seq("-encoding", _)).getOrElse(Seq.empty)
        }

        val javaOptions = javaOptionsFor(context, chunk)

        val outputGroups = createOutputGroups(chunk)

        Right(CompilationData(sources, classpath, output, commonOptions ++ jawaOptions, commonOptions ++ javaOptions, order, cacheFile, relevantOutputToCacheMap, outputGroups))
    }
  }

  /** Creates `output` (including parents) if absent; wraps any failure in IOException. */
  def checkOrCreate(output: File) {
    if (!output.exists()) {
      try {
        if (!output.mkdirs()) throw new IOException("Cannot create output directory: " + output.toString)
      } catch {
        case t: Throwable => throw new IOException("Cannot create output directory: " + output.toString, t)
      }
    }
  }

  /** Returns an error message naming the first module in the chunk with no output dir, if any. */
  def outputsNotSpecified(chunk: ModuleChunk): Option[String] = {
    chunk.getTargets.asScala.find(_.getOutputDir == null)
      .map("Output directory not specified for module " + _.getModule.getName)
  }

  /**
   * Collects javac options for the chunk: common compiler flags plus
   * annotation-processing options contributed by JavaBuilder.
   */
  private def javaOptionsFor(context: CompileContext, chunk: ModuleChunk): Seq[String] = {
    val compilerConfig = {
      val project = context.getProjectDescriptor.getProject
      JpsJavaExtensionService.getInstance.getOrCreateCompilerConfiguration(project)
    }

    val options = new util.ArrayList[String]()

    addCommonJavacOptions(options, compilerConfig.getCurrentCompilerOptions)

    val annotationProcessingProfile = {
      val module = chunk.representativeTarget.getModule
      compilerConfig.getAnnotationProcessingProfile(module)
    }

    // JavaBuilder appends context-specific options (e.g. annotation processing).
    JavaBuilder.addCompilationOptions(options, context, chunk, annotationProcessingProfile)

    options.asScala
  }

  // Mirrors the IDE's javac option mapping because the JPS equivalent is private.
  // TODO JavaBuilder.loadCommonJavacOptions should be public
  def addCommonJavacOptions(options: util.ArrayList[String], compilerOptions: JpsJavaCompilerOptions) {
    if (compilerOptions.DEBUGGING_INFO) {
      options.add("-g")
    }

    if (compilerOptions.DEPRECATION) {
      options.add("-deprecation")
    }

    if (compilerOptions.GENERATE_NO_WARNINGS) {
      options.add("-nowarn")
    }

    if (!compilerOptions.ADDITIONAL_OPTIONS_STRING.isEmpty) {
      // NOTE: whitespace-splitting breaks options containing spaces.
      // TODO extract VM options
      options.addAll(compilerOptions.ADDITIONAL_OPTIONS_STRING.split("\\s+").toSeq.asJava)
    }
  }

  /**
   * Maps each build target's output dir to its incremental-cache file,
   * or Left if two targets share an output directory.
   */
  private def createOutputToCacheMap(context: CompileContext): Either[String, Map[File, File]] = {
    val targetToOutput = targetsIn(context).map(target => (target, target.getOutputDir))

    // toLeft: a detected clash message wins; otherwise build the map lazily.
    outputClashesIn(targetToOutput).toLeft {
      val paths = context.getProjectDescriptor.dataManager.getDataPaths

      for ((target, output) <- targetToOutput.toMap)
        yield (output, new File(paths.getTargetDataRoot(target), "cache.dat"))
    }
  }

  /** Pairs each existing source root in the chunk with its target's output dir. */
  private def createOutputGroups(chunk: ModuleChunk): Seq[(File, File)] = {
    for {
      target <- chunk.getTargets.asScala.toSeq
      module = target.getModule
      output = target.getOutputDir
      sourceRoot <- module.getSourceRoots.asScala.map(_.getFile)
      if sourceRoot.exists
    } yield (sourceRoot, output)
  }

  /** All real (non-dummy) Java module build targets in the project, deduplicated. */
  private def targetsIn(context: CompileContext): Seq[ModuleBuildTarget] = {
    // Placeholder hook for filtering targets; currently excludes nothing.
    def isExcluded(target: ModuleBuildTarget): Boolean = false

    val buildTargetIndex = context.getProjectDescriptor.getBuildTargetIndex
    val targets = JavaModuleBuildTargetType.ALL_TYPES.asScala.flatMap(buildTargetIndex.getAllTargets(_).asScala)

    targets.distinct.filterNot { target =>
      buildTargetIndex.isDummy(target) || isExcluded(target)
    }
  }

  /** Returns an aggregated error message if any output dir is shared by multiple targets. */
  private def outputClashesIn(targetToOutput: Seq[(ModuleBuildTarget, File)]): Option[String] = {
    val outputToTargetsMap = targetToOutput.groupBy(_._2).mapValues(_.map(_._1))

    val errors = outputToTargetsMap.collect {
      case (output, targets) if output != null && targets.length > 1 =>
        val targetNames = targets.map(_.getPresentableName).mkString(", ")
        "Output path %s is shared between: %s".format(output, targetNames)
    }

    if (errors.isEmpty) None else Some(errors.mkString("\n") +
      "\nPlease configure separate output paths to proceed with the compilation." +
      "\nTIP: you can use Project Artifacts to combine compiled classes if needed.")
  }
}
arguslab/argus-cit-intellij
jc-plugin/src/main/scala/org/argus/jc/incremental/jawa/data/CompilationData.scala
Scala
epl-1.0
6,984