code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
import java.awt.Color
import java.awt.image.BufferedImage
import java.io.File
import javax.imageio.ImageIO
import scala.io.Source
object Gradient extends App {
  // Input file format:
  //   line 1: "<width> <height>"
  //   line 2: "<r> <g> <b>"  (color at x = 0)
  //   line 3: "<r> <g> <b>"  (color at x = w)
  // Output: a horizontal linear gradient written to gradient.png in the cwd.
  val source = Source.fromFile(args(0))
  val lines =
    try source.getLines().toList
    finally source.close() // fix: the original never closed the file handle
  val Array(w, h) = lines.head.split(" ").map(_.toInt)
  val Array(r0, g0, b0) = lines(1).split(" ").map(_.toInt)
  val Array(r1, g1, b1) = lines(2).split(" ").map(_.toInt)

  /** Linearly interpolates a channel from y0 (at x = 0) toward y1 (at x = w). */
  def interp(x: Int, y0: Int, y1: Int) = (y0 + (y1 - y0) * (x / w.toDouble)).toInt

  val img = new BufferedImage(w, h, BufferedImage.TYPE_INT_RGB)
  for (
    x <- 0 until w;
    y <- 0 until h
  ) img.setRGB(x, y, new Color(interp(x, r0, r1), interp(x, g0, g1), interp(x, b0, b1)).getRGB)
  ImageIO.write(img, "png", new File("gradient.png"))
}
| ccampo133/daily-programmer | 210-intermediate/src/Gradient.scala | Scala | mit | 739 |
/*
* Distributed as part of Scalala, a linear algebra library.
*
* Copyright (C) 2008- Daniel Ramage
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110 USA
*/
package scalala;
package library;
package plotting;
import java.awt.{Color,Paint,TexturePaint};
import java.awt.geom.Rectangle2D;
import java.awt.image.BufferedImage;
/**
* Maps items of type T to a well defined Paint (usually a color).
*
* An implicit conversion exists to make a singleton PaintScaleFactory from
* a PaintScale instance, which means that PaintScales can be provided
* directly whenever a PaintScaleFactory is required.
*
* @author dramage
*/
// Sealed: the only implementations are GradientPaintScale and CategoricalPaintScale in this file.
sealed trait PaintScale[T] extends (T => Paint);
/**
* A simple numeric paint scale for mapping a number within a range to a
* corresponding element of a pre-computed color gradient. Colors from the
* given gradient array are used linearly to represent values between
* lower and upper.
*
* @author dramage
*/
case class GradientPaintScale[T]
(lower : T, upper : T, gradient : Array[Color] = PaintScale.WhiteToBlack)
(implicit view : T=>Double)
extends PaintScale[T] {
  /**
   * Returns the gradient entry for the given value by linearly mapping the
   * range [lower, upper] onto the gradient's indices, clamping out-of-range
   * values to the ends. NaN values map to PaintScale.nanPaint.
   */
  def apply(value : T) : Paint =
    if (view(value).isNaN) {
      PaintScale.nanPaint
    } else {
      val position = gradient.length * (value - lower) / (upper - lower);
      gradient(math.min(gradient.length - 1, math.max(0, position.toInt)));
    }
}
/**
* Maps items to colors using the given partial function. If no color
* is provided for the given item, then returns PaintScale.nanPaint.
*
* @author dramage
*/
case class CategoricalPaintScale[T]
(categories : PartialFunction[T,Paint])
extends PaintScale[T] {
  /**
   * Looks up the paint for the given value; values outside the partial
   * function's domain fall back to PaintScale.nanPaint.
   */
  def apply(value : T) : Paint =
    categories.applyOrElse(value, (_: T) => PaintScale.nanPaint)
}
/**
 * Companion utilities: color-code parsing, implicit constructors for the two
 * PaintScale implementations, named color literals, standard palettes, and
 * gradient construction helpers.
 */
object PaintScale {
  /**
   * Convert a color description string into a color suitable for plotting.
   * @param colorcode A string that is a single character (like "k"), a name
   * (like "black"), "r,g,b" or, "[r,g,b]"
   *
   * @author Patryk Laurent
   */
  def convertToColor(colorcode:String) : java.awt.Color = {
    val rgbcsv = "(.*),(.*),(.*)".r;
    // Normalization: case-insensitive, and spaces/brackets are ignored.
    colorcode.toLowerCase.replace(" ", "").replace("[","").replace("]", "") match {
      case "y" | "yellow" => yellow
      case "m" | "magenta" => magenta
      case "c" | "cyan" => cyan
      case "r" | "red" => red
      case "g" | "green" => green
      case "b" | "blue" => blue
      case "w" | "white" => white
      case "k" | "black" => black
      case rgbcsv(r,g,b) => new java.awt.Color(r.toInt,g.toInt,b.toInt)
      case uninterpretable:String => throw new IllegalArgumentException(
        "Expected color code to be either y m c r g b w k OR R,G,B or " +
        "[R,G,B] where R,G,B are numbers such that 0<=R,G,B<=255, but got '" +
        uninterpretable + "' instead.")
    }
  }

  /** Creates a GradientPaintScale automatically for the given range. */
  implicit def gradientTuple[T](vLowerUpper : (T,T))(implicit view : T=>Double)
  : GradientPaintScale[T] =
    GradientPaintScale[T](vLowerUpper._1, vLowerUpper._2);

  /** Creates a CategoricalPaintScale from the provided partial function. */
  implicit def literalColorMap[T](map : PartialFunction[T,Paint])
  : CategoricalPaintScale[T] =
    CategoricalPaintScale[T](map);

  //
  // Default colors and patterns.
  //

  /** For painting NaN: a 5x5 gray diagonal-hatch texture rather than a flat color. */
  val nanPaint = {
    val img = new BufferedImage(5,5,BufferedImage.TYPE_INT_ARGB);
    val gfx = img.getGraphics;
    gfx.setColor(Color.gray);
    gfx.drawLine(0,0,4,4);
    gfx.dispose();
    new TexturePaint(img, new Rectangle2D.Double(0,0,5,5));
  }

  /** The Category10 palette from Protovis http://vis.stanford.edu/protovis/docs/color.html */
  object Category10 {
    val values : Array[Color] = Array(
      "#1f77b4", "#ff7f0e", "#2ca02c", "#d62728", "#9467bd",
      "#8c564b", "#e377c2", "#7f7f7f", "#bcbd22", "#17becf"
    ).map(Color.decode);

    val blue = values(0);
    val orange = values(1);
    val green = values(2);
    val red = values(3);
    val purple = values(4);
    val brown = values(5);
    val magenta = values(6);
    val gray = values(7);
    val gold = values(8);
    val teal = values(9);

    def apply(i : Int) = values(i);
  }

  /** The Category20 palette from Protovis http://vis.stanford.edu/protovis/docs/color.html */
  object Category20 {
    // First ten entries are Category10; the next ten are their light variants.
    val values : Array[Color] = Category10.values ++ Array(
      "#aec7e8", "#ffbb78", "#98df8a", "#ff9896", "#c5b0d5",
      "#c49c94", "#f7b6d2", "#c7c7c7", "#dbdb8d", "#9edae5"
    ).map(Color.decode);

    val lightblue = values(10);
    val lightorange = values(11);
    val lightgreen = values(12);
    val lightred = values(13);
    val lightpurple = values(14);
    val lightbrown = values(15);
    val lightmagenta = values(16);
    val lightgray = values(17);
    val lightgold = values(18);
    val lightteal = values(19);

    def apply(i : Int) = values(i);
  }

  //
  // A large palette of color literals from ProtoVis
  //

  val aliceblue = Color.decode( "#f0f8ff");
  val antiquewhite = Color.decode( "#faebd7");
  val aqua = Color.decode( "#00ffff");
  val aquamarine = Color.decode( "#7fffd4");
  val azure = Color.decode( "#f0ffff");
  val beige = Color.decode( "#f5f5dc");
  val bisque = Color.decode( "#ffe4c4");
  val black = Color.decode( "#000000");
  val blanchedalmond = Color.decode( "#ffebcd");
  val blue = Color.decode( "#0000ff");
  val blueviolet = Color.decode( "#8a2be2");
  val brown = Color.decode( "#a52a2a");
  val burlywood = Color.decode( "#deb887");
  val cadetblue = Color.decode( "#5f9ea0");
  val chartreuse = Color.decode( "#7fff00");
  val chocolate = Color.decode( "#d2691e");
  val coral = Color.decode( "#ff7f50");
  val cornflowerblue = Color.decode( "#6495ed");
  val cornsilk = Color.decode( "#fff8dc");
  val crimson = Color.decode( "#dc143c");
  val cyan = Color.decode( "#00ffff");
  val darkblue = Color.decode( "#00008b");
  val darkcyan = Color.decode( "#008b8b");
  val darkgoldenrod = Color.decode( "#b8860b");
  val darkgray = Color.decode( "#a9a9a9");
  val darkgreen = Color.decode( "#006400");
  val darkgrey = Color.decode( "#a9a9a9");
  val darkkhaki = Color.decode( "#bdb76b");
  val darkmagenta = Color.decode( "#8b008b");
  val darkolivegreen = Color.decode( "#556b2f");
  val darkorange = Color.decode( "#ff8c00");
  val darkorchid = Color.decode( "#9932cc");
  val darkred = Color.decode( "#8b0000");
  val darksalmon = Color.decode( "#e9967a");
  val darkseagreen = Color.decode( "#8fbc8f");
  val darkslateblue = Color.decode( "#483d8b");
  val darkslategray = Color.decode( "#2f4f4f");
  val darkslategrey = Color.decode( "#2f4f4f");
  val darkturquoise = Color.decode( "#00ced1");
  val darkviolet = Color.decode( "#9400d3");
  val deeppink = Color.decode( "#ff1493");
  val deepskyblue = Color.decode( "#00bfff");
  val dimgray = Color.decode( "#696969");
  val dimgrey = Color.decode( "#696969");
  val dodgerblue = Color.decode( "#1e90ff");
  val firebrick = Color.decode( "#b22222");
  val floralwhite = Color.decode( "#fffaf0");
  val forestgreen = Color.decode( "#228b22");
  val fuchsia = Color.decode( "#ff00ff");
  val gainsboro = Color.decode( "#dcdcdc");
  val ghostwhite = Color.decode( "#f8f8ff");
  val gold = Color.decode( "#ffd700");
  val goldenrod = Color.decode( "#daa520");
  val gray = Color.decode( "#808080");
  val green = Color.decode( "#008000");
  val greenyellow = Color.decode( "#adff2f");
  val grey = Color.decode( "#808080");
  val honeydew = Color.decode( "#f0fff0");
  val hotpink = Color.decode( "#ff69b4");
  val indianred = Color.decode( "#cd5c5c");
  val indigo = Color.decode( "#4b0082");
  val ivory = Color.decode( "#fffff0");
  val khaki = Color.decode( "#f0e68c");
  val lavender = Color.decode( "#e6e6fa");
  val lavenderblush = Color.decode( "#fff0f5");
  val lawngreen = Color.decode( "#7cfc00");
  val lemonchiffon = Color.decode( "#fffacd");
  val lightblue = Color.decode( "#add8e6");
  val lightcoral = Color.decode( "#f08080");
  val lightcyan = Color.decode( "#e0ffff");
  val lightgoldenrodyellow = Color.decode( "#fafad2");
  val lightgray = Color.decode( "#d3d3d3");
  val lightgreen = Color.decode( "#90ee90");
  val lightgrey = Color.decode( "#d3d3d3");
  val lightpink = Color.decode( "#ffb6c1");
  val lightsalmon = Color.decode( "#ffa07a");
  val lightseagreen = Color.decode( "#20b2aa");
  val lightskyblue = Color.decode( "#87cefa");
  val lightslategray = Color.decode( "#778899");
  val lightslategrey = Color.decode( "#778899");
  val lightsteelblue = Color.decode( "#b0c4de");
  val lightyellow = Color.decode( "#ffffe0");
  val lime = Color.decode( "#00ff00");
  val limegreen = Color.decode( "#32cd32");
  val linen = Color.decode( "#faf0e6");
  val magenta = Color.decode( "#ff00ff");
  val maroon = Color.decode( "#800000");
  val mediumaquamarine = Color.decode( "#66cdaa");
  val mediumblue = Color.decode( "#0000cd");
  val mediumorchid = Color.decode( "#ba55d3");
  val mediumpurple = Color.decode( "#9370db");
  val mediumseagreen = Color.decode( "#3cb371");
  val mediumslateblue = Color.decode( "#7b68ee");
  val mediumspringgreen = Color.decode( "#00fa9a");
  val mediumturquoise = Color.decode( "#48d1cc");
  val mediumvioletred = Color.decode( "#c71585");
  val midnightblue = Color.decode( "#191970");
  val mintcream = Color.decode( "#f5fffa");
  val mistyrose = Color.decode( "#ffe4e1");
  val moccasin = Color.decode( "#ffe4b5");
  val navajowhite = Color.decode( "#ffdead");
  val navy = Color.decode( "#000080");
  val oldlace = Color.decode( "#fdf5e6");
  val olive = Color.decode( "#808000");
  val olivedrab = Color.decode( "#6b8e23");
  val orange = Color.decode( "#ffa500");
  val orangered = Color.decode( "#ff4500");
  val orchid = Color.decode( "#da70d6");
  val palegoldenrod = Color.decode( "#eee8aa");
  val palegreen = Color.decode( "#98fb98");
  val paleturquoise = Color.decode( "#afeeee");
  val palevioletred = Color.decode( "#db7093");
  val papayawhip = Color.decode( "#ffefd5");
  val peachpuff = Color.decode( "#ffdab9");
  val peru = Color.decode( "#cd853f");
  val pink = Color.decode( "#ffc0cb");
  val plum = Color.decode( "#dda0dd");
  val powderblue = Color.decode( "#b0e0e6");
  val purple = Color.decode( "#800080");
  val red = Color.decode( "#ff0000");
  val rosybrown = Color.decode( "#bc8f8f");
  val royalblue = Color.decode( "#4169e1");
  val saddlebrown = Color.decode( "#8b4513");
  val salmon = Color.decode( "#fa8072");
  val sandybrown = Color.decode( "#f4a460");
  val seagreen = Color.decode( "#2e8b57");
  val seashell = Color.decode( "#fff5ee");
  val sienna = Color.decode( "#a0522d");
  val silver = Color.decode( "#c0c0c0");
  val skyblue = Color.decode( "#87ceeb");
  val slateblue = Color.decode( "#6a5acd");
  val slategray = Color.decode( "#708090");
  val slategrey = Color.decode( "#708090");
  val snow = Color.decode( "#fffafa");
  val springgreen = Color.decode( "#00ff7f");
  val steelblue = Color.decode( "#4682b4");
  val tan = Color.decode( "#d2b48c");
  val teal = Color.decode( "#008080");
  val thistle = Color.decode( "#d8bfd8");
  val tomato = Color.decode( "#ff6347");
  val turquoise = Color.decode( "#40e0d0");
  val violet = Color.decode( "#ee82ee");
  val wheat = Color.decode( "#f5deb3");
  val white = Color.decode( "#ffffff");
  val whitesmoke = Color.decode( "#f5f5f5");
  val yellow = Color.decode( "#ffff00");
  val yellowgreen = Color.decode( "#9acd32");

  val transparent = new Color(0,0,0,0);

  /** Produces a gradient using the University of Minnesota's school colors, from maroon (low) to gold (high) */
  lazy val MaroonToGold = createGradient(new Color(0xA0, 0x00, 0x00), new Color(0xFF, 0xFF, 0x00), 256);

  /** Produces a gradient from blue (low) to red (high) */
  lazy val BlueToRed= createGradient(Color.BLUE, Color.RED, 500);

  /** Produces a gradient from black (low) to white (high) */
  lazy val BlackToWhite = createGradient(Color.BLACK, Color.WHITE, 500);

  /** Produces a gradient from white (low) to black (high) */
  lazy val WhiteToBlack = createGradient(Color.WHITE, Color.BLACK, 500);

  /** Produces a gradient from red (low) to green (high) */
  lazy val RedToGreen = createGradient(Color.RED, Color.GREEN, 500);

  /** Produces a gradient through green, yellow, orange, red */
  lazy val GreenYelloOrangeRed = createMultiGradient(
    Array(Color.green, Color.yellow, Color.orange, Color.red), 500);

  /** Correctly spelled alias for [[GreenYelloOrangeRed]] (kept for backwards compatibility). */
  lazy val GreenYellowOrangeRed = GreenYelloOrangeRed;

  /** Produces a gradient through the rainbow: violet, blue, green, yellow, orange, red */
  lazy val Rainbow = createMultiGradient(
    Array(new Color(181, 32, 255), Color.blue, Color.green, Color.yellow, Color.orange, Color.red), 500);

  /** Produces a gradient for hot things (black, red, orange, yellow, white) */
  lazy val Hot = createMultiGradient(
    Array(Color.black, new Color(87, 0, 0), Color.red, Color.orange, Color.yellow, Color.white), 500);

  /** Produces a different gradient for hot things (black, brown, orange, white) */
  lazy val Heat = createMultiGradient(
    Array(Color.black, new Color(105, 0, 0), new Color(192, 23, 0), new Color(255, 150, 38), Color.white), 500);

  /** Produces a gradient through red, orange, yellow */
  lazy val RedOrangeYellow = createMultiGradient(
    Array(Color.red, Color.orange, Color.yellow), 500);

  /**
   * Creates an array of Color objects for use as a gradient, using a linear
   * interpolation between the two specified colors.
   *
   * From http://www.mbeckler.org/heatMap/heatMap.html
   *
   * @param one Color used for the bottom of the gradient
   * @param two Color used for the top of the gradient
   * @param numSteps The number of steps in the gradient. 250 is a good number.
   */
  def createGradient(one : Color, two : Color, numSteps : Int) : Array[Color] = {
    val (r1, g1, b1) = (one.getRed, one.getGreen, one.getBlue)
    val (r2, g2, b2) = (two.getRed, two.getGreen, two.getBlue)
    Array.tabulate(numSteps) { i =>
      val iNorm = i / numSteps.toDouble; // normalized position in [0, 1)
      new Color(
        (r1 + iNorm * (r2 - r1)).toInt,
        (g1 + iNorm * (g2 - g1)).toInt,
        (b1 + iNorm * (b2 - b1)).toInt)
    }
  }

  /**
   * Creates an array of Color objects for use as a gradient, using an array
   * of Color objects. It uses a linear interpolation between each pair of
   * points.
   *
   * From http://www.mbeckler.org/heatMap/heatMap.html
   *
   * @param colors An array of Color objects used for the gradient. The
   * Color at index 0 will be the lowest color.
   *
   * @param numSteps The number of steps in the gradient. 250 is a good number.
   */
  def createMultiGradient(colors : Array[Color], numSteps : Int) : Array[Color] = {
    // We assume a linear gradient, with equal spacing between colors.
    // The final gradient is made up of n 'sections', where n = colors.length - 1.
    val numSections = colors.length - 1;
    require(numSections > 0, "Array must have at least two colors");
    val gradient = new Array[Color](numSteps)
    var gradientIndex = 0; // points to the next open spot in the final gradient
    for (section <- 0 until numSections) {
      // A plain two-color gradient for each section.
      val temp = createGradient(colors(section), colors(section+1), numSteps / numSections);
      for (c <- temp) {
        gradient(gradientIndex) = c;
        gradientIndex += 1
      }
    }
    // Integer division may leave a few unfilled slots at the end;
    // pad them with the final color.
    while (gradientIndex < numSteps) {
      gradient(gradientIndex) = colors(colors.length - 1);
      gradientIndex += 1
    }
    gradient;
  }
}
| scalala/Scalala | src/main/scala/scalala/library/plotting/PaintScale.scala | Scala | lgpl-2.1 | 16,855 |
package com.marqod.biosphere.art
import java.awt.Color
import com.marqod.biosphere.engine.Gui
import com.marqod.biosphere.models.{Entity, Tile}
import com.marqod.biosphere.utils.{Colors, Config}
import scala.swing.Graphics2D
/**
* Created by ryan.walker on 1/25/17.
*/
class GuiArt extends Config {
  // Outline colors: hover highlighting vs. the current selection ("target").
  val hoverColor: Color = Colors.brightMagenta
  val selectColor: Color = Colors.brightRed

  /**
   * Draws the GUI selection overlays: the hovered entity/tile in hoverColor
   * and the targeted entity/tile in selectColor. An entity, when present,
   * takes precedence over a tile (same behavior as before; the two duplicated
   * match blocks are now factored into drawHighlight).
   */
  def draw(g: Graphics2D, gui: Gui) = {
    drawHighlight(g, gui.hoverEntity, gui.hoverTile, hoverColor)
    drawHighlight(g, gui.targetEntity, gui.targetTile, selectColor)
  }

  /** Outlines the entity if present; otherwise outlines the tile if present. */
  private def drawHighlight(g: Graphics2D, entity: Option[Entity], tile: Option[Tile], color: Color): Unit =
    entity match {
      case Some(e: Entity) => drawEntitySelect(g, e, color)
      case None => tile.foreach(t => drawTileSelect(g, t, color))
    }

  /** Draws a TILE_SIZE rectangle outline at the tile's position. */
  def drawTileSelect(g: Graphics2D, tile: Tile, color: Color) = {
    g.setColor(color)
    g.drawRect(tile.position.x.toInt,tile.position.y.toInt,TILE_SIZE.x.toInt,TILE_SIZE.y.toInt)
  }

  /** Draws a rectangle outline around the entity's bounding box. */
  def drawEntitySelect(g: Graphics2D, entity: Entity, color: Color) = {
    g.setColor(color)
    val pos = entity.topLeft()
    g.drawRect(pos.x.toInt,pos.y.toInt,entity.size.x.toInt,entity.size.y.toInt)
  }
}
| rwalks/ecosim | src/main/scala/com/marqod/biosphere/art/GuiArt.scala | Scala | apache-2.0 | 1,388 |
/* Copyright (C) 2008-2016 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.app.nlp.segment
import cc.factorie.app.nlp.Document
import scala.collection.mutable.ArrayBuffer
/**
* User: apassos
* Date: 8/19/13
* Time: 2:00 PM
*/
class BigramStatistics {
  // Insertion-ordered maps so iteration (and hence any reported ordering) is deterministic.
  val wordCounts = new collection.mutable.LinkedHashMap[String, Int]()
  val bigramCounts = new collection.mutable.LinkedHashMap[(String,String),Int]()
  // Total number of tokens seen across all processed documents.
  var totalTokens = 0

  /** Accumulates unigram and (previous-token, token) bigram counts from the document. */
  def process(document: Document): Unit = {
    for (token <- document.tokens) {
      totalTokens += 1
      wordCounts(token.string) = 1 + wordCounts.getOrElse(token.string, 0)
      // Only tokens with a predecessor contribute a bigram.
      token.getPrev.foreach(prev => {
        bigramCounts((prev.string,token.string)) = 1 + bigramCounts.getOrElse((prev.string,token.string), 0)
      })
    }
  }

  /** Accumulates counts from each of the documents in turn. */
  def process(documents: Iterable[Document]): Unit = documents.foreach(process)

  /** Merges the counts of the given statistics objects into this one. */
  def aggregateCounts(others: Iterable[BigramStatistics]): Unit = {
    for (other <- others) {
      for ((unigram,value) <- other.wordCounts) {
        wordCounts(unigram) = wordCounts.getOrElse(unigram, 0) + value
      }
      for ((bigram,value) <- other.bigramCounts) {
        bigramCounts(bigram) = bigramCounts.getOrElse(bigram, 0) + value
      }
      totalTokens += other.totalTokens
    }
  }

  /**
   * Counts documents in parallel, one private accumulator per thread, then
   * merges all accumulators into this object (avoids contended shared state).
   */
  def processParallel(documents: Iterable[Document], nThreads: Int = Runtime.getRuntime.availableProcessors()): Unit = {
    val others = new cc.factorie.util.ThreadLocal[BigramStatistics](new BigramStatistics)
    cc.factorie.util.Threading.parForeach(documents, nThreads) { doc =>
      others.get.process(doc)
    }
    aggregateCounts(others.instances)
  }

  /**
   * Returns candidate phrases (as token sequences): bigrams whose pointwise
   * mutual information exceeds scoreThreshold, plus trigrams chained from
   * overlapping interesting bigrams. Only words and bigrams seen more than
   * countThreshold times are considered.
   */
  def getLikelyPhrases(countThreshold: Int = 5, scoreThreshold: Double = 100.0): Seq[Seq[String]] = {
    val bigramPhrases = collection.mutable.LinkedHashSet[Seq[String]]()
    // Maps a phrase-starting word to the words that follow it in accepted bigrams.
    val phraseStarts = collection.mutable.HashMap[String,ArrayBuffer[String]]()
    bigramCounts.foreach({ case ((prev,token),count) =>
      val pc = wordCounts(prev)
      val pt = wordCounts(token)
      if (count > countThreshold && pc > countThreshold && pt > countThreshold) {
        // Pointwise mutual information is defined as P(A,B) / P(A) P(B).
        // In this case P(A,B) = bigramCounts(A,B)/totalTokens ,
        // P(A) = wordCounts(A) / totalTokens, P(B) = wordCounts(B) / totalTokens
        // Hence we can write PMI = bigramCounts(A,B) * totalTokens / (wordCounts(A) * wordCounts(B))
        val score = totalTokens * count.toDouble / (pc * pt)
        if (score > scoreThreshold) {
          bigramPhrases += Seq(prev,token)
          phraseStarts.getOrElseUpdate(prev, new ArrayBuffer[String]).append(token)
        }
      }
    })
    // now we should have all interesting bigrams. I'll make the assumption that
    // if A B and B C are interesting phrases then A B C is interesting without checking.
    val trigramPhrases = collection.mutable.HashSet[Seq[String]]()
    bigramPhrases.foreach({ case Seq(prev,token) =>
      phraseStarts.getOrElse(token, Seq()).foreach(last => trigramPhrases += Seq(prev, token, last))
    })
    bigramPhrases.toSeq ++ trigramPhrases.toSeq
  }

  /** The 100 highest-PMI bigrams among those seen more than threshold times. */
  def topMutualInformationBigrams(threshold: Int = 5): Seq[(String,String,Double)] = {
    bigramCounts.toSeq.filter(_._2 > threshold).map({ case ((prev,token),count) =>
      ((prev,token),totalTokens * count.toDouble / (wordCounts(prev) * wordCounts(token)))
    }).sortBy(-_._2).take(100).map({case ((prev,token),score) => (prev,token,score)})
  }
}
| Craigacp/factorie | src/main/scala/cc/factorie/app/nlp/segment/BigramStatistics.scala | Scala | apache-2.0 | 4,155 |
/*
* MilmSearch is a mailing list searching system.
*
* Copyright (C) 2013 MilmSearch Project.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 3
* of the License, or any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty
* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program.
* If not, see <http://www.gnu.org/licenses/>.
*
* You can contact MilmSearch Project at mailing list
* milm-search-public@lists.sourceforge.jp.
*/
package org.milmsearch.core.dao
import net.liftweb.mapper.Schemifier
import mapper._
import net.liftweb.mapper.AscOrDesc
import net.liftweb.mapper.Descending
import net.liftweb.mapper.Ascending
import org.milmsearch.core.domain.SortOrder
/**
* DAO 関連のヘルパークラス
*/
/**
 * Helpers shared by the DAO layer.
 */
object DaoHelper {
  /**
   * Creates in the database any tables or columns defined by the O/R
   * mappers that do not yet exist.
   */
  def schemify() {
    Schemifier.schemify(true, Schemifier.infoF _,
      MLProposalMetaMapper,
      MLMetaMapper
    )
  }

  /**
   * Converts a domain-level sort order into the Mapper library's sort order.
   */
  def toAscOrDesc(order: SortOrder.Value): AscOrDesc =
    order match {
      case SortOrder.Ascending => Ascending
      case SortOrder.Descending => Descending
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io._
import java.nio.file.Files
import scala.io.Source
import scala.util.Properties
import scala.collection.JavaConverters._
import scala.collection.mutable.Stack
import sbt._
import sbt.Classpaths.publishTask
import sbt.Keys._
import sbtunidoc.Plugin.UnidocKeys.unidocGenjavadocVersion
import com.etsy.sbt.checkstyle.CheckstylePlugin.autoImport._
import com.simplytyped.Antlr4Plugin._
import com.typesafe.sbt.pom.{PomBuild, SbtPomKeys}
import com.typesafe.tools.mima.plugin.MimaKeys
import org.scalastyle.sbt.ScalastylePlugin.autoImport._
import org.scalastyle.sbt.Tasks
import spray.revolver.RevolverPlugin._
object BuildCommons {

  // Root of the checkout; every ProjectRef below is resolved against it.
  private val buildLocation = file(".").getAbsoluteFile.getParentFile

  // SQL-related subprojects (both the list and the individual refs are exposed).
  val sqlProjects@Seq(catalyst, sql, hive, hiveThriftServer, sqlKafka010, avro) = Seq(
    "catalyst", "sql", "hive", "hive-thriftserver", "sql-kafka-0-10", "avro"
  ).map(ProjectRef(buildLocation, _))

  // Streaming subprojects.
  val streamingProjects@Seq(streaming, streamingKafka010) =
    Seq("streaming", "streaming-kafka-0-10").map(ProjectRef(buildLocation, _))

  // All always-enabled projects: the core modules plus the SQL and streaming lists above
  // (the trailing _* absorbs the appended sqlProjects/streamingProjects refs).
  val allProjects@Seq(
    core, graphx, mllib, mllibLocal, repl, networkCommon, networkShuffle, launcher, unsafe, tags, sketch, kvstore, _*
  ) = Seq(
    "core", "graphx", "mllib", "mllib-local", "repl", "network-common", "network-shuffle", "launcher", "unsafe",
    "tags", "sketch", "kvstore"
  ).map(ProjectRef(buildLocation, _)) ++ sqlProjects ++ streamingProjects

  // Projects built only when the corresponding Maven/SBT profile is enabled.
  val optionallyEnabledProjects@Seq(kubernetes, mesos, yarn,
    streamingFlumeSink, streamingFlume,
    streamingKafka, sparkGangliaLgpl, streamingKinesisAsl,
    dockerIntegrationTests, hadoopCloud, kubernetesIntegrationTests) =
    Seq("kubernetes", "mesos", "yarn",
      "streaming-flume-sink", "streaming-flume",
      "streaming-kafka-0-8", "ganglia-lgpl", "streaming-kinesis-asl",
      "docker-integration-tests", "hadoop-cloud", "kubernetes-integration-tests").map(ProjectRef(buildLocation, _))

  // Projects that produce a shaded/assembly jar.
  val assemblyProjects@Seq(networkYarn, streamingFlumeAssembly, streamingKafkaAssembly, streamingKafka010Assembly, streamingKinesisAslAssembly) =
    Seq("network-yarn", "streaming-flume-assembly", "streaming-kafka-0-8-assembly", "streaming-kafka-0-10-assembly", "streaming-kinesis-asl-assembly")
      .map(ProjectRef(buildLocation, _))

  // Projects whose dependency jars get copied into the distribution.
  val copyJarsProjects@Seq(assembly, examples) = Seq("assembly", "examples")
    .map(ProjectRef(buildLocation, _))

  val tools = ProjectRef(buildLocation, "tools")

  // Root project.
  val spark = ProjectRef(buildLocation, "spark")

  val sparkHome = buildLocation

  // Scratch directory used by tests.
  val testTempDir = s"$sparkHome/target/tmp"

  val javacJVMVersion = settingKey[String]("source and target JVM version for javac")
  val scalacJVMVersion = settingKey[String]("source and target JVM version for scalac")
}
object SparkBuild extends PomBuild {
import BuildCommons._
import scala.collection.mutable.Map
val projectsMap: Map[String, Seq[Setting[_]]] = Map.empty
override val profiles = {
val profiles = Properties.envOrNone("SBT_MAVEN_PROFILES") match {
case None => Seq("sbt")
case Some(v) =>
v.split("(\\\\s+|,)").filterNot(_.isEmpty).map(_.trim.replaceAll("-P", "")).toSeq
}
if (System.getProperty("scala-2.12") == "") {
// To activate scala-2.10 profile, replace empty property value to non-empty value
// in the same way as Maven which handles -Dname as -Dname=true before executes build process.
// see: https://github.com/apache/maven/blob/maven-3.0.4/maven-embedder/src/main/java/org/apache/maven/cli/MavenCli.java#L1082
System.setProperty("scala-2.12", "true")
}
profiles
}
Properties.envOrNone("SBT_MAVEN_PROPERTIES") match {
case Some(v) =>
v.split("(\\\\s+|,)").filterNot(_.isEmpty).map(_.split("=")).foreach(x => System.setProperty(x(0), x(1)))
case _ =>
}
override val userPropertiesMap = System.getProperties.asScala.toMap
lazy val MavenCompile = config("m2r") extend(Compile)
lazy val publishLocalBoth = TaskKey[Unit]("publish-local", "publish local for m2 and ivy")
lazy val sparkGenjavadocSettings: Seq[sbt.Def.Setting[_]] = Seq(
libraryDependencies += compilerPlugin(
"com.typesafe.genjavadoc" %% "genjavadoc-plugin" % unidocGenjavadocVersion.value cross CrossVersion.full),
scalacOptions ++= Seq(
"-P:genjavadoc:out=" + (target.value / "java"),
"-P:genjavadoc:strictVisibility=true" // hide package private types
)
)
lazy val scalaStyleRules = Project("scalaStyleRules", file("scalastyle"))
.settings(
libraryDependencies += "org.scalastyle" %% "scalastyle" % "1.0.0"
)
lazy val scalaStyleOnCompile = taskKey[Unit]("scalaStyleOnCompile")
lazy val scalaStyleOnTest = taskKey[Unit]("scalaStyleOnTest")
// We special case the 'println' lint rule to only be a warning on compile, because adding
// printlns for debugging is a common use case and is easy to remember to remove.
val scalaStyleOnCompileConfig: String = {
val in = "scalastyle-config.xml"
val out = "scalastyle-on-compile.generated.xml"
val replacements = Map(
"""customId="println" level="error"""" -> """customId="println" level="warn""""
)
var contents = Source.fromFile(in).getLines.mkString("\\n")
for ((k, v) <- replacements) {
require(contents.contains(k), s"Could not rewrite '$k' in original scalastyle config.")
contents = contents.replace(k, v)
}
new PrintWriter(out) {
write(contents)
close()
}
out
}
// Return a cached scalastyle task for a given configuration (usually Compile or Test)
private def cachedScalaStyle(config: Configuration) = Def.task {
val logger = streams.value.log
// We need a different cache dir per Configuration, otherwise they collide
val cacheDir = target.value / s"scalastyle-cache-${config.name}"
val cachedFun = FileFunction.cached(cacheDir, FilesInfo.lastModified, FilesInfo.exists) {
(inFiles: Set[File]) => {
val args: Seq[String] = Seq.empty
val scalaSourceV = Seq(file(scalaSource.in(config).value.getAbsolutePath))
val configV = (baseDirectory in ThisBuild).value / scalaStyleOnCompileConfig
val configUrlV = scalastyleConfigUrl.in(config).value
val streamsV = streams.in(config).value
val failOnErrorV = true
val failOnWarningV = false
val scalastyleTargetV = scalastyleTarget.in(config).value
val configRefreshHoursV = scalastyleConfigRefreshHours.in(config).value
val targetV = target.in(config).value
val configCacheFileV = scalastyleConfigUrlCacheFile.in(config).value
logger.info(s"Running scalastyle on ${name.value} in ${config.name}")
Tasks.doScalastyle(args, configV, configUrlV, failOnErrorV, failOnWarningV, scalaSourceV,
scalastyleTargetV, streamsV, configRefreshHoursV, targetV, configCacheFileV)
Set.empty
}
}
cachedFun(findFiles(scalaSource.in(config).value))
}
private def findFiles(file: File): Set[File] = if (file.isDirectory) {
file.listFiles().toSet.flatMap(findFiles) + file
} else {
Set(file)
}
def enableScalaStyle: Seq[sbt.Def.Setting[_]] = Seq(
scalaStyleOnCompile := cachedScalaStyle(Compile).value,
scalaStyleOnTest := cachedScalaStyle(Test).value,
logLevel in scalaStyleOnCompile := Level.Warn,
logLevel in scalaStyleOnTest := Level.Warn,
(compile in Compile) := {
scalaStyleOnCompile.value
(compile in Compile).value
},
(compile in Test) := {
scalaStyleOnTest.value
(compile in Test).value
}
)
// Settings applied to every project in the build: javadoc generation, lint,
// resolvers, publishing, compiler flags and the fatal-warnings wrapper.
lazy val sharedSettings = sparkGenjavadocSettings ++
    // NOLINT_ON_COMPILE skips the automatic Scalastyle run entirely.
    (if (sys.env.contains("NOLINT_ON_COMPILE")) Nil else enableScalaStyle) ++ Seq(
  exportJars in Compile := true,
  exportJars in Test := false,
  // Prefer JAVA_HOME; fall back to the parent of the running JRE's java.home.
  javaHome := sys.env.get("JAVA_HOME")
    .orElse(sys.props.get("java.home").map { p => new File(p).getParentFile().getAbsolutePath() })
    .map(file),
  incOptions := incOptions.value.withNameHashing(true),
  publishMavenStyle := true,
  unidocGenjavadocVersion := "0.11",

  // Override SBT's default resolvers:
  resolvers := Seq(
    DefaultMavenRepository,
    Resolver.mavenLocal,
    Resolver.file("local", file(Path.userHome.absolutePath + "/.ivy2/local"))(Resolver.ivyStylePatterns)
  ),
  externalResolvers := resolvers.value,
  otherResolvers := SbtPomKeys.mvnLocalRepository(dotM2 => Seq(Resolver.file("dotM2", dotM2))).value,
  publishLocalConfiguration in MavenCompile :=
    new PublishConfiguration(None, "dotM2", packagedArtifacts.value, Seq(), ivyLoggingLevel.value),
  publishMavenStyle in MavenCompile := true,
  publishLocal in MavenCompile := publishTask(publishLocalConfiguration in MavenCompile, deliverLocal).value,
  publishLocalBoth := Seq(publishLocal in MavenCompile, publishLocal).dependOn.value,

  // Enable -Xdoclint options only on JDK 8+, where they exist. The version
  // string is split on '+', '.' and '-' to cover both "1.8.x" and "9+..." forms.
  javacOptions in (Compile, doc) ++= {
    val versionParts = System.getProperty("java.version").split("[+.\\\\-]+", 3)
    var major = versionParts(0).toInt
    if (major == 1) major = versionParts(1).toInt
    if (major >= 8) Seq("-Xdoclint:all", "-Xdoclint:-missing") else Seq.empty
  },

  javacJVMVersion := "1.8",
  scalacJVMVersion := "1.8",

  javacOptions in Compile ++= Seq(
    "-encoding", "UTF-8",
    "-source", javacJVMVersion.value
  ),
  // This -target and Xlint:unchecked options cannot be set in the Compile configuration scope since
  // `javadoc` doesn't play nicely with them; see https://github.com/sbt/sbt/issues/355#issuecomment-3817629
  // for additional discussion and explanation.
  javacOptions in (Compile, compile) ++= Seq(
    "-target", javacJVMVersion.value,
    "-Xlint:unchecked"
  ),

  scalacOptions in Compile ++= Seq(
    s"-target:jvm-${scalacJVMVersion.value}",
    "-sourcepath", (baseDirectory in ThisBuild).value.getAbsolutePath // Required for relative source links in scaladoc
  ),

  // Remove certain packages from Scaladoc
  scalacOptions in (Compile, doc) := Seq(
    "-groups",
    "-skip-packages", Seq(
      "org.apache.spark.api.python",
      "org.apache.spark.network",
      "org.apache.spark.deploy",
      "org.apache.spark.util.collection"
    ).mkString(":"),
    "-doc-title", "Spark " + version.value.replaceAll("-SNAPSHOT", "") + " ScalaDoc"
  ) ++ {
    // Do not attempt to scaladoc javadoc comments under 2.12 since it can't handle inner classes
    if (scalaBinaryVersion.value == "2.12") Seq("-no-java-comments") else Seq.empty
  },

  // Implements -Xfatal-warnings, ignoring deprecation warnings.
  // Code snippet taken from https://issues.scala-lang.org/browse/SI-8410.
  compile in Compile := {
    val analysis = (compile in Compile).value
    val out = streams.value

    // Logs one compiler problem: location, message and the offending line.
    def logProblem(l: (=> String) => Unit, f: File, p: xsbti.Problem) = {
      l(f.toString + ":" + p.position.line.fold("")(_ + ":") + " " + p.message)
      l(p.position.lineContent)
      l("")
    }

    var failed = 0
    analysis.infos.allInfos.foreach { case (k, i) =>
      i.reportedProblems foreach { p =>
        val deprecation = p.message.contains("is deprecated")

        if (!deprecation) {
          failed = failed + 1
        }

        // Deprecations stay plain warnings; every other warning is surfaced as
        // an error (prefixed "[warn]") and counted towards the failure total.
        val printer: (=> String) => Unit = s => if (deprecation) {
          out.log.warn(s)
        } else {
          out.log.error("[warn] " + s)
        }

        logProblem(printer, k, p)
      }
    }

    if (failed > 0) {
      sys.error(s"$failed fatal warnings")
    }
    analysis
  }
)
// Registers additional settings for the given project; they are attached when
// projectDefinitions materializes the projects. Appends to any settings
// already registered for the same project.
def enable(settings: Seq[Setting[_]])(projectRef: ProjectRef) = {
  val existingSettings = projectsMap.getOrElse(projectRef.project, Seq[Setting[_]]())
  projectsMap += (projectRef.project -> (existingSettings ++ settings))
}
// Note ordering of these settings matter.
/* Enable shared settings on all projects */
(allProjects ++ optionallyEnabledProjects ++ assemblyProjects ++ copyJarsProjects ++ Seq(spark, tools))
  .foreach(enable(sharedSettings ++ DependencyOverrides.settings ++
    ExcludedDependencies.settings ++ Checkstyle.settings))

/* Enable tests settings for all projects except examples, assembly and tools */
(allProjects ++ optionallyEnabledProjects).foreach(enable(TestSettings.settings))

// Projects that are exempt from MiMa binary-compatibility checking.
val mimaProjects = allProjects.filterNot { x =>
  Seq(
    spark, hive, hiveThriftServer, catalyst, repl, networkCommon, networkShuffle, networkYarn,
    unsafe, tags, sqlKafka010, kvstore, avro
  ).contains(x)
}

mimaProjects.foreach { x =>
  enable(MimaBuild.mimaSettings(sparkHome, x))(x)
}

/* Generate and pick the spark build info from extra-resources */
enable(Core.settings)(core)

/* Unsafe settings */
enable(Unsafe.settings)(unsafe)

/*
 * Set up tasks to copy dependencies during packaging. This step can be disabled in the command
 * line, so that dev/mima can run without trying to copy these files again and potentially
 * causing issues.
 */
if (!"false".equals(System.getProperty("copyDependencies"))) {
  copyJarsProjects.foreach(enable(CopyDependencies.settings))
}

/* Enable Assembly for all assembly projects */
assemblyProjects.foreach(enable(Assembly.settings))

/* Package pyspark artifacts in a separate zip file for YARN. */
enable(PySparkAssembly.settings)(assembly)

/* Enable unidoc only for the root spark project */
enable(Unidoc.settings)(spark)

/* Catalyst ANTLR generation settings */
enable(Catalyst.settings)(catalyst)

/* Spark SQL Core console settings */
enable(SQL.settings)(sql)

/* Hive console settings */
enable(Hive.settings)(hive)

enable(Flume.settings)(streamingFlumeSink)

// SPARK-14738 - Remove docker tests from main Spark build
// enable(DockerIntegrationTests.settings)(dockerIntegrationTests)
/**
 * Adds the ability to run the spark shell directly from SBT without building an assembly
 * jar.
 *
 * Usage: `build/sbt sparkShell`
 */
val sparkShell = taskKey[Unit]("start a spark-shell.")
val sparkPackage = inputKey[Unit](
  s"""
     |Download and run a spark package.
     |Usage `builds/sbt "sparkPackage <group:artifact:version> <MainClass> [args]
   """.stripMargin)
val sparkSql = taskKey[Unit]("starts the spark sql CLI.")

// These interactive tasks are only registered on the assembly project, which
// has the full runtime classpath available.
enable(Seq(
  connectInput in run := true,
  fork := true,
  outputStrategy in run := Some (StdoutOutput),

  javaOptions += "-Xmx2g",

  sparkShell := {
    (runMain in Compile).toTask(" org.apache.spark.repl.Main -usejavacp").value
  },

  sparkPackage := {
    import complete.DefaultParsers._
    // First two space-delimited arguments are the package coordinates and the
    // main class; everything else is forwarded to spark-submit.
    val packages :: className :: otherArgs = spaceDelimited("<group:artifact:version> <MainClass> [args]").parsed.toList
    val scalaRun = (runner in run).value
    val classpath = (fullClasspath in Runtime).value
    val args = Seq("--packages", packages, "--class", className, (Keys.`package` in Compile in LocalProject("core"))
      .value.getCanonicalPath) ++ otherArgs
    println(args)
    scalaRun.run("org.apache.spark.deploy.SparkSubmit", classpath.map(_.data), args, streams.value.log)
  },

  javaOptions in Compile += "-Dspark.master=local",

  sparkSql := {
    (runMain in Compile).toTask(" org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver").value
  }
))(assembly)

// Make `sparkShell` available from the root project by delegating to assembly's task.
enable(Seq(sparkShell := sparkShell in LocalProject("assembly")))(spark)
// TODO: move this to its upstream project.
// Attaches any settings registered via `enable` to the matching project, and
// appends the old-deps helper project used for MiMa excludes generation.
override def projectDefinitions(baseDirectory: File): Seq[Project] = {
  super.projectDefinitions(baseDirectory).map { x =>
    // Map.getOrElse replaces the original exists-then-apply pair: one O(1)
    // lookup instead of an O(n) scan followed by a second lookup, with
    // identical behavior (projects without registered settings get none).
    x.settings(projectsMap.getOrElse(x.id, Seq.empty[Setting[_]]): _*)
  } ++ Seq[Project](OldDeps.project)
}
}
object Core {
  // Runs the spark-build-info shell script to generate
  // spark-version-info.properties and registers it as a managed resource
  // that gets packaged into the core jar.
  lazy val settings = Seq(
    resourceGenerators in Compile += Def.task {
      val buildScript = baseDirectory.value + "/../build/spark-build-info"
      val targetDir = baseDirectory.value + "/target/extra-resources/"
      val command = Seq("bash", buildScript, targetDir, version.value)
      Process(command).!!
      val propsFile = baseDirectory.value / "target" / "extra-resources" / "spark-version-info.properties"
      Seq(propsFile)
    }.taskValue
  )
}
object Unsafe {
  lazy val settings = Seq(
    // This option is needed to suppress warnings from sun.misc.Unsafe usage
    javacOptions in Compile += "-XDignore.symbol.file"
  )
}
object Flume {
  // Avro schema generation for the flume sink module.
  lazy val settings = sbtavro.SbtAvro.avroSettings
}
// Currently unused: wiring is commented out above (SPARK-14738).
object DockerIntegrationTests {
  // This serves to override the override specified in DependencyOverrides:
  lazy val settings = Seq(
    dependencyOverrides += "com.google.guava" % "guava" % "18.0",
    resolvers += "DB2" at "https://app.camunda.com/nexus/content/repositories/public/",
    libraryDependencies += "com.oracle" % "ojdbc6" % "11.2.0.1.0" from "https://app.camunda.com/nexus/content/repositories/public/com/oracle/ojdbc6/11.2.0.1.0/ojdbc6-11.2.0.1.0.jar" // scalastyle:ignore
  )
}
/**
 * Overrides to work around sbt's dependency resolution being different from Maven's.
 */
object DependencyOverrides {
  lazy val settings = Seq(
    dependencyOverrides += "com.google.guava" % "guava" % "14.0.1",
    dependencyOverrides += "jline" % "jline" % "2.14.3")
}
/**
 * This excludes library dependencies in sbt, which are specified in maven but are
 * not needed by sbt build.
 */
object ExcludedDependencies {
  lazy val settings = Seq(
    libraryDependencies ~= { libs => libs.filterNot(_.name == "groovy-all") }
  )
}
/**
 * Project to pull previous artifacts of Spark for generating Mima excludes.
 */
object OldDeps {

  lazy val project = Project("oldDeps", file("dev"), settings = oldDepsSettings)

  // Collects mimaPreviousArtifacts from every MiMa-checked project into one
  // sequence; settingDyn + join lets us aggregate settings across projects.
  lazy val allPreviousArtifactKeys = Def.settingDyn[Seq[Set[ModuleID]]] {
    SparkBuild.mimaProjects
      .map { project => MimaKeys.mimaPreviousArtifacts in project }
      .map(k => Def.setting(k.value))
      .join
  }

  def oldDepsSettings() = Defaults.coreDefaultSettings ++ Seq(
    name := "old-deps",
    libraryDependencies := allPreviousArtifactKeys.value.flatten
  )
}
object Catalyst {
  // ANTLR code generation for the SQL parser grammar.
  lazy val settings = antlr4Settings ++ Seq(
    antlr4Version in Antlr4 := "4.7",
    antlr4PackageName in Antlr4 := Some("org.apache.spark.sql.catalyst.parser"),
    antlr4GenListener in Antlr4 := true,
    antlr4GenVisitor in Antlr4 := true
  )
}
object SQL {
  // Pre-loads a SparkContext/SQLContext and common imports into the sbt
  // `console` REPL for the sql project.
  lazy val settings = Seq(
    initialCommands in console :=
      """
        |import org.apache.spark.SparkContext
        |import org.apache.spark.sql.SQLContext
        |import org.apache.spark.sql.catalyst.analysis._
        |import org.apache.spark.sql.catalyst.dsl._
        |import org.apache.spark.sql.catalyst.errors._
        |import org.apache.spark.sql.catalyst.expressions._
        |import org.apache.spark.sql.catalyst.plans.logical._
        |import org.apache.spark.sql.catalyst.rules._
        |import org.apache.spark.sql.catalyst.util._
        |import org.apache.spark.sql.execution
        |import org.apache.spark.sql.functions._
        |import org.apache.spark.sql.types._
        |
        |val sc = new SparkContext("local[*]", "dev-shell")
        |val sqlContext = new SQLContext(sc)
        |import sqlContext.implicits._
        |import sqlContext._
      """.stripMargin,
    cleanupCommands in console := "sc.stop()"
  )
}
object Hive {

  lazy val settings = Seq(
    // Specially disable assertions since some Hive tests fail them
    javaOptions in Test := (javaOptions in Test).value.filterNot(_ == "-ea"),
    // Supporting all SerDes requires us to depend on deprecated APIs, so we turn off the warnings
    // only for this subproject.
    scalacOptions := (scalacOptions map { currentOpts: Seq[String] =>
      currentOpts.filterNot(_ == "-deprecation")
    }).value,
    // Pre-loads TestHive and common imports into the sbt `console` REPL.
    initialCommands in console :=
      """
        |import org.apache.spark.SparkContext
        |import org.apache.spark.sql.catalyst.analysis._
        |import org.apache.spark.sql.catalyst.dsl._
        |import org.apache.spark.sql.catalyst.errors._
        |import org.apache.spark.sql.catalyst.expressions._
        |import org.apache.spark.sql.catalyst.plans.logical._
        |import org.apache.spark.sql.catalyst.rules._
        |import org.apache.spark.sql.catalyst.util._
        |import org.apache.spark.sql.execution
        |import org.apache.spark.sql.functions._
        |import org.apache.spark.sql.hive._
        |import org.apache.spark.sql.hive.test.TestHive._
        |import org.apache.spark.sql.hive.test.TestHive.implicits._
        |import org.apache.spark.sql.types._""".stripMargin,
    cleanupCommands in console := "sparkContext.stop()",
    // Some of our log4j jars make it impossible to submit jobs from this JVM to Hive Map/Reduce
    // in order to generate golden files. This is only required for developers who are adding new
    // new query tests.
    fullClasspath in Test := (fullClasspath in Test).value.filterNot { f => f.toString.contains("jcl-over") }
  )
}
object Assembly {
  import sbtassembly.AssemblyUtils._
  import sbtassembly.Plugin._
  import AssemblyKeys._

  val hadoopVersion = taskKey[String]("The version of hadoop that spark is compiled against.")

  lazy val settings = assemblySettings ++ Seq(
    test in assembly := {},
    // hadoop.version system property wins; otherwise fall back to the
    // effective Maven POM's property.
    hadoopVersion := {
      sys.props.get("hadoop.version")
        .getOrElse(SbtPomKeys.effectivePom.value.getProperties.get("hadoop.version").asInstanceOf[String])
    },
    jarName in assembly := {
      // Streaming connector assemblies omit the hadoop version suffix.
      if (moduleName.value.contains("streaming-flume-assembly")
        || moduleName.value.contains("streaming-kafka-0-8-assembly")
        || moduleName.value.contains("streaming-kafka-0-10-assembly")
        || moduleName.value.contains("streaming-kinesis-asl-assembly")) {
        // This must match the same name used in maven (see external/kafka-0-8-assembly/pom.xml)
        s"${moduleName.value}-${version.value}.jar"
      } else {
        s"${moduleName.value}-${version.value}-hadoop${hadoopVersion.value}.jar"
      }
    },
    jarName in (Test, assembly) := s"${moduleName.value}-test-${version.value}.jar",
    // Resolve duplicate entries when merging dependency jars into the assembly.
    mergeStrategy in assembly := {
      case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard
      case m if m.toLowerCase.matches("meta-inf.*\\\\.sf$") => MergeStrategy.discard
      case "log4j.properties" => MergeStrategy.discard
      case m if m.toLowerCase.startsWith("meta-inf/services/") => MergeStrategy.filterDistinctLines
      case "reference.conf" => MergeStrategy.concat
      case _ => MergeStrategy.first
    }
  )
}
object PySparkAssembly {
  import sbtassembly.Plugin._
  import AssemblyKeys._
  import java.util.zip.{ZipOutputStream, ZipEntry}

  lazy val settings = Seq(
    // Use a resource generator to copy all .py files from python/pyspark into a managed directory
    // to be included in the assembly. We can't just add "python/" to the assembly's resource dir
    // list since that will copy unneeded / unwanted files.
    resourceGenerators in Compile += Def.macroValueI(resourceManaged in Compile map { outDir: File =>
      val src = new File(BuildCommons.sparkHome, "python/pyspark")
      val zipFile = new File(BuildCommons.sparkHome , "python/lib/pyspark.zip")
      zipFile.delete()
      zipRecursive(src, zipFile)
      Seq.empty[File]
    }).value
  )

  /** Zips `source` (recursively, if a directory) into `destZipFile`. */
  private def zipRecursive(source: File, destZipFile: File) = {
    val destOutput = new ZipOutputStream(new FileOutputStream(destZipFile))
    // try/finally ensures the zip stream is closed (and the file handle
    // released) even when an exception is thrown mid-archive; the original
    // code leaked the stream on failure.
    try {
      addFilesToZipStream("", source, destOutput)
      destOutput.flush()
    } finally {
      destOutput.close()
    }
  }

  /** Recursively adds `source` under the entry prefix `parent` to the zip stream. */
  private def addFilesToZipStream(parent: String, source: File, output: ZipOutputStream): Unit = {
    if (source.isDirectory()) {
      output.putNextEntry(new ZipEntry(parent + source.getName()))
      for (file <- source.listFiles()) {
        addFilesToZipStream(parent + source.getName() + File.separator, file, output)
      }
    } else {
      val in = new FileInputStream(source)
      // Close the input stream even if writing to the zip fails.
      try {
        output.putNextEntry(new ZipEntry(parent + source.getName()))
        val buf = new Array[Byte](8192)
        var n = in.read(buf)
        while (n != -1) {
          output.write(buf, 0, n)
          n = in.read(buf)
        }
        output.closeEntry()
      } finally {
        in.close()
      }
    }
  }
}
object Unidoc {

  import BuildCommons._
  import sbtunidoc.Plugin._
  import UnidocKeys._

  // Filters generated/internal sources out of the unidoc input: synthetic
  // classes (names containing '$') and non-public packages.
  private def ignoreUndocumentedPackages(packages: Seq[Seq[File]]): Seq[Seq[File]] = {
    packages
      .map(_.filterNot(_.getName.contains("$")))
      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/deploy")))
      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/examples")))
      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/memory")))
      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/network")))
      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/shuffle")))
      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/executor")))
      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/unsafe")))
      .map(_.filterNot(_.getCanonicalPath.contains("python")))
      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/util/collection")))
      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/catalyst")))
      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/execution")))
      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/internal")))
      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/hive/test")))
  }

  // Drops Kafka 0.10 jars from the unidoc classpaths.
  private def ignoreClasspaths(classpaths: Seq[Classpath]): Seq[Classpath] = {
    classpaths
      .map(_.filterNot(_.data.getCanonicalPath.matches(""".*kafka-clients-0\\.10.*""")))
      .map(_.filterNot(_.data.getCanonicalPath.matches(""".*kafka_2\\..*-0\\.10.*""")))
  }

  val unidocSourceBase = settingKey[String]("Base URL of source links in Scaladoc.")

  lazy val settings = scalaJavaUnidocSettings ++ Seq (
    publish := {},

    // Exclude non-API / problematic modules from both the Scala and Java unidoc runs.
    unidocProjectFilter in(ScalaUnidoc, unidoc) :=
      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, kubernetes,
        yarn, tags, streamingKafka010, sqlKafka010, avro),
    unidocProjectFilter in(JavaUnidoc, unidoc) :=
      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, kubernetes,
        yarn, tags, streamingKafka010, sqlKafka010, avro),

    unidocAllClasspaths in (ScalaUnidoc, unidoc) := {
      ignoreClasspaths((unidocAllClasspaths in (ScalaUnidoc, unidoc)).value)
    },

    unidocAllClasspaths in (JavaUnidoc, unidoc) := {
      ignoreClasspaths((unidocAllClasspaths in (JavaUnidoc, unidoc)).value)
    },

    // Skip actual catalyst, but include the subproject.
    // Catalyst is not public API and contains quasiquotes which break scaladoc.
    unidocAllSources in (ScalaUnidoc, unidoc) := {
      ignoreUndocumentedPackages((unidocAllSources in (ScalaUnidoc, unidoc)).value)
    },

    // Skip class names containing $ and some internal packages in Javadocs
    unidocAllSources in (JavaUnidoc, unidoc) := {
      ignoreUndocumentedPackages((unidocAllSources in (JavaUnidoc, unidoc)).value)
        .map(_.filterNot(_.getCanonicalPath.contains("org/apache/hadoop")))
    },

    javacOptions in (JavaUnidoc, unidoc) := Seq(
      "-windowtitle", "Spark " + version.value.replaceAll("-SNAPSHOT", "") + " JavaDoc",
      "-public",
      "-noqualifier", "java.lang",
      "-tag", """example:a:Example\\:""",
      "-tag", """note:a:Note\\:""",
      "-tag", "group:X",
      "-tag", "tparam:X",
      "-tag", "constructor:X",
      "-tag", "todo:X",
      "-tag", "groupname:X"
    ),

    // Use GitHub repository for Scaladoc source links
    unidocSourceBase := s"https://github.com/apache/spark/tree/v${version.value}",

    scalacOptions in (ScalaUnidoc, unidoc) ++= Seq(
      "-groups", // Group similar methods together based on the @group annotation.
      "-skip-packages", "org.apache.hadoop",
      "-sourcepath", (baseDirectory in ThisBuild).value.getAbsolutePath
    ) ++ (
      // Add links to sources when generating Scaladoc for a non-snapshot release
      if (!isSnapshot.value) {
        Opts.doc.sourceUrl(unidocSourceBase.value + "€{FILE_PATH}.scala")
      } else {
        Seq()
      }
    )
  )
}
object Checkstyle {
  // Java style checking; only Error-severity findings are reported.
  lazy val settings = Seq(
    checkstyleSeverityLevel := Some(CheckstyleSeverityLevel.Error),
    javaSource in (Compile, checkstyle) := baseDirectory.value / "src/main/java",
    javaSource in (Test, checkstyle) := baseDirectory.value / "src/test/java",
    checkstyleConfigLocation := CheckstyleConfigLocation.File("dev/checkstyle.xml"),
    checkstyleOutputFile := baseDirectory.value / "target/checkstyle-output.xml",
    checkstyleOutputFile in Test := baseDirectory.value / "target/checkstyle-output.xml"
  )
}
object CopyDependencies {

  val copyDeps = TaskKey[Unit]("copyDeps", "Copies needed dependencies to the build directory.")
  val destPath = (crossTarget in Compile) { _ / "jars"}

  lazy val settings = Seq(
    copyDeps := {
      val dest = destPath.value
      if (!dest.isDirectory() && !dest.mkdirs()) {
        throw new IOException("Failed to create jars directory.")
      }

      // Copy each dependency jar into target/.../jars, replacing any stale copy.
      (dependencyClasspath in Compile).value.map(_.data)
        .filter { jar => jar.isFile() }
        .foreach { jar =>
          val destJar = new File(dest, jar.getName())
          if (destJar.isFile()) {
            destJar.delete()
          }
          Files.copy(jar.toPath(), destJar.toPath())
        }
    },
    // Package the project's own jar next to the copied dependencies, after copyDeps ran.
    crossTarget in (Compile, packageBin) := destPath.value,
    packageBin in Compile := (packageBin in Compile).dependsOn(copyDeps).value
  )
}
object TestSettings {
  import BuildCommons._

  // NOTE: shadows sbt's scalaBinaryVersion key inside this object; selected by
  // the "scala-2.12" system property rather than the actual compiler version.
  private val scalaBinaryVersion =
    if (System.getProperty("scala-2.12") == "true") {
      "2.12"
    } else {
      "2.11"
    }

  lazy val settings = Seq (
    // Fork new JVMs for tests and set Java options for those
    fork := true,
    // Setting SPARK_DIST_CLASSPATH is a simple way to make sure any child processes
    // launched by the tests have access to the correct test-time classpath.
    envVars in Test ++= Map(
      "SPARK_DIST_CLASSPATH" ->
        (fullClasspath in Test).value.files.map(_.getAbsolutePath)
          .mkString(File.pathSeparator).stripSuffix(File.pathSeparator),
      "SPARK_PREPEND_CLASSES" -> "1",
      "SPARK_SCALA_VERSION" -> scalaBinaryVersion,
      "SPARK_TESTING" -> "1",
      "JAVA_HOME" -> sys.env.get("JAVA_HOME").getOrElse(sys.props("java.home"))),
    javaOptions in Test += s"-Djava.io.tmpdir=$testTempDir",
    javaOptions in Test += "-Dspark.test.home=" + sparkHome,
    javaOptions in Test += "-Dspark.testing=1",
    javaOptions in Test += "-Dspark.port.maxRetries=100",
    javaOptions in Test += "-Dspark.master.rest.enabled=false",
    javaOptions in Test += "-Dspark.memory.debugFill=true",
    javaOptions in Test += "-Dspark.ui.enabled=false",
    javaOptions in Test += "-Dspark.ui.showConsoleProgress=false",
    javaOptions in Test += "-Dspark.unsafe.exceptionOnMemoryLeak=true",
    javaOptions in Test += "-Dsun.io.serialization.extendedDebugInfo=false",
    javaOptions in Test += "-Dderby.system.durability=test",
    // Forward all spark.* system properties set on the sbt JVM to the forked test JVMs.
    javaOptions in Test ++= System.getProperties.asScala.filter(_._1.startsWith("spark"))
      .map { case (k,v) => s"-D$k=$v" }.toSeq,
    javaOptions in Test += "-ea",
    javaOptions in Test ++= "-Xmx3g -Xss4m"
      .split(" ").toSeq,
    javaOptions += "-Xmx3g",
    // Exclude tags defined in a system property
    testOptions in Test += Tests.Argument(TestFrameworks.ScalaTest,
      sys.props.get("test.exclude.tags").map { tags =>
        tags.split(",").flatMap { tag => Seq("-l", tag) }.toSeq
      }.getOrElse(Nil): _*),
    testOptions in Test += Tests.Argument(TestFrameworks.JUnit,
      sys.props.get("test.exclude.tags").map { tags =>
        Seq("--exclude-categories=" + tags)
      }.getOrElse(Nil): _*),
    // Show full stack trace and duration in test cases.
    testOptions in Test += Tests.Argument("-oDF"),
    testOptions in Test += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
    // Enable Junit testing.
    libraryDependencies += "com.novocode" % "junit-interface" % "0.11" % "test",
    // Only allow one test at a time, even across projects, since they run in the same JVM
    parallelExecution in Test := false,
    // Make sure the test temp directory exists.
    resourceGenerators in Test += Def.macroValueI(resourceManaged in Test map { outDir: File =>
      var dir = new File(testTempDir)
      if (!dir.isDirectory()) {
        // Because File.mkdirs() can fail if multiple callers are trying to create the same
        // parent directory, this code tries to create parents one at a time, and avoids
        // failures when the directories have been created by somebody else.
        val stack = new Stack[File]()
        while (!dir.isDirectory()) {
          stack.push(dir)
          dir = dir.getParentFile()
        }

        while (stack.nonEmpty) {
          val d = stack.pop()
          require(d.mkdir() || d.isDirectory(), s"Failed to create directory $d")
        }
      }
      Seq.empty[File]
    }).value,
    concurrentRestrictions in Global += Tags.limit(Tags.Test, 1)
  )
}
| eyalfa/spark | project/SparkBuild.scala | Scala | apache-2.0 | 34,244 |
package com.avsystem.commons
package mongo
import org.scalacheck.{Arbitrary, Gen}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
class KeyEscaperTest extends AnyFunSuite with ScalaCheckPropertyChecks {

  import KeyEscaper._
  import KeyEscaperTest._

  test("custom keys") {
    // Hand-picked inputs covering plain keys, '$'/'.' escaping, and mixes of both.
    val customCases = List(
      "plain" -> "plain",
      "<plain, but strange>" -> "<plain, but strange>",
      "not_so_plain" -> "not_so_plain",
      "$" -> "\\\\$",
      "." -> "\\\\_",
      "plain$ with$ $dollars$" -> "plain$ with$ $dollars$",
      "Sentence." -> "Sentence\\\\_",
      "$operator" -> "\\\\$operator",
      "$worst.of.both.worlds" -> "\\\\$worst\\\\_of\\\\_both\\\\_worlds"
    )
    customCases.foreach { case (input, expected) =>
      val escaped = escape(input)
      assert(escaped == expected)
      // Escaping must be reversible.
      assert(unescape(escaped) == input)
    }
  }

  test("plain keys") {
    // Keys with no reserved characters pass through unchanged in both directions.
    forAll(plainKeyGen) { plainKey =>
      val escaped = escape(plainKey)
      assert(escaped == plainKey)
      assert(unescape(escaped) == plainKey)
    }
  }

  test("arbitrary keys") {
    // Round-trip property for keys that may contain reserved characters.
    forAll(deniedKeyGen) { arbitraryKey =>
      assert(unescape(escape(arbitraryKey)) == arbitraryKey)
    }
  }
}
object KeyEscaperTest {
  // Characters that require escaping in MongoDB document keys.
  private val reserved = Set('.', '$', '\\\\')

  def isPlain(char: Char): Boolean = !reserved.contains(char)

  val plainCharGen: Gen[Char] = Arbitrary.arbitrary[Char].filter(isPlain)
  val plainKeyGen: Gen[String] = Gen.listOf(plainCharGen).map(_.mkString)

  val deniedCharGen: Gen[Char] = Gen.oneOf('.', '$')
  val deniedKeyGen: Gen[String] = Gen.listOf(Gen.oneOf(plainCharGen, deniedCharGen)).map(_.mkString)
}
| AVSystem/scala-commons | commons-mongo/jvm/src/test/scala/com/avsystem/commons/mongo/KeyEscaperTest.scala | Scala | mit | 1,662 |
package se.lu.nateko.cp.meta.onto
import java.net.URI
import scala.util.Failure
import scala.util.Try
import scala.util.control.NoStackTrace
import org.eclipse.rdf4j.model.Literal
import org.eclipse.rdf4j.model.Statement
import org.eclipse.rdf4j.model.IRI
import org.eclipse.rdf4j.model.Value
import org.eclipse.rdf4j.model.vocabulary.RDF
import org.eclipse.rdf4j.query.UpdateExecutionException
import org.semanticweb.owlapi.model.{IRI => OwlIri}
import se.lu.nateko.cp.meta._
import se.lu.nateko.cp.meta.instanceserver.InstanceServer
import se.lu.nateko.cp.meta.instanceserver.InstanceServerUtils
import se.lu.nateko.cp.meta.instanceserver.RdfUpdate
import se.lu.nateko.cp.meta.utils.rdf4j._
import org.eclipse.rdf4j.model.vocabulary.XMLSchema
import org.eclipse.rdf4j.model.vocabulary.RDFS
/**
 * Read/write access to OWL individuals stored in an RDF4J-backed instance
 * server, using the ontology in `onto` for class and property metadata.
 */
class InstOnto (instServer: InstanceServer, val onto: Onto){

  private implicit val factory = instServer.factory

  // rdfs:label, rdfs:comment and rdfs:seeAlso are not declared in the ontology,
  // so property descriptors for them are synthesized here and merged into every
  // class's property list (see getIndividual / getPropInfo).
  private val rdfsLabelInfo = DataPropertyDto(
    ResourceDto("label", RDFS.LABEL.toJava, None),
    CardinalityDto(None, None),
    DataRangeDto(XMLSchema.STRING.toJava, Nil)
  )

  private val rdfsCommentInfo = DataPropertyDto(
    ResourceDto("comment", RDFS.COMMENT.toJava, None),
    CardinalityDto(None, None),
    DataRangeDto(XMLSchema.STRING.toJava, Nil)
  )

  private val rdfsSeeAlsoInfo = DataPropertyDto(
    ResourceDto("seeAlso", RDFS.SEEALSO.toJava, None),
    CardinalityDto(None, None),
    DataRangeDto(XMLSchema.ANYURI.toJava, Nil)
  )

  /** The single RDF context (named graph) this server writes to; asserts there is exactly one. */
  def getWriteContext: URI = {
    val writeContexts = instServer.writeContexts
    val nCtxts = writeContexts.length
    assert(nCtxts == 1, s"Expected exactly one write context, found $nCtxts")
    writeContexts.head.toJava
  }

  /** Lists all individuals of the given class, with display labels. */
  def getIndividuals(classUri: URI): Seq[ResourceDto] = {
    val labeler = onto.getLabelerForClassIndividuals(classUri)
    instServer.getInstances(classUri.toRdf).map(labeler.getInfo(_, instServer))
  }

  /** Candidate values for an object property: individuals of all the property's range classes. */
  def getRangeValues(individClassUri: URI, propUri: URI): Seq[ResourceDto] = {
    assert(individClassUri != null)//just to silence the not-used warning;
    //class uri will be needed in the future for better class-specific range calculation
    val rangeClassUris = onto.getObjPropRangeClassUnion(propUri)
    rangeClassUris.flatMap(getIndividuals)
  }

  /**
   * Full description of one individual: its class info (with the synthesized
   * rdfs properties prepended) and all its property values.
   */
  def getIndividual(uri: URI): IndividualDto = {
    val labeler = onto.getUniversalLabeler
    val iri = uri.toRdf

    val classInfo: ClassDto = {
      val theType = InstanceServerUtils.getSingleType(iri, instServer)
      val mainInfo = onto.getClassInfo(theType.toJava)
      val extraProps: Seq[PropertyDto] = Seq(rdfsLabelInfo, rdfsCommentInfo, rdfsSeeAlsoInfo)
      mainInfo.copy(properties = extraProps ++ mainInfo.properties)
    }

    // Literal objects become data-property values; IRI objects (except rdf:type
    // statements) become object-property values.
    val values: Seq[ValueDto] = instServer.getStatements(iri).collect{
      case Rdf4jStatement(_, pred, value: Literal) =>
        val prop = onto.factory.getOWLDataProperty(OwlIri.create(pred.toJava))
        LiteralValueDto(
          value = value.getLabel,
          property = onto.rdfsLabeling(prop)
        )
      //rdfs:seeAlso is special: anyURI literal on the front end, Resource on the back end
      case Rdf4jStatement(_, RDFS.SEEALSO, value: IRI) =>
        LiteralValueDto(
          value = value.stringValue,
          property = rdfsSeeAlsoInfo.resource
        )
      case Rdf4jStatement(_, pred, value: IRI) if(pred != RDF.TYPE) =>
        val prop = onto.factory.getOWLObjectProperty(OwlIri.create(pred.toJava))
        ObjectValueDto(
          value = labeler.getInfo(value, instServer),
          property = onto.rdfsLabeling(prop)
        )
    }

    IndividualDto(
      resource = labeler.getInfo(iri, instServer),
      owlClass = classInfo,
      values = values
    )
  }

  /** True if any statement has the given URI as its subject. */
  def hasIndividual(uriStr: String): Boolean =
    instServer.hasStatement(Some(factory.createIRI(uriStr)), None, None)

  /** Creates a new individual of the given type; fails if the URI is already in use. */
  def createIndividual(uriStr: String, typeUriStr: String): Try[Unit] = {
    if(hasIndividual(uriStr)) Failure(new Exception("Individual already exists!") with NoStackTrace)
    else Try{
      val uri = instServer.factory.createIRI(uriStr)
      val typeUri = instServer.factory.createIRI(typeUriStr)
      instServer.addInstance(uri, typeUri)
    }.flatten
  }

  /** Removes all statements mentioning the individual, as subject or as object. */
  def deleteIndividual(uriStr: String): Try[Unit] = Try{
    val uri = instServer.factory.createIRI(uriStr)
    val asSubject = instServer.getStatements(uri)
    val asObject = instServer.getStatements(None, None, Some(uri))
    instServer.removeAll(asSubject ++ asObject)
  }

  /**
   * Retracts one statement and asserts another in a single batch; fails up
   * front if the statement to retract is not present in the store.
   */
  def performReplacement(replacement: ReplaceDto): Try[Unit] = {
    val updates = Try{
      val assertion: RdfUpdate = updateDtoToRdfUpdate(replacement.assertion)
      val retraction: RdfUpdate = updateDtoToRdfUpdate(replacement.retraction)
      if(!hasStatement(retraction.statement)) throw new UpdateExecutionException(
        "Database does not contain the statement to retract during the requested replacement."
      )
      Seq(retraction, assertion)
    }
    updates.flatMap(instServer.applyAll)
  }

  /** Applies a batch of assertion/retraction updates in one call. */
  def applyUpdates(updates: Seq[UpdateDto]): Try[Unit] = {
    val rdfUpdates = Try(updates.map(updateDtoToRdfUpdate))
    rdfUpdates.flatMap(instServer.applyAll)
  }

  private def hasStatement(statement: Statement): Boolean = instServer.hasStatement(
    Some(statement.getSubject.asInstanceOf[IRI]),
    Some(statement.getPredicate),
    Some(statement.getObject)
  )

  // Converts an update DTO to an RDF statement. The object string becomes a
  // typed literal for data properties and an IRI for object properties,
  // depending on the property's metadata for the subject's class.
  private def updateDtoToStatement(update: UpdateDto): Statement = {
    val classUri = InstanceServerUtils.getSingleType(update.subject.toRdf, instServer)

    val obj: Value = getPropInfo(update.predicate, classUri.toJava) match{
      case dp: DataPropertyDto =>
        val dtype = dp.range.dataType
        factory.createLiteral(update.obj, dtype)
      case _: ObjectPropertyDto => factory.createIRI(update.obj)
    }
    factory.createStatement(update.subject.toRdf, update.predicate.toRdf, obj)
  }

  // Property metadata lookup, special-casing the synthesized rdfs properties.
  private def getPropInfo(propUri: URI, classUri: URI): PropertyDto =
    propUri.toRdf match {
      case RDFS.LABEL => rdfsLabelInfo
      case RDFS.COMMENT => rdfsCommentInfo
      //rdfs:seeAlso is special: anyURI literal on the front end, Resource on the back end
      case RDFS.SEEALSO => ObjectPropertyDto(rdfsSeeAlsoInfo.resource, rdfsSeeAlsoInfo.cardinality)
      case _ => onto.getPropInfo(propUri, classUri)
    }

  private def updateDtoToRdfUpdate(update: UpdateDto) =
    RdfUpdate(updateDtoToStatement(update), update.isAssertion)
}
| ICOS-Carbon-Portal/meta | src/main/scala/se/lu/nateko/cp/meta/onto/InstOnto.scala | Scala | gpl-3.0 | 6,149 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.kudu.result
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import org.apache.kudu.client.RowResult
import org.geotools.filter.text.ecql.ECQL
import org.locationtech.geomesa.features.TransformSimpleFeature
import org.locationtech.geomesa.filter.factory.FastFilterFactory
import org.locationtech.geomesa.filter.{FilterHelper, filterToString}
import org.locationtech.geomesa.kudu.result.KuduResultAdapter.KuduResultAdapterSerialization
import org.locationtech.geomesa.kudu.schema.KuduIndexColumnAdapter.{FeatureIdAdapter, VisibilityAdapter}
import org.locationtech.geomesa.kudu.schema.{KuduSimpleFeatureSchema, RowResultSimpleFeature}
import org.locationtech.geomesa.security.{SecurityUtils, VisibilityEvaluator}
import org.locationtech.geomesa.utils.collection.CloseableIterator
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.geotools.Transform.Transforms
import org.locationtech.geomesa.utils.io.ByteBuffers.ExpandingByteBuffer
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.filter.Filter
/**
 * Converts Kudu rows into simple features, first filtering (by visibility and
 * CQL predicate) and then applying the relational transform.
 *
 * @param sft simple feature type
 * @param auths authorizations used to evaluate row visibility labels
 * @param ecql filter applied to each decoded feature
 * @param tsft transform simple feature type (the type returned to clients)
 * @param tdefs transform definitions
 */
case class FilteringTransformAdapter(sft: SimpleFeatureType,
                                     auths: Seq[Array[Byte]],
                                     ecql: Filter,
                                     tsft: SimpleFeatureType,
                                     tdefs: String) extends KuduResultAdapter {

  import org.locationtech.geomesa.filter.RichTransform.RichTransform

  // determine all the attributes that we need to be able to evaluate the transform and filter
  private val attributes = {
    val fromTransform = Transforms(sft, tdefs).flatMap(_.properties)
    val fromFilter = FilterHelper.propertyNames(ecql, sft)
    (fromTransform ++ fromFilter).distinct
  }

  private val schema = KuduSimpleFeatureSchema(sft)
  // NOTE: these two feature instances are reused for every row (setRowResult /
  // setFeature mutate them in place), so consumers must process each returned
  // feature before advancing the iterator.
  private val feature = new RowResultSimpleFeature(sft, FeatureIdAdapter, schema.adapters)
  private val transformFeature = TransformSimpleFeature(sft, tsft, tdefs)
  transformFeature.setFeature(feature)

  // columns requested from Kudu: feature id, visibility, plus everything the
  // filter or transform references
  override val columns: Seq[String] =
    Seq(FeatureIdAdapter.name, VisibilityAdapter.name) ++ schema.schema(attributes).map(_.getName)

  override def result: SimpleFeatureType = tsft

  override def adapt(results: CloseableIterator[RowResult]): CloseableIterator[SimpleFeature] = {
    results.flatMap { row =>
      val vis = VisibilityAdapter.readFromRow(row)
      // keep the row if it has no visibility label, or its label evaluates
      // true against our auths, and the decoded feature passes the filter
      if ((vis == null || VisibilityEvaluator.parse(vis).evaluate(auths)) &&
          { feature.setRowResult(row); ecql.evaluate(feature)}) {
        SecurityUtils.setFeatureVisibility(feature, vis)
        Iterator.single(transformFeature)
      } else {
        CloseableIterator.empty
      }
    }
  }

  override def toString: String =
    s"FilteringTransformAdapter(sft=${sft.getTypeName}{${SimpleFeatureTypes.encodeType(sft)}}, " +
        s"filter=${filterToString(ecql)}, transform=$tdefs, " +
        s"auths=${auths.map(new String(_, StandardCharsets.UTF_8)).mkString(",")})"
}
object FilteringTransformAdapter extends KuduResultAdapterSerialization[FilteringTransformAdapter] {

  /**
   * Writes the adapter state (schema, auths, filter and transform) to the buffer.
   * The field order must match `deserialize` exactly.
   */
  override def serialize(adapter: FilteringTransformAdapter, bb: ExpandingByteBuffer): Unit = {
    import adapter._
    bb.putString(sft.getTypeName)
    bb.putString(SimpleFeatureTypes.encodeType(sft, includeUserData = true))
    bb.putInt(auths.length)
    auths.foreach(a => bb.putBytes(a))
    bb.putString(ECQL.toCQL(ecql))
    bb.putString(SimpleFeatureTypes.encodeType(tsft, includeUserData = true))
    bb.putString(tdefs)
  }

  /**
   * Reads the adapter state back in the same order it was written by `serialize`.
   */
  override def deserialize(bb: ByteBuffer): FilteringTransformAdapter = {
    import org.locationtech.geomesa.utils.io.ByteBuffers.RichByteBuffer
    // buffer reads are side-effecting, so each field is pulled into a named val
    // to make the wire order explicit
    val typeName = bb.getString
    val spec = bb.getString
    val sft = SimpleFeatureTypes.createType(typeName, spec)
    val auths = Seq.fill(bb.getInt)(bb.getBytes)
    val ecql = FastFilterFactory.toFilter(sft, bb.getString)
    val tsft = SimpleFeatureTypes.createType(sft.getTypeName, bb.getString)
    val tdefs = bb.getString
    FilteringTransformAdapter(sft, auths, ecql, tsft, tdefs)
  }
}
/*
Copyright 2013 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.summingbird.scalding
import com.twitter.algebird.monad._
import com.twitter.summingbird.batch._
import com.twitter.scalding.{ Source => ScaldingSource, Test => TestMode, _ }
import com.twitter.summingbird.{Producer, TimeExtractor }
import scala.collection.mutable.Buffer
import cascading.tuple.Tuple
import cascading.flow.FlowDef
object TestSource {
  /**
   * Builds an in-memory scalding source from the given iterable, returning both the
   * expected source-to-tuples mapping (for test-mode buffers) and a summingbird
   * producer that reads from it.
   */
  def apply[T](iter: Iterable[T])
    (implicit mf: Manifest[T], te: TimeExtractor[T], tc: TupleConverter[T], tset: TupleSetter[T]):
    (Map[ScaldingSource, Buffer[Tuple]], Producer[Scalding, T]) = {
    val source = IterableSource(iter)
    // materialize the expected tuples up front so test mode can compare against them
    val tuples: Buffer[Tuple] = iter.map(tset(_)).toBuffer
    val producer = Scalding.sourceFromMappable { _ => source }
    (Map(source -> tuples), producer)
  }
}
| surabhiiyer/summingbird | summingbird-scalding-test/src/main/scala/com/twitter/summingbird/scalding/TestSource.scala | Scala | apache-2.0 | 1,311 |
/*
* Copyright 2014 JHC Systems Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sqlest.untyped
import sqlest.ast._
import sqlest.extractor._
import sqlest.untyped.extractor._
import scala.util.Try
object ColumnFinder {

  /**
   * Finds the aliased column addressed by `path` within `extractor`.
   *
   * The path is a dot-separated sequence of field names and/or tuple indices,
   * e.g. "user.addresses.0". An empty or whitespace-only path resolves against
   * the extractor itself.
   */
  def apply(extractor: Extractor[_], path: String): Option[AliasedColumn[_]] = {
    path.trim match {
      case "" => apply(extractor, Nil)
      // Bug fix: split the *trimmed* string rather than the raw path. Previously the
      // raw `path` was split even though the match was on `path.trim`, which let
      // leading/trailing whitespace leak into the first/last segment.
      case str => apply(extractor, str.split("\\\\.").toList)
    }
  }

  /**
   * Recursively walks the extractor tree, consuming one path segment per
   * named/indexed level, and returns the underlying column once the path is
   * fully consumed at a [[sqlest.extractor.ColumnExtractor]].
   */
  def apply(extractor: Extractor[_], path: List[String]): Option[AliasedColumn[_]] = {
    extractor match {
      case ColumnExtractor(column) =>
        // a column only matches if the whole path has been consumed
        path match {
          case Nil => Some(column)
          case _ => None
        }
      case named @ NamedExtractor(product: ProductExtractor[_], _) =>
        // named extractors consume one segment by field name
        path match {
          case name :: tail => findByName(named, product, name).flatMap(apply(_, tail))
          case _ => None
        }
      case product: ProductExtractor[_] =>
        // unnamed products (tuples) consume one segment by numeric index
        path match {
          case StringToInt(index) :: tail => findByIndex(product, index).flatMap(apply(_, tail))
          case _ => None
        }
      // wrapper extractors are transparent: recurse into the inner extractor
      case MappedExtractor(inner, _) =>
        apply(inner, path)
      case OptionExtractor(inner) =>
        apply(inner, path)
      case ListExtractor(inner) =>
        apply(inner, path)
      case GroupedExtractor(inner, _) =>
        apply(inner, path)
      case other => sys.error(s"Unsupported extractor type: $other")
    }
  }

  /** Looks up the inner extractor for `name` among a named extractor's fields. */
  def findByName(named: NamedExtractor[_, _], product: ProductExtractor[_], name: String): Option[Extractor[_]] =
    for {
      index <- named.names.zipWithIndex.find(_._1 == name).map(_._2)
      extractor <- findByIndex(product, index)
    } yield extractor

  /** Returns the inner extractor at `index`, or None when the index is out of bounds. */
  def findByIndex(product: ProductExtractor[_], index: Int): Option[Extractor[_]] =
    if (index >= 0 && index < product.innerExtractors.length) {
      Some(product.innerExtractors(index))
    } else {
      None
    }

  /** Matcher for strings that parse as Int (tuple indices in paths). */
  object StringToInt {
    def unapply(str: String) = Try(str.toInt).toOption
  }
}
package com.benkolera.Rt.Formatter
import com.benkolera.Rt
import com.ning.http.client.{Part,StringPart,ByteArrayPart}
object TicketMessage {
  /**
   * Converts a ticket message into the multipart form parts expected by the RT
   * REST API: a "content" part holding the message fields, followed by one
   * binary part per attachment.
   */
  def toParts( action: String , id: Int, msg: Rt.TicketMessage ):List[Part] = {
    // Optional fields are modelled as Options and flattened away when absent.
    val fields: List[(String, String)] = List(
      Some("Action" -> action),
      msg.subject.map("Subject" -> _),
      Some("Cc" -> fieldListToString(msg.ccs)),
      Some("Bcc" -> fieldListToString(msg.bccs)),
      Some("TimeWorked" -> msg.timeWorked.toString),
      Some("Text" -> msg.text),
      // the Attachment field lists the file names, and is only present when
      // there is at least one attachment
      msg.attachments.headOption.map(_ => "Attachment" -> fieldListToString(msg.attachments.map(_.fileName)))
    ).flatten
    val contentPart = new StringPart("content", fieldsToContentString(fields))
    // attachment parts are numbered from 1 to match the names in the Attachment field
    val attachmentParts = msg.attachments.zipWithIndex.map { case (attachment, idx) =>
      new ByteArrayPart(
        s"attachment_${idx + 1}", attachment.fileName, attachment.data, attachment.mimeType, attachment.charSet
      )
    }
    contentPart :: attachmentParts
  }
}
| benkolera/scala-rt | src/main/scala/Rt/Formatter/TicketMessage.scala | Scala | mit | 926 |
package com.hasanozgan.services.myservice.core
import com.typesafe.config.ConfigFactory
/**
* Created by hozgan on 06/08/15.
*/
object ServiceConfig {
  // Root configuration, loaded once from application.conf / reference.conf.
  private val config = ConfigFactory.load()

  /** HTTP binding settings, read from the "http" section of the configuration. */
  object HttpConfig {
    private val http = config.getConfig("http")
    lazy val interface: String = http.getString("interface")
    lazy val port: Int = http.getInt("port")
  }
}
| hasanozgan/spray-microservice-template | service/src/main/scala/com/hasanozgan/services/myservice/core/ServiceConfig.scala | Scala | mit | 396 |
object NAME {
  /** Program entry point. */
  def main(args: Array[String]): Unit = {
    // Put code here
  }
}
| LoyolaChicagoBooks/introcs-scala-examples | boilerplate/sample.scala | Scala | gpl-3.0 | 75 |
/*
* Copyright (C) 04/10/13 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.core.serializer.file
import org.openmole.core.exception.InternalProcessingError
import org.openmole.core.serializer.converter.Serialiser
import java.io.{ File, InputStream }
trait FileInjection <: Serialiser {
  // Maps the original (serialised) file path to the replacement file that should be
  // injected during deserialisation.
  var injectedFiles: Map[String, File] = Map.empty
  // Register the converter that performs the file substitution while the XML stream is read.
  xStream.registerConverter(new FileConverterInjecter(this))
  // Returns the replacement for `file`, failing loudly if no replacement was registered.
  def getMatchingFile(file: String): File =
    injectedFiles.getOrElse(file, throw InternalProcessingError(s"Replacement for file $file not found among $injectedFiles"))
  // Deserialise an object graph from the stream, injecting replacement files on the way.
  def fromXML[T](is: InputStream): T = xStream.fromXML(is).asInstanceOf[T]
  override def clean = {
    super.clean
    // drop the map reference; this instance must not be used for lookups after clean
    injectedFiles = null
  }
}
| openmole/openmole | openmole/core/org.openmole.core.serializer/src/main/scala/org/openmole/core/serializer/file/FileInjection.scala | Scala | agpl-3.0 | 1,381 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.joins
import org.apache.spark.sql.{DataFrame, Row}
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.optimizer.{BuildLeft, BuildRight, BuildSide}
import org.apache.spark.sql.catalyst.planning.ExtractEquiJoinKeys
import org.apache.spark.sql.catalyst.plans.Inner
import org.apache.spark.sql.catalyst.plans.logical.{Join, JoinHint}
import org.apache.spark.sql.execution._
import org.apache.spark.sql.execution.exchange.EnsureRequirements
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types.{IntegerType, StringType, StructType}
class InnerJoinSuite extends SparkPlanTest with SharedSparkSession {
  import testImplicits.newProductEncoder
  import testImplicits.localSeqToDatasetHolder
  // Left side of the joins; row G has a null key and must never match.
  private lazy val myUpperCaseData = spark.createDataFrame(
    sparkContext.parallelize(Seq(
      Row(1, "A"),
      Row(2, "B"),
      Row(3, "C"),
      Row(4, "D"),
      Row(5, "E"),
      Row(6, "F"),
      Row(null, "G")
    )), new StructType().add("N", IntegerType).add("L", StringType))
  // Right side of the joins; also contains a null key.
  private lazy val myLowerCaseData = spark.createDataFrame(
    sparkContext.parallelize(Seq(
      Row(1, "a"),
      Row(2, "b"),
      Row(3, "c"),
      Row(4, "d"),
      Row(null, "e")
    )), new StructType().add("n", IntegerType).add("l", StringType))
  // Duplicate-key data used for the multiple-match scenarios below.
  private lazy val myTestData1 = Seq(
    (1, 1),
    (1, 2),
    (2, 1),
    (2, 2),
    (3, 1),
    (3, 2)
  ).toDF("a", "b")
  private lazy val myTestData2 = Seq(
    (1, 1),
    (1, 2),
    (2, 1),
    (2, 2),
    (3, 1),
    (3, 2)
  ).toDF("a", "b")
  // Note: the input dataframes and expression must be evaluated lazily because
  // the SQLContext should be used only within a test to keep SQL tests stable
  //
  // Registers one test per physical inner-join strategy (broadcast/shuffled hash,
  // sort-merge, cartesian product, broadcast nested loop), all of which must
  // produce `expectedAnswer` for the same inputs and condition.
  private def testInnerJoin(
      testName: String,
      leftRows: => DataFrame,
      rightRows: => DataFrame,
      condition: () => Expression,
      expectedAnswer: Seq[Product]): Unit = {
    // Extracts equi-join keys and the residual condition from the logical inner join.
    def extractJoinParts(): Option[ExtractEquiJoinKeys.ReturnType] = {
      val join = Join(leftRows.logicalPlan, rightRows.logicalPlan,
        Inner, Some(condition()), JoinHint.NONE)
      ExtractEquiJoinKeys.unapply(join)
    }
    // Builds a broadcast hash join and fixes up its distribution requirements.
    def makeBroadcastHashJoin(
        leftKeys: Seq[Expression],
        rightKeys: Seq[Expression],
        boundCondition: Option[Expression],
        leftPlan: SparkPlan,
        rightPlan: SparkPlan,
        side: BuildSide) = {
      val broadcastJoin = joins.BroadcastHashJoinExec(
        leftKeys,
        rightKeys,
        Inner,
        side,
        boundCondition,
        leftPlan,
        rightPlan)
      EnsureRequirements.apply(broadcastJoin)
    }
    // Builds a shuffled hash join; the residual condition is applied as a separate filter.
    def makeShuffledHashJoin(
        leftKeys: Seq[Expression],
        rightKeys: Seq[Expression],
        boundCondition: Option[Expression],
        leftPlan: SparkPlan,
        rightPlan: SparkPlan,
        side: BuildSide) = {
      val shuffledHashJoin = joins.ShuffledHashJoinExec(leftKeys, rightKeys, Inner,
        side, None, leftPlan, rightPlan)
      val filteredJoin =
        boundCondition.map(FilterExec(_, shuffledHashJoin)).getOrElse(shuffledHashJoin)
      EnsureRequirements.apply(filteredJoin)
    }
    // Builds a sort-merge join and fixes up its distribution/ordering requirements.
    def makeSortMergeJoin(
        leftKeys: Seq[Expression],
        rightKeys: Seq[Expression],
        boundCondition: Option[Expression],
        leftPlan: SparkPlan,
        rightPlan: SparkPlan) = {
      val sortMergeJoin = joins.SortMergeJoinExec(leftKeys, rightKeys, Inner, boundCondition,
        leftPlan, rightPlan)
      EnsureRequirements.apply(sortMergeJoin)
    }
    testWithWholeStageCodegenOnAndOff(s"$testName using BroadcastHashJoin (build=left)") { _ =>
      extractJoinParts().foreach { case (_, leftKeys, rightKeys, boundCondition, _, _, _) =>
        withSQLConf(SQLConf.SHUFFLE_PARTITIONS.key -> "1") {
          checkAnswer2(leftRows, rightRows, (leftPlan: SparkPlan, rightPlan: SparkPlan) =>
            makeBroadcastHashJoin(
              leftKeys, rightKeys, boundCondition, leftPlan, rightPlan, BuildLeft),
            expectedAnswer.map(Row.fromTuple),
            sortAnswers = true)
        }
      }
    }
    testWithWholeStageCodegenOnAndOff(s"$testName using BroadcastHashJoin (build=right)") { _ =>
      extractJoinParts().foreach { case (_, leftKeys, rightKeys, boundCondition, _, _, _) =>
        withSQLConf(SQLConf.SHUFFLE_PARTITIONS.key -> "1") {
          checkAnswer2(leftRows, rightRows, (leftPlan: SparkPlan, rightPlan: SparkPlan) =>
            makeBroadcastHashJoin(
              leftKeys, rightKeys, boundCondition, leftPlan, rightPlan, BuildRight),
            expectedAnswer.map(Row.fromTuple),
            sortAnswers = true)
        }
      }
    }
    test(s"$testName using ShuffledHashJoin (build=left)") {
      extractJoinParts().foreach { case (_, leftKeys, rightKeys, boundCondition, _, _, _) =>
        withSQLConf(SQLConf.SHUFFLE_PARTITIONS.key -> "1") {
          checkAnswer2(leftRows, rightRows, (leftPlan: SparkPlan, rightPlan: SparkPlan) =>
            makeShuffledHashJoin(
              leftKeys, rightKeys, boundCondition, leftPlan, rightPlan, BuildLeft),
            expectedAnswer.map(Row.fromTuple),
            sortAnswers = true)
        }
      }
    }
    test(s"$testName using ShuffledHashJoin (build=right)") {
      extractJoinParts().foreach { case (_, leftKeys, rightKeys, boundCondition, _, _, _) =>
        withSQLConf(SQLConf.SHUFFLE_PARTITIONS.key -> "1") {
          checkAnswer2(leftRows, rightRows, (leftPlan: SparkPlan, rightPlan: SparkPlan) =>
            makeShuffledHashJoin(
              leftKeys, rightKeys, boundCondition, leftPlan, rightPlan, BuildRight),
            expectedAnswer.map(Row.fromTuple),
            sortAnswers = true)
        }
      }
    }
    testWithWholeStageCodegenOnAndOff(s"$testName using SortMergeJoin") { _ =>
      extractJoinParts().foreach { case (_, leftKeys, rightKeys, boundCondition, _, _, _) =>
        withSQLConf(SQLConf.SHUFFLE_PARTITIONS.key -> "1") {
          checkAnswer2(leftRows, rightRows, (leftPlan: SparkPlan, rightPlan: SparkPlan) =>
            makeSortMergeJoin(leftKeys, rightKeys, boundCondition, leftPlan, rightPlan),
            expectedAnswer.map(Row.fromTuple),
            sortAnswers = true)
        }
      }
    }
    // Cartesian product applies the full (non-equi) condition directly.
    test(s"$testName using CartesianProduct") {
      withSQLConf(SQLConf.SHUFFLE_PARTITIONS.key -> "1",
        SQLConf.CROSS_JOINS_ENABLED.key -> "true") {
        checkAnswer2(leftRows, rightRows, (left: SparkPlan, right: SparkPlan) =>
          CartesianProductExec(left, right, Some(condition())),
          expectedAnswer.map(Row.fromTuple),
          sortAnswers = true)
      }
    }
    test(s"$testName using BroadcastNestedLoopJoin build left") {
      withSQLConf(SQLConf.SHUFFLE_PARTITIONS.key -> "1") {
        checkAnswer2(leftRows, rightRows, (left: SparkPlan, right: SparkPlan) =>
          BroadcastNestedLoopJoinExec(left, right, BuildLeft, Inner, Some(condition())),
          expectedAnswer.map(Row.fromTuple),
          sortAnswers = true)
      }
    }
    test(s"$testName using BroadcastNestedLoopJoin build right") {
      withSQLConf(SQLConf.SHUFFLE_PARTITIONS.key -> "1") {
        checkAnswer2(leftRows, rightRows, (left: SparkPlan, right: SparkPlan) =>
          BroadcastNestedLoopJoinExec(left, right, BuildRight, Inner, Some(condition())),
          expectedAnswer.map(Row.fromTuple),
          sortAnswers = true)
      }
    }
  }
  // Null keys on both sides must be dropped by the inner join.
  testInnerJoin(
    "inner join, one match per row",
    myUpperCaseData,
    myLowerCaseData,
    () => (myUpperCaseData.col("N") === myLowerCaseData.col("n")).expr,
    Seq(
      (1, "A", 1, "a"),
      (2, "B", 2, "b"),
      (3, "C", 3, "c"),
      (4, "D", 4, "d")
    )
  )
  {
    lazy val left = myTestData1.where("a = 1")
    lazy val right = myTestData2.where("a = 1")
    // Duplicate keys on both sides produce the full cross product of matches.
    testInnerJoin(
      "inner join, multiple matches",
      left,
      right,
      () => (left.col("a") === right.col("a")).expr,
      Seq(
        (1, 1, 1, 1),
        (1, 1, 1, 2),
        (1, 2, 1, 1),
        (1, 2, 1, 2)
      )
    )
  }
  {
    lazy val left = myTestData1.where("a = 1")
    lazy val right = myTestData2.where("a = 2")
    testInnerJoin(
      "inner join, no matches",
      left,
      right,
      () => (left.col("a") === right.col("a")).expr,
      Seq.empty
    )
  }
  {
    lazy val left = Seq((1, Some(0)), (2, None)).toDF("a", "b")
    lazy val right = Seq((1, Some(0)), (2, None)).toDF("a", "b")
    // <=> is the null-safe equality operator: null <=> null matches.
    testInnerJoin(
      "inner join, null safe",
      left,
      right,
      () => (left.col("b") <=> right.col("b")).expr,
      Seq(
        (1, 0, 1, 0),
        (2, null, 2, null)
      )
    )
  }
  {
    def df: DataFrame = spark.range(3).selectExpr("struct(id, id) as key", "id as value")
    lazy val left = df.selectExpr("key", "concat('L', value) as value").alias("left")
    lazy val right = df.selectExpr("key", "concat('R', value) as value").alias("right")
    testInnerJoin(
      "SPARK-15822 - test structs as keys",
      left,
      right,
      () => (left.col("key") === right.col("key")).expr,
      Seq(
        (Row(0, 0), "L0", Row(0, 0), "R0"),
        (Row(1, 1), "L1", Row(1, 1), "R1"),
        (Row(2, 2), "L2", Row(2, 2), "R2")))
  }
}
| witgo/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/joins/InnerJoinSuite.scala | Scala | apache-2.0 | 10,098 |
package de.randombyte.nightmare_ai.config
import java.io.{IOException, File}
import com.google.common.base.Charsets
import com.google.common.io.Files
import com.google.gson.{JsonIOException, JsonSyntaxException, JsonParseException, GsonBuilder}
import org.slf4j.{MarkerFactory, Logger}
import org.spongepowered.api.{CatalogType, GameRegistry}
import org.spongepowered.api.entity.EntityType
import org.spongepowered.api.item.ItemType
object GsonConfigurationManager {
  // SLF4J marker used to tag all config-parsing log output.
  val marker = MarkerFactory.getMarker("ConfigParser")
}
/**
* Manages the config file representation of NightmareAiConfig
* @param file The File the config should be in
* @param registry GameRegistry for deserializing string representation of CatalogTypes
* @param version The version of the plugin(for maintaining compatibility with future versions)
* @param logger A Logger for pushing information to the user
*/
class GsonConfigurationManager(file: File, registry: GameRegistry, version: Double, logger: Logger) {
  // Shared Gson instance configured for this plugin's config format.
  val gson = {
    val builder = new GsonBuilder().setVersion(version)
    /* Some Sponge classes have fields whose names collide with fields in their superclass,
     which confuses GSON, so exclude such fields from both serialization and deserialization */
      .addSerializationExclusionStrategy(new SubclassExclusionStrategy())
      .addDeserializationExclusionStrategy(new SubclassExclusionStrategy())
      .enableComplexMapKeySerialization() //So keys of Maps are Strings provided by JsonSerializer and not by toString()
      .setPrettyPrinting()
    //Register used CatalogTypes for (de)serialization
    List(classOf[EntityType], classOf[ItemType])
      .foreach((clazz: Class[_ <: CatalogType]) => builder.registerTypeAdapter(clazz, new CatalogTypeDeSerializer(registry, clazz)))
    builder.create()
  }
  // Write a default config on first run so the user has a template to edit.
  if (!file.exists()) {
    logger.info("Configuration file doesn't exist -> creating default config file")
    save(new GsonNightmareAiConfig())
  }
  // Reads and validates the config file; returns None when parsing/validation fails.
  // NOTE(review): the catch branches evaluate `logger.e(...)`, which comes from an
  // implicit logger extension not visible here - presumably it returns None (or Nothing)
  // so the branches type-check against Option; confirm against the extension definition.
  def load(): Option[NightmareAiConfig] = {
    try {
      gson.fromJson(Files.newReader(file, Charsets.UTF_8), classOf[GsonNightmareAiConfig]).toConfig(logger)
    } catch {
      case ex: JsonParseException => logger.e(GsonConfigurationManager.marker, s"JsonParseException while reading config file: ${ex.getMessage}")
      case ex: JsonSyntaxException => logger.e(GsonConfigurationManager.marker, s"JsonSyntaxException while reading config file: ${ex.getMessage}")
      case ex: JsonIOException => logger.e(GsonConfigurationManager.marker, s"JsonIOException while reading config file: ${ex.getMessage}")
    }
  }
  // Serializes the given config to JSON and overwrites the config file.
  @throws(classOf[IOException])
  def save(config: GsonNightmareAiConfig): Unit = Files.write(gson.toJson(config), file, Charsets.UTF_8)
}
| randombyte-developer/NightmareAI | src/main/scala/de/randombyte/nightmare_ai/config/GsonConfigurationManager.scala | Scala | gpl-2.0 | 2,707 |
/**
* This file is part of agora-mixnet.
* Copyright (C) 2015-2016 Agora Voting SL <agora@agoravoting.com>
* agora-mixnet is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License.
* agora-mixnet is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
* You should have received a copy of the GNU Affero General Public License
* along with agora-mixnet. If not, see <http://www.gnu.org/licenses/>.
**/
package accumulator
import app._
import models._
import play.api.libs.json._
import scala.concurrent.{Future, Promise}
import scala.util.{Try, Success, Failure}
import akka.actor.ActorSystem
import akka.stream.{ActorMaterializer, Materializer}
import akka.http.scaladsl.model._
import utils._
import election.JsElection
import election.ElectionJsonFormatter
trait PostOffice extends ElectionJsonFormatter with Response with ErrorProcessing
{
  implicit val system = ActorSystem()
  implicit val executor = system.dispatchers.lookup("my-other-dispatcher")
  implicit val materializer = ActorMaterializer()
  // post index counter: index of the next post expected to be dequeued.
  // Guarded by `queue.synchronized` together with `queue` itself.
  private var index : Long = 0
  // Reorder buffer for posts that may arrive out of order; a None slot is a gap
  // waiting for the post with that index to arrive.
  private val queue = scala.collection.mutable.Queue[Option[Post]]()
  // the first parameter is the uid
  private val electionMap = scala.collection.mutable.Map[Long, MaintainerWrapper]()
  // list of callbacks to be called when a new election is created
  private val callbackQueue = scala.collection.mutable.Queue[String => Unit]()
  // Returns the election info as an HTTP response, or a 400 error if unknown.
  def getElectionInfo(electionId: Long) : Future[HttpResponse] = {
    val promise = Promise[HttpResponse]()
    Future {
      electionMap.get(electionId) match {
        case Some(electionWrapper) =>
          promise.success(HttpResponse(status = 200, entity = Json.stringify(response( electionWrapper.getElectionInfo() )) ))
        case None =>
          promise.success(HttpResponse(status = 400, entity = Json.stringify(error(s"Election $electionId not found", ErrorCodes.EO_ERROR)) ))
      }
    } recover { case err =>
      promise.trySuccess(HttpResponse(status = 400, entity = Json.stringify(error(getMessageFromThrowable(err), ErrorCodes.EO_ERROR)) ))
    }
    promise.future
  }
  // Returns the election results as an HTTP response; 400 if the election is
  // unknown or its results are not available yet.
  def getResults(electionId: Long) : Future[HttpResponse] = {
    val promise = Promise[HttpResponse]()
    Future {
      electionMap.get(electionId) match {
        case Some(electionWrapper) =>
          electionWrapper.getResults() match {
            case Some(results) =>
              promise.success(HttpResponse(status = 200, entity = Json.stringify(response( results )) ))
            case None =>
              promise.success(HttpResponse(status = 400, entity = Json.stringify(error(s"Election $electionId has no results yet", ErrorCodes.EO_ERROR)) ))
          }
        case None =>
          promise.success(HttpResponse(status = 400, entity = Json.stringify(error(s"Election $electionId not found", ErrorCodes.EO_ERROR)) ))
      }
    } recover { case err =>
      promise.trySuccess(HttpResponse(status = 400, entity = Json.stringify(error(getMessageFromThrowable(err), ErrorCodes.EO_ERROR)) ))
    }
    promise.future
  }
  // Adds a post to the reorder buffer. Posts older than `index` are rejected,
  // duplicates are rejected, and gaps are padded with None until the post's slot.
  // After insertion, `remove()` drains any posts that are now in order.
  def add(post: Post) {
    println("GG PostOffice::add")
    queue.synchronized {
      Try {
        post.board_attributes.index.toLong
      } map { postIndex =>
        if(postIndex < index) {
          println("Error: old post")
        } else if(postIndex >= index) {
          if(postIndex < index + queue.size) {
            // NOTE(review): relies on an Option-returning indexed accessor `get(Int)` on
            // mutable.Queue - confirm the Scala version / implicit in scope provides it
            queue.get((postIndex - index).toInt) map { x =>
              x match {
                case Some(p) =>
                  println("Error: duplicated post")
                case None =>
                  queue.update((postIndex - index).toInt, Some(post))
              }
            }
          } else {
            // pad the gap between the end of the buffer and this post's slot
            queue ++= List.fill((postIndex - (index + (queue.size).toLong)).toInt)(None)
            queue += Some(post)
          }
        }
      }
    }
    remove()
  }
  // Routes an in-order post: "election/create" posts spawn a new maintainer and fire
  // the creation callbacks; other groups are forwarded to the matching maintainer.
  private def send(post: Post) {
    println("send post index: " + post.board_attributes.index)
    if("election" == post.user_attributes.section) {
      val group : String = post.user_attributes.group
      if("create" == group) {
        electionMap.synchronized {
          val jsMsg = Json.parse(post.message)
          jsMsg.validate[JsElection] match {
            case jSeqPost: JsSuccess[JsElection] =>
              val electionIdStr = jSeqPost.get.state.id
              Try { electionIdStr.toLong } match {
                case Success(electionId) =>
                  val maintainer = new MaintainerWrapper(jSeqPost.get.level, electionIdStr)
                  maintainer.push(post)
                  electionMap += (electionId -> maintainer)
                  // notify listeners asynchronously; each callback runs in its own Future
                  callbackQueue.synchronized {
                    callbackQueue foreach { func =>
                      Future { func(electionIdStr) }
                    }
                  }
                case Failure(e) =>
                  println(s"Error: Election Id is not a number (but It should be): ${electionIdStr}")
              }
            case e: JsError =>
              println("Error: JsCreate format error")
          }
        }
      } else {
        Try { group.toLong } match {
          case Success(electionId) =>
            electionMap.synchronized {
              electionMap.get(electionId)
            } match {
              case Some(electionWrapper) =>
                electionWrapper.push(post)
              case None =>
                println(s"Error: Election Id not found in db: ${electionId}, post is: " + post.toString())
            }
          case Failure(e) =>
            println(s"Error: group is not a number : ${group}")
        }
      }
    } else {
      println("Error: post is not an election")
    }
  }
  // Pops the head of the buffer if it holds the next in-order post, advancing `index`.
  // Returns None when the buffer is empty or the next post has not arrived yet.
  private def getQueueHeadOpt() :Option[Post] = {
    queue.synchronized {
      if(queue.size > 0) {
        queue.head match {
          case Some(post) =>
            // TODO: here we should check the post hash and signature
            index = index + 1
            queue.dequeue
          case None =>
            None
        }
      } else {
        None
      }
    }
  }
  // Drains and dispatches all consecutive in-order posts from the buffer.
  private def remove() {
    println("GG PostOffice::remove")
    var head = getQueueHeadOpt()
    while (head != None) {
      send(head.get)
      head = getQueueHeadOpt()
    }
  }
  // Returns the subscriber for the given election uid; throws if the uid is not a
  // number or the election is unknown.
  def getSubscriber(uid : String) = {
    Try { uid.toLong } match {
      case Success(electionId) =>
        electionMap.get(electionId) match {
          case Some(electionWrapper) =>
            electionWrapper.getSubscriber()
          case None =>
            throw new scala.Error(s"Error subscribing: Election Id not found in db: ${electionId}")
        }
      case Failure(e) =>
        throw new scala.Error(s"Error subscribing: Election id is not a number: {uid}")
    }
  }
  // Registers a callback invoked (asynchronously) with the election id whenever a
  // new election is created.
  def addElectionCreationListener(callback: (String) => Unit) {
    callbackQueue.synchronized {
      callbackQueue += callback
    }
  }
}
| agoravoting/agora-mixnet | src/main/scala/accumulator/PostOffice.scala | Scala | agpl-3.0 | 7,263 |
package example.scalate
import org.fusesource.scalate.layout.DefaultLayoutStrategy
import org.scalatra.test.specs2.ScalatraSpec
import skinny.micro.contrib.{ ScalateSupport, FlashMapSupport }
import skinny.micro.SkinnyMicroServlet
class ScalateSupportSpec extends ScalatraSpec {
  // specs2 specification: one example (e1..e24) per ScalateSupport behavior.
  def is =
    "ScalateSupport should" ^
      "render uncaught errors with 500.scaml" ! e1 ^ br ^
      "not throw a NullPointerException for trivial requests" ! e2 ^ br ^
      "render a simple template" ! e3 ^ br ^
      "render a simple template with params" ! e4 ^ br ^
      "looks for layouts in /WEB-INF/layouts" ! e5 ^ br ^
      "render a simple template via jade method" ! e8 ^ br ^
      "render a simple template with params via jade method" ! e9 ^ br ^
      "render a simple template via scaml method" ! e10 ^ br ^
      "render a simple template with params via scaml method" ! e11 ^ br ^
      "render a simple template via ssp method" ! e12 ^ br ^
      "render a simple template with params via ssp method" ! e13 ^ br ^
      "render a simple template via mustache method" ! e14 ^ br ^
      "render a simple template with params via mustache method" ! e15 ^ br ^
      "looks for templates in legacy /WEB-INF/scalate/templates" ! e16 ^ br ^
      "looks for index page if no template found" ! e17 ^ br ^
      "implicitly bind flash" ! e18 ^ br ^
      "implicitly bind session" ! e19 ^ br ^
      "implicitly bind params" ! e20 ^ br ^
      "implicitly bind multiParams" ! e21 ^ br ^
      "set templateAttributes when creating a render context" ! e22 ^ br ^
      "render to a string instead of response" ! e23 ^ br ^
      "set status to 500 when rendering 500.scaml" ! e24 ^ br ^
      end
  // Servlet under test: one route per rendering feature exercised by the examples.
  addServlet(new SkinnyMicroServlet with ScalateSupport with FlashMapSupport {
    get("/barf") {
      throw new RuntimeException
    }
    get("/happy-happy") {
      "puppy dogs"
    }
    get("/simple-template") {
      layoutTemplate("/simple.jade")
    }
    get("/params") {
      layoutTemplate("/params.jade", "foo" -> "Configurable")
    }
    get("/jade-template") {
      jade("simple")
    }
    get("/jade-params") {
      jade("params", "foo" -> "Configurable")
    }
    get("/scaml-template") {
      scaml("simple")
    }
    get("/scaml-params") {
      scaml("params", "foo" -> "Configurable")
    }
    get("/ssp-template") {
      ssp("simple")
    }
    get("/ssp-params") {
      ssp("params", "foo" -> "Configurable")
    }
    get("/mustache-template") {
      mustache("simple")
    }
    get("/mustache-params") {
      mustache("params", "foo" -> "Configurable")
    }
    // exposes the configured default layout search paths for inspection
    get("/layout-strategy") {
      templateEngine.layoutStrategy.asInstanceOf[DefaultLayoutStrategy].defaultLayouts mkString ";"
    }
    get("/legacy-view-path") {
      jade("legacy")
    }
    get("/directory") {
      jade("directory/index")
    }
    // routes below exercise the implicit bindings (flash/session/params/multiParams)
    get("/bindings/*") {
      flash.now("message") = "flash works"
      session("message") = "session works"
      jade(requestPath)
    }
    get("/bindings/params/:foo") {
      jade("/bindings/params")
    }
    get("/bindings/multiParams/*/*") {
      jade("/bindings/multiParams")
    }
    get("/template-attributes") {
      templateAttributes("foo") = "from attributes"
      scaml("params")
    }
    get("/render-to-string") {
      response.setHeader("X-Template-Output", layoutTemplate("simple"))
    }
  }, "/*")
  def e1 = get("/barf") {
    body must contain("id=\\"scalate-error\\"")
  }
  def e2 = get("/happy-happy") {
    body must_== "puppy dogs"
  }
  def e3 = get("/simple-template") {
    body must_== "<div>Jade template</div>\\n"
  }
  def e4 = get("/params") {
    body must_== "<div>Configurable template</div>\\n"
  }
  // Testing the default layouts is going to be hard, but we can at least
  // verify that it's looking in the right place.
  def e5 = get("/layout-strategy") {
    body must_== (List(
      "/WEB-INF/templates/layouts/default.mustache",
      "/WEB-INF/templates/layouts/default.ssp",
      "/WEB-INF/templates/layouts/default.scaml",
      "/WEB-INF/templates/layouts/default.jade",
      "/WEB-INF/layouts/default.mustache",
      "/WEB-INF/layouts/default.ssp",
      "/WEB-INF/layouts/default.scaml",
      "/WEB-INF/layouts/default.jade",
      "/WEB-INF/scalate/layouts/default.mustache",
      "/WEB-INF/scalate/layouts/default.ssp",
      "/WEB-INF/scalate/layouts/default.scaml",
      "/WEB-INF/scalate/layouts/default.jade"
    ) mkString ";")
  }
  def e8 = get("/jade-template") {
    body must_== "<div>Jade template</div>\\n"
  }
  def e9 = get("/jade-params") {
    body must_== "<div>Configurable template</div>\\n"
  }
  def e10 = get("/scaml-template") {
    body must_== "<div>Scaml template</div>\\n"
  }
  def e11 = get("/scaml-params") {
    body must_== "<div>Configurable template</div>\\n"
  }
  def e12 = get("/ssp-template") {
    body must_== "<div>SSP template</div>"
  }
  def e13 = get("/ssp-params") {
    body must_== "<div>Configurable template</div>\\n"
  }
  def e14 = get("/mustache-template") {
    body must_== "<div>Mustache template</div>\\n"
  }
  def e15 = get("/mustache-params") {
    body must_== "<div>Configurable template</div>\\n"
  }
  def e16 = get("/legacy-view-path") {
    body must_== "<p>legacy</p>\\n"
  }
  def e17 = get("/directory") {
    body must_== "<p>index</p>\\n"
  }
  def e18 = get("/bindings/flash") {
    body must_== "<div>flash works</div>\\n"
  }
  def e19 = get("/bindings/session") {
    body must_== "<div>session works</div>\\n"
  }
  def e20 = get("/bindings/params/bar") {
    body must_== "<div>bar</div>\\n"
  }
  def e21 = get("/bindings/multiParams/bar/baz") {
    body must_== "<div>bar;baz</div>\\n"
  }
  def e22 = get("/template-attributes") {
    body must_== "<div>from attributes template</div>\\n"
  }
  def e23 = get("/render-to-string") {
    val hdr = header("X-Template-Output")
    hdr must_== "<div>SSP template</div>"
  }
  def e24 = get("/barf") {
    status must_== 500
  }
}
| xerial/skinny-micro | micro-scalate/src/test/scala/example/scalate/ScalateSupportSpec.scala | Scala | bsd-2-clause | 5,999 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.libs.ws
import java.net.URI
import akka.stream.scaladsl.Source
import akka.util.ByteString
import play.api.libs.json.JsValue
import scala.xml.Elem
/**
 * A WS Response that can use Play specific classes.
 */
trait WSResponse extends StandaloneWSResponse with WSBodyReadables {
  /**
   * The response status code.
   */
  override def status: Int
  /**
   * The response status message.
   */
  override def statusText: String
  /**
   * Return the current headers for this response.
   */
  override def headers: Map[String, scala.collection.Seq[String]]
  /**
   * Get the underlying response object.
   */
  override def underlying[T]: T
  /**
   * Get all the cookies.
   */
  override def cookies: scala.collection.Seq[WSCookie]
  /**
   * Get only one cookie, using the cookie name.
   */
  override def cookie(name: String): Option[WSCookie]
  /**
   * The content type of the response (delegates to the standalone implementation).
   */
  override def contentType: String = super.contentType
  /**
   * A single value for the given header name, if present
   * (delegates to the standalone implementation).
   */
  override def header(name: String): Option[String] = super.header(name)
  /**
   * All values for the given header name
   * (delegates to the standalone implementation).
   */
  override def headerValues(name: String): scala.collection.Seq[String] = super.headerValues(name)
  /**
   * The response body as the given type. This renders as the given type.
   * You must have a BodyReadable in implicit scope, which is done with
   *
   * {{{
   * class MyClass extends play.api.libs.ws.WSBodyReadables {
   *   // JSON and XML body readables
   * }
   * }}}
   *
   * The simplest use case is
   *
   * {{{
   * val responseBodyAsString: String = response.getBody[String]
   * }}}
   *
   * But you can also render as JSON
   *
   * {{{
   * val responseBodyAsJson: JsValue = response.getBody[JsValue]
   * }}}
   *
   * or as XML:
   *
   * {{{
   * val xml: Elem = response.getBody[Elem]
   * }}}
   */
  override def body[T: BodyReadable]: T = super.body[T]
  /**
   * The response body as String.
   */
  override def body: String
  /**
   * The response body as a byte string.
   */
  override def bodyAsBytes: ByteString
  /**
   * The response body as a stream of bytes.
   */
  override def bodyAsSource: Source[ByteString, _]
  /**
   * All response headers (deprecated alias of [[headers]]).
   */
  @deprecated("Use response.headers", "2.6.0")
  def allHeaders: Map[String, scala.collection.Seq[String]]
  /** The response body as XML. */
  def xml: Elem
  /** The response body as JSON. */
  def json: JsValue
}
| benmccann/playframework | transport/client/play-ws/src/main/scala/play/api/libs/ws/WSResponse.scala | Scala | apache-2.0 | 2,229 |
package okapies.finagle.kafka
import org.jboss.netty.buffer.ChannelBuffers
/**
 * Kafka protocol model helpers. The implicit conversions below let plain
 * Scala values (Short codes, Strings, key/value pairs) be used where the
 * protocol's wrapper types are expected.
 *
 * NOTE(review): implicit widening from common types like Short and String
 * can be surprising at use sites; keep these imports narrowly scoped.
 */
package object protocol {
  /*
   * Implicit conversions
   */
  import scala.language.implicitConversions
  // Short ack count -> RequiredAcks value object
  implicit def asRequiredAcks(requiredAcks: Short) = RequiredAcks(requiredAcks)
  // String value -> keyless Message, encoded with the protocol's default charset
  implicit def asMessage(value: String): Message =
    Message.create(ChannelBuffers.wrappedBuffer(value.getBytes(Spec.DefaultCharset)))
  // (key, value) pair -> keyed Message; entry._1 is the key, entry._2 the value
  implicit def asMessage(entry: (String, String)): Message =
    Message.create(
      ChannelBuffers.wrappedBuffer(entry._2.getBytes(Spec.DefaultCharset)),
      Option(ChannelBuffers.wrappedBuffer(entry._1.getBytes(Spec.DefaultCharset))))
  // Short error code -> KafkaError value object
  implicit def asKafkaError(code: Short): KafkaError = KafkaError(code)
}
| yonglehou/finagle-kafka | src/main/scala/okapies/finagle/kafka/protocol/package.scala | Scala | apache-2.0 | 722 |
/*
* Copyright (c) 2012-2019 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.enrich.common
package enrichments.registry.sqlquery
// Scalaz
import scalaz._
import Scalaz._
// Java
import java.sql._
/**
 * Common trait for all Databases
 * Contains exception-free logic wrapping JDBC to acquire DB-connection
 * and handle its lifecycle
 */
trait Rdbms {
  /**
   * Placeholder for database driver (not used)
   */
  val driver: Class[_]
  /**
   * Correctly generated connection URI specific for database
   */
  val connectionString: String
  /**
   * Cached connection; it persists while it is open. After closing, [[getConnection]]
   * will try to reinitialize it.
   *
   * NOTE(review): this mutable field is unsynchronized — confirm the enrichment
   * only ever touches it from a single thread.
   */
  private[this] var lastConnection: ThrowableXor[Connection] =
    InvalidStateException("SQL Query Enrichment: Connection hasn't been initialized").left
  /**
   * Try to initialize new connection if cached one is closed or wasn't
   * acquired successfully
   *
   * @return successful connection if it was in cache or initialized or
   *         [[Throwable]] as failure
   */
  def getConnection: ThrowableXor[Connection] = lastConnection match {
    // reuse the cached connection while it is still open
    case \\/-(c) if !c.isClosed => c.right
    case _ =>
      // only SQLException is captured here; any other failure from the
      // driver propagates to the caller
      try { lastConnection = DriverManager.getConnection(connectionString).right } catch {
        case e: SQLException => lastConnection = e.left
      }
      lastConnection
  }
  /**
   * Execute filled [[PreparedStatement]], capturing SQL failures on the left.
   */
  def execute(preparedStatement: PreparedStatement): ThrowableXor[ResultSet] =
    try {
      preparedStatement.executeQuery().right
    } catch {
      case e: SQLException => e.left
    }
  /**
   * Get amount of placeholders (?-signs) in [[PreparedStatement]]
   *
   * NOTE(review): scalaz's `fromTryCatch` catches all Throwables — consider
   * `fromTryCatchNonFatal` so fatal errors still propagate.
   */
  def getPlaceholderCount(preparedStatement: PreparedStatement): ThrowableXor[Int] =
    \\/ fromTryCatch preparedStatement.getParameterMetaData.getParameterCount
  /**
   * Transform SQL-string with placeholders (?-signs) into [[PreparedStatement]],
   * acquiring (or reusing) a connection first.
   */
  def createEmptyStatement(sql: String): ThrowableXor[PreparedStatement] =
    for { connection <- getConnection } yield connection.prepareStatement(sql)
}
/**
 * Connection configuration for databases speaking the PostgreSQL dialect.
 *
 * Produces a JDBC URI carrying the credentials as query parameters and,
 * when `sslMode` is set, appends SSL options using a non-validating factory.
 */
case class PostgresqlDb(
  host: String,
  port: Int,
  sslMode: Boolean,
  username: String,
  password: String,
  database: String
) extends Rdbms {
  // Eagerly load the JDBC driver class
  val driver: Class[_] = Class.forName("org.postgresql.Driver")
  // NOTE(review): credentials are embedded in the URI and may leak into logs
  val connectionString = {
    val base = s"jdbc:postgresql://$host:$port/$database?user=$username&password=$password"
    if (sslMode) base ++ "&ssl=true&sslfactory=org.postgresql.ssl.NonValidatingFactory"
    else base
  }
}
/**
 * Connection configuration for databases speaking the MySQL dialect.
 *
 * Produces a JDBC URI carrying the credentials as query parameters and,
 * when `sslMode` is set, appends SSL options without server-cert verification.
 */
case class MysqlDb(
  host: String,
  port: Int,
  sslMode: Boolean,
  username: String,
  password: String,
  database: String
) extends Rdbms {
  // Eagerly load the JDBC driver class
  val driver: Class[_] = Class.forName("com.mysql.jdbc.Driver")
  // NOTE(review): credentials are embedded in the URI and may leak into logs
  val connectionString = {
    val base = s"jdbc:mysql://$host:$port/$database?user=$username&password=$password"
    if (sslMode) base ++ "&useSsl=true&verifyServerCertificate=false"
    else base
  }
}
| RetentionGrid/snowplow | 3-enrich/scala-common-enrich/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Rdbms.scala | Scala | apache-2.0 | 4,198 |
/*
* -╥⌐⌐⌐⌐ -⌐⌐⌐⌐-
* ≡╢░░░░⌐\\░░░φ ╓╝░░░░⌐░░░░╪╕
* ╣╬░░` `░░░╢┘ φ▒╣╬╝╜ ░░╢╣Q
* ║╣╬░⌐ ` ╤▒▒▒Å` ║╢╬╣
* ╚╣╬░⌐ ╔▒▒▒▒`«╕ ╢╢╣▒
* ╫╬░░╖ .░ ╙╨╨ ╣╣╬░φ ╓φ░╢╢Å
* ╙╢░░░░⌐"░░░╜ ╙Å░░░░⌐░░░░╝`
* ``˚¬ ⌐ ˚˚⌐´
*
* Copyright © 2016 Flipkart.com
*/
package com.flipkart.connekt.commons.services
import com.flipkart.connekt.commons.dao.DaoFactory
import com.flipkart.connekt.commons.entities.WAMessageIdMappingEntity
import com.flipkart.connekt.commons.metrics.Instrumented
import com.flipkart.metrics.Timed
import scala.util.Try
/**
 * Service facade over the WhatsApp message-id mapping DAO. Each operation is
 * metered via [[Instrumented]] (`@Timed` annotation plus `profile` wrapper)
 * and reports failures through the returned [[scala.util.Try]].
 */
object WAMessageIdMappingService extends Instrumented {
  // DAO is resolved lazily on first use
  private lazy val dao = DaoFactory.getWaMessageIdMappingDao
  /** Persists the given mapping entity. */
  @Timed("add")
  def add(waMessageIdMappingEntity: WAMessageIdMappingEntity): Try[Unit] = profile("add") {
    dao.add(waMessageIdMappingEntity)
  }
  /** Looks up a mapping by app name and WhatsApp message id. */
  @Timed("get")
  def get(appName: String, waMessageId: String): Try[Option[WAMessageIdMappingEntity]] = profile(s"get") {
    dao.get(appName, waMessageId)
  }
}
| Flipkart/connekt | commons/src/main/scala/com/flipkart/connekt/commons/services/WAMessageIdMappingService.scala | Scala | mit | 1,325 |
package it.trenzalore.build.settings
/** Shared constants for the sbt build definition of the HDFS compactor. */
object ProjectSettings {
  // sbt `organization` (artifact group) value
  val organization = "it.trenzalore"
  // base package of the compactor sources
  val basePackages = "it.trenzalore.hdfs.compactor"
}
| JunkieLand/HDFS-Compactor | hdfs-compactor-build/src/main/scala/it/trenzalore/build/settings/ProjectSettings.scala | Scala | apache-2.0 | 157 |
package teststate.data
import japgolly.microlibs.name_fn._
/** Pairs an error payload of type `E` with a human-readable [[Name]]. */
case class NamedError[+E](name: Name, error: E) {
  /** Rewrites the error payload with `f`, leaving the name untouched. */
  def map[F](f: E => F): NamedError[F] =
    copy(error = f(error))
}
| japgolly/test-state | core/shared/src/main/scala/teststate/data/NamedError.scala | Scala | apache-2.0 | 186 |
/*
* Copyright (C) 2016-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.internal.scaladsl.persistence.cassandra
import akka.actor.ActorSystem
import com.lightbend.lagom.internal.persistence.ReadSideConfig
import com.lightbend.lagom.internal.persistence.cassandra.{ CassandraReadSideSettings, CassandraOffsetStore }
import com.lightbend.lagom.scaladsl.persistence.cassandra.CassandraSession
import scala.concurrent.ExecutionContext
/**
 * Internal API
 *
 * Scala-API adapter for [[CassandraOffsetStore]]: unwraps the scaladsl
 * [[CassandraSession]] to its underlying `delegate` session, which is
 * what the shared implementation expects.
 */
private[lagom] final class ScaladslCassandraOffsetStore(system: ActorSystem, session: CassandraSession,
                                                        cassandraReadSideSettings: CassandraReadSideSettings,
                                                        config: ReadSideConfig)(implicit ec: ExecutionContext)
  extends CassandraOffsetStore(system, session.delegate, cassandraReadSideSettings, config)
package org.nisshiee.crawler
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import scalaz._, Scalaz._
import scalaz.contrib.std._
/**
 * Typeclass instances for [[Fv]], a future-of-validation stack:
 * each `Fv[A]` wraps a `Future[Vld[A]]`, exposed via `.self`.
 */
trait FvInstances {
  /** Monad instance threading values through both the Future and Vld layers. */
  implicit object fvInstance extends Monad[Fv] {
    override def bind[A, B](fa: Fv[A])(f: (A) => Fv[B]): Fv[B] = {
      // unwrap to the underlying Future[Vld[A]]
      val Fva: Future[Vld[A]] = fa.self
      val Fvb: Future[Vld[B]] = Fva.flatMap { va =>
        // map f over the validation layer, yielding Vld[Fv[B]]
        val vfvb: Vld[Fv[B]] = va.map(f)
        // unwrap each inner Fv to its Future representation
        val vFvb: Vld[Future[Vld[B]]] = vfvb.map(_.self)
        // sequence swaps the layers: Vld[Future[...]] => Future[Vld[Vld[B]]]
        val Fvvb: Future[Vld[Vld[B]]] = vFvb.sequence
        // join collapses the doubly-nested validation
        Fvvb.map(_.join)
      }
      FvImpl(Fvb)
    }
    // lift a pure value through both Applicative layers
    override def point[A](a: => A): Fv[A] =
      FvImpl(Applicative[Future].point(Applicative[Vld].point(a)))
  }
}
| nisshiee/async-crawler | core/src/main/scala/instances/Fv.scala | Scala | mit | 734 |
package org.synyx.git
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
@RunWith(classOf[JUnitRunner])
class GittaTest extends FunSuite with ShouldMatchers {
  // A message addressed to the bot ("Gitta: ...") yields the stripped command.
  test("message help") {
    // NOTE(review): nulls assume privateMessage never touches the
    // constructor dependencies — confirm against Gitta's implementation
    val gitta = new Gitta(null, null)
    gitta.privateMessage("Gitta: help") should be("help")
  }
  // A message not addressed to the bot yields an empty reply.
  test("ignored message") {
    val gitta = new Gitta(null, null)
    gitta.privateMessage("ignored message") should be("")
  }
}
| fhopf/gitta | src/test/scala/org/synyx/git/GittaTest.scala | Scala | apache-2.0 | 513 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.sinks
import java.util
import org.apache.flink.table.descriptors.StreamTableDescriptorValidator.{UPDATE_MODE, UPDATE_MODE_VALUE_APPEND}
import org.apache.flink.table.factories.StreamTableSinkFactory
import org.apache.flink.types.Row
/**
 * Factory base for creating configured instances of [[CsvTableSink]] in a stream environment.
 */
class CsvAppendTableSinkFactory
  extends CsvTableSinkFactoryBase
  with StreamTableSinkFactory[Row] {
  // Narrow the base factory's matching context so this factory only applies
  // to descriptors declaring append-only update mode.
  override def requiredContext(): util.Map[String, String] = {
    val context = new util.HashMap[String, String](super.requiredContext())
    context.put(UPDATE_MODE, UPDATE_MODE_VALUE_APPEND)
    context
  }
  // Delegate to the shared builder, flagging the streaming environment.
  override def createStreamTableSink(
      properties: util.Map[String, String])
    : StreamTableSink[Row] = {
    createTableSink(isStreaming = true, properties)
  }
}
| ueshin/apache-flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/sinks/CsvAppendTableSinkFactory.scala | Scala | apache-2.0 | 1,665 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.convert.osm
import com.typesafe.config.ConfigFactory
import org.junit.runner.RunWith
import org.locationtech.geomesa.convert.SimpleFeatureConverters
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class OsmConverterLoadTest extends Specification {
  sequential
  // note: this file isn't checked into git because it's pretty large
  def getFile = getClass.getClassLoader.getResourceAsStream("northern-ireland.osm")
  "OSM Converter" should {
    "parse large nodes files" >> {
      skipped("integration test")
      val sftConf = ConfigFactory.parseString(
        """{ type-name = "osmNodeType"
          |  attributes = [
          |    { name = "user", type = "String" }
          |    { name = "tags", type = "Map[String,String]" }
          |    { name = "dtg", type = "Date", default = "true" }
          |    { name = "geom", type = "Point", default = "true" }
          |  ]
          |}
        """.stripMargin)
      val parserConf = ConfigFactory.parseString(
        """
          | {
          |   type = "osm-nodes"
          |   id-field = "$id"
          |   fields = [
          |     { name = "id", attribute = "id", transform = "toString($0)" }
          |     { name = "user", attribute = "user" }
          |     { name = "tags", attribute = "tags" }
          |     { name = "dtg", attribute = "timestamp" }
          |     { name = "geom", attribute = "geometry" }
          |   ]
          | }
        """.stripMargin)
      val nodesSft = SimpleFeatureTypes.createType(sftConf)
      val converter = SimpleFeatureConverters.build[Any](nodesSft, parserConf)
      // fix: close the converter even if parsing fails (mirrors the ways test below)
      try {
        val start = System.currentTimeMillis()
        val features = converter.process(getFile)
        val count = features.length
        println(s"parsed $count node features in ${System.currentTimeMillis() - start}ms")
        // parsed 3278174 node features in 15817ms
      } finally {
        converter.close()
      }
      ok
    }
    "parse large ways files" >> {
      skipped("integration test")
      val sftConf = ConfigFactory.parseString(
        """{ type-name = "osmWayType"
          |  attributes = [
          |    { name = "user", type = "String" }
          |    { name = "tags", type = "Map[String,String]" }
          |    { name = "dtg", type = "Date", default = "true" }
          |    { name = "geom", type = "LineString", default = "true" }
          |  ]
          |}
        """.stripMargin)
      val parserConf = ConfigFactory.parseString(
        """
          | {
          |   type = "osm-ways"
          |   id-field = "$id"
          |   fields = [
          |     { name = "id", attribute = "id", transform = "toString($0)" }
          |     { name = "user", attribute = "user" }
          |     { name = "tags", attribute = "tags" }
          |     { name = "name", transform = "mapValue($tags, 'name')" }
          |     // { name = "name", attribute = "tags" transform = "mapValue($0, 'name')" }
          |     { name = "dtg", attribute = "timestamp" }
          |     { name = "geom", attribute = "geometry" }
          |   ]
          | }
        """.stripMargin)
      val waysSft = SimpleFeatureTypes.createType(sftConf)
      val converter = SimpleFeatureConverters.build[Any](waysSft, parserConf)
      try {
        val start = System.currentTimeMillis()
        val features = converter.process(getFile)
        val count = features.length
        println(s"parsed $count way features in ${System.currentTimeMillis() - start}ms")
        // parsed 238717 way features in 39180ms
      } finally {
        converter.close()
      }
      ok
    }
  }
}
| jahhulbert-ccri/geomesa | geomesa-convert/geomesa-convert-osm/src/test/scala/org/locationtech/geomesa/convert/osm/OsmConverterLoadTest.scala | Scala | apache-2.0 | 4,193 |
// Brevity3.scala
import com.atomicscala.AtomicTest._
// Keeps only the odd values strictly below 10, preserving input order.
def filterWithYield3(v: Vector[Int]): Vector[Int] =
  v.filter(n => n < 10 && n % 2 != 0)
// Sample input mixing evens, odds, and values >= 10.
val v = Vector(1,2,3,5,6,7,8,10,13,14,17)
// AtomicTest's `is` asserts the expected filtered result.
filterWithYield3(v) is Vector(1,3,5,7)
| P7h/ScalaPlayground | Atomic Scala/atomic-scala-examples/examples/32_Brevity/Brevity3.scala | Scala | apache-2.0 | 253 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.data
import play.api.Configuration
import play.api.Environment
import play.api.data.Forms._
import play.api.data.validation.Constraints._
import play.api.data.format.Formats._
import play.api.i18n._
import play.api.libs.json.Json
import org.specs2.mutable.Specification
import play.api.http.HttpConfiguration
import play.api.libs.Files.TemporaryFile
import play.api.mvc.MultipartFormData
import play.core.test.FakeRequest
class FormSpec extends Specification {
import FormBinding.Implicits.formBinding
"A form" should {
"have an error due to a malformed email" in {
val f5 = ScalaForms.emailForm.fillAndValidate(("john@", "John"))
f5.errors must haveSize(1)
f5.errors.find(_.message == "error.email") must beSome
val f6 = ScalaForms.emailForm.fillAndValidate(("john@zen.....com", "John"))
f6.errors must haveSize(1)
f6.errors.find(_.message == "error.email") must beSome
}
"be valid with a well-formed email" in {
val f7 = ScalaForms.emailForm.fillAndValidate(("john@zen.com", "John"))
f7.errors must beEmpty
val f8 = ScalaForms.emailForm.fillAndValidate(("john@zen.museum", "John"))
f8.errors must beEmpty
val f9 = ScalaForms.emailForm.fillAndValidate(("john@mail.zen.com", "John"))
f9.errors must beEmpty
ScalaForms.emailForm.fillAndValidate(("o'flynn@example.com", "O'Flynn")).errors must beEmpty
}
"bind params when POSTing a multipart body" in {
val multipartBody = MultipartFormData[TemporaryFile](
dataParts = Map("email" -> Seq("michael@jackson.com")),
files = Seq.empty,
badParts = Seq.empty
)
implicit val request = FakeRequest(method = "POST", "/").withMultipartFormDataBody(multipartBody)
val f1 = ScalaForms.updateForm.bindFromRequest()
f1.errors must beEmpty
f1.get must equalTo((Some("michael@jackson.com"), None))
}
"query params ignored when using POST" in {
implicit val request = FakeRequest(method = "POST", "/?email=bob%40marley.com&name=john")
.withFormUrlEncodedBody("email" -> "michael@jackson.com")
val f1 = ScalaForms.updateForm.bindFromRequest()
f1.errors must beEmpty
f1.get must equalTo((Some("michael@jackson.com"), None))
}
"query params ignored when using PUT" in {
implicit val request = FakeRequest(method = "PUT", "/?email=bob%40marley.com&name=john")
.withFormUrlEncodedBody("email" -> "michael@jackson.com")
val f1 = ScalaForms.updateForm.bindFromRequest()
f1.errors must beEmpty
f1.get must equalTo((Some("michael@jackson.com"), None))
}
"query params ignored when using PATCH" in {
implicit val request = FakeRequest(method = "PATCH", "/?email=bob%40marley.com&name=john")
.withFormUrlEncodedBody("email" -> "michael@jackson.com")
val f1 = ScalaForms.updateForm.bindFromRequest()
f1.errors must beEmpty
f1.get must equalTo((Some("michael@jackson.com"), None))
}
"query params NOT ignored when using GET" in {
implicit val request = FakeRequest(method = "GET", "/?email=bob%40marley.com&name=john")
.withFormUrlEncodedBody("email" -> "michael@jackson.com")
val f1 = ScalaForms.updateForm.bindFromRequest()
f1.errors must beEmpty
f1.get must equalTo((Some("bob@marley.com"), Some("john")))
}
"query params NOT ignored when using DELETE" in {
implicit val request = FakeRequest(method = "DELETE", "/?email=bob%40marley.com&name=john")
.withFormUrlEncodedBody("email" -> "michael@jackson.com")
val f1 = ScalaForms.updateForm.bindFromRequest()
f1.errors must beEmpty
f1.get must equalTo((Some("bob@marley.com"), Some("john")))
}
"query params NOT ignored when using HEAD" in {
implicit val request = FakeRequest(method = "HEAD", "/?email=bob%40marley.com&name=john")
.withFormUrlEncodedBody("email" -> "michael@jackson.com")
val f1 = ScalaForms.updateForm.bindFromRequest()
f1.errors must beEmpty
f1.get must equalTo((Some("bob@marley.com"), Some("john")))
}
"query params NOT ignored when using OPTIONS" in {
implicit val request = FakeRequest(method = "OPTIONS", "/?email=bob%40marley.com&name=john")
.withFormUrlEncodedBody("email" -> "michael@jackson.com")
val f1 = ScalaForms.updateForm.bindFromRequest()
f1.errors must beEmpty
f1.get must equalTo((Some("bob@marley.com"), Some("john")))
}
"support mapping 22 fields" in {
val form = Form(
tuple(
"k1" -> of[String],
"k2" -> of[String],
"k3" -> of[String],
"k4" -> of[String],
"k5" -> of[String],
"k6" -> of[String],
"k7" -> of[String],
"k8" -> of[String],
"k9" -> of[String],
"k10" -> of[String],
"k11" -> of[String],
"k12" -> of[String],
"k13" -> of[String],
"k14" -> of[String],
"k15" -> of[String],
"k16" -> of[String],
"k17" -> of[String],
"k18" -> of[String],
"k19" -> of[String],
"k20" -> of[String],
"k21" -> of[String],
"k22" -> of[String]
)
)
form
.bind(
Map(
"k1" -> "v1",
"k2" -> "v2",
"k3" -> "v3",
"k4" -> "v4",
"k5" -> "v5",
"k6" -> "v6",
"k7" -> "v7",
"k8" -> "v8",
"k9" -> "v9",
"k10" -> "v10",
"k11" -> "v11",
"k12" -> "v12",
"k13" -> "v13",
"k14" -> "v14",
"k15" -> "v15",
"k16" -> "v16",
"k17" -> "v17",
"k18" -> "v18",
"k19" -> "v19",
"k20" -> "v20",
"k21" -> "v21",
"k22" -> "v22"
)
)
.fold(_ => "errors", t => t._21) must_== "v21"
}
"apply constraints on wrapped mappings" in {
"when it binds data" in {
val f1 = ScalaForms.form.bind(Map("foo" -> "0"))
f1.errors must haveSize(1)
f1.errors.find(_.message == "first.digit") must beSome
val f2 = ScalaForms.form.bind(Map("foo" -> "3"))
f2.errors must beEmpty
val f3 = ScalaForms.form.bind(Map("foo" -> "50"))
f3.errors must haveSize(1) // Only one error because "number.42" can’t be applied since wrapped bind failed
f3.errors.find(_.message == "first.digit") must beSome
val f4 = ScalaForms.form.bind(Map("foo" -> "333"))
f4.errors must haveSize(1)
f4.errors.find(_.message == "number.42") must beSome
}
"when it is filled with data" in {
val f1 = ScalaForms.form.fillAndValidate(0)
f1.errors must haveSize(1)
f1.errors.find(_.message == "first.digit") must beSome
val f2 = ScalaForms.form.fillAndValidate(3)
f2.errors must beEmpty
val f3 = ScalaForms.form.fillAndValidate(50)
f3.errors must haveSize(2)
f3.errors.find(_.message == "first.digit") must beSome
f3.errors.find(_.message == "number.42") must beSome
val f4 = ScalaForms.form.fillAndValidate(333)
f4.errors must haveSize(1)
f4.errors.find(_.message == "number.42") must beSome
}
}
"apply constraints on longNumber fields" in {
val f1 = ScalaForms.longNumberForm.fillAndValidate(0)
f1.errors must haveSize(1)
f1.errors.find(_.message == "error.min") must beSome
val f2 = ScalaForms.longNumberForm.fillAndValidate(9000)
f2.errors must haveSize(1)
f2.errors.find(_.message == "error.max") must beSome
val f3 = ScalaForms.longNumberForm.fillAndValidate(10)
f3.errors must beEmpty
val f4 = ScalaForms.longNumberForm.fillAndValidate(42)
f4.errors must beEmpty
}
"apply constraints on shortNumber fields" in {
val f1 = ScalaForms.shortNumberForm.fillAndValidate(0)
f1.errors must haveSize(1)
f1.errors.find(_.message == "error.min") must beSome
val f2 = ScalaForms.shortNumberForm.fillAndValidate(9000)
f2.errors must haveSize(1)
f2.errors.find(_.message == "error.max") must beSome
val f3 = ScalaForms.shortNumberForm.fillAndValidate(10)
f3.errors must beEmpty
val f4 = ScalaForms.shortNumberForm.fillAndValidate(42)
f4.errors must beEmpty
}
"apply constraints on byteNumber fields" in {
val f1 = ScalaForms.byteNumberForm.fillAndValidate(0)
f1.errors must haveSize(1)
f1.errors.find(_.message == "error.min") must beSome
val f2 = ScalaForms.byteNumberForm.fillAndValidate(9000)
f2.errors must haveSize(1)
f2.errors.find(_.message == "error.max") must beSome
val f3 = ScalaForms.byteNumberForm.fillAndValidate(10)
f3.errors must beEmpty
val f4 = ScalaForms.byteNumberForm.fillAndValidate(42)
f4.errors must beEmpty
}
"apply constraints on char fields" in {
val f = ScalaForms.charForm.fillAndValidate('M')
f.errors must beEmpty
}
"not even attempt to validate on fill" in {
val failingValidatorForm = Form(
"foo" -> Forms.text.verifying(
"isEmpty",
s =>
if (s.isEmpty) true
else throw new AssertionError("Validation was run when it wasn't meant to")
)
)
failingValidatorForm.fill("foo").errors must beEmpty
}
}
"render form using field[Type] syntax" in {
val anyData = Map("email" -> "bob@gmail.com", "password" -> "123")
ScalaForms.loginForm.bind(anyData).get.toString must equalTo("(bob@gmail.com,123)")
}
"support default values" in {
ScalaForms.defaultValuesForm.bindFromRequest(Map()).get must equalTo((42, "default text"))
ScalaForms.defaultValuesForm.bindFromRequest(Map("name" -> Seq("another text"))).get must equalTo(
(42, "another text")
)
ScalaForms.defaultValuesForm.bindFromRequest(Map("pos" -> Seq("123"))).get must equalTo((123, "default text"))
ScalaForms.defaultValuesForm
.bindFromRequest(Map("pos" -> Seq("123"), "name" -> Seq("another text")))
.get must equalTo((123, "another text"))
val f1 = ScalaForms.defaultValuesForm.bindFromRequest(Map("pos" -> Seq("abc")))
f1.errors must haveSize(1)
}
"support repeated values" in {
ScalaForms.repeatedForm.bindFromRequest(Map("name" -> Seq("Kiki"))).get must equalTo(("Kiki", Seq()))
ScalaForms.repeatedForm
.bindFromRequest(Map("name" -> Seq("Kiki"), "emails[0]" -> Seq("kiki@gmail.com")))
.get must equalTo(("Kiki", Seq("kiki@gmail.com")))
ScalaForms.repeatedForm
.bindFromRequest(
Map("name" -> Seq("Kiki"), "emails[0]" -> Seq("kiki@gmail.com"), "emails[1]" -> Seq("kiki@zen.com"))
)
.get must equalTo(("Kiki", Seq("kiki@gmail.com", "kiki@zen.com")))
ScalaForms.repeatedForm
.bindFromRequest(Map("name" -> Seq("Kiki"), "emails[0]" -> Seq(), "emails[1]" -> Seq("kiki@zen.com")))
.hasErrors must equalTo(true)
ScalaForms.repeatedForm
.bindFromRequest(Map("name" -> Seq("Kiki"), "emails[]" -> Seq("kiki@gmail.com")))
.get must equalTo(("Kiki", Seq("kiki@gmail.com")))
ScalaForms.repeatedForm
.bindFromRequest(Map("name" -> Seq("Kiki"), "emails[]" -> Seq("kiki@gmail.com", "kiki@zen.com")))
.get must equalTo(("Kiki", Seq("kiki@gmail.com", "kiki@zen.com")))
}
"support repeated values with set" in {
ScalaForms.repeatedFormWithSet.bindFromRequest(Map("name" -> Seq("Kiki"))).get must equalTo(("Kiki", Set()))
ScalaForms.repeatedFormWithSet
.bindFromRequest(Map("name" -> Seq("Kiki"), "emails[0]" -> Seq("kiki@gmail.com")))
.get must equalTo(("Kiki", Set("kiki@gmail.com")))
ScalaForms.repeatedFormWithSet
.bindFromRequest(
Map("name" -> Seq("Kiki"), "emails[0]" -> Seq("kiki@gmail.com"), "emails[1]" -> Seq("kiki@zen.com"))
)
.get must equalTo(("Kiki", Set("kiki@gmail.com", "kiki@zen.com")))
ScalaForms.repeatedFormWithSet
.bindFromRequest(Map("name" -> Seq("Kiki"), "emails[0]" -> Seq(), "emails[1]" -> Seq("kiki@zen.com")))
.hasErrors must equalTo(true)
ScalaForms.repeatedFormWithSet
.bindFromRequest(Map("name" -> Seq("Kiki"), "emails[]" -> Seq("kiki@gmail.com")))
.get must equalTo(("Kiki", Set("kiki@gmail.com")))
ScalaForms.repeatedFormWithSet
.bindFromRequest(Map("name" -> Seq("Kiki"), "emails[]" -> Seq("kiki@gmail.com", "kiki@zen.com")))
.get must equalTo(("Kiki", Set("kiki@gmail.com", "kiki@zen.com")))
ScalaForms.repeatedFormWithSet
.bindFromRequest(Map("name" -> Seq("Kiki"), "emails[]" -> Seq("kiki@gmail.com", "kiki@gmail.com")))
.get must equalTo(("Kiki", Set("kiki@gmail.com")))
}
"support repeated values with indexedSeq" in {
ScalaForms.repeatedFormWithIndexedSeq.bindFromRequest(Map("name" -> Seq("Kiki"))).get must equalTo(
("Kiki", IndexedSeq())
)
ScalaForms.repeatedFormWithIndexedSeq
.bindFromRequest(Map("name" -> Seq("Kiki"), "emails[0]" -> Seq("kiki@gmail.com")))
.get must equalTo(("Kiki", IndexedSeq("kiki@gmail.com")))
ScalaForms.repeatedFormWithIndexedSeq
.bindFromRequest(
Map("name" -> Seq("Kiki"), "emails[0]" -> Seq("kiki@gmail.com"), "emails[1]" -> Seq("kiki@zen.com"))
)
.get must equalTo(("Kiki", IndexedSeq("kiki@gmail.com", "kiki@zen.com")))
ScalaForms.repeatedFormWithIndexedSeq
.bindFromRequest(Map("name" -> Seq("Kiki"), "emails[0]" -> Seq(), "emails[1]" -> Seq("kiki@zen.com")))
.hasErrors must equalTo(true)
ScalaForms.repeatedFormWithIndexedSeq
.bindFromRequest(Map("name" -> Seq("Kiki"), "emails[]" -> Seq("kiki@gmail.com")))
.get must equalTo(("Kiki", IndexedSeq("kiki@gmail.com")))
ScalaForms.repeatedFormWithIndexedSeq
.bindFromRequest(Map("name" -> Seq("Kiki"), "emails[]" -> Seq("kiki@gmail.com", "kiki@zen.com")))
.get must equalTo(("Kiki", IndexedSeq("kiki@gmail.com", "kiki@zen.com")))
}
"support repeated values with vector" in {
ScalaForms.repeatedFormWithVector.bindFromRequest(Map("name" -> Seq("Kiki"))).get must equalTo(("Kiki", Vector()))
ScalaForms.repeatedFormWithVector
.bindFromRequest(Map("name" -> Seq("Kiki"), "emails[0]" -> Seq("kiki@gmail.com")))
.get must equalTo(("Kiki", Vector("kiki@gmail.com")))
ScalaForms.repeatedFormWithVector
.bindFromRequest(
Map("name" -> Seq("Kiki"), "emails[0]" -> Seq("kiki@gmail.com"), "emails[1]" -> Seq("kiki@zen.com"))
)
.get must equalTo(("Kiki", Vector("kiki@gmail.com", "kiki@zen.com")))
ScalaForms.repeatedFormWithVector
.bindFromRequest(Map("name" -> Seq("Kiki"), "emails[0]" -> Seq(), "emails[1]" -> Seq("kiki@zen.com")))
.hasErrors must equalTo(true)
ScalaForms.repeatedFormWithVector
.bindFromRequest(Map("name" -> Seq("Kiki"), "emails[]" -> Seq("kiki@gmail.com")))
.get must equalTo(("Kiki", Vector("kiki@gmail.com")))
ScalaForms.repeatedFormWithVector
.bindFromRequest(Map("name" -> Seq("Kiki"), "emails[]" -> Seq("kiki@gmail.com", "kiki@zen.com")))
.get must equalTo(("Kiki", Vector("kiki@gmail.com", "kiki@zen.com")))
}
"render a form with max 18 fields" in {
ScalaForms.helloForm.bind(Map("name" -> "foo", "repeat" -> "1")).get.toString must equalTo(
"(foo,1,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None)"
)
}
"reject input if it contains global errors" in {
Form("value" -> nonEmptyText)
.withGlobalError("some.error")
.bind(Map("value" -> "some value"))
.errors
.headOption must beSome.like {
case error => error.message must equalTo("some.error")
}
}
"find nested error on unbind" in {
case class Item(text: String)
case class Items(seq: Seq[Item])
val itemForm = Form[Items](
mapping(
"seq" -> seq(
mapping("text" -> nonEmptyText)(Item)(Item.unapply)
)
)(Items)(Items.unapply)
)
val filled = itemForm.fillAndValidate(Items(Seq(Item(""))))
val result = filled.fold(
errors => false,
success => true
)
result should beFalse
}
"support boolean binding from json" in {
ScalaForms.booleanForm.bind(Json.obj("accepted" -> "true")).get must beTrue
ScalaForms.booleanForm.bind(Json.obj("accepted" -> "false")).get must beFalse
}
"reject boolean binding from an invalid json" in {
val f = ScalaForms.booleanForm.bind(Json.obj("accepted" -> "foo"))
f.errors must not be 'empty
}
"correctly lookup error messages when using errorsAsJson" in {
val messagesApi: MessagesApi = {
val config = Configuration.reference
val langs = new DefaultLangsProvider(config).get
new DefaultMessagesApiProvider(Environment.simple(), config, langs, HttpConfiguration()).get
}
implicit val messages = messagesApi.preferred(Seq.empty)
val form =
Form(single("foo" -> Forms.text), Map.empty, Seq(FormError("foo", "error.custom", Seq("error.customarg"))), None)
(form.errorsAsJson \\ "foo")(0).asOpt[String] must beSome("This <b>is</b> a custom <b>error</b>")
}
"correctly format error messages with arguments" in {
val messagesApi: MessagesApi = {
val config = Configuration.reference
val langs = new DefaultLangsProvider(config).get
new DefaultMessagesApiProvider(Environment.simple(), config, langs, HttpConfiguration()).get
}
implicit val messages = messagesApi.preferred(Seq.empty)
val filled = ScalaForms.parameterizederrorMessageForm.fillAndValidate("john")
filled.errors("name").find(_.message == "error.minLength").map(_.format) must beSome("Minimum length is 5")
}
    // java.time.LocalDate binding with the default ISO pattern (yyyy-MM-dd).
    "render form using java.time.LocalDate" in {
      import java.time.LocalDate
      val dateForm = Form("date" -> localDate)
      val data = Map("date" -> "2012-01-01")
      dateForm.bind(data).get must beEqualTo(LocalDate.of(2012, 1, 1))
    }
    // LocalDate binding with an explicit "dd/MM/yyyy" pattern.
    "render form using java.time.LocalDate with format(15/6/2016)" in {
      import java.time.LocalDate
      val dateForm = Form("date" -> localDate("dd/MM/yyyy"))
      val data = Map("date" -> "15/06/2016")
      dateForm.bind(data).get must beEqualTo(LocalDate.of(2016, 6, 15))
    }
    // java.time.LocalDateTime binding with the default pattern.
    "render form using java.time.LocalDateTime" in {
      import java.time.LocalDateTime
      val dateForm = Form("date" -> localDateTime)
      val data = Map("date" -> "2012-01-01 10:10:10")
      dateForm.bind(data).get must beEqualTo(LocalDateTime.of(2012, 1, 1, 10, 10, 10))
    }
    // NOTE(review): the spec name mentions "17/06/2016T17:15:33", but the
    // pattern below is "dd/MM/yyyy HH:mm:ss" (no 'T') — name and pattern are
    // out of sync; confirm which was intended.
    "render form using java.time.LocalDateTime with format(17/06/2016T17:15:33)" in {
      import java.time.LocalDateTime
      val dateForm = Form("date" -> localDateTime("dd/MM/yyyy HH:mm:ss"))
      val data = Map("date" -> "17/06/2016 10:10:10")
      dateForm.bind(data).get must beEqualTo(LocalDateTime.of(2016, 6, 17, 10, 10, 10))
    }
    // java.time.LocalTime binding with default and custom patterns.
    "render form using java.time.LocalTime" in {
      import java.time.LocalTime
      val dateForm = Form("date" -> localTime)
      val data = Map("date" -> "10:10:10")
      dateForm.bind(data).get must beEqualTo(LocalTime.of(10, 10, 10))
    }
    "render form using java.time.LocalTime with format(HH-mm-ss)" in {
      import java.time.LocalTime
      val dateForm = Form("date" -> localTime("HH-mm-ss"))
      val data = Map("date" -> "10-11-12")
      dateForm.bind(data).get must beEqualTo(LocalTime.of(10, 11, 12))
    }
    // java.sql.Date binding with the default yyyy-MM-dd pattern; the bound
    // value is converted to LocalDate for a readable assertion.
    "render form using java.sql.Date" in {
      import java.time.LocalDate
      val dateForm = Form("date" -> sqlDate)
      val data = Map("date" -> "2017-07-04")
      val date = dateForm.bind(data).get.toLocalDate
      date must beEqualTo(LocalDate.of(2017, 7, 4))
    }
    // Same mapping with an explicit "dd-MM-yyyy" pattern.
    "render form using java.sql.Date with format(dd-MM-yyyy)" in {
      import java.time.LocalDate
      val dateForm = Form("date" -> sqlDate("dd-MM-yyyy"))
      val data = Map("date" -> "04-07-2017")
      val date = dateForm.bind(data).get.toLocalDate
      date must beEqualTo(LocalDate.of(2017, 7, 4))
    }
"render form using java.sql.Timestamp" in {
import java.time.LocalDateTime
val dateForm = Form("date" -> sqlTimestamp)
val data = Map("date" -> "2017-07-04 10:11:12")
val date = dateForm.bind(data).get.toLocalDateTime
date must beEqualTo(LocalDateTime.of(2017, 7, 4, 10, 11, 12))
}
"render form using java.sql.Date with format(dd/MM/yyyy HH-mm-ss)" in {
import java.time.LocalDateTime
val dateForm = Form("date" -> sqlTimestamp("dd/MM/yyyy HH-mm-ss"))
val data = Map("date" -> "04/07/2017 10-11-12")
val date = dateForm.bind(data).get.toLocalDateTime
date must beEqualTo(LocalDateTime.of(2017, 7, 4, 10, 11, 12))
}
"render form using java.time.Timestamp with format(17/06/2016T17:15:33)" in {
import java.time.LocalDateTime
val dateForm = Form("date" -> sqlTimestamp("dd/MM/yyyy HH:mm:ss"))
val data = Map("date" -> "17/06/2016 10:10:10")
val date = dateForm.bind(data).get.toLocalDateTime
date must beEqualTo(LocalDateTime.of(2016, 6, 17, 10, 10, 10))
}
}
// Form fixtures shared by the specs above.
object ScalaForms {
  val booleanForm = Form("accepted" -> Forms.boolean)
  case class User(name: String, age: Int)
  // Mapping with per-field constraints (non-empty name, age in 0..100).
  val userForm = Form(
    mapping(
      "name" -> of[String].verifying(nonEmpty),
      "age" -> of[Int].verifying(min(0), max(100))
    )(User.apply)(User.unapply)
  )
  val loginForm = Form(
    tuple(
      "email" -> of[String],
      "password" -> of[Int]
    )
  )
  // Exercises default(...) mappings: the given values are used when the key
  // is absent from the submitted data.
  val defaultValuesForm = Form(
    tuple(
      "pos" -> default(number, 42),
      "name" -> default(text, "default text")
    )
  )
  // Wide tuple form with many optional fields.
  val helloForm = Form(
    tuple(
      "name" -> nonEmptyText,
      "repeat" -> number(min = 1, max = 100),
      "color" -> optional(text),
      "still works" -> optional(text),
      "1" -> optional(text),
      "2" -> optional(text),
      "3" -> optional(text),
      "4" -> optional(text),
      "5" -> optional(text),
      "6" -> optional(text),
      "7" -> optional(text),
      "8" -> optional(text),
      "9" -> optional(text),
      "10" -> optional(text),
      "11" -> optional(text),
      "12" -> optional(text),
      "13" -> optional(text),
      "14" -> optional(text)
    )
  )
  // Repeated-field forms, one per supported collection mapping.
  val repeatedForm = Form(
    tuple(
      "name" -> nonEmptyText,
      "emails" -> list(nonEmptyText)
    )
  )
  val repeatedFormWithSet = Form(
    tuple(
      "name" -> nonEmptyText,
      "emails" -> set(nonEmptyText)
    )
  )
  val repeatedFormWithIndexedSeq = Form(
    tuple(
      "name" -> nonEmptyText,
      "emails" -> indexedSeq(nonEmptyText)
    )
  )
  val repeatedFormWithVector = Form(
    tuple(
      "name" -> nonEmptyText,
      "emails" -> vector(nonEmptyText)
    )
  )
  // Text field with chained verifying/transform steps.
  val form = Form(
    "foo" -> Forms.text
      .verifying("first.digit", s => s.headOption contains '3')
      .transform[Int](Integer.parseInt, _.toString)
      .verifying("number.42", _ < 42)
  )
  val emailForm = Form(
    tuple(
      "email" -> email,
      "name" -> of[String]
    )
  )
  val updateForm = Form(
    tuple(
      "email" -> optional(text),
      "name" -> optional(text)
    )
  )
  val longNumberForm = Form("longNumber" -> longNumber(10, 42))
  val shortNumberForm = Form("shortNumber" -> shortNumber(10, 42))
  // NOTE(review): this uses shortNumber, not byteNumber — looks like a
  // copy-paste slip; confirm whether byteNumber(10, 42) was intended
  // (changing it would change the mapping's bound type to Byte).
  val byteNumberForm = Form("byteNumber" -> shortNumber(10, 42))
  val charForm = Form("gender" -> char)
  val parameterizederrorMessageForm = Form("name" -> nonEmptyText(minLength = 5))
}
| mkurz/playframework | core/play/src/test/scala/play/api/data/FormSpec.scala | Scala | apache-2.0 | 23,799 |
/*
* Copyright (c) 2013-2015 Erik van Oosten
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.grons.metrics.scala
import org.mockito.Mockito.verify
import org.scalatest.OneInstancePerTest
import org.scalatest.Matchers._
import org.scalatest.mock.MockitoSugar._
import org.scalatest.FunSpec
import scala.concurrent.ExecutionContext
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import scala.concurrent.Await
import com.codahale.metrics.Timer.Context
import scala.concurrent.Promise
import scala.concurrent.duration._
@RunWith(classOf[JUnitRunner])
// Verifies that FutureMetrics wires futures into Timer: `timed` runs the body
// inside Timer.time, and `timing` stops a timer context when the returned
// future completes (see the verify(mockTimerContext).stop() assertion).
class FutureMetricsSpec extends FunSpec with OneInstancePerTest with FutureMetrics with InstrumentedBuilder {
  val metricRegistry = null
  // Builder that always hands out the stubbed timer below, whatever the name.
  override def metrics = new MetricBuilder(null,null) {
    override def timer(name: String, scope: String = null): Timer = mockTimer
  }
  var timeCalled = false
  // Stub timer: records that time() was invoked and exposes a mocked context.
  val mockTimer = new Timer(null) {
    override def time[A](action: => A): A = { timeCalled = true; action }
    override def timerContext = mockTimerContext
  }
  val mockTimerContext = mock[Context]
  // Runs continuations on the calling thread so assertions observe all
  // side effects synchronously.
  implicit def sameThreadEc: ExecutionContext = new ExecutionContext {
    def execute(runnable: Runnable): Unit = runnable.run
    def reportFailure(t: Throwable): Unit = throw t
  }
  describe("A future timer") {
    it("should time an execution") {
      val f = timed("test") {
        Thread.sleep(10L)
        10
      }
      val result = Await.result(f, 300.millis)
      timeCalled should be (true)
      result should be (10)
    }
    it("should attach an onComplete listener") {
      val p = Promise[String]()
      var invocationCount = 0
      // The body must run exactly once; the timer context must be stopped
      // once the promise completes.
      val f = timing("test") {
        invocationCount += 1
        p.future
      }
      p.success("test")
      val result = Await.result(f, 50.millis)
      result should be ("test")
      verify(mockTimerContext).stop()
      invocationCount should be (1)
    }
  }
}
| maciej/metrics-scala | src/test/scala/nl/grons/metrics/scala/FutureMetricsSpec.scala | Scala | apache-2.0 | 2,435 |
package mesosphere.marathon.core.task.bus
import mesosphere.marathon.MarathonTestHelper
import mesosphere.marathon.core.task.bus.TaskChangeObservables.TaskChanged
import mesosphere.marathon.core.task.{ Task, TaskStateChange, TaskStateOp }
import mesosphere.marathon.state.{ PathId, Timestamp }
import org.apache.mesos.Protos.TaskStatus.Reason
import org.apache.mesos.Protos.{ TaskState, TaskStatus }
import org.joda.time.DateTime
import org.slf4j.LoggerFactory
/** Test wrapper around a [[TaskChanged]] providing readable accessors. */
class TaskStatusUpdateTestHelper(val wrapped: TaskChanged) {
  /** Human-readable name: the Mesos state for MesosUpdate ops, the op class name otherwise. */
  def simpleName = wrapped.stateOp match {
    case TaskStateOp.MesosUpdate(_, MarathonTaskStatus.WithMesosStatus(mesosStatus), _) =>
      mesosStatus.getState.toString
    case _ => wrapped.stateOp.getClass.getSimpleName
  }
  /** The underlying Mesos TaskStatus; only valid for MesosUpdate state ops. */
  def status = wrapped.stateOp match {
    case TaskStateOp.MesosUpdate(_, MarathonTaskStatus.WithMesosStatus(mesosStatus), _) => mesosStatus
    // Fixed garbled failure message ("os no" -> "is not a").
    case _ => throw new scala.RuntimeException("the wrapped stateOp is not a MesosUpdate!")
  }
  /** The Mesos reason string, or "no reason" when the status carries none. */
  def reason: String = if (status.hasReason) status.getReason.toString else "no reason"
}
// Factory methods producing TaskChanged fixtures for status-update tests.
object TaskStatusUpdateTestHelper {
  val log = LoggerFactory.getLogger(getClass)
  def apply(taskChanged: TaskChanged): TaskStatusUpdateTestHelper =
    new TaskStatusUpdateTestHelper(taskChanged)
  // Fresh task id under the given app path.
  private def newTaskID(appId: String) = {
    Task.Id.forRunSpec(PathId(appId))
  }
  val taskId = newTaskID("/app")
  lazy val defaultTask = MarathonTestHelper.stagedTask(taskId.idString)
  lazy val defaultTimestamp = Timestamp.apply(new DateTime(2015, 2, 3, 12, 30, 0, 0))
  // Launch of an ephemeral task; the state change is derived via task.update.
  // NOTE(review): the timestamp parameter is unused here — confirm whether it
  // should be threaded into the op or removed.
  def taskLaunchFor(task: Task, timestamp: Timestamp = defaultTimestamp) = {
    val taskStateOp = TaskStateOp.LaunchEphemeral(task)
    val taskStateChange = task.update(taskStateOp)
    TaskStatusUpdateTestHelper(TaskChanged(taskStateOp, taskStateChange))
  }
  // Mesos status update whose state change is derived from the task itself.
  def taskUpdateFor(task: Task, taskStatus: MarathonTaskStatus, timestamp: Timestamp = defaultTimestamp) = {
    val taskStateOp = TaskStateOp.MesosUpdate(task, taskStatus, timestamp)
    val taskStateChange = task.update(taskStateOp)
    TaskStatusUpdateTestHelper(TaskChanged(taskStateOp, taskStateChange))
  }
  // Mesos status update that always yields an Expunge state change.
  def taskExpungeFor(task: Task, taskStatus: MarathonTaskStatus, timestamp: Timestamp = defaultTimestamp) = {
    TaskStatusUpdateTestHelper(
      TaskChanged(
        TaskStateOp.MesosUpdate(task, taskStatus, timestamp),
        TaskStateChange.Expunge(task)))
  }
  // Builds a raw Mesos TaskStatus with optional health and reason fields.
  def makeMesosTaskStatus(taskId: Task.Id, state: TaskState, maybeHealth: Option[Boolean] = None, maybeReason: Option[TaskStatus.Reason] = None) = {
    val mesosStatus = TaskStatus.newBuilder
      .setTaskId(taskId.mesosTaskId)
      .setState(state)
    maybeHealth.foreach(mesosStatus.setHealthy)
    maybeReason.foreach(mesosStatus.setReason)
    mesosStatus.build()
  }
  // Same as above, wrapped in Marathon's status type.
  def makeTaskStatus(taskId: Task.Id, state: TaskState, maybeHealth: Option[Boolean] = None, maybeReason: Option[TaskStatus.Reason] = None) = {
    val mesosStatus = makeMesosTaskStatus(taskId, state, maybeHealth, maybeReason)
    MarathonTaskStatus(mesosStatus)
  }
  // Convenience constructors, one per Mesos task state.
  def running(task: Task = defaultTask) = taskUpdateFor(task, makeTaskStatus(task.taskId, TaskState.TASK_RUNNING))
  def runningHealthy(task: Task = defaultTask) = taskUpdateFor(task, makeTaskStatus(task.taskId, TaskState.TASK_RUNNING, maybeHealth = Some(true)))
  def runningUnhealthy(task: Task = defaultTask) = taskUpdateFor(task, makeTaskStatus(task.taskId, TaskState.TASK_RUNNING, maybeHealth = Some(false)))
  def staging(task: Task = defaultTask) = taskUpdateFor(task, makeTaskStatus(task.taskId, TaskState.TASK_STAGING))
  def finished(task: Task = defaultTask) = taskExpungeFor(task, makeTaskStatus(task.taskId, TaskState.TASK_FINISHED))
  // TASK_LOST is terminal only for some reasons: expunge for terminal ones,
  // plain update otherwise.
  def lost(reason: Reason, task: Task = defaultTask) = {
    val taskStatus = makeTaskStatus(task.taskId, TaskState.TASK_LOST, maybeReason = Some(reason))
    taskStatus match {
      // NOTE(review): the extracted `status` binding is unused.
      case MarathonTaskStatus.Terminal(status) =>
        taskExpungeFor(task, taskStatus)
      case _ =>
        taskUpdateFor(task, taskStatus)
    }
  }
  def killed(task: Task = defaultTask) = taskExpungeFor(task, makeTaskStatus(task.taskId, TaskState.TASK_KILLED))
  def killing(task: Task = defaultTask) = taskUpdateFor(task, makeTaskStatus(task.taskId, TaskState.TASK_KILLING))
  def error(task: Task = defaultTask) = taskExpungeFor(task, makeTaskStatus(task.taskId, TaskState.TASK_ERROR))
}
| yp-engineering/marathon | src/test/scala/mesosphere/marathon/core/task/bus/TaskStatusUpdateTestHelper.scala | Scala | apache-2.0 | 4,379 |
/*
* Copyright 2013 ChronoTrack
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.chronotrack.flurry
import com.chronotrack.flurry.worker.WorkerIdGenerator
import com.typesafe.scalalogging.LazyLogging
/**
* User: ilya
* Date: 8/15/13
* Time: 10:48 AM
*/
/**
 * Snowflake-style 64-bit id generator: ids are composed of a millisecond
 * timestamp (relative to `epochStart`), a worker id, and a per-millisecond
 * sequence, so they are unique and roughly time-ordered.
 */
trait Generator extends LazyLogging {

  protected[this] val workerIdGenerator: WorkerIdGenerator

  /** Worker id resolved once on first access; must fit into `workerIdBits` bits. */
  lazy val workerId: Long = workerIdGenerator.workerId

  // Configurable variables: the custom epoch plus the bit budget for the
  // worker id and the per-millisecond sequence. The remaining
  // (64 - workerIdBits - sequenceBits) bits hold the timestamp.
  val epochStart: Long
  val workerIdBits: Long
  val sequenceBits: Long

  private[this] val maxWorkerId = -1L ^ (-1L << workerIdBits)
  private[this] final val workerIdShift = sequenceBits
  private[this] final val timestampLeftShift = sequenceBits + workerIdBits
  private[this] final val sequenceMask = -1L ^ (-1L << sequenceBits)

  // Mutable generator state; only accessed inside the synchronized nextId.
  private var lastTimestamp = -1L
  private[this] var sequence = 0L

  // sanity check for workerId
  if (workerId > maxWorkerId || workerId < 0) {
    throw new IllegalArgumentException("worker Id can't be greater than %d or less than 0".format(maxWorkerId))
  }

  logger.info(
    s"""Using ${this.getClass.getName}:
      |\\tBits used for time: ${64-workerIdBits-sequenceBits} allowing ${Math.pow(2, 64-workerIdBits-sequenceBits)/3.15569e10} years
      |\\tBits used for worker: $workerIdBits allowing ${Math.pow(2, workerIdBits).toInt} workers
      |\\tBits used for sequence: $sequenceBits allowing ${Math.pow(2, sequenceBits).toInt} ids per millisecond
    """.stripMargin)

  /** Current time in milliseconds; overridable for tests. */
  protected def timeGen(): Long = System.currentTimeMillis()

  /** Returns the next unique id. */
  def getId = nextId

  private[this] final def nextId: Long = synchronized {
    var timestamp = timeGen()
    if (timestamp < lastTimestamp) {
      // Fixed logging call: the slf4j-backed logger does not substitute
      // printf-style "%d" (it uses {} placeholders), so the previous
      // logger.error("... %d.", Array(lastTimestamp)) never rendered the value.
      logger.error(s"clock is moving backwards. Rejecting requests until $lastTimestamp.")
      throw new RuntimeException("Clock moved backwards. Refusing to generate id for %d milliseconds".format(
        lastTimestamp - timestamp))
    }
    sequence = if (lastTimestamp == timestamp) {
      val seq = (sequence + 1) & sequenceMask
      if (seq == 0) { // sequence space for this millisecond is exhausted: spin until the next one
        timestamp = tilNextMillis(lastTimestamp)
      }
      seq
    } else {
      0
    }
    lastTimestamp = timestamp
    // Compose the id: | timestamp | workerId | sequence |
    ((timestamp - epochStart) << timestampLeftShift) |
      (workerId << workerIdShift) |
      sequence
  }

  /** Busy-waits until the clock advances past `lastTimestamp`. */
  private[this] def tilNextMillis(lastTimestamp: Long): Long = {
    var timestamp = timeGen()
    while (timestamp <= lastTimestamp) {
      timestamp = timeGen()
    }
    timestamp
  }
}
| isterin/flurry | src/main/scala/com/chronotrack/flurry/Generator.scala | Scala | apache-2.0 | 3,120 |
package utils.silhouette
import com.mohiva.play.silhouette.core.{Identity, LoginInfo}
import Implicits._
// An Identity addressed by a single string key, convertible to Silhouette's
// LoginInfo.
trait IdentitySilhouette extends Identity {
  // Unique key for this identity.
  // NOTE(review): the exact key format is defined by implementors/callers — confirm.
  def key: String
  // Relies on the imported implicit conversion (String -> LoginInfo) from
  // utils.silhouette.Implicits.
  def loginInfo: LoginInfo = key
} | vtapadia/crickit | modules/common/app/utils/silhouette/IdentitySilhouette.scala | Scala | apache-2.0 | 201 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util
import org.apache.carbondata.common.logging.LogServiceFactory
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datastore.filesystem.CarbonFile
import org.apache.carbondata.core.datastore.impl.FileFactory
import org.apache.carbondata.processing.exception.DataLoadingException
object FileUtils {

  /**
   * Recursively appends the absolute path of every usable file below
   * `carbonFile` (with '/' separators) plus a trailing comma to
   * `stringBuild`. Empty files and files whose name starts with an
   * underscore or a dot are skipped with a warning.
   */
  private def getPathsFromCarbonFile(carbonFile: CarbonFile, stringBuild: StringBuilder): Unit = {
    if (carbonFile.isDirectory) {
      carbonFile.listFiles().foreach(getPathsFromCarbonFile(_, stringBuild))
    } else {
      val path = carbonFile.getAbsolutePath
      val fileName = carbonFile.getName
      if (carbonFile.getSize == 0) {
        LogServiceFactory.getLogService(this.getClass.getCanonicalName)
          .warn(s"skip empty input file: $path")
      } else if (fileName.startsWith(CarbonCommonConstants.UNDERSCORE) ||
                 fileName.startsWith(CarbonCommonConstants.POINT)) {
        LogServiceFactory.getLogService(this.getClass.getCanonicalName)
          .warn(s"skip invisible input file: $path")
      } else {
        stringBuild.append(path.replace('\\\\', '/')).append(CarbonCommonConstants.COMMA)
      }
    }
  }

  /**
   * Expands a comma separated list of input paths into a comma separated
   * list of all contained data files. Throws DataLoadingException when the
   * input is empty, a path does not exist, or no usable file is found.
   */
  def getPaths(inputPath: String): String = {
    if (inputPath == null || inputPath.isEmpty) {
      throw new DataLoadingException("Input file path cannot be empty.")
    } else {
      val stringBuild = new StringBuilder()
      inputPath.split(",").foreach { filePath =>
        val fileType = FileFactory.getFileType(filePath)
        val carbonFile = FileFactory.getCarbonFile(filePath, fileType)
        if (!carbonFile.exists()) {
          throw new DataLoadingException(s"The input file does not exist: $filePath")
        }
        getPathsFromCarbonFile(carbonFile, stringBuild)
      }
      if (stringBuild.nonEmpty) {
        // Drop the trailing comma appended by getPathsFromCarbonFile.
        stringBuild.substring(0, stringBuild.size - 1)
      } else {
        throw new DataLoadingException("Please check your input path and make sure " +
          "that files end with '.csv' and content is not empty.")
      }
    }
  }

  /** Sum of the sizes of the comma separated paths; 0 for null/empty input. */
  def getSpaceOccupied(inputPath: String): Long = {
    if (inputPath == null || inputPath.isEmpty) {
      0L
    } else {
      inputPath.split(",").foldLeft(0L) { (acc, filePath) =>
        val fileType = FileFactory.getFileType(filePath)
        acc + FileFactory.getCarbonFile(filePath, fileType).getSize
      }
    }
  }
}
| HuaweiBigData/carbondata | integration/spark-common/src/main/scala/org/apache/spark/util/FileUtils.scala | Scala | apache-2.0 | 3,675 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package expr
import com.intellij.openapi.progress.ProgressManager
import com.intellij.psi._
import com.intellij.psi.util.PsiModificationTracker
import org.jetbrains.plugins.scala.caches.CachesUtil
import org.jetbrains.plugins.scala.extensions.ElementText
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil.SafeCheckException
import org.jetbrains.plugins.scala.lang.psi.api.base.ScLiteral
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScTypeAliasDefinition
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports.usages.ImportUsed
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTrait
import org.jetbrains.plugins.scala.lang.psi.impl.{ScalaPsiElementFactory, ScalaPsiManager}
import org.jetbrains.plugins.scala.lang.psi.implicits.{ImplicitCollector, ScImplicitlyConvertible}
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.types.nonvalue.{Parameter, ScMethodType, ScTypePolymorphicType}
import org.jetbrains.plugins.scala.lang.psi.types.result.{Failure, Success, TypeResult, TypingContext}
import org.jetbrains.plugins.scala.lang.refactoring.util.ScTypeUtil.AliasType
import org.jetbrains.plugins.scala.lang.resolve.processor.MethodResolveProcessor
import org.jetbrains.plugins.scala.lang.resolve.{ScalaResolveResult, StdKinds}
import scala.annotation.tailrec
import scala.collection.mutable.ArrayBuffer
import scala.collection.{Seq, Set}
/**
* @author ilyas, Alexander Podkhalyuzin
*/
trait ScExpression extends ScBlockStatement with PsiAnnotationMemberValue with ImplicitParametersOwner {
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression._
  /**
   * This method returns the real type of the expression, after applying
   * implicit conversions toward the expected type.
   * Besides the type, the result carries the imports used for the conversion
   * and the conversion element itself (see ExpressionTypeResult).
   * @param checkImplicits whether implicit conversions may be tried at all
   * @param isShape when true only the "shape" type is computed (see getShape)
   * @param expectedOption to which type we trying to convert
   * @param ignoreBaseTypes parameter to avoid value discarding, literal narrowing, widening
   *                        this parameter is useful for refactorings (introduce variable)
   */
  def getTypeAfterImplicitConversion(checkImplicits: Boolean = true, isShape: Boolean = false,
                                     expectedOption: Option[ScType] = None,
                                     ignoreBaseTypes: Boolean = false,
                                     fromUnderscore: Boolean = false): ExpressionTypeResult = {
    type Data = (Boolean, Boolean, Option[ScType], Boolean, Boolean)
    val data = (checkImplicits, isShape, expectedOption, ignoreBaseTypes, fromUnderscore)
    // Result is cached per argument tuple and guarded against recursive
    // type computation (recursion falls back to the Failure default below).
    CachesUtil.getMappedWithRecursionPreventingWithRollback(this, data, CachesUtil.TYPE_AFTER_IMPLICIT_KEY,
      (expr: ScExpression, data: Data) => {
      val (checkImplicits: Boolean, isShape: Boolean,
        expectedOption: Option[ScType],
        ignoreBaseTypes: Boolean,
        fromUnderscore: Boolean) = data
      if (isShape) ExpressionTypeResult(Success(getShape()._1, Some(this)), Set.empty, None)
      else {
        val expected: ScType = expectedOption.getOrElse(expectedType(fromUnderscore).orNull)
        if (expected == null) {
          // No expected type: no conversion can be driven, return the plain type.
          ExpressionTypeResult(getTypeWithoutImplicits(TypingContext.empty, ignoreBaseTypes, fromUnderscore), Set.empty, None)
        } else {
          val tr = getTypeWithoutImplicits(TypingContext.empty, ignoreBaseTypes, fromUnderscore)
          def defaultResult: ExpressionTypeResult = ExpressionTypeResult(tr, Set.empty, None)
          if (!checkImplicits) defaultResult //do not try implicit conversions for shape check
          else {
            tr match {
              //if this result is ok, we do not need to think about implicits
              case Success(tp, _) if tp.conforms(expected) => defaultResult
              case Success(tp, _) =>
                // SAM conversion: a function type may directly satisfy the
                // expected SAM trait without an implicit conversion.
                if (ScalaPsiUtil.isSAMEnabled(this) && ScFunctionType.isFunctionType(tp)) {
                  val des = tp match {
                    case param: ScParameterizedType => Some(param.designator)
                    case _ => None
                  }
                  ScalaPsiUtil.toSAMType(expected) match {
                    case Some(methodType) if methodType.conforms(tp) =>
                      return ExpressionTypeResult(Success(expected, Some(this)), Set.empty, None)
                    case _ =>
                  }
                }
                // Search for a unique implicit conversion tp => expected.
                val functionType = ScFunctionType(expected, Seq(tp))(getProject, getResolveScope)
                val results = new ImplicitCollector(this, functionType, functionType, None,
                  isImplicitConversion = true, isExtensionConversion = false).collect()
                if (results.length == 1) {
                  val res = results.head
                  val paramType = InferUtil.extractImplicitParameterType(res)
                  paramType match {
                    case ScFunctionType(rt, Seq(param)) =>
                      ExpressionTypeResult(Success(rt, Some(this)), res.importsUsed, Some(res.getElement))
                    case _ =>
                      // Not literally a ScFunctionType: unify with Function1
                      // to extract the conversion's result type.
                      ScalaPsiManager.instance(getProject).getCachedClass(
                        "scala.Function1", getResolveScope, ScalaPsiManager.ClassCategory.TYPE
                      ) match {
                        case function1: ScTrait =>
                          ScParameterizedType(ScType.designator(function1), function1.typeParameters.map(tp =>
                            new ScUndefinedType(new ScTypeParameterType(tp, ScSubstitutor.empty), 1))) match {
                            case funTp: ScParameterizedType =>
                              val secondArg = funTp.typeArgs(1)
                              Conformance.undefinedSubst(funTp, paramType).getSubstitutor match {
                                case Some(subst) =>
                                  val rt = subst.subst(secondArg)
                                  if (rt.isInstanceOf[ScUndefinedType]) defaultResult
                                  else {
                                    ExpressionTypeResult(Success(rt, Some(this)), res.importsUsed, Some(res.getElement))
                                  }
                                case None => defaultResult
                              }
                            case _ => defaultResult
                          }
                        case _ => defaultResult
                      }
                  }
                } else defaultResult
              case _ => defaultResult
            }
          }
        }
      }
    }, ExpressionTypeResult(Failure("Recursive getTypeAfterImplicitConversion", Some(this)), Set.empty, None),
      PsiModificationTracker.MODIFICATION_COUNT)
  }
  /**
   * Computes the value type of this expression WITHOUT applying implicit
   * conversions to the expected type, but still performing expected-type
   * driven inference, implicit parameter search, value discarding (-> Unit),
   * numeric literal narrowing and numeric widening.
   */
  def getTypeWithoutImplicits(ctx: TypingContext, //todo: remove TypingContext?
                              ignoreBaseTypes: Boolean = false,
                              fromUnderscore: Boolean = false): TypeResult[ScType] = {
    ProgressManager.checkCanceled()
    type Data = (Boolean, Boolean)
    val data = (ignoreBaseTypes, fromUnderscore)
    CachesUtil.getMappedWithRecursionPreventingWithRollback(this, data, CachesUtil.TYPE_WITHOUT_IMPLICITS,
      (expr: ScExpression, data: Data) => {
        val (ignoreBaseTypes: Boolean,
        fromUnderscore: Boolean) = data
        val inner = getNonValueType(TypingContext.empty, ignoreBaseTypes, fromUnderscore)
        inner match {
          case Success(rtp, _) =>
            var res = rtp
            // Refines `res` against the expected type and resolves implicit
            // parameters; also fills the implicitParameters* caches.
            def tryUpdateRes(checkExpectedType: Boolean) {
              if (checkExpectedType) {
                InferUtil.updateAccordingToExpectedType(Success(res, Some(this)), fromImplicitParameters = true,
                  filterTypeParams = false, expectedType = expectedType(fromUnderscore), expr = this,
                  check = checkExpectedType) match {
                  case Success(newRes, _) => res = newRes
                  case _ =>
                }
              }
              val checkImplicitParameters = ScalaPsiUtil.withEtaExpansion(this)
              if (checkImplicitParameters) {
                val tuple = InferUtil.updateTypeWithImplicitParameters(res, this, None, checkExpectedType, fullInfo = false)
                res = tuple._1
                if (fromUnderscore) implicitParametersFromUnder = tuple._2
                else implicitParameters = tuple._2
              }
            }
            @tailrec
            def isMethodInvocation(expr: ScExpression = this): Boolean = {
              expr match {
                case p: ScPrefixExpr => false
                case p: ScPostfixExpr => false
                case _: MethodInvocation => true
                case p: ScParenthesisedExpr =>
                  p.expr match {
                    case Some(exp) => isMethodInvocation(exp)
                    case _ => false
                  }
                case _ => false
              }
            }
            if (!isMethodInvocation()) { //it is not updated according to expected type, let's do it
              val oldRes = res
              try {
                tryUpdateRes(checkExpectedType = true)
              } catch {
                // Roll back and retry without the expected-type check.
                case _: SafeCheckException =>
                  res = oldRes
                  tryUpdateRes(checkExpectedType = false)
              }
            }
            // Drops an empty parameter clause (eta-expansion-like case) unless
            // the expected type is itself a function type.
            def removeMethodType(retType: ScType, updateType: ScType => ScType = t => t) {
              def updateRes(exp: Option[ScType]) {
                exp match {
                  case Some(expected) =>
                    expected.removeAbstracts match {
                      case ScFunctionType(_, params) =>
                      case _ =>
                        expected.isAliasType match {
                          case Some(AliasType(ta: ScTypeAliasDefinition, _, _)) =>
                            ta.aliasedType match {
                              case Success(ScFunctionType(_, _), _) =>
                              case _ => res = updateType(retType)
                            }
                          case _ => res = updateType(retType)
                        }
                    }
                  case _ => res = updateType(retType)
                }
              }
              updateRes(expectedType(fromUnderscore))
            }
            res match {
              case ScTypePolymorphicType(ScMethodType(retType, params, _), tp) if params.length == 0 &&
                !ScUnderScoreSectionUtil.isUnderscore(this) =>
                removeMethodType(retType, t => ScTypePolymorphicType(t, tp))
              case ScMethodType(retType, params, _) if params.length == 0 &&
                !ScUnderScoreSectionUtil.isUnderscore(this) =>
                removeMethodType(retType)
              case _ =>
            }
            val valType = res.inferValueType.unpackedType
            if (ignoreBaseTypes) Success(valType, Some(this))
            else {
              expectedType(fromUnderscore) match {
                case Some(expected) =>
                  //value discarding
                  if (expected.removeAbstracts == Unit) return Success(Unit, Some(this))
                  //numeric literal narrowing
                  val needsNarrowing = this match {
                    case _: ScLiteral => getNode.getFirstChildNode.getElementType == ScalaTokenTypes.tINTEGER
                    case p: ScPrefixExpr => p.operand match {
                      case l: ScLiteral =>
                        l.getNode.getFirstChildNode.getElementType == ScalaTokenTypes.tINTEGER &&
                          Set("+", "-").contains(p.operation.getText)
                      case _ => false
                    }
                    case _ => false
                  }
                  // Narrows an integer literal to Char/Byte/Short when it fits
                  // the expected type's range.
                  def checkNarrowing: Option[TypeResult[ScType]] = {
                    try {
                      lazy val i = this match {
                        case l: ScLiteral => l.getValue match {
                          case i: Integer => i.intValue
                          case _ => scala.Int.MaxValue
                        }
                        case p: ScPrefixExpr =>
                          val mult = if (p.operation.getText == "-") -1 else 1
                          p.operand match {
                            case l: ScLiteral => l.getValue match {
                              case i: Integer => mult * i.intValue
                              case _ => scala.Int.MaxValue
                            }
                          }
                      }
                      expected.removeAbstracts match {
                        case types.Char =>
                          if (i >= scala.Char.MinValue.toInt && i <= scala.Char.MaxValue.toInt) {
                            return Some(Success(Char, Some(this)))
                          }
                        case types.Byte =>
                          if (i >= scala.Byte.MinValue.toInt && i <= scala.Byte.MaxValue.toInt) {
                            return Some(Success(Byte, Some(this)))
                          }
                        case types.Short =>
                          if (i >= scala.Short.MinValue.toInt && i <= scala.Short.MaxValue.toInt) {
                            return Some(Success(Short, Some(this)))
                          }
                        case _ =>
                      }
                    }
                    catch {
                      case _: NumberFormatException => //do nothing
                    }
                    None
                  }
                  val check = if (needsNarrowing) checkNarrowing else None
                  if (check.isDefined) check.get
                  else {
                    //numeric widening
                    def checkWidening(l: ScType, r: ScType): Option[TypeResult[ScType]] = {
                      (l, r) match {
                        case (Byte, Short | Int | Long | Float | Double) => Some(Success(expected, Some(this)))
                        case (Short, Int | Long | Float | Double) => Some(Success(expected, Some(this)))
                        case (Char, Byte | Short | Int | Long | Float | Double) => Some(Success(expected, Some(this)))
                        case (Int, Long | Float | Double) => Some(Success(expected, Some(this)))
                        case (Long, Float | Double) => Some(Success(expected, Some(this)))
                        case (Float, Double) => Some(Success(expected, Some(this)))
                        case _ => None
                      }
                    }
                    (valType.getValType, expected.getValType) match {
                      case (Some(l), Some(r)) => checkWidening(l, r) match {
                        case Some(x) => x
                        case _ => Success(valType, Some(this))
                      }
                      case _ => Success(valType, Some(this))
                    }
                  }
                case _ => Success(valType, Some(this))
              }
            }
          case _ => inner
        }
      }, Failure("Recursive getTypeWithoutImplicits", Some(this)), PsiModificationTracker.MODIFICATION_COUNT)
  }
def getType(ctx: TypingContext = TypingContext.empty): TypeResult[ScType] = {
this match {
case ref: ScReferenceExpression if ref.refName == ScImplicitlyConvertible.IMPLICIT_EXPRESSION_NAME =>
val data = getUserData(ScImplicitlyConvertible.FAKE_EXPRESSION_TYPE_KEY)
if (data != null) return Success(data, Some(this))
case _ =>
}
getTypeAfterImplicitConversion().tr
}
  // Like getType, but never applies value discarding / numeric narrowing or
  // widening toward the expected type (ignoreBaseTypes = true).
  def getTypeIgnoreBaseType(ctx: TypingContext = TypingContext.empty): TypeResult[ScType] = getTypeAfterImplicitConversion(ignoreBaseTypes = true).tr
  // Full result of type computation, including imports used for the conversion.
  def getTypeExt(ctx: TypingContext = TypingContext.empty): ScExpression.ExpressionTypeResult = getTypeAfterImplicitConversion()
  // Computes the "shape" of the expression for overload resolution: only the
  // arity of anonymous functions is kept (args become Any, results Nothing);
  // everything else collapses to Nothing. The second tuple element is the
  // named-argument name when the expression is a named assignment.
  def getShape(ignoreAssign: Boolean = false): (ScType, String) = {
    this match {
      case assign: ScAssignStmt if !ignoreAssign && assign.assignName != None =>
        (assign.getRExpression.map(_.getShape(ignoreAssign = true)._1).getOrElse(Nothing), assign.assignName.get)
      case expr: ScExpression =>
        ScalaPsiUtil.isAnonymousExpression(expr) match {
          case (-1, _) => (Nothing, "")
          case (i, expr: ScFunctionExpr) =>
            (ScFunctionType(expr.result.map(_.getShape(ignoreAssign = true)._1).getOrElse(Nothing), Seq.fill(i)(Any))(getProject, getResolveScope), "")
          case (i, _) => (ScFunctionType(Nothing, Seq.fill(i)(Any))(getProject, getResolveScope), "")
        }
      case _ => (Nothing, "")
    }
  }
  // Cache of resolved implicit parameters, filled as a side effect of type
  // computation (see tryUpdateRes in getTypeWithoutImplicits).
  @volatile
  protected var implicitParameters: Option[Seq[ScalaResolveResult]] = None
  // Same cache for the underscore-section variant of this expression.
  @volatile
  protected var implicitParametersFromUnder: Option[Seq[ScalaResolveResult]] = None
  /**
   * Warning! There is a hack in scala compiler for ClassManifest and ClassTag.
   * In case of implicit parameter with type ClassManifest[T]
   * this method will return ClassManifest with substitutor of type T.
   * @return implicit parameters used for this expression
   */
  def findImplicitParameters: Option[Seq[ScalaResolveResult]] = {
    ProgressManager.checkCanceled()
    // Type computation is invoked purely for its side effect of filling the
    // implicitParameters / implicitParametersFromUnder caches.
    if (ScUnderScoreSectionUtil.underscores(this).nonEmpty) {
      getTypeWithoutImplicits(TypingContext.empty, fromUnderscore = true) //to update implicitParametersFromUnder
      implicitParametersFromUnder
    } else {
      getType(TypingContext.empty) //to update implicitParameters field
      implicitParameters
    }
  }
  /**
   * Non-value type of the expression. For an underscore section the result is
   * a method type whose parameters correspond to the underscores; otherwise
   * this delegates to innerType. Results are cached and recursion-guarded.
   */
  def getNonValueType(ctx: TypingContext = TypingContext.empty, //todo: remove?
                      ignoreBaseType: Boolean = false,
                      fromUnderscore: Boolean = false): TypeResult[ScType] = {
    ProgressManager.checkCanceled()
    type Data = (Boolean, Boolean)
    val data = (ignoreBaseType, fromUnderscore)
    CachesUtil.getMappedWithRecursionPreventingWithRollback(this, data, CachesUtil.NON_VALUE_TYPE_KEY,
      (expr: ScExpression, data: Data) => {
        val (ignoreBaseType, fromUnderscore) = data
        if (fromUnderscore) innerType(TypingContext.empty)
        else {
          val unders = ScUnderScoreSectionUtil.underscores(this)
          if (unders.length == 0) innerType(TypingContext.empty)
          else {
            // One synthetic parameter per underscore, typed from the
            // underscore expression itself.
            val params = unders.zipWithIndex.map {
              case (u, index) =>
                val tpe = u.getNonValueType(TypingContext.empty, ignoreBaseType).getOrAny.inferValueType.unpackedType
                new Parameter("", None, tpe, false, false, false, index)
            }
            val methType =
              new ScMethodType(getTypeAfterImplicitConversion(ignoreBaseTypes = ignoreBaseType,
                fromUnderscore = true).tr.getOrAny,
                params, false)(getProject, getResolveScope)
            new Success(methType, Some(this))
          }
        }
      }, Failure("Recursive getNonValueType", Some(this)), PsiModificationTracker.MODIFICATION_COUNT)
  }
  // Intrinsic type of the concrete expression kind; subclasses override this.
  // The base implementation just fails with a "no type inferred" message.
  protected def innerType(ctx: TypingContext): TypeResult[ScType] =
    Failure(ScalaBundle.message("no.type.inferred", getText), Some(this))
  /**
   * Some expression may be replaced only with another one.
   * Replaces this expression with `expr` in the PSI tree and returns the
   * newly inserted expression.
   * @param removeParenthesis when true and the parent is a parenthesized
   *                          expression, the parentheses are replaced as well
   */
  def replaceExpression(expr: ScExpression, removeParenthesis: Boolean): ScExpression = {
    val oldParent = getParent
    if (oldParent == null) throw new PsiInvalidElementAccessException(this)
    if (removeParenthesis && oldParent.isInstanceOf[ScParenthesisedExpr]) {
      // Recurse one level up so the enclosing parentheses are replaced too.
      return oldParent.asInstanceOf[ScExpression].replaceExpression(expr, removeParenthesis = true)
    }
    // Wrap the replacement in parentheses when operator precedence requires it.
    val newExpr: ScExpression = if (ScalaPsiUtil.needParentheses(this, expr)) {
      ScalaPsiElementFactory.createExpressionFromText("(" + expr.getText + ")", getManager)
    } else expr
    val parentNode = oldParent.getNode
    val newNode = newExpr.copy.getNode
    parentNode.replaceChild(this.getNode, newNode)
    newNode.getPsi.asInstanceOf[ScExpression]
  }
  /** The expected type of this expression derived from its context, if any. */
  def expectedType(fromUnderscore: Boolean = true): Option[ScType] = {
    this match {
      // Synthetic call created while searching implicit conversions: its
      // expected type is stashed in user data rather than derived from context.
      case ref: ScMethodCall if ref.getText == ScImplicitlyConvertible.IMPLICIT_CALL_TEXT =>
        val data = getUserData(ScImplicitlyConvertible.FAKE_EXPECTED_TYPE_KEY)
        if (data != null) return data
      case _ =>
    }
    expectedTypeEx(fromUnderscore).map(_._1)
  }
  /** Like [[expectedType]], but also returns the originating type element, when known. */
  def expectedTypeEx(fromUnderscore: Boolean = true): Option[(ScType, Option[ScTypeElement])] =
    ExpectedTypes.expectedExprType(this, fromUnderscore)

  /** All applicable expected types (there can be several, e.g. for overloads). */
  def expectedTypes(fromUnderscore: Boolean = true): Array[ScType] = expectedTypesEx(fromUnderscore).map(_._1)

  // Cached: expected types together with their originating type elements.
  def expectedTypesEx(fromUnderscore: Boolean = true): Array[(ScType, Option[ScTypeElement])] = {
    CachesUtil.getMappedWithRecursionPreventingWithRollback(this, fromUnderscore, CachesUtil.EXPECTED_TYPES_KEY,
      (expr: ScExpression, data: Boolean) => ExpectedTypes.expectedExprTypes(expr, fromUnderscore = data),
      Array.empty[(ScType, Option[ScTypeElement])], PsiModificationTracker.MODIFICATION_COUNT)
  }

  // Cached: single "best" expected type when several candidates exist.
  def smartExpectedType(fromUnderscore: Boolean = true): Option[ScType] = {
    CachesUtil.getMappedWithRecursionPreventingWithRollback(this, fromUnderscore, CachesUtil.SMART_EXPECTED_TYPE,
      (expr: ScExpression, data: Boolean) => ExpectedTypes.smartExpectedType(expr, fromUnderscore = data),
      None, PsiModificationTracker.MODIFICATION_COUNT)
  }
  // Mirror expression + its type, recorded as a side effect of typing
  // (see getAdditionalExpression below).
  @volatile
  private var additionalExpression: Option[(ScExpression, ScType)] = None

  def setAdditionalExpression(additionalExpression: Option[(ScExpression, ScType)]) {
    this.additionalExpression = additionalExpression
  }

  /**
   * Used to obtain implicit conversions and used imports when the result of an
   * eta-expanded method was itself implicitly converted.
   * @return the mirror expression for this expression, if one exists
   */
  def getAdditionalExpression: Option[(ScExpression, ScType)] = {
    getType(TypingContext.empty) // typing populates `additionalExpression` as a side effect
    additionalExpression
  }
  /**
   * Collects the implicit conversions applicable to this expression.
   *
   * @return a tuple of: all convertible elements, the conversion actually
   *         applied in the current context (if any), conversions found in
   *         regular scope, and conversions found via companion objects
   */
  def getImplicitConversions(fromUnder: Boolean = false,
                             expectedOption: => Option[ScType] = smartExpectedType()):
    (Seq[PsiNamedElement], Option[PsiNamedElement], Seq[PsiNamedElement], Seq[PsiNamedElement]) = {
    val map = new ScImplicitlyConvertible(this).implicitMap(fromUnder = fromUnder, args = expectedTypes(fromUnder).toSeq)
    val implicits: Seq[PsiNamedElement] = map.map(_.element)
    // Which conversion is actually applied depends on how the parent uses this expression.
    val implicitFunction: Option[PsiNamedElement] = getParent match {
      case ref: ScReferenceExpression =>
        val resolve = ref.multiResolve(false)
        if (resolve.length == 1) {
          resolve.apply(0).asInstanceOf[ScalaResolveResult].implicitFunction
        } else None
      // Infix call: the conversion applies to the argument-side operand.
      case inf: ScInfixExpr if (inf.isLeftAssoc && this == inf.rOp) || (!inf.isLeftAssoc && this == inf.lOp) =>
        val resolve = inf.operation.multiResolve(false)
        if (resolve.length == 1) {
          resolve.apply(0).asInstanceOf[ScalaResolveResult].implicitFunction
        } else None
      case call: ScMethodCall => call.getImplicitFunction
      case gen: ScGenerator => gen.getParent match {
        case call: ScMethodCall => call.getImplicitFunction
        case _ => None
      }
      case _ => getTypeAfterImplicitConversion(expectedOption = expectedOption,
        fromUnderscore = fromUnder).implicitFunction
    }
    (implicits, implicitFunction, map.filter(!_.isFromCompanion).map(_.element), map.filter(_.isFromCompanion).map(_.element))
  }
  /**
   * Collects the expressions that can be the "result" of evaluating this one:
   * last expressions of blocks, both branches of `if`/`match`, try/catch
   * results, etc.
   *
   * @param withBooleanInfix also descend into the right operand of boolean
   *                         `&&` / `||` chains
   */
  final def calculateReturns(withBooleanInfix: Boolean = false): Seq[PsiElement] = {
    val res = new ArrayBuffer[PsiElement]
    def calculateReturns0(el: PsiElement) {
      el match {
        case tr: ScTryStmt =>
          calculateReturns0(tr.tryBlock)
          tr.catchBlock match {
            case Some(ScCatchBlock(caseCl)) =>
              caseCl.caseClauses.flatMap(_.expr).foreach(calculateReturns0)
            case _ =>
          }
        case block: ScBlock =>
          block.lastExpr match {
            case Some(expr) => calculateReturns0(expr)
            case _ => res += block // empty block: the block itself is the result
          }
        case pe: ScParenthesisedExpr =>
          pe.expr.foreach(calculateReturns0)
        case m: ScMatchStmt =>
          m.getBranches.foreach(calculateReturns0)
        case i: ScIfStmt =>
          // Only descend when an else branch exists; a one-armed `if` is a
          // result by itself (it can evaluate to Unit).
          i.elseBranch match {
            case Some(e) =>
              calculateReturns0(e)
              i.thenBranch match {
                case Some(thenBranch) => calculateReturns0(thenBranch)
                case _ =>
              }
            case _ => res += i
          }
        case infix @ ScInfixExpr(ScExpression.Type(types.Boolean), ElementText(op), right @ ScExpression.Type(types.Boolean))
          if withBooleanInfix && (op == "&&" || op == "||") => calculateReturns0(right)
        //TODO "!contains" is a quick fix, function needs unit testing to validate its behavior
        case _ => if (!res.contains(el)) res += el
      }
    }
    calculateReturns0(this)
    res
  }
def applyShapeResolveForExpectedType(tp: ScType, exprs: Seq[ScExpression], call: Option[MethodInvocation]): Array[ScalaResolveResult] = {
def inner(expr: ScExpression, tp: ScType, exprs: Seq[ScExpression], call: Option[MethodInvocation]): Array[ScalaResolveResult] = {
val applyProc =
new MethodResolveProcessor(expr, "apply", List(exprs), Seq.empty, Seq.empty /* todo: ? */,
StdKinds.methodsOnly, isShapeResolve = true)
applyProc.processType(tp, expr)
var cand = applyProc.candidates
if (cand.length == 0 && call != None) {
val expr = call.get.getEffectiveInvokedExpr
ScalaPsiUtil.findImplicitConversion(expr, "apply", expr, applyProc, noImplicitsForArgs = false) match {
case Some(res) =>
var state = ResolveState.initial.put(CachesUtil.IMPLICIT_FUNCTION, res.element)
res.getClazz match {
case Some(cl: PsiClass) => state = state.put(ScImplicitlyConvertible.IMPLICIT_RESOLUTION_KEY, cl)
case _ =>
}
applyProc.processType(res.getTypeWithDependentSubstitutor, expr, state)
cand = applyProc.candidates
case _ =>
}
}
if (cand.length == 0 && ScalaPsiUtil.approveDynamic(tp, getProject, getResolveScope) && call.isDefined) {
cand = ScalaPsiUtil.processTypeForUpdateOrApplyCandidates(call.get, tp, isShape = true, noImplicits = true, isDynamic = true)
}
cand
}
type Data = (ScType, Seq[ScExpression], Option[MethodInvocation])
CachesUtil.getMappedWithRecursionPreventingWithRollback[ScExpression, Data,
Array[ScalaResolveResult]](this, (tp, exprs, call),
CachesUtil.EXPRESSION_APPLY_SHAPE_RESOLVE_KEY,
(expr: ScExpression, tuple: Data) => inner(expr, tuple._1, tuple._2, tuple._3),
Array.empty[ScalaResolveResult], PsiModificationTracker.MODIFICATION_COUNT)
}
}
object ScExpression {
  /** Result of typing an expression: the type itself, plus the imports and
    * implicit conversion (if any) that were used to obtain it. */
  case class ExpressionTypeResult(tr: TypeResult[ScType],
                                  importsUsed: scala.collection.Set[ImportUsed],
                                  implicitFunction: Option[PsiNamedElement])

  /** Extractor matching an expression against its computed type. */
  object Type {
    def unapply(exp: ScExpression): Option[ScType] = exp.getType(TypingContext.empty).toOption
  }
} | triggerNZ/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScExpression.scala | Scala | apache-2.0 | 27,637 |
/**
* Copyright (C) 2013 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.fr
import org.orbeon.dom
import org.orbeon.dom.saxon.DocumentWrapper
import org.orbeon.oxf.pipeline.api.PipelineContext
import org.orbeon.oxf.processor.SimpleProcessor
import org.orbeon.oxf.properties.{Properties, PropertySet}
import org.orbeon.oxf.util.StringUtils._
import org.orbeon.oxf.util.{CoreCrossPlatformSupport, XPath}
import org.orbeon.oxf.xml.dom.Support
import org.orbeon.oxf.xml.{TransformerUtils, XMLReceiver}
import org.orbeon.saxon.om.NodeInfo
import org.orbeon.scaxon.NodeInfoConversions.unsafeUnwrapElement
import org.orbeon.scaxon.SimplePath._
// Processor to replace or add resources based on properties
//
// An property looks like: oxf.fr.resource.*.*.en.detail.labels.save
//
// NOTE: We used to do this in XSLT, but when it came to implement *adding* missing resources, the level of complexity
// increased too much and readability would have suffered so we rewrote in Scala.
class ResourcesPatcher extends SimpleProcessor {

  /** Reads the `data` and `instance` inputs, patches the resources document in
    * place from configuration properties, and streams it to the receiver. */
  def generateData(pipelineContext: PipelineContext, xmlReceiver: XMLReceiver): Unit = {

    val resourcesDoc = readInputAsOrbeonDom(pipelineContext, "data")

    // Root element of the `instance` input; it carries the app/form names.
    val instanceRootElem =
      new DocumentWrapper(readInputAsOrbeonDom(pipelineContext, "instance"), null, XPath.GlobalConfiguration) / *

    val appForm =
      AppForm(
        instanceRootElem / "app"  stringValue,
        instanceRootElem / "form" stringValue
      )

    // Optional comma-separated list of languages to keep, taken from the request
    val langsOpt =
      CoreCrossPlatformSupport.externalContext.getRequest.getFirstParamAsString("langs") map (_.splitTo[Set](","))

    ResourcesPatcher.transform(resourcesDoc, appForm, langsOpt)(Properties.instance.getPropertySet)
    TransformerUtils.writeOrbeonDom(resourcesDoc, xmlReceiver)
  }
}
object ResourcesPatcher {

  private val Prefix   = "oxf.fr.resource"
  private val WildCard = "*"

  /**
   * Patches `resourcesDocument` in place from properties of the form
   * `oxf.fr.resource.$app.$form.$lang.path.to.resource`.
   *
   * @param resourcesDocument document to patch (mutated in place)
   * @param appForm           app/form names used to select matching properties
   * @param langsOpt          if present, only these languages are kept in the document
   */
  def transform(
    resourcesDocument : dom.Document,
    appForm           : AppForm,
    langsOpt          : Option[Set[String]] = None
  )(implicit
    properties        : PropertySet
  ): Unit = {

    // Start by filtering out unwanted languages if specified
    langsOpt foreach { langs =>
      resourcesDocument.getRootElement.elements.filterNot(e => langs(e.attributeValue(XMLNames.XMLLangQName))) foreach
        (_.detach())
    }

    val resourceElems = new DocumentWrapper(resourcesDocument, null, XPath.GlobalConfiguration).rootElement / "resource"

    val propertyNames = properties.propertiesStartsWith((Prefix :: appForm.toList).mkString("."))

    // In 4.6 summary/detail buttons are at the top level
    def filterPathForBackwardCompatibility(path: List[String]): List[String] = path match {
      case ("detail" | "summary") :: "buttons" :: _ => path drop 1
      case _ => path
    }

    // (language-or-*, resource path, value) triples gathered from matching properties
    val langPathValue = propertyNames flatMap { propertyName =>
      val _ :: _ :: _ :: _ :: _ :: lang :: resourceTokens = propertyName.splitTo[List](".")
      // Property name with possible `*` replaced by actual app/form name
      val expandedPropertyName = Prefix :: appForm.toList ::: lang :: resourceTokens mkString "."
      // Had a case where value was null (more details would be useful)
      val value = properties.getNonBlankString(expandedPropertyName)
      value.map((lang, filterPathForBackwardCompatibility(resourceTokens), _))
    }

    // Return all languages or the language specified if it exists
    // For now we don't support creating new top-level resource elements for new languages.
    def findConcreteLanguages(langOrWildcard: String) = {
      val allLanguages =
        resourceElems attValue XMLNames.XMLLangQName

      val filtered =
        if (langOrWildcard == WildCard)
          allLanguages
        else
          allLanguages filter (_ == langOrWildcard)

      filtered.distinct
    }

    def resourceElemsForLang(lang: String) =
      resourceElems filter (_.attValueOpt(XMLNames.XMLLangQName) contains lang) map unsafeUnwrapElement

    // Update or create elements and set values
    for {
      (langOrWildcard, path, value) <- langPathValue.distinct
      lang                          <- findConcreteLanguages(langOrWildcard)
      rootForLang                   <- resourceElemsForLang(lang)
    } locally {
      val elem = Support.ensurePath(rootForLang, path map dom.QName.apply)
      elem.attributeOpt("todo") foreach elem.remove // value is now provided: no longer "todo"
      elem.setText(value)
    }

    def hasTodo(e: NodeInfo) =
      e.attValueOpt("todo") contains "true"

    def isBracketed(s: String) =
      s.startsWith("[") && s.endsWith("]")

    // Mark remaining untranslated leaf resources by bracketing their text, e.g. `[Save]`
    for {
      e    <- resourceElems descendant *
      if ! e.hasChildElement && hasTodo(e) && ! isBracketed(e.stringValue)
      elem = unsafeUnwrapElement(e)
    } locally {
      elem.attributeOpt("todo") foreach elem.remove
      elem.setText(s"[${elem.getText}]")
    }
  }
} | orbeon/orbeon-forms | form-runner/jvm/src/main/scala/org/orbeon/oxf/fr/ResourcesPatcher.scala | Scala | lgpl-2.1 | 5,515 |
package beam.analysis.physsim
import java.util.{ArrayList, List}
import java.util
import beam.analysis.plots.GraphsStatsAgentSimEventsListener
import beam.sim.OutputDataDescription
import beam.utils.OutputDataDescriptor
object PhyssimCalcLinkSpeedStatsObject extends OutputDataDescriptor {

  /**
   * Describes the fields written to the link-speed output file.
   *
   * @return list of data description objects, one per output column
   */
  def getOutputDataDescriptions: util.List[OutputDataDescription] = {
    val outputFilePath: String = GraphsStatsAgentSimEventsListener.CONTROLLER_IO
      .getIterationFilename(0, PhyssimCalcLinkSpeedStats.outputFileName + ".csv")
    val outputDirPath: String = GraphsStatsAgentSimEventsListener.CONTROLLER_IO.getOutputPath
    // Path of the output file relative to the run's output directory
    val relativePath: String = outputFilePath.replace(outputDirPath, "")

    def describe(field: String, description: String): OutputDataDescription =
      OutputDataDescription(getClass.getSimpleName, relativePath, field, description)

    val descriptions: util.List[OutputDataDescription] = new util.ArrayList[OutputDataDescription]
    descriptions.add(describe("Bin", "A given time slot within a day"))
    descriptions.add(
      describe(
        "AverageLinkSpeed",
        "The average speed at which a vehicle can travel across the network during the given time bin"
      )
    )
    descriptions
  }
}
| colinsheppard/beam | src/main/java/beam/analysis/physsim/PhyssimCalcLinkSpeedStatsObject.scala | Scala | gpl-3.0 | 1,444 |
package ps.tricerato.pureimage
import org.apache.commons.io.IOUtils
package object test {
  // Run AWT headless so image-related code works without a display (e.g. on CI).
  System.setProperty("java.awt.headless", "true")
  /** Reads a classpath resource fully into a byte array. */
  def resource(name: String) = IOUtils.toByteArray(getClass.getResourceAsStream(name))
  /** Raw bytes of the bundled `zardoz.jpeg` test image. */
  def zardozJpeg = resource("/zardoz.jpeg")
} | non/pureimage | src/test/scala/ps/tricerato/pureimage/test/package.scala | Scala | mit | 277 |
package com.twitter.finagle.memcached.protocol.text
import com.twitter.io.Buf
import com.twitter.finagle.decoder.{Decoder => FinagleDecoder}
import scala.annotation.tailrec
import scala.collection.mutable.ArrayBuffer
/**
 * Adapts a memcached [[FrameDecoder]] to Finagle's generic [[FinagleDecoder]]
 * interface: accumulates incoming bytes and repeatedly extracts either
 * newline-terminated text frames or fixed-length data frames, as dictated by
 * the wrapped decoder.
 *
 * Mutable state is unsynchronized: calls are assumed to arrive serially.
 */
private[finagle] final class FramingDecoder[Result](
  frameDecoder: FrameDecoder[Result])
    extends FinagleDecoder[Result] {

  private final val InitialBufferSize: Int = 4
  private final val MaxBufferSize: Int = 32

  // Decoded messages pending delivery to the caller.
  private[this] var outputMessages: ArrayBuffer[Result] = new ArrayBuffer(InitialBufferSize)

  // Bytes received but not yet consumed as a complete frame.
  private[this] var accumulatedBytes: Buf = Buf.Empty

  def apply(data: Buf): IndexedSeq[Result] = {
    accumulatedBytes = accumulatedBytes.concat(data)
    decodeMessages()
    if (outputMessages.isEmpty) IndexedSeq.empty
    else if (outputMessages.length > MaxBufferSize) {
      // we don't want to leak memory by keeping a huge ArrayBuffer around, so just
      // send this one and make a new, smaller one
      val result = outputMessages
      outputMessages = new ArrayBuffer[Result](InitialBufferSize)
      result
    } else {
      // To save allocations we make a new ArrayBuffer with the exact right size and
      // copy the contents so we're not sending a half full collection or resizing
      val result = new ArrayBuffer[Result](outputMessages.length)
      result ++= outputMessages
      outputMessages.clear()
      result
    }
  }

  // We decode at least once, and continue while frames keep being extracted.
  @tailrec
  private[this] def decodeMessages(): Unit = {
    val bytesNeeded = frameDecoder.nextFrameBytes()
    val continue =
      if (bytesNeeded == -1) decodeTextLine() // -1 signals a line-delimited text frame
      else decodeRawBytes(bytesNeeded)

    if (continue) decodeMessages()
  }

  // Attempts to extract one line-terminated text frame.
  // Returns true if a frame was consumed (so decoding should continue).
  private[this] def decodeTextLine(): Boolean = {
    if (accumulatedBytes.isEmpty) false
    else {
      val frameLength = Framer.bytesBeforeLineEnd(accumulatedBytes)
      if (frameLength == -1) false // Not enough data for a tokens frame
      else {
        // We have received a text frame. Extract the frame and decode it.
        val frameBuf = accumulatedBytes.slice(0, frameLength)
        // Remove the extracted frame from the accumulator, stripping the newline (2 chars)
        accumulatedBytes = accumulatedBytes.slice(frameLength + 2, Int.MaxValue)
        frameDecoder.decodeData(frameBuf, outputMessages)
        true
      }
    }
  }

  // Attempts to extract one fixed-length data frame plus its 2-byte line ending.
  // Returns true if a frame was consumed (so decoding should continue).
  private[this] def decodeRawBytes(nextByteFrameSize: Int): Boolean = {
    // We add the 2 bytes for the trailing line ending
    if (accumulatedBytes.length < nextByteFrameSize + 2) false // Not enough data
    else {
      val dataFrame = accumulatedBytes.slice(0, nextByteFrameSize)
      accumulatedBytes = accumulatedBytes.slice(nextByteFrameSize + 2, Int.MaxValue)
      frameDecoder.decodeData(dataFrame, outputMessages)
      true
    }
  }
}
| koshelev/finagle | finagle-memcached/src/main/scala/com/twitter/finagle/memcached/protocol/text/FramingDecoder.scala | Scala | apache-2.0 | 2,829 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.stream.table.stringexpr
import org.apache.flink.api.scala._
import org.apache.flink.table.api.scala._
import org.apache.flink.table.runtime.utils.JavaUserDefinedAggFunctions.WeightedAvg
import org.apache.flink.table.utils.TableTestBase
import org.junit.Test
class AggregateStringExpressionTest extends TableTestBase {

  @Test
  def testGroupedAggregate(): Unit = {
    val testUtil = streamTestUtil()
    val table = testUtil.addTable[(Int, Long, String)]('int, 'long, 'string)

    val weightAvgFun = new WeightedAvg
    testUtil.tableEnv.registerFunction("weightAvgFun", weightAvgFun)

    // Scala (expression) API
    val scalaTable = table
      .groupBy('string)
      .select('int.count as 'cnt, weightAvgFun('long, 'int))

    // Java (string) API
    val javaTable = table
      .groupBy("string")
      .select("int.count as cnt, weightAvgFun(long, int)")

    // Both APIs must yield equivalent tables
    verifyTableEquals(javaTable, scalaTable)
  }

  @Test
  def testNonGroupedAggregate(): Unit = {
    val testUtil = streamTestUtil()
    val table = testUtil.addTable[(Int, Long, String)]('int, 'long, 'string)

    // Scala (expression) API
    val scalaTable = table.select('int.count as 'cnt, 'long.sum)

    // Java (string) API
    val javaTable = table.select("int.count as cnt, long.sum")

    verifyTableEquals(javaTable, scalaTable)
  }
}
| zimmermatt/flink | flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/stream/table/stringexpr/AggregateStringExpressionTest.scala | Scala | apache-2.0 | 2,102 |
package arena.algorithms.sorting
/**
* @author Bhupendra Bhudia <bhupendra.bhudia@quedex.co>
* 16/11/2015 22:44
*/
class QuickSort extends Sortable {

  /** Sorts `nums` in place using recursive quicksort (middle-element pivot). */
  def sort(nums: Array[Int]) {
    if (nums.length > 1) quickSort(nums, 0, nums.length - 1)
  }

  // Partitions nums(lower..upper) around the middle element, then recurses
  // into both partitions.
  private def quickSort(nums: Array[Int], lower: Int, upper: Int) {
    var left = lower
    var right = upper
    val pivot: Int = nums(lower + (upper - lower) / 2)
    while (left <= right) {
      // Advance both cursors past elements already on the correct side.
      while (nums(left) < pivot) left += 1
      while (nums(right) > pivot) right -= 1
      if (left <= right) {
        swap(nums, left, right)
        left += 1
        right -= 1
      }
    }
    if (lower < right) quickSort(nums, lower, right)
    if (left < upper) quickSort(nums, left, upper)
  }
}
| quedexco/arena-scala | src/main/scala/arena/algorithms/sorting/QuickSort.scala | Scala | apache-2.0 | 775 |
package com.delprks.productservicesprototype.config
import com.typesafe.config.{Config => AkkaConfig, ConfigFactory => AkkaConfigFactory}
/** Typesafe-config backed settings, loaded once from the default application config. */
trait Config {
  protected lazy val config: AkkaConfig = AkkaConfigFactory.load()

  // Service identity and HTTP binding
  lazy val serviceName: String = config.getString("service.name")
  lazy val httpInterface: String = config.getString("service.http.interface")
  lazy val httpPort: Int = config.getInt("service.http.port")

  // Pagination limits
  lazy val defaultPageLimit: Int = config.getInt("service.pagination.default-limit")
  lazy val maximumPageLimit: Int = config.getInt("service.pagination.max-limit")

  // Schema / documentation URLs surfaced in responses
  lazy val offerSchemaUrl: String = config.getString("service.schema.offer-response-url")
  lazy val errorSchemaUrl: String = config.getString("service.schema.error-response-url")
  lazy val errorDocumentationUrl: String = config.getString("service.error.documentation-url")
}
| delprks/product-services-prototype | src/main/scala/com/delprks/productservicesprototype/config/Config.scala | Scala | mit | 876 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.keras
import com.intel.analytics.bigdl.dllib.nn.{MeanAbsolutePercentageCriterion, MeanSquaredLogarithmicCriterion}
class MeanSquaredLogarithmicCriterionSpec extends KerasBaseSpec {

  "MeanSquaredLogarithmicCriterion" should "be ok" in {
    // Compare loss output and gradients against Keras' mean_squared_logarithmic_error
    // on random inputs in [-1, 1].
    val kerasCode =
      """
        |input_tensor = Input(shape=[10])
        |target_tensor = Input(shape=[10])
        |loss = mean_squared_logarithmic_error(target_tensor, input_tensor)
        |input = np.random.uniform(-1, 1, [5, 10])
        |Y = np.random.uniform(-1, 1, [5, 10])
      """.stripMargin
    val criterion = MeanSquaredLogarithmicCriterion[Float]()
    checkOutputAndGradForLoss(criterion, kerasCode)
  }

  "MeanSquaredLogarithmicCriterion" should "be ok with epsilon" in {
    // Inputs very close to zero; presumably exercises the criterion's epsilon
    // handling inside log(1 + x) — confirm against the implementation.
    val kerasCode =
      """
        |input_tensor = Input(shape=[3])
        |target_tensor = Input(shape=[3])
        |loss = mean_squared_logarithmic_error(target_tensor, input_tensor)
        |input = np.array([[1e-07, 1e-06, 1e-08]])
        |Y = np.array([[1, 2, 3]])
      """.stripMargin
    val criterion = MeanSquaredLogarithmicCriterion[Float]()
    checkOutputAndGradForLoss(criterion, kerasCode)
  }
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/nn/keras/MeanSquaredLogarithmicCriterionSpec.scala | Scala | apache-2.0 | 1,789 |
/*
Copyright 2015 Coursera Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.coursera.courier.generator.specs
import com.linkedin.data.DataList
import com.linkedin.data.DataMap
import com.linkedin.data.schema.RecordDataSchema
import com.linkedin.pegasus.generator.spec.RecordTemplateSpec
import org.coursera.courier.generator.ScaladocEscaping
import org.coursera.courier.generator.TypeConversions
import scala.collection.JavaConverters._
/** Code-generation spec for a Pegasus record: wraps a [[RecordTemplateSpec]]
  * and provides field/parameter rendering helpers used by the Scala templates. */
case class RecordDefinition(override val spec: RecordTemplateSpec) extends Definition(spec) {
  def recordSchema: RecordDataSchema = spec.getSchema
  def schema: Option[RecordDataSchema] = Some(recordSchema)

  // Records are backed by a DataMap at runtime.
  override def rawDataType = classOf[DataMap].getSimpleName

  // Fields to generate; omitted fields are excluded.
  def fields: Seq[RecordField] = spec.getFields.asScala.map(RecordField).filter(!_.omit).toSeq

  def directReferencedTypes: Set[Definition] = fields.map(_.typ).toSet

  def customInfosToRegister: Seq[(RecordField, Seq[CustomInfoDefinition])] = {
    // customInfos only appear once per field set, so we must consider even omitted field when
    // accumulating customInfos.
    // TODO(jbetz): Improve pegasus internals to always return customInfos fields. De-duplication
    // should happen up at routines like this one, not in the RecordTemplateSpec.
    val fieldsInclOmitted = spec.getFields.asScala.map(RecordField).toSeq
    fieldsInclOmitted.map { field =>
      field -> field.customInfo.toSeq.flatMap { customInfo =>
        customInfo.customInfosToRegister
      }
    }
  }

  // parameter list rendering utilities

  /** Renders `name: Type = default` pairs for the generated constructor. */
  def fieldParamDefs: String = {
    fields.map { field =>
      val default = defaultLiteral(field)
      s"${field.name}: ${field.scalaTypeFullname}" + default.map(v => s" = $v").getOrElse("")
    }.mkString(", ")
  }

  // Scala literal for a field's default value; optional fields are wrapped in Some/None.
  private def defaultLiteral(field: RecordField): Option[String] = {
    field.default.map {
      case RequiredFieldDefault(default) =>
        toDefaultLiteral(default, field.typ)
      case OptionalFieldDefault(Some(default)) =>
        val literal = toDefaultLiteral(default, field.typ)
        s"Some($literal)"
      case OptionalFieldDefault(None) =>
        "None"
    }
  }

  /** Renders parameters for the generated `copy` method, defaulting to current values. */
  def copyFieldParamDefs: String = {
    fields.map { field =>
      s"${field.name}: ${field.scalaTypeFullname} = this.${field.name}"
    }.mkString(", ")
  }

  def fieldsAsParams: String = {
    fields.map(_.name).mkString(", ")
  }

  def fieldsAsTypeParams: String = {
    fields.map(_.scalaTypeFullname).mkString(", ")
  }

  def prefixedFieldParams(prefix: String): String = {
    fields.map(field => s"$prefix${field.name}").mkString(", ")
  }

  def scalaDoc: Option[String] = Option(recordSchema.getDoc)
    .flatMap(ScaladocEscaping.stringToScaladoc)

  // e.g. a class named `Foo` gets the implicit companion name `fooCompanion`.
  def implicitCompanionName: String = {
    val companionName = spec.getClassName
    companionName.take(1).toLowerCase + companionName.tail + "Companion"
  }

  // Dispatches on the field's type to render its default value as a Scala literal.
  private[this] def toDefaultLiteral(any: AnyRef, definition: Definition): String = {
    definition match {
      case _: PrimitiveDefinition | _: TyperefDefinition =>
        TypeConversions.anyToLiteral(any)
      case enum: EnumDefinition =>
        any match {
          case symbol: String => enumToDefault(enum, symbol)
          case _: Any =>
            throw new IllegalArgumentException("'any' must be a string for an enum")
        }
      case _: RecordDefinition | _: MapDefinition | _: UnionDefinition =>
        any match {
          case data: DataMap => dataMapToDefault(definition, data)
          case _: Any =>
            throw new IllegalArgumentException("'any' must be a DataMap for a record, map or union")
        }
      case _: ArrayDefinition =>
        any match {
          case data: DataList => dataListToDefault(definition, data)
          case _: Any =>
            throw new IllegalArgumentException("'any' must be a DataList for an array")
        }
      case customType: ClassDefinition => customTypeToDefault(customType, any)
      case fixed: FixedDefinition => ??? // TODO: support fixed types
    }
  }

  private[this] def customTypeToDefault(customType: ClassDefinition, any: AnyRef): String = {
    val value = TypeConversions.anyToLiteral(any)
    s"DataTemplateUtil.coerceOutput($value, classOf[${customType.scalaTypeFullname}])"
  }

  private[this] def enumToDefault(enum: EnumDefinition, symbol: String): String = {
    val symbolLiteral = TypeConversions.toLiteral(symbol)
    s"${enum.enumFullname}.withName($symbolLiteral)"
  }

  // TODO(jbetz): This is an "ugly" way to generate the default value. If/when we have time,
  // we should instead generate the default entirely in scala types, e.g.:
  // Foo(Bar(1), Baz(2)) instead of the current
  // Foo(DataTemplates.mapLiteral("""{ "bar": 1, "baz": 2 }"""), ...)
  private[this] def dataMapToDefault(definition: Definition, data: DataMap): String = {
    val mapLiteral = TypeConversions.toLiteral(data)
    s"${definition.scalaTypeFullname}.build($mapLiteral, DataConversion.SetReadOnly)"
  }

  // TODO(jbetz): Same as for maps, we should improve the default representation if/when we have
  // time
  private[this] def dataListToDefault(definition: Definition, data: DataList): String = {
    val arrayLiteral = TypeConversions.toLiteral(data)
    s"${definition.scalaTypeFullname}.build($arrayLiteral, DataConversion.SetReadOnly)"
  }
}
| coursera/courier | scala/generator/src/main/scala/org/coursera/courier/generator/specs/RecordDefinition.scala | Scala | apache-2.0 | 5,825 |
package org.freeour.app.controllers
import java.sql.Timestamp
import java.text.SimpleDateFormat
import java.util.Date

import org.freeour.app.FreeourStack
import org.freeour.app.auth.AuthenticationSupport
import org.freeour.app.models._
import org.json4s.{DefaultFormats, Formats}
import org.scalatra.Ok
import org.scalatra.json.{JValueResult, JacksonJsonSupport}

import scala.slick.driver.MySQLDriver.simple._
import scala.util.control.NonFatal
/**
* Created by Bill Lv on 2/11/15.
*/
/**
 * Admin-only controller serving the admin UI page and JSON CRUD endpoints for
 * activities and users. Every route requires an authenticated admin user;
 * others receive a 403 page.
 */
case class AdminController(val db: Database) extends FreeourStack with AuthenticationSupport
  with JValueResult with JacksonJsonSupport {
  protected implicit val jsonFormats: Formats = DefaultFormats

  // Format used both to render activity start times and to parse them back.
  val formatter: SimpleDateFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm")

  before() {
    requireLogin()
    if (!user.isAdmin) {
      halt(403, ssp("/errors/error403", "layout" -> ""))
    }
    // Data endpoints respond with JSON; the "/" route overrides this below.
    contentType = formats("json")
  }

  /** Admin landing page (HTML). */
  get("/") {
    contentType = "text/html"
    ssp("/admin/index", "layout" -> "", "userId" -> user.id.get)
  }

  /** Lists all activities as JSON, with formatted start times. */
  get("/data/activities") {
    db.withSession { implicit session =>
      val activities: List[Activities#TableElementType] = ActivityRepository.list
      activities.map(p => ActivityJson(p.id, p.title, p.address, p.description, formatter.format(p.startTime),
        p.available, p.initiator))
    }
  }

  /**
   * Creates (id absent or -1) or updates an activity.
   * Responds with the activity id on success, -1 on error.
   */
  post("/data/activities") {
    // The start time arrives as a quoted string; strip the quotes before parsing.
    val dateString: String = params("startTimeString").replaceAll("\"", "")
    val parsedDate: Date = formatter.parse(dateString)
    var status: Int = 0
    val id: Long = params.getOrElse("id", "-1").toLong
    val activity: Activity = Activity(None, params("title"), params("address"), params.getOrElse("description", ""),
      new Timestamp(parsedDate.getTime),
      params("available").toInt match {
        case 1 => true
        case _ => false
      },
      user.id.get)
    try {
      db.withTransaction { implicit session =>
        if (id == -1) {
          // New activity: insert it together with its initial stats row.
          val activityId = (ActivityRepository returning ActivityRepository.map(_.id)) += activity
          ActivityStatsRepository += ActivityStats(None, activityId.toLong, Some(1), None)
          status = activityId.toInt
        } else {
          activity.id = Some(id)
          ActivityRepository.update(activity)
          status = id.toInt
        }
      }
    }
    catch {
      // NonFatal only: fatal errors (OOM etc.) must propagate, not be swallowed.
      case NonFatal(e) =>
        logger.info("Save activity error", e)
        status = -1
    }
    Ok(response.getWriter.print(status))
  }

  /** Deletes an activity and its dependent rows. Responds 0 on success, -1 on error. */
  post("/data/activities/:id") {
    val activityId: Long = params("id").toLong
    var status: Int = 0
    try {
      db.withTransaction { implicit session =>
        // Delete dependent rows first, then the activity itself.
        ActivityStatsRepository.deleteByActivityId(activityId)
        ActivityUserRepository.deleteByActivityId(activityId)
        ActivityRepository.deleteById(activityId)
      }
    }
    catch {
      case NonFatal(e) =>
        logger.info("Delete activity error", e)
        status = -1
    }
    Ok(response.getWriter.print(status))
  }

  /** Lists all users as JSON. */
  get("/data/users") {
    db.withSession { implicit session =>
      UserRepository.list
    }
  }

  /**
   * Updates an existing user. Responds with the user id on success,
   * -1 when no id was supplied and -2 on error.
   */
  post("/data/users") {
    var status: Int = 0
    val id: Long = params.getOrElse("id", "-1").toLong
    if (id == -1) {
      status = -1
    } else {
      try {
        db.withTransaction { implicit session =>
          UserRepository.update(User(Some(id), params("email"), params("password")
            , params("nickname"), Some(params("phone")),
            params("isAdmin").toInt match {
              case 1 => true
              case _ => false
            }))
          status = id.toInt
        }
      }
      catch {
        case NonFatal(e) =>
          logger.info("Update user error", e)
          status = -2
      }
    }
    Ok(response.getWriter.print(status))
  }
}
| ideaalloc/freeour | src/main/scala/org/freeour/app/controllers/AdminController.scala | Scala | gpl-2.0 | 3,743 |
package scala.lms
package internal
import scala.reflect.SourceContext
import scala.annotation.unchecked.uncheckedVariance
import scala.collection.mutable.ListBuffer
import java.lang.{StackTraceElement,Thread}
/**
* The Expressions trait houses common AST nodes. It also manages a list of encountered Definitions which
* allows for common sub-expression elimination (CSE).
*
* @since 0.1
*/
trait Expressions extends Utils {
  /** Atomic expression node: either a constant ([[Const]]) or a symbol ([[Sym]]). */
  abstract class Exp[+T:Manifest] { // constants/symbols (atomic)
    def tp: Manifest[T @uncheckedVariance] = manifest[T] //invariant position! but hey...
    def pos: List[SourceContext] = Nil
  }
  /** Literal constant. Equality also compares the manifest, so constants with
    * equal values but different static types are distinguished. */
  case class Const[+T:Manifest](x: T) extends Exp[T] {
    override def equals(other: Any) = other match {
      case c: Const[_] => x == c.x && tp == c.tp
      case _ => false
    }
  }
  /** Symbol: a unique id standing for the definition bound to it in the graph. */
  case class Sym[+T:Manifest](val id: Int) extends Exp[T] {
    // Source positions recorded for diagnostics; mutated via withPos.
    var sourceContexts: List[SourceContext] = Nil
    override def pos = sourceContexts
    def withPos(pos: List[SourceContext]) = { sourceContexts :::= pos; this }
  }
case class Variable[+T](val e: Exp[Variable[T]]) // TODO: decide whether it should stay here ... FIXME: should be invariant
var nVars = 0
def fresh[T:Manifest]: Sym[T] = Sym[T] { nVars += 1; if (nVars%1000 == 0) printlog("nVars="+nVars); nVars -1 }
def fresh[T:Manifest](pos: List[SourceContext]): Sym[T] = fresh[T].withPos(pos)
def quotePos(e: Exp[Any]): String = e.pos match {
case Nil => "<unknown>"
case cs =>
def all(cs: SourceContext): List[SourceContext] = cs.parent match {
case None => List(cs)
case Some(p) => cs::all(p)
}
cs.map(c => all(c).reverse.map(c => c.fileName.split("/").last + ":" + c.line).mkString("//")).mkString(";")
}
abstract class Def[+T] { // operations (composite)
override final lazy val hashCode = scala.runtime.ScalaRunTime._hashCode(this.asInstanceOf[Product])
}
abstract class Stm // statement (links syms and definitions)
def infix_lhs(stm: Stm): List[Sym[Any]] = stm match {
case TP(sym, rhs) => sym::Nil
}
def infix_rhs(stm: Stm): Any = stm match { // clients use syms(e.rhs), boundSyms(e.rhs) etc.
case TP(sym, rhs) => rhs
}
def infix_defines[A](stm: Stm, sym: Sym[A]): Option[Def[A]] = stm match {
case TP(`sym`, rhs: Def[A]) => Some(rhs)
case _ => None
}
def infix_defines[A: Manifest](stm: Stm, rhs: Def[A]): Option[Sym[A]] = stm match {
case TP(sym: Sym[A], `rhs`) if sym.tp <:< manifest[A] => Some(sym)
case _ => None
}
case class TP[+T](sym: Sym[T], rhs: Def[T]) extends Stm
// graph construction state
var globalDefs: List[Stm] = Nil
var localDefs: List[Stm] = Nil
var globalDefsCache: Map[Sym[Any],Stm] = Map.empty
def reifySubGraph[T](b: =>T): (T, List[Stm]) = {
val saveLocal = localDefs
val saveGlobal = globalDefs
val saveGlobalCache = globalDefsCache
localDefs = Nil
val r = b
val defs = localDefs
localDefs = saveLocal
globalDefs = saveGlobal
globalDefsCache = saveGlobalCache
(r, defs)
}
def reflectSubGraph(ds: List[Stm]): Unit = {
val lhs = ds.flatMap(_.lhs)
assert(lhs.length == lhs.distinct.length, "multiple defs: " + ds)
// equivalent to: globalDefs filter (_.lhs exists (lhs contains _))
val existing = lhs flatMap (globalDefsCache get _)
assert(existing.isEmpty, "already defined: " + existing + " for " + ds)
localDefs = localDefs ::: ds
globalDefs = globalDefs ::: ds
for (stm <- ds; s <- stm.lhs) {
globalDefsCache += (s->stm)
}
}
def findDefinition[T](s: Sym[T]): Option[Stm] =
globalDefsCache.get(s)
//globalDefs.find(x => x.defines(s).nonEmpty)
def findDefinition[T: Manifest](d: Def[T]): Option[Stm] =
globalDefs.find(x => x.defines(d).nonEmpty)
def findOrCreateDefinition[T:Manifest](d: Def[T], pos: List[SourceContext]): Stm =
findDefinition[T](d) map { x => x.defines(d).foreach(_.withPos(pos)); x } getOrElse {
createDefinition(fresh[T](pos), d)
}
def findOrCreateDefinitionExp[T:Manifest](d: Def[T], pos: List[SourceContext]): Exp[T] = {
val stm = findOrCreateDefinition(d, pos)
val optExp = stm.defines(d)
optExp.get
}
def createDefinition[T](s: Sym[T], d: Def[T]): Stm = {
val f = TP(s, d)
reflectSubGraph(List(f))
f
}
protected implicit def toAtom[T:Manifest](d: Def[T])(implicit pos: SourceContext): Exp[T] = {
findOrCreateDefinitionExp(d, List(pos)) // TBD: return Const(()) if type is Unit??
}
object Def {
def unapply[T](e: Exp[T]): Option[Def[T]] = e match {
case s @ Sym(_) =>
findDefinition(s).flatMap(_.defines(s))
case _ =>
None
}
}
// dependencies
// regular data (and effect) dependencies
def syms(e: Any): List[Sym[Any]] = e match {
case s: Sym[Any] => List(s)
case ss: Iterable[Any] => ss.toList.flatMap(syms(_))
// All case classes extend Product!
case p: Product =>
// performance hotspot: this is the same as
// p.productIterator.toList.flatMap(syms(_))
// but faster
val iter = p.productIterator
val out = new ListBuffer[Sym[Any]]
while (iter.hasNext) {
val e = iter.next()
out ++= syms(e)
}
out.result
case _ => Nil
}
// symbols which are bound in a definition
def boundSyms(e: Any): List[Sym[Any]] = e match {
case ss: Iterable[Any] => ss.toList.flatMap(boundSyms(_))
case p: Product => p.productIterator.toList.flatMap(boundSyms(_))
case _ => Nil
}
// symbols which are bound in a definition, but also defined elsewhere
def tunnelSyms(e: Any): List[Sym[Any]] = e match {
case ss: Iterable[Any] => ss.toList.flatMap(tunnelSyms(_))
case p: Product => p.productIterator.toList.flatMap(tunnelSyms(_))
case _ => Nil
}
// symbols of effectful components of a definition
def effectSyms(x: Any): List[Sym[Any]] = x match {
case ss: Iterable[Any] => ss.toList.flatMap(effectSyms(_))
case p: Product => p.productIterator.toList.flatMap(effectSyms(_))
case _ => Nil
}
// soft dependencies: they are not required but if they occur,
// they must be scheduled before
def softSyms(e: Any): List[Sym[Any]] = e match {
// empty by default
//case s: Sym[Any] => List(s)
case ss: Iterable[Any] => ss.toList.flatMap(softSyms(_))
case p: Product => p.productIterator.toList.flatMap(softSyms(_))
case _ => Nil
}
// generic symbol traversal: f is expected to call rsyms again
def rsyms[T](e: Any)(f: Any=>List[T]): List[T] = e match {
case s: Sym[Any] => f(s)
case ss: Iterable[Any] => ss.toList.flatMap(f)
case p: Product => p.productIterator.toList.flatMap(f)
case _ => Nil
}
// frequency information for dependencies: used/computed
// often (hot) or not often (cold). used to drive code motion.
def symsFreq(e: Any): List[(Sym[Any], Double)] = e match {
case s: Sym[Any] => List((s,1.0))
case ss: Iterable[Any] => ss.toList.flatMap(symsFreq(_))
case p: Product => p.productIterator.toList.flatMap(symsFreq(_))
//case _ => rsyms(e)(symsFreq)
case _ => Nil
}
def freqNormal(e: Any) = symsFreq(e)
def freqHot(e: Any) = symsFreq(e).map(p=>(p._1,p._2*1000.0))
def freqCold(e: Any) = symsFreq(e).map(p=>(p._1,p._2*0.5))
// bookkeeping
def reset { // used by delite?
nVars = 0
globalDefs = Nil
localDefs = Nil
globalDefsCache = Map.empty
}
}
| scalan/virtualization-lms-core | src/internal/Expressions.scala | Scala | bsd-3-clause | 7,490 |
package akka.http.extensions
import akka.http.extensions.security._
import akka.http.scaladsl.server.{Directives, Route}
import scala.collection.mutable
case object SpecialRealm extends Realm
case object VIPRealm extends Realm
trait PermissionControllers extends RegistrationControllers
{

  // Test controller exposing routes guarded by per-realm permissions.
  class Permissions(sessionController: TestSessionController,loginController: TestLoginController) extends AuthDirectives with Directives
  {
    case object OtherRealm extends Realm

    // Names of drugs added so far (grows via the PUT route below).
    var drugs:Set[String] = Set.empty

    // realm -> set of users permitted in that realm
    val permissions = new mutable.HashMap[Realm, mutable.Set[LoginInfo]] with mutable.MultiMap[Realm, LoginInfo]

    // Anybody known to the login controller belongs to UserRealm; any other
    // realm requires an explicit permission entry.
    def checkRights(user:LoginInfo,realm:Realm):Boolean = if(realm==UserRealm)
      loginController.exists(user)
    else
      permissions.get(realm).exists(_.contains(user))

    def add2realm(user:LoginInfo,realm: Realm) ={
      permissions.addBinding(realm,user)
    }

    def removeFromRealm(user:LoginInfo,realm: Realm) ={
      permissions.removeBinding(realm,user)
    }

    // Maps the "kind" request parameter to a realm; the empty string falls
    // back to UserRealm.
    lazy val realms: Map[String, Realm] = Map("user"->UserRealm,"vip"->VIPRealm,"special"->SpecialRealm,""->UserRealm)

    // PUT /add/drug?name=...&kind=...: an authenticated user may add a drug
    // if they have rights in the realm selected by "kind".
    def routes: Route =
      pathPrefix("add") {
        pathPrefix("drug") {
          put
          {
            parameter("name","kind"){ (name,kind)=>
              authenticate(sessionController.userByToken _){ user=>
                val realm: Realm = realms.getOrElse(kind, realms("user"))
                allow(user,realm, checkRights _)
                {
                  drugs = drugs + name
                  complete(s"drug $name added!")
                }
              }
            }
          }
        }
      }
  }
}
| denigma/akka-http-extensions | extensions/src/test/scala/akka/http/extensions/PermissionControllers.scala | Scala | mpl-2.0 | 1,681 |
package pl.newicom.eventstore
import akka.actor.Actor
import akka.persistence.PersistentRepr
import akka.serialization.{Serialization, SerializationExtension}
import eventstore.EventData
import pl.newicom.dddd.aggregate.EntityId
import pl.newicom.dddd.messaging.MetaData
import pl.newicom.dddd.messaging.event.{EventMessage, AggregateSnapshotId, DomainEventMessage}
import scala.reflect.ClassTag
import scala.util.Try
trait EventstoreSerializationSupport {
  this: Actor =>

  // Akka serialization extension of the hosting actor system.
  val serialization: Serialization = SerializationExtension(context.system)

  // Deserializes an EventStore event into a DomainEventMessage: the data blob
  // is an akka-persistence PersistentRepr, the metadata blob a MetaData map.
  def toDomainEventMessage(eventData: EventData): Try[DomainEventMessage] = {
    for {
      pr <- deserialize[PersistentRepr](eventData.data.value.toArray)
      metadata <- deserialize[MetaData](eventData.metadata.value.toArray)
    } yield {
      toDomainEventMessage(pr, metadata)
    }
  }

  // Same decoding, but produces a plain EventMessage (no aggregate snapshot id).
  def toEventMessage(eventData: EventData): Try[EventMessage] = {
    for {
      pr <- deserialize[PersistentRepr](eventData.data.value.toArray)
      metadata <- deserialize[MetaData](eventData.metadata.value.toArray)
    } yield {
      toEventMessage(pr, metadata)
    }
  }

  // Delegates to Akka serialization; the cast narrows Try[AnyRef] to Try[A].
  private def deserialize[A](bytes: Array[Byte])(implicit ct: ClassTag[A]): Try[A] =
    serialization.deserialize(bytes, ct.runtimeClass).asInstanceOf[Try[A]]

  private def toDomainEventMessage(pr: PersistentRepr, metadata: MetaData) = {
    // assumes the metadata always carries an "id" entry -- TODO confirm with the writers
    val id: EntityId = metadata.get("id")
    val aggrSnapId = new AggregateSnapshotId(pr.persistenceId, pr.sequenceNr)
    val event: AnyRef = pr.payload.asInstanceOf[AnyRef]
    new DomainEventMessage(aggrSnapId, event, id).withMetaData(Some(metadata)).asInstanceOf[DomainEventMessage]
  }

  private def toEventMessage(pr: PersistentRepr, metadata: MetaData) = {
    val id: EntityId = metadata.get("id")
    val event: AnyRef = pr.payload.asInstanceOf[AnyRef]
    new EventMessage(event, id).withMetaData(Some(metadata))
  }
}
| ahjohannessen/akka-ddd | eventstore-akka-persistence/src/main/scala/pl/newicom/eventstore/EventstoreSerializationSupport.scala | Scala | mit | 1,909 |
/*
* Copyright (c) 2015 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless
import scala.language.experimental.macros
import scala.reflect.macros.whitebox
// Wraps an F[T] instance obtained through deriver D; the D type parameter
// keeps derived instances distinguishable from ordinary implicit ones.
case class Orphan[F[_], D, T](instance: F[T])

object Orphan {
  // Materialized by macro; see OrphanMacros.materializeOrphanImpl.
  implicit def materializeOrphan[F[_], D, T]: Orphan[F, D, T] = macro OrphanMacros.materializeOrphanImpl[F, D, T]
}
// Materializes any implicitly available T while masking the enclosing
// implicit definition to avoid self-recursion (see OrphanMacros.materializeWrapped).
case class WrappedOrphan[T](instance: T)

object WrappedOrphan {
  // Materialized by macro; see OrphanMacros.materializeWrapped.
  implicit def apply[T]: WrappedOrphan[T] = macro OrphanMacros.materializeWrapped[T]
}
// Mix-in that makes instances produced by deriver D implicitly available as F[T].
trait OrphanDeriver[F[_], D] {
  implicit def materialize[T]: F[T] = macro OrphanMacros.materializeImpl[F, D, T]
}
/**
 * Macro bundle backing [[Orphan]], [[WrappedOrphan]] and [[OrphanDeriver]]:
 * resolves an `F[T]` either from an explicit deriver `D` or from the ordinary
 * implicit scope, preferring whichever is more specific.
 */
class OrphanMacros(val c: whitebox.Context) extends CaseClassMacros {
  import c.universe._

  /** Materializes an `F[T]` via deriver `D` (entry point for [[OrphanDeriver]]). */
  def materializeImpl[F[_], D, T]
    (implicit fTag: WeakTypeTag[F[_]], dTag: WeakTypeTag[D], tTag: WeakTypeTag[T]): Tree =
    materializeAux[F, D, T](false)

  /** Like [[materializeImpl]] but wraps the resolved instance in an `Orphan` value. */
  def materializeOrphanImpl[F[_], D, T]
    (implicit fTag: WeakTypeTag[F[_]], dTag: WeakTypeTag[D], tTag: WeakTypeTag[T]): Tree = {
    val inst = materializeAux[F, D, T](true)
    val fTpe = fTag.tpe.typeConstructor
    val dTpe = dTag.tpe
    val tTpe = tTag.tpe

    q"""
      new _root_.shapeless.Orphan[$fTpe, $dTpe, $tTpe]($inst)
    """
  }

  /**
   * Core resolution: probes the deriver `D` for an instance of `F[T]`, compares
   * it with whatever plain implicit search finds, and returns the better match.
   *
   * @param proxied true when invoked through the `Orphan` proxy (shifts the
   *                position of this materializer in `c.openImplicits`)
   */
  def materializeAux[F[_], D, T](proxied: Boolean)
    (implicit fTag: WeakTypeTag[F[_]], dTag: WeakTypeTag[D], tTag: WeakTypeTag[T]): Tree = {
    val fTcTpe = fTag.tpe.typeConstructor
    val dTpe = dTag.tpe
    val tTpe = tTag.tpe
    val appTpe = appliedType(fTcTpe, List(tTpe))

    // Guard against infinite implicit recursion: abort when this same
    // materializer (same symbol, prefix and expected type) appears again
    // further down the open-implicits stack.
    val open = c.openImplicits
    val materializerIdx = if(proxied) 1 else 0
    val materializer = open(materializerIdx)
    val checkIdx = (materializerIdx*2)+1
    if(open.size > checkIdx) {
      val check = open(checkIdx)
      if(materializer.sym == check.sym && materializer.pre =:= check.pre && materializer.pt =:= check.pt)
        c.abort(c.enclosingPosition, "Backtrack")
    }

    // The deriver must be a stable singleton so we can `import deriver._`.
    val deriver =
      dTpe match {
        case SingleType(pre, sym) => mkAttributedRef(pre, sym)
        case other =>
          // Bug fix: the message previously lacked the `s` interpolator and
          // printed the literal text "$dTpe".
          c.abort(c.enclosingPosition, s"Deriver $dTpe not found")
      }

    // Instance found by plain implicit search (may be EmptyTree).
    val inst = c.inferImplicitValue(appTpe, silent = true)

    // Shadow the recursive materializer(s) with dummy defs so the probe below
    // cannot re-enter this macro.
    val masks =
      if(!proxied) List(q"def materialize = ???")
      else {
        val proxyOwner = materializer.sym.owner
        val proxyTpe = proxyOwner.typeSignature
        val proxyNames = proxyTpe.members.filter(_.isImplicit).map(_.name)
        proxyNames.map { name => q"def ${name.toTermName} = ???" }
      }

    // Instance found with the deriver's members in scope.
    val probe =
      q"""
        ..$masks
        import $deriver._
        _root_.shapeless.the[$appTpe]
      """

    val checkedProbe = c.typecheck(probe, pt = appTpe, silent = true)

    if(checkedProbe == EmptyTree) {
      if(inst == EmptyTree) {
        c.abort(c.enclosingPosition, s"No derived instance $appTpe")
      } else {
        inst
      }
    } else {
      val derived = checkedProbe match {
        case b: Block => b.expr
      }
      if(derived.equalsStructure(inst)) inst
      else if(inst == EmptyTree) derived
      else {
        // Both searches succeeded: prefer the derived instance only when its
        // result type arguments are strictly more specific than the plain one's.
        val resTpeD = derived.symbol.asMethod.info.finalResultType
        val resTpeI = inst.symbol.asMethod.info.finalResultType
        val useDerived =
          resTpeD.typeArgs.zip(resTpeI.typeArgs).forall { case (ad, ai) =>
            ai.typeSymbol.isParameter ||
            (!ad.typeSymbol.isParameter && !(ad <:< ai))
          }
        if(useDerived) derived else inst
      }
    }
  }

  /** Implements `WrappedOrphan.apply`: masks the enclosing implicit, then resolves `T`. */
  def materializeWrapped[T](implicit tTag: WeakTypeTag[T]): Tree = {
    val open = c.openImplicits
    val masks =
      if(open.size < 2) Nil
      else {
        val sym = open(1).sym
        List(q"def ${sym.name.toTermName} = ???")
      }

    val tpe = tTag.tpe
    q"""
      {
        ..$masks
        _root_.shapeless.WrappedOrphan[$tpe](_root_.shapeless.lazily[$tpe])
      }
    """
  }
}
| malcolmgreaves/shapeless | core/src/main/scala/shapeless/orphans.scala | Scala | apache-2.0 | 4,365 |
package utils.paternoster.visualisation
import ir._
import ir.ast.Lambda
import utils.paternoster.rendering.Graphics
import scala.collection.mutable.ListBuffer;
/**
* Capsules all methods and classes to convert a Lift-Type to graphical representations.
*/
object Scene {

  // Definition of the node classes: an intermediate tree between a Lift type
  // and the graphical primitives that get rendered.
  sealed trait Node
  case class ScalarNode() extends Node
  case class TupleNode(elements:Seq[Node]) extends Node
  case class VectorNode(element:Node, size: Int) extends Node
  sealed trait ArrayNode extends Node
  case class MatrixNode(element: Node, rows:Int, columns:Int) extends ArrayNode
  case class LinearArrayNode(element: Node, size:Int) extends ArrayNode

  // Positioning Constants
  // Margin that a container element has to the child elements.
  val MARGIN_TO_CHILDREN_X= 2
  val MARGIN_TO_CHILDREN_Y = 2
  // Margin between container elements
  val CONTAINER_NODE_SPACING = 1
  // Width of a Tuple separator
  val SEPERATOR_WIDTH=1

  /**
   * Returns the width of the given node.
   * @param node The node of which the width will be returned.
   * @return The width of the given node.
   */
  private def nodeWidth(node: Node):Double = node match {
    case ScalarNode() => 5
    case TupleNode(elements) => 2*CONTAINER_NODE_SPACING + elements.map(nodeWidth).sum + (2*MARGIN_TO_CHILDREN_X)+(elements.size-1 )*(SEPERATOR_WIDTH*4)//Add separator width
    case MatrixNode(elem, _, columns) => 2*CONTAINER_NODE_SPACING +(2*MARGIN_TO_CHILDREN_X) + nodeWidth(elem) * columns
    case LinearArrayNode(elem, size) => 2*CONTAINER_NODE_SPACING +(2*MARGIN_TO_CHILDREN_X) + nodeWidth(elem) * size
    case VectorNode(elem, size) => 2*CONTAINER_NODE_SPACING +(2*MARGIN_TO_CHILDREN_X) + nodeWidth(elem) * size
  }

  /**
   * Returns the height of the given node.
   * @param node The node of which the height will be returned.
   * @return The height of the given node.
   */
  private def nodeHeight(node: Node):Double = node match {
    case ScalarNode() => 5
    case TupleNode(elements) => 2*CONTAINER_NODE_SPACING + elements.map(nodeHeight).max + (2*MARGIN_TO_CHILDREN_Y)
    case MatrixNode(elem, rows, _ ) => 2*CONTAINER_NODE_SPACING + (2*MARGIN_TO_CHILDREN_Y) + nodeHeight(elem) * rows
    case LinearArrayNode(elem, size) => 2*CONTAINER_NODE_SPACING + (2*MARGIN_TO_CHILDREN_Y) +nodeHeight(elem)
    case VectorNode(elem, size) => 2*CONTAINER_NODE_SPACING + (2*MARGIN_TO_CHILDREN_Y) +nodeHeight(elem)
  }

  // Node construction (from lift source items)

  /**
   * Constructs the node structure from the given type and Dimension-Grouping.
   * @param t The Lift-Type which will be processed.
   * @param dimensions The Dimension-Grouping which will be processed; each inner
   *                   list holds the sizes of one (possibly grouped) array level.
   * @return The node structure that represents the visualisation.
   */
  def typeNode(t:Type,dimensions : List[List[Int]]=List()):Node = t match {
    //Only float scalars for now
    case ScalarType(_,_) => ScalarNode()
    case tt: TupleType => {
      var elementBuffer = new ListBuffer[Node]()
      var modifiedDimensions = dimensions
      //render each tuple element after another
      for(node <- tt.elemsT){
        elementBuffer+= typeNode(node,modifiedDimensions)
        // Drop the size entries consumed by the nested arrays of this element
        // so the next tuple element starts with its own sizes.
        var nestedArrayCount = getDimensionCount(node)
        while(nestedArrayCount != 0){
          //if arrays are grouped they share a sizelist. if the current level is not grouped
          //this is 0
          nestedArrayCount -= modifiedDimensions.head.size-1
          //remove the next size
          modifiedDimensions = modifiedDimensions.drop(1)
          //count down
          nestedArrayCount-=1
        }
      }
      TupleNode(elementBuffer.toSeq)
    }
    case vt: VectorType => {
      val bottomElement = bottomElementType(vt)
      vectorTypeNode(bottomElement,vt.len.evalInt)
    }
    case array:ArrayType with Size =>
      //The ultimate non-array element contained in the nested array
      val bottomElement = bottomElementType(array)
      arrayTypeNode(array,bottomElement, dimensions)
    case _ => throw new NotImplementedError("No support for drawing function types yet")
  }

  /**
   * Generates a vector node.
   * @param scalarT The scalarType of the vector.
   * @param size The size of the vector.
   * @return The generated vector node.
   */
  private def vectorTypeNode(scalarT: Type, size: Int):VectorNode = {
    //Build the contained element first...
    val inner = typeNode(scalarT);
    VectorNode(inner,size)
  }

  /**
   * Generates a LinearArrayNode or MatrixNode depending on the grouping.
   * @param currentType The current type.
   * @param bottomType The bottom type of the nesting of arrays.
   * @param dimensionGrouping The grouping containing the sizes of arrays.
   * @return The generated ArrayNode.
   */
  private def arrayTypeNode(currentType:Type,bottomType:Type, dimensionGrouping:List[List[Int]]):ArrayNode = {
    if(dimensionGrouping.isEmpty) {
      throw new Exception("Array type renderer with empty sizes - impossible!!")
    }
    val currentSizes = dimensionGrouping.head
    val nextSizes = dimensionGrouping.tail

    //Build inner element first. If this is a matrix (currentsize = 2) take the inner element of the inner array as inner
    val inner = if (currentSizes.size == 2) typeNode(currentType.asInstanceOf[ArrayType].elemT.asInstanceOf[ArrayType].elemT, nextSizes) else typeNode(currentType.asInstanceOf[ArrayType].elemT,nextSizes)

    //Now build the current level of array
    currentSizes.length match {
      //1 dimension
      case 1 => LinearArrayNode(inner, currentSizes.head)
      case 2 => {
        //2 dimensions
        MatrixNode(inner, currentSizes.head, currentSizes.tail.head)
      }
      //any other - not supported yet!
      case n => throw new Exception(s"Unsupported rendering of $n-dimensional array level. Try another dimension grouping")
    }
  }

  /**
   * Helper method that returns the bottom-type of an array.
   * @param arr The array that will be searched.
   * @return The bottom type.
   */
  def bottomElementType(arr:ArrayType):Type = {
    arr.elemT match {
      case array: ArrayType => bottomElementType(array)
      case other => other
    }
  }

  /**
   * Helper method that returns the scalartype of a vector.
   * @param vt The vector of which the scalartype will be returned.
   * @return The scalartype of the vector.
   */
  def bottomElementType(vt:VectorType):Type = {
    vt.scalarT
  }

  /**
   * Helper method that counts the number of nested arrays.
   * @param argType The type in which the arrays are searched.
   * @return The number of nested arrays.
   */
  def getDimensionCount(argType:Type): Int= {
    argType match {
      case ar: ArrayType with Size => 1 + getDimensionCount(ar.elemT)
      case other => 0
    }
  }

  import Graphics._

  /**
   * Recursively converts the node into a series of GraphicalPrimitives.
   * All children are drawn at the origin and then translated into place.
   * @param typeNode The node that will be converted.
   * @return The corresponding GraphicalPrimitives to the node.
   */
  def drawType(typeNode: Node):Iterable[GraphicalPrimitive] = {
    typeNode match {
      case ScalarNode() => Seq(Rectangle(0, 0,5,5))
      case TupleNode(elements) =>{
        //Draw elements
        var elementIterator = elements.iterator
        var accumulatedWidth = Double.box(0)
        var maxNodeHeight = Double.box(0)
        var primitiveBuffer = new ListBuffer[GraphicalPrimitive]
        var sets = new ListBuffer[Iterable[GraphicalPrimitive]]
        var seperators = new ListBuffer[Iterable[GraphicalPrimitive]]
        // Lay the tuple elements out left-to-right, inserting a separator
        // between neighbours and tracking the tallest element.
        while(elementIterator.hasNext){
          val current = elementIterator.next()
          maxNodeHeight = Double.box(nodeHeight(current)).doubleValue().max(maxNodeHeight)
          sets += translateAll(drawType(current), dx =accumulatedWidth+MARGIN_TO_CHILDREN_X-0.5, dy =MARGIN_TO_CHILDREN_Y-0.5)
          accumulatedWidth+= nodeWidth(current)
          if(elementIterator.hasNext){
            //Print separator
            seperators += translateAll(Seq(Seperator(0,0)), dx =accumulatedWidth+MARGIN_TO_CHILDREN_X+SEPERATOR_WIDTH*2, dy =MARGIN_TO_CHILDREN_Y-0.5)
            accumulatedWidth+= SEPERATOR_WIDTH*4//separator width
          }
        }
        sets.flatten.toSeq ++ translateAll(seperators.flatten.toSeq, dx = 0, dy = maxNodeHeight) ++ Seq(CorneredClause(0,0,accumulatedWidth+(MARGIN_TO_CHILDREN_X*2), maxNodeHeight+(MARGIN_TO_CHILDREN_Y*2)))
      }
      case VectorNode(elementType,size)=>{
        val elemWidth =nodeWidth(elementType)
        val elemHeight = nodeHeight(elementType)
        //compute inner element primitives
        val elementPrims = drawType(elementType)
        var sets = (0 until size).map(pos => translateAll(elementPrims, dx = ((pos*elemWidth)+MARGIN_TO_CHILDREN_X)-0.5 , dy = MARGIN_TO_CHILDREN_Y-0.5))
        //As a final results, flatten the sets and add the container box
        sets.flatten ++ Seq(DashedBox(0, 0, (size*elemWidth)+(MARGIN_TO_CHILDREN_X*2), elemHeight+(MARGIN_TO_CHILDREN_Y*2)))
      }
      case MatrixNode(elementType, rows, columns) =>
        val elemWidth =nodeWidth(elementType)
        val elemHeight = nodeHeight(elementType)
        //compute inner element primitives
        val elementPrims = drawType(elementType)
        //Compute the positions where the children will go
        val positions = for(x <- 0 until columns;
                            y <- 0 until rows) yield (x*elemWidth, y*elemHeight)
        //For each position, replicate the elementPrimitives and translate them to that place
        val sets = positions.map{case (x,y) => translateAll(elementPrims, x+MARGIN_TO_CHILDREN_X-0.5, y+MARGIN_TO_CHILDREN_Y-0.5)}
        //Flatten the sets and wrap in container box
        sets.flatten ++ Seq(BoxWithText(rows.toString+"x"+ columns.toString,0, 0, (columns*elemWidth)+(MARGIN_TO_CHILDREN_X*2), (rows*elemHeight)+(MARGIN_TO_CHILDREN_Y*2)))
      case LinearArrayNode(elementType, size) =>
        val elemWidth =nodeWidth(elementType)
        val elemHeight = nodeHeight(elementType)
        var sets : IndexedSeq[Iterable[Graphics.GraphicalPrimitive]] = null
        //compute inner element primitives
        val elementPrims = drawType(elementType)
        // Scalars get a half-unit offset so neighbouring cells visually touch.
        elementType match {
          case _:ScalarNode =>sets = (0 until size).map(pos => translateAll(elementPrims, dx = ((pos*elemWidth)+MARGIN_TO_CHILDREN_X)-0.5 , dy = MARGIN_TO_CHILDREN_Y-0.5))
          case _:Any =>sets = (0 until size).map(pos => translateAll(elementPrims, dx = (pos*elemWidth)+MARGIN_TO_CHILDREN_X, dy = MARGIN_TO_CHILDREN_Y))
        }
        //As a final results, flatten the sets and add the container box
        sets.flatten ++ Seq(BoxWithText(size.toString,0, 0, (size*elemWidth)+(MARGIN_TO_CHILDREN_X*2), elemHeight+(MARGIN_TO_CHILDREN_Y*2)))
    }
  }

  /*
   /***
    * Computes horizontal translations needed for each node to be aligned on a common
    * center axis.
    * @param nodes The nodes to align
    * @return A mapping from each node to the amount of horizontal translation needed to align
    */
  private def horizontalAlignment(nodes:Iterable[Node]):scala.collection.Map[Node,Int] = {
    val nodeWidthMap = nodes.map(node => (node, nodeWidth(node))).toMap
    val maxWidth = nodeWidthMap.values.max
    //For each node, we need to translate by (maxWidth - nodeWidth)/2
    nodeWidthMap.mapValues(x => (maxWidth - x)/2)
  }
  */
}
// x1, x2, and x3 resulted in: symbol variable bitmap$0 does not exist in A.<init>
object A {
  // Regression test body: lazy vals in each of these positions (try body,
  // catch handler, finally block, if branch, while body) once triggered
  // "symbol variable bitmap$0 does not exist in A.<init>". The object
  // initializer exercises them at class-construction time, `method` at call time.
  try {
    lazy val x1 = 1
    println(x1)
    sys.error("!")
  } catch {
    case _: Throwable =>
      lazy val x2 = 2
      println(x2)
  } finally {
    lazy val x3 = 3
    println(x3)
  }

  if ("".isEmpty) {
    lazy val x4 = 4
    println(x4)
  }

  var b = true
  while(b) {
    lazy val x5 = 5
    println(x5)
    b = false
  }

  def method {
    try {
      lazy val x6 = 6
      println(x6)
      sys.error("!")
    } catch {
      case _: Throwable =>
        lazy val x7 = 7
        println(x7)
    } finally {
      lazy val x8 = 8
      println(x8)
    }

    if ("".isEmpty) {
      lazy val x9 = 9
      println(x9)
    }

    var b = true
    while(b) {
      lazy val x10 = 10
      println(x10)
      b = false
    }
  }
}
object Test {
  // Entry point: forces A's constructor (object initializer) and A.method to run.
  def main(args: Array[String]) {
    A.method
  }
}
| felixmulder/scala | test/files/run/t6272.scala | Scala | bsd-3-clause | 909 |
package com.productfoundry.akka.cqrs
/**
* Defines the possible results of an aggregate command.
*/
object AggregateResult {

  // Serializable so results can be sent across the actor system.
  sealed trait AggregateResult extends Serializable

  /**
   * Indicates a successful update to the aggregate.
   * @param tag after the update.
   * @param response of the aggregate.
   */
  // NOTE(review): the default value `Unit` is the scala.Unit companion object,
  // not the unit value `()` -- confirm this is intentional.
  case class Success(tag: AggregateTag, response: Any = Unit) extends AggregateResult

  /**
   * Indicates an update failure that can be corrected by the user.
   * @param cause of the failure
   */
  case class Failure(cause: DomainError) extends AggregateResult
}
| odd/akka-cqrs | core/src/main/scala/com/productfoundry/akka/cqrs/AggregateResult.scala | Scala | apache-2.0 | 588 |
package utils
import controllers.routes
import play.api.i18n.Lang
import play.api.mvc._
import shared.messages.Language
import shared.messages.Languages.{EN, RU}
import shared.pageparams.HeaderParams
import upickle.default._
object ServerUtils {

  /**
   * Resolves the UI language from the client's Accept-Language preferences.
   * Only the highest-priority locale is inspected: Russian locales map to RU,
   * anything else (including an empty preference list) falls back to EN.
   */
  def getDefaultLanguage(acceptLanguages: Seq[Lang]) =
    acceptLanguages.headOption match {
      case Some(preferred) if preferred.code == "ru-RU" || preferred.code == "ru" => RU
      case _ => EN
    }

  /**
   * Reads the typed Session stored in the Play session cookie; when absent,
   * creates a fresh one in the client's preferred language.
   */
  def getSession(implicit requestHeader: RequestHeader): Session = {
    val stored = requestHeader.session.get(Session.SESSION)
    stored.fold(Session(language = getDefaultLanguage(requestHeader.acceptLanguages))) { serialized =>
      read[Session](serialized)
    }
  }

  /** Serialises the session into the (key, value) pair kept in the Play session. */
  def modSession(s: Session): (String, String) =
    Session.SESSION -> write(s)

  /** Parameters shared by every page header. */
  def headerParams(language: Language) = HeaderParams(
    language = language
  )

  /**
   * Resolves the versioned URL of the Scala.js bundle for the given project,
   * preferring the fully optimised artifact over the fastopt one. Throws if
   * neither bundle is present on the classpath (same as the original `.get`).
   */
  def bundleUrl(projectName: String): String = {
    val base = projectName.toLowerCase
    val candidates = Seq(s"$base-opt-bundle.js", s"$base-fastopt-bundle.js")
    val available = candidates.find(file => getClass.getResource(s"/public/$file") != null)
    available.map(file => controllers.routes.Assets.versioned(file).url).get
  }
}
| Igorocky/readtext | server/app/utils/ServerUtils.scala | Scala | mit | 1,216 |
package com.krux.hyperion.examples
import org.scalatest.WordSpec
import org.json4s.JsonDSL._
import org.json4s._
// Pins the exact DataPipeline JSON emitted for the example S3DistCp workflow:
// four objects (Default, schedule, EMR cluster, S3DistCp activity), compared
// field-by-field after sorting by object name.
class ExampleS3DistCpWorkflowSpec extends WordSpec {

  "ExampleS3DistCpWorkflowSpec" should {
    "produce correct pipeline JSON" in {

      val pipelineJson = ExampleS3DistCpWorkflow.toJson
      // Sort for a deterministic order; ids carry random suffixes, names do not.
      val objectsField = pipelineJson.children.head.children.sortBy(o => (o \ "name").toString)

      // have the correct number of objects
      assert(objectsField.size === 4)

      // the first object should be Default
      val defaultObj = objectsField(1)
      val defaultObjShouldBe = ("id" -> "Default") ~
        ("name" -> "Default") ~
        ("scheduleType" -> "cron") ~
        ("failureAndRerunMode" -> "CASCADE") ~
        ("pipelineLogUri" -> "s3://your-bucket/datapipeline-logs/") ~
        ("role" -> "DataPipelineDefaultRole") ~
        ("resourceRole" -> "DataPipelineDefaultResourceRole") ~
        ("schedule" -> ("ref" -> "PipelineSchedule"))
      assert(defaultObj === defaultObjShouldBe)

      // the cron schedule: daily, three occurrences, starting at activation
      val pipelineSchedule = objectsField(2)
      val pipelineScheduleShouldBe =
        ("id" -> "PipelineSchedule") ~
          ("name" -> "PipelineSchedule") ~
          ("period" -> "1 days") ~
          ("startAt" -> "FIRST_ACTIVATION_DATE_TIME") ~
          ("occurrences" -> "3") ~
          ("type" -> "Schedule")
      assert(pipelineSchedule === pipelineScheduleShouldBe)

      // the EMR cluster resource; its id carries a random suffix, so only the prefix is checked
      val mapReduceCluster = objectsField(0)
      val mapReduceClusterId = (mapReduceCluster \ "id").values.toString
      assert(mapReduceClusterId.startsWith("MapReduceCluster_"))
      val mapReduceClusterShouldBe =
        ("id" -> mapReduceClusterId) ~
          ("name" -> "Cluster with release label") ~
          ("bootstrapAction" -> Seq.empty[String]) ~
          ("masterInstanceType" -> "m3.xlarge") ~
          ("coreInstanceType" -> "m3.xlarge") ~
          ("coreInstanceCount" -> "2") ~
          ("taskInstanceType" -> "#{my_InstanceType}") ~
          ("taskInstanceCount" -> "#{my_InstanceCount}") ~
          ("terminateAfter" -> "8 hours") ~
          ("keyPair" -> "your-aws-key-pair") ~
          ("type" -> "EmrCluster") ~
          ("region" -> "us-east-1") ~
          ("role" -> "DataPipelineDefaultRole") ~
          ("resourceRole" -> "DataPipelineDefaultResourceRole") ~
          ("releaseLabel" -> "emr-4.4.0") ~
          ("initTimeout" -> "1 hours")
      assert(mapReduceCluster === mapReduceClusterShouldBe)

      // the S3DistCp activity, wired to run on the cluster above
      val s3DistCpActivity = objectsField(3)
      val s3DistCpActivityyId = (s3DistCpActivity \ "id").values.toString
      assert(s3DistCpActivityyId.startsWith("S3DistCpActivity_"))
      val filterActivityShouldBe =
        ("id" -> s3DistCpActivityyId) ~
          ("name" -> "s3DistCpActivity") ~
          ("runsOn" -> ("ref" -> mapReduceClusterId)) ~
          ("step" -> List("command-runner.jar,s3-dist-cp,--src,s3://the-source,--dest,#{my_HdfsLocation},--outputCodec,gz")) ~
          ("type" -> "EmrActivity")
      assert(s3DistCpActivity === filterActivityShouldBe)
    }
  }
}
| sethyates/hyperion | examples/src/test/scala/com/krux/hyperion/examples/ExampleS3DistCpWorkflowSpec.scala | Scala | apache-2.0 | 3,001 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.web
import com.twitter.logging.Logger
import com.twitter.ostrich.admin.RuntimeEnvironment
import com.twitter.zipkin.BuildProperties
object Main {
  val log = Logger.get(getClass.getName)

  /**
   * Boots the Zipkin web server: loads the runtime configuration from the
   * command-line arguments and starts the configured ZipkinWeb instance.
   */
  def main(args: Array[String]) {
    log.info("Loading configuration")
    val runtime = RuntimeEnvironment(BuildProperties, args)
    val server = runtime.loadRuntimeConfig[ZipkinWeb]()
    try {
      server.start()
    } catch {
      case e: Exception =>
        e.printStackTrace()
        log.error(e, "Unexpected exception: %s", e.getMessage)
        // Bug fix: previously exited with status 0 on a failed startup, which
        // supervisors/init systems interpret as success. Use a non-zero code.
        System.exit(1)
    }
  }
}
| gspandy/zipkin | zipkin-web/src/main/scala/com/twitter/zipkin/web/Main.scala | Scala | apache-2.0 | 1,200 |
/* *\\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2015, Gary Keorkunian **
** **
\\* */
package squants
import squants.space.AngleUnit
/**
* Root trait for representing Vectors
*
* @author garyKeorkunian
* @since 0.3.0
*
* @tparam A Type for the Vector's coordinate values
*/
trait SVector[A] {

  // Concrete subtypes fix this to themselves so the arithmetic operations
  // return the precise vector type.
  type SVectorType <: SVector[A]

  /**
   * The list of values that makeup the Vector's Cartesian coordinates
   * @return
   */
  def coordinates: Seq[A]

  /**
   * The scalar value of the Vector
   * @return
   */
  def magnitude: A

  /**
   * The angle between the two Cartesian coordinates at the supplied indices
   * @param coordinateX index of the abscissa coordinate (defaults to 0)
   * @param coordinateY index of the ordinate coordinate (defaults to 1)
   * @param unit unit for the angle (theta) component (defaults to Radians)
   * @return Angle
   */
  def angle(coordinateX: Int = 0, coordinateY: Int = 1, unit: AngleUnit = Radians): Angle

  /**
   * The polar coordinates (r, theta) of the two Cartesian coordinates at the supplied indices
   * @param coordinateX index of the abscissa coordinate (defaults to 0)
   * @param coordinateY index of the ordinate coordinate (defaults to 1)
   * @param unit unit for the angle (theta) component (defaults to Radians)
   * @return (A, Angle)
   */
  def polar(coordinateX: Int = 0, coordinateY: Int = 1, unit: AngleUnit = Radians): (A, Angle) = (magnitude, angle(coordinateX, coordinateY, unit))

  /**
   * Creates the Unit Vector which corresponds to this vector
   * @return
   */
  def normalize: SVectorType

  /**
   * Add two Vectors
   * @param that Vector[A]
   * @return
   */
  def plus(that: SVectorType): SVectorType
  // symbolic alias (eta-expanded method value)
  def + = plus _

  /**
   * Subtract two Vectors
   * @param that Vector[A]
   * @return
   */
  def minus(that: SVectorType): SVectorType
  def - = minus _

  /**
   * Scale a Vector
   * @param that Double
   * @return
   */
  def times(that: Double): SVectorType
  def * = times _

  /**
   * Reduce a Vector
   *
   * @param that Double
   * @return
   */
  def divide(that: Double): SVectorType
  def /(that: Double) = divide(that)

  /**
   * Create the Dot Product of two Vectors
   * @param that Double
   * @return
   */
  def dotProduct(that: DoubleVector): A
  def *(that: DoubleVector) = dotProduct(that)

  /**
   * Create the Cross Product of two Vectors
   * @param that Vector[A]
   * @return
   */
  def crossProduct(that: DoubleVector): SVector[A]
  def #* = crossProduct _
}
object SVector {
  // Real-valued vector from raw Double coordinates
  def apply(coordinates: Double*): DoubleVector = DoubleVector(coordinates: _*)
  // Dimensioned vector from Quantity coordinates
  def apply[A <: Quantity[A]](coordinates: A*): QuantityVector[A] = QuantityVector[A](coordinates: _*)
  /**
   * Create a 2-dimensional DoubleVector from Polar Coordinates (x = r cos theta, y = r sin theta)
   * @param radius the magnitude of the vector
   * @param theta the angle from the polar axis
   * @return
   */
  def apply(radius: Double, theta: Angle): DoubleVector = apply(radius * theta.cos, radius * theta.sin)
  /**
   * Create a 2-dimensional QuantityVector[A] from Polar Coordinates (x = r cos theta, y = r sin theta)
   * @param radius the magnitude of the vector
   * @param theta the angle from the polar axis
   * @tparam A Quantity type
   * @return
   */
  def apply[A <: Quantity[A]](radius: A, theta: Angle): QuantityVector[A] = apply(radius * theta.cos, radius * theta.sin)
}
/**
 * A Vector of real (Double) values.
 *
 * @author garyKeorkunian
 * @since 0.3.0
 *
 * @param coordinates Double* the Cartesian coordinates of the vector
 */
case class DoubleVector(coordinates: Double*) extends SVector[Double] {

  type SVectorType = DoubleVector

  /** Euclidean norm: square root of the sum of the squared coordinates. */
  def magnitude: Double = math.sqrt(coordinates.foldLeft(0d)((acc, c) => acc + c * c))

  /** Angle between the selected abscissa/ordinate pair, computed with atan2. */
  def angle(coordinateX: Int = 0, coordinateY: Int = 1, unit: AngleUnit = Radians): Angle = {
    val theta = math.atan2(coordinates(coordinateY), coordinates(coordinateX))
    Radians(theta) in unit
  }

  /** Unit vector pointing in the same direction as this one. */
  def normalize: SVectorType = divide(magnitude)

  /**
   * Creates a DoubleVector by mapping over each coordinate with the supplied function
   * @param f Double => Double map function
   * @return
   */
  def map[A <: Double](f: Double => Double): DoubleVector = DoubleVector(coordinates.map(f): _*)

  /**
   * Creates a QuantityVector by mapping over each coordinate with the supplied function
   * @param f Double => A
   * @tparam A <: Quantity
   * @return
   */
  def map[A <: Quantity[A]](f: Double => A): QuantityVector[A] = QuantityVector(coordinates.map(f): _*)

  // Coordinate-wise sum; the shorter operand is padded with zeros.
  def plus(that: SVectorType): SVectorType =
    DoubleVector(coordinates.zipAll(that.coordinates, 0d, 0d).map { case (a, b) => a + b }: _*)

  // Coordinate-wise difference; the shorter operand is padded with zeros.
  def minus(that: SVectorType): SVectorType =
    DoubleVector(coordinates.zipAll(that.coordinates, 0d, 0d).map { case (a, b) => a - b }: _*)

  def times(that: Double): SVectorType = map(_ * that)

  // Scaling by a Quantity lifts this vector into a QuantityVector.
  def times[A <: Quantity[A]](that: A): QuantityVector[A] = map(_ * that)

  def divide(that: Double): SVectorType = map(_ / that)

  // Sum of coordinate-wise products; the shorter operand is padded with zeros.
  def dotProduct(that: SVectorType): Double =
    coordinates.zipAll(that.coordinates, 0d, 0d).map { case (a, b) => a * b }.sum

  def dotProduct[B <: Quantity[B]](that: QuantityVector[B]) = that.dotProduct(this)

  // Standard 3D cross product; only 3-dimensional operands are supported.
  def crossProduct(that: SVectorType) = (this.coordinates.length, that.coordinates.length) match {
    case (3, 3) =>
      val a = coordinates
      val b = that.coordinates
      DoubleVector(
        a(1) * b(2) - a(2) * b(1),
        a(2) * b(0) - a(0) * b(2),
        a(0) * b(1) - a(1) * b(0))
    case (7, 7) => throw new UnsupportedOperationException("Seven-dimensional cross product is not currently supported")
    case _      => throw new UnsupportedOperationException("Cross product is not supported on vectors with an arbitrary number of dimensions")
  }

  // NOTE(review): delegates with operands swapped; cross product is anti-commutative,
  // so this evaluates "that x this" — confirm the sign convention is intended.
  def crossProduct[B <: Quantity[B]](that: QuantityVector[B]) = that.crossProduct(this)
}
/**
 * Quantity Vector
 *
 * @author garyKeorkunian
 * @since 0.3.0
 *
 * @param coordinates Variable list of A
 * @tparam A QuantityType
 */
case class QuantityVector[A <: Quantity[A]](coordinates: A*) extends SVector[A] {
  type SVectorType = QuantityVector[A]
  // Unit in which all internal numeric conversions are performed.
  // NOTE(review): assumes a non-empty vector — coordinates(0) throws on an empty one; confirm callers.
  def valueUnit = coordinates(0).unit
  /** Euclidean norm of the coordinates, expressed in [[valueUnit]]. */
  def magnitude: A = valueUnit(math.sqrt(coordinates.toTraversable.map(v ⇒ v.to(valueUnit) * v.to(valueUnit)).sum))
  /**
   * The angle between the two Cartesian coordinates at the supplied indices.
   * Uses atan2 on the coordinate values converted to a common unit so the full
   * (-Pi, Pi] range is covered; the previous atan(y / x) form lost the quadrant
   * for negative abscissa values and was inconsistent with DoubleVector.angle.
   */
  def angle(coordinateX: Int = 0, coordinateY: Int = 1, unit: AngleUnit = Radians): Angle =
    Radians(math.atan2(coordinates(coordinateY).to(valueUnit), coordinates(coordinateX).to(valueUnit))) in unit
  /** Unit Vector pointing in the same direction as this vector. */
  def normalize: SVectorType = this / magnitude.to(valueUnit)
  /**
   * Creates the Unit Vector which corresponds to this vector using the given unit
   * @return
   */
  def normalize(unit: UnitOfMeasure[A]): SVectorType = this / magnitude.to(unit)
  /**
   * Creates a DoubleVector by mapping over each coordinate with the supplied function
   * @param f A => Double map function
   * @return
   */
  def map[B <: Double](f: A ⇒ Double): DoubleVector = DoubleVector(coordinates.toTraversable.map(f).toSeq: _*)
  /**
   * Creates a QuantityVector by mapping over each coordinate with the supplied function
   * @param f A => B
   * @tparam B <: Quantity
   * @return
   */
  def map[B <: Quantity[B]](f: A ⇒ B): QuantityVector[B] = QuantityVector(coordinates.toTraversable.map(f).toSeq: _*)
  // Coordinate-wise sum; the shorter operand is padded with zeros of valueUnit.
  def plus(that: SVectorType): SVectorType =
    QuantityVector(coordinates.toIterable.zipAll(that.coordinates.toIterable, valueUnit(0), valueUnit(0)).toTraversable.map(v ⇒ v._1 + v._2).toSeq: _*)
  // Coordinate-wise difference; the shorter operand is padded with zeros of valueUnit.
  def minus(that: SVectorType): SVectorType =
    QuantityVector(coordinates.toIterable.zipAll(that.coordinates.toIterable, valueUnit(0), valueUnit(0)).toTraversable.map(v ⇒ v._1 - v._2).toSeq: _*)
  def times(that: Double): SVectorType = map(_ * that)
  def *(that: Double): SVectorType = times(that)
  // Generalized scaling: applies a Quantity-producing function to every coordinate.
  def times[B <: Quantity[B], C <: Quantity[C]](quantTimes: A ⇒ C): QuantityVector[C] = map(quantTimes)
  def divide(that: Double): SVectorType = map(_ / that)
  // Dividing by a Quantity of the same type yields a dimensionless DoubleVector.
  def divide(that: A): DoubleVector = map(_ / that)
  def /(that: A) = divide(that)
  // Generalized reduction: applies a Quantity-producing function to every coordinate.
  def divide[B <: Quantity[B], C <: Quantity[C]](quantDiv: A ⇒ C): QuantityVector[C] = map(quantDiv(_))
  // Dot product with a real-valued vector; the result carries valueUnit.
  def dotProduct(that: DoubleVector): A =
    valueUnit(coordinates.toIterable.zipAll(that.coordinates.toIterable, valueUnit(0), 0d).toTraversable.map(v ⇒ v._1.to(valueUnit) * v._2).sum)
  // Dot product against another quantity vector using the supplied multiplication.
  def dotProduct[B <: Quantity[B], C <: Quantity[C]](that: SVector[B], quantTimes: (A, B) ⇒ C)(implicit num: Numeric[C]): C =
    coordinates.toIterable.zipAll(that.coordinates.toIterable, valueUnit(0), that.coordinates.head.unit(0)).toTraversable.map(v ⇒ quantTimes(v._1, v._2)).sum
  // Standard 3D cross product with a real-valued vector; only 3-dimensional operands are supported.
  def crossProduct(that: DoubleVector): SVectorType = (this.coordinates.length, that.coordinates.length) match {
    case (3, 3) ⇒
      QuantityVector(
        (this.coordinates(1) * that.coordinates(2)) - (this.coordinates(2) * that.coordinates(1)),
        (this.coordinates(2) * that.coordinates(0)) - (this.coordinates(0) * that.coordinates(2)),
        (this.coordinates(0) * that.coordinates(1)) - (this.coordinates(1) * that.coordinates(0)))
    case (7, 7) ⇒ throw new UnsupportedOperationException("Seven-dimensional Cross Product is not currently supported")
    case _ ⇒ throw new UnsupportedOperationException("Cross Product is not supported on vectors with an arbitrary number of dimensions")
  }
  // 3D cross product against another quantity vector using the supplied multiplication.
  def crossProduct[B <: Quantity[B], C <: Quantity[C]](that: SVector[B], quantTimes: (A, B) ⇒ C)(implicit num: Numeric[C]): QuantityVector[C] = {
    (this.coordinates.length, that.coordinates.length) match {
      case (3, 3) ⇒
        QuantityVector(
          quantTimes(this.coordinates(1), that.coordinates(2)) - quantTimes(coordinates(2), that.coordinates(1)),
          quantTimes(this.coordinates(2), that.coordinates(0)) - quantTimes(coordinates(0), that.coordinates(2)),
          quantTimes(this.coordinates(0), that.coordinates(1)) - quantTimes(coordinates(1), that.coordinates(0)))
      case (7, 7) ⇒ throw new UnsupportedOperationException("Seven-dimensional Cross Product is not currently supported")
      case _ ⇒ throw new UnsupportedOperationException("Cross Product is not supported on vectors with an arbitrary number of dimensions")
    }
  }
  /**
   * Returns a DoubleVector representing the quantity values in terms of the supplied unit
   * @param uom UnitOfMeasure[A]
   * @return
   */
  def to(uom: UnitOfMeasure[A]): DoubleVector = this / uom(1)
  /**
   * Returns a QuantityVector with all coordinates set to the supplied unit
   * @param uom UnitOfMeasure[A]
   * @return
   */
  def in(uom: UnitOfMeasure[A]): QuantityVector[A] = map[A](_.in(uom))
}
| underscorenico/squants | shared/src/main/scala/squants/SVector.scala | Scala | apache-2.0 | 11,187 |
/*
Copyright 2008-2012 E-Hentai.org
http://forums.e-hentai.org/
ehentai@gmail.com
This file is part of Hentai@Home.
Hentai@Home is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Hentai@Home is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Hentai@Home. If not, see <http://www.gnu.org/licenses/>.
*/
package org.hath.base
// note: this class does not necessarily represent an actual file even though it is occasionally used as such (getLocalFileRef()) - it is an abstract representation of files in the HentaiVerse System
import java.io.File
class HVFile(val hash:String, val size:Int, val xres:Int, val yres:Int, val _type:String) {

  /** Location of this file inside the cache dir; the first two hash chars form the subdirectory. */
  def getLocalFileRef() = new File(CacheHandler.getCacheDir(), hash.substring(0, 2) + "/" + getFileid())

  /** Canonical file id: "<hash>-<size>-<xres>-<yres>-<type>". */
  def getFileid() = Seq(hash, size, xres, yres, _type).mkString("-")

  def getHash() = hash

  def getSize() = size

  def getType() = _type

  /** Content-type value for this file's image type; octet-stream for anything unknown. */
  def getMimeType() = _type match {
    case "jpg" => Settings.CONTENT_TYPE_JPG
    case "png" => Settings.CONTENT_TYPE_PNG
    case "gif" => Settings.CONTENT_TYPE_GIF
    case _     => Settings.CONTENT_TYPE_OCTET
  }

  /**
   * Cheap consistency check of an on-disk file against this descriptor.
   * Only the file length and the SHA-1 prefix of the id are compared
   * (no image decoding). Returns false when the file cannot be read.
   */
  def localFileMatches(file: File): Boolean =
    try {
      file.length() == size && hash.startsWith(MiscTools.getSHAString(file))
    } catch {
      case _: java.io.IOException =>
        Out.warning("Failed reading file " + file + " to determine hash.")
        false
    }

  override def toString() = getFileid()
}
// static stuff
object HVFile {

  // Compiled once and reused; the previous Pattern.matches call recompiled the
  // regular expression on every validation.
  private val fileidPattern =
    java.util.regex.Pattern.compile("^[a-f0-9]{40}-[0-9]{1,8}-[0-9]{1,5}-[0-9]{1,5}-((jpg)|(png)|(gif))$")

  /** Returns true if the string has the canonical "<sha1>-<size>-<xres>-<yres>-<jpg|png|gif>" form. */
  def isValidHVFileid(fileid: String): Boolean = fileidPattern.matcher(fileid).matches()

  /**
   * Builds an HVFile descriptor from a cache file, optionally verifying that the
   * file content's SHA-1 hash matches the hash encoded in its name.
   * Returns null when the file does not exist, fails verification, cannot be read,
   * or has a malformed name (callers null-check the result).
   */
  def getHVFileFromFile(file: File, verify: Boolean): HVFile = {
    if(file.exists()) {
      val fileid = file.getName()
      try {
        if(verify) {
          // the hash is the leading component of the fileid, so a prefix check suffices
          if(!fileid.startsWith(MiscTools.getSHAString(file))) {
            return null
          }
        }
        return getHVFileFromFileid(fileid)
      } catch {
        case e: java.io.IOException => {
          e.printStackTrace()
          Out.warning("Warning: Encountered IO error computing the hash value of " + file)
        }
      }
    }
    return null
  }

  /**
   * Parses a fileid of the form "<hash>-<size>-<xres>-<yres>-<type>" into an HVFile.
   * Returns null (after logging a warning) for invalid or unparseable ids.
   */
  def getHVFileFromFileid(fileid: String): HVFile = {
    if(isValidHVFileid(fileid)) {
      try {
        val fileidParts = fileid.split("-")
        val hash = fileidParts(0)
        val size = Integer.parseInt(fileidParts(1))
        val xres = Integer.parseInt(fileidParts(2))
        val yres = Integer.parseInt(fileidParts(3))
        val _type = fileidParts(4)
        return new HVFile(hash, size, xres, yres, _type)
      } catch {
        case e: Exception =>
          Out.warning("Failed to parse fileid \\"" + fileid + "\\" : " + e)
      }
    } else {
      Out.warning("Invalid fileid \\"" + fileid + "\\"")
    }
    return null
  }
}
| lain-dono/hath-scala | src/main/scala/HVFile.scala | Scala | gpl-3.0 | 3,379 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.geohash
import com.typesafe.scalalogging.LazyLogging
import org.junit.{Assert, Test}
// Checks the number of geohashes produced to cover bounding boxes of various shapes,
// sizes and precisions.
class BoundingBoxTest extends LazyLogging {
  @Test def boundingBoxTest {
    // box defined by the points of two geohashes (default precision)
    var bbox = BoundingBox.apply(GeoHash.apply("dqb00").getPoint, GeoHash.apply("dqbxx").getPoint)
    var hashes = BoundingBox.getGeoHashesFromBoundingBox(bbox)
    logger.debug(hashes.size + "\\n" + hashes)
    Assert.assertEquals(24, hashes.size)
    // small box at precision 32 (second argument)
    bbox = BoundingBox.apply(-78, -77.895029, 38.045834, 38)
    hashes = BoundingBox.getGeoHashesFromBoundingBox(bbox, 32)
    logger.debug(hashes.size + "\\n" + hashes)
    Assert.assertEquals(6, hashes.size)
    // nearly identical box with slightly perturbed edges — must yield the same cover
    bbox = BoundingBox.apply(-78, -77.89503, 38.0458335, 38)
    hashes = BoundingBox.getGeoHashesFromBoundingBox(bbox, 32)
    logger.debug(hashes.size + "\\n" + hashes)
    Assert.assertEquals(6, hashes.size)
    // large box spanning the equator and the prime meridian
    bbox = BoundingBox.apply(-50, 50, -40, 40)
    hashes = BoundingBox.getGeoHashesFromBoundingBox(bbox, 32)
    logger.debug(hashes.size + "\\n" + hashes)
    Assert.assertEquals(8, hashes.size)
    // degenerate box (a single point) is covered by exactly one hash
    bbox = BoundingBox.apply(1, 1, 1, 1)
    hashes = BoundingBox.getGeoHashesFromBoundingBox(bbox, 32)
    logger.debug(hashes.size + "\\n" + hashes)
    Assert.assertEquals(1, hashes.size)
  }
}
| ronq/geomesa | geomesa-utils/src/test/scala/org/locationtech/geomesa/utils/geohash/BoundingBoxTest.scala | Scala | apache-2.0 | 1,731 |
package com.twitter.finagle.exp.routing
/**
 * The key associated with a field value of type `T`.
 *
 * Sealed: the only key families are [[MessageField]] and [[RouteField]] below.
 *
 * @tparam T The type of value associated with this message field key.
 */
sealed abstract class Field[+T]
/**
 * The key associated with a [[Message]] field.
 * @tparam T The type of value associated with this message field key.
 */
abstract class MessageField[+T] extends Field[T]
/**
 * The key associated with a [[Route]] field.
 * @tparam T The type of value associated with this message field key.
 */
abstract class RouteField[+T] extends Field[T]
| twitter/finagle | finagle-core/src/main/scala/com/twitter/finagle/exp/routing/Field.scala | Scala | apache-2.0 | 556 |
package controllers
import javax.inject._
import play.api.i18n._
import play.api.libs.json.Json
import play.api.libs.ws.WSClient
import play.api.mvc._
import scala.concurrent.ExecutionContext
class PersonController @Inject()(val messagesApi: MessagesApi, ws: WSClient)
                                (implicit ec: ExecutionContext) extends Controller with I18nSupport {

  // NOTE(review): hard-coded service address — presumably belongs in configuration; confirm.
  private val echoUrl = "http://192.168.1.216:8080/echo"

  /** Responds with a static hello-world JSON payload. */
  def index = Action {
    Ok(Json.obj("content" -> "hello world!"))
  }

  /** Calls the echo service and reports its HTTP status code as JSON. */
  def call = Action.async {
    ws.url(echoUrl).get().map { resp =>
      Ok(Json.obj("content" -> s"the response code is: ${resp.status}"))
    }
  }
}
| focusj/web-benchmark | simple-play/app/controllers/PersonController.scala | Scala | mit | 615 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package views.businessdetails
import forms.{EmptyForm, Form2, InvalidForm, ValidForm}
import models.businessdetails.{VATRegistered, VATRegisteredYes}
import org.scalatest.MustMatchers
import utils.AmlsViewSpec
import jto.validation.Path
import jto.validation.ValidationError
import play.api.i18n.Messages
import views.Fixture
import views.html.businessdetails.vat_registered
class vat_registeredSpec extends AmlsViewSpec with MustMatchers {
  // Common fixture: resolves the view under test from the injector and supplies a
  // tokenised request for rendering.
  trait ViewFixture extends Fixture {
    lazy val vat_registered = app.injector.instanceOf[vat_registered]
    implicit val requestWithToken = addTokenForView()
  }
  "vat_registered view" must {
    "have correct title" in new ViewFixture {
      val form2: ValidForm[VATRegistered] = Form2(VATRegisteredYes("1234"))
      def view = vat_registered(form2, true)
      // title format: "<page title> - <section name>"
      doc.title must startWith(Messages("businessdetails.registeredforvat.title") + " - " + Messages("summary.businessdetails"))
    }
    "have correct headings" in new ViewFixture {
      val form2: ValidForm[VATRegistered] = Form2(VATRegisteredYes("1234"))
      def view = vat_registered(form2, true)
      heading.html must be(Messages("businessdetails.registeredforvat.title"))
      subHeading.html must include(Messages("summary.businessdetails"))
    }
    "show errors in the correct locations" in new ViewFixture {
      // One validation error per form section; each must appear both in the error
      // summary and next to its own field.
      val form2: InvalidForm = InvalidForm(Map.empty,
        Seq(
          (Path \\ "registeredForVAT") -> Seq(ValidationError("not a message Key")),
          (Path \\ "vrnNumber-panel") -> Seq(ValidationError("second not a message Key"))
        ))
      def view = vat_registered(form2, true)
      errorSummary.html() must include("not a message Key")
      errorSummary.html() must include("second not a message Key")
      doc.getElementById("registeredForVAT")
        .getElementsByClass("error-notification").first().html() must include("not a message Key")
      doc.getElementById("vrnNumber-panel")
        .getElementsByClass("error-notification").first().html() must include("second not a message Key")
    }
    "have a back link" in new ViewFixture {
      val form2: Form2[_] = EmptyForm
      def view = vat_registered(form2, true)
      doc.getElementsByAttributeValue("class", "link-back") must not be empty
    }
  }
}
| hmrc/amls-frontend | test/views/businessdetails/vat_registeredSpec.scala | Scala | apache-2.0 | 2,900 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.jdbc.connection
import java.sql.{Connection, Driver}
import java.util.ServiceLoader
import javax.security.auth.login.Configuration
import scala.collection.mutable
import org.apache.spark.internal.Logging
import org.apache.spark.security.SecurityConfigurationLock
import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.jdbc.JdbcConnectionProvider
import org.apache.spark.util.Utils
protected abstract class ConnectionProviderBase extends Logging {
  protected val providers = loadProviders()

  /**
   * Discovers all JdbcConnectionProvider implementations on the classpath via
   * ServiceLoader and filters out those disabled through
   * SQLConf.disabledJdbcConnectionProviders. A provider whose construction throws
   * is logged and skipped so that one bad provider cannot break the whole scan.
   */
  def loadProviders(): Seq[JdbcConnectionProvider] = {
    val loader = ServiceLoader.load(classOf[JdbcConnectionProvider],
      Utils.getContextOrSparkClassLoader)
    val providers = mutable.ArrayBuffer[JdbcConnectionProvider]()

    val iterator = loader.iterator
    while (iterator.hasNext) {
      try {
        val provider = iterator.next
        logDebug(s"Loaded built-in provider: $provider")
        providers += provider
      } catch {
        case t: Throwable =>
          logError("Failed to load built-in provider.")
          logInfo("Loading of the provider failed with the exception:", t)
      }
    }

    // Set membership instead of a Seq.contains linear scan per provider
    val disabledProviders = Utils.stringToSeq(SQLConf.get.disabledJdbcConnectionProviders).toSet
    // toSeq seems duplicate but it's needed for Scala 2.13
    providers.filterNot(p => disabledProviders.contains(p.name)).toSeq
  }

  /**
   * Creates a JDBC connection using the unique provider that can handle the given
   * driver and options. When `connectionProviderName` is supplied that provider is
   * selected by name; otherwise exactly one matching provider must exist. The
   * JVM-wide security configuration is snapshotted and restored around the call,
   * since the configuration is global mutable state.
   */
  def create(
      driver: Driver,
      options: Map[String, String],
      connectionProviderName: Option[String]): Connection = {
    val filteredProviders = providers.filter(_.canHandle(driver, options))

    if (filteredProviders.isEmpty) {
      throw new IllegalArgumentException(
        "Empty list of JDBC connection providers for the specified driver and options")
    }

    val selectedProvider = connectionProviderName match {
      case Some(providerName) =>
        // It is assumed that no two providers will have the same name
        filteredProviders.find(_.name == providerName).getOrElse {
          throw new IllegalArgumentException(
            s"Could not find a JDBC connection provider with name '$providerName' " +
              "that can handle the specified driver and options. " +
              s"Available providers are ${providers.mkString("[", ", ", "]")}")
        }
      case None =>
        if (filteredProviders.size != 1) {
          throw new IllegalArgumentException(
            "JDBC connection initiated but more than one connection provider was found. Use " +
              s"'${JDBCOptions.JDBC_CONNECTION_PROVIDER}' option to select a specific provider. " +
              s"Found active providers ${filteredProviders.mkString("[", ", ", "]")}")
        }
        filteredProviders.head
    }

    SecurityConfigurationLock.synchronized {
      // Inside getConnection it's safe to get parent again because SecurityConfigurationLock
      // makes sure it's untouched
      val parent = Configuration.getConfiguration
      try {
        selectedProvider.getConnection(driver, options)
      } finally {
        logDebug("Restoring original security configuration")
        Configuration.setConfiguration(parent)
      }
    }
  }
}
private[jdbc] object ConnectionProvider extends ConnectionProviderBase
| chuckchen/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProvider.scala | Scala | apache-2.0 | 4,146 |
package components
import autowire._
import com.kodekutters.datepicker.{ DatepickerOptions, _ }
import japgolly.scalajs.react.vdom.prefix_<^._
import japgolly.scalajs.react.{ BackendScope, ReactComponentB, _ }
import org.scalajs.dom
import org.scalajs.jquery._
import org.widok.moment._
import services.AjaxClient
import shared.Utils.SharedUtils
import shared.services.AdminApi
import scala.language.{ existentials, implicitConversions, postfixOps }
import scala.scalajs.js.Date
import scalacss.Defaults._
import scalacss.ScalaCssReact._
import scalacss.internal.mutable.StyleSheet
/**
 * admin — date-picker component: lets an admin select the dates an account is
 * marked as away. Dates are exchanged with the parent as "DD/MM/YYYY" strings.
 */
object AwayDateSelector {
  // the important implicit for the datepicker to work with jQuery
  implicit def datepickerExt(jq: JQuery): Datepicker = jq.asInstanceOf[Datepicker]
  // the id of the account and a callback to the parent to collect the date selections
  case class Props(accId: Int, dateCollector: (List[String]) => Callback)
  // the list of date strings (presumably dd/mm/yyyy, matching the picker format — see createDatePicker)
  case class State(value: List[String] = List.empty)
  class Backend(scope: BackendScope[Props, State]) {
    // get the user away dates from the server and store them in component state
    def mounted(props: Props) = {
      AjaxClient[AdminApi].getAwayOf(props.accId).call().map { data =>
        // NOTE(review): setState is executed via runNow inside a Future callback,
        // outside the React callback chain — confirm this is deliberate.
        scope.setState(State(data.toList)).runNow()
      }
      Callback.empty
    }
    // Handler for the picker's "changeDate" event: converts the selected dates to
    // "DD/MM/YYYY" strings and forwards them to the parent via dateCollector.
    def processDates = (event: EventAttributes) => {
      event.stopImmediatePropagation()
      if (event.dates.isDefined && event.dates.toString.nonEmpty) {
        // get the dates as a list of strings
        val strDates = event.dates.toString.split(',').toList
        // a quick way to convert to the appropriate format
        val strList: List[String] = for (d <- strDates) yield Moment(new Date(d)).format("DD/MM/YYYY")
        // call the parent to collect the date selections
        scope.props.map(p => p.dateCollector(strList)).runNow()
      }
      false
    }
    // when the dom is ready the datepicker will be created and seeded with the
    // dates currently held in state
    def createDatePicker(state: State) = jQuery(dom.document).ready(() => {
      val jq = jQuery("#theDatePicker")
      jq.datepicker(DatepickerOptions.
        todayHighlight(true).
        multidate(true).
        format("dd/mm/yyyy").
        language("ja").
        startDate("-0d")).asInstanceOf[JQuery].on("changeDate", processDates)
      // fill the datepicker with the initial dates
      jq.datepicker("setDates", state.value)
    })
    def render(props: Props, state: State) = {
      createDatePicker(state)
      <.div(^.id := "theDatePicker", <.p("仕事のために利用不可能"))
    }
  }
  val component = ReactComponentB[Props]("AwayDateSelector")
    .initialState(State(List.empty))
    .renderBackend[Backend]
    .componentDidMount(p => p.backend.mounted(p.props))
    .build
  def apply(accId: Int, dateCollector: (List[String]) => Callback) = component(Props(accId, dateCollector))
}
| workingDog/SilverworkReact | clientAdmin/src/main/scala/components/AwayDateSelector.scala | Scala | apache-2.0 | 2,881 |
package cmwell.analytics.data
import org.apache.spark.sql.Column
import org.apache.spark.sql.functions.to_timestamp
object TimestampConversion {
  // Pattern handed to Spark's to_timestamp: ISO-8601 with milliseconds and zone offset
  private val ISO8601Format = "yyyy-MM-dd'T'HH:mm:ss.SSSX"
  // This conversion truncates millisecond precision.
  // If full precision is required, we may need to implement a UDF.
  /** Column expression converting an ISO-8601 string column to a Spark timestamp column. */
  def convertISO8601ToDate(column: Column): Column =
    to_timestamp(column, ISO8601Format)
}
| dudi3001/CM-Well | tools/dataConsistencyTool/cmwell-spark-analysis/src/main/scala/cmwell/analytics/data/TimestampConversion.scala | Scala | apache-2.0 | 425 |
/*
* Copyright 2015 LG CNS.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scouter.server.tagcnt.core;
import java.util.HashMap
import scouter.lang.value.Value
import scouter.server.Logger
import scouter.server.core.CoreRun
import scouter.server.tagcnt.next.NextTagCountDB
import scouter.util.DateUtil
import scouter.util.IShutdown
import scouter.util.RequestQueue
import scouter.util.ThreadUtil;
import scouter.server.util.ThreadScala
object MoveToNextCollector {
  // Flush threshold for buffered keys and capacity bound for the pending queue.
  private val MAX_MASTER = 1000;
  private val MAX_QUE_SIZE = 30000;
  private val queue = new RequestQueue[CountItem](MAX_QUE_SIZE + 1);
  // Immutable carrier for a single tag-count observation.
  class CountItem(_time: Long, _objType: String, _tagKey: Long, _tagValue: Value, _count: Float) {
    val time = _time
    val objType = _objType
    val tagKey = _tagKey
    val tagValue = _tagValue
    val count = _count
  }
  def isQueueOk() = queue.size() < MAX_QUE_SIZE
  // Enqueues one observation; busy-waits (with a warning log) while the queue is full,
  // so producers are throttled rather than dropping data.
  def add(time: Long, objType: String, tagKey: Long, tagValue: Value, count: Float) {
    while (isQueueOk() == false) {
      ThreadUtil.qWait();
      Logger.println("S182", 10, "queue is full");
    }
    queue.put(new CountItem(time, objType, tagKey, tagValue, count));
  }
  // Per-key minute buckets accumulated before being flushed to NextTagCountDB.
  // NOTE(review): appears to be read/written only from the daemon thread below — confirm.
  protected var countTable = new HashMap[NextTagCountData, Array[Float]]();
  // Consumer loop: drains the queue and adds each observation's count into the
  // minute slot (0-59) of its key's hour array.
  ThreadScala.startDaemon("scouter.server.tagcnt.core.MoveToNextCollector") {
    while (CountEnv.running) {
      checkSaveToDb();
      val p = queue.get();
      val key = new NextTagCountData(p.time, p.objType, p.tagKey, p.tagValue);
      var minCountForHour = countTable.get(key);
      if (minCountForHour == null) {
        // one slot per minute of the hour
        minCountForHour = new Array[Float](60);
        countTable.put(key, minCountForHour);
      }
      val min = ((p.time / DateUtil.MILLIS_PER_MINUTE) % 60).toInt
      minCountForHour(min) += p.count;
    }
  }
  private var last_save_time = System.currentTimeMillis();
  // Flushes the accumulated table to NextTagCountDB every ~5 seconds or once it
  // holds more than MAX_MASTER keys, then starts a fresh table.
  private def checkSaveToDb() {
    val now = System.currentTimeMillis();
    if (now > last_save_time + 5000 || countTable.size() > MAX_MASTER) {
      last_save_time = System.currentTimeMillis();
      if (countTable.size() > 0) {
        NextTagCountDB.add(countTable);
        countTable = new HashMap[NextTagCountData, Array[Float]]();
      }
    }
  }
}
| jahnaviancha/scouter | scouter.server/src/scouter/server/tagcnt/core/MoveToNextCollector.scala | Scala | apache-2.0 | 3,017 |
package chap3
object Exe9 extends App {
  // Length via foldRight: every element contributes 1 to the accumulator.
  def length[A](xs: List[A]): Int =
    List.foldRight(xs, 0)((_, acc) => acc + 1)

  assert(length(List(1, 2, 3)) == 3)
  assert(length(Nil) == 0)
}
| ponkotuy/FPScala | src/main/scala/chap3/Exe9.scala | Scala | unlicense | 192 |
package tastytest
object TestMultiAnnotatedType extends Suite("TestMultiAnnotatedType") {
  import MultiAnnotatedType.{annotA, annotB, annotC}
  test(assert(MultiAnnotatedType.listOfStrings.headOption === Option("foo")))
  // annotated-type ascription on the expected value must not affect equality
  test(assert(MultiAnnotatedType.id(23) === (23: @annotA @annotB @annotC)))
}
| scala/scala | test/tasty/run/src-2/tastytest/TestMultiAnnotatedType.scala | Scala | apache-2.0 | 303 |
package io.flow.play.expanders
import io.flow.common.v0.models.UserReference
import io.flow.play.util.Expander
import play.api.libs.json._
import scala.concurrent.{Future, ExecutionContext}
import io.flow.common.v0.models.json._
/*
User 'Expander' work by:
1. Generate list of all User Ids from the passed in JsValue
2. Query User API with just those User Ids that may be expandable
3. For each returned (valid) User, if an expanded User exists, return it, otherwise, return the UserReference as is
*/
case class User (
  fieldName: String,
  userClient: io.flow.user.v0.interfaces.Client
) extends Expander {
  /**
   * Replaces the UserReference stored under `fieldName` in each record with the
   * full User fetched (in one batched call) from the user service. Records whose
   * user id is not returned keep their original UserReference; records that are
   * not JSON objects (or lack a valid reference) are returned unchanged.
   */
  def expand(records: Seq[JsValue], requestHeaders: Seq[(String, String)] = Nil)(implicit ec: ExecutionContext): Future[Seq[JsValue]] = {
    // NOTE(review): @unchecked suppresses the exhaustivity warning — a record whose
    // field is not a valid UserReference throws a MatchError here; confirm intended.
    val userIds: Seq[String] = records.map { r =>
      ((r \\ fieldName).validate[UserReference]: @unchecked) match {
        case JsSuccess(userReference, _) => userReference.id
      }
    }
    userIds match {
      case Nil => Future.successful(records)
      case ids => {
        // single batched lookup for all ids, turned into an id -> User map
        userClient.users.get(id = Some(ids), limit = userIds.size.toLong, requestHeaders = requestHeaders).map(users =>
          Map(users.map(user => user.id -> user): _*)
        ).map(userIdLookup =>
          records.map { r =>
            r.validate[JsObject] match {
              case JsSuccess(obj, _) => {
                (r \\ fieldName).validate[UserReference] match {
                  case JsSuccess(userReference, _) => {
                    obj ++ Json.obj(
                      fieldName ->
                        (userIdLookup.get(userReference.id) match { //getOrElse can't be used to serialize multiple types - no formatter
                          case Some(user) => Json.toJson(user)
                          case None => Json.toJson(userReference)
                        })
                    )
                  }
                  case JsError(_) => r
                }
              }
              case JsError(_) => r
            }
          }
        )
      }
    }
  }
}
| flowcommerce/lib-play | app/io/flow/play/expanders/User.scala | Scala | mit | 2,049 |
package com.github.agourlay.cornichon.http
import cats.Show
import cats.data.EitherT
import cats.syntax.traverse._
import cats.syntax.show._
import cats.syntax.either._
import cats.effect.IO
import cats.effect.unsafe.IORuntime
import org.http4s.circe._
import com.github.agourlay.cornichon.core._
import com.github.agourlay.cornichon.http.client.HttpClient
import com.github.agourlay.cornichon.json.JsonPath
import com.github.agourlay.cornichon.json.CornichonJson._
import com.github.agourlay.cornichon.http.HttpStreams._
import com.github.agourlay.cornichon.resolver.Resolvable
import com.github.agourlay.cornichon.http.HttpService._
import com.github.agourlay.cornichon.http.HttpRequest._
import com.github.agourlay.cornichon.util.Caching
import io.circe.{ Encoder, Json }
import scala.concurrent.Future
import scala.concurrent.duration._
class HttpService(
baseUrl: String,
requestTimeout: FiniteDuration,
client: HttpClient,
config: Config)(implicit ioRuntime: IORuntime) {
// Cannot be globally shared because it depends on `baseUrl`
private val fullUrlCache = Caching.buildCache[String, String]()
  // Resolves placeholders in the optional request body and parses the result as JSON.
  // Returns Right(None) when no body was supplied.
  private def resolveAndParseBody[A: Show: Resolvable: Encoder](body: Option[A], scenarioContext: ScenarioContext): Either[CornichonError, Option[Json]] =
    body.map(scenarioContext.fillPlaceholders(_)) match {
      case None => rightNone
      case Some(Left(e)) => Left(e)
      case Some(Right(resolvedBody)) => parseDslJson(resolvedBody).map(Some.apply)
    }
  // Resolves all dynamic parts of a request in one pass:
  //  - placeholders in the URL, then the base URL is applied
  //  - placeholders in the optional body, parsed to JSON
  //  - query params embedded in the URL merged with the explicit ones
  //  - explicit headers merged with session "with headers" minus the ignored selection
  private def resolveRequestParts[A: Show: Resolvable: Encoder](
    url: String,
    body: Option[A],
    params: Seq[(String, String)],
    headers: Seq[(String, String)],
    ignoreFromWithHeaders: HeaderSelection)(scenarioContext: ScenarioContext): Either[CornichonError, (String, Option[Json], Seq[(String, String)], List[(String, String)])] =
    for {
      jsonBodyResolved <- resolveAndParseBody(body, scenarioContext)
      urlResolved <- scenarioContext.fillPlaceholders(url)
      completeUrlResolved = withBaseUrl(urlResolved)
      urlParams <- client.paramsFromUrl(completeUrlResolved)
      explicitParams <- scenarioContext.fillPlaceholders(params)
      allParams = urlParams ++ explicitParams
      extractedWithHeaders <- extractWithHeadersSession(scenarioContext.session)
      allHeaders = headers ++ ignoreHeadersSelection(extractedWithHeaders, ignoreFromWithHeaders)
      headersResolved <- scenarioContext.fillPlaceholders(allHeaders)
    } yield (completeUrlResolved, jsonBodyResolved, allParams, headersResolved)
private def runRequest[A: Show: Resolvable: Encoder](
r: HttpRequest[A],
expectedStatus: Option[Int],
extractor: ResponseExtractor,
ignoreFromWithHeaders: HeaderSelection)(scenarioContext: ScenarioContext): EitherT[IO, CornichonError, Session] =
for {
(url, jsonBody, params, headers) <- EitherT.fromEither[IO](resolveRequestParts(r.url, r.body, r.params, r.headers, ignoreFromWithHeaders)(scenarioContext))
resolvedRequest = HttpRequest(r.method, url, jsonBody, params, headers)
configuredRequest = configureRequest(resolvedRequest, config)
resp <- client.runRequest(configuredRequest, requestTimeout)
newSession <- EitherT.fromEither[IO](handleResponse(resp, configuredRequest.show, expectedStatus, extractor)(scenarioContext.session))
} yield newSession
private def runStreamRequest(r: HttpStreamedRequest, expectedStatus: Option[Int], extractor: ResponseExtractor)(scenarioContext: ScenarioContext) =
for {
(url, _, params, headers) <- EitherT.fromEither[IO](resolveRequestParts[String](r.url, None, r.params, r.headers, SelectNone)(scenarioContext))
resolvedRequest = HttpStreamedRequest(r.stream, url, r.takeWithin, params, headers)
resp <- EitherT(client.openStream(resolvedRequest, requestTimeout))
newSession <- EitherT.fromEither[IO](handleResponse(resp, resolvedRequest.show, expectedStatus, extractor)(scenarioContext.session))
} yield newSession
private def withBaseUrl(input: String) = {
def urlBuilder(url: String) = {
val trimmedUrl = url.trim
if (baseUrl.isEmpty) trimmedUrl
// the base URL is not applied if the input URL already starts with the protocol
else if (trimmedUrl.startsWith("https://") || trimmedUrl.startsWith("http://")) trimmedUrl
else baseUrl + trimmedUrl
}
fullUrlCache.get(input, k => urlBuilder(k))
}
def requestEffectT[A: Show: Resolvable: Encoder](
request: HttpRequest[A],
extractor: ResponseExtractor = NoOpExtraction,
expectedStatus: Option[Int] = None,
ignoreFromWithHeaders: HeaderSelection = SelectNone): ScenarioContext => EitherT[Future, CornichonError, Session] =
sc => {
val f = requestEffect(request, extractor, expectedStatus, ignoreFromWithHeaders)
EitherT(f(sc))
}
def requestEffectIO[A: Show: Resolvable: Encoder](
request: HttpRequest[A],
extractor: ResponseExtractor = NoOpExtraction,
expectedStatus: Option[Int] = None,
ignoreFromWithHeaders: HeaderSelection = SelectNone): ScenarioContext => IO[Either[CornichonError, Session]] =
sc => runRequest(request, expectedStatus, extractor, ignoreFromWithHeaders)(sc).value
def requestEffect[A: Show: Resolvable: Encoder](
request: HttpRequest[A],
extractor: ResponseExtractor = NoOpExtraction,
expectedStatus: Option[Int] = None,
ignoreFromWithHeaders: HeaderSelection = SelectNone): ScenarioContext => Future[Either[CornichonError, Session]] =
sc => {
val effect = requestEffectIO(request, extractor, expectedStatus, ignoreFromWithHeaders)
effect(sc).unsafeToFuture()
}
def streamEffect(request: HttpStreamedRequest, expectedStatus: Option[Int] = None, extractor: ResponseExtractor = NoOpExtraction): ScenarioContext => Future[Either[CornichonError, Session]] =
rs => runStreamRequest(request, expectedStatus, extractor)(rs).value.unsafeToFuture()
def openSSE(url: String, takeWithin: FiniteDuration, params: Seq[(String, String)], headers: Seq[(String, String)],
extractor: ResponseExtractor = NoOpExtraction, expectedStatus: Option[Int] = None) = {
val req = HttpStreamedRequest(SSE, url, takeWithin, params, headers)
streamEffect(req, expectedStatus, extractor)
}
def openWS(url: String, takeWithin: FiniteDuration, params: Seq[(String, String)], headers: Seq[(String, String)],
extractor: ResponseExtractor = NoOpExtraction, expectedStatus: Option[Int] = None) = {
val req = HttpStreamedRequest(WS, url, takeWithin, params, headers)
streamEffect(req, expectedStatus, extractor)
}
}
/** What to lift from the last response body into the session, in addition to
  * the standard `last-response-*` keys.
  */
sealed trait ResponseExtractor
/** Stores the whole response body under `targetKey`. */
case class RootExtractor(targetKey: String) extends ResponseExtractor
/** Stores the JSON value found at JSON-path `path` under `targetKey`. */
case class PathExtractor(path: String, targetKey: String) extends ResponseExtractor
/** No additional extraction. */
object NoOpExtraction extends ResponseExtractor
/** Selects which of the session-registered `with-headers` to drop for a
  * request (see `HttpService.ignoreHeadersSelection`).
  */
sealed trait HeaderSelection
/** Drop every registered header. */
object SelectAll extends HeaderSelection
/** Keep every registered header. */
object SelectNone extends HeaderSelection
/** Drop only the headers whose names are listed. */
case class ByNames(names: Seq[String]) extends HeaderSelection
/** Pure helpers shared by [[HttpService]]: session header (de)serialization,
  * request configuration policies and response-to-session bookkeeping.
  */
object HttpService {

  // Shared Right constants to avoid re-allocating on hot paths.
  val rightNil = Right(Nil)
  val rightNone = Right(None)

  /** Well-known session keys written after every HTTP exchange. */
  object SessionKeys {
    val lastResponseBodyKey = "last-response-body"
    val lastResponseStatusKey = "last-response-status"
    val lastResponseHeadersKey = "last-response-headers"
    val lastResponseRequestKey = "last-response-request"
    val withHeadersKey = "with-headers"
    // Using non-ASCII chars to assure that those won't be present inside the headers.
    val headersKeyValueDelim = '→'
    val interHeadersValueDelim = '¦'
    val interHeadersValueDelimString = interHeadersValueDelim.toString
  }

  import HttpService.SessionKeys._

  /** Headers previously registered under `with-headers`, decoded from the
    * session; empty when the key is absent.
    */
  def extractWithHeadersSession(session: Session): Either[CornichonError, Seq[(String, String)]] =
    session.getOpt(withHeadersKey) match {
      case Some(h) => decodeSessionHeaders(h)
      case None => rightNil
    }

  /** Encodes a single header as `name→value`. */
  def encodeSessionHeader(name: String, value: String) =
    s"$name$headersKeyValueDelim$value"

  /** Encodes all headers into one session value, entries separated by `¦`. */
  def encodeSessionHeaders(headers: Seq[(String, String)]): String =
    headers.iterator
      .map { case (name, value) => encodeSessionHeader(name, value) }
      .mkString(interHeadersValueDelimString)

  /** Inverse of [[encodeSessionHeaders]]; fails on any entry that is not
    * exactly `name→value`.
    */
  def decodeSessionHeaders(headers: String): Either[CornichonError, List[(String, String)]] =
    headers.split(interHeadersValueDelim).toList.traverse { header =>
      val elms = header.split(headersKeyValueDelim)
      if (elms.length != 2)
        BadSessionHeadersEncoding(header).asLeft
      else
        (elms(0) -> elms(1)).asRight
    }

  /** Applies `config`-driven policies to a request: optional tracing, and
    * warn/fail behaviour on duplicate header names.
    *
    * @throws Exception when `failOnDuplicateHeaders` is set and duplicates are present
    */
  def configureRequest[A: Show](req: HttpRequest[A], config: Config): HttpRequest[A] = {
    // Computed at most once (and only when a duplicate-header flag is on),
    // instead of once per policy flag as before.
    lazy val hasDuplicateHeaders = req.headers.groupBy(_._1).exists(_._2.size > 1)
    if (config.traceRequests)
      println(DebugLogInstruction(req.show, 1).colorized)
    if (config.warnOnDuplicateHeaders && hasDuplicateHeaders)
      println(WarningLogInstruction(s"\\n**Warning**\\nduplicate headers detected in request:\\n${req.show}", 1).colorized)
    if (config.failOnDuplicateHeaders && hasDuplicateHeaders)
      throw BasicError(s"duplicate headers detected in request:\\n${req.show}").toException
    else
      req
  }

  /** Removes from `headers` the entries selected by `ignore`. */
  def ignoreHeadersSelection(headers: Seq[(String, String)], ignore: HeaderSelection): Seq[(String, String)] =
    ignore match {
      case SelectNone => headers
      case SelectAll => Nil
      case ByNames(names) => headers.filterNot { case (n, _) => names.contains(n) }
    }

  /** Validates the response status against the (optional) expected one. */
  def expectStatusCode(httpResponse: HttpResponse, expected: Option[Int], requestDescription: String): Either[CornichonError, HttpResponse] =
    expected match {
      case None =>
        httpResponse.asRight
      case Some(expectedStatus) if httpResponse.status == expectedStatus =>
        httpResponse.asRight
      case Some(expectedStatus) =>
        StatusNonExpected(expectedStatus, httpResponse.status, httpResponse.headers, httpResponse.body, requestDescription).asLeft
    }

  /** Writes the standard `last-response-*` keys plus any extractor-driven
    * additions into the session.
    */
  def fillInSessionWithResponse(session: Session, extractor: ResponseExtractor, requestDescription: String)(response: HttpResponse): Either[CornichonError, Session] = {
    val additionalExtractions = extractor match {
      case NoOpExtraction =>
        rightNil
      case RootExtractor(targetKey) =>
        Right((targetKey -> response.body) :: Nil)
      case PathExtractor(path, targetKey) =>
        JsonPath.runStrict(path, response.body)
          .map(extractedJson => (targetKey -> jsonStringValue(extractedJson)) :: Nil)
    }
    additionalExtractions.flatMap { extra =>
      val allElementsToAdd = commonSessionExtractions(response, requestDescription) ++ extra
      session.addValues(allElementsToAdd: _*)
    }
  }

  // Status validation followed by session bookkeeping.
  private def handleResponse(resp: HttpResponse, requestDescription: String, expectedStatus: Option[Int], extractor: ResponseExtractor)(session: Session): Either[CornichonError, Session] =
    expectStatusCode(resp, expectedStatus, requestDescription)
      .flatMap(fillInSessionWithResponse(session, extractor, requestDescription))

  // The `last-response-*` entries recorded after every exchange.
  private def commonSessionExtractions(response: HttpResponse, requestDescription: String): List[(String, String)] =
    (lastResponseStatusKey -> response.status.toString) ::
      (lastResponseBodyKey -> response.body) ::
      (lastResponseHeadersKey -> encodeSessionHeaders(response.headers)) ::
      (lastResponseRequestKey -> requestDescription) :: Nil
}
/*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.wiki
/** Small helpers for generating the Atlas wiki. */
object Utils {
  // Using unicode hyphen is a hack to get around:
  // https://github.com/github/markup/issues/345
  /** Replaces ASCII '-' in a page name with the Unicode hyphen (U+2010). */
  def fileName(name: String) = name.replace('-', '\\u2010')
}
| jasimmk/atlas | atlas-wiki/src/main/scala/com/netflix/atlas/wiki/Utils.scala | Scala | apache-2.0 | 805 |
/* Code Pulse: a real-time code coverage tool, for more information, see <http://code-pulse.com/>
*
* Copyright (C) 2014-2017 Code Dx, Inc. <https://codedx.com/>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.codedx.codepulse.hq.agent
import com.codedx.codepulse.agent.common.message.AgentOperationMode
import com.codedx.codepulse.hq.connect.ControlConnection
import com.codedx.codepulse.hq.errors.TraceErrorController
import com.codedx.codepulse.hq.protocol.ControlMessage
import reactive.EventSource
import reactive.EventStream
/** Commands that drive the agent state machine (see `AgentStateManager.handleCommand`). */
sealed trait AgentStateCommand
object AgentStateCommand {
  /** Agent finished initializing; transition to Tracing (or Suspended if a suspend was requested). */
  case object Start extends AgentStateCommand
  /** Request shutdown; the Stop control message is sent on entering ShuttingDown. */
  case object Stop extends AgentStateCommand
  /** The agent reported it is shutting down on its own. */
  case object ReceivedShutdown extends AgentStateCommand
  case object Pause extends AgentStateCommand
  case object Suspend extends AgentStateCommand
  case object Resume extends AgentStateCommand
}
/** Externally-visible states of a traced agent, as exposed by
  * `AgentStateManager.current` and its change events.
  */
sealed trait AgentState
object AgentState {
  case object Initializing extends AgentState
  case object Tracing extends AgentState
  case object Paused extends AgentState
  case object Suspended extends AgentState
  case object ShuttingDown extends AgentState
  case object Unknown extends AgentState
}
/** Tracks the lifecycle state of a tracer agent and mediates all state
  * transitions, sending the appropriate `ControlMessage`s over the control
  * connection as states are entered and exited.
  *
  * Transitions are serialized via `synchronized`, and every change is
  * broadcast on [[agentStateChange]].
  */
class AgentStateManager(agentControlConnection: ControlConnection, traceErrorController: TraceErrorController) {

  // Current internal state; written only under `synchronized` in transitionTo.
  private var currentState: InternalState = Initializing
  // Millisecond timestamp of the most recent state change.
  private var _lastStateChange: Long = System.currentTimeMillis

  /** An event fired when the agent state changes */
  def agentStateChange: EventStream[AgentState] = agentStateChangeEvent
  private val agentStateChangeEvent = new EventSource[AgentState]

  /** Moves to `newState`, running the old state's exit hook and the new
    * state's enter hook (these send the control messages), then fires the
    * change event.
    *
    * @throws IllegalStateException when the transition is not permitted
    */
  def transitionTo(newState: InternalState) {
    synchronized {
      if (!isNewStateValid(newState))
        throw new IllegalStateException("invalid state transition")
      currentState.exit(newState)
      newState.enter(currentState)
      currentState = newState
      _lastStateChange = System.currentTimeMillis
      agentStateChangeEvent fire currentState.state
    }
  }

  // Both the outgoing and the incoming state must agree the transition is legal.
  private def isNewStateValid(newState: InternalState): Boolean = currentState.canExit(newState) && newState.canEnter(currentState)

  /** True when a heartbeat reporting `mode` is consistent with the current state. */
  def isHeartbeatModeExpected(mode: AgentOperationMode) = currentState.isHeartbeatModeExpected(mode)

  /** The externally-visible state. */
  def current = currentState.state

  /** Millisecond timestamp of the last transition. */
  def lastStateChange = _lastStateChange

  /** Routes `command` through the current state's handler (falling back to
    * the shared handler) and performs the resulting transition, if any.
    */
  def handleCommand(command: AgentStateCommand) = (currentState.handleCommand orElse baseCommandHandler)(command) match {
    case Some(newState) if (newState != currentState) => transitionTo(newState)
    case _ =>
  }

  // Default command -> state mapping used when the current state has no
  // specific handling. `Start` is only meaningful in Initializing, so it (and
  // anything else unhandled) is rejected here.
  private val baseCommandHandler: PartialFunction[AgentStateCommand, Option[InternalState]] = {
    case AgentStateCommand.Stop => Some(ShuttingDown)
    case AgentStateCommand.ReceivedShutdown => Some(ShuttingDown)
    case AgentStateCommand.Pause => Some(Paused)
    case AgentStateCommand.Suspend => Some(Suspended)
    case AgentStateCommand.Resume => Some(Tracing)
    case _ => throw new IllegalStateException("unexpected command")
  }

  /** Internal state protocol: each state validates transitions and performs
    * entry/exit side effects (control messages to the agent).
    */
  sealed private trait InternalState {
    def state: AgentState
    def isHeartbeatModeExpected(mode: AgentOperationMode): Boolean
    def canEnter(oldState: InternalState): Boolean = true
    def canExit(newState: InternalState): Boolean = true
    def handleCommand = PartialFunction.empty[AgentStateCommand, Option[InternalState]]
    def enter(oldState: InternalState) {}
    def exit(newState: InternalState) {}
  }

  private case object Initializing extends InternalState {
    // Remembers a Suspend received before Start, so Start can go straight to Suspended.
    private var suspended = false
    def state = AgentState.Initializing
    def isHeartbeatModeExpected(mode: AgentOperationMode) = mode == AgentOperationMode.Initializing
    // Initializing is the entry state only; it can never be re-entered.
    override def canEnter(oldState: InternalState) = false
    override def handleCommand = {
      case AgentStateCommand.Suspend =>
        suspended = true
        None
      case AgentStateCommand.Resume =>
        suspended = false
        None
      case AgentStateCommand.Start => suspended match {
        case true => Some(Suspended)
        case false => Some(Tracing)
      }
    }
    override def exit(newState: InternalState) = {
      // special handling, we need to write suspend before start
      val msg1 = if (newState == Suspended) List(ControlMessage.Suspend) else Nil
      val msgs = msg1 :+ ControlMessage.Start
      agentControlConnection.send(msgs: _*)
    }
  }

  private case object Tracing extends InternalState {
    def state = AgentState.Tracing
    def isHeartbeatModeExpected(mode: AgentOperationMode) = mode == AgentOperationMode.Tracing
  }

  private case object Paused extends InternalState {
    def state = AgentState.Paused
    def isHeartbeatModeExpected(mode: AgentOperationMode) = mode == AgentOperationMode.Paused
    override def enter(oldState: InternalState) = agentControlConnection.send(ControlMessage.Pause)
    override def exit(newState: InternalState) = agentControlConnection.send(ControlMessage.Unpause)
  }

  private case object Suspended extends InternalState {
    def state = AgentState.Suspended
    def isHeartbeatModeExpected(mode: AgentOperationMode) = mode == AgentOperationMode.Suspended
    override def enter(oldState: InternalState) = oldState match {
      // coming from Initializing, the Suspend message was already sent by Initializing.exit
      case Initializing => // special handling, do nothing
      case _ => agentControlConnection.send(ControlMessage.Suspend)
    }
    override def exit(newState: InternalState) = agentControlConnection.send(ControlMessage.Unsuspend)
  }

  private case object ShuttingDown extends InternalState {
    def state = AgentState.ShuttingDown
    def isHeartbeatModeExpected(mode: AgentOperationMode) = mode == AgentOperationMode.Shutdown
    // Terminal state: no transitions out, and all further commands are ignored.
    override def canExit(newState: InternalState) = false
    override def handleCommand = {
      case _ => None // ignore all commands now
    }
    override def enter(oldState: InternalState) = {
      agentControlConnection.send(ControlMessage.Stop)
      traceErrorController.setTraceShuttingDown
    }
  }
}
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.keras.layers
import com.intel.analytics.bigdl.dllib.nn.abstractnn.AbstractModule
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.utils.Shape
import com.intel.analytics.bigdl.dllib.keras.Sequential
import com.intel.analytics.bigdl.dllib.keras.serializer.ModuleSerializationTest
/** Verifies that the `Masking` layer matches Keras' `Masking` for 2D and 3D
  * inputs; both the forward output and the gradient are compared.
  */
class MaskingSpec extends KerasBaseSpec{

  "Masking" should "be the same as Keras" in {
    // Reference Keras model: mask value 0.0 over a [1, 3] input.
    val kerasCode =
      """
        |input_tensor = Input(shape=[3])
        |input = np.random.uniform(0, 1, [1, 3])
        |output_tensor = Masking(0.0)(input_tensor)
        |model = Model(input=input_tensor, output=output_tensor)
      """.stripMargin
    val seq = Sequential[Float]()
    val masking = Masking[Float](0.0, inputShape = Shape(3))
    seq.add(masking)
    checkOutputAndGrad(seq.asInstanceOf[AbstractModule[Tensor[Float], Tensor[Float], Float]],
      kerasCode)
  }

  "Masking 3D" should "be the same as Keras" in {
    // Reference Keras model: mask value 0.0 over a [2, 3, 24] batch.
    val kerasCode =
      """
        |input_tensor = Input(shape=[3, 24])
        |input = np.random.random([2, 3, 24])
        |output_tensor = Masking(0.0)(input_tensor)
        |model = Model(input=input_tensor, output=output_tensor)
      """.stripMargin
    val seq = Sequential[Float]()
    val masking = Masking[Float](0.0, inputShape = Shape(3, 24))
    seq.add(masking)
    checkOutputAndGrad(seq.asInstanceOf[AbstractModule[Tensor[Float], Tensor[Float], Float]],
      kerasCode)
  }
}
/** Round-trips a built `Masking` layer through module serialization. */
class MaskingSerialTest extends ModuleSerializationTest {
  override def test(): Unit = {
    // Build the layer for a 2 x 3 x 12 input before serializing it.
    val masking = Masking[Float](0.0, inputShape = Shape(3, 12))
    masking.build(Shape(2, 3, 12))
    val sampleInput = Tensor[Float](2, 3, 12).rand()
    runSerializationTest(masking, sampleInput)
  }
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/keras/layers/MaskingSpec.scala | Scala | apache-2.0 | 2,374 |
package visceljs
import org.scalajs.dom.{MouseEvent, document}
import scalatags.JsDom.all.{Frag, HtmlTag, Modifier, SeqFrag, Tag, a, bindJsAnyLike, button, cls, href, onclick, raw, stringAttr}
import viscel.shared.{Blob, Vid}
import scala.scalajs.js.URIUtils.encodeURIComponent
/** URL-fragment builders, CSS classes and small DOM helpers shared by the UI. */
object Definitions {

  def path_main = "#"
  // `pos` is zero-based internally but rendered one-based in the URL.
  def path_asset(vid: Vid, pos: Int) = s"#${encodeURIComponent(vid.str)}/${pos + 1}"
  def path_blob(blob: Blob) = s"blob/${blob.sha1}?mime=${blob.mime}"
  def path_front(vid: Vid) = s"#${encodeURIComponent(vid.str)}"
  def path_tools = "tools"

  val class_placeholder = cls := "placeholder"
  val class_preview = cls := "preview"

  /** Anchor linking to the tools page. */
  def link_tools(ts: Frag*): Tag = a(href := path_tools)(ts)

  /** Enters fullscreen on the whole document, or exits if already fullscreen. */
  def toggleFullscreen(): Unit = {
    if (document.fullscreenElement == null)
      document.documentElement.requestFullscreen()
    else document.exitFullscreen()
  }

  /** Button that runs `action` on primary (left) mouse click only. */
  def lcButton(action: => Unit, m: Modifier*): HtmlTag =
    button(onclick := { (e: MouseEvent) =>
      // button == 0 is the primary button
      if (e.button == 0) {
        e.preventDefault()
        action
      }
    })(m: _*)
}
/** Inline SVG icons used across the UI, embedded as raw fragments. */
object Icons {
  // icons MIT licensed: https://feathericons.com/
  val prev: Modifier = raw(
    """<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><polyline points="15 18 9 12 15 6"></polyline></svg>"""
  )
  val next: Modifier = raw(
    """<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><polyline points="9 18 15 12 9 6"></polyline></svg>"""
  )
  val modus: Modifier = raw(
    """<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><line x1="4" y1="21" x2="4" y2="14"></line><line x1="4" y1="10" x2="4" y2="3"></line><line x1="12" y1="21" x2="12" y2="12"></line><line x1="12" y1="8" x2="12" y2="3"></line><line x1="20" y1="21" x2="20" y2="16"></line><line x1="20" y1="12" x2="20" y2="3"></line><line x1="1" y1="14" x2="7" y2="14"></line><line x1="9" y1="8" x2="15" y2="8"></line><line x1="17" y1="16" x2="23" y2="16"></line></svg>"""
  )
  val bookmark: Modifier = raw(
    """<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M19 21l-7-5-7 5V5a2 2 0 0 1 2-2h10a2 2 0 0 1 2 2z"></path></svg>"""
  )
  // Shown only when NOT fullscreen (css class "no-fullscreen").
  val maximize: Modifier = raw(
    """<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="no-fullscreen"><polyline points="15 3 21 3 21 9"></polyline><polyline points="9 21 3 21 3 15"></polyline><line x1="21" y1="3" x2="14" y2="10"></line><line x1="3" y1="21" x2="10" y2="14"></line></svg>"""
  )
  // Shown only when fullscreen (css class "only-fullscreen").
  val minimize: Modifier = raw(
    """<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="only-fullscreen"><polyline points="4 14 10 14 10 20"></polyline><polyline points="20 10 14 10 14 4"></polyline><line x1="14" y1="10" x2="21" y2="3"></line><line x1="3" y1="21" x2="10" y2="14"></line></svg>"""
  )
  val externalLink: Modifier = raw(
    """<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M18 13v6a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2V8a2 2 0 0 1 2-2h6"></path><polyline points="15 3 21 3 21 9"></polyline><line x1="10" y1="14" x2="21" y2="3"></line></svg>"""
  )
  val front: Modifier = raw(
    """<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><line x1="8" y1="6" x2="21" y2="6"></line><line x1="8" y1="12" x2="21" y2="12"></line><line x1="8" y1="18" x2="21" y2="18"></line><line x1="3" y1="6" x2="3.01" y2="6"></line><line x1="3" y1="12" x2="3.01" y2="12"></line><line x1="3" y1="18" x2="3.01" y2="18"></line></svg>"""
  )
  val unavailable: Modifier = raw(
    """<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M22.61 16.95A5 5 0 0 0 18 10h-1.26a8 8 0 0 0-7.05-6M5 5a8 8 0 0 0 4 15h9a5 5 0 0 0 1.7-.3"></path><line x1="1" y1="1" x2="23" y2="23"></line></svg>"""
  )
}
| rmgk/viscel | code/js/src/main/scala/visceljs/Definitions.scala | Scala | agpl-3.0 | 4,858 |
package us.feliscat.util
/**
* @author K. Sakamoto
* Created on 2017/08/09
*/
/** Application run modes as a sealed ADT (one case object per mode). */
object RunModes {
  /** Marker trait for the concrete modes below. */
  sealed trait RunMode
  final object Development extends RunMode
  final object ProcessDetail extends RunMode
  final object Test extends RunMode
  final object Pilot extends RunMode
  final object Production extends RunMode
}
| ktr-skmt/FelisCatusZero-multilingual | libraries/src/main/scala/us/feliscat/util/RunModes.scala | Scala | apache-2.0 | 362 |
package japgolly.scalajs.react.component.builder
import japgolly.scalajs.react.component.Scala
import japgolly.scalajs.react.component.Scala.RawMounted
import japgolly.scalajs.react.component.builder.LifecycleF._
import japgolly.scalajs.react.facade.React
import japgolly.scalajs.react.internal.Lens
import japgolly.scalajs.react.util.Effect._
import japgolly.scalajs.react.util._
import japgolly.scalajs.react.{ComponentDom, PropsChildren, ReactCaughtError, StateAccess}
import scala.scalajs.LinkingInfo.developmentMode
import scala.scalajs.js
/** Collects the optional lifecycle callbacks configured for a Scala component.
  * Each field mirrors one React lifecycle method; `None` means the method is
  * not installed.
  */
final case class LifecycleF[F[_], A[_], P, S, B, SS](
    componentDidCatch        : Option[ComponentDidCatchFn        [F, A, P, S, B]],
    componentDidMount        : Option[ComponentDidMountFn        [F, A, P, S, B]],
    componentDidUpdate       : Option[ComponentDidUpdateFn       [F, A, P, S, B, SS]],
    componentWillMount       : Option[ComponentWillMountFn       [F, A, P, S, B]],
    componentWillReceiveProps: Option[ComponentWillReceivePropsFn[F, A, P, S, B]],
    componentWillUnmount     : Option[ComponentWillUnmountFn     [F, A, P, S, B]],
    componentWillUpdate      : Option[ComponentWillUpdateFn      [F, A, P, S, B]],
    getDerivedStateFromProps : Option[GetDerivedStateFromPropsFn [      P, S]],
    getSnapshotBeforeUpdate  : Option[GetSnapshotBeforeUpdateFn  [F, A, P, S, B, SS]],
    shouldComponentUpdate    : Option[ShouldComponentUpdateFn    [F, A, P, S, B]]) {

  type This = LifecycleF[F, A, P, S, B, SS]

  /** Registers `g` at the callback slot selected by `lens`; when a callback
    * is already installed, the two results are combined via `Semigroup[O]`.
    */
  def append[I, O](lens: Lens[LifecycleF[F, A, P, S, B, SS], Option[I => O]])(g: I => O)(implicit s: Semigroup[O]): This =
    lens.mod {
      case None           => Some(g)
      case Some(existing) => Some(i => s.append(existing(i), g(i)))
    }(this)

  /** Swaps out the two snapshot-dependent callbacks, re-binding the snapshot
    * type to `SS2`; every other callback is carried over unchanged.
    */
  def resetSnapshot[SS2](componentDidUpdate     : Option[ComponentDidUpdateFn     [F, A, P, S, B, SS2]],
                         getSnapshotBeforeUpdate: Option[GetSnapshotBeforeUpdateFn[F, A, P, S, B, SS2]]): LifecycleF[F, A, P, S, B, SS2] =
    LifecycleF(
      componentDidCatch,
      componentDidMount,
      componentDidUpdate,        // the SS2-typed argument, shadowing the field
      componentWillMount,
      componentWillReceiveProps,
      componentWillUnmount,
      componentWillUpdate,
      getDerivedStateFromProps,
      getSnapshotBeforeUpdate,   // the SS2-typed argument, shadowing the field
      shouldComponentUpdate)
}
object LifecycleF {
  /** Snapshot type used when no `getSnapshotBeforeUpdate` is configured. */
  type NoSnapshot = Unit

  /** A lifecycle configuration with no callbacks installed. */
  def empty[F[_], A[_], P, S, B]: LifecycleF[F, A, P, S, B, NoSnapshot] =
    new LifecycleF(None, None, None, None, None, None, None, None, None, None)
  /** Common plumbing for all lifecycle-callback input objects: wraps the raw
    * mounted component and carries the chosen sync (`F`) and async (`A`) effects.
    */
  sealed abstract class Base[F[_], A[_], P, S, B](final val raw: RawMounted[P, S, B])(implicit f: UnsafeSync[F], a: Async[A]) {
    protected final implicit def F: UnsafeSync[F] = f
    protected final implicit def A: Async[A] = a

    final def backend      : B                            = raw.backend
    final def mountedImpure: Scala.MountedImpure[P, S, B] = raw.mountedImpure
    final def mountedPure  : Scala.Mounted[F, A, P, S, B] = raw.mountedPure.withEffect(F).withAsyncEffect(A)
  }
  /** Write access to component state from within a lifecycle callback,
    * implemented by forwarding every call to the pure mounted component.
    */
  sealed trait StateW[F[_], A[_], P, S, B] extends StateAccess.WriteWithProps[F, A, P, S] { self: Base[F, A, P, S, B] =>

    /** @param callback Executed after state is changed. */
    final override def setState[G[_]](newState: S, callback: => G[Unit])(implicit G: Dispatch[G]): F[Unit] =
      mountedPure.setState(newState, callback)

    /** @param callback Executed after state is changed. */
    final override def modState[G[_]](mod: S => S, callback: => G[Unit])(implicit G: Dispatch[G]): F[Unit] =
      mountedPure.modState(mod, callback)

    /** @param callback Executed after state is changed. */
    final override def modState[G[_]](mod: (S, P) => S, callback: => G[Unit])(implicit G: Dispatch[G]): F[Unit] =
      mountedPure.modState(mod, callback)

    /** @param callback Executed regardless of whether state is changed. */
    final override def setStateOption[G[_]](newState: Option[S], callback: => G[Unit])(implicit G: Dispatch[G]): F[Unit] =
      mountedPure.setStateOption(newState, callback)

    /** @param callback Executed regardless of whether state is changed. */
    final override def modStateOption[G[_]](mod: S => Option[S], callback: => G[Unit])(implicit G: Dispatch[G]): F[Unit] =
      mountedPure.modStateOption(mod, callback)

    /** @param callback Executed regardless of whether state is changed. */
    final override def modStateOption[G[_]](mod: (S, P) => Option[S], callback: => G[Unit])(implicit G: Dispatch[G]): F[Unit] =
      mountedPure.modStateOption(mod, callback)
  }
  /** Read/write state access: adds a synchronous `state` read on top of [[StateW]]. */
  sealed trait StateRW[F[_], A[_], P, S, B] extends StateW[F, A, P, S, B] { self: Base[F, A, P, S, B] =>
    final def state: S = mountedImpure.state
  }
  /** Exposes React's `forceUpdate` to lifecycle callbacks, forwarding to the
    * mounted component.
    */
  sealed trait ForceUpdate[F[_], A[_], P, S, B] { self: Base[F, A, P, S, B] =>

    final def forceUpdate: F[Unit] =
      forceUpdate(DefaultEffects.Sync.empty)(DefaultEffects.Sync)

    /** @param callback Passed through to the mounted component's `forceUpdate`. */
    final def forceUpdate[G[_]](callback: => G[Unit])(implicit G: Dispatch[G]): F[Unit] =
      mountedPure.forceUpdate(callback)
  }
private def wrapTostring(toString: String) =
if (developmentMode)
toString
.replaceAll("undefined → undefined", "undefined")
.replace("props: undefined, ", "")
.replace("state: undefined)", ")")
.replace(", )", ")")
else
toString
// ===================================================================================================================
  /** Lens onto the `componentDidCatch` slot of a [[LifecycleF]]. */
  def componentDidCatch[F[_], A[_], P, S, B, SS] = Lens((_: LifecycleF[F, A, P, S, B, SS]).componentDidCatch)(n => _.copy(componentDidCatch = n))

  type ComponentDidCatchFn[F[_], A[_], P, S, B] = ComponentDidCatch[F, A, P, S, B] => F[Unit]

  /** Input to `componentDidCatch`: exposes the caught error plus read/write
    * state access and `forceUpdate`.
    */
  final class ComponentDidCatch[F[_]: UnsafeSync, A[_]: Async, P, S, B](raw: RawMounted[P, S, B], rawError: js.Any, rawInfo: React.ErrorInfo)
      extends Base[F, A, P, S, B](raw) with StateRW[F, A, P, S, B] with ForceUpdate[F, A, P, S, B] {

    override type WithEffect     [G[_]] = ComponentDidCatch[G, A, P, S, B]
    override type WithAsyncEffect[G[_]] = ComponentDidCatch[F, G, P, S, B]
    override def withEffect     [G[_]](implicit G: UnsafeSync[G]): WithEffect[G]      = new ComponentDidCatch(raw, rawError, rawInfo)
    override def withAsyncEffect[G[_]](implicit G: Async[G])     : WithAsyncEffect[G] = new ComponentDidCatch(raw, rawError, rawInfo)

    override def toString = wrapTostring(s"ComponentDidCatch(${error.rawErrorString})")

    /** The error caught by React, wrapped with its component-stack info. */
    val error = ReactCaughtError(rawError, rawInfo)

    def props        : P                    = mountedImpure.props
    def propsChildren: PropsChildren        = mountedImpure.propsChildren
    def getDOMNode   : ComponentDom.Mounted = mountedImpure.getDOMNode.asMounted()
  }
// ===================================================================================================================
  /** Lens onto the `componentDidMount` slot of a [[LifecycleF]]. */
  def componentDidMount[F[_], A[_], P, S, B, SS] = Lens((_: LifecycleF[F, A, P, S, B, SS]).componentDidMount)(n => _.copy(componentDidMount = n))

  type ComponentDidMountFn[F[_], A[_], P, S, B] = ComponentDidMount[F, A, P, S, B] => F[Unit]

  /** Input to `componentDidMount`: the component is mounted, so props, state
    * (read/write), `forceUpdate` and the DOM node are all available.
    */
  final class ComponentDidMount[F[_]: UnsafeSync, A[_]: Async, P, S, B](raw: RawMounted[P, S, B])
      extends Base[F, A, P, S, B](raw) with StateRW[F, A, P, S, B] with ForceUpdate[F, A, P, S, B] {

    override type WithEffect     [G[_]] = ComponentDidMount[G, A, P, S, B]
    override type WithAsyncEffect[G[_]] = ComponentDidMount[F, G, P, S, B]
    override def withEffect     [G[_]](implicit G: UnsafeSync[G]): WithEffect[G]      = new ComponentDidMount(raw)
    override def withAsyncEffect[G[_]](implicit G: Async[G])     : WithAsyncEffect[G] = new ComponentDidMount(raw)

    override def toString = wrapTostring(s"ComponentDidMount(props: $props, state: $state)")

    def props        : P                    = mountedImpure.props
    def propsChildren: PropsChildren        = mountedImpure.propsChildren
    def getDOMNode   : ComponentDom.Mounted = mountedImpure.getDOMNode.asMounted()
  }
// ===================================================================================================================
  /** Lens onto the `componentDidUpdate` slot of a [[LifecycleF]]. */
  def componentDidUpdate[F[_], A[_], P, S, B, SS] = Lens((_: LifecycleF[F, A, P, S, B, SS]).componentDidUpdate)(n => _.copy(componentDidUpdate = n))

  type ComponentDidUpdateFn[F[_], A[_], P, S, B, SS] = ComponentDidUpdate[F, A, P, S, B, SS] => F[Unit]

  /** Input to `componentDidUpdate`: carries the previous props/state and the
    * snapshot produced by `getSnapshotBeforeUpdate`; current values are read
    * through the mounted component.
    */
  final class ComponentDidUpdate[F[_]: UnsafeSync, A[_]: Async, P, S, B, SS](raw: RawMounted[P, S, B], val prevProps: P, val prevState: S, val snapshot: SS)
      extends Base[F, A, P, S, B](raw) with StateW[F, A, P, S, B] with ForceUpdate[F, A, P, S, B] {

    override type WithEffect     [G[_]] = ComponentDidUpdate[G, A, P, S, B, SS]
    override type WithAsyncEffect[G[_]] = ComponentDidUpdate[F, G, P, S, B, SS]
    override def withEffect     [G[_]](implicit G: UnsafeSync[G]): WithEffect[G]      = new ComponentDidUpdate(raw, prevProps, prevState, snapshot)
    override def withAsyncEffect[G[_]](implicit G: Async[G])     : WithAsyncEffect[G] = new ComponentDidUpdate(raw, prevProps, prevState, snapshot)

    override def toString = wrapTostring(s"ComponentDidUpdate(props: $prevProps → $currentProps, state: $prevState → $currentState)")

    def propsChildren: PropsChildren        = mountedImpure.propsChildren
    def currentProps : P                    = mountedImpure.props
    def currentState : S                    = mountedImpure.state
    def getDOMNode   : ComponentDom.Mounted = mountedImpure.getDOMNode.asMounted()
  }
// ===================================================================================================================
  /** Lens onto the `componentWillMount` slot of a [[LifecycleF]]. */
  def componentWillMount[F[_], A[_], P, S, B, SS] = Lens((_: LifecycleF[F, A, P, S, B, SS]).componentWillMount)(n => _.copy(componentWillMount = n))

  type ComponentWillMountFn[F[_], A[_], P, S, B] = ComponentWillMount[F, A, P, S, B] => F[Unit]

  /** Input to `componentWillMount`: the component is not in the DOM yet, so
    * `forceUpdate` (and DOM access) are deliberately unavailable.
    */
  final class ComponentWillMount[F[_]: UnsafeSync, A[_]: Async, P, S, B](raw: RawMounted[P, S, B])
      extends Base[F, A, P, S, B](raw) with StateRW[F, A, P, S, B] {

    override type WithEffect     [G[_]] = ComponentWillMount[G, A, P, S, B]
    override type WithAsyncEffect[G[_]] = ComponentWillMount[F, G, P, S, B]
    override def withEffect     [G[_]](implicit G: UnsafeSync[G]): WithEffect[G]      = new ComponentWillMount(raw)
    override def withAsyncEffect[G[_]](implicit G: Async[G])     : WithAsyncEffect[G] = new ComponentWillMount(raw)

    override def toString = wrapTostring(s"ComponentWillMount(props: $props, state: $state)")

    def props        : P             = mountedImpure.props
    def propsChildren: PropsChildren = mountedImpure.propsChildren

    @deprecated("forceUpdate prohibited within the componentWillMount callback.", "")
    def forceUpdate(no: NotAllowed) = no.result

    // Nope
    // def getDOMNode   : dom.Element   = raw.mounted.getDOMNode
  }
// ===================================================================================================================
  // Lens onto the `componentWillUnmount` callback slot of a LifecycleF record.
  def componentWillUnmount[F[_], A[_], P, S, B, SS] = Lens((_: LifecycleF[F, A, P, S, B, SS]).componentWillUnmount)(n => _.copy(componentWillUnmount = n))
  type ComponentWillUnmountFn[F[_], A[_], P, S, B] = ComponentWillUnmount[F, A, P, S, B] => F[Unit]
  // Scope object handed to the `componentWillUnmount` lifecycle callback.
  // Props/state/DOM are still readable, but all mutating operations are
  // statically disallowed via the NotAllowed overloads below.
  final class ComponentWillUnmount[F[_]: UnsafeSync, A[_]: Async, P, S, B](raw: RawMounted[P, S, B])
      extends Base[F, A, P, S, B](raw) {
    override def toString = wrapTostring(s"ComponentWillUnmount(props: $props, state: $state)")
    def props        : P                    = mountedImpure.props
    def propsChildren: PropsChildren        = mountedImpure.propsChildren
    def state        : S                    = mountedImpure.state
    def getDOMNode   : ComponentDom.Mounted = mountedImpure.getDOMNode.asMounted()
    @deprecated("setState prohibited within the componentWillUnmount callback.", "")
    def setState(no: NotAllowed, cb: Any = null) = no.result
    @deprecated("modState prohibited within the componentWillUnmount callback.", "")
    def modState(no: NotAllowed, cb: Any = null) = no.result
    @deprecated("forceUpdate prohibited within the componentWillUnmount callback.", "")
    def forceUpdate(no: NotAllowed) = no.result
  }
// ===================================================================================================================
  // Lens onto the `componentWillReceiveProps` callback slot of a LifecycleF record.
  def componentWillReceiveProps[F[_], A[_], P, S, B, SS] = Lens((_: LifecycleF[F, A, P, S, B, SS]).componentWillReceiveProps)(n => _.copy(componentWillReceiveProps = n))
  type ComponentWillReceivePropsFn[F[_], A[_], P, S, B] = ComponentWillReceiveProps[F, A, P, S, B] => F[Unit]
  // Scope object handed to the `componentWillReceiveProps` lifecycle callback.
  // Exposes the incoming props (`nextProps`) alongside the still-current ones.
  final class ComponentWillReceiveProps[F[_]: UnsafeSync, A[_]: Async, P, S, B](raw: RawMounted[P, S, B], val nextProps: P)
      extends Base[F, A, P, S, B](raw) with StateRW[F, A, P, S, B] with ForceUpdate[F, A, P, S, B] {
    override type WithEffect     [G[_]] = ComponentWillReceiveProps[G, A, P, S, B]
    override type WithAsyncEffect[G[_]] = ComponentWillReceiveProps[F, G, P, S, B]
    override def withEffect     [G[_]](implicit G: UnsafeSync[G]): WithEffect[G]      = new ComponentWillReceiveProps(raw, nextProps)
    override def withAsyncEffect[G[_]](implicit G: Async[G]): WithAsyncEffect[G] = new ComponentWillReceiveProps(raw, nextProps)
    override def toString = wrapTostring(s"ComponentWillReceiveProps(props: $currentProps → $nextProps, state: $state)")
    def propsChildren: PropsChildren        = mountedImpure.propsChildren
    def currentProps : P                    = mountedImpure.props
    def getDOMNode   : ComponentDom.Mounted = mountedImpure.getDOMNode.asMounted()
  }
// ===================================================================================================================
  // Lens onto the `componentWillUpdate` callback slot of a LifecycleF record.
  def componentWillUpdate[F[_], A[_], P, S, B, SS] = Lens((_: LifecycleF[F, A, P, S, B, SS]).componentWillUpdate)(n => _.copy(componentWillUpdate = n))
  type ComponentWillUpdateFn[F[_], A[_], P, S, B] = ComponentWillUpdate[F, A, P, S, B] => F[Unit]
  // Scope object handed to the `componentWillUpdate` lifecycle callback.
  // Read-only: mutating during this phase would re-enter the update cycle,
  // so setState/modState/forceUpdate are statically disallowed below.
  final class ComponentWillUpdate[F[_]: UnsafeSync, A[_]: Async, P, S, B](raw: RawMounted[P, S, B], val nextProps: P, val nextState: S)
      extends Base[F, A, P, S, B](raw) {
    override def toString = wrapTostring(s"ComponentWillUpdate(props: $currentProps → $nextProps, state: $currentState → $nextState)")
    def propsChildren: PropsChildren        = mountedImpure.propsChildren
    def currentProps : P                    = mountedImpure.props
    def currentState : S                    = mountedImpure.state
    def getDOMNode   : ComponentDom.Mounted = mountedImpure.getDOMNode.asMounted()
    @deprecated("setState prohibited within the componentWillUpdate callback. Use componentWillReceiveProps instead.", "")
    def setState(no: NotAllowed, cb: Any = null) = no.result
    @deprecated("modState prohibited within the componentWillUpdate callback. Use componentWillReceiveProps instead.", "")
    def modState(no: NotAllowed, cb: Any = null) = no.result
    @deprecated("forceUpdate prohibited within the componentWillUpdate callback. Use componentWillReceiveProps instead.", "")
    def forceUpdate(no: NotAllowed) = no.result
  }
// ===================================================================================================================
  // Lens onto the `getDerivedStateFromProps` callback slot of a LifecycleF record.
  def getDerivedStateFromProps[F[_], A[_], P, S, B, SS] = Lens((_: LifecycleF[F, A, P, S, B, SS]).getDerivedStateFromProps)(n => _.copy(getDerivedStateFromProps = n))
  // Derives new state from (props, state); returning None keeps the current state.
  type GetDerivedStateFromPropsFn[P, S] = (P, S) => Option[S]
// ===================================================================================================================
  // Lens onto the `getSnapshotBeforeUpdate` callback slot of a LifecycleF record.
  def getSnapshotBeforeUpdate[F[_], A[_], P, S, B, SS] = Lens((_: LifecycleF[F, A, P, S, B, SS]).getSnapshotBeforeUpdate)(n => _.copy(getSnapshotBeforeUpdate = n))
  // Returns a snapshot value of type SS that is later passed to componentDidUpdate.
  type GetSnapshotBeforeUpdateFn[F[_], A[_], P, S, B, SS] = GetSnapshotBeforeUpdate[F, A, P, S, B] => F[SS]
  // Scope object handed to the `getSnapshotBeforeUpdate` lifecycle callback.
  // Props/state are already the post-update values ("current"), with the
  // pre-update values available as prevProps/prevState. Read-only phase.
  final class GetSnapshotBeforeUpdate[F[_]: Sync, A[_]: Async, P, S, B](raw: RawMounted[P, S, B], val prevProps: P, val prevState: S)
      extends Base[F, A, P, S, B](raw) {
    override def toString = wrapTostring(s"GetSnapshotBeforeUpdate(props: $prevProps → $currentProps, state: $prevState → $currentState)")
    def propsChildren: PropsChildren        = mountedImpure.propsChildren
    def currentProps : P                    = mountedImpure.props
    def currentState : S                    = mountedImpure.state
    def getDOMNode   : ComponentDom.Mounted = mountedImpure.getDOMNode.asMounted()
    // Convenience comparators between the current and previous values.
    def cmpProps(cmp: (P, P) => Boolean): Boolean = cmp(currentProps, prevProps)
    def cmpState(cmp: (S, S) => Boolean): Boolean = cmp(currentState, prevState)
    @deprecated("setState prohibited within the getSnapshotBeforeUpdate callback.", "")
    def setState(no: NotAllowed, cb: Any = null) = no.result
    @deprecated("modState prohibited within the getSnapshotBeforeUpdate callback.", "")
    def modState(no: NotAllowed, cb: Any = null) = no.result
    @deprecated("forceUpdate prohibited within the getSnapshotBeforeUpdate callback.", "")
    def forceUpdate(no: NotAllowed) = no.result
  }
// ===================================================================================================================
  // Lens onto the `shouldComponentUpdate` callback slot of a LifecycleF record.
  def shouldComponentUpdate[F[_], A[_], P, S, B, SS] = Lens((_: LifecycleF[F, A, P, S, B, SS]).shouldComponentUpdate)(n => _.copy(shouldComponentUpdate = n))
  // Returns whether the component should re-render given the incoming props/state.
  type ShouldComponentUpdateFn[F[_], A[_], P, S, B] = ShouldComponentUpdate[F, A, P, S, B] => F[Boolean]
  // Scope object handed to the `shouldComponentUpdate` lifecycle callback.
  // Exposes both current and incoming (next) props/state; read-only phase.
  final class ShouldComponentUpdate[F[_]: Sync, A[_]: Async, P, S, B](raw: RawMounted[P, S, B], val nextProps: P, val nextState: S)
      extends Base[F, A, P, S, B](raw) {
    override def toString = wrapTostring(s"ShouldComponentUpdate(props: $currentProps → $nextProps, state: $currentState → $nextState)")
    def propsChildren: PropsChildren        = mountedImpure.propsChildren
    def currentProps : P                    = mountedImpure.props
    def currentState : S                    = mountedImpure.state
    def getDOMNode   : ComponentDom.Mounted = mountedImpure.getDOMNode.asMounted()
    // Convenience comparators between the current and incoming values.
    def cmpProps(cmp: (P, P) => Boolean): Boolean = cmp(currentProps, nextProps)
    def cmpState(cmp: (S, S) => Boolean): Boolean = cmp(currentState, nextState)
    @deprecated("setState prohibited within the shouldComponentUpdate callback.", "")
    def setState(no: NotAllowed, cb: Any = null) = no.result
    @deprecated("modState prohibited within the shouldComponentUpdate callback.", "")
    def modState(no: NotAllowed, cb: Any = null) = no.result
    @deprecated("forceUpdate prohibited within the shouldComponentUpdate callback.", "")
    def forceUpdate(no: NotAllowed) = no.result
  }
// ===================================================================================================================
  // Scope object available while the component's render function runs.
  // State can be read and written and a re-render forced (StateRW/ForceUpdate
  // mixins, defined elsewhere in this file).
  final class RenderScope[F[_]: UnsafeSync, A[_]: Async, P, S, B](raw: RawMounted[P, S, B])
      extends Base[F, A, P, S, B](raw) with StateRW[F, A, P, S, B] with ForceUpdate[F, A, P, S, B] {
    override type WithEffect     [G[_]] = RenderScope[G, A, P, S, B]
    override type WithAsyncEffect[G[_]] = RenderScope[F, G, P, S, B]
    override def withEffect     [G[_]](implicit G: UnsafeSync[G]): WithEffect[G]      = new RenderScope(raw)
    override def withAsyncEffect[G[_]](implicit G: Async[G]): WithAsyncEffect[G] = new RenderScope(raw)
    override def toString = wrapTostring(s"Render(props: $props, state: $state)")
    def props        : P                    = mountedImpure.props
    def propsChildren: PropsChildren        = mountedImpure.propsChildren
    def getDOMNode   : ComponentDom.Mounted = mountedImpure.getDOMNode.asMounted()
  }
}
| japgolly/scalajs-react | coreGeneric/src/main/scala/japgolly/scalajs/react/component/builder/LifecycleF.scala | Scala | apache-2.0 | 19,882 |
package hyperion
import java.time.{Duration, LocalDate, LocalDateTime}
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.DurationLong
import akka.actor.{ActorLogging, ActorRef, FSM}
import hyperion.MessageDistributor.RegisterReceiver
import hyperion.DailyHistoryActor._
import hyperion.database.DatabaseActor.StoreMeterReading
import hyperion.database.HistoricalMeterReading
import hyperion.p1._
object DailyHistoryActor {
  // FSM states: Sleeping = waiting for the midnight timer,
  // Receiving = waiting for the first telegram of the new day.
  sealed trait State
  case object Sleeping extends State
  case object Receiving extends State
  // The FSM carries no state data, so there is only the Empty marker.
  sealed trait Data
  case object Empty extends Data
}
/**
 * Actor that stores one meter reading per day in an external database.
 *
 * The actor sleeps until midnight, wakes up, persists the first telegram it
 * receives after that, and then goes back to sleep until the next midnight.
 *
 * @param messageDistributor The Actor that distributes incoming telegrams.
 * @param databaseActor      The Actor that interacts with the database.
 */
class DailyHistoryActor(messageDistributor: ActorRef,
                        databaseActor: ActorRef)
                       (implicit executionContext: ExecutionContext)
  extends FSM[DailyHistoryActor.State, DailyHistoryActor.Data]
    with ActorLogging with AppSettings {

  override def preStart(): Unit = {
    // Subscribe to the telegram stream and arm the first midnight wake-up.
    messageDistributor ! RegisterReceiver
    scheduleNextAwakening()
  }

  startWith(Sleeping, Empty)

  when(Receiving) {
    // First telegram after midnight: persist it as today's reading.
    case Event(TelegramReceived(telegram), _) => prepareMeterReading(telegram)
    case Event(StateTimeout, _)               => stay()
  }

  when(Sleeping) {
    // Telegrams keep arriving all day; ignore them while asleep.
    case Event(_: TelegramReceived, _) => stay()
    case Event(StateTimeout, _)        => log.debug("Awaking to receive new meter reading"); goto(Receiving)
  }

  initialize()

  /** Persists the telegram's totals as today's reading and re-arms the timer. */
  private def prepareMeterReading(telegram: P1Telegram) = {
    val today = LocalDate.now()
    // Sum the counters of all attached gas meters; non-gas devices contribute 0.
    val gas = telegram.data.devices.map({
      case P1GasMeter(_, _, _, gasDelivered) => gasDelivered
      case _                                 => BigDecimal.apply(0)
    }).sum
    val electricityNormal = telegram.data.totalConsumption(P1Constants.normalTariff)
    val electricityLow = telegram.data.totalConsumption(P1Constants.lowTariff)

    log.info("Scheduling database I/O")
    databaseActor ! StoreMeterReading(HistoricalMeterReading(today, gas, electricityNormal, electricityLow))

    scheduleNextAwakening()
    goto(Sleeping) using Empty
  }

  /**
   * Arms a one-shot timer that fires at the next midnight.
   *
   * Fix: this previously used `startTimerAtFixedRate`, which repeats with a
   * period equal to "time until midnight at scheduling time" — an actor
   * started at 23:50 would then receive a StateTimeout every 10 minutes. A
   * single-shot timer matches the intent, since the timer is explicitly
   * re-armed after every stored reading.
   */
  private def scheduleNextAwakening(): Unit = {
    val midnight = LocalDate.now().plusDays(1).atStartOfDay()
    val untilMidnight = Duration.between(LocalDateTime.now(), midnight)
    log.info("Sleeping for {} milliseconds", untilMidnight.toMillis)
    startSingleTimer("wake-up", StateTimeout, untilMidnight.toMillis.millis)
  }
}
| mthmulders/hyperion | app/src/main/scala/hyperion/DailyHistoryActor.scala | Scala | mit | 2,587 |
package com.whisk.docker
import java.util.concurrent.atomic.AtomicBoolean
import scala.concurrent.{Future, Promise}
/**
 * General-purpose helpers shared by the docker testkit.
 */
package object testkit {

  /** Adds `withOption`: apply `f` only when the optional argument is present. */
  implicit class OptionalOps[A](val content: A) extends AnyVal {
    def withOption[B](optional: Option[B])(f: (A, B) => A): A =
      optional.fold(content)(b => f(content, b))
  }

  /**
   * A promise whose computation may be supplied at most once: the first call
   * to `init` wins, later calls simply return the shared future.
   */
  private[docker] class SinglePromise[T] {
    val promise: Promise[T] = Promise[T]()

    def future: Future[T] = promise.future

    val flag = new AtomicBoolean(false)

    def init(f: => Future[T]): Future[T] = {
      // Only the first caller wins the CAS and supplies the computation.
      if (flag.compareAndSet(false, true)) {
        promise.tryCompleteWith(f)
      }
      future
    }
  }

  private[docker] object SinglePromise {
    def apply[T] = new SinglePromise[T]
  }
}
| whisklabs/docker-it-scala | core/src/main/scala/com/whisk/docker/testkit/package.scala | Scala | mit | 825 |
package ch.ltouroumov.modularmachines.common
import ch.ltouroumov.modularmachines.common.init.{Recipes, Items, Blocks}
import ch.ltouroumov.modularmachines.common.tileentity.utils.SaveHandlerCache
import cpw.mods.fml.common.event.{FMLPostInitializationEvent, FMLInitializationEvent, FMLPreInitializationEvent}
// Mod lifecycle behaviour shared by the client and server proxies;
// subclasses override the hooks they need (e.g. client-side rendering).
abstract class CommonProxy {
  // Pre-init hook: intentionally empty in the common proxy.
  def preInit(evt: FMLPreInitializationEvent) = {
  }
  // Init phase: register all blocks, items and recipes, then any renderers.
  def onInit(evt: FMLInitializationEvent) = {
    Blocks.register()
    Items.register()
    Recipes.register()
    registerRenders()
  }
  // Post-init hook: intentionally empty in the common proxy.
  def postInit(evt: FMLPostInitializationEvent) = {
  }
  // No-op here; presumably overridden by the client proxy — confirm in subclasses.
  def registerRenders() = {}
}
| ltouroumov/modular-machines | src/main/scala/ch/ltouroumov/modularmachines/common/CommonProxy.scala | Scala | gpl-2.0 | 624 |
package views.html.admin
import play.templates._
import play.templates.TemplateMagic._
import play.api.templates._
import play.api.templates.PlayMagic._
import models._
import controllers._
import play.api.i18n._
import play.api.mvc._
import play.api.data._
import views.html._
import org.bson.types.ObjectId
/**/
object updateUser extends BaseScalaTemplate[play.api.templates.Html,Format[play.api.templates.Html]](play.api.templates.HtmlFormat) with play.api.templates.Template3[String,Form[models.UUser],String,play.api.templates.Html] {
/**/
def apply/*1.2*/(message: String)(userForm: Form[models.UUser])(uId: String):play.api.templates.Html = {
_display_ {import helper._
import helper.twitterBootstrap._
Seq[Any](format.raw/*1.62*/("""
"""),format.raw/*5.1*/("""
"""),_display_(Seq[Any](/*6.2*/main(message)/*6.15*/ {_display_(Seq[Any](format.raw/*6.17*/("""
"""),_display_(Seq[Any](/*8.3*/adminNavbar("nav4"))),format.raw/*8.22*/("""
<div class="form-sub">
"""),_display_(Seq[Any](/*10.6*/helper/*10.12*/.form(action = routes.Admin.updateU)/*10.48*/ {_display_(Seq[Any](format.raw/*10.50*/("""
<fieldset>
<legend>"""),_display_(Seq[Any](/*12.12*/message)),format.raw/*12.19*/("""</legend>
<input type="hidden" name="id" value=""""),_display_(Seq[Any](/*13.43*/uId)),format.raw/*13.46*/("""" />
"""),_display_(Seq[Any](/*15.4*/inputText(
userForm("login_id"),
'_label -> "登录名",
'_help -> "请输入登录名"
))),format.raw/*19.4*/("""
"""),_display_(Seq[Any](/*20.4*/inputText(
userForm("name"),
'_label -> "姓名",
'_help -> "请填写真实姓名"
))),format.raw/*24.4*/("""
"""),_display_(Seq[Any](/*25.4*/select(
userForm("authority"),
options = options(List("普通用户", "管理员")),
'_label -> "权限",
'_help -> "请选择权限"
))),format.raw/*30.4*/("""
</fieldset>
<div class="actions">
<input type="submit" value="修改" class="btn btn-primary" />
<a class="btn" href="javascript:;" onClick="javascript:history.back(-1)">返回</a>
</div>
""")))})),format.raw/*36.3*/("""
</div>
<script type="text/javascript">
$(document).ready(function()"""),format.raw/*39.31*/("""{"""),format.raw/*39.32*/("""
$('#login_id').focus()
"""),format.raw/*41.3*/("""}"""),format.raw/*41.4*/(""")
</script>
""")))})),format.raw/*43.2*/("""
"""))}
}
def render(message:String,userForm:Form[models.UUser],uId:String): play.api.templates.Html = apply(message)(userForm)(uId)
def f:((String) => (Form[models.UUser]) => (String) => play.api.templates.Html) = (message) => (userForm) => (uId) => apply(message)(userForm)(uId)
def ref: this.type = this
}
/*
-- GENERATED --
DATE: Wed Jul 17 08:05:00 CST 2013
SOURCE: /opt/dacOrder.git/app/views/admin/updateUser.scala.html
HASH: 95d07662392b09566de8d5ea9a840ae4a32d7f13
MATRIX: 573->1|760->61|788->114|824->116|845->129|884->131|922->135|962->154|1027->184|1042->190|1087->226|1127->228|1190->255|1219->262|1307->314|1332->317|1376->326|1481->410|1520->414|1622->495|1661->499|1803->620|2024->810|2127->885|2156->886|2212->915|2240->916|2285->930
LINES: 20->1|26->1|28->5|29->6|29->6|29->6|31->8|31->8|33->10|33->10|33->10|33->10|35->12|35->12|36->13|36->13|38->15|42->19|43->20|47->24|48->25|53->30|59->36|62->39|62->39|64->41|64->41|66->43
-- GENERATED --
*/
| kandole/simple_reservation | target/scala-2.10/src_managed/main/views/html/admin/updateUser.template.scala | Scala | gpl-2.0 | 3,542 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive
import java.io.IOException
import java.util.Locale
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.planning._
import org.apache.spark.sql.catalyst.plans.logical.{InsertIntoDir, InsertIntoStatement, LogicalPlan, ScriptTransformation, Statistics}
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.execution._
import org.apache.spark.sql.execution.command.{CreateTableCommand, DDLUtils}
import org.apache.spark.sql.execution.datasources.{CreateTable, DataSourceStrategy}
import org.apache.spark.sql.hive.execution._
import org.apache.spark.sql.hive.execution.HiveScriptTransformationExec
import org.apache.spark.sql.internal.HiveSerDe
/**
* Determine the database, serde/format and schema of the Hive serde table, according to the storage
* properties.
*/
class ResolveHiveSerdeTable(session: SparkSession) extends Rule[LogicalPlan] {
  // Fills in the serde/input/output format of a Hive table from its storage
  // options, falling back to the session's default Hive storage format.
  private def determineHiveSerde(table: CatalogTable): CatalogTable = {
    if (table.storage.serde.nonEmpty) {
      // Serde already explicitly specified: nothing to resolve.
      table
    } else {
      if (table.bucketSpec.isDefined) {
        throw new AnalysisException("Creating bucketed Hive serde table is not supported yet.")
      }
      val defaultStorage = HiveSerDe.getDefaultStorage(conf)
      val options = new HiveOptions(table.storage.properties)
      // Resolve formats from an explicit fileFormat option first, then from
      // explicit input/output format options, otherwise leave empty.
      val fileStorage = if (options.fileFormat.isDefined) {
        HiveSerDe.sourceToSerDe(options.fileFormat.get) match {
          case Some(s) =>
            CatalogStorageFormat.empty.copy(
              inputFormat = s.inputFormat,
              outputFormat = s.outputFormat,
              serde = s.serde)
          case None =>
            throw new IllegalArgumentException(s"invalid fileFormat: '${options.fileFormat.get}'")
        }
      } else if (options.hasInputOutputFormat) {
        CatalogStorageFormat.empty.copy(
          inputFormat = options.inputFormat,
          outputFormat = options.outputFormat)
      } else {
        CatalogStorageFormat.empty
      }
      val rowStorage = if (options.serde.isDefined) {
        CatalogStorageFormat.empty.copy(serde = options.serde)
      } else {
        CatalogStorageFormat.empty
      }
      // Precedence: explicit option, then file-format-derived, then session default.
      val storage = table.storage.copy(
        inputFormat = fileStorage.inputFormat.orElse(defaultStorage.inputFormat),
        outputFormat = fileStorage.outputFormat.orElse(defaultStorage.outputFormat),
        serde = rowStorage.serde.orElse(fileStorage.serde).orElse(defaultStorage.serde),
        properties = options.serdeProperties)
      table.copy(storage = storage)
    }
  }
  override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
    case c @ CreateTable(t, _, query) if DDLUtils.isHiveTable(t) =>
      // Finds the database name if the name does not exist.
      val dbName = t.identifier.database.getOrElse(session.catalog.currentDatabase)
      val table = t.copy(identifier = t.identifier.copy(database = Some(dbName)))
      // Determines the serde/format of Hive tables
      val withStorage = determineHiveSerde(table)
      // Infers the schema, if empty, because the schema could be determined by Hive
      // serde.
      val withSchema = if (query.isEmpty) {
        val inferred = HiveUtils.inferSchema(withStorage)
        if (inferred.schema.length <= 0) {
          throw new AnalysisException("Unable to infer the schema. " +
            s"The schema specification is required to create the table ${inferred.identifier}.")
        }
        inferred
      } else {
        withStorage
      }
      c.copy(tableDesc = withSchema)
  }
}
// Fills in missing size statistics for Hive relations so the optimizer can
// make join-strategy decisions (e.g. broadcast joins).
class DetermineTableStats(session: SparkSession) extends Rule[LogicalPlan] {
  private def hiveTableWithStats(relation: HiveTableRelation): HiveTableRelation = {
    val table = relation.tableMeta
    val partitionCols = relation.partitionCols
    // For partitioned tables, the partition directory may be outside of the table directory.
    // Which is expensive to get table size. Please see how we implemented it in the AnalyzeTable.
    val sizeInBytes = if (conf.fallBackToHdfsForStatsEnabled && partitionCols.isEmpty) {
      try {
        val hadoopConf = session.sessionState.newHadoopConf()
        val tablePath = new Path(table.location)
        val fs: FileSystem = tablePath.getFileSystem(hadoopConf)
        fs.getContentSummary(tablePath).getLength
      } catch {
        // Filesystem errors degrade gracefully to the configured default size.
        case e: IOException =>
          logWarning("Failed to get table size from HDFS.", e)
          conf.defaultSizeInBytes
      }
    } else {
      conf.defaultSizeInBytes
    }
    val stats = Some(Statistics(sizeInBytes = BigInt(sizeInBytes)))
    relation.copy(tableStats = stats)
  }
  override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
    case relation: HiveTableRelation
      if DDLUtils.isHiveTable(relation.tableMeta) && relation.tableMeta.stats.isEmpty =>
      hiveTableWithStats(relation)

    // handles InsertIntoStatement specially as the table in InsertIntoStatement is not added in its
    // children, hence not matched directly by previous HiveTableRelation case.
    case i @ InsertIntoStatement(relation: HiveTableRelation, _, _, _, _, _)
      if DDLUtils.isHiveTable(relation.tableMeta) && relation.tableMeta.stats.isEmpty =>
      i.copy(table = hiveTableWithStats(relation))
  }
}
/**
* Replaces generic operations with specific variants that are designed to work with Hive.
*
* Note that, this rule must be run after `PreprocessTableCreation` and
* `PreprocessTableInsertion`.
*/
object HiveAnalysis extends Rule[LogicalPlan] {
  override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
    // INSERT into a Hive table -> Hive-specific insert command.
    case InsertIntoStatement(
        r: HiveTableRelation, partSpec, _, query, overwrite, ifPartitionNotExists)
        if DDLUtils.isHiveTable(r.tableMeta) =>
      InsertIntoHiveTable(r.tableMeta, partSpec, query, overwrite,
        ifPartitionNotExists, query.output.map(_.name))
    // CREATE TABLE without a query -> plain Hive table creation.
    case CreateTable(tableDesc, mode, None) if DDLUtils.isHiveTable(tableDesc) =>
      CreateTableCommand(tableDesc, ignoreIfExists = mode == SaveMode.Ignore)
    // CREATE TABLE AS SELECT on a Hive table.
    case CreateTable(tableDesc, mode, Some(query))
        if DDLUtils.isHiveTable(tableDesc) && query.resolved =>
      CreateHiveTableAsSelectCommand(tableDesc, query, query.output.map(_.name), mode)
    // INSERT OVERWRITE [LOCAL] DIRECTORY ... stored as a Hive format.
    case InsertIntoDir(isLocal, storage, provider, child, overwrite)
        if DDLUtils.isHiveTable(provider) && child.resolved =>
      val outputPath = new Path(storage.locationUri.get)
      // Refuse to overwrite a path the query is also reading from.
      if (overwrite) DDLUtils.verifyNotReadPath(child, outputPath)

      InsertIntoHiveDirCommand(isLocal, storage, child, overwrite, child.output.map(_.name))
  }
}
/**
* Relation conversion from metastore relations to data source relations for better performance
*
* - When writing to non-partitioned Hive-serde Parquet/Orc tables
* - When scanning Hive-serde Parquet/ORC tables
*
* This rule must be run before all other DDL post-hoc resolution rules, i.e.
* `PreprocessTableCreation`, `PreprocessTableInsertion`, `DataSourceAnalysis` and `HiveAnalysis`.
*/
case class RelationConversions(
    sessionCatalog: HiveSessionCatalog) extends Rule[LogicalPlan] {
  private def isConvertible(relation: HiveTableRelation): Boolean = {
    isConvertible(relation.tableMeta)
  }
  // Only parquet/orc serdes are eligible for conversion, each gated by its
  // own configuration flag.
  private def isConvertible(tableMeta: CatalogTable): Boolean = {
    val serde = tableMeta.storage.serde.getOrElse("").toLowerCase(Locale.ROOT)
    serde.contains("parquet") && conf.getConf(HiveUtils.CONVERT_METASTORE_PARQUET) ||
      serde.contains("orc") && conf.getConf(HiveUtils.CONVERT_METASTORE_ORC)
  }
  private val metastoreCatalog = sessionCatalog.metastoreCatalog
  override def apply(plan: LogicalPlan): LogicalPlan = {
    plan resolveOperators {
      // Write path
      case InsertIntoStatement(
          r: HiveTableRelation, partition, cols, query, overwrite, ifPartitionNotExists)
          if query.resolved && DDLUtils.isHiveTable(r.tableMeta) &&
            (!r.isPartitioned || conf.getConf(HiveUtils.CONVERT_INSERTING_PARTITIONED_TABLE))
            && isConvertible(r) =>
        InsertIntoStatement(metastoreCatalog.convert(r), partition, cols,
          query, overwrite, ifPartitionNotExists)
      // Read path
      case relation: HiveTableRelation
          if DDLUtils.isHiveTable(relation.tableMeta) && isConvertible(relation) =>
        metastoreCatalog.convert(relation)
      // CTAS
      case CreateTable(tableDesc, mode, Some(query))
          if query.resolved && DDLUtils.isHiveTable(tableDesc) &&
            tableDesc.partitionColumnNames.isEmpty && isConvertible(tableDesc) &&
            conf.getConf(HiveUtils.CONVERT_METASTORE_CTAS) =>
        // validation is required to be done here before relation conversion.
        DDLUtils.checkDataColNames(tableDesc.copy(schema = query.schema))
        OptimizedCreateHiveTableAsSelectCommand(
          tableDesc, query, query.output.map(_.name), mode)
    }
  }
}
// Planner strategies that turn Hive-specific logical operators into physical plans.
private[hive] trait HiveStrategies {
  // Possibly being too clever with types here... or not clever enough.
  self: SparkPlanner =>
  val sparkSession: SparkSession
  // Plans Hive TRANSFORM (script transformation) operators.
  object HiveScripts extends Strategy {
    def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
      case ScriptTransformation(script, output, child, ioschema) =>
        val hiveIoSchema = ScriptTransformationIOSchema(ioschema)
        HiveScriptTransformationExec(script, output, planLater(child), hiveIoSchema) :: Nil
      case _ => Nil
    }
  }
  /**
   * Retrieves data using a HiveTableScan. Partition pruning predicates are also detected and
   * applied.
   */
  object HiveTableScans extends Strategy {
    def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
      case ScanOperation(projectList, filters, relation: HiveTableRelation) =>
        // Filter out all predicates that only deal with partition keys, these are given to the
        // hive table scan operator to be used for partition pruning.
        val partitionKeyIds = AttributeSet(relation.partitionCols)
        // Only deterministic predicates may be pushed down for pruning.
        val normalizedFilters = DataSourceStrategy.normalizeExprs(
          filters.filter(_.deterministic), relation.output)
        val partitionKeyFilters = DataSourceStrategy.getPushedDownFilters(relation.partitionCols,
          normalizedFilters)

        // Keep non-partition-key predicates as a post-scan Filter.
        pruneFilterProject(
          projectList,
          filters.filter(f => f.references.isEmpty || !f.references.subsetOf(partitionKeyIds)),
          identity[Seq[Expression]],
          HiveTableScanExec(_, relation, partitionKeyFilters.toSeq)(sparkSession)) :: Nil
      case _ =>
        Nil
    }
  }
}
| chuckchen/spark | sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala | Scala | apache-2.0 | 11,512 |
package de.unihamburg.vsis.sddf.visualisation.model
import org.apache.spark.rdd.RDD
import de.unihamburg.vsis.sddf.reading.SymPair
import de.unihamburg.vsis.sddf.reading.Tuple
// Collects metrics about an indexing (blocking) step: the candidate pair set
// it produced and the tuple corpus it was derived from.
class IndexingModel extends BasicAnalysable {

  var _pairs: Option[RDD[SymPair[Tuple]]] = None

  /** The candidate pairs produced by the indexer; fails fast if unset. */
  def pairs = _pairs.getOrElse(throw new Exception("Pairs not defined"))

  def pairs_=(pairs: RDD[SymPair[Tuple]]) = _pairs = Option(pairs)

  var _corpus: Option[RDD[Tuple]] = None

  /** The full tuple corpus; fails fast if unset. */
  def corpus = _corpus.getOrElse(throw new Exception("Corpus not defined"))

  def corpus_=(corpus: RDD[Tuple]) = _corpus = Option(corpus)

  /** Number of candidate pairs remaining after indexing. */
  lazy val reducedSearchSpace = pairs.count

  /** Number of pairs a naive all-pairs comparison would inspect: n*(n-1)/2. */
  lazy val naiveSearchSpace = {
    val n = corpus.count
    (n * n - n) / 2
  }

  /** Fraction of the naive search space eliminated by indexing. */
  lazy val searchSpaceReductionRatio: Double =
    1 - reducedSearchSpace / naiveSearchSpace.toDouble

  /**
   * Magnitude-style measure of the reduction factor: one unit per full decade
   * of reduction, plus the leftover fraction of the final decade.
   */
  lazy val searchSpaceReductionMagnitude: Double = {
    @annotation.tailrec
    def decades(factor: Double, acc: Double): Double =
      if (factor <= 1) acc
      else {
        val next = factor / 10.0
        decades(next, if (next <= 1) acc + next else acc + 1)
      }
    decades(naiveSearchSpace.toDouble / reducedSearchSpace, 0.0)
  }
}
| numbnut/sddf | src/main/scala/de/unihamburg/vsis/sddf/visualisation/model/IndexingModel.scala | Scala | gpl-3.0 | 1,322 |
package nfn.localAbstractMachine
import akka.actor._
import akka.event.Logging
import ccn.packet._
import lambdacalculus._
import lambdacalculus.machine.{MachineValue, ListMachineValue, ConstMachineValue}
import scala.util.{Success, Failure, Try}
import nfn.NFNApi
/**
* Encapsulates a single abstract machine, reduces lambdacalculus expressions and sends requests for content back to the [[ccnServer]].
* @param ccnServer
*/
class LocalAbstractMachineWorker(ccnServer: ActorRef) extends Actor {
  import context.dispatcher
  val logger = Logging(context.system, this)
  // Call-by-value lambda-calculus machine; content lookups from within a
  // computation are delegated back to the CCN server via LocalNFNCallExecutor.
  val lc = LambdaCalculus(execOrder = ExecutionOrder.CallByValue,
                          debug = true,
                          storeIntermediateSteps = true,
                          maybeExecutor = Some(LocalNFNCallExecutor(ccnServer)),
                          parser = new NFNLambdaParser)
  override def receive: Actor.Receive = {
    case content: Content => {
      logger.debug(s"Abstract machine received content $content")
      handleContent(content, sender)
    }
    case interest: Interest =>
      logger.debug(s"Abstract machine received interest $interest")
      handleInterest(interest, sender)
  }
  // Incoming content is not used by this worker; log and drop it.
  private def handleContent(content: Content, senderCopy: ActorRef) = {
    logger.warning(s"Discarding content $content")
  }
  // Evaluates the NFN expression encoded in the interest name and replies to
  // senderCopy with the resulting Content; on failure only an error is logged
  // (no response is sent).
  private def handleInterest(interest: Interest, senderCopy: ActorRef) = {
    // Renders a machine result as the string payload of the reply.
    def computeResultToContent(computeResult: MachineValue): String = computeResult match {
      case ConstMachineValue(n, _) => n.toString
      case ListMachineValue(values, _) => (values map { computeResultToContent }).mkString(" ")
      case r@_ => throw new Exception(s"Result is only implemented for type ConstValue and not $r")
    }
    // Runs the full pipeline (substitute, parse, compile, execute) and wraps
    // the single expected result into a Content packet named like the interest.
    def tryComputeContentForExpr(expr: String): Try[Content] = {
      lc.substituteParseCompileExecute(expr) map {
        case List(result: MachineValue) => {
          val resultString = computeResultToContent(result)
          Content(interest.name, resultString.getBytes, MetaInfo.empty)
        }
        case results@_ => throw new Exception(s"Local abstract machine: Result of execution contains more or less than one element: $results")
      }
    }
    def handleNFNRequest(interest: Interest) = {
      def tryComputeResultContent: Try[Content] = {
        // TODO this only works if the expression is in a single name and not split
        interest.name.cmps match {
          case Seq(lambdaExpr, "NFN") => tryComputeContentForExpr(lambdaExpr)
          case Seq(postCmp, preExpr, "NFN") => tryComputeContentForExpr(s"$preExpr $postCmp")
          case _ => throw new Exception(s"Local abstract machine can only parse compute requests " +
            s"with the form <lambda expr><NFN> or <postcmp><lambdaexpr><NFN> and not $interest")
        }
      }
      tryComputeResultContent match {
        case Success(content) => {
          logger.info(s"Computed content $content")
          senderCopy ! content
        }
        case Failure(e) => logger.error(e, s"Could not compute the result for the interest $interest")
      }
    }
    handleNFNRequest(interest)
  }
}
| cn-uofbasel/nfn-scala | src/main/scala/nfn/localAbstractMachine/LocalAbstractMachineWorker.scala | Scala | isc | 3,269 |
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.core.javac
import akka.event.slf4j.SLF4JLogging
import com.sun.source.tree.Tree
import javax.lang.model.element.Element
import com.sun.tools.javac.tree.JCTree
import com.sun.tools.javac.tree.JCTree._
/**
* This trait provides behavior missing from jdk 1.6, 1.7, see:
* http://hg.openjdk.java.net/jdk8/jdk8/langtools/rev/8dd528992c15 and
* sadly depends on unsupported implementation classes,
* (com.sun.tools.javac.*). As a result, this may stop working on on
* Java 9. At that point we can either:
* 1) Take the long way around and find Elements ourselves by searching the
* scope/target for the selected name (see my older version of JavaDocFinding).
* 2) Use reflection here.
* 3) Convince Java 9 to let us import these unsafe libs.
*
* -aemon
*/
trait UnsafeHelpers extends SLF4JLogging {

  /**
   * Best-effort extraction of the compiler symbol ([[Element]]) backing a javac
   * AST node, via the internal com.sun.tools.javac tree classes. Falls back to
   * the node's attributed type symbol for other JCTree nodes, and None when no
   * symbol is reachable. Case order matters: the generic JCTree case must stay
   * last since all the specific cases are JCTree subclasses.
   */
  protected def unsafeGetElement(t: Tree): Option[Element] = t match {
    case unit: JCCompilationUnit => Some(unit.packge)
    case classDecl: JCClassDecl => Some(classDecl.sym)
    case methodDecl: JCMethodDecl => Some(methodDecl.sym)
    case varDecl: JCVariableDecl => Some(varDecl.sym)
    case ident: JCIdent => Some(ident.sym)
    case fieldAccess: JCFieldAccess => Some(fieldAccess.sym)
    case newClass: JCNewClass => Some(newClass.constructor)
    case invocation: JCMethodInvocation => unsafeGetElement(invocation.meth)
    case typeApply: JCTypeApply => unsafeGetElement(typeApply.clazz)
    case tree: JCTree => Option(tree.`type`).map(_.tsym)
    case _ => None
  }
}
| espinhogr/ensime-server | core/src/main/scala/org/ensime/core/javac/UnsafeHelpers.scala | Scala | gpl-3.0 | 1,544 |
package com.twitter.finagle.mux
import com.twitter.finagle.Mux.param.MaxFrameSize
import com.twitter.conversions.StorageUnitOps._
import com.twitter.finagle.Mux
// Runs the shared mux end-to-end suite against the push-based client and
// server implementations with default configuration.
class PushToPushMuxEndToEndTest extends AbstractEndToEndTest {
  override type ClientT = Mux.Client
  override type ServerT = Mux.Server
  def implName: String = "push-based"
  def clientImpl() = Mux.client
  def serverImpl() = Mux.server
}
// Same end-to-end suite, but with a tiny max frame size on both sides so that
// messages must be fragmented and reassembled by the mux transport.
class FragmentingPushMuxEndToEndTest extends AbstractEndToEndTest {
  override type ClientT = Mux.Client
  override type ServerT = Mux.Server
  // NOTE(review): implName is identical to PushToPushMuxEndToEndTest's — if
  // implName feeds into generated test names, the two suites may be hard to
  // tell apart in reports; confirm and consider a distinct name.
  def implName: String = "push-based"
  def clientImpl() = Mux.client.configured(MaxFrameSize(5.bytes))
  def serverImpl() = Mux.server.configured(MaxFrameSize(5.bytes))
}
| luciferous/finagle | finagle-mux/src/test/scala/com/twitter/finagle/mux/MuxEndToEndTest.scala | Scala | apache-2.0 | 719 |
/**
* Copyright 2009 Barry Kaplan
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**/
package com.github.nscala_time.time
import org.joda.time._
import com.github.nscala_time.PimpedType
/**
 * Enriches joda-time's immutable [[LocalTime]] with operator syntax for
 * period arithmetic and Scala-friendly accessor/wither names. Every method
 * delegates to the underlying value and returns a new instance.
 */
class RichLocalTime(val underlying: LocalTime) extends Super with PimpedType[LocalTime] {
  /** Subtracts a period. */
  def -(period: ReadablePeriod): LocalTime = underlying.minus(period)
  /** Subtracts the period described by a [[DurationBuilder]]. */
  def -(builder: DurationBuilder): LocalTime = underlying.minus(builder.underlying)
  /** Adds a period. */
  def +(period: ReadablePeriod): LocalTime = underlying.plus(period)
  /** Adds the period described by a [[DurationBuilder]]. */
  def +(builder: DurationBuilder): LocalTime = underlying.plus(builder.underlying)
  /** The second-of-minute property. */
  def second: LocalTime.Property = underlying.secondOfMinute
  /** The minute-of-hour property. */
  def minute: LocalTime.Property = underlying.minuteOfHour
  /** The hour-of-day property. */
  def hour: LocalTime.Property = underlying.hourOfDay
  /** Copy with the second-of-minute replaced. */
  def withSecond(second: Int): LocalTime = underlying.withSecondOfMinute(second)
  /** Copy with the minute-of-hour replaced. */
  def withMinute(minute: Int): LocalTime = underlying.withMinuteOfHour(minute)
  /** Copy with the hour-of-day replaced. */
  def withHour(hour: Int): LocalTime = underlying.withHourOfDay(hour)
}
| tkawachi/nscala-time | src/main/scala/com/github/nscala_time/time/RichLocalTime.scala | Scala | apache-2.0 | 1,480 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.logical.{Filter, LogicalPlan}
import org.apache.spark.sql.types._
/**
 * An interface for expressions that contain a [[QueryPlan]].
 */
abstract class PlanExpression[T <: QueryPlan[_]] extends Expression {
  /** The id of the subquery expression. */
  def exprId: ExprId
  /** The plan being wrapped in the query. */
  def plan: T
  /** Updates the expression with a new plan. */
  def withNewPlan(plan: T): PlanExpression[T]
  // Renders this expression's children (the outer references) for toString
  // output of the concrete subclasses.
  protected def conditionString: String = children.mkString("[", " && ", "]")
}
/**
 * A base class for expressions that embed a [[LogicalPlan]]. The `children`
 * expressions hold the outer-reference attributes the subquery is correlated on.
 */
abstract class SubqueryExpression(
    plan: LogicalPlan,
    children: Seq[Expression],
    exprId: ExprId) extends PlanExpression[LogicalPlan] {
  // Resolved once both the outer attributes and the embedded plan are resolved.
  override lazy val resolved: Boolean = childrenResolved && plan.resolved
  // Attributes produced by the subquery plan itself are not references of this
  // expression; only genuinely outer attributes remain once the plan resolves.
  override lazy val references: AttributeSet =
    if (plan.resolved) super.references -- plan.outputSet else super.references
  override def withNewPlan(plan: LogicalPlan): SubqueryExpression
  // Two subquery expressions are semantically equal when they are the same
  // concrete class, their plans produce the same result, and their outer
  // references match pairwise.
  override def semanticEquals(o: Expression): Boolean = o match {
    case that: SubqueryExpression =>
      this.getClass.getName == that.getClass.getName &&
        plan.sameResult(that.plan) &&
        children.length == that.children.length &&
        children.zip(that.children).forall { case (left, right) => left.semanticEquals(right) }
    case _ => false
  }
}
object SubqueryExpression {
  /**
   * Returns true when an expression contains an IN or EXISTS subquery and false otherwise.
   */
  def hasInOrExistsSubquery(e: Expression): Boolean = {
    val firstMatch = e.find {
      case _: ListQuery => true
      case _: Exists => true
      case _ => false
    }
    firstMatch.isDefined
  }
  /**
   * Returns true when an expression contains a subquery that has outer reference(s). The outer
   * reference attributes are kept as children of subquery expression by
   * [[org.apache.spark.sql.catalyst.analysis.Analyzer.ResolveSubquery]]
   */
  def hasCorrelatedSubquery(e: Expression): Boolean = {
    val firstMatch = e.find {
      case s: SubqueryExpression => s.children.nonEmpty
      case _ => false
    }
    firstMatch.isDefined
  }
}
// Helper utilities for analyzing and rewriting correlated subquery predicates.
object SubExprUtils extends PredicateHelper {
  /**
   * Returns true when an expression contains correlated predicates i.e outer references and
   * returns false otherwise.
   */
  def containsOuter(e: Expression): Boolean = {
    e.find(_.isInstanceOf[OuterReference]).isDefined
  }
  /**
   * Returns whether there are any null-aware predicate subqueries inside Not. If not, we could
   * turn the null-aware predicate into not-null-aware predicate.
   */
  def hasNullAwarePredicateWithinNot(condition: Expression): Boolean = {
    splitConjunctivePredicates(condition).exists {
      // Top-level EXISTS / NOT EXISTS and IN / NOT IN are handled explicitly
      // elsewhere, so they don't count here.
      case _: Exists | Not(_: Exists) => false
      case In(_, Seq(_: ListQuery)) | Not(In(_, Seq(_: ListQuery))) => false
      case e => e.find { x =>
        // NOTE(review): the inner search runs over `e` (the whole conjunct)
        // rather than `x` (the Not node just found), so any Not combined with
        // any IN-subquery anywhere in the conjunct triggers a match — confirm
        // this is the intended (conservative) behavior.
        x.isInstanceOf[Not] && e.find {
          case In(_, Seq(_: ListQuery)) => true
          case _ => false
        }.isDefined
      }.isDefined
    }
  }
  /**
   * Returns an expression after removing the OuterReference shell.
   */
  def stripOuterReference(e: Expression): Expression = e.transform { case OuterReference(r) => r }
  /**
   * Returns the list of expressions after removing the OuterReference shell from each of
   * the expression.
   */
  def stripOuterReferences(e: Seq[Expression]): Seq[Expression] = e.map(stripOuterReference)
  /**
   * Returns the logical plan after removing the OuterReference shell from all the expressions
   * of the input logical plan.
   */
  def stripOuterReferences(p: LogicalPlan): LogicalPlan = {
    p.transformAllExpressions {
      case OuterReference(a) => a
    }
  }
  /**
   * Given a logical plan, returns TRUE if it has an outer reference and false otherwise.
   */
  def hasOuterReferences(plan: LogicalPlan): Boolean = {
    // Only Filter conditions are inspected; other operators cannot host outer
    // references at this point (see getOuterReferences below).
    plan.find {
      case f: Filter => containsOuter(f.condition)
      case other => false
    }.isDefined
  }
  /**
   * Given a list of expressions, returns the expressions which have outer references. Aggregate
   * expressions are treated in a special way. If the children of aggregate expression contains an
   * outer reference, then the entire aggregate expression is marked as an outer reference.
   * Example (SQL):
   * {{{
   *   SELECT a FROM l GROUP by 1 HAVING EXISTS (SELECT 1 FROM r WHERE d < min(b))
   * }}}
   * In the above case, we want to mark the entire min(b) as an outer reference
   * OuterReference(min(b)) instead of min(OuterReference(b)).
   * TODO: Currently we don't allow deep correlation. Also, we don't allow mixing of
   * outer references and local references under an aggregate expression.
   * For example (SQL):
   * {{{
   *   SELECT .. FROM p1
   *   WHERE EXISTS (SELECT ...
   *                 FROM p2
   *                 WHERE EXISTS (SELECT ...
   *                               FROM sq
   *                               WHERE min(p1.a + p2.b) = sq.c))
   *
   *   SELECT .. FROM p1
   *   WHERE EXISTS (SELECT ...
   *                 FROM p2
   *                 WHERE EXISTS (SELECT ...
   *                               FROM sq
   *                               WHERE min(p1.a) + max(p2.b) = sq.c))
   *
   *   SELECT .. FROM p1
   *   WHERE EXISTS (SELECT ...
   *                 FROM p2
   *                 WHERE EXISTS (SELECT ...
   *                               FROM sq
   *                               WHERE min(p1.a + sq.c) > 1))
   * }}}
   * The code below needs to change when we support the above cases.
   */
  def getOuterReferences(conditions: Seq[Expression]): Seq[Expression] = {
    // Collected via side effect while transforming each condition top-down.
    val outerExpressions = ArrayBuffer.empty[Expression]
    conditions foreach { expr =>
      expr transformDown {
        // An aggregate whose leaves are ALL outer references is captured as one
        // unit (stripped of its OuterReference shells) — see the scaladoc above.
        case a: AggregateExpression if a.collectLeaves.forall(_.isInstanceOf[OuterReference]) =>
          val newExpr = stripOuterReference(a)
          outerExpressions += newExpr
          newExpr
        case OuterReference(e) =>
          outerExpressions += e
          e
      }
    }
    outerExpressions
  }
  /**
   * Returns all the expressions that have outer references from a logical plan. Currently only
   * Filter operator can host outer references.
   */
  def getOuterReferences(plan: LogicalPlan): Seq[Expression] = {
    val conditions = plan.collect { case Filter(cond, _) => cond }
    getOuterReferences(conditions)
  }
  /**
   * Returns the correlated predicates from a logical plan. The OuterReference wrapper
   * is removed before returning the predicate to the caller.
   */
  def getCorrelatedPredicates(plan: LogicalPlan): Seq[Expression] = {
    val conditions = plan.collect { case Filter(cond, _) => cond }
    conditions.flatMap { e =>
      // Keep only the conjuncts that reference the outer query.
      val (correlated, _) = splitConjunctivePredicates(e).partition(containsOuter)
      stripOuterReferences(correlated) match {
        case Nil => None
        case xs => xs
      }
    }
  }
}
/**
 * A subquery that will return only one row and one column. This will be converted into a physical
 * scalar subquery during planning.
 *
 * Note: `exprId` is used to have a unique name in explain string output.
 */
case class ScalarSubquery(
    plan: LogicalPlan,
    children: Seq[Expression] = Seq.empty,
    exprId: ExprId = NamedExpression.newExprId)
  extends SubqueryExpression(plan, children, exprId) with Unevaluable {
  // The scalar's type is the type of the plan's single output column.
  override def dataType: DataType = plan.schema.fields.head.dataType
  // Always nullable — presumably because an empty subquery result yields NULL.
  override def nullable: Boolean = true
  override def withNewPlan(plan: LogicalPlan): ScalarSubquery = copy(plan = plan)
  override def toString: String = s"scalar-subquery#${exprId.id} $conditionString"
}
object ScalarSubquery {
  /** Whether `e` contains a scalar subquery that still carries outer-reference children. */
  def hasCorrelatedScalarSubquery(e: Expression): Boolean = {
    val correlated = e.find {
      case s: ScalarSubquery => s.children.nonEmpty
      case _ => false
    }
    correlated.isDefined
  }
}
/**
 * A [[ListQuery]] expression defines the query which we want to search in an IN subquery
 * expression. It should and can only be used in conjunction with an IN expression.
 *
 * For example (SQL):
 * {{{
 *   SELECT  *
 *   FROM    a
 *   WHERE   a.id IN (SELECT  id
 *                    FROM    b)
 * }}}
 */
case class ListQuery(
    plan: LogicalPlan,
    children: Seq[Expression] = Seq.empty,
    exprId: ExprId = NamedExpression.newExprId)
  extends SubqueryExpression(plan, children, exprId) with Unevaluable {
  // The type of the single column the IN expression searches over.
  override def dataType: DataType = plan.schema.fields.head.dataType
  // NOTE(review): nullable is hard-coded to false even when the subquery's
  // column may be nullable — confirm IN null-awareness is handled downstream.
  override def nullable: Boolean = false
  override def withNewPlan(plan: LogicalPlan): ListQuery = copy(plan = plan)
  override def toString: String = s"list#${exprId.id} $conditionString"
}
/**
 * The [[Exists]] expression checks if a row exists in a subquery given some correlated condition.
 *
 * For example (SQL):
 * {{{
 *   SELECT  *
 *   FROM    a
 *   WHERE   EXISTS (SELECT  *
 *                   FROM    b
 *                   WHERE   b.id = a.id)
 * }}}
 */
case class Exists(
    plan: LogicalPlan,
    children: Seq[Expression] = Seq.empty,
    exprId: ExprId = NamedExpression.newExprId)
  extends SubqueryExpression(plan, children, exprId) with Predicate with Unevaluable {
  // EXISTS itself never evaluates to NULL, only true/false.
  override def nullable: Boolean = false
  override def withNewPlan(plan: LogicalPlan): Exists = copy(plan = plan)
  override def toString: String = s"exists#${exprId.id} $conditionString"
}
| jianran/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/subquery.scala | Scala | apache-2.0 | 10,289 |
package mesosphere.marathon
import java.util.concurrent.atomic.AtomicBoolean
import java.util.{ Timer, TimerTask }
import akka.actor.{ ActorRef, ActorSystem }
import akka.event.EventStream
import akka.testkit.{ TestKit, TestProbe }
import com.codahale.metrics.MetricRegistry
import com.twitter.common.base.ExceptionalCommand
import com.twitter.common.zookeeper.Group.JoinException
import com.twitter.common.zookeeper.{ Candidate, Group }
import mesosphere.chaos.http.HttpConf
import mesosphere.marathon.Protos.StorageVersion
import mesosphere.marathon.core.leadership.LeadershipCoordinator
import mesosphere.marathon.health.HealthCheckManager
import mesosphere.marathon.metrics.Metrics
import mesosphere.marathon.state.{ AppRepository, MarathonStore, Migration }
import mesosphere.marathon.tasks.TaskTracker
import mesosphere.util.state.memory.InMemoryStore
import mesosphere.util.state.{ FrameworkId, FrameworkIdUtil }
import org.apache.mesos.{ Protos => mesos, SchedulerDriver }
import org.mockito.Matchers.{ any, eq => mockEq }
import org.mockito.Mockito
import org.mockito.Mockito.{ times, verify, when }
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.rogach.scallop.ScallopOption
import org.scalatest.{ BeforeAndAfterAll, Matchers }
import scala.concurrent.Future
import scala.concurrent.duration._
object MarathonSchedulerServiceTest {
  import Mockito.mock
  // Timing values that the mocked MarathonConf (below) hands back to the
  // service under test; the tests verify timer scheduling against them.
  val ReconciliationDelay = 5000L
  val ReconciliationInterval = 5000L
  val ScaleAppsDelay = 4000L
  val ScaleAppsInterval = 4000L
  val MaxActorStartupTime = 5000L
  val OnElectedPrepareTimeout = 3 * 60 * 1000L
  // Builds a MarathonConf mock whose timing-related accessors return the
  // constants above wrapped as ScallopOptions.
  def mockConfig = {
    val config = mock(classOf[MarathonConf])
    when(config.reconciliationInitialDelay).thenReturn(scallopOption(Some(ReconciliationDelay)))
    when(config.reconciliationInterval).thenReturn(scallopOption(Some(ReconciliationInterval)))
    when(config.scaleAppsInitialDelay).thenReturn(scallopOption(Some(ScaleAppsDelay)))
    when(config.scaleAppsInterval).thenReturn(scallopOption(Some(ScaleAppsInterval)))
    when(config.zkTimeoutDuration).thenReturn(1.second)
    when(config.maxActorStartupTime).thenReturn(scallopOption(Some(MaxActorStartupTime)))
    when(config.onElectedPrepareTimeout).thenReturn(scallopOption(Some(OnElectedPrepareTimeout)))
    config
  }
  // Wraps a plain Option in a ScallopOption so it can be returned from the
  // mocked config accessors above.
  def scallopOption[A](a: Option[A]): ScallopOption[A] = {
    new ScallopOption[A]("") {
      override def get = a
      override def apply() = a.get
    }
  }
}
/**
 * Unit tests for [[MarathonSchedulerService]] leadership/timer behavior.
 * All collaborators are Mockito mocks recreated in `before`; the service is
 * built via `createSchedulerService`, which stubs out `runDriver` so no real
 * Mesos driver is ever started.
 */
class MarathonSchedulerServiceTest
    extends TestKit(ActorSystem("System"))
    with MarathonSpec
    with BeforeAndAfterAll
    with Matchers {
  import MarathonSchedulerServiceTest._
  import system.dispatcher

  private[this] var probe: TestProbe = _
  private[this] var leadershipCoordinator: LeadershipCoordinator = _
  private[this] var healthCheckManager: HealthCheckManager = _
  private[this] var candidate: Option[Candidate] = _
  private[this] var config: MarathonConf = _
  private[this] var httpConfig: HttpConf = _
  private[this] var frameworkIdUtil: FrameworkIdUtil = _
  private[this] var leader: AtomicBoolean = _
  private[this] var appRepository: AppRepository = _
  private[this] var taskTracker: TaskTracker = _
  private[this] var scheduler: MarathonScheduler = _
  private[this] var migration: Migration = _
  private[this] var schedulerActor: ActorRef = _
  private[this] var events: EventStream = _

  before {
    probe = TestProbe()
    leadershipCoordinator = mock[LeadershipCoordinator]
    healthCheckManager = mock[HealthCheckManager]
    candidate = mock[Option[Candidate]]
    config = mockConfig
    httpConfig = mock[HttpConf]
    frameworkIdUtil = mock[FrameworkIdUtil]
    leader = mock[AtomicBoolean]
    appRepository = mock[AppRepository]
    taskTracker = mock[TaskTracker]
    scheduler = mock[MarathonScheduler]
    migration = mock[Migration]
    schedulerActor = probe.ref
    events = new EventStream()
  }

  /** Wraps a by-name driver in a [[SchedulerDriverFactory]]. (The previously
    * declared type parameter was unused and has been dropped.) */
  def driverFactory(provide: => SchedulerDriver): SchedulerDriverFactory = {
    new SchedulerDriverFactory {
      override def createDriver(): SchedulerDriver = provide
    }
  }

  /**
   * Builds a MarathonSchedulerService wired to the mocks created in `before`.
   * `runDriver` is stubbed to a no-op; when `timerOverride` is given,
   * `newTimer()` always returns that timer instead of creating a real one.
   * Note: the mock fields (candidate, frameworkIdUtil, ...) are read at call
   * time, so tests may reassign them before invoking this helper.
   */
  private[this] def createSchedulerService(
    factory: SchedulerDriverFactory,
    timerOverride: Option[Timer] = None): MarathonSchedulerService = {
    new MarathonSchedulerService(
      leadershipCoordinator,
      healthCheckManager,
      candidate,
      config,
      frameworkIdUtil,
      leader,
      appRepository,
      taskTracker,
      factory,
      system,
      migration,
      schedulerActor,
      events
    ) {
      override def runDriver(abdicateCmdOption: Option[ExceptionalCommand[JoinException]]): Unit = ()
      override def newTimer(): Timer = timerOverride.getOrElse(super.newTimer())
    }
  }

  test("Start timer when elected") {
    val mockTimer = mock[Timer]
    when(frameworkIdUtil.fetch()).thenReturn(None)
    val schedulerService = createSchedulerService(driverFactory(mock[SchedulerDriver]))
    schedulerService.timer = mockTimer
    when(leadershipCoordinator.prepareForStart()).thenReturn(Future.successful(()))
    schedulerService.onElected(mock[ExceptionalCommand[Group.JoinException]])
    // Election must schedule both the periodic reconciliation and its one-shot check.
    verify(mockTimer).schedule(any[TimerTask](), mockEq(ReconciliationDelay), mockEq(ReconciliationInterval))
    verify(mockTimer).schedule(any(), mockEq(ReconciliationDelay + ReconciliationInterval))
  }

  test("Cancel timer when defeated") {
    val mockTimer = mock[Timer]
    when(frameworkIdUtil.fetch()).thenReturn(None)
    val schedulerService = createSchedulerService(driverFactory(mock[SchedulerDriver]))
    schedulerService.timer = mockTimer
    schedulerService.onDefeated()
    verify(mockTimer).cancel()
    assert(schedulerService.timer != mockTimer, "Timer should be replaced after leadership defeat")
  }

  test("Re-enable timer when re-elected") {
    val mockTimer = mock[Timer]
    when(frameworkIdUtil.fetch()).thenReturn(None)
    // newTimer() always hands back the mock, so re-election reuses it.
    val schedulerService = createSchedulerService(driverFactory(mock[SchedulerDriver]), Some(mockTimer))
    when(leadershipCoordinator.prepareForStart()).thenReturn(Future.successful(()))
    schedulerService.onElected(mock[ExceptionalCommand[Group.JoinException]])
    schedulerService.onDefeated()
    schedulerService.onElected(mock[ExceptionalCommand[Group.JoinException]])
    // Each election round schedules scaling and reconciliation again.
    verify(mockTimer, times(2)).schedule(any(), mockEq(ScaleAppsDelay), mockEq(ScaleAppsInterval))
    verify(mockTimer, times(2)).schedule(any[TimerTask](), mockEq(ReconciliationDelay), mockEq(ReconciliationInterval))
    verify(mockTimer, times(2)).schedule(any(), mockEq(ReconciliationDelay + ReconciliationInterval))
    verify(mockTimer).cancel()
  }

  test("Always fetch current framework ID") {
    val frameworkId = mesos.FrameworkID.newBuilder.setValue("myId").build()
    val mockTimer = mock[Timer]
    val metrics = new Metrics(new MetricRegistry)
    val store = new MarathonStore[FrameworkId](new InMemoryStore, metrics, () => new FrameworkId(""), "frameworkId:")
    // Use a real FrameworkIdUtil backed by an in-memory store instead of a mock.
    frameworkIdUtil = new FrameworkIdUtil(store, Duration.Inf)
    val schedulerService = createSchedulerService(driverFactory(mock[SchedulerDriver]), Some(mockTimer))
    schedulerService.frameworkId should be(None)
    implicit lazy val timeout = 1.second
    frameworkIdUtil.store(frameworkId)
    awaitAssert(schedulerService.frameworkId should be(Some(frameworkId)))
  }

  test("Abdicate leadership when migration fails and reoffer leadership") {
    when(frameworkIdUtil.fetch()).thenReturn(None)
    candidate = Some(mock[Candidate])
    val schedulerService = createSchedulerService(driverFactory(mock[SchedulerDriver]))
    // use an Answer object here because Mockito's thenThrow does only
    // allow to throw RuntimeExceptions
    when(migration.migrate()).thenAnswer(new Answer[StorageVersion] {
      override def answer(invocation: InvocationOnMock): StorageVersion = {
        import java.util.concurrent.TimeoutException
        throw new TimeoutException("Failed to wait for future within timeout")
      }
    })
    schedulerService.onElected(mock[ExceptionalCommand[Group.JoinException]])
    awaitAssert { verify(candidate.get).offerLeadership(schedulerService) }
    leader.get() should be (false)
  }

  test("Abdicate leadership when the driver creation fails by some exception") {
    when(frameworkIdUtil.fetch()).thenReturn(None)
    candidate = Some(mock[Candidate])
    val factory = mock[SchedulerDriverFactory]
    val schedulerService = createSchedulerService(factory)
    when(leadershipCoordinator.prepareForStart()).thenReturn(Future.successful(()))
    when(factory.createDriver()).thenThrow(new Exception("Some weird exception"))
    schedulerService.onElected(mock[ExceptionalCommand[Group.JoinException]])
    verify(candidate.get, Mockito.timeout(1000)).offerLeadership(schedulerService)
    leader.get() should be (false)
  }

  test("Abdicate leadership when prepareStart throws an exception") {
    when(frameworkIdUtil.fetch()).thenReturn(None)
    candidate = Some(mock[Candidate])
    val factory = mock[SchedulerDriverFactory]
    val schedulerService = createSchedulerService(factory)
    when(leadershipCoordinator.prepareForStart()).thenReturn(Future.failed(new RuntimeException("fail")))
    when(factory.createDriver()).thenReturn(mock[SchedulerDriver])
    schedulerService.onElected(mock[ExceptionalCommand[Group.JoinException]])
    verify(candidate.get, Mockito.timeout(1000)).offerLeadership(schedulerService)
    leader.get() should be (false)
  }
}
| Kosta-Github/marathon | src/test/scala/mesosphere/marathon/MarathonSchedulerServiceTest.scala | Scala | apache-2.0 | 11,487 |
package org.scalajars.core
import org.scalajars.core.ArtifactFileType._
import scalaz._, Scalaz._
import org.scalajars.lib.maven.Model
import scala.xml._
import play.api.libs.Files.TemporaryFile
import play.api.Logger
import play.api.Play.{application, current}
// XML <-> maven Model helpers; all failures are captured in scalaz disjunctions.
object PomUtils {
  // Parses an XML element into a maven Model via scalaxb, capturing exceptions.
  def readModel(xml: Elem): Res[Model] = \\/.fromTryCatch(scalaxb.fromXML[Model](xml))
  // Serializes a maven Model back to XML under the <project> root element.
  def writeModel(model: Model) = scalaxb.toXML(model, "project", org.scalajars.lib.maven.defaultScope)
  // Loads and parses an XML file, capturing exceptions.
  def readXml(file: java.io.File): Res[Elem] = \\/.fromTryCatch(XML.loadFile(file))
}
// Handles publishing of artifact files (POMs, jars, checksums, ...) into the
// store, including authorization, project metadata extraction and indexing.
trait Publisher {
  this: Store with Users =>
  import PomUtils._
  // A file being published; basePath is the path with the file name dropped.
  case class File(rawPath: String, tmpFile: TemporaryFile, fileType: FileType){
    lazy val basePath = rawPath.split('/').dropRight(1).mkString("/")
  }
  // Publishes one uploaded file: authorizes the token against the project,
  // then (for a POM) extracts project/version metadata and stores it, or
  // (for any other artifact file) records the file, and finally uploads the
  // bytes and updates the path index. Runs in the Error \\/ _ monad.
  def publish(token: UserToken, projectName: String, path: Path, tmpFile: TemporaryFile) = for {
    user <- authorize(token, projectName)
    fileType <- getFileType(path)
    res <- fileType match {
      case Pom => for {
        // readXml andThen readModel, composed as Kleisli arrows.
        model <- (Kleisli(readModel) <==< readXml).run(tmpFile.file)
        version <- getVersion(path, fileType, model)
        project = Project(projectName, model.description | "", user.login, version :: Nil)
        _ <- setProject(project, path.base)
        // NOTE(review): the `-` binder below is a legal Scala identifier but
        // almost certainly meant to be `_` like the surrounding lines — confirm.
        - <- saveRecentlyUpdated(project, version)
        _ <- upload(path, tmpFile)
        r <- addPathToIndex(path)
      } yield r
      case _ => for {
        _ <- setArtifactFiles(path.base, ArtifactFiles.forPath(path, fileType))
        _ <- upload(path, tmpFile)
        r <- addPathToIndex(path)
      } yield r
    }
  } yield res
  // Resolves the token to a user and checks project ownership: unknown token
  // fails with UserNotFound; an existing project owned by someone else fails
  // with Unauthorized; otherwise the user is returned (new projects allowed).
  def authorize(token: UserToken, projectName: String): Error \\/ User = (for {
    userOpt <- getUserByToken(token)
    projectOpt <- getProject(projectName)
  } yield (userOpt, projectOpt)) >>= (_ match {
    case (None, _) => UserNotFound.left
    case (Some(user), Some(project)) if project.user != user.login => Unauthorized.left
    case (Some(user), _) => user.right
  })
  // Target directory for uploaded files; `|` is scalaz getOrElse on Option.
  val uploadDir = application.configuration.getString("upload.dir") | "/tmp/scalajars"
  def uploadFile(path: Path) = new java.io.File(new java.io.File(uploadDir), path.str)
  // Moves the temp file into its final location, replacing any previous upload.
  def upload(path: Path, tmpFile: TemporaryFile): Error \\/ Unit = {
    Logger.trace("Uploading " + path)
    tmpFile.moveTo(uploadFile(path), replace = true)
    ().right
  }
  // Adds index entries for the file and every ancestor package of its path.
  def addPathToIndex(path: Path): Error \\/ Unit = for {
    (base, last) <- path.baseAndLast.toRightDisjunction[Error](WrongPath)
    _ <- makeIndexItems(base, last).map(addToIndex).sequence[Res, Unit]
  } yield ()
  // Builds one IndexFile for the leaf plus an IndexPackage per path prefix.
  def makeIndexItems(base: Path, last: String) = {
    val (_, pkg) = ((Path.root, List[IndexItem]()) /: base.parts){ case ((p, xs), x) =>
      (p / x, IndexPackage(x, p) :: xs)
    }
    IndexFile(last, base) :: pkg
  }
  // Derives the artifact version from the path tail
  // (.../groupId/artifactId_scalaVersion/version/fileName), combining it with
  // the dependencies declared in the POM model.
  def getVersion(path: Path, fileType: ArtifactFileType.FileType, model: Model): Error \\/ Version = path.reversed.list match {
    case fileName :: version :: ArtifactId(artifactId, scalaVersion) :: groupId =>
      Version(version, ScalaVersion(scalaVersion, Artifact(artifactId, groupId.reverse.mkString("."), extractDependencies(model), ArtifactFiles.forPath(path, fileType)) :: Nil) :: Nil).right
    case _ => WrongPath.left
  }
  // Flattens the POM's dependency declarations into our Dependency model.
  def extractDependencies(model: Model): List[Dependency] = {
    model.dependencies.map { _.dependency.map { dep =>
      Dependency(dep.groupId, dep.artifactId, dep.version, dep.scope)
    }.flatten }.flatten.toList
  }
  // Determines the artifact file type from the path's file-name suffix.
  def getFileType(path: Path): Error \\/ FileType =
    ArtifactFileType.All.find(t => path.endsWith(t.ending)).toRightDisjunction(UnsupportedFileType)
}
| teamon/scalajars.org | app/lib/core/publish.scala | Scala | mit | 3,653 |
package controllers
import db.DashboardBuildsDao
import io.flow.delta.config.v0.models.Cluster
import io.flow.delta.v0.models.json._
import io.flow.play.controllers.FlowControllerComponents
import play.api.libs.json._
import play.api.mvc._
@javax.inject.Singleton
class DashboardBuilds @javax.inject.Inject() (
  dashboardBuildsDao: DashboardBuildsDao,
  val controllerComponents: ControllerComponents,
  val flowControllerComponents: FlowControllerComponents
) extends BaseIdentifiedRestController {

  /**
   * Lists dashboard builds visible to the authenticated user, optionally
   * filtered by organization and cluster, serialized to JSON.
   */
  def get(
    organization: Option[String],
    cluster: Option[Cluster],
    limit: Long,
    offset: Long
  ) = Identified { request =>
    val builds = dashboardBuildsDao.findAll(
      authorization(request),
      organization = organization,
      cluster = cluster,
      limit = Some(limit),
      offset = offset
    )
    Ok(Json.toJson(builds))
  }
}
| flowcommerce/delta | api/app/controllers/DashboardBuilds.scala | Scala | mit | 896 |
package io.udash.web.guide.views.frontend
import io.udash._
import io.udash.bootstrap.utils.BootstrapStyles
import io.udash.css.CssView
import io.udash.web.commons.components.CodeBlock
import io.udash.web.commons.styles.GlobalStyles
import io.udash.web.guide.components.ForceBootstrap
import io.udash.web.guide.styles.partials.GuideStyles
import io.udash.web.guide.views.References
import io.udash.web.guide.{Context, _}
import org.scalajs.dom
import scala.scalajs.js
import scalatags.JsDom
/** Creates the routing demo view together with a presenter sharing one URL property. */
case object FrontendRoutingViewFactory extends ViewFactory[FrontendRoutingState] {
  override def create(): (View, Presenter[FrontendRoutingState]) = {
    // Shared property: the presenter writes the current URL, the view renders it.
    val currentUrl = Property.blank[String]
    val view = new FrontendRoutingView(currentUrl)
    val presenter = new FrontendRoutingPresenter(currentUrl)
    (view, presenter)
  }
}
/** Mirrors the application's current URL into the bound property on each state change. */
class FrontendRoutingPresenter(url: Property[String]) extends Presenter[FrontendRoutingState] {
  import Context.applicationInstance

  override def handleState(state: FrontendRoutingState): Unit =
    url.set(applicationInstance.currentUrl.value)
}
/**
 * Final view for the "Frontend routing" guide chapter.
 *
 * Renders the chapter as one static scalatags template plus two interactive
 * demos: a set of links whose hrefs are derived from [[FrontendRoutingState]],
 * and an input that calls `applicationInstance.goTo` on each keystroke.
 *
 * @param url property kept up to date with the current URL by the presenter;
 *            the template binds to it reactively via `bind(url)`
 */
class FrontendRoutingView(url: Property[String]) extends FinalView with CssView {
  import Context._
  import JsDom.all._

  override def getTemplate: Modifier = div(
    h2("Routing"),
    p(
      "Modern web applications create user friendly URLs and use them to handle the frontend routing. Udash framework ",
      "provides a convenient routing engine. To use it, create:"
    ),
    ul(GuideStyles.defaultList)(
      li(i("RoutingRegistry"), " - mapping from a URL to ", i("RoutingState"), "."),
      li(i("ViewFactoryRegistry"), " - mapping from ", i("RoutingState"), " to ", i("ViewFactory"), ".")
    ),
    h3("URL"),
    p(
      "The routing support in Udash comes in two flavours (both available in package ",
      i("io.udash.routing"), "):"
    ),
    ul(GuideStyles.defaultList)(
      li(i("WindowUrlFragmentChangeProvider"), " - based on the URL part following the ", b("#"), " sign."),
      li(i("WindowUrlPathChangeProvider"), " - based on the URL path.")
    ),
    p("To get the current URL, you can use the method ", i("currentUrl"), " from your ", i("Application"), " instance."),
    // Demo 1: links that change the application state; the span bound to `url`
    // updates reactively while the input below keeps its content across state changes.
    ForceBootstrap(
      div(GuideStyles.frame, GuideStyles.useBootstrap)(
        p(
          span("The URL of this page is: "),
          span(id := "url-demo-link")(bind(url)), br(), br(),
          span("Click here to change URL: ")
        ),
        a(id := "url-demo-link-apple", href := s"${FrontendRoutingState(Some("apple")).url}")("Apple"), " | ",
        a(id := "url-demo-link-orange", href := s"${FrontendRoutingState(Some("orange")).url}")("Orange"), " | ",
        a(id := "url-demo-link-chocolate", href := s"${FrontendRoutingState(Some("chocolate")).url}")("Chocolate"), " | ",
        a(id := "url-demo-link-pizza", href := s"${FrontendRoutingState(Some("pizza")).url}")("Pizza"),
        br(), br(),
        input(GlobalStyles.inline, BootstrapStyles.Form.control, id := "url-demo-input", placeholder := "Type anything in this field, it should not disappear on a state change...")
      )
    ),
    p(
      i("WindowUrlFragmentChangeProvider"), " is a default routing mechanism. If you want to use ",
      i("WindowUrlPathChangeProvider"), ", you should remember that your web server has to handle frontend routing paths ",
      "by serving ", i("index.html"), " file. Your should also refer all your resources (like images, styles or scripts) ",
      "with an absolute URLs. Take a look at this guide sources migration from hash-based to path-based routing (",
      a(href := "https://github.com/UdashFramework/udash-guide/commit/cc54f57cc2128e446e1df1c29f65d0baa97c6fc9")("GitHub commit"), ")."
    ),
    h3("RoutingState & RoutingRegistry"),
    p(
      "A Udash application is based on states. The application state describes the created ViewFactories structure and is determined ",
      "by a URL. The URL is resolved to a ", i("RoutingState"), " on every change. The application states structure is your decision, ",
      "Udash requires only that all states must extend ", i("State"),
      ". States tend to form a nested hierarchy. ",
      "With ", i("ContainerState"), " and ", i("FinalState"), " you can express the place of a state in the hierarchy. ",
      "For example:"
    ),
    CodeBlock(
      """import io.udash._
        |
        |sealed abstract class RoutingState(
        |  val parentState: Option[ContainerRoutingState]
        |) extends State[RoutingState]
        |
        |sealed abstract class ContainerRoutingState(
        |  parentState: Option[ContainerRoutingState]
        |) extends RoutingState(parentState) with ContainerState[RoutingState]
        |
        |sealed abstract class FinalRoutingState(
        |  parentState: Option[ContainerRoutingState]
        |) extends RoutingState(parentState) with FinalState[RoutingState]
        |
        |case object RootState extends ContainerRoutingState(None)
        |case class UsersListState(searchQuery: Option[String]) extends FinalRoutingState(Some(RootState))
        |case class UserDetailsState(username: String) extends FinalRoutingState(Some(RootState))
        |case object Dashboard extends FinalRoutingState(Some(RootState))""".stripMargin
    )(GuideStyles),
    p(i("RoutingRegistry"), " is used to create a new application state on an URL change. For example:"),
    CodeBlock(
      """import io.udash._
        |
        |class RoutingRegistryDef extends RoutingRegistry[RoutingState] {
        |  def matchUrl(url: Url): RoutingState =
        |    url2State.applyOrElse(
        |      url.value.stripSuffix("/"),
        |      (x: String) => ErrorState
        |    )
        |
        |  def matchState(state: RoutingState): Url =
        |    Url(state2Url.apply(state))
        |
        |  private val (url2State, state2Url) = bidirectional {
        |    case "/users" => Dashboard
        |    case "/users/search" => UsersListState(None)
        |    case "/users/search" / query => UsersListState(Some(query))
        |    case "/users/details" / username => UserDetailsState(username)
        |  }
        |}""".stripMargin
    )(GuideStyles),
    p(
      "You can pass URL parts into the application state, just use the ", i("/"), " operator like in the example above. ",
      "For ", i("UsersListState"), " it is possible to keep some search query in the URL. ",
      "You can update the application state with the ", i("goTo"), " method from the ", i("Application"), " interface ",
      "and the URL will be automatically updated. A user can copy and paste the URL to a new window and you can access ",
      "the current search query in the ", i("handleState"), " method of the presenter."
    ),
    h3("ViewFactory & ViewFactoryRegistry"),
    p(
      "When the state changes, the application needs to resolve matching ", i("ViewFactory"), ". ",
      "The way this matching is implemented is crucial, because if it returns a different ", i("ViewFactory"), ", ",
      "new presenter and view will be created and rendered. If the matching returns equal (value, not reference comparison) ",
      i("ViewFactory"), ", then the previously created presenter will be informed about the state changed through calling the ", i("handleState"), " method."
    ),
    CodeBlock(
      """class StatesToViewFactoryDef extends ViewFactoryRegistry[RoutingState] {
        |  def matchStateToResolver(state: RoutingState): ViewFactory[_ <: RoutingState] =
        |    state match {
        |      // let's assume that these ViewFactory objects exist somewhere
        |      case Dashboard => DashboardViewFactory
        |      case UsersListState(query) => UsersListViewFactory
        |      // let's assume that UserDetailsViewFactory
        |      // is a case class with one String argument
        |      case UserDetailsState(username) => UserDetailsViewFactory(username)
        |    }
        |}""".stripMargin
    )(GuideStyles),
    p(
      "Notice that matching for ", i("UsersListState"), " always returns the same ", i("UsersListViewFactory"), " and ",
      "for ", i("UserDetailsState"), " always returns new ", i("UserDetailsViewFactory"), ""
    ),
    ul(GuideStyles.defaultList)(
      li(
        span("The URL change: /users/details/john ➔ /users/details/david"),
        ul(GuideStyles.innerList)(
          li("The application state changes: UserDetailsState(\\"john\\") ➔ UserDetailsState(\\"david\\")."),
          li("The ViewFactory changes: UserDetailsViewFactory(\\"john\\") ➔ UserDetailsViewFactory(\\"david\\")."),
          li("The application creates new view and presenter.")
        )
      ),
      li(
        span("The URL change: /users/search/john ➔ /users/search/david"),
        ul(GuideStyles.innerList)(
          li("The application state changes: UsersListState(Some(\\"john\\")) ➔ UsersListState(Some(\\"david\\"))."),
          li("The ViewFactory stays: UsersListViewFactory ➔ UsersListViewFactory."),
          li("Presenter's ", i("handleState"), " method is called with the new state as an argument."),
          li("The view is not touched at all. The presenter can update the model or the view.")
        )
      )
    ),
    p(
      "Below you can find input which changes the URL on every update. This change is handled like ",
      i("UsersListState"), " in the above example, so this view is not refreshed after the URL change."
    ),
    // Demo 2: every keystroke calls goTo with the URL-encoded input value;
    // the state change does not recreate this view.
    ForceBootstrap(
      div(GuideStyles.frame, GuideStyles.useBootstrap)(
        input(
          BootstrapStyles.Form.control, id := "url-demo-link-input", value := "",
          placeholder := "Type something in this field and look at the URL...", onkeyup :+= ((event: dom.Event) => {
            applicationInstance.goTo(FrontendRoutingState(
              Some(js.Dynamic.global
                .encodeURIComponent(event.target.asInstanceOf[dom.html.Input].value)
                .asInstanceOf[String])
            ))
            true
          })
        ),
        p("This view was created with: ", span(id := "url-demo-link-init")(applicationInstance.currentUrl.value))
      )
    ),
    h3("Handling routing errors"),
    p(
      "In some cases (for example authorization) it is useful to throw an exception in the routing registry or presenter's ",
      i("handleState"), " method. These exceptions are handled by the ", i("Application"), " which allows you to register ",
      "a routing failure callback with the ", i("onRoutingFailure"), " method."
    ),
    p("Take a look at routing a failure handler from authorization utilities: "),
    CodeBlock(
      """application.onRoutingFailure {
        |  case _: UnauthorizedException | _: UnauthenticatedException
        |    if application.currentState != authFailedRedirectState =>
        |    application.goTo(authFailedRedirectState)
        |}""".stripMargin
    )(GuideStyles),
    h2("What's next?"),
    p(
      "Take a look at the ", a(href := FrontendMVPState.url)("Model, View, Presenter & ViewFactory"), " chapter to ",
      "learn more about the ", a(href := References.MvpPattern)("MVP pattern"), " variation used in Udash."
    )
  )
}
| UdashFramework/udash-guide | guide/src/main/scala/io/udash/web/guide/views/frontend/FrontendRoutingView.scala | Scala | gpl-3.0 | 11,099 |
/**
 * Reads an index n from stdin and prints the nth Fibonacci number
 * (1-indexed: fib(1) = fib(2) = 1).
 */
object fibonacci {
  def main(args: Array[String]): Unit = {
    print("Input the nth fibonacci number you'd like to compute: ")
    val x: Int = scala.io.StdIn.readLine.toInt
    println(nthFib(x))
  }

  /**
   * Returns the nth Fibonacci number (1-indexed).
   *
   * Replaces the original naive double recursion, which was exponential in n
   * and recursed without bound for n <= 0. This version is tail-recursive
   * (constant stack, O(n) time) and rejects invalid input explicitly.
   *
   * @param n 1-based index, must be >= 1
   * @throws IllegalArgumentException if n < 1
   */
  def nthFib(n: Int): Int = {
    require(n >= 1, "n must be >= 1")
    @annotation.tailrec
    def loop(i: Int, prev: Int, curr: Int): Int =
      if (i >= n) curr
      else loop(i + 1, curr, prev + curr)
    loop(1, 0, 1)
  }
}
package com.arcusys.valamis.reports.service
import java.io.File
import com.arcusys.valamis.certificate.reports.DateReport
import com.arcusys.valamis.certificate.service.{LearningPathService}
import com.arcusys.valamis.course.api.CourseService
import com.arcusys.valamis.gradebook.service.LessonGradeService
import com.arcusys.valamis.lesson.service.{LessonService, LessonStatementReader, UserLessonResultService}
import com.arcusys.valamis.persistence.common.SlickProfile
import com.arcusys.valamis.reports.model.{AttemptedLessonsRow, AveragePassingGrades, CertificateReportRow, MostActiveUsers, TopLesson, TopLessonWithPopularity}
import com.arcusys.valamis.reports.table.LessonTables
import com.arcusys.valamis.slick.util.SlickDbTestBase
import com.arcusys.valamis.user.service.UserService
import com.arcusys.valamis.util.FileSystemUtil
import org.joda.time.DateTime
import org.scalatest.{BeforeAndAfter, FunSuite}
import com.arcusys.valamis.util.serialization.JsonHelper
import org.json4s.{DefaultFormats, Formats}
import org.json4s.ext.DateTimeSerializer
/**
* Created by amikhailov on 16.01.17.
*/
/**
 * Tests for [[ReportServiceImpl]] export helpers: CSV/JSON serialization of
 * the various report row types, mime-type resolution and report-file lookup.
 * The DAO/service collaborators are not exercised by these code paths and are
 * stubbed with `???`.
 */
class ReportServiceTest extends FunSuite
  with BeforeAndAfter
  with LessonTables
  with SlickProfile
  with SlickDbTestBase {

  // DateTimeSerializer is required to decode the DateTime field of certificate rows from JSON.
  implicit val formats: Formats = DefaultFormats + DateTimeSerializer

  import driver.api._

  val certificateRows = Seq(
    CertificateReportRow(DateTime.parse("2017-01-16T09:49:29Z"), 1, 2),
    CertificateReportRow(DateTime.parse("2017-01-16T09:49:29Z"), 3, 4)
  )

  // Titles deliberately contain commas/quotes to exercise CSV escaping.
  val lessonRows = Seq(
    TopLessonWithPopularity(TopLesson(1L, "lesson Hello World", None, 1), 99.0f),
    TopLessonWithPopularity(TopLesson(2L, "lesson, Hello World", None, 1), 1.0f),
    TopLessonWithPopularity(TopLesson(3L, "lesson 3", None, 1), 0.0f)
  )

  // User names deliberately contain quotes/commas to exercise CSV escaping.
  val userRows = Seq(
    MostActiveUsers(1L, "Bob", "/picture1.jpg", 1, 2, 3),
    MostActiveUsers(2L, "Alice \"Smith\" ", "/picture2.jpg", 1, 2, 3),
    MostActiveUsers(3L, "James, James Bond", "/picture3.jpg", 1, 2, 3)
  )

  val averageGrades = Seq(
    AveragePassingGrades(1, "title 1", 0.1F),
    AveragePassingGrades(2, "title 2", 0.2F),
    AveragePassingGrades(3, "title 1", 0.3F)
  )

  val attemptedLessons = Seq(
    AttemptedLessonsRow(1L, "Bob", 3, 1),
    AttemptedLessonsRow(2L, "Alice \"Smith\" ", 3, 3)
  )

  // Each test may write report files; clean them up after every test.
  after {
    reportService.cleanReportDir(0)
  }

  // Only the export helpers are under test, so every collaborator is left unimplemented.
  lazy val reportService = new ReportServiceImpl(driver, db) {
    override def dateReport: DateReport = ???
    override def userService: UserService = ???
    override def lessonGradeService: LessonGradeService = ???
    override def reader: LessonStatementReader = ???
    override def lessonService: LessonService = ???
    override def courseService: CourseService = ???
    override def learningPathService: LearningPathService = ???
    override def userResult: UserLessonResultService = ???
  }

  test("get mime type") {
    assert(reportService.getMimeType("report.csv") == "text/csv")
    assert(reportService.getMimeType("report.json") == "application/json")
    assert(reportService.getMimeType("report.other") == "application/octet-stream")
  }

  test("get report file") {
    val filename = "report.csv"
    assert(reportService.getReportFile(filename).getPath.contains(filename))
  }

  // CSV exports: expect one header line plus one line per row.
  test("save users as csv") {
    val csvFile = reportService.saveUsersAsCsv(userRows)
    val csv = getFileContent(csvFile)
    println(csv)
    assert(getRowsCount(csv) == userRows.length + 1)
  }

  // JSON exports: expect the decoded array to have one element per row.
  test("save users as json") {
    val jsonFile = reportService.saveUsersAsJson(userRows)
    val json = getFileContent(jsonFile)
    val decoded = fromJson(json)
    assert(decoded.length == userRows.length)
  }

  test("save lessons as csv") {
    val csvFile = reportService.saveLessonsAsCsv(lessonRows)
    val csv = getFileContent(csvFile)
    assert(getRowsCount(csv) == lessonRows.length + 1)
  }

  test("save lessons as json") {
    val jsonFile = reportService.saveLessonsAsJson(lessonRows)
    val json = getFileContent(jsonFile)
    val decoded = fromJson(json)
    assert(decoded.length == lessonRows.length)
  }

  test("save certificates as csv") {
    val csvFile = reportService.saveCertificatesAsCsv(certificateRows)
    val csv = getFileContent(csvFile)
    assert(getRowsCount(csv) == certificateRows.length + 1)
  }

  test("save certificates as json") {
    val jsonFile = reportService.saveCertificatesAsJson(certificateRows)
    val json = getFileContent(jsonFile)
    val decoded = fromJson(json)
    assert(decoded.length == certificateRows.length)
  }

  test("save average grade report as csv") {
    val csvFile = reportService.saveAverageGradesAsCsv(averageGrades)
    val csv = getFileContent(csvFile)
    assert(getRowsCount(csv) == averageGrades.length + 1)
  }

  test("save average grade report as json") {
    val jsonFile = reportService.saveAverageGradesAsJson(averageGrades)
    val json = getFileContent(jsonFile)
    val decoded = fromJson(json)
    assert(decoded.length == averageGrades.length)
  }

  test("save attempted report as csv") {
    val csvFile = reportService.saveAttemptedLessonsAsCsv(attemptedLessons)
    val csv = getFileContent(csvFile)
    assert(getRowsCount(csv) == attemptedLessons.length + 1)
  }

  test("save attempted report as json") {
    val jsonFile = reportService.saveAttemptedLessonsAsJson(attemptedLessons)
    val json = getFileContent(jsonFile)
    val decoded = fromJson(json)
    assert(decoded.length == attemptedLessons.length)
  }

  // Decodes a JSON array into a Seq for size comparison.
  private def fromJson(json: String) = {
    JsonHelper.fromJson[Seq[Any]](json)
  }

  // Number of lines in the string (split on "\n").
  private def getRowsCount(str: String) = {
    str.split("\n").length
  }

  // Reads the whole file as a string via FileSystemUtil.
  private def getFileContent(file: File) = {
    FileSystemUtil.getFileContent(file).map(_.toChar).mkString
  }
}
| arcusys/Valamis | valamis-reports/src/test/scala/com/arcusys/valamis/reports/service/ReportServiceTest.scala | Scala | gpl-3.0 | 5,754 |
/*
* Copyright 2014 Cisco Systems, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cisco.oss.foundation.orchestration.scope.provision.exception
/**
 * Signals a failure during SCOPE provisioning.
 *
 * Created by mgoldshm on 4/10/14.
 *
 * @param message human-readable description of the provisioning failure,
 *                forwarded to [[java.lang.Exception]]
 */
class ScopeProvisionException(message: String) extends Exception(message)
| foundation-runtime/orchestration | src/main/java/com/cisco/oss/foundation/orchestration/scope/provision/exception/ScopeProvisionException.scala | Scala | apache-2.0 | 795 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.businessdetails
import cats.data.Validated.{Invalid, Valid}
import jto.validation.{Path, ValidationError}
import org.scalatestplus.mockito.MockitoSugar
import org.scalatestplus.play.PlaySpec
/**
 * Form-mapping tests for ContactingYouEmail: a pair of fields
 * ("email" and "confirmEmail") that must both be present and non-empty.
 * Covers the read rule (formRule) and the write side (formWrites).
 */
class ContactingYouEmailSpec extends PlaySpec with MockitoSugar {

  "ContactingYouEmailSpec" must {

    "successfully validate" when {
      // Both fields supplied with the same address.
      "given a 'matchinig emails" in {
        val data = Map(
          "email" -> Seq("test@test.com"),
          "confirmEmail" -> Seq("test@test.com")
        )

        ContactingYouEmail.formRule.validate(data) must
          be(Valid(ContactingYouEmail("test@test.com","test@test.com")))
      }
    }

    "fail validation" when {
      // Absent keys yield the generic "error.required" on both paths.
      "given missing data represented by an empty Map" in {
        ContactingYouEmail.formRule.validate(Map.empty) must
          be(Invalid(Seq(
            (Path \\ "email") -> Seq(ValidationError("error.required")),
            (Path \\ "confirmEmail") -> Seq(ValidationError("error.required"))
          )))
      }

      // Present-but-empty values yield field-specific email errors.
      "given missing data represented by an empty string" in {
        val data = Map(
          "email" -> Seq(""),
          "confirmEmail" -> Seq("")
        )

        ContactingYouEmail.formRule.validate(data) must
          be(Invalid(Seq(
            (Path \\ "email") -> Seq(ValidationError("error.required.email")),
            (Path \\ "confirmEmail") -> Seq(ValidationError("error.required.email.reenter"))
          )))
      }
    }

    // Writing the model back produces the original two-field form map.
    "write correct data" in {
      val model = ContactingYouEmail("test@test.com","test@test.com")

      ContactingYouEmail.formWrites.writes(model) must
        be(Map(
          "email" -> Seq("test@test.com"),
          "confirmEmail" -> Seq("test@test.com")
        ))
    }
  }
}
| hmrc/amls-frontend | test/models/businessdetails/ContactingYouEmailSpec.scala | Scala | apache-2.0 | 2,346 |
/**
* Copyright (C) 2015 DANS - Data Archiving and Networked Services (info@dans.knaw.nl)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.knaw.dans.easy.stage.dataset
import java.io.File
import nl.knaw.dans.easy.stage.lib.Util.loadXML
import nl.knaw.dans.easy.stage.{ RejectedDepositException, Settings }
import nl.knaw.dans.lib.logging.DebugEnhancedLogging
import scala.sys.error
import scala.util.{ Failure, Success, Try }
import scala.xml.{ Node, NodeSeq }
object Util extends DebugEnhancedLogging {

  /**
   * Loads the bag's file metadata XML and selects the entries describing one
   * particular file. Use the result as input for the element readers below.
   *
   * @param filePath Path to the file, relative to the bag
   * @return File metadata (XML Nodes) for the specified file
   */
  def readFileMetadata(filePath: String)(implicit s: Settings): Try[NodeSeq] = Try {
    val allFileElements = loadBagXML("metadata/files.xml") \\ "files" \ "file"
    allFileElements.filter(fileElement => (fileElement \@ "filepath") == filePath)
  }

  // Text of the child element `label`, but only when exactly one such child exists.
  private def getText(fileMetadata: NodeSeq, label: String): Option[String] = {
    val candidates = fileMetadata \ label
    if (candidates.size == 1) Option(candidates.head.text)
    else None
  }

  /** Mime type of the file; fails when no unique `format` element is present. */
  def readMimeType(fileMetadata: NodeSeq): Try[String] = {
    getText(fileMetadata, "format") match {
      case Some(mimeType) => Success(mimeType)
      case None => Failure(RejectedDepositException(s"format element doesn't exist for the file, or isn't unique."))
    }
  }

  /** Optional `title` element of the file. */
  def readTitle(fileMetadata: NodeSeq): Try[Option[String]] = Try {
    getText(fileMetadata, "title")
  }

  /** Access rights: `accessRights` takes precedence over `accessibleToRights`. */
  def readAccessRights(fileMetadata: NodeSeq): Try[Option[String]] = Try {
    getText(fileMetadata, "accessRights").orElse(getText(fileMetadata, "accessibleToRights"))
  }

  /** Optional `visibleToRights` element of the file. */
  def readVisibleToRights(fileMetadata: NodeSeq): Try[Option[String]] = Try {
    getText(fileMetadata, "visibleToRights")
  }

  /** Audience codes from the dataset's DDM profile section. */
  def readAudiences()(implicit s: Settings): Try[Seq[String]] = Try {
    trace(())
    val audienceElements = loadBagXML("metadata/dataset.xml") \\ "DDM" \ "profile" \ "audience"
    audienceElements.map(_.text)
  }

  /** Loads an XML file from the bag directory, aborting when it does not exist. */
  def loadBagXML(fileName: String)(implicit s: Settings): Node = {
    val xmlFile = new File(s.bagitDir, fileName)
    if (xmlFile.exists()) loadXML(xmlFile)
    else error(s"Unable to find `$fileName` in bag.")
  }
}
| DANS-KNAW/easy-stage-dataset | lib/src/main/scala/nl.knaw.dans.easy.stage/dataset/Util.scala | Scala | apache-2.0 | 2,817 |
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.action.builder
import io.gatling.core.action.{ Action, Loop }
import io.gatling.core.session.Expression
import io.gatling.core.structure.{ ChainBuilder, ScenarioContext }
import io.gatling.core.util.NameGen
// Describes one loop construct of the DSL: its display name, whether it is
// driven by elapsed time, and whether the condition is checked after the body
// (do-while semantics) rather than before.
sealed abstract class LoopType(val name: String, val timeBased: Boolean, val evaluateConditionAfterLoop: Boolean)

case object RepeatLoopType extends LoopType(name = "repeat", timeBased = false, evaluateConditionAfterLoop = false)
case object ForeachLoopType extends LoopType(name = "foreach", timeBased = false, evaluateConditionAfterLoop = false)
case object DuringLoopType extends LoopType(name = "during", timeBased = true, evaluateConditionAfterLoop = false)
case object ForeverLoopType extends LoopType(name = "forever", timeBased = false, evaluateConditionAfterLoop = false)
case object AsLongAsLoopType extends LoopType(name = "asLongAs", timeBased = false, evaluateConditionAfterLoop = false)
case object DoWhileType extends LoopType(name = "doWhile", timeBased = false, evaluateConditionAfterLoop = true)
/**
 * Builds a [[Loop]] action and wires its body back to it.
 *
 * @param condition the function that determines whether to keep looping
 * @param loopNext chain that will be executed while the condition holds
 * @param counterName the name of the loop counter
 * @param exitASAP if the loop is to be exited as soon as the condition no longer holds
 * @param loopType the loop type (repeat/foreach/during/forever/asLongAs/doWhile)
 */
class LoopBuilder(condition: Expression[Boolean], loopNext: ChainBuilder, counterName: String, exitASAP: Boolean, loopType: LoopType) extends ActionBuilder with NameGen {

  def build(ctx: ScenarioContext, next: Action): Action = {
    import ctx._
    val loop = new Loop(
      condition.safe,
      counterName,
      exitASAP,
      loopType.timeBased,
      loopType.evaluateConditionAfterLoop,
      coreComponents.statsEngine,
      genName(loopType.name),
      next
    )
    // The loop body ends by re-entering the loop, so build it with the loop as
    // its successor, then close the cycle via initialize.
    loop.initialize(loopNext.build(ctx, loop), ctx.system)
    loop
  }
}
| timve/gatling | gatling-core/src/main/scala/io/gatling/core/action/builder/LoopBuilder.scala | Scala | apache-2.0 | 2,335 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.util.regex.{MatchResult, Pattern}
import org.apache.commons.lang3.StringEscapeUtils
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.util.{GenericArrayData, StringUtils}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
/**
 * Common base for LIKE / RLIKE: a string on the left, a regex pattern on the
 * right, evaluating to a nullable boolean. Subclasses define how the pattern
 * string is escaped and how matching is performed.
 */
trait StringRegexExpression extends ImplicitCastInputTypes {
  self: BinaryExpression =>

  // Turns the raw pattern string into a Java regex (LIKE escapes its
  // wildcards, RLIKE uses the string as-is).
  def escape(v: String): String
  def matches(regex: Pattern, str: String): Boolean

  override def dataType: DataType = BooleanType
  override def inputTypes: Seq[DataType] = Seq(StringType, StringType)

  // try cache the pattern for Literal
  private lazy val cache: Pattern = right match {
    case x @ Literal(value: String, StringType) => compile(value)
    case _ => null
  }

  protected def compile(str: String): Pattern = if (str == null) {
    null
  } else {
    // Let it raise exception if couldn't compile the regex string
    Pattern.compile(escape(str))
  }

  // Cached pattern when the right side is a string literal, otherwise recompiled per call.
  protected def pattern(str: String) = if (cache == null) compile(str) else cache

  // Null pattern propagates as SQL NULL result.
  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    val regex = pattern(input2.asInstanceOf[UTF8String].toString)
    if(regex == null) {
      null
    } else {
      matches(regex, input1.asInstanceOf[UTF8String].toString)
    }
  }
}
/**
 * Simple RegEx pattern matching function.
 *
 * SQL LIKE: the pattern is escaped via StringUtils.escapeLikeRegex and must
 * match the entire input string (matches(), not find()).
 */
case class Like(left: Expression, right: Expression)
  extends BinaryExpression with StringRegexExpression with CodegenFallback {

  override def escape(v: String): String = StringUtils.escapeLikeRegex(v)

  // Whole-string match, per LIKE semantics.
  override def matches(regex: Pattern, str: String): Boolean = regex.matcher(str).matches()

  override def toString: String = s"$left LIKE $right"

  override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val patternClass = classOf[Pattern].getName
    val escapeFunc = StringUtils.getClass.getName.stripSuffix("$") + ".escapeLikeRegex"
    val pattern = ctx.freshName("pattern")

    if (right.foldable) {
      val rVal = right.eval()
      if (rVal != null) {
        // Foldable non-null pattern: compile it once into a mutable state field
        // of the generated class instead of per row.
        val regexStr =
          StringEscapeUtils.escapeJava(escape(rVal.asInstanceOf[UTF8String].toString()))
        ctx.addMutableState(patternClass, pattern,
          s"""$pattern = ${patternClass}.compile("$regexStr");""")

        // We don't use nullSafeCodeGen here because we don't want to re-evaluate right again.
        val eval = left.gen(ctx)
        s"""
          ${eval.code}
          boolean ${ev.isNull} = ${eval.isNull};
          ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
          if (!${ev.isNull}) {
            ${ev.value} = $pattern.matcher(${eval.value}.toString()).matches();
          }
        """
      } else {
        // Foldable null pattern: the result is always null.
        s"""
          boolean ${ev.isNull} = true;
          ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
        """
      }
    } else {
      // Non-foldable pattern: escape and compile on every row.
      nullSafeCodeGen(ctx, ev, (eval1, eval2) => {
        s"""
          String rightStr = ${eval2}.toString();
          ${patternClass} $pattern = ${patternClass}.compile($escapeFunc(rightStr));
          ${ev.value} = $pattern.matcher(${eval1}.toString()).matches();
        """
      })
    }
  }
}
/**
 * SQL RLIKE: the right-hand side is used verbatim as a Java regex and only
 * needs to occur somewhere in the input string (find(0), not matches()).
 */
case class RLike(left: Expression, right: Expression)
  extends BinaryExpression with StringRegexExpression with CodegenFallback {

  // No wildcard translation for RLIKE.
  override def escape(v: String): String = v

  // Substring match starting the search at offset 0.
  override def matches(regex: Pattern, str: String): Boolean = regex.matcher(str).find(0)

  override def toString: String = s"$left RLIKE $right"

  override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val patternClass = classOf[Pattern].getName
    val pattern = ctx.freshName("pattern")

    if (right.foldable) {
      val rVal = right.eval()
      if (rVal != null) {
        // Foldable non-null pattern: compile once into a mutable state field.
        val regexStr =
          StringEscapeUtils.escapeJava(rVal.asInstanceOf[UTF8String].toString())
        ctx.addMutableState(patternClass, pattern,
          s"""$pattern = ${patternClass}.compile("$regexStr");""")

        // We don't use nullSafeCodeGen here because we don't want to re-evaluate right again.
        val eval = left.gen(ctx)
        s"""
          ${eval.code}
          boolean ${ev.isNull} = ${eval.isNull};
          ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
          if (!${ev.isNull}) {
            ${ev.value} = $pattern.matcher(${eval.value}.toString()).find(0);
          }
        """
      } else {
        // Foldable null pattern: the result is always null.
        s"""
          boolean ${ev.isNull} = true;
          ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
        """
      }
    } else {
      // Non-foldable pattern: compile on every row.
      nullSafeCodeGen(ctx, ev, (eval1, eval2) => {
        s"""
          String rightStr = ${eval2}.toString();
          ${patternClass} $pattern = ${patternClass}.compile(rightStr);
          ${ev.value} = $pattern.matcher(${eval1}.toString()).find(0);
        """
      })
    }
  }
}
/**
 * Splits str around pat (pattern is a regular expression).
 * Returns an array of strings; limit -1 means no upper bound on the number
 * of splits.
 */
case class StringSplit(str: Expression, pattern: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {

  override def left: Expression = str
  override def right: Expression = pattern
  override def dataType: DataType = ArrayType(StringType)
  override def inputTypes: Seq[DataType] = Seq(StringType, StringType)

  override def nullSafeEval(string: Any, regex: Any): Any = {
    val strings = string.asInstanceOf[UTF8String].split(regex.asInstanceOf[UTF8String], -1)
    new GenericArrayData(strings.asInstanceOf[Array[Any]])
  }

  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val arrayClass = classOf[GenericArrayData].getName
    nullSafeCodeGen(ctx, ev, (str, pattern) =>
      // Array in java is covariant, so we don't need to cast UTF8String[] to Object[].
      s"""${ev.value} = new $arrayClass($str.split($pattern, -1));""")
  }

  override def prettyName: String = "split"
}
/**
 * Replace all substrings of str that match regexp with rep.
 *
 * NOTE: this expression is not THREAD-SAFE, as it has some internal mutable status.
 */
case class RegExpReplace(subject: Expression, regexp: Expression, rep: Expression)
  extends TernaryExpression with ImplicitCastInputTypes {

  // last regex in string, we will update the pattern iff regexp value changed.
  @transient private var lastRegex: UTF8String = _
  // last regex pattern, we cache it for performance concern
  @transient private var pattern: Pattern = _
  // last replacement string, we don't want to convert a UTF8String => java.langString every time.
  @transient private var lastReplacement: String = _
  @transient private var lastReplacementInUTF8: UTF8String = _
  // result buffer write by Matcher
  @transient private val result: StringBuffer = new StringBuffer

  override def nullSafeEval(s: Any, p: Any, r: Any): Any = {
    if (!p.equals(lastRegex)) {
      // regex value changed
      lastRegex = p.asInstanceOf[UTF8String].clone()
      pattern = Pattern.compile(lastRegex.toString)
    }
    if (!r.equals(lastReplacementInUTF8)) {
      // replacement string changed
      lastReplacementInUTF8 = r.asInstanceOf[UTF8String].clone()
      lastReplacement = lastReplacementInUTF8.toString
    }
    // Reuse the shared buffer: clear it, then append each replaced region and the tail.
    val m = pattern.matcher(s.toString())
    result.delete(0, result.length())

    while (m.find) {
      m.appendReplacement(result, lastReplacement)
    }
    m.appendTail(result)

    UTF8String.fromString(result.toString)
  }

  override def dataType: DataType = StringType
  override def inputTypes: Seq[AbstractDataType] = Seq(StringType, StringType, StringType)
  override def children: Seq[Expression] = subject :: regexp :: rep :: Nil
  override def prettyName: String = "regexp_replace"

  // Generated code mirrors nullSafeEval: per-instance mutable fields hold the
  // last pattern/replacement so recompilation happens only when the value changes.
  override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val termLastRegex = ctx.freshName("lastRegex")
    val termPattern = ctx.freshName("pattern")

    val termLastReplacement = ctx.freshName("lastReplacement")
    val termLastReplacementInUTF8 = ctx.freshName("lastReplacementInUTF8")

    val termResult = ctx.freshName("result")

    val classNamePattern = classOf[Pattern].getCanonicalName
    val classNameStringBuffer = classOf[java.lang.StringBuffer].getCanonicalName

    ctx.addMutableState("UTF8String", termLastRegex, s"${termLastRegex} = null;")
    ctx.addMutableState(classNamePattern, termPattern, s"${termPattern} = null;")
    ctx.addMutableState("String", termLastReplacement, s"${termLastReplacement} = null;")
    ctx.addMutableState("UTF8String",
      termLastReplacementInUTF8, s"${termLastReplacementInUTF8} = null;")
    ctx.addMutableState(classNameStringBuffer,
      termResult, s"${termResult} = new $classNameStringBuffer();")

    nullSafeCodeGen(ctx, ev, (subject, regexp, rep) => {
    s"""
      if (!$regexp.equals(${termLastRegex})) {
        // regex value changed
        ${termLastRegex} = $regexp.clone();
        ${termPattern} = ${classNamePattern}.compile(${termLastRegex}.toString());
      }
      if (!$rep.equals(${termLastReplacementInUTF8})) {
        // replacement string changed
        ${termLastReplacementInUTF8} = $rep.clone();
        ${termLastReplacement} = ${termLastReplacementInUTF8}.toString();
      }
      ${termResult}.delete(0, ${termResult}.length());
      java.util.regex.Matcher m = ${termPattern}.matcher($subject.toString());
      while (m.find()) {
        m.appendReplacement(${termResult}, ${termLastReplacement});
      }
      m.appendTail(${termResult});
      ${ev.value} = UTF8String.fromString(${termResult}.toString());
      ${ev.isNull} = false;
    """
    })
  }
}
/**
* Extract a specific(idx) group identified by a Java regex.
*
* NOTE: this expression is not THREAD-SAFE, as it has some internal mutable status.
*/
/**
 * Extract a specific(idx) group identified by a Java regex.
 *
 * Returns the empty string when the regex does not match the subject, or when the
 * requested group is optional and did not participate in the match.
 *
 * NOTE: this expression is not THREAD-SAFE, as it has some internal mutable status.
 */
case class RegExpExtract(subject: Expression, regexp: Expression, idx: Expression)
  extends TernaryExpression with ImplicitCastInputTypes {

  // Defaults to group 1, matching the common single-capture-group use case.
  def this(s: Expression, r: Expression) = this(s, r, Literal(1))

  // last regex in string, we will update the pattern iff regexp value changed.
  @transient private var lastRegex: UTF8String = _
  // last regex pattern, we cache it for performance concern
  @transient private var pattern: Pattern = _

  override def nullSafeEval(s: Any, p: Any, r: Any): Any = {
    if (!p.equals(lastRegex)) {
      // regex value changed: recompile and cache the Pattern.
      lastRegex = p.asInstanceOf[UTF8String].clone()
      pattern = Pattern.compile(lastRegex.toString)
    }
    val m = pattern.matcher(s.toString)
    if (m.find) {
      val mr: MatchResult = m.toMatchResult
      // Matcher.group(idx) returns null when the group is optional and did not
      // participate in the match (e.g. group 1 of "(a)?(b)" against "b").
      // Return "" in that case instead of NPE-ing inside UTF8String.fromString.
      val group = mr.group(r.asInstanceOf[Int])
      if (group == null) UTF8String.EMPTY_UTF8 else UTF8String.fromString(group)
    } else {
      UTF8String.EMPTY_UTF8
    }
  }

  // regexp_extract always produces a string result.
  override def dataType: DataType = StringType
  override def inputTypes: Seq[AbstractDataType] = Seq(StringType, StringType, IntegerType)
  override def children: Seq[Expression] = subject :: regexp :: idx :: Nil
  override def prettyName: String = "regexp_extract"

  // Generated mirror of nullSafeEval: the compiled Pattern is cached in mutable
  // state and only recompiled when the regex value changes between rows.
  override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val termLastRegex = ctx.freshName("lastRegex")
    val termPattern = ctx.freshName("pattern")
    val termGroup = ctx.freshName("matchedGroup")
    val classNamePattern = classOf[Pattern].getCanonicalName
    ctx.addMutableState("UTF8String", termLastRegex, s"${termLastRegex} = null;")
    ctx.addMutableState(classNamePattern, termPattern, s"${termPattern} = null;")
    nullSafeCodeGen(ctx, ev, (subject, regexp, idx) => {
      s"""
      if (!$regexp.equals(${termLastRegex})) {
        // regex value changed
        ${termLastRegex} = $regexp.clone();
        ${termPattern} = ${classNamePattern}.compile(${termLastRegex}.toString());
      }
      java.util.regex.Matcher m =
        ${termPattern}.matcher($subject.toString());
      if (m.find()) {
        java.util.regex.MatchResult mr = m.toMatchResult();
        // group($idx) may be null for a non-participating optional group;
        // return the empty string instead of hitting an NPE.
        String ${termGroup} = mr.group($idx);
        if (${termGroup} == null) {
          ${ev.value} = UTF8String.EMPTY_UTF8;
        } else {
          ${ev.value} = UTF8String.fromString(${termGroup});
        }
        ${ev.isNull} = false;
      } else {
        ${ev.value} = UTF8String.EMPTY_UTF8;
        ${ev.isNull} = false;
      }"""
    })
  }
}
| chenc10/Spark-PAF | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala | Scala | apache-2.0 | 12,910 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.