code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/* TypeScript importer for Scala.js
* Copyright 2013-2014 LAMP/EPFL
* @author Sébastien Doeraene
*/
package importer.sc
import importer.ParseResult
import importer.Trees._
import utl.FramerConfig
import scala.annotation.tailrec
import scala.collection.mutable
object Printer {
final def printTab(current: Int): String = {
  // One literal "\\t" token per nesting level; used to indent the generated markup.
  // Non-positive levels yield the empty string.
  "\\t" * current
}
final def transformUrl(a1: String, a2: String): String = {
  // Strip everything up to and including the first '/' of a1, then prefix with a2.
  // When a1 has no '/', indexOf yields -1 and the whole of a1 is kept.
  val slashAt = a1.indexOf('/')
  a2 + a1.drop(slashAt + 1)
}
// NOTE(review): implicit self-reference; no implicit of this type is consumed
// anywhere visible in this file — candidate for removal, verify no external use.
private implicit val self = this
/** True when the term list contains an [[HtmlIdent]], i.e. the layer carries
 *  HTML/text content. Mirrors the `isText` checks inside the CSS printers.
 *
 *  Fixes the original body, which was empty (`{ }` has type Unit but the
 *  method is declared to return Boolean — a compile error).
 *
 *  @param list layer parameter terms to inspect
 *  @return true iff at least one term is an HtmlIdent
 */
def isHtml(list: List[TermTree]): Boolean =
  list.collectFirst { case html: HtmlIdent => html }.isDefined
/** Renders layer parameters as a Snabbdom-style inline props fragment:
 *  `style: { key: "value", ... }` — the closing `}` is appended at the very end.
 *
 *  @param list1  raw term list parsed from a Framer layer definition
 *  @param head1  NOTE(review): never used — a hard-coded "style: {" header is
 *                emitted instead; confirm callers before removing the parameter.
 *  @param framer device config; selects responsive ("rem") vs fixed ("px") units
 *  @return the `style: { ... }` fragment for embedding in a vnode props object
 */
final def params2CssStringVnode(list1: List[TermTree], head1: String)(implicit framer: FramerConfig): String = {
  val head = "style: {" ;
  // Responsive (rem) output is enabled when a device "hand" is selected.
  val ifResponsive = !framer.selectedHand.isEmpty;
  // Reference screen width used as the rem divisor.
  val SCREEN_WIDTH = framer.deviceType match {
    case "apple-iphone-5c-white" => 640
    case _ => 600
  }
  // width_ident
  // Default width/height/x/y to "0" when absent, so the fold below always
  // sees all four positioning terms.
  val list2:List[TermTree] = list1.collectFirst {
    case str: WidthIdent => str
  } match {
    case Some(widthIdent) => list1
    case None => list1.+:(WidthIdent(StringIdent("0")))
  }
  val list3 = list2.collectFirst {
    case str: HeightIdent => str
  } match {
    case Some(widthIdent) => list2
    case None => list2.+:(HeightIdent(StringIdent("0")))
  }
  val list4 = list3.collectFirst {
    case str: XIdent => str
  } match {
    case Some(ident) => list3
    case None => list3.+:(XIdent(StringIdent("0")))
  }
  val list = list4.collectFirst {
    case str: YIdent => str
  } match {
    case Some(ident) => list4
    case None => list4.+:(YIdent(StringIdent("0")))
  }
  // NOTE(review): unused in this method.
  def hasBorderColor = list1.collectFirst {
    case border: BorderColorIdent => border
  }.isDefined
  // Border width is subtracted twice from width/height below (content-box math).
  val borderWithOpt:Option[Double] = list1.collectFirst {
    case BorderWidthIdent(v @ StringIdent(value)) => value.toDouble
  }
  // Uniform padding fallback used when a per-side padding is absent.
  val paddingOpt = list1.collectFirst {
    case PaddingIdent(v @StringIdent(value)) => value.toDouble
  }
  // NOTE(review): only referenced from the commented-out code in getPosition.
  val isRelative:Boolean =
    list.collectFirst {
      case XIdent(v @Value3Ident(pos,optCal,optStr)) => v
      case YIdent(v @Value3Ident(pos,optCal,optStr)) => v
    }.isDefined
  // match {
  // case Some(ident) => true
  // case None => false
  // }
  // Text layers render as inline-block instead of flex (see getDisplay).
  val isText:Boolean = list.collectFirst {
    case HtmlIdent(str) => str
  }.isDefined
  // Per-side paddings: explicit side value wins, else the uniform padding, else 0.
  val paddingLeftWidth:Double = list.collectFirst {
    case PaddingLeftIdent(v @StringIdent(value)) => value.toDouble
  } match {
    case Some(padding) => padding
    case None => paddingOpt match {
      case Some(padding) => padding
      case None => 0
    }
  }
  val paddingRightWidth:Double = list.collectFirst {
    case PaddingRightIdent(v @StringIdent(value)) => value.toDouble
  } match {
    case Some(d) => d
    case None => paddingOpt match {
      case Some(d) => d
      case None => 0
    }
  }
  val paddingTopWidth:Double = list.collectFirst {
    case PaddingTopIdent(v @StringIdent(value)) => value.toDouble
  } match {
    case Some(padding) => padding
    case None => paddingOpt match {
      case Some(padding) => padding
      case None => 0
    }
  }
  val paddingBottomWidth:Double = list.collectFirst {
    case PaddingBottomIdent(v @StringIdent(value)) => value.toDouble
  } match {
    case Some(padding) => padding
    case None => paddingOpt match {
      case Some(padding) => padding
      case None => 0
    }
  }
  def getPosition(): String = {
    // if(!isRelative) "position: \\"absolute\\","
    "position: \\"relative\\", "
  }
  def getDisplay(): String = {
    if(!isText) "display: \\"flex\\", "
    else "display: \\"inline-block\\", "
  }
  // Emit a zero border-width default only when no BorderWidthIdent is present.
  def getBorderWidth(): String =
    list.collectFirst {
      case BorderWidthIdent(v @StringIdent(value)) => value.toDouble
    } match {
      case Some(padding) => "";
      case None => "borderWidth: \\"0px\\", "
    }
  def textAlign(): String =
    list.collectFirst {
      case StyleTextAlignIdent(v ) => v
    } match {
      case Some(v) => "textAlign: \\""+v+"\\",";
      case None => "textAlign: \\"left\\", "
    }
  // Emit a zero padding default only when no padding term of any kind is present.
  def getPaddingWidth(): String =
    list.collectFirst {
      case PaddingIdent(v @StringIdent(value)) => value.toDouble
      case PaddingLeftIdent(v @StringIdent(value)) => value.toDouble
      case PaddingTopIdent(v @StringIdent(value)) => value.toDouble
      case PaddingBottomIdent(v @StringIdent(value)) => value.toDouble
      case PaddingRightIdent(v @StringIdent(value)) => value.toDouble
    } match {
      case Some(padding) => "";
      case None => "padding: \\"0px\\", "
    }
  // Fold every term into the style string; unrecognized terms pass through unchanged.
  list.foldLeft(head + getDisplay + getPosition+getBorderWidth + getPaddingWidth+ textAlign)((result, term) =>
    term match {
      case HeightIdent(v@StringIdent(s1)) =>
        // Content height = declared height minus vertical paddings and both borders.
        val s = s1.toDouble -paddingTopWidth - paddingBottomWidth - (2 * borderWithOpt.getOrElse(0.toDouble))
        // NOTE(review): responsive branch lacks the opening \\" before the value,
        // unlike every other responsive branch here — confirm intended.
        if(ifResponsive) result + "height: " + s.toDouble / SCREEN_WIDTH + "rem\\","
        else result + "height: \\"" + s + "px\\", "
      case WidthIdent(v@ValueWithIdent(Ident("Screen"), value)) =>
        result + "width: \\"100%\\","
      case HeightIdent(v@ValueWithIdent(Ident("Screen"), value)) =>
        result + "height: \\"100%\\", "
      case WidthIdent(v@StringIdent(s1)) =>
        // Content width = declared width minus horizontal paddings and both borders.
        val s = s1.toDouble -paddingLeftWidth - paddingRightWidth- (2 * borderWithOpt.getOrElse(0.toDouble))
        if(ifResponsive) result + "width: \\"" + s.toDouble / SCREEN_WIDTH + "rem\\"," else
          result + "width: \\"" + s + "px\\", "
      case ScrollVerticalIdent(BooleanValueIdent(b)) =>
        result
      case BorderWidthIdent(value@StringIdent(px)) =>
        val r = if(ifResponsive) result + "borderWidth: \\"" + px.toDouble / SCREEN_WIDTH + "rem\\"," else
          result + "borderWidth: \\"" + px + "px\\", "
        // A border width implies a solid border style, added once.
        if (r.contains("borderStyle")) r else r + "borderStyle: \\"solid\\", "
      case BackGroundColorIdent(color) =>
        result + "backgroundColor: \\"" + color + "\\","
      case YIdent(v @Value3Ident(pos,optCal,pxOpt)) =>
        // NOTE(review): non-exhaustive — positions other than the guarded cases
        // below throw MatchError at runtime (same applies to the XIdent match).
        pos match {
          case "bottom" if (optCal==Some("-")) =>
            if(ifResponsive) result + "bottom: \\"" + pxOpt.get.toDouble / SCREEN_WIDTH + "rem\\"," else
              result + "bottom: \\"" + pxOpt.get.toDouble + "px\\"," + " "
          // NOTE(review): this "top"/"-" case emits "bottom:" and makes the
          // identical guard below unreachable — likely copy-paste bug; confirm
          // which CSS key was intended.
          case "top" if (optCal==Some("-")) =>
            if(ifResponsive) result + "bottom: \\"" + pxOpt.get.toDouble / SCREEN_WIDTH + "rem\\"," else
              result + "bottom: \\"" + pxOpt.get.toDouble + "px\\"," + " "
          case "top" if (optCal==Some("-")) =>
            if(ifResponsive) result + "top: \\"" + pxOpt.get.toDouble / SCREEN_WIDTH + "rem\\"," else
              result + "top: \\"" + pxOpt.get.toDouble + "px\\"," + " "
          case "center" => {
            if(result.contains("position"))
              result + "marginTop: \\"auto\\",marginBottom: \\"auto\\","
            else
              // NOTE(review): malformed fragment (CSS-style "auto;" with \\n inside
              // a vnode style object) — verify this branch is ever taken.
              result + "marginTop: \\"auto\\"\\nmarginBottom: auto;\\n"
          }
        }
      case XIdent(v @Value3Ident(pos,optCal,pxOpt)) =>
        pos match {
          case "right" if (optCal==Some("-")) =>
            if(ifResponsive) result + "right: \\"" + pxOpt.get.toDouble / SCREEN_WIDTH + "rem\\"," else
              // NOTE(review): px branch emits unquoted CSS syntax ("right: Npx;")
              // inside the vnode style object — inconsistent with siblings.
              result + "right: " + pxOpt.get.toDouble + "px;" + " "
          case "right" if (optCal==Some("+")) =>
            if(ifResponsive) result + "right: \\"" + pxOpt.get.toDouble / SCREEN_WIDTH + "rem\\"," else
              result + "right: \\"" + pxOpt.get.toDouble + "px\\"," + " "
          case "center" => {
            if(result.contains("position"))
              result + "marginLeft: \\"auto\\", marginRight: \\"auto\\", "
            else
              result + "marginLeft: \\"auto\\", marginRight: \\"auto\\", "
          }
        }
      case XIdent(v@StringIdent(px)) =>
        if(ifResponsive) result + "left: \\"" + px.toDouble / SCREEN_WIDTH + "rem\\"," else
          result + "left: \\"" + px + "px\\"," + " "
      case YIdent(v@StringIdent(px)) =>
        if(ifResponsive) result + "top: \\"" + px.toDouble / SCREEN_WIDTH + "rem\\"," else
          result + "top: \\"" + px + "px\\"," + ""
      case VisibleIdent(isVisible) if isVisible == false =>
        // NOTE(review): "hidden" is not a display value and the fragment lacks a
        // trailing comma — presumably visibility/none was intended; verify.
        result + "display: \\"" + "hidden\\" "
      case BorderRadiusIdent(v@StringIdent(value)) =>
        if(ifResponsive) result + "borderRadius: \\"" + value.toDouble / SCREEN_WIDTH + "rem\\"," else
          result + "borderRadius: \\"" + value + "px\\", "
      // case StyleTextAlignIdent(v) =>
      // result + "textAlign: \\"" + v + "\\", "
      case LineHeightIdent(v) =>
        if(ifResponsive) result + "lineHeight: \\"" + v.toDouble / SCREEN_WIDTH + "rem\\"," else
          result + "lineHeight: \\"" + v + "px\\", "
      case StyleFontSizeIdent(v) =>
        if(ifResponsive) result + "fontSize: \\"" + v.toDouble / SCREEN_WIDTH + "rem\\"," else
          result + "fontSize: \\"" + v + "px\\", "
      case PaddingBottomIdent(v@StringIdent(value)) =>
        if(ifResponsive) result + "paddingBottom: \\"" + value.toDouble / SCREEN_WIDTH + "rem\\"," else
          result + "paddingBottom: \\"" + value +"px\\", "
      case PaddingRightIdent(v@StringIdent(value)) =>
        if(ifResponsive) result + "paddingRight: \\"" + value.toDouble / SCREEN_WIDTH + "rem\\"," else
          // NOTE(review): kebab-case "padding-right" key — inconsistent with the
          // camelCase keys used everywhere else in this vnode output.
          result + "padding-right: \\"" + value +"px\\", "
      case MarginTopIdent(v@StringIdent(value)) =>
        if(ifResponsive) result + "marginTop: \\"" + value.toDouble / SCREEN_WIDTH + "rem\\"," else
          result + "marginTop: \\"" + value +"px\\", "
      case MarginRightIdent(v@StringIdent(value)) =>
        if(ifResponsive) result + "marginRight: \\"" + value.toDouble / SCREEN_WIDTH + "rem\\"," else
          result + "marginRight: \\"" + value +"px\\", "
      case MarginLeftIdent(v@StringIdent(value)) =>
        if(ifResponsive) result + "marginLeft: \\"" + value.toDouble / SCREEN_WIDTH + "rem\\"," else
          result + "marginLeft: \\"" + value +"px\\", "
      case MarginBottomIdent(v@StringIdent(value)) =>
        if(ifResponsive) result + "marginBottom: \\"" + value.toDouble / SCREEN_WIDTH + "rem\\"," else
          result + "marginBottom: \\"" + value +"px\\", "
      case PaddingTopIdent(v@StringIdent(value)) =>
        if(ifResponsive) result + "paddingTop: \\"" + value.toDouble / SCREEN_WIDTH + "rem\\"," else
          result + "paddingTop: \\"" + value +"px\\", "
      // NOTE(review): unreachable — PaddingRightIdent is already matched above;
      // this copy's responsive branch also lacks the opening \\".
      case PaddingRightIdent(v@StringIdent(value)) =>
        if(ifResponsive) result + "paddingRight: " + value.toDouble / SCREEN_WIDTH + "rem\\"," else
          result + "paddingRight: \\"" + value +"px\\", "
      case PaddingLeftIdent(v@StringIdent(value)) =>
        // NOTE(review): stray ';' inside the rem value in the responsive branch.
        if(ifResponsive) result + "paddingLeft: \\"" + value.toDouble / SCREEN_WIDTH + "rem;\\"," else
          result + "paddingLeft: \\"" + value +"px\\", "
      case BorderColorIdent(v) =>
        // NOTE(review): v.toDouble will throw for non-numeric colors in responsive
        // mode, and dividing a color by screen width is suspect — verify.
        if(ifResponsive) result + "borderColor: \\"" + v.toDouble / SCREEN_WIDTH + "\\"," else
          result + "borderColor: \\"" + v +"\\", "
      case FontColorIdent(v) =>
        result + "color: \\"" + v + "\\","
      case OpacityIdent(v) =>
        result + "opacity: \\"" + v + "\\", "
      case _ =>
        result;
    }) +"}"
}
/** Renders layer parameters as a plain CSS declaration block appended to `head`
 *  (one `prop: value;\\n` per line). Structure parallels params2CssStringVnode,
 *  but emits kebab-case CSS instead of a vnode style object.
 *
 *  @param list1  raw term list parsed from a Framer layer definition
 *  @param head   prefix (typically the accumulated CSS plus a selector and "{")
 *  @param framer device config; selects responsive ("rem") vs fixed ("px") units
 *  @return head plus the rendered declarations (caller appends the closing "}")
 */
final def params2CssString(list1: List[TermTree], head: String)(implicit framer: FramerConfig): String = {
  // Responsive (rem) output is enabled when a device "hand" is selected.
  val ifResponsive = !framer.selectedHand.isEmpty;
  // Reference screen width used as the rem divisor.
  val SCREEN_WIDTH = framer.deviceType match {
    case "apple-iphone-5c-white" => 640
    case _ => 600
  }
  // width_ident
  // Default width/height/x/y to "0" when absent, so the fold below always
  // sees all four positioning terms.
  val list2:List[TermTree] = list1.collectFirst {
    case str: WidthIdent => str
  } match {
    case Some(widthIdent) => list1
    case None => list1.+:(WidthIdent(StringIdent("0")))
  }
  val list3 = list2.collectFirst {
    case str: HeightIdent => str
  } match {
    case Some(widthIdent) => list2
    case None => list2.+:(HeightIdent(StringIdent("0")))
  }
  val list4 = list3.collectFirst {
    case str: XIdent => str
  } match {
    case Some(ident) => list3
    case None => list3.+:(XIdent(StringIdent("0")))
  }
  val list = list4.collectFirst {
    case str: YIdent => str
  } match {
    case Some(ident) => list4
    case None => list4.+:(YIdent(StringIdent("0")))
  }
  // NOTE(review): unused in this method.
  def hasBorderColor = list1.collectFirst {
    case border: BorderColorIdent => border
  }.isDefined
  // Border width is subtracted twice from width/height below (content-box math).
  val borderWithOpt:Option[Double] = list1.collectFirst {
    case BorderWidthIdent(v @ StringIdent(value)) => value.toDouble
  }
  // Uniform padding fallback used when a per-side padding is absent.
  val paddingOpt = list1.collectFirst {
    case PaddingIdent(v @StringIdent(value)) => value.toDouble
  }
  // NOTE(review): only referenced from the commented-out code in getPosition.
  val isRelative:Boolean =
    list.collectFirst {
      case XIdent(v @Value3Ident(pos,optCal,optStr)) => v
      case YIdent(v @Value3Ident(pos,optCal,optStr))=> v
    } match {
      case Some(ident) => true
      case None => false
    }
  // Text layers render as inline-block instead of flex (see getDisplay).
  val isText:Boolean = list.collectFirst {
    case HtmlIdent(str) => str
  }.isDefined
  // Per-side paddings: explicit side value wins, else the uniform padding, else 0.
  val paddingLeftWidth:Double = list.collectFirst {
    case PaddingLeftIdent(v @StringIdent(value)) => value.toDouble
  } match {
    case Some(padding) => padding
    case None => paddingOpt match {
      case Some(padding) => padding
      case None => 0
    }
  }
  val paddingRightWidth:Double = list.collectFirst {
    case PaddingRightIdent(v @StringIdent(value)) => value.toDouble
  } match {
    case Some(d) => d
    case None => paddingOpt match {
      case Some(d) => d
      case None => 0
    }
  }
  val paddingTopWidth:Double = list.collectFirst {
    case PaddingTopIdent(v @StringIdent(value)) => value.toDouble
  } match {
    case Some(padding) => padding
    case None => paddingOpt match {
      case Some(padding) => padding
      case None => 0
    }
  }
  val paddingBottomWidth:Double = list.collectFirst {
    case PaddingBottomIdent(v @StringIdent(value)) => value.toDouble
  } match {
    case Some(padding) => padding
    case None => paddingOpt match {
      case Some(padding) => padding
      case None => 0
    }
  }
  def getPosition(): String = {
    // if(!isRelative) "position: absolute;\\n"
    "position: relative;\\n"
  }
  def getDisplay(): String = {
    if(!isText) "display: flex;\\n"
    else "display: inline-block;\\n"
  }
  // Emit a zero border-width default only when no BorderWidthIdent is present.
  def getBorderWidth(): String =
    list.collectFirst {
      case BorderWidthIdent(v @StringIdent(value)) => value.toDouble
    } match {
      case Some(padding) => "";
      case None => "border-width: 0px;\\n"
    }
  // Emit a zero padding default only when no padding term of any kind is present.
  def getPaddingWidth(): String =
    list.collectFirst {
      case PaddingIdent(v @StringIdent(value)) => value.toDouble
      case PaddingLeftIdent(v @StringIdent(value)) => value.toDouble
      case PaddingTopIdent(v @StringIdent(value)) => value.toDouble
      case PaddingBottomIdent(v @StringIdent(value)) => value.toDouble
      case PaddingRightIdent(v @StringIdent(value)) => value.toDouble
    } match {
      case Some(padding) => "";
      case None => "padding: 0px;\\n"
    }
  def textAlign(): String =
    list.collectFirst {
      case StyleTextAlignIdent(v ) => v
    } match {
      case Some(v) => "text-align: "+v+";\\n";
      case None => "text-align: left;\\n";
    }
  // Fold every term into the CSS string; unrecognized terms pass through unchanged.
  list.foldLeft(head + getDisplay + getPosition+getBorderWidth + getPaddingWidth+ textAlign)((result, term) =>
    term match {
      case HeightIdent(v@StringIdent(s1)) =>
        // Content height = declared height minus vertical paddings and both borders.
        val s = s1.toDouble -paddingTopWidth - paddingBottomWidth - (2 * borderWithOpt.getOrElse(0.toDouble))
        if(ifResponsive) result + "height: " + s.toDouble / SCREEN_WIDTH + "rem;\\n"
        else result + "height: " + s + "px;\\n"
      case WidthIdent(v@ValueWithIdent(Ident("Screen"), value)) =>
        result + "width: 100%;\\n"
      case HeightIdent(v@ValueWithIdent(Ident("Screen"), value)) =>
        result + "height: 100%;\\n"
      case WidthIdent(v@StringIdent(s1)) =>
        // Content width = declared width minus horizontal paddings and both borders.
        val s = s1.toDouble -paddingLeftWidth - paddingRightWidth- (2 * borderWithOpt.getOrElse(0.toDouble))
        if(ifResponsive) result + "width: " + s.toDouble / SCREEN_WIDTH + "rem;\\n" else
          result + "width: " + s + "px;\\n"
      case ScrollVerticalIdent(BooleanValueIdent(b)) =>
        result
      case BorderWidthIdent(value@StringIdent(px)) =>
        val r = if(ifResponsive) result + "border-width: " + px.toDouble / SCREEN_WIDTH + "rem;\\n" else
          result + "border-width: " + px + "px;\\n"
        // A border width implies a solid border style, added once.
        if (r.contains("border-style")) r else r + "border-style: solid;\\n"
      case BackGroundColorIdent(color) =>
        result + "background-color: " + color + ";\\n"
      case YIdent(v @Value3Ident(pos,optCal,pxOpt)) =>
        // NOTE(review): non-exhaustive — positions other than the guarded cases
        // below throw MatchError at runtime (same applies to the XIdent match).
        pos match {
          case "bottom" if (optCal==Some("-")) =>
            if(ifResponsive) result + "bottom: " + pxOpt.get.toDouble / SCREEN_WIDTH + "rem;\\n" else
              result + "bottom: " + pxOpt.get.toDouble + "px;" + "\\n"
          // NOTE(review): this "top"/"-" case emits "bottom:" and makes the
          // identical guard below unreachable — likely copy-paste bug; confirm
          // which CSS property was intended.
          case "top" if (optCal==Some("-")) =>
            if(ifResponsive) result + "bottom: " + pxOpt.get.toDouble / SCREEN_WIDTH + "rem;\\n" else
              result + "bottom: " + pxOpt.get.toDouble + "px;" + "\\n"
          case "top" if (optCal==Some("-")) =>
            if(ifResponsive) result + "top: " + pxOpt.get.toDouble / SCREEN_WIDTH + "rem;\\n" else
              result + "top: " + pxOpt.get.toDouble + "px;" + "\\n"
          case "center" => {
            // Both branches are identical; the position check is vestigial.
            if(result.contains("position"))
              result + "margin-top: auto;\\nmargin-bottom: auto;\\n"
            else
              result + "margin-top: auto;\\nmargin-bottom: auto;\\n"
          }
        }
      case XIdent(v @Value3Ident(pos,optCal,pxOpt)) =>
        pos match {
          case "right" if (optCal==Some("-")) =>
            if(ifResponsive) result + "right: " + pxOpt.get.toDouble / SCREEN_WIDTH + "rem;\\n" else
              result + "right: " + pxOpt.get.toDouble + "px;" + "\\n"
          case "right" if (optCal==Some("+")) =>
            // NOTE(review): px branch negates the offset ("right: -Npx") but the
            // responsive branch does not — verify which is intended.
            if(ifResponsive) result + "right: " + pxOpt.get.toDouble / SCREEN_WIDTH + "rem;\\n" else
              result + "right: -" + pxOpt.get.toDouble + "px;" + "\\n"
          case "center" => {
            // Both branches are identical; the position check is vestigial.
            if(result.contains("position"))
              result + "margin-left: auto;\\nmargin-right: auto;\\n"
            else
              result + "margin-left: auto;\\nmargin-right: auto;\\n"
          }
        }
      case XIdent(v@StringIdent(px)) =>
        if(ifResponsive) result + "left: " + px.toDouble / SCREEN_WIDTH + "rem;\\n" else
          result + "left: " + px + "px;" + "\\n"
      case YIdent(v@StringIdent(px)) =>
        if(ifResponsive) result + "top: " + px.toDouble / SCREEN_WIDTH + "rem;\\n" else
          result + "top: " + px + "px;" + "\\n"
      case VisibleIdent(isVisible) if isVisible == false =>
        // NOTE(review): "display: hidden" is not valid CSS — presumably
        // "display: none" or "visibility: hidden" was intended; verify.
        result + "display: " + "hidden;\\n"
      case BorderRadiusIdent(v@StringIdent(value)) =>
        if(ifResponsive) result + "border-radius: " + value.toDouble / SCREEN_WIDTH + "rem;\\n" else
          result + "border-radius: " + value + "px;\\n"
      case LineHeightIdent(v) =>
        if(ifResponsive) result + "line-height: " + v.toDouble / SCREEN_WIDTH + "rem;\\n" else
          result + "line-height: " + v + "px;\\n"
      case StyleFontSizeIdent(v) =>
        if(ifResponsive) result + "font-size: " + v.toDouble / SCREEN_WIDTH + "rem;\\n" else
          result + "font-size: " + v + "px;\\n"
      case MarginBottomIdent(v@StringIdent(value)) =>
        if(ifResponsive) result + "margin-bottom: " + value.toDouble / SCREEN_WIDTH + "rem;\\n" else
          result + "margin-bottom: " + value +"px;\\n"
      case MarginTopIdent(v@StringIdent(value)) =>
        if(ifResponsive) result + "margin-top: " + value.toDouble / SCREEN_WIDTH + "rem;\\n" else
          result + "margin-top: " + value +"px;\\n"
      case MarginLeftIdent(v@StringIdent(value)) =>
        if(ifResponsive) result + "margin-left: " + value.toDouble / SCREEN_WIDTH + "rem;\\n" else
          result + "margin-left: " + value +"px;\\n"
      case MarginRightIdent(v@StringIdent(value)) =>
        if(ifResponsive) result + "margin-right: " + value.toDouble / SCREEN_WIDTH + "rem;\\n" else
          result + "margin-right: " + value +"px;\\n"
      case PaddingBottomIdent(v@StringIdent(value)) =>
        if(ifResponsive) result + "padding-bottom: " + value.toDouble / SCREEN_WIDTH + "rem;\\n" else
          result + "padding-bottom: " + value +"px;\\n"
      case PaddingRightIdent(v@StringIdent(value)) =>
        if(ifResponsive) result + "padding-right: " + value.toDouble / SCREEN_WIDTH + "rem;\\n" else
          result + "padding-right: " + value +"px;\\n"
      case PaddingTopIdent(v@StringIdent(value)) =>
        if(ifResponsive) result + "padding-top: " + value.toDouble / SCREEN_WIDTH + "rem;\\n" else
          result + "padding-top: " + value +"px;\\n"
      // NOTE(review): unreachable — PaddingRightIdent is already matched above.
      case PaddingRightIdent(v@StringIdent(value)) =>
        if(ifResponsive) result + "padding-right: " + value.toDouble / SCREEN_WIDTH + "rem;\\n" else
          result + "padding-right: " + value +"px;\\n"
      case PaddingLeftIdent(v@StringIdent(value)) =>
        if(ifResponsive) result + "padding-left: " + value.toDouble / SCREEN_WIDTH + "rem;\\n" else
          result + "padding-left: " + value +"px;\\n"
      case BorderColorIdent(v) =>
        // NOTE(review): v.toDouble will throw for non-numeric colors in responsive
        // mode, and dividing a color by screen width is suspect — verify.
        if(ifResponsive) result + "border-color: " + v.toDouble / SCREEN_WIDTH + ";\\n" else
          result + "border-color: " + v +";\\n"
      case FontColorIdent(v) =>
        result + "color: " + v + ";\\n"
      case OpacityIdent(v) =>
        result + "opacity: " + v + ";\\n"
      case _ =>
        result;
    })
}
/** Walks the symbol tree rooted at `initialSym` and emits both a Snabbdom vnode
 *  expression (html) and the matching CSS rules (css) as a ParseResult.
 *
 *  NOTE(review): `factorialAcc` is a misleading name — this is not a factorial,
 *  it is an explicit-stack, depth-indexed tree traversal: `hashMap(depth)` holds
 *  the yet-unvisited siblings at that depth, `current` is the current depth.
 *
 *  @param initialSym   root symbol (typically the parsed document/package)
 *  @param framerConfig configuration forwarded implicitly to the CSS printers
 */
final def printSymbolVNode(initialSym: Symbol, framerConfig: FramerConfig): ParseResult = {
  implicit val framer:FramerConfig = framerConfig;
  @tailrec def factorialAcc(current: Int, hashMap: mutable.HashMap[Int, mutable.Stack[Symbol]], parseResult: ParseResult): ParseResult = {
    // Depth 0 means the root's sibling list is exhausted: traversal is done.
    if (current.equals(0)) {
      ParseResult(parseResult.html, parseResult.css)
    }
    else {
      hashMap(current).length match {
        // Current depth exhausted: close the open vnode child array and pop up.
        case a: Int if a == 0 && current > 0 =>
          factorialAcc(current - 1, hashMap, ParseResult(parseResult.html +printTab(current )+"])\\n" , parseResult.css))
        // Exactly one sibling left at this depth (the last child).
        case a: Int if a == 1 =>
          val a = hashMap(current).pop()
          a match {
            case layer: PackageSymbol =>
              layer.members.isEmpty match {
                case true =>
                  factorialAcc(current - 1, hashMap, ParseResult(parseResult.html , parseResult.css))
                case false =>
                  // Descend: push the member list as the next depth's stack.
                  // NOTE(review): `+=` mutates hashMap in place and returns it,
                  // so `nh` is the SAME map — the rename is cosmetic.
                  val s = mutable.Stack(layer.members: _*)
                  val nh = hashMap.+=((current + 1, s))
                  factorialAcc(current + 1, nh, ParseResult(parseResult.html+
                    printTab(current)+"div(\\"\\",{},["+"\\n" , parseResult.css))
              }
            case layer: ImageSymbol =>
              // Child selectors are scoped under the parent class when present.
              val parent = layer.parentOpt.map(s => "." + s + " > ").getOrElse("")
              factorialAcc(current - 1, hashMap, ParseResult(parseResult.html +printTab(current)+ "img(\\"."+layer.name.name+"\\",{"+" props: { src: \\""+layer.imageUrl+"\\"},"+params2CssStringVnode(layer.params,"")+"})\\n",
                params2CssString(layer.params, parseResult.css + parent + " ." ++ layer.name.name + "{" + "\\n") + "}\\n"))
            case layer: InputSymbol =>
              val parent = layer.parentOpt.map(s => "." + s + " > ").getOrElse("")
              // NOTE(review): selector order here is " ." + parent + name, unlike
              // the parent-first order used by the other cases — verify intended.
              factorialAcc(current - 1, hashMap, ParseResult(parseResult.html +printTab(current)+ "input(\\"."+layer.name.name+"\\",{"+" props: { type: \\""+layer.inputType+"\\", placeHolder: \\""+layer.value+"\\",},"+params2CssStringVnode(layer.params,"")+"})\\n",
                params2CssString(layer.params, parseResult.css + " ." + parent + layer.name.name + "{" + "\\n") + "}\\n"))
            case layer: TextSymbol =>
              val parent = layer.parentOpt.map(s => "." + s + " > ").getOrElse("")
              // NOTE(review): selector is emitted as a(.\\"name\\" — the dot sits
              // outside the quote, unlike the other vnode selectors; verify.
              factorialAcc(current - 1, hashMap, ParseResult(parseResult.html +printTab(current)+ "a(.\\""+layer.name.name+"\\",{"+params2CssStringVnode(layer.params,"")+"}, \\""+layer.value+"\\")\\n",
                params2CssString(layer.params, parseResult.css + parent + " ." ++ layer.name.name + "{" + "\\n") + "}\\n"))
            case layer: LayerSymbol =>
              val parent = layer.parentOpt.map(s => "." + s + " > ").getOrElse("")
              layer.members.isEmpty match {
                case true =>
                  // Leaf layer: emit a closed div vnode with empty children.
                  factorialAcc(current - 1, hashMap, ParseResult(parseResult.html +
                    printTab(current) +"div(\\"."+layer.name.name+"\\",{"+params2CssStringVnode(layer.params,"")+"},[]),\\n",
                    params2CssString(layer.params, parseResult.css + parent + "." ++ layer.name.name + "{" + "\\n") + "}\\n"))
                case false =>
                  // Open the child array and descend one level.
                  val s = mutable.Stack(layer.members: _*)
                  val nh = hashMap.+=((current + 1, s))
                  factorialAcc(current + 1, nh, ParseResult(parseResult.html +printTab(current)+
                    "div(\\"."+layer.name.name +"\\",{"+params2CssStringVnode(layer.params,"")+"},[\\n",
                    params2CssString(layer.params, parseResult.css + parent + "." ++ layer.name.name + "{" + "\\n") + "}\\n"))
              }
            // case layer: PageSymbol =>
            // layer.members.isEmpty match {
            // case true =>
            // factorialAcc(current - 1, hashMap, ParseResult(parseResult.html + printTab(current)+ "<div class=\\"" + layer.name.name + "\\"></div>\\n",
            // params2CssString(layer.params, "." ++ layer.name.name + "{" + "\\n") + "}\\n"))
            // case false =>
            // val s = mutable.Stack(layer.members: _*)
            // val nh = hashMap.+=((current + 1, s))
            // factorialAcc(current + 1, nh, ParseResult(parseResult.html +printTab(current)+ "<div class=\\"" + layer.name.name + "\\">\\n",
            // params2CssString(layer.params, "." ++ layer.name.name + "{" + "\\n") + "}\\n"))
            // }
            case layer: PageSymbol =>
              layer.members.isEmpty match {
                case true =>
                  factorialAcc(current - 1, hashMap, ParseResult(parseResult.html+
                    printTab(current)+"div(\\"\\",{},[])"+"\\n" ,
                    params2CssString(layer.params, "." ++ layer.name.name + "{" + "\\n") + "}\\n"))
                case false =>
                  val s = mutable.Stack(layer.members: _*)
                  val nh = hashMap.+=((current + 1, s))
                  factorialAcc(current + 1, nh, ParseResult(parseResult.html+
                    printTab(current)+"div(\\"\\",{},["+"\\n" ,
                    params2CssString(layer.params, "." ++ layer.name.name + "{" + "\\n") + "}\\n"))
              }
            // Fallback for unknown symbol kinds: emit HTML (not vnode) markup.
            // NOTE(review): inconsistent with the vnode output around it — verify.
            case sy: Symbol =>
              factorialAcc(current - 1, hashMap, ParseResult(parseResult.html +printTab(current)+ "<div class=\\"" + sy.name.name + "\\"></div>\\n", parseResult.css))
          }
        // More than one sibling remains: same emission, but stay at this depth.
        // NOTE(review): unlike the a == 1 branch, Image/Input/Text symbols are
        // only handled in commented-out code here and fall into the generic
        // `symbol: Symbol` no-op case — they are silently dropped; verify.
        case a: Int if a > 1 =>
          val layer = hashMap(current).pop()
          layer match {
            // case layer: ImageSymbol =>
            // val parent = layer.parentOpt.map(s => "." + s + " > ").getOrElse("")
            // factorialAcc(current, hashMap, ParseResult(parseResult.html +printTab(current)+ "<img class=\\"" + layer.name.name + "\\"" + " src=\\""+ transformUrl(layer.imageUrl,framerConfig.projectId) + "\\" />\\n",
            // params2CssString(layer.params, parseResult.css + parent + " ." ++ layer.name.name + "{" + "\\n") + "}\\n"))
            // case layer: InputSymbol =>
            // val parent = layer.parentOpt.map(s => "." + s + " > ").getOrElse("")
            // factorialAcc(current, hashMap, ParseResult(parseResult.html +printTab(current)+ "<input type=\\""+ layer.inputType+"\\" placeHolder=\\""+layer.value + "\\" class=\\"" + layer.name.name + "\\" />\\n",
            // params2CssString(layer.params, parseResult.css + parent + " ." ++ layer.name.name + "{" + "\\n") + "}\\n"))
            // case layer: TextSymbol =>
            // val parent = layer.parentOpt.map(s => "." + s + " > ").getOrElse("")
            // factorialAcc(current, hashMap, ParseResult(parseResult.html +printTab(current)+ "<a class=\\"" + layer.name.name + "\\">" + layer.value + "</a>\\n",
            // params2CssString(layer.params, parseResult.css + parent + " ." ++ layer.name.name + "{" + "\\n") + "}\\n"))
            case layer: LayerSymbol =>
              val parent = layer.parentOpt.map(s => "." + s + " > ").getOrElse("")
              layer.members.isEmpty match {
                case true =>
                  factorialAcc(current, hashMap, ParseResult(parseResult.html +printTab(current)+
                    "div(\\"."+layer.name.name+"\\",{"+params2CssStringVnode(layer.params,"")+"},[]),\\n",
                    params2CssString(layer.params, parseResult.css + parent + "." ++ layer.name.name + "{" + "\\n") + "}\\n"))
                case false =>
                  val s = mutable.Stack(layer.members: _*)
                  val nh = hashMap.+=((current + 1, s))
                  factorialAcc(current + 1, nh, ParseResult(parseResult.html +printTab(current)+
                    "div(\\"."+layer.name.name+"\\", {"+params2CssStringVnode(layer.params,"")+"},[\\n",
                    params2CssString(layer.params, parseResult.css + parent + "." ++ layer.name.name + "{" + "\\n") + "}\\n"))
              }
            // case layer: PageSymbol =>
            // layer.members.isEmpty match {
            // case true =>
            // factorialAcc(current, hashMap, ParseResult(parseResult.html + printTab(current) + "<div class=\\"" + layer.name.name + "\\"></div>\\n",
            // params2CssString(layer.params, "." ++ layer.name.name + "{" + "\\n") + "}\\n"))
            // case false =>
            // val s = mutable.Stack(layer.members: _*)
            // val nh = hashMap.+=((current + 1, s))
            // factorialAcc(current + 1, nh, ParseResult(parseResult.html + printTab(current) + "<div class=\\"" + layer.name.name + "\\">\\n",
            // params2CssString(layer.params, "." ++ layer.name.name + "{" + "\\n") + "}\\n"))
            // }
            case symbol: Symbol =>
              factorialAcc(current, hashMap, parseResult)
          }
        case _ =>
          parseResult
      }
    }
  }
  // Seed the traversal: depth 1 holds the root symbol.
  val initialStack: mutable.Stack[Symbol] = mutable.Stack(initialSym)
  val hm: mutable.HashMap[Int, mutable.Stack[Symbol]] = new mutable.HashMap[Int, mutable.Stack[Symbol]]()
  hm.put(1, initialStack)
  factorialAcc(1, hm, ParseResult("", ""))
}
/** Walks the symbol tree rooted at `initialSym` and emits plain HTML markup
 *  (html) plus the matching CSS rules (css) as a ParseResult. Structure
 *  parallels printSymbolVNode but targets static HTML instead of vnodes.
 *
 *  NOTE(review): `factorialAcc` is a misleading name — this is an explicit-stack,
 *  depth-indexed tree traversal: `hashMap(depth)` holds the yet-unvisited
 *  siblings at that depth, `current` is the current depth.
 *
 *  @param initialSym   root symbol (typically the parsed document/package)
 *  @param framerConfig configuration forwarded implicitly to the CSS printers
 */
final def printSymbol(initialSym: Symbol, framerConfig: FramerConfig): ParseResult = {
  implicit val framer:FramerConfig = framerConfig;
  @tailrec def factorialAcc(current: Int, hashMap: mutable.HashMap[Int, mutable.Stack[Symbol]], parseResult: ParseResult): ParseResult = {
    // Depth 0 means the traversal is complete.
    if (current.equals(0)) {
      ParseResult(parseResult.html, parseResult.css)
    }
    else {
      hashMap(current).length match {
        // Current depth exhausted: close the enclosing </div> and pop up.
        // NOTE(review): guarded with current > 1 (the vnode twin uses > 0), so
        // an exhausted depth-1 stack falls through to `case _` below.
        case a: Int if a == 0 && current > 1 =>
          factorialAcc(current - 1, hashMap, ParseResult(parseResult.html +printTab(current -1 )+"</div>\\n" , parseResult.css))
        // Exactly one sibling left at this depth: emit it and also close the
        // parent tag (for the leaf cases) before popping up.
        case a: Int if a == 1 =>
          val a = hashMap(current).pop()
          a match {
            case layer: PackageSymbol =>
              layer.members.isEmpty match {
                case true =>
                  factorialAcc(current - 1, hashMap, ParseResult(parseResult.html , parseResult.css))
                case false =>
                  // Descend: push the member list as the next depth's stack.
                  // NOTE(review): `+=` mutates hashMap in place and returns it,
                  // so `nh` is the SAME map — the rename is cosmetic.
                  val s = mutable.Stack(layer.members: _*)
                  val nh = hashMap.+=((current + 1, s))
                  factorialAcc(current + 1, nh, ParseResult(parseResult.html+ printTab(current)+"<div>\\n" , parseResult.css))
              }
            case layer: ImageSymbol =>
              // Child selectors are scoped under the parent class when present.
              val parent = layer.parentOpt.map(s => "." + s + " > ").getOrElse("")
              // NOTE(review): missing space before src= (renders class="x"src=...)
              // — compare the a > 1 ImageSymbol branch which has " src=". Also no
              // closing </div> is emitted here, unlike the Input/Text cases.
              factorialAcc(current - 1, hashMap, ParseResult(parseResult.html + printTab(current)+ "<img class=\\"" + layer.name.name + "\\"" + "src=\\""+ transformUrl(layer.imageUrl,framerConfig.projectId) + "\\" />\\n",
                params2CssString(layer.params, parseResult.css + parent + " ." ++ layer.name.name + "{" + "\\n") + "}\\n"))
            case layer: InputSymbol =>
              val parent = layer.parentOpt.map(s => "." + s + " > ").getOrElse("")
              factorialAcc(current - 1, hashMap, ParseResult(parseResult.html +printTab(current)+ "<input type=\\""+layer.inputType + "\\" placeHolder=\\""+layer.value + "\\" class=\\"" + layer.name.name + "\\" />\\n" +
                printTab(current-1) +
                "</div>\\n",
                params2CssString(layer.params, parseResult.css + parent + " ." ++ layer.name.name + "{" + "\\n") + "}\\n"))
            case layer: TextSymbol =>
              val parent = layer.parentOpt.map(s => "." + s + " > ").getOrElse("")
              factorialAcc(current - 1, hashMap, ParseResult(parseResult.html +printTab(current)+ "<a class=\\"" + layer.name.name + "\\">" + layer.value + "</a>\\n" +
                printTab(current-1) +
                "</div>\\n",
                params2CssString(layer.params, parseResult.css + parent + " ." ++ layer.name.name + "{" + "\\n") + "}\\n"))
            case layer: LayerSymbol =>
              val parent = layer.parentOpt.map(s => "." + s + " > ").getOrElse("")
              layer.members.isEmpty match {
                case true =>
                  // Leaf layer: empty div plus the parent's closing tag.
                  factorialAcc(current - 1, hashMap, ParseResult(parseResult.html +printTab(current)+ "<div class=\\"" + layer.name.name + "\\"></div>\\n"+ printTab(current-1)+ "</div>\\n",
                    params2CssString(layer.params, parseResult.css + parent + "." ++ layer.name.name + "{" + "\\n") + "}\\n"))
                case false =>
                  // Open the div and descend one level.
                  val s = mutable.Stack(layer.members: _*)
                  val nh = hashMap.+=((current + 1, s))
                  factorialAcc(current + 1, nh, ParseResult(parseResult.html +printTab(current)+ "<div class=\\"" + layer.name.name + "\\">\\n",
                    params2CssString(layer.params, parseResult.css + parent + "." ++ layer.name.name + "{" + "\\n") + "}\\n"))
              }
            case layer: PageSymbol =>
              // NOTE(review): PageSymbol CSS starts from a fresh selector string
              // rather than appending to parseResult.css — earlier CSS is
              // discarded for this branch; verify intended.
              layer.members.isEmpty match {
                case true =>
                  factorialAcc(current - 1, hashMap, ParseResult(parseResult.html + printTab(current)+ "<div class=\\"" + layer.name.name + "\\"></div>\\n",
                    params2CssString(layer.params, "." ++ layer.name.name + "{" + "\\n") + "}\\n"))
                case false =>
                  val s = mutable.Stack(layer.members: _*)
                  val nh = hashMap.+=((current + 1, s))
                  factorialAcc(current + 1, nh, ParseResult(parseResult.html +printTab(current)+ "<div class=\\"" + layer.name.name + "\\">\\n",
                    params2CssString(layer.params, "." ++ layer.name.name + "{" + "\\n") + "}\\n"))
              }
            // Fallback for unknown symbol kinds: bare empty div, no CSS.
            case sy: Symbol =>
              factorialAcc(current - 1, hashMap, ParseResult(parseResult.html +printTab(current)+ "<div class=\\"" + sy.name.name + "\\"></div>\\n", parseResult.css))
          }
        // More than one sibling remains: same emission, but stay at this depth
        // (no parent close) so the remaining siblings are processed next.
        case a: Int if a > 1 =>
          val layer = hashMap(current).pop()
          layer match {
            case layer: ImageSymbol =>
              val parent = layer.parentOpt.map(s => "." + s + " > ").getOrElse("")
              factorialAcc(current, hashMap, ParseResult(parseResult.html +printTab(current)+ "<img class=\\"" + layer.name.name + "\\"" + " src=\\""+ transformUrl(layer.imageUrl,framerConfig.projectId) + "\\" />\\n",
                params2CssString(layer.params, parseResult.css + parent + " ." ++ layer.name.name + "{" + "\\n") + "}\\n"))
            case layer: InputSymbol =>
              val parent = layer.parentOpt.map(s => "." + s + " > ").getOrElse("")
              factorialAcc(current, hashMap, ParseResult(parseResult.html +printTab(current)+ "<input type=\\""+ layer.inputType+"\\" placeHolder=\\""+layer.value + "\\" class=\\"" + layer.name.name + "\\" />\\n",
                params2CssString(layer.params, parseResult.css + parent + " ." ++ layer.name.name + "{" + "\\n") + "}\\n"))
            case layer: TextSymbol =>
              val parent = layer.parentOpt.map(s => "." + s + " > ").getOrElse("")
              factorialAcc(current, hashMap, ParseResult(parseResult.html +printTab(current)+ "<a class=\\"" + layer.name.name + "\\">" + layer.value + "</a>\\n",
                params2CssString(layer.params, parseResult.css + parent + " ." ++ layer.name.name + "{" + "\\n") + "}\\n"))
            case layer: LayerSymbol =>
              val parent = layer.parentOpt.map(s => "." + s + " > ").getOrElse("")
              layer.members.isEmpty match {
                case true =>
                  factorialAcc(current, hashMap, ParseResult(parseResult.html +printTab(current)+ "<div class=\\"" + layer.name.name + "\\"></div>\\n",
                    params2CssString(layer.params, parseResult.css + parent + "." ++ layer.name.name + "{" + "\\n") + "}\\n"))
                case false =>
                  val s = mutable.Stack(layer.members: _*)
                  val nh = hashMap.+=((current + 1, s))
                  factorialAcc(current + 1, nh, ParseResult(parseResult.html +printTab(current)+ "<div class=\\"" + layer.name.name + "\\">\\n",
                    params2CssString(layer.params, parseResult.css + parent + "." ++ layer.name.name + "{" + "\\n") + "}\\n"))
              }
            case layer: PageSymbol =>
              // NOTE(review): fresh CSS string here too (see a == 1 PageSymbol).
              layer.members.isEmpty match {
                case true =>
                  factorialAcc(current, hashMap, ParseResult(parseResult.html + printTab(current) + "<div class=\\"" + layer.name.name + "\\"></div>\\n",
                    params2CssString(layer.params, "." ++ layer.name.name + "{" + "\\n") + "}\\n"))
                case false =>
                  val s = mutable.Stack(layer.members: _*)
                  val nh = hashMap.+=((current + 1, s))
                  factorialAcc(current + 1, nh, ParseResult(parseResult.html + printTab(current) + "<div class=\\"" + layer.name.name + "\\">\\n",
                    params2CssString(layer.params, "." ++ layer.name.name + "{" + "\\n") + "}\\n"))
              }
            case symbol: Symbol =>
              factorialAcc(current, hashMap, parseResult)
          }
        case _ =>
          parseResult
      }
    }
  }
  // Seed the traversal: depth 1 holds the root symbol.
  val initialStack: mutable.Stack[Symbol] = mutable.Stack(initialSym)
  val hm: mutable.HashMap[Int, mutable.Stack[Symbol]] = new mutable.HashMap[Int, mutable.Stack[Symbol]]()
  hm.put(1, initialStack)
  factorialAcc(1, hm, ParseResult("", "")) // avoid adding an extra outer <div> wrapper
}
}
| ranglang/dumbframer | src/main/scala/importer/sc/Printer.scala | Scala | mit | 39,831 |
package helper.pdfpreprocessing.stats
import java.awt.Color
import helper.pdfpreprocessing.entities._
/**
* Created by pdeboer on 20/10/15.
*/
/** Pairs every statistical-method occurrence with every occurrence of each of the
 * method's required assumptions, producing one highlightable `PDFPermutation` per pair.
 *
 * NOTE(review): `_missingAssumptions` is filled only as a side effect of forcing the
 * lazy `permutations` value, so `missingAssumptions` stays empty until `permutations`
 * has been evaluated at least once — confirm callers evaluate in that order.
 */
class StatTermPermuter(occurrences: Iterable[StatTermOccurrence]) {
// Accumulates (method occurrence, assumption) pairs for which no assumption
// occurrence exists in the document; populated while computing `permutations`.
private var _missingAssumptions = List.empty[(StatTermOccurrence, StatisticalAssumption)]
// Read-only view of the accumulator (see NOTE above about evaluation order).
def missingAssumptions = _missingAssumptions
lazy val permutations: List[PDFPermutation] = {
// Occurrences whose term is a method; assumptions are grouped by their term so
// each method can look up all occurrences of a required assumption in O(1).
val methods = occurrences.filter(_.term.isInstanceOf[StatisticalMethod])
val assumptionsMap: Map[StatisticalAssumption, Iterable[StatTermOccurrence]] = occurrences.filter(_.term.isInstanceOf[StatisticalAssumption])
.groupBy(_.term).map(a => a._1.asInstanceOf[StatisticalAssumption] -> a._2)
methods.flatMap(methodOccurrence => {
val termOfMethod = methodOccurrence.term.asInstanceOf[StatisticalMethod]
termOfMethod.assumptions.flatMap(a => {
// getOrElse's default is by-name: the missing-assumption record is only
// appended when the map truly lacks an entry for `a`.
assumptionsMap.getOrElse(a, {
_missingAssumptions = (methodOccurrence, a) :: _missingAssumptions
Nil
}).map(assumptionOccurrence => {
// Each permutation highlights the assumption and the method in the same paper.
PDFPermutation(assumptionOccurrence.paper, List(
PDFHighlightTerm.fromTermOccurrence(assumptionOccurrence),
PDFHighlightTerm.fromTermOccurrence(methodOccurrence)
))
})
})
}).toList
}
}
/** One candidate (method, assumption) highlight pairing inside a single paper.
 *
 * `highlights` is expected to contain at least one method occurrence and one
 * assumption occurrence; `method`/`assumption` take the first of each kind.
 */
case class PDFPermutation(paper: Paper, highlights: List[PDFHighlightTerm]) {
/** The statistical method featured in this permutation. */
def method = firstTermOfKind(wantMethods = true).asInstanceOf[StatisticalMethod]
/** The statistical assumption featured in this permutation. */
def assumption = firstTermOfKind(wantMethods = false).asInstanceOf[StatisticalAssumption]
/** Occurrences whose term kind matches the flag: methods if true, assumptions if false. */
def getOccurrencesByType(getMethods: Boolean = true) = {
for (h <- highlights if h.occurrence.term.isStatisticalMethod == getMethods) yield h.occurrence
}
/** Span (max - min) between the page-offset-adjusted start indices of all highlights. */
def distanceBetweenMinMaxIndex = {
val positions = highlights.map { h => h.occurrence.inclPageOffset(h.occurrence.startIndex) }
positions.max - positions.min
}
// First term of the requested kind; assumes at least one such highlight exists.
private def firstTermOfKind(wantMethods: Boolean) = getOccurrencesByType(wantMethods).head.term
override def toString: String = s"Permutation($paper, $method, $assumption)"
}
/** A term occurrence together with the color used to highlight it in the PDF. */
case class PDFHighlightTerm(color: Color, occurrence: StatTermOccurrence)
object PDFHighlightTerm {
/** Wraps an occurrence with its highlight color: methods are yellow, assumptions green.
 * Any other term subtype raises a MatchError, exactly as before. */
def fromTermOccurrence(o: StatTermOccurrence) = {
val highlightColor = o.term match {
case _: StatisticalMethod => Color.yellow
case _: StatisticalAssumption => Color.green
}
PDFHighlightTerm(highlightColor, o)
}
}
| manuelroesch/PaperValidator | app/helper/pdfpreprocessing/stats/StatTermPermuter.scala | Scala | mit | 2,294 |
//======================================================================================================================
// Facsimile: A Discrete-Event Simulation Library
// Copyright © 2004-2020, Michael J Allen.
//
// This file is part of Facsimile.
//
// Facsimile is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later
// version.
//
// Facsimile is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
// warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
// details.
//
// You should have received a copy of the GNU Lesser General Public License along with Facsimile. If not, see:
//
// http://www.gnu.org/licenses/lgpl.
//
// The developers welcome all comments, suggestions and offers of assistance. For further information, please visit the
// project home page at:
//
// http://facsim.org/
//
// Thank you for your interest in the Facsimile project!
//
// IMPORTANT NOTE: All patches (modifications to existing files and/or the addition of new files) submitted for
// inclusion as part of the official Facsimile code base, must comply with the published Facsimile Coding Standards. If
// your code fails to comply with the standard, then your patches will be rejected. For further information, please
// visit the coding standards at:
//
// http://facsim.org/Documentation/CodingStandards/
//======================================================================================================================
// Scala source file belonging to the org.facsim.sfx.beans.value package.
//======================================================================================================================
package org.facsim.sfx.beans
/** Utilities relating to the ''JavaFX'' [[javafx.beans.value]] package.
 *
 *  Currently empty: this package object exists as the documented anchor for the
 *  `org.facsim.sfx.beans.value` package and as a home for future utilities.
 *
 *  @since 0.0
 */
package object value
| MichaelJAllen/facsimile | facsimile-sfx/src/main/scala/org/facsim/sfx/beans/value/package.scala | Scala | lgpl-3.0 | 2,053 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import collection.mutable
import serializer.Serializer
import akka.actor.{Actor, ActorRef, Props, ActorSystemImpl, ActorSystem}
import akka.remote.RemoteActorRefProvider
import org.apache.spark.broadcast.BroadcastManager
import org.apache.spark.metrics.MetricsSystem
import org.apache.spark.storage.{BlockManagerMasterActor, BlockManager, BlockManagerMaster}
import org.apache.spark.network.ConnectionManager
import org.apache.spark.serializer.{Serializer, SerializerManager}
import org.apache.spark.util.{Utils, AkkaUtils}
import org.apache.spark.api.python.PythonWorkerFactory
import com.google.common.collect.MapMaker
/**
* Holds all the runtime environment objects for a running Spark instance (either master or worker),
* including the serializer, Akka actor system, block manager, map output tracker, etc. Currently
* Spark code finds the SparkEnv through a thread-local variable, so each thread that accesses these
* objects needs to have the right SparkEnv set. You can get the current environment with
* SparkEnv.get (e.g. after creating a SparkContext) and set it with SparkEnv.set.
*/
class SparkEnv (
    val executorId: String,
    val actorSystem: ActorSystem,
    val serializerManager: SerializerManager,
    val serializer: Serializer,
    val closureSerializer: Serializer,
    val cacheManager: CacheManager,
    val mapOutputTracker: MapOutputTracker,
    val shuffleFetcher: ShuffleFetcher,
    val broadcastManager: BroadcastManager,
    val blockManager: BlockManager,
    val connectionManager: ConnectionManager,
    val httpFileServer: HttpFileServer,
    val sparkFilesDir: String,
    val metricsSystem: MetricsSystem) {
  // One Python worker factory per (python executable, environment) pair, so tasks
  // using identical interpreter settings share the same factory.
  private val pythonWorkers = mutable.HashMap[(String, Map[String, String]), PythonWorkerFactory]()
  // A general, soft-reference map for metadata needed during HadoopRDD split computation
  // (e.g., HadoopFileRDD uses this to cache JobConfs and InputFormats).
  private[spark] val hadoopJobMetadata = new MapMaker().softValues().makeMap[String, Any]()
  /** Shuts down every service owned by this environment: Python workers, file server,
   * trackers, broadcast and block managers, metrics, and finally the actor system.
   */
  def stop() {
    pythonWorkers.foreach { case(key, worker) => worker.stop() }
    httpFileServer.stop()
    mapOutputTracker.stop()
    shuffleFetcher.stop()
    broadcastManager.stop()
    blockManager.stop()
    blockManager.master.stop()
    metricsSystem.stop()
    actorSystem.shutdown()
    // Unfortunately Akka's awaitTermination doesn't actually wait for the Netty server to shut
    // down, but let's call it anyway in case it gets fixed in a later release
    actorSystem.awaitTermination()
  }
  /** Returns a socket connected to a (possibly freshly created) Python worker for the
   * given interpreter/environment combination. Synchronized so concurrent tasks share
   * one factory per key instead of racing to create duplicates.
   */
  def createPythonWorker(pythonExec: String, envVars: Map[String, String]): java.net.Socket = {
    synchronized {
      val key = (pythonExec, envVars)
      pythonWorkers.getOrElseUpdate(key, new PythonWorkerFactory(pythonExec, envVars)).create()
    }
  }
}
object SparkEnv extends Logging {
  // Thread-local environment, plus a process-wide fallback for threads that never
  // called set() themselves (e.g. threads spawned by user code).
  private val env = new ThreadLocal[SparkEnv]
  @volatile private var lastSetSparkEnv : SparkEnv = _
  /** Records `e` both as this thread's SparkEnv and as the process-wide fallback. */
  def set(e: SparkEnv) {
	  lastSetSparkEnv = e
    env.set(e)
  }
  /**
   * Returns the ThreadLocal SparkEnv, if non-null. Else returns the SparkEnv
   * previously set in any thread.
   */
  def get: SparkEnv = {
    Option(env.get()).getOrElse(lastSetSparkEnv)
  }
  /**
   * Returns the ThreadLocal SparkEnv (may be null if never set on this thread).
   */
  def getThreadLocal : SparkEnv = {
    env.get()
  }
  /** Builds a complete SparkEnv (actor system, serializers, block/broadcast managers,
   * map-output tracker, shuffle fetcher, file server, metrics) from JVM system
   * properties. `isDriver` selects between creating master actors locally and
   * looking them up remotely on the driver.
   */
  def createFromSystemProperties(
      executorId: String,
      hostname: String,
      port: Int,
      isDriver: Boolean,
      isLocal: Boolean): SparkEnv = {
    val (actorSystem, boundPort) = AkkaUtils.createActorSystem("spark", hostname, port)
    // Bit of a hack: If this is the driver and our port was 0 (meaning bind to any free port),
    // figure out which port number Akka actually bound to and set spark.driver.port to it.
    if (isDriver && port == 0) {
      System.setProperty("spark.driver.port", boundPort.toString)
    }
    // set only if unset until now.
    if (System.getProperty("spark.hostPort", null) == null) {
      if (!isDriver){
        // unexpected: executors should always have spark.hostPort set by their launcher
        Utils.logErrorWithStack("Unexpected NOT to have spark.hostPort set")
      }
      Utils.checkHost(hostname)
      System.setProperty("spark.hostPort", hostname + ":" + boundPort)
    }
    val classLoader = Thread.currentThread.getContextClassLoader
    // Create an instance of the class named by the given Java system property, or by
    // defaultClassName if the property is not set, and return it as a T
    def instantiateClass[T](propertyName: String, defaultClassName: String): T = {
      val name = System.getProperty(propertyName, defaultClassName)
      Class.forName(name, true, classLoader).newInstance().asInstanceOf[T]
    }
    val serializerManager = new SerializerManager
    val serializer = serializerManager.setDefault(
      System.getProperty("spark.serializer", "org.apache.spark.serializer.JavaSerializer"))
    val closureSerializer = serializerManager.get(
      System.getProperty("spark.closure.serializer", "org.apache.spark.serializer.JavaSerializer"))
    // On the driver: create and register the named actor. On an executor: look the
    // same actor up remotely using the driver's host/port system properties.
    def registerOrLookup(name: String, newActor: => Actor): ActorRef = {
      if (isDriver) {
        logInfo("Registering " + name)
        actorSystem.actorOf(Props(newActor), name = name)
      } else {
        val driverHost: String = System.getProperty("spark.driver.host", "localhost")
        val driverPort: Int = System.getProperty("spark.driver.port", "7077").toInt
        Utils.checkHost(driverHost, "Expected hostname")
        val url = "akka://spark@%s:%s/user/%s".format(driverHost, driverPort, name)
        logInfo("Connecting to " + name + ": " + url)
        actorSystem.actorFor(url)
      }
    }
    val blockManagerMaster = new BlockManagerMaster(registerOrLookup(
      "BlockManagerMaster",
      new BlockManagerMasterActor(isLocal)))
    val blockManager = new BlockManager(executorId, actorSystem, blockManagerMaster, serializer)
    val connectionManager = blockManager.connectionManager
    val broadcastManager = new BroadcastManager(isDriver)
    val cacheManager = new CacheManager(blockManager)
    // Have to assign trackerActor after initialization as MapOutputTrackerActor
    // requires the MapOutputTracker itself
    val mapOutputTracker = new MapOutputTracker()
    mapOutputTracker.trackerActor = registerOrLookup(
      "MapOutputTracker",
      new MapOutputTrackerActor(mapOutputTracker))
    val shuffleFetcher = instantiateClass[ShuffleFetcher](
      "spark.shuffle.fetcher", "org.apache.spark.BlockStoreShuffleFetcher")
    val httpFileServer = new HttpFileServer()
    httpFileServer.initialize()
    System.setProperty("spark.fileserver.uri", httpFileServer.serverUri)
    val metricsSystem = if (isDriver) {
      MetricsSystem.createMetricsSystem("driver")
    } else {
      MetricsSystem.createMetricsSystem("executor")
    }
    metricsSystem.start()
    // Set the sparkFiles directory, used when downloading dependencies. In local mode,
    // this is a temporary directory; in distributed mode, this is the executor's current working
    // directory.
    val sparkFilesDir: String = if (isDriver) {
      Utils.createTempDir().getAbsolutePath
    } else {
      "."
    }
    // Warn about deprecated spark.cache.class property
    if (System.getProperty("spark.cache.class") != null) {
      logWarning("The spark.cache.class property is no longer being used! Specify storage " +
        "levels using the RDD.persist() method instead.")
    }
    new SparkEnv(
      executorId,
      actorSystem,
      serializerManager,
      serializer,
      closureSerializer,
      cacheManager,
      mapOutputTracker,
      shuffleFetcher,
      broadcastManager,
      blockManager,
      connectionManager,
      httpFileServer,
      sparkFilesDir,
      metricsSystem)
  }
}
| windeye/spark | core/src/main/scala/org/apache/spark/SparkEnv.scala | Scala | apache-2.0 | 8,567 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package vta.dpi
import chisel3._
import chisel3.util._
import vta.util.config._
import vta.interface.axi._
import vta.shell._
/** Sim DPI module.
*
* Wrapper for Sim Verilog DPI module.
*/
class VTASimDPI extends BlackBox with HasBlackBoxResource {
  // Verilog ports of the DPI black box: clock and reset inputs, plus a dpi_wait
  // output — presumably asserted while the simulator host stalls; see the
  // Verilog source for the authoritative semantics.
  val io = IO(new Bundle {
    val clock = Input(Clock())
    val reset = Input(Bool())
    val dpi_wait = Output(Bool())
  })
  // Implementation is supplied by this Verilog resource on the classpath.
  setResource("/verilog/VTASimDPI.v")
}
| Huyuwei/tvm | vta/hardware/chisel/src/main/scala/dpi/VTASimDPI.scala | Scala | apache-2.0 | 1,233 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.dynamicpruning
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.planning.ExtractEquiJoinKeys
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.execution.datasources.{HadoopFsRelation, LogicalRelation}
import org.apache.spark.sql.internal.SQLConf
/**
* Dynamic partition pruning optimization is performed based on the type and
* selectivity of the join operation. During query optimization, we insert a
* predicate on the partitioned table using the filter from the other side of
* the join and a custom wrapper called DynamicPruning.
*
* The basic mechanism for DPP inserts a duplicated subquery with the filter from the other side,
* when the following conditions are met:
* (1) the table to prune is partitioned by the JOIN key
* (2) the join operation is one of the following types: INNER, LEFT SEMI (partitioned on left),
* LEFT OUTER (partitioned on right), or RIGHT OUTER (partitioned on left)
*
* In order to enable partition pruning directly in broadcasts, we use a custom DynamicPruning
* clause that incorporates the In clause with the subquery and the benefit estimation.
* During query planning, when the join type is known, we use the following mechanism:
* (1) if the join is a broadcast hash join, we replace the duplicated subquery with the reused
* results of the broadcast,
* (2) else if the estimated benefit of partition pruning outweighs the overhead of running the
* subquery query twice, we keep the duplicated subquery
* (3) otherwise, we drop the subquery.
*/
object PartitionPruning extends Rule[LogicalPlan] with PredicateHelper {
  /**
   * Search the partitioned table scan for a given partition column in a logical plan.
   *
   * Tracks the lineage of expression `a` down through `plan`; returns the
   * `LogicalRelation` of a `HadoopFsRelation` scan when `a` resolves entirely to
   * that relation's partition columns, or None otherwise.
   */
  def getPartitionTableScan(a: Expression, plan: LogicalPlan): Option[LogicalRelation] = {
    val srcInfo: Option[(Expression, LogicalPlan)] = findExpressionAndTrackLineageDown(a, plan)
    srcInfo.flatMap {
      case (resExp, l: LogicalRelation) =>
        l.relation match {
          case fs: HadoopFsRelation =>
            val partitionColumns = AttributeSet(
              l.resolve(fs.partitionSchema, fs.sparkSession.sessionState.analyzer.resolver))
            if (resExp.references.subsetOf(partitionColumns)) {
              // Non-local return: exits getPartitionTableScan from inside the closure.
              return Some(l)
            } else {
              None
            }
          case _ => None
        }
      case _ => None
    }
  }
  /**
   * Insert a dynamic partition pruning predicate on one side of the join using the filter on the
   * other side of the join.
   *  - to be able to identify this filter during query planning, we use a custom
   *    DynamicPruning expression that wraps a regular In expression
   *  - we also insert a flag that indicates if the subquery duplication is worthwhile and it
   *  should run regardless of the join strategy, or is too expensive and it should be run only if
   *  we can reuse the results of a broadcast
   */
  private def insertPredicate(
      pruningKey: Expression,
      pruningPlan: LogicalPlan,
      filteringKey: Expression,
      filteringPlan: LogicalPlan,
      joinKeys: Seq[Expression],
      hasBenefit: Boolean): LogicalPlan = {
    val reuseEnabled = SQLConf.get.exchangeReuseEnabled
    val index = joinKeys.indexOf(filteringKey)
    if (hasBenefit || reuseEnabled) {
      // insert a DynamicPruning wrapper to identify the subquery during query planning
      Filter(
        DynamicPruningSubquery(
          pruningKey,
          filteringPlan,
          joinKeys,
          index,
          !hasBenefit || SQLConf.get.dynamicPartitionPruningReuseBroadcastOnly),
        pruningPlan)
    } else {
      // abort dynamic partition pruning
      pruningPlan
    }
  }
  /**
   * Given an estimated filtering ratio we assume the partition pruning has benefit if
   * the size in bytes of the partitioned plan after filtering is greater than the size
   * in bytes of the plan on the other side of the join. We estimate the filtering ratio
   * using column statistics if they are available, otherwise we use the config value of
   * `spark.sql.optimizer.joinFilterRatio`.
   */
  private def pruningHasBenefit(
      partExpr: Expression,
      partPlan: LogicalPlan,
      otherExpr: Expression,
      otherPlan: LogicalPlan): Boolean = {
    // get the distinct counts of an attribute for a given table
    def distinctCounts(attr: Attribute, plan: LogicalPlan): Option[BigInt] = {
      plan.stats.attributeStats.get(attr).flatMap(_.distinctCount)
    }
    // the default filtering ratio when CBO stats are missing, but there is a
    // predicate that is likely to be selective
    val fallbackRatio = SQLConf.get.dynamicPartitionPruningFallbackFilterRatio
    // the filtering ratio based on the type of the join condition and on the column statistics
    val filterRatio = (partExpr.references.toList, otherExpr.references.toList) match {
      // filter out expressions with more than one attribute on any side of the operator
      case (leftAttr :: Nil, rightAttr :: Nil)
        if SQLConf.get.dynamicPartitionPruningUseStats =>
          // get the CBO stats for each attribute in the join condition
          val partDistinctCount = distinctCounts(leftAttr, partPlan)
          val otherDistinctCount = distinctCounts(rightAttr, otherPlan)
          val availableStats = partDistinctCount.isDefined && partDistinctCount.get > 0 &&
            otherDistinctCount.isDefined
          if (!availableStats) {
            fallbackRatio
          } else if (partDistinctCount.get.toDouble <= otherDistinctCount.get.toDouble) {
            // there is likely an estimation error, so we fallback
            fallbackRatio
          } else {
            // estimated fraction of partition-side rows filtered out by the join key
            1 - otherDistinctCount.get.toDouble / partDistinctCount.get.toDouble
          }
      case _ => fallbackRatio
    }
    // the pruning overhead is the total size in bytes of all scan relations
    val overhead = otherPlan.collectLeaves().map(_.stats.sizeInBytes).sum.toFloat
    filterRatio * partPlan.stats.sizeInBytes.toFloat > overhead.toFloat
  }
  /**
   * Returns whether an expression is likely to be selective
   */
  private def isLikelySelective(e: Expression): Boolean = e match {
    case Not(expr) => isLikelySelective(expr)
    case And(l, r) => isLikelySelective(l) || isLikelySelective(r)
    case Or(l, r) => isLikelySelective(l) && isLikelySelective(r)
    case Like(_, _, _) => true
    case _: BinaryComparison => true
    case _: In | _: InSet => true
    case _: StringPredicate => true
    case _ => false
  }
  /**
   * Search a filtering predicate in a given logical plan
   */
  private def hasSelectivePredicate(plan: LogicalPlan): Boolean = {
    plan.find {
      case f: Filter => isLikelySelective(f.condition)
      case _ => false
    }.isDefined
  }
  /**
   * To be able to prune partitions on a join key, the filtering side needs to
   * meet the following requirements:
   *   (1) it can not be a stream
   *   (2) it needs to contain a selective predicate used for filtering
   */
  private def hasPartitionPruningFilter(plan: LogicalPlan): Boolean = {
    !plan.isStreaming && hasSelectivePredicate(plan)
  }
  // Pruning the left side is sound for join types that cannot add left-side rows.
  private def canPruneLeft(joinType: JoinType): Boolean = joinType match {
    case Inner | LeftSemi | RightOuter => true
    case _ => false
  }
  // Pruning the right side is sound for join types that cannot add right-side rows.
  private def canPruneRight(joinType: JoinType): Boolean = joinType match {
    case Inner | LeftOuter => true
    case _ => false
  }
  // Core rewrite: for each equi-join, try to insert a DPP filter on whichever side
  // scans a partitioned table, using the other (filtered) side as the pruning source.
  private def prune(plan: LogicalPlan): LogicalPlan = {
    plan transformUp {
      // skip this rule if there's already a DPP subquery on the LHS of a join
      case j @ Join(Filter(_: DynamicPruningSubquery, _), _, _, _, _) => j
      case j @ Join(_, Filter(_: DynamicPruningSubquery, _), _, _, _) => j
      case j @ Join(left, right, joinType, Some(condition), hint) =>
        var newLeft = left
        var newRight = right
        // extract the left and right keys of the join condition
        val (leftKeys, rightKeys) = j match {
          case ExtractEquiJoinKeys(_, lkeys, rkeys, _, _, _, _) => (lkeys, rkeys)
          case _ => (Nil, Nil)
        }
        // checks if two expressions are on opposite sides of the join
        def fromDifferentSides(x: Expression, y: Expression): Boolean = {
          def fromLeftRight(x: Expression, y: Expression) =
            !x.references.isEmpty && x.references.subsetOf(left.outputSet) &&
              !y.references.isEmpty && y.references.subsetOf(right.outputSet)
          fromLeftRight(x, y) || fromLeftRight(y, x)
        }
        splitConjunctivePredicates(condition).foreach {
          case EqualTo(a: Expression, b: Expression)
            if fromDifferentSides(a, b) =>
            // normalize so that l refers to the left side and r to the right side
            val (l, r) = if (a.references.subsetOf(left.outputSet) &&
              b.references.subsetOf(right.outputSet)) {
              a -> b
            } else {
              b -> a
            }
            // there should be a partitioned table and a filter on the dimension table,
            // otherwise the pruning will not trigger
            var partScan = getPartitionTableScan(l, left)
            if (partScan.isDefined && canPruneLeft(joinType) &&
                hasPartitionPruningFilter(right)) {
              val hasBenefit = pruningHasBenefit(l, partScan.get, r, right)
              newLeft = insertPredicate(l, newLeft, r, right, rightKeys, hasBenefit)
            } else {
              partScan = getPartitionTableScan(r, right)
              if (partScan.isDefined && canPruneRight(joinType) &&
                  hasPartitionPruningFilter(left) ) {
                val hasBenefit = pruningHasBenefit(r, partScan.get, l, left)
                newRight = insertPredicate(r, newRight, l, left, leftKeys, hasBenefit)
              }
            }
          case _ =>
        }
        Join(newLeft, newRight, joinType, Some(condition), hint)
    }
  }
  override def apply(plan: LogicalPlan): LogicalPlan = plan match {
    // Do not rewrite subqueries.
    case s: Subquery if s.correlated => plan
    case _ if !SQLConf.get.dynamicPartitionPruningEnabled => plan
    case _ => prune(plan)
  }
}
| darionyaphet/spark | sql/core/src/main/scala/org/apache/spark/sql/dynamicpruning/PartitionPruning.scala | Scala | apache-2.0 | 11,082 |
package actors.serializers
import drt.shared.Alert
import org.specs2.mutable.Specification
import services.SDate
/** Round-trip test: an Alert serialised to its protobuf message and back must be unchanged. */
class AlertMessageConversionSpec extends Specification{
  "Given an alert I should serialise it and deserialise it and get back the same result" >> {
    val createdAt = SDate("2020-01-11T13:00").millisSinceEpoch
    val expiresAt = SDate("2020-01-11T14:00").millisSinceEpoch
    val original = Alert("title", "message", "warning", expiresAt, createdAt)
    // Serialise then deserialise in one expression; .get is acceptable here since
    // a failed conversion should fail the test anyway.
    val roundTripped =
      AlertMessageConversion.alertFromMessage(AlertMessageConversion.alertToMessage(original)).get
    roundTripped === original
  }
}
| UKHomeOffice/drt-scalajs-spa-exploration | server/src/test/scala/actors/serializers/AlertMessageConversionSpec.scala | Scala | apache-2.0 | 638 |
// Negative compilation test (tests/neg): `TC` expects a type constructor of shape
// `F[_, _[_]]`, which `Foo` (shape `F[_]`) cannot satisfy, so the `derives TC`
// clause must be rejected. The `// error` marker below is the expected-error
// annotation consumed by the test harness — do not remove it.
trait TC[F[_, _[_]]]
object TC {
  def derived[F[_, _[_]]]: TC[F] = ???
}
case class Foo[A](a: A) derives TC // error
| dotty-staging/dotty | tests/neg/i13487.scala | Scala | apache-2.0 | 119 |
/*******************************************************************************
* Copyright (c) 2013, 2015 EclipseSource.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
******************************************************************************/
package json
import java.lang.String
/** A JSON value holding a plain string.
 *
 * @param string the wrapped string content
 */
final class JsonString(val string: String) extends JsonValue {
  // This is the only JsonValue subtype for which isString() reports true.
  override def isString(): Boolean = true
}
| sjrd/scalajs-benchmarks | json/src/main/scala/json/JsonString.scala | Scala | bsd-3-clause | 1,417 |
package master
import Import._
import RestConnection.TeamRequest
import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestKit}
import org.scalatest.{MustMatchers, WordSpecLike}
import scala.concurrent.duration._
/**
* Created by yannick on 16.02.16.
*/
/** Integration-style test: sends a TeamRequest to a fresh UeberActor and waits for a reply.
 *
 * NOTE(review): the expectMsgPF block matches *any* message and only prints it, so
 * this test merely verifies that some reply arrives within 13 seconds — it asserts
 * nothing about the reply's content.
 */
class UeberActorTest extends TestKit(ActorSystem("testSys"))
with WordSpecLike
with MustMatchers
with ImplicitSender
with StopSystemAfterAll {
"An UeberActorMust" must {
// "read in an csv file and send back the best team" in {
// val ueberActor = system.actorOf(UeberActor.props, UeberActor.name)
// ueberActor ! TeamRequest(TestData.validPlayers, 2,Vector.empty[Int])
// expectMsgPF(13.seconds) {
// case a1 => println("ueberTest " + a1)
// }
// }
"tell me where the values get so big" in {
val ueberActor = system.actorOf(UeberActor.props, UeberActor.name)
// Shortened player list keeps the search space small enough for the 13s timeout.
ueberActor ! TeamRequest(TestData.validPlayersShort, 2,Vector.empty[Int])
expectMsgPF(13.seconds) {
case a1 => println("ueberTest " + a1)
}
}
}
}
} | yannick-cw/tournament_planer | hatplaner/src/test/scala/master/UeberActorTest.scala | Scala | mit | 1,066 |
package propertynder.extractors.seloger
import org.scalatest.{FlatSpec, Matchers}
import propertynder.model.Property
import scala.io.Source
import scala.xml.{XML => ScalaXML}
import XML._
import akka.http.scaladsl.model.Uri
/** Unit tests for the seloger.com XML extractor, driven by two recorded payloads:
 * a regular results page and a final (no-next-link) page.
 */
class XMLSpec extends FlatSpec with Matchers {
// Fixtures are loaded once at construction; paths are relative to the sbt project root.
val xml = ScalaXML.load(Source.fromFile("src/test/resources/seloger.xml").reader())
val lastPageXml = ScalaXML.load(Source.fromFile("src/test/resources/selogerLastPage.xml").reader())
"parseProperties" should "parse a XML payload from seloger.com" in {
// Only the first parsed property is pinned; it exercises every Property field.
parseProperties(xml).head should be(
Property(
"Appartement 3 pièces",
"http://www.seloger.com/annonces/achat/appartement/paris-9eme-75/117856511.htm?p=CCBPqSgIBo-wKSdA",
"",
Some(3),
Some(2),
Some(860000),
Some(76.0),
Some(75009),
None,
None
)
)
}
"parseNextRequest" should "extract the next request uri from the payload" in {
parseNextRequest(xml) should be(
Some(Uri("http://ws.seloger.com/search.xml?cp=75009&idtt=2&nb_chambres=2&SEARCHpg=2"))
)
}
it should "return None if it is the last page" in {
// The last-page fixture has no pagination link, signalling the crawl should stop.
parseNextRequest(lastPageXml) should be(
None
)
}
}
| ostapneko/propertynder | src/test/scala/propertynder/extractors/seloger/XMLSpec.scala | Scala | mit | 1,234 |
package org.raisercostin.jedi.impl
/** Null-checking helpers in the spirit of `Predef.require`: both raise an
 * `IllegalArgumentException` (via `require`) when the checked reference is null.
 * Arguments are by-name, so messages are only built on failure.
 */
object Predef2 {
  /** Rejects a null parameter, naming the offending parameter in the failure message. */
  def requireArgNotNull(arg: => AnyRef, name: => String = ""): Unit =
    require(arg != null, s"Parameter $name should not be null!")
  /** Rejects a null reference with a caller-supplied failure message. */
  def requireNotNull(arg: => AnyRef, message: => String = ""): Unit =
    require(arg != null, message)
}
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.2
* @date Mon Dec 28 15:57:04 EST 2015
* @see LICENSE (MIT style license file).
*/
package scalation.stat
import scala.math.{log, sqrt}
import scalation.linalgebra.VectorD
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Normality` object provides methods for testing Normality based on skewness
* and kurtosis. Such test are more suitable for large sample sizes where more
* powerful goodness-of-fit tests tend to frequently reject Normality.
* @see stats.stackexchange.com/questions/29731/regression-when-the-ols-residuals-are-not-normally-distributed
* @see stats.stackexchange.com/questions/2492/is-normality-testing-essentially-useless
* @see en.wikipedia.org/wiki/D%27Agostino%27s_K-squared_test
*/
object Normality
{
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Test vector data 'd' to see if its skewness is sufficiently close to Normal.
     *  Computes the transformed skewness statistic of D'Agostino's K-squared test
     *  (see the object-level doc link); under the Normality hypothesis the result
     *  is approximately standard Normal.
     *  @param d  the data vector to be tested for Normality
     */
    def normalSkew (d: VectorD): Double =
    {
        val n   = d.nd
        val g1  = d.skew ()
        // mu2: variance of the sample skewness g1 for sample size n
        val mu2 = 6.0 * (n-2.0) / ((n+1.0) * (n+3.0))
        // gm2: kurtosis of the sampling distribution of g1
        val gm2 = 36.0 * (n-7.0) * (n*n + 2*n - 5.0) / ((n-2.0) * (n+5.0) * (n+7.0) * (n+9.0))
        val w2  = sqrt (2.0 * gm2 + 4.0) - 1.0
        val dl  = 1.0 / sqrt (log (sqrt (w2)))
        val a   = 1.0 / (w2 - 1.0)
        // log-based transform mapping g1 to an approximately standard Normal value
        dl * log (g1 / (a * sqrt (mu2)) + sqrt (g1*g1 / (a*a*mu2) + 1.0))
    } // normalSkew

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Test vector data 'd' to see if its kurtosis is sufficiently close to Normal.
     *  WARNING: not yet implemented — currently always returns 0.0.
     *  @param d  the data vector to be tested for Normality
     */
    def normalKurtosis (d: VectorD): Double =
    {
        0.0                         // FIX - to be implemented
    } // normalKurtosis

} // Normality
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `NormalityTest` object is used to test the `Normality` object.
* @see www.seattlecentral.edu/qelp/sets/057/057.html
* > run-main scalation.stat.NormalityTest
*/
object NormalityTest extends App
{
    import scalation.random.{Normal, Uniform}

    // Sample data set (n = 100) taken from the QELP example referenced above.
    val d = VectorD (36.0, 37.0, 38.0, 38.0, 39.0, 39.0, 40.0, 40.0, 40.0, 40.0,
                     41.0, 41.0, 41.0, 41.0, 41.0, 41.0, 42.0, 42.0, 42.0, 42.0,
                     42.0, 42.0, 42.0, 43.0, 43.0, 43.0, 43.0, 43.0, 43.0, 43.0,
                     43.0, 44.0, 44.0, 44.0, 44.0, 44.0, 44.0, 44.0, 44.0, 44.0,
                     45.0, 45.0, 45.0, 45.0, 45.0, 45.0, 45.0, 45.0, 45.0, 45.0,
                     46.0, 46.0, 46.0, 46.0, 46.0, 46.0, 46.0, 46.0, 46.0, 46.0,
                     47.0, 47.0, 47.0, 47.0, 47.0, 47.0, 47.0, 47.0, 47.0, 48.0,
                     48.0, 48.0, 48.0, 48.0, 48.0, 48.0, 48.0, 49.0, 49.0, 49.0,
                     49.0, 49.0, 49.0, 49.0, 50.0, 50.0, 50.0, 50.0, 50.0, 50.0,
                     51.0, 51.0, 51.0, 51.0, 52.0, 52.0, 53.0, 53.0, 54.0, 55.0)

    // Summary statistics printed before the Normality test itself.
    val dmin  = d.min ()                    // the minimum
    val dmax  = d.max ()                    // the minimum
    val dmu   = d.mean                      // the mean
    val dsig2 = d.variance                  // the variance
    val dsig  = sqrt (dsig2)                // the standard deviation

    println ("-------------------------------------------------------------")
    println (" Basic Statistics")
    println ("-------------------------------------------------------------")
    println ("n     = " + d.dim)
    println ("dmin  = " + dmin)
    println ("dmax  = " + dmax)
    println ("dmu   = " + dmu)
    println ("dsig2 = " + dsig2)
    println ("dsig  = " + dsig)
    println ("-------------------------------------------------------------")
    // Skewness-based Normality statistic (see Normality.normalSkew).
    println ("normality = " + Normality.normalSkew (d))
    println ("-------------------------------------------------------------")

} // NormalityTest object
| NBKlepp/fda | scalation_1.2/src/main/scala/scalation/stat/Normality.scala | Scala | mit | 4,039 |
package lectures
package dataparallelism
import scala.collection._
import org.scalameter._
/** Demonstrates a set intersection that yields the same result for
 *  sequential and parallel `GenSet`s (the result size is logged for both).
 */
object IntersectionCorrect {
  def main(args: Array[String]): Unit = {
    // Always filter the smaller set by membership in the larger one,
    // so the amount of work is proportional to the smaller set.
    def intersection(a: GenSet[Int], b: GenSet[Int]): GenSet[Int] = {
      val (smaller, larger) = if (a.size < b.size) (a, b) else (b, a)
      smaller.filter(larger(_))
    }
    val seqres = intersection((0 until 1000).toSet, (0 until 1000 by 4).toSet)
    val parres = intersection((0 until 1000).par.toSet, (0 until 1000 by 4).par.toSet)
    log(s"Sequential result - ${seqres.size}")
    log(s"Parallel result - ${parres.size}")
  }
}
| twistedgut/scala_coursera | parprog-snippets/src/main/scala/lectures/dataparallelism/IntersectionCorrect.scala | Scala | gpl-3.0 | 570 |
package org.json4s
/** A zero-allocation (value class) wrapper holding a value `get` that is
 *  never empty, usable as a name-based extractor result.
 */
final class SomeValue[A](val get: A) extends AnyVal {
  /**
   * Always `false` (as a literal type): a `SomeValue` can never represent
   * absence, which lets the compiler optimise name-based pattern matches.
   * @see [[https://github.com/scala/scala/pull/9343]]
   */
  def isEmpty: false = false
}
| json4s/json4s | ast/shared/src/main/scala-2.13+/org/json4s/SomeValue.scala | Scala | apache-2.0 | 173 |
/*
* Copyright 2007-2008 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package net.liftweb.builtin.snippet
import _root_.net.liftweb.http.{S, DispatchSnippet, LiftRules}
import _root_.net.liftweb.sitemap._
import _root_.net.liftweb.util._
import Helpers._
import _root_.scala.xml._
/** Built-in snippet rendering SiteMap-driven navigation.
 *
 *  Supported invocations (selected via `dispatch`):
 *   - "builder": render the full menu as nested &lt;ul&gt;/&lt;li&gt; lists
 *   - "title":   render the current location's title
 *   - "item":    render a link to a named SiteMap entry
 *   - "group":   render links for all locations in a named group
 */
class Menu extends DispatchSnippet {
  def dispatch: DispatchIt = {
    case "builder" => ignore => builder
    case "title" => title
    case "item" => item
    case "group" => group
  }

  /** Builds the complete navigation tree for the current request.
   *  Prefixed template attributes ("li_item", "li_path", "li", "ul") allow
   *  decoration of the generated elements.
   */
  def builder: NodeSeq = {
    // Fix: was declared as a `var` although it is never reassigned.
    val r: Box[NodeSeq] =
      S.request.map(_.buildMenu.lines.toList match {
        case Nil => List(Text("No Navigation Defined."))
        case xs =>
          val liMap = S.prefixedAttrsToMap("li")
          val li = S.mapToAttrs(liMap)

          // Renders one menu entry. The first boolean flag in MenuItem marks
          // the current item (rendered without a link), the second marks an
          // ancestor on the current path.
          def buildANavItem(i: MenuItem) = {
            i match {
              case MenuItem(text, uri, kids, true, _, _) =>
                (<li><span>{text}</span>{buildUlLine(kids)}</li>) % S.prefixedAttrsToMetaData("li_item", liMap)
              case MenuItem(text, uri, kids, _, true, _) =>
                (<li><a href={uri}>{text}</a>{buildUlLine(kids)}</li>) % S.prefixedAttrsToMetaData("li_path", liMap)
              case MenuItem(text, uri, kids, _, _, _) =>
                (<li><a href={uri}>{text}</a>{buildUlLine(kids)}</li> % li)
            }
          }

          // Wraps a level of menu items into a <ul>, or empty text if none.
          def buildUlLine(in: Seq[MenuItem]): Node = if (in.isEmpty) Text("")
          else <ul>{in.flatMap(buildANavItem)}</ul> %
            S.prefixedAttrsToMetaData("ul")

          buildUlLine(xs)
      })

    r.openOr(List(Text("No Navigation Defined.")))
  }

  /** Renders the title of the current location, or empty text when there is
   *  no request/location.
   */
  def title(text: NodeSeq): NodeSeq = {
    val r =
      for (request <- S.request;
           loc <- request.location) yield loc.title
    r openOr Text("")
  }

  /** Renders one link per location in the SiteMap group named by the "group"
   *  attribute, bound into the template at &lt;menu:bind/&gt;.
   */
  def group(template: NodeSeq): NodeSeq = {
    // Fall back to a bare <menu:bind/> when the template has no bind point.
    val toBind = if ((template \ "bind").filter(_.prefix == "menu").isEmpty)
      <xml:group><menu:bind/> </xml:group>
    else template

    val attrs = S.prefixedAttrsToMetaData("a")

    for (group <- S.attr("group").toList;
         siteMap <- LiftRules.siteMap.toList;
         loc <- siteMap.locForGroup(group);
         link <- loc.createDefaultLink;
         linkText <- loc.linkText) yield {
      val a = <a href={link}>{linkText}</a> % attrs
      Group(bind("menu", toBind, "bind" -> a))
    }
  }

  /** Renders a link to the SiteMap entry named by the "name" attribute,
   *  except when that entry is the current location.
   */
  def item(text: NodeSeq): NodeSeq =
    for (name <- S.attr("name").toList;
         request <- S.request.toList;
         loc <- request.location.toList if loc.name != name;
         item <- SiteMap.buildLink(name, text))
    yield item match {
      case e: Elem => e % S.prefixedAttrsToMetaData("a")
      case x => x
    }
}
| andreum/liftweb | lift/src/main/scala/net/liftweb/builtin/snippet/Menu.scala | Scala | apache-2.0 | 3,161 |
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.fixture
import org.ensime.api._
import org.ensime.vfs._
import org.ensime.indexer._
/** Test-fixture contract: runs a block of test code against a
 *  `SourceResolver`, optionally also handing over the `EnsimeConfig`
 *  the resolver was built from.
 */
trait SourceResolverFixture {
  def withSourceResolver(testCode: SourceResolver => Any): Any
  def withSourceResolver(testCode: (EnsimeConfig, SourceResolver) => Any): Any
}
/** Builds a fresh config, VFS and `SourceResolver` for every test.
 *  The previously duplicated try/finally VFS lifecycle is factored into a
 *  single private helper so the close() guarantee lives in one place.
 */
trait IsolatedSourceResolverFixture extends SourceResolverFixture
    with IsolatedEnsimeConfigFixture {

  /** Creates a fresh `EnsimeVFS` for `f` and guarantees it is closed,
   *  even when `f` throws.
   */
  private[this] def withFreshVfs(f: EnsimeVFS => Any): Any = {
    val vfs = EnsimeVFS()
    try f(vfs) finally vfs.close()
  }

  override def withSourceResolver(testCode: SourceResolver => Any): Any = withEnsimeConfig { config =>
    withFreshVfs { implicit vfs =>
      testCode(new SourceResolver(config))
    }
  }

  override def withSourceResolver(testCode: (EnsimeConfig, SourceResolver) => Any): Any = withEnsimeConfig { config =>
    withFreshVfs { implicit vfs =>
      testCode(config, new SourceResolver(config))
    }
  }
}
/** Shares a single `SourceResolver` across all tests in the suite.
 *  The VFS comes from the required `SharedEnsimeVFSFixture` self-type and
 *  the config from `SharedEnsimeConfigFixture`.
 */
trait SharedSourceResolverFixture extends SourceResolverFixture
    with SharedEnsimeConfigFixture {
  this: SharedEnsimeVFSFixture =>

  // Initialised once in beforeAll(); read-only afterwards.
  private[fixture] var _resolver: SourceResolver = _

  override def beforeAll(): Unit = {
    super.beforeAll()
    // NOTE(review): relies on super.beforeAll() having initialised _config
    // (from SharedEnsimeConfigFixture) before this point - confirm ordering.
    _resolver = new SourceResolver(_config)
  }

  override def withSourceResolver(testCode: SourceResolver => Any): Any = testCode(_resolver)
  override def withSourceResolver(testCode: (EnsimeConfig, SourceResolver) => Any): Any = {
    testCode(_config, _resolver)
  }
}
| sugakandrey/ensime-server | core/src/it/scala/org/ensime/fixture/SourceResolverFixture.scala | Scala | gpl-3.0 | 1,534 |
package com.twitter.finagle.netty3.channel
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.util.{Duration, Monitor, Stopwatch, Time}
import java.io.IOException
import java.util.concurrent.atomic.AtomicLong
import java.util.logging.{Level, Logger}
import org.jboss.netty.buffer.ChannelBuffer
import org.jboss.netty.channel.{
ChannelHandlerContext,
ChannelStateEvent,
ExceptionEvent,
MessageEvent,
WriteCompletionEvent,
SimpleChannelHandler
}
/**
* A [[org.jboss.netty.channel.ChannelHandler]] that tracks channel/connection
* statistics. The handler is meant to be shared by all
* [[org.jboss.netty.channel.Channel Channels]] within a Finagle client or
* server in order to consolidate statistics across a number of channels.
*/
class ChannelStatsHandler(statsReceiver: StatsReceiver) extends SimpleChannelHandler {
  private[this] val log = Logger.getLogger(getClass.getName)

  // Number of currently-open channels; backs the "connections" gauge below.
  private[this] val connectionCount: AtomicLong = new AtomicLong()

  // Started in channelOpen, read and cleared in channelClosed.
  // NOTE(review): the class doc says this handler is shared by all channels,
  // yet `elapsed`, `hasBeenWritable` and `since` are unsynchronised instance
  // fields - confirm whether per-channel accuracy is expected here.
  private[this] var elapsed: () => Duration = null

  private[this] val connects = statsReceiver.counter("connects")
  private[this] val connectionDuration = statsReceiver.stat("connection_duration")
  private[this] val connectionReceivedBytes = statsReceiver.stat("connection_received_bytes")
  private[this] val connectionSentBytes = statsReceiver.stat("connection_sent_bytes")
  private[this] val receivedBytes = statsReceiver.counter("received_bytes")
  private[this] val sentBytes = statsReceiver.counter("sent_bytes")
  private[this] val writable = statsReceiver.counter("socket_writable_ms")
  private[this] val unwritable = statsReceiver.counter("socket_unwritable_ms")
  private[this] val exceptions = statsReceiver.scope("exn")
  private[this] val closesCount = statsReceiver.counter("closes")
  private[this] val connections = statsReceiver.addGauge("connections") {
    connectionCount.get()
  }

  override def channelOpen(ctx: ChannelHandlerContext, e: ChannelStateEvent): Unit = {
    elapsed = Stopwatch.start()
    // The attachment holds this channel's (bytesRead, bytesWritten) counters.
    ctx.setAttachment((new AtomicLong(0), new AtomicLong(0)))
    connects.incr()
    connectionCount.incrementAndGet()
    super.channelOpen(ctx, e)
  }

  override def writeComplete(ctx: ChannelHandlerContext, e: WriteCompletionEvent) {
    // Accumulate written bytes both per-channel (attachment) and globally.
    val (_, channelWriteCount) = ctx.getAttachment().asInstanceOf[(AtomicLong, AtomicLong)]

    channelWriteCount.getAndAdd(e.getWrittenAmount)
    sentBytes.incr(e.getWrittenAmount.toInt)
    super.writeComplete(ctx, e)
  }

  override def messageReceived(ctx: ChannelHandlerContext, e: MessageEvent) {
    e.getMessage match {
      case buffer: ChannelBuffer =>
        // Accumulate read bytes both per-channel (attachment) and globally.
        val (channelReadCount, _) = ctx.getAttachment().asInstanceOf[(AtomicLong, AtomicLong)]
        val readableBytes = buffer.readableBytes
        channelReadCount.getAndAdd(readableBytes)
        receivedBytes.incr(readableBytes)
      case _ =>
        log.warning("ChannelStatsHandler received non-channelbuffer read")
    }
    super.messageReceived(ctx, e)
  }

  override def closeRequested(ctx: ChannelHandlerContext, e: ChannelStateEvent) {
    closesCount.incr()
    super.closeRequested(ctx, e)
  }

  override def channelClosed(ctx: ChannelHandlerContext, e: ChannelStateEvent) {
    // guarded in case Netty calls channelClosed without calling channelOpen.
    if (elapsed != null) {
      // Flush the per-channel byte counters into the connection-level stats.
      val (channelReadCount, channelWriteCount) =
        ctx.getAttachment().asInstanceOf[(AtomicLong, AtomicLong)]
      connectionReceivedBytes.add(channelReadCount.get)
      connectionSentBytes.add(channelWriteCount.get)
      connectionDuration.add(elapsed().inMilliseconds)
      connectionCount.decrementAndGet()
      elapsed = null
    }
    super.channelClosed(ctx, e)
  }

  override def exceptionCaught(ctx: ChannelHandlerContext, evt: ExceptionEvent) {
    // Count exceptions per cause-class name under the "exn" scope.
    val m = if (evt.getCause != null) evt.getCause.getClass.getName else "unknown"
    exceptions.counter(m).incr()
    // If no Monitor is active, then log the exception so we don't fail silently.
    if (!Monitor.isActive) {
      val level = evt.getCause match {
        case t: IOException => Level.FINE
        case _ => Level.WARNING
      }
      log.log(level, "ChannelStatsHandler caught an exception", evt.getCause)
    }
    super.exceptionCaught(ctx, evt)
  }

  private[this] var hasBeenWritable = true //netty channels start in writable state
  private[this] var since = Time.now

  // Time spent in the current writability state.
  private[this] def socketDuration(now: Time): Duration = now - since

  override def channelInterestChanged(ctx: ChannelHandlerContext, e: ChannelStateEvent): Unit = {
    val now = Time.now
    super.channelInterestChanged(ctx, e)
    val isWritable = ctx.getChannel.isWritable()
    // On every writability transition, record how long the socket spent in
    // the previous state under socket_writable_ms / socket_unwritable_ms.
    if (isWritable != hasBeenWritable) {
      val stat = if (hasBeenWritable) writable else unwritable
      stat.incr(socketDuration(now).inMillis.toInt)
      hasBeenWritable = isWritable
      since = now
    }
  }
}
| mkhq/finagle | finagle-netty3/src/main/scala/com/twitter/finagle/netty3/channel/ChannelStatsHandler.scala | Scala | apache-2.0 | 4,886 |
/*
* Copyright (C) 2016 University of Basel, Graphics and Vision Research Group
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package scalismo.ui.rendering.internal
import javax.media.opengl.GLCapabilitiesImmutable
import javax.media.opengl.awt.GLJPanel
import scalismo.ui.view.ViewportPanel
/** A `GLJPanel` that also carries a reference to the `ViewportPanel` it
 *  renders into, so event handlers can recover the owning viewport.
 */
class GLJPanelWithViewport(val viewport: ViewportPanel, capabilities: GLCapabilitiesImmutable)
    extends GLJPanel(capabilities) {}
| unibas-gravis/scalismo-ui | src/main/scala/scalismo/ui/rendering/internal/GLJPanelWithViewport.scala | Scala | gpl-3.0 | 1,037 |
package mesosphere.marathon
package core.task.jobs
import akka.actor.{ ActorRef, PoisonPill, Terminated }
import akka.testkit.TestProbe
import java.time.Clock
import mesosphere.AkkaUnitTest
import mesosphere.marathon.test.SettableClock
import mesosphere.marathon.core.condition.Condition
import mesosphere.marathon.core.instance.{ Instance, TestInstanceBuilder }
import mesosphere.marathon.core.instance.update.InstanceUpdateOperation
import mesosphere.marathon.core.task.jobs.impl.{ ExpungeOverdueLostTasksActor, ExpungeOverdueLostTasksActorLogic }
import mesosphere.marathon.core.task.tracker.InstanceTracker.InstancesBySpec
import mesosphere.marathon.core.task.tracker.{ InstanceTracker, TaskStateOpProcessor }
import mesosphere.marathon.state.PathId._
import mesosphere.marathon.state.{ Timestamp, UnreachableEnabled, UnreachableDisabled, UnreachableStrategy }
import mesosphere.marathon.test.MarathonTestHelper
import org.scalatest.prop.TableDrivenPropertyChecks
import scala.concurrent.{ ExecutionContext, Future }
import scala.concurrent.duration._
/** Verifies `ExpungeOverdueLostTasksActor(Logic)`: unreachable-inactive
 *  instances whose `expungeAfter` deadline has passed must be force-expunged,
 *  while running, recently-unreachable, or UnreachableDisabled instances are
 *  left alone.
 */
class ExpungeOverdueLostTasksActorTest extends AkkaUnitTest with TableDrivenPropertyChecks {

  // Shared mocks and a settable clock for a single test case.
  class Fixture {
    val clock = new SettableClock()
    val config = MarathonTestHelper.defaultConfig(maxInstancesPerOffer = 10)
    val stateOpProcessor: TaskStateOpProcessor = mock[TaskStateOpProcessor]
    val taskTracker: InstanceTracker = mock[InstanceTracker]
    // Strategy under test: inactive after 5 minutes, expunged after 10.
    val fiveTen = UnreachableEnabled(inactiveAfter = 5.minutes, expungeAfter = 10.minutes)
  }

  // Runs `testCode` against a live actor and guarantees the actor is stopped
  // and confirmed terminated afterwards, even when the test fails.
  def withActor(testCode: (Fixture, ActorRef) => Any): Unit = {
    val f = new Fixture
    val checkActor = system.actorOf(ExpungeOverdueLostTasksActor.props(f.clock, f.config, f.taskTracker, f.stateOpProcessor))

    try {
      testCode(f, checkActor)
    } finally {
      checkActor ! PoisonPill
      val probe = TestProbe()
      probe.watch(checkActor)
      val terminated = probe.expectMsgAnyClassOf(classOf[Terminated])
      assert(terminated.actor == checkActor)
    }
  }

  "The expunge overdue tasks business logic's filtering methods" in {
    val f = new Fixture
    val businessLogic = new ExpungeOverdueLostTasksActorLogic {
      override val config: TaskJobsConfig = MarathonTestHelper.defaultConfig(maxInstancesPerOffer = 10)
      override val clock: Clock = new SettableClock()
      override val stateOpProcessor: TaskStateOpProcessor = mock[TaskStateOpProcessor]
    }

    // format: OFF
    // Different task configuration with startedAt, status since and condition values. Expunge indicates whether an
    // expunge is expected or not.
    import f.fiveTen
    val disabled = UnreachableDisabled
    val taskCases = Table(
      ("name",             "startedAt",    "since",                                       "unreachableStrategy", "condition",                   "expunge"),
      ("running",          Timestamp.zero, Timestamp.zero,                                fiveTen,               Condition.Running,             false    ),
      ("expired inactive", Timestamp.zero, f.clock.now - fiveTen.expungeAfter - 1.minute, fiveTen,               Condition.UnreachableInactive, true     ),
      ("unreachable",      Timestamp.zero, f.clock.now - 5.minutes,                       fiveTen,               Condition.Unreachable,         false    ),
      ("expired disabled", Timestamp.zero, f.clock.now - 365.days,                        disabled,              Condition.Unreachable,         false    )
    )
    // format: ON

    forAll(taskCases) { (name: String, startedAt: Timestamp, since: Timestamp, unreachableStrategy: UnreachableStrategy, condition: Condition, expunge: Boolean) =>
      When(s"filtering $name task since $since")
      // Build an instance in the requested condition, then attach the
      // unreachable strategy under test.
      val instance: Instance = (condition match {
        case Condition.Unreachable =>
          TestInstanceBuilder.newBuilder("/unreachable".toPath).addTaskUnreachable(since = since).getInstance()
        case Condition.UnreachableInactive =>
          TestInstanceBuilder.newBuilder("/unreachable".toPath).addTaskUnreachableInactive(since = since).getInstance()
        case _ =>
          TestInstanceBuilder.newBuilder("/running".toPath).addTaskRunning(startedAt = startedAt).getInstance()
      }).copy(unreachableStrategy = unreachableStrategy)
      val instances = InstancesBySpec.forInstances(instance).instancesMap

      val filterForExpunge = businessLogic.filterUnreachableForExpunge(instances, f.clock.now()).map(identity)

      Then(s"${if (!expunge) "not " else ""}select it for expunge")
      filterForExpunge.nonEmpty should be(expunge)
    }

    When("filtering two running tasks")
    val running1 = TestInstanceBuilder.newBuilder("/running1".toPath).addTaskRunning(startedAt = Timestamp.zero)
      .getInstance()
      .copy(unreachableStrategy = f.fiveTen)
    val running2 = TestInstanceBuilder.newBuilder("/running2".toPath).addTaskRunning(startedAt = Timestamp.zero)
      .getInstance()
      .copy(unreachableStrategy = f.fiveTen)
    val instances = InstancesBySpec.forInstances(running1, running2).instancesMap

    val filtered = businessLogic.filterUnreachableForExpunge(instances, f.clock.now()).map(identity)

    Then("return an empty collection")
    filtered.isEmpty should be(true)

    When("filtering two expired inactive Unreachable tasks")
    val inactive1 = TestInstanceBuilder.newBuilder("/unreachable1".toPath).addTaskUnreachableInactive(since = Timestamp.zero)
      .getInstance()
      .copy(unreachableStrategy = f.fiveTen)
    val inactive2 = TestInstanceBuilder.newBuilder("/unreachable1".toPath).addTaskUnreachableInactive(since = Timestamp.zero)
      .getInstance()
      .copy(unreachableStrategy = f.fiveTen)
    val instances2 = InstancesBySpec.forInstances(inactive1, inactive2).instancesMap

    val filtered2 = businessLogic.filterUnreachableForExpunge(instances2, f.clock.now()).map(identity)

    Then("return the expired Unreachable tasks")
    filtered2 should be(Iterable(inactive1, inactive2))
  }

  "The ExpungeOverdueLostTaskActor" when {

    "checking two running tasks" in withActor { (f: Fixture, checkActor: ActorRef) =>
      val running1 = TestInstanceBuilder.newBuilder("/running1".toPath).addTaskRunning(startedAt = Timestamp.zero)
        .getInstance()
        .copy(unreachableStrategy = f.fiveTen)
      val running2 = TestInstanceBuilder.newBuilder("/running2".toPath).addTaskRunning(startedAt = Timestamp.zero)
        .getInstance()
        .copy(unreachableStrategy = f.fiveTen)

      f.taskTracker.instancesBySpec()(any[ExecutionContext]) returns Future.successful(InstancesBySpec.forInstances(running1, running2))

      Then("issue no expunge")
      noMoreInteractions(f.stateOpProcessor)
    }

    "checking one inactive Unreachable and one running task" in withActor { (f: Fixture, checkActor: ActorRef) =>
      val running = TestInstanceBuilder.newBuilder("/running".toPath).addTaskRunning(startedAt = Timestamp.zero)
        .getInstance()
        .copy(unreachableStrategy = f.fiveTen)
      val unreachable = TestInstanceBuilder.newBuilder("/unreachable".toPath).addTaskUnreachableInactive(since = Timestamp.zero)
        .getInstance()
        .copy(unreachableStrategy = f.fiveTen)

      f.taskTracker.instancesBySpec()(any[ExecutionContext]) returns Future.successful(InstancesBySpec.forInstances(running, unreachable))

      val testProbe = TestProbe()
      testProbe.send(checkActor, ExpungeOverdueLostTasksActor.Tick)
      testProbe.receiveOne(3.seconds)

      Then("issue one expunge")
      verify(f.stateOpProcessor, once).process(InstanceUpdateOperation.ForceExpunge(unreachable.instanceId))
      noMoreInteractions(f.stateOpProcessor)
    }

    "checking two inactive Unreachable tasks and one is overdue" in withActor { (f: Fixture, checkActor: ActorRef) =>
      val unreachable1 = TestInstanceBuilder.newBuilder("/unreachable1".toPath).addTaskUnreachableInactive(since = Timestamp.zero)
        .getInstance()
        .copy(unreachableStrategy = f.fiveTen)
      val unreachable2 = TestInstanceBuilder.newBuilder("/unreachable2".toPath).addTaskUnreachableInactive(since = f.clock.now())
        .getInstance()
        .copy(unreachableStrategy = f.fiveTen)

      f.taskTracker.instancesBySpec()(any[ExecutionContext]) returns Future.successful(InstancesBySpec.forInstances(unreachable1, unreachable2))

      val testProbe = TestProbe()
      testProbe.send(checkActor, ExpungeOverdueLostTasksActor.Tick)
      testProbe.receiveOne(3.seconds)

      Then("issue one expunge")
      verify(f.stateOpProcessor, once).process(InstanceUpdateOperation.ForceExpunge(unreachable1.instanceId))
      noMoreInteractions(f.stateOpProcessor)
    }

    "checking two lost task and one is overdue" in withActor { (f: Fixture, checkActor: ActorRef) =>
      // Note that both won't have unreachable time set.
      val unreachable1 = TestInstanceBuilder.newBuilder("/unreachable1".toPath).addTaskLost(since = Timestamp.zero)
        .getInstance()
        .copy(unreachableStrategy = f.fiveTen)
      val unreachable2 = TestInstanceBuilder.newBuilder("/unreachable2".toPath).addTaskLost(since = f.clock.now())
        .getInstance()
        .copy(unreachableStrategy = f.fiveTen)

      f.taskTracker.instancesBySpec()(any[ExecutionContext]) returns Future.successful(InstancesBySpec.forInstances(unreachable1, unreachable2))

      val testProbe = TestProbe()
      // Trigger UnreachableInactive mark
      testProbe.send(checkActor, ExpungeOverdueLostTasksActor.Tick)
      testProbe.receiveOne(3.seconds)

      Then("ensure backwards compatibility and issue one expunge")
      // NOTE(review): this destructuring is unused - candidate for removal.
      val (taskId, task) = unreachable1.tasksMap.head
      verify(f.stateOpProcessor, once).process(InstanceUpdateOperation.ForceExpunge(unreachable1.instanceId))
      noMoreInteractions(f.stateOpProcessor)
    }
  }
}
| Caerostris/marathon | src/test/scala/mesosphere/marathon/core/task/jobs/ExpungeOverdueLostTasksActorTest.scala | Scala | apache-2.0 | 9,804 |
package org.pico.hash
/** Intended home for hashing type-class instances; currently empty. */
package object instances {
}
| newhoggy/pico-cuckoo-filter | pico-hash/src/main/scala/org/pico/hash/instances/package.scala | Scala | bsd-3-clause | 53 |
package provingground.interface
import provingground._, learning._
import andrewscurtis._
import ACRoutes._
import AcFlows.{system, mat}
//import akka.http._
import akka.http.scaladsl._
import akka.http.scaladsl.server.Directives._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.io.StdIn
/** Starts the Andrews-Curtis HTTP server and shuts it down when RETURN is
 *  pressed on stdin.
 *  NOTE(review): `extends App` is commented out, so this body only runs when
 *  the object is first referenced, not as a standalone program - confirm
 *  that this is the intended entry-point behaviour.
 */
object HttpServer /*extends App*/ {
  val server = new ServerStart
  val bindingFuture = server.bindingFuture
  println(s"Server online at http://localhost:8080/\nPress RETURN to stop...")
  StdIn.readLine() // block until the user presses RETURN, then shut down
  server.stop()
}
/** Binds the HTTP routes on localhost and owns the resulting binding.
 *  @param port TCP port to bind (default 8080)
 */
class ServerStart(port: Int = 8080) {
  // AC API routes first; anything unmatched is served from classpath resources.
  val route = acRoutes ~ getFromResourceDirectory("")
  val bindingFuture = Http().bindAndHandle(route, "localhost", port)

  /** Unbinds the port, then stops the FD hub once unbinding completes. */
  def stop() =
    bindingFuture
      .flatMap(_.unbind()) // trigger unbinding from the port
      .onComplete(_ => FDHub.stop(StartData.quickhub)) // and shutdown when done
}
| siddhartha-gadgil/ProvingGround | digressions/src/main/scala/provingground/andrewscurtis/HttpServer.scala | Scala | mit | 921 |
package com.sksamuel.elastic4s.requests.searches.aggs.builders
import com.sksamuel.elastic4s.json.{XContentBuilder, XContentFactory}
import com.sksamuel.elastic4s.requests.searches.aggs.{AggMetaDataFn, ReverseNestedAggregation, SubAggsBuilderFn}
/** Serialises a reverse_nested aggregation - including its sub-aggregations
 *  and metadata - into the JSON request body.
 */
object ReverseNestedAggregationBuilder {
  def apply(agg: ReverseNestedAggregation): XContentBuilder = {
    val xcb = XContentFactory.obj().startObject("reverse_nested")
    // "path" is only emitted when the aggregation defines one.
    for (p <- agg.path) xcb.field("path", p)
    xcb.endObject()
    SubAggsBuilderFn(agg, xcb)
    AggMetaDataFn(agg, xcb)
    xcb.endObject()
  }
}
| sksamuel/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/searches/aggs/builders/ReverseNestedAggregationBuilder.scala | Scala | apache-2.0 | 591 |
package com.scala.bala.util
import scala.io.Source
object ResourceReader {
  /**
   * Reads a classpath resource and returns its lines.
   *
   * Fixes over the previous version: removes an unused `getClass.getResource`
   * lookup and closes the source in a `finally` block so it is released even
   * when reading fails part-way through.
   *
   * @param fileName resource name with path, as understood by `getClass.getResource`
   * @return the lines of the resource
   */
  def readResourceFile(fileName: String): Array[String] = {
    val source = Source.fromURL(getClass.getResource(fileName))
    try source.getLines.toArray
    finally source.close()
  }
}
| bbalajisg/scala-projects | currency-retriever/src/main/scala/com/scala/bala/util/ResourceReader.scala | Scala | gpl-2.0 | 439 |
package TurboRav
import Chisel._
/** A Register Bank for a RISC-V processor with 32 registers of 32
* bits each for a total of 1024 bits.
*
* Reads and writes are done synchronously so that FPGA block RAM can
* be used as the underlying memory technology.
*
* Register 0 AKA x0 is treated as special; all reads to it return 0
*/
class RegBank extends Module {
  val io = new Bundle(){
    // Two read ports; `valid` indicates the 5-bit address is meaningful.
    val reads = new Bundle {
      val rs1 = Valid(UInt(width = 5)).flip()
      val rs2 = Valid(UInt(width = 5)).flip()
    }
    // Single write port carrying address + data.
    val write = Valid(new RegWrite()).flip()
    val rs1_data = UInt(OUTPUT, Config.xlen)
    val rs2_data = UInt(OUTPUT, Config.xlen)
  }

  // 32 general-purpose registers of xlen bits each.
  val regs = Mem(32, UInt(width = Config.xlen))

  // Writes to x0 are dropped so it always reads as zero.
  when (io.write.valid && io.write.bits.addr =/= UInt(0)) {
    regs(io.write.bits.addr) := io.write.bits.data
  }

  // Register the read addresses for one cycle so the memory access is
  // synchronous and can map onto FPGA block RAM.
  val rs1_addr_prev = Reg(init = UInt(0), next = io.reads.rs1.bits)
  val rs2_addr_prev = Reg(init = UInt(0), next = io.reads.rs2.bits)

  // Mux reads of x0 to be 0 because x0 must be hardwired to 0.
  val rs1_read = Mux(rs1_addr_prev === UInt(0), UInt(0), regs(rs1_addr_prev))
  val rs2_read = Mux(rs2_addr_prev === UInt(0), UInt(0), regs(rs2_addr_prev))

  io.rs1_data := rs1_read
  io.rs2_data := rs2_read
}
| SebastianBoe/turborav | hw/src/main/rtl/regbank.scala | Scala | bsd-2-clause | 1,246 |
package io.finch.todo
import java.util.UUID
import com.twitter.app.Flag
import com.twitter.finagle.{Http, Service}
import com.twitter.finagle.http.{Request, Response}
import com.twitter.finagle.stats.Counter
import com.twitter.server.TwitterServer
import com.twitter.util.Await
import io.circe.generic.auto._
import io.finch._
import io.finch.circe._
/**
* A simple Finch application implementing the backend for the TodoMVC project.
*
* Use the following sbt command to run the application.
*
* {{{
* $ sbt 'examples/runMain io.finch.todo.Main'
* }}}
*
* Use the following HTTPie commands to test endpoints.
*
* {{{
* $ http POST :8081/todos title=foo order:=0 completed:=false
* $ http PATCH :8081/todos/<UUID> completed:=true
* $ http :8081/todos
* $ http DELETE :8081/todos/<UUID>
* $ http DELETE :8081/todos
* }}}
*/
object Main extends TwitterServer {

  /** TCP port the HTTP server binds to (flag: -port). */
  val port: Flag[Int] = flag("port", 8081, "TCP port for HTTP server")

  /** Counts todos created since server start. */
  val todos: Counter = statsReceiver.counter("todos")

  /** Decodes the POSTed body and assigns a fresh server-side id. */
  def postedTodo: Endpoint[Todo] =
    body.as[UUID => Todo].map(_(UUID.randomUUID()))

  /** POST /todos - creates and stores a new todo. */
  def postTodo: Endpoint[Todo] = post("todos" :: postedTodo) { t: Todo =>
    todos.incr()
    Todo.save(t)
    Created(t)
  }

  /** Decodes a partial update as a function applied to the stored todo. */
  def patchedTodo: Endpoint[Todo => Todo] = body.as[Todo => Todo]

  /** PATCH /todos/:id - applies the patch to the stored todo, 404 if absent. */
  def patchTodo: Endpoint[Todo] =
    patch("todos" :: uuid :: patchedTodo) { (id: UUID, pt: Todo => Todo) =>
      Todo.get(id) match {
        case Some(currentTodo) =>
          val newTodo: Todo = pt(currentTodo)
          Todo.delete(id)
          Todo.save(newTodo)
          Ok(newTodo)
        case None => throw TodoNotFound(id)
      }
    }

  /** GET /todos - lists all stored todos. */
  def getTodos: Endpoint[List[Todo]] = get("todos") {
    Ok(Todo.list())
  }

  /** DELETE /todos/:id - removes one todo, 404 if absent.
   *  Consistency fix: construct TodoNotFound via its companion apply,
   *  as patchTodo already does, instead of `new TodoNotFound(id)`.
   */
  def deleteTodo: Endpoint[Todo] = delete("todos" :: uuid) { id: UUID =>
    Todo.get(id) match {
      case Some(t) => Todo.delete(id); Ok(t)
      case None => throw TodoNotFound(id)
    }
  }

  /** DELETE /todos - removes all todos and returns the removed list. */
  def deleteTodos: Endpoint[List[Todo]] = delete("todos") {
    val all: List[Todo] = Todo.list()
    all.foreach(t => Todo.delete(t.id))
    Ok(all)
  }

  /** All endpoints combined into a single service; missing todos map to 404. */
  val api: Service[Request, Response] = (
    getTodos :+: postTodo :+: deleteTodo :+: deleteTodos :+: patchTodo
  ).handle({
    case e: TodoNotFound => NotFound(e)
  }).toService

  def main(): Unit = {
    log.info("Serving the Todo application")

    val server = Http.server
      .withStatsReceiver(statsReceiver)
      .serve(s":${port()}", api)

    onExit { server.close() }

    Await.ready(adminHttpServer)
  }
}
| ilya-murzinov/finch | examples/src/main/scala/io/finch/todo/Main.scala | Scala | apache-2.0 | 2,516 |
/*
* Copyright 2016-2018 Michal Harish, michal.harish@gmail.com
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package affinity
import akka.http.scaladsl.model.HttpMethods
import akka.util.Timeout
import io.amient.affinity.avro.record.{AvroRecord, Fixed}
import io.amient.affinity.core.ack
import io.amient.affinity.core.actor.{GatewayHttp, GatewayStream, Partition, Routed}
import io.amient.affinity.core.cluster.Node
import io.amient.affinity.core.http.RequestMatchers.{HTTP, PATH}
import io.amient.affinity.core.storage.Record
import io.amient.affinity.core.util._
import scala.collection.JavaConverters._
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.language.postfixOps
/** Entry point: boots a single affinity node from `example.conf`. */
object ESecondaryIndexMain extends App {
  new Node("example.conf").start()
}
/** Routing key for articles.
 *  NOTE(review): `id` is `username.hashCode`, so two distinct usernames can
 *  collide on the same id - confirm this is acceptable for partitioning.
 */
case class Author(username: String) extends AvroRecord {
  // numeric id derived from the username; used as the partition key
  val id = username.hashCode
}
/** An article; `timestamp` doubles as its event time for time-range queries. */
case class Article(title: String, timestamp: Long) extends AvroRecord with EventTime {
  override def eventTimeUnix() = timestamp
}
/** HTTP gateway: ingests (Author, Article) records from "input-stream" into
 *  the "articles" keyspace and exposes query/delete endpoints over it.
 */
class ESecondaryIndex extends GatewayStream with GatewayHttp {

  implicit val executor = context.dispatcher
  implicit val scheduler = context.system.scheduler

  implicit val timeout = Timeout(5 seconds)

  val ks = keyspace("articles")

  // Forward every input record to the owning partition for storage.
  input[Author, Article]("input-stream") { record =>
    ks ?! StoreArticle(record.key, record.value)
  }

  override def handle: Receive = {
    // GET /articles/<username>: articles by a single author (routed, ?!)
    case HTTP(HttpMethods.GET, PATH("articles", username), _, response) =>
      ks ?! GetAuthorArticles(Author(username), 0L) map (handleAsJson(response, _))
    // GET /words/<word>: scatter-gather over all partitions (??)
    case HTTP(HttpMethods.GET, PATH("words", word), _, response) =>
      ks ?? GetWordIndex(word, 0L) map (handleAsJson(response, _))
    case HTTP(HttpMethods.GET, PATH("words-since", word), _, response) =>
      // NOTE(review): hard-coded epoch-millis cutoff - presumably a demo fixture.
      ks ?? GetWordIndex(word, 1530086400000L) map (handleAsJson(response, _))
    case HTTP(HttpMethods.GET, PATH("delete-articles-containing", word), _, response) =>
      ks ?? DeleteArticles(word) map (accept(response, _))
  }
}
// --- Messages exchanged with the "articles" keyspace ---

/** Scatter-gather query: articles containing `word`, from every partition. */
case class GetWordIndex(word: String, since: Long) extends ScatterIterable[Article]

/** Scatter instruction: delete articles containing `word` on every partition. */
case class DeleteArticles(word: String) extends ScatterUnit

/** Messages routed by author: the partition key is the author's numeric id. */
sealed trait Authored extends Routed {
  val author: Author
  override def key: Any = author.id
}

case class StoreArticle(author: Author, article: Article) extends AvroRecord with Authored with Reply[Unit]

/** Storage key: fixed-width author id plus a per-author auto-increment. */
case class StorageKey(@Fixed authorId: Int, auto: Int) extends AvroRecord

case class GetAuthorArticles(author: Author, since: Long) extends AvroRecord with Authored with Reply[Seq[Article]]
/** Partition actor owning the "articles" state plus a derived "words"
 *  secondary index (lower-cased whitespace-split tokens of each title).
 */
class ArticlesPartition extends Partition {

  val articles = state[StorageKey, Article]("articles")

  // Secondary index: each article is indexed under every word of its title.
  val wordindex = articles.index("words") { record: Record[_, Article] =>
    record.value.title.split("\\s").toList.map(_.trim.toLowerCase)
  }

  implicit val executor = context.dispatcher

  override def handle: Receive = {
    case request@GetAuthorArticles(author, since) =>
      request(sender) ! articles.range(TimeRange.since(since), author.id).values.toList

    case request@GetWordIndex(word, since) =>
      // Resolve index hits back to full articles via articles.apply.
      request(sender) ! wordindex(word.trim.toLowerCase, TimeRange.since(since))(_.toList).map(articles.apply).flatten

    case request@DeleteArticles(word) =>
      val deleted = Future.sequence(wordindex(word.trim.toLowerCase, TimeRange.UNBOUNDED)(_.map(articles.delete)))
      request(sender) ! deleted.map(_ => ())

    case request@StoreArticle(author, article) => request(sender) ! {
      articles.lockAsync(author) {
        //using an async lock because .replace is asynchronous so another request may get processed and miscount
        val articlesSoFar = articles.iterator(TimeRange.UNBOUNDED, author.id)
        try {
          // next per-author auto-increment: max existing + 1 (1 for a new author)
          val nextAuto = if (!articlesSoFar.hasNext) 1 else articlesSoFar.asScala.map(_.key.auto).max + 1
          val key = StorageKey(author.id, nextAuto)
          articles.replace(key, article).map { _ =>
            //this is only here to tell the test the fixtures were all processed
            context.system.eventStream.publish(request)
          }
        } finally {
          //raw memstore iterators need closing to free resources
          articlesSoFar.close
        }
      }
    }
  }
}
}
| amient/affinity | examples/example-secondary-index/src/test/scala/affinity/ESecondaryIndex.scala | Scala | apache-2.0 | 4,970 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package java.nio
import scala.scalajs.js.typedarray._
abstract class Buffer private[nio] (val _capacity: Int) {
private[nio] type ElementType
private[nio] type BufferType >: this.type <: Buffer {
type ElementType = Buffer.this.ElementType
}
private[nio] type TypedArrayType >: Null <: TypedArray[_, TypedArrayType]
// Normal implementation of Buffer
private var _limit: Int = capacity
private var _position: Int = 0
private[nio] var _mark: Int = -1
final def capacity(): Int = _capacity
final def position(): Int = _position
  /** Sets the position; the mark is discarded if it would end up past the
   *  new position. Throws IllegalArgumentException when `newPosition` is
   *  negative or greater than the limit.
   */
  def position(newPosition: Int): Buffer = {
    if (newPosition < 0 || newPosition > limit())
      throw new IllegalArgumentException
    _position = newPosition
    if (_mark > newPosition)
      _mark = -1
    this
  }
final def limit(): Int = _limit
  /** Sets the limit, clamping the position (and discarding the mark) when
   *  they would exceed it. Throws IllegalArgumentException when `newLimit`
   *  is negative or greater than the capacity.
   */
  def limit(newLimit: Int): Buffer = {
    if (newLimit < 0 || newLimit > capacity())
      throw new IllegalArgumentException
    _limit = newLimit
    if (_position > newLimit) {
      _position = newLimit
      if (_mark > newLimit)
        _mark = -1
    }
    this
  }
def mark(): Buffer = {
_mark = _position
this
}
def reset(): Buffer = {
if (_mark == -1)
throw new InvalidMarkException
_position = _mark
this
}
def clear(): Buffer = {
_mark = -1
_position = 0
_limit = capacity
this
}
def flip(): Buffer = {
_mark = -1
_limit = _position
_position = 0
this
}
def rewind(): Buffer = {
_mark = -1
_position = 0
this
}
@inline final def remaining(): Int = limit - position
@inline final def hasRemaining(): Boolean = position != limit
def isReadOnly(): Boolean
def hasArray(): Boolean
/* Note: in the JDK, this returns Object.
* But Array[ElementType] erases to Object so this is binary compatible.
*/
def array(): Array[ElementType]
def arrayOffset(): Int
def isDirect(): Boolean
override def toString(): String =
s"${getClass.getName}[pos=$position lim=$limit cap=$capacity]"
/* Extended API - exposed to user-space with a hacky bridge and extension
* methods.
*/
def hasArrayBuffer(): Boolean =
_arrayBuffer != null && !isReadOnly
def arrayBuffer(): ArrayBuffer = {
val buffer = _arrayBuffer
if (buffer == null || isReadOnly)
throw new UnsupportedOperationException
buffer
}
def arrayBufferOffset(): Int = {
val offset = _arrayBufferOffset
if (offset == -1 || isReadOnly)
throw new UnsupportedOperationException
offset
}
def dataView(): DataView = {
val view = _dataView
if (view == null || isReadOnly)
throw new UnsupportedOperationException
view
}
def hasTypedArray(): Boolean =
_typedArray != null && !isReadOnly
def typedArray(): TypedArrayType = {
val array = _typedArray
if (array == null || isReadOnly)
throw new UnsupportedOperationException
array
}
/* Generic access to methods declared in subclasses.
* These methods allow to write generic algorithms on any kind of Buffer.
* The optimizer will get rid of all the overhead.
* We only declare the methods we need somewhere.
*/
private[nio] def _array: Array[ElementType]
private[nio] def _arrayOffset: Int
private[nio] def _arrayBuffer: ArrayBuffer = null
private[nio] def _arrayBufferOffset: Int = -1
private[nio] def _dataView: DataView = null
private[nio] def _typedArray: TypedArrayType = null
/** Loads an element at the given absolute, unchecked index. */
private[nio] def load(index: Int): ElementType
/** Stores an element at the given absolute, unchecked index. */
private[nio] def store(index: Int, elem: ElementType): Unit
/** Loads a range of elements with absolute, unchecked indices. */
private[nio] def load(startIndex: Int,
dst: Array[ElementType], offset: Int, length: Int): Unit
/** Stores a range of elements with absolute, unchecked indices. */
private[nio] def store(startIndex: Int,
src: Array[ElementType], offset: Int, length: Int): Unit
/* Only for HeapByteBufferViews -- but that's the only place we can put it.
* For all other types, it will be dce'ed.
*/
private[nio] def _byteArray: Array[Byte] =
throw new UnsupportedOperationException
private[nio] def _byteArrayOffset: Int =
throw new UnsupportedOperationException
private[nio] def isBigEndian: Boolean =
throw new UnsupportedOperationException
// Helpers
@inline private[nio] def ensureNotReadOnly(): Unit = {
if (isReadOnly)
throw new ReadOnlyBufferException
}
@inline private[nio] def validateArrayIndexRange(
array: Array[_], offset: Int, length: Int): Unit = {
if (offset < 0 || length < 0 || offset > array.length - length)
throw new IndexOutOfBoundsException
}
@inline private[nio] def getPosAndAdvanceRead(): Int = {
val p = _position
if (p == limit)
throw new BufferUnderflowException
_position = p + 1
p
}
@inline private[nio] def getPosAndAdvanceRead(length: Int): Int = {
val p = _position
val newPos = p + length
if (newPos > limit)
throw new BufferUnderflowException
_position = newPos
p
}
@inline private[nio] def getPosAndAdvanceWrite(): Int = {
val p = _position
if (p == limit)
throw new BufferOverflowException
_position = p + 1
p
}
@inline private[nio] def getPosAndAdvanceWrite(length: Int): Int = {
val p = _position
val newPos = p + length
if (newPos > limit)
throw new BufferOverflowException
_position = newPos
p
}
@inline private[nio] def validateIndex(index: Int): Int = {
if (index < 0 || index >= limit)
throw new IndexOutOfBoundsException
index
}
@inline private[nio] def validateIndex(index: Int, length: Int): Int = {
if (index < 0 || index + length > limit)
throw new IndexOutOfBoundsException
index
}
}
| nicolasstucki/scala-js | javalib/src/main/scala/java/nio/Buffer.scala | Scala | apache-2.0 | 6,204 |
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.nd4s
import org.scalatest.{ Suite, SuiteMixin }
/** Mixin giving a test suite an abstract array ordering (row- vs column-major). */
trait OrderingForTest extends SuiteMixin { this: Suite =>
  // Ordering under which the suite's ND arrays are created/checked.
  val ordering: NDOrdering
}
/** Fixes the suite's ordering to C (row-major). */
trait COrderingForTest extends OrderingForTest { this: Suite =>
  override val ordering: NDOrdering = NDOrdering.C
}
/** Fixes the suite's ordering to Fortran (column-major). */
trait FortranOrderingForTest extends OrderingForTest { this: Suite =>
  override val ordering: NDOrdering = NDOrdering.Fortran
}
| RobAltena/deeplearning4j | nd4s/src/test/scala/org/nd4s/OrderingForTest.scala | Scala | apache-2.0 | 1,170 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import uk.gov.hmrc.ct.accounts.frs102.retriever.FullAccountsBoxRetriever
import uk.gov.hmrc.ct.box._
/** CT accounts box ACQ5035 ("Motor vehicles") — an optional yes/no user input. */
case class ACQ5035(value: Option[Boolean]) extends CtBoxIdentifier(name = "Motor vehicles") with CtOptionalBoolean with Input
  with ValidatableBox[FullAccountsBoxRetriever]
{
  def validate(boxRetriever: FullAccountsBoxRetriever) = {
    import boxRetriever._
    // This box may not carry a value while both AC44 and AC45 are empty.
    cannotExistErrorIf(hasValue && ac44.noValue && ac45.noValue)
  }
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/ACQ5035.scala | Scala | apache-2.0 | 1,087 |
package gs.nick.tests
import gs.nick._
import akka.http.scaladsl.testkit.ScalatestRouteTest
import gs.nick.server.AkkaHttpImplicits._
import org.scalatest.FunSpec
import scala.concurrent.Future
// Testing an Akka HTTP app notes at https://doc.akka.io/docs/akka-http/current/routing-dsl/testkit.html#table-of-inspectors
/** Route tests for the games web server, run against stubbed DAOs. */
class AppTestSuite extends FunSpec with ScalatestRouteTest {

  // Fresh server wired to the dummy DAOs below; the routes exercised here
  // must not touch the DAOs (they all throw NotImplementedError).
  def generateServer: WebServer = {
    new gs.nick.WebServer(DummyGamesDao, DummySystemsDao)
  }

  describe("Basic routes") {
    val routes = generateServer.routes

    it("responds to root when running") {
      Get("/") ~> routes ~> check {
        assert(status.intValue === 200)
        assert(true === responseAs[String].contains("server is running"))
      }
    }
  }
}
// mocks
/** Stub games DAO: every member is `???` and throws NotImplementedError if invoked. */
object DummyGamesDao extends GamesDaoTrait {
  override def getAllGames: Future[Seq[DbGame]] = ???
  override def getAllBySystem(systemId: Int): Future[Seq[DbGame]] = ???
  override def getGame(id: Int): Future[Option[DbGame]] = ???
  override def addGame(game: DbGame): Future[Int] = ???
}
/** Stub systems DAO: every member is `???` and throws NotImplementedError if invoked. */
object DummySystemsDao extends SystemsDaoTrait {
  override def getAllSystems: Future[Seq[DbSystem]] = ???
  override def getSystemById(id: Int): Future[Option[DbSystem]] = ???
  override def addSystem(newSystem: DbSystem): Future[Int] = ???
}
| nickfun/api-games | src/test/scala/gs/nick/tests/AppTestSuite.scala | Scala | gpl-3.0 | 1,317 |
package org.scalatra
import test.specs2.ScalatraSpec
/** Scalatra filter that exposes its own environment settings over HTTP,
 *  so a spec can mount it under different prefixes with different init params.
 */
class EnvironmentFilter extends ScalatraFilter {
  // Echoes the configured environment name (e.g. "development").
  get("/*/environment") {
    environment
  }

  // Echoes whether this filter instance considers itself in development mode.
  get("/*/is-development-mode") {
    isDevelopmentMode
  }
}
/** Verifies environment reporting for two filter mounts: /dev uses the default
 *  environment ("development"), /prod is configured to "production" via init param.
 */
class EnvironmentFilterSpec extends ScalatraSpec { def is =
  "The dev filter should" ^
    "return 'development' as the environment" ! env("dev", "development")^
    "be development mode" ! isDevMode("dev", true)^
                                                p^
  "The prod filter should" ^
    "return 'development' as the environment" ! env("prod", "production")^
    "be development mode" ! isDevMode("prod", false)^
                                                end

  val devFilterHolder = addFilter(classOf[EnvironmentFilter], "/dev/*")

  // Same filter class, but forced into "production" through its init parameter.
  val prodFilterHolder = addFilter(classOf[EnvironmentFilter], "/prod/*")
  prodFilterHolder.setInitParameter("org.scalatra.environment", "production")

  // Asserts the environment string reported under the given mount prefix.
  def env(environment: String, expected: String) =
    get("/%s/environment".format(environment)) {
      body must be equalTo(expected)
    }

  // Asserts the development-mode flag reported under the given mount prefix.
  def isDevMode(environment: String, expected: Boolean) =
    get("/%s/is-development-mode".format(environment)) {
      body must be equalTo(expected.toString)
    }
}
| louk/scalatra | core/src/test/scala/org/scalatra/EnvironmentSpec.scala | Scala | bsd-2-clause | 1,409 |
/*
* Copyright 2015 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.httpclient.json
import org.scalatest.{Matchers, FlatSpec}
import org.json4s._
import jackson.Serialization._
/** Round-trip (write then read) tests for the json4s-jackson protocols, covering
 *  no-type-hints, short/full type hints, and a custom type-hint field name.
 */
class Json4sJacksonSpec extends FlatSpec with Matchers{

  "NotTypeHints Example (case class)" should "have correct behaviour of read/write" in {
    import Json4sJacksonNoTypeHintsProtocol._
    val playInfo = PlayerInfo("d", "k", 30)
    val jsonString = """{"firstName":"d","lastName":"k","age":30}"""
    write(playInfo) should be (jsonString)
    read[PlayerInfo](jsonString) should be (playInfo)
  }

  "NotTypeHints Example (case class contain the other case class)" should "have correct behaviour of read/write" in {
    import Json4sJacksonNoTypeHintsProtocol._
    val name = Player("d", "k")
    val playInfo = PlayerInfo2(name, 30)
    val jsonString = """{"name":{"firstName":"d","lastName":"k"},"age":30}"""
    write(playInfo) should be (jsonString)
    read[PlayerInfo2](jsonString) should be (playInfo)
  }

  // Short hints serialize only the simple class name into "jsonClass".
  "ShortTypeHints Example (inheritance)" should "have correct behaviour of read/write" in {
    import Json4sJacksonShortTypeHintsProtocolExample._
    val animals = Animals(Dog("pluto") :: Fish(1.2) :: Nil)
    val jsonString = """{"animals":[{"jsonClass":"Dog","name":"pluto"},{"jsonClass":"Fish","weight":1.2}]}"""
    write(animals) should be (jsonString)
    read[Animals](jsonString) should be (animals)
  }

  // Full hints serialize the fully-qualified class name into "jsonClass".
  "FullTypeHints Example (inheritance)" should "have correct behaviour of read/write" in {
    import Json4sJacksonFullTypeHintsProtocolExample._
    val animals = Animals(Dog("lucky") :: Fish(3.4) :: Nil)
    val jsonString = """{"animals":[{"jsonClass":"org.squbs.httpclient.json.Dog","name":"lucky"},""" +
      """{"jsonClass":"org.squbs.httpclient.json.Fish","weight":3.4}]}"""
    write(animals) should be (jsonString)
    read[Animals](jsonString) should be (animals)
  }

  // Custom protocol renames the hint field to "$type$" (see the Formats below).
  "Custom Example (inheritance)" should "have correct behaviour of read/write" in {
    import Json4sJacksonCustomProtocolExample._
    val animals = Animals(Dog("lucky") :: Fish(3.4) :: Nil)
    val jsonString = """{"animals":[{"$type$":"org.squbs.httpclient.json.Dog","name":"lucky"},""" +
      """{"$type$":"org.squbs.httpclient.json.Fish","weight":3.4}]}"""
    write(animals) should be (jsonString)
    read[Animals](jsonString) should be (animals)
  }
}
/** Short-type-hints protocol instance covering the Dog/Fish hierarchy. */
object Json4sJacksonShortTypeHintsProtocolExample extends Json4sJacksonShortTypeHintsProtocol {
  override def hints: List[Class[_]] = List(classOf[Dog], classOf[Fish])
}
/** Full-type-hints protocol instance covering the Dog/Fish hierarchy. */
object Json4sJacksonFullTypeHintsProtocolExample extends Json4sJacksonFullTypeHintsProtocol {
  override def hints: List[Class[_]] = List(classOf[Dog], classOf[Fish])
}
/** Custom protocol: full type hints, but with the hint field renamed to "$type$". */
object Json4sJacksonCustomProtocolExample extends Json4sJacksonCustomProtocol {
  override implicit def json4sJacksonFormats: Formats = new Formats {
    // Reuse the lossless default date format; only hints/field name differ.
    val dateFormat = DefaultFormats.lossless.dateFormat
    override val typeHints = FullTypeHints(classOf[Fish] :: classOf[Dog] :: Nil)
    override val typeHintFieldName = "$type$"
  }
}
// Fixture models for the serialization tests above.
case class Player(firstName: String, lastName: String)
case class PlayerInfo(firstName: String, lastName: String, age: Int)
case class PlayerInfo2(name: Player, age: Int)

// Sealed-less hierarchy used to exercise type-hinted (de)serialization.
trait Animal
case class Dog(name: String) extends Animal
case class Fish(weight: Double) extends Animal
case class Animals(animals: List[Animal]) | keshin/squbs | squbs-httpclient/src/test/scala/org/squbs/httpclient/json/Json4sJacksonSpec.scala | Scala | apache-2.0 | 3,908 |
package org.jetbrains.plugins.scala
package codeInspection.feature
import com.intellij.codeInspection.ProblemsHolder
import com.intellij.openapi.module.ModuleUtilCore
import com.intellij.openapi.project.Project
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.annotator.intention.ScalaImportTypeFix
import org.jetbrains.plugins.scala.codeInspection.{AbstractFixOnPsiElement, AbstractInspection}
import org.jetbrains.plugins.scala.extensions.{ClassQualifiedName, ReferenceTarget, _}
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScReferencePattern
import org.jetbrains.plugins.scala.lang.psi.api.base.types.{ScExistentialClause, ScRefinement}
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScPostfixExpr
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScTypeParamClause
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScFunctionDefinition, ScMacroDefinition, ScTypeAliasDeclaration}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.ScClassParents
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.project.ScalaLanguageLevel.Scala_2_10
import org.jetbrains.plugins.scala.project._
import org.jetbrains.plugins.scala.project.settings.ScalaCompilerSettings
/**
* @author Pavel Fatin
*/
/** Inspection that flags uses of Scala's gated "advanced" language features
 *  (postfix ops, reflective calls, dynamics, implicit conversions, higher kinds,
 *  existentials, macros) when the corresponding feature flag is neither imported
 *  nor enabled in the module's compiler settings.
 */
class LanguageFeatureInspection extends AbstractInspection("LanguageFeature", "Advanced language features"){

  // One Feature per gated language construct: display name, flag location,
  // compiler-settings accessors, and a partial function that picks the PSI
  // element to highlight when the construct is found.
  private val Features = Seq(
    Feature("postfix operator notation", "scala.language", "postfixOps", _.postfixOps, _.postfixOps = true) {
      case e: ScPostfixExpr => e.operation
    },
    Feature("reflective call", "scala.language", "reflectiveCalls", _.reflectiveCalls, _.reflectiveCalls = true) {
      // Highlight just the identifier of the structural-type member when possible.
      case e @ ReferenceTarget(Parent(_: ScRefinement)) => e.getLastChild match {
        case id @ ElementType(ScalaTokenTypes.tIDENTIFIER) => id
        case _ => e
      }
    },
    Feature("dynamic member selection", "scala.language", "dynamics", _.dynamics, _.dynamics = true) {
      case e @ ReferenceTarget(ClassQualifiedName("scala.Dynamic")) && Parent(Parent(Parent(_: ScClassParents))) => e
    },
    Feature("implicit conversion", "scala.language", "implicitConversions", _.implicitConversions, _.implicitConversions = true) {
      // Only one-argument implicit defs whose sole clause is non-implicit count
      // as conversions; highlight the `implicit` keyword itself when present.
      case e: ScFunctionDefinition if e.getModifierList.has(ScalaTokenTypes.kIMPLICIT) &&
              e.parameters.size == 1 &&
              !e.parameterList.clauses.exists(_.isImplicit) =>
        Option(e.getModifierList.findFirstChildByType(ScalaTokenTypes.kIMPLICIT)).getOrElse(e)
    },
    Feature("higher-kinded type", "scala.language", "higherKinds", _.higherKinds, _.higherKinds = true) {
      case (e: ScTypeParamClause) && Parent(Parent(_: ScTypeParamClause)) => e
      case (e: ScTypeParamClause) && Parent(_: ScTypeAliasDeclaration) => e
    },
    Feature("existential type", "scala.language", "existentials", _.existentials, _.existentials = true) {
      case e: ScExistentialClause => e.firstChild.getOrElse(e) // TODO Exclude reducible existential types
    },
    Feature("macro definition", "scala.language.experimental", "macros", _.macros, _.macros = true) {
      case e: ScMacroDefinition => e.children.find(it => it.getText == "macro").getOrElse(e)
    })

  override def actionFor(holder: ProblemsHolder): PartialFunction[PsiElement, Unit] = PartialFunction.apply { e: PsiElement =>
    val module = ModuleUtilCore.findModuleForPsiElement(e)
    // Feature flags only exist since Scala 2.10; skip older SDKs entirely.
    if (module != null && module.scalaSdk.exists(_.languageLevel >= Scala_2_10)) {
      Features.foreach(_.process(e, holder))
    }
  }
}
/** One gated language feature.
 *
 *  @param name           human-readable feature name used in the problem message
 *  @param flagQualifier  object containing the feature flag (e.g. "scala.language")
 *  @param flagName       flag member name (e.g. "postfixOps")
 *  @param isEnabled      reads the flag from the module's compiler settings
 *  @param enable         turns the flag on in the compiler settings (quick fix)
 *  @param findIn         selects the PSI element to highlight, if this feature occurs at `e`
 */
private case class Feature(name: String,
                           flagQualifier: String,
                           flagName: String,
                           isEnabled: ScalaCompilerSettings => Boolean,
                           enable: ScalaCompilerSettings => Unit)
                          (findIn: PartialFunction[PsiElement, PsiElement]) {

  /** Registers a problem (with both quick fixes) if the feature occurs at `e`
   *  while being neither enabled in the compiler settings nor imported in scope.
   */
  def process(e: PsiElement, holder: ProblemsHolder) {
    e.module.foreach { module =>
      if (!isEnabled(module.scalaCompilerSettings)) {
        findIn.lift(e).foreach { it =>
          if (!isFlagImportedFor(it)) {
            holder.registerProblem(it, "Advanced language feature: " + name,
              new ImportFeatureFlagFix(it, name, flagQualifier + "." + flagName),
              new EnableFeatureFix(module.scalaCompilerSettings, it, name, enable))
          }
        }
      }
    }
  }

  // Checks whether the bare flag name resolves, at this position, to the member
  // of the expected scala.language(.experimental) object — i.e. it was imported.
  private def isFlagImportedFor(e: PsiElement): Boolean = {
    ScalaPsiElementFactory.createReferenceFromText(flagName, e, e).resolve() match {
      case e: ScReferencePattern => Option(e.containingClass).exists(_.qualifiedName == flagQualifier)
      case _ => false
    }
  }
}
/** Quick fix: add an `import` of the feature flag (e.g. scala.language.postfixOps)
 *  to the enclosing import holder of the flagged element.
 */
private class ImportFeatureFlagFix(e: PsiElement, name: String, flag: String)
  extends AbstractFixOnPsiElement("Import feature flag for %ss".format(name), e) {

  def doApplyFix(project: Project) {
    val elem = getElement
    val importsHolder = ScalaImportTypeFix.getImportHolder(elem, elem.getProject)
    importsHolder.addImportForPath(flag, elem)
  }
}
/** Quick fix: enable the feature directly in the module's compiler settings.
 *  `settings` is by-name so the current settings are fetched at fix time.
 */
private class EnableFeatureFix(settings: => ScalaCompilerSettings, e: PsiElement, name: String, f: ScalaCompilerSettings => Unit)
  extends AbstractFixOnPsiElement("Enable " + name + "s", e) {

  def doApplyFix(project: Project) {
    f(settings)
  }
}
| ilinum/intellij-scala | src/org/jetbrains/plugins/scala/codeInspection/feature/LanguageFeatureInspection.scala | Scala | apache-2.0 | 5,435 |
/* The Computer Language Benchmarks Game
http://shootout.alioth.debian.org/
Contributed by Rex Kerr
(inspired by the C++ version by Andrew Moon)
*/
import java.io._
import actors.Futures._
/** Benchmarks-game k-nucleotide solution.
 *
 *  Reads a FASTA stream from stdin, extracts sequence THREE, packs it 2 bits
 *  per nucleotide, then counts k-mer frequencies for several k in parallel
 *  (one future per k) using an open-addressing counting hash.
 */
object knucleotide {
  val BlockSize = 1024*1024

  // Iterators are not specialized so we need our own
  // (avoids boxing every Long k-mer during the hot counting loop).
  abstract class LongIter {
    def hasNext: Boolean
    def next: Long
    def foreach(f: Long => Unit) { while (hasNext) f(next) }
  }

  // Byte classifier: 0-3 = packed nucleotide code, -3 = newline,
  // -2 = FASTA header start '>', -1 = anything else (ignored).
  val table = Array.tabulate[Byte](256) {
    case 'a' | 'A' => 0
    case 't' | 'T' => 1
    case 'g' | 'G' => 2
    case 'c' | 'C' => 3
    case '\n' => -3
    case '>' => -2
    case _ => -1
  }

  // More efficient to store DNA sequence data as bits instead of bytes
  // Packed little-endian within each Int: 16 nucleotides x 2 bits per word.
  // Invariant: `size` = nucleotides stored, write cursor = (index, n) where
  // n is the bit offset inside data(index).
  class Bits(val data: Array[Int]) {
    self =>
    var size = 0
    var index = 0
    var n = 0

    /** Appends one 2-bit nucleotide code at the write cursor. */
    def add2(b: Byte) {
      size += 1
      if (n>30) { index += 1; n = 0 }
      data(index) |= (b&0x3)<<n
      n += 2
    }

    /** Appends another Bits buffer: bulk-copies its full words (fast path is a
     *  plain arraycopy when we are word-aligned, otherwise an OR-shift merge),
     *  then appends its trailing partial word nucleotide by nucleotide.
     */
    def addLots(bs: Bits) {
      if (n==0 || n>30) {
        // Word-aligned: copy whole words directly.
        if (n>30) { index += 1; n = 0 }
        System.arraycopy(bs.data,0,data,index,bs.index)
        index += bs.index
        if (index > 0 && n == 0) { index -= 1; n = 32 }
      }
      else {
        // Unaligned: each source word spans two destination words.
        var i = 0
        while (i < bs.index) {
          val j = bs.data(i)
          data(index) |= j << n
          index += 1
          data(index) |= j >>> (32-n)
          i += 1
        }
        size // NOTE(review): no-op expression, apparently leftover; has no effect.
      }
      size += bs.index*16
      if (bs.n != 0) {
        // Trailing partial word of the source: append its 2-bit codes one by one.
        var n = bs.n
        var i = bs.data(bs.index)
        while (n > 0) {
          add2( i.toByte )
          i >>>= 2
          n -= 2
        }
      }
    }

    /** Iterates all k-mers of length `n` starting at nucleotide `offset`,
     *  each packed into the low 2n bits of a Long.
     */
    def scan(n: Int, offset: Int) = new LongIter {
      var i = offset % 16   // nucleotide position within current word
      var j = offset / 16   // current word
      val mask = (1L << (2*n)) - 1
      def hasNext = j*16 + i + n <= self.size
      def next = {
        if (i+n <= 16) {
          // k-mer fits in the current word
          val l = ((data(j) >>> (2*i)) & mask)
          i += n
          if (i>=16) { j += 1; i -= 16 }
          l
        }
        else {
          // k-mer straddles two words: combine low part and high part
          val l = (((data(j) >>> (2*i))).toLong | (data(j+1).toLong << 2*(16-i))) & mask
          j += 1
          i += n - 16
          if (i>=16) { j += 1; i -= 16 }
          l
        }
      }
    }
  }

  // Load a UTF-8 DNA file from standard in, picking out requested sequence
  // Scans block-by-block for `target` (the header marker), then packs every
  // nucleotide until the next header ('>' right after a newline) or EOF.
  def load(is: InputStream, target: Array[Byte]) = {
    var need = 1
    var found,nl,done = false
    def read: Bits = {
      val data = new Array[Byte](BlockSize)
      val n = is.read(data)
      var i = 0
      // Continue matching the target header across block boundaries.
      while (i<n && need<target.length) {
        if (data(i)==target(need)) need += 1 else need = 0
        i += 1
      }
      if (need >= target.length && !found) {
        // Skip the rest of the header line.
        while (i<n && data(i)!='\n') i += 1
        if (i<n) found = true
      }
      if (found && !done)
      {
        val bits = new Bits(new Array[Int](1+((n-i)>>4)))
        while (i < n) {
          val x = table(data(i)&0xFF)
          if (x >= 0) { bits.add2(x); nl = false }
          else if (x == -3) nl = true
          else if (nl && x == -2) { i = n; done = true } // next header: stop
          i += 1
        }
        bits
      }
      else if (n==BlockSize && !done) read // header not reached yet: recurse into next block
      else new Bits(new Array[Int](0))
    }
    val data = Iterator.continually(read).takeWhile(_.size > 0).toArray
    val all = new Bits(new Array[Int](data.map(_.size).sum/16+1))
    data.foreach(all.addLots)
    all
  }

  // Utility to go from binary to text representation
  val decode = Map(0L->"A", 1L->"T", 2L->"G", 3L->"C")
  // Not tail-recursive, but n is at most 18 here (see `sizes` in main).
  def l2s(l: Long, n: Int): String = {
    if (n <= 0) ""
    else decode(l&0x3) + l2s(l>>>2, n-1)
  }

  // Custom counted hash set (neither Java nor Scala provides one)
  // Open addressing with linear probing; grows 2x when load factor exceeds 1/6.
  // `z` is the k-mer length, used only for printing.
  class DnaHash(z: Int) {
    var size = 16
    var n = 0
    var keys = new Array[Long](size)
    var counts = new Array[Int](size)
    final def hc(l: Long) = (l.toInt + (l>>17).toInt) & (size-1) // size is a power of two
    final def nx(i: Int) = (i+1) & (size - 1)                    // wrap-around probe step

    /** Adds `count` occurrences of `key`, resizing (rehash) when too full. */
    def +=(key: Long, count: Int = 1) {
      val index = hc(key)
      if (counts(index) == 0) {
        keys(index) = key
        counts(index) = count
        n += 1
      }
      else if (keys(index) == key) counts(index) += count
      else if (6*n > size) {
        // Rehash everything into a doubled table, then retry the insert.
        val (oldk, oldc, olds) = (keys, counts, size)
        size *= 2
        keys = new Array[Long](size)
        counts = new Array[Int](size)
        n = 0
        var i = 0
        while (i < olds) {
          if (oldc(i) > 0) this += (oldk(i), oldc(i))
          i += 1
        }
        this += key
      }
      else {
        // Linear probing from the home slot.
        var i = nx(index)
        while (counts(i) != 0 && keys(i) != key) i = nx(i)
        if (counts(i) == 0) {
          keys(i) = key
          counts(i) = count
          n += 1
        }
        else counts(i) += count
      }
    }

    /** Returns the count for `key` (0 if absent). */
    def apply(key: Long) = {
      var i = hc(key)
      while (counts(i) > 0 && keys(i) != key) i = nx(i)
      counts(i)
    }

    /** Prints all k-mers as percentages, descending by frequency then name. */
    def printSorted {
      val factor = 100.0/counts.sum
      (counts.map(_*factor) zip keys.map(l2s(_,z))).filter(_._1 > 0).sortWith((a,b) =>
        a._1 > b._1 || (a._1 == b._1 && a._2 < b._2)
      ).foreach{ case (freq, label) => printf("%s %.3f\n",label,freq) }
      println
    }

    /** Prints the count of the specific k-mer given as text. */
    def print(s: String) {
      // Pack the text k-mer the same way scan() does (first char in low bits).
      val key = s.getBytes.map(x => table(x & 0xFF).toLong).reduceRight((l,r) => 4*r + l)
      printf("%-7d %s\n",this(key),s)
    }
  }

  // Required function that adds data with offset to hash set
  def addToHash(data: Bits, hash: DnaHash, n: Int, offset: Int) = data.scan(n,offset).foreach(hash += _)

  def main(args: Array[String]) {
    val sizes = List(1,2,3,4,6,12,18)
    val sequence = "GGTATTTTAATTTATAGT"
    val data = load(System.in, "\n>THREE".getBytes)
    // One future per k-mer size; each counts all offsets for its size.
    val answers = sizes.map(n => n -> future {
      val h = new DnaHash(n)
      for (i <- 0 until n) addToHash(data,h,n,i)
      h
    }).toMap
    answers(1)().printSorted
    answers(2)().printSorted
    sizes.drop(2).foreach(n => answers(n)().print(sequence.substring(0,n)))
  }
}
| kragen/shootout | bench/knucleotide/knucleotide.scala-5.scala | Scala | bsd-3-clause | 5,954 |
package scorex.unit
import org.scalatest.{FlatSpec, Matchers}
import play.api.libs.json.Json
import scorex.api.http._
import spray.routing.HttpService
import spray.testkit.ScalatestRouteTest
/** Spray-testkit route tests for the blocks and addresses HTTP APIs.
 *  Responses are parsed as JSON and checked field by field.
 */
class HttpServiceSpecification extends FlatSpec
  with ScalatestRouteTest
  with HttpService
  with Matchers
  with AddressHttpService
  with BlocksHttpService {

  def actorRefFactory = system

  "blocksRouting" should "return first block" in {
    Get("/blocks/first") ~> blocksRouting ~> check {
      val js = Json.parse(responseAs[String])
      // The genesis block carries no fees.
      (js \ "fee").as[Int] shouldBe 0
      (js \ "version").as[Int] should be >= 1
      (js \ "transactions").toOption should not be None
      //TODO check concrete block?
    }
  }

  it should "return last block" in {
    Get("/blocks/last") ~> blocksRouting ~> check {
      val js = Json.parse(responseAs[String])
      (js \ "fee").as[Int] should be >= 0
      (js \ "version").as[Int] should be >= 1
      (js \ "transactions").toOption should not be None
    }
  }

  it should "return error for wrong signature" in {
    Get("/blocks/signature/wrongSignature") ~> blocksRouting ~> check {
      val js = Json.parse(responseAs[String])
      // 301 is the API's "block not found by signature" error code here.
      (js \ "error").as[Int] shouldBe 301
    }
  }
  //TODO check correct signature

  it should "return block at 1" in {
    Get("/blocks/at/1") ~> blocksRouting ~> check {
      val js = Json.parse(responseAs[String])
      (js \ "fee").as[Int] should be >= 0
      (js \ "version").as[Int] should be >= 1
    }
  }

  it should "return height" in {
    Get("/blocks/height") ~> blocksRouting ~> check {
      val js = Json.parse(responseAs[String])
      (js \ "height").as[Int] should be >= 1
    }
  }
  //TODO test route /blocks/height/$encodedSignature
  //TODO test route /blocks/child/$encodedSignature
  //TODO test route /blocks/address/$address
  //TODO uncomment after fixing hang up problem

  // NOTE(review): only prints the response body; add assertions once the
  // hang-up problem mentioned above is fixed.
  "adressesRouting" should "handle root request" in {
    Get("/addresses/") ~> adressesRouting ~> check {
      println(responseAs[String])
    }
  }
} | Pole-he/Scorex-Lagonaki | src/test/scala/scorex/unit/HttpServiceSpecification.scala | Scala | cc0-1.0 | 2032 |
package org.awong
// Placeholder objects — currently empty; presumably stubs for algorithms
// to be implemented (TODO confirm intent).
package object beyond {
  object ClosestPair
  object Complex
  object FFT
  object FarthestPair
  object GaussianElimination
  object GrahamScan
} | alanktwong/algorithms-scala | beyond/src/main/scala/org/awong/beyond/package.scala | Scala | mit | 166 |
package mesosphere.marathon
package test
import java.time.{Duration => JavaDuration, _}
import scala.compat.java8.DurationConverters
import scala.concurrent.duration.FiniteDuration
object SettableClock {
  // Fixed reference instant (2015-04-09T12:30:00Z) used when no clock is given,
  // so tests relying on the default are reproducible.
  private val defaultJavaClock =
    Clock.fixed(LocalDateTime.of(2015, 4, 9, 12, 30, 0).toInstant(ZoneOffset.UTC), ZoneOffset.UTC)

  /** Creates a clock frozen at the current wall-clock instant, in UTC. */
  def ofNow() = new SettableClock(Clock.fixed(Instant.now(), ZoneOffset.UTC))
}

/**
 * A mutable [[java.time.Clock]] for tests: time only moves when the test calls
 * `advanceBy`/`advanceTo`. Callbacks registered via `onChange` fire after every change.
 *
 * All mutable state (`clock` and `subscribers`) is guarded by this object's monitor,
 * so the clock can safely be read and advanced from different test threads.
 * (Previously only `onChange` synchronized, so updates made by `advanceBy`/`advanceTo`
 * had no visibility guarantee to readers on other threads.)
 */
class SettableClock(private[this] var clock: Clock = SettableClock.defaultJavaClock) extends Clock {
  private[this] var subscribers: List[() => Unit] = Nil

  /** Registers a callback invoked after each `advanceBy`/`advanceTo`. */
  def onChange(fn: () => Unit): Unit =
    synchronized {
      subscribers = fn :: subscribers
    }

  override def getZone: ZoneId = synchronized(clock.getZone)

  override def instant(): Instant = synchronized(clock.instant())

  override def withZone(zoneId: ZoneId): Clock = synchronized(new SettableClock(clock.withZone(zoneId)))

  /** Advances by a Scala duration. Converted via nanoseconds, which is exact for
   *  any duration below ~292 years (same range `FiniteDuration` counts in nanos).
   */
  def advanceBy(duration: FiniteDuration): this.type =
    advanceBy(JavaDuration.ofNanos(duration.toNanos))

  /** Moves the clock by `duration` (negative moves it back), then notifies subscribers.
   *  Callbacks run outside the lock so they may re-enter this clock without deadlock risk.
   */
  def advanceBy(duration: JavaDuration): this.type = {
    val toNotify = synchronized {
      clock = Clock.offset(clock, duration)
      subscribers
    }
    toNotify.foreach(_())
    this
  }

  /** Pins the clock to `instant` (keeping the current zone), then notifies subscribers. */
  def advanceTo(instant: Instant): this.type = {
    val toNotify = synchronized {
      clock = Clock.fixed(instant, clock.getZone)
      subscribers
    }
    toNotify.foreach(_())
    this
  }
}
| mesosphere/marathon | src/test/scala/mesosphere/marathon/test/SettableClock.scala | Scala | apache-2.0 | 1,262 |
package ui
import algorithm.Common.Problems
import algorithm.Common.StringProblemMap
import algorithm.Problem
import algorithm.Termination
import algorithm.Termination.Generations
import algorithm.Termination.Termination
import algorithm.Termination.Time
import algorithm.Termination.stringTerminationMap
import javafx.event.ActionEvent
import javafx.event.EventHandler
import javafx.scene.control.{ ToggleButton => JfxToggleBtn }
import scalafx.collections.ObservableBuffer
import scalafx.geometry.Insets
import scalafx.geometry.Pos
import scalafx.scene.control.Button
import scalafx.scene.control.CheckBox
import scalafx.scene.control.ComboBox
import scalafx.scene.control.Label
import scalafx.scene.control.ToggleButton
import scalafx.scene.control.ToggleGroup
import scalafx.scene.layout.HBox
import scalafx.scene.layout.Priority
import scalafx.scene.layout.VBox
import ui.MyTab.TExecution
import algorithm.Rastrigin
import scalafx.scene.control.Tooltip
/**
* Settings tab in GUI
*/
object Settings extends VBox {
val AlgorithmFirefly = "Firefly"
/* Handles interaction with user */
/* Handles interaction with user */
object Controller {

  /**
   * Settings that can be chosen in GUI
   * Default values defined here are preselected in the GUI control elements
   */
  case class ExecutionSettings(
    algorithm: Option[String] = Some(AlgorithmFirefly),
    problem: Option[Problem] = Some(Rastrigin),
    alpha: Double = 0.2d,
    beta: Double = 0.2d,
    gamma: Double = 1.0d,
    population: Int = 20,
    termination: Option[Termination] = Some(Generations),
    terminationGenerations: Int = 10,
    terminationTime: Int = 10,
    visualization: Boolean = true,
    visualizationDelay: Int = 500) {

    /** are all mandatory fields selected? */
    lazy val isValid = Seq(algorithm, problem, termination).forall(_.isDefined)
  }

  /* use the default values for the GUI */
  // Pushes the defaults above into the control widgets declared below.
  def setDefaultValues {
    alpha.value_=(settings.alpha)
    beta.value_=(settings.beta)
    gamma.value_=(settings.gamma)
    population.value_=(settings.population)
    problemToggle.head.selected_=(true)
    terminationToggle.head.selected_=(true)
    terminationGenerations.value_=(settings.terminationGenerations)
    terminationTime.value_=(settings.terminationTime)
    visualization.selected_=(settings.visualization)
    visualizationDelay.value_=(settings.visualizationDelay)
  }

  /* current settings */
  private var settings = ExecutionSettings()

  // update methods start
  // Each setter copies the new value into `settings` and refreshes the GUI state.
  def setProblem(problem: Option[String]) {
    settings = settings.copy(problem = problem map StringProblemMap)
    update
  }
  def setAlgorithm(algorithm: Option[String]) {
    settings = settings.copy(algorithm = algorithm)
    update
  }
  def setAlpha(alpha: Double) {
    settings = settings.copy(alpha = alpha)
    update
  }
  def setBeta(beta: Double) {
    settings = settings.copy(beta = beta)
    update
  }
  def setGamma(gamma: Double) {
    settings = settings.copy(gamma = gamma)
    update
  }
  def setPopulation(population: Int) {
    settings = settings.copy(population = population)
    update
  }
  def setTermination(termination: Option[String]) {
    settings = settings.copy(termination = termination map stringTerminationMap)
    update
  }
  def setTerminationGenerations(terminationGenerations: Int) {
    settings = settings.copy(terminationGenerations = terminationGenerations)
    update
  }
  def setTerminationTime(terminationTime: Int) {
    settings = settings.copy(terminationTime = terminationTime)
    update
  }
  def setVisualization(visualization: Boolean) {
    settings = settings.copy(visualization = visualization)
    update
  }
  def setVisualizationDelay(visualizationDelay: Int) {
    settings = settings.copy(visualizationDelay = visualizationDelay)
    update
  }
  // update methods stop

  /* udpate GUI for current settings */
  // Enables only the relevant termination combo, and colors/enables the
  // execution button when all mandatory settings are chosen.
  private def update {
    visualizationDelay.disable_=(!settings.visualization)
    terminationGenerations.disable_=(true)
    terminationTime.disable_=(true)
    settings.termination match {
      case Some(termination) => termination match {
        case Generations =>
          terminationGenerations.disable_=(false)
        case Time =>
          terminationTime.disable_=(false)
      }
      case None =>
    }
    if (settings.isValid) {
      execution.disable_=(false)
      execution.style_=("-fx-base: red")
    } else {
      execution.disable_=(true)
      execution.style_=("-fx-base: grey")
    }
  }

  // Read-only view of the current settings for the execution tab.
  def curSettings = settings
}
import Controller._
/**
* Helper method to generate toggles
* @param values selection options
* @param worker handler function for selection changes
*/
/**
 * Helper method to generate toggles
 * @param values selection options
 * @param worker handler function for selection changes
 */
private def toggleGenerator(values: List[String], worker: Option[String] => Unit): List[ToggleButton] = {
  // Radio Button Toggle Group
  // NOTE(review): toggleLabel is created but never used — candidate for removal.
  val toggleLabel = new Label {
    text = ""
    style = "-fx-font-size: 2em;"
  }
  // Forwards the selected button's text (or None on deselection) to `worker`.
  val tog = new ToggleGroup {
    selectedToggle.onChange(
      (_, _, newValue) => newValue match {
        case btn: JfxToggleBtn => worker(Some(btn.getText))
        case _ => worker(None)
      })
  }
  // Pre-select only when there is exactly one option (no real choice).
  val firstSelected = values.length == 1
  values.map { e =>
    new ToggleButton {
      minWidth = 100
      text = e
      selected_=(firstSelected)
      toggleGroup = tog
    }
  }
}
/**
* Helper method to generate combo boxes
* @param values selection options
* @param worker handler function for selection changes
*/
/**
 * Helper method to generate combo boxes
 * @param values selection options
 * @param worker handler function for selection changes
 */
private def comboGenerator[T](values: Seq[T], worker: T => Unit) = new ComboBox[T] {
  minWidth = 100
  maxWidth = 100
  promptText = "Choose..."
  items = ObservableBuffer(values)
  // Forward every selection change to the controller callback.
  value.onChange((_, _, newValue) => {
    worker(newValue)
  })
}
// Checkbox toggling whether the run is visualized live.
private val visualization = new CheckBox {
  maxWidth_=(100)
  minWidth_=(100)
  text = "Enable"
  selected.onChange((_, _, newValue) => {
    setVisualization(newValue)
  })
}

// Starts a run with the current settings and switches to the execution tab.
// Disabled (grey) until Controller.update marks the settings valid.
private val execution = new Button {
  maxWidth = 100
  maxHeight = 100
  text = "Execution"
  style = "-fx-base: grey"
  disable_=(true)
  onAction = new EventHandler[ActionEvent] {
    override def handle(event: ActionEvent) {
      Execution.Controller.init(curSettings)
      Tabs.Controller.switchTo(TExecution)
    }
  }
}

// Parameter selectors; each pushes its value into the Controller on change.
private val visualizationDelay = comboGenerator[Int](Seq(1, 2, 3, 4, 5, 10, 20, 30, 40, 50, 100, 200, 300, 400, 500, 1000), setVisualizationDelay)
private val terminationGenerations = comboGenerator[Int](Seq(2, 3, 4, 5, 10, 20, 30, 40, 50, 100, 200, 300, 400, 500, 1000, 5000, 10000), setTerminationGenerations)
private val terminationTime = comboGenerator[Int](Seq(1, 2, 3, 4, 5, 10, 20, 30, 40, 50, 100, 200, 300, 400, 500, 1000), setTerminationTime)
terminationTime.tooltip_=(Tooltip("Execution duration in seconds"))
private val alpha = comboGenerator[Double](Seq(0.1d, 0.2d, 0.3d, 0.4d, 0.5d, 0.6d, 0.7d, 0.8d, 0.9d), setAlpha)
private val beta = comboGenerator[Double](Seq(0.1d, 0.2d, 0.3d, 0.4d, 0.5d, 0.6d, 0.7d, 0.8d, 0.9d), setBeta)
private val gamma = comboGenerator[Double](Seq(0.1d, 0.2d, 0.3d, 0.4d, 0.5d, 0.6d, 0.7d, 0.8d, 0.9d), setGamma)
private val population = comboGenerator[Int](Seq(5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 60, 70, 80, 90, 100), setPopulation)

// Toggle groups for termination criterion and optimization problem.
private val terminationToggle = toggleGenerator(Termination.values.toSeq.sortBy(_.id).map(_.toString).toList, setTermination)
private val problemToggle = toggleGenerator(Problems.map(_.name).toList, setProblem)
vgrow = Priority.ALWAYS
hgrow = Priority.ALWAYS
spacing = 10
padding = Insets(20)
// put all UI elements together: algorithm, problem, parameters, termination, visualization, start
content = List(
  separator,
  new Label { text = "Algorithm" },
  new HBox {
    spacing = 10
    content = toggleGenerator(List(AlgorithmFirefly), setAlgorithm)
  },
  separator,
  new Label { text = "Problem" },
  new HBox {
    spacing = 10
    content = problemToggle
  },
  separator,
  new Label { text = "Algorithm Parameters" },
  new HBox {
    spacing = 10
    content = List(
      new Label { minWidth = 100; text = "Population"; alignment_=(Pos.BASELINE_RIGHT) },
      population,
      new Label { minWidth = 100; text = "Alpha"; alignment_=(Pos.BASELINE_RIGHT) },
      alpha,
      new Label { minWidth = 100; text = "Beta"; alignment_=(Pos.BASELINE_RIGHT) },
      beta,
      new Label { minWidth = 100; text = "Gamma"; alignment_=(Pos.BASELINE_RIGHT) },
      gamma)
  },
  separator,
  // fixed typo in the user-visible label ("Conditon" -> "Condition")
  new Label { text = "Termination Condition" },
  new HBox {
    spacing = 10
    content = terminationToggle
  },
  new HBox {
    spacing = 10
    content = List(
      terminationGenerations,
      terminationTime)
  },
  separator,
  new Label { text = "Visualization" },
  new HBox {
    spacing = 10
    content = List(
      visualization,
      new Label { minWidth = 100; text = "Delay ms"; alignment_=(Pos.BASELINE_RIGHT) },
      visualizationDelay)
  },
  separator,
  execution)
} | felixamerbauer/firefly-simulator | src/main/scala/ui/Settings.scala | Scala | gpl-3.0 | 9,230 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.process.knn
import org.geotools.factory.Hints
import org.geotools.feature.simple.SimpleFeatureBuilder
import org.junit.runner.RunWith
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.text.WKTUtils
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
@RunWith(classOf[JUnitRunner])
class GeoHashSpiralTest extends Specification {

  /** Builds a point feature located in Charlottesville, VA, used as KNN query center. */
  def generateCvilleSF = {
    val sftName = "geomesaKNNTestQueryFeature"
    val sft = SimpleFeatureTypes.createType(sftName, "geom:Point:srid=4326,dtg:Date,dtg_end_time:Date")
    val cvilleSF = SimpleFeatureBuilder.build(sft, List(), "charlottesville")
    // Plain string literal: the former f"" interpolator contained no format specifiers.
    cvilleSF.setDefaultGeometry(WKTUtils.read("POINT(-78.4953560 38.0752150 )"))
    cvilleSF.getUserData()(Hints.USE_PROVIDED_FID) = java.lang.Boolean.TRUE
    cvilleSF
  }

  /** Builds a line feature (not a point) to exercise the unsupported-geometry path. */
  def generateLineSF = {
    val sftName = "geomesaKNNTestQueryFeature"
    val sft = SimpleFeatureTypes.createType(sftName, "geom:LineString:srid=4326,dtg:Date,dtg_end_time:Date")
    val lineSF = SimpleFeatureBuilder.build(sft, List(), "route 29")
    lineSF.setDefaultGeometry(WKTUtils.read("LINESTRING(-78.491 38.062, -78.474 38.082)"))
    lineSF.getUserData()(Hints.USE_PROVIDED_FID) = java.lang.Boolean.TRUE
    lineSF
  }

  "Geomesa GeoHashSpiral PriorityQueue" should {
    "order GeoHashes correctly around Charlottesville" in {
      val cvilleSF = generateCvilleSF
      val cvillePQ = GeoHashSpiral(cvilleSF, 500.0, 5000.0)
      val cvillePQ2List = cvillePQ.toList
      val nearest9ByCalculation = cvillePQ2List.take(9).map{_.hash}
      // the below are ordered by geodetic distances
      val nearest9ByVisualInspection = List (
        "dqb0tg",
        "dqb0te",
        "dqb0tf",
        "dqb0td",
        "dqb0tu",
        "dqb0ts",
        "dqb0w5",
        "dqb0w4",
        "dqb0tc")
      nearest9ByCalculation must equalTo(nearest9ByVisualInspection)
    }

    "use the statefulDistanceFilter around Charlottesville correctly before pulling GeoHashes" in {
      val cvilleSF = generateCvilleSF
      val cvillePQ = GeoHashSpiral(cvilleSF, 500.0, 10000.0)
      cvillePQ.mutateFilterDistance(1000.0) // units are meters
      val numHashesAfterFilter = cvillePQ.toList.length
      numHashesAfterFilter must equalTo(12)
    }

    "use the statefulDistanceFilter around Charlottesville correctly after pulling GeoHashes " in {
      val cvilleSF = generateCvilleSF
      val cvillePQ = GeoHashSpiral(cvilleSF, 500.0, 10000.0)
      // take the 20 closest GeoHashes
      val ghBeforeFilter = cvillePQ.take(20)
      ghBeforeFilter.length must equalTo(20)
      // now mutate the filter -- this is restrictive enough that no further GeoHashes should pass
      cvillePQ.mutateFilterDistance(1000.0) // units are meters
      // attempt to take five more
      val ghAfterFilter = cvillePQ.take(5)
      ghAfterFilter.length must equalTo(0)
    }

    "throw an exception if given a non-point geometry" in {
      val route29SF = generateLineSF
      GeoHashSpiral(route29SF, 500.0, 10000.0) should throwAn[RuntimeException]
    }
  }
}
| MutahirKazmi/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/process/knn/GeoHashSpiralTest.scala | Scala | apache-2.0 | 3,662 |
/**
* Implements an example of Counting sort.
*
* In English, here's how the algorithm works:
* Create an ordered array of all unique values (ie, keys) between min and max
* of the array to be sorted.
* Count the occurrences of each key, and store this value in the array.
* Iterate through array, assembling each key in order, repeated once for
* each occurrence.
**/
object CountingSort {

  /** Demo entry point: sorts a sample array and prints the elements in order. */
  def main(args: Array[String]): Unit = {
    val sample = Array(3, 9, 8, 13, 2, 5, 4)
    sort(sample, sample.min, sample.max).foreach(println)
  }

  /**
   * Sorts `a` using counting sort in O(n + k) time, where k = max - min + 1.
   *
   * The sort is stable and does not modify the input array.
   *
   * @param a   array to sort
   * @param min lower bound (inclusive) of the values occurring in `a`
   * @param max upper bound (inclusive) of the values occurring in `a`
   * @return a new array containing the elements of `a` in ascending order
   */
  def sort(a: Array[Int], min: Int, max: Int): Array[Int] = {
    if (a.isEmpty) Array.empty[Int]
    else {
      // Bucket index of a value, relative to the smallest key.
      def key(value: Int): Int = value - min

      // Count how many of each key we have.
      val count = new Array[Int](max - min + 1)
      a.foreach(e => count(key(e)) += 1)

      // Turn counts into cumulative offsets via a prefix sum.
      for (i <- 1 until count.length) count(i) += count(i - 1)

      // Place elements at their offsets; iterating in reverse keeps the sort stable.
      val result = new Array[Int](a.length)
      for (e <- a.reverseIterator) {
        count(key(e)) -= 1
        result(count(key(e))) = e
      }
      result
    }
  }
}
| mypetyak/scala-algorithms | sort/countingsort.scala | Scala | mit | 1,196 |
package monocle.internal
import monocle.Optional
import monocle.function.Index
import scalaz.Maybe
/**
 * Type class describing bitwise operations on a fixed-width type `A`.
 * Instances are provided by [[BitsInstances]].
 */
private[monocle] trait Bits[A] {
  // total number of addressable bits in an A
  def bitSize: Int

  def bitwiseAnd(a1: A, a2: A): A
  def bitwiseOr(a1: A, a2: A): A
  def bitwiseXor(a1: A, a2: A): A

  def shiftL(a: A, n: Int): A
  def shiftR(a: A, n: Int): A

  // create an A with a single bit set at position n
  def singleBit(n: Int): A

  // set (true) or clear (false) the bit at position n
  def updateBit(newValue: Boolean)(a: A, n: Int): A = if (newValue) setBit(a, n) else clearBit(a, n)
  def setBit(a: A, n: Int): A = bitwiseOr(a, singleBit(n))
  def clearBit(a: A, n: Int): A = bitwiseAnd(a, negate(singleBit(n)))
  def testBit(a: A, n: Int): Boolean

  // bitwise complement
  def negate(a: A): A
  // NOTE(review): despite the name this returns whether `a` is strictly positive — confirm intent.
  def signed(a: A): Boolean
}

private[monocle] object Bits extends BitsInstances {
  /** Summons the Bits instance for A. */
  def apply[A](implicit ev: Bits[A]): Bits[A] = ev

  /** Index optic addressing individual bits of S; out-of-range indices are no-ops on set. */
  def bitsIndex[S: Bits]: Index[S, Int, Boolean] = new Index[S, Int, Boolean] {
    // Run `a` only when i is a valid bit position for S.
    private def doIfInRange[A](i: Int)(a: => A): Option[A] =
      if(i >= 0 && i < Bits[S].bitSize) Some(a)
      else None
    def index(i: Int): Optional[S, Boolean] =
      Optional[S, Boolean](
        s => doIfInRange(i)(Bits[S].testBit(s, i)))(
        a => s => doIfInRange(i)(Bits[S].updateBit(a)(s, i)).getOrElse(s)
      )
  }
}
/** Bits instances for the primitive integral types. */
private[monocle] trait BitsInstances {
  implicit val booleanBits = new Bits[Boolean] {
    val bitSize: Int = 1
    def bitwiseOr(a1: Boolean, a2: Boolean) : Boolean = a1 | a2
    def bitwiseAnd(a1: Boolean, a2: Boolean): Boolean = a1 & a2
    def bitwiseXor(a1: Boolean, a2: Boolean): Boolean = a1 ^ a2
    def singleBit(n: Int): Boolean = true
    def shiftL(a: Boolean, n: Int): Boolean = false
    def shiftR(a: Boolean, n: Int): Boolean = false
    def testBit(a: Boolean, n: Int): Boolean = a
    def signed(a: Boolean): Boolean = a
    def negate(a: Boolean): Boolean = !a
  }

  implicit val byteBits = new Bits[Byte] {
    val bitSize: Int = 8
    def bitwiseOr(a1: Byte, a2: Byte) : Byte = (a1 | a2).toByte
    def bitwiseAnd(a1: Byte, a2: Byte): Byte = (a1 & a2).toByte
    def bitwiseXor(a1: Byte, a2: Byte): Byte = (a1 ^ a2).toByte
    def singleBit(n: Int): Byte = (1 << n).toByte
    def shiftL(a: Byte, n: Int): Byte = (a << n).toByte
    def shiftR(a: Byte, n: Int): Byte = (a >> n).toByte
    def testBit(a: Byte, n: Int): Boolean = bitwiseAnd(a, singleBit(n)) != 0
    def signed(a: Byte): Boolean = a.signum > 0
    def negate(a: Byte): Byte = (~a).toByte
  }

  implicit val charBits = new Bits[Char] {
    val bitSize: Int = 16
    def bitwiseOr(a1: Char, a2: Char): Char = (a1 | a2).toChar
    def bitwiseAnd(a1: Char, a2: Char): Char = (a1 & a2).toChar
    def bitwiseXor(a1: Char, a2: Char): Char = (a1 ^ a2).toChar
    def shiftL(a: Char, n: Int): Char = (a << n).toChar
    def shiftR(a: Char, n: Int): Char = (a >> n).toChar
    def singleBit(n: Int): Char = (1 << n).toChar
    def testBit(a: Char, n: Int): Boolean = bitwiseAnd(a, singleBit(n)) != 0
    def negate(a: Char): Char = (~a).toChar
    def signed(a: Char): Boolean = a.signum > 0
  }

  implicit val intBits = new Bits[Int] {
    val bitSize: Int = 32
    def bitwiseOr(a1: Int, a2: Int) : Int = a1 | a2
    def bitwiseAnd(a1: Int, a2: Int): Int = a1 & a2
    def bitwiseXor(a1: Int, a2: Int): Int = a1 ^ a2
    def singleBit(n: Int): Int = 1 << n
    def shiftL(a: Int, n: Int): Int = a << n
    def shiftR(a: Int, n: Int): Int = a >> n
    def testBit(a: Int, n: Int): Boolean = bitwiseAnd(a, singleBit(n)) != 0
    def signed(a: Int): Boolean = a.signum > 0
    def negate(a: Int): Int = ~a
  }

  implicit val longBits = new Bits[Long] {
    def signed(a: Long): Boolean = a.signum > 0
    def negate(a: Long): Long = ~a
    def testBit(a: Long, n: Int): Boolean = bitwiseAnd(a, singleBit(n)) != 0
    // BUGFIX: use a Long literal. `(1 << n).toLong` performed a 32-bit Int shift
    // (shift distance masked to 5 bits), so bits 31-63 produced wrong masks.
    def singleBit(n: Int): Long = 1L << n
    def shiftR(a: Long, n: Int): Long = a >> n
    def shiftL(a: Long, n: Int): Long = a << n
    def bitwiseXor(a1: Long, a2: Long): Long = a1 ^ a2
    def bitwiseOr(a1: Long, a2: Long) : Long = a1 | a2
    def bitwiseAnd(a1: Long, a2: Long): Long = a1 & a2
    // BUGFIX: a Long is 64 bits wide, not 32; with 32 the upper half of a Long
    // was unaddressable through Bits.bitsIndex.
    val bitSize: Int = 64
  }
}
| NightRa/Monocle | core/src/main/scala/monocle/internal/Bits.scala | Scala | mit | 4,126 |
/*
* Copyright 2017 Nicolas Rinaudo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kantan.mongodb
package joda.time
import laws.discipline.ArbitraryInstances
object arbitrary extends kantan.codecs.strings.joda.time.laws.discipline.ArbitraryInstances with ArbitraryInstances
| nrinaudo/kantan.mongodb | joda-time/src/test/scala/kantan/mongodb/joda/time/arbitrary.scala | Scala | apache-2.0 | 801 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package model
import java.util.UUID
import play.api.libs.json._
import reactivemongo.bson.{ BSON, BSONHandler, BSONString }
/** Thin wrapper around a [[java.util.UUID]]; renders as the plain UUID string. */
case class UniqueIdentifier(uuid: UUID) {
  override def toString = uuid.toString
}

object UniqueIdentifier {

  /** Parses `value` as a UUID (invalid input raises the usual IllegalArgumentException). */
  def apply(value: String): UniqueIdentifier = UniqueIdentifier(UUID.fromString(value))

  /** Creates a fresh identifier backed by a random UUID. */
  def randomUniqueIdentifier = UniqueIdentifier(UUID.randomUUID())

  /** Empty string maps to None; any other string is parsed as a UUID. */
  def toOpt(value: String): Option[UniqueIdentifier] =
    if (value.isEmpty) None else Some(UniqueIdentifier(value))

  /** Missing or empty strings map to None. */
  def toOpt(value: Option[String]): Option[UniqueIdentifier] = value.flatMap(toOpt)

  /** Inverse of toOpt: None becomes the empty string. */
  def fromOpt(optUID: Option[UniqueIdentifier]): String = optUID.fold("")(_.toString)

  // Play-JSON serialization: an identifier is written as its string form.
  implicit val uniqueIdentifierFormats: Writes[UniqueIdentifier] = Writes {
    identifier => JsString(identifier.toString)
  }

  // Play-JSON deserialization: read the value as a string and parse it.
  implicit val uniqueIdentifierReads: Reads[UniqueIdentifier] = Reads {
    jsValue => JsSuccess(UniqueIdentifier(jsValue.as[String]))
  }

  // BSON (de)serialization for the Mongo layer, also via the string form.
  implicit object UniqueIdentifierBSONHandler extends BSONHandler[BSONString, UniqueIdentifier] {
    def read(doc: BSONString) = UniqueIdentifier(doc.value)
    def write(id: UniqueIdentifier) = BSON.write(id.toString)
  }
}
| hmrc/fset-faststream | app/model/UniqueIdentifier.scala | Scala | apache-2.0 | 1,900 |
/*
* Copyright (C) 2020 MapRoulette contributors (see CONTRIBUTORS.md).
* Licensed under the Apache License, Version 2.0 (see LICENSE).
*/
package org.maproulette.framework.graphql.schemas
import org.maproulette.framework.model.{Task, TaskReviewFields, MapillaryImage}
import sangria.macros.derive.{ObjectTypeName, deriveObjectType}
import sangria.schema._
/**
* @author mcuthbert
*/
/** Placeholder for task-related GraphQL schema definitions. */
class TaskSchema {}

object TaskSchema {
  // Common GraphQL argument: the id of the task being queried.
  val taskIdArg: Argument[Long] = Argument("taskId", LongType)
}
| mgcuthbert/maproulette2 | app/org/maproulette/framework/graphql/schemas/TaskSchema.scala | Scala | apache-2.0 | 500 |
/*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.drelephant.spark.data
import org.apache.spark.scheduler.SparkListenerEnvironmentUpdate
/** Data derived from a Spark event log: the captured environment-update event. */
case class SparkLogDerivedData(environmentUpdate: SparkListenerEnvironmentUpdate) {
  // "Spark Properties" is the section of the environment details holding the spark.* settings.
  def appConfigurationProperties: Map[String, String] = environmentUpdate.environmentDetails("Spark Properties").toMap
}
| nntnag17/dr-elephant-1 | app/com/linkedin/drelephant/spark/data/SparkLogDerivedData.scala | Scala | apache-2.0 | 911 |
import sbt._
import Keys._
import org.scalajs.sbtplugin.ScalaJSPlugin.autoImport._
/** sbt helper wiring a Scala.js client project's compiled output into a server project. */
object ScalaJSHelper {
  /**
   * Settings that copy the client's compiled JS (and its source map) into the
   * server's resources during development and into the packaged jar when publishing.
   */
  def packageScalaJS(client: Project): Seq[Setting[_]] = Seq(
    watchSources ++= (client / watchSources).value,
    // Pick fastOpt when developing and fullOpt when publishing
    Compile / resourceGenerators += Def.task {
      val js = (client / Compile / fastOptJS).value.data
      val sourceMap = getSourceMap(js)
      IO.copy(
        Seq(
          js -> (Compile / resourceManaged).value / js.getName,
          sourceMap -> (Compile / resourceManaged).value / sourceMap.getName
        )
      ).toSeq
    }.taskValue,
    // Published artifact uses the fully optimized output instead.
    Compile / packageBin / mappings ++= {
      val optJs = (client / Compile / fullOptJS).value.data
      val sourceMap = getSourceMap(optJs)
      Seq(
        optJs -> optJs.getName,
        sourceMap -> sourceMap.getName
      )
    }
  )

  // Scala.js writes the source map next to the JS file as "<file>.map".
  private def getSourceMap(jsFile: java.io.File): File =
    file(jsFile.getAbsolutePath + ".map")
}
| scalacenter/scaladex | project/ScalaJSHelper.scala | Scala | bsd-3-clause | 984 |
/**
* MIT License
*
* Copyright (c) 2016-2018 James Sherwood-Jones <james.sherwoodjones@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.jsherz.luskydive.json
import java.util.UUID
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import com.jsherz.luskydive.core.{Member, TextMessage}
import com.jsherz.luskydive.util.{DateJsonFormat, TimestampJsonFormat, UuidJsonFormat}
import spray.json.DefaultJsonProtocol
/**
 * JSON (de)serialization support for member-related payloads.
 */
object MemberJsonSupport extends DefaultJsonProtocol with SprayJsonSupport {
  implicit val UuidFormat = UuidJsonFormat
  implicit val DateFormat = DateJsonFormat
  implicit val TimestampFormat = TimestampJsonFormat
  implicit val MemberFormat = jsonFormat13(Member)
  implicit val MemberSearchResultFormat = jsonFormat5(MemberSearchResult)
  implicit val MemberSearchRequestFormat = jsonFormat1(MemberSearchRequest)
  implicit val textMessageFormat = jsonFormat11(TextMessage)
}

/**
 * Used to look for members.
 *
 * @param searchTerm free-text term to match members against
 */
case class MemberSearchRequest(searchTerm: String)

/**
 * Useful information about a member to return from a search.
 *
 * @param uuid        the member's unique identifier
 * @param firstName   the member's first name
 * @param lastName    optional last name
 * @param phoneNumber optional phone number
 * @param email       optional e-mail address
 */
case class MemberSearchResult(uuid: UUID, firstName: String, lastName: Option[String], phoneNumber: Option[String],
                              email: Option[String])
| jSherz/lsd-members | backend/src/main/scala/com/jsherz/luskydive/json/Members.scala | Scala | mit | 2,495 |
/*
* Code Pulse: A real-time code coverage testing tool. For more information
* see http://code-pulse.com
*
* Copyright (C) 2014 Applied Visions - http://securedecisions.avi.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package bootstrap.liftweb
import com.secdec.codepulse.components.dependencycheck.{ Updates => DependencyCheckUpdates }
import com.secdec.codepulse.components.surface.{ Updates => SurfaceDetectorUpdates }
import com.secdec.codepulse.components.includes.snippet.Includes
import com.secdec.codepulse.components.notifications.Notifications
import com.secdec.codepulse.components.version.snippet.VersionSnippet
import com.secdec.codepulse.tracer.ProjectManager
import com.secdec.codepulse.tracer.TracingTarget
import com.secdec.codepulse.tracer.snippet._
import com.secdec.codepulse.util.comet.PublicCometInit
import net.liftweb.common.Full
import net.liftweb.http.CometCreationInfo
import net.liftweb.http.LiftRules
import net.liftweb.http.LiftRulesMocker.toLiftRules
import net.liftweb.util.StringHelpers
private[liftweb] object BootSnippets {
/** Registers Lift snippet and comet-actor factories at boot time. */
def apply(projectManager: ProjectManager) = {
  // Map snippet names used in templates to their implementations.
  LiftRules.snippetDispatch.prepend {
    case SnippetRequest("Includes", _) => Includes
    case SnippetRequest("VersionSnippet", _) => new VersionSnippet
    case SnippetRequest("ProjectWidgetry", Full(target: TracingTarget)) => new ProjectWidgetry(projectManager, target)
    case SnippetRequest("ConnectionHelp", _) => ConnectionHelp
    case SnippetRequest("DotNETIISHelp", _) => DotNETIISHelp
    case SnippetRequest("DotNETExecutableHelp", _) => DotNETExecutableHelp
    case SnippetRequest("Notifications", _) => Notifications
    case SnippetRequest("TraceConnectorState", _) => TraceConnectorState
  }
  // Factory per comet actor type name (after camelification, see below).
  val cometActorsByName: PartialFunction[String, PublicCometInit] = {
    case CometTracerUI.className => new CometTracerUI
    case "ProjectListUpdates" => new ProjectListUpdates(projectManager)
    case "ProjectUpdated" => new ProjectUpdated(projectManager)
    case "Notifications" => Notifications
    case "TraceConnectorStateChanges" => new TraceConnectorStateChanges
    case "DependencyCheck" => DependencyCheckUpdates
    case "SurfaceDetector" => SurfaceDetectorUpdates
  }
  // Create and initialize a comet actor whenever a known type is requested.
  LiftRules.cometCreation append {
    case CometCreationInfo(cType, name, xml, attribs, session) if cometActorsByName.isDefinedAt(StringHelpers.camelify(cType)) =>
      val comet = cometActorsByName(StringHelpers.camelify(cType))
      comet.initCometActor(session, Full(cType), name, xml, attribs)
      comet
  }
}
} | secdec/codepulse | codepulse/src/main/scala/bootstrap/liftweb/BootSnippets.scala | Scala | apache-2.0 | 3,033 |
package sms.core
import akka.actor._
import com.typesafe.config.{Config, ConfigFactory}
import akka.dispatch.MessageDispatcher
import sms.core.boot.Initiable
/** Central access point to the application's ActorSystem and cluster facilities. */
object Akka extends Initiable with ClusterMediator with WorkScheduler {
  // Load config from the file named by the "sms.conf.path" property,
  // falling back to the default classpath configuration.
  private def config: Config = Properties.parseFile("sms.conf.path", ConfigFactory.load()) { file =>
    ConfigFactory.parseFileAnySyntax(file).withFallback(ConfigFactory.load())
  }

  // Lazily created so init() controls when the system actually starts.
  protected[core] lazy val system: ActorSystem = ActorSystem("stock-market-sherlock", config)

  /** Get the specified dispatcher
    */
  def dispatcher(path: String): MessageDispatcher = system.dispatchers.lookup(path)

  /** Get settings at given path
    */
  def settings(path: String): Config = system.settings.config.getConfig(path)

  /** Create actor from given props
    */
  def actorOf(props: Props): ActorRef = system.actorOf(props)

  /** Create named actor from given props
    */
  def actorOf(props: Props, name: String): ActorRef = system.actorOf(props, name)

  // Force initialization of the lazy system plus the cluster/mediator members
  // (presumably provided by the mixed-in traits — confirm against ClusterMediator).
  def init(): Unit = {
    system
    cluster
    mediator
  }

  // NOTE(review): ActorSystem.shutdown() is deprecated in newer Akka versions
  // in favour of terminate(); kept as-is for the Akka version in use.
  def clean(): Unit = {
    system.shutdown()
  }
}
| kjanosz/stock-market-sherlock | core/src/main/scala/sms/core/Akka.scala | Scala | apache-2.0 | 1,111 |
package feh.tec.visual.api
import java.awt.Color
// // // // // // // // // // // // // // // // // String Draw Options // // // // // // // // // // // // // // // // //

/** Options controlling how a string is drawn on an Easel (font, size, color, layout). */
trait StringDrawOptions[+E <: Easel]
{
  def font: String
  // E#CoordinateUnit is the easel-specific measurement unit
  def size: E#CoordinateUnit
  def color: Color
  def alignment: StringAlignment
  // vertical spacing between lines
  def vSpacing: E#CoordinateUnit
}

/** Horizontal alignment of a drawn string. */
trait StringAlignment
object StringAlignment{
  case object Left extends StringAlignment
  case object Center extends StringAlignment
  case object Right extends StringAlignment
}

/** Plain value implementation of [[StringDrawOptions]]. */
case class BasicStringDrawOps[+E <: Easel]( alignment: StringAlignment,
                                            color: Color,
                                            font: String,
                                            size: E#CoordinateUnit,
                                            vSpacing: E#CoordinateUnit
                                            ) extends StringDrawOptions[E]

// // // // // // // // // // // // // // // // // Tile Draw Options // // // // // // // // // // // // // // // // //

/** Options controlling how a single map tile is drawn. */
trait TileDrawOptions[+E <: Easel]{
  def drawBorder: Boolean
  def borderColor: Color
  // None means the tile interior is not filled
  def fillColor: Option[Color]
  // tiles with this flag are rendered after the rendering is done for the rest of tiles
  def delayedRendering: Boolean
}

/** Tile options for square tiles, adding the side length. */
trait SquareTileDrawOptions[E <: Easel] extends TileDrawOptions[E]{
  def sideSize: E#CoordinateUnit
}

/** Value implementation; a border is drawn exactly when a border color is given. */
case class BasicSquareTileDrawOptions[E <: Easel](sideSize: E#CoordinateUnit, borderColorOpt: Option[Color], fillColor: Option[Color], delayedRendering: Boolean = false)
  extends SquareTileDrawOptions[E]
{
  def drawBorder: Boolean = borderColorOpt.isDefined
  // null when no border color was provided (drawBorder is false in that case)
  def borderColor: Color = borderColorOpt.orNull
}

// // // // // // // // // // // // // // // // // Map Draw Options // // // // // // // // // // // // // // // // //

/** Options controlling how a whole map is drawn. */
trait MapDrawOptions[+E <: Easel]

/** Map options for square-tiled maps. */
trait SquareMapDrawOptions[E <: Easel] extends MapDrawOptions[E]{
  def tileSideSize: E#CoordinateUnit
  def showLabels: Boolean
  def routeHighlightColor: Color
}

/** Value implementation of [[SquareMapDrawOptions]]. */
case class BasicSquareMapDrawOptions[E <: Easel](tileSideSize: E#CoordinateUnit, showLabels: Boolean, routeHighlightColor: Color) extends SquareMapDrawOptions[E]
object BasicSquareMapDrawOptions{
def apply[E <: Easel](n: Int, showLabels: Boolean, routeHighlightColor: Color)(implicit easel: E): BasicSquareMapDrawOptions[E] =
BasicSquareMapDrawOptions(easel.unitNumeric.fromInt(n), showLabels, routeHighlightColor)
} | fehu/agent-tareas | agent/src/main/scala/feh/tec/visual/api/DrawOptions.scala | Scala | mit | 2,463 |
package slick.migration.dialect
import scala.collection.JavaConverters._
import com.typesafe.config._
/** Loads and queries the test-dialects configuration used by the migration tests. */
object TestConfig {
  // Raw configuration bundled as a test resource.
  val ref = ConfigFactory.parseResources(getClass, "/test-dialects.conf")
  // The "defaults" section, used as fallback for every named dialect.
  val defaults = ref.getObject("defaults").toConfig

  /**
   * Reads the value at `path` as a list of strings.
   * A single scalar value is wrapped in a one-element list; a missing path yields None.
   */
  def getStrings(config: Config, path: String): Option[Seq[String]] = {
    if (config.hasPath(path)) {
      config.getValue(path).unwrapped() match {
        case l: java.util.List[_] => Some(l.asScala.map(_.toString))
        case o => Some(List(o.toString))
      }
    } else None
  }

  /** Resolved config for the named dialect, layered over the defaults. */
  def testConfig(name: String) = {
    val c = if (ref.hasPath(name)) ref.getConfig(name).withFallback(defaults) else defaults
    c.resolve()
  }
}
| itryapitsin/slick-migration | test-utils/src/main/scala/slick/migration/dialect/TestConfig.scala | Scala | apache-2.0 | 711 |
package com.gilt.cavellc.models
import org.joda.time.DateTime
object Joda {
implicit def dateTimeOrdering: Ordering[DateTime] = Ordering.fromLessThan(_ isBefore _)
} | gilt/cave | www/app/com/gilt/cavellc/models/Joda.scala | Scala | mit | 169 |
/*
* Part of GDL book_api.
* Copyright (C) 2018 Global Digital Library
*
* See LICENSE
*/
package io.digitallibrary.bookapi.controller
import io.digitallibrary.bookapi.BookApiProperties
import io.digitallibrary.bookapi.BookApiProperties.DefaultLanguage
import io.digitallibrary.bookapi.model.api
import io.digitallibrary.bookapi.model.api.ValidationError
import io.digitallibrary.bookapi.model.domain.{Paging, Sort}
import io.digitallibrary.bookapi.service.ConverterService
import io.digitallibrary.bookapi.service.search.SearchService
import io.digitallibrary.language.model.LanguageTag
import org.scalatra.swagger.{ResponseMessage, Swagger, SwaggerSupport}
/** Cake-pattern slice exposing the book search HTTP endpoints. */
trait SearchController {
  this: SearchService with ConverterService =>

  val searchController: SearchController

  /** Scalatra servlet serving "/" (all languages) and "/:lang" search routes. */
  class SearchController(implicit val swagger: Swagger) extends GdlController with SwaggerSupport {
    protected val applicationDescription = "API for searching books from GDL."

    // Reads "page" and "page-size" query params, clamping both to valid ranges.
    def extractPageAndPageSize(): Paging = {
      Paging(
        page = intOrDefault("page", 1).max(1),
        pageSize = intOrDefault("page-size", BookApiProperties.DefaultPageSize).min(BookApiProperties.MaxPageSize).max(1)
      )
    }

    // Register response models with Swagger so they appear in the generated docs.
    registerModel[api.Error]
    registerModel[ValidationError]

    // Reusable Swagger response descriptions.
    val response400 = ResponseMessage(400, "Validation error", Some("ValidationError"))
    val response403 = ResponseMessage(403, "Access Denied", Some("Error"))
    val response404 = ResponseMessage(404, "Not found", Some("Error"))
    val response500 = ResponseMessage(500, "Unknown error", Some("Error"))

    // Swagger description of the default-language search endpoint.
    private val searchBooks = (apiOperation[api.SearchResult]("searchBooks")
      summary s"Search for books in the default language $DefaultLanguage"
      description s"Returns a list of books in $DefaultLanguage"
      parameters(
      headerParam[Option[String]]("X-Correlation-ID").description("User supplied correlation-id. May be omitted."),
      queryParam[Option[Int]]("page-size").description("Return this many results per page."),
      queryParam[Option[Int]]("page").description("Return results for this page."),
      queryParam[Option[String]]("query").description("Query to search for"),
      queryParam[Option[String]]("source").description("Filter results by source"),
      queryParam[Option[String]]("sort").description(s"Sorts result based on parameter. Possible values: ${Sort.values.mkString(",")}; Default value: ${Sort.ByRelevance}"))
      responseMessages response500)

    // Swagger description of the per-language search endpoint.
    private val searchBooksForLang = (apiOperation[api.SearchResult]("searchBooksForLang")
      summary "Search for books in the provided language"
      description "Returns a list of books in the provided language"
      parameters(
      headerParam[Option[String]]("X-Correlation-ID").description("User supplied correlation-id. May be omitted."),
      pathParam[String]("lang").description("Desired language for books specified in BCP-47 format."),
      queryParam[Option[Int]]("page-size").description("Return this many results per page."),
      queryParam[Option[Int]]("page").description("Return results for this page."),
      queryParam[Option[String]]("query").description("Query to search for"),
      queryParam[Option[String]]("source").description("Filter results by source"),
      queryParam[Option[String]]("sort").description(s"Sorts result based on parameter. Possible values: ${Sort.values.mkString(",")}; Default value: ${Sort.ByRelevance}"))
      responseMessages response500)

    // Searches either one language (if a "language" query param is supplied) or all languages.
    get("/", operation(searchBooks)) {
      val query = paramOrNone("query")
      val source = paramOrNone("source")
      val sort = Sort.valueOf(paramOrNone("sort")).getOrElse(Sort.ByRelevance)
      paramOrNone("language") match {
        case Some(language) =>
          searchService.searchWithQuery(
            languageTag = LanguageTag(language),
            query = query,
            source = source,
            paging = extractPageAndPageSize(),
            sort = sort)
        case None =>
          searchService.searchWithQueryForAllLanguages(
            query = query,
            source = source,
            paging = extractPageAndPageSize(),
            sort = sort)
      }
    }

    // Searches within the language given as a path parameter (BCP-47).
    get("/:lang/?", operation(searchBooksForLang)) {
      val query = paramOrNone("query")
      val source = paramOrNone("source")
      val sort = Sort.valueOf(paramOrNone("sort")).getOrElse(Sort.ByRelevance)
      searchService.searchWithQuery(
        languageTag = LanguageTag(params("lang")),
        query = query,
        source = source,
        paging = extractPageAndPageSize(),
        sort = sort)
    }
  }
}
| GlobalDigitalLibraryio/book-api | src/main/scala/io/digitallibrary/bookapi/controller/SearchController.scala | Scala | apache-2.0 | 4,615 |
/*
* This software is licensed under the GNU Affero General Public License, quoted below.
*
* This file is a part of PowerAPI.
*
* Copyright (C) 2011-2014 Inria, University of Lille 1.
*
* PowerAPI is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* PowerAPI is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with PowerAPI.
*
* If not, please consult http://www.gnu.org/licenses/agpl-3.0.html.
*/
package org.powerapi.core.target
/**
 * Targets are system elements that can be monitored by PowerAPI:
 * a single process, a named application, or the whole system.
 *
 * @author <a href="mailto:romain.rouvoy@univ-lille1.fr">Romain Rouvoy</a>
 * @author <a href="mailto:maxime.colmant@gmail.com">Maxime Colmant</a>
 */
trait Target
/**
 * Monitoring target identified by a specific Process IDentifier.
 *
 * @param pid process identifier to monitor
 *
 * @author <a href="mailto:romain.rouvoy@univ-lille1.fr">Romain Rouvoy</a>
 * @author <a href="mailto:maxime.colmant@gmail.com">Maxime Colmant</a>
 */
case class Process(pid: Int) extends Target {
  // Rendered as the bare pid, e.g. "1234".
  override def toString(): String = pid.toString
}
/**
 * Monitoring target for a specific application.
 *
 * @param name: name of the application.
 *
 * @author <a href="mailto:romain.rouvoy@univ-lille1.fr">Romain Rouvoy</a>
 * @author <a href="mailto:maxime.colmant@gmail.com">Maxime Colmant</a>
 */
case class Application(name: String) extends Target {
  // Rendered as the bare application name.
  override def toString(): String = name
}
/**
 * Target usage ratio.
 *
 * @param ratio: usage ratio attributed to a target (presumably in [0, 1];
 *               exact semantics depend on the producing module -- TODO confirm).
 *
 * @author <a href="mailto:maxime.colmant@gmail.com">Maxime Colmant</a>
 */
case class TargetUsageRatio(ratio: Double)
/**
 * Monitoring target representing the whole system.
 *
 * @author <a href="mailto:romain.rouvoy@univ-lille1.fr">Romain Rouvoy</a>
 * @author <a href="mailto:maxime.colmant@gmail.com">Maxime Colmant</a>
 */
object All extends Target {
  // Explicit result type added for consistency with the other Target
  // implementations (Process, Application), which declare ": String".
  override def toString: String = "All"
}
| rouvoy/powerapi | powerapi-core/src/main/scala/org/powerapi/core/target/Target.scala | Scala | agpl-3.0 | 2,289 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples.ml
import org.apache.spark.examples.mllib.AbstractParams
import org.apache.spark.ml.classification.{RandomForestClassificationModel, RandomForestClassifier}
import org.apache.spark.ml.feature.{StringIndexer, VectorIndexer}
import org.apache.spark.ml.regression.{RandomForestRegressionModel, RandomForestRegressor}
import org.apache.spark.ml.{Pipeline, PipelineStage}
import org.apache.spark.sql.{DataFrame, SparkSession}
import scopt.OptionParser
import scala.collection.mutable
import scala.language.reflectiveCalls
/**
* An example runner for decision trees. Run with
* {{{
* ./bin/run-example ml.RandomForestExample [options]
* }}}
* Decision Trees and ensembles can take a large amount of memory. If the run-example command
* above fails, try running via spark-submit and specifying the amount of memory as at least 1g.
* For local mode, run
* {{{
* ./bin/spark-submit --class org.apache.spark.examples.ml.RandomForestExample --driver-memory 1g
* [examples JAR path] [options]
* }}}
* If you use it as a template to create your own app, please use `spark-submit` to submit your app.
*/
object RandomForestExample {

  /**
   * Command-line parameters for the example runner (parsed by scopt).
   *
   * @param input path to labeled training examples (required positional arg)
   * @param testInput optional path to a test dataset; when set, fracTest is ignored
   * @param dataFormat "libsvm" (default) or "dense" (deprecated in Spark v1.1)
   * @param algo "classification" or "regression"
   * @param fracTest fraction of `input` held out for testing, in [0, 1)
   * @param checkpointDir where intermediate node Id caches are checkpointed
   */
  case class Params(
      input: String = null,
      testInput: String = "",
      dataFormat: String = "libsvm",
      algo: String = "classification",
      maxDepth: Int = 5,
      maxBins: Int = 32,
      minInstancesPerNode: Int = 1,
      minInfoGain: Double = 0.0,
      numTrees: Int = 10,
      featureSubsetStrategy: String = "auto",
      fracTest: Double = 0.2,
      cacheNodeIds: Boolean = false,
      checkpointDir: Option[String] = None,
      checkpointInterval: Int = 10) extends AbstractParams[Params]

  /**
   * Raises a consistent error for an unrecognized --algo value.
   * Bug fix: the original messages lacked the `s` interpolator, so the literal
   * text "Algo ${params.algo} not supported." was printed instead of the value.
   */
  private def unsupportedAlgo(algo: String): Nothing =
    throw new IllegalArgumentException(s"Algo $algo not supported.")

  def main(args: Array[String]): Unit = {
    val defaultParams = Params()

    val parser = new OptionParser[Params]("RandomForestExample") {
      head("RandomForestExample: an example random forest app.")
      opt[String]("algo")
        .text(s"algorithm (classification, regression), default: ${defaultParams.algo}")
        .action((x, c) => c.copy(algo = x))
      opt[Int]("maxDepth")
        .text(s"max depth of the tree, default: ${defaultParams.maxDepth}")
        .action((x, c) => c.copy(maxDepth = x))
      opt[Int]("maxBins")
        .text(s"max number of bins, default: ${defaultParams.maxBins}")
        .action((x, c) => c.copy(maxBins = x))
      opt[Int]("minInstancesPerNode")
        .text(s"min number of instances required at child nodes to create the parent split," +
          s" default: ${defaultParams.minInstancesPerNode}")
        .action((x, c) => c.copy(minInstancesPerNode = x))
      opt[Double]("minInfoGain")
        .text(s"min info gain required to create a split, default: ${defaultParams.minInfoGain}")
        .action((x, c) => c.copy(minInfoGain = x))
      opt[Int]("numTrees")
        .text(s"number of trees in ensemble, default: ${defaultParams.numTrees}")
        .action((x, c) => c.copy(numTrees = x))
      opt[String]("featureSubsetStrategy")
        .text(s"number of features to use per node (supported:" +
          s" ${RandomForestClassifier.supportedFeatureSubsetStrategies.mkString(",")})," +
          // Bug fix: previously displayed defaultParams.numTrees here (copy-paste error).
          s" default: ${defaultParams.featureSubsetStrategy}")
        .action((x, c) => c.copy(featureSubsetStrategy = x))
      opt[Double]("fracTest")
        .text(s"fraction of data to hold out for testing. If given option testInput, " +
          s"this option is ignored. default: ${defaultParams.fracTest}")
        .action((x, c) => c.copy(fracTest = x))
      opt[Boolean]("cacheNodeIds")
        .text(s"whether to use node Id cache during training, " +
          s"default: ${defaultParams.cacheNodeIds}")
        .action((x, c) => c.copy(cacheNodeIds = x))
      opt[String]("checkpointDir")
        .text(s"checkpoint directory where intermediate node Id caches will be stored, " +
          s"default: ${defaultParams.checkpointDir.getOrElse("None")}")
        .action((x, c) => c.copy(checkpointDir = Some(x)))
      opt[Int]("checkpointInterval")
        .text(s"how often to checkpoint the node Id cache, " +
          s"default: ${defaultParams.checkpointInterval}")
        .action((x, c) => c.copy(checkpointInterval = x))
      opt[String]("testInput")
        .text(s"input path to test dataset. If given, option fracTest is ignored." +
          s" default: ${defaultParams.testInput}")
        .action((x, c) => c.copy(testInput = x))
      opt[String]("dataFormat")
        .text("data format: libsvm (default), dense (deprecated in Spark v1.1)")
        .action((x, c) => c.copy(dataFormat = x))
      arg[String]("<input>")
        .text("input path to labeled examples")
        .required()
        .action((x, c) => c.copy(input = x))
      checkConfig { params =>
        if (params.fracTest < 0 || params.fracTest >= 1) {
          failure(s"fracTest ${params.fracTest} value incorrect; should be in [0,1).")
        } else {
          success
        }
      }
    }

    parser.parse(args, defaultParams) match {
      case Some(params) => run(params)
      case _ => sys.exit(1)
    }
  }

  /**
   * Loads the data, trains a random forest (classifier or regressor depending
   * on params.algo) inside an ML Pipeline, prints the model (or a summary when
   * it is large) and evaluation metrics on training and test data.
   */
  def run(params: Params): Unit = {
    val spark = SparkSession
      .builder
      .appName(s"RandomForestExample with $params")
      .getOrCreate()

    params.checkpointDir.foreach(spark.sparkContext.setCheckpointDir)
    val algo = params.algo.toLowerCase

    println(s"RandomForestExample with parameters:\n$params")

    // Load training and test data and cache it.
    val (training: DataFrame, test: DataFrame) = DecisionTreeExample.loadDatasets(params.input,
      params.dataFormat, params.testInput, algo, params.fracTest)

    // Set up Pipeline.
    val stages = new mutable.ArrayBuffer[PipelineStage]()
    // (1) For classification, re-index classes.
    val labelColName = if (algo == "classification") "indexedLabel" else "label"
    if (algo == "classification") {
      val labelIndexer = new StringIndexer()
        .setInputCol("label")
        .setOutputCol(labelColName)
      stages += labelIndexer
    }
    // (2) Identify categorical features using VectorIndexer.
    //     Features with more than maxCategories values will be treated as continuous.
    val featuresIndexer = new VectorIndexer()
      .setInputCol("features")
      .setOutputCol("indexedFeatures")
      .setMaxCategories(10)
    stages += featuresIndexer
    // (3) Learn Random Forest.
    val dt = algo match {
      case "classification" =>
        new RandomForestClassifier()
          .setFeaturesCol("indexedFeatures")
          .setLabelCol(labelColName)
          .setMaxDepth(params.maxDepth)
          .setMaxBins(params.maxBins)
          .setMinInstancesPerNode(params.minInstancesPerNode)
          .setMinInfoGain(params.minInfoGain)
          .setCacheNodeIds(params.cacheNodeIds)
          .setCheckpointInterval(params.checkpointInterval)
          .setFeatureSubsetStrategy(params.featureSubsetStrategy)
          .setNumTrees(params.numTrees)
      case "regression" =>
        new RandomForestRegressor()
          .setFeaturesCol("indexedFeatures")
          .setLabelCol(labelColName)
          .setMaxDepth(params.maxDepth)
          .setMaxBins(params.maxBins)
          .setMinInstancesPerNode(params.minInstancesPerNode)
          .setMinInfoGain(params.minInfoGain)
          .setCacheNodeIds(params.cacheNodeIds)
          .setCheckpointInterval(params.checkpointInterval)
          .setFeatureSubsetStrategy(params.featureSubsetStrategy)
          .setNumTrees(params.numTrees)
      case _ => unsupportedAlgo(params.algo)
    }
    stages += dt
    val pipeline = new Pipeline().setStages(stages.toArray)

    // Fit the Pipeline.
    val startTime = System.nanoTime()
    val pipelineModel = pipeline.fit(training)
    val elapsedTime = (System.nanoTime() - startTime) / 1e9
    println(s"Training time: $elapsedTime seconds")

    // Get the trained Random Forest from the fitted PipelineModel.
    algo match {
      case "classification" =>
        val rfModel = pipelineModel.stages.last.asInstanceOf[RandomForestClassificationModel]
        if (rfModel.totalNumNodes < 30) {
          println(rfModel.toDebugString) // Print full model.
        } else {
          println(rfModel) // Print model summary.
        }
      case "regression" =>
        val rfModel = pipelineModel.stages.last.asInstanceOf[RandomForestRegressionModel]
        if (rfModel.totalNumNodes < 30) {
          println(rfModel.toDebugString) // Print full model.
        } else {
          println(rfModel) // Print model summary.
        }
      case _ => unsupportedAlgo(params.algo)
    }

    // Evaluate model on training, test data.
    algo match {
      case "classification" =>
        println("Training data results:")
        DecisionTreeExample.evaluateClassificationModel(pipelineModel, training, labelColName)
        println("Test data results:")
        DecisionTreeExample.evaluateClassificationModel(pipelineModel, test, labelColName)
      case "regression" =>
        println("Training data results:")
        DecisionTreeExample.evaluateRegressionModel(pipelineModel, training, labelColName)
        println("Test data results:")
        DecisionTreeExample.evaluateRegressionModel(pipelineModel, test, labelColName)
      case _ =>
        unsupportedAlgo(params.algo)
    }

    spark.stop()
  }
}
// scalastyle:on println
| fharenheit/template-spark-app | src/main/scala/org/apache/spark/examples/ml/RandomForestExample.scala | Scala | apache-2.0 | 11,868 |
/*
* Copyright 2017-2022 John Snow Labs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.johnsnowlabs.nlp.annotators.spell.symmetric
import org.apache.spark.ml.param.{IntParam, LongParam, Params}
/**
*
* @groupname anno Annotator types
* @groupdesc anno Required input and expected output annotator types
* @groupname Ungrouped Members
* @groupname param Parameters
* @groupname setParam Parameter setters
* @groupname getParam Parameter getters
* @groupname Ungrouped Members
* @groupprio param 1
* @groupprio anno 2
* @groupprio Ungrouped 3
* @groupprio setParam 4
* @groupprio getParam 5
* @groupdesc param A list of (hyper-)parameter keys this annotator can take. Users can set and get the parameter values through setters and getters, respectively.
*/
trait SymmetricDeleteParams extends Params {

  /** Max edit distance characters to derive strings from a word (Default: `3`).
   * NOTE(review): the defaults quoted in these docs are applied by the concrete
   * annotators, not by this trait -- confirm against the implementing classes.
   *
   * @group param
   * */
  val maxEditDistance = new IntParam(this, "maxEditDistance", "max edit distance characters to derive strings from a word")

  /** Minimum frequency of words to be considered from training. Increase if training set is LARGE (Default: `0`).
   *
   * @group param
   * */
  val frequencyThreshold = new IntParam(this, "frequencyThreshold", "minimum frequency of words to be considered from training. Increase if training set is LARGE. Defaults to 0")

  /** Minimum frequency of corrections a word needs to have to be considered from training. Increase if training set is LARGE (Default: `0`).
   *
   * @group param
   * */
  val deletesThreshold = new IntParam(this, "deletesThreshold", "minimum frequency of corrections a word needs to have to be considered from training. Increase if training set is LARGE. Defaults to 0")

  /** Maximum duplicate of characters in a word to consider (Default: `2`).
   *
   * @group param
   * */
  val dupsLimit = new IntParam(this, "dupsLimit", "maximum duplicate of characters in a word to consider. Defaults to 2")

  /** Length of the longest word in the training corpus.
   *
   * @group param
   * */
  val longestWordLength = new IntParam(this, "longestWordLength", "length of longest word in corpus")

  /** Minimum frequency of a word in the corpus.
   *
   * @group param
   * */
  val minFrequency = new LongParam(this, "minFrequency", "minimum frequency of a word in the corpus")

  /** Maximum frequency of a word in the corpus.
   *
   * @group param
   * */
  val maxFrequency = new LongParam(this, "maxFrequency", "maximum frequency of a word in the corpus")

  /** Max edit distance characters to derive strings from a word.
   *
   * @group setParam
   * */
  def setMaxEditDistance(value: Int): this.type = set(maxEditDistance, value)

  /** Minimum frequency of words to be considered from training. Increase if training set is LARGE (Default: `0`).
   *
   * @group setParam
   * */
  def setFrequencyThreshold(value: Int): this.type = set(frequencyThreshold, value)

  /** Minimum frequency of corrections a word needs to have to be considered from training. Increase if training set is LARGE (Default: `0`).
   *
   * @group setParam
   * */
  def setDeletesThreshold(value: Int): this.type = set(deletesThreshold, value)

  /** Maximum duplicate of characters in a word to consider (Default: `2`).
   *
   * @group setParam
   * */
  def setDupsLimit(value: Int): this.type = set(dupsLimit, value)

  /** Length of the longest word in the training corpus.
   *
   * @group setParam
   * */
  def setLongestWordLength(value: Int): this.type = set(longestWordLength, value)

  /** Maximum frequency of a word in the corpus.
   *
   * @group setParam
   * */
  def setMaxFrequency(value: Long): this.type = set(maxFrequency, value)

  /** Minimum frequency of a word in the corpus.
   *
   * @group setParam
   * */
  def setMinFrequency(value: Long): this.type = set(minFrequency, value)

  /** Max edit distance characters to derive strings from a word.
   *
   * @group getParam
   * */
  def getMaxEditDistance: Int = $(maxEditDistance)

  /** Minimum frequency of words to be considered from training. Increase if training set is LARGE (Default: `0`).
   *
   * @group getParam
   * */
  def getFrequencyThreshold: Int = $(frequencyThreshold)

  /** Minimum frequency of corrections a word needs to have to be considered from training. Increase if training set is LARGE (Default: `0`).
   *
   * @group getParam
   * */
  def getDeletesThreshold: Int = $(deletesThreshold)

  /** Maximum duplicate of characters in a word to consider (Default: `2`).
   *
   * @group getParam
   * */
  def getDupsLimit: Int = $(dupsLimit)
}
| JohnSnowLabs/spark-nlp | src/main/scala/com/johnsnowlabs/nlp/annotators/spell/symmetric/SymmetricDeleteParams.scala | Scala | apache-2.0 | 5,101 |
/*
package org.scalatest.examples.suite.info
import collection.mutable
import org.scalatest._
class SetSuite extends Suite with GivenWhenThen {
def `test: an element can be added to an empty mutable Set` {
given("an empty mutable Set")
val set = mutable.Set.empty[String]
when("an element is added")
set += "clarity"
then("the Set should have size 1")
assert(set.size === 1)
and("the Set should contain the added element")
assert(set.contains("clarity"))
info("That's all folks!")
}
}
*/
| hubertp/scalatest | examples/src/main/scala/org/scalatest/examples/suite/info/SetSuite.scala | Scala | apache-2.0 | 536 |
package spatial.stdlib
import spatial.dsl._
import virtualized._
import spatial.metadata._
import argon.core.State
object Convolution {
  // Maximum number of image columns buffered on chip per row by the sliding
  // kernels below (inputs wider than this are presumably unsupported -- TODO confirm).
  val coltile = 480
  // Tile dimensions for the GEMM-style convolution (M x N blocking; K is unused here).
  val tileSizeM = 16
  val tileSizeN = 16
  val tileSizeK = 16
  /**
   * Single-channel, single-filter 2-D strided convolution using a line buffer
   * and a sliding register window; one output row is accumulated in SRAM and
   * stored per `rowstride` input rows.
   *
   * NOTE(review): the unrolled element lists use List.tabulate(3), so the
   * filter is effectively assumed to be 3x3 even though filter.rows/cols are
   * read elsewhere -- confirm before using other kernel sizes.
   */
  @virtualize
  def ConvolutionSlide[T:Type:Num](output: DRAM2[T],
                                   input: DRAM2[T],
                                   filter: LUT2[T],
                                   colstride: scala.Int, rowstride: scala.Int,
                                   load_par: Index, store_par: Index )(implicit state: State): Unit = {

    // Line buffer holding filter.rows image rows, advanced rowstride rows at a time.
    val lb = LineBuffer.strided[T](filter.rows, coltile, rowstride)
    // Sliding window over the current filter.rows x filter.cols patch.
    val sr = RegFile[T](filter.rows, filter.cols)
    // One output row, accumulated on chip before being stored.
    val lineout = SRAM[T](coltile/colstride)

    Foreach(input.rows by rowstride){row =>
      lb load input(row::row+rowstride, 0::input.cols par load_par)
      Foreach(input.cols by colstride){j =>
        // Shift the next colstride columns into the window.
        Foreach(filter.rows by 1 par filter.rows){i => sr(i,*) <<= lb(i,j::j+colstride)}
        val filter_elements = List.tabulate(3){ii => List.tabulate(3){jj =>
          filter(ii,jj)
        }}.flatten
        // Window elements: zero-padded at the top/left border, columns read in
        // reverse order (filter.cols - 1 - jj) so the newest column aligns with
        // the corresponding filter column.
        val sr_elements = List.tabulate(3){ii => List.tabulate(3){jj =>
          if ((row.to[Int]+rowstride-1) - (filter.rows - 1 - ii.to[Int]) < 0 || (j.to[Int]+colstride-1) - (filter.cols - 1 - jj.to[Int]) < 0) 0.to[T] else sr(ii,filter.cols - 1 - jj)
        }}.flatten
        // Dot product of window and filter.
        lineout(j/colstride) = sr_elements.zip(filter_elements).map{case (s, f) => s * f}.reduce{_+_}
        // lineout(j/colstride) = mux(row + (rowstride-1) < filter.rows.to[Int]-1 || j + (colstride-1) < filter.cols.to[Int]-1, 0.to[T], Reduce(Reg[T](0.to[T]))(filter.rows by 1, filter.cols by 1){(ii,jj) => sr(ii,jj) * filter(ii,jj)}{_+_}.value)
      }
      output(row/rowstride, 0::output.cols par store_par) store lineout
    }
  }
  // Multifilter: one pass over a single-channel input evaluates several
  // filters, writing one output page (dim 0 of `output`) per filter. All
  // filters are assumed to share the dimensions of filter.head.
  @virtualize
  def MFConvolutionSlide[T:Type:Num](output: DRAM3[T],
                                     input: DRAM2[T],
                                     filter: List[LUT2[T]],
                                     colstride: scala.Int, rowstride: scala.Int,
                                     load_par: Index, store_par: Index )(implicit state: State): Unit = {

    val lb = LineBuffer.strided[T](filter.head.rows, coltile, rowstride)
    val sr = RegFile[T](filter.head.rows, filter.head.cols)
    // One on-chip output line per filter.
    val lineout = List.tabulate(filter.length){_ => SRAM[T](coltile/colstride)}

    Foreach(input.rows by rowstride){row =>
      lb load input(row::row+rowstride, 0::input.cols par load_par)
      Foreach(input.cols by colstride){j =>
        Foreach(filter.head.rows by 1 par filter.head.rows){i => sr(i,*) <<= lb(i,j::j+colstride)}
        // Shared window (zero-padded at top/left borders); NOTE(review):
        // List.tabulate(3) hard-codes a 3x3 kernel -- TODO confirm.
        val sr_elements = List.tabulate(3){ii => List.tabulate(3){jj =>
          if ((row.to[Int]+rowstride-1) - (filter.head.rows - 1 - ii.to[Int]) < 0 || (j.to[Int]+colstride-1) - (filter.head.cols - 1 - jj.to[Int]) < 0) 0.to[T] else sr(ii,filter.head.cols - 1 - jj)
        }}.flatten
        // One dot product per filter against the shared window.
        lineout.zipWithIndex.foreach{case (lo, page) =>
          val filter_elements = List.tabulate(3){ii => List.tabulate(3){jj =>
            filter(page).apply(ii,jj)
          }}.flatten
          lo(j/colstride) = sr_elements.zip(filter_elements).map{case (s, f) => s * f}.reduce{_+_}
        }
        // lineout(j/colstride) = mux(row + (rowstride-1) < filter.head.rows.to[Int]-1 || j + (colstride-1) < filter.head.cols.to[Int]-1, 0.to[T], Reduce(Reg[T](0.to[T]))(filter.head.rows by 1, filter.head.cols by 1){(ii,jj) => sr(ii,jj) * filter(ii,jj)}{_+_}.value)
      }
      // Store all filter pages for this output row concurrently.
      Parallel{
        lineout.zipWithIndex.foreach{case (lo, page) =>
          output(page, row/rowstride, 0::output.dim2 par store_par) store lo
        }
      }
    }
  }
  // Multichannel: convolves each input channel (dim 0 of `input`) with its
  // slice of the 3-D filter and sums the per-channel results into a
  // single-channel output.
  @virtualize
  def MCConvolutionSlide[T:Type:Num](output: DRAM2[T],
                                     input: DRAM3[T],
                                     filter: LUT3[T],
                                     colstride: scala.Int, rowstride: scala.Int,
                                     load_par: Index, store_par: Index, channels: scala.Int)(implicit state: State): Unit = {

    Foreach(input.dim1 by rowstride){row =>
      val lineout = SRAM[T](coltile/colstride)
      // Per-channel partial output lines, reduced into `lineout` below.
      val lineout_temps = List.tabulate(channels){_ => SRAM[T](coltile/colstride)}
      val lbs = List.tabulate(channels){_ => LineBuffer.strided[T](filter.dim1, coltile, rowstride)}
      val srs = List.tabulate(channels){_ => RegFile[T](filter.dim1, filter.dim2)}
      // `i` is the (Scala-level) channel index; beware that the shift Foreach
      // below shadows it with a hardware row counter inside its own lambda.
      lbs.zip(srs.zip(lineout_temps)).zipWithIndex.foreach{case ((lb, (sr,lo)), i) =>
        lb load input(i, row::row+rowstride, 0::input.dim2 par load_par)
        Parallel { // why is this here?
          Foreach(input.dim2 by colstride){j =>
            Foreach(filter.dim1 by 1 par filter.dim1){i => sr(i,*) <<= lb(i,j::j+colstride)}
            // Dot product of the (zero-padded, column-reversed) window with
            // this channel's filter slice.
            lo(j/colstride) = Reduce(Reg[T](0.to[T]))(filter.dim1 by 1, filter.dim2 by 1){(ii,jj) =>
              val img = if ((row.to[Int]+rowstride-1) - (filter.dim1 - 1 - ii.to[Int]) < 0 || (j.to[Int]+colstride-1) - (filter.dim2 - 1 - jj.to[Int]) < 0) 0.to[T] else sr(ii,filter.dim2 - 1 - jj)
              img * filter(i,ii,jj)
            }{_+_}
          }
        }
      }
      // Sum the channel contributions into the final output line.
      Foreach(input.dim2 by 1){ j => lineout(j) = lineout_temps.map{t => t(j)}.reduce{_+_} }
      output(row/rowstride, 0::output.cols par store_par) store lineout
    }
  }
  // Multichannel + multifilter: sums over input channels and writes one output
  // page (dim 0) per filter; assumes coltile fits all image columns.
  @virtualize
  def MCMFConvolutionSlide[T:Type:Num](output: DRAM3[T],
                                       input: DRAM3[T],
                                       filter: List[LUT3[T]],
                                       colstride: scala.Int, rowstride: scala.Int,
                                       load_par: Index, store_par: Index, channels: scala.Int)(implicit state: State): Unit = {

    Foreach(input.dim1 by rowstride){row =>
      // One final output line per filter...
      val lineout = List.tabulate(filter.length) {_ => SRAM[T](coltile/colstride)}
      // ...and per-filter, per-channel partials reduced below.
      val lineout_temps = List.tabulate(filter.length){_ => List.tabulate(channels) {_ => SRAM[T](coltile/colstride)}} // TODO: Fix hardcoded 3
      val lbs = List.tabulate(channels){_ => LineBuffer.strided[T](filter.head.dim1, coltile, rowstride)} // TODO: Fix hardcoded 3
      val srs = List.tabulate(channels){_ => RegFile[T](filter.head.dim1, filter.head.dim2)} // TODO: Fix hardcoded 3
      // `i` is the channel index (shadowed by the shift Foreach's row counter below).
      lbs.zip(srs).zipWithIndex.foreach{case ((lb, sr), i) =>
        lb load input(i, row::row+rowstride, 0::input.dim2 par load_par)
        Foreach(input.dim2 by colstride){j =>
          Foreach(filter.head.dim1 by 1 par filter.head.dim1){i => sr(i,*) <<= lb(i,j::j+colstride)}
          // For each filter p: dot product of this channel's window with the
          // channel slice of that filter.
          lineout_temps.zipWithIndex.foreach{case (lot, p) =>
            lot(i)(j/colstride) = Reduce(Reg[T](0.to[T]))(filter.head.dim1 by 1, filter.head.dim2 by 1){(ii,jj) =>
              val img = if ((row.to[Int]+rowstride-1) - (filter.head.dim1 - 1 - ii.to[Int]) < 0 || (j.to[Int]+colstride-1) - (filter.head.dim2 - 1 - jj.to[Int]) < 0) 0.to[T] else sr(ii,filter.head.dim2 - 1 - jj)
              val f = filter(p).apply(i,ii,jj)
              img * f
            }{_+_}
          }
        }
      }
      // Reduce channel partials into each filter's output line.
      Foreach(output.dim2 by 1){ j =>
        lineout.zip(lineout_temps).zipWithIndex.foreach{case ((lo, lot), i) =>
          lo(j) = lot.map{t => t(j)}.reduce{_+_}
        }
      }
      // Store all filter pages for this row concurrently.
      Parallel{
        lineout.zipWithIndex.foreach{case (lo, p) =>
          output(p, row/rowstride, 0::output.dim2 par store_par) store lo
        }
      }
    }
  }
  // Variant of the multichannel convolution that writes into a single `slice`
  // (dim 0) of a 3-D output, for layer-by-layer assembly; the filter comes
  // from a RegFile and the on-chip column capacity is an explicit `colsize`.
  @virtualize
  def MCConvolutionSlide[T:Type:Num](output: DRAM3[T], slice: Index,
                                     input: DRAM3[T],
                                     filter: RegFile3[T],
                                     colstride: scala.Int, rowstride: scala.Int,
                                     load_par: Index, store_par: Index, channels: scala.Int, colsize: scala.Int)(implicit state: State): Unit = {

    Foreach(input.dim1 by rowstride){row =>
      val lineout = SRAM[T](colsize/colstride)
      // Per-channel partial output lines, reduced into `lineout` below.
      val lineout_temps = List.tabulate(channels){_ => SRAM[T](colsize/colstride)}
      val lbs = List.tabulate(channels){_ => LineBuffer.strided[T](filter.dim1, colsize, rowstride)}
      val srs = List.tabulate(channels){_ => RegFile[T](filter.dim1, filter.dim2)}
      // `i` is the channel index (shadowed by the shift Foreach's row counter below).
      lbs.zip(srs.zip(lineout_temps)).zipWithIndex.foreach{case ((lb, (sr,lo)), i) =>
        lb load input(i, row::row+rowstride, 0::input.dim2 par load_par)
        Parallel { // why is this here?
          Foreach(input.dim2 by colstride){j =>
            Foreach(filter.dim1 by 1 par filter.dim1){i => sr(i,*) <<= lb(i,j::j+colstride)}
            lo(j/colstride) = Reduce(Reg[T](0.to[T]))(filter.dim1 by 1, filter.dim2 by 1){(ii,jj) =>
              val img = if ((row.to[Int]+rowstride-1) - (filter.dim1 - 1 - ii.to[Int]) < 0 || (j.to[Int]+colstride-1) - (filter.dim2 - 1 - jj.to[Int]) < 0) 0.to[T] else sr(ii,filter.dim2 - 1 - jj)
              img * filter(i,ii,jj)
            }{_+_}
          }
        }
      }
      // Sum channel contributions, then store into the requested output slice.
      Foreach(input.dim2 by 1){ j => lineout(j) = lineout_temps.map{t => t(j)}.reduce{_+_} }
      output(slice, row/rowstride, 0::output.dim2 par store_par) store lineout
    }
  }
  /**
   * Convolution expressed as a dense matrix-vector product
   * output = filter * input, tiled tileSizeM x tileSizeN with leftover
   * handling at the edges. The `par 1/*...*/` annotations are
   * parallelization knobs currently left at 1.
   */
  @virtualize
  def ConvolutionGEMM[T:Type:Num](output: DRAM1[T],
                                  input: DRAM1[T],
                                  filter: DRAM2[T])(implicit state: State): Unit = {

    Foreach(filter.rows by tileSizeM par 1/*m_outer_par*/){i =>
      // Compute leftover dim
      val elements_m = min(lift(tileSizeM), filter.rows - i)
      // Create Y tile
      val y_tile = SRAM[T](tileSizeM)
      // Accumulate partial products across the N (column) tiles.
      MemReduce(y_tile par 1/*y_reduce_par*/)(filter.cols by tileSizeN par 1/*n_outer_par*/){j =>
        // Compute leftover dim
        val elements_n = min(lift(tileSizeN), filter.cols - j)
        // Create local Y tile for accumulating
        val y_tile_local = SRAM[T](tileSizeM)
        // Create X tile
        val x_tile = SRAM[T](tileSizeN)
        // Load vector tile
        x_tile load input(j::j+elements_n par 1/*load_par*/)
        // Create A tile
        val a_tile = SRAM[T](tileSizeM, tileSizeN)
        // Load matrix tile
        a_tile load filter(i::i+elements_m, j::j+elements_n par 1/*load_par*/)
        Foreach(elements_m by 1 par 1/*m_inner_par*/){ii =>
          // Dot product of one matrix row tile with the vector tile.
          y_tile_local(ii) = Reduce(Reg[T])(elements_n by 1 par 1/*n_inner_par*/){jj =>
            a_tile(ii,jj) * x_tile(jj)
          }{_+_}
        }
        y_tile_local
      }{_+_}
      output(i::i+elements_m par 1/*store_par*/) store y_tile
    }
  }
} | stanford-ppl/spatial-lang | spatial/core/src/spatial/stdlib/Convolution.scala | Scala | mit | 10,071 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.collection
import org.locationtech.geomesa.utils.collection.WordBitSet.Word
/**
 * A bit set backed by ints (to reduce memory footprint for small sizes).
 *
 * Provides access to the underlying words, for low-level synchronization. Not thread safe.
 */
trait WordBitSet {

  /**
   * Gets the word holding the bit for the given value. The returned word can be
   * synchronized on for any operations involving the given value.
   *
   * @param value value
   * @return word containing the bit for `value`
   */
  def word(value: Int): Word
}
object WordBitSet {

  // Shift amount used in place of division by 32 (one int holds 32 bits).
  private final val Divisor = 5

  /**
   * Creates a bit set capable of holding numbers up to `length`, picking a
   * hand-specialized implementation for up to six words and falling back to
   * an array-backed one beyond that.
   *
   * @param length max int to be stored in the bit set
   * @return a word bit set sized for `length`
   */
  def apply(length: Int): WordBitSet = {
    val count = IntBitSet.size(length)
    count match {
      case 1 => new WordBitSet32()
      case 2 => new WordBitSet64()
      case 3 => new WordBitSet96()
      case 4 => new WordBitSet128()
      case 5 => new WordBitSet160()
      case 6 => new WordBitSet192()
      case n => new WordBitSetN(n)
    }
  }

  /**
   * A single 32-bit word of the bit set. Shift amounts are taken modulo 32 by
   * the JVM, so callers may pass the full value rather than `value % 32`.
   *
   * @param mask the initial bit mask for this word
   */
  class Word(private var mask: Int = 0) {

    /**
     * Checks whether the value is contained in the set or not.
     *
     * @param value value to check
     * @return true if contains, false otherwise
     */
    def contains(value: Int): Boolean = ((mask >>> value) & 1) == 1

    /**
     * Adds the value to the set, if it is not present.
     *
     * @param value value to add
     * @return true if value was added, false if value was already present
     */
    def add(value: Int): Boolean = {
      if (contains(value)) { false } else {
        mask |= (1 << value)
        true
      }
    }

    /**
     * Removes the value from the set, if it is present.
     *
     * @param value value to remove
     * @return true if value was removed, false if value was not present
     */
    def remove(value: Int): Boolean = {
      if (!contains(value)) { false } else {
        mask &= ~(1 << value)
        true
      }
    }
  }

  /** Bit set for tracking values < 32. */
  class WordBitSet32 extends WordBitSet {
    private val w0 = new Word()
    override def word(value: Int): Word =
      if ((value >> Divisor) == 0) { w0 } else { throw new ArrayIndexOutOfBoundsException(value) }
  }

  /** Bit set for tracking values < 64. */
  class WordBitSet64 extends WordBitSet {
    private val w0 = new Word()
    private val w1 = new Word()
    override def word(value: Int): Word = {
      val i = value >> Divisor
      if (i == 0) { w0 }
      else if (i == 1) { w1 }
      else { throw new ArrayIndexOutOfBoundsException(value) }
    }
  }

  /** Bit set for tracking values < 96. */
  class WordBitSet96 extends WordBitSet {
    private val w0 = new Word()
    private val w1 = new Word()
    private val w2 = new Word()
    override def word(value: Int): Word = {
      val i = value >> Divisor
      if (i == 0) { w0 }
      else if (i == 1) { w1 }
      else if (i == 2) { w2 }
      else { throw new ArrayIndexOutOfBoundsException(value) }
    }
  }

  /** Bit set for tracking values < 128. */
  class WordBitSet128 extends WordBitSet {
    private val w0 = new Word()
    private val w1 = new Word()
    private val w2 = new Word()
    private val w3 = new Word()
    override def word(value: Int): Word = {
      val i = value >> Divisor
      if (i == 0) { w0 }
      else if (i == 1) { w1 }
      else if (i == 2) { w2 }
      else if (i == 3) { w3 }
      else { throw new ArrayIndexOutOfBoundsException(value) }
    }
  }

  /** Bit set for tracking values < 160. */
  class WordBitSet160 extends WordBitSet {
    private val w0 = new Word()
    private val w1 = new Word()
    private val w2 = new Word()
    private val w3 = new Word()
    private val w4 = new Word()
    override def word(value: Int): Word = {
      val i = value >> Divisor
      if (i == 0) { w0 }
      else if (i == 1) { w1 }
      else if (i == 2) { w2 }
      else if (i == 3) { w3 }
      else if (i == 4) { w4 }
      else { throw new ArrayIndexOutOfBoundsException(value) }
    }
  }

  /** Bit set for tracking values < 192. */
  class WordBitSet192 extends WordBitSet {
    private val w0 = new Word()
    private val w1 = new Word()
    private val w2 = new Word()
    private val w3 = new Word()
    private val w4 = new Word()
    private val w5 = new Word()
    override def word(value: Int): Word = {
      val i = value >> Divisor
      if (i == 0) { w0 }
      else if (i == 1) { w1 }
      else if (i == 2) { w2 }
      else if (i == 3) { w3 }
      else if (i == 4) { w4 }
      else if (i == 5) { w5 }
      else { throw new ArrayIndexOutOfBoundsException(value) }
    }
  }

  /**
   * Bit set for tracking any number of values.
   *
   * @param n number of words
   */
  class WordBitSetN(n: Int) extends WordBitSet {
    private val words = Array.fill(n)(new Word())
    override def word(value: Int): Word = words(value >> Divisor)
  }
}
| locationtech/geomesa | geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/collection/WordBitSet.scala | Scala | apache-2.0 | 5,896 |
package actors
import akka.actor.{Actor, Props}
import akka.actor.Actor.Receive
import services.{ExampleService, ExampleServiceImpl}
import scala.concurrent.Future
import akka.pattern.pipe
/**
 * Actor that answers any incoming message by asking the injected service for
 * its message and piping the resulting Future back to the sender.
 * The self-type requires an [[ExampleActorComponent]] mixin to supply `service`.
 */
class ExampleActor extends Actor {
  self: ExampleActorComponent =>
  import context.dispatcher

  override def receive: Receive = {
    // Any message triggers a service call; the Future's result (or failure)
    // is forwarded to the original sender via pipeTo.
    case _ => service.getServiceMessage().pipeTo(sender())
  }
}
object ExampleActor {
  /** Props for the production actor wired with the default service implementation. */
  def props: Props = Props(classOf[DefaultExampleActor])
}
/** Cake-pattern component declaring the service dependency of ExampleActor. */
trait ExampleActorComponent {
  val service: ExampleService
}
/** Production actor: ExampleActor wired with the default service implementation. */
class DefaultExampleActor extends ExampleActor with DefaultExampleActorComponent

/** Supplies the concrete ExampleServiceImpl to the component. */
trait DefaultExampleActorComponent extends ExampleActorComponent {
  val service: ExampleService = new ExampleServiceImpl
}
| eggm0n/microservices-template | example-service/src/main/scala/actors/ExampleActor.scala | Scala | mit | 740 |
package com.geteit.rcouch.views
/**
 * A CouchDB view belonging to a design document.
 *
 * @param name   view name
 * @param doc    design document that defines the view
 * @param reduce whether to apply the view's reduce function when querying
 *               (presumably mapped to the `reduce` query parameter -- TODO confirm)
 */
case class View(name: String, doc: DesignDocument, reduce: Boolean = false) {
  // Full path of the view under its design document: <doc.path>/_view/<name>.
  val path = doc.path / "_view" / name
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.scheduler
import akka.actor.{ActorRef, ActorSystem, Props, Actor}
import org.apache.spark.{SparkException, SparkEnv, Logging}
import org.apache.spark.streaming.{Checkpoint, Time, CheckpointWriter}
import org.apache.spark.streaming.util.{ManualClock, RecurringTimer, Clock}
import scala.util.{Failure, Success, Try}
/** Event classes for JobGenerator */
private[scheduler] sealed trait JobGeneratorEvent
// Ask the generator to create and submit jobs for the given batch time.
private[scheduler] case class GenerateJobs(time: Time) extends JobGeneratorEvent
// Clear DStream metadata up to `time` (sent from onBatchCompletion).
private[scheduler] case class ClearMetadata(time: Time) extends JobGeneratorEvent
// Write a checkpoint for `time` if checkpointing is enabled and due.
private[scheduler] case class DoCheckpoint(time: Time) extends JobGeneratorEvent
// Clear checkpoint data for `time` (sent from onCheckpointCompletion).
private[scheduler] case class ClearCheckpointData(time: Time) extends JobGeneratorEvent
/**
 * This class generates jobs from DStreams as well as drives checkpointing and cleaning
 * up DStream metadata.
 *
 * All work is serialized through a single internal actor (`eventActor`), so the
 * private handlers below run one event at a time.
 */
private[streaming]
class JobGenerator(jobScheduler: JobScheduler) extends Logging {

  private val ssc = jobScheduler.ssc
  private val graph = ssc.graph

  // Clock implementation is pluggable via "spark.streaming.clock" so tests can
  // substitute a ManualClock (see restart() below).
  val clock = {
    val clockClass = ssc.sc.conf.get(
      "spark.streaming.clock", "org.apache.spark.streaming.util.SystemClock")
    Class.forName(clockClass).newInstance().asInstanceOf[Clock]
  }

  // Fires once per batch interval and posts a GenerateJobs event for that time.
  private val timer = new RecurringTimer(clock, ssc.graph.batchDuration.milliseconds,
    longTime => eventActor ! GenerateJobs(new Time(longTime)))

  // Created lazily so checkpoint settings applied after construction are honored;
  // null when checkpointing is not configured.
  private lazy val checkpointWriter = if (ssc.checkpointDuration != null && ssc.checkpointDir != null) {
    new CheckpointWriter(this, ssc.conf, ssc.checkpointDir, ssc.sparkContext.hadoopConfiguration)
  } else {
    null
  }

  // eventActor is created when generator starts.
  // This not being null means the scheduler has been started and not stopped
  private var eventActor: ActorRef = null

  /** Start generation of jobs */
  def start() = synchronized {
    if (eventActor != null) {
      throw new SparkException("JobGenerator already started")
    }

    eventActor = ssc.env.actorSystem.actorOf(Props(new Actor {
      def receive = {
        case event: JobGeneratorEvent =>
          logDebug("Got event of type " + event.getClass.getName)
          processEvent(event)
      }
    }), "JobGenerator")
    // Recover from a checkpoint if one exists, otherwise start from scratch.
    if (ssc.isCheckpointPresent) {
      restart()
    } else {
      startFirstTime()
    }
  }

  /** Stop generation of jobs */
  def stop() = synchronized {
    if (eventActor != null) {
      // Stop the timer first so no further GenerateJobs events are produced,
      // then the actor, the checkpoint writer, and finally the DStream graph.
      timer.stop()
      ssc.env.actorSystem.stop(eventActor)
      if (checkpointWriter != null) checkpointWriter.stop()
      ssc.graph.stop()
      logInfo("JobGenerator stopped")
    }
  }

  /**
   * On batch completion, clear old metadata and checkpoint computation.
   */
  def onBatchCompletion(time: Time) {
    eventActor ! ClearMetadata(time)
  }

  /** Called when the checkpoint for `time` has been written; triggers cleanup. */
  def onCheckpointCompletion(time: Time) {
    eventActor ! ClearCheckpointData(time)
  }

  /** Processes all events */
  private def processEvent(event: JobGeneratorEvent) {
    event match {
      case GenerateJobs(time) => generateJobs(time)
      case ClearMetadata(time) => clearMetadata(time)
      case DoCheckpoint(time) => doCheckpoint(time)
      case ClearCheckpointData(time) => clearCheckpointData(time)
    }
  }

  /** Starts the generator for the first time */
  private def startFirstTime() {
    val startTime = new Time(timer.getStartTime())
    // The graph's zero time is one batch before the first batch boundary.
    graph.start(startTime - graph.batchDuration)
    timer.start(startTime.milliseconds)
    logInfo("JobGenerator started at " + startTime)
  }

  /** Restarts the generator based on the information in checkpoint */
  private def restart() {
    // If manual clock is being used for testing, then
    // either set the manual clock to the last checkpointed time,
    // or if the property is defined set it to that time
    if (clock.isInstanceOf[ManualClock]) {
      val lastTime = ssc.initialCheckpoint.checkpointTime.milliseconds
      val jumpTime = ssc.sc.conf.getLong("spark.streaming.manualClock.jump", 0)
      clock.asInstanceOf[ManualClock].setTime(lastTime + jumpTime)
    }

    val batchDuration = ssc.graph.batchDuration

    // Batches when the master was down, that is,
    // between the checkpoint and current restart time
    val checkpointTime = ssc.initialCheckpoint.checkpointTime
    val restartTime = new Time(timer.getRestartTime(graph.zeroTime.milliseconds))
    val downTimes = checkpointTime.until(restartTime, batchDuration)
    logInfo("Batches during down time (" + downTimes.size + " batches): "
      + downTimes.mkString(", "))

    // Batches that were unprocessed before failure
    val pendingTimes = ssc.initialCheckpoint.pendingTimes.sorted(Time.ordering)
    logInfo("Batches pending processing (" + pendingTimes.size + " batches): " +
      pendingTimes.mkString(", "))
    // Reschedule jobs for these times
    val timesToReschedule = (pendingTimes ++ downTimes).distinct.sorted(Time.ordering)
    logInfo("Batches to reschedule (" + timesToReschedule.size + " batches): " +
      timesToReschedule.mkString(", "))
    timesToReschedule.foreach(time =>
      jobScheduler.runJobs(time, graph.generateJobs(time))
    )

    // Restart the timer
    timer.start(restartTime.milliseconds)
    logInfo("JobGenerator restarted at " + restartTime)
  }

  /** Generate jobs and perform checkpoint for the given `time`. */
  private def generateJobs(time: Time) {
    SparkEnv.set(ssc.env)
    // A failure here is reported to the scheduler rather than thrown, so the
    // event loop keeps running; a checkpoint is requested either way.
    Try(graph.generateJobs(time)) match {
      case Success(jobs) => jobScheduler.runJobs(time, jobs)
      case Failure(e) => jobScheduler.reportError("Error generating jobs for time " + time, e)
    }
    eventActor ! DoCheckpoint(time)
  }

  /** Clear DStream metadata for the given `time`. */
  private def clearMetadata(time: Time) {
    ssc.graph.clearMetadata(time)
    eventActor ! DoCheckpoint(time)
  }

  /** Clear DStream checkpoint data for the given `time`. */
  private def clearCheckpointData(time: Time) {
    ssc.graph.clearCheckpointData(time)
  }

  /** Perform checkpoint for the give `time`. */
  private def doCheckpoint(time: Time) = synchronized {
    // Only checkpoint on multiples of the configured checkpoint duration.
    if (checkpointWriter != null && (time - graph.zeroTime).isMultipleOf(ssc.checkpointDuration)) {
      logInfo("Checkpointing graph for time " + time)
      ssc.graph.updateCheckpointData(time)
      checkpointWriter.write(new Checkpoint(ssc, time))
    }
  }
}
| cloudera/spark | streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobGenerator.scala | Scala | apache-2.0 | 7,098 |
package gitbucket.core.api
import java.util.{Calendar, Date, TimeZone}
import gitbucket.core.model._
import gitbucket.core.plugin.PluginInfo
import gitbucket.core.service.ProtectedBranchService.ProtectedBranchInfo
import gitbucket.core.service.RepositoryService.RepositoryInfo
import gitbucket.core.util.JGitUtil.{CommitInfo, DiffInfo, FileInfo, TagInfo}
import gitbucket.core.util.RepositoryName
import org.eclipse.jgit.diff.DiffEntry.ChangeType
import org.eclipse.jgit.lib.ObjectId
object ApiSpecModels {
  // JSON rendering context shared by all fixtures: base URL, no API token.
  implicit val context = JsonFormat.Context("http://gitbucket.exmple.com", None)

  // Reference timestamp used across fixtures: 2011-04-14T16:00:49Z
  // (Calendar months are 0-based, so 3 = April).
  val date1 = {
    val d = Calendar.getInstance(TimeZone.getTimeZone("UTC"))
    d.set(2011, 3, 14, 16, 0, 49)
    d.getTime
  }

  // Parses an ISO-8601 UTC timestamp such as "2015-05-05T23:40:27Z" into a Date.
  def date(date: String): Date = {
    val f = new java.text.SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'")
    f.setTimeZone(TimeZone.getTimeZone("UTC"))
    f.parse(date)
  }
  // Models
  // Database-model fixtures, each mirroring one row of the corresponding table.

  // A plain (non-admin, non-group) user account shared by most fixtures.
  val account = Account(
    userName = "octocat",
    fullName = "octocat",
    mailAddress = "octocat@example.com",
    password = "1234",
    isAdmin = false,
    url = None,
    registeredDate = date1,
    updatedDate = date1,
    lastLoginDate = Some(date1),
    image = None,
    isGroupAccount = false,
    isRemoved = false,
    description = None
  )

  // Commit id reused wherever a SHA-1 is needed.
  val sha1 = "6dcb09b5b57875f334f61aebed695e2e4193db5e"
  val repo1Name = RepositoryName("octocat/Hello-World")

  val repository = Repository(
    userName = repo1Name.owner,
    repositoryName = repo1Name.name,
    isPrivate = false,
    description = Some("This your first repo!"),
    defaultBranch = "master",
    registeredDate = date1,
    updatedDate = date1,
    lastActivityDate = date1,
    originUserName = Some("octopus plus cat"),
    originRepositoryName = Some("Hello World"),
    parentUserName = Some("github"),
    parentRepositoryName = Some("Hello-World"),
    options = RepositoryOptions(
      issuesOption = "PUBLIC",
      externalIssuesUrl = Some("https://external.com/gitbucket"),
      wikiOption = "PUBLIC",
      externalWikiUrl = Some("https://external.com/gitbucket"),
      allowFork = true,
      mergeOptions = "merge-commit,squash,rebase",
      defaultMergeOption = "merge-commit",
      safeMode = true
    )
  )

  val repositoryInfo = RepositoryInfo(
    owner = repo1Name.owner,
    name = repo1Name.name,
    repository = repository,
    issueCount = 1,
    pullCount = 1,
    forkedCount = 1,
    milestoneCount = 1,
    branchList = Seq("master", "develop"),
    tags = Seq(
      TagInfo(
        name = "v1.0",
        time = date("2015-05-05T23:40:27Z"),
        commitId = "id1",
        message = "1.0 released",
        objectId = "id1"
      ),
      TagInfo(
        name = "v2.0",
        time = date("2016-05-05T23:40:27Z"),
        commitId = "id2",
        message = "2.0 released",
        objectId = "id2"
      )
    ),
    managers = Seq("myboss")
  )

  val label = Label(
    userName = repo1Name.owner,
    repositoryName = repo1Name.name,
    labelId = 10,
    labelName = "bug",
    color = "f29513"
  )

  // An open plain issue; issuePR below is the pull-request variant of it.
  val issue = Issue(
    userName = repo1Name.owner,
    repositoryName = repo1Name.name,
    issueId = 1347,
    openedUserName = "bear",
    milestoneId = None,
    priorityId = None,
    assignedUserName = None,
    title = "Found a bug",
    content = Some("I'm having a problem with this."),
    closed = false,
    registeredDate = date1,
    updatedDate = date1,
    isPullRequest = false
  )

  // Same issue id, but closed and flagged as a pull request.
  val issuePR = issue.copy(
    title = "new-feature",
    content = Some("Please pull these awesome changes"),
    closed = true,
    isPullRequest = true
  )

  val issueComment = IssueComment(
    userName = repo1Name.owner,
    repositoryName = repo1Name.name,
    issueId = issue.issueId,
    commentId = 1,
    action = "comment",
    commentedUserName = "bear",
    content = "Me too",
    registeredDate = date1,
    updatedDate = date1
  )

  val pullRequest = PullRequest(
    userName = repo1Name.owner,
    repositoryName = repo1Name.name,
    issueId = issuePR.issueId,
    branch = "master",
    requestUserName = "bear",
    requestRepositoryName = repo1Name.name,
    requestBranch = "new-topic",
    commitIdFrom = sha1,
    commitIdTo = sha1,
    isDraft = true
  )

  // Review comment attached to a commit line within the pull request above.
  val commitComment = CommitComment(
    userName = repo1Name.owner,
    repositoryName = repo1Name.name,
    commitId = sha1,
    commentId = 29724692,
    commentedUserName = "bear",
    content = "Maybe you should use more emoji on this line.",
    fileName = Some("README.md"),
    oldLine = Some(1),
    newLine = Some(1),
    registeredDate = date("2015-05-05T23:40:27Z"),
    updatedDate = date("2015-05-05T23:40:27Z"),
    issueId = Some(issuePR.issueId),
    originalCommitId = sha1,
    originalOldLine = None,
    originalNewLine = None
  )

  val commitStatus = CommitStatus(
    commitStatusId = 1,
    userName = repo1Name.owner,
    repositoryName = repo1Name.name,
    commitId = sha1,
    context = "Default",
    state = CommitState.SUCCESS,
    targetUrl = Some("https://ci.example.com/1000/output"),
    description = Some("Build has completed successfully"),
    creator = account.userName,
    registeredDate = date1,
    updatedDate = date1
  )

  val milestone = Milestone(
    userName = repo1Name.owner,
    repositoryName = repo1Name.name,
    milestoneId = 1,
    title = "Test milestone",
    description = Some("Milestone description"),
    dueDate = Some(date1),
    closedDate = Some(date1)
  )
  // APIs
  // API-layer fixtures built from the model fixtures above; each corresponds
  // to one of the json* expected strings further below.

  val apiUser = ApiUser(account)

  val apiRepository = ApiRepository(
    repository = repository,
    owner = apiUser,
    forkedCount = repositoryInfo.forkedCount,
    watchers = 0
  )

  val apiLabel = ApiLabel(
    label = label,
    repositoryName = repo1Name
  )

  val apiMilestone = ApiMilestone(
    repository = repository,
    milestone = milestone,
    open_issue_count = 1,
    closed_issue_count = 1
  )

  val apiIssue = ApiIssue(
    issue = issue,
    repositoryName = repo1Name,
    user = apiUser,
    assignee = Some(apiUser),
    labels = List(apiLabel),
    milestone = Some(apiMilestone)
  )

  // Same as apiIssue but without an assignee.
  val apiNotAssignedIssue = ApiIssue(
    issue = issue,
    repositoryName = repo1Name,
    user = apiUser,
    assignee = None,
    labels = List(apiLabel),
    milestone = Some(apiMilestone)
  )

  val apiIssuePR = ApiIssue(
    issue = issuePR,
    repositoryName = repo1Name,
    user = apiUser,
    assignee = Some(apiUser),
    labels = List(apiLabel),
    milestone = Some(apiMilestone)
  )

  val apiComment = ApiComment(
    comment = issueComment,
    repositoryName = repo1Name,
    issueId = issueComment.issueId,
    user = apiUser,
    isPullRequest = false
  )

  val apiCommentPR = ApiComment(
    comment = issueComment,
    repositoryName = repo1Name,
    issueId = issueComment.issueId,
    user = apiUser,
    isPullRequest = true
  )

  val apiPullRequest = ApiPullRequest(
    issue = issuePR,
    pullRequest = pullRequest,
    headRepo = apiRepository,
    baseRepo = apiRepository,
    user = apiUser,
    labels = List(apiLabel),
    assignee = Some(apiUser),
    mergedComment = Some((issueComment, account))
  )

  // https://developer.github.com/v3/activity/events/types/#pullrequestreviewcommentevent
  val apiPullRequestReviewComment = ApiPullRequestReviewComment(
    comment = commitComment,
    commentedUser = apiUser,
    repositoryName = repo1Name,
    issueId = commitComment.issueId.get
  )

  // Factory producing a CommitInfo for an arbitrary commit id; the two
  // trailing positional Nones are CommitInfo's final optional fields.
  val commitInfo = (id: String) =>
    CommitInfo(
      id = id,
      shortMessage = "short message",
      fullMessage = "full message",
      parents = List("1da452aa92d7db1bc093d266c80a69857718c406"),
      authorTime = date1,
      authorName = account.userName,
      authorEmailAddress = account.mailAddress,
      commitTime = date1,
      committerName = account.userName,
      committerEmailAddress = account.mailAddress,
      None,
      None
    )

  val apiCommitListItem = ApiCommitListItem(
    commit = commitInfo(sha1),
    repositoryName = repo1Name
  )

  val apiCommit = {
    val commit = commitInfo(sha1)
    ApiCommit(
      id = commit.id,
      message = commit.fullMessage,
      timestamp = commit.commitTime,
      added = Nil,
      removed = Nil,
      modified = List("README.md"),
      author = ApiPersonIdent.author(commit),
      committer = ApiPersonIdent.committer(commit)
    )(repo1Name)
  }

  val apiCommits = ApiCommits(
    repositoryName = repo1Name,
    commitInfo = commitInfo(sha1),
    diffs = Seq(
      DiffInfo(
        changeType = ChangeType.MODIFY,
        oldPath = "doc/README.md",
        newPath = "doc/README.md",
        oldContent = None,
        newContent = None,
        oldIsImage = false,
        newIsImage = false,
        oldObjectId = None,
        newObjectId = Some(sha1),
        oldMode = "old_mode",
        newMode = "new_mode",
        tooLarge = false,
        patch = Some("""@@ -1 +1,2 @@
                       |-body1
                       |\ No newline at end of file
                       |+body1
                       |+body2
                       |\ No newline at end of file""".stripMargin)
      )
    ),
    author = account,
    committer = account,
    commentCount = 2
  )

  val apiCommitStatus = ApiCommitStatus(
    status = commitStatus,
    creator = apiUser
  )

  val apiCombinedCommitStatus = ApiCombinedCommitStatus(
    sha = sha1,
    statuses = Iterable((commitStatus, account)),
    repository = apiRepository
  )

  // Branch protection as produced by the server (output direction).
  val apiBranchProtectionOutput = ApiBranchProtection(
    info = ProtectedBranchInfo(
      owner = repo1Name.owner,
      repository = repo1Name.name,
      branch = "master",
      enabled = true,
      contexts = Seq("continuous-integration/travis-ci"),
      includeAdministrators = true
    )
  )

  // Branch protection as sent by a client (input direction, no URLs).
  val apiBranchProtectionInput = new ApiBranchProtection(
    url = None,
    enabled = true,
    required_status_checks = Some(
      ApiBranchProtection.Status(
        url = None,
        enforcement_level = ApiBranchProtection.Everyone,
        contexts = Seq("continuous-integration/travis-ci"),
        contexts_url = None
      )
    )
  )

  val apiBranch = ApiBranch(
    name = "master",
    commit = ApiBranchCommit(sha1),
    protection = apiBranchProtectionOutput
  )(
    repositoryName = repo1Name
  )

  val apiBranchForList = ApiBranchForList(
    name = "master",
    commit = ApiBranchCommit(sha1)
  )

  val apiContents = ApiContents(
    fileInfo = FileInfo(
      id = ObjectId.fromString(sha1),
      isDirectory = false,
      name = "README.md",
      path = "doc/README.md",
      message = "message",
      commitId = sha1,
      time = date1,
      author = account.userName,
      mailAddress = account.mailAddress,
      linkUrl = None
    ),
    repositoryName = repo1Name,
    content = Some("README".getBytes("UTF-8"))
  )

  val apiEndPoint = ApiEndPoint()

  val apiError = ApiError(
    message = "A repository with this name already exists on this account",
    documentation_url = Some("https://developer.github.com/v3/repos/#create")
  )

  val apiGroup = ApiGroup(
    account.copy(
      isAdmin = true,
      isGroupAccount = true,
      description = Some("Admin group")
    )
  )

  val apiPlugin = ApiPlugin(
    plugin = PluginInfo(
      pluginId = "gist",
      pluginName = "Gist Plugin",
      pluginVersion = "4.16.0",
      gitbucketVersion = Some("4.30.1"),
      description = "Provides Gist feature on GitBucket.",
      pluginClass = null,
      pluginJar = new java.io.File("gitbucket-gist-plugin-gitbucket_4.30.0-SNAPSHOT-4.17.0.jar"),
      classLoader = null
    )
  )

  val apiPusher = ApiPusher(account)

  //have both urls as https, as the expected samples are using https
  val gitHubContext = JsonFormat.Context("https://api.github.com", Some("https://api.github.com"))

  val apiRefHeadsMaster = ApiRef(
    ref = "refs/heads/master",
    url = ApiPath("/repos/gitbucket/gitbucket/git/refs/heads/master"),
    node_id = "MDM6UmVmOTM1MDc0NjpyZWZzL2hlYWRzL21hc3Rlcg==",
    `object` = ApiRefCommit(
      sha = "6b2d124d092402f2c2b7131caada05ead9e7de6d",
      `type` = "commit",
      url = ApiPath("/repos/gitbucket/gitbucket/git/commits/6b2d124d092402f2c2b7131caada05ead9e7de6d")
    )
  )

  val apiRefTag = ApiRef(
    ref = "refs/tags/1.0",
    url = ApiPath("/repos/gitbucket/gitbucket/git/refs/tags/1.0"),
    node_id = "MDM6UmVmOTM1MDc0NjpyZWZzL3RhZ3MvMS4w",
    `object` = ApiRefCommit(
      sha = "1f164ecf2f59190afc8d7204a221c739e707df4c",
      `type` = "tag",
      url = ApiPath("/repos/gitbucket/gitbucket/git/tags/1f164ecf2f59190afc8d7204a221c739e707df4c")
    )
  )

  val assetFileName = "010203040a0b0c0d"

  val apiReleaseAsset = ApiReleaseAsset(
    name = "release.zip",
    size = 100
  )(
    tag = "tag1",
    fileName = assetFileName,
    repositoryName = repo1Name
  )

  val apiRelease = ApiRelease(
    name = "release1",
    tag_name = "tag1",
    body = Some("content"),
    author = apiUser,
    assets = Seq(apiReleaseAsset)
  )
  // JSON String for APIs
  // Expected serialized forms of the api* fixtures above. These literals are
  // asserted against actual serializer output, so their exact bytes matter —
  // do not reformat or "fix" them without changing the serializer too.

  val jsonUser = """{
                   |"login":"octocat",
                   |"email":"octocat@example.com",
                   |"type":"User",
                   |"site_admin":false,
                   |"created_at":"2011-04-14T16:00:49Z",
                   |"id":0,
                   |"url":"http://gitbucket.exmple.com/api/v3/users/octocat",
                   |"html_url":"http://gitbucket.exmple.com/octocat",
                   |"avatar_url":"http://gitbucket.exmple.com/octocat/_avatar"
                   |}""".stripMargin

  val jsonRepository = s"""{
                          |"name":"Hello-World",
                          |"full_name":"octocat/Hello-World",
                          |"description":"This your first repo!",
                          |"watchers":0,
                          |"forks":1,
                          |"private":false,
                          |"default_branch":"master",
                          |"owner":$jsonUser,
                          |"has_issues":true,
                          |"id":0,
                          |"forks_count":1,
                          |"watchers_count":0,
                          |"url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World",
                          |"clone_url":"http://gitbucket.exmple.com/git/octocat/Hello-World.git",
                          |"html_url":"http://gitbucket.exmple.com/octocat/Hello-World"
                          |}""".stripMargin

  val jsonLabel =
    """{"name":"bug","color":"f29513","url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/labels/bug"}"""

  val jsonMilestone = """{
                        |"url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/milestones/1",
                        |"html_url":"http://gitbucket.exmple.com/octocat/Hello-World/milestone/1",
                        |"id":1,
                        |"number":1,
                        |"state":"closed",
                        |"title":"Test milestone",
                        |"description":"Milestone description",
                        |"open_issues":1,"closed_issues":1,
                        |"closed_at":"2011-04-14T16:00:49Z",
                        |"due_on":"2011-04-14T16:00:49Z"
                        |}""".stripMargin

  val jsonIssue = s"""{
                     |"number":1347,
                     |"title":"Found a bug",
                     |"user":$jsonUser,
                     |"assignee":$jsonUser,
                     |"labels":[$jsonLabel],
                     |"state":"open",
                     |"created_at":"2011-04-14T16:00:49Z",
                     |"updated_at":"2011-04-14T16:00:49Z",
                     |"body":"I'm having a problem with this.",
                     |"milestone":$jsonMilestone,
                     |"id":0,
                     |"assignees":[$jsonUser],
                     |"comments_url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/issues/1347/comments",
                     |"html_url":"http://gitbucket.exmple.com/octocat/Hello-World/issues/1347"
                     |}""".stripMargin

  val jsonNotAssignedIssue = s"""{
                                |"number":1347,
                                |"title":"Found a bug",
                                |"user":$jsonUser,
                                |"labels":[$jsonLabel],
                                |"state":"open",
                                |"created_at":"2011-04-14T16:00:49Z",
                                |"updated_at":"2011-04-14T16:00:49Z",
                                |"body":"I'm having a problem with this.",
                                |"milestone":$jsonMilestone,
                                |"id":0,
                                |"assignees":[],
                                |"comments_url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/issues/1347/comments",
                                |"html_url":"http://gitbucket.exmple.com/octocat/Hello-World/issues/1347"
                                |}""".stripMargin

  val jsonIssuePR = s"""{
                       |"number":1347,
                       |"title":"new-feature",
                       |"user":$jsonUser,
                       |"assignee":$jsonUser,
                       |"labels":[$jsonLabel],
                       |"state":"closed",
                       |"created_at":"2011-04-14T16:00:49Z",
                       |"updated_at":"2011-04-14T16:00:49Z",
                       |"body":"Please pull these awesome changes",
                       |"milestone":$jsonMilestone,
                       |"id":0,
                       |"assignees":[$jsonUser],
                       |"comments_url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/issues/1347/comments",
                       |"html_url":"http://gitbucket.exmple.com/octocat/Hello-World/pull/1347",
                       |"pull_request":{
                       |"url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/pulls/1347",
                       |"html_url":"http://gitbucket.exmple.com/octocat/Hello-World/pull/1347"}
                       |}""".stripMargin

  val jsonPullRequest = s"""{
                           |"number":1347,
                           |"state":"closed",
                           |"updated_at":"2011-04-14T16:00:49Z",
                           |"created_at":"2011-04-14T16:00:49Z",
                           |"head":{"sha":"6dcb09b5b57875f334f61aebed695e2e4193db5e","ref":"new-topic","repo":$jsonRepository,"label":"new-topic","user":$jsonUser},
                           |"base":{"sha":"6dcb09b5b57875f334f61aebed695e2e4193db5e","ref":"master","repo":$jsonRepository,"label":"master","user":$jsonUser},
                           |"merged":true,
                           |"merged_at":"2011-04-14T16:00:49Z",
                           |"merged_by":$jsonUser,
                           |"title":"new-feature",
                           |"body":"Please pull these awesome changes",
                           |"user":$jsonUser,
                           |"labels":[$jsonLabel],
                           |"assignee":$jsonUser,
                           |"draft":true,
                           |"id":0,
                           |"html_url":"http://gitbucket.exmple.com/octocat/Hello-World/pull/1347",
                           |"url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/pulls/1347",
                           |"commits_url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/pulls/1347/commits",
                           |"review_comments_url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/pulls/1347/comments",
                           |"review_comment_url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/pulls/comments/{number}",
                           |"comments_url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/issues/1347/comments",
                           |"statuses_url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e"
                           |}""".stripMargin

  val jsonPullRequestReviewComment = s"""{
                                        |"id":29724692,
                                        |"path":"README.md",
                                        |"commit_id":"6dcb09b5b57875f334f61aebed695e2e4193db5e",
                                        |"user":$jsonUser,
                                        |"body":"Maybe you should use more emoji on this line.",
                                        |"created_at":"2015-05-05T23:40:27Z",
                                        |"updated_at":"2015-05-05T23:40:27Z",
                                        |"url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/pulls/comments/29724692",
                                        |"html_url":"http://gitbucket.exmple.com/octocat/Hello-World/pull/1347#discussion_r29724692",
                                        |"pull_request_url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/pulls/1347",
                                        |"_links":{
                                        |"self":{"href":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/pulls/comments/29724692"},
                                        |"html":{"href":"http://gitbucket.exmple.com/octocat/Hello-World/pull/1347#discussion_r29724692"},
                                        |"pull_request":{"href":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/pulls/1347"}}
                                        |}""".stripMargin

  val jsonComment = s"""{
                       |"id":1,
                       |"user":$jsonUser,
                       |"body":"Me too",
                       |"created_at":"2011-04-14T16:00:49Z",
                       |"updated_at":"2011-04-14T16:00:49Z",
                       |"html_url":"http://gitbucket.exmple.com/octocat/Hello-World/issues/1347#comment-1"
                       |}""".stripMargin

  val jsonCommentPR = s"""{
                         |"id":1,
                         |"user":$jsonUser,
                         |"body":"Me too",
                         |"created_at":"2011-04-14T16:00:49Z",
                         |"updated_at":"2011-04-14T16:00:49Z",
                         |"html_url":"http://gitbucket.exmple.com/octocat/Hello-World/pull/1347#comment-1"
                         |}""".stripMargin

  val jsonCommitListItem = s"""{
                              |"sha":"6dcb09b5b57875f334f61aebed695e2e4193db5e",
                              |"commit":{
                              |"message":"full message",
                              |"author":{"name":"octocat","email":"octocat@example.com","date":"2011-04-14T16:00:49Z"},
                              |"committer":{"name":"octocat","email":"octocat@example.com","date":"2011-04-14T16:00:49Z"},
                              |"url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/git/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e"
                              |},
                              |"parents":[{
                              |"sha":"1da452aa92d7db1bc093d266c80a69857718c406",
                              |"url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/commits/1da452aa92d7db1bc093d266c80a69857718c406"}],
                              |"url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e"
                              |}""".stripMargin

  val jsonCommit = (id: String) => s"""{
                                      |"id":"$id",
                                      |"message":"full message",
                                      |"timestamp":"2011-04-14T16:00:49Z",
                                      |"added":[],
                                      |"removed":[],
                                      |"modified":["README.md"],
                                      |"author":{"name":"octocat","email":"octocat@example.com","date":"2011-04-14T16:00:49Z"},
                                      |"committer":{"name":"octocat","email":"octocat@example.com","date":"2011-04-14T16:00:49Z"},
                                      |"url":"http://gitbucket.exmple.com/api/v3/octocat/Hello-World/commits/$id",
                                      |"html_url":"http://gitbucket.exmple.com/octocat/Hello-World/commit/$id"
                                      |}""".stripMargin

  // NOTE(review): the html_url below is missing a '/' between host and path
  // ("...exmple.comoctocat..."). If this matches current serializer output it
  // documents a serializer bug — confirm before touching either side.
  val jsonCommits = s"""{
                       |"url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                       |"sha":"6dcb09b5b57875f334f61aebed695e2e4193db5e",
                       |"html_url":"http://gitbucket.exmple.comoctocat/Hello-World/commit/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                       |"comment_url":"http://gitbucket.exmple.com",
                       |"commit":{
                       |"url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                       |"author":{"name":"octocat","email":"octocat@example.com","date":"2011-04-14T16:00:49Z"},
                       |"committer":{"name":"octocat","email":"octocat@example.com","date":"2011-04-14T16:00:49Z"},
                       |"message":"short message",
                       |"comment_count":2,
                       |"tree":{"url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/tree/6dcb09b5b57875f334f61aebed695e2e4193db5e","sha":"6dcb09b5b57875f334f61aebed695e2e4193db5e"}
                       |},
                       |"author":$jsonUser,
                       |"committer":$jsonUser,
                       |"parents":[{
                       |"url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/tree/1da452aa92d7db1bc093d266c80a69857718c406",
                       |"sha":"1da452aa92d7db1bc093d266c80a69857718c406"}],
                       |"stats":{"additions":2,"deletions":1,"total":3},
                       |"files":[{
                       |"filename":"doc/README.md",
                       |"additions":2,
                       |"deletions":1,
                       |"changes":3,
                       |"status":"modified",
                       |"raw_url":"http://gitbucket.exmple.com/octocat/Hello-World/raw/6dcb09b5b57875f334f61aebed695e2e4193db5e/doc/README.md",
                       |"blob_url":"http://gitbucket.exmple.com/octocat/Hello-World/blob/6dcb09b5b57875f334f61aebed695e2e4193db5e/doc/README.md",
                       |"patch":"@@ -1 +1,2 @@\\n-body1\\n\\\\ No newline at end of file\\n+body1\\n+body2\\n\\\\ No newline at end of file"}]
                       |}""".stripMargin

  val jsonCommitStatus = s"""{
                            |"created_at":"2011-04-14T16:00:49Z",
                            |"updated_at":"2011-04-14T16:00:49Z",
                            |"state":"success",
                            |"target_url":"https://ci.example.com/1000/output",
                            |"description":"Build has completed successfully",
                            |"id":1,
                            |"context":"Default",
                            |"creator":$jsonUser,
                            |"url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e/statuses"
                            |}""".stripMargin

  val jsonCombinedCommitStatus = s"""{
                                    |"state":"success",
                                    |"sha":"6dcb09b5b57875f334f61aebed695e2e4193db5e",
                                    |"total_count":1,
                                    |"statuses":[$jsonCommitStatus],
                                    |"repository":$jsonRepository,
                                    |"url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e/status"
                                    |}""".stripMargin

  val jsonBranchProtectionOutput =
    """{
      |"url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/branches/master/protection",
      |"enabled":true,
      |"required_status_checks":{
      |"url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/branches/master/protection/required_status_checks",
      |"enforcement_level":"everyone",
      |"contexts":["continuous-integration/travis-ci"],
      |"contexts_url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/branches/master/protection/required_status_checks/contexts"}
      |}""".stripMargin

  val jsonBranchProtectionInput =
    """{
      |"enabled":true,
      |"required_status_checks":{
      |"enforcement_level":"everyone",
      |"contexts":["continuous-integration/travis-ci"]
      |}
      |}""".stripMargin

  val jsonBranch = s"""{
                      |"name":"master",
                      |"commit":{"sha":"6dcb09b5b57875f334f61aebed695e2e4193db5e"},
                      |"protection":$jsonBranchProtectionOutput,
                      |"_links":{
                      |"self":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/branches/master",
                      |"html":"http://gitbucket.exmple.com/octocat/Hello-World/tree/master"}
                      |}""".stripMargin

  val jsonBranchForList = """{"name":"master","commit":{"sha":"6dcb09b5b57875f334f61aebed695e2e4193db5e"}}"""

  val jsonContents =
    """{
      |"type":"file",
      |"name":"README.md",
      |"path":"doc/README.md",
      |"sha":"6dcb09b5b57875f334f61aebed695e2e4193db5e",
      |"content":"UkVBRE1F",
      |"encoding":"base64",
      |"download_url":"http://gitbucket.exmple.com/api/v3/repos/octocat/Hello-World/raw/6dcb09b5b57875f334f61aebed695e2e4193db5e/doc/README.md"
      |}""".stripMargin

  val jsonEndPoint = """{"rate_limit_url":"http://gitbucket.exmple.com/api/v3/rate_limit"}"""

  val jsonError = """{
                    |"message":"A repository with this name already exists on this account",
                    |"documentation_url":"https://developer.github.com/v3/repos/#create"
                    |}""".stripMargin

  val jsonGroup = """{
                    |"login":"octocat",
                    |"description":"Admin group",
                    |"created_at":"2011-04-14T16:00:49Z",
                    |"id":0,
                    |"url":"http://gitbucket.exmple.com/api/v3/orgs/octocat",
                    |"html_url":"http://gitbucket.exmple.com/octocat",
                    |"avatar_url":"http://gitbucket.exmple.com/octocat/_avatar"
                    |}""".stripMargin

  val jsonPlugin = """{
                     |"id":"gist",
                     |"name":"Gist Plugin",
                     |"version":"4.16.0",
                     |"description":"Provides Gist feature on GitBucket.",
                     |"jarFileName":"gitbucket-gist-plugin-gitbucket_4.30.0-SNAPSHOT-4.17.0.jar"
                     |}""".stripMargin

  val jsonPusher = """{"name":"octocat","email":"octocat@example.com"}"""

  //I checked all refs in gitbucket repo, and there appears to be only type "commit" and type "tag"
  val jsonRef = """{"ref":"refs/heads/featureA","object":{"sha":"6dcb09b5b57875f334f61aebed695e2e4193db5e"}}"""

  val jsonRefHeadsMaster =
    """{
      |"ref": "refs/heads/master",
      |"node_id": "MDM6UmVmOTM1MDc0NjpyZWZzL2hlYWRzL21hc3Rlcg==",
      |"url": "https://api.github.com/repos/gitbucket/gitbucket/git/refs/heads/master",
      |"object": {
      |"sha": "6b2d124d092402f2c2b7131caada05ead9e7de6d",
      |"type": "commit",
      |"url": "https://api.github.com/repos/gitbucket/gitbucket/git/commits/6b2d124d092402f2c2b7131caada05ead9e7de6d"
      |}
      |}""".stripMargin

  val jsonRefTag =
    """{
      |"ref": "refs/tags/1.0",
      |"node_id": "MDM6UmVmOTM1MDc0NjpyZWZzL3RhZ3MvMS4w",
      |"url": "https://api.github.com/repos/gitbucket/gitbucket/git/refs/tags/1.0",
      |"object": {
      |"sha": "1f164ecf2f59190afc8d7204a221c739e707df4c",
      |"type": "tag",
      |"url": "https://api.github.com/repos/gitbucket/gitbucket/git/tags/1f164ecf2f59190afc8d7204a221c739e707df4c"
      |}
      |}""".stripMargin

  val jsonReleaseAsset =
    s"""{
       |"name":"release.zip",
       |"size":100,
       |"label":"release.zip",
       |"file_id":"${assetFileName}",
       |"browser_download_url":"http://gitbucket.exmple.com/octocat/Hello-World/releases/tag1/assets/${assetFileName}"
       |}""".stripMargin

  val jsonRelease =
    s"""{
       |"name":"release1",
       |"tag_name":"tag1",
       |"body":"content",
       |"author":${jsonUser},
       |"assets":[${jsonReleaseAsset}]
       |}""".stripMargin
}
| gitbucket/gitbucket | src/test/scala/gitbucket/core/api/ApiSpecModels.scala | Scala | apache-2.0 | 28,520 |
package com.codahale.jerkson.util
package scalax
package rules
/** Mixin giving a rule a human-readable name, which is also used as its `toString`. */
trait Name {
  def name: String

  override def toString = name
}
/** A factory for rules.
 *
 * @author Andrew Foggin
 *
 * Inspired by the Scala parser combinator.
 */
trait Rules {
  /** Lifts a plain function into a [[Rule]]. */
  implicit def rule[In, Out, A, X](f: In => Result[Out, A, X]): Rule[In, Out, A, X] = new DefaultRule(f)

  implicit def inRule[In, Out, A, X](rule: Rule[In, Out, A, X]): InRule[In, Out, A, X] = new InRule(rule)

  implicit def seqRule[In, A, X](rule: Rule[In, In, A, X]): SeqRule[In, A, X] = new SeqRule(rule)

  /** Pins the input type up-front so the remaining type parameters can be inferred. */
  def from[In] = new {
    def apply[Out, A, X](f: In => Result[Out, A, X]) = rule(f)
  }

  /** Creates a [[StateRules]] factory whose rules thread state of type `s`. */
  def state[s] = new StateRules {
    type S = s
    val factory = Rules.this
  }

  /** Rule that always succeeds with the given output state and value. */
  def success[Out, A](out: Out, a: A) = rule {in: Any => Success(out, a)}

  /** Rule that always fails without consuming input. */
  def failure = rule {in: Any => Failure}

  /** Rule that raises an error carrying the current input. */
  def error[In] = rule {in: In => Error(in)}

  /** Rule that raises an error carrying the given value. */
  def error[X](err: X) = rule {in: Any => Error(err)}

  /** Tries each of the given rules in order, succeeding with the first that applies. */
  def oneOf[In, Out, A, X](rules: Rule[In, Out, A, X]*): Rule[In, Out, A, X] = new Choice[In, Out, A, X] {
    val factory = Rules.this
    val choices = rules.toList
  }

  /** Like [[rule]], but attaches a name (used by `toString`) for diagnostics. */
  def ruleWithName[In, Out, A, X](_name: String,
                                  f: In => Result[Out, A, X]): Rule[In, Out, A, X] with Name =
    new DefaultRule(f) with Name {
      val name = _name
    }

  /** Default [[Rule]] implementation: simply delegates to the wrapped function. */
  class DefaultRule[In, Out, A, X](f: In => Result[Out, A, X]) extends Rule[In, Out, A, X] {
    val factory = Rules.this

    def apply(in: In) = f(in)
  }

  /** Converts a rule into a function that throws an Exception on failure.
   *
   * Note: the error type parameter was previously declared as `Any`, which
   * shadowed `scala.Any` and was misleading; it is renamed to `X`. Type
   * parameters are positional, so callers are unaffected.
   */
  def expect[In, Out, A, X](rule: Rule[In, Out, A, X]): In => A = (in) =>
    rule(in) match {
      case Success(_, a) => a
      case Failure => throw new ScalaSigParserError("Unexpected failure")
      case Error(x) => throw new ScalaSigParserError("Unexpected error: " + x)
    }
}
/**A factory for rules that apply to a particular context.
*
* @requires S the context to which rules apply.
*
* @author Andrew Foggin
*
* Inspired by the Scala parser combinator.
*/
trait StateRules {
  // The state type rules in this factory thread through unchanged (Rule[S, S, A, X]).
  type S
  type Rule[+A, +X] = rules.Rule[S, S, A, X]
  val factory: Rules
  import factory._
  /** Lifts a state-transition function into a rule over state S. */
  def apply[A, X](f: S => Result[S, A, X]) = rule(f)
  /** Succeeds with `a`, leaving the state untouched. */
  def unit[A](a: => A) = apply {s => Success(s, a)}
  /** Succeeds with a value derived from the current state, leaving the state untouched. */
  def read[A](f: S => A) = apply {s => Success(s, f(s))}
  /** Succeeds with the current state as the result value. */
  def get = apply {s => Success(s, s)}
  /** Replaces the state with `s`, returning the previous state. */
  def set(s: => S) = apply {oldS => Success(s, oldS)}
  /** Transforms the state with `f`, returning the old state. */
  def update(f: S => S) = apply {s => Success(s, f(s))}
  def nil = unit(Nil)
  def none = unit(None)
  /**Create a rule that succeeds if f(in) is true. */
  def cond(f: S => Boolean) = get filter f
  /**Create a rule that succeeds if all of the given rules succeed.
   @param rules the rules to apply in sequence.
   */
  def allOf[A, X](rules: Seq[Rule[A, X]]) = {
    // rep applies each rule in turn, threading the state; written
    // tail-recursively and short-circuiting on the first Failure/Error.
    def rep(in: S, rules: List[Rule[A, X]],
            results: List[A]): Result[S, List[A], X] = {
      rules match {
        case Nil => Success(in, results.reverse) // results were accumulated in reverse
        case rule :: tl => rule(in) match {
          case Failure => Failure
          case Error(x) => Error(x)
          case Success(out, v) => rep(out, tl, v :: results)
        }
      }
    }
    in: S => rep(in, rules.toList, Nil)
  }
  /**Create a rule that succeeds with a list of all the provided rules that succeed.
   @param rules the rules to apply in sequence.
   */
  def anyOf[A, X](rules: Seq[Rule[A, X]]) = allOf(rules.map(_ ?)) ^^ {
    opts => opts.flatMap(x => x)
  }
  /**Repeatedly apply a rule from initial value until finished condition is met. */
  def repeatUntil[T, X](rule: Rule[T => T, X])(finished: T => Boolean)
                       (initial: T) = apply {
    // more compact using HoF but written this way so it's tail-recursive
    def rep(in: S, t: T): Result[S, T, X] = {
      if (finished(t)) Success(in, t)
      else rule(in) match {
        case Success(out, f) => rep(out, f(t)) // rule yields a T => T step function
        case Failure => Failure
        case Error(x) => Error(x)
      }
    }
    in => rep(in, initial)
  }
}
/** Convenience mixin: a rule factory that acts as its own state-rule factory. */
trait RulesWithState extends Rules with StateRules {
  val factory = this
}
| gilt/jerkson | src/main/scala/com/codahale/jerkson/util/scalax/rules/Rules.scala | Scala | mit | 4,095 |
package org.jetbrains.plugins.scala.debugger.smartStepInto
import com.intellij.debugger.actions.SmartStepTarget
import org.jetbrains.plugins.scala.{DebuggerTests, SlowTests}
import org.jetbrains.plugins.scala.debugger._
import org.jetbrains.plugins.scala.extensions.inReadAction
import org.junit.Assert
import org.junit.experimental.categories.Category
import scala.collection.JavaConverters._
import com.intellij.debugger.engine.SuspendContextImpl
/**
* @author Nikolay.Tropin
*/
// Runs the shared smart-step-into test suite against Scala 2.11.
@Category(Array(classOf[DebuggerTests]))
class SmartStepIntoTest extends SmartStepIntoTestBase {
  override implicit val version: ScalaVersion = Scala_2_11
}
// Scala 2.12 variant: overrides testByNameArgument because the by-name thunk
// compiles to a $anonfun$main$1 method here (vs "apply" in the base test).
@Category(Array(classOf[DebuggerTests]))
class SmartStepIntoTest_212 extends SmartStepIntoTestBase {
  override implicit val version: ScalaVersion = Scala_2_12
  override def testByNameArgument(): Unit = {
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepTargets("inTryBlock(String)", "u: => String")
      checkSmartStepInto("inTryBlock(String)", "ByNameArgument.scala", "inTryBlock", 5)
    }
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepInto("u: => String", "ByNameArgument.scala", "$anonfun$main$1", 14)
    }
  }
}
abstract class SmartStepIntoTestBase extends ScalaDebuggerTestCase {
  // Handler under test; a fresh instance is created on each access.
  protected def handler = new ScalaSmartStepIntoHandler
  /** All smart-step-into targets the handler finds at the current source position. */
  def availableSmartStepTargets(): Seq[SmartStepTarget] =
    handler.findSmartStepTargets(currentSourcePosition).asScala
  /** Asserts the targets at the current position render exactly as `expected`, in order. */
  def checkSmartStepTargets(expected: String*): Unit = {
    val targets = inReadAction {
      availableSmartStepTargets().map(_.getPresentation)
    }
    Assert.assertEquals("Wrong set of smart step targets:", expected, targets)
  }
  /** Smart-steps into the target whose presentation equals `target`, then
    * checks the debugger stopped at the given source/method/line. */
  def checkSmartStepInto(target: String, source: String, methodName: String, line: Int) = {
    val sst = inReadAction {
      availableSmartStepTargets().find(_.getPresentation == target)
    }
    Assert.assertTrue(s"Cannot find such target: $target", sst.isDefined)
    implicit val ctx: SuspendContextImpl = doSmartStepInto(sst.get)
    checkLocation(source, methodName, line)
  }
  /** Issues the step-into command with the target's method filter and waits
    * until the debugger suspends again. */
  private def doSmartStepInto(target: SmartStepTarget): SuspendContextImpl = {
    val filter = inReadAction {
      handler.createMethodFilter(target)
    }
    val stepIntoCommand = getDebugProcess.createStepIntoCommand(currentSuspendContext(), false, filter)
    getDebugProcess.getManagerThread.invokeAndWait(stepIntoCommand)
    waitForBreakpoint()
  }
  // Fixture + test: targets for a constructor call followed by chained method calls.
  addFileWithBreakpoints("ChainedMethodsAndConstructor.scala",
    s"""
       |object ChainedMethodsAndConstructor {
       |  def main(args: Array[String]) {
       |    val s = new A(11).id1().id2.asString $bp
       |  }
       |}
      """.stripMargin.trim()
  )
  addSourceFile("A.scala",
    s"""
      |class A(i: Int) {
      |
      |  val a = i
      |
      |  def id1() = {
      |    val x: A = this
      |    x
      |  }
      |
      |  def id2 = {
      |    val x: A = this
      |    x
      |  }
      |
      |  def asString = "A"
      |}
      |""".stripMargin.trim()
  )
  def testChainedMethodsAndConstructor() {
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepTargets("new A(int)", "id1()", "id2()", "asString()")
      checkSmartStepInto("new A(int)", "A.scala", "<init>", 1)
    }
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepInto("id1()", "A.scala", "id1", 6)
    }
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepInto("id2()", "A.scala", "id2", 11)
    }
  }
  // Fixture + test: targets when the constructed class is an inner class of the object.
  addFileWithBreakpoints("InnerClassAndConstructor.scala",
    s"""
       |object InnerClassAndConstructor {
       |  def main(args: Array[String]) {
       |    val s = new A(10).id1().asString $bp
       |  }
       |
       |  class A(i: Int) {
       |
       |    def id1() = {
       |      val x: A = this
       |      x
       |    }
       |
       |    def asString = "A"
       |  }
       |}
      """.stripMargin.trim()
  )
  def testInnerClassAndConstructor(): Unit = {
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepTargets("new A(int)", "id1()", "asString()")
      checkSmartStepInto("new A(int)", "InnerClassAndConstructor.scala", "<init>", 6)
    }
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepInto("id1()", "InnerClassAndConstructor.scala", "id1", 9)
    }
  }
  // Fixture + test: targets found inside a call's argument expressions.
  addFileWithBreakpoints("InArguments.scala",
    s"""
       |object InArguments {
       |
       |  def foo(a: B, a1: B) = {}
       |
       |  def main(args: Array[String]) {
       |    val a = new B(2)
       |    foo(new B(1), a.id()) $bp
       |  }
       |}
       |
       |class B(i: Int) {
       |
       |  def id() = {
       |    val x: B = this
       |    x
       |  }
       |
       |  def asString = "B"
       |}""".stripMargin.trim()
  )
  def testInArguments(): Unit = {
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepTargets("foo(B, B)", "new B(int)", "id()")
      checkSmartStepInto("new B(int)", "InArguments.scala", "<init>", 11)
    }
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepInto("id()", "InArguments.scala", "id", 14)
    }
  }
  // Fixture + test: infix method call plus companion-object apply.
  addFileWithBreakpoints("InfixAndApply.scala",
    s"""
       |object InfixAndApply {
       |
       |  def main(args: Array[String]) {
       |    val a = new C(2)
       |    a add C(1) $bp
       |  }
       |}
       |
       |class C(i: Int) {
       |
       |  def add(a: C) = {
       |    "adding"
       |  }
       |}
       |
       |object C {
       |  def apply(i: Int) = new C(i)
       |}""".stripMargin.trim()
  )
  def testInfixAndApply(): Unit = {
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepTargets("add(C)", "C.apply(int)")
      checkSmartStepInto("add(C)", "InfixAndApply.scala", "add", 12)
    }
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepInto("C.apply(int)", "InfixAndApply.scala", "apply", 17)
    }
  }
  // Fixture + test: postfix call and extractor (unapply) in a pattern match.
  addFileWithBreakpoints("PostfixAndUnapply.scala",
    s"""
       |object PostfixAndUnapply {
       |
       |  def main(args: Array[String]) {
       |    new D(1) match {
       |      case a @ D(1) => a foo $bp
       |    }
       |  }
       |}
       |
       |class D(val i: Int) {
       |  def foo() = {}
       |
       |}
       |
       |object D {
       |  def unapply(a: D) = Some(a.i)
       |}""".stripMargin.trim()
  )
  def testPostfixAndUnapply(): Unit = {
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepTargets("D.unapply(D)", "foo()")
      checkSmartStepInto("D.unapply(D)", "PostfixAndUnapply.scala", "unapply", 16)
    }
    // stepping into foo() is disabled below until match-statement handling is cleaned up
//    runDebugger("Sample") { //should work after cleaning up match statements
//      waitForBreakpoint()
//      checkSmartStepInto("foo()", "Sample.scala", "foo", 11)
//    }
  }
  // Fixture + test: anonymous class instantiated from a trait, passed as an argument.
  addFileWithBreakpoints("AnonymousClassFromTrait.scala",
    s"""
       |object AnonymousClassFromTrait {
       |
       |  def execute(processor: Processor) = processor.execute()
       |
       |  def main(args: Array[String]) {
       |    execute(new Processor { $bp
       |      val z = 1
       |
       |      override def execute(): Unit = {
       |        "aaa"
       |      }
       |    })
       |  }
       |}
       |
       |trait Processor {
       |  def execute()
       |}""".stripMargin.trim()
  )
  def testAnonymousClassFromTrait(): Unit = {
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepTargets("execute(Processor)", "new Processor()", "new Processor.execute()")
      checkSmartStepInto("new Processor()", "AnonymousClassFromTrait.scala", "<init>", 6)
    }
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepInto("new Processor.execute()", "AnonymousClassFromTrait.scala", "execute", 10)
    }
  }
  // Fixture + test: anonymous subclass of a concrete class with a constructor argument.
  addFileWithBreakpoints("AnonymousClassFromClass.scala",
    s"""
       |object AnonymousClassFromClass {
       |
       |  def execute(processor: ProcessorClass) = processor.execute()
       |
       |  def main(args: Array[String]) {
       |    execute(new ProcessorClass("aa") { $bp
       |      val z = 1
       |
       |      override def execute(): Unit = {
       |        "aaa"
       |      }
       |    })
       |  }
       |}
       |
       |class ProcessorClass(s: String) {
       |  def execute(): Unit = {}
       |}""".stripMargin.trim()
  )
  def testAnonymousClassFromClass(): Unit = {
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepTargets("execute(ProcessorClass)", "new ProcessorClass()", "new ProcessorClass.execute()")
      checkSmartStepInto("new ProcessorClass()", "AnonymousClassFromClass.scala", "<init>", 6)
    }
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepInto("new ProcessorClass.execute()", "AnonymousClassFromClass.scala", "execute", 10)
    }
  }
  // Fixture + test: by-name (=>) argument; overridden in the 2.12 suite because
  // the thunk's compiled method name differs there.
  addFileWithBreakpoints("ByNameArgument.scala",
    s"""
       |object ByNameArgument {
       |
       |  def inTryBlock(u: => String): Unit = {
       |    try {
       |      u
       |    }
       |    catch {
       |      case t: Throwable =>
       |    }
       |  }
       |
       |  def main(args: Array[String]) {
       |    inTryBlock { $bp
       |      val s = "a"
       |      s + "aaa"
       |    }
       |  }
       |}""".stripMargin.trim()
  )
  def testByNameArgument(): Unit = {
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepTargets("inTryBlock(String)", "u: => String")
      checkSmartStepInto("inTryBlock(String)", "ByNameArgument.scala", "inTryBlock", 5)
    }
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepInto("u: => String", "ByNameArgument.scala", "apply", 14)
    }
  }
  // Fixture + test: stepping into a function defined locally inside main (compiled as foo$1).
  addFileWithBreakpoints("LocalFunction.scala",
    s"""
       |object LocalFunction {
       |
       |  def main(args: Array[String]) {
       |    def foo(s: String): Unit = {
       |      println(s)
       |    }
       |
       |    foo("aaa") $bp
       |  }
       |}""".stripMargin.trim()
  )
  def testLocalFunction(): Unit = {
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepTargets("foo(String)")
      checkSmartStepInto("foo(String)", "LocalFunction.scala", "foo$1", 5)
    }
  }
  // Fixture + test: implicit conversion applied to an argument appears as a step target.
  addFileWithBreakpoints("ImplicitConversion.scala",
    s"""
       |import scala.language.implicitConversions
       |
       |object ImplicitConversion {
       |
       |  implicit def string2Int(s: String): Int = Integer.valueOf(s)
       |
       |  def inc(i: Int): Int = {
       |    i + 1
       |  }
       |
       |  def main(args: Array[String]) {
       |    inc("1") $bp
       |  }
       |}""".stripMargin.trim()
  )
  def testImplicitConversion(): Unit = {
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepTargets("inc(int)", "implicit string2Int(String)")
      checkSmartStepInto("implicit string2Int(String)", "ImplicitConversion.scala", "string2Int", 5)
    }
  }
  // Fixture + test: extension method supplied by an implicit class.
  addFileWithBreakpoints("ImplicitClass.scala",
    s"""
       |import scala.language.implicitConversions
       |
       |object ImplicitClass {
       |
       |  implicit class ObjectExt[T](v: T) {
       |    def toOption: Option[T] = Option(v)
       |  }
       |
       |  def main(args: Array[String]) {
       |    "aaa".charAt(1).toOption $bp
       |  }
       |}""".stripMargin.trim()
  )
  def testImplicitClass(): Unit = {
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepTargets("charAt(int)", "implicit toOption()")
      checkSmartStepInto("implicit toOption()", "ImplicitClass.scala", "toOption", 6)
    }
  }
  // Fixture + test: same as ImplicitClass but with a value class, whose method
  // compiles to the static toOption$extension.
  addFileWithBreakpoints("ImplicitValueClass.scala",
    s"""
       |import scala.language.implicitConversions
       |
       |object ImplicitValueClass {
       |
       |  implicit class ObjectExt[T](val v: T) extends AnyVal {
       |    def toOption: Option[T] = Option(v)
       |  }
       |
       |  def main(args: Array[String]) {
       |    "aaa".charAt(1).toOption $bp
       |  }
       |}""".stripMargin.trim()
  )
  def testImplicitValueClass(): Unit = {
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepTargets("charAt(int)", "implicit toOption()")
      checkSmartStepInto("implicit toOption()", "ImplicitValueClass.scala", "toOption$extension", 6)
    }
  }
  // Fixture + test: method values (eta-expansion, partial application, method reference)
  // passed as Function1 arguments.
  addFileWithBreakpoints("MethodValue.scala",
    s"""
       |object MethodValue {
       |  def main(args: Array[String]): Unit = {
       |    val a = new A(Seq(1, 2, 3))
       |    a.update(incr(2) _).update(MethodValue.id[Int](_)).update(a.decr) $bp
       |  }
       |
       |  def incr(i: Int)(j: Int): Int = i + j
       |
       |  def id[T](t: T) = t
       |
       |  class A(var seq: Seq[Int]) {
       |    def update(f: Int => Int) = {
       |      seq = seq.map(f)
       |      this
       |    }
       |    def decr(i: Int) = i - 1
       |  }
       |}
       |""".stripMargin.trim
  )
  def testMethodValue(): Unit = {
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepTargets("update(Function1<Object, Object>)", "incr(int, int)", "update(Function1<Object, Object>)", "id(T)", "update(Function1<Object, Object>)", "decr(int)")
      checkSmartStepInto("id(T)", "MethodValue.scala", "id", 9)
    }
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepInto("decr(int)", "MethodValue.scala", "decr", 16)
    }
    runDebugger() {
      waitForBreakpoint()
      checkSmartStepInto("incr(int, int)", "MethodValue.scala", "incr", 7)
    }
  }
} | jastice/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/debugger/smartStepInto/SmartStepIntoTestBase.scala | Scala | apache-2.0 | 13,400 |
package is.hail.types.encoded
import is.hail.annotations.Region
import is.hail.asm4s._
import is.hail.expr.ir.{EmitCodeBuilder}
import is.hail.types.BaseType
import is.hail.types.physical._
import is.hail.types.virtual._
import is.hail.io.{InputBuffer, OutputBuffer}
import is.hail.utils._
// Shared singletons for the two requiredness variants of EBoolean;
// EBoolean.apply below hands these out instead of allocating.
case object EBooleanOptional extends EBoolean(false)
case object EBooleanRequired extends EBoolean(true)
/** Encoded type for booleans: encodes via OutputBuffer.writeBoolean and
  * decodes via InputBuffer.readBoolean; its decoded physical type is PBoolean.
  */
class EBoolean(override val required: Boolean) extends EFundamentalType {
  // Writes the (already boolean-typed) value straight to the output buffer.
  def _buildFundamentalEncoder(cb: EmitCodeBuilder, pt: PType, v: Value[_], out: Value[OutputBuffer]): Unit = {
    cb += out.writeBoolean(coerce[Boolean](v))
  }
  // Reads one boolean from the input buffer; region is unused for this type.
  def _buildFundamentalDecoder(
    cb: EmitCodeBuilder,
    pt: PType,
    region: Value[Region],
    in: Value[InputBuffer]
  ): Code[Boolean] = in.readBoolean()
  // Skipping just advances past the encoded boolean.
  def _buildSkip(cb: EmitCodeBuilder, r: Value[Region], in: Value[InputBuffer]): Unit = cb += in.skipBoolean()
  override def _compatible(pt: PType): Boolean = pt.isInstanceOf[PBoolean]
  // The requested virtual type is ignored: a boolean always decodes to PBoolean.
  def _decodedPType(requestedType: Type): PType = PBoolean(required)
  def _asIdent = "bool"
  def _toPretty = "EBoolean"
  def setRequired(newRequired: Boolean): EBoolean = EBoolean(newRequired)
}
/** Factory returning the canonical singleton for the requested requiredness. */
object EBoolean {
  def apply(required: Boolean = false): EBoolean =
    if (!required) EBooleanOptional else EBooleanRequired
}
| cseed/hail | hail/src/main/scala/is/hail/types/encoded/EBoolean.scala | Scala | mit | 1,314 |
package spire.math
import org.scalatest.Matchers
import org.scalacheck.Arbitrary._
import org.scalatest._
import prop._
class RationalCheck extends PropSpec with Matchers with GeneratorDrivenPropertyChecks {
  type Q = Rational

  /** Runs a property over one arbitrary rational; a zero denominator is coerced to 1. */
  def rat1(name: String)(f: Q => Unit) =
    property(name) {
      forAll { (nx: Long, _dx: Long) =>
        val dx = if (_dx == 0) 1 else _dx
        f(Rational(nx, dx))
      }
    }

  /** Runs a property over two arbitrary rationals. */
  def rat2(name: String)(f: (Q, Q) => Unit) =
    property(name) {
      forAll { (nx: Long, _dx: Long, ny: Long, _dy: Long) =>
        val dx = if (_dx == 0) 1 else _dx
        val dy = if (_dy == 0) 1 else _dy
        f(Rational(nx, dx), Rational(ny, dy))
      }
    }

  /** Runs a property over three arbitrary rationals. */
  def rat3(name: String)(f: (Q, Q, Q) => Unit) =
    property(name) {
      forAll { (nx: Long, _dx: Long, ny: Long, _dy: Long, nz: Long, _dz: Long) =>
        val dx = if (_dx == 0) 1 else _dx
        val dy = if (_dy == 0) 1 else _dy
        val dz = if (_dz == 0) 1 else _dz
        f(Rational(nx, dx), Rational(ny, dy), Rational(nz, dz))
      }
    }

  // Basic ring/field identities.
  rat1("x + 0 == x") { x: Q => x + Rational(0) shouldBe x }
  rat1("x * 1 == x") { x: Q => x * Rational(1) shouldBe x }
  rat1("x * 0 == 0") { x: Q => x * Rational(0) shouldBe Rational(0) }
  rat1("x.floor <= x.round <= x.ceil") { x: Q =>
    x.floor should be <= x.round
    x.round should be <= x.ceil
  }
  rat1("x + x == 2x") { x: Q => (x + x) shouldBe 2 * x }
  rat1("x - x == 0") { x: Q => x - x shouldBe Rational(0) }
  rat1("x * x == x^2") { x: Q => (x * x) shouldBe x.pow(2) }
  rat1("(x^-1)^3 == x^-3") { x: Q => if (x != 0) x.reciprocal.pow(3) shouldBe x.pow(-3) }
  rat1("x / x == 1") { x: Q => if (x != 0) x / x shouldBe Rational(1) }
  rat2("x + y == y + x") { (x: Q, y: Q) => x + y shouldBe y + x }
  rat2("x - y == -y + x") { (x: Q, y: Q) => x - y shouldBe -y + x }
  rat2("x + y - x == y") { (x: Q, y: Q) => (x + y) - x shouldBe y }
  rat2("x / y == x * (y^-1)") { (x: Q, y: Q) => if (y != 0) x / y shouldBe x * y.reciprocal }
  rat3("(x + y) * z == x * z + y * z") { (x: Q, y: Q, z: Q) => (x + y) * z shouldBe x * z + y * z }

  property("Round-trip Double") {
    forAll("x") { (n: Double) =>
      // Fix: the original body was the bare Boolean `Rational(n).toDouble == n`,
      // which ScalaTest's forAll silently discards -- the property could never fail.
      // Asserting with shouldBe makes the round-trip actually checked.
      Rational(n).toDouble shouldBe n
    }
  }
}
| woparry/spire | tests/src/test/scala/spire/math/RationalCheck.scala | Scala | mit | 2,208 |
// NOTE(review): `m` declares the parameter `x` twice. Given the surrounding
// metadata path (tests/typing/bad/testfile-duplicate_var-1.scala) this appears
// to be an intentionally invalid input for a type-checker test -- do not "fix" it.
class C { def m(x: Int, x: Int){} }
object Main { def main(args: Array[String]) { } }
| tobast/compil-petitscala | tests/typing/bad/testfile-duplicate_var-1.scala | Scala | gpl-3.0 | 86 |
package org.jetbrains.plugins.scala
package lang
package psi
package stubs
import com.intellij.psi.stubs.StubElement
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.packaging.ScPackageContainer
/**
* @author ilyas
*/
trait ScPackageContainerStub extends StubElement[ScPackageContainer] {
  // Presumably the qualifier prefix of the package clause -- TODO confirm against the impl.
  def prefix: String
  // Presumably the trailing name segment owned by this packaging -- TODO confirm.
  def ownNamePart: String
  // Whether the package clause was written explicitly in source -- TODO confirm.
  def isExplicit: Boolean
} | triggerNZ/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/stubs/ScPackageContainerStub.scala | Scala | apache-2.0 | 376 |
/*
* Copyright 2011 Hui Wen Han.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.huiwen.prefz
/** Companion utilities and well-known cursor values. */
object Cursor {
  /** Pairs every id in `seq` with a cursor positioned at that id. */
  def cursorZip(seq: Seq[Long]): Seq[(Long, Cursor)] =
    seq.map { id => (id, Cursor(id)) }

  /** Sentinel cursor at position 0. */
  val End = Cursor(0)

  /** Sentinel cursor at position -1. */
  val Start = Cursor(-1)
}

/** A position marker, ordered by its numeric position. */
case class Cursor(position: Long) extends Ordered[Cursor] {
  /** Orders cursors by ascending position. */
  def compare(that: Cursor): Int = this.position compare that.position

  /** The cursor at the negated position. */
  def reverse: Cursor = Cursor(-position)

  /** The cursor at the absolute value of this position. */
  def magnitude: Cursor = Cursor(math.abs(position))
}
| huiwenhan/PrefStore | src/main/scala/me/huiwen/prefz/Cursor.scala | Scala | apache-2.0 | 978 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.containerpool
import java.time.Instant
import akka.actor.Status.{Failure => FailureMessage}
import akka.actor.{FSM, Props, Stash}
import akka.event.Logging.InfoLevel
import akka.pattern.pipe
import pureconfig.loadConfigOrThrow
import scala.collection.immutable
import spray.json.DefaultJsonProtocol._
import spray.json._
import org.apache.openwhisk.common.{AkkaLogging, Counter, LoggingMarkers, TransactionId}
import org.apache.openwhisk.core.ConfigKeys
import org.apache.openwhisk.core.connector.{
ActivationMessage,
CombinedCompletionAndResultMessage,
CompletionMessage,
ResultMessage
}
import org.apache.openwhisk.core.containerpool.logging.LogCollectingException
import org.apache.openwhisk.core.database.UserContext
import org.apache.openwhisk.core.entity.ExecManifest.ImageName
import org.apache.openwhisk.core.entity._
import org.apache.openwhisk.core.entity.size._
import org.apache.openwhisk.core.invoker.InvokerReactive.{ActiveAck, LogsCollector}
import org.apache.openwhisk.http.Messages
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.{Failure, Success}
// States
// Lifecycle states of the container FSM implemented by ContainerProxy below.
sealed trait ContainerState
case object Uninitialized extends ContainerState // no container yet; accepts Start (prewarm) or Run (cold start)
case object Starting extends ContainerState // waiting for the container factory to finish a prewarm
case object Started extends ContainerState // prewarmed container available, not yet assigned work
case object Running extends ContainerState // at least one activation in flight
case object Ready extends ContainerState // warm and idle (pauses after the pauseGrace timeout)
case object Pausing extends ContainerState
case object Paused extends ContainerState
case object Removing extends ContainerState
// Data
/** Base data type */
/** Base data type */
sealed abstract class ContainerData(val lastUsed: Instant, val memoryLimit: ByteSize, val activeActivationCount: Int) {
  /** When ContainerProxy in this state is scheduled, it may result in a new state (ContainerData)*/
  def nextRun(r: Run): ContainerData
  /**
   * Return Some(container) (for ContainerStarted instances) or None(for ContainerNotStarted instances)
   * Useful for cases where all ContainerData instances are handled, vs cases where only ContainerStarted
   * instances are handled */
  def getContainer: Option[Container]
  /** String to indicate the state of this container after scheduling */
  val initingState: String
  /** Indicates whether this container can service additional activations */
  def hasCapacity(): Boolean
}
/** abstract type to indicate an unstarted container */
sealed abstract class ContainerNotStarted(override val lastUsed: Instant,
                                          override val memoryLimit: ByteSize,
                                          override val activeActivationCount: Int)
    extends ContainerData(lastUsed, memoryLimit, activeActivationCount) {
  override def getContainer = None // no container reference exists yet
  override val initingState = "cold"
}

/** abstract type to indicate a started container */
sealed abstract class ContainerStarted(val container: Container,
                                       override val lastUsed: Instant,
                                       override val memoryLimit: ByteSize,
                                       override val activeActivationCount: Int)
    extends ContainerData(lastUsed, memoryLimit, activeActivationCount) {
  override def getContainer = Some(container) // started states always carry the live container
}
/** trait representing a container that is in use and (potentially) usable by subsequent or concurrent activations */
sealed abstract trait ContainerInUse {
  val activeActivationCount: Int
  val action: ExecutableWhiskAction
  // capacity remains while in-flight activations are below the action's concurrency limit
  def hasCapacity() =
    activeActivationCount < action.limits.concurrency.maxConcurrent
}

/** trait representing a container that is NOT in use and is usable by subsequent activation(s) */
sealed abstract trait ContainerNotInUse {
  def hasCapacity() = true // idle containers can always accept work
}
/** type representing a cold (not running) container */
case class NoData(override val activeActivationCount: Int = 0)
    extends ContainerNotStarted(Instant.EPOCH, 0.B, activeActivationCount)
    with ContainerNotInUse {
  // scheduling a run on a cold container transitions to warming-cold for that action
  override def nextRun(r: Run) = WarmingColdData(r.msg.user.namespace.name, r.action, Instant.now, 1)
}

/** type representing a cold (not running) container with specific memory allocation */
case class MemoryData(override val memoryLimit: ByteSize, override val activeActivationCount: Int = 0)
    extends ContainerNotStarted(Instant.EPOCH, memoryLimit, activeActivationCount)
    with ContainerNotInUse {
  override def nextRun(r: Run) = WarmingColdData(r.msg.user.namespace.name, r.action, Instant.now, 1)
}
/** type representing a prewarmed (running, but unused) container (with a specific memory allocation) */
case class PreWarmedData(override val container: Container,
                         kind: String,
                         override val memoryLimit: ByteSize,
                         override val activeActivationCount: Int = 0)
    extends ContainerStarted(container, Instant.EPOCH, memoryLimit, activeActivationCount)
    with ContainerNotInUse {
  override val initingState = "prewarmed"
  // assigning work to a prewarm transitions to warming for the run's action/namespace
  override def nextRun(r: Run) =
    WarmingData(container, r.msg.user.namespace.name, r.action, Instant.now, 1)
}

/** type representing a prewarm (running, but not used) container that is being initialized (for a specific action + invocation namespace) */
case class WarmingData(override val container: Container,
                       invocationNamespace: EntityName,
                       action: ExecutableWhiskAction,
                       override val lastUsed: Instant,
                       override val activeActivationCount: Int = 0)
    extends ContainerStarted(container, lastUsed, action.limits.memory.megabytes.MB, activeActivationCount)
    with ContainerInUse {
  override val initingState = "warming"
  // subsequent runs stay in this state, bumping the in-flight count and freshness
  override def nextRun(r: Run) = copy(lastUsed = Instant.now, activeActivationCount = activeActivationCount + 1)
}

/** type representing a cold (not yet running) container that is being initialized (for a specific action + invocation namespace) */
case class WarmingColdData(invocationNamespace: EntityName,
                           action: ExecutableWhiskAction,
                           override val lastUsed: Instant,
                           override val activeActivationCount: Int = 0)
    extends ContainerNotStarted(lastUsed, action.limits.memory.megabytes.MB, activeActivationCount)
    with ContainerInUse {
  override val initingState = "warmingCold"
  override def nextRun(r: Run) = copy(lastUsed = Instant.now, activeActivationCount = activeActivationCount + 1)
}
/** type representing a warm container that has already been in use (for a specific action + invocation namespace) */
case class WarmedData(override val container: Container,
                      invocationNamespace: EntityName,
                      action: ExecutableWhiskAction,
                      override val lastUsed: Instant,
                      override val activeActivationCount: Int = 0)
    extends ContainerStarted(container, lastUsed, action.limits.memory.megabytes.MB, activeActivationCount)
    with ContainerInUse {
  override val initingState = "warmed"
  // further runs keep the container warm: bump in-flight count and last-used timestamp
  override def nextRun(r: Run) = copy(lastUsed = Instant.now, activeActivationCount = activeActivationCount + 1)
}
// Events received by the actor
case class Start(exec: CodeExec[_], memoryLimit: ByteSize) // create a prewarm (stem cell) container
case class Run(action: ExecutableWhiskAction, msg: ActivationMessage, retryLogDeadline: Option[Deadline] = None) // execute one activation
case object Remove // destroy the container (deferred until in-flight work completes)
// Events sent by the actor
case class NeedWork(data: ContainerData) // signals the parent that this proxy has capacity for more jobs
case object ContainerPaused
case object ContainerRemoved // when container is destroyed
case object RescheduleJob // job is sent back to parent and could not be processed because container is being destroyed
case class PreWarmCompleted(data: PreWarmedData) // internal: container factory finished creating a container
case class InitCompleted(data: WarmedData) // internal: container initialization succeeded
case object RunCompleted // internal: one activation finished
/**
* A proxy that wraps a Container. It is used to keep track of the lifecycle
* of a container and to guarantee a contract between the client of the container
* and the container itself.
*
* The contract is as follows:
* 1. If action.limits.concurrency.maxConcurrent == 1:
* Only one job is to be sent to the ContainerProxy at one time. ContainerProxy
* will delay all further jobs until a previous job has finished.
*
* 1a. The next job can be sent to the ContainerProxy after it indicates available
* capacity by sending NeedWork to its parent.
*
* 2. If action.limits.concurrency.maxConcurrent > 1:
* Parent must coordinate with ContainerProxy to attempt to send only data.action.limits.concurrency.maxConcurrent
* jobs for concurrent processing.
*
* Since the current job count is only periodically sent to parent, the number of jobs
* sent to ContainerProxy may exceed data.action.limits.concurrency.maxConcurrent,
* in which case jobs are buffered, so that only a max of action.limits.concurrency.maxConcurrent
* are ever sent into the container concurrently. Parent will NOT be signalled to send more jobs until
* buffered jobs are completed, but their order is not guaranteed.
*
* 2a. The next job can be sent to the ContainerProxy after ContainerProxy has "concurrent capacity",
* indicated by sending NeedWork to its parent.
*
* 3. A Remove message can be sent at any point in time. Like multiple jobs though,
* it will be delayed until the currently running job finishes.
*
* @constructor
* @param factory a function generating a Container
* @param sendActiveAck a function sending the activation via active ack
* @param storeActivation a function storing the activation in a persistent store
* @param unusedTimeout time after which the container is automatically thrown away
* @param pauseGrace time to wait for new work before pausing the container
*/
class ContainerProxy(factory: (TransactionId,
String,
ImageName,
Boolean,
ByteSize,
Int,
Option[ExecutableWhiskAction]) => Future[Container],
sendActiveAck: ActiveAck,
storeActivation: (TransactionId, WhiskActivation, UserContext) => Future[Any],
collectLogs: LogsCollector,
instance: InvokerInstanceId,
poolConfig: ContainerPoolConfig,
unusedTimeout: FiniteDuration,
pauseGrace: FiniteDuration)
extends FSM[ContainerState, ContainerData]
with Stash {
implicit val ec = context.system.dispatcher
implicit val logging = new AkkaLogging(context.system.log)
var rescheduleJob = false // true iff actor receives a job but cannot process it because actor will destroy itself
var runBuffer = immutable.Queue.empty[Run] //does not retain order, but does manage jobs that would have pushed past action concurrency limit
//keep a separate count to avoid confusion with ContainerState.activeActivationCount that is tracked/modified only in ContainerPool
var activeCount = 0;
startWith(Uninitialized, NoData())
when(Uninitialized) {
// pre warm a container (creates a stem cell container)
case Event(job: Start, _) =>
factory(
TransactionId.invokerWarmup,
ContainerProxy.containerName(instance, "prewarm", job.exec.kind),
job.exec.image,
job.exec.pull,
job.memoryLimit,
poolConfig.cpuShare(job.memoryLimit),
None)
.map(container => PreWarmCompleted(PreWarmedData(container, job.exec.kind, job.memoryLimit)))
.pipeTo(self)
goto(Starting)
// cold start (no container to reuse or available stem cell container)
case Event(job: Run, _) =>
implicit val transid = job.msg.transid
activeCount += 1
// create a new container
val container = factory(
job.msg.transid,
ContainerProxy.containerName(instance, job.msg.user.namespace.name.asString, job.action.name.asString),
job.action.exec.image,
job.action.exec.pull,
job.action.limits.memory.megabytes.MB,
poolConfig.cpuShare(job.action.limits.memory.megabytes.MB),
Some(job.action))
// container factory will either yield a new container ready to execute the action, or
// starting up the container failed; for the latter, it's either an internal error starting
// a container or a docker action that is not conforming to the required action API
container
.andThen {
case Success(container) =>
// the container is ready to accept an activation; register it as PreWarmed; this
// normalizes the life cycle for containers and their cleanup when activations fail
self ! PreWarmCompleted(
PreWarmedData(container, job.action.exec.kind, job.action.limits.memory.megabytes.MB, 1))
case Failure(t) =>
// the container did not come up cleanly, so disambiguate the failure mode and then cleanup
// the failure is either the system fault, or for docker actions, the application/developer fault
val response = t match {
case WhiskContainerStartupError(msg) => ActivationResponse.whiskError(msg)
case BlackboxStartupError(msg) => ActivationResponse.developerError(msg)
case _ => ActivationResponse.whiskError(Messages.resourceProvisionError)
}
val context = UserContext(job.msg.user)
// construct an appropriate activation and record it in the datastore,
// also update the feed and active ack; the container cleanup is queued
// implicitly via a FailureMessage which will be processed later when the state
// transitions to Running
val activation = ContainerProxy.constructWhiskActivation(job, None, Interval.zero, false, response)
sendActiveAck(
transid,
activation,
job.msg.blocking,
job.msg.rootControllerIndex,
job.msg.user.namespace.uuid,
CombinedCompletionAndResultMessage(transid, activation, instance))
storeActivation(transid, activation, context)
}
.flatMap { container =>
// now attempt to inject the user code and run the action
initializeAndRun(container, job)
.map(_ => RunCompleted)
}
.pipeTo(self)
goto(Running)
}
when(Starting) {
// container was successfully obtained
case Event(completed: PreWarmCompleted, _) =>
context.parent ! NeedWork(completed.data)
goto(Started) using completed.data
// container creation failed
case Event(_: FailureMessage, _) =>
context.parent ! ContainerRemoved
stop()
case _ => delay
}
when(Started) {
case Event(job: Run, data: PreWarmedData) =>
implicit val transid = job.msg.transid
activeCount += 1
initializeAndRun(data.container, job)
.map(_ => RunCompleted)
.pipeTo(self)
goto(Running) using PreWarmedData(data.container, data.kind, data.memoryLimit, 1)
case Event(Remove, data: PreWarmedData) => destroyContainer(data.container)
}
// Running: at least one activation is in flight on this container.
when(Running) {
  // Intermediate state, we were able to start a container
  // and we keep it in case we need to destroy it.
  case Event(completed: PreWarmCompleted, _) => stay using completed.data
  // Init was successful
  case Event(completed: InitCompleted, _: PreWarmedData) =>
    stay using completed.data
  // Init was successful
  case Event(data: WarmedData, _: PreWarmedData) =>
    //in case concurrency supported, multiple runs can begin as soon as init is complete
    context.parent ! NeedWork(data)
    stay using data
  // Run was successful
  case Event(RunCompleted, data: WarmedData) =>
    activeCount -= 1
    //if there are items in runbuffer, process them if there is capacity, and stay; otherwise if we have any pending activations, also stay
    if (requestWork(data) || activeCount > 0) {
      stay using data
    } else {
      goto(Ready) using data
    }
  case Event(job: Run, data: WarmedData)
      if activeCount >= data.action.limits.concurrency.maxConcurrent && !rescheduleJob => //if we are over concurrency limit, and not a failure on resume
    logging.warn(this, s"buffering for container ${data.container}; ${activeCount} activations in flight")
    runBuffer = runBuffer.enqueue(job)
    stay()
  case Event(job: Run, data: WarmedData)
      if activeCount < data.action.limits.concurrency.maxConcurrent && !rescheduleJob => //below the concurrency limit and not a failure on resume: start the run immediately
    activeCount += 1
    implicit val transid = job.msg.transid
    initializeAndRun(data.container, job)
      .map(_ => RunCompleted)
      .pipeTo(self)
    stay() using data
  // Failed after /init (the first run failed)
  case Event(_: FailureMessage, data: PreWarmedData) =>
    activeCount -= 1
    destroyContainer(data.container)
  // Failed for a subsequent /run
  case Event(_: FailureMessage, data: WarmedData) =>
    activeCount -= 1
    destroyContainer(data.container)
  // Failed at getting a container for a cold-start run
  case Event(_: FailureMessage, _) =>
    activeCount -= 1
    context.parent ! ContainerRemoved
    rejectBuffered()
    stop()
  // Anything else is stashed until the state settles.
  case _ => delay
}
// Ready: a warm, idle container; pauses itself after `pauseGrace` of inactivity.
when(Ready, stateTimeout = pauseGrace) {
  case Event(job: Run, data: WarmedData) =>
    implicit val transid = job.msg.transid
    activeCount += 1
    initializeAndRun(data.container, job)
      .map(_ => RunCompleted)
      .pipeTo(self)
    goto(Running) using data
  // pause grace timed out
  case Event(StateTimeout, data: WarmedData) =>
    data.container.suspend()(TransactionId.invokerNanny).map(_ => ContainerPaused).pipeTo(self)
    goto(Pausing)
  case Event(Remove, data: WarmedData) => destroyContainer(data.container)
}
// Pausing: waiting for the container runtime to confirm suspension.
when(Pausing) {
  case Event(ContainerPaused, data: WarmedData) => goto(Paused)
  // Suspension failed; the container is no longer trustworthy, remove it.
  case Event(_: FailureMessage, data: WarmedData) => destroyContainer(data.container)
  case _ => delay
}
// Paused: suspended warm container; evicted after `unusedTimeout` of disuse.
when(Paused, stateTimeout = unusedTimeout) {
  case Event(job: Run, data: WarmedData) =>
    implicit val transid = job.msg.transid
    activeCount += 1
    data.container
      .resume()
      .andThen {
        // Sending the message to self on a failure will cause the message
        // to ultimately be sent back to the parent (which will retry it)
        // when container removal is done.
        case Failure(_) =>
          rescheduleJob = true
          self ! job
      }
      .flatMap(_ => initializeAndRun(data.container, job))
      .map(_ => RunCompleted)
      .pipeTo(self)
    goto(Running) using data
  // container is reclaimed by the pool or it has become too old
  case Event(StateTimeout | Remove, data: WarmedData) =>
    rescheduleJob = true // to suppress sending message to the pool and not double count
    destroyContainer(data.container)
}
// Removing: the container is being torn down; no new work is accepted.
when(Removing) {
  case Event(job: Run, _) =>
    // Send the job back to the pool to be rescheduled
    context.parent ! job
    stay
  // Removal finished (or failed terminally): stop this proxy actor.
  case Event(ContainerRemoved, _) => stop()
  case Event(_: FailureMessage, _) => stop()
}
// Unstash all messages stashed while in intermediate state
onTransition {
  case _ -> Started => unstashAll()
  case _ -> Ready => unstashAll()
  case _ -> Paused => unstashAll()
  case _ -> Removing => unstashAll()
}
// Start the FSM (required by Akka FSM after all states are declared).
initialize()
/**
 * Either process the run buffer or signal the parent pool to send work.
 *
 * @param newData the current warmed-container state
 * @return true if a buffered job was dequeued and is being processed,
 *         false if the pool was signalled (or no capacity was available)
 */
def requestWork(newData: WarmedData): Boolean = {
  // Only replay buffered work / request new work while below the
  // action's concurrency limit.
  val hasCapacity = activeCount < newData.action.limits.concurrency.maxConcurrent
  if (hasCapacity) {
    runBuffer.dequeueOption.fold {
      // Nothing buffered: advertise free capacity to the pool.
      context.parent ! NeedWork(newData)
      false
    } {
      case (buffered, remaining) =>
        // Replay one buffered job to ourselves; the rest stays queued.
        runBuffer = remaining
        self ! buffered
        true
    }
  } else {
    false
  }
}
/** Delays all incoming messages until unstashAll() is called */
def delay = {
  // Stash the current message; it is re-delivered on the next unstashAll().
  stash()
  stay
}
/**
 * Destroys the container after unpausing it if needed. Can be used
 * as a state progression as it goes to Removing.
 *
 * @param container the container to destroy
 */
def destroyContainer(container: Container) = {
  // Tell the pool either that the container is gone, or (when rescheduleJob
  // is set) that its job must be rescheduled instead of being counted as lost.
  if (!rescheduleJob) {
    context.parent ! ContainerRemoved
  } else {
    context.parent ! RescheduleJob
  }
  rejectBuffered()
  // A paused container must be resumed before it can be destroyed.
  val unpause = stateName match {
    case Paused => container.resume()(TransactionId.invokerNanny)
    case _ => Future.successful(())
  }
  unpause
    .flatMap(_ => container.destroy()(TransactionId.invokerNanny))
    .map(_ => ContainerRemoved)
    .pipeTo(self)
  goto(Removing)
}
/**
 * Return any buffered jobs to parent, in case buffer is not empty at removal/error time.
 */
def rejectBuffered() = {
  // Resend each buffered item to the pool so it can be rescheduled elsewhere,
  // then clear the buffer.
  if (!runBuffer.isEmpty) {
    logging.info(this, s"resending ${runBuffer.size} buffered jobs to parent on container removal")
    for (buffered <- runBuffer) {
      context.parent ! buffered
    }
    runBuffer = immutable.Queue.empty[Run]
  }
}
/**
 * Runs the job, initialize first if necessary.
 * Completes the job by:
 * 1. sending an activate ack,
 * 2. fetching the logs for the run,
 * 3. indicating the resource is free to the parent pool,
 * 4. recording the result to the data store
 *
 * @param container the container to run the job on
 * @param job the job to run
 * @return a future completing after logs have been collected and
 *         added to the WhiskActivation
 */
def initializeAndRun(container: Container, job: Run)(implicit tid: TransactionId): Future[WhiskActivation] = {
  val actionTimeout = job.action.limits.timeout.duration
  // Split activation arguments into init-time environment and run-time parameters.
  val (env, parameters) = ContainerProxy.partitionArguments(job.msg.content, job.msg.initArgs)
  val environment = Map(
    "namespace" -> job.msg.user.namespace.name.toJson,
    "action_name" -> job.msg.action.qualifiedNameWithLeadingSlash.toJson,
    "activation_id" -> job.msg.activationId.toString.toJson,
    "transaction_id" -> job.msg.transid.id.toJson)
  // if the action requests the api key to be injected into the action context, add it here;
  // treat a missing annotation as requesting the api key for backward compatibility
  val authEnvironment = {
    if (job.action.annotations.isTruthy(Annotations.ProvideApiKeyAnnotationName, valueForNonExistent = true)) {
      job.msg.user.authkey.toEnvironment.fields
    } else Map.empty
  }
  // Only initialize iff we haven't yet warmed the container
  val initialize = stateData match {
    case data: WarmedData =>
      Future.successful(None)
    case _ =>
      // Environment variables are exposed to the runtime with an __OW_ prefix.
      val owEnv = (authEnvironment ++ environment + ("deadline" -> (Instant.now.toEpochMilli + actionTimeout.toMillis).toString.toJson)) map {
        case (key, value) => "__OW_" + key.toUpperCase -> value
      }
      container
        .initialize(
          job.action.containerInitializer(env ++ owEnv),
          actionTimeout,
          job.action.limits.concurrency.maxConcurrent)
        .map(Some(_))
  }
  val activation: Future[WhiskActivation] = initialize
    .flatMap { initInterval =>
      //immediately setup warmedData for use (before first execution) so that concurrent actions can use it asap
      if (initInterval.isDefined) {
        self ! InitCompleted(WarmedData(container, job.msg.user.namespace.name, job.action, Instant.now, 1))
      }
      val env = authEnvironment ++ environment ++ Map(
        // compute deadline on invoker side avoids discrepancies inside container
        // but potentially under-estimates actual deadline
        "deadline" -> (Instant.now.toEpochMilli + actionTimeout.toMillis).toString.toJson)
      container
        .run(parameters, env.toJson.asJsObject, actionTimeout, job.action.limits.concurrency.maxConcurrent)(
          job.msg.transid)
        .map {
          case (runInterval, response) =>
            // Fold the init time into the reported interval so the recorded
            // duration covers init + run.
            val initRunInterval = initInterval
              .map(i => Interval(runInterval.start.minusMillis(i.duration.toMillis), runInterval.end))
              .getOrElse(runInterval)
            ContainerProxy.constructWhiskActivation(
              job,
              initInterval,
              initRunInterval,
              runInterval.duration >= actionTimeout,
              response)
        }
    }
    .recover {
      // /init failed: record the failure as the activation result.
      case InitializationError(interval, response) =>
        ContainerProxy.constructWhiskActivation(
          job,
          Some(interval),
          interval,
          interval.duration >= actionTimeout,
          response)
      case t =>
        // Actually, this should never happen - but we want to make sure to not miss a problem
        logging.error(this, s"caught unexpected error while running activation: ${t}")
        ContainerProxy.constructWhiskActivation(
          job,
          None,
          Interval.zero,
          false,
          ActivationResponse.whiskError(Messages.abnormalRun))
    }
  // When logs are collected, result and completion acks are sent separately.
  val splitAckMessagesPendingLogCollection = collectLogs.logsToBeCollected(job.action)
  // Sending an active ack is an asynchronous operation. The result is forwarded as soon as
  // possible for blocking activations so that dependent activations can be scheduled. The
  // completion message which frees a load balancer slot is sent after the active ack future
  // completes to ensure proper ordering.
  val sendResult = if (job.msg.blocking) {
    activation.map { result =>
      val msg =
        if (splitAckMessagesPendingLogCollection) ResultMessage(tid, result)
        else CombinedCompletionAndResultMessage(tid, result, instance)
      sendActiveAck(tid, result, job.msg.blocking, job.msg.rootControllerIndex, job.msg.user.namespace.uuid, msg)
    }
  } else {
    // For non-blocking request, do not forward the result.
    if (splitAckMessagesPendingLogCollection) Future.successful(())
    else
      activation.map { result =>
        val msg = CompletionMessage(tid, result, instance)
        sendActiveAck(tid, result, job.msg.blocking, job.msg.rootControllerIndex, job.msg.user.namespace.uuid, msg)
      }
  }
  val context = UserContext(job.msg.user)
  // Adds logs to the raw activation.
  val activationWithLogs: Future[Either[ActivationLogReadingError, WhiskActivation]] = activation
    .flatMap { activation =>
      // Skips log collection entirely, if the limit is set to 0
      if (!splitAckMessagesPendingLogCollection) {
        Future.successful(Right(activation))
      } else {
        val start = tid.started(this, LoggingMarkers.INVOKER_COLLECT_LOGS, logLevel = InfoLevel)
        collectLogs(tid, job.msg.user, activation, container, job.action)
          .andThen {
            case Success(_) => tid.finished(this, start)
            case Failure(t) => tid.failed(this, start, s"reading logs failed: $t")
          }
          .map(logs => Right(activation.withLogs(logs)))
          .recover {
            // Partial logs were recovered; surface them with the error.
            case LogCollectingException(logs) =>
              Left(ActivationLogReadingError(activation.withLogs(logs)))
            case _ =>
              Left(ActivationLogReadingError(activation.withLogs(ActivationLogs(Vector(Messages.logFailure)))))
          }
      }
    }
  activationWithLogs
    .map(_.fold(_.activation, identity))
    .foreach { activation =>
      // Sending the completion message to the controller after the active ack ensures proper ordering
      // (result is received before the completion message for blocking invokes).
      if (splitAckMessagesPendingLogCollection) {
        sendResult.onComplete(
          _ =>
            sendActiveAck(
              tid,
              activation,
              job.msg.blocking,
              job.msg.rootControllerIndex,
              job.msg.user.namespace.uuid,
              CompletionMessage(tid, activation, instance)))
      }
      // Storing the record. Entirely asynchronous and not waited upon.
      storeActivation(tid, activation, context)
    }
  // Disambiguate activation errors and transform the Either into a failed/successful Future respectively.
  activationWithLogs.flatMap {
    case Right(act) if !act.response.isSuccess && !act.response.isApplicationError =>
      Future.failed(ActivationUnsuccessfulError(act))
    case Left(error) => Future.failed(error)
    case Right(act) => Future.successful(act)
  }
}
}
/** Configured lifecycle timeouts: idle-container eviction and the pause grace period (see ContainerProxy.props defaults). */
final case class ContainerProxyTimeoutConfig(idleContainer: FiniteDuration, pauseGrace: FiniteDuration)
object ContainerProxy {
  /**
   * Creates Props for a ContainerProxy actor.
   *
   * @param factory creates a new container (takes transid, name, image, userProvidedImage,
   *                memory limit, cpu shares and optionally the action)
   * @param ack used to send active acks back to the controller
   * @param store persists a WhiskActivation record
   * @param collectLogs fetches the logs produced by a run
   * @param instance identity of this invoker
   * @param poolConfig pool-level container configuration
   * @param unusedTimeout how long a paused container may stay unused before removal
   * @param pauseGrace how long a ready container may idle before being paused
   */
  def props(factory: (TransactionId,
                      String,
                      ImageName,
                      Boolean,
                      ByteSize,
                      Int,
                      Option[ExecutableWhiskAction]) => Future[Container],
            ack: ActiveAck,
            store: (TransactionId, WhiskActivation, UserContext) => Future[Any],
            collectLogs: LogsCollector,
            instance: InvokerInstanceId,
            poolConfig: ContainerPoolConfig,
            unusedTimeout: FiniteDuration = timeouts.idleContainer,
            pauseGrace: FiniteDuration = timeouts.pauseGrace) =
    Props(new ContainerProxy(factory, ack, store, collectLogs, instance, poolConfig, unusedTimeout, pauseGrace))
  // Needs to be thread-safe as it's used by multiple proxies concurrently.
  private val containerCount = new Counter
  // Lifecycle timeouts loaded once from configuration at class-load time.
  val timeouts = loadConfigOrThrow[ContainerProxyTimeoutConfig](ConfigKeys.containerProxyTimeouts)
  /**
   * Generates a unique container name.
   *
   * @param prefix the container name's prefix
   * @param suffix the container name's suffix
   * @return a unique container name
   */
  def containerName(instance: InvokerInstanceId, prefix: String, suffix: String): String = {
    // Only letters, digits and underscores survive sanitization.
    def isAllowed(c: Char): Boolean = c.isLetterOrDigit || c == '_'
    val sanitizedPrefix = prefix.filter(isAllowed)
    val sanitizedSuffix = suffix.filter(isAllowed)
    // Uniqueness comes from the monotonically increasing containerCount.
    s"${ContainerFactory.containerNamePrefix(instance)}_${containerCount.next()}_${sanitizedPrefix}_${sanitizedSuffix}"
  }
  /**
   * Creates a WhiskActivation ready to be sent via active ack.
   *
   * @param job the job that was executed
   * @param initInterval the time spent initializing, if any
   * @param totalInterval the time it took to execute the job
   * @param isTimeout whether the run exceeded the action's time limit
   * @param response the response to return to the user
   * @return a WhiskActivation to be sent to the user
   */
  def constructWhiskActivation(job: Run,
                               initInterval: Option[Interval],
                               totalInterval: Interval,
                               isTimeout: Boolean,
                               response: ActivationResponse) = {
    val causedBy = Some {
      if (job.msg.causedBySequence) {
        Parameters(WhiskActivation.causedByAnnotation, JsString(Exec.SEQUENCE))
      } else {
        // emit the internal system hold time as the 'wait' time, but only for non-sequence
        // actions, since the transid start time for a sequence does not correspond
        // with a specific component of the activation but the entire sequence;
        // it will require some work to generate a new transaction id for a sequence
        // component - however, because the trace of activations is recorded in the parent
        // sequence, a client can determine the queue time for sequences that way
        val end = initInterval.map(_.start).getOrElse(totalInterval.start)
        Parameters(
          WhiskActivation.waitTimeAnnotation,
          Interval(job.msg.transid.meta.start, end).duration.toMillis.toJson)
      }
    }
    // Init time annotation is only present when an /init actually happened.
    val initTime = {
      initInterval.map(initTime => Parameters(WhiskActivation.initTimeAnnotation, initTime.duration.toMillis.toJson))
    }
    val binding =
      job.msg.action.binding.map(f => Parameters(WhiskActivation.bindingAnnotation, JsString(f.asString)))
    WhiskActivation(
      activationId = job.msg.activationId,
      namespace = job.msg.user.namespace.name.toPath,
      subject = job.msg.user.subject,
      cause = job.msg.cause,
      name = job.action.name,
      version = job.action.version,
      start = totalInterval.start,
      end = totalInterval.end,
      duration = Some(totalInterval.duration.toMillis),
      response = response,
      annotations = {
        Parameters(WhiskActivation.limitsAnnotation, job.action.limits.toJson) ++
          Parameters(WhiskActivation.pathAnnotation, JsString(job.action.fullyQualifiedName(false).asString)) ++
          Parameters(WhiskActivation.kindAnnotation, JsString(job.action.exec.kind)) ++
          Parameters(WhiskActivation.timeoutAnnotation, JsBoolean(isTimeout)) ++
          causedBy ++ initTime ++ binding
      })
  }
  /**
   * Partitions the activation arguments into two JsObject instances. The first is exported as intended for export
   * by the action runtime to the environment. The second is passed on as arguments to the action.
   *
   * @param content the activation arguments
   * @param initArgs set of parameters to treat as initialization arguments
   * @return A partition of the arguments into an environment variables map and the JsObject argument to the action
   */
  def partitionArguments(content: Option[JsObject], initArgs: Set[String]): (Map[String, JsValue], JsObject) = {
    content match {
      case None => (Map.empty, JsObject.empty)
      // No init args declared: everything is a run-time argument.
      case Some(js) if initArgs.isEmpty => (Map.empty, js)
      case Some(js) =>
        val (env, args) = js.fields.partition(k => initArgs.contains(k._1))
        (env, JsObject(args))
    }
  }
}
/** Indicates that something went wrong with an activation and the container should be removed */
trait ActivationError extends Exception {
  // The activation record associated with the failure.
  val activation: WhiskActivation
}
/** Indicates an activation with a non-successful response (neither success nor application error) */
case class ActivationUnsuccessfulError(activation: WhiskActivation) extends ActivationError
/** Indicates reading logs for an activation failed (terminally, truncated); carries the activation with whatever logs were recovered */
case class ActivationLogReadingError(activation: WhiskActivation) extends ActivationError
| openwhisk/openwhisk | core/invoker/src/main/scala/org/apache/openwhisk/core/containerpool/ContainerProxy.scala | Scala | apache-2.0 | 35,469 |
/* sbt -- Simple Build Tool
* Copyright 2008, 2009, 2010, 2011 Mark Harrah
*/
package sbt
import ProjectNavigation._
import Project.updateCurrent
import Keys.sessionSettings
import CommandSupport.logger
import complete.{DefaultParsers, Parser}
import DefaultParsers._
import java.net.URI
object ProjectNavigation
{
	/**
	 * Parser for the project navigation command.
	 * Fails when no build is loaded (no session settings attached to the State);
	 * otherwise delegates to a ProjectNavigation instance built around `s`.
	 * Note: rewritten from `s get sessionSettings isEmpty`, which relied on
	 * discouraged postfix-operator syntax, to explicit method calls.
	 */
	def command(s: State): Parser[() => State] =
		if (s.get(sessionSettings).isEmpty) failure("No project loaded") else (new ProjectNavigation(s)).command
}
/** Implements the interactive `project` command: switching projects and builds within a loaded sbt session. */
final class ProjectNavigation(s: State)
{
	val extracted = Project extract s
	import extracted.{currentRef, structure, session}
	/** Updates the session's current project to (nuri, nid), reusing the current Eval when staying in the same build. */
	def setProject(nuri: URI, nid: String) =
	{
		val neval = if(currentRef.build == nuri) session.currentEval else mkEval(nuri)
		updateCurrent(s.put(sessionSettings, session.setCurrent(nuri, nid, neval)))
	}
	// Lazily constructs an Eval for the given build unit.
	def mkEval(nuri: URI) = Load.lazyEval(structure.units(nuri).unit)
	// Resolves the root project id of a build.
	def getRoot(uri: URI) = Load.getRootProject(structure.units)(uri)
	/** Applies the parsed navigation action: no argument shows the current project; otherwise switch build or project. */
	def apply(action: Option[ResolvedReference]): State =
		action match
		{
			case None => show(); s
			case Some(BuildRef(uri)) => changeBuild(uri)
			case Some(ProjectRef(uri, id)) => selectProject(uri, id)
			/* else if(to.forall(_ == '.'))
				if(to.length > 1) gotoParent(to.length - 1, nav, s) else s */ // semantics currently undefined
		}
	// Prints the current project and its build URI.
	def show(): Unit = logger(s).info(currentRef.project + " (in build " + currentRef.build + ")")
	/** Switches to project `to` in build `uri`, failing if the project is not defined there. */
	def selectProject(uri: URI, to: String): State =
		if( structure.units(uri).defined.contains(to) )
			setProject(uri, to)
		else
			fail("Invalid project name '" + to + "' in build " + uri + " (type 'projects' to list available projects).")
	/** Switches to the root project of `newBuild`, failing if the build is unknown. */
	def changeBuild(newBuild: URI): State =
		if(structure.units contains newBuild)
			setProject(newBuild, getRoot(newBuild))
		else
			fail("Invalid build unit '" + newBuild + "' (type 'projects' to list available builds).")
	// Logs the error and marks the State as failed.
	def fail(msg: String): State =
	{
		logger(s).error(msg)
		s.fail
	}
	import complete.Parser._
	import complete.Parsers._
	// Parses an optional project/build reference; a leading '/' selects the current build's root project.
	val parser: Parser[Option[ResolvedReference]] =
	{
		val reference = Act.resolvedReference(structure.index.keyIndex, currentRef.build, success(()))
		val root = token('/' ^^^ rootRef)
		success(None) | some( token(Space) ~> (root | reference) )
	}
	def rootRef = ProjectRef(currentRef.build, getRoot(currentRef.build))
	// The complete command: parse a reference, then apply it for an updated State.
	val command: Parser[() => State] = Command.applyEffect(parser)(apply)
} | ornicar/xsbt | main/ProjectNavigation.scala | Scala | bsd-3-clause | 2,383 |
/*
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.spark.kernel.protocol.v5.relay
import akka.pattern.ask
import akka.util.Timeout
import com.ibm.spark.communication.security.SecurityActorType
import com.ibm.spark.kernel.protocol.v5.MessageType.MessageType
import com.ibm.spark.kernel.protocol.v5.content.ShutdownRequest
import com.ibm.spark.kernel.protocol.v5.kernel.ActorLoader
import com.ibm.spark.kernel.protocol.v5.{KernelMessage, MessageType, _}
import com.ibm.spark.utils.MessageLogSupport
import scala.collection.immutable.HashMap
import scala.concurrent.duration._
import scala.util.{Random, Failure, Success}
/**
* This class is meant to be a relay for send KernelMessages through kernel
* system.
* @param actorLoader The ActorLoader used by this class for finding actors for
* relaying messages
* @param incomingSpecialCases The special cases for incoming messages
* @param outgoingSpecialCases The special cases for outgoing messages
* @param useSignatureManager Whether or not to use signature verification and
* generation
*/
case class KernelMessageRelay(
  actorLoader: ActorLoader,
  useSignatureManager: Boolean,
  incomingSpecialCases: Map[String, String] = new HashMap[String, String](),
  outgoingSpecialCases: Map[String, String] = new HashMap[String, String]()
) extends OrderedSupport with MessageLogSupport {
  // NOTE: Required to provide the execution context for futures with akka
  import context._
  // NOTE: Required for ask (?) to function... maybe can define elsewhere?
  implicit val timeout = Timeout(5.seconds)
  // Flag indicating if can receive messages (or add them to buffer)
  var isReady = false
  // Convenience constructor with signature verification enabled.
  def this(actorLoader: ActorLoader) =
    this(actorLoader, true)
  /**
   * Relays a KernelMessage to a specific actor to handle that message.
   *
   * @param messageType The enumeration representing the message type
   * @param kernelMessage The message to relay
   */
  private def relay(messageType: MessageType, kernelMessage: KernelMessage) = {
    logger.debug("Relaying message type of " + messageType.toString)
    logKernelMessageAction("Relaying", kernelMessage)
    actorLoader.load(messageType) ! kernelMessage
  }
  // Relays an incoming message, mapping its type through incomingSpecialCases first.
  private def incomingRelay(kernelMessage: KernelMessage) = {
    var messageTypeString = kernelMessage.header.msg_type
    // If this is a special case, transform the message type accordingly
    if (incomingSpecialCases.contains(messageTypeString)) {
      logger.debug(s"$messageTypeString is a special incoming case!")
      messageTypeString = incomingSpecialCases(messageTypeString)
    }
    relay(MessageType.withName(messageTypeString), kernelMessage)
  }
  // Relays an outgoing message, mapping its type through outgoingSpecialCases first.
  private def outgoingRelay(kernelMessage: KernelMessage) = {
    var messageTypeString = kernelMessage.header.msg_type
    // If this is a special case, transform the message type accordingly
    if (outgoingSpecialCases.contains(messageTypeString)) {
      logger.debug(s"$messageTypeString is a special outgoing case!")
      messageTypeString = outgoingSpecialCases(messageTypeString)
    }
    relay(MessageType.withName(messageTypeString), kernelMessage)
  }
  /**
   * This actor will receive and handle two types; ZMQMessage and KernelMessage.
   * These messages will be forwarded to the actors that are responsible for them.
   */
  override def receive = {
    // TODO: How to restore this when the actor dies?
    // Update ready status
    case ready: Boolean =>
      isReady = ready
      if (isReady) {
        logger.info("Unstashing all messages received!")
        unstashAll()
        logger.info("Relay is now fully ready to receive messages!")
      } else {
        logger.info("Relay is now disabled!")
      }
    // Add incoming messages (when not ready) to buffer to be processed
    // (ShutdownRequest bypasses the readiness gate in the next case).
    case (zmqStrings: Seq[_], kernelMessage: KernelMessage) if !isReady && kernelMessage.header.msg_type != ShutdownRequest.toTypeString =>
      logger.info("Not ready for messages! Stashing until ready!")
      stash()
    // Assuming these messages are incoming messages
    case (zmqStrings: Seq[_], kernelMessage: KernelMessage) if isReady || kernelMessage.header.msg_type == ShutdownRequest.toTypeString =>
      startProcessing()
      if (useSignatureManager) {
        logger.trace(s"Verifying signature for incoming message " +
          s"${kernelMessage.header.msg_id}")
        val signatureManager =
          actorLoader.load(SecurityActorType.SignatureManager)
        val signatureVerificationFuture = signatureManager ? (
          (kernelMessage.signature, zmqStrings)
        )
        // Every branch must call finishedProcessing() so OrderedSupport can
        // move on to the next queued message.
        signatureVerificationFuture.mapTo[Boolean].onComplete {
          case Success(true) =>
            incomingRelay(kernelMessage)
            finishedProcessing()
          case Success(false) =>
            // TODO: Figure out what the failure message structure should be!
            logger.error(s"Invalid signature received from message " +
              s"${kernelMessage.header.msg_id}!")
            finishedProcessing()
          case Failure(t) =>
            logger.error("Failure when verifying signature!", t)
            finishedProcessing()
        }
      } else {
        logger.debug(s"Relaying incoming message " +
          s"${kernelMessage.header.msg_id} without SignatureManager")
        incomingRelay(kernelMessage)
        finishedProcessing()
      }
    // Assuming all kernel messages without zmq strings are outgoing
    case kernelMessage: KernelMessage =>
      startProcessing()
      if (useSignatureManager) {
        logger.trace(s"Creating signature for outgoing message " +
          s"${kernelMessage.header.msg_id}")
        val signatureManager = actorLoader.load(SecurityActorType.SignatureManager)
        val signatureInsertFuture = signatureManager ? kernelMessage
        // TODO: Handle error case for mapTo and non-present onFailure
        // NOTE(review): unlike the incoming path above, a failed signature
        // insertion never calls finishedProcessing() here, which would stall
        // OrderedSupport's message processing — confirm and consider handling
        // the failure case with onComplete.
        signatureInsertFuture.mapTo[KernelMessage] onSuccess {
          case message =>
            outgoingRelay(message)
            finishedProcessing()
        }
      } else {
        logger.debug(s"Relaying outgoing message " +
          s"${kernelMessage.header.msg_id} without SignatureManager")
        outgoingRelay(kernelMessage)
        finishedProcessing()
      }
  }
  // Declares which message shapes participate in OrderedSupport's ordering.
  override def orderedTypes(): Seq[Class[_]] = {
    Seq(classOf[(Seq[_], KernelMessage)])
  }
}
| slowenthal/spark-kernel | kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/relay/KernelMessageRelay.scala | Scala | apache-2.0 | 6,942 |
package org.alitouka.spark.dbscan.spatial
import org.alitouka.spark.dbscan.spatial.rdd.PointsPartitionedByBoxesRDD
import org.alitouka.spark.dbscan.{SuiteBase, DbscanSettings}
// Exercises DistanceAnalyzer's close-point detection within and across partitioning boxes.
// NOTE(review): these tests only print results; the TODO assertions below are still missing.
class FindClosePointsSuite extends SuiteBase {
  // Small 2D dataset with clusters of points closer than epsilon = 0.4.
  val dataset1 = sc.parallelize(Array (
    new Point (0.0, 0.0), new Point (0.5, 4.0), new Point (4.0, 2.5),
    new Point (0.5, 0.5), new Point (0.5, 0.9), new Point (0.9, 0.9),
    new Point (0.5, 1.3),
    new Point (1.1, 0.9),
    new Point (1.1, 1.1), new Point (1.3, 1.3),
    new Point (2.5, 1.5), new Point (2.9, 1.5), new Point (3.3, 1.5)
  ))
  // Tiny dataset used by the epsilon = 0.8 tests below.
  val dataset1_1 = sc.parallelize(Array ( new Point (0.0, 0.0), new Point (1.0, 0.0),
    new Point (0.5, 1.0), new Point (0.5, 0.5), new Point (0.5, 5.0) ))
  val settings = new DbscanSettings ().withEpsilon (0.4)
  val partitionedData = PointsPartitionedByBoxesRDD (dataset1, dbscanSettings = settings)
  val boxes = partitionedData.boxes
  val boundingBox = partitionedData.boundingBox
  test ("DistanceAnalyzer should find close points within and across boxes") {
    val distanceAnalyzer = new DistanceAnalyzer (settings)
    println ("All boxes:")
    boxes.foreach ( println )
    println ("----------\n")
    println ("All points:")
    partitionedData.collect ().foreach ( x => println (x._2) )
    println ("----------\n")
    println ("Indexed points:")
    PointsPartitionedByBoxesRDD.extractPointIdsAndCoordinates(partitionedData).collect ().foreach ( println )
    println ("----------\n")
    // Pairs found inside a single partition box.
    val closePointsWithinBoxes = distanceAnalyzer.countClosePointsWithinEachBox(partitionedData)
    println ("Pairs of close points within boxes:")
    closePointsWithinBoxes.collect ().foreach( println )
    println ("----------\n")
    // Points within epsilon of a box boundary are candidates for cross-box pairs.
    val pointsCloseToBounds = distanceAnalyzer.findPointsCloseToBoxBounds(partitionedData, partitionedData.boxes, settings.epsilon)
    println ("Points close to box boundaries:")
    pointsCloseToBounds.collect ().foreach( println )
    println ("----------\n")
    val closePointsAcrossBoxes = distanceAnalyzer.countClosePointsInDifferentBoxes(pointsCloseToBounds, partitionedData.boxes, settings.epsilon)
    println ("Close points which reside in different boxes:")
    closePointsAcrossBoxes.collect ().foreach( println )
    println ("----------\n")
    // TODO: add assertions
  }
  test("DistanceAnalyzer should find close points properly") {
    val settings = new DbscanSettings ().withEpsilon(0.8)
    val partitionedAndSortedData = PointsPartitionedByBoxesRDD (dataset1_1, dbscanSettings = settings)
    val distanceAnalyzer = new DistanceAnalyzer(settings)
    val closePointTuples = distanceAnalyzer.countClosePoints(partitionedAndSortedData)
    closePointTuples.foreach( println )
    // TODO: add assertions
  }
  test("DistanceAnalyzer should Count neighbors of points properly") {
    val settings = new DbscanSettings ().withEpsilon(0.8)
    val partitionedAndSortedData = PointsPartitionedByBoxesRDD (dataset1_1, dbscanSettings = settings)
    val distanceAnalyzer = new DistanceAnalyzer(settings)
    val pointsWithCounts = distanceAnalyzer.countNeighborsForEachPoint(partitionedAndSortedData)
    pointsWithCounts.foreach( println )
    // TODO: add assertions
  }
}
| zerosign/spark_dbscan | src/test/scala/org/alitouka/spark/dbscan/spatial/FindClosePointsSuite.scala | Scala | apache-2.0 | 3,237 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package views.notifications.v1m0
import models.notifications.NotificationParams
import org.scalatest.MustMatchers
import play.api.i18n.Messages
import utils.AmlsViewSpec
import views.Fixture
import views.html.notifications.v1m0.minded_to_revoke
// Verifies the "minded to revoke" notification view: title, headings, injected content and back link.
class minded_to_revokeSpec extends AmlsViewSpec with MustMatchers {
  trait ViewFixture extends Fixture {
    lazy val minded_to_revoke = app.injector.instanceOf[minded_to_revoke]
    implicit val requestWithToken = addTokenForView()
    // Sample notification data rendered into the view under test.
    val notificationParams = NotificationParams(msgContent = "msgContent", businessName = Some("Fake Name Ltd."), amlsRefNo = Some("amlsRegNo"))
  }
  "minded_to_revoke view" must {
    "have correct title" in new ViewFixture {
      def view = minded_to_revoke(notificationParams)
      doc.title must be("Revocation being considered" +
        " - " + "Your registration" +
        " - " + Messages("title.amls") +
        " - " + Messages("title.gov"))
    }
    "have correct headings" in new ViewFixture {
      def view = minded_to_revoke(notificationParams)
      heading.html must be("Revocation being considered")
      subHeading.html must include("Your registration")
    }
    "have correct content, businessName and reference displayed" in new ViewFixture {
      def view = minded_to_revoke(notificationParams)
      doc.html must (include("msgContent") and include("Fake Name Ltd.") and include("amlsRegNo"))
    }
    "have a back link" in new ViewFixture {
      def view = minded_to_revoke(notificationParams)
      doc.getElementsByAttributeValue("class", "link-back") must not be empty
    }
  }
}
| hmrc/amls-frontend | test/views/notifications/v1m0/minded_to_revokeSpec.scala | Scala | apache-2.0 | 2,224 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.javadsl.persistence.cassandra.testkit
import com.lightbend.lagom.javadsl.persistence.cassandra.CassandraPersistenceSpec
import com.lightbend.lagom.javadsl.persistence.testkit.AbstractEmbeddedPersistentActorSpec
// Runs the shared embedded persistent-actor test suite against the Cassandra persistence backend.
class EmbeddedCassandraPersistentActorSpec extends CassandraPersistenceSpec with AbstractEmbeddedPersistentActorSpec
| rstento/lagom | persistence-cassandra/javadsl/src/test/scala/com/lightbend/lagom/javadsl/persistence/cassandra/testkit/EmbeddedCassandraPersistentActorSpec.scala | Scala | apache-2.0 | 436 |
package wandou.math.algebra
/** A matrix-like structure that can be iterated over as slices and multiplied by vectors. */
trait VectorIterable extends Iterable[MatrixSlice] {
  // Iterator over all slices of the matrix
  // (presumably including empty slices, unlike `iterator` — TODO confirm with implementations).
  def iterateAll: Iterator[MatrixSlice]
  // Number of slices.
  def numSlices: Int
  // Number of rows.
  def numRows: Int
  // Number of columns.
  def numCols: Int
  /**
   * Return a new vector with cardinality equal to getNumRows() of this matrix which is the matrix product of the
   * recipient and the argument
   *
   * @param v a vector with cardinality equal to getNumCols() of the recipient
   * @return a new vector (typically a DenseVector)
   * @throws CardinalityException if this.getNumRows() != v.size()
   */
  def times(v: Vector): Vector
  /**
   * Convenience method for producing this.transpose().times(this.times(v)), which can be implemented with only one pass
   * over the matrix, without making the transpose() call (which can be expensive if the matrix is sparse)
   *
   * @param v a vector with cardinality equal to getNumCols() of the recipient
   * @return a new vector (typically a DenseVector) with cardinality equal to that of the argument.
   * @throws CardinalityException if this.getNumCols() != v.size()
   */
  def timesSquared(v: Vector): Vector
}
| wandoulabs/wandou-math | wandou-math/src/main/scala/wandou/math/algebra/VectorIterable.scala | Scala | apache-2.0 | 1,111 |
package services.free
import cats.free.{Free, Inject}
import cats.~>
import monix.eval.Task
object Interactions {
sealed trait DSL[A]
final case class Get(input: String) extends DSL[String]
final case class Print(msg: String) extends DSL[Unit]
}
/** Smart constructors lifting [[Interactions.DSL]] operations into `Free` programs over `F`. */
final class InteractionService[F[_]](implicit I: Inject[Interactions.DSL, F]) {
  import Interactions._
  /** Lift a [[Interactions.Get]] operation into the free program. */
  def get(input: String): Free[F, String] = Free.inject[DSL, F](Get(input))
  /** Lift a [[Interactions.Print]] operation into the free program. */
  def print(msg: String): Free[F, Unit] = Free.inject[DSL, F](Print(msg))
}
/** Implicit derivation of an [[InteractionService]] whenever the DSL injects into `F`. */
object InteractionService {
  implicit def actions[F[_]](implicit I: Inject[Interactions.DSL, F]): InteractionService[F] = new InteractionService[F]
}
/**
 * Natural transformation interpreting [[Interactions.DSL]] operations into Monix `Task`s.
 *
 * `read` supplies a line of user input; it is a constructor parameter so an
 * alternative input source can be substituted.
 */
final class InteractionInterpreter(read: () => String) extends (Interactions.DSL ~> Task) {
  import Interactions._
  def apply[A](a: DSL[A]) = a match {
    // Get: show the prompt, then block for one line of input
    case Get(input) => Task { println(input); read() }
    case Print(msg) => Task { println(msg) }
  }
}
| radusw/tagless-free-monix-sample | src/main/scala/services/free/InteractionService.scala | Scala | apache-2.0 | 924 |
/*
* Copyright 2013 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.algebird.util
import com.twitter.algebird._
import com.twitter.util.{ Future, Return, Try }
/**
 * Algebird type-class instances for Twitter `Future` and `Try`.
 *
 * The `Monad` instances below let algebraic structures on `T` (Semigroup,
 * Monoid, Group, Ring, Field) be lifted pointwise into `Future[T]` and
 * `Try[T]` via the `Monad*` wrapper classes.
 */
object UtilAlgebras {
  /** Monad instance for Twitter's `Future`. */
  implicit val futureMonad: Monad[Future] = new Monad[Future] {
    def apply[T](v: T) = Future.value(v)
    override def map[T, U](m: Future[T])(fn: T => U) = m.map(fn)
    def flatMap[T, U](m: Future[T])(fn: T => Future[U]) = m.flatMap(fn)
  }
  /** Monad instance for Twitter's `Try`. */
  implicit val tryMonad: Monad[Try] = new Monad[Try] {
    def apply[T](v: T) = Return(v)
    override def map[T, U](m: Try[T])(fn: T => U) = m.map(fn)
    def flatMap[T, U](m: Try[T])(fn: T => Try[U]) = m.flatMap(fn)
  }
  // Pointwise lifts of algebraic structures on T into Future[T]
  implicit def futureSemigroup[T: Semigroup]: Semigroup[Future[T]] = new MonadSemigroup[T, Future]
  implicit def futureMonoid[T: Monoid]: Monoid[Future[T]] = new MonadMonoid[T, Future]
  implicit def futureGroup[T: Group]: Group[Future[T]] = new MonadGroup[T, Future]
  implicit def futureRing[T: Ring]: Ring[Future[T]] = new MonadRing[T, Future]
  implicit def futureField[T: Field]: Field[Future[T]] = new MonadField[T, Future]
  // Pointwise lifts of algebraic structures on T into Try[T]
  implicit def trySemigroup[T: Semigroup]: Semigroup[Try[T]] = new MonadSemigroup[T, Try]
  implicit def tryMonoid[T: Monoid]: Monoid[Try[T]] = new MonadMonoid[T, Try]
  implicit def tryGroup[T: Group]: Group[Try[T]] = new MonadGroup[T, Try]
  implicit def tryRing[T: Ring]: Ring[Try[T]] = new MonadRing[T, Try]
  implicit def tryField[T: Field]: Field[Try[T]] = new MonadField[T, Try]
}
| jinlee/algebird | algebird-util/src/main/scala/com/twitter/algebird/util/UtilAlgebras.scala | Scala | apache-2.0 | 2,022 |
package org.velvia.filo
import java.nio.ByteBuffer
/**
* Type class for encoding a ColumnBuilder to queryable binary Filo format
*/
trait BuilderEncoder[A] {
  /** Fresh builder for element type A; used for automatic conversion of Seq[A] and Seq[Option[A]]. */
  def getBuilder(): ColumnBuilder[A]
  /** Serialize the builder's contents to a Filo binary buffer, honoring the encoding hint. */
  def encode(builder: ColumnBuilder[A], hint: BuilderEncoder.EncodingHint): ByteBuffer
}
/**
* Classes to encode a Builder to a queryable binary Filo format.
* Methods automatically detect the best encoding method to use, but hints are available
* to pass to the methods.
*
* To extend the encoder for additional base types A, implement a type class BuilderEncoder[A].
*/
object BuilderEncoder {
  /** Caller preference for the on-disk encoding strategy. */
  sealed trait EncodingHint
  case object AutoDetect extends EncodingHint
  case object SimpleEncoding extends EncodingHint
  case object DictionaryEncoding extends EncodingHint

  /** Summon the implicit encoder instance for T. */
  def apply[T: BuilderEncoder]: BuilderEncoder[T] = implicitly[BuilderEncoder[T]]

  implicit object IntEncoder extends BuilderEncoder[Int] {
    def getBuilder(): ColumnBuilder[Int] = new IntColumnBuilder
    def encode(builder: ColumnBuilder[Int], hint: EncodingHint): ByteBuffer =
      SimpleEncoders.toSimpleColumn(builder.data, builder.naMask.result, Utils.intVectorBuilder)
  }

  implicit object LongEncoder extends BuilderEncoder[Long] {
    def getBuilder(): ColumnBuilder[Long] = new LongColumnBuilder
    def encode(builder: ColumnBuilder[Long], hint: EncodingHint): ByteBuffer =
      SimpleEncoders.toSimpleColumn(builder.data, builder.naMask.result, Utils.longVectorBuilder)
  }

  implicit object DoubleEncoder extends BuilderEncoder[Double] {
    def getBuilder(): ColumnBuilder[Double] = new DoubleColumnBuilder
    def encode(builder: ColumnBuilder[Double], hint: EncodingHint): ByteBuffer =
      SimpleEncoders.toSimpleColumn(builder.data, builder.naMask.result, Utils.doubleVectorBuilder)
  }

  implicit object StringEncoder extends BuilderEncoder[String] {
    def getBuilder(): ColumnBuilder[String] = new StringColumnBuilder

    def encode(builder: ColumnBuilder[String], hint: EncodingHint): ByteBuffer = {
      // Decide whether dictionary encoding is worthwhile.  Under AutoDetect we
      // use it when the distinct-string count is at most half the element
      // count; empty/missing elements never reach stringSet, so columns with
      // many NA values tend to get dict encoded, which saves space.
      val dictWorthwhile = hint match {
        case DictionaryEncoding => true
        case SimpleEncoding     => false
        case _ => builder match {
          case sb: StringColumnBuilder =>
            sb.stringSet.size <= (sb.data.size / 2)
          // NOTE: ColumnBuilder is sealed, so only our own builders occur here.
        }
      }
      (dictWorthwhile, builder) match {
        case (true, sb: StringColumnBuilder) =>
          DictEncodingEncoders.toDictStringColumn(sb.data, sb.naMask.result, sb.stringSet)
        case _ =>
          SimpleEncoders.toSimpleColumn(builder.data, builder.naMask.result,
                                        Utils.stringVectorBuilder)
      }
    }
  }

  /** Encodes a [[org.velvia.filo.ColumnBuilder]] to a Filo format ByteBuffer. */
  def builderToBuffer[A: BuilderEncoder](builder: ColumnBuilder[A],
                                         hint: EncodingHint = AutoDetect): ByteBuffer =
    BuilderEncoder[A].encode(builder, hint)

  /**
   * Encodes a sequence of type A to a Filo format ByteBuffer; every value is
   * marked available.  Not necessarily the most efficient route, but it keeps
   * the automatic encoding detection available.
   */
  def seqToBuffer[A: BuilderEncoder](vector: collection.Seq[A],
                                     hint: EncodingHint = AutoDetect): ByteBuffer = {
    val builder = BuilderEncoder[A].getBuilder
    vector.foreach { elem => builder.addData(elem) }
    builderToBuffer(builder, hint)
  }

  /**
   * Encodes a sequence of Option[A] to a Filo format ByteBuffer; None elements
   * are encoded as NA bits.
   */
  def seqOptionToBuffer[A: BuilderEncoder](vector: collection.Seq[Option[A]],
                                           hint: EncodingHint = AutoDetect): ByteBuffer = {
    val builder = BuilderEncoder[A].getBuilder
    vector.foreach { elem => builder.addOption(elem) }
    builderToBuffer(builder, hint)
  }
}
| samklr/filo | filo-scala/src/main/scala/org.velvia.filo/BuilderEncoder.scala | Scala | apache-2.0 | 4,664 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.common
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import org.scalatest.junit.JUnitRunner
// Concurrency-safety tests for NestedSemaphore: permits must never be over-granted.
@RunWith(classOf[JUnitRunner])
class NestedSemaphoreTests extends FlatSpec with Matchers {
  behavior of "NestedSemaphore"

  it should "allow acquire of concurrency permits before acquire of memory permits" in {
    val semaphore = new NestedSemaphore[String](20)
    semaphore.availablePermits shouldBe 20

    val actionId = "action1"
    val concurrency = 5
    val memory = 3

    // Saturate a single container's concurrency: memory is charged only once.
    (1 to 5).par.foreach { _ =>
      semaphore.tryAcquireConcurrent(actionId, concurrency, memory) shouldBe true
    }
    semaphore.availablePermits shouldBe 20 - 3 // one container of memory == 3
    semaphore.concurrentState(actionId).availablePermits shouldBe 0

    // Consume the remaining memory (17) and the concurrency it provides.
    (1 to 25).par.foreach { _ =>
      semaphore.tryAcquireConcurrent(actionId, concurrency, memory) shouldBe true
    }
    semaphore.availablePermits shouldBe 2 // 6 containers * 3 memory = 18 used
    semaphore.concurrentState(actionId).availablePermits shouldBe 0
    semaphore.tryAcquireConcurrent("action1", concurrency, memory) shouldBe false
  }

  it should "not give away more permits even under concurrent load" in {
    // Repeat to give races a chance to surface.
    (0 until 100).foreach { _ =>
      val semaphore = new NestedSemaphore(32)
      // Request twice as many permits as exist, in parallel.
      val outcomes = (0 until 64).par.map(_ => semaphore.tryAcquire()).seq
      val expected = Seq.fill(32)(true) ++ Seq.fill(32)(false)
      outcomes should contain theSameElementsAs expected
    }
  }
}
| starpit/openwhisk | tests/src/test/scala/org/apache/openwhisk/common/NestedSemaphoreTests.scala | Scala | apache-2.0 | 2,514 |
package controllers
import javax.inject.Inject
import models.K2KCacheKeyPrefix.CookiePrefix
import play.api.mvc.{Request, Result}
import uk.gov.dvla.vehicles.presentation.common
import common.clientsidesession.ClientSideSessionFactory
import common.clientsidesession.CookieImplicits.RichCookies
import common.controllers.NewKeeperChooseYourAddressBase
import common.model.NewKeeperChooseYourAddressViewModel
import common.webserviceclients.addresslookup.AddressLookupService
import utils.helpers.Config
import views.html.changekeeper.new_keeper_choose_your_address
import common.views.constraints.Postcode.formatPostcode
/**
 * "New keeper: choose your address" step of the change-keeper journey.
 * Wires the shared NewKeeperChooseYourAddressBase behaviour to this project's
 * view template and defines where each outcome redirects.
 *
 * @param addressLookupService performs the postcode-to-address lookups
 */
class NewKeeperChooseYourAddress @Inject()(protected override val addressLookupService: AddressLookupService)
                                          (implicit protected override val clientSideSessionFactory: ClientSideSessionFactory,
                                           config: Config) extends NewKeeperChooseYourAddressBase(addressLookupService) {

  // Re-renders the page (400 Bad Request) when the submitted form is invalid.
  override protected def invalidFormResult(model: NewKeeperChooseYourAddressViewModel,
                                           name: String,
                                           postcode: String,
                                           email: Option[String],
                                           dropDownOptions: Seq[(String, String)],
                                           isBusinessKeeper: Boolean,
                                           fleetNumber: Option[String])(implicit request: Request[_]): Result =
    BadRequest(new_keeper_choose_your_address(
      model, name, postcode, email, dropDownOptions, isBusinessKeeper, fleetNumber)
    )

  // Renders the address-selection page; the postcode is re-formatted for display.
  override protected def presentView(model: NewKeeperChooseYourAddressViewModel,
                                     name: String,
                                     postcode: String,
                                     email: Option[String],
                                     dropDownOptions: Seq[(String, String)],
                                     isBusinessKeeper: Boolean,
                                     fleetNumber: Option[String])(implicit request: Request[_]): Result = {
    logMessage(request.cookies.trackingId(), Info, "Presenting new keeper choose your address view")
    Ok(views.html.changekeeper.new_keeper_choose_your_address(
      model, name, formatPostcode(postcode), email, dropDownOptions, isBusinessKeeper, fleetNumber)
    )
  }

  // Redirect targets used by the base controller for each journey outcome.
  override protected def privateKeeperDetailsRedirect(implicit request: Request[_]) = {
    logMessage(request.cookies.trackingId(), Debug, s"Redirecting to ${routes.PrivateKeeperDetails.present()}")
    Redirect(routes.PrivateKeeperDetails.present())
  }

  override protected def businessKeeperDetailsRedirect(implicit request: Request[_]) = {
    logMessage(request.cookies.trackingId(), Debug, s"Redirecting to ${routes.BusinessKeeperDetails.present()}")
    Redirect(routes.BusinessKeeperDetails.present())
  }

  override protected def vehicleLookupRedirect(implicit request: Request[_]) = {
    logMessage(request.cookies.trackingId(), Debug, s"Redirecting to ${routes.VehicleLookup.present()}")
    Redirect(routes.VehicleLookup.present())
  }

  override protected def completeAndConfirmRedirect(implicit request: Request[_]) = {
    logMessage(request.cookies.trackingId(), Debug, s"Redirecting to ${routes.DateOfSale.present()}")
    Redirect(routes.DateOfSale.present())
  }
}
| dvla/vehicles-change-keeper-online | app/controllers/NewKeeperChooseYourAddress.scala | Scala | mit | 3,421 |
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.zoo.grpc.utils
import java.nio.file.Files
import com.intel.analytics.zoo.pipeline.inference.InferenceModel
import com.intel.analytics.zoo.serving.utils.FileUtils
import org.slf4j.{Logger, LoggerFactory}
import scala.beans.BeanProperty
/**
 * Configuration holder and model-loading helper for the gRPC services.
 *
 * Bean properties are populated from a YAML config file (`configPath`);
 * `parseConfigStrings` post-processes the raw strings, and
 * `loadInferenceModel`/`parseModelType` locate and load the model on disk.
 */
class gRPCHelper extends Serializable {
  // BeanProperty store attributes read from config file
  @BeanProperty var modelPath = ""
  @BeanProperty var port = 8980
  // performance attributes
  @BeanProperty var inputAlreadyBatched = false
  @BeanProperty var coreNumberPerMachine = -1
  @BeanProperty var modelParallelism = 1
  @BeanProperty var threadPerModel = 1
  // redis specific attributes
  @BeanProperty var redisUrl = "localhost:6379"
  @BeanProperty var redisMaxMemory = "4g"
  @BeanProperty var redisTimeout = 5000
  @BeanProperty var redisPoolMaxTotal = 256
  // feature service attributes
  @BeanProperty var serviceType = "kv"
  @BeanProperty var loadInitialData = false
  @BeanProperty var initialDataPath: String = _
  @BeanProperty var initialUserDataPath: String = _
  @BeanProperty var initialItemDataPath: String = _
  @BeanProperty var userFeatureColumns: String = _
  @BeanProperty var itemFeatureColumns: String = _
  @BeanProperty var userIDColumn: String = _
  @BeanProperty var itemIDColumn: String = _
  // vector search service attributes
  @BeanProperty var loadSavedIndex = false
  @BeanProperty var indexPath: String = _
  // feature & vector search service attributes
  @BeanProperty var userModelPath: String = _
  @BeanProperty var itemModelPath: String = _
  // recommend service attributes
  @BeanProperty var inferenceColumns: String = _
  @BeanProperty var vectorSearchURL = "localhost:8980"
  @BeanProperty var featureServiceURL = "localhost:8980"
  @BeanProperty var rankingServiceURL = "localhost:8980"

  var configPath: String = "config.yaml"
  // Derived from redisUrl by parseConfigStrings
  var redisHost: String = _
  var redisPort: Int = _
  var blasFlag: Boolean = false
  // Derived from the comma-separated *Columns strings by parseConfigStrings
  var userFeatureColArr: Array[String] = _
  var itemFeatureColArr: Array[String] = _
  var inferenceColArr: Array[String] = _

  val logger = LoggerFactory.getLogger(getClass)

  /**
   * Post-processes raw configuration strings: splits redisUrl into host and
   * port, validates serviceType, and splits the comma-separated column lists.
   *
   * Fix: the *Columns bean properties default to null when absent from the
   * config file, so they are null-checked before calling isEmpty (the
   * previous code threw NullPointerException for missing columns).
   */
  def parseConfigStrings(): Unit = {
    redisHost = redisUrl.split(":").head.trim
    redisPort = redisUrl.split(":").last.trim.toInt
    if (serviceType != "kv" && serviceType != "inference") {
      logger.error(s"serviceType must be 'kv' or 'inference' but got ${serviceType}")
    }
    if (userFeatureColumns != null && !userFeatureColumns.isEmpty) {
      userFeatureColArr = userFeatureColumns.split("\\\\s*,\\\\s*")
    }
    if (itemFeatureColumns != null && !itemFeatureColumns.isEmpty) {
      itemFeatureColArr = itemFeatureColumns.split("\\\\s*,\\\\s*")
    }
    if (inferenceColumns != null && !inferenceColumns.isEmpty) {
      inferenceColArr = inferenceColumns.split("\\\\s*,\\\\s*")
    }
  }

  /**
   * Loads an inference model from modelPathToLoad.  The concurrent number of
   * model copies depends on the backend engine type; concurrentNum > 0
   * overrides the configured parallelism.
   *
   * Fix: the InferenceModel is now constructed after the parallelism has been
   * finalized (parseModelType may adjust it from coreNumberPerMachine, and
   * concurrentNum may override it); previously the model was created first,
   * with the stale pre-override value.
   *
   * @param concurrentNum optional parallelism override, ignored if <= 0
   * @param modelPathToLoad directory containing exactly one model
   * @param savedModelInputs input tensor names, used only for TF saved models
   * @return the loaded InferenceModel (left empty when loading fails)
   */
  def loadInferenceModel(concurrentNum: Int = 0, modelPathToLoad: String = modelPath,
                         savedModelInputs: Array[String] = null)
  : InferenceModel = {
    if (modelPathToLoad == "") {
      logger.error("The path to model should not be '', load model failed.")
      new InferenceModel(modelParallelism)
    } else {
      val (modelType, defPath, weightPath) = parseModelType(modelPathToLoad)
      // Allow concurrent number overwrite
      if (concurrentNum > 0) {
        modelParallelism = concurrentNum
      }
      logger.info(
        s"gPRC load Inference Model with Parallelism $modelParallelism")
      val model = new InferenceModel(modelParallelism)
      modelType match {
        case "caffe" => model.doLoadCaffe(defPath, weightPath, blas = blasFlag)
        case "bigdl" => model.doLoadBigDL(weightPath, blas = blasFlag)
        case "tensorflowFrozenModel" =>
          model.doLoadTensorflow(weightPath, "frozenModel", 1, 1, true)
        case "tensorflowSavedModel" =>
          model.doLoadTensorflow(weightPath, "savedModel", savedModelInputs, null)
        case "pytorch" => model.doLoadPyTorch(weightPath)
        case "keras" => logger.error("Keras currently not supported in gRPC service," +
          "consider transform it to Tensorflow")
        case _ => logger.error("Invalid model type, please check your model directory")
      }
      model
    }
  }

  /**
   * Infers the model type from the files in a model directory, first copying
   * the directory to a local temp location when the scheme is remote
   * (currently hdfs and s3 are supported by FileUtils.copyToLocal).
   *
   * Fix: File.listFiles returns null for a missing/unreadable directory; the
   * scan now iterates an empty array in that case instead of throwing
   * NullPointerException, and the failure is reported through the normal
   * "no model detected" error path.
   *
   * @param location local or remote model directory
   * @return (modelType, defPath, weightPath); entries may be null when
   *         detection fails
   */
  def parseModelType(location: String): (String, String, String) = {
    // Download the model directory to local disk if the scheme is remote
    val scheme = location.split(":").head
    val localModelPath = if (scheme == "file" || location.split(":").length <= 1) {
      location.split("file://").last
    } else {
      val path = Files.createTempDirectory("model")
      val dstPath = path.getParent + "/" + path.getFileName
      FileUtils.copyToLocal(location, dstPath)
      dstPath
    }

    // Detection state, filled in while scanning the directory
    var modelType: String = null
    var weightPath: String = null
    var defPath: String = null
    var variablesPathExist = false

    import java.io.File
    val f = new File(localModelPath)
    val fileList = f.listFiles
    if (fileList == null) {
      logger.error("Your model path provided in config is empty, please check your model path.")
    }
    // Fall back to an empty array so the loop below is a no-op instead of an NPE.
    val files: Array[File] = if (fileList == null) Array.empty[File] else fileList
    // model type is always null, not support pass model type currently
    if (modelType == null) {
      for (file <- files) {
        val fName = file.getName
        val fPath = new File(localModelPath, fName).toString
        if (fName.endsWith("caffemodel")) {
          throwOneModelError(true, false, true, modelType, defPath, weightPath)
          weightPath = fPath
          modelType = "caffe"
        }
        else if (fName.endsWith("prototxt")) {
          throwOneModelError(false, true, false, modelType, defPath, weightPath)
          defPath = fPath
        }
        // ckpt seems not supported
        else if (fName.endsWith("pb")) {
          throwOneModelError(true, false, true, modelType, defPath, weightPath)
          weightPath = localModelPath
          if (variablesPathExist) {
            modelType = "tensorflowSavedModel"
          } else {
            modelType = "tensorflowFrozenModel"
          }
        }
        else if (fName.endsWith("pt")) {
          throwOneModelError(true, false, true, modelType, defPath, weightPath)
          weightPath = fPath
          modelType = "pytorch"
        }
        else if (fName.endsWith("model")) {
          throwOneModelError(true, false, true, modelType, defPath, weightPath)
          weightPath = fPath
          modelType = "bigdl"
        }
        else if (fName.endsWith("keras")) {
          throwOneModelError(true, false, true, modelType, defPath, weightPath)
          weightPath = fPath
          modelType = "keras"
        }
        else if (fName.endsWith("bin")) {
          throwOneModelError(true, false, true, modelType, defPath, weightPath)
          weightPath = fPath
          modelType = "openvino"
        }
        else if (fName.endsWith("xml")) {
          throwOneModelError(false, true, false, modelType, defPath, weightPath)
          defPath = fPath
        }
        else if (fName.equals("variables")) {
          // The "variables" dir may be seen before or after the .pb file
          if (modelType != null && modelType.equals("tensorflowFrozenModel")) {
            modelType = "tensorflowSavedModel"
          } else {
            variablesPathExist = true
          }
        }
      }
      if (modelType == null) logger.error("There is no model detected in your directory." +
        "Please refer to document for supported model types.")
    }
    else {
      modelType = modelType.toLowerCase
    }
    // Auto-derive the thread/parallelism split when coreNumberPerMachine is set:
    // OpenVINO uses one copy with many threads, other engines use many copies.
    if (coreNumberPerMachine > 0) {
      if (modelType == "openvino") {
        threadPerModel = coreNumberPerMachine
        modelParallelism = 1
      } else {
        threadPerModel = 1
        modelParallelism = coreNumberPerMachine
      }
    }
    (modelType, defPath, weightPath)
  }

  /**
   * Logs an error if an artifact of the flagged kind was already detected,
   * enforcing the "exactly one model per directory" rule.
   *
   * @param modelType whether to check that no model type was already set
   * @param defPath whether to check that no definition file was already found
   * @param weightPath whether to check that no weight file was already found
   */
  def throwOneModelError(modelType: Boolean,
                         defPath: Boolean, weightPath: Boolean, modelTypeStr: String,
                         defPathStr: String, weightPathStr: String)
  : Unit = {
    if ((modelType && modelTypeStr != null) ||
        (defPath && defPathStr != null) ||
        (weightPath && weightPathStr != null)) {
      logger.error("Only one model is allowed to exist in " +
        "model folder, please check your model folder to keep just" +
        "one model in the directory")
    }
  }
}
| intel-analytics/analytics-zoo | zoo/src/main/scala/com/intel/analytics/zoo/grpc/utils/gRPCHelper.scala | Scala | apache-2.0 | 9,457 |
package org.eigengo.akkapatterns.api
import spray.routing.HttpService
import org.eigengo.akkapatterns.domain.Customer
import org.eigengo.akkapatterns.core.CustomerController
import akka.util.Timeout
import scala.concurrent.Future
import java.util.Date
/**
 * Spray route for customer resources: GET /customers/{uuid} returns a customer,
 * POST /customers/{uuid} (authenticated) updates one.
 */
trait CustomerService extends HttpService {
  this: EndpointMarshalling with AuthenticationDirectives =>

  protected val customerController = new CustomerController

  val customerRoute =
    path("customers" / JavaUUID) { id =>
      get {
        complete {
          // when using controllers, we have to explicitly create the Future here
          // it is not necessary to add the T information, but it helps with API documentation.
          Future[Customer] {
            customerController.get(id)
          }
        }
      } ~
      // updates require a valid customer credential
      authenticate(validCustomer) { ud =>
        post {
          handleWith { customer: Customer =>
            // if we authenticated only validUser or validSuperuser
            Future[Customer] {
              customerController.update(ud, customer)
            }
          }
        }
      }
    }
}
| janm399/akka-patterns | server/api/src/main/scala/org/eigengo/akkapatterns/api/customer.scala | Scala | apache-2.0 | 1,098 |
package sparkstreaming
import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import scala.collection.mutable
/**
 * Consumes orders from an order server that emits one order per second in the
 * simplified format "itemCode\tunitPrice\tquantity", and:
 *  1. continuously shows the running total of all sales,
 *  2. prints the sales totals over the last 1, 5 and 15 minutes,
 *  3. prints the top-10 items by sales over the last hour.
 */
object StreamingTrade {
  def main(args: Array[String]): Unit = {
    // 1. Spark configuration.
    //    Fix: use this object's own name for the app name (was copy-pasted
    //    from StreamingOrder).
    val conf: SparkConf = new SparkConf()
    conf.setAppName(StreamingTrade.getClass.getSimpleName)
    conf.setMaster("local[2]")
    // 2. StreamingContext with a 1-second batch interval
    val ssc: StreamingContext = new StreamingContext(conf, Seconds(1))
    ssc.checkpoint("D:/trade")
    // 3. Socket receiver delivering raw order lines
    val textStream: ReceiverInputDStream[String] = ssc.socketTextStream("192.168.1.100", 5656, StorageLevel.MEMORY_ONLY)
    // 4. Business logic: map each order line to (itemCode, saleAmount)
    val wc: DStream[(String, Double)] = textStream.map(line => {
      val split: Array[String] = line.split("\t")
      // (item code, unit price * quantity)
      (split(0), split(1).toDouble * split(2).toInt)
    })
    // Running total of all sales, maintained through the state-update function
    wc.map { case (k, v) => ("实时销售总额", v) }.updateStateByKey(updateFunc).print()
    // Totals over 1-, 5- and 15-minute sliding windows (same computation,
    // only the window length differs)
    wc.reduceByKeyAndWindow((x: Double, y: Double) => x + y, Seconds(60), Seconds(1)).map(_._2).reduce(_ + _).print()
    wc.reduceByKeyAndWindow((x: Double, y: Double) => x + y, Seconds(60 * 5), Seconds(1)).map(_._2).reduce(_ + _).print()
    wc.reduceByKeyAndWindow((x: Double, y: Double) => x + y, Seconds(60 * 15), Seconds(1)).map(_._2).reduce(_ + _).print()
    // Top-10 items by sales over the last hour
    val top10DStream: DStream[(String, Double)] = wc.reduceByKeyAndWindow((x: Double, y: Double) => x + y, Seconds(60 * 60), Seconds(1))
    top10DStream.foreachRDD {
      _.sortBy(_._2, false).take(10).foreach(println)
    }
    // 5. Start the streaming computation
    ssc.start()
    // 6. Block until the job terminates
    ssc.awaitTermination()
  }

  /**
   * State update function: adds this batch's sales to the accumulated total.
   *
   * @param value  sale amounts observed for the key in the current batch
   * @param status previously accumulated total, if any
   * @return the new accumulated total
   */
  def updateFunc(value: Seq[Double], status: Option[Double]): Option[Double] = {
    val thisStatus: Double = value.sum
    val lastStatus: Double = status.getOrElse(0)
    Some(thisStatus + lastStatus)
  }
}
| monsonlee/BigData | Project5_订单交易额实时统计、离线审计/实时统计/spark streaming版/StreamingTrade.scala | Scala | gpl-3.0 | 2,711 |
/*
* Copyright (C) 2010 Peter Lewerin
* Copyright (C) 2010 Lalit Pant <pant.lalit@gmail.com>
*
* The contents of this file are subject to the GNU General Public License
* Version 3 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.gnu.org/copyleft/gpl.html
*
* Software distributed under the License is distributed on an "AS
* IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
* implied. See the License for the specific language governing
* rights and limitations under the License.
*
*/
package net.kogics.kojo
package staging
import util.Utils
import util.Math
import util.InputAware
import core.Point
import core.UnitLen
import java.awt.BasicStroke
import edu.umd.cs.piccolo.PNode
import edu.umd.cs.piccolo.nodes.PPath
import edu.umd.cs.piccolo.util.PBounds
import edu.umd.cs.piccolo.event._
import java.awt.Color
import java.awt.Paint
/** Shared references to the singleton canvas and its default turtle and figure. */
object Impl {
  val canvas = SpriteCanvas.instance
  val turtle0 = canvas.turtle0
  val figure0 = canvas.figure0
}
/** Staging API
*
* This object contains the API for using Staging within Kojo scripts.
*
* DISCLAIMER
*
* Parts of this interface are written to approximately conform to the
* Processing API as described in the reference at
* <URL: http://processing.org/reference/>.
* The implementation code is the work of Peter Lewerin
* (<peter.lewerin@tele2.se>) and is not in any way derived from the
* Processing source. */
object API {
//W#summary Developer home-page for the Staging Module
//W
//W=Introduction=
//W
//WThe Staging Module is currently being developed by Peter Lewerin.
//WThe original impetus came from a desire to run Processing-style code in Kojo.
//W
//WAt this point, the shape hierarchy is the most complete part, but
//Wutilities for color definition, time keeping etc are being added.
//W
//W=Examples=
//W
//W * StagingHelloKojoExample
//W * StagingArrayExample
//W * StagingArrayTwoDeeExample
//W * StagingClockExample
//W * StagingColorWheelExample
//W * StagingCreatingColorsExample
//W * StagingDifferenceOfTwoSquaresExample
//W * StagingEasingExample
//W * StagingHueSaturationBrightnessExample
//W * StagingSineOfAnAngleExample
//W
//W=Overview=
//W
//W==Points==
//W
//WStaging uses {{{net.kogics.kojo.core.Point}}} for coordinates.
//W
//T PointTest begins
/** Constructs a Point from x/y coordinates. */
def point(x: Double, y: Double) = Point(x, y)
// Implicit conversions so numeric tuples can be used wherever a Point is expected.
implicit def tupleDDToPoint(tuple: (Double, Double)) = Point(tuple._1, tuple._2)
implicit def tupleDIToPoint(tuple: (Double, Int)) = Point(tuple._1, tuple._2)
implicit def tupleIDToPoint(tuple: (Int, Double)) = Point(tuple._1, tuple._2)
implicit def tupleIIToPoint(tuple: (Int, Int)) = Point(tuple._1, tuple._2)
//implicit def baseShapeToPoint(b: BaseShape) = b.origin
//implicit def awtPointToPoint(p: java.awt.geom.Point2D) = Point(p.getX, p.getY)
//implicit def awtDimToPoint(d: java.awt.geom.Dimension2D) = Point(d.getWidth, d.getHeight)
/** The point of origin, located at a corner of the user screen if
 * `screenSize` has been called, or the middle of the screen otherwise. */
val O = Point(0, 0)
//T PointTest ends
//W
//W==User Screen==
//W
//WThe zoom level and axis orientations can be set using `screenSize`.
//W
//T ScreenMethodsTest begins
/** Width of the user screen rectangle, in pixels. */
def screenWidth = Screen.rect.getWidth.toInt
/** Height of the user screen rectangle, in pixels. */
def screenHeight = Screen.rect.getHeight.toInt
/** Sets the user screen dimensions (and thereby zoom/axis orientation). */
def screenSize(width: Int, height: Int) = Screen.size(width, height)
/** The middle point of the user screen, or (0, 0) if `screenSize` hasn't
 * been called. */
def screenMid = Screen.rect.getCenter2D
/** The extreme point of the user screen (i.e. the opposite corner from
 * the point of origin), or (0, 0) if `screenSize` hasn't been called. */
def screenExt = Screen.rect.getExt
/** Fills the user screen with the specified color. */
def background(bc: Color) = {
  withStyle(bc, null, 1) { rectangle(O, screenExt) }
}
//T ScreenMethodsTest ends
//W
//W==Simple shapes and text==
//W
//WGiven `Point`s or _x_ and _y_ coordinate values, simple shapes like dots,
//Wlines, rectangles, ellipses, and elliptic arcs can be drawn. Texts can
//Walso be placed in this way.
//W
//T SimpleShapesTest begins
// --- Dots, lines and vectors (arrowed lines) ---
def dot(x: Double, y: Double): Dot = Dot(Point(x, y))
def dot(p: Point): Dot = Dot(p)
def line(x1: Double, y1: Double, x2: Double, y2: Double) =
  Line(Point(x1, y1), Point(x2, y2))
def line(p1: Point, p2: Point) =
  Line(p1, p2)
def vector(x1: Double, y1: Double, x2: Double, y2: Double, a: Double) =
  Vector(Point(x1, y1), Point(x2, y2), a)
def vector(p1: Point, p2: Point, a: Double) =
  Vector(p1, p2, a)
// --- Rectangles and squares; overloads accept either a size or a second corner ---
def rectangle(x: Double, y: Double, w: Double, h: Double) =
  Rectangle(Point(x, y), Point(x + w, y + h))
def rectangle(p: Point, w: Double, h: Double) =
  Rectangle(p, Point(p.x + w, p.y + h))
def rectangle(p1: Point, p2: Point) =
  Rectangle(p1, p2)
def square(x: Double, y: Double, s: Double) =
  Rectangle(Point(x, y), Point(x + s, y + s))
def square(p: Point, s: Double) =
  Rectangle(p, Point(p.x + s, p.y + s))
// --- Rectangles with rounded corners; rx/ry are the corner radii ---
def roundRectangle(
  x: Double, y: Double,
  w: Double, h: Double,
  rx: Double, ry: Double
) =
  RoundRectangle(Point(x, y), Point(x + w, y + h), Point(rx, ry))
def roundRectangle(
  p: Point,
  w: Double, h: Double,
  rx: Double, ry: Double
) =
  RoundRectangle(p, Point(p.x + w, p.y + h), Point(rx, ry))
def roundRectangle(p1: Point, p2: Point, rx: Double, ry: Double) =
  RoundRectangle(p1, p2, Point(rx, ry))
def roundRectangle(p1: Point, p2: Point, p3: Point) =
  RoundRectangle(p1, p2, p3)
// --- Ellipses and circles ---
def ellipse(cx: Double, cy: Double, rx: Double, ry: Double) =
  Ellipse(Point(cx, cy), Point(cx + rx, cy + ry))
def ellipse(p: Point, rx: Double, ry: Double) =
  Ellipse(p, Point(p.x + rx, p.y + ry))
def ellipse(p1: Point, p2: Point) =
  Ellipse(p1, p2)
def circle(x: Double, y: Double, r: Double) =
  Ellipse(Point(x, y), Point(x + r, y + r))
def circle(p: Point, r: Double) =
  Ellipse(p, Point(p.x + r, p.y + r))
// --- Elliptic arcs from start angle s spanning e degrees;
//     arc/pieslice close as PIE, openArc stays OPEN, chord closes with a line ---
def arc(cx: Double, cy: Double, rx: Double, ry: Double, s: Double, e: Double) =
  Arc(Point(cx, cy), Point(cx + rx, cy + ry), s, e, java.awt.geom.Arc2D.PIE)
def arc(p: Point, rx: Double, ry: Double, s: Double, e: Double) =
  Arc(p, Point(p.x + rx, p.y + ry), s, e, java.awt.geom.Arc2D.PIE)
def arc(p1: Point, p2: Point, s: Double, e: Double) =
  Arc(p1, p2, s, e, java.awt.geom.Arc2D.PIE)
def pieslice(cx: Double, cy: Double, rx: Double, ry: Double, s: Double, e: Double) =
  Arc(Point(cx, cy), Point(cx + rx, cy + ry), s, e, java.awt.geom.Arc2D.PIE)
def pieslice(p: Point, rx: Double, ry: Double, s: Double, e: Double) =
  Arc(p, Point(p.x + rx, p.y + ry), s, e, java.awt.geom.Arc2D.PIE)
def pieslice(p1: Point, p2: Point, s: Double, e: Double) =
  Arc(p1, p2, s, e, java.awt.geom.Arc2D.PIE)
def openArc(cx: Double, cy: Double, rx: Double, ry: Double, s: Double, e: Double) =
  Arc(Point(cx, cy), Point(cx + rx, cy + ry), s, e, java.awt.geom.Arc2D.OPEN)
def openArc(p: Point, rx: Double, ry: Double, s: Double, e: Double) =
  Arc(p, Point(p.x + rx, p.y + ry), s, e, java.awt.geom.Arc2D.OPEN)
def openArc(p1: Point, p2: Point, s: Double, e: Double) =
  Arc(p1, p2, s, e, java.awt.geom.Arc2D.OPEN)
def chord(cx: Double, cy: Double, rx: Double, ry: Double, s: Double, e: Double) =
  Arc(Point(cx, cy), Point(cx + rx, cy + ry), s, e, java.awt.geom.Arc2D.CHORD)
def chord(p: Point, rx: Double, ry: Double, s: Double, e: Double) =
  Arc(p, Point(p.x + rx, p.y + ry), s, e, java.awt.geom.Arc2D.CHORD)
def chord(p1: Point, p2: Point, s: Double, e: Double) =
  Arc(p1, p2, s, e, java.awt.geom.Arc2D.CHORD)
// --- Text placement ---
def text(s: String, x: Double, y: Double) = Text(s, Point(x, y))
def text(s: String, p: Point) = Text(s, p)
// --- Stars: inner/outer are the two radii, points the number of spikes;
//     the third overload derives the radii from reference points ---
def star(cx: Double, cy: Double, inner: Double, outer: Double, points: Int) =
  Star(Point(cx, cy), inner, outer, points)
def star(p: Point, inner: Double, outer: Double, points: Int) =
  Star(p, inner, outer, points)
def star(p1: Point, p2: Point, p3: Point, points: Int) =
  Star(p1, dist(p1, p2), dist(p1, p3), points)
// --- Crosses and saltires (diagonal crosses), filled or outlined ---
def cross(p1: Point, p2: Point, cw: Double, r: Double = 1, greek: Boolean = false) =
  Cross(p1, p2, cw, r, greek)
def crossOutline(p1: Point, p2: Point, cw: Double, r: Double = 1, greek: Boolean = false) =
  CrossOutline(p1, p2, cw, r, greek)
def saltire(p1: Point, p2: Point, s: Double) = Saltire(p1, p2, s)
def saltireOutline(p1: Point, p2: Point, s: Double) = SaltireOutline(p1, p2, s)
//T SimpleShapesTest ends
//W
//W==Complex Shapes==
//W
//WGiven a sequence of `Point`s, a number of complex shapes can be drawn,
//Wincluding basic polylines and polygons, and patterns of polylines/polygons.
//W
//T ComplexShapesTest begins
// Factories for point-sequence based shapes; each delegates to its companion.
def polyline(pts: Seq[Point]) = Polyline(pts)
def polygon(pts: Seq[Point]): Polygon = Polygon(pts)
def triangle(p0: Point, p1: Point, p2: Point) = polygon(Seq(p0, p1, p2))
def quad(p0: Point, p1: Point, p2: Point, p3: Point) =
  polygon(Seq(p0, p1, p2, p3))
// Batched shapes named after OpenGL-style primitive modes
// (lines, triangles, strips, fans, quads).
def linesShape(pts: Seq[Point]) = LinesShape(pts)
def trianglesShape(pts: Seq[Point]) = TrianglesShape(pts)
def triangleStripShape(pts: Seq[Point]) = TriangleStripShape(pts)
def quadsShape(pts: Seq[Point]) = QuadsShape(pts)
def quadStripShape(pts: Seq[Point]) = QuadStripShape(pts)
def triangleFanShape(p0: Point, pts: Seq[Point]) = TriangleFanShape(p0, pts)
//T ComplexShapesTest ends
//W
//W==SVG Shapes==
//W
//WGiven an SVG element, the corresponding shape can be drawn.
//W
//T SvgShapesTest begins
def svgShape(node: scala.xml.Node) = SvgShape(node)
//T SvgShapesTest ends
// Sprite image loaded from file `fname`, anchored at (x, y).
def sprite(x: Double, y: Double, fname: String) = Sprite(point(x, y), fname)
def path(x: Double, y: Double) = Path(point(x, y))
// Groups several shapes into one composite node (see Composite below).
def group(shapes: List[Shape]) = Composite(shapes)
def group(shapes: Shape *) = Composite(shapes)
//W
//W==Color==
//W
//WColor values can be created with the method `color`, or using a
//W_color-maker_. The methods `fill`, `noFill`,
//W`stroke`, and `noStroke` set the colors used to draw the insides and edges
//Wof figures. The method `strokeWidth` doesn't actually affect color but is
//Wtypically used together with the color setting methods. The method
//W`withStyle` allows the user to set fill color, stroke color, and stroke
//Wwidth temporarily.
//W
//W
//T ColorTest begins
// ColorMaker factories for the different color models (see ColorModes below).
def grayColors(grayMax: Int) =
  ColorMaker(GRAY(grayMax))
def grayColorsWithAlpha(grayMax: Int, alphaMax: Int) =
  ColorMaker(GRAYA(grayMax, alphaMax))
def rgbColors(rMax: Int, gMax: Int, bMax: Int) =
  ColorMaker(RGB(rMax, gMax, bMax))
def rgbColorsWithAlpha(rMax: Int, gMax: Int, bMax: Int, alphaMax: Int) =
  ColorMaker(RGBA(rMax, gMax, bMax, alphaMax))
def hsbColors(hMax: Int, sMax: Int, bMax: Int) =
  ColorMaker(HSB(hMax, sMax, bMax))
def namedColor(s: String) = ColorMaker.color(s)
// Drawing style applied through the global figure; passing null
// (noFill/noStroke) disables the respective paint.
def fill(c: Paint) = Impl.figure0.setFillColor(c)
def noFill() = Impl.figure0.setFillColor(null)
def stroke(c: Color) = Impl.figure0.setPenColor(c)
def noStroke() = Impl.figure0.setPenColor(null)
def strokeWidth(w: Double) = Impl.figure0.setPenThickness(w)
// Turtle-style aliases for the setters above.
def setPenColor(color: Color) = stroke(color)
def setFillColor(color: Paint) = fill(color)
def setPenThickness(w: Double) = strokeWidth(w)
// Temporarily installs fill/stroke/width for the duration of `body`.
def withStyle(fc: Color, sc: Color, sw: Double)(body: => Unit) =
  Style(fc, sc, sw)(body)
implicit def ColorToRichColor (c: java.awt.Color) = RichColor(c)
def lerpColor(from: RichColor, to: RichColor, amt: Double) =
  RichColor.lerpColor(from, to, amt)
//T ColorTest ends
// One-time initialization of mouse/keyboard input tracking.
Inputs.init()
//W
//W==Timekeeping==
//W
//WA number of methods report the current time.
//W
//T TimekeepingTest begins
// Wall-clock helpers; `time` is in (fractional) seconds.
def millis = System.currentTimeMillis()
def time = System.currentTimeMillis()/1000.0
import java.util.Calendar
// Calendar fields in the JVM's default time zone; `month` is 1-based
// (Calendar.MONTH is 0-based).
def second = Calendar.getInstance().get(Calendar.SECOND)
def minute = Calendar.getInstance().get(Calendar.MINUTE)
def hour = Calendar.getInstance().get(Calendar.HOUR_OF_DAY)
def day = Calendar.getInstance().get(Calendar.DAY_OF_MONTH)
def month = Calendar.getInstance().get(Calendar.MONTH) + 1
def year = Calendar.getInstance().get(Calendar.YEAR)
// NOTE(review): the marker below says UtilsTest but this section opened with
// "TimekeepingTest begins" — confirm which name the doc/test extractor expects.
//T UtilsTest ends
//W
//W==Math==
//W
//WA number of methods perform number processing tasks.
//W
//T MathTest begins
// Numeric helpers; constrain/map/lerp delegate to the staging Math object.
def constrain(value: Double, min: Double, max: Double) =
  Math.constrain(value, min, max)
// Normalizes value from [low, high] into [0, 1].
def norm(value: Double, low: Double, high: Double) =
  Math.map(value, low, high, 0, 1)
// Re-maps value from [low1, high1] into [low2, high2].
def map(value: Double, low1: Double, high1: Double, low2: Double, high2: Double) =
  Math.map(value, low1, high1, low2, high2)
// Linear interpolation between value1 and value2 by amt.
def lerp(value1: Double, value2: Double, amt: Double) =
  Math.lerp(value1, value2, amt)
def sq(x: Double) = x * x
def sqrt(x: Double) = math.sqrt(x)
// Euclidean distance / vector magnitude.
def dist(x0: Double, y0: Double, x1: Double, y1: Double) =
  sqrt(sq(x1 - x0) + sq(y1 - y0))
def dist(p1: Point, p2: Point) =
  sqrt(sq(p2.x - p1.x) + sq(p2.y - p1.y))
def mag(x: Double, y: Double) = dist(0, 0, x, y)
def mag(p: Point) = dist(0, 0, p.x, p.y)
//W
//W==Trigonometry==
//W
//WA number of methods perform trigonometry tasks.
//W
// Angle constants (radians) and thin wrappers over scala.math.
val PI = math.Pi
val TWO_PI = 2*PI
val HALF_PI = PI/2
val QUARTER_PI = PI/4
def sin(a: Double) = math.sin(a)
def cos(a: Double) = math.cos(a)
def tan(a: Double) = math.tan(a)
def asin(a: Double) = math.asin(a)
def acos(a: Double) = math.acos(a)
def atan(a: Double) = math.atan(a)
// Degree/radian conversions.
def radians(deg: Double) = deg.toRadians
def degrees(rad: Double) = rad.toDegrees
//T MathTest ends
//W
//W==Control==
//W
//WThere are methods for execution control and mouse feedback.
//W
//T ControlTest begins
// Runs `fn` on every canvas refresh tick (animation loop).
def loop(fn: => Unit) = Impl.figure0.refresh(fn)
def animate(fn: => Unit) = loop(fn)
def stop() = Impl.figure0.stopRefresh()
// Clears the staging canvas and hides the default turtle.
def reset() = {
  Impl.canvas.clearStaging()
  Impl.canvas.turtle0.invisible()
}
def clear() = reset()
// Like clear(), but clears with the given unit length.
def clearWithUL(ul: UnitLen) {
  Impl.canvas.clearStagingWul(ul)
  Impl.canvas.turtle0.invisible()
}
def switchTo() = Impl.canvas.makeStagingVisible()
// Clears the figure via fgClear (presumably the foreground layer — confirm).
def wipe() = Impl.figure0.fgClear()
def onAnimationStop(fn: => Unit) = Impl.figure0.onStop(fn)
// Mouse position for the current and previous animation step.
def mouseX() = Inputs.stepMousePos.x
def mouseY() = Inputs.stepMousePos.y
def pmouseX() = Inputs.prevMousePos.x
def pmouseY() = Inputs.prevMousePos.y
// Mouse button codes as reported by mouseButton.
val LEFT = 1
val CENTER = 2
val RIGHT = 3
def mouseButton = Inputs.mouseBtn
def mousePressed = Inputs.mousePressedFlag
/**
 * Morphs the polygon `pts1` into `pts2` over `n` interpolation steps,
 * hiding the previous polygon and drawing the interpolated one each step.
 *
 * Both sequences must have the same number of vertices.
 * NOTE(review): for n == 0 the step fraction is 0/0f = NaN, producing NaN
 * vertices — callers should pass n >= 1; confirm before tightening.
 */
def interpolatePolygon(pts1: Seq[Point], pts2: Seq[Point], n: Int) {
  require(pts1.size == pts2.size, "The polygons don't match up.")
  var g0 = polygon(pts1)
  for (i <- 0 to n; amt = i / n.toFloat) {
    // Vertex-wise linear interpolation between the two outlines.
    val pts = (pts1 zip pts2) map { case (p1, p2) =>
      point(lerp(p1.x, p2.x, amt), lerp(p1.y, p2.y, amt))
    }
    g0.hide()
    g0 = polygon(pts)
    // Throttle to keep the animation visible. The loop variable used to be
    // named `i`, shadowing the outer step index — renamed to `_`.
    for (_ <- 0 to 10) {
      net.kogics.kojo.util.Throttler.throttle()
    }
  }
}
//T ControlTest ends
// expose some types in the API
// Aliases so user code can refer to the staging shape types without a
// package prefix.
type Shape = staging.Shape
type StrokedShape = staging.StrokedShape
//W
//W=Usage=
//W
} // end of API
// Color-model descriptors consumed by ColorMaker (see the *Colors factory
// methods in the API). Each case class carries the maximum value for its
// channels; the *A variants add an alpha channel.
// NOTE(review): consider making this hierarchy `sealed` — all subclasses
// visible here live in this file; confirm no external extenders first.
abstract class ColorModes
case class RGB(r: Int, g: Int, b: Int) extends ColorModes
case class RGBA(r: Int, g: Int, b: Int, a: Int) extends ColorModes
case class HSB(h: Int, s: Int, b: Int) extends ColorModes
case class HSBA(h: Int, s: Int, b: Int, a: Int) extends ColorModes
case class GRAY(v: Int) extends ColorModes
case class GRAYA(v: Int, a: Int) extends ColorModes
// Companion-style factory/extractor for the (externally defined) Point
// class, enabling `Point(x, y)` construction and pattern matching.
object Point {
  def apply(x: Double, y: Double) = new Point(x, y)
  def unapply(p: Point) = Some((p.x, p.y))
}
//T ShapeMethodsTest begins
/**
 * Base trait for every staging shape: wraps a Piccolo `PNode` and offers
 * visibility, paint and affine-transform operations. All node mutation is
 * funneled through the Swing thread via `Utils.runInSwingThread*`.
 */
trait Shape extends InputAware {
  def myCanvas = Impl.canvas
  def myNode = node
  /** The underlying Piccolo scene-graph node. */
  def node: PNode
  // Cumulative scale applied via scale(); consumed by scaleTo().
  // (Was the deprecated trailing-dot literal `1.`.)
  var sizeFactor = 1.0
  /** Makes the shape invisible; the node stays in the scene graph. */
  def hide() {
    Utils.runInSwingThread {
      node.setVisible(false)
    }
    Impl.canvas.repaint()
  }
  /** Makes the shape visible again. */
  def show() {
    Utils.runInSwingThread {
      node.setVisible(true)
    }
    Impl.canvas.repaint()
  }
  /** Removes the shape's node from the figure. */
  def erase() {
    Impl.figure0.removePnode(node)
  }
  /** Sets the fill paint of the node. */
  def fill_=(color: Paint) {
    Utils.runInSwingThread {
      node.setPaint(color)
      node.repaint()
    }
  }
  def fill(color: Paint) {
    fill = color
  }
  def fill = Utils.runInSwingThreadAndWait {
    node.getPaint
  }
  def setFillColor(color: Paint) = fill(color)
  def rotate(amount: Double) = turn(amount)
  /** Rotates so that the heading becomes `angle` (degrees). */
  def rotateTo(angle: Double) = {
    turn(angle - _theta.toDegrees)
  }
  /** Scales by `amount`, accumulating into sizeFactor. */
  def scale(amount: Double) = {
    Utils.runInSwingThread {
      node.scale(amount)
      node.repaint()
    }
    sizeFactor *= amount
  }
  /** Scales so that the cumulative factor becomes `size`. */
  def scaleTo(size: Double) = {
    scale(size / sizeFactor)
  }
  def translate(p: Point) {
    translate(p.x, p.y)
  }
  def offset(p: Point) {
    offset(p.x, p.y)
  }
  /** Translates in the node's local coordinate system. */
  def translate(x: Double, y: Double) = {
    Utils.runInSwingThread {
      node.translate(x, y)
      node.repaint()
    }
  }
  /** Moves the node's offset (parent coordinates). */
  def offset(x: Double, y: Double) = {
    Utils.runInSwingThread {
      node.offset(x, y)
      node.repaint()
    }
  }
  def offset = Utils.runInSwingThreadAndWait {
    val o = node.getOffset
    Point(o.getX, o.getY)
  }
  import turtle.TurtleHelper._
  // Current rotation; fed straight into node.setRotation and exposed in
  // degrees via `heading` — so this is held in radians; confirm that
  // TurtleHelper.thetaAfterTurn also works in radians.
  protected var _theta: Double = 0
  def turn(angle: Double) {
    Utils.runInSwingThread {
      _theta = thetaAfterTurn(angle, _theta)
      node.setRotation(_theta)
      node.repaint()
    }
  }
  def heading = Utils.runInSwingThreadAndWait {
    _theta.toDegrees
  }
  def orientation = heading
  def setHeading(angle: Double) = rotateTo(angle)
  /** Runs `fn` against this shape on every animation tick. */
  def act(fn: Shape => Unit) = API.loop {
    fn(this)
  }
}
//T ShapeMethodsTest ends
// Mixin for shapes with elliptic curvature; `curvature` holds the x/y radii.
trait Rounded {
  val curvature: Point
  def radiusX = curvature.x
  def radiusY = curvature.y
}
/**
 * A shape anchored at an `origin`, with turtle-style movement commands.
 * setPosition/position compensate for the origin so positions are reported
 * in canvas coordinates.
 */
trait BaseShape extends Shape with core.RichTurtleCommands {
  val origin: Point
  import turtle.TurtleHelper._
  def setPosition(p: Point) {
    setPosition(p.x, p.y)
  }
  def setPosition(x: Double, y: Double) {
    Utils.runInSwingThread {
      node.setOffset(x - origin.x, y - origin.y)
      node.repaint()
    }
  }
  def position = Utils.runInSwingThreadAndWait {
    val o = node.getOffset
    Point(o.getX + origin.x, o.getY + origin.y)
  }
  /** Moves `n` units along the current heading `_theta`. */
  def forward(n: Double) {
    Utils.runInSwingThread {
      // NOTE(review): position/setPosition themselves dispatch to the Swing
      // thread; calling them from inside runInSwingThread relies on those
      // helpers being safe to re-enter on the EDT — confirm.
      val pos = position
      val xy = posAfterForward(pos.x, pos.y, _theta, n)
      setPosition(xy._1, xy._2)
    }
  }
  /** Turns the shape to face the point (x, y). */
  def towards(x: Double, y: Double) {
    Utils.runInSwingThread {
      val pos = position
      _theta = thetaTowards(pos.x, pos.y, x, y, _theta)
      node.setRotation(_theta)
      node.repaint()
    }
  }
}
/** A shape drawn via a Piccolo `PPath`, exposing stroke paint and width. */
trait StrokedShape extends BaseShape {
  val path: PPath
  def node = path
  def stroke_=(color: Color) {
    Utils.runInSwingThread {
      node.setStrokePaint(color)
      node.repaint()
    }
  }
  def stroke = Utils.runInSwingThreadAndWait {
    node.getStrokePaint
  }
  def stroke(color: Color) {
    stroke = color
  }
  /** Sets the pen thickness, with round caps and joins. */
  def strokeWidth(w: Double) {
    Utils.runInSwingThread {
      node.setStroke(new BasicStroke(w.toFloat, BasicStroke.CAP_ROUND, BasicStroke.JOIN_ROUND))
      node.repaint()
    }
  }
  // Assumes the current stroke is a BasicStroke (as set by strokeWidth
  // above); a custom stroke would make this cast fail.
  def strokeWidth = Utils.runInSwingThreadAndWait {
    node.getStroke.asInstanceOf[BasicStroke].getLineWidth
  }
  def setPenColor(color: Color) = stroke(color)
  def setPenThickness(w: Double) = strokeWidth(w)
}
// A stroked shape defined by an origin and an opposite endpoint.
trait SimpleShape extends StrokedShape {
  val endpoint: Point
  def width = endpoint.x - origin.x
  def height = endpoint.y - origin.y
}
// An elliptical shape: the origin-to-endpoint vector gives the radii, so
// width/height are the full diameters.
trait Elliptical extends SimpleShape with Rounded {
  val curvature = endpoint - origin
  override def width = 2 * radiusX
  override def height = 2 * radiusY
}
/** A text label anchored at `origin`, created at font size 14. */
class Text(val text: String, val origin: Point) extends BaseShape {
  import java.awt.Font
  val tnode = Utils.textNode(text, origin.x, origin.y, Impl.canvas.camScale, 14)
  def node = tnode
  def setPenColor(color: Color) {
    Utils.runInSwingThread {
      tnode.setTextPaint(color)
      tnode.repaint()
    }
  }
  // NOTE(review): always rebuilds the font with Font.PLAIN, discarding any
  // bold/italic style previously set on the node — confirm intended.
  def setFontSize(s: Int) {
    Utils.runInSwingThread {
      val font = new Font(node.getFont.getName, Font.PLAIN, s)
      tnode.setFont(font)
      tnode.repaint()
    }
  }
  /** Replaces the displayed string. */
  def setContent(content: String) {
    Utils.runInSwingThread {
      tnode.setText(content)
      tnode.repaint()
    }
  }
  override def toString = "Staging.Text(" + text + ", " + origin + ")"
}
// Factory: builds a Text shape on the Swing thread and registers its node
// with the figure.
object Text {
  def apply(s: String, p: Point) = Utils.runInSwingThreadAndWait {
    val shape = new Text(s, p)
    Impl.figure0.pnode(shape.node)
    shape
  }
}
// A shape defined by a vertex sequence; the first vertex is the origin.
// NOTE(review): `points(0)` throws on an empty sequence — confirm callers
// always supply at least one point.
trait PolyShape extends BaseShape {
  val points: Seq[Point]
  val origin = points(0)
  //def toPolygon: Polygon = Polygon(points)
  //def toPolyline: Polyline = Polyline(points)
}
/**
 * Shared vertex math for cross shapes. `crossDims` fills the `xdims`/`ydims`
 * lookup tables from the overall length/width, crossbar width `cw`, aspect
 * ratio `r` and the `greek` (equal-armed) flag; `points`/`outlinePoints`
 * then read those tables to produce the solid cross and its outline.
 */
trait CrossShape {
  // Dimension tables indexed 0..7; slot 0 stays 0.0.
  // (Were initialized with the deprecated trailing-dot literal `0.`.)
  val xdims = Array.fill(8)(0.0)
  val ydims = Array.fill(8)(0.0)

  /**
   * Computes the dimension tables and returns `this` for chaining.
   * Requires the crossbar to fit: cw < wid / 2 and cw < len / 2.
   * NOTE(review): the x table is derived from `wid` except slot 7 (= len),
   * and the y table mirrors it with `wid` in slot 7 — verify the axis
   * naming against the rendered shapes.
   */
  def crossDims(len: Double, wid: Double, cw: Double, r: Double = 1, greek: Boolean = false) = {
    require(wid / 2 > cw)
    require(len / 2 > cw)
    val a = (wid - cw) / 2
    val b = a / (if (greek) 1 else r)
    val c = cw / 6
    val d = c / 2
    xdims(1) = a - c
    xdims(2) = a
    xdims(3) = a + d
    xdims(4) = a + cw - d
    xdims(5) = a + cw
    xdims(6) = a + cw + c
    xdims(7) = len
    ydims(1) = b - c
    ydims(2) = b
    ydims(3) = b + d
    ydims(4) = b + cw - d
    ydims(5) = b + cw
    ydims(6) = b + cw + c
    ydims(7) = wid
    this
  }

  /** The 12 corners of the solid cross, in drawing order. */
  def points() = List(
    Point(xdims(0), ydims(5)), Point(xdims(2), ydims(5)),
    Point(xdims(2), ydims(7)), Point(xdims(5), ydims(7)),
    Point(xdims(5), ydims(5)), Point(xdims(7), ydims(5)),
    Point(xdims(7), ydims(2)), Point(xdims(5), ydims(2)),
    Point(xdims(5), ydims(0)), Point(xdims(2), ydims(0)),
    Point(xdims(2), ydims(2)), Point(xdims(0), ydims(2))
  )

  /** Corner points of the cross outline (four corner groups of six). */
  def outlinePoints() = List(
    Point(xdims(0), ydims(6)), Point(xdims(1), ydims(6)), Point(xdims(1), ydims(7)),
    Point(xdims(3), ydims(7)), Point(xdims(3), ydims(4)), Point(xdims(0), ydims(4)),
    Point(xdims(6), ydims(7)), Point(xdims(6), ydims(6)), Point(xdims(7), ydims(6)),
    Point(xdims(7), ydims(4)), Point(xdims(4), ydims(4)), Point(xdims(4), ydims(7)),
    Point(xdims(7), ydims(1)), Point(xdims(6), ydims(1)), Point(xdims(6), ydims(0)),
    Point(xdims(4), ydims(0)), Point(xdims(4), ydims(3)), Point(xdims(7), ydims(3)),
    Point(xdims(1), ydims(0)), Point(xdims(1), ydims(1)), Point(xdims(0), ydims(1)),
    Point(xdims(0), ydims(3)), Point(xdims(3), ydims(3)), Point(xdims(3), ydims(0))
  )
}
// Groups child shapes under a single parent PNode.
class Composite(val shapes: Seq[Shape]) extends Shape {
  val node = new PNode
  shapes foreach { shape =>
    node.addChild(shape.node)
  }
  override def toString = "Staging.Group(" + shapes.mkString(",") + ")"
}
// Factory: builds the composite on the Swing thread and adds its node to
// the figure.
object Composite {
  def apply(shapes: Seq[Shape]) = Utils.runInSwingThreadAndWait {
    val shape = new Composite(shapes)
    Impl.figure0.addPnode(shape.node)
    shape
  }
}
/**
 * Save/restore of the global figure's drawing style (fill, pen color,
 * stroke). `apply` installs a temporary style around `body` and restores
 * the previous style even if `body` throws.
 */
object Style {
  // Stack of saved (fill, pen color, stroke) triples; pushed by save,
  // popped by restore. Mutated only on the Swing thread.
  val savedStyles =
    new scala.collection.mutable.Stack[(Paint, Color, java.awt.Stroke)]()
  val f = Impl.figure0
  def save {
    Utils.runInSwingThread {
      savedStyles push Tuple3(f.fillColor, f.lineColor, f.lineStroke)
    }
  }
  def restore {
    Utils.runInSwingThread {
      if (savedStyles nonEmpty) {
        val (fc, sc, st) = savedStyles.pop
        f.setFillColor(fc)
        f.setPenColor(sc)
        f.setLineStroke(st)
      }
    }
  }
  /** Runs `body` with the given style, then restores the previous one. */
  def apply(fc: Color, sc: Color, sw: Double)(body: => Unit) = {
    save
    Utils.runInSwingThread {
      f.setFillColor(fc)
      f.setPenColor(sc)
      f.setPenThickness(sw)
    }
    try { body }
    finally { restore }
  }
}
/**
 * Swing-thread-safe wrapper around a Piccolo `PBounds` built from two
 * corner points (x1, y1)-(x2, y2).
 */
class Bounds(x1: Double, y1: Double, x2: Double, y2: Double) {
  // PBounds wants origin + extent, so the corner pair is converted here.
  val bounds = new PBounds(x1, y1, x2 - x1, y2 - y1)
  def getWidth = Utils.runInSwingThreadAndWait { bounds.getWidth }
  def getHeight = Utils.runInSwingThreadAndWait { bounds.getHeight }
  def getOrigin = Utils.runInSwingThreadAndWait {
    val p = bounds.getOrigin
    Point(p.getX, p.getY)
  }
  def getCenter2D = Utils.runInSwingThreadAndWait {
    val p = bounds.getCenter2D
    Point(p.getX, p.getY)
  }
  /** The corner opposite the origin. */
  def getExt = Utils.runInSwingThreadAndWait {
    val p = bounds.getOrigin
    Point(p.getX + bounds.getWidth, p.getY + bounds.getHeight)
  }
  def resetToZero = Utils.runInSwingThreadAndWait { bounds.resetToZero }
  def inset(dx: Double, dy: Double) = Utils.runInSwingThreadAndWait {
    bounds.inset(dx, dy)
  }
  /** Replaces the rectangle, again given as two corner points. */
  def setRect(x1: Double, y1: Double, x2: Double, y2: Double) {
    Utils.runInSwingThread {
      bounds.setRect(x1, y1, x2 - x1, y2 - y1)
    }
  }
}
// Factories mirroring the two natural representations: an existing PBounds
// (origin + extent) or two corner points.
object Bounds {
  def apply(b: PBounds) = Utils.runInSwingThreadAndWait {
    val x = b.getX
    val y = b.getY
    val w = b.getWidth
    val h = b.getHeight
    new Bounds(x, y, x + w, y + h)
  }
  def apply(x1: Double, y1: Double, x2: Double, y2: Double) = Utils.runInSwingThreadAndWait {
    new Bounds(x1, y1, x2, y2)
  }
}
| vnkmr7620/kojo | KojoEnv/src/net/kogics/kojo/staging/staging.scala | Scala | gpl-3.0 | 24,949 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import java.io.{File, ByteArrayInputStream, ByteArrayOutputStream}
import org.apache.spark.executor.ShuffleWriteMetrics
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.ShuffleBlockId
import org.apache.spark.util.collection.ExternalSorter
import org.apache.spark.util.Utils
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
import org.apache.spark.sql.catalyst.expressions.{UnsafeProjection, UnsafeRow}
import org.apache.spark.sql.types._
import org.apache.spark._
/**
 * A ByteArrayInputStream that records whether close() has been invoked;
 * used to verify that UnsafeRowSerializer closes its input streams.
 */
class ClosableByteArrayInputStream(buf: Array[Byte]) extends ByteArrayInputStream(buf) {
  var closed: Boolean = false

  override def close(): Unit = {
    super.close()
    closed = true
  }
}
/**
 * Tests for UnsafeRowSerializer: round-trip serialization of UnsafeRows,
 * stream-closing behavior, and interaction with the external sorter and the
 * tungsten-sort shuffle manager.
 */
class UnsafeRowSerializerSuite extends SparkFunSuite with LocalSparkContext {

  // Converts a generic Row to an UnsafeRow via a projection over `schema`.
  private def toUnsafeRow(row: Row, schema: Array[DataType]): UnsafeRow = {
    val converter = unsafeRowConverter(schema)
    converter(row)
  }

  private def unsafeRowConverter(schema: Array[DataType]): Row => UnsafeRow = {
    val converter = UnsafeProjection.create(schema)
    (row: Row) => {
      converter(CatalystTypeConverters.convertToCatalyst(row).asInstanceOf[InternalRow])
    }
  }

  test("toUnsafeRow() test helper method") {
    // This currently doesn't work because the generic getter throws an exception.
    val row = Row("Hello", 123)
    val unsafeRow = toUnsafeRow(row, Array(StringType, IntegerType))
    assert(row.getString(0) === unsafeRow.getUTF8String(0).toString)
    assert(row.getInt(1) === unsafeRow.getInt(1))
  }

  test("basic row serialization") {
    val rows = Seq(Row("Hello", 1), Row("World", 2))
    val unsafeRows = rows.map(row => toUnsafeRow(row, Array(StringType, IntegerType)))
    val serializer = new UnsafeRowSerializer(numFields = 2).newInstance()
    val baos = new ByteArrayOutputStream()
    val serializerStream = serializer.serializeStream(baos)
    for (unsafeRow <- unsafeRows) {
      serializerStream.writeKey(0)
      serializerStream.writeValue(unsafeRow)
    }
    serializerStream.close()
    val input = new ClosableByteArrayInputStream(baos.toByteArray)
    val deserializerIter = serializer.deserializeStream(input).asKeyValueIterator
    for (expectedRow <- unsafeRows) {
      val actualRow = deserializerIter.next().asInstanceOf[(Integer, UnsafeRow)]._2
      assert(expectedRow.getSizeInBytes === actualRow.getSizeInBytes)
      assert(expectedRow.getString(0) === actualRow.getString(0))
      assert(expectedRow.getInt(1) === actualRow.getInt(1))
    }
    assert(!deserializerIter.hasNext)
    // Exhausting the iterator must close the underlying stream.
    assert(input.closed)
  }

  test("close empty input stream") {
    val input = new ClosableByteArrayInputStream(Array.empty)
    val serializer = new UnsafeRowSerializer(numFields = 2).newInstance()
    val deserializerIter = serializer.deserializeStream(input).asKeyValueIterator
    assert(!deserializerIter.hasNext)
    assert(input.closed)
  }

  test("SPARK-10466: external sorter spilling with unsafe row serializer") {
    var sc: SparkContext = null
    var outputFile: File = null
    val oldEnv = SparkEnv.get // save the old SparkEnv, as it will be overwritten
    Utils.tryWithSafeFinally {
      // Tiny memory thresholds force the sorter to spill to disk.
      val conf = new SparkConf()
        .set("spark.shuffle.spill.initialMemoryThreshold", "1024")
        .set("spark.shuffle.sort.bypassMergeThreshold", "0")
        .set("spark.shuffle.memoryFraction", "0.0001")
      sc = new SparkContext("local", "test", conf)
      outputFile = File.createTempFile("test-unsafe-row-serializer-spill", "")
      // prepare data
      val converter = unsafeRowConverter(Array(IntegerType))
      val data = (1 to 1000).iterator.map { i =>
        (i, converter(Row(i)))
      }
      val sorter = new ExternalSorter[Int, UnsafeRow, UnsafeRow](
        partitioner = Some(new HashPartitioner(10)),
        serializer = Some(new UnsafeRowSerializer(numFields = 1)))
      // Ensure we spilled something and have to merge them later
      // (no spills before insertion, at least one after).
      assert(sorter.numSpills === 0)
      sorter.insertAll(data)
      assert(sorter.numSpills > 0)
      // Merging spilled files should not throw assertion error
      val taskContext =
        new TaskContextImpl(0, 0, 0, 0, null, null, InternalAccumulator.create(sc))
      taskContext.taskMetrics.shuffleWriteMetrics = Some(new ShuffleWriteMetrics)
      sorter.writePartitionedFile(ShuffleBlockId(0, 0, 0), taskContext, outputFile)
    } {
      // Clean up
      if (sc != null) {
        sc.stop()
      }
      // restore the spark env
      SparkEnv.set(oldEnv)
      if (outputFile != null) {
        outputFile.delete()
      }
    }
  }

  test("SPARK-10403: unsafe row serializer with UnsafeShuffleManager") {
    val conf = new SparkConf()
      .set("spark.shuffle.manager", "tungsten-sort")
    sc = new SparkContext("local", "test", conf)
    val row = Row("Hello", 123)
    val unsafeRow = toUnsafeRow(row, Array(StringType, IntegerType))
    val rowsRDD = sc.parallelize(Seq((0, unsafeRow), (1, unsafeRow), (0, unsafeRow)))
      .asInstanceOf[RDD[Product2[Int, InternalRow]]]
    val shuffled = new ShuffledRowRDD(rowsRDD, new UnsafeRowSerializer(2), 2)
    shuffled.count()
  }
}
| ArvinDevel/onlineAggregationOnSparkV2 | sql/core/src/test/scala/org/apache/spark/sql/execution/UnsafeRowSerializerSuite.scala | Scala | apache-2.0 | 6,071 |
package fe.CCC
import FormulaList.{One, Chc}
import LazyFormulaList.{LazyNil, LazyCons}
import OList.{ONil, OCons}
/**
 * Ad-hoc demo driver exercising Lazy values, FormulaList (choice-tagged
 * variational values), LazyFormulaList (lazy variational streams) and
 * OList. Output goes to stdout; a selection is a Set of tag names.
 */
object Main extends App {
  // Variational-value representation used throughout the demo.
  type VImpl[A] = FormulaList[A]
  val V = FormulaList
  // Splits a value on tag `s`: a+1 in one alternative, a+100 in the other.
  def f(s: String)(a: Int): VImpl[Int] = {
    Chc(tag(s), One(a + 1), One(a + 100))
  }
  println("--- Lazy ---")
  val lazyInt = Lazy(10)
  println(lazyInt.isDone, lazyInt)
  lazyInt() // force evaluation
  println(lazyInt.isDone, lazyInt)
  val y = 1 + lazyInt
  println(y)
  println("--- FormulaList ---")
  // Build a two-choice value and evaluate it under all four selections.
  var x = One(0)
  println(x)
  x = x.flatMap(f("A"))
  println(x)
  x = x.flatMap(f("B"))
  println(x)
  println(x(Set()))
  println(x(Set("A")))
  println(x(Set("B")))
  println(x(Set("A", "B")))
  //println((tag("a") & !tag("b")).evaluate(Set("a")))
  //println(Set(1,2,3) &~ Set(2,3,4))
  println("--- LazyFormulaList ---")
  // Infinite tagged streams: naturals and Fibonacci numbers.
  def genNat(n: Int): LazyFormulaList[Int] = {
    LazyCons(tag(n.toString), n, genNat(n+1))
  }
  def genFib(n: Int, a: Int, b: Int): LazyFormulaList[Int] = {
    LazyCons(tag("n=" + n.toString), a, genFib(n+1, b, a+b))
  }
  val lazyFL = genFib(0,0,1).map(x => -x)
  //println(lazyFL(Set("n=0")))
  //println(lazyFL(Set("n=1")))
  //println(lazyFL(Set("n=2")))
  println(lazyFL(Set("n=3")))
  println(lazyFL(Set("n=4")))
  println(lazyFL(Set("n=5")))
  println(lazyFL(Set("n=6")))
  //println(lazyFL(Set("n=7")))
  //println(lazyFL(Set("n=8")))
  println(lazyFL)
  val a = LazyNil(1)
  val b = LazyNil(2)
  val c = LazyFormulaList.Chc(tag("a"), a, b)
  println(c(Set("a")))
  println(c(Set()))
  println(c)
  println("--- OList ---")
  // Optional variational values: Some(a) when the tag is selected.
  def g(s: String)(a: Int): VImpl[Option[Int]] = {
    Chc(tag(s), One(Some(a)), One(None))
  }
  var ol = ONil(): OList[Int]
  for(i <- 0 to 2) {
    ol = OCons(g("a" + i.toString)(10*(i+1)), ol)
  }
  println(ol)
  println(ol.size())
  println(ol.get(0))
  println(ol.get(1))
  println(ol.get(2))
}
| FiveEye/CCC | scala/src/main/scala/Main.scala | Scala | mit | 1,889 |
/*
* Copyright 2012-2013 Stephane Godbillon (@sgodbillon) and Zenexity
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reactivemongo.core.protocol
import reactivemongo.io.netty.buffer.ByteBuf
import BufferAccessors._
/** A Mongo Wire Protocol operation. */
sealed trait Op {
  /** The operation code (wire-protocol opcode). */
  val code: Int
}
/**
 * A Mongo Wire Protocol request operation.
 *
 * Actually, all operations except Reply are requests.
 */
sealed trait RequestOp extends Op with ChannelBufferWritable {
  /** States if this request expects a response. */
  val expectsResponse: Boolean = false

  /** States if this request has to be run on a primary. */
  val requiresPrimary: Boolean = false
}
/** A request that needs to know the full collection name. */
sealed trait CollectionAwareRequestOp extends RequestOp {
  /** The full collection name (''<dbname.collectionname>'') */
  val fullCollectionName: String

  /** Database and collection name */
  // Splits at the first '.'. Note that `span` keeps the separator in the
  // second half, so `collectionName` retains the leading '.'
  // (e.g. "db.coll" => db = "db", collectionName = ".coll") —
  // NOTE(review): confirm consumers expect the dot.
  lazy val (db: String, collectionName: String) =
    fullCollectionName.span(_ != '.')
}

/** A request that will perform a write on the database */
sealed trait WriteRequestOp extends CollectionAwareRequestOp
/**
 * Reply operation.
 *
 * @param flags the response flags
 * @param cursorID cursor id; strictly positive when a server-side cursor remains open, 0 if none or exhausted
 * @param startingFrom index the returned documents start from
 * @param numberReturned number of documents present in this reply
 */
case class Reply(
  flags: Int,
  cursorID: Long,
  startingFrom: Int,
  numberReturned: Int) extends Op {
  val code = Reply.code

  /** States whether the cursor given in the request was found. */
  lazy val cursorNotFound = (flags & 0x01) != 0

  /** States whether the request encountered an error. */
  lazy val queryFailure = (flags & 0x02) != 0

  /** States whether the answering server supports the AwaitData query option. */
  lazy val awaitCapable = (flags & 0x08) != 0

  /** States whether this reply is in error. */
  lazy val inError = cursorNotFound || queryFailure

  // toString followed by the flag names that are set, in fixed order.
  lazy val stringify = {
    val notes = Seq(
      cursorNotFound -> "CursorNotFound;",
      queryFailure -> "QueryFailure;",
      awaitCapable -> "AwaitCapable").collect { case (true, note) => note }

    toString + " [" + notes.mkString + "]"
  }
}
object Reply extends ChannelBufferReadable[Reply] {
  /** OP_REPLY = 1 */
  val code = 1

  /**
   * Once the [[Reply]] is parsed, the buffer can immediately be released.
   * Reads the little-endian header fields in wire order:
   * flags (int32), cursorID (int64), startingFrom (int32), numberReturned (int32).
   */
  def readFrom(buffer: ByteBuf): Reply = Reply(
    buffer.readIntLE,
    buffer.readLongLE,
    buffer.readIntLE,
    buffer.readIntLE)
}
/**
 * Update operation (OP_UPDATE = 2001).
 *
 * @param flags Operation flags.
 */
case class Update(
  fullCollectionName: String,
  flags: Int) extends WriteRequestOp {
  val code = 2001
  // Wire layout: int32 ZERO, cstring fullCollectionName, int32 flags
  // (the selector/update documents are appended separately).
  val writeTo = writeTupleToBuffer3((0, fullCollectionName, flags)) _
  // NOTE(review): String.length counts UTF-16 chars, not UTF-8 bytes, so
  // `size` is wrong for non-ASCII collection names (same pattern in the
  // other ops in this file) — confirm upstream behavior.
  val size = 4 /* int32 = ZERO */ + 4 + fullCollectionName.length + 1
  override val requiresPrimary = true
}
/** Flag bits for the OP_UPDATE operation. */
object UpdateFlags {
  /** If set, the database inserts the supplied object when no matching document is found. */
  val Upsert: Int = 1 << 0

  /** If set, the database updates every matching document instead of only the first one. */
  val MultiUpdate: Int = 1 << 1
}
/**
 * Insert operation (OP_INSERT = 2002).
 *
 * @param flags Operation flags.
 */
case class Insert(
  flags: Int,
  fullCollectionName: String) extends WriteRequestOp {
  val code = 2002
  // Wire layout: int32 flags, cstring fullCollectionName
  // (the documents to insert are appended separately).
  val writeTo = writeTupleToBuffer2((flags, fullCollectionName)) _
  val size = 4 + fullCollectionName.length + 1
  override val requiresPrimary = true
}
/**
 * Query operation (OP_QUERY = 2004).
 *
 * @param flags the operation flags
 * @param fullCollectionName the full name of the queried collection
 * @param numberToSkip the number of documents to skip in the response.
 * @param numberToReturn The number of documents to return in the response. 0 means the server will choose.
 */
case class Query(
  flags: Int,
  fullCollectionName: String,
  numberToSkip: Int,
  numberToReturn: Int) extends CollectionAwareRequestOp {
  // A query always awaits a server reply.
  override val expectsResponse = true
  val code = 2004
  val size = 4 + fullCollectionName.length + 1 + 4 + 4
  val writeTo: ByteBuf => Unit = writeTupleToBuffer4(
    (flags, fullCollectionName, numberToSkip, numberToReturn)) _
}
/**
 * Flag bits for the OP_QUERY operation.
 */
object QueryFlags {
  /** Makes the cursor not to close after all the data is consumed. */
  val TailableCursor: Int = 1 << 1

  /** The query might be run on a secondary. */
  val SlaveOk: Int = 1 << 2

  /** OplogReplay */
  val OplogReplay: Int = 1 << 3

  /** The cursor will not expire automatically. */
  val NoCursorTimeout: Int = 1 << 4

  /**
   * Block a little while waiting for more data
   * instead of returning immediately if no data.
   * Use along with TailableCursor.
   */
  val AwaitData: Int = 1 << 5

  /** Exhaust */
  val Exhaust: Int = 1 << 6

  /**
   * The response can be partial;
   * if a shard is down, no error will be thrown.
   */
  val Partial: Int = 1 << 7
}
/**
 * GetMore operation (OP_GET_MORE = 2005).
 *
 * Allows to get more data from a cursor.
 * @param numberToReturn number of documents to return in the response. 0 means the server will choose.
 * @param cursorID id of the cursor.
 */
case class GetMore(
  fullCollectionName: String,
  numberToReturn: Int,
  cursorID: Long) extends CollectionAwareRequestOp {
  override val expectsResponse = true
  val code = 2005
  // Wire layout: int32 ZERO, cstring fullCollectionName,
  // int32 numberToReturn, int64 cursorID.
  val writeTo =
    writeTupleToBuffer4((0, fullCollectionName, numberToReturn, cursorID)) _
  val size = 4 /* int32 ZERO */ + fullCollectionName.length + 1 + 4 + 8
}
/**
 * Delete operation (OP_DELETE = 2006).
 *
 * @param flags operation flags.
 */
case class Delete(
  fullCollectionName: String,
  flags: Int) extends WriteRequestOp {
  val code = 2006
  // Wire layout: int32 ZERO, cstring fullCollectionName, int32 flags
  // (the selector document is appended separately).
  val writeTo = writeTupleToBuffer3((0, fullCollectionName, flags)) _
  val size = 4 /* int32 ZERO */ + fullCollectionName.length + 1 + 4
  override val requiresPrimary = true
}
/**
 * KillCursors operation (OP_KILL_CURSORS = 2007).
 *
 * @param cursorIDs ids of the cursors to kill. Should not be empty.
 */
case class KillCursors(cursorIDs: Set[Long]) extends RequestOp {
  val code = 2007
  // Wire layout: int32 ZERO, int32 number of cursor ids, int64* cursor ids.
  val writeTo: ByteBuf => Unit = { buffer: ByteBuf =>
    buffer writeIntLE 0
    buffer writeIntLE cursorIDs.size
    for (cursorID <- cursorIDs) {
      buffer writeLongLE cursorID
    }
  }
  val size = 4 /* int32 ZERO */ + 4 + cursorIDs.size * 8
}
| ornicar/ReactiveMongo | core/src/main/scala/core/protocol/operations.scala | Scala | apache-2.0 | 6,835 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.cluster.sdv.generated
import org.apache.spark.sql.common.util._
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
/**
 * Test class for BatchSortLoad2TestCase, verifying all scenarios
 */
class BatchSortLoad2TestCase extends QueryTest with BeforeAndAfterAll {
//To load data after setting only sort scope in carbon property file
// Creates the table, loads 7000 rows with forced bad-record handling,
// scans it, then drops it.
test("Batch_sort_Loading_001-01-01-01_001-TC_027", Include) {
  sql(s"""drop table if exists uniqdata11""").collect
  sql(s"""CREATE TABLE uniqdata11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
  sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata11 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
  sql(s"""select * from uniqdata11""").collect
  sql(s"""drop table uniqdata11""").collect
}
//To load 1 lac data load after setting only sort scope in carbon property file
// Same as TC_027 but with a 100k-row (1 lac) CSV.
test("Batch_sort_Loading_001-01-01-01_001-TC_028", Include) {
  sql(s"""drop table if exists uniqdata12""").collect
  sql(s"""CREATE TABLE uniqdata12 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
  sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/1lac_UniqData.csv' into table uniqdata12 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
  sql(s"""select * from uniqdata12""").collect
  sql(s"""drop table uniqdata12""").collect
}
//To load data after setting only sort scope in carbon property file with option file header in load
test("Batch_sort_Loading_001-01-01-01_001-TC_029", Include) {
sql(s"""drop table if exists uniqdata12a""").collect
sql(s"""CREATE TABLE uniqdata12a(CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata12a OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""select * from uniqdata12a""").collect
sql(s"""drop table uniqdata12a""").collect
}
//To load data after setting only sort scope in carbon property file without folder path in load
test("Batch_sort_Loading_001-01-01-01_001-TC_030", Include) {
intercept[Exception] {
sql(s"""CREATE TABLE uniqdata13 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA into table uniqdata13 OPTIONS('DELIMITER'=',' , 'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
}
sql(s"""drop table uniqdata13""").collect
}
//To load data after setting only sort scope in carbon property file without table_name in load
test("Batch_sort_Loading_001-01-01-01_001-TC_031", Include) {
intercept[Exception] {
sql(s"""CREATE TABLE uniqdata14 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table OPTIONS('DELIMITER'=',' , 'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
}
sql(s"""drop table uniqdata14""").collect
}
//To load data after setting only sort scope in carbon property file with option QUOTECHAR'='"'
test("Batch_sort_Loading_001-01-01-01_001-TC_032", Include) {
sql(s"""CREATE TABLE uniqdata15 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata15 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""select * from uniqdata15""").collect
sql(s"""drop table uniqdata15""").collect
}
//To load data after setting only sort scope in carbon property file with OPTIONS('COMMENTCHAR'='#')
test("Batch_sort_Loading_001-01-01-01_001-TC_033", Include) {
sql(s"""CREATE TABLE uniqdata16 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata16 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""select * from uniqdata16""").collect
sql(s"""drop table uniqdata16""").collect
}
//To load data after setting only sort scope in carbon property file with option 'MULTILINE'='true'
test("Batch_sort_Loading_001-01-01-01_001-TC_034", Include) {
sql(s"""CREATE TABLE uniqdata17 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata17 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""select * from uniqdata17""").collect
sql(s"""drop table uniqdata17""").collect
}
//To load data after setting only sort scope in carbon property file with OPTIONS('ESCAPECHAR'='\\')
test("Batch_sort_Loading_001-01-01-01_001-TC_035", Include) {
sql(s"""CREATE TABLE uniqdata18 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata18 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""select * from uniqdata18""").collect
sql(s"""drop table uniqdata18""").collect
}
//To load data after setting only sort scope in carbon property file with OPTIONS 'BAD_RECORDS_ACTION'='FORCE'
test("Batch_sort_Loading_001-01-01-01_001-TC_036", Include) {
sql(s"""CREATE TABLE uniqdata19b (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19b OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""select * from uniqdata19b""").collect
sql(s"""drop table uniqdata19b""").collect
}
//To load data after setting only sort scope in carbon property file with OPTIONS 'BAD_RECORDS_ACTION'='IGNORE'
test("Batch_sort_Loading_001-01-01-01_001-TC_037", Include) {
sql(s"""CREATE TABLE uniqdata19c (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19c OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='IGNORE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""select * from uniqdata19c""").collect
sql(s"""drop table uniqdata19c""").collect
}
//To load data after setting only sort scope in carbon property file with OPTIONS 'BAD_RECORDS_ACTION'='REDIRECT'
test("Batch_sort_Loading_001-01-01-01_001-TC_038", Include) {
sql(s"""CREATE TABLE uniqdata19d (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19d OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""select * from uniqdata19d""").collect
sql(s"""drop table uniqdata19d""").collect
}
//To load data after setting only sort scope in carbon property file with OPTIONS 'BAD_RECORDS_LOGGER_ENABLE'='FALSE'
test("Batch_sort_Loading_001-01-01-01_001-TC_039", Include) {
sql(s"""CREATE TABLE uniqdata19e (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19e OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""select * from uniqdata19e""").collect
sql(s"""drop table uniqdata19e""").collect
}
//To load data after setting only sort scope in carbon property file with OPTIONS 'BAD_RECORDS_LOGGER_ENABLE'='TRUE'
test("Batch_sort_Loading_001-01-01-01_001-TC_040", Include) {
sql(s"""CREATE TABLE uniqdata19f (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19f OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""select * from uniqdata19f""").collect
sql(s"""drop table uniqdata19f""").collect
}
//To load data after setting only sort scope in carbon property file with OPTIONS ‘SINGLE_PASS’=’true’
test("Batch_sort_Loading_001-01-01-01_001-TC_041", Include) {
sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',','QUOTECHAR'='"','SINGLE_PASS'='TRUE','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""select * from uniqdata20a""").collect
sql(s"""drop table uniqdata20a""").collect
}
//To load data after setting only sort scope in carbon property file with OPTIONS ‘SINGLE_PASS’=’false’
test("Batch_sort_Loading_001-01-01-01_001-TC_042", Include) {
sql(s"""CREATE TABLE uniqdata20b (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20b OPTIONS('DELIMITER'=',','QUOTECHAR'='"','SINGLE_PASS'='FALSE','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""select * from uniqdata20b""").collect
sql(s"""drop table uniqdata20b""").collect
}
//To load data after setting only sort scope in carbon property file with NO_INVERTED_INDEX
test("Batch_sort_Loading_001-01-01-01_001-TC_043", Include) {
sql(s"""CREATE TABLE uniqdata20c (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('NO_INVERTED_INDEX'='CUST_NAME')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20c OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""select * from uniqdata20c""").collect
sql(s"""drop table uniqdata20c""").collect
}
//To load data after setting only sort scope in carbon property file with COLUMNDICT
test("Batch_sort_Loading_001-01-01-01_001-TC_044", Include) {
sql(s"""drop table if exists t3""").collect
sql(s"""CREATE TABLE t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata'""").collect
sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/batchsort/data.csv' into table t3 options('COLUMNDICT'='country:$resourcesPath/Data/columndict/country.csv', 'SINGLE_PASS'='true')""").collect
sql(s"""drop table t3""").collect
}
//To load data after setting only sort scope in carbon property file with ALL_DICTIONARY_PATH
test("Batch_sort_Loading_001-01-01-01_001-TC_045", Include) {
sql(s"""drop table if exists t3""").collect
sql(s"""CREATE TABLE t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata'""").collect
sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/batchsort/data.csv' into table t3 options('ALL_DICTIONARY_PATH'='$resourcesPath/Data/columndict/data.dictionary', 'SINGLE_PASS'='true')""").collect
sql(s"""drop table t3""").collect
}
//To check incremental load one with batch_sort
test("Batch_sort_Loading_001-01-01-01_001-TC_047", Include) {
sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES ('SORT_SCOPE'='BATCH_SORT')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""select * from uniqdata20a""").collect
sql(s"""drop table uniqdata20a""").collect
}
//To check sort_scope option with a wrong value
test("Batch_sort_Loading_001-01-01-01_001-TC_049", Include) {
intercept[Exception] {
sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' , 'SORT_SCOPE'='ABCXYZ',‘SINGLE_PASS’=’true’,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
}
sql(s"""drop table uniqdata20a""").collect
}
//To check sort_scope option with null value
test("Batch_sort_Loading_001-01-01-01_001-TC_050", Include) {
intercept[Exception] {
sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' , 'SORT_SCOPE'='null',‘SINGLE_PASS’=’true’,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
}
sql(s"""drop table uniqdata20a""").collect
}
val prop = CarbonProperties.getInstance()
val p1 = prop.getProperty("carbon.load.sort.scope", CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)
override protected def beforeAll() {
// Adding new properties
prop.addProperty("carbon.load.sort.scope", "batch_sort")
}
override def afterAll: Unit = {
//Reverting to old
prop.addProperty("carbon.load.sort.scope", p1)
}
} | ravipesala/incubator-carbondata | integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad2TestCase.scala | Scala | apache-2.0 | 25,188 |
package models
import models.messages.logger.Log
import models.messages.persistenceManaging.DataSetEntry
import play.api.libs.json._
/**
* Created by basso on 22/04/15.
*
* Writes converters for the push types in the application
*/
package object jsonWrites {

  /** JSON serializer for [[Log]] entries pushed to the front end. */
  implicit val logWrites: Writes[Log] = new Writes[Log] {
    def writes(entry: Log): JsValue = {
      // The status constant's first two characters are dropped, remainder upper-cased.
      val statusLabel = entry.status.toString.substring(2).toUpperCase
      Json.obj(
        "status" -> statusLabel,
        "message" -> entry.msg,
        "activity" -> entry.content.logWrite
      )
    }
  }

  /** JSON serializer for [[DataSetEntry]] rows pushed to the front end. */
  implicit val dataSetEntryWrites: Writes[DataSetEntry] = new Writes[DataSetEntry] {
    def writes(entry: DataSetEntry): JsValue = {
      // Front-end convention: algorithm names carry an "Mn" prefix.
      val algoLabel = JsString("Mn" + entry.targetAlgorithm.toString)
      Json.obj(
        "name" -> entry.name,
        "desc" -> entry.desc,
        "type" -> entry.datatype,
        "algo" -> algoLabel,
        "status" -> entry.status,
        "source" -> entry.source
      )
    }
  }

  //TODO: implement a miner results
}
| ChetanBhasin/Veracious | app/models/jsonWrites/package.scala | Scala | apache-2.0 | 910 |
package com.twitter.concurrent
import java.util.concurrent.RejectedExecutionException
import java.util.ArrayDeque
import com.twitter.util.{Future, Promise, Throw, NonFatal}
/**
* An AsyncSemaphore is a traditional semaphore but with asynchronous
* execution. Grabbing a permit returns a Future[Permit]
*/
class AsyncSemaphore protected (initialPermits: Int, maxWaiters: Option[Int]) {
  import AsyncSemaphore._

  // Public constructors: unbounded wait queue, or a queue capped at `maxWaiters`.
  def this(initialPermits: Int = 0) = this(initialPermits, None)
  def this(initialPermits: Int, maxWaiters: Int) = this(initialPermits, Some(maxWaiters))
  require(maxWaiters.getOrElse(0) >= 0)

  // FIFO queue of pending acquirers. Both fields are guarded by this object's monitor.
  private[this] val waitq = new ArrayDeque[Promise[Permit]]
  private[this] var availablePermits = initialPermits

  private[this] class SemaphorePermit extends Permit {
    /**
     * Indicate that you are done with your Permit.
     */
    override def release() {
      // Under the lock: hand the permit to the oldest waiter if one exists,
      // otherwise return it to the pool.
      val run = AsyncSemaphore.this.synchronized {
        val next = waitq.pollFirst()
        if (next == null) availablePermits += 1
        next
      }

      // Satisfy the waiter outside the lock so its callbacks do not run synchronized.
      if (run != null) run.setValue(new SemaphorePermit)
    }
  }

  def numWaiters: Int = synchronized(waitq.size)
  def numPermitsAvailable: Int = synchronized(availablePermits)

  /**
   * Acquire a Permit, asynchronously. Be sure to permit.release() in a 'finally'
   * block of your onSuccess() callback.
   *
   * Interrupting this future is only advisory, and will not release the permit
   * if the future has already been satisfied.
   *
   * @return a Future[Permit] when the Future is satisfied, computation can proceed,
   * or a Future.Exception[RejectedExecutionException] if the configured maximum number of waitq
   * would be exceeded.
   */
  def acquire(): Future[Permit] = {
    synchronized {
      if (availablePermits > 0) {
        // Fast path: a permit is free, grant it immediately.
        availablePermits -= 1
        Future.value(new SemaphorePermit)
      } else {
        maxWaiters match {
          case Some(max) if (waitq.size >= max) =>
            // Wait queue is full: reject instead of queueing.
            MaxWaitersExceededException
          case _ =>
            val promise = new Promise[Permit]
            promise.setInterruptHandler { case t: Throwable =>
              // Interrupt matters only while still waiting: updateIfEmpty fails
              // (and we leave the queue untouched) if the permit was already granted.
              AsyncSemaphore.this.synchronized {
                if (promise.updateIfEmpty(Throw(t)))
                  waitq.remove(promise)
              }
            }
            waitq.addLast(promise)
            promise
        }
      }
    }
  }

  /**
   * Execute the function asynchronously when a permit becomes available.
   *
   * If the function throws a non-fatal exception, the exception is returned as part of the Future.
   * For all exceptions, the permit would be released before returning.
   *
   * @return a Future[T] equivalent to the return value of the input function. If the configured
   *         maximum value of waitq is reached, Future.Exception[RejectedExecutionException] is
   *         returned.
   */
  def acquireAndRun[T](func: => Future[T]): Future[T] = {
    acquire() flatMap { permit =>
      val f = try func catch {
        case NonFatal(e) =>
          // Non-fatal: surface as a failed Future; `ensure` below still releases.
          Future.exception(e)
        case e =>
          // Fatal: release eagerly, then let the error propagate to the caller.
          permit.release()
          throw e
      }
      f ensure {
        permit.release()
      }
    }
  }

  /**
   * Execute the function when a permit becomes available.
   *
   * If the function throws an exception, the exception is returned as part of the Future.
   * For all exceptions, the permit would be released before returning.
   *
   * @return a Future[T] equivalent to the return value of the input function. If the configured
   *         maximum value of waitq is reached, Future.Exception[RejectedExecutionException] is
   *         returned.
   */
  def acquireAndRunSync[T](func: => T): Future[T] = {
    acquire() flatMap { permit =>
      // Future(func) captures any exception thrown by func as a failed Future.
      Future(func) ensure {
        permit.release()
      }
    }
  }
}
object AsyncSemaphore {
  // Single pre-built failed Future shared by every rejected acquire(); being a val,
  // it avoids allocating a new exception each time the waiter cap is hit.
  private val MaxWaitersExceededException =
    Future.exception(new RejectedExecutionException("Max waiters exceeded"))
}
| mosesn/util | util-core/src/main/scala/com/twitter/concurrent/AsyncSemaphore.scala | Scala | apache-2.0 | 3,950 |
/**
* Copyright 2013 Gianluca Amato
*
* This file is part of JANDOM: JVM-based Analyzer for Numerical DOMains
* JANDOM is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JANDOM is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty ofa
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with JANDOM. If not, see <http://www.gnu.org/licenses/>.
*/
package it.unich.jandom.ppfactories
/**
* This is the trait for "per program point" factories, i.e. functions
* which build objects parameterized by program points.
* @tparam ProgramPoint the type of program point
* @tapram T the type of values returned by the factory
* @author Gianluca Amato <gamato@unich.it>
*/
trait PPFactory[-ProgramPoint, +T] extends Function1[ProgramPoint, T]
object PPFactory {
  /**
   * A "per program point" implicit factory which ignores the program point and
   * always returns the same value.
   * @tparam T the type of the object built by the factory
   * @param constant the object returned for every program point
   * @author Gianluca Amato <gamato@unich.it>
   */
  implicit class ConstantFactory[T](private val constant: T) extends PPFactory[Any, T] {
    override def apply(pp: Any): T = constant
  }
}
| rubino22/JDBeta | core/src/main/scala/it/unich/jandom/ppfactories/PPFactory.scala | Scala | lgpl-3.0 | 1,528 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s.integrationtest
import java.io.{File, FileOutputStream}
import java.nio.file.Paths
import java.util.{Properties, UUID}
import com.google.common.base.Charsets
import com.google.common.io.Files
import io.fabric8.kubernetes.client.internal.readiness.Readiness
import org.scalatest.BeforeAndAfter
import org.scalatest.concurrent.{Eventually, PatienceConfiguration}
import org.scalatest.time.{Minutes, Seconds, Span}
import scala.collection.JavaConverters._
import org.apache.spark.{SparkConf, SparkFunSuite, SSLOptions}
import org.apache.spark.deploy.k8s.SSLUtils
import org.apache.spark.deploy.k8s.config._
import org.apache.spark.deploy.k8s.integrationtest.backend.IntegrationTestBackendFactory
import org.apache.spark.deploy.k8s.integrationtest.backend.minikube.Minikube
import org.apache.spark.deploy.k8s.integrationtest.constants.MINIKUBE_TEST_BACKEND
import org.apache.spark.deploy.k8s.submit.{Client, ClientArguments, JavaMainAppResource, KeyAndCertPem, MainAppResource, PythonMainAppResource, RMainAppResource}
import org.apache.spark.launcher.SparkLauncher
import org.apache.spark.util.Utils
/**
 * Integration tests that submit Spark applications (JVM, PySpark and SparkR jobs) to a
 * Kubernetes cluster and verify completion by polling the driver pod's log for expected
 * output lines. All tests assume the Minikube backend (see the `assume` at the top of each
 * test) and exercise different resource-distribution modes: the resource staging server
 * (with and without SSL), container-local resources, and remote static-asset URIs.
 */
private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
import KubernetesSuite._
private val testBackend = IntegrationTestBackendFactory.getTestBackend()
// Unique per-suite label value used to locate the driver pod of each submitted app.
// Dashes are stripped — presumably to keep the value safe as a Kubernetes label; TODO confirm.
private val APP_LOCATOR_LABEL = UUID.randomUUID().toString.replaceAll("-", "")
private var kubernetesTestComponents: KubernetesTestComponents = _
private var sparkConf: SparkConf = _
private var resourceStagingServerLauncher: ResourceStagingServerLauncher = _
private var staticAssetServerLauncher: StaticAssetServerLauncher = _
// Initializes the backend once for the whole suite and builds both launchers scoped to
// the suite's test namespace.
override def beforeAll(): Unit = {
testBackend.initialize()
kubernetesTestComponents = new KubernetesTestComponents(testBackend.getKubernetesClient)
resourceStagingServerLauncher = new ResourceStagingServerLauncher(
kubernetesTestComponents.kubernetesClient.inNamespace(kubernetesTestComponents.namespace))
staticAssetServerLauncher = new StaticAssetServerLauncher(
kubernetesTestComponents.kubernetesClient.inNamespace(kubernetesTestComponents.namespace))
}
override def afterAll(): Unit = {
testBackend.cleanUp()
}
// Each test gets a fresh SparkConf (tagged with the app-locator label) and a fresh namespace.
before {
sparkConf = kubernetesTestComponents.newSparkConf()
.set(INIT_CONTAINER_DOCKER_IMAGE, s"spark-init:latest")
.set(DRIVER_DOCKER_IMAGE, s"spark-driver:latest")
.set(s"${KUBERNETES_DRIVER_LABEL_PREFIX}spark-app-locator", APP_LOCATOR_LABEL)
kubernetesTestComponents.createNamespace()
}
after {
kubernetesTestComponents.deleteNamespace()
}
// PySpark job staged from the submitter's machine, with an extra --py-files dependency.
test("Run PySpark Job on file from SUBMITTER with --py-files") {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
launchStagingServer(SSLOptions(), None)
sparkConf
.set(DRIVER_DOCKER_IMAGE,
System.getProperty("spark.docker.test.driverImage", "spark-driver-py:latest"))
.set(EXECUTOR_DOCKER_IMAGE,
System.getProperty("spark.docker.test.executorImage", "spark-executor-py:latest"))
runPySparkPiAndVerifyCompletion(
PYSPARK_PI_SUBMITTER_LOCAL_FILE_LOCATION,
Seq(PYSPARK_SORT_CONTAINER_LOCAL_FILE_LOCATION)
)
}
// PySpark job resolved from inside the container image; no staging server needed.
test("Run PySpark Job on file from CONTAINER with spark.jar defined") {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
sparkConf.setJars(Seq(CONTAINER_LOCAL_HELPER_JAR_PATH))
sparkConf
.set(DRIVER_DOCKER_IMAGE,
System.getProperty("spark.docker.test.driverImage", "spark-driver-py:latest"))
.set(EXECUTOR_DOCKER_IMAGE,
System.getProperty("spark.docker.test.executorImage", "spark-executor-py:latest"))
runPySparkPiAndVerifyCompletion(PYSPARK_PI_CONTAINER_LOCAL_FILE_LOCATION, Seq.empty[String])
}
test("Run SparkR Job on file locally") {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
launchStagingServer(SSLOptions(), None)
sparkConf
.set(DRIVER_DOCKER_IMAGE,
System.getProperty("spark.docker.test.driverImage", "spark-driver-r:latest"))
.set(EXECUTOR_DOCKER_IMAGE,
System.getProperty("spark.docker.test.executorImage", "spark-executor-r:latest"))
runSparkRAndVerifyCompletion(SPARK_R_DATAFRAME_CONTAINER_LOCAL_FILE_LOCATION)
}
test("Run SparkR Job on file from SUBMITTER") {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
sparkConf.setJars(Seq(CONTAINER_LOCAL_HELPER_JAR_PATH))
sparkConf
.set(DRIVER_DOCKER_IMAGE,
System.getProperty("spark.docker.test.driverImage", "spark-driver-r:latest"))
.set(EXECUTOR_DOCKER_IMAGE,
System.getProperty("spark.docker.test.executorImage", "spark-executor-r:latest"))
runSparkRAndVerifyCompletion(SPARK_R_DATAFRAME_SUBMITTER_FILE_LOCATION)
}
test("Simple submission test with the resource staging server.") {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
launchStagingServer(SSLOptions(), None)
runSparkPiAndVerifyCompletion(SUBMITTER_LOCAL_MAIN_APP_RESOURCE)
}
// Exercises TLS between the submission client and the staging server using a generated
// keystore/truststore pair bound to the Minikube IP.
test("Enable SSL on the resource staging server") {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
val keyStoreAndTrustStore = SSLUtils.generateKeyStoreTrustStorePair(
ipAddress = Minikube.getMinikubeIp,
keyStorePassword = "keyStore",
keyPassword = "key",
trustStorePassword = "trustStore")
sparkConf.set(RESOURCE_STAGING_SERVER_SSL_ENABLED, true)
.set("spark.ssl.kubernetes.resourceStagingServer.keyStore",
keyStoreAndTrustStore.keyStore.getAbsolutePath)
.set("spark.ssl.kubernetes.resourceStagingServer.trustStore",
keyStoreAndTrustStore.trustStore.getAbsolutePath)
.set("spark.ssl.kubernetes.resourceStagingServer.keyStorePassword", "keyStore")
.set("spark.ssl.kubernetes.resourceStagingServer.keyPassword", "key")
.set("spark.ssl.kubernetes.resourceStagingServer.trustStorePassword", "trustStore")
launchStagingServer(SSLOptions(
enabled = true,
keyStore = Some(keyStoreAndTrustStore.keyStore),
trustStore = Some(keyStoreAndTrustStore.trustStore),
keyStorePassword = Some("keyStore"),
keyPassword = Some("key"),
trustStorePassword = Some("trustStore")),
None)
runSparkPiAndVerifyCompletion(SUBMITTER_LOCAL_MAIN_APP_RESOURCE)
}
test("Use container-local resources without the resource staging server") {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
sparkConf.setJars(Seq(CONTAINER_LOCAL_HELPER_JAR_PATH))
runSparkPiAndVerifyCompletion(CONTAINER_LOCAL_MAIN_APP_RESOURCE)
}
// Dynamic allocation requires the external shuffle service, deployed here as a DaemonSet
// that the executors discover through the app=spark-shuffle-service label.
test("Dynamic executor scaling basic test") {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
launchStagingServer(SSLOptions(), None)
createShuffleServiceDaemonSet()
sparkConf.setJars(Seq(CONTAINER_LOCAL_HELPER_JAR_PATH))
sparkConf.set("spark.dynamicAllocation.enabled", "true")
sparkConf.set("spark.local.dir", "/tmp")
sparkConf.set("spark.shuffle.service.enabled", "true")
sparkConf.set("spark.kubernetes.shuffle.labels", "app=spark-shuffle-service")
sparkConf.set("spark.kubernetes.shuffle.namespace", kubernetesTestComponents.namespace)
sparkConf.set("spark.app.name", "group-by-test")
runSparkApplicationAndVerifyCompletion(
JavaMainAppResource(SUBMITTER_LOCAL_MAIN_APP_RESOURCE),
GROUP_BY_MAIN_CLASS,
Seq("The Result is"),
Array.empty[String],
Seq.empty[String])
}
test("Use remote resources without the resource staging server.") {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
val assetServerUri = staticAssetServerLauncher.launchStaticAssetServer()
sparkConf.setJars(Seq(
s"$assetServerUri/${EXAMPLES_JAR_FILE.getName}",
s"$assetServerUri/${HELPER_JAR_FILE.getName}"
))
runSparkPiAndVerifyCompletion(SparkLauncher.NO_RESOURCE)
}
test("Mix remote resources with submitted ones.") {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
launchStagingServer(SSLOptions(), None)
val assetServerUri = staticAssetServerLauncher.launchStaticAssetServer()
sparkConf.setJars(Seq(
SUBMITTER_LOCAL_MAIN_APP_RESOURCE, s"$assetServerUri/${HELPER_JAR_FILE.getName}"
))
runSparkPiAndVerifyCompletion(SparkLauncher.NO_RESOURCE)
}
test("Use key and certificate PEM files for TLS.") {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
val keyAndCertificate = SSLUtils.generateKeyCertPemPair(Minikube.getMinikubeIp)
launchStagingServer(
SSLOptions(enabled = true),
Some(keyAndCertificate))
sparkConf.set(RESOURCE_STAGING_SERVER_SSL_ENABLED, true)
.set(
RESOURCE_STAGING_SERVER_CLIENT_CERT_PEM.key, keyAndCertificate.certPem.getAbsolutePath)
runSparkPiAndVerifyCompletion(SUBMITTER_LOCAL_MAIN_APP_RESOURCE)
}
// Mutual-TLS credentials for the driver's own requests to the Kubernetes API server.
test("Use client key and client cert file when requesting executors") {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
sparkConf.setJars(Seq(
CONTAINER_LOCAL_MAIN_APP_RESOURCE,
CONTAINER_LOCAL_HELPER_JAR_PATH))
sparkConf.set(
s"$APISERVER_AUTH_DRIVER_CONF_PREFIX.$CLIENT_KEY_FILE_CONF_SUFFIX",
kubernetesTestComponents.clientConfig.getClientKeyFile)
sparkConf.set(
s"$APISERVER_AUTH_DRIVER_CONF_PREFIX.$CLIENT_CERT_FILE_CONF_SUFFIX",
kubernetesTestComponents.clientConfig.getClientCertFile)
sparkConf.set(
s"$APISERVER_AUTH_DRIVER_CONF_PREFIX.$CA_CERT_FILE_CONF_SUFFIX",
kubernetesTestComponents.clientConfig.getCaCertFile)
runSparkPiAndVerifyCompletion(SparkLauncher.NO_RESOURCE)
}
test("Added files should be placed in the driver's working directory.") {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
launchStagingServer(SSLOptions(), None)
val testExistenceFileTempDir = Utils.createTempDir(namePrefix = "test-existence-file-temp-dir")
val testExistenceFile = new File(testExistenceFileTempDir, "input.txt")
Files.write(TEST_EXISTENCE_FILE_CONTENTS, testExistenceFile, Charsets.UTF_8)
sparkConf.set("spark.files", testExistenceFile.getAbsolutePath)
runSparkApplicationAndVerifyCompletion(
JavaMainAppResource(SUBMITTER_LOCAL_MAIN_APP_RESOURCE),
FILE_EXISTENCE_MAIN_CLASS,
Seq(
s"File found at /opt/spark/work-dir/${testExistenceFile.getName} with correct contents.",
s"File found on the executors at the relative path ${testExistenceFile.getName} with" +
s" the correct contents."),
Array(testExistenceFile.getName, TEST_EXISTENCE_FILE_CONTENTS),
Seq.empty[String])
}
// Verifies quoting of JVM options that contain spaces, on both driver and executors; the
// expected key/value pairs are shipped to the pods as properties files via spark.files.
test("Setting JVM options on the driver and executors with spaces.") {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
launchStagingServer(SSLOptions(), None)
val driverJvmOptionsFile = storeJvmOptionsInTempFile(
Map("simpleDriverConf" -> "simpleDriverConfValue",
"driverconfwithspaces" -> "driver conf with spaces value"),
"driver-jvm-options.properties",
"JVM options that should be set on the driver.")
val executorJvmOptionsFile = storeJvmOptionsInTempFile(
Map("simpleExecutorConf" -> "simpleExecutorConfValue",
"executor conf with spaces" -> "executor conf with spaces value"),
"executor-jvm-options.properties",
"JVM options that should be set on the executors.")
sparkConf.set(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS,
"-DsimpleDriverConf=simpleDriverConfValue" +
" -Ddriverconfwithspaces='driver conf with spaces value'")
sparkConf.set(SparkLauncher.EXECUTOR_EXTRA_JAVA_OPTIONS,
"-DsimpleExecutorConf=simpleExecutorConfValue" +
" -D\\'executor conf with spaces\\'=\\'executor conf with spaces value\\'")
sparkConf.set("spark.files",
Seq(driverJvmOptionsFile.getAbsolutePath, executorJvmOptionsFile.getAbsolutePath)
.mkString(","))
runSparkApplicationAndVerifyCompletion(
JavaMainAppResource(SUBMITTER_LOCAL_MAIN_APP_RESOURCE),
JAVA_OPTIONS_MAIN_CLASS,
Seq(s"All expected JVM options were present on the driver and executors."),
Array(driverJvmOptionsFile.getName, executorJvmOptionsFile.getName),
Seq.empty[String])
}
test("Submit small local files without the resource staging server.") {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
sparkConf.setJars(Seq(CONTAINER_LOCAL_HELPER_JAR_PATH))
val testExistenceFileTempDir = Utils.createTempDir(namePrefix = "test-existence-file-temp-dir")
val testExistenceFile = new File(testExistenceFileTempDir, "input.txt")
Files.write(TEST_EXISTENCE_FILE_CONTENTS, testExistenceFile, Charsets.UTF_8)
sparkConf.set("spark.files", testExistenceFile.getAbsolutePath)
runSparkApplicationAndVerifyCompletion(
JavaMainAppResource(CONTAINER_LOCAL_MAIN_APP_RESOURCE),
FILE_EXISTENCE_MAIN_CLASS,
Seq(
s"File found at /opt/spark/work-dir/${testExistenceFile.getName} with correct contents.",
s"File found on the executors at the relative path ${testExistenceFile.getName} with" +
s" the correct contents."),
Array(testExistenceFile.getName, TEST_EXISTENCE_FILE_CONTENTS),
Seq.empty[String])
}
// App names feed into Kubernetes resource names, which have length limits; 160 chars here.
test("Use a very long application name.") {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
sparkConf.setJars(Seq(CONTAINER_LOCAL_HELPER_JAR_PATH)).setAppName("long" * 40)
runSparkPiAndVerifyCompletion(CONTAINER_LOCAL_MAIN_APP_RESOURCE)
}
// Launches the resource staging server (optionally TLS-enabled) and points the SparkConf at
// its URI on the Minikube node.
private def launchStagingServer(
resourceStagingServerSslOptions: SSLOptions, keyAndCertPem: Option[KeyAndCertPem]): Unit = {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
val resourceStagingServerPort = resourceStagingServerLauncher.launchStagingServer(
resourceStagingServerSslOptions, keyAndCertPem)
val resourceStagingServerUriScheme = if (resourceStagingServerSslOptions.enabled) {
"https"
} else {
"http"
}
sparkConf.set(RESOURCE_STAGING_SERVER_URI,
s"$resourceStagingServerUriScheme://" +
s"${Minikube.getMinikubeIp}:$resourceStagingServerPort")
}
// Convenience wrapper: runs the SparkPi example and expects "Pi is roughly 3" in the log.
private def runSparkPiAndVerifyCompletion(appResource: String): Unit = {
runSparkApplicationAndVerifyCompletion(
JavaMainAppResource(appResource),
SPARK_PI_MAIN_CLASS,
Seq("Pi is roughly 3"),
Array.empty[String],
Seq.empty[String])
}
private def runPySparkPiAndVerifyCompletion(
appResource: String, otherPyFiles: Seq[String]): Unit = {
runSparkApplicationAndVerifyCompletion(
PythonMainAppResource(appResource),
PYSPARK_PI_MAIN_CLASS,
Seq("Submitting 5 missing tasks from ResultStage", "Pi is roughly 3"),
Array("5"),
otherPyFiles)
}
private def runSparkRAndVerifyCompletion(
appResource: String): Unit = {
runSparkApplicationAndVerifyCompletion(
RMainAppResource(appResource),
SPARK_R_MAIN_CLASS,
Seq("name: string (nullable = true)", "1 Justin"),
Array.empty[String],
Seq.empty[String])
}
// Submits the application through the in-process Client and then polls the driver pod log
// until every expected line appears (or TIMEOUT elapses).
// NOTE(review): the pod lookup assumes the driver pod already exists immediately after
// Client.run returns — get(0) would throw on an empty list; confirm Client.run blocks until
// the driver pod is created.
private def runSparkApplicationAndVerifyCompletion(
appResource: MainAppResource,
mainClass: String,
expectedLogOnCompletion: Seq[String],
appArgs: Array[String],
otherPyFiles: Seq[String]): Unit = {
val clientArguments = ClientArguments(
mainAppResource = appResource,
mainClass = mainClass,
driverArgs = appArgs,
otherPyFiles = otherPyFiles,
hadoopConfDir = None)
Client.run(sparkConf, clientArguments)
val driverPod = kubernetesTestComponents.kubernetesClient
.pods()
.withLabel("spark-app-locator", APP_LOCATOR_LABEL)
.list()
.getItems
.get(0)
Eventually.eventually(TIMEOUT, INTERVAL) {
expectedLogOnCompletion.foreach { e =>
assert(kubernetesTestComponents.kubernetesClient
.pods()
.withName(driverPod.getMetadata.getName)
.getLog
.contains(e), "The application did not complete.")
}
}
}
// Deploys the external shuffle service as a DaemonSet (hostPath /tmp as the shuffle dir)
// and blocks until its first pod reports Ready.
private def createShuffleServiceDaemonSet(): Unit = {
val ds = kubernetesTestComponents.kubernetesClient.extensions().daemonSets()
.createNew()
.withNewMetadata()
.withName("shuffle")
.endMetadata()
.withNewSpec()
.withNewTemplate()
.withNewMetadata()
.withLabels(Map("app" -> "spark-shuffle-service").asJava)
.endMetadata()
.withNewSpec()
.addNewVolume()
.withName("shuffle-dir")
.withNewHostPath()
.withPath("/tmp")
.endHostPath()
.endVolume()
.addNewContainer()
.withName("shuffle")
.withImage("spark-shuffle:latest")
.withImagePullPolicy("IfNotPresent")
.addNewVolumeMount()
.withName("shuffle-dir")
.withMountPath("/tmp")
.endVolumeMount()
.endContainer()
.endSpec()
.endTemplate()
.endSpec()
.done()
// wait for daemonset to become available.
Eventually.eventually(TIMEOUT, INTERVAL) {
val pods = kubernetesTestComponents.kubernetesClient.pods()
.withLabel("app", "spark-shuffle-service").list().getItems
if (pods.size() == 0 || !Readiness.isReady(pods.get(0))) {
throw ShuffleNotReadyException
}
}
}
// Writes the given key/value pairs as a Java properties file into a fresh temp dir and
// returns the file, for shipping to pods via spark.files.
private def storeJvmOptionsInTempFile(
options: Map[String, String],
propertiesFileName: String,
comments: String): File = {
val tempDir = Utils.createTempDir()
val propertiesFile = new File(tempDir, propertiesFileName)
val properties = new Properties()
options.foreach { case (propKey, propValue) => properties.setProperty(propKey, propValue) }
Utils.tryWithResource(new FileOutputStream(propertiesFile)) { os =>
properties.store(os, comments)
}
propertiesFile
}
}
/**
 * Shared constants for the integration tests: locations of the pre-built test jars,
 * container-local and submitter-local resource URIs, main classes of the test jobs,
 * and the polling timeout/interval used when watching driver pods.
 */
private[spark] object KubernetesSuite {
// NOTE(review): listFiles()(0) assumes the build produced exactly one artifact in each
// target directory and throws NPE/AIOOBE otherwise; the suite relies on the build having
// run first.
val EXAMPLES_JAR_FILE = Paths.get("target", "integration-tests-spark-jobs")
.toFile
.listFiles()(0)
val HELPER_JAR_FILE = Paths.get("target", "integration-tests-spark-jobs-helpers")
.toFile
.listFiles()(0)
val SUBMITTER_LOCAL_MAIN_APP_RESOURCE = s"file://${EXAMPLES_JAR_FILE.getAbsolutePath}"
// "local://" URIs refer to paths inside the Docker images, not the submitter's filesystem.
val CONTAINER_LOCAL_MAIN_APP_RESOURCE = s"local:///opt/spark/examples/" +
s"integration-tests-jars/${EXAMPLES_JAR_FILE.getName}"
val CONTAINER_LOCAL_HELPER_JAR_PATH = s"local:///opt/spark/examples/" +
s"integration-tests-jars/${HELPER_JAR_FILE.getName}"
// Overall wait for expected log lines, and the polling interval between checks.
val TIMEOUT = PatienceConfiguration.Timeout(Span(2, Minutes))
val INTERVAL = PatienceConfiguration.Interval(Span(2, Seconds))
val SPARK_PI_MAIN_CLASS = "org.apache.spark.deploy.k8s" +
".integrationtest.jobs.SparkPiWithInfiniteWait"
val PYSPARK_PI_MAIN_CLASS = "org.apache.spark.deploy.PythonRunner"
val SPARK_R_MAIN_CLASS = "org.apache.spark.deploy.RRunner"
val PYSPARK_PI_CONTAINER_LOCAL_FILE_LOCATION =
"local:///opt/spark/examples/src/main/python/pi.py"
val PYSPARK_SORT_CONTAINER_LOCAL_FILE_LOCATION =
"local:///opt/spark/examples/src/main/python/sort.py"
val SPARK_R_DATAFRAME_SUBMITTER_FILE_LOCATION =
"local:///opt/spark/examples/src/main/r/dataframe.R"
val SPARK_R_DATAFRAME_CONTAINER_LOCAL_FILE_LOCATION =
"src/test/R/dataframe.R"
val PYSPARK_PI_SUBMITTER_LOCAL_FILE_LOCATION = "src/test/python/pi.py"
val FILE_EXISTENCE_MAIN_CLASS = "org.apache.spark.deploy.k8s" +
".integrationtest.jobs.FileExistenceTest"
val GROUP_BY_MAIN_CLASS = "org.apache.spark.deploy.k8s" +
".integrationtest.jobs.GroupByTest"
val JAVA_OPTIONS_MAIN_CLASS = "org.apache.spark.deploy.k8s" +
".integrationtest.jobs.JavaOptionsTest"
val TEST_EXISTENCE_FILE_CONTENTS = "contents"
// Control-flow signal used by createShuffleServiceDaemonSet's readiness polling loop.
case object ShuffleNotReadyException extends Exception
}
| apache-spark-on-k8s/spark | resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesSuite.scala | Scala | apache-2.0 | 20,520 |
package io.iohk.ethereum.security
import java.io.{ByteArrayInputStream, File, FileInputStream, FileOutputStream}
import java.security.{KeyStore, SecureRandom}
import javax.net.ssl.{KeyManager, TrustManager}
import org.scalamock.scalatest.MockFactory
import org.scalatest.BeforeAndAfterAll
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import scala.io.BufferedSource
/**
 * Unit tests for SSLContextFactory.createSSLContext. Each scenario runs against a TestSetup
 * that stubs out the filesystem and keystore operations, so the tests can drive every
 * failure path (missing files, bad keystore type, load/KeyManager/TrustManager errors)
 * without real certificates.
 */
class SSLContextFactorySpec extends AnyFlatSpec with Matchers with MockFactory with BeforeAndAfterAll {
// A real (empty) file on disk: the stubs hand out FileInputStream(file) as a stand-in
// keystore stream, which requires an existing file.
val fileName: String = "temp.txt"
var file: File = _
// NOTE(review): the FileOutputStream is opened only to create/truncate the file and is
// never closed — the descriptor leaks for the test run. Also uses deprecated procedure
// syntax (no "(): Unit =").
override def beforeAll {
new FileOutputStream(fileName, false).getFD
file = new File(fileName)
}
override def afterAll {
file.delete()
}
// Paths used by the SSLConfig under test; "existence" is simulated via TestSetup's
// existingFiles list, not the real filesystem.
val keyStorePath = "mantisCA.p12"
val keyStoreType = "pkcs12"
val passwordFile = "password"
// Happy path: all files exist and every stubbed step succeeds, yielding a TLS context.
it should "createSSLContext" in new TestSetup(
existingFiles = List(keyStorePath, passwordFile),
fCreateFileInputStream = () => Right(new FileInputStream(file)),
fLoadKeyStore = () => Right(()),
fGetKeyManager = () => Right(Array.empty),
fGetTrustManager = () => Right(Array.empty)
) {
val sslConfig = SSLConfig(
keyStorePath = keyStorePath,
keyStoreType = keyStoreType,
passwordFile = passwordFile
)
sSLContextFactory.createSSLContext(sslConfig, new SecureRandom()) match {
case Right(ssl) =>
ssl.getProtocol shouldBe "TLS"
case Left(error) => fail(error.reason)
}
}
it should "return a Error because keystore path and password are missing" in new TestSetup(
existingFiles = Nil,
fCreateFileInputStream = () => Right(new FileInputStream(file)),
fLoadKeyStore = () => Right(()),
fGetKeyManager = () => Right(Array.empty),
fGetTrustManager = () => Right(Array.empty)
) {
val sslConfig = SSLConfig(
keyStorePath = keyStorePath,
keyStoreType = keyStoreType,
passwordFile = passwordFile
)
val response = sSLContextFactory.createSSLContext(sslConfig, new SecureRandom())
response shouldBe Left(SSLError("Certificate keystore path and password file configured but files are missing"))
}
it should "return a Error because keystore path is missing" in new TestSetup(
existingFiles = List(passwordFile),
fCreateFileInputStream = () => Right(new FileInputStream(file)),
fLoadKeyStore = () => Right(()),
fGetKeyManager = () => Right(Array.empty),
fGetTrustManager = () => Right(Array.empty)
) {
val sslConfig = SSLConfig(
keyStorePath = keyStorePath,
keyStoreType = keyStoreType,
passwordFile = passwordFile
)
val response = sSLContextFactory.createSSLContext(sslConfig, new SecureRandom())
response shouldBe Left(SSLError("Certificate keystore path configured but file is missing"))
}
it should "return a Error because password file is missing" in new TestSetup(
existingFiles = List(keyStorePath),
fCreateFileInputStream = () => Right(new FileInputStream(file)),
fLoadKeyStore = () => Right(()),
fGetKeyManager = () => Right(Array.empty),
fGetTrustManager = () => Right(Array.empty)
) {
val sslConfig = SSLConfig(
keyStorePath = keyStorePath,
keyStoreType = keyStoreType,
passwordFile = passwordFile
)
val response = sSLContextFactory.createSSLContext(sslConfig, new SecureRandom())
response shouldBe Left(SSLError("Certificate password file configured but file is missing"))
}
it should "return a Error because invalid KeyStore Type" in new TestSetup(
existingFiles = List(keyStorePath, passwordFile),
fCreateFileInputStream = () => Right(new FileInputStream(file)),
fLoadKeyStore = () => Right(()),
fGetKeyManager = () => Right(Array.empty),
fGetTrustManager = () => Right(Array.empty)
) {
val invalidKeyStoreType = "invalidkeyStoreType"
val sslConfig = SSLConfig(
keyStorePath = keyStorePath,
keyStoreType = invalidKeyStoreType,
passwordFile = passwordFile
)
val response = sSLContextFactory.createSSLContext(sslConfig, new SecureRandom())
response shouldBe Left(SSLError(s"Certificate keystore invalid type set: $invalidKeyStoreType"))
}
it should "return a Error because keystore file creation failed" in new TestSetup(
existingFiles = List(keyStorePath, passwordFile),
fCreateFileInputStream = () => Left(new RuntimeException("Certificate keystore file creation failed")),
fLoadKeyStore = () => Right(()),
fGetKeyManager = () => Right(Array.empty),
fGetTrustManager = () => Right(Array.empty)
) {
val sslConfig = SSLConfig(
keyStorePath = keyStorePath,
keyStoreType = keyStoreType,
passwordFile = passwordFile
)
val response = sSLContextFactory.createSSLContext(sslConfig, new SecureRandom())
response shouldBe Left(SSLError("Certificate keystore file creation failed"))
}
it should "return a Error because failed to load keystore" in new TestSetup(
existingFiles = List(keyStorePath, passwordFile),
fCreateFileInputStream = () => Right(new FileInputStream(file)),
fLoadKeyStore = () => Left(new RuntimeException("Failed to load keyStore")),
fGetKeyManager = () => Right(Array.empty),
fGetTrustManager = () => Right(Array.empty)
) {
val sslConfig = SSLConfig(
keyStorePath = keyStorePath,
keyStoreType = keyStoreType,
passwordFile = passwordFile
)
val response = sSLContextFactory.createSSLContext(sslConfig, new SecureRandom())
response shouldBe Left(SSLError("Failed to load keyStore"))
}
// Note: KeyManager/TrustManager failures are surfaced with a generic message rather than
// the underlying exception's.
it should "return a Error because KeyManager failure" in new TestSetup(
existingFiles = List(keyStorePath, passwordFile),
fCreateFileInputStream = () => Right(new FileInputStream(file)),
fLoadKeyStore = () => Right(()),
fGetKeyManager = () => Left(new RuntimeException("Failed to get KeyManager")),
fGetTrustManager = () => Right(Array.empty)
) {
val sslConfig = SSLConfig(
keyStorePath = keyStorePath,
keyStoreType = keyStoreType,
passwordFile = passwordFile
)
val response = sSLContextFactory.createSSLContext(sslConfig, new SecureRandom())
response shouldBe Left(SSLError("Invalid Certificate keystore"))
}
it should "return a Error because TrustManager failure" in new TestSetup(
existingFiles = List(keyStorePath, passwordFile),
fCreateFileInputStream = () => Right(new FileInputStream(file)),
fLoadKeyStore = () => Right(()),
fGetKeyManager = () => Right(Array.empty),
fGetTrustManager = () => Left(new RuntimeException("Failed to get TrustManager"))
) {
val sslConfig = SSLConfig(
keyStorePath = keyStorePath,
keyStoreType = keyStoreType,
passwordFile = passwordFile
)
val response = sSLContextFactory.createSSLContext(sslConfig, new SecureRandom())
response shouldBe Left(SSLError("Invalid Certificate keystore"))
}
/**
 * Fixture that builds an SSLContextFactory with every external effect stubbed.
 *
 * @param existingFiles          paths reported as existing by `exist`
 * @param fCreateFileInputStream stubbed result of opening the keystore file
 * @param fLoadKeyStore          stubbed result of loading the keystore
 * @param fGetKeyManager         stubbed result of building the key managers
 * @param fGetTrustManager       stubbed result of building the trust managers
 */
class TestSetup(
existingFiles: List[String],
fCreateFileInputStream: () => Either[Throwable, FileInputStream],
fLoadKeyStore: () => Either[Throwable, Unit],
fGetKeyManager: () => Either[Throwable, Array[KeyManager]],
fGetTrustManager: () => Either[Throwable, Array[TrustManager]]
) {
val sSLContextFactory = new SSLContextFactory {
override def exist(pathName: String): Boolean = existingFiles.contains(pathName)
override def createFileInputStream(pathName: String): Either[Throwable, FileInputStream] =
fCreateFileInputStream()
// The password file always "contains" the literal string "password".
override def getReader(passwordFile: String): BufferedSource = new BufferedSource(
new ByteArrayInputStream("password".getBytes)
)
override def loadKeyStore(
keyStoreFile: FileInputStream,
passwordCharArray: Array[Char],
keyStore: KeyStore
): Either[Throwable, Unit] =
fLoadKeyStore()
override def getKeyManager(
keyStore: KeyStore,
passwordCharArray: Array[Char]
): Either[Throwable, Array[KeyManager]] = fGetKeyManager()
override def getTrustManager(keyStore: KeyStore): Either[Throwable, Array[TrustManager]] =
fGetTrustManager()
}
}
}
| input-output-hk/etc-client | src/test/scala/io/iohk/ethereum/security/SSLContextFactorySpec.scala | Scala | mit | 8,183 |
// get expected error message without package declaration
package ex
import scala.language.experimental.macros
import scala.reflect.macros._
// Target of the macro expansion below; deliberately unimplemented (??? throws
// NotImplementedError) — this file looks like a compiler test fixture, so the body is
// never meant to run.
object IW {
def foo(a: String): String = ???
}
object Mac {
// `mac(s)` expands at compile time into `IW.foo(s)` via reify/splice.
def mac(s: String): String = macro macImpl
def macImpl(c: Context)(s: c.Expr[String]): c.Expr[String] =
c.universe.reify(IW.foo(s.splice))
}
| AlexSikia/dotty | tests/untried/neg-with-implicits/t7519-b/Mac_1.scala | Scala | bsd-3-clause | 355 |
package com.rumblesan.scalaexperiments.tests.typeclass
import org.specs2.mutable._
import com.rumblesan.scalaexperiments.typeclass._
/**
 * Checks that the type-class based `UserInfoMethods.userInfo` extracts the same
 * name/email pair from both user variants (basic and paid).
 */
class TypeclassSpec extends Specification {

  "The 'Typeclass Example'" should {

    "get user info from the BasicUser" in {
      val user = BasicUser(1, "foo bar", "foo@bar.com")
      UserInfoMethods.userInfo(user) must_== UserInfo("foo bar", "foo@bar.com")
    }

    "get user info from the PaidUser" in {
      // The paid-tier amount (55.4) should not affect the extracted info.
      val user = PaidUser(1, "baz bim", "baz@bim.com", 55.4)
      UserInfoMethods.userInfo(user) must_== UserInfo("baz bim", "baz@bim.com")
    }
  }
}
| lachatak/scala-experiments | src/test/scala/TypeclassSpec.scala | Scala | bsd-2-clause | 709 |
package uk.gov.gds.ier.transaction.forces
import uk.gov.gds.ier.step.InprogressApplication
import uk.gov.gds.ier.model._
import uk.gov.gds.ier.model.Statement
import uk.gov.gds.ier.model.DateOfBirth
import uk.gov.gds.ier.model.PartialNationality
import uk.gov.gds.ier.model.PostalOrProxyVote
import uk.gov.gds.ier.model.PossibleContactAddresses
import uk.gov.gds.ier.model.Name
import uk.gov.gds.ier.model.PreviousName
import uk.gov.gds.ier.model.Service
import uk.gov.gds.ier.model.Rank
import uk.gov.gds.ier.model.Nino
import uk.gov.gds.ier.model.WaysToVote
import uk.gov.gds.ier.model.Contact
/**
 * Accumulated state of a partially completed armed-forces registration. Every step of the
 * transaction is optional until the applicant has supplied it.
 */
case class InprogressForces(
    statement: Option[Statement] = None,
    address: Option[LastAddress] = None,
    previousAddress: Option[PartialPreviousAddress] = None,
    nationality: Option[PartialNationality] = None,
    dob: Option[DateOfBirth] = None,
    name: Option[Name] = None,
    previousName: Option[PreviousName] = None,
    nino: Option[Nino] = None,
    service: Option[Service] = None,
    rank: Option[Rank] = None,
    contactAddress: Option[PossibleContactAddresses] = None,
    openRegisterOptin: Option[Boolean] = None,
    waysToVote: Option[WaysToVote] = None,
    postalOrProxyVote: Option[PostalOrProxyVote] = None,
    contact: Option[Contact] = None,
    possibleAddresses: Option[PossibleAddress] = None,
    sessionId: Option[String] = None)
  extends InprogressApplication[InprogressForces] {

  /**
   * Combines this application with `other`, field by field. Values present on `this`
   * always win; `other` only fills in the gaps. `possibleAddresses` (transient
   * address-lookup results) is always reset to None — presumably to avoid carrying
   * stale lookup state across merges.
   */
  def merge(other: InprogressForces) = {
    InprogressForces(
      statement = statement.orElse(other.statement),
      address = address.orElse(other.address),
      previousAddress = previousAddress.orElse(other.previousAddress),
      nationality = nationality.orElse(other.nationality),
      dob = dob.orElse(other.dob),
      name = name.orElse(other.name),
      previousName = previousName.orElse(other.previousName),
      nino = nino.orElse(other.nino),
      service = service.orElse(other.service),
      rank = rank.orElse(other.rank),
      contactAddress = contactAddress.orElse(other.contactAddress),
      openRegisterOptin = openRegisterOptin.orElse(other.openRegisterOptin),
      waysToVote = waysToVote.orElse(other.waysToVote),
      postalOrProxyVote = postalOrProxyVote.orElse(other.postalOrProxyVote),
      contact = contact.orElse(other.contact),
      possibleAddresses = None,
      sessionId = sessionId.orElse(other.sessionId)
    )
  }
}
| michaeldfallen/ier-frontend | app/uk/gov/gds/ier/transaction/forces/InprogressForces.scala | Scala | mit | 2,471 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.spark
import org.apache.spark.sql.catalyst.expressions.AttributeReference
import org.apache.spark.sql.execution.datasources.LogicalRelation
import org.apache.spark.sql.sources.BaseRelation
import scala.util.Try
/**
 * Utility to smooth over differences in Spark API versions using reflection.
 *
 * LogicalRelation's `copy` signature changed between Spark releases (3 parameters in older
 * versions, 4 in newer ones where an `isStreaming` flag was added — exact versions not
 * verifiable from here). The reflective lookup is performed once, eagerly, and captured in
 * a closure so per-call overhead is just the Method.invoke.
 */
object SparkVersions {
// Try defers any lookup failure until the first `copy` call (where .get rethrows it).
private val _copy: Try[(LogicalRelation, BaseRelation, Seq[AttributeReference]) => LogicalRelation] = Try {
val methods = classOf[LogicalRelation].getMethods
val m = methods.find(m => m.getName == "copy" && Seq(3, 4).contains(m.getParameterCount)).getOrElse {
throw new NoSuchMethodError(s"Could not find method named 'copy' in class ${classOf[LogicalRelation].getName}")
}
if (m.getParameterCount == 4) {
// 4-arg variant: the extra argument is the relation's current isStreaming value,
// read reflectively and passed through unchanged.
val streaming = methods.find(_.getName == "isStreaming").getOrElse {
throw new NoSuchMethodError(s"Could not find method named 'isStreaming' in class " +
classOf[LogicalRelation].getName)
}
(r, b, o) => m.invoke(r, b, o, r.catalogTable, streaming.invoke(r)).asInstanceOf[LogicalRelation]
} else {
(r, b, o) => m.invoke(r, b, o, r.catalogTable).asInstanceOf[LogicalRelation]
}
}
/**
 * Replacement for LogicalRelation#copy
 *
 * @param r relation to copy
 * @param relation base relation
 * @param output output
 * @return a copy of `r` with the given relation/output; catalogTable (and isStreaming,
 *         where present) are preserved from `r`
 */
def copy(r: LogicalRelation)
(relation: BaseRelation = r.relation,
output: Seq[AttributeReference] = r.output): LogicalRelation = _copy.get.apply(r, relation, output)
}
| aheyne/geomesa | geomesa-spark/geomesa-spark-sql/src/main/scala/org/locationtech/geomesa/spark/SparkVersions.scala | Scala | apache-2.0 | 2,049 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset down to a small sample of code snippets matching specific criteria, offering a quick look at the dataset's contents rather than any deeper analysis.