code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package observatory
import observatory.Visualization._
/**
* 4th milestone: value-added information
*/
object Manipulation {

  /**
   * @param temperatures Known temperatures
   * @return A function that, given a latitude in [-89, 90] and a longitude in [-180, 179],
   *         returns the predicted temperature at this location
   */
  def makeGrid(temperatures: Iterable[(Location, Double)]): (Int, Int) => Double = {
    (lat, lon) => predictTemperature(temperatures, Location(lat, lon))
  }

  /**
   * @param temperaturess Sequence of known temperatures over the years (each element of the collection
   *                      is a collection of pairs of location and temperature)
   * @return A function that, given a latitude and a longitude, returns the average temperature at this location
   */
  def average(temperaturess: Iterable[Iterable[(Location, Double)]]): (Int, Int) => Double = {
    // Build one grid function per yearly dataset and count the datasets once,
    // instead of redoing both on every single (lat, lon) lookup.
    val grids = temperaturess.map(makeGrid)
    val count = grids.size
    (lat, lon) => grids.map(_(lat, lon)).sum / count
  }

  /**
   * @param temperatures Known temperatures
   * @param normals      A grid containing the "normal" temperatures
   * @return A grid containing the deviations compared to the normal temperatures
   */
  def deviation(temperatures: Iterable[(Location, Double)], normals: (Int, Int) => Double): (Int, Int) => Double = {
    // Hoist the grid-function construction out of the returned closure.
    val grid = makeGrid(temperatures)
    (lat, lon) => grid(lat, lon) - normals(lat, lon)
  }
}
| syhan/coursera | scala-capstone/observatory/src/main/scala/observatory/Manipulation.scala | Scala | gpl-3.0 | 1,440 |
package service
import java.util.Date
import org.eclipse.jgit.api.Git
import org.apache.commons.io.FileUtils
import util.{PatchUtil, Directory, JGitUtil, LockUtil}
import _root_.util.ControlUtil._
import org.eclipse.jgit.treewalk.{TreeWalk, CanonicalTreeParser}
import org.eclipse.jgit.lib._
import org.eclipse.jgit.dircache.{DirCache, DirCacheEntry}
import org.eclipse.jgit.revwalk.RevWalk
import org.eclipse.jgit.diff.{DiffEntry, DiffFormatter}
import java.io.ByteArrayInputStream
import org.eclipse.jgit.patch._
import org.eclipse.jgit.api.errors.PatchFormatException
import scala.collection.JavaConverters._
object WikiService {

  /**
   * The model for wiki page.
   *
   * @param name the page name
   * @param content the page content
   * @param committer the last committer
   * @param time the last modified time
   * @param id the latest commit id
   */
  case class WikiPageInfo(name: String, content: String, committer: String, time: Date, id: String)

  /**
   * The model for wiki page history.
   *
   * @param name the page name
   * @param committer the committer
   * @param message the commit message
   * @param date the commit date
   */
  case class WikiPageHistoryInfo(name: String, committer: String, message: String, date: Date)
}
trait WikiService {
  import WikiService._

  /**
   * Creates the wiki repository for the given repository if it does not exist yet,
   * seeding it with a default "Home" page. The whole operation is guarded by a
   * per-wiki lock to serialize concurrent writers.
   */
  def createWikiRepository(loginAccount: model.Account, owner: String, repository: String): Unit =
    LockUtil.lock(s"${owner}/${repository}/wiki"){
      defining(Directory.getWikiRepositoryDir(owner, repository)){ dir =>
        if(!dir.exists){
          JGitUtil.initRepository(dir)
          saveWikiPage(owner, repository, "Home", "Home", s"Welcome to the ${repository} wiki!!", loginAccount, "Initial Commit", None)
        }
      }
    }

  /**
   * Returns the wiki page, or None if the wiki repository is empty or the page does not exist.
   */
  def getWikiPage(owner: String, repository: String, pageName: String): Option[WikiPageInfo] = {
    using(Git.open(Directory.getWikiRepositoryDir(owner, repository))){ git =>
      optionIf(!JGitUtil.isEmpty(git)){
        JGitUtil.getFileList(git, "master", ".").find(_.name == pageName + ".md").map { file =>
          WikiPageInfo(file.name, new String(git.getRepository.open(file.id).getBytes, "UTF-8"), file.committer, file.time, file.commitId)
        }
      }
    }
  }

  /**
   * Returns the raw content of the specified file, or None if it does not exist.
   */
  def getFileContent(owner: String, repository: String, path: String): Option[Array[Byte]] =
    using(Git.open(Directory.getWikiRepositoryDir(owner, repository))){ git =>
      optionIf(!JGitUtil.isEmpty(git)){
        // Split "dir/sub/file" into its parent path and file name.
        val index = path.lastIndexOf('/')
        val parentPath = if(index < 0) "." else path.substring(0, index)
        val fileName = if(index < 0) path else path.substring(index + 1)
        JGitUtil.getFileList(git, "master", parentPath).find(_.name == fileName).map { file =>
          git.getRepository.open(file.id).getBytes
        }
      }
    }

  /**
   * Returns the sorted list of wiki page names (with the ".md" extension stripped).
   */
  def getWikiPageList(owner: String, repository: String): List[String] = {
    using(Git.open(Directory.getWikiRepositoryDir(owner, repository))){ git =>
      JGitUtil.getFileList(git, "master", ".")
        .filter(_.name.endsWith(".md"))
        // Fix: the pattern must be "\\.md$" (escaped dot). The previous
        // "\\\\.md$" compiled to a regex requiring a literal backslash and
        // never stripped the extension from any page name.
        .map(_.name.replaceFirst("\\.md$", ""))
        .sortBy(x => x)
    }
  }

  /**
   * Reverts the changes between commit `from` and commit `to`, optionally restricted
   * to a single page. Returns true on success, false on any failure.
   */
  def revertWikiPage(owner: String, repository: String, from: String, to: String,
                     committer: model.Account, pageName: Option[String]): Boolean = {

    case class RevertInfo(operation: String, filePath: String, source: String)

    try {
      LockUtil.lock(s"${owner}/${repository}/wiki"){
        using(Git.open(Directory.getWikiRepositoryDir(owner, repository))){ git =>

          val reader = git.getRepository.newObjectReader
          val oldTreeIter = new CanonicalTreeParser
          oldTreeIter.reset(reader, git.getRepository.resolve(from + "^{tree}"))
          val newTreeIter = new CanonicalTreeParser
          newTreeIter.reset(reader, git.getRepository.resolve(to + "^{tree}"))

          // Note: old/new trees are deliberately swapped to obtain a *reverse* diff.
          val diffs = git.diff.setNewTree(oldTreeIter).setOldTree(newTreeIter).call.asScala.filter { diff =>
            pageName match {
              case Some(x) => diff.getNewPath == x + ".md"
              case None => true
            }
          }

          // Render the reverse diff as a textual patch.
          val patch = using(new java.io.ByteArrayOutputStream()){ out =>
            val formatter = new DiffFormatter(out)
            formatter.setRepository(git.getRepository)
            formatter.format(diffs.asJava)
            new String(out.toByteArray, "UTF-8")
          }

          val p = new Patch()
          p.parse(new ByteArrayInputStream(patch.getBytes("UTF-8")))
          if(!p.getErrors.isEmpty){
            throw new PatchFormatException(p.getErrors())
          }

          // Translate each file header of the patch into ADD/DELETE instructions.
          val revertInfo = (p.getFiles.asScala.map { fh =>
            fh.getChangeType match {
              case DiffEntry.ChangeType.MODIFY => {
                // Fix: strip the ".md" extension with "\\.md$"; the previous
                // "\\\\.md$" pattern never matched, so the page lookup failed.
                val source = getWikiPage(owner, repository, fh.getNewPath.replaceFirst("\\.md$", "")).map(_.content).getOrElse("")
                val applied = PatchUtil.apply(source, patch, fh)
                if(applied != null){
                  Seq(RevertInfo("ADD", fh.getNewPath, applied))
                } else Nil
              }
              case DiffEntry.ChangeType.ADD => {
                val applied = PatchUtil.apply("", patch, fh)
                if(applied != null){
                  Seq(RevertInfo("ADD", fh.getNewPath, applied))
                } else Nil
              }
              case DiffEntry.ChangeType.DELETE => {
                Seq(RevertInfo("DELETE", fh.getNewPath, ""))
              }
              case DiffEntry.ChangeType.RENAME => {
                val applied = PatchUtil.apply("", patch, fh)
                if(applied != null){
                  Seq(RevertInfo("DELETE", fh.getOldPath, ""), RevertInfo("ADD", fh.getNewPath, applied))
                } else {
                  Seq(RevertInfo("DELETE", fh.getOldPath, ""))
                }
              }
              case _ => Nil
            }
          }).flatten

          if(revertInfo.nonEmpty){
            val builder = DirCache.newInCore.builder()
            val inserter = git.getRepository.newObjectInserter()
            val headId = git.getRepository.resolve(Constants.HEAD + "^{commit}")

            // Copy every HEAD entry that is not touched by the revert into the new index.
            using(new RevWalk(git.getRepository)){ revWalk =>
              using(new TreeWalk(git.getRepository)){ treeWalk =>
                val index = treeWalk.addTree(revWalk.parseTree(headId))
                treeWalk.setRecursive(true)
                while(treeWalk.next){
                  val path = treeWalk.getPathString
                  val tree = treeWalk.getTree(index, classOf[CanonicalTreeParser])
                  if(revertInfo.find(x => x.filePath == path).isEmpty){
                    builder.add(JGitUtil.createDirCacheEntry(path, tree.getEntryFileMode, tree.getEntryObjectId))
                  }
                }
              }
            }

            // Insert the reverted blobs; files marked DELETE are simply not re-added.
            revertInfo.filter(_.operation == "ADD").foreach { x =>
              builder.add(JGitUtil.createDirCacheEntry(x.filePath, FileMode.REGULAR_FILE, inserter.insert(Constants.OBJ_BLOB, x.source.getBytes("UTF-8"))))
            }
            builder.finish()

            JGitUtil.createNewCommit(git, inserter, headId, builder.getDirCache.writeTree(inserter), committer.fullName, committer.mailAddress,
              pageName match {
                case Some(x) => s"Revert ${from} ... ${to} on ${x}"
                case None => s"Revert ${from} ... ${to}"
              })
          }
        }
      }
      true
    } catch {
      case e: Exception => {
        e.printStackTrace()
        false
      }
    }
  }

  /**
   * Saves the wiki page and returns the new commit id, or None when nothing changed.
   *
   * If `currentPageName` differs from `newPageName` the page is renamed.
   */
  def saveWikiPage(owner: String, repository: String, currentPageName: String, newPageName: String,
                   content: String, committer: model.Account, message: String, currentId: Option[String]): Option[String] = {
    LockUtil.lock(s"${owner}/${repository}/wiki"){
      using(Git.open(Directory.getWikiRepositoryDir(owner, repository))){ git =>
        val builder = DirCache.newInCore.builder()
        val inserter = git.getRepository.newObjectInserter()
        val headId = git.getRepository.resolve(Constants.HEAD + "^{commit}")

        var created = true
        var updated = false
        var removed = false

        // headId is null for a brand-new (empty) wiki repository.
        if(headId != null){
          using(new RevWalk(git.getRepository)){ revWalk =>
            using(new TreeWalk(git.getRepository)){ treeWalk =>
              val index = treeWalk.addTree(revWalk.parseTree(headId))
              treeWalk.setRecursive(true)
              while(treeWalk.next){
                val path = treeWalk.getPathString
                val tree = treeWalk.getTree(index, classOf[CanonicalTreeParser])
                if(path == currentPageName + ".md" && currentPageName != newPageName){
                  // The page is being renamed: drop the old entry.
                  removed = true
                } else if(path != newPageName + ".md"){
                  // Unrelated file: carry it over unchanged.
                  builder.add(JGitUtil.createDirCacheEntry(path, tree.getEntryFileMode, tree.getEntryObjectId))
                } else {
                  // The page already exists: only commit if the content differs.
                  created = false
                  updated = JGitUtil.getContent(git, tree.getEntryObjectId, true).map(new String(_, "UTF-8") != content).getOrElse(false)
                }
              }
            }
          }
        }

        optionIf(created || updated || removed){
          builder.add(JGitUtil.createDirCacheEntry(newPageName + ".md", FileMode.REGULAR_FILE, inserter.insert(Constants.OBJ_BLOB, content.getBytes("UTF-8"))))
          builder.finish()

          val newHeadId = JGitUtil.createNewCommit(git, inserter, headId, builder.getDirCache.writeTree(inserter), committer.fullName, committer.mailAddress,
            if(message.trim.length == 0) {
              if(removed){
                s"Rename ${currentPageName} to ${newPageName}"
              } else if(created){
                s"Created ${newPageName}"
              } else {
                s"Updated ${newPageName}"
              }
            } else {
              message
            })

          Some(newHeadId)
        }
      }
    }
  }

  /**
   * Deletes the wiki page. A no-op when the page (or the wiki HEAD) does not exist.
   */
  def deleteWikiPage(owner: String, repository: String, pageName: String,
                     committer: String, mailAddress: String, message: String): Unit = {
    LockUtil.lock(s"${owner}/${repository}/wiki"){
      using(Git.open(Directory.getWikiRepositoryDir(owner, repository))){ git =>
        val builder = DirCache.newInCore.builder()
        val inserter = git.getRepository.newObjectInserter()
        val headId = git.getRepository.resolve(Constants.HEAD + "^{commit}")
        var removed = false

        // Robustness fix: guard against an empty wiki repository (no HEAD yet),
        // mirroring saveWikiPage; previously parseTree(null) would throw.
        if(headId != null){
          using(new RevWalk(git.getRepository)){ revWalk =>
            using(new TreeWalk(git.getRepository)){ treeWalk =>
              val index = treeWalk.addTree(revWalk.parseTree(headId))
              treeWalk.setRecursive(true)
              while(treeWalk.next){
                val path = treeWalk.getPathString
                val tree = treeWalk.getTree(index, classOf[CanonicalTreeParser])
                if(path != pageName + ".md"){
                  builder.add(JGitUtil.createDirCacheEntry(path, tree.getEntryFileMode, tree.getEntryObjectId))
                } else {
                  removed = true
                }
              }
            }
            if(removed){
              builder.finish()
              JGitUtil.createNewCommit(git, inserter, headId, builder.getDirCache.writeTree(inserter), committer, mailAddress, message)
            }
          }
        }
      }
    }
  }
}
}
| unixcrh/gitbucket | src/main/scala/service/WikiService.scala | Scala | apache-2.0 | 12,003 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.dllib.optim.SGD
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.utils.T
import scala.reflect.ClassTag
@com.intel.analytics.bigdl.tags.Parallel
class GradientCheckerRNN(stepSize: Double = 0.01, threshold: Double = 0.01) {

  /**
   * Numerically verifies the analytic gradients of `layer` against a central
   * finite-difference estimate, printing the per-parameter comparison.
   *
   * Fix: the original always returned `false` and never used `threshold`;
   * now returns true iff every parameter's relative error is below `threshold`.
   *
   * @param layer   module under test (gradients taken w.r.t. its parameters)
   * @param input   input tensor fed to the layer
   * @param label   target tensor for the CrossEntropy criterion
   * @param epsilon currently unused; kept for interface compatibility
   */
  def checkLayer[T: ClassTag](
    layer: Module[Double],
    input: Tensor[Double],
    label: Tensor[Double],
    epsilon: Double = 0.01): Boolean = {

    val criterion = new CrossEntropyCriterion[Double]()
    val (weights, grad) = layer.getParameters()
    val state = T("learningRate" -> 0.05, "momentum" -> 0.0, "weightDecay" -> 0.0,
      "dampening" -> 0.0)
    val sgd = new SGD[Double]

    // Forward/backward pass returning (loss, analytic gradient).
    def feval(x: Tensor[Double]): (Double, Tensor[Double]) = {
      layer.forward(input)
      criterion.forward(layer.output.asInstanceOf[Tensor[Double]], label)
      layer.zeroGradParameters()
      val gradOutputTest = criterion.backward(layer.output.asInstanceOf[Tensor[Double]], label)
      layer.backward(input, gradOutputTest)
      (criterion.output, grad)
    }

    // NOTE(review): one SGD step is taken before checking, so the check runs at a
    // perturbed parameter point — confirm this is intended rather than incidental.
    sgd.optimize(feval, weights, state)

    var allBelowThreshold = true
    for (i <- 1 to grad.size(1)) {
      val originalValue = weights.valueAt(i)

      // loss(w + stepSize)
      weights.setValue(i, originalValue + stepSize)
      layer.forward(input)
      criterion.forward(layer.output.asInstanceOf[Tensor[Double]], label)
      val lossPlus = criterion.output

      // loss(w - stepSize)
      weights.setValue(i, originalValue - stepSize)
      layer.forward(input)
      criterion.forward(layer.output.asInstanceOf[Tensor[Double]], label)
      val lossMinus = criterion.output

      // Central finite-difference estimate of d(loss)/d(w_i).
      val estimatedGradient = (lossPlus - lossMinus) / (2 * stepSize)
      weights.setValue(i, originalValue)
      val backpropGradient = grad.valueAt(i)

      // Symmetric relative error; defined as 0 when both gradients are 0.
      val relativeError =
        if ((Math.abs(backpropGradient) + Math.abs(estimatedGradient)) == 0) 0
        else {
          Math.abs(backpropGradient - estimatedGradient) /
            (Math.abs(backpropGradient) + Math.abs(estimatedGradient))
        }
      if (relativeError >= threshold) allBelowThreshold = false

      println(s"parameter ${i}, EstimatedGradient = ${estimatedGradient}, " +
        s"BackpropGradient = ${backpropGradient}," +
        s"RelativeError = ${relativeError}")
    }
    allBelowThreshold
  }

  /**
   * Treats the output itself as the gradient of the squared-error-to-zero loss:
   * returns (0.5 * sum(output^2), output).
   */
  def lossAndGradient[T: ClassTag](output: Tensor[T])(
    implicit ev: TensorNumeric[T]): (Double, Tensor[T]) = {
    val gradOutput = Tensor[T]().resizeAs(output).copy(output)
    var loss = 0.0
    gradOutput.apply1(a => {
      val aDouble = ev.toType[Double](a)
      loss += 0.5 * aDouble * aDouble
      a
    })
    (loss, gradOutput)
  }
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/nn/GradientCheckerRNN.scala | Scala | apache-2.0 | 3,273 |
package com.idyria.osi.ooxoo.lib.json.model
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.databind.module.SimpleModule
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
import com.google.gson
import java.io.{File, FileOutputStream, FileReader, InputStream, InputStreamReader, Reader, StringReader}
import java.lang.reflect.{Field, Method, Type}
import java.time.Instant
import java.util
import java.util.{Base64, UUID}
import com.google.gson.annotations.{Expose, SerializedName}
import com.google.gson.reflect.TypeToken
import com.google.gson.{ExclusionStrategy, FieldAttributes, GsonBuilder, JsonArray, JsonDeserializationContext, JsonDeserializer, JsonElement, JsonPrimitive, JsonSerializationContext, JsonSerializer}
import com.idyria.osi.ooxoo.lib.json.yaml.{JsonObjectDeserialiser, JsonValueDeserialiser}
import javax.json.{Json, JsonObject, JsonString, JsonValue}
import javax.json.bind.annotation.JsonbProperty
import javax.json.bind.{JsonbBuilder, JsonbConfig}
import javax.json.bind.config.{BinaryDataStrategy, PropertyNamingStrategy, PropertyOrderStrategy, PropertyVisibilityStrategy}
import javax.json.bind.serializer.{DeserializationContext, JsonbDeserializer, JsonbSerializer, SerializationContext}
import javax.json.stream.{JsonGenerator, JsonParser}
import scala.jdk.CollectionConverters.CollectionHasAsScala
import scala.reflect.ClassTag
/**
 * Helpers for (de)serializing objects with three stacks: JSON-B (javax.json.bind),
 * Gson, and Jackson (YAML only). All three share the same visibility rules:
 * fields annotated with @Expose, plus the CouchDB-style special fields
 * (_id, _rev, _attachments, _deleted) named via @SerializedName.
 */
object JSONHelper {

  // Shared JSON-B configuration: URL-safe Base64 binaries, pretty printing,
  // reverse property order, identity naming, @Expose-driven visibility and
  // UUID <-> string conversion. Note: withXxx calls mutate and return the
  // same config instance, so the fluent results can be discarded here.
  lazy val jsonBConfig = new JsonbConfig
  jsonBConfig.withBinaryDataStrategy(BinaryDataStrategy.BASE_64_URL)
  jsonBConfig.withFormatting(true)
  jsonBConfig.withPropertyOrderStrategy(PropertyOrderStrategy.REVERSE)
  jsonBConfig.withPropertyNamingStrategy(PropertyNamingStrategy.IDENTITY)
  jsonBConfig.withPropertyVisibilityStrategy(new OnlyExposeStrategy)
  jsonBConfig.withSerializers(new UUIDJsonBSerDes)
  jsonBConfig.withDeserializers(new UUIDJsonBSerDes)

  // Single shared Jsonb instance built from the configuration above.
  lazy val jsonb = JsonbBuilder.create(jsonBConfig)

  /**
   * Builds a fresh JsonbConfig with the same settings as the shared `jsonBConfig`,
   * for callers that need to customize further without affecting the shared instance.
   */
  def createStdJSONBConfig = {
    val jsonBConfig = new JsonbConfig
    jsonBConfig.withBinaryDataStrategy(BinaryDataStrategy.BASE_64_URL)
    jsonBConfig.withFormatting(true)
    jsonBConfig.withPropertyOrderStrategy(PropertyOrderStrategy.REVERSE)
    jsonBConfig.withPropertyNamingStrategy(PropertyNamingStrategy.IDENTITY)
    jsonBConfig.withPropertyVisibilityStrategy(new OnlyExposeStrategy)
    jsonBConfig.withSerializers(new UUIDJsonBSerDes)
    jsonBConfig.withDeserializers(new UUIDJsonBSerDes)
    jsonBConfig
  }

  /**
   * Gson builder preconfigured with pretty printing, the @Expose/_id exclusion
   * strategy, and type adapters for byte arrays (URL-safe Base64), UUIDs and
   * javax.json JsonValue trees.
   */
  def createGSONBuilder = {
    new GsonBuilder()
      .setPrettyPrinting()
      .setExclusionStrategies(new JacksonAndDocumentExclusionStrategy)
      .registerTypeHierarchyAdapter(classOf[Array[Byte]], new JSONHelper.ByteArrayToBase64TypeAdapter())
      .registerTypeHierarchyAdapter(classOf[UUID], new JSONHelper.UUIDTypeAdapter())
      .registerTypeHierarchyAdapter(classOf[JsonValue], new JSONHelper.JsonValueTypeAdapter())
  }

  /** Builds a new Gson instance from `createGSONBuilder` (a fresh one per call). */
  def createGSON = {
    createGSONBuilder.create()
  }

  /** Returns the shared Jsonb instance (despite the name, this does not create a new one). */
  def createJSONB = {
    jsonb
  }

  /**
   * Creates a Jackson Mapper that can parse YAML into JSONB compatible object hierarchies
   *
   * @return an ObjectMapper wired with deserializers producing javax.json values
   */
  def createJacksonYAMLReader = {
    val mapper = new ObjectMapper(new YAMLFactory)
    mapper.findAndRegisterModules()
    val m = new SimpleModule()
    m.addDeserializer(classOf[JsonValue], new JsonValueDeserialiser)
    m.addDeserializer(classOf[JsonObject],new JsonObjectDeserialiser)
    mapper.registerModule(m)
    mapper
  }

  /** Parses YAML from `r` into an instance of T via the Jackson YAML reader. */
  def fromYAML[T](r: Reader)(implicit tag: ClassTag[T]) : T = {
    val mapper = createJacksonYAMLReader
    mapper.readValue(r,tag.runtimeClass).asInstanceOf[T]
  }

  /**
   * Deserializes a JSON file into a new instance of T using the shared Jsonb.
   *
   * @param f the JSON file to read
   * @return a freshly populated instance of T
   */
  def fromJSONFile[T](f: File)(implicit tag: ClassTag[T]) = {
    // NOTE(review): the FileReader is not closed here — confirm whether Jsonb
    // closes the reader or whether this leaks a file handle per call.
    jsonb.fromJson(new FileReader(f), tag.runtimeClass).asInstanceOf[T]
  }

  /**
   * Deserializes a UTF-8 JSON stream into a new instance of T.
   *
   * @param is the input stream to read from
   */
  def fromJSONStream[T](is: InputStream)(implicit tag: ClassTag[T]) = {
    jsonb.fromJson(new InputStreamReader(is, "UTF-8"), tag.runtimeClass).asInstanceOf[T]
  }

  /**
   * Deserializes a UTF-8 JSON stream holding a top-level array into Array[T],
   * using the runtime array class derived from the ClassTag.
   */
  def fromJSONStreamArray[T](is: InputStream)(implicit tag: ClassTag[T]): Array[T] = {
    // var istType = new java.util.ArrayList[T]() {}.getClass.getGenericSuperclass
    jsonb.fromJson[Array[T]](new InputStreamReader(is, "UTF-8"), tag.newArray(0).getClass)
  }

  /** Same as fromJSONStreamArray but uses Gson instead of JSON-B. */
  def fromGJSONStreamArray[T](is: InputStream)(implicit tag: ClassTag[T]): Array[T] = {
    // Leftover from earlier experiments; unused at runtime.
    var istType = new java.util.ArrayList[T]() {}.getClass.getGenericSuperclass
    //val userListType = new Array[T](0).getClass
    //createGSON.fromJson(new InputStreamReader(is, "UTF-8"),tag.runtimeClass.arrayType())
    //createGSON.fromJson(new InputStreamReader(is, "UTF-8"),classOf[Array[T]]).asInstanceOf[util.ArrayList[T]].asScala.toArray
    var res = createGSON.fromJson[Array[T]](new InputStreamReader(is, "UTF-8"), tag.newArray(0).getClass)
    /*res.foreach {
        parsed =>
          println("Parsed element: "+parsed.getClass.getCanonicalName)
      }*/
    res
  }

  /** Serializes `obj` as JSON into `f`, creating parent directories as needed. */
  def toJSONFile(obj: AnyRef, f: File) = {
    f.getParentFile.mkdirs()
    val w = new FileOutputStream(f)
    JSONHelper.jsonb.toJson(obj, w)
    w.close()
  }

  /** Serializes `obj` to a JSON string using the shared Jsonb. */
  def toJSONString(obj: AnyRef) = jsonb.toJson(obj)

  /** Deserializes a JSON string into an instance of T. */
  def fromString[T](str: String)(implicit tag: ClassTag[T]) = {
    jsonb.fromJson[T](str, tag.runtimeClass)
  }

  /** Deserializes a javax.json JsonObject into T by round-tripping through its string form. */
  def fromJsonObject[T](obj: JsonObject)(implicit tag: ClassTag[T]) = {
    fromString[T](obj.toString)
  }

  /**
   * Gson exclusion strategy: serialize @Expose-annotated fields, plus the
   * CouchDB document fields (_id, _rev, _attachments, _deleted) declared
   * via @SerializedName; skip every other @SerializedName field.
   */
  class JacksonAndDocumentExclusionStrategy extends ExclusionStrategy {
    override def shouldSkipField(f: FieldAttributes): Boolean = {
      // All @Expose are translated
      f.getAnnotation(classOf[Expose]) match {
        // NO expose, authorize _id and rev
        case null if (f.getAnnotation(classOf[SerializedName]) != null) =>
          //println("Testing field: "+f.getAnnotation(classOf[SerializedName]).value())
          !f.getAnnotation(classOf[SerializedName]).value().matches("_id|_rev|_attachments|_deleted")
        case other => false
      }
    }

    override def shouldSkipClass(clazz: Class[_]): Boolean = {
      false
    }
  }

  /**
   * JSON-B visibility strategy mirroring JacksonAndDocumentExclusionStrategy:
   * fields are visible when annotated with @Expose or @JsonbProperty, or when
   * @SerializedName names one of the CouchDB document fields. Methods are never visible.
   */
  class OnlyExposeStrategy extends PropertyVisibilityStrategy {
    override def isVisible(f: Field): Boolean = {
      // All @Expose are translated
      f.getAnnotation(classOf[Expose]) match {
        // NO expose, authorize _id and rev
        case null if (f.getAnnotation(classOf[SerializedName]) != null) =>
          //println("Testing field: "+f.getAnnotation(classOf[SerializedName]).value())
          !f.getAnnotation(classOf[SerializedName]).value().matches("_id|_rev|_attachments|_deleted")
        case null if (f.getAnnotation(classOf[JsonbProperty]) != null) =>
          true
        case null => false
        case other => true
      }
    }

    override def isVisible(method: Method): Boolean = {
      false
    }
  }

  /** Gson adapter: byte arrays <-> URL-safe Base64 strings. */
  class ByteArrayToBase64TypeAdapter extends JsonSerializer[Array[Byte]] with JsonDeserializer[Array[Byte]] {
    def deserialize(json: JsonElement, typeOfT: Type, context: JsonDeserializationContext): Array[Byte] = Base64.getUrlDecoder.decode(json.getAsString)

    def serialize(src: Array[Byte], typeOfSrc: Type, context: JsonSerializationContext) = new JsonPrimitive(Base64.getUrlEncoder.encodeToString(src))
  }

  /** Gson adapter: UUID <-> its canonical string representation. */
  class UUIDTypeAdapter extends JsonSerializer[UUID] with JsonDeserializer[UUID] {
    def deserialize(json: JsonElement, typeOfT: Type, context: JsonDeserializationContext): UUID = java.util.UUID.fromString(json.getAsString)

    def serialize(src: UUID, typeOfSrc: Type, context: JsonSerializationContext) = new JsonPrimitive(src.toString)
  }

  /** JSON-B serializer/deserializer pair for UUID values (as plain strings). */
  class UUIDJsonBSerDes extends JsonbDeserializer[UUID] with JsonbSerializer[UUID] {
    override def deserialize(jsonParser: JsonParser, deserializationContext: DeserializationContext, `type`: Type): UUID = {
      UUID.fromString(jsonParser.getString)
    }

    override def serialize(t: UUID, jsonGenerator: JsonGenerator, serializationContext: SerializationContext): Unit = {
      jsonGenerator.write(t.toString)
    }
  }

  /**
   * JSON-B deserializer for java.time.Instant, expecting an object of the form
   * {"seconds": <epoch seconds>, "nanos": <nano adjustment>}.
   */
  class JSONInstantDeserialiser extends JsonbDeserializer[Instant] {
    /* def deserialize(json: JsonElement, typeOfT: Type, context: JsonDeserializationContext): Instant = {
        val instant = Instant.ofEpochSecond(json.getAsJsonObject.get.getJsonNumber("seconds").longValue(), tsObj.getJsonNumber("nanos").longValue())
      }*/
    override def deserialize(jsonParser: JsonParser, deserializationContext: DeserializationContext, `type`: Type): Instant = {
      val obj = jsonParser.getObject
      val instant = Instant.ofEpochSecond(
        obj.getJsonNumber("seconds").longValue(),
        obj.getJsonNumber("nanos").longValue()
      )
      instant
    }
  }

  /**
   * Gson adapter bridging Gson's JsonElement tree and javax.json JsonValue.
   * Numbers containing a '.' are treated as doubles, others as longs.
   */
  class JsonValueTypeAdapter extends JsonSerializer[JsonValue] with JsonDeserializer[JsonValue] {
    def deserialize(json: JsonElement, typeOfT: Type, context: JsonDeserializationContext): JsonValue = {
      json match {
        case json if (json.isJsonPrimitive) =>
          json.getAsJsonPrimitive match {
            case str if (str.isString) => Json.createValue(str.getAsString)
            case b if (b.isBoolean) => if (b.getAsBoolean) JsonValue.TRUE else JsonValue.FALSE
            case double if (double.isNumber && double.getAsString.contains(".")) => Json.createValue(double.getAsDouble)
            case long if (long.isNumber) => Json.createValue(long.getAsLong)
            //case long if (str.isString) => Json.createValue(str.getAsString)
            // case double if (str.isString) => Json.createValue(str.getAsString)
            case other => sys.error("Primitive not supported: " + other)
          }
        case json if (json.isJsonObject) => Json.createReader(new StringReader(json.toString)).readObject()
        case json if (json.isJsonArray && json.getAsJsonArray.size() > 0) => Json.createReader(new StringReader(json.toString)).readArray()
        // NOTE(review): empty arrays, JsonNull and anything else fall through to
        // null here — confirm that returning null (not an empty JsonArray) is intended.
        case other => null
      }
    }

    def serialize(src: JsonValue, typeOfSrc: Type, context: JsonSerializationContext) = {
      src.getValueType match {
        case JsonValue.ValueType.STRING => new JsonPrimitive(src.asInstanceOf[JsonString].getString)
        case JsonValue.ValueType.NUMBER if (src.toString.contains(".")) => new JsonPrimitive(src.toString.toDouble)
        case JsonValue.ValueType.NUMBER => new JsonPrimitive(src.toString.toLong)
        case JsonValue.ValueType.TRUE => new JsonPrimitive(true)
        case JsonValue.ValueType.FALSE => new JsonPrimitive(false)
        case JsonValue.ValueType.ARRAY =>
          //println("InARR")
          // Recursively serialize each element of the array.
          val arr = new JsonArray()
          src.asJsonArray().asScala.foreach {
            elt =>
              arr.add(context.serialize(elt))
          }
          arr
        case JsonValue.ValueType.OBJECT =>
          // println("InObj")
          // Recursively serialize each member of the object.
          val obj = new gson.JsonObject()
          val srcJson = src.asJsonObject()
          srcJson.keySet().forEach {
            key =>
              obj.add(key, context.serialize(srcJson.get(key)))
          }
          // obj.addProperty("hello","World")
          obj
        //createGSON.toJsonTree(src.asJsonObject().toString)
        //context.serialize(src.asJsonObject().toString)
        case other => sys.error("Not supported Object or Array")
        // case JsonValue.ValueType.OBJECT => createGSON.newJsonReader(new StringReader(src.toString))
      }
    }
  }
}
| richnou/ooxoo-core | ooxoo-core/src/main/scala/com/idyria/osi/ooxoo/lib/json/model/JSONHelper.scala | Scala | agpl-3.0 | 11,508 |
package ch.uzh.ifi.pdeboer.pplib.patterns
import ch.uzh.ifi.pdeboer.pplib.process.entities.Patch
import org.junit.{Assert, Test}
/**
* Created by pdeboer on 10/12/14.
*/
class GeneticAlgorithmExecutorTest {

  /**
   * Runs the executor for 10 iterations over a toy driver and checks that
   * combine/mutate were actually invoked and that the population was refined.
   */
  @Test
  def testMethodsCalled: Unit = {
    val gaDriver = new TestDriver()
    val executor = new GeneticAlgorithmExecutor(gaDriver, new GAIterationLimitTerminator(10))
    println(executor.refinedData)
    Assert.assertTrue(gaDriver.counter > 1)
    Assert.assertEquals(1, executor.refinedData.takeRight(1)(0).toString.length)
    Assert.assertTrue(executor.refinedData(0).toString.length > 1)
  }

  /** Minimal driver: fitness rewards short patches, operations count invocations. */
  private class TestDriver extends GeneticAlgorithmDriver {
    // Number of combine/mutate calls performed by the executor.
    var counter: Int = 0

    // Ten single-letter seed patches "a" through "j", all with fitness rank 0.
    override def initialPopulation: GAPopulation = {
      val seeds = ('a' to 'j').map(letter => new GAChromosome(new Patch(letter.toString), 0)).toList
      new GAPopulation(seeds)
    }

    // Crossover: concatenate both patches.
    override def combine(patch1: Patch, patch2: Patch): Patch = {
      counter += 1
      new Patch(patch1.toString + patch2.toString)
    }

    // Mutation: append "1" to the patch.
    override def mutate(patch: Patch): Patch = {
      counter += 1
      new Patch(patch.toString + "1")
    }

    // Shorter patches are fitter.
    override def fitness(patch: Patch): Double = -patch.toString.length
  }
}
| uzh/PPLib | src/test/scala/ch/uzh/ifi/pdeboer/pplib/patterns/GeneticAlgorithmExecutorTest.scala | Scala | mit | 1,141 |
package com.twitter.finatra.kafkastreams.integration.finatratransformer
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.finatra.kafkastreams.integration.finatratransformer.WordLengthFinatraTransformer._
import com.twitter.finatra.kafkastreams.transformer.FinatraTransformer
import com.twitter.finatra.kafkastreams.transformer.domain.{Expire, Time, TimerMetadata}
import com.twitter.finatra.kafkastreams.transformer.stores.PersistentTimers
import com.twitter.util.Duration
import org.apache.kafka.streams.processor.PunctuationType
object WordLengthFinatraTransformer {
  // Delay between a message's event time and the firing of its Expire timer.
  val delayedMessageTime: Duration = 5.seconds
}
/**
 * Test transformer that forwards each key together with its length, both on
 * message arrival and again when a delayed stream-time timer fires.
 */
class WordLengthFinatraTransformer(statsReceiver: StatsReceiver, timerStoreName: String)
  extends FinatraTransformer[String, String, String, String](statsReceiver)
  with PersistentTimers {

  // Persistent timer store driven by stream time, keyed by the message key.
  private val timerStore =
    getPersistentTimerStore[String](timerStoreName, onEventTimer, PunctuationType.STREAM_TIME)

  override def onMessage(messageTime: Time, key: String, value: String): Unit = {
    forward(key, "onMessage " + key + " " + key.length)
    // Schedule an Expire timer delayedMessageTime after the message's event time.
    timerStore.addTimer(messageTime + delayedMessageTime, Expire, key)
  }

  // Fired by the timer store when a scheduled Expire timer comes due.
  private def onEventTimer(time: Time, metadata: TimerMetadata, key: String): Unit =
    forward(key, "onEventTimer " + key + " " + key.length)
}
| twitter/finatra | kafka-streams/kafka-streams/src/test/scala/com/twitter/finatra/kafkastreams/integration/finatratransformer/WordLengthFinatraTransformer.scala | Scala | apache-2.0 | 1,390 |
package edu.mit.csail.cap.query
package util
import scala.collection.mutable
/**
* The edges are directed from the root downwards.
*
* Each node corresponds to a subtree rooted at the node as a graph.
*
* Contains at least one node.
*
* The elements in the tree must be distinct for the graph view to function
* correctly.
*/
trait Tree[V] extends Graph[V, Unit] {
  /** Data element at the root of the tree. */
  def data: V

  /** Number of edges on the longest path from the root to a leaf. */
  def depth: Int = children match {
    case Nil => 0
    case l => 1 + l.map(_.depth).max
  }

  /** Children of the root. */
  def children: Traversable[Tree[V]]

  /**
   * Depth-first search for the sub-tree rooted at the node holding `v`.
   *
   * Fix: the original used an explicit `return` inside the children loop,
   * which is a non-local return (implemented by throwing). The lazy view
   * below short-circuits at the first hit with the same DFS order.
   */
  def search(v: V): Option[Tree[V]] =
    if (data == v)
      Some(this)
    else
      children.view.flatMap(_.search(v)).headOption

  /** Traverses nodes in DFS fashion: root first, then children recursively. */
  override def nodes =
    Traversable(data).view ++ children.view.flatMap(_.nodes)

  /** Traverses edges in DFS fashion (each edge carries the unit label). */
  override def foreach[U](f: Edge[V, Unit] => U) {
    for (c <- children) {
      f(Edge(data, (), c.data))
      c.foreach(f)
    }
  }

  /** Outgoing edges of `v`: its children, each labeled with unit. */
  override def outbound(v: V) = search(v) match {
    case Some(kid) =>
      for (c <- kid.children)
        yield (c.data, ())
    case None => None
  }

  /** Whether `v` occurs anywhere in this tree. */
  override def has(v: V) = (data == v) || children.exists(_.has(v))

  // In a tree, every non-root node contributes exactly one edge.
  override def numEdges = children.map(_.numNodes).sum

  override def numNodes = 1 + numEdges

  /** A tree is "empty" (a leaf) when the root has no children. */
  override def isEmpty = children.isEmpty

  override def toString = "tree with " + numNodes + " nodes at " + data
}
/**
* Linked tree data structure.
*
* Data elements are mutable.
* Nodes are mutable as long as the tree property is maintained
* (no checks that you might try to add this to this.)
*
* Operations are local to a node. Data is invoked only when necessary.
*
* Children are ordered.
*/
trait LinkedTree[Repr <: LinkedTree[Repr, V], V] extends Tree[V] { self: Repr =>
  /** Add a direct child as last. */
  def add(t: Repr)

  /** Add a child and return it (useful for chained construction). */
  def +(t: Repr): Repr = {
    add(t)
    t
  }

  /** Add all children and return this tree. */
  def addAll(ts: Traversable[Repr]): Repr = {
    for (t <- ts) add(t)
    this
  }

  /** Remove a direct child by data. */
  def remove(v: V)

  /** Create a new tree with the same data but no children. */
  def copy: Repr

  /** Ordered list of children. */
  override def children: List[Repr]

  /** Children of children, preserving order. */
  def grandChildren: List[Repr] =
    for (c <- children; c1 <- c.children) yield c1

  /**
   * Make a copy containing only nodes satisfying f.
   * The children of the removed nodes are inherited
   * by the closest kept ancestor or made roots.
   */
  def select(f: Repr => Boolean): List[Repr] = {
    // Recursively copy kept descendants of `in` under `out`.
    def helper(in: Repr, out: Repr) {
      assert(f(out))
      for (inc <- in.children)
        if (f(inc))
          helper(inc, out + inc.copy)
        else
          helper(inc, out)
    }
    if (f(this)) {
      val out = copy
      helper(this, out)
      out :: Nil
    } else
      children.flatMap(_.select(f))
  }

  /**
   * Keep every node satisfying f together with all of its ancestors.
   * Returns None when no node in this tree matches.
   */
  def selectWithParents(f: Repr => Boolean): Option[Repr] = {
    val kids: List[Repr] = children.flatMap(_.selectWithParents(f))
    // nonEmpty instead of `size > 0`: List.size is O(n).
    if (kids.nonEmpty)
      Some(copy.addAll(kids))
    else if (f(this))
      Some(copy)
    else
      None
  }

  /** Check that data is pairwise similar in both trees (same shape required). */
  def isSimilar(that: Repr, similar: (V, V) => Boolean): Boolean =
    similar(this.data, that.data) &&
      this.children.corresponds(that.children) { _.isSimilar(_, similar) }

  /**
   * Compute top-most nodes in the tree for which the condition holds.
   */
  def find(f: Repr => Boolean): List[Repr] =
    if (f(this))
      this :: Nil
    else
      children.flatMap(_.find(f))

  /** Traverses trees in DFS fashion: this node first, then children recursively. */
  def trees: Traversable[Repr] =
    Traversable(this).view ++ children.view.flatMap(_.trees)

  /**
   * Make a copy by merging children with parents
   * if they have the same f-value.
   * Preserves only the top-level value.
   */
  def project[T](f: Repr => T): Repr = {
    // Copy descendants of `in` under `out`, flattening runs of equal f-values.
    def helper(in: Repr, out: Repr) {
      assert(f(in) == f(out))
      for (inc <- in.children)
        if (f(inc) == f(out))
          helper(inc, out)
        else
          helper(inc, out + inc.copy)
    }
    val out = copy
    helper(this, out)
    out
  }
}
object LinkedTree {
  /** Convenience operations over a forest (a list of root trees). */
  implicit class Forest[Repr <: LinkedTree[Repr, V], V](val roots: List[LinkedTree[Repr, V]]) extends AnyVal {
    /** Applies select to every root and concatenates the surviving trees. */
    def select(f: Repr => Boolean): List[Repr] =
      for (t <- roots; s <- t.select(f)) yield s

    /** Applies project to every root. */
    def project(f: Repr => _): List[Repr] =
      for (t <- roots) yield t.project(f)

    /** Total number of nodes over all trees in the forest. */
    def numNodes = roots.map(_.numNodes).sum

    /** Total number of edges over all trees in the forest. */
    def numEdges = roots.map(_.numEdges).sum
  }
}
/** Linked list implementation of a linked tree */
trait LinkedListTree[Repr <: LinkedListTree[Repr, V], V] extends LinkedTree[Repr, V] { self: Repr =>
  // Children stored in reverse insertion order; prepending keeps add O(1),
  // and `children` reverses on read to restore insertion order.
  private var reversedChildren: List[Repr] = Nil

  override def add(t: Repr): Unit = {
    reversedChildren = t :: reversedChildren
  }

  override def remove(v: V): Unit = {
    reversedChildren = reversedChildren.filterNot(_.data == v)
  }

  override def children: List[Repr] = reversedChildren.reverse
}
/** Default concrete tree node backed by [[LinkedListTree]]. */
class LinkedTreeImpl[V](val data: V) extends LinkedListTree[LinkedTreeImpl[V], V] {
  // A copy shares the data reference but starts with no children.
  override def copy = new LinkedTreeImpl(data)
}
| kyessenov/semeru | src/main/scala/util/Tree.scala | Scala | gpl-3.0 | 5,451 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package statements
import com.intellij.ide.util.EditSourceUtil
import com.intellij.lang.ASTNode
import com.intellij.navigation.ItemPresentation
import com.intellij.openapi.editor.colors.TextAttributesKey
import com.intellij.psi._
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.stubs.ScTypeAliasStub
/**
* @author Alexander Podkhalyuzin
* Date: 22.02.2008
* Time: 9:55:13
*/
/** PSI implementation of a Scala type alias definition (`type T = ...`). */
class ScTypeAliasDefinitionImpl extends ScalaStubBasedElementImpl[ScTypeAlias] with ScTypeAliasDefinition {
  // AST-backed constructor: element parsed directly from source.
  def this(node: ASTNode) = {this(); setNode(node)}
  // Stub-backed constructor: element restored from the stub index, no AST yet.
  def this(stub: ScTypeAliasStub) = {this(); setStub(stub); setNullNode()}
  // Resolves the identifier PSI element; falls back to rebuilding one from
  // the stub name when the tree has no identifier child.
  def nameId = findChildByType[PsiElement](ScalaTokenTypes.tIDENTIFIER) match {
    case null =>
      val name = getStub.asInstanceOf[ScTypeAliasStub].getName
      val id = ScalaPsiElementFactory.createIdentifier(name, getManager)
      if (id == null) {
        assert(assertion = false, s"Id is null. Name: $name. Text: $getText. Parent text: ${getParent.getText}.")
      }
      id.getPsi
    case n => n
  }
  // Navigation/caret operations target the alias name, not the whole node.
  override def getTextOffset: Int = nameId.getTextRange.getStartOffset
  override def navigate(requestFocus: Boolean) {
    val descriptor = EditSourceUtil.getDescriptor(nameId);
    if (descriptor != null) descriptor.navigate(requestFocus)
  }
  override def toString: String = "ScTypeAliasDefinition: " + name
  // Structure-view / goto presentation: name plus enclosing class in parens.
  override def getPresentation: ItemPresentation = {
    new ItemPresentation() {
      def getPresentableText = name
      def getTextAttributesKey: TextAttributesKey = null
      def getLocationString: String = "(" + ScTypeAliasDefinitionImpl.this.containingClass.qualifiedName + ")"
      override def getIcon(open: Boolean) = ScTypeAliasDefinitionImpl.this.getIcon(0)
    }
  }
  override def getOriginalElement: PsiElement = super[ScTypeAliasDefinition].getOriginalElement
  // Double-dispatch into the Scala-aware visitor; fall back for generic visitors.
  override def accept(visitor: ScalaElementVisitor) {
    visitor.visitTypeAliasDefinition(this)
  }
  override def accept(visitor: PsiElementVisitor) {
    visitor match {
      case s: ScalaElementVisitor => s.visitTypeAliasDefinition(this)
      case _ => super.accept(visitor)
    }
  }
}
| triggerNZ/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScTypeAliasDefinitionImpl.scala | Scala | apache-2.0 | 2,380 |
package utils
import scala.annotation.switch
package object tree {
  /**
   * Controls whether a zip/merge step requires a key to be defined in the
   * tree operand, in the "op" operand, or in both before it yields a pair.
   */
  sealed class Strictness(val op_strict:Boolean, val tree_strict:Boolean) {
    // True when the key is present in every operand this strictness requires.
    @inline final def succeeds[K](t:PrefixTreeLike[K,_,_],o:PrefixTreeLike[K,_,_])(k:K) =
      (!tree_strict || t.isDefinedAt(k)) && (!op_strict || o.isDefinedAt(k))
    // Returns the (t(k), o(k)) pair, or null when the strictness check fails,
    // a lookup throws NoSuchElementException, or either looked-up value is
    // null. null serves as the in-band "no result" marker here.
    final def apply[K,T<:PrefixTreeLike[K,_,T],O<:PrefixTreeLike[K,_,O]](k:K,t:T,o:O) = {
      try { if (succeeds(t,o)(k)) { val r=(t(k),o(k)); if (r._1==null||r._2==null) null else r } else null } catch { case _:NoSuchElementException=> null }
    }
  }
  // Key must exist in the op operand only.
  val OP_STRICT = new Strictness(true,false)
  // Key must exist in the tree operand only.
  val RIGHT_STRICT = new Strictness(false,true)
  // Key must exist in both operands.
  val FULL_STRICT = new Strictness(true,true)
  // No existence requirement on either operand.
  val NOT_STRICT = new Strictness(false,false)
  /**
   * Strategy for combining two trees; `id` selects the concrete behavior in
   * the @switch match below.
   */
  sealed class MergeMode(id:Int) {
    def apply[K,V,T<:PrefixTreeLike[K,V,T]](r1:T,r2:T,mergeValues:(Option[V],Option[V])=>Option[V])(implicit bf:PrefixTreeLikeBuilder[K,V,T]):T = {
      (id: @switch) match {
        case 0 => r1
        case 1 => r2
        case 2 => r1.merge(r2,false,mergeValues,false)
        case 3 => r1.merge(r2,true,mergeValues,false)
        case 4 => r2.merge(r1,false,mergeValues,false)
        case 5 => r2.merge(r1,true,mergeValues,false)
      }
    }
  }
  /** keep first node */
  val KEEP = new MergeMode(0)
  /** keep second node */
  val REPLACE = new MergeMode(1)
  /** merge values and common children (2 on 1), keep different children */
  val MERGE = new MergeMode(2)
  /** merge values and keep different children. second node children replace first node ones when common */
  val MERGE_OVERWRITE = new MergeMode(3)
  /** merge values and common children (1 on 2), keep different children */
  val MERGE_REVERSE = new MergeMode(4)
  /** merge values and keep different children. first node children stay when common */
  val MERGE_REVERSE_OVERWRITE = new MergeMode(5)
}
/*
* Copyright 2013 - 2020 Outworkers Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.outworkers.phantom.finagle.query.prepared
import com.outworkers.phantom.PhantomSuite
import com.outworkers.phantom.finagle._
import com.outworkers.phantom.tables._
import com.outworkers.util.samplers._
/**
 * Integration tests for prepared SELECT statements (sync `prepare()` and
 * async `prepareAsync()`), verifying they return the same rows as the
 * equivalent non-prepared queries, including bound LIMIT clauses.
 */
class PreparedSelectQueryTest extends PhantomSuite with TwitterFutures {
  override def beforeAll(): Unit = {
    super.beforeAll()
    System.setProperty("user.timezone", "Canada/Pacific") // perform these tests in non utc timezone
    new CassandraTableStoreMethods(database.recipes).createSchema()
    database.articlesByAuthor.createSchema()
    database.primitives.createSchema()
    // Cassandra 2.2 primitive types are only available on protocol v4+.
    if (session.v4orNewer) {
      database.primitivesCassandra22.createSchema()
    }
  }
  it should "serialise and execute a prepared select with the same clause as a normal one" in {
    val recipe = gen[Recipe]
    val query = database.recipes.select.where(_.url eqs ?).prepare()
    val operation = for {
      _ <- database.recipes.truncate.future
      _ <- database.recipes.store(recipe).future()
      select <- query.bind(recipe.url).one()
      select2 <- database.recipes.select.where(_.url eqs recipe.url).one()
    } yield (select, select2)
    // Both the prepared and the ad-hoc query must surface the stored row.
    whenReady(operation) { case (items, items2) =>
      items shouldBe defined
      items.value shouldEqual recipe
      items2 shouldBe defined
      items2.value shouldEqual recipe
    }
  }
  it should "serialise and execute an async prepared select with the same clause as a normal one" in {
    val recipe = gen[Recipe]
    val operation = for {
      query <- database.recipes.select.where(_.url eqs ?).prepareAsync()
      _ <- database.recipes.truncate.future
      _ <- database.recipes.store(recipe).future()
      select <- query.bind(recipe.url).one()
      select2 <- database.recipes.select.where(_.url eqs recipe.url).one()
    } yield (select, select2)
    whenReady(operation) { case (items, items2) =>
      items shouldBe defined
      items.value shouldEqual recipe
      items2 shouldBe defined
      items2.value shouldEqual recipe
    }
  }
  it should "allow setting a limit using a prepared statement" in {
    val recipe = gen[Recipe]
    val limit = 1
    // LIMIT is itself a bindable marker in the prepared statement.
    val query = database.recipes.select.where(_.url eqs ?).limit(?).prepare()
    val operation = for {
      _ <- database.recipes.truncate.future
      _ <- database.recipes.store(recipe).future()
      select <- query.bind(recipe.url, limit).fetch()
      select2 <- database.recipes.select.where(_.url eqs recipe.url).one()
    } yield (select, select2)
    whenReady(operation) { case (items, items2) =>
      items.size shouldEqual limit
      items should contain (recipe)
      items2 shouldBe defined
      items2.value shouldEqual recipe
    }
  }
  it should "allow setting a limit using an async prepared statement" in {
    val recipe = gen[Recipe]
    val limit = 1
    val operation = for {
      query <- database.recipes.select.where(_.url eqs ?).limit(?).prepareAsync()
      _ <- database.recipes.truncate.future
      _ <- database.recipes.store(recipe).future()
      select <- query.bind(recipe.url, limit).fetch()
      select2 <- database.recipes.select.where(_.url eqs recipe.url).one()
    } yield (select, select2)
    whenReady(operation) { case (items, items2) =>
      items.size shouldEqual limit
      items should contain (recipe)
      items2 shouldBe defined
      items2.value shouldEqual recipe
    }
  }
  it should "serialise and execute a prepared select statement with the correct number of arguments" in {
    val recipe = gen[Recipe]
    val query = database.recipes.select.where(_.url eqs ?).prepare()
    val operation = for {
      _ <- database.recipes.truncate.future
      _ <- database.recipes.store(recipe).future()
      select <- query.bind(recipe.url).one()
    } yield select
    whenReady(operation) { items =>
      items shouldBe defined
      items.value shouldEqual recipe
    }
  }
  it should "serialise and execute am async prepared select statement with the correct number of arguments" in {
    val recipe = gen[Recipe]
    val operation = for {
      query <- database.recipes.select.where(_.url eqs ?).prepareAsync()
      _ <- database.recipes.truncate.future
      _ <- database.recipes.store(recipe).future()
      select <- query.bind(recipe.url).one()
    } yield select
    whenReady(operation) { items =>
      items shouldBe defined
      items.value shouldEqual recipe
    }
  }
  it should "serialise and execute an async prepared statement with 2 arguments" in {
    val sample = gen[Article]
    val sample2 = gen[Article]
    val owner = gen[UUID]
    val category = gen[UUID]
    val category2 = gen[UUID]
    // Same owner, two categories: each bind must hit only its own row.
    val op = for {
      query <- database.articlesByAuthor.select.where(_.author_id eqs ?).and(_.category eqs ?).prepareAsync()
      _ <- database.articlesByAuthor.store(owner, category, sample).future()
      _ <- database.articlesByAuthor.store(owner, category2, sample2).future()
      get <- query.bind(owner, category).one()
      get2 <- query.bind(owner, category2).one()
    } yield (get, get2)
    whenReady(op) { case (res, res2) =>
      res shouldBe defined
      res.value shouldEqual sample
      res2 shouldBe defined
      res2.value shouldEqual sample2
    }
  }
  it should "serialise and execute a primitives prepared select statement with the correct number of arguments" in {
    val primitive = gen[PrimitiveRecord]
    val query = database.primitives.select.where(_.pkey eqs ?).prepare()
    val operation = for {
      _ <- database.primitives.truncate.future
      _ <- database.primitives.store(primitive).future()
      select <- query.bind(primitive.pkey).one()
    } yield select
    whenReady(operation) { items =>
      items shouldBe defined
      items.value shouldEqual primitive
    }
  }
  it should "serialise and execute an async primitives prepared select statement with the correct number of arguments" in {
    val primitive = gen[PrimitiveRecord]
    val operation = for {
      query <- database.primitives.select.where(_.pkey eqs ?).prepareAsync()
      _ <- database.primitives.truncate.future
      _ <- database.primitives.store(primitive).future()
      select <- query.bind(primitive.pkey).one()
    } yield select
    whenReady(operation) { items =>
      items shouldBe defined
      items.value shouldEqual primitive
    }
  }
  // The Cassandra 2.2 type tests only run against protocol v4+ clusters.
  if (session.v4orNewer) {
    it should "serialise and execute a primitives cassandra 2.2 prepared select statement with the correct number of arguments" in {
      val primitive = gen[PrimitiveCassandra22]
      val query = database.primitivesCassandra22.select.where(_.pkey eqs ?).prepare()
      val operation = for {
        _ <- database.primitivesCassandra22.truncate.future
        _ <- database.primitivesCassandra22.store(primitive).future()
        select <- query.bind(primitive.pkey).one()
      } yield select
      whenReady(operation) { items =>
        items shouldBe defined
        items.value shouldEqual primitive
      }
    }
    it should "serialise and execute an async primitives cassandra 2.2 prepared select statement with the correct number of arguments" in {
      val primitive = gen[PrimitiveCassandra22]
      val operation = for {
        query <- database.primitivesCassandra22.select.where(_.pkey eqs ?).prepareAsync()
        _ <- database.primitivesCassandra22.truncate.future
        _ <- database.primitivesCassandra22.store(primitive).future()
        select <- query.bind(primitive.pkey).one()
      } yield select
      whenReady(operation) { items =>
        items shouldBe defined
        items.value shouldEqual primitive
      }
    }
  }
}
| outworkers/phantom | phantom-finagle/src/test/scala/com/outworkers/phantom/finagle/query/prepared/PreparedSelectQueryTest.scala | Scala | apache-2.0 | 8,246 |
/*
* @author Philip Stutz
* @author Mihaela Verman
*
* Copyright 2013 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.triplerush.vertices
/**
 * Index vertex that maintains an explicit running cardinality, updated
 * incrementally through handleCardinalityIncrement.
 */
abstract class CardinalityCountingIndex(
  id: Long) extends OptimizedIndexVertex(id) {
  // @transient: the counter is not serialized with the vertex.
  // NOTE(review): presumably it is rebuilt via increments after recovery — confirm.
  @transient var cardinalityCounter = 0
  override def cardinality: Int = cardinalityCounter
  override def handleCardinalityIncrement(i: Int): Unit = cardinalityCounter += i
}
| uzh/triplerush | src/main/scala/com/signalcollect/triplerush/vertices/CardinalityCountingIndex.scala | Scala | apache-2.0 | 994 |
package yuuto.yuutolib.inventory
import net.minecraft.inventory.ISidedInventory
import net.minecraft.item.ItemStack
import net.minecraftforge.common.util.ForgeDirection
import net.minecraft.inventory.IInventory
/**
* @author Jacob
*/
/**
 * Static helpers for moving [[ItemStack]]s between inventories, honouring
 * sided-inventory access rules (which slots are reachable from which face).
 *
 * All methods support a `simulate` mode that reports whether a transfer
 * would succeed without modifying the target inventory. Note that the
 * single-stack merge overloads still mutate the *source* stack's size even
 * in simulate mode; pass a copy when that matters.
 */
object InventoryHelper {
  /**
   * Pulls stacks from `origin` into `target`, pushing into the face opposite
   * the pull face.
   *
   * @param origin inventory to extract from
   * @param target inventory to insert into
   * @param max maximum number of items to move
   * @param pullDirection face of `origin` items are extracted through
   * @param simulate when true, only checks whether a transfer is possible
   * @return true if a transfer happened (or, when simulating, would happen)
   */
  def pullStacks(origin:IInventory, target:IInventory, max:Int, pullDirection:ForgeDirection, simulate:Boolean):Boolean={
    return pullStacks(origin, target, max, pullDirection, pullDirection.getOpposite(), simulate);
  }
  /**
   * Pulls stacks from `origin` into `target` with independent pull and push
   * faces. Moves the first extractable stack that fits somewhere in `target`.
   * NOTE(review): accessible slots are always queried for ForgeDirection.DOWN
   * rather than `pullDirection` — confirm this is intentional.
   */
  def pullStacks(origin:IInventory, target:IInventory, max:Int, pullDirection:ForgeDirection, pushDirection:ForgeDirection, simulate:Boolean):Boolean={
    val inv:ISidedInventory = SidedInventoryWrapper.getWrapper(origin);
    if(inv.getSizeInventory() < 1)
      return false;
    val tar:ISidedInventory = SidedInventoryWrapper.getWrapper(target);
    val slots:Array[Int] = inv.getAccessibleSlotsFromSide(ForgeDirection.DOWN.ordinal());
    for(i <- 0 until slots.length){
      if(inv.getStackInSlot(slots(i)) == null || inv.getStackInSlot(slots(i)).stackSize < 1){}
      else if(!inv.canExtractItem(slots(i), inv.getStackInSlot(slots(i)), pullDirection.ordinal())){}
      // Fixed: merge from the accessible slot slots(i), not from the loop
      // index i (they differ whenever the sided mapping is not the identity).
      else if(mergeStack(inv, slots(i), tar, max, pushDirection, simulate)){
        return true;
      }
    }
    return false;
  }
  /**
   * Merges the stack held in `src` slot `srcSlot` into `target` through
   * `insertDirection`. In simulate mode the source stack is copied first,
   * so neither inventory is modified.
   *
   * @return true once up to `max` items were (or could be) transferred
   */
  def mergeStack(src:ISidedInventory, srcSlot:Int, target:ISidedInventory, max:Int, insertDirection:ForgeDirection, simulate:Boolean):Boolean={
    if(src.getStackInSlot(srcSlot) == null){
      return false;
    }
    // Simulate operates on a copy so the source slot is left untouched.
    val stack:ItemStack = if(simulate){src.getStackInSlot(srcSlot).copy()}else{src.getStackInSlot(srcSlot)};
    var maxTransfer = Math.min(max, stack.stackSize);
    if(maxTransfer < 1)
      return false;
    val slots:Array[Int] = target.getAccessibleSlotsFromSide(insertDirection.ordinal());
    for(i<- 0 until slots.length){
      val tStack:ItemStack = target.getStackInSlot(slots(i));
      if(!target.canInsertItem(slots(i), stack, insertDirection.ordinal())){}
      else if(tStack != null &&
          (tStack.stackSize == tStack.getMaxStackSize() ||
           tStack.stackSize == target.getInventoryStackLimit())){}
      else if(tStack == null){
        // Empty slot: the whole remaining budget fits at once.
        if(!simulate){
          target.setInventorySlotContents(slots(i), src.decrStackSize(srcSlot, maxTransfer));
        }
        return true;
      }
      else if(!tStack.isItemEqual(stack)){}
      else if(!ItemStack.areItemStackTagsEqual(stack, tStack)){}
      else{
        // Room left in this slot, bounded by max stack size, the target's
        // per-slot limit, and the remaining transfer budget.
        var maxT:Int = stack.getMaxStackSize();
        if(target.getInventoryStackLimit() < maxT){
          maxT = target.getInventoryStackLimit();
        }
        maxT -= tStack.stackSize;
        if(maxTransfer < maxT){
          maxT = maxTransfer;
        }
        if(maxT == 0){}
        else {
          if(simulate){
            // Fixed: account for what this slot can actually take (maxT),
            // not the overall `max` parameter; the old code made simulation
            // over-report how much would be transferred.
            maxTransfer-=maxT;
          }else{
            val d:Int=src.decrStackSize(srcSlot, maxT).stackSize;
            tStack.stackSize += d;
            maxTransfer-=d;
          }
        }
        if(simulate && maxTransfer < 1){
          return true;
        }else if(!simulate &&(src.getStackInSlot(srcSlot) == null || src.getStackInSlot(srcSlot).stackSize<1 || maxTransfer < 1)){
          return true;
        }
      }
    }
    return false;
  }
  /**
   * Merges `stack` into `target` through `insertDirection`, shrinking
   * `stack.stackSize` by the amount placed (even when simulating).
   *
   * @return true if the stack was (or would be) fully consumed
   */
  def mergeStack(stack:ItemStack, target:ISidedInventory, insertDirection:ForgeDirection, simulate:Boolean):Boolean={
    if(stack.stackSize < 1){
      return false;
    }
    val slots:Array[Int] = target.getAccessibleSlotsFromSide(insertDirection.ordinal());
    for(i<- 0 until slots.length){
      val tStack:ItemStack = target.getStackInSlot(slots(i));
      if(!target.canInsertItem(slots(i), stack, insertDirection.ordinal())){}
      else if(tStack != null &&
          (tStack.stackSize == tStack.getMaxStackSize() ||
           tStack.stackSize == target.getInventoryStackLimit())){}
      else if(tStack == null){
        if(!simulate){
          target.setInventorySlotContents(slots(i), stack.copy());
        }
        // The input stack is emptied in both modes; only the inventory
        // write is guarded by `simulate`.
        stack.stackSize = 0;
        return true;
      }
      else if(tStack.getItem() != stack.getItem()){}
      else if(tStack.getItemDamage() != stack.getItemDamage()){}
      else if(!ItemStack.areItemStackTagsEqual(stack, tStack)){}
      else{
        var max:Int = stack.getMaxStackSize();
        if(target.getInventoryStackLimit() < max){
          max = target.getInventoryStackLimit();
        }
        max -= tStack.stackSize;
        if(stack.stackSize < max){
          max = stack.stackSize;
        }
        if(max <= 0){}
        else {
          if(!simulate){
            tStack.stackSize += max;
          }
          stack.stackSize -= max;
          if(stack.stackSize < 1){
            return true;
          }
        }
      }
    }
    return false;
  }
  /** Merges `stack` into a plain inventory by wrapping it as sided. */
  def mergeStack(stack:ItemStack, inv:IInventory, simulate:Boolean):Boolean={
    return mergeStack(stack, InventoryWrapper.getWrapper(inv), simulate);
  }
  /**
   * Merges `stack` into an extended inventory, consulting its own
   * canInsertItem rules slot by slot. Shrinks `stack.stackSize` by the
   * amount placed (even when simulating).
   */
  def mergeStack(stack:ItemStack, inv:IInventoryExtended, simulate:Boolean):Boolean={
    // Removed leftover debug println from this hot path.
    if(stack.stackSize < 1)
      return false;
    var max:Int= Math.min(stack.getMaxStackSize(), inv.getInventoryStackLimit());
    for(i<-0 until inv.getSizeInventory() if(inv.canInsertItem(i, stack))){
      var stack2=inv.getStackInSlot(i);
      if(stack2 == null){
        val m2=Math.min(stack.stackSize, max);
        if(m2 == stack.stackSize){
          stack2=stack.copy();
          stack.stackSize=0;
        }else{
          stack2=stack.splitStack(m2);
        }
        if(!simulate){
          inv.setInventorySlotContents(i, stack2);
        }
        if(stack.stackSize < 1)
          return true;
      }
      else if(!stack.isItemEqual(stack2)){}
      else if(!ItemStack.areItemStackTagsEqual(stack, stack2)){}
      else if(stack2.stackSize < max){
        val m2=Math.min(stack.stackSize, max-stack2.stackSize);
        if(!simulate){
          stack2.stackSize+=m2;
          inv.setInventorySlotContents(i, stack2);
        }
        stack.stackSize-=m2;
        if(stack.stackSize < 1)
          return true;
      }
    }
    return false;
  }
}
} | AnimeniacYuuto/YuutoLib | src/main/scala/yuuto/yuutolib/inventory/InventoryHelper.scala | Scala | gpl-3.0 | 6,148 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.zk
import java.util.Properties
import com.yammer.metrics.core.MetricName
import kafka.api.LeaderAndIsr
import kafka.cluster.Broker
import kafka.common.KafkaException
import kafka.controller.LeaderIsrAndControllerEpoch
import kafka.log.LogConfig
import kafka.metrics.KafkaMetricsGroup
import kafka.security.auth.SimpleAclAuthorizer.VersionedAcls
import kafka.security.auth.{Acl, Resource, ResourceType}
import kafka.server.ConfigType
import kafka.utils.Logging
import kafka.zookeeper._
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.security.token.delegation.{DelegationToken, TokenInformation}
import org.apache.kafka.common.utils.Time
import org.apache.zookeeper.KeeperException.{Code, NodeExistsException}
import org.apache.zookeeper.data.{ACL, Stat}
import org.apache.zookeeper.{CreateMode, KeeperException, ZooKeeper}
import scala.collection.mutable.ArrayBuffer
import scala.collection.{Seq, mutable}
/**
* Provides higher level Kafka-specific operations on top of the pipelined [[kafka.zookeeper.ZooKeeperClient]].
*
* This performs better than [[kafka.utils.ZkUtils]] and should replace it completely, eventually.
*
* Implementation note: this class includes methods for various components (Controller, Configs, Old Consumer, etc.)
* and returns instances of classes from the calling packages in some cases. This is not ideal, but it makes it
* easier to quickly migrate away from `ZkUtils`. We should revisit this once the migration is completed and tests are
* in place. We should also consider whether a monolithic [[kafka.zk.ZkData]] is the way to go.
*/
class KafkaZkClient private (zooKeeperClient: ZooKeeperClient, isSecure: Boolean, time: Time) extends AutoCloseable with
Logging with KafkaMetricsGroup {
  // Publish all metrics of this class under the kafka.server /
  // ZooKeeperClientMetrics group instead of the default derived name.
  override def metricName(name: String, metricTags: scala.collection.Map[String, String]): MetricName = {
    explicitMetricName("kafka.server", "ZooKeeperClientMetrics", name, metricTags)
  }
  // Histogram of ZooKeeper request round-trip latencies, in milliseconds.
  private val latencyMetric = newHistogram("ZooKeeperRequestLatencyMs")
  import KafkaZkClient._
  // Only for testing
  private[kafka] def currentZooKeeper: ZooKeeper = zooKeeperClient.currentZooKeeper
  /**
   * Create a sequential persistent path. That is, the znode will not be automatically deleted upon client's disconnect
   * and a monotonically increasing number will be appended to its name.
   *
   * @param path the path to create (with the monotonically increasing number appended)
   * @param data the znode data
   * @return the created path (including the appended monotonically increasing number)
   */
  private[zk] def createSequentialPersistentPath(path: String, data: Array[Byte]): String = {
    val createRequest = CreateRequest(path, data, acls(path), CreateMode.PERSISTENT_SEQUENTIAL)
    val createResponse = retryRequestUntilConnected(createRequest)
    // Any non-OK ZooKeeper result is surfaced to the caller here.
    createResponse.maybeThrow
    createResponse.name
  }
  /**
   * Registers the given broker in ZooKeeper by creating its znode (via
   * checkedEphemeralCreate) with the broker's JSON payload.
   * @param brokerInfo broker id, endpoints and znode path to register
   */
  def registerBrokerInZk(brokerInfo: BrokerInfo): Unit = {
    val path = brokerInfo.path
    checkedEphemeralCreate(path, brokerInfo.toJsonBytes)
    info(s"Registered broker ${brokerInfo.broker.id} at path $path with addresses: ${brokerInfo.broker.endPoints}")
  }
def updateBrokerInfoInZk(brokerInfo: BrokerInfo): Unit = {
val brokerIdPath = brokerInfo.path
val setDataRequest = SetDataRequest(brokerIdPath, brokerInfo.toJsonBytes, ZkVersion.NoVersion)
val response = retryRequestUntilConnected(setDataRequest)
response.maybeThrow()
info("Updated broker %d at path %s with addresses: %s".format(brokerInfo.broker.id, brokerIdPath, brokerInfo.broker.endPoints))
}
  /**
   * Gets topic partition states for the given partitions.
   * @param partitions the partitions for which we want to get states.
   * @return sequence of GetDataResponses whose contexts are the partitions they are associated with.
   */
  def getTopicPartitionStatesRaw(partitions: Seq[TopicPartition]): Seq[GetDataResponse] = {
    // The partition travels as request context so each response can be
    // matched back to its partition by the caller.
    val getDataRequests = partitions.map { partition =>
      GetDataRequest(TopicPartitionStateZNode.path(partition), ctx = Some(partition))
    }
    retryRequestsUntilConnected(getDataRequests)
  }
  /**
   * Sets topic partition states for the given partitions.
   * @param leaderIsrAndControllerEpochs the partition states of each partition whose state we wish to set.
   * @return sequence of SetDataResponse whose contexts are the partitions they are associated with.
   */
  def setTopicPartitionStatesRaw(leaderIsrAndControllerEpochs: Map[TopicPartition, LeaderIsrAndControllerEpoch]): Seq[SetDataResponse] = {
    val setDataRequests = leaderIsrAndControllerEpochs.map { case (partition, leaderIsrAndControllerEpoch) =>
      val path = TopicPartitionStateZNode.path(partition)
      val data = TopicPartitionStateZNode.encode(leaderIsrAndControllerEpoch)
      // The current zkVersion is passed as the expected version, making each
      // write a conditional (compare-and-set) update.
      SetDataRequest(path, data, leaderIsrAndControllerEpoch.leaderAndIsr.zkVersion, Some(partition))
    }
    retryRequestsUntilConnected(setDataRequests.toSeq)
  }
  /**
   * Creates topic partition state znodes for the given partitions.
   * @param leaderIsrAndControllerEpochs the partition states of each partition whose state we wish to set.
   * @return sequence of CreateResponse whose contexts are the partitions they are associated with.
   */
  def createTopicPartitionStatesRaw(leaderIsrAndControllerEpochs: Map[TopicPartition, LeaderIsrAndControllerEpoch]): Seq[CreateResponse] = {
    // Parent topic and partition znodes must exist before the state znodes.
    createTopicPartitions(leaderIsrAndControllerEpochs.keys.map(_.topic).toSet.toSeq)
    createTopicPartition(leaderIsrAndControllerEpochs.keys.toSeq)
    val createRequests = leaderIsrAndControllerEpochs.map { case (partition, leaderIsrAndControllerEpoch) =>
      val path = TopicPartitionStateZNode.path(partition)
      val data = TopicPartitionStateZNode.encode(leaderIsrAndControllerEpoch)
      CreateRequest(path, data, acls(path), CreateMode.PERSISTENT, Some(partition))
    }
    retryRequestsUntilConnected(createRequests.toSeq)
  }
  /**
   * Sets the controller epoch conditioned on the given epochZkVersion.
   * @param epoch the epoch to set
   * @param epochZkVersion the expected version number of the epoch znode.
   * @return SetDataResponse
   */
  def setControllerEpochRaw(epoch: Int, epochZkVersion: Int): SetDataResponse = {
    // Conditional write: fails with BADVERSION if another controller moved the epoch.
    val setDataRequest = SetDataRequest(ControllerEpochZNode.path, ControllerEpochZNode.encode(epoch), epochZkVersion)
    retryRequestUntilConnected(setDataRequest)
  }
  /**
   * Creates the controller epoch znode.
   * @param epoch the epoch to set
   * @return CreateResponse
   */
  def createControllerEpochRaw(epoch: Int): CreateResponse = {
    val createRequest = CreateRequest(ControllerEpochZNode.path, ControllerEpochZNode.encode(epoch),
      acls(ControllerEpochZNode.path), CreateMode.PERSISTENT)
    retryRequestUntilConnected(createRequest)
  }
  /**
   * Update the partition states of multiple partitions in zookeeper.
   * @param leaderAndIsrs The partition states to update.
   * @param controllerEpoch The current controller epoch.
   * @return UpdateLeaderAndIsrResult instance containing per partition results.
   */
  def updateLeaderAndIsr(leaderAndIsrs: Map[TopicPartition, LeaderAndIsr], controllerEpoch: Int): UpdateLeaderAndIsrResult = {
    val successfulUpdates = mutable.Map.empty[TopicPartition, LeaderAndIsr]
    val updatesToRetry = mutable.Buffer.empty[TopicPartition]
    val failed = mutable.Map.empty[TopicPartition, Exception]
    val leaderIsrAndControllerEpochs = leaderAndIsrs.map { case (partition, leaderAndIsr) =>
      partition -> LeaderIsrAndControllerEpoch(leaderAndIsr, controllerEpoch)
    }
    val setDataResponses = try {
      setTopicPartitionStatesRaw(leaderIsrAndControllerEpochs)
    } catch {
      // A failure before any response arrives marks every partition failed.
      case e: Exception =>
        leaderAndIsrs.keys.foreach(partition => failed.put(partition, e))
        return UpdateLeaderAndIsrResult(successfulUpdates.toMap, updatesToRetry, failed.toMap)
    }
    setDataResponses.foreach { setDataResponse =>
      val partition = setDataResponse.ctx.get.asInstanceOf[TopicPartition]
      setDataResponse.resultCode match {
        case Code.OK =>
          // Record the new zkVersion so subsequent conditional updates succeed.
          val updatedLeaderAndIsr = leaderAndIsrs(partition).withZkVersion(setDataResponse.stat.getVersion)
          successfulUpdates.put(partition, updatedLeaderAndIsr)
        // BADVERSION means a concurrent update; the caller should re-read and retry.
        case Code.BADVERSION => updatesToRetry += partition
        case _ => failed.put(partition, setDataResponse.resultException.get)
      }
    }
    UpdateLeaderAndIsrResult(successfulUpdates.toMap, updatesToRetry, failed.toMap)
  }
  /**
   * Get log configs that merge local configs with topic-level configs in zookeeper.
   * @param topics The topics to get log configs for.
   * @param config The local configs.
   * @return A tuple of two values:
   *         1. The successfully gathered log configs
   *         2. Exceptions corresponding to failed log config lookups.
   */
  def getLogConfigs(topics: Seq[String], config: java.util.Map[String, AnyRef]):
  (Map[String, LogConfig], Map[String, Exception]) = {
    val logConfigs = mutable.Map.empty[String, LogConfig]
    val failed = mutable.Map.empty[String, Exception]
    val configResponses = try {
      getTopicConfigs(topics)
    } catch {
      case e: Exception =>
        topics.foreach(topic => failed.put(topic, e))
        return (logConfigs.toMap, failed.toMap)
    }
    configResponses.foreach { configResponse =>
      val topic = configResponse.ctx.get.asInstanceOf[String]
      configResponse.resultCode match {
        case Code.OK =>
          val overrides = ConfigEntityZNode.decode(configResponse.data)
          val logConfig = LogConfig.fromProps(config, overrides)
          logConfigs.put(topic, logConfig)
        // No config znode: fall back to the broker-local defaults only.
        case Code.NONODE =>
          val logConfig = LogConfig.fromProps(config, new Properties)
          logConfigs.put(topic, logConfig)
        case _ => failed.put(topic, configResponse.resultException.get)
      }
    }
    (logConfigs.toMap, failed.toMap)
  }
  /**
   * Get entity configs for a given entity name
   * @param rootEntityType entity type
   * @param sanitizedEntityName entity name
   * @return the entity's stored properties, or empty Properties when the
   *         config znode does not exist
   */
  def getEntityConfigs(rootEntityType: String, sanitizedEntityName: String): Properties = {
    val getDataRequest = GetDataRequest(ConfigEntityZNode.path(rootEntityType, sanitizedEntityName))
    val getDataResponse = retryRequestUntilConnected(getDataRequest)
    getDataResponse.resultCode match {
      case Code.OK =>
        ConfigEntityZNode.decode(getDataResponse.data)
      case Code.NONODE => new Properties()
      case _ => throw getDataResponse.resultException.get
    }
  }
/**
* Sets or creates the entity znode path with the given configs depending
* on whether it already exists or not.
* @param rootEntityType entity type
* @param sanitizedEntityName entity name
* @throws KeeperException if there is an error while setting or creating the znode
*/
def setOrCreateEntityConfigs(rootEntityType: String, sanitizedEntityName: String, config: Properties) = {
def set(configData: Array[Byte]): SetDataResponse = {
val setDataRequest = SetDataRequest(ConfigEntityZNode.path(rootEntityType, sanitizedEntityName), ConfigEntityZNode.encode(config), ZkVersion.NoVersion)
retryRequestUntilConnected(setDataRequest)
}
def create(configData: Array[Byte]) = {
val path = ConfigEntityZNode.path(rootEntityType, sanitizedEntityName)
createRecursive(path, ConfigEntityZNode.encode(config))
}
val configData = ConfigEntityZNode.encode(config)
val setDataResponse = set(configData)
setDataResponse.resultCode match {
case Code.NONODE => create(configData)
case _ => setDataResponse.maybeThrow
}
}
/**
* Returns all the entities for a given entityType
* @param entityType entity type
* @return List of all entity names
*/
def getAllEntitiesWithConfig(entityType: String): Seq[String] = {
getChildren(ConfigEntityTypeZNode.path(entityType))
}
  /**
   * Creates config change notification
   * @param sanitizedEntityPath path to write into the sequential notification znode
   * @throws KeeperException if there is an error while setting or creating the znode
   */
  def createConfigChangeNotification(sanitizedEntityPath: String): Unit = {
    // Parent notification path must exist before appending a sequential child.
    makeSurePersistentPathExists(ConfigEntityChangeNotificationZNode.path)
    val path = ConfigEntityChangeNotificationSequenceZNode.createPath
    val createRequest = CreateRequest(path, ConfigEntityChangeNotificationSequenceZNode.encode(sanitizedEntityPath), acls(path), CreateMode.PERSISTENT_SEQUENTIAL)
    val createResponse = retryRequestUntilConnected(createRequest)
    createResponse.maybeThrow()
  }
  /**
   * Gets all brokers in the cluster.
   * @return sequence of brokers in the cluster.
   */
  def getAllBrokersInCluster: Seq[Broker] = {
    val brokerIds = getSortedBrokerList
    val getDataRequests = brokerIds.map(brokerId => GetDataRequest(BrokerIdZNode.path(brokerId), ctx = Some(brokerId)))
    val getDataResponses = retryRequestsUntilConnected(getDataRequests)
    getDataResponses.flatMap { getDataResponse =>
      val brokerId = getDataResponse.ctx.get.asInstanceOf[Int]
      getDataResponse.resultCode match {
        case Code.OK =>
          Option(BrokerIdZNode.decode(brokerId, getDataResponse.data).broker)
        // Broker deregistered between the child listing and this read: skip it.
        case Code.NONODE => None
        case _ => throw getDataResponse.resultException.get
      }
    }
  }
  /**
   * Get a broker from ZK
   * @param brokerId the id of the broker to look up
   * @return an optional Broker (None when the broker znode does not exist)
   */
  def getBroker(brokerId: Int): Option[Broker] = {
    val getDataRequest = GetDataRequest(BrokerIdZNode.path(brokerId))
    val getDataResponse = retryRequestUntilConnected(getDataRequest)
    getDataResponse.resultCode match {
      case Code.OK =>
        Option(BrokerIdZNode.decode(brokerId, getDataResponse.data).broker)
      case Code.NONODE => None
      case _ => throw getDataResponse.resultException.get
    }
  }
  /**
   * Gets the list of sorted broker Ids
   */
  // Numeric (not lexicographic) ascending order of broker ids.
  def getSortedBrokerList(): Seq[Int] =
    getChildren(BrokerIdsZNode.path).map(_.toInt).sorted
  /**
   * Gets all topics in the cluster.
   * @return sequence of topics in the cluster; empty when the topics path
   *         does not exist yet.
   */
  def getAllTopicsInCluster: Seq[String] = {
    val getChildrenResponse = retryRequestUntilConnected(GetChildrenRequest(TopicsZNode.path))
    getChildrenResponse.resultCode match {
      case Code.OK => getChildrenResponse.children
      case Code.NONODE => Seq.empty
      case _ => throw getChildrenResponse.resultException.get
    }
  }
/**
* Checks the topic existence
* @param topicName
* @return true if topic exists else false
*/
def topicExists(topicName: String): Boolean = {
pathExists(TopicZNode.path(topicName))
}
/**
* Sets the topic znode with the given assignment.
* @param topic the topic whose assignment is being set.
* @param assignment the partition to replica mapping to set for the given topic
* @return SetDataResponse
*/
def setTopicAssignmentRaw(topic: String, assignment: collection.Map[TopicPartition, Seq[Int]]): SetDataResponse = {
val setDataRequest = SetDataRequest(TopicZNode.path(topic), TopicZNode.encode(assignment), ZkVersion.NoVersion)
retryRequestUntilConnected(setDataRequest)
}
/**
* Sets the topic znode with the given assignment.
* @param topic the topic whose assignment is being set.
* @param assignment the partition to replica mapping to set for the given topic
* @throws KeeperException if there is an error while setting assignment
*/
def setTopicAssignment(topic: String, assignment: Map[TopicPartition, Seq[Int]]) = {
val setDataResponse = setTopicAssignmentRaw(topic, assignment)
setDataResponse.maybeThrow
}
/**
* Create the topic znode with the given assignment.
* @param topic the topic whose assignment is being set.
* @param assignment the partition to replica mapping to set for the given topic
* @throws KeeperException if there is an error while creating assignment
*/
def createTopicAssignment(topic: String, assignment: Map[TopicPartition, Seq[Int]]) = {
createRecursive(TopicZNode.path(topic), TopicZNode.encode(assignment))
}
/**
* Gets the log dir event notifications as strings. These strings are the znode names and not the absolute znode path.
* @return sequence of znode names and not the absolute znode path.
*/
def getAllLogDirEventNotifications: Seq[String] = {
val getChildrenResponse = retryRequestUntilConnected(GetChildrenRequest(LogDirEventNotificationZNode.path))
getChildrenResponse.resultCode match {
case Code.OK => getChildrenResponse.children.map(LogDirEventNotificationSequenceZNode.sequenceNumber)
case Code.NONODE => Seq.empty
case _ => throw getChildrenResponse.resultException.get
}
}
/**
* Reads each of the log dir event notifications associated with the given sequence numbers and extracts the broker ids.
* @param sequenceNumbers the sequence numbers associated with the log dir event notifications.
* @return broker ids associated with the given log dir event notifications.
*/
def getBrokerIdsFromLogDirEvents(sequenceNumbers: Seq[String]): Seq[Int] = {
val getDataRequests = sequenceNumbers.map { sequenceNumber =>
GetDataRequest(LogDirEventNotificationSequenceZNode.path(sequenceNumber))
}
val getDataResponses = retryRequestsUntilConnected(getDataRequests)
getDataResponses.flatMap { getDataResponse =>
getDataResponse.resultCode match {
case Code.OK => LogDirEventNotificationSequenceZNode.decode(getDataResponse.data)
case Code.NONODE => None
case _ => throw getDataResponse.resultException.get
}
}
}
/**
* Deletes all log dir event notifications.
*/
def deleteLogDirEventNotifications(): Unit = {
val getChildrenResponse = retryRequestUntilConnected(GetChildrenRequest(LogDirEventNotificationZNode.path))
if (getChildrenResponse.resultCode == Code.OK) {
deleteLogDirEventNotifications(getChildrenResponse.children.map(LogDirEventNotificationSequenceZNode.sequenceNumber))
} else if (getChildrenResponse.resultCode != Code.NONODE) {
getChildrenResponse.maybeThrow
}
}
/**
* Deletes the log dir event notifications associated with the given sequence numbers.
* @param sequenceNumbers the sequence numbers associated with the log dir event notifications to be deleted.
*/
def deleteLogDirEventNotifications(sequenceNumbers: Seq[String]): Unit = {
val deleteRequests = sequenceNumbers.map { sequenceNumber =>
DeleteRequest(LogDirEventNotificationSequenceZNode.path(sequenceNumber), ZkVersion.NoVersion)
}
retryRequestsUntilConnected(deleteRequests)
}
/**
* Gets the assignments for the given topics.
* @param topics the topics whose partitions we wish to get the assignments for.
* @return the replica assignment for each partition from the given topics.
*/
def getReplicaAssignmentForTopics(topics: Set[String]): Map[TopicPartition, Seq[Int]] = {
val getDataRequests = topics.map(topic => GetDataRequest(TopicZNode.path(topic), ctx = Some(topic)))
val getDataResponses = retryRequestsUntilConnected(getDataRequests.toSeq)
getDataResponses.flatMap { getDataResponse =>
val topic = getDataResponse.ctx.get.asInstanceOf[String]
getDataResponse.resultCode match {
case Code.OK => TopicZNode.decode(topic, getDataResponse.data)
case Code.NONODE => Map.empty[TopicPartition, Seq[Int]]
case _ => throw getDataResponse.resultException.get
}
}.toMap
}
/**
* Gets partition the assignments for the given topics.
* @param topics the topics whose partitions we wish to get the assignments for.
* @return the partition assignment for each partition from the given topics.
*/
def getPartitionAssignmentForTopics(topics: Set[String]): Map[String, Map[Int, Seq[Int]]] = {
val getDataRequests = topics.map(topic => GetDataRequest(TopicZNode.path(topic), ctx = Some(topic)))
val getDataResponses = retryRequestsUntilConnected(getDataRequests.toSeq)
getDataResponses.flatMap { getDataResponse =>
val topic = getDataResponse.ctx.get.asInstanceOf[String]
if (getDataResponse.resultCode == Code.OK) {
val partitionMap = TopicZNode.decode(topic, getDataResponse.data).map { case (k, v) => (k.partition, v) }
Map(topic -> partitionMap)
} else if (getDataResponse.resultCode == Code.NONODE) {
Map.empty[String, Map[Int, Seq[Int]]]
} else {
throw getDataResponse.resultException.get
}
}.toMap
}
/**
* Gets the partition numbers for the given topics
* @param topics the topics whose partitions we wish to get.
* @return the partition array for each topic from the given topics.
*/
def getPartitionsForTopics(topics: Set[String]): Map[String, Seq[Int]] = {
getPartitionAssignmentForTopics(topics).map { topicAndPartitionMap =>
val topic = topicAndPartitionMap._1
val partitionMap = topicAndPartitionMap._2
topic -> partitionMap.keys.toSeq.sortWith((s, t) => s < t)
}
}
/**
* Gets the partition count for a given topic
* @param topic The topic to get partition count for.
* @return optional integer that is Some if the topic exists and None otherwise.
*/
def getTopicPartitionCount(topic: String): Option[Int] = {
val topicData = getReplicaAssignmentForTopics(Set(topic))
if (topicData.nonEmpty)
Some(topicData.size)
else
None
}
/**
* Gets the assigned replicas for a specific topic and partition
* @param topicPartition TopicAndPartition to get assigned replicas for .
* @return List of assigned replicas
*/
def getReplicasForPartition(topicPartition: TopicPartition): Seq[Int] = {
val topicData = getReplicaAssignmentForTopics(Set(topicPartition.topic))
topicData.getOrElse(topicPartition, Seq.empty)
}
/**
* Gets all partitions in the cluster
* @return all partitions in the cluster
*/
def getAllPartitions(): Set[TopicPartition] = {
val topics = getChildren(TopicsZNode.path)
if (topics == null) Set.empty
else {
topics.flatMap { topic =>
// The partitions path may not exist if the topic is in the process of being deleted
getChildren(TopicPartitionsZNode.path(topic)).map(_.toInt).map(new TopicPartition(topic, _))
}.toSet
}
}
/**
* Gets the data and version at the given zk path
* @param path zk node path
* @return A tuple of 2 elements, where first element is zk node data as an array of bytes
* and second element is zk node version.
* returns (None, ZkVersion.NoVersion) if node doesn't exists and throws exception for any error
*/
def getDataAndVersion(path: String): (Option[Array[Byte]], Int) = {
val (data, stat) = getDataAndStat(path)
stat match {
case ZkStat.NoStat => (data, ZkVersion.NoVersion)
case _ => (data, stat.getVersion)
}
}
/**
* Gets the data and Stat at the given zk path
* @param path zk node path
* @return A tuple of 2 elements, where first element is zk node data as an array of bytes
* and second element is zk node stats.
* returns (None, ZkStat.NoStat) if node doesn't exists and throws exception for any error
*/
def getDataAndStat(path: String): (Option[Array[Byte]], Stat) = {
val getDataRequest = GetDataRequest(path)
val getDataResponse = retryRequestUntilConnected(getDataRequest)
getDataResponse.resultCode match {
case Code.OK => (Option(getDataResponse.data), getDataResponse.stat)
case Code.NONODE => (None, ZkStat.NoStat)
case _ => throw getDataResponse.resultException.get
}
}
/**
* Gets all the child nodes at a given zk node path
* @param path
* @return list of child node names
*/
def getChildren(path : String): Seq[String] = {
val getChildrenResponse = retryRequestUntilConnected(GetChildrenRequest(path))
getChildrenResponse.resultCode match {
case Code.OK => getChildrenResponse.children
case Code.NONODE => Seq.empty
case _ => throw getChildrenResponse.resultException.get
}
}
/**
* Conditional update the persistent path data, return (true, newVersion) if it succeeds, otherwise (the path doesn't
* exist, the current version is not the expected version, etc.) return (false, ZkVersion.NoVersion)
*
* When there is a ConnectionLossException during the conditional update, ZookeeperClient will retry the update and may fail
* since the previous update may have succeeded (but the stored zkVersion no longer matches the expected one).
* In this case, we will run the optionalChecker to further check if the previous write did indeed succeeded.
*/
  def conditionalUpdatePath(path: String, data: Array[Byte], expectVersion: Int,
                            optionalChecker: Option[(KafkaZkClient, String, Array[Byte]) => (Boolean,Int)] = None): (Boolean, Int) = {
    val setDataRequest = SetDataRequest(path, data, expectVersion)
    val setDataResponse = retryRequestUntilConnected(setDataRequest)
    setDataResponse.resultCode match {
      // Write accepted: report the new znode version.
      case Code.OK =>
        debug("Conditional update of path %s with value %s and expected version %d succeeded, returning the new version: %d"
          .format(path, data, expectVersion, setDataResponse.stat.getVersion))
        (true, setDataResponse.stat.getVersion)
      // Version mismatch: may be a genuine conflict, or our own retried write already
      // succeeded and bumped the version. The optional checker disambiguates.
      case Code.BADVERSION =>
        optionalChecker match {
          case Some(checker) => checker(this, path, data)
          case _ =>
            // No checker supplied: treat BADVERSION as a plain failure.
            debug("Checker method is not passed skipping zkData match")
            debug("Conditional update of path %s with data %s and expected version %d failed due to %s"
              .format(path, data, expectVersion, setDataResponse.resultException.get.getMessage))
            (false, ZkVersion.NoVersion)
        }
      // Node missing: nothing to update.
      case Code.NONODE =>
        debug("Conditional update of path %s with data %s and expected version %d failed due to %s".format(path, data,
          expectVersion, setDataResponse.resultException.get.getMessage))
        (false, ZkVersion.NoVersion)
      // Any other result code is unexpected: log and propagate the exception.
      case _ =>
        debug("Conditional update of path %s with data %s and expected version %d failed due to %s".format(path, data,
          expectVersion, setDataResponse.resultException.get.getMessage))
        throw setDataResponse.resultException.get
    }
  }
/**
* Creates the delete topic znode.
* @param topicName topic name
* @throws KeeperException if there is an error while setting or creating the znode
*/
def createDeleteTopicPath(topicName: String): Unit = {
createRecursive(DeleteTopicsTopicZNode.path(topicName))
}
/**
* Checks if topic is marked for deletion
* @param topic
* @return true if topic is marked for deletion, else false
*/
def isTopicMarkedForDeletion(topic: String): Boolean = {
pathExists(DeleteTopicsTopicZNode.path(topic))
}
/**
* Get all topics marked for deletion.
* @return sequence of topics marked for deletion.
*/
def getTopicDeletions: Seq[String] = {
val getChildrenResponse = retryRequestUntilConnected(GetChildrenRequest(DeleteTopicsZNode.path))
getChildrenResponse.resultCode match {
case Code.OK => getChildrenResponse.children
case Code.NONODE => Seq.empty
case _ => throw getChildrenResponse.resultException.get
}
}
/**
* Remove the given topics from the topics marked for deletion.
* @param topics the topics to remove.
*/
def deleteTopicDeletions(topics: Seq[String]): Unit = {
val deleteRequests = topics.map(topic => DeleteRequest(DeleteTopicsTopicZNode.path(topic), ZkVersion.NoVersion))
retryRequestsUntilConnected(deleteRequests)
}
/**
* Returns all reassignments.
* @return the reassignments for each partition.
*/
def getPartitionReassignment: collection.Map[TopicPartition, Seq[Int]] = {
val getDataRequest = GetDataRequest(ReassignPartitionsZNode.path)
val getDataResponse = retryRequestUntilConnected(getDataRequest)
getDataResponse.resultCode match {
case Code.OK =>
ReassignPartitionsZNode.decode(getDataResponse.data) match {
case Left(e) =>
logger.warn(s"Ignoring partition reassignment due to invalid json: ${e.getMessage}", e)
Map.empty[TopicPartition, Seq[Int]]
case Right(assignments) => assignments
}
case Code.NONODE => Map.empty
case _ => throw getDataResponse.resultException.get
}
}
/**
* Sets or creates the partition reassignment znode with the given reassignment depending on whether it already
* exists or not.
*
* @param reassignment the reassignment to set on the reassignment znode
* @throws KeeperException if there is an error while setting or creating the znode
*/
def setOrCreatePartitionReassignment(reassignment: collection.Map[TopicPartition, Seq[Int]]): Unit = {
def set(reassignmentData: Array[Byte]): SetDataResponse = {
val setDataRequest = SetDataRequest(ReassignPartitionsZNode.path, reassignmentData, ZkVersion.NoVersion)
retryRequestUntilConnected(setDataRequest)
}
def create(reassignmentData: Array[Byte]): CreateResponse = {
val createRequest = CreateRequest(ReassignPartitionsZNode.path, reassignmentData, acls(ReassignPartitionsZNode.path),
CreateMode.PERSISTENT)
retryRequestUntilConnected(createRequest)
}
val reassignmentData = ReassignPartitionsZNode.encode(reassignment)
val setDataResponse = set(reassignmentData)
setDataResponse.resultCode match {
case Code.NONODE =>
val createDataResponse = create(reassignmentData)
createDataResponse.maybeThrow
case _ => setDataResponse.maybeThrow
}
}
/**
* Creates the partition reassignment znode with the given reassignment.
* @param reassignment the reassignment to set on the reassignment znode.
* @throws KeeperException if there is an error while creating the znode
*/
def createPartitionReassignment(reassignment: Map[TopicPartition, Seq[Int]]) = {
createRecursive(ReassignPartitionsZNode.path, ReassignPartitionsZNode.encode(reassignment))
}
/**
* Deletes the partition reassignment znode.
*/
def deletePartitionReassignment(): Unit = {
val deleteRequest = DeleteRequest(ReassignPartitionsZNode.path, ZkVersion.NoVersion)
retryRequestUntilConnected(deleteRequest)
}
/**
* Checks if reassign partitions is in progress
* @return true if reassign partitions is in progress, else false
*/
def reassignPartitionsInProgress(): Boolean = {
pathExists(ReassignPartitionsZNode.path)
}
/**
* Gets topic partition states for the given partitions.
* @param partitions the partitions for which we want to get states.
* @return map containing LeaderIsrAndControllerEpoch of each partition for we were able to lookup the partition state.
*/
def getTopicPartitionStates(partitions: Seq[TopicPartition]): Map[TopicPartition, LeaderIsrAndControllerEpoch] = {
val getDataResponses = getTopicPartitionStatesRaw(partitions)
getDataResponses.flatMap { getDataResponse =>
val partition = getDataResponse.ctx.get.asInstanceOf[TopicPartition]
getDataResponse.resultCode match {
case Code.OK => TopicPartitionStateZNode.decode(getDataResponse.data, getDataResponse.stat).map(partition -> _)
case Code.NONODE => None
case _ => throw getDataResponse.resultException.get
}
}.toMap
}
/**
* Gets topic partition state for the given partition.
* @param partition the partition for which we want to get state.
* @return LeaderIsrAndControllerEpoch of the partition state if exists, else None
*/
def getTopicPartitionState(partition: TopicPartition): Option[LeaderIsrAndControllerEpoch] = {
val getDataResponse = getTopicPartitionStatesRaw(Seq(partition)).head
if (getDataResponse.resultCode == Code.OK) {
TopicPartitionStateZNode.decode(getDataResponse.data, getDataResponse.stat)
} else if (getDataResponse.resultCode == Code.NONODE) {
None
} else {
throw getDataResponse.resultException.get
}
}
/**
* Gets the leader for a given partition
* @param partition The partition for which we want to get leader.
* @return optional integer if the leader exists and None otherwise.
*/
def getLeaderForPartition(partition: TopicPartition): Option[Int] =
getTopicPartitionState(partition).map(_.leaderAndIsr.leader)
/**
* Gets the in-sync replicas (ISR) for a specific topicPartition
* @param partition The partition for which we want to get ISR.
* @return optional ISR if exists and None otherwise
*/
def getInSyncReplicasForPartition(partition: TopicPartition): Option[Seq[Int]] =
getTopicPartitionState(partition).map(_.leaderAndIsr.isr)
/**
* Gets the leader epoch for a specific topicPartition
* @param partition The partition for which we want to get the leader epoch
* @return optional integer if the leader exists and None otherwise
*/
def getEpochForPartition(partition: TopicPartition): Option[Int] = {
getTopicPartitionState(partition).map(_.leaderAndIsr.leaderEpoch)
}
/**
* Gets the isr change notifications as strings. These strings are the znode names and not the absolute znode path.
* @return sequence of znode names and not the absolute znode path.
*/
def getAllIsrChangeNotifications: Seq[String] = {
val getChildrenResponse = retryRequestUntilConnected(GetChildrenRequest(IsrChangeNotificationZNode.path))
getChildrenResponse.resultCode match {
case Code.OK => getChildrenResponse.children.map(IsrChangeNotificationSequenceZNode.sequenceNumber)
case Code.NONODE => Seq.empty
case _ => throw getChildrenResponse.resultException.get
}
}
/**
* Reads each of the isr change notifications associated with the given sequence numbers and extracts the partitions.
* @param sequenceNumbers the sequence numbers associated with the isr change notifications.
* @return partitions associated with the given isr change notifications.
*/
def getPartitionsFromIsrChangeNotifications(sequenceNumbers: Seq[String]): Seq[TopicPartition] = {
val getDataRequests = sequenceNumbers.map { sequenceNumber =>
GetDataRequest(IsrChangeNotificationSequenceZNode.path(sequenceNumber))
}
val getDataResponses = retryRequestsUntilConnected(getDataRequests)
getDataResponses.flatMap { getDataResponse =>
getDataResponse.resultCode match {
case Code.OK => IsrChangeNotificationSequenceZNode.decode(getDataResponse.data)
case Code.NONODE => None
case _ => throw getDataResponse.resultException.get
}
}
}
/**
* Deletes all isr change notifications.
*/
def deleteIsrChangeNotifications(): Unit = {
val getChildrenResponse = retryRequestUntilConnected(GetChildrenRequest(IsrChangeNotificationZNode.path))
if (getChildrenResponse.resultCode == Code.OK) {
deleteIsrChangeNotifications(getChildrenResponse.children.map(IsrChangeNotificationSequenceZNode.sequenceNumber))
} else if (getChildrenResponse.resultCode != Code.NONODE) {
getChildrenResponse.maybeThrow
}
}
/**
* Deletes the isr change notifications associated with the given sequence numbers.
* @param sequenceNumbers the sequence numbers associated with the isr change notifications to be deleted.
*/
def deleteIsrChangeNotifications(sequenceNumbers: Seq[String]): Unit = {
val deleteRequests = sequenceNumbers.map { sequenceNumber =>
DeleteRequest(IsrChangeNotificationSequenceZNode.path(sequenceNumber), ZkVersion.NoVersion)
}
retryRequestsUntilConnected(deleteRequests)
}
/**
* Creates preferred replica election znode with partitions undergoing election
* @param partitions
* @throws KeeperException if there is an error while creating the znode
*/
def createPreferredReplicaElection(partitions: Set[TopicPartition]): Unit = {
createRecursive(PreferredReplicaElectionZNode.path, PreferredReplicaElectionZNode.encode(partitions))
}
/**
* Gets the partitions marked for preferred replica election.
* @return sequence of partitions.
*/
def getPreferredReplicaElection: Set[TopicPartition] = {
val getDataRequest = GetDataRequest(PreferredReplicaElectionZNode.path)
val getDataResponse = retryRequestUntilConnected(getDataRequest)
getDataResponse.resultCode match {
case Code.OK => PreferredReplicaElectionZNode.decode(getDataResponse.data)
case Code.NONODE => Set.empty
case _ => throw getDataResponse.resultException.get
}
}
/**
* Deletes the preferred replica election znode.
*/
def deletePreferredReplicaElection(): Unit = {
val deleteRequest = DeleteRequest(PreferredReplicaElectionZNode.path, ZkVersion.NoVersion)
retryRequestUntilConnected(deleteRequest)
}
/**
* Gets the controller id.
* @return optional integer that is Some if the controller znode exists and can be parsed and None otherwise.
*/
def getControllerId: Option[Int] = {
val getDataRequest = GetDataRequest(ControllerZNode.path)
val getDataResponse = retryRequestUntilConnected(getDataRequest)
getDataResponse.resultCode match {
case Code.OK => ControllerZNode.decode(getDataResponse.data)
case Code.NONODE => None
case _ => throw getDataResponse.resultException.get
}
}
/**
* Deletes the controller znode.
*/
def deleteController(): Unit = {
val deleteRequest = DeleteRequest(ControllerZNode.path, ZkVersion.NoVersion)
retryRequestUntilConnected(deleteRequest)
}
/**
* Gets the controller epoch.
* @return optional (Int, Stat) that is Some if the controller epoch path exists and None otherwise.
*/
def getControllerEpoch: Option[(Int, Stat)] = {
val getDataRequest = GetDataRequest(ControllerEpochZNode.path)
val getDataResponse = retryRequestUntilConnected(getDataRequest)
getDataResponse.resultCode match {
case Code.OK =>
val epoch = ControllerEpochZNode.decode(getDataResponse.data)
Option(epoch, getDataResponse.stat)
case Code.NONODE => None
case _ => throw getDataResponse.resultException.get
}
}
/**
* Recursively deletes the topic znode.
* @param topic the topic whose topic znode we wish to delete.
*/
def deleteTopicZNode(topic: String): Unit = {
deleteRecursive(TopicZNode.path(topic))
}
/**
* Deletes the topic configs for the given topics.
* @param topics the topics whose configs we wish to delete.
*/
def deleteTopicConfigs(topics: Seq[String]): Unit = {
val deleteRequests = topics.map(topic => DeleteRequest(ConfigEntityZNode.path(ConfigType.Topic, topic), ZkVersion.NoVersion))
retryRequestsUntilConnected(deleteRequests)
}
//Acl management methods
/**
* Creates the required zk nodes for Acl storage
*/
def createAclPaths(): Unit = {
createRecursive(AclZNode.path, throwIfPathExists = false)
createRecursive(AclChangeNotificationZNode.path, throwIfPathExists = false)
ResourceType.values.foreach(resource => createRecursive(ResourceTypeZNode.path(resource.name), throwIfPathExists = false))
}
/**
* Gets VersionedAcls for a given Resource
* @param resource Resource to get VersionedAcls for
* @return VersionedAcls
*/
def getVersionedAclsForResource(resource: Resource): VersionedAcls = {
val getDataRequest = GetDataRequest(ResourceZNode.path(resource))
val getDataResponse = retryRequestUntilConnected(getDataRequest)
getDataResponse.resultCode match {
case Code.OK => ResourceZNode.decode(getDataResponse.data, getDataResponse.stat)
case Code.NONODE => VersionedAcls(Set(), -1)
case _ => throw getDataResponse.resultException.get
}
}
/**
* Sets or creates the resource znode path with the given acls and expected zk version depending
* on whether it already exists or not.
* @param resource
* @param aclsSet
* @param expectedVersion
* @return true if the update was successful and the new version
*/
def conditionalSetOrCreateAclsForResource(resource: Resource, aclsSet: Set[Acl], expectedVersion: Int): (Boolean, Int) = {
def set(aclData: Array[Byte], expectedVersion: Int): SetDataResponse = {
val setDataRequest = SetDataRequest(ResourceZNode.path(resource), aclData, expectedVersion)
retryRequestUntilConnected(setDataRequest)
}
def create(aclData: Array[Byte]): CreateResponse = {
val path = ResourceZNode.path(resource)
val createRequest = CreateRequest(path, aclData, acls(path), CreateMode.PERSISTENT)
retryRequestUntilConnected(createRequest)
}
val aclData = ResourceZNode.encode(aclsSet)
val setDataResponse = set(aclData, expectedVersion)
setDataResponse.resultCode match {
case Code.OK => (true, setDataResponse.stat.getVersion)
case Code.NONODE => {
val createResponse = create(aclData)
createResponse.resultCode match {
case Code.OK => (true, 0)
case Code.NODEEXISTS => (false, 0)
case _ => throw createResponse.resultException.get
}
}
case Code.BADVERSION => (false, 0)
case _ => throw setDataResponse.resultException.get
}
}
/**
* Creates Acl change notification message
* @param resourceName resource name
*/
def createAclChangeNotification(resourceName: String): Unit = {
val path = AclChangeNotificationSequenceZNode.createPath
val createRequest = CreateRequest(path, AclChangeNotificationSequenceZNode.encode(resourceName), acls(path), CreateMode.PERSISTENT_SEQUENTIAL)
val createResponse = retryRequestUntilConnected(createRequest)
createResponse.maybeThrow
}
def propagateLogDirEvent(brokerId: Int) {
val logDirEventNotificationPath: String = createSequentialPersistentPath(
LogDirEventNotificationZNode.path + "/" + LogDirEventNotificationSequenceZNode.SequenceNumberPrefix,
LogDirEventNotificationSequenceZNode.encode(brokerId))
debug(s"Added $logDirEventNotificationPath for broker $brokerId")
}
def propagateIsrChanges(isrChangeSet: collection.Set[TopicPartition]): Unit = {
val isrChangeNotificationPath: String = createSequentialPersistentPath(IsrChangeNotificationSequenceZNode.path(),
IsrChangeNotificationSequenceZNode.encode(isrChangeSet))
debug(s"Added $isrChangeNotificationPath for $isrChangeSet")
}
/**
* Deletes all Acl change notifications.
* @throws KeeperException if there is an error while deleting Acl change notifications
*/
def deleteAclChangeNotifications(): Unit = {
val getChildrenResponse = retryRequestUntilConnected(GetChildrenRequest(AclChangeNotificationZNode.path))
if (getChildrenResponse.resultCode == Code.OK) {
deleteAclChangeNotifications(getChildrenResponse.children)
} else if (getChildrenResponse.resultCode != Code.NONODE) {
getChildrenResponse.maybeThrow
}
}
/**
* Deletes the Acl change notifications associated with the given sequence nodes
* @param sequenceNodes
*/
private def deleteAclChangeNotifications(sequenceNodes: Seq[String]): Unit = {
val deleteRequests = sequenceNodes.map { sequenceNode =>
DeleteRequest(AclChangeNotificationSequenceZNode.deletePath(sequenceNode), ZkVersion.NoVersion)
}
val deleteResponses = retryRequestsUntilConnected(deleteRequests)
deleteResponses.foreach { deleteResponse =>
if (deleteResponse.resultCode != Code.NONODE) {
deleteResponse.maybeThrow
}
}
}
/**
* Gets the resource types
* @return list of resource type names
*/
def getResourceTypes(): Seq[String] = {
getChildren(AclZNode.path)
}
/**
* Gets the resource names for a give resource type
* @param resourceType
* @return list of resource names
*/
def getResourceNames(resourceType: String): Seq[String] = {
getChildren(ResourceTypeZNode.path(resourceType))
}
/**
* Deletes the given Resource node
* @param resource
* @return delete status
*/
def deleteResource(resource: Resource): Boolean = {
deleteRecursive(ResourceZNode.path(resource))
}
/**
* checks the resource existence
* @param resource
* @return existence status
*/
def resourceExists(resource: Resource): Boolean = {
pathExists(ResourceZNode.path(resource))
}
/**
* Conditional delete the resource node
* @param resource
* @param expectedVersion
* @return return true if it succeeds, false otherwise (the current version is not the expected version)
*/
def conditionalDelete(resource: Resource, expectedVersion: Int): Boolean = {
val deleteRequest = DeleteRequest(ResourceZNode.path(resource), expectedVersion)
val deleteResponse = retryRequestUntilConnected(deleteRequest)
deleteResponse.resultCode match {
case Code.OK | Code.NONODE => true
case Code.BADVERSION => false
case _ => throw deleteResponse.resultException.get
}
}
/**
* Deletes the zk node recursively
* @param path
* @return return true if it succeeds, false otherwise
*/
def deletePath(path: String): Boolean = {
deleteRecursive(path)
}
/**
* Creates the required zk nodes for Delegation Token storage
*/
def createDelegationTokenPaths(): Unit = {
createRecursive(DelegationTokenChangeNotificationZNode.path, throwIfPathExists = false)
createRecursive(DelegationTokensZNode.path, throwIfPathExists = false)
}
/**
* Creates Delegation Token change notification message
* @param tokenId token Id
*/
def createTokenChangeNotification(tokenId: String): Unit = {
val path = DelegationTokenChangeNotificationSequenceZNode.createPath
val createRequest = CreateRequest(path, DelegationTokenChangeNotificationSequenceZNode.encode(tokenId), acls(path), CreateMode.PERSISTENT_SEQUENTIAL)
val createResponse = retryRequestUntilConnected(createRequest)
createResponse.resultException.foreach(e => throw e)
}
/**
* Sets or creates token info znode with the given token details depending on whether it already
* exists or not.
*
* @param token the token to set on the token znode
* @throws KeeperException if there is an error while setting or creating the znode
*/
def setOrCreateDelegationToken(token: DelegationToken): Unit = {
def set(tokenData: Array[Byte]): SetDataResponse = {
val setDataRequest = SetDataRequest(DelegationTokenInfoZNode.path(token.tokenInfo().tokenId()), tokenData, ZkVersion.NoVersion)
retryRequestUntilConnected(setDataRequest)
}
def create(tokenData: Array[Byte]): CreateResponse = {
val path = DelegationTokenInfoZNode.path(token.tokenInfo().tokenId())
val createRequest = CreateRequest(path, tokenData, acls(path), CreateMode.PERSISTENT)
retryRequestUntilConnected(createRequest)
}
val tokenInfo = DelegationTokenInfoZNode.encode(token)
val setDataResponse = set(tokenInfo)
setDataResponse.resultCode match {
case Code.NONODE =>
val createDataResponse = create(tokenInfo)
createDataResponse.maybeThrow
case _ => setDataResponse.maybeThrow
}
}
/**
* Gets the Delegation Token Info
* @return optional TokenInfo that is Some if the token znode exists and can be parsed and None otherwise.
*/
def getDelegationTokenInfo(delegationTokenId: String): Option[TokenInformation] = {
val getDataRequest = GetDataRequest(DelegationTokenInfoZNode.path(delegationTokenId))
val getDataResponse = retryRequestUntilConnected(getDataRequest)
getDataResponse.resultCode match {
case Code.OK => DelegationTokenInfoZNode.decode(getDataResponse.data)
case Code.NONODE => None
case _ => throw getDataResponse.resultException.get
}
}
/**
* Deletes the given Delegation token node
* @param delegationTokenId
* @return delete status
*/
def deleteDelegationToken(delegationTokenId: String): Boolean = {
deleteRecursive(DelegationTokenInfoZNode.path(delegationTokenId))
}
/**
* This registers a ZNodeChangeHandler and attempts to register a watcher with an ExistsRequest, which allows data
* watcher registrations on paths which might not even exist.
*
* @param zNodeChangeHandler
* @return `true` if the path exists or `false` if it does not
* @throws KeeperException if an error is returned by ZooKeeper
*/
def registerZNodeChangeHandlerAndCheckExistence(zNodeChangeHandler: ZNodeChangeHandler): Boolean = {
zooKeeperClient.registerZNodeChangeHandler(zNodeChangeHandler)
val existsResponse = retryRequestUntilConnected(ExistsRequest(zNodeChangeHandler.path))
existsResponse.resultCode match {
case Code.OK => true
case Code.NONODE => false
case _ => throw existsResponse.resultException.get
}
}
/**
 * Registers a handler notified when the data of its znode changes.
 * Delegates to [[ZooKeeperClient.registerZNodeChangeHandler]].
 * @param zNodeChangeHandler handler keyed by the znode path it watches
 */
def registerZNodeChangeHandler(zNodeChangeHandler: ZNodeChangeHandler): Unit = {
  zooKeeperClient.registerZNodeChangeHandler(zNodeChangeHandler)
}
/**
 * Unregisters the data-change handler for the given path.
 * Delegates to [[ZooKeeperClient.unregisterZNodeChangeHandler]].
 * @param path znode path whose handler should be removed
 */
def unregisterZNodeChangeHandler(path: String): Unit = {
  zooKeeperClient.unregisterZNodeChangeHandler(path)
}
/**
 * Registers a handler notified when the children of its znode change.
 * Delegates to [[ZooKeeperClient.registerZNodeChildChangeHandler]].
 * @param zNodeChildChangeHandler handler keyed by the znode path it watches
 */
def registerZNodeChildChangeHandler(zNodeChildChangeHandler: ZNodeChildChangeHandler): Unit = {
  zooKeeperClient.registerZNodeChildChangeHandler(zNodeChildChangeHandler)
}
/**
 * Unregisters the child-change handler for the given path.
 * Delegates to [[ZooKeeperClient.unregisterZNodeChildChangeHandler]].
 * @param path znode path whose handler should be removed
 */
def unregisterZNodeChildChangeHandler(path: String): Unit = {
  zooKeeperClient.unregisterZNodeChildChangeHandler(path)
}
/**
 * Registers a handler for ZooKeeper session state changes.
 * @param stateChangeHandler handler to register
 */
def registerStateChangeHandler(stateChangeHandler: StateChangeHandler): Unit = {
  zooKeeperClient.registerStateChangeHandler(stateChangeHandler)
}
/**
 * Unregisters the session state change handler registered under the given name.
 * @param name name the handler was registered under
 */
def unregisterStateChangeHandler(name: String): Unit = {
  zooKeeperClient.unregisterStateChangeHandler(name)
}
/**
 * Closes the underlying ZooKeeperClient, first removing the request-latency metric this
 * client registered.
 */
def close(): Unit = {
  removeMetric("ZooKeeperRequestLatencyMs")
  zooKeeperClient.close()
}
/**
 * Gets the committed offset stored in ZooKeeper for a group and topic partition.
 * @param group the group we wish to get the offset for
 * @param topicPartition the topic partition we wish to get the offset for
 * @return Some(offset) when an offset has been committed for (group, topicPartition), None otherwise
 * @throws KeeperException for any other ZooKeeper result code
 */
def getConsumerOffset(group: String, topicPartition: TopicPartition): Option[Long] = {
  val offsetPath = ConsumerOffset.path(group, topicPartition.topic, topicPartition.partition)
  val response = retryRequestUntilConnected(GetDataRequest(offsetPath))
  response.resultCode match {
    case Code.OK => ConsumerOffset.decode(response.data)
    case Code.NONODE => None
    case _ => throw response.resultException.get
  }
}
/**
 * Sets the committed offset for a group and topic partition, creating the znode (and any
 * missing parents) when it does not exist yet.
 * @param group the group whose offset is being set
 * @param topicPartition the topic partition whose offset is being set
 * @param offset the offset value
 */
def setOrCreateConsumerOffset(group: String, topicPartition: TopicPartition, offset: Long): Unit = {
  val setDataResponse = setConsumerOffset(group, topicPartition, offset)
  setDataResponse.resultCode match {
    case Code.NONODE => createConsumerOffset(group, topicPartition, offset)
    case _ => setDataResponse.maybeThrow
  }
}
/**
 * Reads the cluster id from ZooKeeper.
 * @return Some(cluster id) when the cluster-id znode exists, None when it does not
 * @throws KeeperException for any other ZooKeeper result code
 */
def getClusterId: Option[String] = {
  val response = retryRequestUntilConnected(GetDataRequest(ClusterIdZNode.path))
  response.resultCode match {
    case Code.OK => Some(ClusterIdZNode.fromJson(response.data))
    case Code.NONODE => None
    case _ => throw response.resultException.get
  }
}
/**
 * Creates the cluster id znode with the proposed value, or returns the id already stored when
 * the znode exists.
 * @param proposedClusterId the id to persist if none exists yet
 * @return the proposed id on successful creation, otherwise the existing id
 * @throws KafkaException if the znode exists but the id can no longer be read (e.g. it was
 *                        deleted concurrently)
 */
def createOrGetClusterId(proposedClusterId: String): String = {
  try {
    createRecursive(ClusterIdZNode.path, ClusterIdZNode.toJson(proposedClusterId))
    proposedClusterId
  } catch {
    // Fix: the exception binding was named but never used; `_` makes the intent explicit
    // and avoids an unused-variable warning.
    case _: NodeExistsException => getClusterId.getOrElse(
      throw new KafkaException("Failed to get cluster id from Zookeeper. This can happen if /cluster/id is deleted from Zookeeper."))
  }
}
/**
 * Generate a broker id by updating the broker sequence id path in ZK and return the version of the path.
 * The version is incremented by one on every update starting from 1.
 * @return sequence number as the broker id
 */
def generateBrokerSequenceId(): Int = {
  // The znode data stays empty; only the version counter bumped by each set is meaningful.
  val setDataRequest = SetDataRequest(BrokerSequenceIdZNode.path, Array.empty[Byte], -1)
  val setDataResponse = retryRequestUntilConnected(setDataRequest)
  setDataResponse.resultCode match {
    case Code.OK => setDataResponse.stat.getVersion
    case Code.NONODE =>
      // make sure the path exists, then retry the set
      createRecursive(BrokerSequenceIdZNode.path, Array.empty[Byte], throwIfPathExists = false)
      generateBrokerSequenceId()
    case _ => throw setDataResponse.resultException.get
  }
}
/**
 * Pre-creates the well-known persistent paths (see [[ZkData.PersistentZkPaths]]) so that later
 * operations can assume they exist.
 */
def createTopLevelPaths(): Unit = {
  ZkData.PersistentZkPaths.foreach(makeSurePersistentPathExists(_))
}
/**
 * Creates the given persistent path (and any missing parents) if it does not already exist;
 * an existing path is left untouched.
 * @param path znode path to create
 */
def makeSurePersistentPathExists(path: String): Unit = {
  createRecursive(path, data = null, throwIfPathExists = false)
}
// Writes the committed offset for (group, partition) without creating missing znodes; the caller
// inspects the response code (e.g. NONODE) to decide whether to create the path.
private def setConsumerOffset(group: String, topicPartition: TopicPartition, offset: Long): SetDataResponse = {
  val offsetPath = ConsumerOffset.path(group, topicPartition.topic, topicPartition.partition)
  val request = SetDataRequest(offsetPath, ConsumerOffset.encode(offset), ZkVersion.NoVersion)
  retryRequestUntilConnected(request)
}
// Persists the initial committed offset for (group, partition), creating any missing parent
// znodes along the path.
// Fix: the result type was inferred; declaring `: Unit` makes the side-effecting contract
// explicit and guards against accidental value leakage.
private def createConsumerOffset(group: String, topicPartition: TopicPartition, offset: Long): Unit = {
  val path = ConsumerOffset.path(group, topicPartition.topic, topicPartition.partition)
  createRecursive(path, ConsumerOffset.encode(offset))
}
/**
 * Deletes the given zk path recursively: children are removed depth-first before the node itself.
 * @param path root of the subtree to delete
 * @return true if path gets deleted successfully, false if root path doesn't exist
 * @throws KeeperException if there is an error while deleting the znodes
 */
def deleteRecursive(path: String): Boolean = {
  val getChildrenResponse = retryRequestUntilConnected(GetChildrenRequest(path))
  getChildrenResponse.resultCode match {
    case Code.OK =>
      getChildrenResponse.children.foreach(child => deleteRecursive(s"$path/$child"))
      val deleteResponse = retryRequestUntilConnected(DeleteRequest(path, ZkVersion.NoVersion))
      // NONODE is tolerated here: another caller may have deleted the node concurrently.
      if (deleteResponse.resultCode != Code.OK && deleteResponse.resultCode != Code.NONODE) {
        throw deleteResponse.resultException.get
      }
      true
    case Code.NONODE => false
    case _ => throw getChildrenResponse.resultException.get
  }
}
/**
 * Checks whether the given znode exists.
 * @param path znode path to check
 * @return true when the path exists, false when it does not
 * @throws KeeperException for any other ZooKeeper result code
 */
def pathExists(path: String): Boolean = {
  val response = retryRequestUntilConnected(ExistsRequest(path))
  response.resultCode match {
    case Code.OK => true
    case Code.NONODE => false
    case _ => throw response.resultException.get
  }
}
/**
 * Creates a persistent znode at `path`, creating any missing ancestors. Ancestors are created
 * with null data; only the leaf receives `data`.
 * @param path znode path to create
 * @param data payload for the leaf znode, or null for none
 * @param throwIfPathExists when true, a pre-existing leaf raises the NODEEXISTS exception;
 *                          when false an existing leaf is treated as success
 */
private[zk] def createRecursive(path: String, data: Array[Byte] = null, throwIfPathExists: Boolean = true) = {
  // Strips the last path segment; rejects strings without a '/'.
  def parentPath(path: String): String = {
    val indexOfLastSlash = path.lastIndexOf("/")
    if (indexOfLastSlash == -1) throw new IllegalArgumentException(s"Invalid path ${path}")
    path.substring(0, indexOfLastSlash)
  }
  // Creates ancestors bottom-up; NODEEXISTS is always acceptable for an ancestor.
  def createRecursive0(path: String): Unit = {
    val createRequest = CreateRequest(path, null, acls(path), CreateMode.PERSISTENT)
    var createResponse = retryRequestUntilConnected(createRequest)
    if (createResponse.resultCode == Code.NONODE) {
      createRecursive0(parentPath(path))
      createResponse = retryRequestUntilConnected(createRequest)
      if (createResponse.resultCode != Code.OK && createResponse.resultCode != Code.NODEEXISTS) {
        throw createResponse.resultException.get
      }
    } else if (createResponse.resultCode != Code.OK && createResponse.resultCode != Code.NODEEXISTS) {
      throw createResponse.resultException.get
    }
  }
  // Try the leaf directly first; only walk up the tree when the parent is missing (NONODE).
  val createRequest = CreateRequest(path, data, acls(path), CreateMode.PERSISTENT)
  var createResponse = retryRequestUntilConnected(createRequest)
  if (throwIfPathExists && createResponse.resultCode == Code.NODEEXISTS) {
    createResponse.maybeThrow
  } else if (createResponse.resultCode == Code.NONODE) {
    createRecursive0(parentPath(path))
    createResponse = retryRequestUntilConnected(createRequest)
    // After creating ancestors, NODEEXISTS on the retried leaf is tolerated unless the caller
    // asked for strict behaviour.
    if (throwIfPathExists || createResponse.resultCode != Code.NODEEXISTS)
      createResponse.maybeThrow
  } else if (createResponse.resultCode != Code.NODEEXISTS)
    createResponse.maybeThrow
}
// Creates the per-partition znode for each given partition; each response carries its partition
// as context so the caller can correlate results.
private def createTopicPartition(partitions: Seq[TopicPartition]): Seq[CreateResponse] = {
  val requests = partitions.map { partition =>
    val partitionPath = TopicPartitionZNode.path(partition)
    CreateRequest(partitionPath, null, acls(partitionPath), CreateMode.PERSISTENT, Some(partition))
  }
  retryRequestsUntilConnected(requests)
}
// Creates the "partitions" parent znode for each topic; each response carries its topic as
// context so the caller can correlate results.
private def createTopicPartitions(topics: Seq[String]): Seq[CreateResponse] = {
  val requests = topics.map { topic =>
    val partitionsPath = TopicPartitionsZNode.path(topic)
    CreateRequest(partitionsPath, null, acls(partitionsPath), CreateMode.PERSISTENT, Some(topic))
  }
  retryRequestsUntilConnected(requests)
}
// Fetches the per-topic config znode for each topic; each response carries its topic as context
// so the caller can correlate results.
private def getTopicConfigs(topics: Seq[String]): Seq[GetDataResponse] = {
  val getDataRequests = topics.map { topic =>
    GetDataRequest(ConfigEntityZNode.path(ConfigType.Topic, topic), ctx = Some(topic))
  }
  retryRequestsUntilConnected(getDataRequests)
}
// Default ACLs for the given path, depending on whether the cluster is secured.
private def acls(path: String): Seq[ACL] = ZkData.defaultAcls(isSecure, path)
// Sends a single request, retrying on connection loss, and returns its response.
private def retryRequestUntilConnected[Req <: AsyncRequest](request: Req): Req#Response = {
  retryRequestsUntilConnected(Seq(request)).head
}
// Sends a batch of requests, re-sending any that fail with CONNECTIONLOSS after waiting for the
// session to reconnect, until every request has a terminal response.
// NOTE(review): responses are accumulated per retry round, so their order can differ from the
// order of `requests` when only some requests are retried — confirm callers use the ctx field
// rather than positional matching where that matters.
private def retryRequestsUntilConnected[Req <: AsyncRequest](requests: Seq[Req]): Seq[Req#Response] = {
  val remainingRequests = ArrayBuffer(requests: _*)
  val responses = new ArrayBuffer[Req#Response]
  while (remainingRequests.nonEmpty) {
    val batchResponses = zooKeeperClient.handleRequests(remainingRequests)
    // Record per-request latency regardless of outcome.
    batchResponses.foreach(response => latencyMetric.update(response.metadata.responseTimeMs))
    // Only execute slow path if we find a response with CONNECTIONLOSS
    if (batchResponses.exists(_.resultCode == Code.CONNECTIONLOSS)) {
      val requestResponsePairs = remainingRequests.zip(batchResponses)
      remainingRequests.clear()
      requestResponsePairs.foreach { case (request, response) =>
        if (response.resultCode == Code.CONNECTIONLOSS)
          remainingRequests += request
        else
          responses += response
      }
      if (remainingRequests.nonEmpty)
        zooKeeperClient.waitUntilConnected()
    } else {
      remainingRequests.clear()
      responses ++= batchResponses
    }
  }
  responses
}
/**
 * Creates an ephemeral znode at `path`, treating a NODEEXISTS owned by the current session as
 * success (see [[CheckedEphemeral]]); any other non-OK code is raised as a KeeperException.
 * @param path znode path to create
 * @param data payload for the ephemeral znode
 */
def checkedEphemeralCreate(path: String, data: Array[Byte]): Unit = {
  info(s"Creating $path (is it secure? $isSecure)")
  val code = new CheckedEphemeral(path, data).create()
  info(s"Result of znode creation at $path is: $code")
  if (code != Code.OK)
    throw KeeperException.create(code)
}
/**
 * Helper that creates an ephemeral znode and treats "already exists but owned by our own session"
 * as success — this can happen when an earlier create succeeded but its response was lost.
 */
private class CheckedEphemeral(path: String, data: Array[Byte]) extends Logging {
  // Attempts the ephemeral create; on NODEEXISTS, checks whether this session already owns it.
  def create(): Code = {
    val createRequest = CreateRequest(path, data, acls(path), CreateMode.EPHEMERAL)
    val createResponse = retryRequestUntilConnected(createRequest)
    createResponse.resultCode match {
      case code@ Code.OK => code
      case Code.NODEEXISTS => getAfterNodeExists()
      case code =>
        error(s"Error while creating ephemeral at $path with return code: $code")
        code
    }
  }
  // Reads the existing node to determine its owner; retries create() if it vanished meanwhile.
  private def getAfterNodeExists(): Code = {
    val getDataRequest = GetDataRequest(path)
    val getDataResponse = retryRequestUntilConnected(getDataRequest)
    getDataResponse.resultCode match {
      // Owned by a different session: report the conflict to the caller.
      case Code.OK if getDataResponse.stat.getEphemeralOwner != zooKeeperClient.sessionId =>
        error(s"Error while creating ephemeral at $path, node already exists and owner " +
          s"'${getDataResponse.stat.getEphemeralOwner}' does not match current session '${zooKeeperClient.sessionId}'")
        Code.NODEEXISTS
      // Owned by this session: the earlier create actually succeeded.
      case code@ Code.OK => code
      case Code.NONODE =>
        info(s"The ephemeral node at $path went away while reading it, attempting create() again")
        create()
      case code =>
        error(s"Error while creating ephemeral at $path as it already exists and error getting the node data due to $code")
        code
    }
  }
}
}
object KafkaZkClient {
  /**
   * Outcome of a batched leader-and-ISR update.
   * @param successfulPartitions The successfully updated partition states with adjusted znode versions.
   * @param partitionsToRetry The partitions that we should retry due to a zookeeper BADVERSION conflict. Version conflicts
   *                          can occur if the partition leader updated partition state while the controller attempted to
   *                          update partition state.
   * @param failedPartitions Exceptions corresponding to failed partition state updates.
   */
  case class UpdateLeaderAndIsrResult(successfulPartitions: Map[TopicPartition, LeaderAndIsr],
                                      partitionsToRetry: Seq[TopicPartition],
                                      failedPartitions: Map[TopicPartition, Exception])
  /**
   * Create an instance of this class with the provided parameters.
   *
   * The metric group and type are preserved by default for compatibility with previous versions.
   */
  def apply(connectString: String,
            isSecure: Boolean,
            sessionTimeoutMs: Int,
            connectionTimeoutMs: Int,
            maxInFlightRequests: Int,
            time: Time,
            metricGroup: String = "kafka.server",
            metricType: String = "SessionExpireListener") = {
    val zooKeeperClient = new ZooKeeperClient(connectString, sessionTimeoutMs, connectionTimeoutMs, maxInFlightRequests,
      time, metricGroup, metricType)
    new KafkaZkClient(zooKeeperClient, isSecure, time)
  }
}
| sebadiaz/kafka | core/src/main/scala/kafka/zk/KafkaZkClient.scala | Scala | apache-2.0 | 63,978 |
package apps
import java.io._
import scala.util.Random
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import org.apache.spark.sql.types._
import org.apache.spark.sql.{DataFrame, Row}
import org.bytedeco.javacpp.tensorflow._
import libs._
import loaders._
import preprocessing._
/**
 * Distributed AlexNet training on ImageNet: TensorFlow nets run on Spark executors and their
 * weights are averaged on the driver every `syncInterval` local steps.
 *
 * Fixes applied: `trainRDD` is now a `val` (it was a `var` but never reassigned), `main` declares
 * an explicit `Unit` result instead of using procedure syntax, and the unreachable trailing log
 * statement is documented.
 */
object TFImageNetApp {
  // Batch sizes and image geometry consumed by the AlexNet graph and preprocessor.
  val trainBatchSize = 256
  val testBatchSize = 50
  val channels = 3
  val fullHeight = 256
  val fullWidth = 256
  val croppedHeight = 227
  val croppedWidth = 227
  val fullImShape = Array(channels, fullHeight, fullWidth)
  val fullImSize = fullImShape.product

  // Per-JVM store used on each executor to keep the graph and net alive between tasks.
  val workerStore = new WorkerStore()

  def main(args: Array[String]): Unit = {
    val numWorkers = args(0).toInt
    val s3Bucket = args(1)
    val conf = new SparkConf()
      .setAppName("TFImageNet")
      .set("spark.driver.maxResultSize", "30G")
      .set("spark.task.maxFailures", "1")
      .setExecutorEnv("LD_LIBRARY_PATH", sys.env("LD_LIBRARY_PATH"))
    val sc = new SparkContext(conf)
    val sqlContext = new org.apache.spark.sql.SQLContext(sc)
    val sparkNetHome = sys.env("SPARKNET_HOME")
    val logger = new Logger(sparkNetHome + "/training_log_" + System.currentTimeMillis().toString + ".txt")

    val loader = new ImageNetLoader(s3Bucket)
    logger.log("loading train data")
    // `val` instead of `var`: this reference is never reassigned.
    val trainRDD = loader.apply(sc, "ILSVRC2012_img_train/train.0000", "train.txt", fullHeight, fullWidth)
    logger.log("loading test data")
    val testRDD = loader.apply(sc, "ILSVRC2012_img_val/val.00", "val.txt", fullHeight, fullWidth)

    // convert to dataframes
    val schema = StructType(StructField("data", BinaryType, false) :: StructField("label", IntegerType, false) :: Nil)
    var trainDF = sqlContext.createDataFrame(trainRDD.map{ case (a, b) => Row(a, b)}, schema)
    var testDF = sqlContext.createDataFrame(testRDD.map{ case (a, b) => Row(a, b)}, schema)

    val numTrainData = trainDF.count()
    logger.log("numTrainData = " + numTrainData.toString)
    val numTestData = testDF.count()
    logger.log("numTestData = " + numTestData.toString)

    logger.log("computing mean image")
    // Sum pixel values as Longs (masking to unsigned bytes) and divide by the image count.
    val meanImage = trainDF.map(row => row(0).asInstanceOf[Array[Byte]].map(e => (e & 0xFF).toLong))
                           .reduce((a, b) => (a, b).zipped.map(_ + _))
                           .map(e => (e.toDouble / numTrainData).toFloat)

    logger.log("coalescing") // if you want to shuffle your data, replace coalesce with repartition
    trainDF = trainDF.coalesce(numWorkers)
    testDF = testDF.coalesce(numWorkers)

    val workers = sc.parallelize(Array.range(0, numWorkers), numWorkers)
    trainDF.foreachPartition(iter => workerStore.put("trainPartitionSize", iter.size))
    testDF.foreachPartition(iter => workerStore.put("testPartitionSize", iter.size))
    logger.log("trainPartitionSizes = " + workers.map(_ => workerStore.get[Int]("trainPartitionSize")).collect().deep.toString)
    logger.log("testPartitionSizes = " + workers.map(_ => workerStore.get[Int]("testPartitionSize")).collect().deep.toString)

    // initialize nets on workers
    workers.foreach(_ => {
      val graph = new GraphDef()
      val status = ReadBinaryProto(Env.Default(), sparkNetHome + "/models/tensorflow/alexnet/alexnet_graph.pb", graph)
      if (!status.ok) {
        throw new Exception("Failed to read " + sparkNetHome + "/models/tensorflow/alexnet/alexnet_graph.pb, try running `python alexnet_graph.py from that directory`")
      }
      val net = new TensorFlowNet(graph, schema, new ImageNetTensorFlowPreprocessor(schema, meanImage, fullHeight, fullWidth, croppedHeight, croppedWidth))
      workerStore.put("graph", graph) // prevent graph from being garbage collected
      workerStore.put("net", net) // prevent net from being garbage collected
    })

    // initialize weights on master
    var netWeights = workers.map(_ => workerStore.get[TensorFlowNet]("net").getWeights()).collect()(0)

    var i = 0
    // Training loop: broadcast weights, evaluate every 5th iteration, run `syncInterval` local
    // steps per worker, then average the worker weights. NOTE: this loop never terminates.
    while (true) {
      logger.log("broadcasting weights", i)
      val broadcastWeights = sc.broadcast(netWeights)
      logger.log("setting weights on workers", i)
      workers.foreach(_ => workerStore.get[TensorFlowNet]("net").setWeights(broadcastWeights.value))

      if (i % 5 == 0) {
        logger.log("testing", i)
        val testAccuracies = testDF.mapPartitions(
          testIt => {
            val numTestBatches = workerStore.get[Int]("testPartitionSize") / testBatchSize
            var accuracy = 0F
            for (j <- 0 to numTestBatches - 1) {
              val out = workerStore.get[TensorFlowNet]("net").forward(testIt, List("accuracy"))
              accuracy += out("accuracy").get(Array())
            }
            Array[(Float, Int)]((accuracy, numTestBatches)).iterator
          }
        ).cache()
        val accuracies = testAccuracies.map{ case (a, b) => a }.sum
        val numTestBatches = testAccuracies.map{ case (a, b) => b }.sum
        val accuracy = accuracies / numTestBatches
        logger.log("%.2f".format(100F * accuracy) + "% accuracy", i)
      }

      logger.log("training", i)
      val syncInterval = 10
      trainDF.foreachPartition(
        trainIt => {
          val t1 = System.currentTimeMillis()
          val len = workerStore.get[Int]("trainPartitionSize")
          // Random start offset so successive passes see different slices of the partition.
          val startIdx = Random.nextInt(len - syncInterval * trainBatchSize)
          val it = trainIt.drop(startIdx)
          val t2 = System.currentTimeMillis()
          print("stuff took " + ((t2 - t1) * 1F / 1000F).toString + " s\\n")
          for (j <- 0 to syncInterval - 1) {
            workerStore.get[TensorFlowNet]("net").step(it)
          }
          val t3 = System.currentTimeMillis()
          print("iters took " + ((t3 - t2) * 1F / 1000F).toString + " s\\n")
        }
      )

      logger.log("collecting weights", i)
      netWeights = workers.map(_ => { workerStore.get[TensorFlowNet]("net").getWeights() }).reduce((a, b) => TensorFlowWeightCollection.add(a, b))
      TensorFlowWeightCollection.scalarDivide(netWeights, 1F * numWorkers)
      logger.log("weight = " + netWeights("fc6W").toFlat()(0).toString, i)
      i += 1
    }
    // Unreachable: the while(true) loop above never exits. Kept for parity with the original.
    logger.log("finished training")
  }
}
| amplab/SparkNet | src/main/scala/apps/TFImageNetApp.scala | Scala | mit | 6,193 |
package example.gql_server.schema
import example.gql_server.context.UserContext
import example.gql_server.entity.PersonEntity
import scalikejdbc.DB
import sangria.marshalling.circe._
import sangria.relay.{Connection, ConnectionArgs, ConnectionDefinition}
import sangria.schema._
/**
 * GraphQL schema pieces (types, query fields and mutation fields) for Person entities, resolved
 * through the request's UserContext.personHandler inside ScalikeJDBC sessions.
 */
trait PersonSchema {
  // Output type exposing the persisted fields of a person.
  @SuppressWarnings(Array("org.wartremover.warts.Any"))
  val PersonType = ObjectType(
    "Person",
    "The person",
    fields[Unit, PersonEntity](
      Field("id", IntType, resolve = _.value.id),
      Field("name", StringType, resolve = _.value.name),
      Field("createdAt", ZonedDateTimeType, resolve = _.value.createdAt)
    )
  )
  // Input type mirroring PersonType, used by the mutations below.
  val PersonInputType = InputObjectType[PersonEntity](
    "PersonInput",
    List(
      InputField("id", IntType),
      InputField("name", StringType),
      InputField("createdAt", ZonedDateTimeType)
    )
  )
  // Query: fetch a single person by id inside a read-only session.
  // NOTE(review): `Id` and `ZonedDateTimeType` are defined outside this trait — confirm their
  // definitions (presumably a shared argument and custom scalar).
  @SuppressWarnings(Array("org.wartremover.warts.Any"))
  val personField = Field(
    "person",
    OptionType(PersonType),
    arguments = Id :: Nil,
    resolve = (c: Context[UserContext, Unit]) =>
      DB.readOnly { implicit session =>
        c.ctx.personHandler.findById(c arg Id)
      }
  )
  // Relay-style connection type wrapping PersonType.
  @SuppressWarnings(Array("org.wartremover.warts.Any"))
  val ConnectionDefinition(_, personConnection) =
    Connection.definition[UserContext, Connection, PersonEntity]("Persons", PersonType)
  // Query: paginated person list using Relay connection arguments.
  @SuppressWarnings(Array("org.wartremover.warts.Any"))
  val personsField = Field(
    "persons",
    OptionType(personConnection),
    arguments = Connection.Args.All,
    resolve = (c: Context[UserContext, Unit]) =>
      DB.readOnly { implicit session =>
        Connection.connectionFromSeq(
          c.ctx.personHandler.findList(),
          ConnectionArgs(c)
        )
      }
  )
  val PersonArg = Argument("person", PersonInputType)
  // Mutation: insert a person inside a local transaction.
  @SuppressWarnings(Array("org.wartremover.warts.Any"))
  val createPersonField = Field(
    "createPerson",
    OptionType(IntType),
    arguments = PersonArg :: Nil,
    resolve = (c: Context[UserContext, Unit]) =>
      DB.localTx { implicit session =>
        c.ctx.personHandler.create(c arg PersonArg)
      }
  )
  // Mutation: update an existing person inside a local transaction.
  @SuppressWarnings(Array("org.wartremover.warts.Any"))
  val updatePersonField = Field(
    "updatePerson",
    OptionType(IntType),
    arguments = PersonArg :: Nil,
    resolve = (c: Context[UserContext, Unit]) =>
      DB.localTx { implicit session =>
        c.ctx.personHandler.update(c arg PersonArg)
      }
  )
  // Mutation: delete a person by id inside a local transaction.
  @SuppressWarnings(Array("org.wartremover.warts.Any"))
  val deletePersonField = Field(
    "deletePerson",
    OptionType(BooleanType),
    arguments = Id :: Nil,
    resolve = (c: Context[UserContext, Unit]) =>
      DB.localTx { implicit session =>
        c.ctx.personHandler.delete(c arg Id)
      }
  )
}
| t-mochizuki/scala-study | circleci-example/gql-server/src/main/scala/example/gql_server/schema/PersonSchema.scala | Scala | mit | 2,756 |
package org.scalassh
import org.specs2.mutable.Specification
import java.io.File
import org.scalassh.SshSession._
/**
 * Verifies that a simple command ("ls" of the test directory) can be executed over SSH against
 * localhost and that its exit code and output are as expected.
 * NOTE(review): relies on `before`/`after` and `testDir` provided by SpecSetup — confirm their
 * semantics in that trait.
 */
class SimpleConnectionTest extends Specification with SpecSetup{
before
// Mutable holder populated by the ssh block below and asserted on afterwards.
var result: CommandResult = null
// Create a marker file so the directory listing has known content.
val emptyFile = new File(testDir, "SimpleConnectionTest.txt")
emptyFile.createNewFile
"Listing a directory" should {
// Run the listing over SSH and capture the result for the assertions below.
ssh("localhost"){
result = Command("ls -ltr " + testDir.getAbsolutePath)
}
"return a valid exitCode" in {
result.exitCode must be_==(0)
}
"have SimpleConnectionTest.txt in the output" in{
result.output must contain("SimpleConnectionTest.txt")
}
}
after
} | wfaler/ScalaSSH | src/test/scala/org/scalassh/SimpleConnectionTest.scala | Scala | bsd-3-clause | 808 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.api
import java.nio.ByteBuffer
import kafka.api.ApiUtils._
import kafka.common.{ErrorMapping, OffsetAndMetadata, TopicAndPartition}
import kafka.network.{RequestOrResponseSend, RequestChannel}
import kafka.network.RequestChannel.Response
import kafka.utils.Logging
import scala.collection._
object OffsetCommitRequest extends Logging {
  val CurrentVersion: Short = 2
  val DefaultClientId = ""
  /**
   * Deserializes an OffsetCommitRequest from the buffer. The layout depends on the version:
   * v1 adds generation id, member id and a per-partition commit timestamp; v2 drops the
   * timestamp and adds a request-level retention time.
   */
  def readFrom(buffer: ByteBuffer): OffsetCommitRequest = {
    // Read values from the envelope
    val versionId = buffer.getShort
    assert(versionId == 0 || versionId == 1 || versionId == 2,
      "Version " + versionId + " is invalid for OffsetCommitRequest. Valid versions are 0, 1 or 2.")
    val correlationId = buffer.getInt
    val clientId = readShortString(buffer)
    // Read the OffsetRequest
    val groupId = readShortString(buffer)
    // version 1 and 2 specific fields
    val groupGenerationId: Int =
      if (versionId >= 1)
        buffer.getInt
      else
        org.apache.kafka.common.requests.OffsetCommitRequest.DEFAULT_GENERATION_ID
    val memberId: String =
      if (versionId >= 1)
        readShortString(buffer)
      else
        org.apache.kafka.common.requests.OffsetCommitRequest.DEFAULT_MEMBER_ID
    // version 2 specific fields
    val retentionMs: Long =
      if (versionId >= 2)
        buffer.getLong
      else
        org.apache.kafka.common.requests.OffsetCommitRequest.DEFAULT_RETENTION_TIME
    // Remaining payload: per-topic arrays of per-partition (offset, [timestamp], metadata).
    val topicCount = buffer.getInt
    val pairs = (1 to topicCount).flatMap(_ => {
      val topic = readShortString(buffer)
      val partitionCount = buffer.getInt
      (1 to partitionCount).map(_ => {
        val partitionId = buffer.getInt
        val offset = buffer.getLong
        val timestamp = {
          // version 1 specific field
          if (versionId == 1)
            buffer.getLong
          else
            org.apache.kafka.common.requests.OffsetCommitRequest.DEFAULT_TIMESTAMP
        }
        val metadata = readShortString(buffer)
        (TopicAndPartition(topic, partitionId), OffsetAndMetadata(offset, metadata, timestamp))
      })
    })
    OffsetCommitRequest(groupId, immutable.Map(pairs:_*), versionId, correlationId, clientId, groupGenerationId, memberId, retentionMs)
  }
}
/**
 * In-memory representation of an OffsetCommit request. `writeTo` and `sizeInBytes` must stay in
 * lockstep with `readFrom` above — the byte layout is version dependent (see readFrom).
 */
case class OffsetCommitRequest(groupId: String,
                               requestInfo: immutable.Map[TopicAndPartition, OffsetAndMetadata],
                               versionId: Short = OffsetCommitRequest.CurrentVersion,
                               correlationId: Int = 0,
                               clientId: String = OffsetCommitRequest.DefaultClientId,
                               groupGenerationId: Int = org.apache.kafka.common.requests.OffsetCommitRequest.DEFAULT_GENERATION_ID,
                               memberId: String = org.apache.kafka.common.requests.OffsetCommitRequest.DEFAULT_MEMBER_ID,
                               retentionMs: Long = org.apache.kafka.common.requests.OffsetCommitRequest.DEFAULT_RETENTION_TIME)
    extends RequestOrResponse(Some(RequestKeys.OffsetCommitKey)) {
  assert(versionId == 0 || versionId == 1 || versionId == 2,
    "Version " + versionId + " is invalid for OffsetCommitRequest. Valid versions are 0, 1 or 2.")
  // Grouped once and reused by both writeTo and sizeInBytes so they agree on iteration order.
  lazy val requestInfoGroupedByTopic = requestInfo.groupBy(_._1.topic)
  // Serializes this request into the buffer using the version-specific wire layout.
  def writeTo(buffer: ByteBuffer) {
    // Write envelope
    buffer.putShort(versionId)
    buffer.putInt(correlationId)
    writeShortString(buffer, clientId)
    // Write OffsetCommitRequest
    writeShortString(buffer, groupId) // consumer group
    // version 1 and 2 specific data
    if (versionId >= 1) {
      buffer.putInt(groupGenerationId)
      writeShortString(buffer, memberId)
    }
    // version 2 or above specific data
    if (versionId >= 2) {
      buffer.putLong(retentionMs)
    }
    buffer.putInt(requestInfoGroupedByTopic.size) // number of topics
    requestInfoGroupedByTopic.foreach( t1 => { // topic -> Map[TopicAndPartition, OffsetMetadataAndError]
      writeShortString(buffer, t1._1) // topic
      buffer.putInt(t1._2.size)       // number of partitions for this topic
      t1._2.foreach( t2 => {
        buffer.putInt(t2._1.partition)
        buffer.putLong(t2._2.offset)
        // version 1 specific data
        if (versionId == 1)
          buffer.putLong(t2._2.commitTimestamp)
        writeShortString(buffer, t2._2.metadata)
      })
    })
  }
  // Size accounting mirrors writeTo field-for-field.
  override def sizeInBytes =
    2 + /* versionId */
    4 + /* correlationId */
    shortStringLength(clientId) +
    shortStringLength(groupId) +
    (if (versionId >= 1) 4 /* group generation id */ + shortStringLength(memberId) else 0) +
    (if (versionId >= 2) 8 /* retention time */ else 0) +
    4 + /* topic count */
    requestInfoGroupedByTopic.foldLeft(0)((count, topicAndOffsets) => {
      val (topic, offsets) = topicAndOffsets
      count +
      shortStringLength(topic) + /* topic */
      4 + /* number of partitions */
      offsets.foldLeft(0)((innerCount, offsetAndMetadata) => {
        innerCount +
        4 /* partition */ +
        8 /* offset */ +
        (if (versionId == 1) 8 else 0) /* timestamp */ +
        shortStringLength(offsetAndMetadata._2.metadata)
      })
    })
  // Maps a request-handling failure onto a per-partition error response sent back to the client.
  override def handleError(e: Throwable, requestChannel: RequestChannel, request: RequestChannel.Request): Unit = {
    val errorCode = ErrorMapping.codeFor(e.getClass.asInstanceOf[Class[Throwable]])
    val commitStatus = requestInfo.mapValues(_ => errorCode)
    val commitResponse = OffsetCommitResponse(commitStatus, correlationId)
    requestChannel.sendResponse(new Response(request, new RequestOrResponseSend(request.connectionId, commitResponse)))
  }
  // Human-readable summary; `details` controls whether per-partition info is included.
  override def describe(details: Boolean): String = {
    val offsetCommitRequest = new StringBuilder
    offsetCommitRequest.append("Name: " + this.getClass.getSimpleName)
    offsetCommitRequest.append("; Version: " + versionId)
    offsetCommitRequest.append("; CorrelationId: " + correlationId)
    offsetCommitRequest.append("; ClientId: " + clientId)
    offsetCommitRequest.append("; GroupId: " + groupId)
    offsetCommitRequest.append("; GroupGenerationId: " + groupGenerationId)
    offsetCommitRequest.append("; MemberId: " + memberId)
    offsetCommitRequest.append("; RetentionMs: " + retentionMs)
    if(details)
      offsetCommitRequest.append("; RequestInfo: " + requestInfo.mkString(","))
    offsetCommitRequest.toString()
  }
  override def toString = {
    describe(details = true)
  }
}
| eljefe6a/kafka | core/src/main/scala/kafka/api/OffsetCommitRequest.scala | Scala | apache-2.0 | 7,337 |
package io.slicker.core.sort
import io.slicker.core.SortDirection
import io.slicker.core.sort.gen.SortMacros
import slick.ast.Ordering
import slick.lifted.Ordered
import scala.language.experimental.macros
import scala.language.implicitConversions
/**
* For some table `T` provide sorting mechanic
*
* @tparam T table
*/
/**
 * Sorting support for a table type `T`: maps (field name, direction) pairs onto a
 * [[slick.lifted.Ordered]] for that table.
 *
 * @tparam T table
 */
trait Sort[T] {

  /**
   * Resolves a single field name to an ordering on the given table.
   *
   * @param table         table with fields
   * @param field         field to sort by
   * @param sortDirection direction (asc or desc)
   * @return Some ordering when `field` is sortable on `T`, None otherwise
   */
  protected def ordered(table: T, field: String, sortDirection: io.slicker.core.SortDirection): Option[slick.lifted.Ordered]

  /**
   * Combines the orderings of all resolvable fields into a single [[slick.lifted.Ordered]].
   * Unresolvable fields are skipped; the result is empty when no field could be resolved.
   *
   * @param table  table with fields
   * @param fields fields to sort by, each with its direction
   */
  def apply(table: T, fields: Seq[(String, SortDirection)]): Ordered = {
    val resolved = fields.flatMap { case (field, direction) => ordered(table, field, direction) }
    new Ordered(columns = resolved.flatMap(_.columns).toIndexedSeq)
  }
}
object Sort {
  /**
   * Manually build [[Sort]] for table `T` using a PartialFunction from field name to column.
   *
   * {{{
   * Sort.manual[Users](userTable => {
   *   case "photo" => userTable.photo
   *   case "email" => userTable.email
   * })
   * }}}
   *
   * @return [[Sort]] instance that will apply sorting only for names/columns in provided partial function
   */
  def manual[T](pf: T => PartialFunction[String, Ordered]): Sort[T] = {
    new Sort[T] {
      override protected def ordered(table: T, field: String, sortDirection: SortDirection): Option[Ordered] = {
        pf(table).lift(field).map { ordered =>
          // Keep the user's column nodes but override each direction with the requested one.
          new Ordered(columns = ordered.columns.map {
            case (node, _) => (node, sortDirectionToOrdering(sortDirection))
          })
        }
      }
    }
  }
// Translates the library's direction value into Slick's Ordering. The match is kept explicit
// (rather than a boolean test) so a new direction variant surfaces immediately.
private def sortDirectionToOrdering(direction: SortDirection): Ordering = {
  direction match {
    case SortDirection.Asc => Ordering().asc
    case SortDirection.Desc => Ordering().desc
  }
}
  /**
   * Build [[Sort]] for table `T` semiautomatically.
   * It will use provided method names as sorting names. All other fields will be ignored.
   *
   * {{{
   * Sort.semiauto[Users](table => Seq(table.id, table.name, table.email))
   * }}}
   *
   * @return [[Sort]] instance that will apply sorting only for columns in provided sequence
   */
  def semiauto[T](f: T => Seq[_]): Sort[T] = macro SortMacros.partialImpl[T]
  /**
   * Build [[Sort]] for table `T` automatically.
   * It will build [[Sort]] for all columns in table, using method names as sorting names.
   *
   * {{{
   * Sort.auto[Users]
   * }}}
   *
   * @return [[Sort]] instance that will apply sorting for all columns in provided table `T`
   */
  def auto[T]: Sort[T] = macro SortMacros.fullImpl[T]
  /**
   * Build empty [[Sort]] for table `T`. Default behaviour for all tables.
   *
   * @return [[Sort]] instance that will always return no fields to sort by
   */
  def empty[T]: Sort[T] = new Sort[T] {
    override protected def ordered(table: T, field: String, sortDirection: SortDirection): Option[Ordered] = None
  }
} | ImLiar/slicker | slicker-core/src/main/scala/io/slicker/core/sort/Sort.scala | Scala | apache-2.0 | 3,616 |
import org.specs2.mutable._
import play.api.libs.iteratee._
import scala.concurrent._
import play.api.libs.json._
import play.api.libs.json.util._
import play.api.libs.json.Reads._
import play.api.libs.json.Writes._
/**
 * Shared test fixtures: a global ExecutionContext, a connection to a local mongod and a
 * dedicated test database that is dropped before first use.
 */
object Common {
  import scala.concurrent._
  import scala.concurrent.duration._
  import reactivemongo.api._
  implicit val ec = ExecutionContext.Implicits.global
  /*implicit val writer = DefaultBSONHandlers.DefaultBSONDocumentWriter
  implicit val reader = DefaultBSONHandlers.DefaultBSONDocumentReader
  implicit val handler = DefaultBSONHandlers.DefaultBSONReaderHandler*/
  // Maximum wait applied to blocking test operations.
  val timeout = 5 seconds
  // Lazy so no connection is opened unless a spec actually touches the database.
  lazy val connection = new MongoDriver().connection(List("localhost:27017"))
  lazy val db = {
    val _db = connection("specs2-test-reactivemongo")
    // Drop synchronously so every run starts from an empty database.
    Await.ready(_db.drop, timeout)
    _db
  }
}
// Sender of a package.
case class Expeditor(name: String)
// A single line item within a package.
case class Item(name: String, description: String, occurrences: Int)
// A shipment: sender, its items and the total price.
case class Package(
  expeditor: Expeditor,
  items: List[Item],
  price: Float)
class JsonBson extends Specification {
import Common._
import reactivemongo.bson._
import play.modules.reactivemongo.json.ImplicitBSONHandlers
import play.modules.reactivemongo.json.ImplicitBSONHandlers._
import play.modules.reactivemongo.json.BSONFormats
sequential
lazy val collection = db("somecollection_commonusecases")
val pack = Package(
Expeditor("Amazon"),
List(Item("A Game of Thrones", "Book", 1)),
20)
"ReactiveMongo Plugin" should {
"convert a simple json to bson and vice versa" in {
val json = Json.obj("coucou" -> JsString("jack"))
val bson = JsObjectWriter.write(json)
val json2 = JsObjectReader.read(bson)
json.toString mustEqual json2.toString
}
"convert a simple json array to bson and vice versa" in {
val json = Json.arr(JsString("jack"), JsNumber(9.1))
val bson = BSONFormats.toBSON(json).get.asInstanceOf[BSONArray]
val json2 = BSONFormats.toJSON(bson)
json.toString mustEqual json2.toString
}
"convert a json doc containing an array and vice versa" in {
val json = Json.obj(
"name" -> JsString("jack"),
"contacts" -> Json.arr(
Json.obj(
"email" -> "jack@jack.com")))
val bson = JsObjectWriter.write(json)
val json2 = JsObjectReader.read(bson)
json.toString mustEqual json2.toString
}
"format a jspath for mongo crud" in {
import play.api.libs.functional._
import play.api.libs.functional.syntax._
import play.modules.reactivemongo.json.Writers._
case class Limit(low: Option[Int], high: Option[Int])
case class App(limit: Option[Limit])
val lowWriter = (__ \\ "low").writeNullable[Int]
val highWriter = (__ \\ "high").writeNullable[Int]
val limitWriter = (lowWriter and highWriter)(unlift(Limit.unapply))
val appWriter = (__ \\ "limit").writemongo[Limit](limitWriter)
appWriter.writes(Limit(Some(1), None)) mustEqual
Json.obj("limit.low" -> 1)
appWriter.writes(Limit(Some(1), Some(2))) mustEqual
Json.obj("limit.low" -> 1, "limit.high" -> 2)
appWriter.writes(Limit(None, None)) mustEqual
Json.obj()
}
}
}
| Rhinofly/Play-ReactiveMongo | src/test/scala/json.scala | Scala | apache-2.0 | 3,215 |
package org.jetbrains.plugins.scala.codeInsight.intentions.argument
import org.jetbrains.plugins.scala.codeInsight.intention.argument.ArgumentToBlockExpressionIntention
import org.jetbrains.plugins.scala.codeInsight.intentions.ScalaIntentionTestBase
/**
* Tests for the "argument to block expression" intention, which rewrites a
* function-literal argument in parentheses into a block argument.
*
* @author Roman.Shein
* @since 21.03.2016.
*/
class ArgumentToBlockExpressionIntentionTest extends ScalaIntentionTestBase {
override def familyName = ArgumentToBlockExpressionIntention.familyName
// `: Unit =` instead of deprecated procedure syntax, matching the method below.
def test(): Unit = {
val text =
"""
|class Test {
| method(x, y)<caret>( x => x)
|}
"""
val resultText =
"""
|class Test {
| method(x, y)<caret> { x =>
| x
| }
|}
"""
doTest(text, resultText)
}
// NOTE(review): method name has a typo ("Parameterd" -> "Parameter"); kept
// unchanged so test discovery by name is unaffected.
def testParameterdOnNewLine(): Unit = {
val scalaSettings = getScalaCodeStyleSettings
scalaSettings.PLACE_CLOSURE_PARAMETERS_ON_NEW_LINE = true
val text =
"""
|class Test {
| method(x, y)<caret>( x => x)
|}
"""
val resultText =
"""
|class Test {
| method(x, y)<caret> {
| x =>
| x
| }
|}
"""
doTest(text, resultText)
}
}
| whorbowicz/intellij-scala | test/org/jetbrains/plugins/scala/codeInsight/intentions/argument/ArgumentToBlockExpressionIntentionTest.scala | Scala | apache-2.0 | 1,216 |
/*^
===========================================================================
TwoBinManager
===========================================================================
Copyright (C) 2016-2017 Gianluca Costa
===========================================================================
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public
License along with this program. If not, see
<http://www.gnu.org/licenses/gpl-3.0.html>.
===========================================================================
*/
package info.gianlucacosta.twobinmanager.generators.binpacking
import javafx.fxml.FXML
import info.gianlucacosta.twobinmanager.generators.spectrum.algorithm.SpectrumAlgorithm
import info.gianlucacosta.twobinmanager.generators.spectrumbase.SpectrumControllerBase
import info.gianlucacosta.twobinpack.core._
import scalafx.Includes._
/** Controller for the bin-packing problem generator form. */
private class BinPackingController extends SpectrumControllerBase {
/** Task computing a random block pool from the values currently entered in the form fields. */
private class BinPackingTask extends BlockPoolTask {
override def call(): BlockPool = {
// Block width is fixed to 1 for this generator; only the height varies.
val minBlockWidth =
1
// NOTE(review): text().toInt throws NumberFormatException on non-numeric
// input — presumably the fields are validated upstream; confirm.
val minBlockHeight =
minBlockHeightField.text().toInt
val minBlockDimension =
BlockDimension(
minBlockWidth,
minBlockHeight
)
val maxBlockWidth =
1
val maxBlockHeight =
maxBlockHeightField.text().toInt
val maxBlockDimension =
BlockDimension(
maxBlockWidth,
maxBlockHeight
)
val minBlockQuantity =
minBlockQuantityField.text().toInt
val maxBlockQuantity =
maxBlockQuantityField.text().toInt
// Inclusive range for how many blocks may be generated per dimension.
val blockQuantityRange =
Range.inclusive(minBlockQuantity, maxBlockQuantity)
val canRotateBlocks =
false
SpectrumAlgorithm.createRandomPool(
minBlockDimension,
maxBlockDimension,
blockQuantityRange,
canRotateBlocks
)
}
}
override protected def createBlockPoolTask(): BlockPoolTask =
new BinPackingTask
// Form fields injected by the FXML loader.
@FXML
var minBlockHeightField: javafx.scene.control.TextField = _
@FXML
var maxBlockHeightField: javafx.scene.control.TextField = _
@FXML
var minBlockQuantityField: javafx.scene.control.TextField = _
@FXML
var maxBlockQuantityField: javafx.scene.control.TextField = _
}
| giancosta86/TwoBinManager | src/main/scala/info/gianlucacosta/twobinmanager/generators/binpacking/BinPackingController.scala | Scala | gpl-3.0 | 2,795 |
/*
* Copyright (C) 2014 - 2016 Softwaremill <http://softwaremill.com>
* Copyright (C) 2016 - 2019 Lightbend Inc. <http://www.lightbend.com>
*/
package akka.kafka.testkit.internal
import java.time.Duration
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicInteger
import java.util.{Arrays, Properties}
import scala.jdk.CollectionConverters._
import akka.actor.ActorSystem
import akka.kafka.testkit.KafkaTestkitSettings
import akka.kafka.{CommitterSettings, ConsumerSettings, ProducerSettings}
import org.apache.kafka.clients.admin.{AdminClient, AdminClientConfig, NewTopic}
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.slf4j.Logger
/**
 * Common functions for scaladsl and javadsl Testkit.
 *
 * Mixed-in in both, scaladsl and javadsl classes, therefore API should be usable from both - Scala and Java.
 */
trait KafkaTestKit {
  def log: Logger

  val DefaultKey = "key"

  private lazy val producerDefaultsInstance: ProducerSettings[String, String] =
    ProducerSettings(system, new StringSerializer, new StringSerializer)
      .withBootstrapServers(bootstrapServers)

  /** Producer settings pointing at the test broker(s). */
  def producerDefaults: ProducerSettings[String, String] = producerDefaultsInstance

  private lazy val consumerDefaultsInstance: ConsumerSettings[String, String] =
    ConsumerSettings(system, new StringDeserializer, new StringDeserializer)
      .withBootstrapServers(bootstrapServers)
      .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

  /** Consumer settings pointing at the test broker(s), reading from the earliest offset. */
  def consumerDefaults: ConsumerSettings[String, String] = consumerDefaultsInstance

  private lazy val committerDefaultsInstance = CommitterSettings(system)

  def committerDefaults: CommitterSettings = committerDefaultsInstance

  private def nextNumber(): Int = KafkaTestKitClass.topicCounter.incrementAndGet()

  /**
   * Return a unique topic name.
   */
  // `${nextNumber()}` is applied explicitly: bare `$nextNumber` relies on
  // auto-application of the nullary method, which is deprecated in Scala 2.13.
  def createTopicName(suffix: Int): String = s"topic-$suffix-${nextNumber()}"

  /**
   * Return a unique group id with a default suffix.
   */
  def createGroupId(): String = createGroupId(0)

  /**
   * Return a unique group id with a given suffix.
   */
  def createGroupId(suffix: Int): String = s"group-$suffix-${nextNumber()}"

  /**
   * Return a unique transactional id with a default suffix.
   */
  def createTransactionalId(): String = createTransactionalId(0)

  /**
   * Return a unique transactional id with a given suffix.
   */
  def createTransactionalId(suffix: Int): String = s"transactionalId-$suffix-${nextNumber()}"

  def system: ActorSystem
  def bootstrapServers: String

  val settings = KafkaTestkitSettings(system)

  private lazy val adminDefaults = {
    val config = new Properties()
    config.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers)
    config
  }

  private var adminClientVar: AdminClient = _

  /**
   * Access to the Kafka AdminClient whose lifecycle is managed via
   * `setUpAdminClient()` and `cleanUpAdminClient()`.
   */
  def adminClient: AdminClient = {
    // Message previously named nonexistent setupAdminClient()/cleanupAdminClient().
    assert(adminClientVar != null,
           "admin client not created, be sure to call setUpAdminClient() and cleanUpAdminClient()")
    adminClientVar
  }

  /**
   * Create internal admin clients.
   * Gives access to `adminClient`,
   * be sure to call `cleanUpAdminClient` after the tests are done.
   */
  def setUpAdminClient(): Unit =
    if (adminClientVar == null) {
      adminClientVar = AdminClient.create(adminDefaults)
    }

  /**
   * Close internal admin client instances.
   */
  def cleanUpAdminClient(): Unit =
    if (adminClientVar != null) {
      adminClientVar.close(Duration.ofSeconds(60))
      adminClientVar = null
    }

  /**
   * Create a topic with a default suffix, single partition, a replication factor of one, and no topic configuration.
   *
   * This method will block and return only when the topic has been successfully created.
   */
  def createTopic(): String = createTopic(0, 1, 1, Map[String, String]())

  /**
   * Create a topic with a given suffix, single partitions, a replication factor of one, and no topic configuration.
   *
   * This method will block and return only when the topic has been successfully created.
   */
  def createTopic(suffix: Int): String = createTopic(suffix, 1, 1, Map[String, String]())

  /**
   * Create a topic with a given suffix, partition number, a replication factor of one, and no topic configuration.
   *
   * This method will block and return only when the topic has been successfully created.
   */
  def createTopic(suffix: Int, partitions: Int): String =
    createTopic(suffix, partitions, 1, Map[String, String]())

  /**
   * Create a topic with given suffix, partition number, replication factor, and no topic configuration.
   *
   * This method will block and return only when the topic has been successfully created.
   */
  def createTopic(suffix: Int, partitions: Int, replication: Int): String =
    createTopic(suffix, partitions, replication, Map[String, String]())

  /**
   * Create a topic with given suffix, partition number, replication factor, and topic configuration.
   *
   * This method will block and return only when the topic has been successfully created.
   */
  def createTopic(suffix: Int,
                  partitions: Int,
                  replication: Int,
                  config: scala.collection.Map[String, String]): String =
    createTopic(suffix, partitions, replication, config.asJava)

  /**
   * Java Api
   *
   * Create a topic with given suffix, partition number, replication factor, and topic configuration.
   *
   * This method will block and return only when the topic has been successfully created.
   */
  def createTopic(suffix: Int, partitions: Int, replication: Int, config: java.util.Map[String, String]): String = {
    val topicName = createTopicName(suffix)
    val createResult = adminClient.createTopics(
      Arrays.asList(new NewTopic(topicName, partitions, replication.toShort).configs(config))
    )
    createResult.all().get(10, TimeUnit.SECONDS)
    topicName
  }

  /** Sleep for `ms` milliseconds, logging `msg` at debug level. */
  def sleepMillis(ms: Long, msg: String): Unit = {
    log.debug(s"sleeping $ms ms $msg")
    Thread.sleep(ms)
  }

  /** Sleep for `s` seconds, logging `msg` at debug level. */
  def sleepSeconds(s: Int, msg: String): Unit = {
    log.debug(s"sleeping $s s $msg")
    Thread.sleep(s * 1000L)
  }
}
/** Convenience base class fixing the actor system and bootstrap servers for [[KafkaTestKit]]. */
abstract class KafkaTestKitClass(override val system: ActorSystem, override val bootstrapServers: String)
extends KafkaTestKit
object KafkaTestKitClass {
// Shared counter backing the unique topic/group/transactional-id names.
val topicCounter = new AtomicInteger()
/** Broker properties making Kafka's internal topics use the given replication factor. */
def createReplicationFactorBrokerProps(replicationFactor: Int): Map[String, String] = Map(
"offsets.topic.replication.factor" -> s"$replicationFactor",
"transaction.state.log.replication.factor" -> s"$replicationFactor",
"transaction.state.log.min.isr" -> s"$replicationFactor"
)
}
| softwaremill/reactive-kafka | testkit/src/main/scala/akka/kafka/testkit/internal/KafkaTestKit.scala | Scala | apache-2.0 | 6,739 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.tstreams.testutils
import java.io.File
import java.lang.management.ManagementFactory
import java.util.concurrent.atomic.AtomicInteger
import com.bwsw.tstreams.env.{ConfigurationOptions, TStreamsFactory}
import com.google.common.io.Files
import org.apache.curator.framework.CuratorFrameworkFactory
import org.apache.curator.retry.ExponentialBackoffRetry
import org.slf4j.LoggerFactory
import scala.collection.mutable.ListBuffer
/**
* Test help utils
*/
trait TestUtils {
protected val batchSizeTestVal = 5
/**
* Random alpha string generator
*
* @return Alpha string
*/
val id = TestUtils.moveId()
val randomKeyspace = TestUtils.getKeyspace(id)
val zookeeperPort = TestUtils.ZOOKEEPER_PORT
val logger = LoggerFactory.getLogger(this.getClass)
val uptime = ManagementFactory.getRuntimeMXBean.getStartTime
logger.info("-------------------------------------------------------")
logger.info("Test suite " + this.getClass.toString + " started")
logger.info("Test Suite uptime is " + ((System.currentTimeMillis - uptime) / 1000L).toString + " seconds")
logger.info("-------------------------------------------------------")
val DEFAULT_STREAM_NAME = "test_stream"
val f = new TStreamsFactory()
f.setProperty(ConfigurationOptions.Coordination.endpoints, s"localhost:$zookeeperPort")
.setProperty(ConfigurationOptions.Stream.name, DEFAULT_STREAM_NAME)
.setProperty(ConfigurationOptions.Stream.partitionsCount, 3)
.setProperty(ConfigurationOptions.Common.authenticationKey, TestUtils.AUTH_KEY)
val curatorClient = CuratorFrameworkFactory.builder()
.namespace("")
.connectionTimeoutMs(7000)
.sessionTimeoutMs(7000)
.retryPolicy(new ExponentialBackoffRetry(1000, 3))
.connectString(s"127.0.0.1:$zookeeperPort").build()
curatorClient.start()
if (curatorClient.checkExists().forPath("/tts") == null)
curatorClient.create().forPath("/tts")
removeZkMetadata(f.getProperty(ConfigurationOptions.Coordination.path).toString)
def getRandomString: String = RandomStringCreator.randomAlphaString(10)
/**
* Sorting checker
*/
def isSorted(list: ListBuffer[Long]): Boolean = {
if (list.isEmpty)
return true
var checkVal = true
var curVal = list.head
list foreach { el =>
if (el < curVal)
checkVal = false
if (el > curVal)
curVal = el
}
checkVal
}
/**
* Remove zk metadata from concrete root
*
* @param path Zk root to delete
*/
def removeZkMetadata(path: String) = {
if (curatorClient.checkExists.forPath(path) != null)
curatorClient.delete.deletingChildrenIfNeeded().forPath(path)
}
/**
* Remove directory recursive
*
* @param f Dir to remove
*/
def remove(f: File): Unit = {
if (f.isDirectory) {
for (c <- f.listFiles())
remove(c)
}
f.delete()
}
def onAfterAll() = {
System.setProperty("DEBUG", "false")
removeZkMetadata(f.getProperty(ConfigurationOptions.Coordination.path).toString)
removeZkMetadata("/unit")
curatorClient.close()
f.dumpStorageClients()
}
def createNewStream(partitions: Int = 3, name: String = DEFAULT_STREAM_NAME) = {
val storageClient = f.getStorageClient()
if(storageClient.checkStreamExists(name))
storageClient.deleteStream(name)
storageClient.createStream(name, partitions, 24 * 3600, "")
storageClient.shutdown()
}
}
object TestUtils {
// NOTE(review): the result of this getProperty call is discarded; it neither
// sets the property nor has another visible effect here — confirm whether
// System.setProperty was intended.
System.getProperty("java.io.tmpdir", "./target/")
val ZOOKEEPER_PORT = 21810
// Monotonic id generator; each test suite gets its own keyspace number.
private val id: AtomicInteger = new AtomicInteger(0)
/** Returns the next unique test id. */
def moveId(): Int = {
val rid = id.incrementAndGet()
rid
}
def getKeyspace(id: Int): String = "tk_" + id.toString
def getTmpDir(): String = Files.createTempDir().toString
// Embedded ZooKeeper shared by all suites, writing to a fresh temp directory.
private val zk = new ZookeeperTestServer(ZOOKEEPER_PORT, Files.createTempDir().toString)
val AUTH_KEY = "test"
}
| bwsw/t-streams | src/test/scala/com/bwsw/tstreams/testutils/TestUtils.scala | Scala | apache-2.0 | 4,732 |
package fpscala.chapter15
import fpscala.BaseSpec
/**
* Created by sajit on 11/26/16.
*/
// Tests for the stream-transducer Process combinators (FP in Scala, chapter 15).
class ProcessSpec extends BaseSpec{
it should "sum up values" in {
// Running sums: each output element is the sum of the inputs seen so far.
val r = Process.sum(Stream(4.1,6.9,1.0))
r.toList should be (List(4.1,11.0,12.0))
}
it should "take 4 values" in {
val r = Process.take(4)(Stream(2,4,6,1,6,3))
r.toList should be (List(2,4,6,1))
}
it should "drop 4 values " in {
val r = Process.drop(4)(Stream(2,4,6,1,6,3))
r.toList should be(List(6,3))
}
it should "takeWhile even " in {
val f:(Int => Boolean) = (x => x%2 == 0)
val r = Process.takeWhile(f)(Stream(2,4,6,1,6,3))
r.toList should be(List(2,4,6))
}
it should "dropWhile even" in {
val f:(Int => Boolean) = (x => x%2 == 0)
val r = Process.dropWhile(f)(Stream(2,4,6,1,6,3))
r.toList should be(List(1,6,3))
// A non-matching head means nothing at all is dropped.
val r2 = Process.dropWhile(f)(Stream(1,2,4,6,1,6,3))
r2.toList should be (List(1,2,4,6,1,6,3))
}
it should "count " in {
Process.count(Stream("a","b","c")) should be (Stream(1,2,3))
}
it should "maintain running average 1 " in {
Process.mean(Stream()) should be (Stream())
}
it should "maintain running average 2 " in {
Process.mean(Stream(4,-5,10)) should be (Stream(4.0,-0.5,3.0))
Process.mean(Stream(4)) should be (Stream(4.0))
}
it should "compose two processes" in {
// count emits 1,2,3,...; takeWhile(_ < 2) keeps only the leading 1.
val p1 = Process.count[Int]
val f:(Int => Boolean) = {x => x <2}
val p2 = Process.takeWhile(f)
val result:Stream[Int] = p1.|>(p2)(Stream(4,5,6,9))
result.toList should be (List(1))
}
it should "check if not exists " in {
val f:(Int => Boolean) = {x => x > 10}
// NOTE(review): if `exists` returns a lazy structure, the mapped assertion may
// never be forced and this test would pass vacuously — consider forcing the
// result (e.g. via toList) or asserting on it directly. Confirm.
Process.exists(f)(Stream(1,5,6,9)).map(result => result should be (false))
}
it should "check if exists " in {
val f:(Int => Boolean) = {x => x %2 == 0}
// NOTE(review): same laziness concern as the previous test.
Process.exists(f)(Stream(1,5,6,9)).map(result => result should be (true))
}
}
| sajit/learnyou | scala/minimal-scala/src/test/scala/fpscala/chapter15/ProcessSpec.scala | Scala | mit | 1,911 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalactic.Equality
import org.scalactic.Explicitly
import org.scalactic.Prettifier
import collection.GenTraversable
import SharedHelpers._
import Matchers._
import org.scalactic.ArrayHelper.deep
class InOrderContainMatcherEqualitySpec extends FunSpec with Explicitly {
private val prettifier = Prettifier.default
class CustomEquality extends Equality[String] {
def areEqual(left: String, right: Any) =
left.trim == (right match {
case s: String => s.trim
case other => other
})
}
describe("inOrder ") {
implicit val equality = new CustomEquality
def checkShouldContainStackDepth(e: exceptions.StackDepthException, left: Any, right: GenTraversable[Any], lineNumber: Int): Unit = {
val leftText = FailureMessages.decorateToStringValue(prettifier, left)
e.message should be (Some(leftText + " did not contain all of (" + right.map(r => FailureMessages.decorateToStringValue(prettifier, r)).mkString(", ") + ") in order"))
e.failedCodeFileName should be (Some("InOrderContainMatcherEqualitySpec.scala"))
e.failedCodeLineNumber should be (Some(lineNumber))
}
def checkShouldNotContainStackDepth(e: exceptions.StackDepthException, left: Any, right: GenTraversable[Any], lineNumber: Int): Unit = {
val leftText = FailureMessages.decorateToStringValue(prettifier, left)
e.message should be (Some(leftText + " contained all of (" + right.map(r => FailureMessages.decorateToStringValue(prettifier, r)).mkString(", ") + ") in order"))
e.failedCodeFileName should be (Some("InOrderContainMatcherEqualitySpec.scala"))
e.failedCodeLineNumber should be (Some(lineNumber))
}
it("should take custom implicit equality in scope when 'should contain' is used") {
List("1 ", "2", "3 ") should contain inOrder ("1", "2 ", "3")
Array("1 ", "2", "3 ") should contain inOrder ("1", "2 ", "3")
// SKIP-SCALATESTJS,NATIVE-START
javaList("1", "2 ", "3") should contain inOrder ("1", "2 ", "3")
// SKIP-SCALATESTJS,NATIVE-END
}
it("should take custom implicit equality in scope when 'should not contain' is used") {
List("1 ", "2", "3 ") should not contain inOrder ("3", "2 ", "1")
Array("1 ", "2", "3 ") should not contain inOrder ("3", "2 ", "1")
// SKIP-SCALATESTJS,NATIVE-START
javaList("1 ", "2", "3 ") should not contain inOrder ("3", "2 ", "1")
// SKIP-SCALATESTJS,NATIVE-END
}
it("should throw TestFailedException with correct stack depth and message when 'should contain custom matcher' failed with custom implicit equality in scope") {
val left1 = List("1 ", "2", "3 ")
val e1 = intercept[exceptions.TestFailedException] {
left1 should contain inOrder ("3", "2 ", "1")
}
checkShouldContainStackDepth(e1, left1, deep(Array("3", "2 ", "1")), thisLineNumber - 2)
val left2 = Array("1 ", "2", "3 ")
val e2 = intercept[exceptions.TestFailedException] {
left2 should contain inOrder ("3", "2 ", "1")
}
checkShouldContainStackDepth(e2, left2, deep(Array("3", "2 ", "1")), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-START
val left3 = javaList("1 ", "2", "3 ")
val e3 = intercept[exceptions.TestFailedException] {
left3 should contain inOrder ("3", "2 ", "1")
}
checkShouldContainStackDepth(e3, left3, deep(Array("3", "2 ", "1")), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-END
}
it("should throw TestFailedException with correct stack depth and message when 'should not contain custom matcher' failed with custom implicit equality in scope") {
val left1 = List("1 ", "2", "3 ")
val e1 = intercept[exceptions.TestFailedException] {
left1 should not contain inOrder ("1", "2 ", "3")
}
checkShouldNotContainStackDepth(e1, left1, deep(Array("1", "2 ", "3")), thisLineNumber - 2)
val left2 = Array("1 ", "2", "3 ")
val e2 = intercept[exceptions.TestFailedException] {
left2 should not contain inOrder ("1", "2 ", "3")
}
checkShouldNotContainStackDepth(e2, left2, deep(Array("1", "2 ", "3")), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-START
val left3 = javaList("1 ", "2", "3 ")
val e3 = intercept[exceptions.TestFailedException] {
left3 should not contain inOrder ("1", "2 ", "3")
}
checkShouldNotContainStackDepth(e3, left3, deep(Array("1", "2 ", "3")), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-END
}
it("should take passed in custom explicit equality when 'should contain' is used") {
(List("1 ", "2", "3 ") should contain inOrder ("1", "2 ", "3")) (equality)
(Array("1 ", "2", "3 ") should contain inOrder ("1", "2 ", "3")) (equality)
// SKIP-SCALATESTJS,NATIVE-START
(javaList("1 ", "2", "3 ") should contain inOrder ("1", "2 ", "3")) (equality)
// SKIP-SCALATESTJS,NATIVE-END
}
it("should take passed in custom explicit equality when 'should not contain' is used") {
(List("1 ", "2", "3 ") should not contain inOrder ("3", "2 ", "1")) (equality)
(Array("1 ", "2", "3 ") should not contain inOrder ("3", "2 ", "1")) (equality)
// SKIP-SCALATESTJS,NATIVE-START
(javaList("1 ", "2", "3 ") should not contain inOrder ("3", "2 ", "1")) (equality)
// SKIP-SCALATESTJS,NATIVE-END
}
it("should throw TestFailedException with correct stack depth and message when 'should contain custom matcher' failed with custom explicit equality") {
val left1 = List("1 ", "2", "3 ")
val e1 = intercept[exceptions.TestFailedException] {
(left1 should contain inOrder ("3", "2 ", "1")) (equality)
}
checkShouldContainStackDepth(e1, left1, deep(Array("3", "2 ", "1")), thisLineNumber - 2)
val left2 = Array("1 ", "2", "3 ")
val e2 = intercept[exceptions.TestFailedException] {
(left2 should contain inOrder ("3", "2 ", "1")) (equality)
}
checkShouldContainStackDepth(e2, left2, deep(Array("3", "2 ", "1")), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-START
val left3 = javaList("1 ", "2", "3 ")
val e3 = intercept[exceptions.TestFailedException] {
(left3 should contain inOrder ("3", "2 ", "1")) (equality)
}
checkShouldContainStackDepth(e3, left3, deep(Array("3", "2 ", "1")), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-END
}
it("should throw TestFailedException with correct stack depth and message when 'should not contain custom matcher' failed with custom explicit equality") {
val left1 = List("1 ", "2", "3 ")
val e1 = intercept[exceptions.TestFailedException] {
(left1 should not contain inOrder ("1", "2 ", "3")) (equality)
}
checkShouldNotContainStackDepth(e1, left1, deep(Array("1", "2 ", "3")), thisLineNumber - 2)
val left2 = Array("1 ", "2", "3 ")
val e2 = intercept[exceptions.TestFailedException] {
(left2 should not contain inOrder ("1", "2 ", "3")) (equality)
}
checkShouldNotContainStackDepth(e2, left2, deep(Array("1", "2 ", "3")), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-START
val left3 = javaList("1 ", "2", "3 ")
val e3 = intercept[exceptions.TestFailedException] {
(left3 should not contain inOrder ("1", "2 ", "3")) (equality)
}
checkShouldNotContainStackDepth(e3, left3, deep(Array("1", "2 ", "3")), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-END
}
}
}
| dotty-staging/scalatest | scalatest-test/src/test/scala/org/scalatest/InOrderContainMatcherEqualitySpec.scala | Scala | apache-2.0 | 8,256 |
package fs2
import scala.concurrent.ExecutionContext
import cats.effect.IO
// Platform-specific test helpers: the execution context and default scheduler.
trait TestUtilPlatform {
implicit val executionContext: ExecutionContext =
ExecutionContext.Implicits.global
// Single-element stream carrying the platform's default scheduler.
val mkScheduler: Stream[IO, Scheduler] = Stream.emit(Scheduler.default)
}
| zaneli/fs2 | core/js/src/test/scala/fs2/TestUtilPlatform.scala | Scala | mit | 268 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2002-2018, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala
package xml
package parsing
/**
* Implementation of MarkupHandler that constructs nodes.
*
* @author Burak Emir
*/
abstract class ConstructingHandler extends MarkupHandler {
// Whether whitespace-only text nodes are preserved by the parser.
val preserveWS: Boolean
/** Builds an element node from its prefix, label, attributes, namespace scope and children. */
def elem(pos: Int, pre: String, label: String, attrs: MetaData,
pscope: NamespaceBinding, empty: Boolean, nodes: NodeSeq): NodeSeq =
Elem(pre, label, attrs, pscope, empty, nodes: _*)
/** Builds a processing-instruction node. */
def procInstr(pos: Int, target: String, txt: String) =
ProcInstr(target, txt)
/** Builds a comment node. */
def comment(pos: Int, txt: String) = Comment(txt)
/** Builds an entity-reference node. */
def entityRef(pos: Int, n: String) = EntityRef(n)
/** Builds a text node. */
def text(pos: Int, txt: String) = Text(txt)
}
| ashawley/scala-xml | shared/src/main/scala/scala/xml/parsing/ConstructingHandler.scala | Scala | bsd-3-clause | 1,186 |
/*
* Copyright 2016 Uncharted Software Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package software.uncharted.salt.core.analytic.numeric
import org.scalatest._
import software.uncharted.salt.core.analytic.numeric._
/**
 * Verifies that MeanAggregator is exactly the pairing of CountAggregator (count)
 * and SumAggregator (sum), finished by sum / count.
 * Mutable `var`s replaced with `val`s; the one reassigned variable is split into
 * separately named values — behavior is unchanged.
 */
class MeanAggregatorSpec extends FunSpec {
  describe("MeanAggregator") {
    describe("#default()") {
      it("should have a default intermediate value equal to the default values of the Sum and Count aggregators") {
        assert(MeanAggregator.default.equals((CountAggregator.default, SumAggregator.default)))
      }
    }
    describe("#add()") {
      it("should return a tuple representing the result of calling add() on a CountAggregator and a SumAggregator") {
        // NaN input: the count side still increments; the sum side follows
        // SumAggregator's own NaN handling.
        val nanInput = Double.NaN
        val expectedForNaN =
          (CountAggregator.add(CountAggregator.default, Some(1)), SumAggregator.add(SumAggregator.default, Some(nanInput)))
        assert(MeanAggregator.add(MeanAggregator.default, Some(nanInput)).equals(expectedForNaN))
        // A missing value also increments the count but adds nothing to the sum.
        val expectedForNone =
          (CountAggregator.add(CountAggregator.default, Some(1)), SumAggregator.add(SumAggregator.default, None))
        assert(MeanAggregator.add(MeanAggregator.default, None).equals(expectedForNone))
        // An ordinary numeric value.
        val numericInput = Math.random()
        val expectedForNumeric =
          (CountAggregator.add(CountAggregator.default, Some(1)), SumAggregator.add(SumAggregator.default, Some(numericInput)))
        assert(MeanAggregator.add(MeanAggregator.default, Some(numericInput)).equals(expectedForNumeric))
      }
    }
    describe("#merge()") {
      it("should return a tuple representing the result of calling merge() on a CountAggregator and a SumAggregator") {
        val left = (Math.round(100 * Math.random).toDouble, Math.random)
        val right = (Math.round(100 * Math.random).toDouble, Math.random)
        assert(MeanAggregator.merge(left, right).equals(
          (CountAggregator.merge(left._1, right._1), SumAggregator.merge(left._2, right._2))
        ))
      }
    }
    describe("#finish()") {
      it("should convert the intermediate value into a Double which represents the mean") {
        val intermediate = (Math.round(100 * Math.random).toDouble, Math.random)
        assert(MeanAggregator.finish(intermediate).isInstanceOf[Double])
        assert(MeanAggregator.finish(intermediate).equals(intermediate._2 / intermediate._1))
      }
    }
  }
}
| unchartedsoftware/salt | src/test/scala/software/uncharted/salt/core/analytic/numeric/MeanAggregatorSpec.scala | Scala | apache-2.0 | 2,780 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.internal.persistence.cluster
import akka.actor.ExtendedActorSystem
import akka.serialization.BaseSerializer
import akka.serialization.SerializerWithStringManifest
import com.lightbend.lagom.internal.persistence.cluster.ClusterStartupTaskActor.Execute
/** Akka serializer for the cluster-startup-task protocol (only the Execute message). */
private[lagom] class ClusterStartupTaskSerializer(val system: ExtendedActorSystem)
extends SerializerWithStringManifest
with BaseSerializer {
// Manifest string identifying the (payload-free) Execute message on the wire.
val ExecuteManifest = "E"
override def manifest(obj: AnyRef) = obj match {
case Execute => ExecuteManifest
case _ =>
throw new IllegalArgumentException(s"Can't serialize object of type ${obj.getClass} in [${getClass.getName}]")
}
// Execute carries no state, so its serialized form is an empty byte array.
override def toBinary(obj: AnyRef) = obj match {
case Execute => Array.emptyByteArray
case _ =>
throw new IllegalArgumentException(s"Can't serialize object of type ${obj.getClass} in [${getClass.getName}]")
}
override def fromBinary(bytes: Array[Byte], manifest: String) = manifest match {
case `ExecuteManifest` => Execute
case _ =>
throw new IllegalArgumentException(
s"Unimplemented deserialization of message with manifest [$manifest] in [${getClass.getName}]"
)
}
}
| rcavalcanti/lagom | persistence/core/src/main/scala/com/lightbend/lagom/internal/persistence/cluster/ClusterStartupTaskSerializer.scala | Scala | apache-2.0 | 1,284 |
/** A binary tree with values stored only at the leaves. */
sealed trait Tree[+A]
/** A leaf holding a single value. */
case class Leaf[A](a: A) extends Tree[A]
/** An internal node with two subtrees. */
case class Node[A](l: Tree[A], r: Tree[A]) extends Tree[A]
// implicit def treeFunctor = ???
// Project: slogging
// Module:
// Description:
// Distributed under the MIT License (see included file LICENSE)
package slogging.config
import com.typesafe.config.{Config, ConfigFactory}
import slogging.{LogLevel, Logger, LoggerConfig}
/**
 * Logger factory configured from a Typesafe Config object; the default
 * configuration is loaded lazily on the first getLogger call.
 */
object LoggerFactory extends slogging.LoggerFactory {
  // @volatile so the unsynchronized fast path in getLogger reliably sees the
  // write performed inside the synchronized block (double-checked locking).
  @volatile
  private var initialized = false
  private var registeredProviders: Map[String,UnderlyingLoggerFactoryProvider] = Map(
    NullLoggerProvider.name -> NullLoggerProvider,
    PrintLoggerProvider.name -> PrintLoggerProvider
  )

  /** Returns the logger for `name`, loading the default configuration on first use. */
  override def getLogger(name: String): Logger = {
    if(!initialized) this.synchronized{
      if(!initialized)
        loadDefaultConfig()
    }
    slogging.LoggerFactory.getLogger(name)
  }

  /**
   * Applies `config`: selects the factory named by `slogging.factory` and the
   * level from `slogging.level`, and marks the factory as initialized so that
   * getLogger does not later overwrite this configuration with the defaults.
   */
  def loadConfig(config: Config): Unit = {
    val providerName = config.getString("slogging.factory")
    registeredProviders.get( providerName ) match {
      case Some(provider) => LoggerConfig.factory = provider.create(config)
      case None => throw new RuntimeException(s"No UnderlyingLoggerFactoryProvider registered for name '$providerName'")
    }
    val level = config.getString("slogging.level")
    LoggerConfig.level = level.toUpperCase.trim match {
      case "OFF" => LogLevel.OFF
      case "ERROR" => LogLevel.ERROR
      case "WARN" => LogLevel.WARN
      // Previously missing: "INFO" fell through to the error case.
      case "INFO" => LogLevel.INFO
      case "DEBUG" => LogLevel.DEBUG
      case "TRACE" => LogLevel.TRACE
      case x => throw new RuntimeException(s"Invalid value for slogging.level: $x")
    }
    // Previously never set, which made getLogger reload the default
    // configuration on every call, clobbering any explicit loadConfig.
    initialized = true
  }

  def loadDefaultConfig(): Config = {
    val config = ConfigFactory.load()
    loadConfig(config)
    config
  }

  /** Registers a provider under its name; rejects null and duplicate names. */
  def registerProvider(provider: UnderlyingLoggerFactoryProvider): Unit = this.synchronized{
    if( provider == null )
      throw new RuntimeException("invalid UnderlyingLoggerFactoryProvider: null")
    if( registeredProviders.contains(provider.name) )
      throw new RuntimeException(s"another UnderlyingLoggerFactoryProvider already registered for name '${provider.name}'")
    registeredProviders += (provider.name -> provider)
  }
}
| jokade/slogging | sloggingConfig/shared/src/main/scala/slogging/config/LoggerFactory.scala | Scala | mit | 2,022 |
package fpinscala.parallelism
import java.util.concurrent._
/** Purely functional parallelism: a `Par[A]` is a *description* of a parallel
  * computation that, given an `ExecutorService`, yields a `Future[A]`.
  * Composition happens before any thread is involved; `run` is the only
  * interpreter.
  */
object Par {
  type Par[A] = ExecutorService => Future[A]

  /** Interprets the description against the given executor. */
  def run[A](s: ExecutorService)(a: Par[A]): Future[A] = a(s)

  /** Lifts an already-computed value; never touches the executor. */
  def unit[A](a: A): Par[A] = (es: ExecutorService) => UnitFuture(a)

  /** Like `unit` but evaluates `a` lazily, on a separate logical thread. */
  def lazyUnit[A](a: => A): Par[A] = fork(unit(a))

  /** `Future` wrapper for a constant: always done, cannot be cancelled, and
    * ignores timeouts (the timed `get` just returns the value). */
  private case class UnitFuture[A](get: A) extends Future[A] {
    def isDone = true
    def get(timeout: Long, units: TimeUnit) = get
    def isCancelled = false
    def cancel(evenIfRunning: Boolean): Boolean = false
  }

  /** Combines two computations with `f`. `f` itself runs on the caller's
    * thread (wrap in `fork` for a separate thread), and this implementation
    * does NOT respect timeouts: it blocks on both futures in sequence. */
  def map2[A,B,C](a: Par[A], b: Par[B])(f: (A,B) => C): Par[C] =
    (es: ExecutorService) => {
      val af = a(es)
      val bf = b(es)
      UnitFuture(f(af.get, bf.get))
    }

  /** Marks a computation for evaluation on a separate thread. Caveat: the
    * outer Callable blocks waiting for the inner task, occupying two threads
    * and risking deadlock on fixed-size thread pools. */
  def fork[A](a: => Par[A]): Par[A] =
    es => es.submit(new Callable[A] {
      def call = a(es).get
    })

  /** Lifts a function so its application is evaluated asynchronously. */
  def asyncF[A,B](f: A => B): A => Par[B] =
    a => fork(unit(f(a)))

  /** Maps over a result, derived from `map2` with a dummy unit argument. */
  def map[A,B](pa: Par[A])(f: A => B): Par[B] =
    map2(pa, unit(()))((a,_) => f(a))

  /** Sorts the wrapped list as a derived operation of `map`. */
  def sortPar(parList: Par[List[Int]]) = map(parList)(_.sorted)

  /** Turns a list of computations into one computation of a list.
    * Non-tail recursive, so not stack-safe for very long lists. */
  def sequence[A](as: List[Par[A]]): Par[List[A]] = as match {
    case Nil => unit(Nil)
    case h :: t => map2(h, fork(sequence(t)))(_ :: _)
  }

  /** Filters in parallel: each element's predicate runs asynchronously. */
  def parFilter[A](l: List[A])(f: A => Boolean): Par[List[A]] = {
    val pars: List[Par[List[A]]] =
      l map(asyncF((a: A) => if (f(a)) List(a) else List() ))
    map(sequence(pars))(_.flatten)
  }

  /** Blocking equality of two computations' results on executor `e`. */
  def equal[A](e: ExecutorService)(p: Par[A], p2: Par[A]): Boolean =
    p(e).get == p2(e).get

  /** Defers construction of the computation until run time. */
  def delay[A](fa: => Par[A]): Par[A] =
    es => fa(es)

  /** Chooses one of two computations based on `cond`. */
  def choice[A](cond: Par[Boolean])(t: Par[A], f: Par[A]): Par[A] =
    es =>
      if (run(es)(cond).get) t(es) // Notice we are blocking on the result of `cond`.
      else f(es)

  /** Runs the n-th computation from `choices`; blocks on `n`. */
  def choiceN[A](n: Par[Int])(choices: List[Par[A]]): Par[A] = {
    es => {
      val index = n(es).get
      run(es)(choices(index))
    }
  }

  /** `choice` expressed via `choiceN` (true -> index 0, false -> index 1). */
  def choiceViaChoiceN[A](a: Par[Boolean])(ifTrue: Par[A], ifFalse: Par[A]): Par[A] =
    choiceN(map(a)(x => if (x) 0 else 1))(List(ifTrue, ifFalse))

  /** Selects the computation keyed by the result of `key`; blocks on `key`. */
  def choiceMap[K,V](key: Par[K])(choices: Map[K,Par[V]]): Par[V] = {
    es => {
      val k = key(es).get()
      run(es)(choices(k))
    }
  }

  /** Generalized choice (a.k.a. flatMap): picks the continuation from the
    * result of the first computation; blocks on `pa`. */
  def chooser[A,B](pa: Par[A])(choices: A => Par[B]): Par[B] = {
    es => {
      val a = pa(es).get
      run(es)(choices(a))
    }
  }

  // The List below is applied as the choice function: List[Par[A]] is a
  // Function1[Int, Par[A]] in the Scala standard library.
  def choiceViaChooser[A](cond: Par[Boolean])(t: Par[A], f: Par[A]): Par[A] =
    chooser(map(cond)(x => if(x) 0 else 1))(List(t, f))

  /** `choiceN` expressed via `chooser` (List used as Int => Par[A]). */
  def choiceNViaChooser[A](n: Par[Int])(choices: List[Par[A]]): Par[A] =
    chooser(n)(choices)

  /** Monadic bind; blocks on the first computation's result. */
  def flatMap[A,B](pa: Par[A])(f: A => Par[B]): Par[B] = {
    executorService => {
      val a = run(executorService)(pa).get
      run(executorService)(f(a))
    }
  }

  /** Flattens a nested computation; blocks on the outer one. */
  def join[A](ppa: Par[Par[A]]): Par[A] = {
    es => {
      val parA = ppa(es).get
      parA(es)
    }
  }

  /** `flatMap` derived from `join` and `map`. */
  def flatMapViaJoin[A,B](pa: Par[A])(f: A => Par[B]): Par[B] =
    join(map(pa)(f))

  /** `join` derived from `flatMap` with the identity continuation. */
  def joinViaFlatMap[A](ppa: Par[Par[A]]): Par[A] = {
    flatMap(ppa)(x => x)
  }

  /* Gives us infix syntax for `Par`. */
  implicit def toParOps[A](p: Par[A]): ParOps[A] = new ParOps(p)

  // Placeholder for future infix operations on Par values.
  class ParOps[A](p: Par[A]) {

  }
}
object Examples {
  import Par._

  /** Sums a sequence by recursively splitting it at the midpoint and adding
    * the partial sums. `IndexedSeq` provides an efficient `splitAt`. This
    * version is sequential; a parallel one would combine halves via `map2`. */
  def sum(ints: IndexedSeq[Int]): Int =
    if (ints.size <= 1)
      ints.headOption getOrElse 0 // empty => 0, singleton => its only element
    else {
      val (l,r) = ints.splitAt(ints.length/2) // divide at the midpoint
      sum(l) + sum(r) // conquer: combine the two partial sums
    }
} | fpinscala-muc/fpinscala-cko | exercises/src/main/scala/fpinscala/parallelism/Par.scala | Scala | mit | 5,416 |
package soymilky
import java.io.{FileNotFoundException, File}
import org.specs2.Specification
import Utterances._
import scala.io.Source
/** Specification for `Utterances.resolveTokens` (string templating with
  * `${token}` placeholders) and `Utterances.phrases` (line extraction). */
class UtterancesSpec extends Specification { def is = s2"""
 The resolveTokens method should
  resolve empty string $rt1
  resolve string with no tokens $rt2
  not be confused by half a token $rt3
  not be confused by a double dollared token $rt4
  resolve as failure any attempt to use a double opened token $rt5
  not be confused by a double closed token $rt6
  resolve a token $rt7
  resolve multiple tokens $rt8
  resolve multiple instances of the same token $rt9
  resolve a token at the start $rt10
  resolve a token at the end $rt11
  resolve as failure when token is not found $rt12
 The phrases method should
  return lines from source in an array, eventually $p1
  ignore empty lines $p2
  trim lines $p3
  """

  // resolveTokens: substitution, escaping and failure cases.
  def rt1 = resolveTokens("")(Map.empty) must beSuccessfulTry("")
  def rt2 = resolveTokens("there are no tokens")(Map.empty) must beSuccessfulTry("there are no tokens")
  def rt3 = resolveTokens("there ${are no tokens")(Map.empty) must beSuccessfulTry("there ${are no tokens")
  def rt4 = resolveTokens("there is $${this} token")(Map("this" -> "one")) must beSuccessfulTry("there is $one token")
  def rt5 = resolveTokens("there is ${{this} token")(Map("{this" -> "one")) must beFailedTry
  def rt6 = resolveTokens("there is ${this}} token")(Map("this" -> "one")) must beSuccessfulTry("there is one} token")
  def rt7 = resolveTokens("there is ${this} token")(Map("this" -> "one")) must beSuccessfulTry("there is one token")
  def rt8 = resolveTokens("see ${this} and ${that} tokens")(Map("this" -> "one", "that" -> "two")) must beSuccessfulTry("see one and two tokens")
  def rt9 = resolveTokens("see ${this}, ${this} and ${this}")(Map("this" -> "that")) must beSuccessfulTry("see that, that and that")
  def rt10 = resolveTokens("${this} is the start")(Map("this" -> "that")) must beSuccessfulTry("that is the start")
  def rt11 = resolveTokens("this is the ${this}")(Map("this" -> "end")) must beSuccessfulTry("this is the end")
  def rt12 = resolveTokens("what is ${this}?")(Map.empty) must beFailedTry

  // phrases: asynchronous line extraction from a Source (hence `.await`).
  def p1 = phrases(Source.fromString("one\\ntwo\\nthree")) must beEqualTo(Array("one", "two", "three")).await
  def p2 = phrases(Source.fromString("one\\n\\n \\n\\t\\ntwhree")) must beEqualTo(Array("one", "twhree")).await
  def p3 = phrases(Source.fromString(" one \\n\\ttw0\\t\\nthr ee")) must beEqualTo(Array("one", "tw0", "thr ee")).await
} | Synesso/soymilky | src/test/scala/soymilky/UtterancesSpec.scala | Scala | apache-2.0 | 2,540 |
package ru.maizy.cheesecake.server
import java.time.{ Instant, ZoneId, ZonedDateTime }
import java.util.Properties
/**
* Copyright (c) Nikita Kovaliov, maizy.ru, 2016
* See LICENSE.txt for details.
*/
/**
 * Build metadata read from the `buildinfo.properties` classpath resource.
 * Every accessor falls back to a safe default when the resource or the
 * individual key is absent.
 */
case object BuildInfo {

  // Properties loaded once from the classpath; stays empty if the resource
  // is missing (Option(...) maps a null stream to None).
  private val props: Properties = {
    val loaded = new Properties
    Option(getClass.getClassLoader.getResourceAsStream("buildinfo.properties"))
      .foreach(loaded.load)
    loaded
  }

  // Looks up a build property, treating a missing key as None.
  private def buildProperty(key: String): Option[String] =
    Option(props.getProperty(key))

  /** Project version, or "0.0.0" when not recorded by the build. */
  def version: String = buildProperty("version").getOrElse("0.0.0")

  /** Project name, or "unknown" when not recorded. */
  def projectName: String = buildProperty("name").getOrElse("unknown")

  /** Organization, or "unknown" when not recorded. */
  def organization: String = buildProperty("organization").getOrElse("unknown")

  /** Build timestamp in UTC; falls back to the current time if not recorded. */
  def buildTime: ZonedDateTime = {
    val utc = ZoneId.of("UTC")
    buildProperty("buildTime") match {
      case Some(millis) => ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis.toLong), utc)
      case None         => ZonedDateTime.now().withZoneSameInstant(utc)
    }
  }

  /** Version of a bundled frontend library, looked up under `frontend.<lib>`. */
  def getFrontendLibVersion(lib: String): Option[String] =
    Option(props.getProperty(s"frontend.$lib"))
}
| maizy/cheesecake | server/src/main/scala/ru/maizy/cheesecake/server/BuildInfo.scala | Scala | apache-2.0 | 1,321 |
package com.novocode.squery.simple
import java.sql.{PreparedStatement, ResultSet, SQLException}
import com.novocode.squery.session._
/**
 * Base trait for all queries, using result type T and parameter type P.
 */
trait Query[+T,-P] {
  /** Executes the query and returns a single result. */
  def apply(param: P)(implicit session: Session): T
  /** Executes the query and returns a lazy iterator; callers must close it. */
  def elements(param: P)(implicit session: Session): CloseableIterator[T]
  /** Executes the query, materializes all results, and closes the iterator. */
  def list(param: P)(implicit session: Session): List[T] = {
    val it = elements(param)
    try { List.fromIterator(it) } finally { it.close }
  }
  /** Applies f to every result, always closing the iterator afterwards. */
  def foreach(param: P, f: T => Unit)(implicit session: Session): Unit = {
    val it = elements(param)
    try { it.foreach(f) } finally { it.close }
  }
}
/** Convenience overloads for queries that take no parameter (P = Unit). */
trait NoArgsQueryMixin[+T] extends Query[T,Unit] {
  final def apply()(implicit session: Session): T = apply(())
  final def list()(implicit session: Session): List[T] = list(())
  final def elements()(implicit session: Session): CloseableIterator[T] = elements(())
  final def foreach(f: T => Unit)(implicit session: Session): Unit = foreach((), f)
}
/** Query mixin for update statements; results are JDBC update counts. */
trait UpdateQueryMixin[-P] extends Query[Int,P] {
  /** Binds the parameter onto the prepared statement. */
  protected def setParam(param: P, st: PreparedStatement)
  /** The SQL text used to prepare the statement. */
  protected def queryString: String
  /** Executes the update and returns the affected row count. */
  override def apply(param: P)(implicit session: Session): Int = {
    val st = session.allocPS(queryString)
    setParam(param, st)
    try { st.executeUpdate() } finally session.freePS(queryString, st)
  }
  /** Executes the statement and iterates over the update counts of all
    * returned results; the prepared statement is released when the iterator
    * is exhausted or explicitly closed. */
  override def elements(param: P)(implicit session: Session): CloseableIterator[Int] = {
    val st = session.allocPS(queryString)
    setParam(param, st)
    var doClose = true
    try {
      var hasRs = st.execute()
      var count = if(hasRs) 0 else st.getUpdateCount
      var first = true
      doClose = false // ownership of the statement passes to the iterator below
      new ReadAheadIterator[Int] with CloseableIterator[Int] {
        def close() = session.freePS(queryString, st)
        protected def fetchNext() = {
          if(first) first = false;
          else {
            hasRs = st.getMoreResults
            count = if(hasRs) 0 else st.getUpdateCount
          }
          // JDBC signals "no more results" with an update count of -1.
          if(count != -1) Some(count)
          else { close(); None }
        }
      }
    } finally if(doClose) session.freePS(queryString, st)
  }
}
/** Query mixin for SELECT-style statements whose rows are converted to T. */
trait QueryQueryMixin[+T,-P] extends Query[T,P] {
  /** Binds the parameter onto the prepared statement. */
  protected def setParam(param: P, st: PreparedStatement)
  /** The SQL text used to prepare the statement. */
  protected def queryString: String
  /** Converts the current row of the result set into a T. */
  protected def convertResult(rs: ResultSet): T
  /** Executes the query and converts the first row.
    * NOTE(review): the result of `rs.next` is not checked, so an empty result
    * set hands a before-first cursor to `convertResult` — confirm intent. */
  override def apply(param: P)(implicit session: Session): T = {
    val st = session.allocPS(queryString)
    setParam(param, st)
    try {
      val rs = st.executeQuery()
      rs.next
      convertResult(rs)
    } finally session.freePS(queryString, st)
  }
  /** Materializes all rows in result-set order.
    * Bug fix: rows are accumulated by prepending, which returned the list in
    * REVERSE order; reversing before returning makes the order consistent
    * with `elements`, `foreach` and the default `Query.list`. */
  override def list(param: P)(implicit session: Session): List[T] = {
    val st = session.allocPS(queryString)
    setParam(param, st)
    try {
      val rs = st.executeQuery()
      var xs: List[T] = Nil
      while(rs.next) xs = convertResult(rs) :: xs
      xs.reverse
    } finally session.freePS(queryString, st)
  }
  /** Applies f to each row, releasing the statement afterwards. */
  override def foreach(param: P, f: T => Unit)(implicit session: Session): Unit = {
    val st = session.allocPS(queryString)
    setParam(param, st)
    try {
      val rs = st.executeQuery()
      while(rs.next) f(convertResult(rs))
    } finally session.freePS(queryString, st)
  }
  /** Lazily iterates rows; the prepared statement is released when the
    * iterator is exhausted or explicitly closed. */
  override def elements(param: P)(implicit session: Session): CloseableIterator[T] = {
    val st = session.allocPS(queryString)
    setParam(param, st)
    var doClose = true
    try {
      val rs = st.executeQuery()
      doClose = false // ownership of the statement passes to the iterator below
      new ReadAheadIterator[T] with CloseableIterator[T] {
        def close() = session.freePS(queryString, st)
        protected def fetchNext() = {
          if(rs.next) Some(convertResult(rs))
          else { close(); None }
        }
      }
    } finally if(doClose) session.freePS(queryString, st)
  }
}
/** A query whose parameter is fixed up front, exposing the no-args API. */
class ParameterizedQuery[+T,-P](query: Query[T,P], fixedParam: P) extends NoArgsQueryMixin[T] {
  override def apply(param: Unit)(implicit session: Session): T = query.apply(fixedParam)
  override def elements(param: Unit)(implicit session: Session): CloseableIterator[T] = query.elements(fixedParam)
  override def list(param: Unit)(implicit session: Session): List[T] = query.list(fixedParam)
  override def foreach(param: Unit, f: T => Unit)(implicit session: Session): Unit = query.foreach(fixedParam, f)
}
| gnufied/squery | src/com/novocode/squery/simple/Query.scala | Scala | bsd-2-clause | 4,296 |
package com.stomp
import java.io.{InputStreamReader, BufferedReader, OutputStream, PrintWriter}
import java.net.Socket
import com.stomp.frames.{Message, Connected}
import com.stomp.message.{Connect, STOMPMessage}
/**
* Created by vmp on 8/8/14.
*/
/**
 * Minimal blocking STOMP client over a single TCP connection, opened eagerly
 * at construction time. Frames are written as US-ASCII bytes.
 */
class STOMPClient(serverHost : String = "127.0.0.1", serverPort : Integer = 61613) {

  // Persistent connection to the broker.
  val STOMPServer = new Socket(serverHost, serverPort)

  /** Sends a CONNECT frame to initiate the STOMP session. */
  def connect() = {
    val client = STOMPServer.getOutputStream()
    val connectMessage = Connect()
    client.write(connectMessage.build.getBytes("US-ASCII"))
    client.flush() // ensure the frame is not held back by any buffering layer
  }

  /** Serializes and sends an arbitrary STOMP frame.
    * (Removed a dead local that built and discarded a CONNECT frame here.) */
  def send(message : STOMPMessage) = {
    val client = STOMPServer.getOutputStream
    client.write(message.build.getBytes("US-ASCII"))
    client.flush()
  }

  /** Reads the next frame's command line and maps it to a frame type.
    * NOTE(review): the match is non-exhaustive — any command other than
    * CONNECTED or MESSAGE raises a MatchError; confirm whether ERROR,
    * RECEIPT, etc. should be handled. */
  def receive = {
    val receiver = new BufferedReader(new InputStreamReader(STOMPServer.getInputStream, "US-ASCII"))
    val frame = receiver.readLine()
    frame match {
      case "CONNECTED" => Connected
      case "MESSAGE" => Message
    }
  }
}
| vitormp/stomps | src/main/scala/com/stomp/STOMPClient.scala | Scala | lgpl-3.0 | 999 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.sources
import org.apache.flink.api.scala.DataSet
import org.apache.flink.streaming.api.scala.DataStream
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.table.api.TableSchema
import org.apache.flink.table.util.TableConnectorUtil
/**
  * Defines an external table with the schema that is provided by [[TableSource#getTableSchema]].
  *
  * The data of a [[TableSource]] is produced as a [[DataSet]] in case of a [[BatchTableSource]] or
  * as a [[DataStream]] in case of a [[StreamTableSource]].
  * The type of the produced [[DataSet]] or [[DataStream]] is specified by the
  * [[TableSource#getReturnType]] method.
  *
  * By default, the fields of the [[TableSchema]] are implicitly mapped by name to the fields of the
  * return type [[TypeInformation]]. An explicit mapping can be defined by implementing the
  * [[DefinedFieldMapping]] interface.
  *
  * @tparam T The return type of the [[TableSource]].
  */
trait TableSource[T] {

  /** Returns the [[TypeInformation]] for the return type of the [[TableSource]].
    * The fields of the return type are mapped to the table schema based on their name.
    *
    * @return The type of the returned [[DataSet]] or [[DataStream]].
    */
  def getReturnType: TypeInformation[T]

  /**
    * Returns the schema of the produced table.
    *
    * @return The [[TableSchema]] of the produced table.
    */
  def getTableSchema: TableSchema

  /**
    * Describes the table source.
    *
    * @return A String explaining the [[TableSource]].
    */
  def explainSource(): String =
    TableConnectorUtil.generateRuntimeName(getClass, getTableSchema.getFieldNames)
}
| mylog00/flink | flink-libraries/flink-table/src/main/scala/org/apache/flink/table/sources/TableSource.scala | Scala | apache-2.0 | 2,493 |
package mesosphere.marathon
package core.launcher.impl
import mesosphere.marathon.core.instance.{Instance, Reservation}
import mesosphere.util.state.FrameworkId
import org.apache.mesos.{Protos => MesosProtos}
/** Helpers for stamping Mesos resource reservations with Marathon metadata
  * (framework id and reservation id), and for reading that metadata back. */
object TaskLabels {
  private[this] final val FRAMEWORK_ID_LABEL = "marathon_framework_id"

  /**
    * For backwards compatibility reasons, this is still a field containing a taskId. Reservations and persistent
    * volumes from times before the introduction of instances are labeled with taskIds.
    * In case a resident instance is relaunched, Marathon will keep the instanceId but launch a task with a new taskId.
    * We can always derive the instanceId from the contained taskId.
    */
  private[this] final val TASK_ID_LABEL = "marathon_task_id"

  /**
    * Returns an instance id for which this reservation has been performed if the reservation was
    * labeled by this framework. Returns None if the framework id label is absent
    * or belongs to a different framework.
    */
  def instanceIdForResource(frameworkId: FrameworkId, resource: MesosProtos.Resource): Option[Instance.Id] = {
    val labels = ReservationLabels(resource)

    val maybeMatchingFrameworkId = labels.get(FRAMEWORK_ID_LABEL).filter(_ == frameworkId.id)
    def maybeInstanceId = labels.get(TASK_ID_LABEL).map(Reservation.Id(_).instanceId)

    maybeMatchingFrameworkId.flatMap(_ => maybeInstanceId)
  }

  /** Builds the label set to attach to a new reservation. */
  def labelsForTask(frameworkId: FrameworkId, reservationId: Reservation.Id): ReservationLabels = {
    ReservationLabels(
      Map(
        FRAMEWORK_ID_LABEL -> frameworkId.id,
        // This uses taskId.reservationId to match against the id that was originally used to create the reservation
        // We probably want to call it RESERVATION_ID_LABEL in the future. See MARATHON-8517.
        TASK_ID_LABEL -> reservationId.label
      )
    )
  }

  /** All label keys this object may write onto a reservation. */
  def labelKeysForReservations: Set[String] = Set(FRAMEWORK_ID_LABEL, TASK_ID_LABEL)
}
| mesosphere/marathon | src/main/scala/mesosphere/marathon/core/launcher/impl/TaskLabels.scala | Scala | apache-2.0 | 1,857 |
package com.datastax.spark.connector.writer
import com.datastax.driver.core._
import com.datastax.spark.connector.BatchSize
import com.datastax.spark.connector.util.PriorityHashMap
import com.google.common.collect.AbstractIterator
import scala.annotation.tailrec
import scala.collection.Iterator
/**
 * A grouping batch builder is an iterator which take an iterator of single data items and tries to group
 * those items into batches. For each data item, a batch key is computed with the provided function.
 * The items for which the batch key is the same, are grouped together into a batch.
 *
 * When the batch key for the consecutive data items is different, the items are added to separate
 * batches, and those batches are added to the queue. The queue length is limited, therefore when it is
 * full, the longest batch is removed and returned by the iterator.
 * A batch is removed from the queue also in the case when it reaches the batch size limit.
 *
 * The implementation is based on `PriorityHashMap`.
 *
 * @param boundStatementBuilder binds each data item into a bound statement
 * @param batchStatementBuilder a configured batch statement builder
 * @param batchKeyGenerator a key generator for batches - statements with the same key generated by
 *                          this function are grouped together into batches
 * @param batchSize maximum batch size
 * @param maxBatches maximum number of batches which can remain in the buffer
 * @param data data iterator
 * @tparam T data type
 */
private[connector] class GroupingBatchBuilder[T](
    boundStatementBuilder: BoundStatementBuilder[T],
    batchStatementBuilder: BatchStatementBuilder,
    batchKeyGenerator: BoundStatement => Any,
    batchSize: BatchSize,
    maxBatches: Int,
    data: Iterator[T]) extends Iterator[RichStatement] {

  require(maxBatches > 0, "The maximum number of batches must be greater than 0")

  // Open batches keyed by batch key; dequeue() yields the highest-priority
  // (longest) batch when the buffer has to be drained.
  private[this] val batchMap = new PriorityHashMap[Any, Batch](maxBatches)

  /** The method processes the given statement - it adds it to the existing batch or to the new one.
    * If adding the statement would not fit into an existing batch or the new batch would not fit into
    * the buffer, the batch statement is created from the batch and it is returned and the given
    * bound statement is added to a fresh batch. */
  private def processStatement(batchKey: Any, boundStatement: RichBoundStatement): Option[RichStatement] = {
    batchMap.get(batchKey) match {
      case Some(batch) =>
        updateBatchInMap(batchKey, batch, boundStatement)
      case None =>
        addBatchToMap(batchKey, boundStatement)
    }
  }

  /** Adds the given statement to the batch if possible; If there is no enough capacity in the batch,
    * a batch statement is created and returned; the batch is cleaned and the given statement is added
    * to it. */
  private def updateBatchInMap(batchKey: Any, batch: Batch, newStatement: RichBoundStatement): Option[RichStatement] = {
    if (batch.add(newStatement, force = false)) {
      // Re-put so the priority map reflects the batch's new size.
      batchMap.put(batchKey, batch)
      None
    } else {
      Some(replaceBatch(batch, newStatement, batchKey))
    }
  }

  /** Adds a new batch to the buffer and adds the given statement to it. Returns a statement which had
    * to be dequeued. */
  private def addBatchToMap(batchKey: Any, newStatement: RichBoundStatement): Option[RichStatement] = {
    if (batchMap.size == maxBatches) {
      // Buffer full: evict the longest batch to make room.
      Some(replaceBatch(batchMap.dequeue(), newStatement, batchKey))
    } else {
      val batch = Batch(batchSize)
      batch.add(newStatement, force = true)
      batchMap.put(batchKey, batch)
      None
    }
  }

  /** Creates a statement from the given batch and cleans the batch so that it can be reused. */
  @inline
  final private def createStmtAndReleaseBatch(batch: Batch): RichStatement = {
    val stmt = batchStatementBuilder.maybeCreateBatch(batch.statements)
    batch.clear()
    stmt
  }

  /** Creates a statement from the given batch; cleans the batch and adds a given statement to it;
    * updates the entry in the buffer. */
  @inline
  private def replaceBatch(batch: Batch, newStatement: RichBoundStatement, newBatchKey: Any): RichStatement = {
    val stmt = createStmtAndReleaseBatch(batch)
    batch.add(newStatement, force = true)
    batchMap.put(newBatchKey, batch)
    stmt
  }

  final override def hasNext: Boolean =
    data.hasNext || batchMap.nonEmpty

  // Consumes input until a batch has to be emitted; once the input is
  // exhausted, drains the remaining buffered batches.
  @tailrec
  final override def next(): RichStatement = {
    if (data.hasNext) {
      val stmt = boundStatementBuilder.bind(data.next())
      val key = batchKeyGenerator(stmt)
      processStatement(key, stmt) match {
        case Some(batchStmt) => batchStmt
        case _ => next()
      }
    } else if (batchMap.nonEmpty) {
      createStmtAndReleaseBatch(batchMap.dequeue())
    } else {
      throw new NoSuchElementException("Called next() on empty iterator")
    }
  }
}
| Stratio/spark-cassandra-connector | spark-cassandra-connector/src/main/scala/com/datastax/spark/connector/writer/GroupingBatchBuilder.scala | Scala | apache-2.0 | 4,896 |
import akka.actor.{ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestActors, TestKit}
import hu.bme.mit.ire.messages.ChangeSet
import hu.bme.mit.ire.nodes.unary.MaxNode
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
/** Tests the MaxNode: grouping by column 0 and tracking the maximum of
  * column 1, emitting retractions when the current maximum is removed. */
class MaxNodeTest(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
  with WordSpecLike with Matchers with BeforeAndAfterAll {

  def this() = this(ActorSystem("MySpec"))

  // Shut down the actor system after all tests to avoid leaking threads.
  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  import hu.bme.mit.ire.util.TestUtil._

  "Max" must {
    "do simple max 0" in {
      val changeSet = ChangeSet(
        positive = Vector(
          tuple("a", 1),
          tuple("a", 2),
          tuple("a", 1.1),
          tuple("b", 3)
        )
      )
      val echoActor = system.actorOf(TestActors.echoActorProps)
      val max = system.actorOf(Props(new MaxNode(echoActor ! _,Vector(0), 1)))

      max ! changeSet
      // Per-key maxima: 2 for "a", 3 for "b".
      expectMsg(ChangeSet(positive = Vector(
        tuple("a", 2), tuple("b", 3)
      )))
      // Removing the current maximum of "a" must retract it and emit the
      // next-largest value (1.1).
      max ! ChangeSet(negative = Vector(tuple("a", 2)))
      expectMsg(ChangeSet(
        positive = Vector(tuple("a", 1.1)),
        negative = Vector(tuple("a", 2))
      ))
    }
  }
}
| FTSRG/ire | src/test/scala/MaxNodeTest.scala | Scala | epl-1.0 | 1,216 |
/*
* La Trobe University - Distributed Deep Learning System
* Copyright 2016 Matthias Langer (t3l@threelights.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package edu.latrobe.blaze.parameters
import edu.latrobe._
import edu.latrobe.blaze._
import scala.util.hashing._
/**
 * Factor for computing the cumulative moving average.
 *
 * y = 1 / (1 + n)
 */
final class CMAFactor(override val builder: CMAFactorBuilder,
                      override val name: String,
                      override val seed: InstanceSeed)
  extends IndependentParameter[CMAFactorBuilder] {

  /** Returns the CMA weight 1 / (1 + phaseNo) for the given phase. */
  override def get(phaseNo: Long)
  : Real = Real.one / (1L + phaseNo)

  /** Stateless parameter: feedback updates are intentionally ignored. */
  override def update(phaseNo: Long, value: Real)
  : Unit = {}

}
/** Builder for [[CMAFactor]]; carries no configuration of its own. */
final class CMAFactorBuilder
  extends IndependentParameterBuilder[CMAFactorBuilder] {

  override def repr
  : CMAFactorBuilder = this

  override def canEqual(that: Any)
  : Boolean = that.isInstanceOf[CMAFactorBuilder]

  override protected def doCopy()
  : CMAFactorBuilder = CMAFactorBuilder()

  /** Instantiates the parameter this builder describes. */
  override def build(name: String, seed: InstanceSeed)
  : CMAFactor = new CMAFactor(this, name, seed)

}

object CMAFactorBuilder {

  final def apply()
  : CMAFactorBuilder = new CMAFactorBuilder

}
| bashimao/ltudl | blaze/src/main/scala/edu/latrobe/blaze/parameters/CMAFactor.scala | Scala | apache-2.0 | 1,752 |
/*
* Copyright 2016 Frugal Mechanic (http://frugalmechanic.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fm.common
import org.scalajs.dom.raw.HTMLElement
// Marker type class witnessing that T is an HTMLElement subtype.
sealed trait HTMLElementType[T]

/**
 * Some type class hackery to solve the same problem as with the NodeType class
 */
object HTMLElementType {
  /** Instance for the base HTMLElement type. */
  implicit object DefaultHTMLElementType extends HTMLElementType[HTMLElement]

  // Reuses a single type-erased singleton for every subtype; the cast is
  // benign because the trait declares no members that depend on T.
  implicit def anyHTMLElementType[T <: HTMLElement]: HTMLElementType[T] = HTMLElementType.asInstanceOf[HTMLElementType[T]]

  // Shared, type-erased singleton backing anyHTMLElementType.
  private object HTMLElementType extends HTMLElementType[AnyRef]
} | frugalmechanic/fm-common | js/src/main/scala/fm/common/HTMLElementType.scala | Scala | apache-2.0 | 1,102 |
package colossus.service
import scala.concurrent.{ExecutionContext, Future}
import scala.language.higherKinds
/** Adapts service request execution to a result container M
  * (e.g. Callback or Future), via a minimal monad-like interface. */
trait ResponseAdapter[C <: CodecDSL, M[_]] {

  /** Executes the request and maps the raw output through f. */
  protected def executeAndMap[T](i : C#Input)(f : C#Output => M[T]) = flatMap(execute(i))(f)

  /** Sends the input and yields the output wrapped in M. */
  def execute(i : C#Input) : M[C#Output]

  // Operations that concrete adapters must supply for their container M.
  protected def map[T, U](t : M[T])(f : T => U) : M[U]
  protected def flatMap[T](t : M[C#Output])(f : C#Output => M[T]) : M[T]
  protected def success[T](t : T) : M[T]
  protected def failure[T](ex : Throwable) : M[T]

}
/** ResponseAdapter producing Callback-wrapped results from a service client. */
trait CallbackResponseAdapter[C <: CodecDSL] extends ResponseAdapter[C, Callback] {

  protected def client : ServiceClientLike[C#Input, C#Output]

  def execute(i : C#Input) : Callback[C#Output] = client.send(i)

  // Delegate the container operations to Callback's own combinators.
  override protected def map[T, U](t: Callback[T])(f: (T) => U): Callback[U] = t.map(f)
  override protected def flatMap[T](t: Callback[C#Output])(f: (C#Output) => Callback[T]): Callback[T] = t.flatMap(f)
  override protected def success[T](t: T): Callback[T] = Callback.successful(t)
  override protected def failure[T](ex: Throwable): Callback[T] = Callback.failed(ex)
}
/** ResponseAdapter producing Future-wrapped results from an async client. */
trait FutureResponseAdapter[C <: CodecDSL] extends ResponseAdapter[C, Future] {

  protected def client : AsyncServiceClient[C#Input, C#Output]

  def execute(i : C#Input) : Future[C#Output] = client.send(i)

  // Execution context used by the Future combinators below.
  implicit protected def executionContext : ExecutionContext

  override protected def map[T, U](t: Future[T])(f: (T) => U): Future[U] = t.map(f)
  override protected def flatMap[T](t: Future[C#Output])(f: (C#Output) => Future[T]): Future[T] = t.flatMap(f)
  override protected def success[T](t: T): Future[T] = Future.successful(t)
  override protected def failure[T](ex: Throwable): Future[T] = Future.failed(ex)
}
| sunstick/colossus | colossus/src/main/scala/colossus/service/ResponseAdapter.scala | Scala | apache-2.0 | 1,740 |
/*
* Copyright (C) 2011 Thomas Amland
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package commons.properties
import org.scalatest.matchers.ShouldMatchers
import org.scalatest._
/** Tests for Property factory methods: `bind` (bidirectional binding),
  * `observes` (unidirectional binding) and `observes` with a callback. */
object FactoryTest extends WordSpec with ShouldMatchers {

  "Factory method bind" should {
    "produce property with same value" in {
      val a = new Property(1)
      val b = Property.bind(a)
      b.value should be(1)
    }
    "produce property that is bound bidirectional" in {
      val a = new Property(1)
      val b = Property.bind(a)
      b.value should be(1)
      // Changes propagate in both directions.
      a() = 2
      a.value should be(2)
      b.value should be(2)
      b() = 3
      a.value should be(3)
      b.value should be(3)
    }
  }

  "Factory method observes(Observable)" should {
    "produce property with same value" in {
      // NOTE(review): this uses Property.bind although the spec describes
      // observes — looks like a copy-paste from the bind spec; confirm.
      val a = new Property(1)
      val b = Property.bind(a)
      b.value should be(1)
    }
    "produce property that is bound unidirectional" in {
      val a = new Property(1)
      val b = Property.observes(a)
      b.value should be(1)
      // Changes propagate only from a to b, not back.
      a() = 2
      a.value should be(2)
      b.value should be(2)
      b() = 3
      a.value should be(2)
      b.value should be(3)
    }
  }

  "Factory method observes(Observable, Function)" should {
    "produce property with same value" in {
      // NOTE(review): this also uses Property.bind rather than the
      // observes(Observable, Function) overload under test; confirm intent.
      val a = new Property(1)
      val b = Property.bind(a)
      b.value should be(1)
    }
    "produce property that is bound unidirectional with function" in {
      var called = false;
      var arg = -1
      // The slot records that it was invoked and with what argument.
      val slot = (x: Int) => { called = true; arg = x }
      val a = new Property(1)
      val b: Int = Property.observes(a, slot)
      a() = 2
      called should be (true)
      arg should be (2)
    }
  }

  def main(args: Array[String]) {
    FactoryTest.execute()
  }
} | tamland/scala-property-bindings | src/test/commons/properties/FactoryTest.scala | Scala | gpl-3.0 | 2,434 |
//
// Copyright 2016 Commonwealth Bank of Australia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package commbank.coppersmith.scalding
import org.joda.time.DateTime
import au.com.cba.omnia.maestro.api._, Maestro._
import commbank.coppersmith.{FeatureValue, Feature}, Feature._, Value._
import commbank.coppersmith.thrift.Eavt
object EavtText {

  /**
   * Sink partitioning scheme that buckets EAVT records by day, derived from
   * the record's time field formatted as "yyyy-MM-dd".
   */
  val eavtByDay = DerivedSinkPartition[Eavt, (String, String, String)](
    HivePartition.byDay(Fields[Eavt].Time, "yyyy-MM-dd")
  )

  /** Encodes a (feature value, feature time) pair as a hive-text EAVT record. */
  implicit object EavtEnc extends FeatureValueEnc[Eavt] {
    def encode(fvt: (FeatureValue[Value], FeatureTime)): Eavt = {
      val (fv, time) = fvt
      val day = new DateTime(time).toString("yyyy-MM-dd")
      Eavt(fv.entity, fv.name, renderValue(fv.value), day)
    }

    // Renders a feature value as text; a missing value becomes the hive null marker.
    private def renderValue(value: Value): String = {
      val text: Option[String] = value match {
        case Integral(v)      => v.map(_.toString)
        case Decimal(v)       => v.map(_.toString)
        case FloatingPoint(v) => v.map(_.toString)
        case Str(v)           => v
        case Bool(v)          => v.map(_.toString)
        case Date(v)          => v.map(_.toIso8601ExtendedFormatString)
        case Time(v)          => v.map(_.toRfc3339String)
      }
      text.getOrElse(HiveTextSink.NullValue)
    }
  }
}
// --- end of file: scalding/src/main/scala/commbank/coppersmith/scalding/EavtText.scala (repo CommBank/coppersmith, Scala, Apache-2.0, 1745 bytes) ---
package epic
import java.io.{FileNotFoundException, IOException, File}
import java.util.zip.{GZIPInputStream, ZipFile}
import scala.collection.JavaConverters._
import scala.util.{Success, Try}
/**
* TODO
*
* @author dlwh
**/
package object models {

  /** Deserializes `model`, searching from the current working directory. */
  def deserialize[T](model: String):T = deserialize[T](model, new File(System.getProperty("user.dir")))

  /**
   * Reads a serialized model out of a jar/zip archive.
   *
   * Picks the first entry whose name is exactly `model`, or (as a fallback)
   * any entry whose name ends with "model.ser.gz".
   *
   * @throws RuntimeException if no matching entry is found
   */
  def readFromJar[T](model: String, file: File): T = {
    val zip = new ZipFile(file)
    try {
      val obj = zip.entries().asScala.collectFirst {
        case e if e.getName == model || e.getName.endsWith("model.ser.gz") =>
          breeze.util.nonstupidObjectInputStream(new GZIPInputStream(zip.getInputStream(e))).readObject().asInstanceOf[T]
      }
      obj.getOrElse(throw new RuntimeException(s"Could not find model $model in jar $file"))
    } finally {
      // Fix: the ZipFile (and its underlying file handle plus any open entry
      // streams) was previously leaked on every call.
      zip.close()
    }
  }

  /**
   * Deserializes a model by checking first, if path is a file, tries to either read the object
   * file named model or (something like it) exists in the directory
   * path.
   * @param model name of the model to locate
   * @param path file or directory to search
   * @tparam T expected type of the deserialized model
   * @return the deserialized model
   * @throws FileNotFoundException if `path` does not exist
   * @throws RuntimeException if the model cannot be located under `path`
   */
  def deserialize[T](model: String, path: File):T = {
    if(!path.exists()) {
      throw new FileNotFoundException(path.toString)
    } else if(!path.isDirectory) {
      // A single file: try it as an archive first, then as a raw serialized object.
      try {
        readFromJar(model, path)
      } catch {
        case ex: Exception =>
          breeze.util.readObject[T](path)
      }
    } else {
      // exists, is a directory: look for a conventionally named model file.
      val modelFile = Seq(model, s"$model.ser.gz", s"$model.gz", s"$model.ser", s"$model.zip", s"$model.jar").map(new File(path, _)).find(_.exists)
      modelFile match {
        case Some(f) if f.isDirectory =>
          deserialize(model, f)
        case Some(f) =>
          try {
            breeze.util.readObject[T](f)
          } catch {
            case ex: IOException =>
              // Not a raw serialized object; treat it as an archive instead.
              try {
                readFromJar("", f)
              } catch {
                case ex: Exception =>
                  throw new RuntimeException(s"Could not find model $model in path $path", ex)
              }
          }
        case None =>
          // No conventionally named file: look inside jar/zip files, lazily,
          // returning the first archive that yields the model.
          path.listFiles().filter(f => f.getName.endsWith(".jar") || f.getName.endsWith(".zip")).iterator.map { f =>
            Try {
              readFromJar[T](model, f)
            }
          }.collectFirst { case Success(r) => r }.getOrElse {
            throw new RuntimeException(s"Could not find model $model in path $path")
          }
      }
    }
  }
}
// --- end of file: src/main/scala/epic/models/package.scala (repo maxim-rabinovich/epic, Scala, Apache-2.0, 2456 bytes) ---
package org.jetbrains.plugins.scala.failed.resolve
import org.intellij.lang.annotations.Language
import org.jetbrains.plugins.scala.annotator.element.ScAssignmentAnnotator
import org.jetbrains.plugins.scala.annotator.{AnnotatorHolderMock, Message}
import org.jetbrains.plugins.scala.base.SimpleTestCase
import org.jetbrains.plugins.scala.extensions.PsiElementExt
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScAssignment
/**
* @author mucianm
* @since 23.03.16.
*/
class OverrideSetterTest extends SimpleTestCase {

  // These are known failures: the `foo_=` setter is not yet resolved for the assignment.
  override protected def shouldPass: Boolean = false

  // SCL-6054: setter method not being referenced when assigning to a var.
  def testSCL6054(): Unit = {
    assertNothing(messages(
      """
        |trait Foo {
        |  class A
        |  class B
        |  var foo:A
        |  def foo_=(f: B)
        |  foo = new B
        |}
      """.stripMargin))
  }

  /** Parses `code`, annotates its first assignment, and returns the produced messages. */
  def messages(@Language(value = "Scala") code: String): List[Message] = {
    val parsedFile = code.parse
    val assignment = parsedFile.depthFirst().collectFirst { case a: ScAssignment => a }.get
    implicit val mock: AnnotatorHolderMock = new AnnotatorHolderMock(parsedFile)
    ScAssignmentAnnotator.annotate(assignment)
    mock.annotations
  }
}
// --- end of file: scala/scala-impl/test/org/jetbrains/plugins/scala/failed/resolve/OverrideSetterTest.scala (repo JetBrains/intellij-scala, Scala, Apache-2.0, 1275 bytes) ---
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.funsuite
import org.scalatest._
import SharedHelpers._
import events.TestFailed
import org.scalactic.exceptions.NullArgumentException
import org.scalatest.exceptions.DuplicateTestNameException
import org.scalatest.exceptions.TestFailedException
import org.scalatest.exceptions.TestRegistrationClosedException
import org.scalatest
import org.scalatest.funsuite
class FixtureFunSuiteSpec extends scalatest.funspec.AnyFunSpec /*with PrivateMethodTester*/ {
describe("A fixture.FunSuite") {
    it("should return the test names in order of registration from testNames") {
      // Registration order "that" then "this" must be preserved by testNames.
      val a = new funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = Succeeded
        test("that") { fixture =>
          /* ASSERTION_SUCCEED */
        }
        test("this") { fixture =>
          /* ASSERTION_SUCCEED */
        }
      }
      assertResult(List("that", "this")) {
        a.testNames.iterator.toList
      }
      // A suite with no registered tests reports an empty name list.
      val b = new funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = Succeeded
      }
      assertResult(List[String]()) {
        b.testNames.iterator.toList
      }
      // Reversed registration order must also be preserved.
      val c = new funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = Succeeded
        test("this") { fixture =>
          /* ASSERTION_SUCCEED */
        }
        test("that") { fixture =>
          /* ASSERTION_SUCCEED */
        }
      }
      assertResult(List("this", "that")) {
        c.testNames.iterator.toList
      }
    }
    it("should throw NotAllowedException if a duplicate test name registration is attempted") {
      // NOTE(review): the description mentions NotAllowedException, but every
      // intercept below expects DuplicateTestNameException — confirm which is intended.
      // test followed by test with the same name
      intercept[DuplicateTestNameException] {
        new funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = Succeeded
          test("test this") { fixture =>
            /* ASSERTION_SUCCEED */
          }
          test("test this") { fixture =>
            /* ASSERTION_SUCCEED */
          }
        }
      }
      // test followed by ignore with the same name
      intercept[DuplicateTestNameException] {
        new funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = Succeeded
          test("test this") { fixture =>
            /* ASSERTION_SUCCEED */
          }
          ignore("test this") { fixture =>
            /* ASSERTION_SUCCEED */
          }
        }
      }
      // ignore followed by ignore with the same name
      intercept[DuplicateTestNameException] {
        new funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = Succeeded
          ignore("test this") { fixture =>
            /* ASSERTION_SUCCEED */
          }
          ignore("test this") { fixture =>
            /* ASSERTION_SUCCEED */
          }
        }
      }
      // ignore followed by test with the same name
      intercept[DuplicateTestNameException] {
        new funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = Succeeded
          ignore("test this") { fixture =>
            /* ASSERTION_SUCCEED */
          }
          test("test this") { fixture =>
            /* ASSERTION_SUCCEED */
          }
        }
      }
    }
    it("should pass in the fixture to every test method") {
      // withFixture supplies `hello`; each test asserts it received exactly that value.
      val a = new funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        val hello = "Hello, world!"
        def withFixture(test: OneArgTest): Outcome = {
          test(hello)
        }
        test("this") { fixture =>
          assert(fixture === hello)
        }
        test("that") { fixture =>
          assert(fixture === hello)
        }
      }
      val rep = new EventRecordingReporter
      a.run(None, Args(rep))
      // Neither test may fail, i.e. no TestFailed event was fired.
      assert(!rep.eventsReceived.exists(_.isInstanceOf[TestFailed]))
    }
    it("should throw NullArgumentException if a null test tag is provided") {
      // test: null as the only tag, null in the middle, and null among several tags
      intercept[NullArgumentException] {
        new funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = Succeeded
          test("hi", null) { fixture => /* ASSERTION_SUCCEED */ }
        }
      }
      val caught = intercept[NullArgumentException] {
        new funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = Succeeded
          test("hi", mytags.SlowAsMolasses, null) { fixture => /* ASSERTION_SUCCEED */ }
        }
      }
      assert(caught.getMessage === "a test tag was null")
      intercept[NullArgumentException] {
        new funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = Succeeded
          test("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) { fixture => /* ASSERTION_SUCCEED */ }
        }
      }
      // ignore: same three null-tag positions
      intercept[NullArgumentException] {
        new funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = Succeeded
          ignore("hi", null) { fixture => /* ASSERTION_SUCCEED */ }
        }
      }
      val caught2 = intercept[NullArgumentException] {
        new funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = Succeeded
          ignore("hi", mytags.SlowAsMolasses, null) { fixture => /* ASSERTION_SUCCEED */ }
        }
      }
      assert(caught2.getMessage === "a test tag was null")
      intercept[NullArgumentException] {
        new funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = Succeeded
          ignore("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) { fixture => /* ASSERTION_SUCCEED */ }
        }
      }
      // registerTest: same three null-tag positions
      intercept[NullArgumentException] {
        new funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = Succeeded
          registerTest("hi", null) { fixture => /* ASSERTION_SUCCEED */ }
        }
      }
      val caught3 = intercept[NullArgumentException] {
        new funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = Succeeded
          registerTest("hi", mytags.SlowAsMolasses, null) { fixture => /* ASSERTION_SUCCEED */ }
        }
      }
      assert(caught3.getMessage === "a test tag was null")
      intercept[NullArgumentException] {
        new funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = Succeeded
          registerTest("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) { fixture => /* ASSERTION_SUCCEED */ }
        }
      }
      // registerIgnoredTest: same three null-tag positions
      intercept[NullArgumentException] {
        new funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = Succeeded
          registerIgnoredTest("hi", null) { fixture => /* ASSERTION_SUCCEED */ }
        }
      }
      val caught4 = intercept[NullArgumentException] {
        new funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = Succeeded
          registerIgnoredTest("hi", mytags.SlowAsMolasses, null) { fixture => /* ASSERTION_SUCCEED */ }
        }
      }
      assert(caught4.getMessage === "a test tag was null")
      intercept[NullArgumentException] {
        new funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = Succeeded
          registerIgnoredTest("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) { fixture => /* ASSERTION_SUCCEED */ }
        }
      }
    }
    // Shared fixture suite for the run-with-testName tests below: the two flags
    // record which of the registered tests actually executed.
    class TestWasCalledSuite extends funsuite.FixtureAnyFunSuite {
      type FixtureParam = String
      def withFixture(test: OneArgTest): Outcome = { test("hi") }
      var theTestThisCalled = false
      var theTestThatCalled = false
      test("this") { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
      test("that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
    }
it("should execute all tests when run is called with testName None") {
val b = new TestWasCalledSuite
b.run(None, Args(SilentReporter))
assert(b.theTestThisCalled)
assert(b.theTestThatCalled)
}
it("should execute one test when run is called with a defined testName") {
val a = new TestWasCalledSuite
a.run(Some("this"), Args(SilentReporter))
assert(a.theTestThisCalled)
assert(!a.theTestThatCalled)
}
    it("should report as ignored, and not run, tests marked ignored") {
      // No ignored tests: both run, no TestIgnored event.
      class SuiteA extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        var theTestThisCalled = false
        var theTestThatCalled = false
        test("test this") { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
        test("test that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
      }
      val a = new SuiteA
      // Needed because the flags are read through the anonymous/local suites' structural type.
      import scala.language.reflectiveCalls
      val repA = new TestIgnoredTrackingReporter
      a.run(None, Args(repA))
      assert(!repA.testIgnoredReceived)
      assert(a.theTestThisCalled)
      assert(a.theTestThatCalled)
      // First test ignored: TestIgnored fired for it, only the second runs.
      class SuiteB extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        var theTestThisCalled = false
        var theTestThatCalled = false
        ignore("test this") { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
        test("test that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
      }
      val b = new SuiteB
      val repB = new TestIgnoredTrackingReporter
      b.run(None, Args(repB))
      assert(repB.testIgnoredReceived)
      assert(repB.lastEvent.isDefined)
      assert(repB.lastEvent.get.testName endsWith "test this")
      assert(!b.theTestThisCalled)
      assert(b.theTestThatCalled)
      // Second test ignored: symmetric to SuiteB.
      class SuiteC extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        var theTestThisCalled = false
        var theTestThatCalled = false
        test("test this") { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
        ignore("test that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
      }
      val c = new SuiteC
      val repC = new TestIgnoredTrackingReporter
      c.run(None, Args(repC))
      assert(repC.testIgnoredReceived)
      assert(repC.lastEvent.isDefined)
      assert(repC.lastEvent.get.testName endsWith "test that", repC.lastEvent.get.testName)
      assert(c.theTestThisCalled)
      assert(!c.theTestThatCalled)
      // The order I want is order of appearance in the file.
      // Will try and implement that tomorrow. Subtypes will be able to change the order.
      // Both tests ignored: neither runs; the last TestIgnored is for "test that".
      class SuiteD extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        var theTestThisCalled = false
        var theTestThatCalled = false
        ignore("test this") { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
        ignore("test that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
      }
      val d = new SuiteD
      val repD = new TestIgnoredTrackingReporter
      d.run(None, Args(repD))
      assert(repD.testIgnoredReceived)
      assert(repD.lastEvent.isDefined)
      assert(repD.lastEvent.get.testName endsWith "test that") // last because should be in order of appearance
      assert(!d.theTestThisCalled)
      assert(!d.theTestThatCalled)
    }
    it("should ignore a test marked as ignored if run is invoked with that testName") {
      // Selecting an @Ignore'd test by name still reports it as ignored and does
      // not run it (per the assertions below).
      // NOTE(review): the original comment claimed the opposite ("...and actually
      // invoke it") — it appears copied from a different suite's spec; the
      // assertions are treated as the source of truth here.
      class SuiteE extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        var theTestThisCalled = false
        var theTestThatCalled = false
        ignore("test this") { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
        test("test that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
      }
      val e = new SuiteE
      // Flags are read through the local suite's structural type.
      import scala.language.reflectiveCalls
      val repE = new TestIgnoredTrackingReporter
      e.run(Some("test this"), Args(repE))
      assert(repE.testIgnoredReceived)
      assert(!e.theTestThisCalled)
      assert(!e.theTestThatCalled)
    }
    it("should run only those tests selected by the tags to include and exclude sets") {
      // Nothing is excluded: both tests run.
      class SuiteA extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        var theTestThisCalled = false
        var theTestThatCalled = false
        test("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
        test("test that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
      }
      val a = new SuiteA
      // Needed because the flags are read through the local suites' structural type.
      import scala.language.reflectiveCalls
      val repA = new TestIgnoredTrackingReporter
      a.run(None, Args(repA))
      assert(!repA.testIgnoredReceived)
      assert(a.theTestThisCalled)
      assert(a.theTestThatCalled)
      // SlowAsMolasses is included, one test should be excluded
      class SuiteB extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        var theTestThisCalled = false
        var theTestThatCalled = false
        test("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
        test("test that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
      }
      val b = new SuiteB
      val repB = new TestIgnoredTrackingReporter
      b.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
      assert(!repB.testIgnoredReceived)
      assert(b.theTestThisCalled)
      assert(!b.theTestThatCalled)
// SlowAsMolasses is included, and both tests should be included
class SuiteC extends funsuite.FixtureAnyFunSuite {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
test("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
test("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
val c = new SuiteC
val repC = new TestIgnoredTrackingReporter
c.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repC.testIgnoredReceived)
assert(c.theTestThisCalled)
assert(c.theTestThatCalled)
      // SlowAsMolasses is included. both tests should be included but one ignored
      class SuiteD extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        var theTestThisCalled = false
        var theTestThatCalled = false
        ignore("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
        test("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
      }
      val d = new SuiteD
      val repD = new TestIgnoredTrackingReporter
      d.run(None, Args(repD, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
      assert(repD.testIgnoredReceived)
      assert(!d.theTestThisCalled)
      assert(d.theTestThatCalled)
      // SlowAsMolasses included, FastAsLight excluded: only "test that" runs.
      class SuiteE extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        var theTestThisCalled = false
        var theTestThatCalled = false
        var theTestTheOtherCalled = false
        test("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
        test("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
        test("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
      }
      val e = new SuiteE
      val repE = new TestIgnoredTrackingReporter
      e.run(None, Args(repE, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
        ConfigMap.empty, None, new Tracker, Set.empty))
      assert(!repE.testIgnoredReceived)
      assert(!e.theTestThisCalled)
      assert(e.theTestThatCalled)
      assert(!e.theTestTheOtherCalled)
      // An Ignored test that was both included and excluded should not generate a TestIgnored event
      class SuiteF extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        var theTestThisCalled = false
        var theTestThatCalled = false
        var theTestTheOtherCalled = false
        ignore("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
        test("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
        test("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
      }
      val f = new SuiteF
      val repF = new TestIgnoredTrackingReporter
      f.run(None, Args(repF, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
        ConfigMap.empty, None, new Tracker, Set.empty))
      assert(!repF.testIgnoredReceived)
      assert(!f.theTestThisCalled)
      assert(f.theTestThatCalled)
      assert(!f.theTestTheOtherCalled)
      // An Ignored test that was not included should not generate a TestIgnored event
      class SuiteG extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        var theTestThisCalled = false
        var theTestThatCalled = false
        var theTestTheOtherCalled = false
        test("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
        test("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
        ignore("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
      }
      val g = new SuiteG
      val repG = new TestIgnoredTrackingReporter
      g.run(None, Args(repG, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
        ConfigMap.empty, None, new Tracker, Set.empty))
      assert(!repG.testIgnoredReceived)
      assert(!g.theTestThisCalled)
      assert(g.theTestThatCalled)
      assert(!g.theTestTheOtherCalled)
      // No tagsToInclude set, FastAsLight excluded: everything but "test this" runs.
      class SuiteH extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        var theTestThisCalled = false
        var theTestThatCalled = false
        var theTestTheOtherCalled = false
        test("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
        test("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
        test("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
      }
      val h = new SuiteH
      val repH = new TestIgnoredTrackingReporter
      h.run(None, Args(repH, Stopper.default, Filter(None, Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty))
      assert(!repH.testIgnoredReceived)
      assert(!h.theTestThisCalled)
      assert(h.theTestThatCalled)
      assert(h.theTestTheOtherCalled)
      // No tagsToInclude set, SlowAsMolasses excluded: only the untagged test runs.
      class SuiteI extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        var theTestThisCalled = false
        var theTestThatCalled = false
        var theTestTheOtherCalled = false
        test("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
        test("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
        test("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
      }
      val i = new SuiteI
      val repI = new TestIgnoredTrackingReporter
      i.run(None, Args(repI, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
      assert(!repI.testIgnoredReceived)
      assert(!i.theTestThisCalled)
      assert(!i.theTestThatCalled)
      assert(i.theTestTheOtherCalled)
// No tagsToInclude set, SlowAsMolasses excluded, TestIgnored should not be received on excluded ones
class SuiteJ extends funsuite.FixtureAnyFunSuite {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
ignore("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
ignore("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
test("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val j = new SuiteJ
val repJ = new TestIgnoredTrackingReporter
j.run(None, Args(repJ, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repI.testIgnoredReceived)
assert(!j.theTestThisCalled)
assert(!j.theTestThatCalled)
assert(j.theTestTheOtherCalled)
      // Same as previous, except Ignore specifically mentioned in excludes set,
      // so the ignored tests DO fire TestIgnored events.
      class SuiteK extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        var theTestThisCalled = false
        var theTestThatCalled = false
        var theTestTheOtherCalled = false
        ignore("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
        ignore("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
        ignore("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
      }
      val k = new SuiteK
      val repK = new TestIgnoredTrackingReporter
      k.run(None, Args(repK, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses", "org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
      assert(repK.testIgnoredReceived)
      assert(!k.theTestThisCalled)
      assert(!k.theTestThatCalled)
      assert(!k.theTestTheOtherCalled)
    }
it("should run only those registered tests selected by the tags to include and exclude sets") {
// Nothing is excluded
class SuiteA extends funsuite.FixtureAnyFunSuite {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
registerTest("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
val a = new SuiteA
import scala.language.reflectiveCalls
val repA = new TestIgnoredTrackingReporter
a.run(None, Args(repA))
assert(!repA.testIgnoredReceived)
assert(a.theTestThisCalled)
assert(a.theTestThatCalled)
// SlowAsMolasses is included, one test should be excluded
class SuiteB extends funsuite.FixtureAnyFunSuite {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
registerTest("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that") { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
val b = new SuiteB
val repB = new TestIgnoredTrackingReporter
b.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repB.testIgnoredReceived)
assert(b.theTestThisCalled)
assert(!b.theTestThatCalled)
// SlowAsMolasses is included, and both tests should be included
class SuiteC extends funsuite.FixtureAnyFunSuite {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
registerTest("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
val c = new SuiteC
val repC = new TestIgnoredTrackingReporter
c.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repC.testIgnoredReceived)
assert(c.theTestThisCalled)
assert(c.theTestThatCalled)
// SlowAsMolasses is included. both tests should be included but one ignored
class SuiteD extends funsuite.FixtureAnyFunSuite {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
val d = new SuiteD
val repD = new TestIgnoredTrackingReporter
d.run(None, Args(repD, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repD.testIgnoredReceived)
assert(!d.theTestThisCalled)
assert(d.theTestThatCalled)
// SlowAsMolasses included, FastAsLight excluded
class SuiteE extends funsuite.FixtureAnyFunSuite {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val e = new SuiteE
val repE = new TestIgnoredTrackingReporter
e.run(None, Args(repE, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repE.testIgnoredReceived)
assert(!e.theTestThisCalled)
assert(e.theTestThatCalled)
assert(!e.theTestTheOtherCalled)
// An Ignored test that was both included and excluded should not generate a TestIgnored event
class SuiteF extends funsuite.FixtureAnyFunSuite {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val f = new SuiteF
val repF = new TestIgnoredTrackingReporter
f.run(None, Args(repF, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repF.testIgnoredReceived)
assert(!f.theTestThisCalled)
assert(f.theTestThatCalled)
assert(!f.theTestTheOtherCalled)
// An Ignored test that was not included should not generate a TestIgnored event
class SuiteG extends funsuite.FixtureAnyFunSuite {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerIgnoredTest("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val g = new SuiteG
val repG = new TestIgnoredTrackingReporter
g.run(None, Args(repG, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repG.testIgnoredReceived)
assert(!g.theTestThisCalled)
assert(g.theTestThatCalled)
assert(!g.theTestTheOtherCalled)
// No tagsToInclude set, FastAsLight excluded
class SuiteH extends funsuite.FixtureAnyFunSuite {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val h = new SuiteH
val repH = new TestIgnoredTrackingReporter
h.run(None, Args(repH, Stopper.default, Filter(None, Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repH.testIgnoredReceived)
assert(!h.theTestThisCalled)
assert(h.theTestThatCalled)
assert(h.theTestTheOtherCalled)
// No tagsToInclude set, SlowAsMolasses excluded
class SuiteI extends funsuite.FixtureAnyFunSuite {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val i = new SuiteI
val repI = new TestIgnoredTrackingReporter
i.run(None, Args(repI, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repI.testIgnoredReceived)
assert(!i.theTestThisCalled)
assert(!i.theTestThatCalled)
assert(i.theTestTheOtherCalled)
// No tagsToInclude set, SlowAsMolasses excluded, TestIgnored should not be received on excluded ones
class SuiteJ extends funsuite.FixtureAnyFunSuite {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerIgnoredTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val j = new SuiteJ
val repJ = new TestIgnoredTrackingReporter
j.run(None, Args(repJ, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repI.testIgnoredReceived)
assert(!j.theTestThisCalled)
assert(!j.theTestThatCalled)
assert(j.theTestTheOtherCalled)
// Same as previous, except Ignore specifically mentioned in excludes set
class SuiteK extends funsuite.FixtureAnyFunSuite {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerIgnoredTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerIgnoredTest("test the other") { fixture => theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val k = new SuiteK
val repK = new TestIgnoredTrackingReporter
k.run(None, Args(repK, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses", "org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repK.testIgnoredReceived)
assert(!k.theTestThisCalled)
assert(!k.theTestThatCalled)
assert(!k.theTestTheOtherCalled)
}
    // Verifies expectedTestCount accounting for tests registered via the test/ignore DSL:
    // ignored tests are excluded from the count, and tag-based Filters include/exclude as configured.
    it("should return the correct test count from its expectedTestCount method") {
      val a = new funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        test("test this") { fixture => /* ASSERTION_SUCCEED */ }
        test("test that") { fixture => /* ASSERTION_SUCCEED */ }
      }
      assert(a.expectedTestCount(Filter()) == 2) // empty filter: both tests counted
      val b = new funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        ignore("test this") { fixture => /* ASSERTION_SUCCEED */ }
        test("test that") { fixture => /* ASSERTION_SUCCEED */ }
      }
      assert(b.expectedTestCount(Filter()) == 1) // ignored test not counted
      val c = new funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        test("test this", mytags.FastAsLight) { fixture => /* ASSERTION_SUCCEED */ }
        test("test that") { fixture => /* ASSERTION_SUCCEED */ }
      }
      assert(c.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1) // only the tagged test included
      assert(c.expectedTestCount(Filter(None, Set("org.scalatest.FastAsLight"))) == 1) // tagged test excluded
      val d = new funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        test("test this", mytags.FastAsLight, mytags.SlowAsMolasses) { fixture => /* ASSERTION_SUCCEED */ }
        test("test that", mytags.SlowAsMolasses) { fixture => /* ASSERTION_SUCCEED */ }
        test("test the other thing") { fixture => /* ASSERTION_SUCCEED */ }
      }
      assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1)
      assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) == 1)
      assert(d.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) == 1)
      assert(d.expectedTestCount(Filter()) == 3)
      val e = new funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        test("test this", mytags.FastAsLight, mytags.SlowAsMolasses) { fixture => /* ASSERTION_SUCCEED */ }
        test("test that", mytags.SlowAsMolasses) { fixture => /* ASSERTION_SUCCEED */ }
        ignore("test the other thing") { fixture => /* ASSERTION_SUCCEED */ }
      }
      assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1)
      assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) == 1)
      assert(e.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) == 0)
      assert(e.expectedTestCount(Filter()) == 2)
      val f = Suites(a, b, c, d, e)
      assert(f.expectedTestCount(Filter()) === 10) // 2 + 1 + 2 + 3 + 2
    }
    // Same accounting as above, but for tests registered through the
    // registerTest/registerIgnoredTest programmatic API instead of the DSL.
    it("should return the correct test count from its expectedTestCount method when uses registerTest and registerIgnoredTest to register tests") {
      val a = new funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        registerTest("test this") { fixture => /* ASSERTION_SUCCEED */ }
        registerTest("test that") { fixture => /* ASSERTION_SUCCEED */ }
      }
      assert(a.expectedTestCount(Filter()) == 2)
      val b = new funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        registerIgnoredTest("test this") { fixture => /* ASSERTION_SUCCEED */ }
        registerTest("test that") { fixture => /* ASSERTION_SUCCEED */ }
      }
      assert(b.expectedTestCount(Filter()) == 1)
      val c = new funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        registerTest("test this", mytags.FastAsLight) { fixture => /* ASSERTION_SUCCEED */ }
        registerTest("test that") { fixture => /* ASSERTION_SUCCEED */ }
      }
      assert(c.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1)
      assert(c.expectedTestCount(Filter(None, Set("org.scalatest.FastAsLight"))) == 1)
      val d = new funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        registerTest("test this", mytags.FastAsLight, mytags.SlowAsMolasses) { fixture => /* ASSERTION_SUCCEED */ }
        registerTest("test that", mytags.SlowAsMolasses) { fixture => /* ASSERTION_SUCCEED */ }
        registerTest("test the other thing") { fixture => /* ASSERTION_SUCCEED */ }
      }
      assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1)
      assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) == 1)
      assert(d.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) == 1)
      assert(d.expectedTestCount(Filter()) == 3)
      val e = new funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        registerTest("test this", mytags.FastAsLight, mytags.SlowAsMolasses) { fixture => /* ASSERTION_SUCCEED */ }
        registerTest("test that", mytags.SlowAsMolasses) { fixture => /* ASSERTION_SUCCEED */ }
        registerIgnoredTest("test the other thing") { fixture => /* ASSERTION_SUCCEED */ }
      }
      assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1)
      assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) == 1)
      assert(e.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) == 0)
      assert(e.expectedTestCount(Filter()) === 2)
      val f = Suites(a, b, c, d, e)
      assert(f.expectedTestCount(Filter()) == 10)
    }
    // A test whose body evaluates to `pending` should be reported as TestPending,
    // not TestSucceeded or TestFailed.
    it("should generate a TestPending message when the test body is (pending)") {
      val a = new funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        val hello = "Hello, world!"
        def withFixture(test: OneArgTest): Outcome = {
          test(hello)
        }
        test("should do this") (pending)
        test("should do that") { fixture =>
          assert(fixture === hello)
        }
        test("should do something else") { fixture =>
          assert(fixture === hello)
          pending
        }
      }
      val rep = new EventRecordingReporter
      a.run(None, Args(rep))
      val tp = rep.testPendingEventsReceived
      assert(tp.size === 2) // "should do this" and "should do something else" are pending
    }
    // Tests written with a `() =>` body (no fixture) and tests with a `fixture =>` body
    // may coexist in the same suite; both kinds must actually run.
    it("should allow tests without fixtures to be combined with tests with fixtures") {
      class SuiteA extends funsuite.FixtureAnyFunSuite {
        var theTestWithFixtureWasRun = false
        var theTestWithoutFixtureWasRun = false
        type FixtureParam = String
        val hello = "Hello, world!"
        def withFixture(test: OneArgTest): Outcome = {
          test(hello)
        }
        test("should do this") (pending)
        test("should do that") { fixture =>
          assert(fixture === hello)
          theTestWithFixtureWasRun = true
          /* ASSERTION_SUCCEED */
        }
        test("should do something else") { fixture =>
          assert(fixture === hello)
          pending
        }
        test("should do that without a fixture") { () =>
          assert(2 + 2 === 4)
          theTestWithoutFixtureWasRun = true
          /* ASSERTION_SUCCEED */
        }
      }
      val a = new SuiteA
      import scala.language.reflectiveCalls
      val rep = new EventRecordingReporter
      a.run(None, Args(rep))
      val tp = rep.testPendingEventsReceived
      assert(tp.size === 2)
      assert(a.theTestWithFixtureWasRun)
      assert(a.theTestWithoutFixtureWasRun)
    }
    // Non-fatal throwables (AssertionError, plain Error, Throwable) thrown from a test
    // body should be reported as TestFailed events rather than aborting the run.
    it("should generate a test failure if a Throwable, or an Error other than direct Error subtypes " +
      "known in JDK 1.5, excluding AssertionError") {
      val a = new funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        val hello = "Hello, world!"
        def withFixture(test: OneArgTest): Outcome = {
          test(hello)
        }
        test("throws AssertionError") { s => throw new AssertionError }
        test("throws plain old Error") { s => throw new Error }
        test("throws Throwable") { s => throw new Throwable }
      }
      val rep = new EventRecordingReporter
      a.run(None, Args(rep))
      val tf = rep.testFailedEventsReceived
      assert(tf.size === 3) // every throwing test surfaces as a TestFailed event
    }
    // SKIP-SCALATESTJS,NATIVE-START
    // Fatal VM errors (e.g. OutOfMemoryError) must propagate and abort the run,
    // never be swallowed into a TestFailed event. (JVM only, hence the SKIP markers.)
    it("should propagate out Errors that are direct subtypes of Error in JDK 1.5, other than " +
      "AssertionError, causing Suites and Runs to abort.") {
      val a = new funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        val hello = "Hello, world!"
        def withFixture(test: OneArgTest): Outcome = {
          test(hello)
        }
        test("throws AssertionError") { s => throw new OutOfMemoryError }
      }
      intercept[OutOfMemoryError] {
        a.run(None, Args(SilentReporter))
      }
    }
    // SKIP-SCALATESTJS,NATIVE-END
    // Both arities of test body — `() =>` (no fixture) and `s =>` (takes the fixture) —
    // must be registered and invoked.
    it("should allow both tests that take fixtures and tests that don't") {
      class SuiteA extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = {
          test("Hello, world!")
        }
        var takesNoArgsInvoked = false
        test("take no args") { () => takesNoArgsInvoked = true; /* ASSERTION_SUCCEED */ }
        var takesAFixtureInvoked = false
        test("takes a fixture") { s => takesAFixtureInvoked = true; /* ASSERTION_SUCCEED */ }
      }
      val a = new SuiteA
      import scala.language.reflectiveCalls
      a.run(None, Args(SilentReporter))
      assert(a.testNames.size === 2, a.testNames)
      assert(a.takesNoArgsInvoked)
      assert(a.takesAFixtureInvoked)
    }
    // A test body whose last expression is not Unit (here: a trailing Boolean/Int)
    // must still register and run normally.
    it("should work with test functions whose inferred result type is not Unit") {
      class SuiteA extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = {
          test("Hello, world!")
        }
        var takesNoArgsInvoked = false
        test("take no args") { () => takesNoArgsInvoked = true; true; /* ASSERTION_SUCCEED */ }
        var takesAFixtureInvoked = false
        test("takes a fixture") { s => takesAFixtureInvoked = true; true; /* ASSERTION_SUCCEED */ }
      }
      val a = new SuiteA
      import scala.language.reflectiveCalls
      assert(!a.takesNoArgsInvoked)
      assert(!a.takesAFixtureInvoked)
      a.run(None, Args(SilentReporter))
      assert(a.testNames.size === 2, a.testNames)
      assert(a.takesNoArgsInvoked)
      assert(a.takesAFixtureInvoked)
    }
    // Ignored tests with non-Unit bodies must fire TestIgnored and never execute.
    it("should work with ignored tests whose inferred result type is not Unit") {
      class SuiteA extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        var theTestThisCalled = false
        var theTestThatCalled = false
        ignore("test this") { () => theTestThisCalled = true; "hi"; /* ASSERTION_SUCCEED */ }
        ignore("test that") { fixture => theTestThatCalled = true; 42; /* ASSERTION_SUCCEED */ }
      }
      val a = new SuiteA
      import scala.language.reflectiveCalls
      assert(!a.theTestThisCalled)
      assert(!a.theTestThatCalled)
      val reporter = new EventRecordingReporter
      a.run(None, Args(reporter))
      assert(reporter.testIgnoredEventsReceived.size === 2)
      assert(!a.theTestThisCalled) // ignored bodies never execute
      assert(!a.theTestThatCalled)
    }
    // Fixtureless tests (`() =>` bodies) must be routed to the NoArgTest overload of
    // withFixture, not the OneArgTest overload.
    it("should pass a NoArgTest to withFixture for tests that take no fixture") {
      class MySuite extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        var aNoArgTestWasPassed = false
        var aOneArgTestWasPassed = false
        override def withFixture(test: NoArgTest): Outcome = {
          aNoArgTestWasPassed = true
          Succeeded
        }
        def withFixture(test: OneArgTest): Outcome = {
          aOneArgTestWasPassed = true
          Succeeded
        }
        test("something") { () =>
          assert(1 + 1 === 2)
        }
      }
      val s = new MySuite
      s.run(None, Args(SilentReporter))
      assert(s.aNoArgTestWasPassed)
      assert(!s.aOneArgTestWasPassed)
    }
    // Conversely, a test taking the fixture must be routed to the OneArgTest overload only.
    it("should not pass a NoArgTest to withFixture for tests that take a Fixture") {
      class MySuite extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        var aNoArgTestWasPassed = false
        var aOneArgTestWasPassed = false
        override def withFixture(test: NoArgTest): Outcome = {
          aNoArgTestWasPassed = true
          Succeeded
        }
        def withFixture(test: OneArgTest): Outcome = {
          aOneArgTestWasPassed = true
          Succeeded
        }
        test("something") { fixture =>
          assert(1 + 1 === 2)
        }
      }
      val s = new MySuite
      s.run(None, Args(SilentReporter))
      assert(!s.aNoArgTestWasPassed)
      assert(s.aOneArgTestWasPassed)
    }
    // The default NoArgTest path must actually invoke the user's no-arg test body.
    it("should pass a NoArgTest that invokes the no-arg test when the " +
      "NoArgTest's no-arg apply method is invoked") {
      class MySuite extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        var theNoArgTestWasInvoked = false
        def withFixture(test: OneArgTest): Outcome = {
          // Shouldn't be called, but just in case don't invoke a OneArgTest
          Succeeded
        }
        test("something") { () =>
          theNoArgTestWasInvoked = true
          /* ASSERTION_SUCCEED */
        }
      }
      val s = new MySuite
      s.run(None, Args(SilentReporter))
      assert(s.theNoArgTestWasInvoked)
    }
    // The OneArgTest handed to withFixture must carry the registered test's name.
    it("should pass the correct test name in the OneArgTest passed to withFixture") {
      class SuiteA extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        var correctTestNameWasPassed = false
        def withFixture(test: OneArgTest): Outcome = {
          correctTestNameWasPassed = test.name == "something"
          test("hi")
        }
        test("something") { fixture => /* ASSERTION_SUCCEED */ }
      }
      val a = new SuiteA
      import scala.language.reflectiveCalls
      a.run(None, Args(SilentReporter))
      assert(a.correctTestNameWasPassed)
    }
    // The OneArgTest handed to withFixture must expose the run's ConfigMap.
    it("should pass the correct config map in the OneArgTest passed to withFixture") {
      class SuiteA extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        var correctConfigMapWasPassed = false
        def withFixture(test: OneArgTest): Outcome = {
          correctConfigMapWasPassed = (test.configMap == ConfigMap("hi" -> 7))
          test("hi")
        }
        test("something") { fixture => /* ASSERTION_SUCCEED */ }
      }
      val a = new SuiteA
      import scala.language.reflectiveCalls
      a.run(None, Args(SilentReporter, Stopper.default, Filter(), ConfigMap("hi" -> 7), None, new Tracker(), Set.empty))
      assert(a.correctConfigMapWasPassed)
    }
    // Registering a test (or ignore) from inside a running test body is illegal:
    // registration is closed once the run starts, so each case below must produce a
    // TestFailedException for the enclosing "should blow up" test.
    describe("(when a nesting rule has been violated)") {
      it("should, if they call a nested it from within an it clause, result in a TestFailedException when running the test") {
        class MySuite extends funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = { test("hi") }
          test("should blow up") { fixture =>
            test("should never run") { fixture =>
              assert(1 === 1)
            }
            /* ASSERTION_SUCCEED */
          }
        }
        val spec = new MySuite
        ensureTestFailedEventReceived(spec, "should blow up")
      }
      it("should, if they call a nested it with tags from within an it clause, result in a TestFailedException when running the test") {
        class MySuite extends funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = { test("hi") }
          test("should blow up") { fixture =>
            test("should never run", mytags.SlowAsMolasses) { fixture =>
              assert(1 === 1)
            }
            /* ASSERTION_SUCCEED */
          }
        }
        val spec = new MySuite
        ensureTestFailedEventReceived(spec, "should blow up")
      }
      it("should, if they call a nested registerTest with tags from within a registerTest clause, result in a TestFailedException when running the test") {
        class MySuite extends funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = { test("hi") }
          registerTest("should blow up") { fixture =>
            registerTest("should never run", mytags.SlowAsMolasses) { fixture =>
              assert(1 === 1)
            }
            /* ASSERTION_SUCCEED */
          }
        }
        val spec = new MySuite
        ensureTestFailedEventReceived(spec, "should blow up")
      }
      it("should, if they call a nested ignore from within an it clause, result in a TestFailedException when running the test") {
        class MySuite extends funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = { test("hi") }
          test("should blow up") { fixture =>
            ignore("should never run") { fixture =>
              assert(1 === 1)
            }
            /* ASSERTION_SUCCEED */
          }
        }
        val spec = new MySuite
        ensureTestFailedEventReceived(spec, "should blow up")
      }
      it("should, if they call a nested ignore with tags from within an it clause, result in a TestFailedException when running the test") {
        class MySuite extends funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = { test("hi") }
          test("should blow up") { fixture =>
            ignore("should never run", mytags.SlowAsMolasses) { fixture =>
              assert(1 == 1)
            }
            /* ASSERTION_SUCCEED */
          }
        }
        val spec = new MySuite
        ensureTestFailedEventReceived(spec, "should blow up")
      }
      it("should, if they call a nested registerIgnoredTest with tags from within a registerTest clause, result in a TestFailedException when running the test") {
        class MySuite extends funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = { test("hi") }
          registerTest("should blow up") { fixture =>
            registerIgnoredTest("should never run", mytags.SlowAsMolasses) { fixture =>
              assert(1 == 1)
            }
            /* ASSERTION_SUCCEED */
          }
        }
        val spec = new MySuite
        ensureTestFailedEventReceived(spec, "should blow up")
      }
    }
    // Running a suite with a testName that was never registered must fail fast.
    it("should throw IllegalArgumentException if passed a testName that doesn't exist") {
      class MySuite extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = {
          test("hi")
        }
        test("one") {s => (); /* ASSERTION_SUCCEED */ }
        test("two") {s => (); /* ASSERTION_SUCCEED */ }
      }
      val suite = new MySuite
      intercept[IllegalArgumentException] {
        suite.run(Some("three"), Args(SilentReporter)) // "three" was never registered
      }
    }
    // NOTE: several assertions below use `thisLineNumber - N` relative offsets; do not
    // insert or delete lines between a failing expression and its corresponding assert.
    describe("registerTest and registerIgnoredTest method") {
      // One of each outcome: succeeded, failed, pending, canceled, ignored.
      it("should allow test registration and ignored test registration") {
        class TestSpec extends funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          def withFixture(test: OneArgTest): Outcome = {
            test("hi")
          }
          val a = 1
          registerTest("test 1") { fixture =>
            val e = intercept[TestFailedException] {
              assert(a == 2)
            }
            assert(e.message == Some("1 did not equal 2"))
            assert(e.failedCodeFileName == Some("FixtureFunSuiteSpec.scala"))
            assert(e.failedCodeLineNumber == Some(thisLineNumber - 4))
          }
          registerTest("test 2") { fixture =>
            assert(a == 2)
          }
          registerTest("test 3") { fixture =>
            pending
          }
          registerTest("test 4") { fixture =>
            cancel()
          }
          registerIgnoredTest("test 5") { fixture =>
            assert(a == 2)
          }
        }
        val rep = new EventRecordingReporter
        val s = new TestSpec
        s.run(None, Args(rep))
        assert(rep.testStartingEventsReceived.length == 4) // ignored test never starts
        assert(rep.testSucceededEventsReceived.length == 1)
        assert(rep.testSucceededEventsReceived(0).testName == "test 1")
        assert(rep.testFailedEventsReceived.length == 1)
        assert(rep.testFailedEventsReceived(0).testName == "test 2")
        assert(rep.testPendingEventsReceived.length == 1)
        assert(rep.testPendingEventsReceived(0).testName == "test 3")
        assert(rep.testCanceledEventsReceived.length == 1)
        assert(rep.testCanceledEventsReceived(0).testName == "test 4")
        assert(rep.testIgnoredEventsReceived.length == 1)
        assert(rep.testIgnoredEventsReceived(0).testName == "test 5")
      }
      // Nested registerTest inside a running registerTest body -> TestRegistrationClosedException.
      it("should generate TestRegistrationClosedException with correct stack depth info when has a registerTest nested inside a registerTest") {
        class TestSpec extends funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          var registrationClosedThrown = false
          registerTest("a scenario") { fixture =>
            registerTest("nested scenario") { fixture =>
              assert(1 == 2)
            }; /* ASSERTION_SUCCEED */
          }
          override def withFixture(test: OneArgTest): Outcome = {
            val outcome = test("test")
            outcome match {
              case Exceptional(ex: TestRegistrationClosedException) =>
                registrationClosedThrown = true
              case _ =>
            }
            outcome
          }
        }
        val rep = new EventRecordingReporter
        val s = new TestSpec
        s.run(None, Args(rep))
        assert(s.registrationClosedThrown == true)
        val testFailedEvents = rep.testFailedEventsReceived
        assert(testFailedEvents.size === 1)
        assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
        val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
        assert("FixtureFunSuiteSpec.scala" === trce.failedCodeFileName.get)
        assert(trce.failedCodeLineNumber.get === thisLineNumber - 23) // points at the nested registerTest
        assert(trce.message == Some("Test cannot be nested inside another test."))
      }
      // Nested registerIgnoredTest inside a running registerTest body -> same exception.
      it("should generate TestRegistrationClosedException with correct stack depth info when has an registerIgnoredTest nested inside a registerTest") {
        class TestSpec extends funsuite.FixtureAnyFunSuite {
          type FixtureParam = String
          var registrationClosedThrown = false
          registerTest("a scenario") { fixture =>
            registerIgnoredTest("nested scenario") { fixture =>
              assert(1 == 2)
            }; /* ASSERTION_SUCCEED */
          }
          override def withFixture(test: OneArgTest): Outcome = {
            val outcome = test("test")
            outcome match {
              case Exceptional(ex: TestRegistrationClosedException) =>
                registrationClosedThrown = true
              case _ =>
            }
            outcome
          }
        }
        val rep = new EventRecordingReporter
        val s = new TestSpec
        s.run(None, Args(rep))
        assert(s.registrationClosedThrown == true)
        val testFailedEvents = rep.testFailedEventsReceived
        assert(testFailedEvents.size === 1)
        assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
        val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
        assert("FixtureFunSuiteSpec.scala" === trce.failedCodeFileName.get)
        assert(trce.failedCodeLineNumber.get === thisLineNumber - 23) // points at the nested registerIgnoredTest
        assert(trce.message == Some("Test cannot be nested inside another test."))
      }
    }
    // Currently disabled; uses `thisLineNumber - N` offsets, so do not insert lines
    // between the expect(...) calls and the assertions below.
    ignore("should support expectations") { // Unignore after we uncomment the expectation implicits in RegistrationPolicy
      class TestSpec extends funsuite.FixtureAnyFunSuite with expectations.Expectations {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        test("fail scenario") { fixture =>
          expect(1 === 2); /* ASSERTION_SUCCEED */
        }
        test("nested fail scenario") { () =>
          expect(1 === 2); /* ASSERTION_SUCCEED */
        }
      }
      val rep = new EventRecordingReporter
      val s1 = new TestSpec
      s1.run(None, Args(rep))
      assert(rep.testFailedEventsReceived.size === 2)
      assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeFileName.get === "FixtureFunSuiteSpec.scala")
      assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeLineNumber.get === thisLineNumber - 11)
      assert(rep.testFailedEventsReceived(1).throwable.get.asInstanceOf[TestFailedException].failedCodeFileName.get === "FixtureFunSuiteSpec.scala")
      assert(rep.testFailedEventsReceived(1).throwable.get.asInstanceOf[TestFailedException].failedCodeLineNumber.get === thisLineNumber - 10)
    }
}
  // NOTE: assertions in this section use `thisLineNumber - N` relative offsets; do not
  // insert or delete lines between a failing expression and its corresponding assert.
  describe("when failure happens") {
    // A plain assertion failure must be reported with the exact file/line of the assert.
    it("should fire TestFailed event with correct stack depth info when test failed") {
      class TestSpec extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = {
          test("hi")
        }
        test("fail scenario") { fixture =>
          assert(1 === 2)
        }
      }
      val rep = new EventRecordingReporter
      val s1 = new TestSpec
      s1.run(None, Args(rep))
      assert(rep.testFailedEventsReceived.size === 1)
      assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeFileName.get === "FixtureFunSuiteSpec.scala")
      assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeLineNumber.get === thisLineNumber - 8)
    }
    // `test` nested inside a running `test` body -> TestRegistrationClosedException.
    it("should generate TestRegistrationClosedException with correct stack depth info when has a test nested inside a test") {
      class TestSpec extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        var registrationClosedThrown = false
        test("a scenario") { fixture =>
          test("nested scenario") { fixture =>
            assert(1 === 2)
          }; /* ASSERTION_SUCCEED */
        }
        override def withFixture(test: OneArgTest): Outcome = {
          val outcome = test.apply("hi")
          outcome match {
            case Exceptional(ex: TestRegistrationClosedException) =>
              registrationClosedThrown = true
            case _ =>
          }
          outcome
        }
      }
      val rep = new EventRecordingReporter
      val s = new TestSpec
      s.run(None, Args(rep))
      assert(s.registrationClosedThrown == true)
      val testFailedEvents = rep.testFailedEventsReceived
      assert(testFailedEvents.size === 1)
      assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
      val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
      assert("FixtureFunSuiteSpec.scala" === trce.failedCodeFileName.get)
      assert(trce.failedCodeLineNumber.get === thisLineNumber - 23) // points at the nested test
      assert(trce.message == Some("A test clause may not appear inside another test clause."))
    }
    // `ignore` nested inside a running `test` body -> TestRegistrationClosedException.
    it("should generate TestRegistrationClosedException with correct stack depth info when has an ignore nested inside a test") {
      class TestSpec extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        var registrationClosedThrown = false
        test("a scenario") { fixture =>
          ignore("nested scenario") { fixture =>
            assert(1 === 2)
          }; /* ASSERTION_SUCCEED */
        }
        override def withFixture(test: OneArgTest): Outcome = {
          val outcome = test.apply("hi")
          outcome match {
            case Exceptional(ex: TestRegistrationClosedException) =>
              registrationClosedThrown = true
            case _ =>
          }
          outcome
        }
      }
      val rep = new EventRecordingReporter
      val s = new TestSpec
      s.run(None, Args(rep))
      assert(s.registrationClosedThrown == true)
      val testFailedEvents = rep.testFailedEventsReceived
      assert(testFailedEvents.size === 1)
      assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
      val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
      assert("FixtureFunSuiteSpec.scala" === trce.failedCodeFileName.get)
      assert(trce.failedCodeLineNumber.get === thisLineNumber - 23) // points at the nested ignore
      assert(trce.message == Some("An ignore clause may not appear inside a test clause."))
    }
    // Duplicate names via test/test must be rejected at construction time.
    it("should generate a DuplicateTestNameException when duplicate test name is detected") {
      class TestSpec extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        test("test 1") { fixture => }
        test("test 1") { fixture => }
      }
      val e = intercept[DuplicateTestNameException] {
        new TestSpec
      }
      assert("FixtureFunSuiteSpec.scala" == e.failedCodeFileName.get)
      assert(e.failedCodeLineNumber.get == thisLineNumber - 6) // points at the second registration
      assert(!e.cause.isDefined)
    }
    // Duplicate names via test/ignore must also be rejected at construction time.
    it("should generate a DuplicateTestNameException when duplicate test name is detected when use ignore") {
      class TestSpec extends funsuite.FixtureAnyFunSuite {
        type FixtureParam = String
        def withFixture(test: OneArgTest): Outcome = { test("hi") }
        test("test 1") { fixture => }
        ignore("test 1") { fixture => }
      }
      val e = intercept[DuplicateTestNameException] {
        new TestSpec
      }
      assert("FixtureFunSuiteSpec.scala" == e.failedCodeFileName.get)
      assert(e.failedCodeLineNumber.get == thisLineNumber - 6) // points at the ignore registration
      assert(!e.cause.isDefined)
    }
  }
}
| scalatest/scalatest | jvm/funsuite-test/src/test/scala/org/scalatest/funsuite/FixtureFunSuiteSpec.scala | Scala | apache-2.0 | 67,079 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.codegen
import org.apache.flink.table.planner.codegen.CodeGenUtils.{ROW_DATA, hashCodeForType, newName}
import org.apache.flink.table.planner.codegen.Indenter.toISC
import org.apache.flink.table.runtime.generated.{GeneratedHashFunction, HashFunction}
import org.apache.flink.table.types.logical.LogicalType
import org.apache.flink.util.MathUtils
/**
 * Code generator for RowData hash codes. Calculates a hash value based on selected
 * fields of a RowData.
 * NOTE: If you need a hash value that is more evenly distributed, apply
 * [[MathUtils.murmurHash]] to the result to scatter it.
 */
object HashCodeGenerator {

  /**
   * A sequence of prime numbers to be used for salting the computed hash values.
   * Based on some empirical evidence, we are using a 32-element subsequence of the
   * OEIS sequence #A068652 (numbers such that every cyclic permutation is a prime).
   *
   * @see <a href="http://en.wikipedia.org/wiki/List_of_prime_numbers">
   *        http://en.wikipedia.org/wiki/List_of_prime_numbers</a>
   * @see <a href="http://oeis.org/A068652">http://oeis.org/A068652</a>
   */
  val HASH_SALT: Array[Int] = Array[Int](
    73, 79, 97, 113, 131, 197, 199, 311, 337, 373, 719, 733, 919, 971, 991, 1193, 1931, 3119,
    3779, 7793, 7937, 9311, 9377, 11939, 19391, 19937, 37199, 39119, 71993, 91193, 93719, 93911)

  /**
   * Generates a [[HashFunction]] implementation that computes an int hash code from
   * the given fields of an input row.
   *
   * @param ctx code generator context shared by the member/init/inner-class snippets
   * @param input logical type of the input row
   * @param name prefix for the generated class name (made unique via newName)
   * @param hashFields indices of the row fields that participate in the hash
   * @return the generated, compilable hash function
   */
  def generateRowHash(
      ctx: CodeGeneratorContext,
      input: LogicalType,
      name: String,
      hashFields: Array[Int]): GeneratedHashFunction = {
    val className = newName(name)
    val baseClass = classOf[HashFunction]
    val inputTerm = CodeGenUtils.DEFAULT_INPUT1_TERM
    // One access expression (value term + null term) per participating field.
    val accessExprs = hashFields.map(
      idx => GenerateUtils.generateFieldAccess(ctx, input, inputTerm, idx))
    val (hashBody, resultTerm) = generateCodeBody(ctx, accessExprs)
    val code =
      j"""
      public class $className implements ${baseClass.getCanonicalName} {
        ${ctx.reuseMemberCode()}
        public $className(Object[] references) throws Exception {
          ${ctx.reuseInitCode()}
        }
        @Override
        public int hashCode($ROW_DATA $inputTerm) {
          ${ctx.reuseLocalVariableCode()}
          $hashBody
          return $resultTerm;
        }
        ${ctx.reuseInnerClassDefinitionCode()}
      }
    """.stripMargin
    new GeneratedHashFunction(
      className, code, ctx.references.toArray, ctx.tableConfig.getConfiguration)
  }

  /**
   * Builds the Java statements that fold every field access expression into a single
   * int accumulator, skipping null fields.
   *
   * @return a pair of (code block computing the hash, name of the resulting int variable)
   */
  private def generateCodeBody(
      ctx: CodeGeneratorContext,
      accessExprs: Seq[GeneratedExpression]): (String, String) = {
    val hashIntTerm = CodeGenUtils.newName("hashCode")
    // zipWithIndex instead of a mutable counter (was `var i = -1` incremented inside map);
    // `i & 0x1F` cycles through the 32 salts so any number of fields is supported.
    val hashBodyCode = accessExprs.zipWithIndex.map { case (expr, i) =>
      s"""
         |$hashIntTerm *= ${HASH_SALT(i & 0x1F)};
         |${expr.code}
         |if (!${expr.nullTerm}) {
         |  $hashIntTerm += ${hashCodeForType(ctx, expr.resultType, expr.resultTerm)};
         |}
         |""".stripMargin
    }.mkString("\\n")
    (s"""
        |int $hashIntTerm = 0;
        |$hashBodyCode""".stripMargin, hashIntTerm)
  }
}
| apache/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/HashCodeGenerator.scala | Scala | apache-2.0 | 3,889 |
package eu.inn.binders.core
import eu.inn.binders.naming.Converter
/** Base contract for deserializers, parameterized by the field-name
  * [[Converter]] used to map serialized names to Scala names.
  */
trait Deserializer[C <: Converter] {
  // Exposes the converter type so dependent code can refer to it at the type level.
  type nameConverterType = C
  /** Name of the field currently being deserialized, if known. */
  def fieldName: Option[String]
  /** Whether the current value is null/absent — presumably checked before reading; confirm in implementations. */
  def isNull: Boolean
}
| InnovaCo/binders | src/main/scala/eu/inn/binders/core/Deserializer.scala | Scala | bsd-3-clause | 191 |
package gitbucket.core.api
/** A git object pointer as exposed by the API; `sha` is the object's hash. */
case class ApiObject(sha: String)
/** A git reference (branch/tag ref name) together with the object it points to.
  * `object` is backquoted because it is a Scala keyword.
  */
case class ApiRef(ref: String, `object`: ApiObject)
| gencer/gitbucket | src/main/scala/gitbucket/core/api/ApiRef.scala | Scala | apache-2.0 | 115 |
package com.avsystem.commons
package redis.examples
import akka.actor.ActorSystem
import com.avsystem.commons.redis._
// Global execution context is used for the sake of simplicity of this example,
// think well if this is what you actually want.
import scala.concurrent.ExecutionContext.Implicits.global
/**
 * Examples showing how to create and execute batches made of multiple Redis commands.
 */
object PipeliningExample extends App {
  implicit val actorSystem: ActorSystem = ActorSystem()
  // Pipelining is a technique in which multiple Redis commands are sent to server at once, without one command
  // waiting for previous one to finish. This allows sending multiple commands in a single network message, which
  // is beneficial for performance.
  // In case of Redis Cluster deployment, batch may be divided into multiple sub-batches, each one sent to different
  // master node.
  // An API object whose methods return RedisBatch objects, which are representations of not-yet-executed Redis commands
  // which can be combined with each other to form larger batches.
  val api = RedisApi.Batches.StringTyped
  val executor: RedisExecutor = new RedisNodeClient
  // In order to send a few commands in a single batch and retrieve all results, create a tuple of batches
  // and call extension method `sequence` on it, which will transform the tuple of batches into a single batch
  // which returns a tuple
  val pairBatch: RedisBatch[(Opt[String], Long)] = (api.get("key"), api.incr("otherKey")).sequence
  // The result arrives as a tuple mirroring the batch structure; failures surface through the Future.
  executor.executeBatch(pairBatch).onComplete {
    case Success((Opt(textValue), numericValue)) => println(s"Got $textValue and $numericValue")
    case Success((Opt.Empty, numericValue)) => println(s"Got only $numericValue")
    case Failure(t) => t.printStackTrace()
  }
  // When merging two batches with each other, you can discard result of one of them by using *> or <* operator
  // In this case we're not interested in result of SET command, only INCR
  val singleResultBatch: RedisBatch[Long] =
    api.set("key", "value") *> api.incr("otherKey")
  // `sequence` works not only on tuples but also on collections
  // In this case we're sending 100 INCR commands in a single batch and get the result in a collection
  val collectionBatch: RedisBatch[Seq[Long]] =
    (0 until 100).map(i => api.incr(s"key$i")).sequence
  // You can have both tuples and collections in a single batch, arbitrarily nested:
  val compositeBatch: RedisBatch[(Opt[String], Seq[Long])] =
    (api.get("key"), (0 until 100).map(i => api.incr(s"key$i"))).sequence
  // NOTE: when using `executeBatch`, the type system does not protect you from sending commands not supported by
  // particular client type. For example, the code below will compile, but execution will fail with
  // `ForbiddenCommandException`
  executor.executeBatch(api.clientSetname("name")) // ForbiddenCommandException, can't execute CLIENT SETNAME using RedisNodeClient
}
| AVSystem/scala-commons | commons-redis/src/test/scala/com/avsystem/commons/redis/examples/PipeliningExample.scala | Scala | mit | 2,955 |
package fpinscala.errorhandling
import scala.{Option => _, Either => _, Left => _, Right => _, _} // hide std library `Option` and `Either`, since we are writing our own in this chapter
/** A right-biased disjoint union: `Right` carries a success of type `A`,
  * `Left` an error of type `E`. Hand-rolled for the exercises (the standard
  * library's `Either` is hidden by the import above).
  */
sealed trait Either[+E,+A] {
  /** Applies `f` to a `Right` value; a `Left` passes through unchanged. */
  def map[B](f: A => B): Either[E, B] = this match {
    case Left(e)  => Left(e)
    case Right(a) => Right(f(a))
  }
  /** Binds `f` across a `Right`; propagates a `Left` unchanged. */
  def flatMap[EE >: E, B](f: A => Either[EE, B]): Either[EE, B] = this match {
    case Left(e)  => Left(e)
    case Right(a) => f(a)
  }
  /** Falls back to `b` (evaluated lazily) when this is a `Left`. */
  def orElse[EE >: E, B >: A](b: => Either[EE, B]): Either[EE, B] = this match {
    case Left(_)      => b
    case r @ Right(_) => r
  }
  /** Combines two eithers with `f`; the first `Left` encountered wins. */
  def map2[EE >: E, B, C](b: Either[EE, B])(f: (A, B) => C): Either[EE, C] =
    flatMap(a => b.map(bb => f(a, bb)))
}
/** Failure case carrying an error of type `E`. */
case class Left[+E](get: E) extends Either[E,Nothing]
/** Success case carrying a value of type `A`. */
case class Right[+A](get: A) extends Either[Nothing,A]
object Either {
  /** Maps `f` over `as` and collects the results; the first `Left` is returned
    * (though `f` is still applied to every element, matching the strict
    * evaluation of the previous implementation).
    */
  def traverse[E, A, B](as: List[A])(f: A => Either[E, B]): Either[E, List[B]] =
    as match {
      case Nil => Right(Nil)
      case h :: t =>
        // Evaluate head and tail eagerly, exactly as the strict map2 argument did.
        val hd = f(h)
        val tl = traverse(t)(f)
        hd.map2(tl)(_ :: _)
    }
  /** Turns a list of eithers into an either of a list; the first `Left` wins. */
  def sequence[E, A](es: List[Either[E, A]]): Either[E, List[A]] =
    traverse(es)(ea => ea)
  /** Arithmetic mean, or `Left` for an empty sequence. */
  def mean(xs: IndexedSeq[Double]): Either[String, Double] =
    if (xs.nonEmpty) Right(xs.sum / xs.length)
    else Left("mean of empty list!")
  /** Integer division wrapped in `Either`; division by zero yields `Left`. */
  def safeDiv(x: Int, y: Int): Either[Exception, Int] =
    Try(x / y)
  /** Lifts a possibly-throwing computation into `Either[Exception, A]`. */
  def Try[A](a: => A): Either[Exception, A] =
    try Right(a)
    catch { case e: Exception => Left(e) }
}
| mattlong/fpinscala | exercises/src/main/scala/fpinscala/errorhandling/Either.scala | Scala | mit | 1,589 |
package models.database.facade
import models.database.AppDB
import models.database.alias.{Artist, ServiceArtistAbsence}
import org.squeryl.PrimitiveTypeMode._
/** Data-access helpers (Squeryl) for recording which artists are known to be
  * missing from a given streaming service.
  */
object ServiceArtistAbsenceFacade {
  /** Records that `service` has no entry for artist `id`, unless such a row
    * already exists. Silently does nothing when the artist id itself is unknown.
    * NOTE(review): check-then-insert is not atomic; concurrent callers could
    * insert duplicates — confirm whether a unique constraint backs this.
    */
  def insertIfNotExists(id:Long, service:String) = {
    inTransaction {
      // Guard: only proceed when the artist exists at all.
      from(AppDB.artists)(a =>
        where(a.id === id)
        select a.id
      ).headOption match {
        case Some(_) =>
          // Only insert when no absence row exists yet for this (artist, service).
          from(AppDB.serviceArtistAbsence)(saa =>
            where(saa.artistId === id and saa.service === service)
            select saa.id
          ).headOption match {
            case None =>
              AppDB.serviceArtistAbsence.insert(ServiceArtistAbsence(artistId = id, service = service))
            case _ =>
          }
        case _ =>
      }
    }
  }
  /** Returns the artists among `artistIds` that are recorded absent from `service`. */
  def absentArtists(service:String, artistIds:List[Long]):List[Artist] = {
    inTransaction {
      from(AppDB.artists, AppDB.serviceArtistAbsence)( (a, saa) =>
        where(a.id === saa.artistId and saa.service === service and saa.artistId.in(artistIds))
        select a
      ).distinct.toList
    }
  }
}
| haffla/stream-compare | app/models/database/facade/ServiceArtistAbsenceFacade.scala | Scala | gpl-3.0 | 1,089 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.stream.generic
import java.net.{DatagramPacket, InetAddress}
import java.nio.charset.StandardCharsets
import com.google.common.io.Resources
import com.typesafe.config.ConfigFactory
import org.apache.commons.io.IOUtils
import org.apache.commons.net.{DefaultDatagramSocketFactory, DefaultSocketFactory}
import org.junit.runner.RunWith
import org.locationtech.geomesa.stream.SimpleFeatureStreamSource
import org.opengis.feature.simple.SimpleFeature
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
import scala.concurrent.Future
@RunWith(classOf[JUnitRunner])
class GenericSimpleFeatureStreamSourceTest extends Specification {
  // Integration-style spec: builds a SimpleFeatureStreamSource from Typesafe
  // config and pushes testdata.tsv through real local sockets. Ports are
  // hard-coded (5899 tcp, 5898 udp), so a busy port will fail the spec.
  "GenericSimpleFeatureStreamSource" should {
    // Camel/Netty source route + feature type + delimited-text converter.
    val confString =
      """
        |{
        |  type = "generic"
        |  source-route = "netty4:tcp://localhost:5899?textline=true"
        |  sft = {
        |    type-name = "testdata"
        |    fields = [
        |      { name = "label", type = "String" }
        |      { name = "geom", type = "Point", index = true, srid = 4326, default = true }
        |      { name = "dtg", type = "Date", index = true }
        |    ]
        |  }
        |  threads = 4
        |  converter = {
        |    id-field = "md5(string2bytes($0))"
        |    type = "delimited-text"
        |    format = "DEFAULT"
        |    fields = [
        |      { name = "label", transform = "trim($1)" }
        |      { name = "geom", transform = "point($2::double, $3::double)" }
        |      { name = "dtg", transform = "datetime($4)" }
        |    ]
        |  }
        |}
      """.stripMargin
    "be built from a conf" >> {
      val source = SimpleFeatureStreamSource.buildSource(ConfigFactory.parseString(confString))
      source.init()
      source must not beNull
      val url = Resources.getResource("testdata.tsv")
      val lines = Resources.readLines(url, StandardCharsets.UTF_8)
      val socketFactory = new DefaultSocketFactory
      // Writer runs asynchronously; the reader below blocks on source.next.
      Future {
        val socket = socketFactory.createSocket("localhost", 5899)
        val os = socket.getOutputStream
        IOUtils.writeLines(lines, IOUtils.LINE_SEPARATOR_UNIX, os)
        os.flush()
        // wait for data to arrive at the server
        Thread.sleep(4000)
        os.close()
      }
      var i = 0
      // Busy-polls the source until a feature appears; take(n) bounds the loop.
      val iter = new Iterator[SimpleFeature] {
        override def hasNext: Boolean = true
        override def next() = {
          var ret: SimpleFeature = null
          while(ret == null) {
            ret = source.next
          }
          i+=1
          ret
        }
      }
      val result = iter.take(lines.length).toList
      result.length must be equalTo lines.length
    }
    "work with udp" >> {
      val port = 5898
      // Reuse the tcp conf, switching transport/port and raising the netty
      // max line length so long rows are not truncated.
      val udpConf = confString.replace("tcp", "udp").replace("5899", port.toString)
        .replace("textline=true", "textline=true&decoderMaxLineLength=" + Int.MaxValue)
      val source = SimpleFeatureStreamSource.buildSource(ConfigFactory.parseString(udpConf))
      source.init()
      source must not beNull
      val url = Resources.getResource("testdata.tsv")
      val lines = Resources.readLines(url, StandardCharsets.UTF_8)
      val socketFactory = new DefaultDatagramSocketFactory
      // One datagram per line; oversized lines are only logged, not split.
      Future {
        val address = InetAddress.getByName("localhost")
        val socket = socketFactory.createDatagramSocket()
        socket.connect(address, port)
        lines.foreach { line =>
          val bytes = (line + "\\n").getBytes("UTF-8")
          if (bytes.length > socket.getSendBufferSize) {
            println("Error in buffer size with line \\n" + line)
          }
          val packet = new DatagramPacket(bytes, bytes.length, address, port)
          socket.send(packet)
        }
        socket.disconnect()
      }
      // Sleeps between polls to give datagrams time to arrive.
      val iter = new Iterator[SimpleFeature] {
        override def hasNext: Boolean = true
        override def next() = {
          var ret: SimpleFeature = null
          while (ret == null) {
            Thread.sleep(10)
            ret = source.next
          }
          ret
        }
      }
      val result = iter.take(lines.length).toList
      result.length must be equalTo lines.length
    }
  }
}
package reductions
import org.scalameter._
import common._
/** Benchmark harness: measures the sequential vs. parallel line-of-sight
  * implementations with ScalaMeter and prints the speedup.
  */
object LineOfSightRunner {
  val standardConfig = config(
    Key.exec.minWarmupRuns -> 40,
    Key.exec.maxWarmupRuns -> 80,
    Key.exec.benchRuns -> 100,
    Key.verbose -> true
  ) withWarmer(new Warmer.Default)
  def main(args: Array[String]) {
    val length = 10000000
    // Heights cycle 0..99; output gets one extra slot beyond the input length.
    val input = (0 until length).map(_ % 100 * 1.0f).toArray
    val output = new Array[Float](length + 1)
    val seqtime = standardConfig measure {
      LineOfSight.lineOfSight(input, output)
    }
    println(s"sequential time: $seqtime ms")
    val partime = standardConfig measure {
      LineOfSight.parLineOfSight(input, output, 10000)
    }
    println(s"parallel time: $partime ms")
    println(s"speedup: ${seqtime / partime}")
  }
}
object LineOfSight {
  /** Returns the larger of the two angles. */
  def max(a: Float, b: Float): Float = if (a > b) a else b
  /**
   * Sequentially fills `output(i)` with the maximum angle `input(j) / j` over
   * `1 <= j <= i`. The observer sits at index 0, whose angle is defined as 0,
   * so `output(0)` is 0 (assumes terrain heights are non-negative, as in the
   * parallel version which always downsweeps from a starting angle of 0).
   *
   * Fix: the previous version read `input(1)` before checking the length
   * (crashing on inputs shorter than 2) and never defined output(0).
   */
  def lineOfSight(input: Array[Float], output: Array[Float]): Unit = {
    if (input.nonEmpty) {
      output(0) = 0
      var currentMax = 0f
      var i = 1
      while (i < input.length) {
        currentMax = max(currentMax, input(i) / i)
        output(i) = currentMax
        i += 1
      }
    }
  }
  /** Reduction tree over a segment of the input; `maxPrevious` is that segment's maximum angle. */
  sealed abstract class Tree {
    def maxPrevious: Float
  }
  case class Node(left: Tree, right: Tree) extends Tree {
    val maxPrevious = max(left.maxPrevious, right.maxPrevious)
  }
  case class Leaf(from: Int, until: Int, maxPrevious: Float) extends Tree
  /**
   * Traverses input[from, until) and returns the maximum angle.
   *
   * Fix: index 0 (the observer) now contributes angle 0 instead of seeding
   * with `input(from)/from`, which divided by zero (NaN) when `from == 0` —
   * a case `parLineOfSight` actually produces. An empty range no longer
   * touches `input` at all.
   */
  def upsweepSequential(input: Array[Float], from: Int, until: Int): Float = {
    var currentMax = 0f
    var i = if (from == 0) 1 else from
    while (i < until) {
      currentMax = max(currentMax, input(i) / i)
      i += 1
    }
    currentMax
  }
  /** Traverses the part of the array starting at `from` and until `end`, and
   * returns the reduction tree for that part of the array.
   *
   * The reduction tree is a `Leaf` if the length of the specified part of the
   * array is smaller or equal to `threshold`, and a `Node` otherwise.
   * If the specified part of the array is longer than `threshold`, then the
   * work is divided and done recursively in parallel.
   */
  def upsweep(input: Array[Float], from: Int, end: Int,
    threshold: Int): Tree = {
    if ((end - from) <= threshold)
      Leaf(from, end, upsweepSequential(input, from, end))
    else {
      // from + (end - from) / 2 cannot overflow, unlike (end + from) / 2.
      val mid = from + (end - from) / 2
      val (l, r) = parallel(upsweep(input, from, mid, threshold), upsweep(input, mid, end, threshold))
      Node(l, r)
    }
  }
  /**
   * Sequentially writes `output(i)` for i in [from, until): the running maximum
   * of `startingAngle` and the angles up to index i.
   *
   * Fix: index 0 contributes angle 0 (previously `input(0)/0` produced NaN),
   * and an empty range no longer reads `input(from)`.
   */
  def downsweepSequential(input: Array[Float], output: Array[Float],
    startingAngle: Float, from: Int, until: Int): Unit = {
    var currentMax = startingAngle
    var i = from
    while (i < until) {
      if (i > 0) currentMax = max(currentMax, input(i) / i)
      output(i) = currentMax
      i += 1
    }
  }
  /** Pushes the maximum angle in the prefix of the array to each leaf of the
   * reduction `tree` in parallel, and then calls `downsweepSequential` to write
   * the `output` angles.
   */
  def downsweep(input: Array[Float], output: Array[Float], startingAngle: Float,
    tree: Tree): Unit = tree match {
    case Leaf(from, until, _) =>
      downsweepSequential(input, output, startingAngle, from, until)
    case Node(l, r) =>
      // The right subtree must additionally see the left subtree's maximum.
      parallel(downsweep(input, output, startingAngle, l), downsweep(input, output, max(l.maxPrevious, startingAngle), r))
  }
  /** Compute the line-of-sight in parallel: upsweep builds the reduction tree,
   * downsweep writes the running maxima into `output`. */
  def parLineOfSight(input: Array[Float], output: Array[Float],
    threshold: Int): Unit = {
    val tree = upsweep(input, 0, input.length, threshold)
    downsweep(input, output, 0, tree)
  }
}
| matija94/show-me-the-code | scala_practice/reductions/src/main/scala/reductions/LineOfSight.scala | Scala | mit | 3,874 |
package me.yingrui.segment.dict
import org.junit.Assert
import org.junit.Assert._
import org.junit.Test
class PunctuationTest {
  // Loads the real core dictionary once; each test asserts that a class of
  // characters is present and tagged with the expected part-of-speech code.
  DictionaryFactory().loadDictionary()
  private val hashDictionary = DictionaryFactory().getCoreDictionary
  // Punctuation and symbols must be tagged POS_W.
  @Test
  def should_contains_basic_punctuations() {
    val punctuations = ",./<>?;':\\"{}[]!@#$%^&*()_+-=\\\\|。,、;:?!…-·ˉˇ¨‘`~'“”々~‖∶"'`|〃〔〕〈〉《》「」『』.〖〗【】()[]{}".toCharArray()
    for (punctuation <- punctuations) {
      val word = hashDictionary.getWord(punctuation.toString)
      print(punctuation + "," + getFirstPosOfWord(word) + "; ")
      assertEquals(POSUtil.POS_W, getFirstPosOfWord(word))
    }
    println()
  }
  // Currency symbols must be tagged POS_Q.
  @Test
  def should_contains_money_symbols() {
    val punctuations = "฿€£₤¥".toCharArray()
    for (punctuation <- punctuations) {
      print(punctuation + " ")
      val word = hashDictionary.getWord(punctuation.toString)
      Assert.assertEquals(getFirstPosOfWord(word), POSUtil.POS_Q)
    }
    println()
  }
  // Numeral glyphs (roman numerals, circled/parenthesized digits) must be POS_M.
  @Test
  def should_contains_number_symbols() {
    val punctuations = ("ⅠⅡⅢⅣⅤⅥⅦⅧⅨⅩⅪⅫ" +
      "㈠㈡㈢㈣㈤㈥㈦㈧㈨㈩" +
      "⑴⑵⑶⑷⑸⑹⑺⑻⑼⑽" +
      "①②③④⑤⑥⑦⑧⑨⑩⑾⑿⒀⒁⒂⒃⒄⒅⒆⒇" +
      "①②③④⑤⑥⑦⑧⑨⑩№" +
      "⑴⑵⑶⑷⑸⑹⑺⑻⑼⑽⑾⑿⒀⒁⒂⒃⒄⒅⒆⒇" +
      "⒈⒉⒊⒋⒌⒍⒎⒏⒐⒑⒒⒓⒔⒕⒖⒗⒘⒙⒚⒛" +
      "ⅠⅡⅢⅣⅤⅥⅦⅧⅨⅩⅪⅫⅰⅱⅲⅳⅴⅵⅶⅷⅸⅹ").toCharArray()
    for (punctuation <- punctuations) {
      print(punctuation + " ")
      val word = hashDictionary.getWord(punctuation.toString)
      Assert.assertEquals(getFirstPosOfWord(word), POSUtil.POS_M)
    }
    println()
  }
  // Mathematical operators and relations must be POS_W.
  @Test
  def should_contains_math_symbols() {
    val punctuations = ("≈≡≠=≤≥<>≮≯∷±+-×÷/∫∮∝∞∧∨∑∏∪∩∈∵∴⊥∥∠⌒⊙≌∽√").toCharArray()
    for (punctuation <- punctuations) {
      print(punctuation + " ")
      val word = hashDictionary.getWord(punctuation.toString)
      Assert.assertEquals(getFirstPosOfWord(word), POSUtil.POS_W)
    }
    println()
  }
  // Units of measurement (and currency/percent marks) must be POS_Q.
  @Test
  def should_contains_units_of_measurement() {
    val punctuations = ("㎎㎏㎜㎝㎞㎡㏄㏎㏑°′″$£¥‰%℃¤¢").toCharArray()
    for (punctuation <- punctuations) {
      print(punctuation + " ")
      val word = hashDictionary.getWord(punctuation.toString)
      Assert.assertEquals(getFirstPosOfWord(word), POSUtil.POS_Q)
    }
    println()
  }
  // Whitespace/line-break sequences must also be tagged POS_W.
  @Test
  def should_contains_breaking_characters() {
    val punctuations = List(
      "\\r\\n", "\\r", "\\n", " ", " "
    )
    for (punctuation <- punctuations) {
      // NOTE(review): single-arg URLEncoder.encode is deprecated and uses the
      // platform default charset; used here only for printable debug output.
      print(java.net.URLEncoder.encode(punctuation) + " ")
      val word = hashDictionary.getWord(punctuation)
      Assert.assertEquals(getFirstPosOfWord(word), POSUtil.POS_W)
    }
    println()
  }
  // Box-drawing, Greek, Cyrillic, bopomofo, kana etc. must all be POS_W.
  @Test
  def should_contains_other_characters() {
    val punctuations = ("┌┍┎┏┐┑┒┓─┄┈├┝┞┟┠┡┢┣│┆┊┬┭┮┯┰┱┲┳┼┽┾┿╀╁╂╃§☆★●◎◇◆□■△▲※→←↑↓〓#_&@\^αβγδεζηθικλμνξοπρστυφχψωΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩабвгдеёжзийклмнопрстуфхцчшщъыьэюяАБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯㄅㄉㄓㄚㄞㄢㄦㄆㄊㄍㄐㄔㄗㄧㄛㄟㄣㄇㄋㄎㄑㄕㄘㄨㄜㄠㄤㄈㄏㄒㄖㄙㄩㄝㄡㄥāáǎàōóǒòêēéěèīíǐìūúǔùǖǘǚǜぁぃぅぇぉかきくけこんさしすせそたちつってとゐなにぬねのはひふへほゑまみむめもゃゅょゎをあいうえおがぎぐげござじずぜぞだぢづでどぱぴぷぺぽぼびぶべぼらりるれろやゆよわァィゥヴェォカヵキクケヶコサシスセソタチツッテトヰンナニヌネノハヒフヘホヱマミムメモャュョヮヲアイウエオガギグゲゴザジズゼゾダヂヅデドパピプペポバビブベボラリルレロヤユヨワ]レロヤユヨワ").toCharArray()
    for (punctuation <- punctuations) {
      print(punctuation + " ")
      val word = hashDictionary.getWord(punctuation.toString)
      Assert.assertEquals(getFirstPosOfWord(word), POSUtil.POS_W)
    }
    println()
  }
  // First POS tag of the word's first POS row.
  private def getFirstPosOfWord(word: IWord) = word.getPOSArray().getWordPOSTable()(0)(0)
}
| yingrui/mahjong | lib-segment/src/test/scala/me/yingrui/segment/dict/PunctuationTest.scala | Scala | gpl-3.0 | 4,519 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.commands
import org.neo4j.cypher.internal.compiler.v2_3.ExecutionContext
import org.neo4j.cypher.internal.compiler.v2_3.commands.expressions.{Expression, InequalitySeekRangeExpression, PrefixSeekRangeExpression}
import org.neo4j.cypher.internal.compiler.v2_3.helpers.IsCollection
import org.neo4j.cypher.internal.compiler.v2_3.mutation.GraphElementPropertyFunctions
import org.neo4j.cypher.internal.compiler.v2_3.pipes.QueryState
import org.neo4j.cypher.internal.frontend.v2_3.CypherTypeException
import org.neo4j.graphdb.Node
import scala.collection.GenTraversableOnce
/**
 * Resolves a compiled index query expression against a label/property index
 * and returns the matching nodes.
 */
object indexQuery extends GraphElementPropertyFunctions {
  /**
   * @param queryExpression the compiled seek expression (exact, IN-list, or range)
   * @param m               current execution context (row)
   * @param state           query state used to evaluate expressions
   * @param index           lookup function from a (Neo-safe) value to nodes
   * @param labelName       label of the index, used only for error messages
   * @param propertyName    property of the index, used only for error messages
   */
  def apply(queryExpression: QueryExpression[Expression],
            m: ExecutionContext,
            state: QueryState,
            index: Any => GenTraversableOnce[Node],
            labelName: String,
            propertyName: String): Iterator[Node] = queryExpression match {
    // Exact seek on a single value, e.g. n.prop = x
    case SingleQueryExpression(inner) =>
      val value = inner(m)(state)
      lookupNodes(value, index).toIterator
    // IN-list seek: deduplicate values before looking each one up.
    case ManyQueryExpression(inner) =>
      inner(m)(state) match {
        case IsCollection(coll) => coll.toSet.toSeq.flatMap {
          value: Any => lookupNodes(value, index)
        }.iterator
        // null IN-list matches nothing.
        case null => Iterator.empty
        case _ => throw new CypherTypeException(s"Expected the value for looking up :$labelName($propertyName) to be a collection but it was not.")
      }
    // Range seek (prefix or inequality): bounds are made Neo-safe before lookup.
    case RangeQueryExpression(rangeWrapper) =>
      val range = rangeWrapper match {
        case s: PrefixSeekRangeExpression =>
          s.range.map(expression => makeValueNeoSafe(expression(m)(state)))
        case InequalitySeekRangeExpression(innerRange) =>
          innerRange.mapBounds(expression => makeValueNeoSafe(expression(m)(state)))
      }
      index(range).toIterator
  }
  // A null seek value matches nothing (index lookups never match null).
  private def lookupNodes(value: Any, index: Any => GenTraversableOnce[Node]) = value match {
    case null =>
      Iterator.empty
    case _ =>
      val neoValue: Any = makeValueNeoSafe(value)
      index(neoValue)
  }
}
| HuangLS/neo4j | community/cypher/cypher-compiler-2.3/src/main/scala/org/neo4j/cypher/internal/compiler/v2_3/commands/indexQuery.scala | Scala | apache-2.0 | 2,886 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala.beans
/** When attached to a field, this annotation adds a setter and a getter
 * method following the Java Bean convention. For example:
 * {{{
 * @BeanProperty
 * var status = ""
 * }}}
 * adds the following methods to the class:
 * {{{
 * def setStatus(s: String) { this.status = s }
 * def getStatus: String = this.status
 * }}}
 * For fields of type `Boolean`, if you need a getter named `isStatus`,
 * use the `scala.beans.BooleanBeanProperty` annotation instead.
 *
 * The `@scala.annotation.meta.field` meta-annotation below makes the
 * annotation stick to the underlying field rather than the generated
 * accessors. The class itself has no members: the accessor generation is
 * performed by the compiler.
 */
@scala.annotation.meta.field
class BeanProperty extends scala.annotation.StaticAnnotation
| felixmulder/scala | src/library/scala/beans/BeanProperty.scala | Scala | bsd-3-clause | 1,125 |
package models
/** A Spotify Web API "private user" profile object.
  * NOTE(review): optional fields appear to be populated only when the access
  * token carries the matching OAuth scopes (e.g. email requires
  * `user-read-email`) — confirm against the Spotify API reference.
  */
case class PrivateUser(
                      birthdate: Option[String],
                      country: Option[String],
                      display_name: Option[String],
                      email: Option[String],
                      external_urls: Map[String, String],
                      followers: Followers,
                      href: String,
                      id: String,
                      images: List[Image],
                      product: Option[String],
                      `type`: String,
                      uri: String
                      )
package com.github.projectflink.common.als
import scopt.OptionParser
/** Shared command-line handling for ALS benchmark runners. Concrete
  * implementations supply the execution `Context` and `readRatings`.
  */
trait ALSRunner extends ALS {
  type Context
  val USER_FACTORS_FILE = "userFactorsFile"
  val ITEM_FACTORS_FILE = "itemFactorsFile"
  // NOTE(review): inputRatings/outputPath use null and seed uses -1 as "unset"
  // sentinels; callers must check before use.
  case class ALSConfig(master: String = "local[4]",
    factors: Int = -1, lambda: Double = 0.0,
    iterations: Int = 0, inputRatings: String = null, outputPath: String = null,
    blocks: Int = -1, seed: Long = -1, persistencePath: Option[String] = None)
  /** Reads the ratings dataset within the concrete execution context. */
  def readRatings(input: String, ctx: Context): DS[RatingType]
  /** Parses the command line into an [[ALSConfig]]; returns None on failure
    * (scopt prints usage in that case).
    */
  def parseCL(args: Array[String]): Option[ALSConfig] = {
    val parser = new OptionParser[ALSConfig]("ALS"){
      head("ALS", "1.0")
      arg[String]("master") action {
        (v, c) => c.copy(master = v)
      } text {
        "Master URL"
      }
      arg[Int]("factors") action {
        (v, c) => c.copy(factors = v)
      } text {
        "Number of factors"
      }
      arg[Double]("regularization") action {
        (v, c) => c.copy(lambda = v)
      } text {
        "Regularization constant"
      }
      // NOTE(review): no description text given for this argument.
      arg[Int]("iterations") action {
        (v, c) => c.copy(iterations = v)
      }
      arg[Int]("blocks") action {
        (v, c) => c.copy(blocks = v )
      } text {
        "Number of blocks"
      }
      // A value starting with "rand" selects a time-based seed; anything else
      // must parse as a Long.
      arg[String]("seed") action {
        (v, c) => {
          if(v.startsWith("rand")){
            c.copy(seed = System.currentTimeMillis())
          }else{
            c.copy(seed = v.toLong)
          }
        }
      } text {
        "Seed for random initialization"
      }
      // "none" (lowercased) disables persistence; otherwise a trailing slash
      // is appended when missing.
      arg[String]("persistencePath") optional() action {
        (v, c) => {
          if(!v.toLowerCase.equals("none")){
            c.copy(persistencePath = Some(if(v.endsWith("/")) v else v+"/"))
          }else{
            c
          }
        }
      } text {
        "Persistence path for the preprocessing data"
      }
      arg[String]("input") optional() action {
        (v, c) => c.copy(inputRatings = v)
      } text {
        "Path to input ratings"
      }
      arg[String]("output") optional() action {
        (v, c) => c.copy(outputPath = v)
      } text {
        "Output path for the results"
      }
    }
    parser.parse(args, ALSConfig())
  }
}
| mxm/flink-perf | perf-common/src/main/scala/com/github/projectflink/common/als/ALSRunner.scala | Scala | apache-2.0 | 2,270 |
package utils.json
import java.sql.Timestamp
import java.util.UUID
import models.{ Prediction, TaggingImage }
import play.api.libs.json._
import play.api.mvc.{ BodyParser, BodyParsers }
/**
* Created by jlzie on 28.04.2017.
*/
/** Play-JSON (de)serialization helpers shared by the controllers. */
object JsonFormats {
  // Timestamps travel over the wire as epoch milliseconds.
  implicit object timestampFormat extends Format[Timestamp] {
    def reads(json: JsValue) = {
      JsSuccess(new Timestamp(json.as[Long]))
    }
    def writes(ts: Timestamp) = {
      JsNumber(ts.getTime)
    }
  }
  implicit val taggingImageFormat = Json.format[TaggingImage]
  // Predictions are only ever serialized nested inside their image, so the
  // redundant imageId field is stripped from the JSON representation.
  implicit object PredictionFormat extends Writes[Prediction] {
    // Derive the macro format once instead of on every write call.
    private val base = Json.format[Prediction]
    override def writes(o: Prediction): JsValue = base.writes(o) - ("imageId")
  }
  /**
   * Serializes the images as a JSON array, embedding each image's predictions
   * under a "predictions" key; the key is omitted for images without any.
   *
   * groupBy replaces the previous hand-rolled mutable HashMap/MutableList
   * accumulation (which also relied on a type-erased pattern match); it
   * preserves the relative order of predictions per image.
   */
  def writeImageswithPredicitions(images: Seq[TaggingImage], predicitions: Seq[Prediction]): JsValue = {
    val predictionsByImage = predicitions.groupBy(_.imageId)
    val imagesJson = images.map { image =>
      val jImage = Json.toJson(image).as[JsObject]
      predictionsByImage.get(image.imageId) match {
        case Some(preds) => jImage + (("predictions", Json.toJson(preds)))
        case None        => jImage
      }
    }
    Json.toJson(imagesJson)
  }
  /** Serializes a single image, embedding its predictions when non-empty. */
  def writeImageswithPredictions(image: TaggingImage, predictions: Seq[Prediction]): JsValue = {
    val jImage = Json.toJson(image).as[JsObject]
    if (predictions.nonEmpty) jImage + (("predictions", Json.toJson(predictions)))
    else jImage
  }
  import play.api.libs.concurrent.Execution.Implicits.defaultContext
  /** Body parser validating incoming JSON against `Reads[A]`; answers 400 with
    * the validation errors on failure.
    */
  def validateJson[A: Reads]: BodyParser[A] = BodyParsers.parse.json.validate(
    _.validate[A].asEither.left.map(e => play.api.mvc.Results.BadRequest(JsError.toJson(e)))
  )
}
| SwaggerTagger/octo-tagger-backend | app/utils/json/JsonFormats.scala | Scala | mit | 2,065 |
/**
* License
* =======
*
* The MIT License (MIT)
*
*
* Copyright (c) 2017 Antoine DOERAENE @sherpal
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package plot
import complex.Complex
import gameengine.Engine
import gui._
import sharednodejsapis.NodeProcess
import webglgraphics.Canvas2D
/**
* This Frame will contain all the button need to set the draw mode and draw color.
*/
object DrawingOptions extends Frame("DrawingOptions", Some(UIParent)) {
setPoint(TopLeft, PlotWindowsArea, TopRight)
setPoint(BottomRight, UIParent, TopRight, 0, -300)
private val border = createTexture()
border.setAllPoints()
border.setVertexColor(1, 20.0 / 255, 147.0 / 255)
border.setMode(LineMode)
private val defaultButSize: Int = 30
private val xOffset: Int = 30
private val yOffset: Int = 30
val lineButton: Button = new Button(this)
lineButton.setPoint(TopLeft, this, TopLeft, xOffset, -yOffset)
lineButton.setSize(defaultButSize)
private val lineButtonBG = lineButton.createTexture()
lineButtonBG.setAllPoints()
lineButtonBG.setVertexColor(0,0,0)
lineButtonBG.setMode(LineMode)
lineButton.setNormalTexture()
lineButton.normalTexture.get.setAllPoints()
lineButton.normalTexture.get.setTexture(new Canvas2D())
lineButton.normalTexture.get.canvas.get.setSize(lineButton.width.toInt, lineButton.height.toInt)
Engine.painter.withCanvases(lineButton.normalTexture.get.canvas.get)({
Engine.painter.withColor(200.0 / 255, 200.0 / 255, 200.0 / 255)({
Engine.painter.drawRectangle(
-lineButton.width / 2 + lineButton.height / 2 * Complex.i, lineButton.width, lineButton.height
)
})
Engine.painter.withColor(0, 0, 0)({
Engine.painter.drawLine(List(
-8 * lineButton.width / 10 + 8 * lineButton.height / 10 * Complex.i,
lineButton.width * 3 / 10 - lineButton.height * 2 / 10 * Complex.i
))
})
})
lineButton.setPushedTexture(Button.makeSimplePushedTexture(lineButton))
lineButton.setScript(ScriptKind.OnClick)((_: Frame, _: Double, _: Double, button: Int) => {
if (button == 0)
PlotWindowsArea.setDrawMode(DrawAreaLine)
})
lineButton.setScript(ScriptKind.OnEnter)((self: Region, _: Region) => {
ButtonTooltip.clearLines()
ButtonTooltip.addLine("Set draw lines mode (Ctrl+L)", 0, 0, 0, wrap = true, JustifyCenter)
ButtonTooltip.appearIn()
ButtonTooltip.setOwner(self.asInstanceOf[Frame])
})
lineButton.setScript(ScriptKind.OnLeave)((_: Region, _: Region) => {
ButtonTooltip.fadeOut()
})
val ellipseButton = new Button(this)
ellipseButton.setPoint(TopLeft, lineButton, TopRight, 10)
ellipseButton.setSize(defaultButSize)
private val ellipseButtonBG = ellipseButton.createTexture()
ellipseButtonBG.setAllPoints()
ellipseButtonBG.setVertexColor(0,0,0)
ellipseButtonBG.setMode(LineMode)
ellipseButton.setNormalTexture()
ellipseButton.normalTexture.get.setAllPoints()
ellipseButton.normalTexture.get.setTexture(new Canvas2D())
ellipseButton.normalTexture.get.canvas.get.setSize(ellipseButton.width.toInt, ellipseButton.height.toInt)
Engine.painter.withCanvases(ellipseButton.normalTexture.get.canvas.get)({
Engine.painter.withColor(200.0 / 255, 200.0 / 255, 200.0 / 255)({
Engine.painter.drawRectangle(
-ellipseButton.width / 2 + ellipseButton.height / 2 * Complex.i, ellipseButton.width, ellipseButton.height
)
})
Engine.painter.withColor(0, 0, 0)({
Engine.painter.drawEllipse(0, ellipseButton.width * 8 / 20, ellipseButton.height * 5 / 20, lineWidth = 2)
})
})
ellipseButton.setPushedTexture(Button.makeSimplePushedTexture(ellipseButton))
ellipseButton.setScript(ScriptKind.OnClick)((_: Frame, _: Double, _: Double, button: Int) => {
if (button == 0)
PlotWindowsArea.setDrawMode(DrawAreaEllipse)
})
ellipseButton.setScript(ScriptKind.OnEnter)((self: Region, _: Region) => {
ButtonTooltip.clearLines()
ButtonTooltip.addLine("Set draw ellipses mode (Ctrl+E)", 0, 0, 0, wrap = true, JustifyCenter)
ButtonTooltip.appearIn()
ButtonTooltip.setOwner(self.asInstanceOf[Frame])
})
ellipseButton.setScript(ScriptKind.OnLeave)((_: Region, _: Region) => {
ButtonTooltip.fadeOut()
})
// Toolbar button switching the plot area into "draw circles" mode.
// Same construction pattern as ellipseButton; the icon is a circle
// (equal horizontal/vertical radii: 8/20 of the button size).
val circleButton = new Button(this)
circleButton.setPoint(TopLeft, ellipseButton, TopRight, 10)
circleButton.setSize(defaultButSize)
private val circleButtonBG = circleButton.createTexture()
circleButtonBG.setAllPoints()
circleButtonBG.setVertexColor(0,0,0)
circleButtonBG.setMode(LineMode)
circleButton.setNormalTexture()
circleButton.normalTexture.get.setAllPoints()
circleButton.normalTexture.get.setTexture(new Canvas2D())
circleButton.normalTexture.get.canvas.get.setSize(circleButton.width.toInt, circleButton.height.toInt)
Engine.painter.withCanvases(circleButton.normalTexture.get.canvas.get)({
  Engine.painter.withColor(200.0 / 255, 200.0 / 255, 200.0 / 255)({
    Engine.painter.drawRectangle(
      -circleButton.width / 2 + circleButton.height / 2 * Complex.i, circleButton.width, circleButton.height
    )
  })
  Engine.painter.withColor(0, 0, 0)({
    Engine.painter.drawEllipse(0, circleButton.width * 8 / 20, circleButton.height * 8 / 20, lineWidth = 2)
  })
})
circleButton.setPushedTexture(Button.makeSimplePushedTexture(circleButton))
circleButton.setScript(ScriptKind.OnClick)((_: Frame, _: Double, _: Double, button: Int) => {
  if (button == 0)
    PlotWindowsArea.setDrawMode(DrawAreaCircle)
})
circleButton.setScript(ScriptKind.OnEnter)((self: Region, _: Region) => {
  ButtonTooltip.clearLines()
  ButtonTooltip.addLine("Set draw circles mode (Ctrl+C)", 0, 0, 0, wrap = true, JustifyCenter)
  ButtonTooltip.appearIn()
  ButtonTooltip.setOwner(self.asInstanceOf[Frame])
})
circleButton.setScript(ScriptKind.OnLeave)((_: Region, _: Region) => {
  ButtonTooltip.fadeOut()
})
// Toolbar button switching the plot area into "draw rectangles" mode.
// Same construction pattern as the other shape buttons; the icon is a
// filled dark rectangle on a light grey background.
val rectangleButton = new Button(this)
rectangleButton.setPoint(TopLeft, circleButton, TopRight, 10)
rectangleButton.setSize(defaultButSize)
private val rectangleButtonBG = rectangleButton.createTexture()
rectangleButtonBG.setAllPoints()
rectangleButtonBG.setVertexColor(0,0,0)
rectangleButtonBG.setMode(LineMode)
rectangleButton.setNormalTexture()
rectangleButton.normalTexture.get.setAllPoints()
rectangleButton.normalTexture.get.setTexture(new Canvas2D())
rectangleButton.normalTexture.get.canvas.get.setSize(rectangleButton.width.toInt, rectangleButton.height.toInt)
Engine.painter.withCanvases(rectangleButton.normalTexture.get.canvas.get)({
  Engine.painter.withColor(200.0 / 255, 200.0 / 255, 200.0 / 255)({
    // Bug fix: the background and icon used circleButton's dimensions
    // (copy-paste from the circle button). All buttons currently share
    // defaultButSize so rendering was identical, but the latent bug would
    // surface the moment the sizes diverge; use rectangleButton's own size.
    Engine.painter.drawRectangle(
      -rectangleButton.width / 2 + rectangleButton.height / 2 * Complex.i,
      rectangleButton.width, rectangleButton.height
    )
  })
  Engine.painter.withColor(0.1, 0.1, 0.1)({
    Engine.painter.drawRectangle(
      Complex(-rectangleButton.width * 2 / 5, rectangleButton.height / 4),
      rectangleButton.width * 4 / 5, rectangleButton.height / 2, lineWidth = 0)
  })
})
rectangleButton.setPushedTexture(Button.makeSimplePushedTexture(rectangleButton))
rectangleButton.setScript(ScriptKind.OnClick)((_: Frame, _: Double, _: Double, button: Int) => {
  if (button == 0)
    PlotWindowsArea.setDrawMode(DrawAreaRectangle)
})
rectangleButton.setScript(ScriptKind.OnEnter)((self: Region, _: Region) => {
  ButtonTooltip.clearLines()
  ButtonTooltip.addLine("Set draw rectangle mode", 0, 0, 0, wrap = true, JustifyCenter)
  ButtonTooltip.appearIn()
  ButtonTooltip.setOwner(self.asInstanceOf[Frame])
})
rectangleButton.setScript(ScriptKind.OnLeave)((_: Region, _: Region) => {
  ButtonTooltip.fadeOut()
})
// Toolbar button switching the plot area into "draw filled ellipses" mode.
// Icon: a solid dark ellipse (lineWidth = 0 means filled) on grey.
val filledEllipseButton = new Button(this)
filledEllipseButton.setPoint(TopLeft, rectangleButton, TopRight, 10)
filledEllipseButton.setSize(defaultButSize)
private val filledEllipseButtonBG = filledEllipseButton.createTexture()
filledEllipseButtonBG.setAllPoints()
filledEllipseButtonBG.setVertexColor(0,0,0)
filledEllipseButtonBG.setMode(LineMode)
filledEllipseButton.setNormalTexture()
filledEllipseButton.normalTexture.get.setAllPoints()
filledEllipseButton.normalTexture.get.setTexture(new Canvas2D())
filledEllipseButton.normalTexture.get.canvas.get.setSize(
  filledEllipseButton.width.toInt, filledEllipseButton.height.toInt)
Engine.painter.withCanvases(filledEllipseButton.normalTexture.get.canvas.get)({
  Engine.painter.withColor(200.0 / 255, 200.0 / 255, 200.0 / 255)({
    Engine.painter.drawRectangle(
      -filledEllipseButton.width / 2 + filledEllipseButton.height / 2 * Complex.i,
      filledEllipseButton.width, filledEllipseButton.height
    )
  })
  Engine.painter.withColor(0.1, 0.1, 0.1)({
    Engine.painter.drawEllipse(0, filledEllipseButton.width * 8 / 20, filledEllipseButton.height * 5 / 20,
      lineWidth = 0)
  })
})
filledEllipseButton.setPushedTexture(Button.makeSimplePushedTexture(filledEllipseButton))
filledEllipseButton.setScript(ScriptKind.OnClick)((_: Frame, _: Double, _: Double, button: Int) => {
  if (button == 0)
    PlotWindowsArea.setDrawMode(DrawAreaFillEllipse)
})
filledEllipseButton.setScript(ScriptKind.OnEnter)((self: Region, _: Region) => {
  ButtonTooltip.clearLines()
  ButtonTooltip.addLine("Set draw filled ellipse", 0, 0, 0, wrap = true, JustifyCenter)
  ButtonTooltip.appearIn()
  ButtonTooltip.setOwner(self.asInstanceOf[Frame])
})
filledEllipseButton.setScript(ScriptKind.OnLeave)((_: Region, _: Region) => {
  ButtonTooltip.fadeOut()
})
// Toolbar button switching the plot area into "draw disks" (filled
// circles) mode. Icon: a solid dark circle (equal radii, lineWidth = 0).
private val diskButton = new Button(this)
diskButton.setPoint(TopLeft, filledEllipseButton, TopRight, 10)
diskButton.setSize(defaultButSize)
private val diskButtonBG = diskButton.createTexture()
diskButtonBG.setAllPoints()
diskButtonBG.setVertexColor(0,0,0)
diskButtonBG.setMode(LineMode)
diskButton.setNormalTexture()
diskButton.normalTexture.get.setAllPoints()
diskButton.normalTexture.get.setTexture(new Canvas2D())
diskButton.normalTexture.get.canvas.get.setSize(diskButton.width.toInt, diskButton.height.toInt)
Engine.painter.withCanvases(diskButton.normalTexture.get.canvas.get)({
  Engine.painter.withColor(200.0 / 255, 200.0 / 255, 200.0 / 255)({
    Engine.painter.drawRectangle(
      -diskButton.width / 2 + diskButton.height / 2 * Complex.i, diskButton.width, diskButton.height
    )
  })
  Engine.painter.withColor(0.1, 0.1, 0.1)({
    Engine.painter.drawEllipse(0, diskButton.width * 8 / 20, diskButton.height * 8 / 20, lineWidth = 0)
  })
})
diskButton.setPushedTexture(Button.makeSimplePushedTexture(diskButton))
diskButton.setScript(ScriptKind.OnClick)((_: Frame, _: Double, _: Double, button: Int) => {
  if (button == 0)
    PlotWindowsArea.setDrawMode(DrawAreaFillCircle)
})
diskButton.setScript(ScriptKind.OnEnter)((self: Region, _: Region) => {
  ButtonTooltip.clearLines()
  ButtonTooltip.addLine("Set draw disk mode", 0, 0, 0, wrap = true, JustifyCenter)
  ButtonTooltip.appearIn()
  ButtonTooltip.setOwner(self.asInstanceOf[Frame])
})
diskButton.setScript(ScriptKind.OnLeave)((_: Region, _: Region) => {
  ButtonTooltip.fadeOut()
})
//TODO: cancel button
// --- Line colour section -----------------------------------------------
// Shared layout constants for the three RGB sliders below.
private val slidersHeight = 15
private val slidersSpace = 10
// Section title, left-justified under the shape buttons.
private val chooseColorFS = createFontString()
chooseColorFS.setPoint(TopLeft, lineButton, BottomLeft, 0, -defaultButSize)
chooseColorFS.setSize(this.width - defaultButSize, 20)
//chooseColorFS.setFontSize(15)
chooseColorFS.setFont("Quicksand", 20)
chooseColorFS.setText("Line colors settings:")
chooseColorFS.setTextColor(0,0,0)
chooseColorFS.setJustifyH(JustifyLeft)
// Preview swatch frame, tall enough to span all three sliders.
private val chosenColor = new Frame("", Some(this))
chosenColor.setSize(defaultButSize, 3 * slidersHeight + 2 * slidersSpace)
chosenColor.setPoint(TopLeft, chooseColorFS, BottomLeft, 0, -slidersSpace)
// Black border drawn on the overlay layer, above the colour fill.
private val chosenColorBorder = chosenColor.createTexture(layer = Overlay)
chosenColorBorder.setAllPoints()
chosenColorBorder.setVertexColor(0,0,0)
chosenColorBorder.setMode(LineMode)
// Fill texture initialised to the current draw colour; updated by the
// slider OnValueChanged handlers below.
private val chosenColorBG = chosenColor.createTexture()
chosenColorBG.setAllPoints()
chosenColorBG.setVertexColor(
  PlotWindowsArea.drawColor._1,
  PlotWindowsArea.drawColor._2,
  PlotWindowsArea.drawColor._3
)
// Slider controlling the red channel of the draw colour (0..255, step 1).
private val red = new Slider(this)
red.setPoint(TopLeft, chosenColor, TopRight, 10)
red.setPoint(TopRight, this, TopRight, -10, chosenColor.top - this.top)
red.setHeight(slidersHeight)
private val redBG = red.createTexture()
redBG.setAllPoints()
redBG.setVertexColor(200.0/255,200.0/255,200.0/255)
red.setMinMaxValues(0, 255)
red.setStep(Some(1))
red.setThumbLength(15)
// Red thumb identifies the channel.
red.thumbTexture.setVertexColor(1,0,0)
red.setValue((255 * PlotWindowsArea.drawColor._1).toInt)
// Keep the preview swatch and the plot area's draw colour in sync;
// only the red component changes, green/blue are read back from the swatch.
red.setScript(ScriptKind.OnValueChanged)((_: ValueBar, value: Double, _: Double) => {
  val (_, g, b, _) = chosenColorBG.vertexColor
  chosenColorBG.setVertexColor(value / 255, g, b)
  PlotWindowsArea.setDrawColor(value / 255, g, b)
})
// Slider controlling the green channel of the draw colour (0..255, step 1).
// Same wiring as the red slider; stacked directly below it.
private val green = new Slider(this)
green.setPoint(TopLeft, red, BottomLeft, 0, -slidersSpace)
green.setPoint(TopRight, red, BottomRight, 0, -slidersSpace)
green.setHeight(slidersHeight)
private val greenBG = green.createTexture()
greenBG.setAllPoints()
greenBG.setVertexColor(200.0/255,200.0/255,200.0/255)
green.setMinMaxValues(0, 255)
green.setStep(Some(1))
green.setThumbLength(15)
green.thumbTexture.setVertexColor(0,1,0)
green.setValue((255 * PlotWindowsArea.drawColor._2).toInt)
green.setScript(ScriptKind.OnValueChanged)((_: ValueBar, value: Double, _: Double) => {
  val (r, _, b, _) = chosenColorBG.vertexColor
  chosenColorBG.setVertexColor(r, value / 255, b)
  PlotWindowsArea.setDrawColor(r, value / 255, b)
})
// Slider controlling the blue channel of the draw colour (0..255, step 1).
// Same wiring as the red/green sliders; stacked directly below green.
private val blue = new Slider(this)
blue.setPoint(TopLeft, green, BottomLeft, 0, -slidersSpace)
blue.setPoint(TopRight, green, BottomRight, 0, -slidersSpace)
blue.setHeight(slidersHeight)
private val blueBG = blue.createTexture()
blueBG.setAllPoints()
blueBG.setVertexColor(200.0/255,200.0/255,200.0/255)
blue.setMinMaxValues(0, 255)
blue.setStep(Some(1))
blue.setThumbLength(15)
// Bug fix: the thumb colour was set with only two components
// (setVertexColor(0,0)); the red/green sliders use (1,0,0) and (0,1,0),
// so the blue channel is made explicit here to give a blue thumb.
blue.thumbTexture.setVertexColor(0,0,1)
blue.setValue((255 * PlotWindowsArea.drawColor._3).toInt)
blue.setScript(ScriptKind.OnValueChanged)((_: ValueBar, value: Double, _: Double) => {
  val (r, g, _, _) = chosenColorBG.vertexColor
  chosenColorBG.setVertexColor(r, g, value / 255)
  PlotWindowsArea.setDrawColor(r, g, value / 255)
})
/**
 * Moves the three colour sliders to the given RGB channels, clamping each
 * channel into the sliders' valid range [0, 255]. The sliders' own
 * OnValueChanged handlers then propagate the colour to the preview swatch
 * and the plot area.
 */
def setColor(r: Int, g: Int, b: Int): Unit = {
  def clamp(channel: Int): Int = channel.max(0).min(255)
  red.setValue(clamp(r))
  green.setValue(clamp(g))
  blue.setValue(clamp(b))
}
// Button opening the "draw with numeric values" window.
private val newDrawsButton = new Button(this)
newDrawsButton.setSize(defaultButSize)
newDrawsButton.setPoint(TopLeft, chosenColor, BottomLeft, 0, -20)
newDrawsButton.setNormalTexture("./pics/numerical_drawing.png")
newDrawsButton.normalTexture.get.setAllPoints()
// The pushed texture is derived from the normal texture, so it is built
// lazily on first hover (presumably to let the png finish loading first
// — TODO confirm against the texture-loading code).
private var newDrawsPushedTextureSet = false
newDrawsButton.setScript(ScriptKind.OnMouseReleased)((_: Frame, _: Double, _: Double, button: Int) => {
  if (button == 0) {
    windows.NewDraws.show()
  }
})
// Bug fix: OnEnter was registered twice — the second setScript (tooltip)
// replaced the first (lazy pushed-texture creation), so the pushed texture
// was never created. Both behaviours now live in one handler; the flag
// reproduces the original intent of creating the texture only once.
newDrawsButton.setScript(ScriptKind.OnEnter)((self: Frame, _: Frame) => {
  if (!newDrawsPushedTextureSet) {
    newDrawsButton.setPushedTexture(Button.makeSimplePushedTexture(newDrawsButton))
    newDrawsPushedTextureSet = true
  }
  ButtonTooltip.clearLines()
  ButtonTooltip.addLine("Draw with numeric values", 0, 0, 0, wrap = true, JustifyCenter)
  ButtonTooltip.appearIn()
  ButtonTooltip.setOwner(self)
})
newDrawsButton.setScript(ScriptKind.OnLeave)((_: Frame, _: Frame) => {
  ButtonTooltip.fadeOut()
})
// Debug overlay line: current Node.js heap usage in megabytes.
// The window starts hidden; F1 toggles it (see the key handler below).
DebugWindow.addData((_: Double) => {
  val heapUsedMb = NodeProcess.memoryUsage().heapUsed / 1024 / 1024
  "Memory usage: " + heapUsedMb + " mb"
})
DebugWindow.hide()
// Keyboard shortcuts: Ctrl+L/E/C select a draw tool (by simulating a
// left click on the matching toolbar button), Ctrl+Z undoes the last
// action, the arrow keys move the function-options focus, F1 toggles
// the debug window. Anything else is ignored.
setScript(ScriptKind.OnKeyReleased)((_: Frame, pressedKey: String) => {
  def clickAtCenter(targetButton: Button): Unit = {
    val (cx, cy) = targetButton.center
    targetButton.click(cx, cy, 0)
  }
  pressedKey match {
    case "l" if Engine.isDown("Control") =>
      clickAtCenter(lineButton)
    case "e" if Engine.isDown("Control") =>
      clickAtCenter(ellipseButton)
    case "c" if Engine.isDown("Control") =>
      clickAtCenter(circleButton)
    case "z" if Engine.isDown("Control") =>
      Action.cancelAction()
    case "ArrowUp" =>
      FunctionOptions.changeFocusedButton(-1)
    case "ArrowDown" =>
      FunctionOptions.changeFocusedButton(1)
    case "F1" =>
      if (DebugWindow.isVisible) DebugWindow.hide() else DebugWindow.show()
    case _ =>
  }
})
}
| sherpal/holomorphic-maps | src/main/scala/plot/DrawingOptions.scala | Scala | mit | 18,372 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui.jobs
import java.net.URLEncoder
import java.util.Date
import javax.servlet.http.HttpServletRequest
import scala.collection.mutable.HashSet
import scala.xml.{Elem, Node, Unparsed}
import org.apache.commons.lang3.StringEscapeUtils
import org.apache.spark.SparkConf
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler.{AccumulableInfo, TaskInfo, TaskLocality}
import org.apache.spark.ui._
import org.apache.spark.ui.jobs.UIData._
import org.apache.spark.util.{Distribution, Utils}
/** Page showing statistics and task list for a given stage */
private[ui] class StagePage(parent: StagesTab) extends WebUIPage("stage") {
import StagePage._
// Listeners owned by the parent tab: task/stage progress data and the
// operation graph used for the DAG visualisation.
private val progressListener = parent.progressListener
private val operationGraphListener = parent.operationGraphListener
// Static SVG legend shown with the task timeline: one coloured square plus
// label per timeline phase, laid out in a grid of 3 rows (index % 3) and
// columns 210px apart (index / 3).
private val TIMELINE_LEGEND = {
  <div class="legend-area">
    <svg>
      {
        val legendPairs = List(("scheduler-delay-proportion", "Scheduler Delay"),
          ("deserialization-time-proportion", "Task Deserialization Time"),
          ("shuffle-read-time-proportion", "Shuffle Read Time"),
          ("executor-runtime-proportion", "Executor Computing Time"),
          ("shuffle-write-time-proportion", "Shuffle Write Time"),
          ("serialization-time-proportion", "Result Serialization Time"),
          ("getting-result-time-proportion", "Getting Result Time"))

        legendPairs.zipWithIndex.map {
          case ((classAttr, name), index) =>
            <rect x={5 + (index / 3) * 210 + "px"} y={10 + (index % 3) * 15 + "px"}
              width="10px" height="10px" class={classAttr}></rect>
              <text x={25 + (index / 3) * 210 + "px"}
                y={20 + (index % 3) * 15 + "px"}>{name}</text>
        }
      }
    </svg>
  </div>
}
// TODO: We should consider increasing the number of this parameter over time
// if we find that it's okay.
// Cap on how many tasks are rendered in the event timeline.
private val MAX_TIMELINE_TASKS = parent.conf.getInt("spark.ui.timeline.tasks.maximum", 1000)
// Whether to expose the "Peak Execution Memory" optional metric.
private val displayPeakExecutionMemory = parent.conf.getBoolean("spark.sql.unsafe.enabled", true)
/**
 * Builds a human-readable summary of task locality levels for a stage,
 * e.g. "Node local: 4; Process local: 12", sorted alphabetically and
 * joined with "; ".
 */
private def getLocalitySummaryString(stageData: StageUIData): String = {
  val countsByLocality = stageData.taskData.values
    .map(_.taskInfo.taskLocality)
    .groupBy(identity)
    .map { case (locality, occurrences) => (locality, occurrences.size) }
  val summaries = countsByLocality.toSeq.map { case (locality, count) =>
    val localityName = locality match {
      case TaskLocality.PROCESS_LOCAL => "Process local"
      case TaskLocality.NODE_LOCAL => "Node local"
      case TaskLocality.RACK_LOCAL => "Rack local"
      case TaskLocality.ANY => "Any"
    }
    s"$localityName: $count"
  }
  summaries.sorted.mkString("; ")
}
/**
 * Renders the stage details page: summary, optional metrics toggles, DAG
 * visualisation, task timeline, per-quantile summary metrics, per-executor
 * aggregates, accumulators, and the paged task table.
 *
 * Synchronizes on the progress listener so the listener's maps are not
 * mutated while the page reads them.
 */
def render(request: HttpServletRequest): Seq[Node] = {
  progressListener.synchronized {
    // --- Parse and default the request parameters -----------------------
    val parameterId = request.getParameter("id")
    require(parameterId != null && parameterId.nonEmpty, "Missing id parameter")

    val parameterAttempt = request.getParameter("attempt")
    require(parameterAttempt != null && parameterAttempt.nonEmpty, "Missing attempt parameter")

    val parameterTaskPage = request.getParameter("task.page")
    val parameterTaskSortColumn = request.getParameter("task.sort")
    val parameterTaskSortDesc = request.getParameter("task.desc")
    val parameterTaskPageSize = request.getParameter("task.pageSize")
    val parameterTaskPrevPageSize = request.getParameter("task.prevPageSize")

    val taskPage = Option(parameterTaskPage).map(_.toInt).getOrElse(1)
    val taskSortColumn = Option(parameterTaskSortColumn).map { sortColumn =>
      UIUtils.decodeURLParameter(sortColumn)
    }.getOrElse("Index")
    val taskSortDesc = Option(parameterTaskSortDesc).map(_.toBoolean).getOrElse(false)
    val taskPageSize = Option(parameterTaskPageSize).map(_.toInt).getOrElse(100)
    val taskPrevPageSize = Option(parameterTaskPrevPageSize).map(_.toInt).getOrElse(taskPageSize)

    val stageId = parameterId.toInt
    val stageAttemptId = parameterAttempt.toInt
    val stageDataOption = progressListener.stageIdToData.get((stageId, stageAttemptId))

    val stageHeader = s"Details for Stage $stageId (Attempt $stageAttemptId)"
    // Early exit: no data recorded for this (stage, attempt).
    if (stageDataOption.isEmpty) {
      val content =
        <div id="no-info">
          <p>No information to display for Stage {stageId} (Attempt {stageAttemptId})</p>
        </div>
      return UIUtils.headerSparkPage(stageHeader, content, parent)
    }
    // Early exit: the stage exists but no task has started yet.
    if (stageDataOption.get.taskData.isEmpty) {
      val content =
        <div>
          <h4>Summary Metrics</h4> No tasks have started yet
          <h4>Tasks</h4> No tasks have started yet
        </div>
      return UIUtils.headerSparkPage(stageHeader, content, parent)
    }

    val stageData = stageDataOption.get
    val tasks = stageData.taskData.values.toSeq.sortBy(_.taskInfo.launchTime)
    val numCompleted = stageData.numCompleteTasks
    // totalTasks can exceed tasks.size when the listener has evicted old
    // task data; the header then shows "N, showing M".
    val totalTasks = stageData.numActiveTasks +
      stageData.numCompleteTasks + stageData.numFailedTasks
    val totalTasksNumStr = if (totalTasks == tasks.size) {
      s"$totalTasks"
    } else {
      s"$totalTasks, showing ${tasks.size}"
    }

    // Only user-visible (non-internal) accumulators are listed on the page.
    val allAccumulables = progressListener.stageIdToData((stageId, stageAttemptId)).accumulables
    val externalAccumulables = allAccumulables.values.filter { acc => !acc.internal }
    val hasAccumulators = externalAccumulables.size > 0

    // --- Stage-level summary list (sizes/records shown conditionally) ---
    val summary =
      <div>
        <ul class="unstyled">
          <li>
            <strong>Total Time Across All Tasks: </strong>
            {UIUtils.formatDuration(stageData.executorRunTime)}
          </li>
          <li>
            <strong>Locality Level Summary: </strong>
            {getLocalitySummaryString(stageData)}
          </li>
          {if (stageData.hasInput) {
            <li>
              <strong>Input Size / Records: </strong>
              {s"${Utils.bytesToString(stageData.inputBytes)} / ${stageData.inputRecords}"}
            </li>
          }}
          {if (stageData.hasOutput) {
            <li>
              <strong>Output: </strong>
              {s"${Utils.bytesToString(stageData.outputBytes)} / ${stageData.outputRecords}"}
            </li>
          }}
          {if (stageData.hasShuffleRead) {
            <li>
              <strong>Shuffle Read: </strong>
              {s"${Utils.bytesToString(stageData.shuffleReadTotalBytes)} / " +
               s"${stageData.shuffleReadRecords}"}
            </li>
          }}
          {if (stageData.hasShuffleWrite) {
            <li>
              <strong>Shuffle Write: </strong>
               {s"${Utils.bytesToString(stageData.shuffleWriteBytes)} / " +
               s"${stageData.shuffleWriteRecords}"}
            </li>
          }}
          {if (stageData.hasBytesSpilled) {
            <li>
              <strong>Shuffle Spill (Memory): </strong>
              {Utils.bytesToString(stageData.memoryBytesSpilled)}
            </li>
            <li>
              <strong>Shuffle Spill (Disk): </strong>
              {Utils.bytesToString(stageData.diskBytesSpilled)}
            </li>
          }}
        </ul>
      </div>

    // --- "Show Additional Metrics" checkbox panel (toggled client-side) --
    val showAdditionalMetrics =
      <div>
        <span class="expand-additional-metrics">
          <span class="expand-additional-metrics-arrow arrow-closed"></span>
          <a>Show Additional Metrics</a>
        </span>
        <div class="additional-metrics collapsed">
          <ul>
            <li>
                <input type="checkbox" id="select-all-metrics"/>
                <span class="additional-metric-title"><em>(De)select All</em></span>
            </li>
            <li>
              <span data-toggle="tooltip"
                    title={ToolTips.SCHEDULER_DELAY} data-placement="right">
                <input type="checkbox" name={TaskDetailsClassNames.SCHEDULER_DELAY}/>
                <span class="additional-metric-title">Scheduler Delay</span>
              </span>
            </li>
            <li>
              <span data-toggle="tooltip"
                    title={ToolTips.TASK_DESERIALIZATION_TIME} data-placement="right">
                <input type="checkbox" name={TaskDetailsClassNames.TASK_DESERIALIZATION_TIME}/>
                <span class="additional-metric-title">Task Deserialization Time</span>
              </span>
            </li>
            {if (stageData.hasShuffleRead) {
              <li>
                <span data-toggle="tooltip"
                      title={ToolTips.SHUFFLE_READ_BLOCKED_TIME} data-placement="right">
                  <input type="checkbox" name={TaskDetailsClassNames.SHUFFLE_READ_BLOCKED_TIME}/>
                  <span class="additional-metric-title">Shuffle Read Blocked Time</span>
                </span>
              </li>
              <li>
                <span data-toggle="tooltip"
                      title={ToolTips.SHUFFLE_READ_REMOTE_SIZE} data-placement="right">
                  <input type="checkbox" name={TaskDetailsClassNames.SHUFFLE_READ_REMOTE_SIZE}/>
                  <span class="additional-metric-title">Shuffle Remote Reads</span>
                </span>
              </li>
            }}
            <li>
              <span data-toggle="tooltip"
                    title={ToolTips.RESULT_SERIALIZATION_TIME} data-placement="right">
                <input type="checkbox" name={TaskDetailsClassNames.RESULT_SERIALIZATION_TIME}/>
                <span class="additional-metric-title">Result Serialization Time</span>
              </span>
            </li>
            <li>
              <span data-toggle="tooltip"
                    title={ToolTips.GETTING_RESULT_TIME} data-placement="right">
                <input type="checkbox" name={TaskDetailsClassNames.GETTING_RESULT_TIME}/>
                <span class="additional-metric-title">Getting Result Time</span>
              </span>
            </li>
            {if (displayPeakExecutionMemory) {
              <li>
                <span data-toggle="tooltip"
                      title={ToolTips.PEAK_EXECUTION_MEMORY} data-placement="right">
                  <input type="checkbox" name={TaskDetailsClassNames.PEAK_EXECUTION_MEMORY}/>
                  <span class="additional-metric-title">Peak Execution Memory</span>
                </span>
              </li>
            }}
          </ul>
        </div>
      </div>

    val dagViz = UIUtils.showDagVizForStage(
      stageId, operationGraphListener.getOperationGraphForStage(stageId))

    val accumulableHeaders: Seq[String] = Seq("Accumulable", "Value")
    // Rows with a missing name or value are dropped entirely.
    def accumulableRow(acc: AccumulableInfo): Seq[Node] = {
      (acc.name, acc.value) match {
        case (Some(name), Some(value)) => <tr><td>{name}</td><td>{value}</td></tr>
        case _ => Seq.empty[Node]
      }
    }
    val accumulableTable = UIUtils.listingTable(
      accumulableHeaders,
      accumulableRow,
      externalAccumulables.toSeq)

    val page: Int = {
      // If the user has changed to a larger page size, then go to page 1 in order to avoid
      // IndexOutOfBoundsException.
      if (taskPageSize <= taskPrevPageSize) {
        taskPage
      } else {
        1
      }
    }
    val currentTime = System.currentTimeMillis()
    // Build the paged task table; rendering errors (bad page/sort params)
    // are converted into an inline error box instead of failing the page.
    val (taskTable, taskTableHTML) = try {
      val _taskTable = new TaskPagedTable(
        parent.conf,
        UIUtils.prependBaseUri(parent.basePath) +
          s"/stages/stage?id=${stageId}&attempt=${stageAttemptId}",
        tasks,
        hasAccumulators,
        stageData.hasInput,
        stageData.hasOutput,
        stageData.hasShuffleRead,
        stageData.hasShuffleWrite,
        stageData.hasBytesSpilled,
        currentTime,
        pageSize = taskPageSize,
        sortColumn = taskSortColumn,
        desc = taskSortDesc
      )
      (_taskTable, _taskTable.table(page))
    } catch {
      case e @ (_ : IllegalArgumentException | _ : IndexOutOfBoundsException) =>
        val errorMessage =
          <div class="alert alert-error">
            <p>Error while rendering stage table:</p>
            <pre>
              {Utils.exceptionString(e)}
            </pre>
          </div>
        (null, errorMessage)
    }

    // After a sort request, scroll the browser down to the task table.
    val jsForScrollingDownToTaskTable =
      <script>
        {Unparsed {
          """
            |$(function() {
            |  if (/.*&task.sort=.*$/.test(location.search)) {
            |    var topOffset = $("#tasks-section").offset().top;
            |    $("html,body").animate({scrollTop: topOffset}, 200);
            |  }
            |});
          """.stripMargin
         }
        }
      </script>

    val taskIdsInPage = if (taskTable == null) Set.empty[Long]
      else taskTable.dataSource.slicedTaskIds

    // Excludes tasks which failed and have incomplete metrics
    val validTasks = tasks.filter(t => t.taskInfo.status == "SUCCESS" && t.metrics.isDefined)

    // --- Per-metric quantile summary table (min/25/50/75/max) ------------
    val summaryTable: Option[Seq[Node]] =
      if (validTasks.size == 0) {
        None
      }
      else {
        def getDistributionQuantiles(data: Seq[Double]): IndexedSeq[Double] =
          Distribution(data).get.getQuantiles()
        def getFormattedTimeQuantiles(times: Seq[Double]): Seq[Node] = {
          getDistributionQuantiles(times).map { millis =>
            <td>{UIUtils.formatDuration(millis.toLong)}</td>
          }
        }
        def getFormattedSizeQuantiles(data: Seq[Double]): Seq[Elem] = {
          getDistributionQuantiles(data).map(d => <td>{Utils.bytesToString(d.toLong)}</td>)
        }

        val deserializationTimes = validTasks.map { taskUIData: TaskUIData =>
          taskUIData.metrics.get.executorDeserializeTime.toDouble
        }
        val deserializationQuantiles =
          <td>
            <span data-toggle="tooltip" title={ToolTips.TASK_DESERIALIZATION_TIME}
                  data-placement="right">
              Task Deserialization Time
            </span>
          </td> +: getFormattedTimeQuantiles(deserializationTimes)

        val serviceTimes = validTasks.map { taskUIData: TaskUIData =>
          taskUIData.metrics.get.executorRunTime.toDouble
        }
        val serviceQuantiles = <td>Duration</td> +: getFormattedTimeQuantiles(serviceTimes)

        val gcTimes = validTasks.map { taskUIData: TaskUIData =>
          taskUIData.metrics.get.jvmGCTime.toDouble
        }
        val gcQuantiles =
          <td>
            <span data-toggle="tooltip"
                title={ToolTips.GC_TIME} data-placement="right">GC Time
            </span>
          </td> +: getFormattedTimeQuantiles(gcTimes)

        val serializationTimes = validTasks.map { taskUIData: TaskUIData =>
          taskUIData.metrics.get.resultSerializationTime.toDouble
        }
        val serializationQuantiles =
          <td>
            <span data-toggle="tooltip"
                  title={ToolTips.RESULT_SERIALIZATION_TIME} data-placement="right">
              Result Serialization Time
            </span>
          </td> +: getFormattedTimeQuantiles(serializationTimes)

        val gettingResultTimes = validTasks.map { taskUIData: TaskUIData =>
          getGettingResultTime(taskUIData.taskInfo, currentTime).toDouble
        }
        val gettingResultQuantiles =
          <td>
            <span data-toggle="tooltip"
                title={ToolTips.GETTING_RESULT_TIME} data-placement="right">
              Getting Result Time
            </span>
          </td> +:
          getFormattedTimeQuantiles(gettingResultTimes)

        val peakExecutionMemory = validTasks.map { taskUIData: TaskUIData =>
          taskUIData.metrics.get.peakExecutionMemory.toDouble
        }
        val peakExecutionMemoryQuantiles = {
          <td>
            <span data-toggle="tooltip"
                  title={ToolTips.PEAK_EXECUTION_MEMORY} data-placement="right">
              Peak Execution Memory
            </span>
          </td> +: getFormattedSizeQuantiles(peakExecutionMemory)
        }

        // The scheduler delay includes the network delay to send the task to the worker
        // machine and to send back the result (but not the time to fetch the task result,
        // if it needed to be fetched from the block manager on the worker).
        val schedulerDelays = validTasks.map { taskUIData: TaskUIData =>
          getSchedulerDelay(taskUIData.taskInfo, taskUIData.metrics.get, currentTime).toDouble
        }
        val schedulerDelayTitle = <td><span data-toggle="tooltip"
          title={ToolTips.SCHEDULER_DELAY} data-placement="right">Scheduler Delay</span></td>
        val schedulerDelayQuantiles = schedulerDelayTitle +:
          getFormattedTimeQuantiles(schedulerDelays)
        // Formats each quantile as "<size> / <record count>".
        def getFormattedSizeQuantilesWithRecords(data: Seq[Double], records: Seq[Double])
          : Seq[Elem] = {
          val recordDist = getDistributionQuantiles(records).iterator
          getDistributionQuantiles(data).map(d =>
            <td>{s"${Utils.bytesToString(d.toLong)} / ${recordDist.next().toLong}"}</td>
          )
        }

        val inputSizes = validTasks.map { taskUIData: TaskUIData =>
          taskUIData.metrics.get.inputMetrics.bytesRead.toDouble
        }

        val inputRecords = validTasks.map { taskUIData: TaskUIData =>
          taskUIData.metrics.get.inputMetrics.recordsRead.toDouble
        }

        val inputQuantiles = <td>Input Size / Records</td> +:
          getFormattedSizeQuantilesWithRecords(inputSizes, inputRecords)

        val outputSizes = validTasks.map { taskUIData: TaskUIData =>
          taskUIData.metrics.get.outputMetrics.bytesWritten.toDouble
        }

        val outputRecords = validTasks.map { taskUIData: TaskUIData =>
          taskUIData.metrics.get.outputMetrics.recordsWritten.toDouble
        }

        val outputQuantiles = <td>Output Size / Records</td> +:
          getFormattedSizeQuantilesWithRecords(outputSizes, outputRecords)

        val shuffleReadBlockedTimes = validTasks.map { taskUIData: TaskUIData =>
          taskUIData.metrics.get.shuffleReadMetrics.fetchWaitTime.toDouble
        }
        val shuffleReadBlockedQuantiles =
          <td>
            <span data-toggle="tooltip"
                  title={ToolTips.SHUFFLE_READ_BLOCKED_TIME} data-placement="right">
              Shuffle Read Blocked Time
            </span>
          </td> +:
          getFormattedTimeQuantiles(shuffleReadBlockedTimes)

        val shuffleReadTotalSizes = validTasks.map { taskUIData: TaskUIData =>
          taskUIData.metrics.get.shuffleReadMetrics.totalBytesRead.toDouble
        }
        val shuffleReadTotalRecords = validTasks.map { taskUIData: TaskUIData =>
          taskUIData.metrics.get.shuffleReadMetrics.recordsRead.toDouble
        }
        val shuffleReadTotalQuantiles =
          <td>
            <span data-toggle="tooltip"
                  title={ToolTips.SHUFFLE_READ} data-placement="right">
              Shuffle Read Size / Records
            </span>
          </td> +:
          getFormattedSizeQuantilesWithRecords(shuffleReadTotalSizes, shuffleReadTotalRecords)

        val shuffleReadRemoteSizes = validTasks.map { taskUIData: TaskUIData =>
          taskUIData.metrics.get.shuffleReadMetrics.remoteBytesRead.toDouble
        }
        val shuffleReadRemoteQuantiles =
          <td>
            <span data-toggle="tooltip"
                  title={ToolTips.SHUFFLE_READ_REMOTE_SIZE} data-placement="right">
              Shuffle Remote Reads
            </span>
          </td> +:
          getFormattedSizeQuantiles(shuffleReadRemoteSizes)

        val shuffleWriteSizes = validTasks.map { taskUIData: TaskUIData =>
          taskUIData.metrics.get.shuffleWriteMetrics.bytesWritten.toDouble
        }

        val shuffleWriteRecords = validTasks.map { taskUIData: TaskUIData =>
          taskUIData.metrics.get.shuffleWriteMetrics.recordsWritten.toDouble
        }

        val shuffleWriteQuantiles = <td>Shuffle Write Size / Records</td> +:
          getFormattedSizeQuantilesWithRecords(shuffleWriteSizes, shuffleWriteRecords)

        val memoryBytesSpilledSizes = validTasks.map { taskUIData: TaskUIData =>
          taskUIData.metrics.get.memoryBytesSpilled.toDouble
        }
        val memoryBytesSpilledQuantiles = <td>Shuffle spill (memory)</td> +:
          getFormattedSizeQuantiles(memoryBytesSpilledSizes)

        val diskBytesSpilledSizes = validTasks.map { taskUIData: TaskUIData =>
          taskUIData.metrics.get.diskBytesSpilled.toDouble
        }
        val diskBytesSpilledQuantiles = <td>Shuffle spill (disk)</td> +:
          getFormattedSizeQuantiles(diskBytesSpilledSizes)

        // NOTE(review): some rows below are deliberately juxtaposed without
        // a comma — adjacent XML literals merge into one NodeBuffer, so the
        // deserialization+GC rows (and the three shuffle-read rows) each
        // count as a single Seq element here.
        val listings: Seq[Seq[Node]] = Seq(
          <tr>{serviceQuantiles}</tr>,
          <tr class={TaskDetailsClassNames.SCHEDULER_DELAY}>{schedulerDelayQuantiles}</tr>,
          <tr class={TaskDetailsClassNames.TASK_DESERIALIZATION_TIME}>
            {deserializationQuantiles}
          </tr>
          <tr>{gcQuantiles}</tr>,
          <tr class={TaskDetailsClassNames.RESULT_SERIALIZATION_TIME}>
            {serializationQuantiles}
          </tr>,
          <tr class={TaskDetailsClassNames.GETTING_RESULT_TIME}>{gettingResultQuantiles}</tr>,
          if (displayPeakExecutionMemory) {
            <tr class={TaskDetailsClassNames.PEAK_EXECUTION_MEMORY}>
              {peakExecutionMemoryQuantiles}
            </tr>
          } else {
            Nil
          },
          if (stageData.hasInput) <tr>{inputQuantiles}</tr> else Nil,
          if (stageData.hasOutput) <tr>{outputQuantiles}</tr> else Nil,
          if (stageData.hasShuffleRead) {
            <tr class={TaskDetailsClassNames.SHUFFLE_READ_BLOCKED_TIME}>
              {shuffleReadBlockedQuantiles}
            </tr>
            <tr>{shuffleReadTotalQuantiles}</tr>
            <tr class={TaskDetailsClassNames.SHUFFLE_READ_REMOTE_SIZE}>
              {shuffleReadRemoteQuantiles}
            </tr>
          } else {
            Nil
          },
          if (stageData.hasShuffleWrite) <tr>{shuffleWriteQuantiles}</tr> else Nil,
          if (stageData.hasBytesSpilled) <tr>{memoryBytesSpilledQuantiles}</tr> else Nil,
          if (stageData.hasBytesSpilled) <tr>{diskBytesSpilledQuantiles}</tr> else Nil)

        val quantileHeaders = Seq("Metric", "Min", "25th percentile",
          "Median", "75th percentile", "Max")
        // The summary table does not use CSS to stripe rows, which doesn't work with hidden
        // rows (instead, JavaScript in table.js is used to stripe the non-hidden rows).
        Some(UIUtils.listingTable(
          quantileHeaders,
          identity[Seq[Node]],
          listings,
          fixedWidth = true,
          id = Some("task-summary-table"),
          stripeRowsWithCss = false))
      }

    val executorTable = new ExecutorTable(stageId, stageAttemptId, parent)

    val maybeAccumulableTable: Seq[Node] =
      if (hasAccumulators) { <h4>Accumulators</h4> ++ accumulableTable } else Seq()

    // --- Assemble the final page ----------------------------------------
    val content =
      summary ++
      dagViz ++
      showAdditionalMetrics ++
      makeTimeline(
        // Only show the tasks in the table
        stageData.taskData.values.toSeq.filter(t => taskIdsInPage.contains(t.taskInfo.taskId)),
        currentTime) ++
      <h4>Summary Metrics for {numCompleted} Completed Tasks</h4> ++
      <div>{summaryTable.getOrElse("No tasks have reported metrics yet.")}</div> ++
      <h4>Aggregated Metrics by Executor</h4> ++ executorTable.toNodeSeq ++
      maybeAccumulableTable ++
      <h4 id="tasks-section">Tasks ({totalTasksNumStr})</h4> ++
      taskTableHTML ++ jsForScrollingDownToTaskTable
    UIUtils.headerSparkPage(stageHeader, content, parent, showVisualization = true)
  }
}
/**
 * Builds the collapsible "Event Timeline" widget: one row per executor, one SVG bar per task.
 * Each bar's colored segments show the proportional breakdown of the task's wall-clock time
 * (scheduler delay, deserialization, shuffle read, compute, shuffle write, result
 * serialization, getting result). At most MAX_TIMELINE_TASKS tasks are drawn.
 *
 * @param tasks       tasks to visualize (pre-filtered by the caller)
 * @param currentTime wall-clock "now"; used as the finish time of still-running tasks
 */
def makeTimeline(tasks: Seq[TaskUIData], currentTime: Long): Seq[Node] = {
  val executorsSet = new HashSet[(String, String)]
  // Overall time bounds across all drawn tasks; passed to the JS timeline for its axis.
  var minLaunchTime = Long.MaxValue
  var maxFinishTime = Long.MinValue
  // JSON-ish array of timeline objects, built as a string and handed to JavaScript verbatim.
  val executorsArrayStr =
    tasks.sortBy(-_.taskInfo.launchTime).take(MAX_TIMELINE_TASKS).map { taskUIData =>
      val taskInfo = taskUIData.taskInfo
      val executorId = taskInfo.executorId
      val host = taskInfo.host
      executorsSet += ((executorId, host))
      val launchTime = taskInfo.launchTime
      // A running task has no finish time yet; treat "now" as its end.
      val finishTime = if (!taskInfo.running) taskInfo.finishTime else currentTime
      val totalExecutionTime = finishTime - launchTime
      minLaunchTime = launchTime.min(minLaunchTime)
      maxFinishTime = finishTime.max(maxFinishTime)
      // Converts a duration to a percentage of this task's total wall-clock time.
      def toProportion(time: Long) = time.toDouble / totalExecutionTime * 100
      val metricsOpt = taskUIData.metrics
      val shuffleReadTime =
        metricsOpt.map(_.shuffleReadMetrics.fetchWaitTime).getOrElse(0L)
      val shuffleReadTimeProportion = toProportion(shuffleReadTime)
      // writeTime is recorded in nanoseconds; / 1e6 converts to milliseconds.
      val shuffleWriteTime =
        (metricsOpt.map(_.shuffleWriteMetrics.writeTime).getOrElse(0L) / 1e6).toLong
      val shuffleWriteTimeProportion = toProportion(shuffleWriteTime)
      val serializationTime = metricsOpt.map(_.resultSerializationTime).getOrElse(0L)
      val serializationTimeProportion = toProportion(serializationTime)
      val deserializationTime = metricsOpt.map(_.executorDeserializeTime).getOrElse(0L)
      val deserializationTimeProportion = toProportion(deserializationTime)
      val gettingResultTime = getGettingResultTime(taskUIData.taskInfo, currentTime)
      val gettingResultTimeProportion = toProportion(gettingResultTime)
      val schedulerDelay =
        metricsOpt.map(getSchedulerDelay(taskInfo, _, currentTime)).getOrElse(0L)
      val schedulerDelayProportion = toProportion(schedulerDelay)

      val executorOverhead = serializationTime + deserializationTime
      val executorRunTime = if (taskInfo.running) {
        totalExecutionTime - executorOverhead - gettingResultTime
      } else {
        metricsOpt.map(_.executorRunTime).getOrElse(
          totalExecutionTime - executorOverhead - gettingResultTime)
      }
      val executorComputingTime = executorRunTime - shuffleReadTime - shuffleWriteTime
      // Computed as the remainder so that all segment percentages sum to exactly 100.
      val executorComputingTimeProportion =
        math.max(100 - schedulerDelayProportion - shuffleReadTimeProportion -
          shuffleWriteTimeProportion - serializationTimeProportion -
          deserializationTimeProportion - gettingResultTimeProportion, 0)

      // Running x-offsets: each SVG segment starts where the previous one ends.
      val schedulerDelayProportionPos = 0
      val deserializationTimeProportionPos =
        schedulerDelayProportionPos + schedulerDelayProportion
      val shuffleReadTimeProportionPos =
        deserializationTimeProportionPos + deserializationTimeProportion
      val executorRuntimeProportionPos =
        shuffleReadTimeProportionPos + shuffleReadTimeProportion
      val shuffleWriteTimeProportionPos =
        executorRuntimeProportionPos + executorComputingTimeProportion
      val serializationTimeProportionPos =
        shuffleWriteTimeProportionPos + shuffleWriteTimeProportion
      val gettingResultTimeProportionPos =
        serializationTimeProportionPos + serializationTimeProportion

      val index = taskInfo.index
      val attempt = taskInfo.attemptNumber

      val svgTag =
        if (totalExecutionTime == 0) {
          // SPARK-8705: Avoid invalid attribute error in JavaScript if execution time is 0
          """<svg class="task-assignment-timeline-duration-bar"></svg>"""
        } else {
          s"""<svg class="task-assignment-timeline-duration-bar">
             |<rect class="scheduler-delay-proportion"
             |x="$schedulerDelayProportionPos%" y="0px" height="26px"
             |width="$schedulerDelayProportion%"></rect>
             |<rect class="deserialization-time-proportion"
             |x="$deserializationTimeProportionPos%" y="0px" height="26px"
             |width="$deserializationTimeProportion%"></rect>
             |<rect class="shuffle-read-time-proportion"
             |x="$shuffleReadTimeProportionPos%" y="0px" height="26px"
             |width="$shuffleReadTimeProportion%"></rect>
             |<rect class="executor-runtime-proportion"
             |x="$executorRuntimeProportionPos%" y="0px" height="26px"
             |width="$executorComputingTimeProportion%"></rect>
             |<rect class="shuffle-write-time-proportion"
             |x="$shuffleWriteTimeProportionPos%" y="0px" height="26px"
             |width="$shuffleWriteTimeProportion%"></rect>
             |<rect class="serialization-time-proportion"
             |x="$serializationTimeProportionPos%" y="0px" height="26px"
             |width="$serializationTimeProportion%"></rect>
             |<rect class="getting-result-time-proportion"
             |x="$gettingResultTimeProportionPos%" y="0px" height="26px"
             |width="$gettingResultTimeProportion%"></rect></svg>""".stripMargin
        }
      // Newlines are collapsed to spaces at the end so the object is a single JS literal.
      val timelineObject =
        s"""
           |{
           |'className': 'task task-assignment-timeline-object',
           |'group': '$executorId',
           |'content': '<div class="task-assignment-timeline-content"
           |data-toggle="tooltip" data-placement="top"
           |data-html="true" data-container="body"
           |data-title="${s"Task " + index + " (attempt " + attempt + ")"}<br>
           |Status: ${taskInfo.status}<br>
           |Launch Time: ${UIUtils.formatDate(new Date(launchTime))}
           |${
             if (!taskInfo.running) {
               s"""<br>Finish Time: ${UIUtils.formatDate(new Date(finishTime))}"""
             } else {
               ""
             }
           }
           |<br>Scheduler Delay: $schedulerDelay ms
           |<br>Task Deserialization Time: ${UIUtils.formatDuration(deserializationTime)}
           |<br>Shuffle Read Time: ${UIUtils.formatDuration(shuffleReadTime)}
           |<br>Executor Computing Time: ${UIUtils.formatDuration(executorComputingTime)}
           |<br>Shuffle Write Time: ${UIUtils.formatDuration(shuffleWriteTime)}
           |<br>Result Serialization Time: ${UIUtils.formatDuration(serializationTime)}
           |<br>Getting Result Time: ${UIUtils.formatDuration(gettingResultTime)}">
           |$svgTag',
           |'start': new Date($launchTime),
           |'end': new Date($finishTime)
           |}
           |""".stripMargin.replaceAll("""[\r\n]+""", " ")
      timelineObject
    }.mkString("[", ",", "]")
  // One timeline group per (executorId, host) pair seen above.
  val groupArrayStr = executorsSet.map {
    case (executorId, host) =>
      s"""
        {
          'id': '$executorId',
          'content': '$executorId / $host',
        }
      """
  }.mkString("[", ",", "]")

  <span class="expand-task-assignment-timeline">
    <span class="expand-task-assignment-timeline-arrow arrow-closed"></span>
    <a>Event Timeline</a>
  </span> ++
  <div id="task-assignment-timeline" class="collapsed">
    {
      if (MAX_TIMELINE_TASKS < tasks.size) {
        <strong>
          This stage has more than the maximum number of tasks that can be shown in the
          visualization! Only the most recent {MAX_TIMELINE_TASKS} tasks
          (of {tasks.size} total) are shown.
        </strong>
      } else {
        Seq.empty
      }
    }
    <div class="control-panel">
      <div id="task-assignment-timeline-zoom-lock">
        <input type="checkbox"></input>
        <span>Enable zooming</span>
      </div>
    </div>
    {TIMELINE_LEGEND}
  </div> ++
  <script type="text/javascript">
    {Unparsed(s"drawTaskAssignmentTimeline(" +
    s"$groupArrayStr, $executorsArrayStr, $minLaunchTime, $maxFinishTime, " +
    s"${UIUtils.getTimeZoneOffset()})")}
  </script>
}
}
private[ui] object StagePage {

  /**
   * Time spent fetching the task result back to the driver, in ms.
   * Returns 0 when the task never entered the getting-result phase.
   */
  private[ui] def getGettingResultTime(info: TaskInfo, currentTime: Long): Long = {
    if (!info.gettingResult) {
      0L
    } else if (info.finished) {
      info.finishTime - info.gettingResultTime
    } else {
      // Still fetching the result: measure up to "now".
      currentTime - info.gettingResultTime
    }
  }

  /**
   * Scheduler delay for a finished task: total wall-clock time minus run time,
   * (de)serialization overhead and result-fetch time, clamped at zero.
   */
  private[ui] def getSchedulerDelay(
      info: TaskInfo, metrics: TaskMetricsUIData, currentTime: Long): Long = {
    if (!info.finished) {
      // Metrics such as executorRunTime are not available while the task is running.
      0L
    } else {
      val totalExecutionTime = info.finishTime - info.launchTime
      val executorOverhead = metrics.executorDeserializeTime + metrics.resultSerializationTime
      val delay = totalExecutionTime - metrics.executorRunTime - executorOverhead -
        getGettingResultTime(info, currentTime)
      math.max(0, delay)
    }
  }
}
/** Input-metrics cell of one task row: numeric sort key plus its display string. */
private[ui] case class TaskTableRowInputData(inputSortable: Long, inputReadable: String)
/** Output-metrics cell of one task row: numeric sort key plus its display string. */
private[ui] case class TaskTableRowOutputData(outputSortable: Long, outputReadable: String)
/**
 * Shuffle-read cells of one task row: (sort key, display string) pairs for
 * fetch-blocked time, total bytes read, and remote bytes read.
 */
private[ui] case class TaskTableRowShuffleReadData(
  shuffleReadBlockedTimeSortable: Long,
  shuffleReadBlockedTimeReadable: String,
  shuffleReadSortable: Long,
  shuffleReadReadable: String,
  shuffleReadRemoteSortable: Long,
  shuffleReadRemoteReadable: String)
/**
 * Shuffle-write cells of one task row: (sort key, display string) pairs for
 * write time and bytes written.
 */
private[ui] case class TaskTableRowShuffleWriteData(
  writeTimeSortable: Long,
  writeTimeReadable: String,
  shuffleWriteSortable: Long,
  shuffleWriteReadable: String)
/**
 * Spill cells of one task row: (sort key, display string) pairs for
 * memory and disk bytes spilled.
 */
private[ui] case class TaskTableRowBytesSpilledData(
  memoryBytesSpilledSortable: Long,
  memoryBytesSpilledReadable: String,
  diskBytesSpilledSortable: Long,
  diskBytesSpilledReadable: String)
/**
 * Contains all data needed for sorting and generating HTML for one task row.
 * Using this instead of TaskUIData avoids re-formatting cell contents every time
 * the rows are compared during sorting.
 */
private[ui] class TaskTableRowData(
  val index: Int,
  val taskId: Long,
  val attempt: Int,
  val speculative: Boolean,
  val status: String,
  val taskLocality: String,
  val executorIdAndHost: String,
  val launchTime: Long,
  val duration: Long,
  val formatDuration: String,
  val schedulerDelay: Long,
  val taskDeserializationTime: Long,
  val gcTime: Long,
  val serializationTime: Long,
  val gettingResultTime: Long,
  val peakExecutionMemoryUsed: Long,
  val accumulators: Option[String], // HTML
  // The following are None when the corresponding metric is absent for the whole stage,
  // so the table can omit those columns entirely.
  val input: Option[TaskTableRowInputData],
  val output: Option[TaskTableRowOutputData],
  val shuffleRead: Option[TaskTableRowShuffleReadData],
  val shuffleWrite: Option[TaskTableRowShuffleWriteData],
  val bytesSpilled: Option[TaskTableRowBytesSpilledData],
  val error: String)
/**
 * Backing data source for the paged task table.
 *
 * Eagerly converts every TaskUIData into a TaskTableRowData (the final, formatted cell
 * contents) and sorts exactly once, so paging and sorting never re-format rows.
 */
private[ui] class TaskDataSource(
    tasks: Seq[TaskUIData],
    hasAccumulators: Boolean,
    hasInput: Boolean,
    hasOutput: Boolean,
    hasShuffleRead: Boolean,
    hasShuffleWrite: Boolean,
    hasBytesSpilled: Boolean,
    currentTime: Long,
    pageSize: Int,
    sortColumn: String,
    desc: Boolean) extends PagedDataSource[TaskTableRowData](pageSize) {
  import StagePage._

  // Convert TaskUIData to TaskTableRowData which contains the final contents to show in the
  // table so that we can avoid creating duplicate contents during sorting the data.
  private val data = tasks.map(taskRow).sorted(ordering(sortColumn, desc))

  // Task ids of the most recently sliced page; null until sliceData has been called.
  private var _slicedTaskIds: Set[Long] = null

  override def dataSize: Int = data.size

  override def sliceData(from: Int, to: Int): Seq[TaskTableRowData] = {
    val r = data.slice(from, to)
    _slicedTaskIds = r.map(_.taskId).toSet
    r
  }

  def slicedTaskIds: Set[Long] = _slicedTaskIds

  /** Formats one task into the row model consumed by TaskPagedTable. */
  private def taskRow(taskData: TaskUIData): TaskTableRowData = {
    val info = taskData.taskInfo
    val metrics = taskData.metrics
    // NOTE(review): the 1L fallback for a finished task without metrics looks deliberate
    // (keeps the sort key non-zero) — confirm before changing.
    val duration = if (info.status == "RUNNING") info.timeRunning(currentTime)
      else metrics.map(_.executorRunTime).getOrElse(1L)
    val formatDuration = if (info.status == "RUNNING") UIUtils.formatDuration(duration)
      else metrics.map(m => UIUtils.formatDuration(m.executorRunTime)).getOrElse("")
    val schedulerDelay = metrics.map(getSchedulerDelay(info, _, currentTime)).getOrElse(0L)
    val gcTime = metrics.map(_.jvmGCTime).getOrElse(0L)
    val taskDeserializationTime = metrics.map(_.executorDeserializeTime).getOrElse(0L)
    val serializationTime = metrics.map(_.resultSerializationTime).getOrElse(0L)
    val gettingResultTime = getGettingResultTime(info, currentTime)

    // Only user-visible (non-internal) accumulators with both a name and an update,
    // HTML-escaped because they are later emitted with Unparsed.
    val externalAccumulableReadable = info.accumulables
      .filterNot(_.internal)
      .flatMap { a =>
        (a.name, a.update) match {
          case (Some(name), Some(update)) => Some(StringEscapeUtils.escapeHtml4(s"$name: $update"))
          case _ => None
        }
      }
    val peakExecutionMemoryUsed = metrics.map(_.peakExecutionMemory).getOrElse(0L)

    val maybeInput = metrics.map(_.inputMetrics)
    val inputSortable = maybeInput.map(_.bytesRead).getOrElse(0L)
    val inputReadable = maybeInput
      .map(m => s"${Utils.bytesToString(m.bytesRead)}")
      .getOrElse("")
    val inputRecords = maybeInput.map(_.recordsRead.toString).getOrElse("")

    val maybeOutput = metrics.map(_.outputMetrics)
    val outputSortable = maybeOutput.map(_.bytesWritten).getOrElse(0L)
    val outputReadable = maybeOutput
      .map(m => s"${Utils.bytesToString(m.bytesWritten)}")
      .getOrElse("")
    val outputRecords = maybeOutput.map(_.recordsWritten.toString).getOrElse("")

    val maybeShuffleRead = metrics.map(_.shuffleReadMetrics)
    val shuffleReadBlockedTimeSortable = maybeShuffleRead.map(_.fetchWaitTime).getOrElse(0L)
    val shuffleReadBlockedTimeReadable =
      maybeShuffleRead.map(ms => UIUtils.formatDuration(ms.fetchWaitTime)).getOrElse("")
    val totalShuffleBytes = maybeShuffleRead.map(_.totalBytesRead)
    val shuffleReadSortable = totalShuffleBytes.getOrElse(0L)
    val shuffleReadReadable = totalShuffleBytes.map(Utils.bytesToString).getOrElse("")
    val shuffleReadRecords = maybeShuffleRead.map(_.recordsRead.toString).getOrElse("")
    val remoteShuffleBytes = maybeShuffleRead.map(_.remoteBytesRead)
    val shuffleReadRemoteSortable = remoteShuffleBytes.getOrElse(0L)
    val shuffleReadRemoteReadable = remoteShuffleBytes.map(Utils.bytesToString).getOrElse("")

    val maybeShuffleWrite = metrics.map(_.shuffleWriteMetrics)
    val shuffleWriteSortable = maybeShuffleWrite.map(_.bytesWritten).getOrElse(0L)
    val shuffleWriteReadable = maybeShuffleWrite
      .map(m => s"${Utils.bytesToString(m.bytesWritten)}").getOrElse("")
    val shuffleWriteRecords = maybeShuffleWrite
      .map(_.recordsWritten.toString).getOrElse("")

    // writeTime is in nanoseconds; convert to ms for display and hide zero durations.
    val maybeWriteTime = metrics.map(_.shuffleWriteMetrics.writeTime)
    val writeTimeSortable = maybeWriteTime.getOrElse(0L)
    val writeTimeReadable = maybeWriteTime.map(t => t / (1000 * 1000)).map { ms =>
      if (ms == 0) "" else UIUtils.formatDuration(ms)
    }.getOrElse("")

    val maybeMemoryBytesSpilled = metrics.map(_.memoryBytesSpilled)
    val memoryBytesSpilledSortable = maybeMemoryBytesSpilled.getOrElse(0L)
    val memoryBytesSpilledReadable =
      maybeMemoryBytesSpilled.map(Utils.bytesToString).getOrElse("")
    val maybeDiskBytesSpilled = metrics.map(_.diskBytesSpilled)
    val diskBytesSpilledSortable = maybeDiskBytesSpilled.getOrElse(0L)
    val diskBytesSpilledReadable = maybeDiskBytesSpilled.map(Utils.bytesToString).getOrElse("")

    // Optional column groups are populated only when the stage has the metric at all.
    val input =
      if (hasInput) {
        Some(TaskTableRowInputData(inputSortable, s"$inputReadable / $inputRecords"))
      } else {
        None
      }
    val output =
      if (hasOutput) {
        Some(TaskTableRowOutputData(outputSortable, s"$outputReadable / $outputRecords"))
      } else {
        None
      }
    val shuffleRead =
      if (hasShuffleRead) {
        Some(TaskTableRowShuffleReadData(
          shuffleReadBlockedTimeSortable,
          shuffleReadBlockedTimeReadable,
          shuffleReadSortable,
          s"$shuffleReadReadable / $shuffleReadRecords",
          shuffleReadRemoteSortable,
          shuffleReadRemoteReadable
        ))
      } else {
        None
      }
    val shuffleWrite =
      if (hasShuffleWrite) {
        Some(TaskTableRowShuffleWriteData(
          writeTimeSortable,
          writeTimeReadable,
          shuffleWriteSortable,
          s"$shuffleWriteReadable / $shuffleWriteRecords"
        ))
      } else {
        None
      }
    val bytesSpilled =
      if (hasBytesSpilled) {
        Some(TaskTableRowBytesSpilledData(
          memoryBytesSpilledSortable,
          memoryBytesSpilledReadable,
          diskBytesSpilledSortable,
          diskBytesSpilledReadable
        ))
      } else {
        None
      }
    new TaskTableRowData(
      info.index,
      info.taskId,
      info.attemptNumber,
      info.speculative,
      info.status,
      info.taskLocality.toString,
      s"${info.executorId} / ${info.host}",
      info.launchTime,
      duration,
      formatDuration,
      schedulerDelay,
      taskDeserializationTime,
      gcTime,
      serializationTime,
      gettingResultTime,
      peakExecutionMemoryUsed,
      if (hasAccumulators) Some(externalAccumulableReadable.mkString("<br/>")) else None,
      input,
      output,
      shuffleRead,
      shuffleWrite,
      bytesSpilled,
      taskData.errorMessage.getOrElse(""))
  }

  /**
   * Return Ordering according to sortColumn and desc.
   *
   * @throws IllegalArgumentException if sortColumn is unknown, or refers to a metric
   *                                  column that this stage does not have
   */
  private def ordering(sortColumn: String, desc: Boolean): Ordering[TaskTableRowData] = {
    // Ordering.by replaces the previous hand-written anonymous Ordering instances;
    // comparison behavior (including the exception messages) is unchanged.
    val ordering: Ordering[TaskTableRowData] = sortColumn match {
      case "Index" => Ordering.by(_.index)
      case "ID" => Ordering.by(_.taskId)
      case "Attempt" => Ordering.by(_.attempt)
      case "Status" => Ordering.by(_.status)
      case "Locality Level" => Ordering.by(_.taskLocality)
      case "Executor ID / Host" => Ordering.by(_.executorIdAndHost)
      case "Launch Time" => Ordering.by(_.launchTime)
      case "Duration" => Ordering.by(_.duration)
      case "Scheduler Delay" => Ordering.by(_.schedulerDelay)
      case "Task Deserialization Time" => Ordering.by(_.taskDeserializationTime)
      case "GC Time" => Ordering.by(_.gcTime)
      case "Result Serialization Time" => Ordering.by(_.serializationTime)
      case "Getting Result Time" => Ordering.by(_.gettingResultTime)
      case "Peak Execution Memory" => Ordering.by(_.peakExecutionMemoryUsed)
      case "Accumulators" =>
        if (hasAccumulators) {
          Ordering.by(_.accumulators.get)
        } else {
          throw new IllegalArgumentException(
            "Cannot sort by Accumulators because of no accumulators")
        }
      case "Input Size / Records" =>
        if (hasInput) {
          Ordering.by(_.input.get.inputSortable)
        } else {
          throw new IllegalArgumentException(
            "Cannot sort by Input Size / Records because of no inputs")
        }
      case "Output Size / Records" =>
        if (hasOutput) {
          Ordering.by(_.output.get.outputSortable)
        } else {
          throw new IllegalArgumentException(
            "Cannot sort by Output Size / Records because of no outputs")
        }
      // ShuffleRead
      case "Shuffle Read Blocked Time" =>
        if (hasShuffleRead) {
          Ordering.by(_.shuffleRead.get.shuffleReadBlockedTimeSortable)
        } else {
          throw new IllegalArgumentException(
            "Cannot sort by Shuffle Read Blocked Time because of no shuffle reads")
        }
      case "Shuffle Read Size / Records" =>
        if (hasShuffleRead) {
          Ordering.by(_.shuffleRead.get.shuffleReadSortable)
        } else {
          throw new IllegalArgumentException(
            "Cannot sort by Shuffle Read Size / Records because of no shuffle reads")
        }
      case "Shuffle Remote Reads" =>
        if (hasShuffleRead) {
          Ordering.by(_.shuffleRead.get.shuffleReadRemoteSortable)
        } else {
          throw new IllegalArgumentException(
            "Cannot sort by Shuffle Remote Reads because of no shuffle reads")
        }
      // ShuffleWrite
      case "Write Time" =>
        if (hasShuffleWrite) {
          Ordering.by(_.shuffleWrite.get.writeTimeSortable)
        } else {
          throw new IllegalArgumentException(
            "Cannot sort by Write Time because of no shuffle writes")
        }
      case "Shuffle Write Size / Records" =>
        if (hasShuffleWrite) {
          Ordering.by(_.shuffleWrite.get.shuffleWriteSortable)
        } else {
          throw new IllegalArgumentException(
            "Cannot sort by Shuffle Write Size / Records because of no shuffle writes")
        }
      // BytesSpilled
      case "Shuffle Spill (Memory)" =>
        if (hasBytesSpilled) {
          Ordering.by(_.bytesSpilled.get.memoryBytesSpilledSortable)
        } else {
          throw new IllegalArgumentException(
            "Cannot sort by Shuffle Spill (Memory) because of no spills")
        }
      case "Shuffle Spill (Disk)" =>
        if (hasBytesSpilled) {
          Ordering.by(_.bytesSpilled.get.diskBytesSpilledSortable)
        } else {
          throw new IllegalArgumentException(
            "Cannot sort by Shuffle Spill (Disk) because of no spills")
        }
      case "Errors" => Ordering.by(_.error)
      case unknownColumn => throw new IllegalArgumentException(s"Unknown column: $unknownColumn")
    }
    if (desc) {
      ordering.reverse
    } else {
      ordering
    }
  }
}
/**
 * Paged HTML table of tasks for one stage. Sorting and paging state travel through the
 * URL query string (task.sort / task.desc / task.page / task.pageSize).
 */
private[ui] class TaskPagedTable(
    conf: SparkConf,
    basePath: String,
    data: Seq[TaskUIData],
    hasAccumulators: Boolean,
    hasInput: Boolean,
    hasOutput: Boolean,
    hasShuffleRead: Boolean,
    hasShuffleWrite: Boolean,
    hasBytesSpilled: Boolean,
    currentTime: Long,
    pageSize: Int,
    sortColumn: String,
    desc: Boolean) extends PagedTable[TaskTableRowData] {

  // We only track peak memory used for unsafe operators
  private val displayPeakExecutionMemory = conf.getBoolean("spark.sql.unsafe.enabled", true)

  override def tableId: String = "task-table"

  override def tableCssClass: String =
    "table table-bordered table-condensed table-striped table-head-clickable"

  override def pageSizeFormField: String = "task.pageSize"

  override def prevPageSizeFormField: String = "task.prevPageSize"

  override def pageNumberFormField: String = "task.page"

  override val dataSource: TaskDataSource = new TaskDataSource(
    data,
    hasAccumulators,
    hasInput,
    hasOutput,
    hasShuffleRead,
    hasShuffleWrite,
    hasBytesSpilled,
    currentTime,
    pageSize,
    sortColumn,
    desc)

  /** URL for page `page`, preserving the current sort column/direction and page size. */
  override def pageLink(page: Int): String = {
    val encodedSortColumn = URLEncoder.encode(sortColumn, "UTF-8")
    basePath +
      s"&$pageNumberFormField=$page" +
      s"&task.sort=$encodedSortColumn" +
      s"&task.desc=$desc" +
      s"&$pageSizeFormField=$pageSize"
  }

  /** Form action for the "go to page" button; the form itself supplies the page number. */
  override def goButtonFormPath: String = {
    val encodedSortColumn = URLEncoder.encode(sortColumn, "UTF-8")
    s"$basePath&task.sort=$encodedSortColumn&task.desc=$desc"
  }

  /**
   * Table header row. Optional metric columns appear only when the stage has the metric;
   * each header is a link that sorts by that column (clicking the current sort column
   * flips the direction).
   */
  def headers: Seq[Node] = {
    // (column title, CSS class) pairs; the class hides "additional metrics" columns by default.
    val taskHeadersAndCssClasses: Seq[(String, String)] =
      Seq(
        ("Index", ""), ("ID", ""), ("Attempt", ""), ("Status", ""), ("Locality Level", ""),
        ("Executor ID / Host", ""), ("Launch Time", ""), ("Duration", ""),
        ("Scheduler Delay", TaskDetailsClassNames.SCHEDULER_DELAY),
        ("Task Deserialization Time", TaskDetailsClassNames.TASK_DESERIALIZATION_TIME),
        ("GC Time", ""),
        ("Result Serialization Time", TaskDetailsClassNames.RESULT_SERIALIZATION_TIME),
        ("Getting Result Time", TaskDetailsClassNames.GETTING_RESULT_TIME)) ++
        {
          if (displayPeakExecutionMemory) {
            Seq(("Peak Execution Memory", TaskDetailsClassNames.PEAK_EXECUTION_MEMORY))
          } else {
            Nil
          }
        } ++
        {if (hasAccumulators) Seq(("Accumulators", "")) else Nil} ++
        {if (hasInput) Seq(("Input Size / Records", "")) else Nil} ++
        {if (hasOutput) Seq(("Output Size / Records", "")) else Nil} ++
        {if (hasShuffleRead) {
          Seq(("Shuffle Read Blocked Time", TaskDetailsClassNames.SHUFFLE_READ_BLOCKED_TIME),
            ("Shuffle Read Size / Records", ""),
            ("Shuffle Remote Reads", TaskDetailsClassNames.SHUFFLE_READ_REMOTE_SIZE))
        } else {
          Nil
        }} ++
        {if (hasShuffleWrite) {
          Seq(("Write Time", ""), ("Shuffle Write Size / Records", ""))
        } else {
          Nil
        }} ++
        {if (hasBytesSpilled) {
          Seq(("Shuffle Spill (Memory)", ""), ("Shuffle Spill (Disk)", ""))
        } else {
          Nil
        }} ++
        Seq(("Errors", ""))

    // Reject sort requests for columns that are not visible in this table.
    if (!taskHeadersAndCssClasses.map(_._1).contains(sortColumn)) {
      throw new IllegalArgumentException(s"Unknown column: $sortColumn")
    }

    val headerRow: Seq[Node] = {
      taskHeadersAndCssClasses.map { case (header, cssClass) =>
        if (header == sortColumn) {
          // Current sort column: link flips the direction and shows an arrow.
          val headerLink = Unparsed(
            basePath +
              s"&task.sort=${URLEncoder.encode(header, "UTF-8")}" +
              s"&task.desc=${!desc}" +
              s"&task.pageSize=$pageSize")
          val arrow = if (desc) "&#x25BE;" else "&#x25B4;" // UP or DOWN
          <th class={cssClass}>
            <a href={headerLink}>
              {header}
              <span>&nbsp;{Unparsed(arrow)}</span>
            </a>
          </th>
        } else {
          val headerLink = Unparsed(
            basePath +
              s"&task.sort=${URLEncoder.encode(header, "UTF-8")}" +
              s"&task.pageSize=$pageSize")
          <th class={cssClass}>
            <a href={headerLink}>
              {header}
            </a>
          </th>
        }
      }
    }
    <thead>{headerRow}</thead>
  }

  /** Renders one table row; optional cells mirror the optional headers above. */
  def row(task: TaskTableRowData): Seq[Node] = {
    <tr>
      <td>{task.index}</td>
      <td>{task.taskId}</td>
      <td>{if (task.speculative) s"${task.attempt} (speculative)" else task.attempt.toString}</td>
      <td>{task.status}</td>
      <td>{task.taskLocality}</td>
      <td>{task.executorIdAndHost}</td>
      <td>{UIUtils.formatDate(new Date(task.launchTime))}</td>
      <td>{task.formatDuration}</td>
      <td class={TaskDetailsClassNames.SCHEDULER_DELAY}>
        {UIUtils.formatDuration(task.schedulerDelay)}
      </td>
      <td class={TaskDetailsClassNames.TASK_DESERIALIZATION_TIME}>
        {UIUtils.formatDuration(task.taskDeserializationTime)}
      </td>
      <td>
        {if (task.gcTime > 0) UIUtils.formatDuration(task.gcTime) else ""}
      </td>
      <td class={TaskDetailsClassNames.RESULT_SERIALIZATION_TIME}>
        {UIUtils.formatDuration(task.serializationTime)}
      </td>
      <td class={TaskDetailsClassNames.GETTING_RESULT_TIME}>
        {UIUtils.formatDuration(task.gettingResultTime)}
      </td>
      {if (displayPeakExecutionMemory) {
        <td class={TaskDetailsClassNames.PEAK_EXECUTION_MEMORY}>
          {Utils.bytesToString(task.peakExecutionMemoryUsed)}
        </td>
      }}
      {if (task.accumulators.nonEmpty) {
        <td>{Unparsed(task.accumulators.get)}</td>
      }}
      {if (task.input.nonEmpty) {
        <td>{task.input.get.inputReadable}</td>
      }}
      {if (task.output.nonEmpty) {
        <td>{task.output.get.outputReadable}</td>
      }}
      {if (task.shuffleRead.nonEmpty) {
        <td class={TaskDetailsClassNames.SHUFFLE_READ_BLOCKED_TIME}>
          {task.shuffleRead.get.shuffleReadBlockedTimeReadable}
        </td>
        <td>{task.shuffleRead.get.shuffleReadReadable}</td>
        <td class={TaskDetailsClassNames.SHUFFLE_READ_REMOTE_SIZE}>
          {task.shuffleRead.get.shuffleReadRemoteReadable}
        </td>
      }}
      {if (task.shuffleWrite.nonEmpty) {
        <td>{task.shuffleWrite.get.writeTimeReadable}</td>
        <td>{task.shuffleWrite.get.shuffleWriteReadable}</td>
      }}
      {if (task.bytesSpilled.nonEmpty) {
        <td>{task.bytesSpilled.get.memoryBytesSpilledReadable}</td>
        <td>{task.bytesSpilled.get.diskBytesSpilledReadable}</td>
      }}
      {errorMessageCell(task.error)}
    </tr>
  }

  /**
   * Cell for the error column: shows the first line of a multi-line error with a
   * "+details" toggle that expands the full stack trace.
   */
  private def errorMessageCell(error: String): Seq[Node] = {
    val isMultiline = error.indexOf('\n') >= 0
    // Display the first line by default
    val errorSummary = StringEscapeUtils.escapeHtml4(
      if (isMultiline) {
        error.substring(0, error.indexOf('\n'))
      } else {
        error
      })
    val details = if (isMultiline) {
      // scalastyle:off
      <span onclick="this.parentNode.querySelector('.stacktrace-details').classList.toggle('collapsed')"
            class="expand-details">
        +details
      </span> ++
      <div class="stacktrace-details collapsed">
        <pre>{error}</pre>
      </div>
      // scalastyle:on
    } else {
      ""
    }
    <td>{errorSummary}{details}</td>
  }
}
| gioenn/xSpark | core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala | Scala | apache-2.0 | 58,919 |
package jigg.util
/*
Copyright 2013-2015 Hiroshi Noji
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import java.util.Properties

import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
/** Small helpers for reading typed, validated values out of a [[java.util.Properties]]. */
object PropertiesUtil {

  /** Looks up `key`; a missing (null) entry becomes `None`. */
  def findProperty(key: String, props: Properties): Option[String] =
    Option(props.getProperty(key)) // Option(null) == None, so no explicit null match is needed

  /** Like [[findProperty]] but fails fast via `sys.error` when the key is absent. */
  def safeFind(key: String, props: Properties): String =
    findProperty(key, props).getOrElse { sys.error(s"$key property is required!") }

  /**
   * Parses a boolean property. Returns `None` when the key is absent; a present value
   * other than the exact strings "true"/"false" is an error.
   */
  def getBoolean(key: String, props: Properties): Option[Boolean] = findProperty(key, props) map {
    case "true" => true
    case "false" => false
    case _ => sys.error(s"Property $key should be true or false")
  }

  /** Returns the (key, value) pairs accepted by the predicate `f`. */
  def filter(props: Properties)(f: (String, String) => Boolean): Seq[(String, String)] =
    // Explicit .asScala (JavaConverters) instead of relying on the deprecated
    // JavaConversions implicit conversions that were previously in scope.
    props.stringPropertyNames.asScala.toSeq
      .map { k => (k, props.getProperty(k)) }
      .filter { case (k, v) => f(k, v) }
}
| tomeken-yoshinaga/jigg | src/main/scala/jigg/util/PropertiesUtil.scala | Scala | apache-2.0 | 1,416 |
package com.wavesplatform.lang.v1
import cats.Id
import com.wavesplatform.common.utils.EitherExt2
import com.wavesplatform.lang.Common
import com.wavesplatform.lang.directives.values.V1
import com.wavesplatform.lang.v1.EvaluatorV2Benchmark._
import com.wavesplatform.lang.v1.compiler.ExpressionCompiler
import com.wavesplatform.lang.v1.evaluator.EvaluatorV2
import com.wavesplatform.lang.v1.evaluator.ctx.impl.PureContext
import com.wavesplatform.lang.v1.evaluator.ctx.{EvaluationContext, LoggedEvaluationContext}
import com.wavesplatform.lang.v1.parser.Parser
import com.wavesplatform.lang.v1.traits.Environment
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import java.util.concurrent.TimeUnit
/** Shared fixtures: a pure V1 context and an EvaluatorV2 wired to a no-op logger. */
object EvaluatorV2Benchmark {
  val pureContext: CTX[Environment] = PureContext.build(V1, fixUnicodeFunctions = true).withEnvironment[Environment]
  val pureEvalContext: EvaluationContext[Environment, Id] = pureContext.evaluationContext(Common.emptyBlockchainEnvironment())
  // `_ => _ => ()` discards all evaluation log entries so logging cost is excluded.
  val evaluatorV2: EvaluatorV2 = new EvaluatorV2(LoggedEvaluationContext(_ => _ => (), pureEvalContext), V1)
}
// JMH configuration: average time per invocation in ms, single thread, one fork.
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Threads(1)
@Fork(1)
@Warmup(iterations = 20)
@Measurement(iterations = 10)
class EvaluatorV2Benchmark {
  // Each benchmark evaluates a pre-compiled expression; 1000000 is the limit argument
  // passed to the evaluator (presumably the evaluation step/complexity budget — see
  // EvaluatorV2.apply). Blackhole.consume prevents dead-code elimination.

  /** Chain of 2000 nested user-function calls. */
  @Benchmark
  def funcs(st: Funcs, bh: Blackhole): Unit = bh.consume(evaluatorV2(st.expr, 1000000))

  /** 5000 chained let-bindings. */
  @Benchmark
  def lets(st: Lets, bh: Blackhole): Unit = bh.consume(evaluatorV2(st.expr, 1000000))

  /** User-defined Fibonacci-style function invoked seven times. */
  @Benchmark
  def custom(st: CustomFunc, bh: Blackhole): Unit = bh.consume(evaluatorV2(st.expr, 1000000))

  /** Same function body as `custom`, invoked once. */
  @Benchmark
  def littleCustom(st: LittleCustomFunc, bh: Blackhole): Unit = bh.consume(evaluatorV2(st.expr, 1000000))
}
/** JMH state: a compiled script of `count` chained user-function definitions. */
@State(Scope.Benchmark)
class Funcs {
  val context = pureEvalContext
  val count = 2000
  // Generates "func aN() = aN-1()" for N = 1..count, one definition per line;
  // the final expression forces evaluation of the whole chain.
  val script = {
    val chain = (1 to count).map(i => s"func a$i() = a${i - 1}()").mkString("\n")
    s"""
       | func a0() = {
       |   1 + 1
       | }
       | $chain
       | a$count() == a$count()
      """.stripMargin
  }
  val parsed = Parser.parseExpr(script).get.value
  val expr = ExpressionCompiler(pureContext.compilerContext, parsed).explicitGet()._1
}
/** JMH state: a compiled script of `count` chained let-bindings. */
@State(Scope.Benchmark)
class Lets {
  val context = pureEvalContext
  val count = 5000
  // Generates "let aN = aN-1 + 1" for N = 1..count, one binding per line.
  val script = {
    val chain = (1 to count).map(i => s"let a$i = a${i - 1} + 1").mkString("\n")
    s"""
       | let a0 = 1
       | $chain
       | a$count == a$count
      """.stripMargin
  }
  val parsed = Parser.parseExpr(script).get.value
  val expr = ExpressionCompiler(pureContext.compilerContext, parsed).explicitGet()._1
}
/**
 * JMH state: a user function computing the 15th Fibonacci-style pair iteratively via
 * let-bindings, invoked seven times. The script text is fixed; do not alter it, as the
 * benchmark measures evaluation of exactly this shape.
 */
@State(Scope.Benchmark)
class CustomFunc {
  val context = pureEvalContext
  val script =
    s"""
       | func f() = {
       |   let a0 = 0
       |   let b0 = 1
       |   let a1 = b0
       |   let b1 = a0 + b0
       |   let a2 = b1
       |   let b2 = a1 + b1
       |   let a3 = b2
       |   let b3 = a2 + b2
       |   let a4 = b3
       |   let b4 = a3 + b3
       |   let a5 = b4
       |   let b5 = a4 + b4
       |   let a6 = b5
       |   let b6 = a5 + b5
       |   let a7 = b6
       |   let b7 = a6 + b6
       |   let a8 = b7
       |   let b8 = a7 + b7
       |   let a9 = b8
       |   let b9 = a8 + b8
       |   let a10 = b9
       |   let b10 = a9 + b9
       |   let a11 = b10
       |   let b11 = a10 + b10
       |   let a12 = b11
       |   let b12 = a11 + b11
       |   let a13 = b12
       |   let b13 = a12 + b12
       |   let a14 = b13
       |   let b14 = a13 + b13
       |   b14 == 610
       | }
       |
       | f() && f() && f() && f() && f() && f() && f()
      """.stripMargin
  val parsed = Parser.parseExpr(script).get.value
  val expr = ExpressionCompiler(pureContext.compilerContext, parsed).explicitGet()._1
}
/**
 * JMH state: identical function body to [[CustomFunc]] but invoked only once,
 * isolating the per-call overhead from the function-body evaluation cost.
 */
@State(Scope.Benchmark)
class LittleCustomFunc {
  val context = pureEvalContext
  val script =
    s"""
       | func f() = {
       |   let a0 = 0
       |   let b0 = 1
       |   let a1 = b0
       |   let b1 = a0 + b0
       |   let a2 = b1
       |   let b2 = a1 + b1
       |   let a3 = b2
       |   let b3 = a2 + b2
       |   let a4 = b3
       |   let b4 = a3 + b3
       |   let a5 = b4
       |   let b5 = a4 + b4
       |   let a6 = b5
       |   let b6 = a5 + b5
       |   let a7 = b6
       |   let b7 = a6 + b6
       |   let a8 = b7
       |   let b8 = a7 + b7
       |   let a9 = b8
       |   let b9 = a8 + b8
       |   let a10 = b9
       |   let b10 = a9 + b9
       |   let a11 = b10
       |   let b11 = a10 + b10
       |   let a12 = b11
       |   let b12 = a11 + b11
       |   let a13 = b12
       |   let b13 = a12 + b12
       |   let a14 = b13
       |   let b14 = a13 + b13
       |   b14 == 610
       | }
       |
       | f()
      """.stripMargin
  val parsed = Parser.parseExpr(script).get.value
  val expr = ExpressionCompiler(pureContext.compilerContext, parsed).explicitGet()._1
}
| wavesplatform/Waves | benchmark/src/test/scala/com/wavesplatform/lang/v1/EvaluatorV2Benchmark.scala | Scala | mit | 4,906 |
package dotty.tools
package dotc
package core
import Periods._
import Names._
import Scopes._
import Flags._
import java.lang.AssertionError
import Decorators._
import Symbols._
import Contexts._
import SymDenotations._
import printing.Texts._
import printing.Printer
import Types._
import Annotations._
import util.Positions._
import DenotTransformers._
import StdNames._
import NameOps._
import ast.tpd.Tree
import ast.TreeTypeMap
import Denotations.{ Denotation, SingleDenotation, MultiDenotation }
import collection.mutable
import io.AbstractFile
import language.implicitConversions
import util.{NoSource, DotClass}
/** Creation methods for symbols */
trait Symbols { this: Context =>

  // ---- Factory methods for symbol creation ----------------------
  //
  // All symbol creations should be done via the next two methods.

  /** Create a symbol without a denotation.
   *  Note this uses a cast instead of a direct type refinement because
   *  it's debug-friendlier not to create an anonymous class here.
   */
  def newNakedSymbol[N <: Name](coord: Coord = NoCoord): Symbol { type ThisName = N } =
    new Symbol(coord).asInstanceOf[Symbol { type ThisName = N }]

  /** Create a class symbol without a denotation. */
  def newNakedClassSymbol(coord: Coord = NoCoord, assocFile: AbstractFile = null) =
    new ClassSymbol(coord, assocFile)

  // ---- Symbol creation methods ----------------------------------

  /** Create a symbol from a function producing its denotation */
  def newSymbolDenoting[N <: Name](denotFn: Symbol => SymDenotation, coord: Coord = NoCoord): Symbol { type ThisName = N } = {
    val sym = newNakedSymbol[N](coord)
    sym.denot = denotFn(sym)
    sym
  }

  /** Create a symbol from its fields (info may be lazy) */
  def newSymbol[N <: Name](
      owner: Symbol,
      name: N,
      flags: FlagSet,
      info: Type,
      privateWithin: Symbol = NoSymbol,
      coord: Coord = NoCoord): Symbol { type ThisName = N } = {
    val sym = newNakedSymbol[N](coord)
    val denot = SymDenotation(sym, owner, name, flags, info, privateWithin)
    sym.denot = denot
    sym
  }

  /** Create a class symbol from a function producing its denotation */
  def newClassSymbolDenoting(denotFn: ClassSymbol => SymDenotation, coord: Coord = NoCoord, assocFile: AbstractFile = null): ClassSymbol = {
    val cls = newNakedClassSymbol(coord, assocFile)
    cls.denot = denotFn(cls)
    cls
  }

  /** Create a class symbol from its non-info fields and a function
   *  producing its info (the produced info may be lazy).
   */
  def newClassSymbol(
      owner: Symbol,
      name: TypeName,
      flags: FlagSet,
      infoFn: ClassSymbol => Type,
      privateWithin: Symbol = NoSymbol,
      coord: Coord = NoCoord,
      assocFile: AbstractFile = null): ClassSymbol
  = {
    val cls = newNakedClassSymbol(coord, assocFile)
    val denot = SymDenotation(cls, owner, name, flags, infoFn(cls), privateWithin)
    cls.denot = denot
    cls
  }

  /** Create a class symbol from its non-info fields and the fields of its info. */
  def newCompleteClassSymbol(
      owner: Symbol,
      name: TypeName,
      flags: FlagSet,
      parents: List[TypeRef],
      decls: Scope = newScope,
      selfInfo: Type = NoType,
      privateWithin: Symbol = NoSymbol,
      coord: Coord = NoCoord,
      assocFile: AbstractFile = null): ClassSymbol =
    newClassSymbol(
      owner, name, flags,
      ClassInfo(owner.thisType, _, parents, decls, selfInfo),
      privateWithin, coord, assocFile)

  /** Create a module symbol with associated module class
   *  from its non-info fields and a function producing the info
   *  of the module class (this info may be lazy).
   */
  def newModuleSymbol(
      owner: Symbol,
      name: TermName,
      modFlags: FlagSet,
      clsFlags: FlagSet,
      infoFn: (TermSymbol, ClassSymbol) => Type, // typically a ModuleClassCompleterWithDecls
      privateWithin: Symbol = NoSymbol,
      coord: Coord = NoCoord,
      assocFile: AbstractFile = null): TermSymbol
  = {
    val base = owner.thisType
    val module = newNakedSymbol[TermName](coord)
    val modcls = newNakedClassSymbol(coord, assocFile)
    val modclsFlags = clsFlags | ModuleClassCreationFlags
    val modclsName = name.toTypeName.adjustIfModuleClass(modclsFlags)
    val cdenot = SymDenotation(
      modcls, owner, modclsName, modclsFlags,
      infoFn(module, modcls), privateWithin)
    // If the class denotation is already completed we can point the module's info
    // directly at the module class; otherwise defer via a ModuleCompleter.
    val mdenot = SymDenotation(
      module, owner, name, modFlags | ModuleCreationFlags,
      if (cdenot.isCompleted) TypeRef.withSymAndName(owner.thisType, modcls, modclsName)
      else new ModuleCompleter(modcls))
    module.denot = mdenot
    modcls.denot = cdenot
    module
  }

  /** Create a module symbol with associated module class
   *  from its non-info fields and the fields of the module class info.
   *  @param flags  The combined flags of the module and the module class
   *                These are masked with RetainedModuleValFlags/RetainedModuleClassFlags.
   */
  def newCompleteModuleSymbol(
      owner: Symbol,
      name: TermName,
      modFlags: FlagSet,
      clsFlags: FlagSet,
      parents: List[TypeRef],
      decls: Scope,
      privateWithin: Symbol = NoSymbol,
      coord: Coord = NoCoord,
      assocFile: AbstractFile = null): TermSymbol =
    newModuleSymbol(
      owner, name, modFlags, clsFlags,
      (module, modcls) => ClassInfo(
        owner.thisType, modcls, parents, decls, TermRef.withSymAndName(owner.thisType, module, name)),
      privateWithin, coord, assocFile)

  /** Create a package symbol with associated package class
   *  from its non-info fields and a lazy type for loading the package's members.
   */
  def newPackageSymbol(
      owner: Symbol,
      name: TermName,
      infoFn: (TermSymbol, ClassSymbol) => LazyType): TermSymbol =
    newModuleSymbol(owner, name, PackageCreationFlags, PackageCreationFlags, infoFn)

  /** Create a package symbol with associated package class
   *  from its non-info fields its member scope.
   */
  def newCompletePackageSymbol(
      owner: Symbol,
      name: TermName,
      modFlags: FlagSet = EmptyFlags,
      clsFlags: FlagSet = EmptyFlags,
      decls: Scope = newScope): TermSymbol =
    newCompleteModuleSymbol(
      owner, name,
      modFlags | PackageCreationFlags, clsFlags | PackageCreationFlags,
      Nil, decls)

  /** Create a stub symbol that will issue a missing reference error
   *  when attempted to be completed.
   */
  def newStubSymbol(owner: Symbol, name: Name, file: AbstractFile = null): Symbol = {
    def stubCompleter = new StubInfo()
    val normalizedOwner = if (owner is ModuleVal) owner.moduleClass else owner
    // FIXME(review): leftover debug output — prints unconditionally on every stub
    // creation; should be removed or routed through the compiler's logging facilities.
    println(s"creating stub for ${name.show}, owner = ${normalizedOwner.denot.debugString}, file = $file")
    println(s"decls = ${normalizedOwner.unforcedDecls.toList.map(_.debugString).mkString("\\n ")}") // !!! DEBUG
    //if (base.settings.debug.value) throw new Error()
    val stub = name match {
      case name: TermName =>
        newModuleSymbol(normalizedOwner, name, EmptyFlags, EmptyFlags, stubCompleter, assocFile = file)
      case name: TypeName =>
        newClassSymbol(normalizedOwner, name, EmptyFlags, stubCompleter, assocFile = file)
    }
    // Record the stub in the diagnostic list kept on the companion object.
    stubs = stub :: stubs
    stub
  }

  /** Create the local template dummy of given class `cls`.
   *  In a template
   *
   *     trait T { val fld: Int; { val x: int = 2 }; val fld2 = { val y = 2; y }}
   *
   *  the owner of `x` is the local dummy of the template. The owner of the local
   *  dummy is then the class of the template itself. By contrast, the owner of `y`
   *  would be `fld2`. There is a single local dummy per template.
   */
  def newLocalDummy(cls: Symbol, coord: Coord = NoCoord) =
    newSymbol(cls, nme.localDummyName(cls), EmptyFlags, NoType)

  /** Create an import symbol pointing back to given qualifier `expr`. */
  def newImportSymbol(expr: Tree, coord: Coord = NoCoord) =
    newSymbol(NoSymbol, nme.IMPORT, EmptyFlags, ImportType(expr), coord = coord)

  /** Create a class constructor symbol for given class `cls`. */
  def newConstructor(cls: ClassSymbol, flags: FlagSet, paramNames: List[TermName], paramTypes: List[Type], privateWithin: Symbol = NoSymbol, coord: Coord = NoCoord) =
    newSymbol(cls, nme.CONSTRUCTOR, flags | Method, MethodType(paramNames, paramTypes)(_ => cls.typeRef), privateWithin, coord)

  /** Create an empty default constructor symbol for given class `cls`. */
  def newDefaultConstructor(cls: ClassSymbol) =
    newConstructor(cls, EmptyFlags, Nil, Nil)

  /** Create a symbol representing a selftype declaration for class `cls`. */
  def newSelfSym(cls: ClassSymbol, name: TermName = nme.WILDCARD, selfInfo: Type = NoType): TermSymbol =
    ctx.newSymbol(cls, name, SelfSymFlags, selfInfo orElse cls.classInfo.selfType, coord = cls.coord)

  /** Create new type parameters with given owner, names, and flags.
   *  @param boundsFn  A function that, given type refs to the newly created
   *                   parameters returns a list of their bounds.
   */
  def newTypeParams(
      owner: Symbol,
      names: List[TypeName],
      flags: FlagSet,
      boundsFn: List[TypeRef] => List[Type]): List[TypeSymbol] = {
    // First create all naked parameter symbols and refs to them, so that
    // boundsFn can mention any parameter in any bound; denotations are
    // assigned only afterwards.
    val tparamBuf = new mutable.ListBuffer[TypeSymbol]
    val trefBuf = new mutable.ListBuffer[TypeRef]
    for (name <- names) {
      val tparam = newNakedSymbol[TypeName](NoCoord)
      tparamBuf += tparam
      trefBuf += TypeRef.withSymAndName(owner.thisType, tparam, name)
    }
    val tparams = tparamBuf.toList
    val bounds = boundsFn(trefBuf.toList)
    for ((name, tparam, bound) <- (names, tparams, bounds).zipped)
      tparam.denot = SymDenotation(tparam, owner, name, flags | owner.typeParamCreationFlags, bound)
    tparams
  }

  /** Create a new skolem symbol. This is not the same as SkolemType, even though the
   *  motivation (create a singleton referencing to a type) is similar.
   */
  def newSkolem(tp: Type) = newSymbol(defn.RootClass, nme.SKOLEM, SyntheticArtifact | Permanent, tp)

  /** Create a synthetic symbol standing in for an erroneous definition; its info
   *  is `ErrorType` (wrapped in a `TypeAlias` when `name` is a type name).
   */
  def newErrorSymbol(owner: Symbol, name: Name) =
    newSymbol(owner, name, SyntheticArtifact,
      if (name.isTypeName) TypeAlias(ErrorType) else ErrorType)

  /** Map given symbols, subjecting their attributes to the mappings
   *  defined in the given TreeTypeMap `ttmap`.
   *  Cross symbol references are brought over from originals to copies.
   *  Do not copy any symbols if all attributes of all symbols stay the same.
   */
  def mapSymbols(originals: List[Symbol], ttmap: TreeTypeMap, mapAlways: Boolean = false): List[Symbol] =
    if (originals.forall(sym =>
        (ttmap.mapType(sym.info) eq sym.info) &&
        !(ttmap.oldOwners contains sym.owner)) && !mapAlways)
      originals
    else {
      // Copies are first created naked, so that cross references among the
      // originals can be rebound to the copies via the substitution below.
      val copies: List[Symbol] = for (original <- originals) yield
        original match {
          case original: ClassSymbol =>
            newNakedClassSymbol(original.coord, original.assocFile)
          case _ =>
            newNakedSymbol[original.ThisName](original.coord)
        }
      val ttmap1 = ttmap.withSubstitution(originals, copies)
      (originals, copies).zipped foreach {(original, copy) =>
        copy.denot = original.denot // preliminary denotation, so that we can access symbols in subsequent transform
      }
      (originals, copies).zipped foreach {(original, copy) =>
        val odenot = original.denot
        val oinfo = original.info match {
          case ClassInfo(pre, _, parents, decls, selfInfo) =>
            assert(original.isClass)
            ClassInfo(pre, copy.asClass, parents, decls, selfInfo)
          case oinfo => oinfo
        }
        copy.denot = odenot.copySymDenotation(
          symbol = copy,
          owner = ttmap1.mapOwner(odenot.owner),
          initFlags = odenot.flags &~ Frozen | Fresh,
          info = ttmap1.mapType(oinfo),
          privateWithin = ttmap1.mapOwner(odenot.privateWithin), // since this refers to outer symbols, need not include copies (from->to) in ownermap here.
          annotations = odenot.annotations.mapConserve(ttmap1.apply))
      }
      copies
    }

  // ----- Locating predefined symbols ----------------------------------------

  /** The term symbol of the package with fully qualified name `path`;
   *  requires (via `requiredSymbol`) that it denotes a package.
   */
  def requiredPackage(path: PreName): TermSymbol =
    base.staticRef(path.toTermName).requiredSymbol(_ is Package).asTerm

  /** The class symbol with fully qualified name `path`; requires that it denotes a class. */
  def requiredClass(path: PreName): ClassSymbol =
    base.staticRef(path.toTypeName).requiredSymbol(_.isClass).asClass

  /** The module (object) symbol with fully qualified name `path`; requires that it denotes a module. */
  def requiredModule(path: PreName): TermSymbol =
    base.staticRef(path.toTermName).requiredSymbol(_ is Module).asTerm

  /** The method member named `name` of class `cls`; requires that it denotes a method. */
  def requiredMethod(cls: ClassSymbol, name: PreName): TermSymbol =
    cls.info.member(name.toTermName).requiredSymbol(_ is Method).asTerm
}
object Symbols {

  // Global id generator for symbols; diagnostic only (see marker comment below).
  var _nextId = 0 // !!! DEBUG, use global counter instead
  def nextId = { _nextId += 1; _nextId }

  /** A Symbol represents a Scala definition/declaration or a package.
   */
  class Symbol private[Symbols] (val coord: Coord) extends DotClass with printing.Showable {

    type ThisName <: Name

    private[this] var _id: Int = nextId
    //assert(_id != 30214)

    /** The unique id of this symbol */
    def id = _id

    /** The last denotation of this symbol */
    private[this] var lastDenot: SymDenotation = _

    /** Set the denotation of this symbol */
    private[core] def denot_=(d: SymDenotation) =
      lastDenot = d

    /** The current denotation of this symbol.
     *  Caches the denotation in `lastDenot` and refreshes the cache when the
     *  cached denotation is not valid for the context's current period.
     */
    final def denot(implicit ctx: Context): SymDenotation = {
      var denot = lastDenot
      if (!(denot.validFor contains ctx.period)) {
        denot = denot.current.asInstanceOf[SymDenotation]
        lastDenot = denot
      }
      denot
    }

    // Run id of the cached denotation, or NoRunId when no denotation was set yet.
    private[core] def defRunId: RunId =
      if (lastDenot == null) NoRunId else lastDenot.validFor.runId

    /** Does this symbol come from a currently compiled source file? */
    final def isDefinedInCurrentRun(implicit ctx: Context): Boolean = {
      pos.exists && defRunId == ctx.runId
    }

    /** Subclass tests and casts */
    final def isTerm(implicit ctx: Context): Boolean = denot.isTerm
    final def isType(implicit ctx: Context): Boolean = denot.isType
    final def isClass: Boolean = isInstanceOf[ClassSymbol]

    final def asTerm(implicit ctx: Context): TermSymbol = { assert(isTerm, s"asTerm called on not-a-Term $this" ); asInstanceOf[TermSymbol] }
    final def asType(implicit ctx: Context): TypeSymbol = { assert(isType, s"isType called on not-a-Type $this"); asInstanceOf[TypeSymbol] }
    final def asClass: ClassSymbol = asInstanceOf[ClassSymbol]

    // Tests the Fresh flag on the cached denotation without forcing a current one.
    final def isFresh(implicit ctx: Context) =
      lastDenot != null && (lastDenot is Fresh)

    /** Special cased here, because it may be used on naked symbols in substituters */
    final def isStatic(implicit ctx: Context): Boolean =
      lastDenot != null && denot.isStatic

    /** A unique, densely packed integer tag for each class symbol, -1
     *  for all other symbols. To save memory, this method
     *  should be called only if class is a super class of some other class.
     */
    def superId(implicit ctx: Context): Int = -1

    /** This symbol entered into owner's scope (owner must be a class). */
    final def entered(implicit ctx: Context): this.type = {
      assert(this.owner.isClass, s"symbol ($this) entered the scope of non-class owner ${this.owner}") // !!! DEBUG
      this.owner.asClass.enter(this)
      if (this is Module) this.owner.asClass.enter(this.moduleClass)
      this
    }

    /** Enter this symbol in its class owner after given `phase`. Create a fresh
     *  denotation for its owner class if the class has not yet already one
     *  that starts being valid after `phase`.
     *  @pre  Symbol is a class member
     */
    def enteredAfter(phase: DenotTransformer)(implicit ctx: Context): this.type =
      if (ctx.phaseId != phase.next.id) enteredAfter(phase)(ctx.withPhase(phase.next))
      else {
        if (this.owner.is(Package)) {
          denot.validFor |= InitialPeriod
          if (this is Module) this.moduleClass.validFor |= InitialPeriod
        }
        else this.owner.asClass.ensureFreshScopeAfter(phase)
        entered
      }

    /** This symbol, if it exists, otherwise the result of evaluating `that` */
    def orElse(that: => Symbol)(implicit ctx: Context) =
      if (this.exists) this else that

    /** If this symbol satisfies predicate `p` this symbol, otherwise `NoSymbol` */
    def filter(p: Symbol => Boolean): Symbol = if (p(this)) this else NoSymbol

    /** Is this symbol a user-defined value class? */
    final def isDerivedValueClass(implicit ctx: Context): Boolean =
      false // will migrate to ValueClasses.isDerivedValueClass;
            // unsupported value class code will continue to use this stub while it exists

    /** The current name of this symbol */
    final def name(implicit ctx: Context): ThisName = denot.name.asInstanceOf[ThisName]

    /** The source or class file from which this class or
     *  the class containing this symbol was generated, null if not applicable.
     *  Overridden in ClassSymbol
     */
    def associatedFile(implicit ctx: Context): AbstractFile =
      denot.topLevelClass.symbol.associatedFile

    /** The class file from which this class was generated, null if not applicable. */
    final def binaryFile(implicit ctx: Context): AbstractFile =
      pickFile(associatedFile, classFile = true)

    /** The source file from which this class was generated, null if not applicable. */
    final def sourceFile(implicit ctx: Context): AbstractFile =
      pickFile(associatedFile, classFile = false)

    /** Desire to re-use the field in ClassSymbol which stores the source
     *  file to also store the classfile, but without changing the behavior
     *  of sourceFile (which is expected at least in the IDE only to
     *  return actual source code.) So sourceFile has classfiles filtered out.
     */
    private def pickFile(file: AbstractFile, classFile: Boolean): AbstractFile =
      if ((file eq null) || classFile != (file.path endsWith ".class")) null else file

    /** The position of this symbol, or NoPosition if the symbol was not loaded
     *  from source.
     */
    def pos: Position = if (coord.isPosition) coord.toPosition else NoPosition

    // -------- Printing --------------------------------------------------------

    /** The prefix string to be used when displaying this symbol without denotation */
    protected def prefixString = "Symbol"

    override def toString: String =
      if (lastDenot == null) s"Naked$prefixString#$id"
      else lastDenot.toString// +"#"+id // !!! DEBUG

    def toText(printer: Printer): Text = printer.toText(this)

    def showLocated(implicit ctx: Context): String = ctx.locatedText(this).show
    def showDcl(implicit ctx: Context): String = ctx.dclText(this).show
    def showKind(implicit ctx: Context): String = ctx.kindString(this)
    def showName(implicit ctx: Context): String = ctx.nameString(this)
    def showFullName(implicit ctx: Context): String = ctx.fullNameString(this)
  }

  type TermSymbol = Symbol { type ThisName = TermName }
  type TypeSymbol = Symbol { type ThisName = TypeName }

  class ClassSymbol private[Symbols] (coord: Coord, val assocFile: AbstractFile)
    extends Symbol(coord) {

    type ThisName = TypeName

    /** The source or class file from which this class was generated, null if not applicable. */
    override def associatedFile(implicit ctx: Context): AbstractFile =
      if (assocFile != null || (this.owner is PackageClass)) assocFile
      else super.associatedFile

    final def classDenot(implicit ctx: Context): ClassDenotation =
      denot.asInstanceOf[ClassDenotation]

    // Cache of the last super id returned, to skip the map lookup in the common case.
    private var superIdHint: Int = -1

    override def superId(implicit ctx: Context): Int = {
      val hint = superIdHint
      // A hint is usable only while it still maps back to this class in the context.
      if (hint >= 0 && hint <= ctx.lastSuperId && (ctx.classOfId(hint) eq this))
        hint
      else {
        // Hint missing or stale: look the id up in the context, allocating a
        // fresh one and registering it in both direction maps if necessary.
        val id = ctx.superIdOfClass get this match {
          case Some(id) =>
            id
          case None =>
            val id = ctx.nextSuperId
            ctx.superIdOfClass(this) = id
            ctx.classOfId(id) = this
            id
        }
        superIdHint = id
        id
      }
    }

    override protected def prefixString = "ClassSymbol"
  }

  /** A symbol standing in for another on which an error occurred;
   *  shares the underlying symbol's denotation.
   */
  class ErrorSymbol(val underlying: Symbol, msg: => String)(implicit ctx: Context) extends Symbol(NoCoord) {
    type ThisName = underlying.ThisName
    denot = underlying.denot
  }

  object NoSymbol extends Symbol(NoCoord) {
    denot = NoDenotation
    override def associatedFile(implicit ctx: Context): AbstractFile = NoSource.file
  }

  implicit class Copier[N <: Name](sym: Symbol { type ThisName = N })(implicit ctx: Context) {
    /** Copy a symbol, overriding selective fields */
    def copy(
        owner: Symbol = sym.owner,
        name: N = sym.name,
        flags: FlagSet = sym.flags,
        info: Type = sym.info,
        privateWithin: Symbol = sym.privateWithin,
        coord: Coord = sym.coord,
        associatedFile: AbstractFile = sym.associatedFile): Symbol =
      if (sym.isClass)
        ctx.newClassSymbol(owner, name.asTypeName, flags, _ => info, privateWithin, coord, associatedFile)
      else
        ctx.newSymbol(owner, name, flags, info, privateWithin, coord)
  }

  implicit def defn(implicit ctx: Context): Definitions = ctx.definitions

  /** Makes all denotation operations available on symbols */
  implicit def toDenot(sym: Symbol)(implicit ctx: Context): SymDenotation = sym.denot

  /** Makes all class denotations available on class symbols */
  implicit def toClassDenot(cls: ClassSymbol)(implicit ctx: Context): ClassDenotation = cls.classDenot

  var stubs: List[Symbol] = Nil // diagnostic
}
| AlexSikia/dotty | src/dotty/tools/dotc/core/Symbols.scala | Scala | bsd-3-clause | 21,737 |
/**
* This file is part of the TA Buddy project.
* Copyright (c) 2014 Alexey Aksenov ezh@ezh.msk.ru
*
* This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License version 3
 * as published by the Free Software Foundation with the addition of the
* following permission added to Section 15 as permitted in Section 7(a):
* FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED
* BY Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS»,
* Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS» DISCLAIMS
* THE WARRANTY OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU Affero General Public License for more details.
 * You should have received a copy of the GNU Affero General Public License
* along with this program; if not, see http://www.gnu.org/licenses or write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA, 02110-1301 USA, or download the license from the following URL:
* http://www.gnu.org/licenses/agpl.html
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
 * Section 5 of the GNU Affero General Public License.
*
 * In accordance with Section 7(b) of the GNU Affero General Public License,
* you must retain the producer line in every report, form or document
* that is created or manipulated using TA Buddy.
*
* You can be released from the requirements of the license by purchasing
* a commercial license. Buying such a license is mandatory as soon as you
* develop commercial activities involving the TA Buddy software without
* disclosing the source code of your own applications.
* These activities include: offering paid services to customers,
* serving files in a web or/and network application,
* shipping TA Buddy with a closed source product.
*
* For more information, please contact Digimead Team at this
* address: ezh@ezh.msk.ru
*/
package org.digimead.tabuddy.desktop.logic.operation.graph
import java.io.{ IOException, OutputStream }
import java.net.URI
import org.digimead.digi.lib.aop.log
import org.digimead.digi.lib.api.XDependencyInjection
import org.digimead.digi.lib.log.api.XLoggable
import org.digimead.tabuddy.desktop.core.definition.Operation
import org.digimead.tabuddy.desktop.core.support.App
import org.digimead.tabuddy.desktop.logic.Logic
import org.digimead.tabuddy.desktop.logic.operation.graph.api.XOperationGraphExport
import org.digimead.tabuddy.desktop.logic.payload.marker.GraphMarker
import org.digimead.tabuddy.desktop.logic.payload.marker.serialization.encryption.api.XEncryption
import org.digimead.tabuddy.model.Model
import org.digimead.tabuddy.model.graph.Graph
import org.digimead.tabuddy.model.serialization.digest.Digest
import org.digimead.tabuddy.model.serialization.signature.Signature
import org.digimead.tabuddy.model.serialization.transport.Transport
import org.digimead.tabuddy.model.serialization.{ SData, Serialization, digest, signature }
import org.eclipse.core.runtime.{ IAdaptable, IProgressMonitor }
/** 'Export graph' operation. */
class OperationGraphExport extends XOperationGraphExport with XLoggable {
  /**
   * Export graph.
   *
   * @param graph graph to export
   * @param location target directory
   * @param overwrite whether an existing destination may be overwritten
   *   (NOTE(review): not consulted in this method — presumably handled by the
   *   serialization layer, which throws "Destination directory is already exists.";
   *   confirm)
   * @param containerEncParameters container encryption parameters
   * @param contentEncParameters content encryption parameters
   * @param dParameters digest parameters
   * @param sParameters signature parameters
   * @param serialization type of the serialization
   */
  def apply(graph: Graph[_ <: Model.Like], location: URI, overwrite: Boolean,
    containerEncParameters: Option[XEncryption.Parameters],
    contentEncParameters: Option[XEncryption.Parameters],
    dParameters: Option[digest.Mechanism.Parameters], sParameters: Option[signature.Mechanism.Parameters],
    serialization: Option[Serialization.Identifier]) = GraphMarker(graph).safeUpdate { state ⇒
    log.info(s"Export ${graph} to ${location}.")
    // Preconditions: workspace open, marker valid, graph open.
    if (!Logic.container.isOpen())
      throw new IllegalStateException("Workspace is not available.")
    val marker = GraphMarker(graph)
    if (!marker.markerIsValid)
      throw new IllegalStateException(marker + " is not valid.")
    if (!marker.graphIsOpen())
      throw new IllegalStateException(s"$graph is closed.")
    val locationURI = Serialization.normalizeURI(location)
    // The SData options below are threaded functionally: each step extends the
    // previous one only when the corresponding parameter is present.
    // Additional storages
    val sDataNStorages = SData(SData.Key.explicitStorages ->
      Serialization.Storages(Serialization.Storages.Real(locationURI)))
    // Digest
    val sDataNDigest = dParameters match {
      case Some(parameters) ⇒
        sDataNStorages.updated(Digest.Key.freeze, Map(locationURI -> parameters))
      case None ⇒
        sDataNStorages
    }
    // Signature
    val sDataNSignature = sParameters match {
      case Some(parameters) ⇒
        sDataNDigest.updated(Signature.Key.freeze, Map(locationURI -> parameters))
      case None ⇒
        sDataNDigest
    }
    // Container encryption (encrypts/decrypts resource names via convertURI)
    val sDataNContainerEncryption = containerEncParameters match {
      case Some(parameters) ⇒
        sDataNSignature.updated(SData.Key.convertURI,
          // encode
          ((name: String, sData: SData) ⇒
            parameters.encryption.toString(parameters.encryption.encrypt(name.getBytes(io.Codec.UTF8.charSet), parameters)),
            // decode
            (name: String, sData: SData) ⇒
              new String(parameters.encryption.decrypt(parameters.encryption.fromString(name), parameters), io.Codec.UTF8.charSet)))
      case None ⇒
        sDataNSignature
    }
    // Content encryption (wraps every output stream through writeFilter)
    val sDataNContentEncryption = contentEncParameters match {
      case Some(parameters) ⇒
        sDataNContainerEncryption.updated(SData.Key.writeFilter, ((os: OutputStream, uri: URI, transport: Transport, sData: SData) ⇒
          parameters.encryption.encrypt(os, parameters)))
      case None ⇒
        sDataNContainerEncryption
    }
    // Element's data serialization type
    val sDataNSerialization = serialization match {
      case Some(identifier) ⇒
        sDataNContentEncryption.updated(SData.Key.explicitSerializationType, identifier)
      case None ⇒
        sDataNContentEncryption
    }
    // Freeze a copy of the graph (presumably to isolate the export from the live
    // instance — confirm), after persisting the current type schemas.
    val copy = graph.copy() { g ⇒ }
    marker.saveTypeSchemas(App.execNGet { state.payload.typeSchemas.values.toSet }, sDataNSerialization)
    Serialization.freeze(copy, sDataNSerialization)
  }
  /**
   * Create 'Export graph' operation.
   *
   * @param graph graph to export
   * @param location target directory
   * @param overwrite whether an existing destination may be overwritten
   * @param containerEncParameters container encryption parameters
   * @param contentEncParameters content encryption parameters
   * @param dParameters digest parameters
   * @param sParameters signature parameters
   * @param serialization type of the serialization
   * @return 'Export graph' operation
   */
  def operation(graph: Graph[_ <: Model.Like], location: URI, overwrite: Boolean,
    containerEncParameters: Option[XEncryption.Parameters],
    contentEncParameters: Option[XEncryption.Parameters],
    dParameters: Option[digest.Mechanism.Parameters], sParameters: Option[signature.Mechanism.Parameters],
    serialization: Option[Serialization.Identifier]) =
    new Implemetation(graph, location, overwrite, containerEncParameters, contentEncParameters, dParameters, sParameters, serialization)
  /**
   * Checks that this class can be subclassed.
   * <p>
   * The API class is intended to be subclassed only at specific,
   * controlled point. This method enforces this rule
   * unless it is overridden.
   * </p><p>
   * <em>IMPORTANT:</em> By providing an implementation of this
   * method that allows a subclass of a class which does not
   * normally allow subclassing to be created, the implementer
   * agrees to be fully responsible for the fact that any such
   * subclass will likely fail.
   * </p>
   */
  override protected def checkSubclass() {}
  // NOTE(review): "Implemetation" is a typo for "Implementation". The class is
  // public API, so renaming it would be a breaking change; left as-is.
  class Implemetation(graph: Graph[_ <: Model.Like], location: URI, overwrite: Boolean,
    containerEncParameters: Option[XEncryption.Parameters],
    contentEncParameters: Option[XEncryption.Parameters],
    dParameters: Option[digest.Mechanism.Parameters], sParameters: Option[signature.Mechanism.Parameters],
    serialization: Option[Serialization.Identifier])
    extends OperationGraphExport.Abstract(graph, location, overwrite,
      containerEncParameters, contentEncParameters, dParameters, sParameters, serialization) with XLoggable {
    // Single-shot guard: cleared after a successful execute(), so the operation
    // cannot be run twice.
    @volatile protected var allowExecute = true

    override def canExecute() = allowExecute
    override def canRedo() = false
    override def canUndo() = false

    protected def execute(monitor: IProgressMonitor, info: IAdaptable): Operation.Result[Unit] = {
      require(canExecute, "Execution is disabled.")
      try {
        val result = Option(OperationGraphExport.this(graph, location, overwrite,
          containerEncParameters, contentEncParameters, dParameters, sParameters, serialization))
        allowExecute = false
        Operation.Result.OK(result)
      } catch {
        // Destination collision is reported as a plain error, without a stack trace.
        case e: IOException if e.getMessage() == "Destination directory is already exists." ⇒
          Operation.Result.Error(s"Unable to export $graph: " + e.getMessage(), null, false)
        case e: Throwable ⇒
          Operation.Result.Error(s"Unable to export $graph: " + e.getMessage(), e)
      }
    }
    // Export is not redoable/undoable.
    protected def redo(monitor: IProgressMonitor, info: IAdaptable): Operation.Result[Unit] =
      throw new UnsupportedOperationException
    protected def undo(monitor: IProgressMonitor, info: IAdaptable): Operation.Result[Unit] =
      throw new UnsupportedOperationException
  }
}
object OperationGraphExport extends XLoggable {
  /** Stable identifier with OperationGraphExport DI */
  lazy val operation = DI.operation.asInstanceOf[OperationGraphExport]

  /**
   * Build a new 'Export graph' operation.
   *
   * @param graph graph to export
   * @param location target directory
   * @param overwrite whether an existing destination may be overwritten (passed through to the implementation)
   * @param containerEncParameters container encryption parameters
   * @param contentEncParameters content encryption parameters
   * @param dParameters digest parameters
   * @param sParameters signature parameters
   * @param serialization type of the serialization
   * @return 'Export graph' operation
   */
  @log
  def apply(graph: Graph[_ <: Model.Like], location: URI, overwrite: Boolean,
    containerEncParameters: Option[XEncryption.Parameters],
    contentEncParameters: Option[XEncryption.Parameters],
    dParameters: Option[digest.Mechanism.Parameters], sParameters: Option[signature.Mechanism.Parameters],
    serialization: Option[Serialization.Identifier]): Option[Abstract] =
    Some(operation.operation(graph, location, overwrite,
      containerEncParameters, contentEncParameters, dParameters, sParameters, serialization))

  /** Bridge between abstract XOperation[Unit] and concrete Operation[Unit] */
  abstract class Abstract(val graph: Graph[_ <: Model.Like], val location: URI, val overwrite: Boolean,
    val containerEncParameters: Option[XEncryption.Parameters],
    val contentEncParameters: Option[XEncryption.Parameters],
    val dParameters: Option[digest.Mechanism.Parameters], val sParameters: Option[signature.Mechanism.Parameters],
    val serialization: Option[Serialization.Identifier]) extends Operation[Unit](s"Export ${graph} to ${location}.") {
    this: XLoggable ⇒
  }
  /**
   * Dependency injection routines.
   */
  private object DI extends XDependencyInjection.PersistentInjectable {
    // Use an injected XOperationGraphExport when one is bound;
    // fall back to the default implementation otherwise.
    lazy val operation = injectOptional[XOperationGraphExport] getOrElse new OperationGraphExport
  }
}
| digimead/digi-TABuddy-desktop | part-logic/src/main/scala/org/digimead/tabuddy/desktop/logic/operation/graph/OperationGraphExport.scala | Scala | agpl-3.0 | 11,937 |
package sssg
import java.awt.Desktop
import java.io.File
import java.net.URI
import java.nio.file.StandardWatchEventKinds._
import java.nio.file._
import java.nio.file.attribute.BasicFileAttributes
import java.util.concurrent.Executors
import com.typesafe.config.{Config, ConfigFactory}
import com.typesafe.scalalogging.LazyLogging
import fi.iki.elonen.SimpleWebServer
import scopt.OptionParser
import sssg.renderer.{JadeRenderer, ThymeleafRenderer}
import sssg.scanner.FileContentScanner
import scala.collection.JavaConversions._
case class Arguments(out: String = null, server: Boolean = false)
object Main extends LazyLogging {

  /**
   * Entry point: parses command-line arguments, builds the site once and, when the
   * `server` flag is given, serves the output directory over HTTP and rebuilds the
   * site whenever a watched content/template/static file changes.
   */
  def main(args: Array[String]) {
    val parser = new OptionParser[Arguments]("scala static site generator") {
      head("scala static site generator", "1.0")
      opt[String]('o', "out") valueName "<path>" action { (x, c) =>
        c.copy(out = x)
      } text "the output directory"
      arg[Unit]("server") optional() action { (_, c) =>
        c.copy(server = true)
      } text "serve static files from output"
      help("help") text "prints this usage text"
    }
    parser.parse(args, Arguments()) match {
      case Some(arguments) =>
        val configuration: Config = ConfigFactory.load()
        // Choose the template engine from configuration; anything other than "thyme" gets Jade.
        val sssg: SSSG = if (configuration.getString(ConfigKeys.themeEngine).equals("thyme")) {
          new SSSG with ThymeleafRenderer with FileContentScanner {
            override def config: Config = configuration
          }
        } else {
          new SSSG with JadeRenderer with FileContentScanner {
            override def config: Config = configuration
          }
        }
        if (arguments.server) {
          sssg.build()
          startServer(new File(sssg.OUTPUT_PATH))
          logger.trace("Server initialized")
          // Register every content directory (recursively) with a single watch service.
          val watchService: WatchService = FileSystems.getDefault.newWatchService()
          val pathStaticFiles: Path = Paths.get(sssg.STATIC_FILES_PATH)
          val templateFiles: Path = Paths.get(sssg.TEMPLATE_PATH)
          val pathArticles: Path = Paths.get(sssg.ARTICLES_PATH)
          val pathPages: Path = Paths.get(sssg.PAGES_PATH)
          val visitor: SimpleFileVisitor[Path] = new SimpleFileVisitor[Path] {
            override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes): FileVisitResult = {
              logger.trace(s"Watching ${dir}")
              dir.register(watchService, ENTRY_MODIFY, ENTRY_CREATE, ENTRY_DELETE)
              FileVisitResult.CONTINUE
            }
          }
          Files.walkFileTree(pathPages, visitor)
          Files.walkFileTree(pathArticles, visitor)
          Files.walkFileTree(templateFiles, visitor)
          Files.walkFileTree(pathStaticFiles, visitor)
          // Rebuild loop: block until the file system reports a change, then rebuild.
          while (true) {
            logger.trace("Running watch")
            val take: WatchKey = watchService.take() // blocks until at least one event is queued
            logger.trace("Event received")
            take.pollEvents().listIterator().foreach(e => {
              logger.debug(e.kind().toString)
              // The event's context is the (directory-relative) path of the changed file.
              val event: WatchEvent[Path] = e.asInstanceOf[WatchEvent[Path]]
              logger.debug(s"${event.context().toString} changed")
            })
            // reset() re-arms the key; a false return means the watched directory is no
            // longer valid (e.g. deleted). Previously this result was silently discarded.
            if (!take.reset()) {
              logger.warn(s"Watch key for ${take.watchable()} is no longer valid; its directory will not be watched anymore")
            }
            try {
              sssg.build()
            } catch {
              case e: Throwable => logger.error(e.getMessage, e)
            }
          }
        } else {
          sssg.build()
        }
      case None =>
      // arguments are bad, error message will have been displayed
    }
  }

  // Single worker thread that keeps the embedded web server alive in the background.
  val executor = Executors.newFixedThreadPool(1)

  /** Serves `root` on http://localhost:8000 and opens the default browser when possible. */
  private def startServer(root: File): Unit = {
    executor.submit(new Runnable {
      override def run(): Unit = {
        val server: SimpleWebServer = new SimpleWebServer("localhost", 8000, root, true)
        server.start()
        // Keep the worker alive while the server runs; the pool thread exits when it dies.
        while (server.isAlive) {
          Thread.sleep(100)
        }
      }
    })
    if (Desktop.isDesktopSupported) {
      Desktop.getDesktop.browse(new URI("http://localhost:8000"))
    }
  }
}
| nikosk/sssg | src/main/scala/sssg/Main.scala | Scala | mit | 3,964 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.tree.model
import org.apache.spark.annotation.{DeveloperApi, Since}
import org.apache.spark.mllib.tree.configuration.FeatureType.FeatureType
import org.apache.spark.mllib.tree.configuration.FeatureType
import org.apache.spark.mllib.tree.configuration.FeatureType.FeatureType
/**
 * :: DeveloperApi ::
 * Split applied to a feature.
 *
 * @param feature feature index
 * @param threshold Threshold for continuous feature.
 *                  Split left if feature <= threshold, else right.
 * @param featureType type of feature -- categorical or continuous
 * @param categories Split left if categorical feature value is in this set, else right.
 */
@Since("1.0.0")
@DeveloperApi
case class Split(
    @Since("1.0.0") feature: Int,
    @Since("1.0.0") threshold: Double,
    @Since("1.0.0") featureType: FeatureType,
    @Since("1.0.0") categories: List[Double]) {

  // Renders each field as "key = value", joined by ", " — the same text the
  // previous two-part string concatenation produced.
  override def toString: String = {
    val rendered = Seq(
      s"Feature = $feature",
      s"threshold = $threshold",
      s"featureType = $featureType",
      s"categories = $categories")
    rendered.mkString(", ")
  }
}
/**
 * Split with minimum threshold for continuous features. Helps with the smallest bin creation.
 * @param feature feature index
 * @param featureType type of feature -- categorical or continuous
 */
// Threshold is Double.MinValue; per Split's contract (left iff feature <= threshold)
// essentially every value falls to the right of this split.
private[tree] class DummyLowSplit(feature: Int, featureType: FeatureType)
  extends Split(feature, Double.MinValue, featureType, List())
/**
 * Split with maximum threshold for continuous features. Helps with the highest bin creation.
 * @param feature feature index
 * @param featureType type of feature -- categorical or continuous
 */
// Threshold is Double.MaxValue, so every value falls to the left of this split.
private[tree] class DummyHighSplit(feature: Int, featureType: FeatureType)
  extends Split(feature, Double.MaxValue, featureType, List())
/**
 * Split with no acceptable feature values for categorical features. Helps with the first bin
 * creation.
 * @param feature feature index
 * @param featureType type of feature -- categorical or continuous
 */
// Empty category set: no categorical value is routed left by this split.
private[tree] class DummyCategoricalSplit(feature: Int, featureType: FeatureType)
  extends Split(feature, Double.MaxValue, featureType, List())
| practice-vishnoi/dev-spark-1 | mllib/src/main/scala/org/apache/spark/mllib/tree/model/Split.scala | Scala | apache-2.0 | 2,894 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package integration
import java.io.{ByteArrayOutputStream, OutputStream}
import akka.actor.{ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit}
import com.typesafe.config.ConfigFactory
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.toree.Main
import org.apache.toree.interpreter._
import org.apache.toree.kernel.api.KernelLike
import org.apache.toree.kernel.interpreter.scala.ScalaInterpreter
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.content._
import org.apache.toree.kernel.protocol.v5.interpreter.InterpreterActor
import org.apache.toree.kernel.protocol.v5.interpreter.tasks.InterpreterTaskFactory
import org.apache.toree.utils.MultiOutputStream
import com.typesafe.config.ConfigFactory
import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest.mock.MockitoSugar
import org.scalatest.{BeforeAndAfter, FunSpecLike, Matchers}
import test.utils.UncaughtExceptionSuppression
import test.utils.MaxAkkaTestTimeout
object InterpreterActorSpecForIntegration {
  // HOCON passed to the test ActorSystem; raises the log level to keep test output quiet.
  val config = """
    akka {
      loglevel = "WARNING"
    }"""
}
/** Integration test: drives an InterpreterActor backed by a real ScalaInterpreter
 *  and checks that execute requests come back as Left(output) on success and
 *  Right(error) on failure. */
class InterpreterActorSpecForIntegration extends TestKit(
  ActorSystem(
    "InterpreterActorSpec",
    ConfigFactory.parseString(InterpreterActorSpecForIntegration.config),
    Main.getClass.getClassLoader
  )
) with ImplicitSender with FunSpecLike with Matchers with BeforeAndAfter
  with MockitoSugar with UncaughtExceptionSuppression {

  // Captures interpreter output; reset before each test.
  private val output = new ByteArrayOutputStream()
  // Real interpreter with kernel-variable binding stubbed out (no kernel in this test).
  private val interpreter = new ScalaInterpreter {
    override protected def bindKernelVariable(kernel: KernelLike): Unit = { }
  }

  // NOTE(review): this SparkConf is currently unused — the SparkContext creation
  // below is commented out because the context is shared across tests.
  private val conf = new SparkConf()
    .setMaster("local[*]")
    .setAppName("Test Kernel")

  before {
    output.reset()
    // interpreter.start()
    interpreter.init(mock[KernelLike])
    interpreter.doQuietly({
      //context = new SparkContext(conf) with NoSparkLogging
      //context = SparkContextProvider.sparkContext
      //interpreter.bind(
      //  "sc", "org.apache.spark.SparkContext",
      //  context, List( """@transient"""))
    })
  }

  after {
    // context is shared so dont stop it
    // context.stop()
    interpreter.stop()
  }

  describe("Interpreter Actor with Scala Interpreter") {
    describe("#receive") {
      it("should return ok if the execute request is executed successfully") {
        val interpreterActor =
          system.actorOf(Props(
            classOf[InterpreterActor],
            new InterpreterTaskFactory(interpreter)
          ))

        val executeRequest = ExecuteRequest(
          "val x = 3", false, false,
          UserExpressions(), false
        )

        interpreterActor !
          ((executeRequest, mock[KernelMessage], mock[OutputStream]))

        // Successful execution is reported as Left(ExecuteOutput).
        val result =
          receiveOne(MaxAkkaTestTimeout)
            .asInstanceOf[Either[ExecuteOutput, ExecuteError]]
        result.isLeft should be (true)
        result.left.get shouldBe an [ExecuteOutput]
      }

      it("should return error if the execute request fails") {
        val interpreterActor =
          system.actorOf(Props(
            classOf[InterpreterActor],
            new InterpreterTaskFactory(interpreter)
          ))

        // "..." is not valid Scala, so interpretation must fail.
        val executeRequest = ExecuteRequest(
          "...", false, false,
          UserExpressions(), false
        )

        interpreterActor !
          ((executeRequest, mock[KernelMessage], mock[OutputStream]))

        // Failed execution is reported as Right(ExecuteError).
        val result =
          receiveOne(MaxAkkaTestTimeout)
            .asInstanceOf[Either[ExecuteOutput, ExecuteError]]
        result.isRight should be (true)
        result.right.get shouldBe an [ExecuteError]
      }
    }
  }
}
| chipsenkbeil/incubator-toree | kernel/src/test/scala/integration/InterpreterActorSpecForIntegration.scala | Scala | apache-2.0 | 4,505 |
package org.jetbrains.plugins.scala.lang.completion.lookups
import com.intellij.openapi.project.Project
import com.intellij.openapi.util.Key
import com.intellij.psi.impl.light.LightElement
import com.intellij.psi.tree.IElementType
import com.intellij.psi.{PsiElement, PsiManager}
import org.jetbrains.plugins.scala.ScalaLanguage
import org.jetbrains.plugins.scala.lang.lexer.ScalaLexer
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiElement
import org.jetbrains.plugins.scala.project.UserDataHolderExt
import scala.collection.mutable
/**
 * Lightweight, non-file-backed PSI element representing a single Scala keyword,
 * used (e.g.) as a completion lookup element.
 *
 * @author Alefas
 * @since 27.03.12
 */
class ScalaLightKeyword private (manager: PsiManager, text: String)
  extends LightElement(manager, ScalaLanguage.INSTANCE) with ScalaPsiElement {

  protected def findChildrenByClassScala[T >: Null <: ScalaPsiElement](clazz: Class[T]): Array[T] =
    findChildrenByClass[T](clazz)

  protected def findChildByClassScala[T >: Null <: ScalaPsiElement](clazz: Class[T]): T = findChildByClass[T](clazz)

  override def getText: String = text

  // Lexes the keyword text and returns the type of its first (and only) token.
  def getTokenType: IElementType = {
    val lexer = new ScalaLexer
    lexer.start(text)
    lexer.getTokenType
  }

  override def copy: PsiElement = new ScalaLightKeyword(getManager, text)

  override def toString: String = "ScalaLightKeyword:" + text
}
object ScalaLightKeyword {
  // Per-project cache of keyword elements, stored as user data on the PsiManager.
  private val key = Key.create[mutable.HashMap[String, ScalaLightKeyword]]("scala.light.keywords")

  /** Returns the cached keyword element for `text`, creating it on first request. */
  def apply(text: String)
           (implicit project: Project): ScalaLightKeyword = {
    val manager = PsiManager.getInstance(project)
    val map = manager.getOrUpdateUserData(key, mutable.HashMap[String, ScalaLightKeyword]())
    // NOTE(review): mutable.HashMap#getOrElseUpdate is not thread-safe; if this can be
    // invoked from multiple threads, a concurrent map may be needed — confirm call sites.
    map.getOrElseUpdate(text, new ScalaLightKeyword(manager, text))
  }
}
| jastice/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/completion/lookups/ScalaLightKeyword.scala | Scala | apache-2.0 | 1,719 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.cluster.sharding.typed
import akka.cluster.sharding.typed.javadsl.ClusterSharding
import play.api.inject._
import javax.inject.Inject
import javax.inject.Provider
import javax.inject.Singleton
import akka.actor.typed.javadsl.Adapter
import akka.actor.ActorSystem
import akka.annotation.InternalApi
/** Play DI module binding the (Java API) typed ClusterSharding extension to its provider. */
@InternalApi
final class ClusterShardingModule extends SimpleModule(bind[ClusterSharding].toProvider[ClusterShardingProvider])
/** Provider for the Akka Typed ClusterSharding (Java) */
@Singleton
@InternalApi
class ClusterShardingProvider @Inject() (val actorSystem: ActorSystem) extends Provider[ClusterSharding] {
  // Eager val: the sharding extension is resolved once, when the provider is
  // constructed, by adapting the injected classic ActorSystem to a typed one.
  val get: ClusterSharding = ClusterSharding.get(Adapter.toTyped(actorSystem))
}
| benmccann/playframework | cluster/play-java-cluster-sharding/src/main/scala/play/cluster/sharding/typed/ClusterShardingModule.scala | Scala | apache-2.0 | 779 |
package scaffvis.client.components
import japgolly.scalajs.react.vdom.prefix_<^._
import japgolly.scalajs.react.{BackendScope, Callback, ReactComponentB, _}
import org.scalajs.dom._
import org.scalajs.dom.raw.HTMLInputElement
import scaffvis.client.components.common.{CSS, GlyphIcon}
import scaffvis.client.store.Store
import scaffvis.client.store.actions.MoleculesActions.{LoadMoleculesFromJsFile, LoadMoleculesFromSampleDataset, LoadMoleculesLocally}
import scalacss.ScalaCssReact._
/** Modal dialog for picking a dataset (an uploaded file or a bundled sample)
 *  and dispatching the matching load action to the Store. */
object LoadDatasetForm {

  /** @param submitHandler invoked once the modal has fully closed. */
  case class Props(submitHandler: () => Callback)

  // The user's current choice in the dialog.
  sealed trait DatasetSelection
  case object NoDataset extends DatasetSelection
  case class UserDataset(file: File) extends DatasetSelection
  case class SampleDataset(name: String) extends DatasetSelection

  case class State(dataset: DatasetSelection = NoDataset, cancelled: Boolean = true)

  class Backend($: BackendScope[Props, State]) {

    /** Dispatches the load action matching the current selection, then hides the modal. */
    def submitForm(hide: Callback): Callback = {
      $.state >>= { state =>
        state.dataset match {
          case NoDataset =>
            Callback.log("No dataset selected") >> hide
          case UserDataset(file) =>
            val loadAction =
              if (file.name.endsWith(".scaffvis")) LoadMoleculesLocally(file) //load locally
              else LoadMoleculesFromJsFile(file) //send to server
            Callback.log("Loading file") >> Store.dispatchCB(loadAction) >> hide
          case SampleDataset(name) =>
            Callback.log("Loading file") >> Store.dispatchCB(LoadMoleculesFromSampleDataset(name)) >> hide
        }
      }
    }

    def formClosed(state: State, props: Props): Callback = props.submitHandler()

    /** Records the chosen file, or NoDataset when the selection was cleared. */
    def onChooseFile(e: ReactEventI) = {
      val fileList: FileList = e.currentTarget.files
      val file = if(fileList.length > 0) UserDataset(fileList.apply(0)) else NoDataset
      $.modState(s => s.copy(dataset = file))
    }

    /**
     * Selects a bundled sample dataset, clearing any previously chosen file.
     *
     * Fix: clearing the native file input is a side effect — it previously ran
     * eagerly, outside any Callback, and dereferenced the ref with Option#get
     * (throwing if the input was not mounted). It is now deferred into the
     * Callback chain and tolerates a missing ref.
     */
    def onChooseSample(name: String) = {
      Callback(fileSelectorRef($).foreach(_.value = "")) >>
        $.modState(s => s.copy(dataset = SampleDataset(name)))
    }

    // Ref to the native <input type="file"> so it can be reset from onChooseSample.
    val fileSelectorRef = Ref[HTMLInputElement]("fileSelectorRef")

    def render(p: Props, s: State) = {
      BootstrapModal(BootstrapModal.Props(
        // header contains a cancel button (X)
        header = hide => <.span(<.button(^.`type` := "button", CSS.btnDefault, CSS.close, ^.onClick --> hide, GlyphIcon.remove), <.h4("Load Dataset")),
        // footer has the OK button that submits the form before hiding it
        footer = hide => <.span(<.button(^.`type` := "button", CSS.btnDefault, ^.onClick --> submitForm(hide), "Load")),
        // this is called after the modal has been hidden (animation is completed)
        closed = formClosed(s, p)),
        <.div(CSS.formGroup,
          <.label(^.`for` := "file", "Select a file to load"),
          <.input.file(CSS.formControl, ^.id := "file", ^.ref := fileSelectorRef,
            ^.onChange ==> onChooseFile
          )
        ),
        <.p("Please note that the dataset might take a long time to load and process. Expect up to one minute for " +
          "every ten thousand molecules in the dataset."
        ),
        <.p(CSS.textMuted, "In case you are not able to load your data set, it might help to load and save it using " +
          "OpenBabel (or a similar tool). The most reliable input formats are SMILES or SDF files, preferably gzipped."),
        <.p("In case you just want to explore Scaffvis and have no particular dataset in mind, you can try sample " +
          "datasets based on ",
          <.a(^.href := "http://www.drugbank.ca/", "DrugBank"),
          " or ",
          <.a(^.href := "https://www.ebi.ac.uk/chembl/sarfari/kinasesarfari", "Kinase SARfari"),
          ":"
        ),
        <.div(^.cls := "radio",
          <.label(
            <.input.radio(
              ^.onChange --> onChooseSample("drugbank"),
              ^.checked := (s.dataset == SampleDataset("drugbank"))
            ),
            "DrugBank"
          )),
        <.div(^.cls := "radio",
          <.label(
            <.input.radio(
              ^.onChange --> onChooseSample("kinasesarfari"),
              ^.checked := (s.dataset == SampleDataset("kinasesarfari"))
            ),
            "Kinase SARfari"
          )
        )
      )
    }
  }

  val component = ReactComponentB[Props]("LoadDatasetForm")
    .initialState(State())
    .renderBackend[Backend]
    .build

  def apply(props: Props) = component(props)
}
| velkoborsky/scaffvis | client/src/main/scala/scaffvis/client/components/LoadDatasetForm.scala | Scala | gpl-3.0 | 4,459 |
/*
* The MIT License (MIT)
* <p>
* Copyright (c) 2018-2020
* <p>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p>
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* <p>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package io.techcode.streamy.protobuf.component
import akka.NotUsed
import akka.stream.scaladsl.{Flow, Framing}
import akka.util.ByteString
import com.google.protobuf.MessageLite
import io.techcode.streamy.event.StreamEvent
/**
 * Protobuf transformer companion: builds Akka Streams stages that convert
 * between length-framed protobuf bytes and [[StreamEvent]]s.
 */
object ProtobufTransformer {

  /**
   * Create a protobuf flow that transforms incoming [[ByteString]]s to [[StreamEvent]]s.
   *
   * @param conf flow configuration.
   * @return new protobuf flow.
   */
  def parser[T <: MessageLite](conf: Parser.Config[T]): Flow[ByteString, StreamEvent, NotUsed] = {
    // De-frame, parse each frame with the prototype's parser, then decode to an event.
    val deserialize: ByteString => T =
      raw => conf.proto.getParserForType.parseFrom(raw.asByteBuffer).asInstanceOf[T]
    Framing.simpleFramingProtocolDecoder(conf.maxSize)
      .map(deserialize)
      .map(conf.decoder)
  }

  /**
   * Create a protobuf flow that transforms incoming [[StreamEvent]]s to [[ByteString]]s.
   *
   * @param conf flow configuration.
   * @return new protobuf flow.
   */
  def printer[T <: MessageLite](conf: Printer.Config[T]): Flow[StreamEvent, ByteString, NotUsed] = {
    // Encode each event to a message, serialize it, then add the length framing.
    val serialize: T => ByteString = message => ByteString.fromArrayUnsafe(message.toByteArray)
    Flow.fromFunction(conf.encoder)
      .map(serialize)
      .via(Framing.simpleFramingProtocolEncoder(conf.maxSize))
  }

  // Parser related stuff
  object Parser {

    // Configuration
    case class Config[T <: MessageLite](
      maxSize: Int = Int.MaxValue - 4,
      proto: MessageLite,
      decoder: T => StreamEvent
    )

  }

  // Printer related stuff
  object Printer {

    // Configuration
    case class Config[T <: MessageLite](
      maxSize: Int = Int.MaxValue - 4,
      proto: MessageLite,
      encoder: StreamEvent => T
    )

  }

}
| amannocci/streamy | plugin-protobuf/src/main/scala/io/techcode/streamy/protobuf/component/ProtobufTransformer.scala | Scala | mit | 2,795 |
/*
* Copyright (c) 2014 Contributor. All rights reserved.
*/
package scala.tools.nsc.classpath
import org.junit.Assert._
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import scala.tools.nsc.Settings
import scala.tools.nsc.backend.jvm.AsmUtils
import scala.tools.nsc.util.ClassPath
import scala.tools.util.PathResolver
@RunWith(classOf[JUnit4])
class JrtClassPathTest {

  /** Looks up java.lang.Object through the classpath abstraction, using the JDK 8
   *  boot-classpath (rt.jar) provider or the JDK 9+ jrt provider depending on the
   *  JVM the test runs on. */
  @Test def lookupJavaClasses(): Unit = {
    val specVersion = scala.util.Properties.javaSpecVersion
    // Run the test using the JDK8 or 9 provider for rt.jar depending on the platform the test is running on.
    val cp: ClassPath =
      if (specVersion == "" || specVersion == "1.8") {
        val settings = new Settings()
        val resolver = new PathResolver(settings)
        val elements = new ClassPathFactory(settings).classesInPath(resolver.Calculated.javaBootClassPath)
        AggregateClassPath(elements)
      }
      else JrtClassPath().get

    // The empty package has no classes, while "java.lang" and Object must be discoverable.
    assertEquals(Nil, cp.classes(""))
    assertTrue(cp.packages("java").toString, cp.packages("java").exists(_.name == "java.lang"))
    assertTrue(cp.classes("java.lang").exists(_.name == "Object"))
    // Verify the bytes returned really decode to java/lang/Object.
    val jl_Object = cp.classes("java.lang").find(_.name == "Object").get
    assertEquals("java/lang/Object", AsmUtils.classFromBytes(jl_Object.file.toByteArray).name)
    assertTrue(cp.list("java.lang").packages.exists(_.name == "java.lang.annotation"))
    assertTrue(cp.list("java.lang").classesAndSources.exists(_.name == "Object"))
    assertTrue(cp.findClass("java.lang.Object").isDefined)
    assertTrue(cp.findClassFile("java.lang.Object").isDefined)
  }
}
| shimib/scala | test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala | Scala | bsd-3-clause | 1,643 |
package jitd.spec
/** A named graph mutation: `name(args…)` rewriting into the given construct tree. */
case class Mutator(
  name: String,
  args: Seq[Field],
  rewrite: ConstructNode
) {
  // Same rendering as before: "mutator name(arg, …) { <rewrite> }".
  override def toString = {
    val renderedArgs = args.map(_.toString).mkString(", ")
    s"mutator $name($renderedArgs) {\\n${rewrite.toString(" ")}\\n}"
  }
} | UBOdin/jitd-synthesis | src/main/scala/jitd/spec/Mutator.scala | Scala | apache-2.0 | 227 |
package org.apache.spark.sql
import java.io.File
import com.intel.ie.SparkInteractiveDriver
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.functions._
import org.apache.spark.{SparkConf, SparkContext}
import scopt.OptionParser
/** One extracted or labelled (person, relation, entity) triple for a company,
 *  together with the sentence it came from. */
case class RelationRow(
  company: String,
  name: String,
  relation: String,
  entity: String,
  text: String
)

/** Per-company evaluation tallies: extracted/labelled totals plus correct/wrong/missed counts. */
case class PageResult(company: String, extracted: Long, labelled: Long, correct: Long, wrong: Long, missed: Long)
/** Evaluates extracted work relations against a labelled ground truth, reporting
 *  recall/precision overall and (optionally) per company. */
object RelationEvaluation {

  /**
   * @param textPath input path to extract relations from
   * @param labelPath input path containing already-labelled triples
   * @param partitionSize minimum partitions for wholeTextFiles
   * @param withDetail whether to print a per-company breakdown
   */
  case class Params(
    textPath: String = null,
    labelPath: String = null,
    partitionSize: Int = 8,
    withDetail: Boolean = false)

  def main(args: Array[String]) {
    val defaultParams = Params()

    val parser = new OptionParser[Params]("RelationEvaluation") {
      head("Relation Revaluation System")
      opt[Int]("partitionSize")
        .text(s"partition size, default: ${defaultParams.partitionSize}")
        .action((x, c) => c.copy(partitionSize = x))
      opt[Boolean]("withDetail")
        .text(s"whether to show detailed evaluation result for each company, default: ${defaultParams.withDetail}")
        .action((x, c) => c.copy(withDetail = x))
      arg[String]("<textPath>")
        .text("input path to extract relations")
        .required()
        .action((x, c) => c.copy(textPath = x))
      arg[String]("<labelPath>")
        .text("input path which has already labeled entity")
        .required()
        .action((x, c) => c.copy(labelPath = x))
    }

    parser.parse(args, defaultParams) match {
      case Some(params) => run(params)
      case _ => sys.exit(1)
    }
  }

  def run(params: Params): Unit = {
    println(s"RelationEvaluation with parameters:\n$params")
    Logger.getLogger("org").setLevel(Level.WARN)
    Logger.getLogger("intel").setLevel(Level.WARN)
    Logger.getLogger("edu").setLevel(Level.WARN)

    val sc = SparkContext.getOrCreate(
      new SparkConf()
        .setAppName(this.getClass.getSimpleName)
    )
    sc.hadoopConfiguration.set("mapreduce.input.fileinputformat.input.dir.recursive", "true")

    val textPath = params.textPath // "data/evaluation/web/"
    val labelPath = params.labelPath // "data/evaluation/extraction"
    val partitionSize = params.partitionSize
    val sqlContext = SQLContext.getOrCreate(sc)

    // Only evaluate companies for which labelled data exists: the company name is
    // the parent directory of each labelled file.
    val companyList = //Array("NCR Corporation")
      sc.wholeTextFiles(labelPath, partitionSize).map { case (title, content) =>
        new File(new File(title).getParent).getName
      }.collect()

    val st = System.nanoTime()

    // Run the extractor over every raw text line (normalizing whitespace/quotes first).
    val extractionResult = sc.wholeTextFiles(textPath, partitionSize)
      .filter { case (title, content) =>
        val companyName = new File(new File(title).getParent).getName
        companyList.contains(companyName)
      }.flatMap { case (title, content) =>
        val companyName = new File(new File(title).getParent).getName
        content.split("\n")
          // .map(line => if (line.length > 500) line.substring(0, 500) else line)
          .map(line => line.replaceAll("\\(|\\)|\"|\"|``|''", "").replace("  ", " "))
          .flatMap(line => SparkInteractiveDriver.getWorkRelation(line))
          // .map(rl => (companyName, rl))
          .map(t => RelationRow(companyName, t.name, t.relation, t.entity, t.text))
      }
    val extractedDF = sqlContext.createDataFrame(extractionResult).cache()

    // Parse the tab-separated labelled triples, applying the same normalization.
    val labelledResult = sc.wholeTextFiles(labelPath, partitionSize)
      .filter { case (title, content) =>
        val companyName = new File(new File(title).getParent).getName
        companyList.contains(companyName)
      }.flatMap { case (title, content) =>
        val companyName = new File(new File(title).getParent).getName
        content.split("\n").filter(_.nonEmpty).map { line =>
          val elements = line.replaceAll("\u00a0", " ").replaceAll("\u200B|\u200C|\u200D|\uFEFF|\\(|\\)|\"|\"|``|''", "")
            .replace("  ", " ").split("\t")
          RelationRow(companyName, elements(0), elements(1), elements(2), elements(3))
        }
      }
    val labelledDF = sqlContext.createDataFrame(labelledResult).cache()

    getResultForOneRelation("all", extractedDF, labelledDF, "title", sc)
    println((System.nanoTime() - st) / 1e9 + " seconds")

    // details evaluation
    if (params.withDetail) {
      // map on an Array already yields an Array; the former .toSeq.toArray round-trip was a no-op.
      val pageResults = companyList.map { companyName =>
        val extractedFiltered = extractedDF.where(col("company") === companyName).cache()
        val labelledFiltered = labelledDF.where(col("company") === companyName).cache()
        val pageResult = getResultForOneRelation(companyName, extractedFiltered, labelledFiltered, "title", sc)
        extractedFiltered.unpersist()
        labelledFiltered.unpersist()
        pageResult
      }
      println(s"Overall results: ${pageResults.size} companies evaluated")

      // Aggregates and prints the per-company tallies at `index` for one relation type.
      def printResultForOneRelation(relation: String, index: Int): Unit = {
        SQLContext.getOrCreate(sc).createDataFrame(sc.parallelize(pageResults.map(_ (index)))).show(300, false)
        val totalCorrect = pageResults.map(_ (index).correct).sum
        val totalWrong = pageResults.map(_ (index).wrong).sum
        val totalMissed = pageResults.map(_ (index).missed).sum
        val recall = totalCorrect.toDouble / (totalCorrect + totalMissed)
        val precision = totalCorrect.toDouble / (totalCorrect + totalWrong)
        println(s"overall result for $relation --- recall: $recall. precision: $precision")
      }
      printResultForOneRelation("title", 0)
      extractedDF.unpersist()
      labelledDF.unpersist()
    }
  }

  /** Compares extracted vs labelled triples (case-insensitively, one relation type),
   *  prints correct/missed/wrong sets, and returns the per-company tallies. */
  def getResultForOneRelation(company: String, extractedRawDF: DataFrame, labelledRawDF: DataFrame,
                              relationType: String, sc: SparkContext): Array[PageResult] = {
    val extractedLowerDF = extractedRawDF
      .where(col("relation").isin(relationType))
      .select(lower(extractedRawDF("name")).alias("name"), extractedRawDF("relation"),
        lower(extractedRawDF("entity")).alias("entity"), extractedRawDF("text"))
    val extractedDF = extractedLowerDF
      .select("name", "relation", "entity")
      .distinct()
      .cache()

    val labelledLowDF = labelledRawDF
      .where(col("relation").isin(relationType))
      // .where(col("relation").isin(relationType))
      .select(lower(labelledRawDF("name")).alias("name"), labelledRawDF("relation"),
        lower(labelledRawDF("entity")).alias("entity"), labelledRawDF("text"))
    val labelledDF = labelledLowDF
      .select("name", "relation", "entity")
      .distinct()
      .cache()

    // Triples present in both sets are correct; labelled-only are missed; extracted-only are wrong.
    val correctDF = labelledDF.intersect(extractedDF).distinct().cache()

    println(company)
    println(Console.BLUE + "correct:")
    println(correctDF.showString(100, false))
    println(Console.RED + "missed:")
    println(labelledDF.except(correctDF).join(labelledLowDF, Seq("name", "relation", "entity"))
      .distinct().showString(100, false))
    println("wrong:")
    println(Console.RED + extractedDF.except(correctDF).join(extractedLowerDF, Seq("name", "relation", "entity"))
      .distinct().showString(100, false))

    val extractedCt = extractedDF.count()
    val labelledCt = labelledDF.count()
    val correctCt = correctDF.count()
    val recall = correctCt.toDouble / labelledCt
    // Guard against division by zero when nothing was extracted.
    val precision = correctCt.toDouble / (if (extractedCt == 0) 1 else extractedCt)
    println(Console.YELLOW_B + s"recall: $recall. precision: $precision. (" +
      s"extracted: ${extractedCt}; labelled: ${labelledCt}; correct: ${correctCt})")
    println(Console.RESET)

    labelledDF.unpersist()
    extractedDF.unpersist()
    correctDF.unpersist()

    // Fix: use the counts captured above. Calling count() again here would recompute the
    // whole (now unpersisted) lineage just to rederive values we already hold.
    Array(PageResult(company, extractedCt, labelledCt, correctCt,
      extractedCt - correctCt, labelledCt - correctCt))
  }
} | intel-analytics/InformationExtraction | src/main/scala/com/intel/ie/evaluation/RelationEvaluation.scala | Scala | gpl-3.0 | 7,770 |
package com.github.wakfudecrypt.types.data
import com.github.wakfudecrypt._
// @BinaryDecoder presumably generates the binary decoding for this record — confirm
// against the annotation's macro before relying on field order.
@BinaryDecoder
case class ItemType(
  id: Short,
  parentId: Short,
  visibleInAnimations: Boolean,
  visibleInMarketPlace: Boolean,
  recyclable: Boolean,
  equipmentPosition: Array[String],
  disabledEquipementPosition: Array[String],
  materialType: Short,
  craftIds: Array[Int]
)

object ItemType extends BinaryDataCompanion[ItemType] {
  // Identifier of this record type within the binary data tables.
  override val dataId = 37
}
| jac3km4/wakfudecrypt | types/src/main/scala/com/github/wakfudecrypt/types/data/ItemType.scala | Scala | mit | 449 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.utils.json
import scala.collection._
import com.fasterxml.jackson.databind.{JsonMappingException, JsonNode}
import com.fasterxml.jackson.databind.node.{ArrayNode, ObjectNode}
/**
 * A simple wrapper over Jackson's JsonNode that enables type safe parsing via the `DecodeJson` type
 * class.
 *
 * Typical usage would be something like:
 *
 * {{{
 * val jsonNode: JsonNode = ???
 * val jsonObject = JsonValue(jsonNode).asJsonObject
 * val intValue = jsonObject("int_field").to[Int]
 * val optionLongValue = jsonObject("option_long_field").to[Option[Long]]
 * val mapStringIntField = jsonObject("map_string_int_field").to[Map[String, Int]]
 * val seqStringField = jsonObject("seq_string_field").to[Seq[String]
 * }}}
 *
 * The `to` method throws an exception if the value cannot be converted to the requested type. An alternative is the
 * `toEither` method that returns an `Either` instead.
 */
trait JsonValue {

  protected def node: JsonNode

  /**
   * Decode this JSON value into an instance of `T`.
   *
   * @throws JsonMappingException if this value cannot be decoded into `T`.
   */
  def to[T](implicit decodeJson: DecodeJson[T]): T = decodeJson.decode(node)

  /**
   * Decode this JSON value into an instance of `Right[T]`, if possible. Otherwise, return an error message
   * wrapped by an instance of `Left`.
   */
  def toEither[T](implicit decodeJson: DecodeJson[T]): Either[String, T] = decodeJson.decodeEither(node)

  /**
   * If this is a JSON object, return an instance of JsonObject. Otherwise, throw a JsonMappingException.
   */
  def asJsonObject: JsonObject =
    // The single-String JsonMappingException constructor is deprecated since Jackson 2.7;
    // pass a null processor (Closeable) to the replacement constructor instead.
    asJsonObjectOption.getOrElse(throw new JsonMappingException(null, s"Expected JSON object, received $node"))

  /**
   * If this is a JSON object, return a JsonObject wrapped by a `Some`. Otherwise, return None.
   */
  def asJsonObjectOption: Option[JsonObject] = this match {
    case j: JsonObject => Some(j)
    case _ => node match {
      case n: ObjectNode => Some(new JsonObject(n))
      case _ => None
    }
  }

  /**
   * If this is a JSON array, return an instance of JsonArray. Otherwise, throw a JsonMappingException.
   */
  def asJsonArray: JsonArray =
    // See asJsonObject for why the (Closeable, String) constructor is used.
    asJsonArrayOption.getOrElse(throw new JsonMappingException(null, s"Expected JSON array, received $node"))

  /**
   * If this is a JSON array, return a JsonArray wrapped by a `Some`. Otherwise, return None.
   */
  def asJsonArrayOption: Option[JsonArray] = this match {
    case j: JsonArray => Some(j)
    case _ => node match {
      case n: ArrayNode => Some(new JsonArray(n))
      case _ => None
    }
  }

  // Equality and hashing delegate to the underlying JsonNode.
  override def hashCode: Int = node.hashCode

  override def equals(a: Any): Boolean = a match {
    case a: JsonValue => node == a.node
    case _ => false
  }

  override def toString: String = node.toString
}
object JsonValue {

  /**
   * Create an instance of `JsonValue` from Jackson's `JsonNode`, choosing the
   * most specific wrapper (object, array, or plain value).
   */
  def apply(node: JsonNode): JsonValue =
    node match {
      case obj: ObjectNode => new JsonObject(obj)
      case arr: ArrayNode => new JsonArray(arr)
      case other => new BasicJsonValue(other)
    }

  // Fallback wrapper for scalar / non-container nodes.
  private class BasicJsonValue private[json] (protected val node: JsonNode) extends JsonValue
}
| ErikKringen/kafka | core/src/main/scala/kafka/utils/json/JsonValue.scala | Scala | apache-2.0 | 3,975 |
package views
import controllers.routes
import org.scalatest.{Matchers, FlatSpec}
import play.api.test.Helpers._
import play.api.test.FakeApplication
/**
* Created by pnewman on 24/11/2015.
*/
class MainSpec extends FlatSpec with Matchers{
  // Registers every assertion below with FlatSpec. It is invoked once at
  // construction time (see the `testMain` call at the bottom of the class),
  // so the `should ... in` blocks are picked up as test cases.
  def testMain: Unit ={
    // Page under test: the shared `main` layout wrapping the index view.
    // Lazy so rendering happens inside the running FakeApplication, not at
    // class construction.
    lazy val html = views.html.main("Hello, World!")(views.html.index("Test message"))
    // Versioned asset routes for the bootstrap bundles the layout links to.
    lazy val bootstrapCss = routes.Assets.versioned("stylesheets/bootstrap.css")
    lazy val bootstrapJs = routes.Assets.versioned("javascripts/bootstrap.js")
    "The main view" should "render an html page" in{
      running(new FakeApplication()){
        contentType(html) should be ("text/html")
      }
    }
    // The expected markup strings below must match the template output exactly,
    // including attribute order and quoting.
    it should "import the ajax javascript library" in{
      contentAsString(html) should include ("<script src = \\"http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js\\"></script>")
    }
    it should "import the required bootstrap-3 css" in{
      contentAsString(html) should include ("<link rel=\\"stylesheet\\" media=\\"screen\\" href=\\"" + bootstrapCss + "\\">")
    }
    it should "import the bootstrap-3 javascript library" in{
      contentAsString(html) should include ("<script src=\\"" + bootstrapJs + "\\" type=\\"text/javascript\\"></script>")
    }
    it should "contain the given content html" in{
      running (new FakeApplication()) {
        contentAsString(html) should include(contentAsString(views.html.index("Test message")))
      }
    }
    it should "contain the debug html" in {
      running (new FakeApplication()) {
        contentAsString(html) should include(contentAsString(views.html.debug()))
      }
    }
  }
  // Trigger test registration (runs during class construction).
  testMain
}
| Tom-Stacey/ScalaMines | test/views/MainSpec.scala | Scala | gpl-2.0 | 1,644 |
//
// Copyright 2016 Commonwealth Bank of Australia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package commbank.coppersmith.util
import java.util.concurrent.TimeUnit.MILLISECONDS
import scala.util.{Failure, Success, Try}
import scalaz.Order
import org.joda.time.{DateTime, Period, LocalDate, DateTimeZone}
import org.joda.time.format.DateTimeFormat
import commbank.coppersmith.util.Timestamp.Offset
/** A calendar-based span between two dates, broken into whole years, months and days. */
case class DatePeriod(years: Int, months: Int, days: Int)
object Datestamp {
  /** Pre-built parsers for the ISO-8601 extended date format `yyyy-MM-dd`. */
  val parseDefault = parseFormat("yyyy-MM-dd")
  val unsafeParseDefault = unsafeParseFormat("yyyy-MM-dd")

  /** Parse an ISO-8601 (`yyyy-MM-dd`) date, returning the input and pattern on failure. */
  def parse(date: String): Either[(String, String), Datestamp] = parseDefault(date)

  /** Parse an ISO-8601 (`yyyy-MM-dd`) date, throwing on failure. */
  def unsafeParse(date: String): Datestamp = unsafeParseDefault(date)

  /**
   * Create a parse function for a Joda-Time date pattern.
   *
   * @param pattern the Joda-Time pattern to parse with
   * @return a function from a date string to either the parsed `Datestamp`,
   *         or the input string and pattern used if parsing fails
   */
  def parseFormat(pattern: String): (String => Either[(String, String), Datestamp]) = {
    val fmt = DateTimeFormat.forPattern(pattern)
    time => {
      Try(fmt.parseLocalDate(time)) match {
        case Success(d) => Right(Datestamp(d.getYear, d.getMonthOfYear, d.getDayOfMonth))
        case Failure(_) => Left((time, pattern))
      }
    }
  }

  /**
   * Create an unsafe parse function for a Joda-Time date pattern. The returned
   * function throws (via `sys.error`) if parsing fails.
   */
  def unsafeParseFormat(pattern: String): (String => Datestamp) = {
    val f = parseFormat(pattern)
    // Parse exactly once: the original implementation called `f(time)` again
    // inside the error branch just to rebuild the same failure value.
    time => f(time) match {
      case Right(date) => date
      case Left(failure) => sys.error(s"Unable to parse date: $failure")
    }
  }

  // Ordering by the zero-padded ISO string is equivalent to chronological order.
  implicit def ordering[A <: Datestamp]: Ordering[A] = Ordering.by(_.toIso8601ExtendedFormatString)
  implicit def scalazOrder[A <: Datestamp]: Order[A] = Order.fromScalaOrdering(ordering)
}
/**
 * Factory and parsing helpers for [[Timestamp]]. All parsers require the input
 * to carry timezone information; the special offset `-00:00` is treated as
 * "offset unknown" (an empty [[Offset]]), following RFC3339 semantics.
 */
object Timestamp {
  // (hours, minutes) UTC offset; None means the offset is unknown (-00:00).
  type Offset = Option[(Int, Int)]
  val parseWithMillisDefault = parseFormatWithOffset("yyyy-MM-dd'T'HH:mm:ss.SSSZZ")
  val parseWithoutMillisDefault = parseFormatWithOffset("yyyy-MM-dd'T'HH:mm:ssZZ")
  val unsafeParseWithMillisDefault = unsafeParseFormatWithOffset("yyyy-MM-dd'T'HH:mm:ss.SSSZZ")
  val unsafeParseWithoutMillisDefault = unsafeParseFormatWithOffset("yyyy-MM-dd'T'HH:mm:ssZZ")
  /**
   * Parses a timestamp in RFC3339 format with millisecond precision.
   *
   * @param time The timestamp to parse
   * @return Either the parsed timestamp, or the time arg and pattern used if parsing fails
   */
  def parseWithMillis(time: String): Either[(String, String), Timestamp] =
    parseWithMillisDefault(time, parseOffset(time))
  /**
   * Parses a timestamp in RFC3339 format with millisecond precision. An exception is thrown if
   * parsing fails.
   *
   * @param time The timestamp to parse
   * @return The parsed timestamp
   */
  def unsafeParseWithMillis(time: String): Timestamp =
    unsafeParseWithMillisDefault(time, parseOffset(time))
  /**
   * Parses a timestamp in RFC3339 format without millisecond precision.
   *
   * @param time The time string to parse
   * @return Either the parsed Timestamp, or the time arg and pattern used if parsing fails
   */
  def parseWithoutMillis(time: String): Either[(String, String), Timestamp] =
    parseWithoutMillisDefault(time, parseOffset(time))
  /**
   * Parses a timestamp in RFC3339 format without millisecond precision. An exception is thrown if
   * parsing fails.
   *
   * @param time The time string to parse
   * @return The parsed Timestamp
   */
  def unsafeParseWithoutMillis(time: String): Timestamp =
    unsafeParseWithoutMillisDefault(time, parseOffset(time))
  /**
   * Creates a parse function for a pattern. Note: The pattern must parse timezone information.
   *
   * @param pattern The pattern to use (Must parse timezone)
   * @return A function from a time string to either the parsed Timestamp,
   *         or the time arg and pattern used if parsing fails
   */
  def parseFormat(pattern: String): String => Either[(String, String), Timestamp] = {
    // Remove literals (quoted sections) so a literal 'Z' in the pattern text
    // doesn't count as a timezone directive.
    val p = pattern.replaceAll("'[^']*'", "")
    if (!p.contains("Z")) throw new IllegalArgumentException(s"$pattern doesn't parse timezones.")
    val fmt = DateTimeFormat.forPattern(pattern)
    time => {
      val triedTime = Try {
        val dt = fmt.withOffsetParsed.parseDateTime(time)
        val tz = dt.getZone.getOffset(dt)
        // Hours keep their sign; minutes are stored as a non-negative value
        // (e.g. -09:30 becomes (-9, 30)).
        val offset = Some((MILLISECONDS.toHours(tz).toInt,
          Math.abs(MILLISECONDS.toMinutes(tz).toInt % 60)))
        Timestamp(dt.getMillis, offset)
      }
      // Either.cond takes its branches by-name, so `.get` is only forced when
      // the Try actually succeeded.
      Either.cond(triedTime.isSuccess, triedTime.get, (time, pattern))
    }
  }
  /**
   * Creates an unsafe parse function for a pattern. Note: The pattern must parse timezone
   * information. An exception is thrown if parsing fails.
   *
   * @param pattern The pattern to use (Must parse timezone)
   * @return An unsafe parse function from time string to Timestamp
   */
  def unsafeParseFormat(pattern: String): String => Timestamp = {
    val f = parseFormat(pattern)
    s => f(s).right.getOrElse(sys.error(s"Unable to parse time: ${f(s).left.get}"))
  }
  /**
   * Creates a parse function for a pattern. The function should be used to provide offset
   * information missing from the timestamp, or to overwrite offset information.
   * Note: The time will not be adjusted to the new offset, the existing offset will be replaced.
   *
   * @param pattern The pattern to use to parse
   * @return A function from a time string and offset to either the parsed Timestamp,
   *         or the time arg and pattern used if parsing fails
   */
  def parseFormatWithOffset(pattern: String): (String, Offset) => Either[(String, String), Timestamp] = {
    val fmt = DateTimeFormat.forPattern(pattern)
    (time, offset) => {
      val (h, m) = offset.getOrElse((0, 0))
      val tz = DateTimeZone.forOffsetHoursMinutes(h, m)
      // Without withOffsetParsed the timezone fields are moved to system timezone
      val triedDT = Try(fmt.withOffsetParsed().parseDateTime(time).withZoneRetainFields(tz))
      Either.cond(triedDT.isSuccess, Timestamp(triedDT.get.getMillis, offset), (time, pattern))
    }
  }
  /**
   * Creates an unsafe parse function for a pattern. The function should be used to provide offset
   * information missing from the timestamp, or to overwrite offset information.
   * Note: The time will not be adjusted to the new offset, the existing offset will be replaced.
   *
   * @param pattern The pattern to use to parse
   * @return An unsafe function from a time string to a parsed Timestamp
   */
  def unsafeParseFormatWithOffset(pattern: String): (String, Offset) => Timestamp = {
    val f = parseFormatWithOffset(pattern)
    (s, o) => f(s, o).right.getOrElse(sys.error(s"Unable to parse time: ${f(s, o).left.get}"))
  }
  // Extract the (hours, minutes) offset suffix from a raw time string.
  // Returns None both for the RFC3339 "unknown offset" marker -00:00 and for
  // strings with no recognizable +HH:MM / -HH:MM component.
  private def parseOffset(time: String): Option[(Int, Int)] = {
    // Parse timezone hour and minute
    val tzParser =
      """.*([\\-+]\\d{2}):(\\d{2}).*""".r
    // -00:00 represents unknown timezone
    val offset = time match {
      case tzParser("-00", "00") => None
      case tzParser(h, m) => Some((h.toInt, m.toInt))
      case _ => None
    }
    offset
  }
  // Order by instant first, then by offset for equal instants.
  implicit def ordering[A <: Timestamp]: Ordering[A] = Ordering.by(t => (t.millis, t.offset))
  implicit def scalazOrder[A <: Timestamp]: Order[A] = Order.fromScalaOrdering(ordering)
}
/** A timezone-free calendar date (year, month-of-year, day-of-month). */
case class Datestamp(year: Int, month: Int, day: Int) {
  /** Convert to a Joda `LocalDate` for calendar arithmetic. */
  protected def toLocalDate: org.joda.time.LocalDate =
    new LocalDate(year, month, day)

  /**
   * Calendar difference from this date to `that`, expressed as whole years,
   * months and days (sign follows Joda's `Period` semantics — components are
   * negative when `that` precedes this date).
   *
   * Note: the previous implementation pattern-matched `that` into unused
   * bindings; the match was a no-op and has been removed.
   */
  def difference(that: Datestamp): DatePeriod = {
    val p = new Period(this.toLocalDate, that.toLocalDate)
    DatePeriod(p.getYears, p.getMonths, p.getDays)
  }

  /** Render as zero-padded ISO-8601 extended format, e.g. `2016-03-07`. */
  def toIso8601ExtendedFormatString: String =
    f"$year%04d-$month%02d-$day%02d"
}
/**
 * An instant in time (epoch milliseconds) together with an optional UTC
 * offset; `None` means the offset is unknown (rendered as `-00:00`).
 */
case class Timestamp(millis: Long, offset: Offset) {
  /** Re-express this instant in UTC by formatting and reparsing it in the UTC zone. */
  def toUTC: Timestamp = {
    val utcDateTime = toDateTime.toDateTime(DateTimeZone.UTC)
    Timestamp.unsafeParseWithMillis(utcDateTime.toString("yyyy-MM-dd'T'HH:mm:ss.SSSZZ"))
  }

  /** View this instant as a Joda `DateTime` in the stored offset (UTC when unknown). */
  protected def toDateTime: org.joda.time.DateTime = {
    val (hours, minutes) = offset.getOrElse((0, 0))
    new DateTime(millis, DateTimeZone.forOffsetHoursMinutes(hours, minutes))
  }

  /** Render in RFC3339 form, using `-00:00` when the offset is unknown. */
  def toRfc3339String: String = {
    val zoneSuffix = offset match {
      case Some((h, m)) => f"$h%+03d:$m%02d"
      case None => "-00:00"
    }
    f"${toDateTime.toString("yyyy-MM-dd'T'HH:mm:ss.SSS")}$zoneSuffix"
  }
}
| CommBank/coppersmith | core/src/main/scala/commbank/coppersmith/util/DateTime.scala | Scala | apache-2.0 | 8,686 |
/* _____ _ ____ _ _
* | __ \\ | | | _ \\ (_) | |
* | |__) |___ _ __ __| | __ _| |_) |_ __ _ __| | __ _ ___
* | _ // _ \\| '_ \\ / _` |/ _` | _ <| '__| |/ _` |/ _` |/ _ \\
* | | \\ \\ (_) | | | | (_| | (_| | |_) | | | | (_| | (_| | __/
* |_| \\_\\___/|_| |_|\\__,_|\\__,_|____/|_| |_|\\__,_|\\__, |\\___|
* __/ |
* |___/
*
* RondaBridge : Akka Actors to build UDP/TCP/WebSocket/TLS Servers
* Copyright (C) 2018 Advanced Software Production Line, S.L.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2.1
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free
* Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
* 02111-1307 USA
*
* You may find a copy of the license under this software is released
* at COPYING file. This is LGPL software: you are welcome to develop
* proprietary applications using this library without any royalty or
* fee but returning back any change, improvement or addition in the
* form of source code, project image, documentation patches, etc.
*
* For commercial support on build Websocket enabled solutions
* contact us:
*
* Postal address:
* Advanced Software Production Line, S.L.
* Av. Juan Carlos I, Nº13, 2ºC
* Alcalá de Henares 28806 Madrid
* Spain
*
* Email address:
* info@aspl.es - http://www.aspl.es/nopoll
*/
package ronda.tcp
import akka.actor.{ Actor, ActorLogging, ActorRef, ActorSystem, Props, Timers, Terminated, PoisonPill }
/* import akka.util.ByteString */
import scala.util.{Try, Success, Failure}
import java.net.Socket
import java.nio.channels.SocketChannel
/* selection api */
import java.nio.channels.{Selector, SelectionKey, ClosedChannelException}
import java.nio.channels.spi.SelectorProvider
/* support for 1 millis */
import scala.concurrent.duration._
import ronda.util.Common
import scala.collection.mutable.Map
/** Ask the watcher to monitor `socketChannel`, notifying `conn` when bytes arrive. */
final case class RegisterSocket (conn : ActorRef, socketChannel : SocketChannel)
/** Sent to a connection actor when its socket has data ready to read. */
final case object ProcessBytesReceived
/** Sent by a connection actor to re-arm read notifications after it has drained its socket. */
final case object KeepSending
import Api._
import TcpWatcher._
/**
 * Actor that multiplexes read-readiness notifications for a set of registered
 * `SocketChannel`s over a single NIO `Selector`. It polls the selector on a
 * periodic timer and sends `ProcessBytesReceived` to the owning connection
 * actor when data is available. A notified connection is removed from the
 * watch list until it re-arms itself with `KeepSending`.
 */
class TcpWatcher extends Actor with Timers with ActorLogging {
  // Internal timer tick message and the key used to (re)schedule it.
  final case object CheckForReading
  final case object CheckForReadingKey
  /* map of socket channel to connections we are watch */
  val socketsToConns = Map[SocketChannel, ActorRef]()
  /* reverse mapping: connection actor to its socket channel */
  val connsToSockets = Map[ActorRef, SocketChannel]()
  /* create selector to watch for this socket */
  val socketSelector : Selector = SelectorProvider.provider.openSelector
  /* map that holds socket channels and connections that have been
   * notified, and hence, removed from the watch list
   * (SocketSelector) */
  val socketsNotified = Map[ActorRef, SocketChannel]()
  /* master actor, mostly for server side to have notification and
   * control when this actor can stop */
  var masterActor : Option[ActorRef] = None
  /* auto stop watcher when no more connections to watch is found */
  var autoStop : Boolean = true
  /* schedule the first polling timer */
  programTimer
  def receive = {
    case DisableAutoStop =>
      autoStop = false
    case ReplaceMasterActor (actor) =>
      /* say I'm your master..... */
      masterActor = Some (actor)
      /* ..yes, you are my master */
    case CheckForReading =>
      /* checking for pending reads but only if socket is not closed */
      checkPendingReading
    case RegisterSocket (conn, socketChannel) =>
      /* register a new socket to watch for */
      val key = socketChannel.register (socketSelector, SelectionKey.OP_READ)
      /* register this references */
      socketsToConns += socketChannel -> conn
      connsToSockets += conn -> socketChannel
      /* watch for this connection to close */
      context.watch (conn)
      /* programming listening timers */
      programTimer
    case KeepSending =>
      if (socketSelector.isOpen) {
        /* find connection and associated socketChannel to reenable it */
        socketsNotified.get (sender) match {
          case Some (socketChannel) =>
            /* register socketSelector */
            try { socketChannel.register (socketSelector, SelectionKey.OP_READ) } catch {
              case e : ClosedChannelException =>
                /* channel already closed: unregister connection */
                unregisterConnection (sender)
              case e : Exception =>
                log.error (s"*** ERROR **** :: TcpWatcher.KeepSending :: found exception :: ${e}")
            }
            /* remove this socketChannel as notified so we can send them more
             * notifications */
            socketsNotified -= sender
          case None => /* do nothing */
        }
      }
    case Terminated (conn) =>
      /* call to unregister connection */
      unregisterConnection (conn)
    case ConnectionClosed (error, details, conn) =>
      /* call to unregister connection */
      unregisterConnection (conn)
    case PoisonPill =>
      timers.cancelAll ()
      /* no need to call postStop or context.stop (self) here, it going to
       * happen right after this message handling finishes */
    case StopWatcher =>
      /* stop watcher only if we have 0 connections */
      if (connsToSockets.size == 0)
        context.stop (self)
    case unexpected =>
      log.error (s"TcpWatcher :: Unexpected message received: ${unexpected} from ${sender}")
  }
  /** Remove `conn` from all bookkeeping maps and, when it was the last watched
   *  connection and auto-stop is enabled, either ask the master for permission
   *  to stop or stop this actor directly. */
  private def unregisterConnection (conn : ActorRef) = {
    val sizeBeforeRemoving = connsToSockets.size
    /* connection terminated, unregister socket */
    connsToSockets.get (conn) match {
      case None => /* nothing to do */
      case Some (socketChannel) =>
        /* cancel subscription */
        unregisterSocketSelector (socketChannel)
        /* remove socketChannel from map */
        socketsToConns -= socketChannel
    }
    /* remove connection from map */
    connsToSockets -= conn
    /* self stop this actor when reached 0 connections watched */
    if (sizeBeforeRemoving == 1 && connsToSockets.size == 0 && autoStop) {
      masterActor match {
        case Some (actor) =>
          /* request authorization to finish */
          actor ! RequestToStop
        case None =>
          /* no master, stop tcpwatcher */
          /* cancel timer */
          timers.cancelAll ()
          context.stop (self)
      }
    }
  }
  /** Cancel the selector registration for `socketChannel`, swallowing (but
   *  logging) any failure. */
  private def unregisterSocketSelector (socketChannel : SocketChannel) = try{
    /* cancel subscription */
    val key = socketChannel.keyFor (socketSelector)
    if (key != null)
      key.cancel
  } catch {
    case e : Exception => /* nothing */
      log.error (s"**** ERROR **** :: TcpWatcher.unregisterSocketSelector :: found exception :: ${e}")
  }
  // Tear everything down: cancel timers, unsubscribe all channels, tell every
  // watched connection to close (without notification), and close the selector.
  override def postStop = {
    timers.cancelAll ()
    /* clear all registrations */
    connsToSockets.foreach {
      case (conn, socketChannel) =>
        unregisterSocketSelector (socketChannel)
        /* send notification to close watched connection without
         * notification */
        conn ! CloseConnection (false)
        /* remove connection */
        connsToSockets -= conn
    }
    /* clear both hashes */
    connsToSockets.clear
    socketsToConns.clear
    socketsNotified.clear
    /* close socket selector */
    socketSelector.close
  }
  /** Re-schedule the polling timer and drain the selector until no more keys
   *  report ready. */
  def checkPendingReading = {
    /* after reading, programming a timer again */
    programTimer
    /* read all pending content */
    var keepPolling = false
    do {
      keepPolling = readPending
    } while (keepPolling)
  }
  /* var showLogCount = 0 */
  /** Run one non-blocking `selectNow` pass. Returns true when at least one key
   *  was ready (caller should poll again), false when idle or on error. */
  def readPending : Boolean =
    Try (socketSelector.selectNow) match {
      case Success (keysReady) =>
        if (keysReady > 0) {
          val keys = socketSelector.selectedKeys().iterator ()
          while (keys.hasNext()) {
            val key : SelectionKey = keys.next
            keys.remove
            if (! key.isValid)
              removeConnectionByKey (key)
            else {
              /* check key and notify */
              checkKeyAndNotify (key)
            }
          }
        } /* end if */
        /* keepPolling */
        keysReady > 0
      case Failure(e) =>
        /* report error found */
        val msg = s"TcpWatcher.readPending (10005) :: found exception ${e} while reading..."
        log.error (msg)
        /* keepPolling */
        false
    }
  /** Drop the connection associated with an invalidated selection key. */
  def removeConnectionByKey (key : SelectionKey) = try {
    val socketChannel = key.channel.asInstanceOf[SocketChannel]
    socketsToConns.get (socketChannel) match {
      case None => /* nothing to do: LIBRARY ERROR?? */
      case Some (conn) =>
        /* unregister connection */
        unregisterConnection (conn)
    }
  } catch {
    case e : Exception =>
      log.error (s"**** ERROR ***** :: TcpWatcher.removeConnectionByKey :: found exception :: ${e}")
  }
  /** If `key` is readable, notify the owning connection and pause watching it
   *  (it must send `KeepSending` to be re-armed). */
  def checkKeyAndNotify (key : SelectionKey) =
    try {
      if (key.isValid && key.isReadable) {
        /* get connection and socket channel from registration key */
        val socketChannel = key.channel.asInstanceOf[SocketChannel]
        socketsToConns.get (socketChannel) match {
          case None => /* nothing to do: LIBRARY ERROR?? */
          case Some (conn) =>
            /* report there is content to be received */
            conn ! ProcessBytesReceived
            /* flag this socket as notified and remove it from the watch list */
            socketsNotified += conn -> socketChannel
            key.cancel
        }
      }
    } catch {
      case e : Exception =>
        log.error (s"**** ERROR ***** :: TcpWatcher.checkKeyAndNotify :: found exception :: ${e}")
        /* do nothing here, let the system to collect the key and the
         * connection with postStop */
        /* key canceled, remove connection */
        // keysToConns.get (key) match {
        //   case None => /* nothing to do: LIBRARY ERROR?? */
        //   case Some ((conn, socketChannel)) =>
        //     unregisterConnection (conn)
        // }
    }
  /** Schedule the next `CheckForReading` tick, but only while there is at
   *  least one watched connection. */
  def programTimer = {
    if (connsToSockets.size > 0) {
      /* configure max timeout to check */
      val timeout = 5
      /* timer */
      timers.startSingleTimer (CheckForReadingKey, CheckForReading, timeout.millis)
    }
  }
}
object TcpWatcher {
  /***
   * Internal message to make TcpWatcher to request stop in the case
   * there is a master actor.
   */
  case object RequestToStop
  /***
   * Internal message to request watcher to stop (honoured only when no
   * connections remain watched).
   */
  case object StopWatcher
  /** Allows to configure tcpwatcher to avoid auto stop when no more
   *  connections are to be watched. This is mostly useful for client
   *  side.
   */
  case object DisableAutoStop
  // Props for spawning a TcpWatcher instance.
  val config = Props(classOf[TcpWatcher])
  /** Spawn a new watcher actor with a randomized name under `system`. */
  def create (implicit system : ActorSystem) : ActorRef =
    system.actorOf (config, Common.randomName ("TcpWatcher"))
}
| ASPLes/RondaBridge | src/main/scala/ronda/tcp/TcpWatcher.scala | Scala | lgpl-2.1 | 11,574 |
package com.twitter.server.handler
import com.twitter.finagle.server.ServerRegistry
import com.twitter.finagle.Service
import com.twitter.finagle.util.StackRegistry
import com.twitter.io.Buf
import com.twitter.server.util.HttpUtils._
import com.twitter.server.util.MetricSource
import com.twitter.server.view.StackRegistryView
import com.twitter.util.Future
private object ServerRegistryHandler {
  /** Render the registered servers as an html fragment: one bootstrap tab per
   *  server, each with stat entries (load/failures/success/requests) keyed by
   *  the server's metric scope and refreshed via ajax from /admin/metrics. */
  def render(servers: Seq[(String, StackRegistry.Entry)]): String =
    s"""<link type="text/css" href="/admin/files/css/server-registry.css" rel="stylesheet"/>
      <script type="application/javascript" src="/admin/files/js/server-registry.js"></script>
      <script type="application/javascript" src="/admin/files/js/chart-renderer.js"></script>
      <ul id="server-tabs" class="nav nav-tabs" data-refresh-uri="/admin/metrics">
      ${
        (for {
          (scope, entry) <- servers
        } yield {
          s"""<li><a href="#${entry.name}-entry" data-toggle="tab">$scope</a></li>"""
        }).mkString("\\n")
       }
      </ul>
      <!-- Tab panes -->
      <div id="servers" class="tab-content">
      ${
        (for ((scope, entry) <- servers) yield {
          val scopeDash = scope.replace("/", "-")
          s"""<div class="tab-pane borders" id="${entry.name}-entry">
                <div class="row">
                  <!-- server stats -->
                  <div class="server-info col-md-3">
                    <dl class="server-stats dl-horizontal">
                      <dt><a href="/admin/metrics#$scope/load">Load:</a></dt>
                      <dd id="${scopeDash}-load" data-key="$scope/load">...</dd>
                      <dt><a href="/admin/metrics#$scope/failures">Failures:</a></dt>
                      <dd id="${scopeDash}-failures" data-key="$scope/failures">...</dd>
                      <dt><a href="/admin/metrics#$scope/success">Success:</a></dt>
                      <dd id="${scopeDash}-success" data-key="$scope/success">...</dd>
                      <dt><a href="/admin/metrics#$scope/requests">Requests:</a></dt>
                      <dd id="${scopeDash}-requests"data-key="$scope/requests">...</dd>
                    </dl>
                  </div>
                  <!-- graph -->
                  <div id="server-graph" class="col-md-9"></div>
                </div>
              </div>"""
        }).mkString("\\n")
      }
      </div>"""
}
/**
 * Renders information about servers registered to Finagle's ServerRegistry
 * in an html fragment. Servers can be queried by passing in the server name
 * as part of the uri (ex. "/admin/servers/myserver").
 */
class ServerRegistryHandler(
  source: MetricSource = new MetricSource,
  registry: StackRegistry = ServerRegistry
) extends Service[Request, Response] {
  // Search the metrics source for the stat scope that includes `serverName`.
  // The search namespace includes both "$serverName/" and "srv/$serverName"
  // to take into account finagle's ServerStatsReceiver. Note, unnamed servers are
  // ignored as we can't disambiguate their stats.
  private[this] def findScope(serverName: String): Option[String] = {
    val k0 = s"$serverName"
    val k1 = s"srv/$serverName"
    if (source.contains(s"$k0/load")) Some(k0)
    else if (source.contains(s"$k1/load")) Some(k1)
    else None
  }

  def apply(req: Request): Future[Response] = {
    val (path, _) = parse(req.getUri)
    path.split('/').last match {
      case idx @ ("index.html" | "index.htm" | "index.txt" | "servers") =>
        // Collect every named server together with its resolved stat scope;
        // servers whose scope cannot be found in the metric source are skipped.
        val servers = (registry.registrants flatMap {
          case e: StackRegistry.Entry if e.name.nonEmpty =>
            for (scope <- findScope(e.name)) yield (scope, e)
          case _ => Nil
        }).toSeq
        val html = ServerRegistryHandler.render(servers)
        // This is useful to avoid the returned fragment being wrapped
        // with an index in the context of an ajax call.
        val typ = if (idx.endsWith(".txt")) "text/plain" else "text/html"
        newResponse(
          contentType = s"$typ;charset=UTF-8",
          content = Buf.Utf8(html)
        )
      case name =>
        // `find` stops at the first matching registrant instead of building the
        // full filtered collection and taking its head.
        registry.registrants.find(_.name == name) match {
          case None => new404(s"$name could not be found.")
          case Some(server) =>
            val scope = findScope(server.name)
            val html = StackRegistryView.render(server, scope)
            newResponse(
              contentType = "text/html;charset=UTF-8",
              content = Buf.Utf8(html)
            )
        }
    }
  }
}
| travisbrown/twitter-server | src/main/scala/com/twitter/server/handler/ServerRegistryHandler.scala | Scala | apache-2.0 | 4,745 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package debugger
/*
import libcog._
import cogdebugger._
object DebuggerExample extends CogDebuggerApp( new ComputeGraph{
val a = ScalarField(0f)
a <== a + 1
})
*/
| hpe-cct/cct-core | src/test/scala/debugger/DebuggerExample.scala | Scala | apache-2.0 | 796 |
package controllers
import helper.services.IndicesStatsService
import helper.utils.AuthenticatedAction
import play.api.mvc._
import esclient.Elasticsearch
import views._
/** Controller that renders the Elasticsearch index overview page. */
object ListIndices extends Controller {

  // Execution context used to transform the asynchronous stats lookup.
  implicit val context = scala.concurrent.ExecutionContext.Implicits.global

  /**
   * Render the list of all Elasticsearch indices behind authentication.
   * `highlightIndex` is currently unused by this action body — presumably it is
   * referenced from the routes file; kept for route compatibility.
   */
  def index(highlightIndex: Option[String]) = AuthenticatedAction {
    Action.async { implicit request =>
      val statsService = new IndicesStatsService(new Elasticsearch)
      statsService.getIndexList.map { indices =>
        Ok(html.listindices.indexList(indices))
      }
    }
  }
} | MeiSign/Fillable | app/controllers/ListIndices.scala | Scala | apache-2.0 | 671 |
package slick.jdbc
import java.util.UUID
import java.sql.{PreparedStatement, ResultSet}
import scala.concurrent.ExecutionContext
import slick.ast._
import slick.ast.Util._
import slick.basic.Capability
import slick.compiler.{Phase, CompilerState}
import slick.dbio._
import slick.jdbc.meta.{MIndexInfo, MColumn, MTable}
import slick.lifted._
import slick.model.Model
import slick.relational.RelationalProfile
import slick.util.ConstArray
import slick.util.MacroSupport.macroSupportInterpolation
/** Slick profile for PostgreSQL.
*
* This profile implements [[slick.jdbc.JdbcProfile]]
* ''without'' the following capabilities:
*
* <ul>
* <li>[[slick.jdbc.JdbcCapabilities.insertOrUpdate]]:
* InsertOrUpdate operations are emulated on the server side with a single
* JDBC statement executing multiple server-side statements in a transaction.
* This is faster than a client-side emulation but may still fail due to
* concurrent updates. InsertOrUpdate operations with `returning` are
* emulated on the client side.</li>
* <li>[[slick.jdbc.JdbcCapabilities.nullableNoDefault]]:
* Nullable columns always have NULL as a default according to the SQL
* standard. Consequently Postgres treats no specifying a default value
* just as specifying NULL and reports NULL as the default value.
* Some other dbms treat queries with no default as NULL default, but
* distinguish NULL from no default value in the meta data.</li>
* <li>[[slick.jdbc.JdbcCapabilities.supportsByte]]:
* Postgres doesn't have a corresponding type for Byte.
* SMALLINT is used instead and mapped to Short in the Slick model.</li>
* </ul>
*
* Notes:
*
* <ul>
* <li>[[slick.relational.RelationalCapabilities.typeBlob]]:
* The default implementation of the <code>Blob</code> type uses the
* database type <code>lo</code> and the stored procedure
* <code>lo_manage</code>, both of which are provided by the "lo"
* extension in PostgreSQL.</li>
* </ul>
*/
trait PostgresProfile extends JdbcProfile {
  /** PostgreSQL-specific capability set; see the profile's header Scaladoc for
    * the rationale behind each removed JDBC capability. */
  override protected def computeCapabilities: Set[Capability] = (super.computeCapabilities
    - JdbcCapabilities.insertOrUpdate
    - JdbcCapabilities.nullableNoDefault
    - JdbcCapabilities.supportsByte
  )
  /** Model builder that normalizes PostgreSQL metadata quirks: literal-encoded
    * default values, the synthetic "public" schema, unbounded varchar lengths,
    * Postgres-specific column types, and quoted index column names. */
  class ModelBuilder(mTables: Seq[MTable], ignoreInvalidDefaults: Boolean)(implicit ec: ExecutionContext) extends JdbcModelBuilder(mTables, ignoreInvalidDefaults) {
    override def createTableNamer(mTable: MTable): TableNamer = new TableNamer(mTable) {
      override def schema = super.schema.filter(_ != "public") // remove default schema
    }
    override def createColumnBuilder(tableBuilder: TableBuilder, meta: MColumn): ColumnBuilder = new ColumnBuilder(tableBuilder, meta) {
      // Matches string defaults like `'foo'::character varying`, capturing `foo`.
      val VarCharPattern = "^'(.*)'::character varying$".r
      // Matches parenthesized integer defaults such as `(-1)`.
      val IntPattern = "^\\\\((-?[0-9]*)\\\\)$".r
      // Decode Postgres' textual default-value literals into typed defaults;
      // fall back to the generic builder, treating "no default" on a nullable
      // column as a NULL default (see the profile's nullableNoDefault note).
      override def default = meta.columnDef.map((_,tpe)).collect{
        case ("true","Boolean") => Some(Some(true))
        case ("false","Boolean") => Some(Some(false))
        case (VarCharPattern(str),"String") => Some(Some(str))
        case (IntPattern(v),"Int") => Some(Some(v.toInt))
        case (IntPattern(v),"Long") => Some(Some(v.toLong))
        case ("NULL::character varying","String") => Some(None)
        case (v,"java.util.UUID") => {
          val uuid = v.replaceAll("[\\'\\"]", "") //strip quotes
                      .stripSuffix("::uuid") //strip suffix
          Some(Some(java.util.UUID.fromString(uuid)))
        }
      }.getOrElse{
        val d = super.default
        if(meta.nullable == Some(true) && d == None){
          Some(None)
        } else d
      }
      // Postgres reports unbounded varchar as length 2147483647; drop it.
      override def length: Option[Int] = {
        val l = super.length
        if(tpe == "String" && varying && l == Some(2147483647)) None
        else l
      }
      // Map Postgres-specific type names onto Scala/JDBC model types.
      override def tpe = meta.typeName match {
        case "bytea" => "Array[Byte]"
        case "lo" if meta.sqlType == java.sql.Types.DISTINCT => "java.sql.Blob"
        case "uuid" => "java.util.UUID"
        case _ => super.tpe
      }
    }
    override def createIndexBuilder(tableBuilder: TableBuilder, meta: Seq[MIndexInfo]): IndexBuilder = new IndexBuilder(tableBuilder, meta) {
      // FIXME: this needs a test
      override def columns = super.columns.map(_.stripPrefix("\\"").stripSuffix("\\""))
    }
  }
  /** Use the PostgreSQL-aware model builder defined above. */
  override def createModelBuilder(tables: Seq[MTable], ignoreInvalidDefaults: Boolean)(implicit ec: ExecutionContext): JdbcModelBuilder =
    new ModelBuilder(tables, ignoreInvalidDefaults)
  /** Enumerate plain tables only (no views/system tables). */
  override def defaultTables(implicit ec: ExecutionContext): DBIO[Seq[MTable]] =
    MTable.getTables(None, None, None, Some(Seq("TABLE")))
  override val columnTypes = new JdbcTypes
  // Postgres handles DISTINCT natively; skip Slick's rewriteDistinct phase.
  override protected def computeQueryCompiler = super.computeQueryCompiler - Phase.rewriteDistinct
  override def createQueryBuilder(n: Node, state: CompilerState): QueryBuilder = new QueryBuilder(n, state)
  override def createUpsertBuilder(node: Insert): InsertBuilder = new UpsertBuilder(node)
  override def createTableDDLBuilder(table: Table[_]): TableDDLBuilder = new TableDDLBuilder(table)
  override def createColumnDDLBuilder(column: FieldSymbol, table: Table[_]): ColumnDDLBuilder = new ColumnDDLBuilder(column)
  // Emulate insertOrUpdate server-side within a transaction (see header doc),
  // but fall back to client-side emulation when `returning` is requested.
  override protected lazy val useServerSideUpsert = true
  override protected lazy val useTransactionForUpsert = true
  override protected lazy val useServerSideUpsertReturning = false
  /** Map JDBC SQL types onto PostgreSQL type names. Honours an explicit
    * `Length` column option for VARCHAR/CHAR; uses the `lo` extension type for
    * BLOB and SMALLINT in place of the missing TINYINT. */
  override def defaultSqlTypeName(tmd: JdbcType[_], sym: Option[FieldSymbol]): String = tmd.sqlType match {
    case java.sql.Types.VARCHAR =>
      val size = sym.flatMap(_.findColumnOption[RelationalProfile.ColumnOption.Length])
      size.fold("VARCHAR")(l => if(l.varying) s"VARCHAR(${l.length})" else s"CHAR(${l.length})")
    case java.sql.Types.BLOB => "lo"
    case java.sql.Types.DOUBLE => "DOUBLE PRECISION"
    /* PostgreSQL does not have a TINYINT type, so we use SMALLINT instead. */
    case java.sql.Types.TINYINT => "SMALLINT"
    case _ => super.defaultSqlTypeName(tmd, sym)
  }
  /** SQL query builder emitting PostgreSQL dialect: `||` concatenation,
    * limit/offset clauses, plain `DISTINCT` where the DISTINCT ON columns
    * match the selection, and Postgres sequence/date functions. */
  class QueryBuilder(tree: Node, state: CompilerState) extends super.QueryBuilder(tree, state) {
    override protected val concatOperator = Some("||")
    override protected val quotedJdbcFns = Some(Vector(Library.Database, Library.User))
    // Emit a bare `distinct` only when the DISTINCT ON expressions are simple
    // (paths/literals/parameters) and refer to exactly the selected paths;
    // anything else is delegated to the default builder.
    override protected def buildSelectModifiers(c: Comprehension): Unit = (c.distinct, c.select) match {
      case (Some(ProductNode(onNodes)), Pure(ProductNode(selNodes), _)) if onNodes.nonEmpty =>
        def eligible(a: ConstArray[Node]) = a.forall {
          case _: PathElement => true
          case _: LiteralNode => true
          case _: QueryParameter => true
          case _ => false
        }
        if(eligible(onNodes) && eligible(selNodes) &&
           onNodes.iterator.collect[List[TermSymbol]] { case FwdPath(ss) => ss }.toSet ==
           selNodes.iterator.collect[List[TermSymbol]] { case FwdPath(ss) => ss }.toSet
          ) b"distinct " else super.buildSelectModifiers(c)
      case _ => super.buildSelectModifiers(c)
    }
    // Postgres supports `limit`/`offset` independently.
    override protected def buildFetchOffsetClause(fetch: Option[Node], offset: Option[Node]) = (fetch, offset) match {
      case (Some(t), Some(d)) => b"\\nlimit $t offset $d"
      case (Some(t), None   ) => b"\\nlimit $t"
      case (None,    Some(d)) => b"\\noffset $d"
      case _ =>
    }
    // Postgres-specific spellings for common library functions.
    override def expr(n: Node, skipParens: Boolean = false) = n match {
      case Library.UCase(ch) => b"upper($ch)"
      case Library.LCase(ch) => b"lower($ch)"
      case Library.IfNull(ch, d) => b"coalesce($ch, $d)"
      case Library.NextValue(SequenceNode(name)) => b"nextval('$name')"
      case Library.CurrentValue(SequenceNode(name)) => b"currval('$name')"
      case Library.CurrentDate() => b"current_date"
      case Library.CurrentTime() => b"current_time"
      case _ => super.expr(n, skipParens)
    }
  }
  // Emulates an upsert with two statements: an UPDATE of the non-key ("soft")
  // columns followed by an INSERT that is suppressed when a row with the same
  // primary key already exists. Atomicity relies on useTransactionForUpsert
  // being enabled above.
  class UpsertBuilder(ins: Insert) extends super.UpsertBuilder(ins) {
    override def buildInsert: InsertBuilderResult = {
      val update = "update " + tableName + " set " + softNames.map(n => s"$n=?").mkString(",") + " where " + pkNames.map(n => s"$n=?").mkString(" and ")
      val nonAutoIncNames = nonAutoIncSyms.map(fs => quoteIdentifier(fs.name)).mkString(",")
      val nonAutoIncVars = nonAutoIncSyms.map(_ => "?").mkString(",")
      val cond = pkNames.map(n => s"$n=?").mkString(" and ")
      val insert = s"insert into $tableName ($nonAutoIncNames) select $nonAutoIncVars where not exists (select 1 from $tableName where $cond)"
      new InsertBuilderResult(table, s"$update; $insert", ConstArray.from(softSyms ++ pkSyms))
    }
    // Parameter values are bound twice (once for the UPDATE and once for the
    // INSERT), so the value mapping is reordered to match both statements.
    override def transformMapping(n: Node) = reorderColumns(n, softSyms ++ pkSyms ++ nonAutoIncSyms.toSeq ++ pkSyms)
  }
  class TableDDLBuilder(table: Table[_]) extends super.TableDDLBuilder(table) {
    // After creating the table, install lo_manage triggers for every "lo"
    // (large object) column (no-ops for all other column types).
    override def createPhase1 = super.createPhase1 ++ columns.flatMap {
      case cb: ColumnDDLBuilder => cb.createLobTrigger(table.tableName)
    }
    override def dropPhase1 = {
      val dropLobs = columns.flatMap {
        case cb: ColumnDDLBuilder => cb.dropLobTrigger(table.tableName)
      }
      if(dropLobs.isEmpty) super.dropPhase1
      // Rows are deleted first (presumably so the delete triggers can unlink
      // the large objects -- verify against lo_manage semantics), then the
      // triggers are dropped, then the table itself.
      else Seq("delete from "+quoteIdentifier(table.tableName)) ++ dropLobs ++ super.dropPhase1
    }
  }
  class ColumnDDLBuilder(column: FieldSymbol) extends super.ColumnDDLBuilder(column) {
    override def appendColumn(sb: StringBuilder) {
      sb append quoteIdentifier(column.name) append ' '
      // Auto-increment columns without a custom SQL type become SERIAL or
      // BIGSERIAL; the flag is then cleared so appendOptions does not emit a
      // further auto-increment clause.
      if(autoIncrement && !customSqlType) {
        sb append (if(sqlType.toUpperCase == "BIGINT") "BIGSERIAL" else "SERIAL")
      } else appendType(sb)
      autoIncrement = false
      appendOptions(sb)
    }
    // Name of the trigger that manages the large object of this column.
    // NOTE(review): the column name is quoted both here and again by the outer
    // quoteIdentifier call -- double-check whether the nested quoting is
    // intentional.
    def lobTrigger(tname: String) =
      quoteIdentifier(tname+"__"+quoteIdentifier(column.name)+"_lob")
    // Create/drop the lo_manage trigger for "lo" columns; None for all others.
    def createLobTrigger(tname: String): Option[String] =
      if(sqlType == "lo") Some(
        "create trigger "+lobTrigger(tname)+" before update or delete on "+
        quoteIdentifier(tname)+" for each row execute procedure lo_manage("+quoteIdentifier(column.name)+")"
      ) else None
    def dropLobTrigger(tname: String): Option[String] =
      if(sqlType == "lo") Some(
        "drop trigger "+lobTrigger(tname)+" on "+quoteIdentifier(tname)
      ) else None
  }
  class JdbcTypes extends super.JdbcTypes {
    override val byteArrayJdbcType = new ByteArrayJdbcType
    override val uuidJdbcType = new UUIDJdbcType
    // Byte arrays are stored in BYTEA columns rather than a BLOB locator type.
    class ByteArrayJdbcType extends super.ByteArrayJdbcType {
      override val sqlType = java.sql.Types.BINARY
      override def sqlTypeName(sym: Option[FieldSymbol]) = "BYTEA"
    }
    // UUIDs use PostgreSQL's native UUID type: values are passed through as
    // objects and also have a literal string representation.
    class UUIDJdbcType extends super.UUIDJdbcType {
      override def sqlTypeName(sym: Option[FieldSymbol]) = "UUID"
      override def setValue(v: UUID, p: PreparedStatement, idx: Int) = p.setObject(idx, v, sqlType)
      override def getValue(r: ResultSet, idx: Int) = r.getObject(idx).asInstanceOf[UUID]
      override def updateValue(v: UUID, r: ResultSet, idx: Int) = r.updateObject(idx, v)
      override def valueToSQLLiteral(value: UUID) = "'" + value + "'"
      override def hasLiteralForm = true
    }
  }
}
/** Singleton instance of the PostgreSQL profile. */
object PostgresProfile extends PostgresProfile
| knoldus/slick-1 | slick/src/main/scala/slick/jdbc/PostgresProfile.scala | Scala | bsd-2-clause | 11,158 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.commands
import org.neo4j.cypher.internal.compiler.v2_3.executionplan.{MatchPattern, MatchRelationship}
import org.neo4j.cypher.internal.frontend.v2_3.test_helpers.CypherFunSuite
/** Unit tests for [[MatchPattern]] connectivity analysis. */
class MatchPatternTest extends CypherFunSuite {

  test("should find disjoint graph with single nodes") {
    val pattern = new MatchPattern(Seq("A", "B"), Seq())
    val expected = Seq(
      new MatchPattern(Seq("A"), Seq()),
      new MatchPattern(Seq("B"), Seq()))

    pattern.disconnectedPatterns should equal(expected)
  }

  test("should be non empty if it contains a node") {
    val singleNode = new MatchPattern(Seq("A"), Seq())

    singleNode.nonEmpty should equal(true)
  }

  test("should find single graph for simple rel") {
    val pattern = new MatchPattern(Seq("A", "B"), Seq(MatchRelationship(None, "A", "B")))

    // A fully connected pattern is its own single component.
    pattern.disconnectedPatterns should equal(Seq(pattern))
  }

  test("should find deeply nested disjoint graphs") {
    val pattern = new MatchPattern(
      Seq("A", "B", "C", "D"),
      Seq(MatchRelationship(None, "A", "B"), MatchRelationship(None, "B", "D")))
    val expected = Seq(
      new MatchPattern(Seq("A", "B", "D"), Seq(MatchRelationship(None, "A", "B"), MatchRelationship(None, "B", "D"))),
      new MatchPattern(Seq("C"), Seq()))

    pattern.disconnectedPatterns should equal(expected)
  }

  test("should list subgraphs without specified points") {
    val pattern = new MatchPattern(Seq("A", "B", "C"), Seq(MatchRelationship(None, "A", "B")))
    val expected = Seq(new MatchPattern(Seq("C"), Seq()))

    pattern.disconnectedPatternsWithout(Seq("B")) should equal(expected)
  }

  test("should not consider patterns bound by relationships as unbounded") {
    val pattern = new MatchPattern(Seq("A", "B"), Seq(MatchRelationship(Some("r"), "A", "B")))

    pattern.disconnectedPatternsWithout(Seq("r")) should equal(Seq.empty)
  }
}
| HuangLS/neo4j | community/cypher/cypher-compiler-2.3/src/test/scala/org/neo4j/cypher/internal/compiler/v2_3/commands/MatchPatternTest.scala | Scala | apache-2.0 | 2,805 |
package direct.traffic.archivespark.dataspecs
import java.io.{BufferedReader, InputStreamReader}
import java.net.URI
import java.util.stream.Collectors
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.hadoop.io.compress.CompressionCodecFactory
import org.apache.spark.deploy.SparkHadoopUtil
import scala.util.Try
import scala.collection.JavaConverters._
/**
* Created by boris on 25.09.17.
*/
package object access {
def ListingFileMap(hdfsHost: URI,listingFile: String) : Set[String] = {
val fs = FileSystem.newInstance(hdfsHost,SparkHadoopUtil.get.conf)
val factory = new CompressionCodecFactory(SparkHadoopUtil.get.conf)
val path = new Path(listingFile)
val codec = factory.getCodec(path)
val reader = new BufferedReader(new InputStreamReader(codec.createInputStream(fs.open(path))))
val files = reader.lines().collect(Collectors.toList()).asScala.toSet
files
}
} | trafficdirect/ArchiveSparkJCDX | src/main/scala/direct/traffic/archivespark/dataspecs/access/acces.scala | Scala | mit | 924 |
package cc.factorie.app.uschema.tac
import scala.util.Random
import cc.factorie.app.uschema._
/**
* Created by beroth on 2/23/15.
*/
/** Command-line options for training/evaluating a universal-schema model on TAC data. */
class TrainTestTacDataOptions extends cc.factorie.util.DefaultCmdOptions {
  val tacData = new CmdOption("tac-data", "", "FILE", "tab separated file with TAC training data")
  // Embedding dimensionality and SGD step size used by the BPR trainers.
  val dim = new CmdOption("dim", 100, "INT", "dimensionality of data")
  val stepsize = new CmdOption("stepsize", 0.1, "DOUBLE", "step size")
  // Either max-norm constrained training (use-max-norm=true) or L2
  // regularization is selected in TrainTestTacData.main.
  val maxNorm = new CmdOption("max-norm", 1.0, "DOUBLE", "maximum l2-norm for vectors")
  val useMaxNorm = new CmdOption("use-max-norm", true, "BOOLEAN", "whether to use maximum l2-norm for vectors")
  val regularizer = new CmdOption("regularizer", 0.01, "DOUBLE", "regularizer")
}
/**
 * Trains a universal-schema model on TAC data and reports mean average
 * precision (MAP) on a held-out test split at 0, 10, 50, 100 and 200
 * training iterations.
 */
object TrainTestTacData {

  val opts = new TrainTestTacDataOptions

  // TAC KBP slot relations whose columns are held out for evaluation.
  val testCols = Set("org:alternate_names",
    "org:city_of_headquarters",
    "org:country_of_headquarters",
    "org:date_dissolved",
    "org:date_founded",
    "org:founded_by",
    "org:member_of",
    "org:members",
    "org:number_of_employees_members",
    "org:parents",
    "org:political_religious_affiliation",
    "org:shareholders",
    "org:stateorprovince_of_headquarters",
    "org:subsidiaries",
    "org:top_members_employees",
    "org:website",
    "per:age",
    "per:alternate_names",
    "per:cause_of_death",
    "per:charges",
    "per:children",
    "per:cities_of_residence",
    "per:city_of_birth",
    "per:city_of_death",
    "per:countries_of_residence",
    "per:country_of_birth",
    "per:country_of_death",
    "per:date_of_birth",
    "per:date_of_death",
    "per:employee_or_member_of",
    "per:origin",
    "per:other_family",
    "per:parents",
    "per:religion",
    "per:schools_attended",
    "per:siblings",
    "per:spouse",
    "per:stateorprovince_of_birth",
    "per:stateorprovince_of_death",
    "per:statesorprovinces_of_residence",
    "per:title")

  def main(args: Array[String]) : Unit = {
    opts.parse(args)

    // Load the entity-relation matrix, pruning sparse rows/columns.
    val tReadStart = System.currentTimeMillis
    val kb = EntityRelationKBMatrix.fromTsv(opts.tacData.value).prune(2,1)
    val tRead = (System.currentTimeMillis - tReadStart)/1000.0
    println(f"Reading from file and pruning took $tRead%.2f s")

    println("Stats:")
    println("Num Rows:" + kb.numRows())
    println("Num Cols:" + kb.numCols())
    println("Num cells:" + kb.nnz())

    val random = new Random(0)
    val numDev = 0
    val numTest = 10000
    val (trainKb, devKb, testKb) = kb.randomTestSplit(numDev, numTest, None, Some(testCols), random)

    val model = UniversalSchemaModel.randomModel(kb.numRows(), kb.numCols(), opts.dim.value, random)
    val trainer = if(opts.useMaxNorm.value) {
      println("use norm constraint")
      new NormConstrainedBprUniversalSchemaTrainer(opts.maxNorm.value, opts.stepsize.value, opts.dim.value,
        trainKb.matrix, model, random)
    } else {
      println("use regularization")
      new RegularizedBprUniversalSchemaTrainer(opts.regularizer.value, opts.stepsize.value, opts.dim.value,
        trainKb.matrix, model, random)
    }

    // Computes and prints the test-set MAP under the given label. Replaces
    // the four copy-pasted evaluate/print stanzas of the original.
    def report(label: String): Unit = {
      val result = model.similaritiesAndLabels(trainKb.matrix, testKb.matrix)
      println(label + ": " + Evaluator.meanAveragePrecision(result))
    }

    report("Initial MAP")
    // Train in increments, reporting after 10, 50, 100 and 200 total
    // iterations -- the same schedule and output labels as before.
    var iterationsDone = 0
    for (increment <- Seq(10, 40, 50, 100)) {
      trainer.train(increment)
      iterationsDone += increment
      report(s"MAP after $iterationsDone iterations")
    }
  }
}
| patverga/factorie | src/main/scala/cc/factorie/app/uschema/tac/TrainTestTacData.scala | Scala | apache-2.0 | 4,026 |
/*
* Copyright 2013-2015 Websudos, Limited.
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* - Explicit consent must be obtained from the copyright owner, Websudos Limited before any redistribution is made.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.websudos.phantom.builder.serializers
import java.util.UUID
import com.websudos.phantom.builder.query.KeySpaceSuite
import com.websudos.phantom.dsl._
import com.websudos.phantom.tables.{Articles, Recipes, TableWithCompoundKey}
import com.websudos.util.testing._
import org.scalatest.{FlatSpec, Matchers}
/**
 * Verifies that phantom DSL queries serialize to the expected CQL strings.
 * Tests using `should compile` are compile-time-only checks; all other tests
 * compare the generated query string exactly.
 */
class QuerySerializationTest extends FlatSpec with Matchers with KeySpaceSuite {

  it should "compile a full select query" in {
    "Articles.select.where(_.id eqs gen[UUID])" should compile
  }

  it should "serialize a full select query" in {
    val someId = gen[UUID]
    Articles.select.where(_.id eqs someId).qb.queryString shouldBe s"SELECT * FROM phantom.articles WHERE id = $someId"
  }

  it should "compile a single column partial select query" in {
    "Articles.select(_.id).where(_.id eqs gen[UUID])" should compile
  }

  it should "serialize a single column partial select query" in {
    val someId = gen[UUID]
    Articles.select(_.id).where(_.id eqs someId).qb.queryString shouldBe s"SELECT id FROM phantom.${Articles.tableName} WHERE id = $someId"
  }

  it should "compile a query to query condition clause" in {
    """Articles.update.where(_.id eqs gen[UUID]).modify(_.name setTo "test").onlyIf(_.name is "update")""" should compile
  }

  it should "serialize a condition query to a query condition" in {
    val someId = gen[UUID]
    val query = Articles.update.where(_.id eqs someId).modify(_.name setTo "test").onlyIf(_.name is "update").qb.queryString
    query shouldEqual s"UPDATE phantom.articles SET name = 'test' WHERE id = $someId IF name = 'update'"
  }

  it should "serialize a 2 column partial select query" in {
    val someId = gen[UUID]
    Articles.select(_.id, _.name).where(_.id eqs someId).qb.queryString shouldBe s"SELECT id, name FROM phantom.articles WHERE id = $someId"
  }

  it should "serialize a 3 column partial select query" in {
    val someId = gen[String]
    Recipes.select(
      _.url,
      _.description,
      _.ingredients
    ).where(_.url eqs someId)
      .qb.queryString shouldBe s"SELECT url, description, ingredients FROM phantom.Recipes WHERE url = '$someId'"
  }

  it should "serialise a conditional update query with a single List column based clause" in {
    val qb = Recipes.update.where(_.url eqs "test")
      .modify(_.description setTo Some("blabla"))
      .onlyIf(_.ingredients is List("1", "2", "3"))
      .qb.queryString

    qb shouldEqual "UPDATE phantom.Recipes SET description = 'blabla' WHERE url = 'test' IF ingredients = ['1', '2', '3']"
  }

  it should "serialise a multi-part conditional update query with a List column part" in {
    val qb = Recipes.update.where(_.url eqs "test")
      .modify(_.description setTo Some("blabla"))
      .onlyIf(_.ingredients is List("1", "2", "3"))
      .and(_.description is Some("test"))
      .qb.queryString

    qb shouldEqual "UPDATE phantom.Recipes SET description = 'blabla' WHERE url = 'test' IF ingredients = ['1', '2', '3'] AND description = 'test'"
  }

  it should "serialize a simple count query" in {
    Recipes.select.count.qb.queryString shouldEqual "SELECT COUNT(*) FROM phantom.Recipes"
  }

  it should "serialize a count query with a where clause" in {
    val key = gen[String]
    Recipes.select.count.where(_.url eqs key).qb.queryString shouldEqual s"SELECT COUNT(*) FROM phantom.Recipes WHERE url = '$key'"
  }

  it should "serialize a count query with a where-and clause" in {
    val id = UUID.randomUUID()
    val key = id.toString
    TableWithCompoundKey.select.count.where(_.id eqs id).and(_.second eqs id).qb.queryString shouldEqual s"SELECT COUNT(*) FROM phantom.TableWithCompoundKey WHERE id = $key AND second = $key"
  }

  it should "allow setting a limit on a count query" in {
    val id = UUID.randomUUID()
    val key = id.toString
    TableWithCompoundKey.select.count.where(_.id eqs id).and(_.second eqs id).limit(10).qb.queryString shouldEqual s"SELECT COUNT(*) FROM phantom.TableWithCompoundKey WHERE id = $key AND second = $key LIMIT 10"
  }

  it should "allow filtering on a count query" in {
    val id = UUID.randomUUID()
    val key = id.toString
    TableWithCompoundKey.select.count
      .where(_.id eqs id).and(_.second eqs id)
      .limit(10)
      .allowFiltering()
      .qb.queryString shouldEqual s"SELECT COUNT(*) FROM phantom.TableWithCompoundKey WHERE id = $key AND second = $key LIMIT 10 ALLOW FILTERING"
  }
}
| analytically/phantom | phantom-dsl/src/test/scala/com/websudos/phantom/builder/serializers/QuerySerializationTest.scala | Scala | bsd-2-clause | 5,925 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.2
* @date Mon Sep 2 16:24:38 EDT 2013
* @see LICENSE (MIT style license file).
*/
package scalation.analytics
import scalation.linalgebra.MatrixD
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Reducer` trait provides a common framework for several data reduction
* algorithms.
*/
trait Reducer
{
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Given the original data matrix, produce a lower dimensionality matrix
     *  that maintains most of the descriptive power of the original matrix.
     *  @return the reduced (lower-dimensional) data matrix
     */
    def reduce (): MatrixD

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Approximately recover the original matrix.  The new matrix will have
     *  the same dimensionality, but will have some loss of information.
     *  @return the approximate reconstruction of the original matrix
     */
    def recover (): MatrixD

} // Reducer trait
| scalation/fda | scalation_1.2/src/main/scala/scalation/analytics/Reducer.scala | Scala | mit | 1,032 |
package com.sksamuel.elastic4s.requests.analyzers
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
/**
 * Serialization tests for the snowball analyzer DSL.
 *
 * NOTE(review): the class is named PatternAnalyzerTest but every test
 * exercises `snowballAnalyzer`; the suite description below is corrected to
 * match what is actually tested. Consider renaming the class/file as well.
 */
class PatternAnalyzerTest extends AnyWordSpec with AnalyzerApi with Matchers {

  "SnowballAnalyzer builder" should {
    "set language" in {
      snowballAnalyzer("testy")
        .language("klingon")
        .json
        .string shouldBe """{"type":"snowball","language":"klingon"}"""
    }
    "set stopwords" in {
      snowballAnalyzer("testy")
        .stopwords("a", "b")
        .json
        .string shouldBe """{"type":"snowball","language":"English","stopwords":["a","b"]}"""
    }
    "not set stopwords if not specified" in {
      snowballAnalyzer("testy").json.string shouldBe """{"type":"snowball","language":"English"}"""
    }
  }
}
| stringbean/elastic4s | elastic4s-tests/src/test/scala/com/sksamuel/elastic4s/requests/analyzers/PatternAnalyzerTest.scala | Scala | apache-2.0 | 792 |
/*
* The MIT License
*
* Copyright (c) 2017 Fulcrum Genomics LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
package com.fulcrumgenomics.str.tasks
import java.nio.file.{Path, Paths}
import com.fulcrumgenomics.commons.io.{Io, PathUtil}
import com.fulcrumgenomics.str.tasks.HipStr.PathToBed
import dagr.core.config.Configuration
import dagr.core.execsystem.{Cores, Memory}
import dagr.core.tasksystem.{FixedResources, ProcessTask, SimpleInJvmTask}
import dagr.tasks.DagrDef.{FilePath, PathToBam, PathToFasta, PathToVcf}
import com.fulcrumgenomics.commons.CommonsDef._
import htsjdk.samtools.util.IntervalList
import scala.collection.mutable.ListBuffer
object HipStr extends Configuration {
  // Alias documenting that HipSTR region files are plain paths (BED format).
  type PathToBed = FilePath

  // Configuration key pointing at the HipSTR installation directory.
  val HipStrDirConfigKey: String = "hipstr.dir"

  /** Locates the HipSTR executable under the configured installation directory. */
  def findHipStr: Path = configureExecutableFromBinDirectory(HipStrDirConfigKey, "HipSTR")

  /** Locates a helper script shipped in HipSTR's `scripts/` sub-directory. */
  def findHipStrScript(scriptName: String): Path = {
    configureExecutableFromBinDirectory(HipStrDirConfigKey, scriptName, subDir=Some(Paths.get("scripts")))
  }

  /** Creates the regions BED file for HipStr: Required format is tab-delimited columns CHROM START STOP PERIOD NCOPIES NAME (six fields are written below). */
  class CreateRegionsBed(intervals: PathToIntervals, bed: PathToBed) extends SimpleInJvmTask {
    def run(): Unit = {
      val list = IntervalList.fromFile(intervals.toFile)
      val lines = list.map { interval =>
        // Interval names carry comma-separated annotations; only the first
        // three are used -- a 5-token name is accepted but tokens 3-4 are
        // ignored.
        val nameTokens = interval.getName.split(',')
        require(nameTokens.length == 3 || nameTokens.length == 5, s"Require 3 or 5 fields in the name, found ${nameTokens.length} for interval: $interval")
        Seq(
          interval.getContig,
          interval.getStart-1, // BED is 0-based half-open; interval lists are 1-based inclusive
          interval.getEnd,
          nameTokens(0),
          nameTokens(1),
          nameTokens(2)
        ).mkString("\\t")
      }.toSeq
      Io.writeLines(path=bed, lines=lines)
    }
  }
}
/**
 * Task that runs the HipSTR STR genotyper over a BAM, producing an STR VCF.
 * The constructor parameters map one-to-one onto HipSTR command-line flags
 * (see `args` below); Option-valued parameters are omitted when None.
 */
class HipStr(input: PathToBam,
             ref: PathToFasta,
             regions: PathToBed,
             output: PathToVcf,
             stutterIn: Option[FilePath] = None,
             stutterOut: Option[FilePath] = None,
             genotypeLikelihoods: Boolean = true,
             posteriorLikelihoods: Boolean = true,
             minReads: Option[Int] = Some(1),
             useUnpaired: Boolean = false,
             removeDuplicates: Boolean = false,
             haploidChromosomes: Option[IntervalList] = None,
             maxStrLength: Option[Int] = Some(150),
             maxFlankHaplotypes: Option[Int] = Some(10),
             minFlankFrequency: Option[Double] = Some(0.1))
  extends ProcessTask with FixedResources {
  requires(Cores(1), Memory("2g"))

  // Find it when building so we fail earlier
  private val hipStr = HipStr.findHipStr

  /** Assembles the HipSTR command line. */
  override def args: Seq[Any] = {
    val buffer = ListBuffer[Any]()
    buffer.append(hipStr)
    buffer.append("--bams", input)
    buffer.append("--fasta", ref)
    buffer.append("--regions", regions)
    buffer.append("--str-vcf", output)
    stutterIn.foreach(s => buffer.append("--stutter-in", s))
    stutterOut.foreach(s => buffer.append("--stutter-out", s))
    if (genotypeLikelihoods) buffer.append("--output-gls")
    if (posteriorLikelihoods) buffer.append("--output-pls")
    minReads.foreach(m => buffer.append("--min-reads", m))
    if (useUnpaired) buffer.append("--use-unpaired")
    // Inverted flag: --no-rmdup is emitted when duplicate removal is disabled.
    if (!removeDuplicates) buffer.append("--no-rmdup")
    // Only the distinct contig names from the interval list are passed on.
    haploidChromosomes.foreach { intervals =>
      if (intervals.nonEmpty) {
        buffer.append("--haploid-chrs")
        buffer.append(intervals.map(_.getContig).toSeq.distinct.mkString(","))
      }
    }
    maxStrLength.foreach(l => buffer.append("--max-str-len", l))
    maxFlankHaplotypes.foreach(l => buffer.append("--max-hap-flanks", l))
    minFlankFrequency.foreach(f => buffer.append("--min-flank-freq", f))
    buffer
  }
} | fulcrumgenomics/fgstr | pipelines/src/main/scala/com/fulcrumgenomics/str/tasks/HipStr.scala | Scala | mit | 4,847 |
package kz.rio.core
import akka.actor.{Props, ActorRef, ActorLogging, Actor}
import kz.rio.domain.DomainMessage
/**
* Created by irybakov on 1/16/16.
*/
object RequestHandler {
  /** Props factory for creating [[RequestHandler]] actors. */
  def props(): Props = Props(classOf[RequestHandler])
}
class RequestHandler extends Actor with ActorLogging {
  // NOTE(review): DomainMessage requests are currently matched but ignored
  // (empty case body); any other message type is left unhandled.
  override def receive: Receive = {
    case request: DomainMessage =>
  }
}
| irybakov/async-stub | src/main/scala/kz/rio/core/RequestHandler.scala | Scala | mit | 377 |
package org.kokho.scheduling
import org.scalatest.{FunSuite, Matchers}
/**
* @author: Mikhail Kokho
* @date: 7/3/2015
*/
/** Unit tests for [[Job]], its proxy constructor and [[IdleJob]]. */
class JobTest extends FunSuite with Matchers{

  // Shared fixture: a job released at 0 with length 2 and deadline 10.
  val sampleJob = Job(0, 2, 10)

  test("Job() returns what expected") {
    sampleJob.release should be(0)
    sampleJob.length should be(2)
    sampleJob.deadline should be(10)
    sampleJob.releasedBy.isEmpty should be(true)
    sampleJob.toString.nonEmpty should be(true)
  }

  test("JobProxy is correct") {
    val proxy = Job(sampleJob)

    // The proxy must mirror every field of the wrapped job.
    proxy.release should be(sampleJob.release)
    proxy.length should be(sampleJob.length)
    proxy.deadline should be(sampleJob.deadline)
    proxy.releasedBy should be(sampleJob.releasedBy)
  }

  test("IdleJob test") {
    IdleJob.toString.nonEmpty should be(true)
  }
}
| mkokho/dynoslack | src/test/scala/org/kokho/scheduling/JobTest.scala | Scala | apache-2.0 | 737 |
/*
Copyright 2013 Ilya Lakhin (Илья Александрович Лахин)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package name.lakhin.eliah.projects
package papacarlo.lexis
import name.lakhin.eliah.projects.papacarlo.utils.{Registry, Bounds, Signal}
import scala.util.control.Breaks._
/**
 * Tracks cachable fragments of the token stream and keeps them consistent
 * while the underlying TokenCollection is rewritten. Fragments are delimited
 * by the context enter/leave seams produced by the Contextualizer; clients
 * observe fragment creation, invalidation and removal through the exposed
 * signals.
 */
final class FragmentController(contextualizer: Contextualizer,
                               tokens: TokenCollection) {
  private val registry = new Registry[Fragment]
  // Narrowest context shared by all pending edits; None when nothing pending.
  private var invalidationContext = Option.empty[Context]
  // Token-index range touched by pending rewrites.
  private var invalidationRange = Bounds.undefined
  // Indexes of tokens whose seams are currently unexpected/unbalanced, mapped
  // to the seam kind recorded for them.
  private var invalidTokens = Map.empty[Int, SeamType]
  private var valid = true
  val onCreate = registry.onAdd
  val onInvalidate = new Signal[(Fragment, Bounds)]
  val onRemove = registry.onRemove
  // Fragment spanning the whole token stream; always present.
  val rootFragment = createFragment(tokens.head, tokens.last)
  // Before a rewrite: fold the to-be-replaced range's contexts into the
  // invalidation context, then remove that range from the pending range.
  tokens.onBeforeRewrite.bind {
    case (oldRange: Bounds) =>
      updateInvalidationContext(oldRange)
      invalidationRange = invalidationRange.takeout(oldRange)
  }
  // After a rewrite: re-contextualize the new tokens, detect unexpected or
  // unbalanced seams, and invalidate the affected fragments. Pending state is
  // cleared only when the stream is balanced again.
  tokens.onAfterRewrite.bind {
    case (newRange: Bounds) =>
      invalidationRange = invalidationRange.inject(newRange)
      val newTokens = invalidationRange.slice(tokens.descriptions)
      contextualizer.contextualize(contextAfter(invalidationRange.from - 1),
                                   newTokens)
      invalidTokens = Map.empty
      valid = true
      var index = newRange.from
      for (token <- newTokens) {
        if (token.seam == UnexpectedSeam) {
          invalidTokens += Tuple2(index, LeaveContext)
          valid = false
        }
        index += 1
      }
      // If the contexts on both sides of the rewritten range disagree, the
      // edit broke a context boundary: fold their intersection into the
      // invalidation context and search both sides for unbalanced seams.
      val left = contextAfter(invalidationRange.until - 1)
      val right = contextBefore(invalidationRange.until)
      if (left != right) {
        val intersection = left.intersect(right)
        invalidationContext = invalidationContext
          .map(_.intersect(intersection))
          .orElse(Some(intersection))
        checkLeftBalance(invalidationRange.until - 1, intersection)
        checkRightBalance(invalidationRange.until, intersection)
      }
      updateInvalidationContext(invalidationRange)
      invalidate()
      if (valid) {
        invalidationRange = Bounds.undefined
        invalidationContext = None
      } else
        // Keep every still-broken token inside the pending range so a later
        // rewrite revisits it.
        for ((index, seam) <- invalidTokens)
          invalidationRange = invalidationRange.union(index)
  }
  // Walks the affected range rebuilding the fragment structure from context
  // seams: EnterContext pushes a fragment origin, LeaveContext pops it and
  // either reuses, recreates or invalidates the corresponding fragment.
  private def invalidate(): Unit = {
    val range = computeActualRange
    updateSkipLevel(range)
    var fragmentOrigins = List.empty[TokenReference]
    var fragmentsToInvalidate = List.empty[Fragment]
    val invalidationContext = this.invalidationContext.getOrElse(Context.Base)
    val tokenCount = tokens.descriptions.length
    for (index <- range.iterator) {
      if (index == 0) fragmentOrigins ::= tokens.head
      val token = tokens.descriptions.lift(index).getOrElse(Token.lineBreak)
      if (!invalidTokens.contains(index))
        token.seam match {
          case EnterContext =>
            fragmentOrigins ::= tokens.references
              .lift(index)
              .getOrElse(tokens.head)
          case LeaveContext =>
            val begin = fragmentOrigins match {
              case head :: tail =>
                fragmentOrigins = tail
                head
              case _ => tokens.head
            }
            val end = tokens.references.lift(index).getOrElse(tokens.last)
            // Reuse the fragment cached on the origin token only when it
            // still ends at the same token; otherwise discard it.
            val existFragment = begin.fragment.filter(fragment => {
              val same = fragment.end.index == end.index
              if (!same) fragment.remove()
              same
            })
            if (begin.fragment.isEmpty
                || token.context == invalidationContext)
              existFragment match {
                case Some(fragment) => fragmentsToInvalidate ::= fragment
                case None =>
                  if (contextualizer.isCachableContext(token.context))
                    begin.fragment = Some(createFragment(begin, end))
              }
          case _ =>
        }
      else
        // Broken token: any fragment anchored on it is no longer trustworthy.
        for (fragmentToRemove <- tokens.references
               .lift(index)
               .flatMap(_.fragment))
          fragmentToRemove.remove()
      // At the last token, close one still-open origin; the root fragment is
      // invalidated when the open origin reaches back to the stream start or
      // the token belongs to the invalidation context.
      if (index == tokenCount - 1 && fragmentOrigins.nonEmpty) {
        val rootBegin = fragmentOrigins.head
        if (rootBegin.index == 0 || token.context == invalidationContext)
          fragmentsToInvalidate ::= rootFragment
        fragmentOrigins = fragmentOrigins.tail
      }
    }
    // Fire invalidation in original scan order (the list was built by prepend).
    for (fragment <- fragmentsToInvalidate.reverse)
      fragment.onInvalidate.trigger(fragment)
    // Nothing matched at this context level: retry one context level up.
    if (fragmentsToInvalidate.isEmpty &&
        this.invalidationContext.exists(_.parent.nonEmpty)) {
      this.invalidationContext = this.invalidationContext.flatMap(_.parent)
      invalidate()
    }
  }
  // Registers a new fragment and wires its lifecycle into this controller's
  // onInvalidate signal and registry bookkeeping.
  private def createFragment(start: TokenReference, end: TokenReference) = {
    val fragment = registry.add(id => Fragment(id, start, end))
    fragment.onInvalidate.bind(fragment =>
      onInvalidate.trigger(Tuple2(fragment, invalidationRange)))
    fragment.onRemove.bind(fragment => registry.remove(fragment.id))
    fragment
  }
  // Applies the context-specific skip level (when defined) to every token in
  // the range.
  private def updateSkipLevel(range: Bounds): Unit = {
    for (token <- range.slice(tokens.descriptions);
         skipLevel <- contextualizer.getContextSkipLevel(token.context))
      token.applySkipLevel(skipLevel)
  }
  // Expands the pending range outwards until it is bounded by the nearest
  // enter/leave seams of the current invalidation context (or it reaches the
  // stream boundaries).
  private def computeActualRange = {
    val invalidationContext = this.invalidationContext.getOrElse(Context.Base)
    val tokenCount = tokens.descriptions.length
    invalidationRange.map(
      from =>
        (0 until (from + 1)).reverse
          .takeWhile(index => {
            val token = tokens.descriptions
              .lift(index)
              .getOrElse(Token.lineBreak)
            invalidTokens.contains(index) || token.seam != EnterContext ||
            token.context != invalidationContext
          })
          .lastOption
          .map(_ - 1)
          .filter(_ >= 0)
          .getOrElse(0),
      until =>
        (((until - 1) max 0) until tokenCount)
          .takeWhile(index => {
            val token = tokens.descriptions
              .lift(index)
              .getOrElse(Token.lineBreak)
            invalidTokens.contains(index) || token.seam != LeaveContext ||
            token.context != invalidationContext
          })
          .lastOption
          .map(_ + 1)
          .filter(_ < tokenCount)
          .getOrElse(tokenCount - 1) + 1
    )
  }
  // Folds the contexts observed in `range` into invalidationContext, treating
  // enter/leave seams as scope boundaries; when the scan ends outside any
  // entered context, the context right after the range is folded in too.
  private def updateInvalidationContext(range: Bounds): Unit = {
    var outOfContext = true
    var index = range.from
    for (token <- range.slice(tokens.descriptions)) {
      val invalid = invalidTokens.contains(index)
      if (outOfContext || (!invalid && token.seam == EnterContext)) {
        invalidationContext = invalidationContext
          .map(_.intersect(token.context))
          .orElse(Some(token.context))
        outOfContext = false
      } else if (!invalid && token.seam == LeaveContext) outOfContext = true
      index += 1
    }
    if (outOfContext)
      invalidationContext = invalidationContext
        .map(
          _.intersect(
            tokens.descriptions
              .lift(range.until)
              .map(_.context)
              .getOrElse(Context.Base)))
  }
  // Context in effect after/before the given token index; a seam token of the
  // matching kind contributes its parent context instead of its own.
  private def contextAfter(index: Int) = contextNear(index, LeaveContext)
  private def contextBefore(index: Int) = contextNear(index, EnterContext)
  private def contextNear(index: Int, seam: SeamType) =
    tokens.descriptions
      .lift(index)
      .map(token => {
        val context = token.context
        context.parent.filter(parent => token.seam == seam).getOrElse(context)
      })
      .getOrElse(Context.Base)
  // Scans left/right from `start` marking every seam that closes a scope
  // which was never opened (relative to `prototype`) as invalid.
  private def checkLeftBalance(start: Int, prototype: Context): Unit = {
    checkBalance((0 until (start + 1)).reverse,
                 LeaveContext,
                 EnterContext,
                 prototype)
  }
  private def checkRightBalance(start: Int, prototype: Context): Unit = {
    checkBalance(start until tokens.descriptions.length,
                 EnterContext,
                 LeaveContext,
                 prototype)
  }
  private def checkBalance(indexes: Range,
                           increment: SeamType,
                           decrement: SeamType,
                           prototype: Context): Unit = {
    var balance = 0
    breakable {
      for (index <- indexes) {
        val token = tokens.descriptions.lift(index).getOrElse(Token.lineBreak)
        // Reaching the prototype context means the scan left the edited area.
        if (token.context == prototype) break()
        if (token.seam == increment) balance += 1
        else if (token.seam == decrement) {
          if (balance > 0) balance -= 1
          else {
            // Unmatched closing seam: remember it as invalid.
            invalidTokens += index -> decrement
            valid = false
          }
        }
      }
    }
  }
}
| Eliah-Lakhin/papa-carlo | src/main/scala/name.lakhin.eliah.projects/papacarlo/lexis/FragmentController.scala | Scala | apache-2.0 | 9,281 |
package models
import play.api.Logger
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import play.api.libs.json.{JsObject, JsValue, Json}
import play.api.mvc._
import play.modules.reactivemongo.{ReactiveMongoPlugin, MongoController}
import play.modules.reactivemongo.json.collection.JSONCollection
import reactivemongo.api.collections.default.BSONCollection
import reactivemongo.api.gridfs.GridFS
import reactivemongo.api.indexes.{IndexType, Index}
/**
 * Shared MongoDB access for the application: a GridFS handle for file
 * storage plus the `fs.files` metadata collection.
 *
 * Instantiating this object ensures an index exists on the GridFS chunks
 * collection, logging the outcome asynchronously.
 */
object Models extends Controller with MongoController {
  private final val logger = Logger

  // GridFS facade over the default database for storing file contents.
  val gridFS = new GridFS(db)

  // Ensure the chunks collection is indexed; log success or failure alike.
  gridFS.ensureIndex().onComplete {
    case outcome =>
      Logger.info(s"Checked index, result is $outcome")
  }

  // `collectionFiles` is deliberately a `def`, not a `val`: caching the
  // collection reference can break under Play's development hot-reloading.
  def collectionFiles: JSONCollection = db.collection[JSONCollection]("fs.files")
}
| luanlv/website | app/models/Models.scala | Scala | apache-2.0 | 1,211 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package base
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns._
/**
 * A list of patterns bound by a single `val`/`var` definition, e.g. the
 * `x, y, z` in `val x, y, z = 44`.
 *
 * @author Alexander Podkhalyuzin
 * Date: 22.02.2008
 */
trait ScPatternList extends ScalaPsiElement {
  /** All patterns contained in this list, in source order. */
  def patterns: Seq[ScPattern]

  /**
   * True when the pattern list consists solely of simple reference
   * patterns (no destructuring), as in `val x, y, z = 44`.
   */
  def simplePatterns: Boolean
}

/** Extractor yielding the patterns of a [[ScPatternList]]; always matches. */
object ScPatternList {
  def unapply(e: ScPatternList): Some[Seq[ScPattern]] = Some(e.patterns)
}
package debop4s.core.io.model
case class YearWeek(year: Int = 0, week: Int = 1) | debop/debop4s | debop4s-core/src/test/scala/debop4s/core/io/model/YearWeek.scala | Scala | apache-2.0 | 80 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of code snippets that match specific criteria, offering a quick overview of the dataset's contents without deeper analysis.