| code (stringlengths 5–1M) | repo_name (stringlengths 5–109) | path (stringlengths 6–208) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 5–1M) |
|---|---|---|---|---|---|
package org.jetbrains.plugins.scala
package lang
package completion
import psi._
import api.base.ScReferenceElement
import psi.api.base.patterns.ScCaseClause
import psi.api.ScalaFile
import psi.api.toplevel.typedef.ScTypeDefinition
import psi.api.base.types.ScTypeElement
import com.intellij.psi._
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.packaging._
import org.jetbrains.plugins.scala.lang.lexer._
import org.jetbrains.plugins.scala.lang.parser._
import com.intellij.codeInsight.completion.{PrefixMatcher, CompletionParameters}
import refactoring.namesSuggester.NameSuggester
import types.ScType
import collection.mutable.ArrayBuffer
import com.intellij.openapi.util.Key
/**
* User: Alexander Podkhalyuzin
* Date: 21.05.2008.
*/
object ScalaCompletionUtil {
val PREFIX_COMPLETION_KEY: Key[Boolean] = Key.create("prefix.completion.key")
def completeThis(ref: ScReferenceExpression): Boolean = {
ref.qualifier match {
case Some(_) => false
case None =>
ref.getParent match {
case inf: ScInfixExpr if inf.operation == ref => false
case postf: ScPostfixExpr if postf.operation == ref => false
case pref: ScPrefixExpr if pref.operation == ref => false
case _ => true
}
}
}
def shouldRunClassNameCompletion(parameters: CompletionParameters, prefixMatcher: PrefixMatcher,
checkInvocationCount: Boolean = true, lookingForAnnotations: Boolean = false): Boolean = {
val element = parameters.getPosition
if (checkInvocationCount && parameters.getInvocationCount < 2) return false
if (element.getNode.getElementType == ScalaTokenTypes.tIDENTIFIER) {
element.getParent match {
case ref: ScReferenceElement if ref.qualifier != None => return false
case _ =>
}
}
if (checkInvocationCount && parameters.getInvocationCount >= 2) return true
val prefix = prefixMatcher.getPrefix
val capitalized = prefix.length() > 0 && prefix.substring(0, 1).capitalize == prefix.substring(0, 1)
capitalized || lookingForAnnotations
}
def generateAnonymousFunctionText(braceArgs: Boolean, params: scala.Seq[ScType], canonical: Boolean,
withoutEnd: Boolean = false): String = {
val text = new StringBuilder()
if (braceArgs) text.append("case ")
val paramNamesWithTypes = new ArrayBuffer[(String, ScType)]
def contains(name: String): Boolean = {
paramNamesWithTypes.exists{
case (s, _) => s == name
}
}
for (param <- params) {
val names = NameSuggester.suggestNamesByType(param)
var name = if (names.length == 0) "x" else names(0)
if (contains(name)) {
var count = 0
var newName = name + count
while (contains(newName)) {
count += 1
newName = name + count
}
name = newName
}
paramNamesWithTypes.+=(name -> param)
}
val iter = paramNamesWithTypes.map {
case (s, tp) => s + ": " + (if (canonical) {
ScType.canonicalText(tp)
} else ScType.presentableText(tp))
}
val paramsString =
if (paramNamesWithTypes.size != 1 || !braceArgs) iter.mkString("(", ", ", ")")
else iter.head
text.append(paramsString)
if (!withoutEnd) text.append(" =>")
text.toString()
}
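// Illustrative sketch, not part of the original file: assuming NameSuggester proposes
// "i" for an Int-like ScType, duplicate names are disambiguated with a numeric suffix,
// so a call such as
//   generateAnonymousFunctionText(braceArgs = true, params = Seq(intTpe, intTpe), canonical = false)
// would produce roughly "case (i: Int, i0: Int) =>". The value intTpe and the suggested
// names are assumptions for illustration only.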
def getLeafByOffset(offset: Int, element: PsiElement): PsiElement = {
if (offset < 0) {
return null
}
var candidate: PsiElement = element.getContainingFile
if (candidate == null || candidate.getNode == null) return null
while (candidate.getNode.getChildren(null).length > 0) {
candidate = candidate.findElementAt(offset)
if (candidate == null || candidate.getNode == null) return null
}
candidate
}
/**
* The first value of the result tells whether to stop here.
* If the first value is true, the second value is the result to return.
*/
def getForAll(parent: PsiElement, leaf: PsiElement): (Boolean, Boolean) = {
parent match {
case _: ScalaFile =>
if (leaf.getNextSibling != null && leaf.getNextSibling.getNextSibling.isInstanceOf[ScPackaging] &&
leaf.getNextSibling.getNextSibling.getText.indexOf('{') == -1)
return (true, false)
case _ =>
}
parent match {
case _: ScalaFile | _: ScPackaging =>
var node = leaf.getPrevSibling
if (node.isInstanceOf[PsiWhiteSpace]) node = node.getPrevSibling
node match {
case x: PsiErrorElement =>
val s = ErrMsg("wrong.top.statment.declaration")
x.getErrorDescription match {
case `s` => return (true, true)
case _ => return (true, false)
}
case _ => return (true, true)
}
case expr: ScReferenceExpression =>
parent.getParent match {
case _: ScBlockExpr | _: ScTemplateBody | _: ScBlock | _: ScCaseClause =>
if (awful(parent, leaf))
return (true, true)
case _ =>
}
case _ =>
}
(false, true)
}
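// Hypothetical caller pattern (not from this file) showing how the tuple documented
// above is meant to be consumed:
//   val (stop, result) = getForAll(parent, leaf)
//   if (stop) return result
// i.e. when the first value is true, the second value is the answer to propagate.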
def awful(parent: PsiElement, leaf: PsiElement): Boolean = {
(leaf.getPrevSibling == null || leaf.getPrevSibling.getPrevSibling == null ||
leaf.getPrevSibling.getPrevSibling.getNode.getElementType != ScalaTokenTypes.kDEF) &&
(parent.getPrevSibling == null || parent.getPrevSibling.getPrevSibling == null ||
(parent.getPrevSibling.getPrevSibling.getNode.getElementType != ScalaElementTypes.MATCH_STMT ||
!parent.getPrevSibling.getPrevSibling.getLastChild.isInstanceOf[PsiErrorElement]))
}
val DUMMY_IDENTIFIER = "IntellijIdeaRulezzz"
def checkClassWith(clazz: ScTypeDefinition, additionText: String, manager: PsiManager): Boolean = {
val classText: String = clazz.getText
val text = removeDummy(classText + " " + additionText)
val DUMMY = "dummy."
val dummyFile = PsiFileFactory.getInstance(manager.getProject).
createFileFromText(DUMMY + ScalaFileType.SCALA_FILE_TYPE.getDefaultExtension,
ScalaFileType.SCALA_FILE_TYPE, text).asInstanceOf[ScalaFile]
!checkErrors(dummyFile)
}
def checkElseWith(text: String, manager: PsiManager): Boolean = {
val DUMMY = "dummy."
val dummyFile = PsiFileFactory.getInstance(manager.getProject).
createFileFromText(DUMMY + ScalaFileType.SCALA_FILE_TYPE.getDefaultExtension,
ScalaFileType.SCALA_FILE_TYPE, "class a {\n" + text + "\n}").asInstanceOf[ScalaFile]
!checkErrors(dummyFile)
}
def checkDoWith(text: String, manager: PsiManager): Boolean = {
val DUMMY = "dummy."
val dummyFile = PsiFileFactory.getInstance(manager.getProject).
createFileFromText(DUMMY + ScalaFileType.SCALA_FILE_TYPE.getDefaultExtension,
ScalaFileType.SCALA_FILE_TYPE, "class a {\n" + text + "\n}").asInstanceOf[ScalaFile]
!checkErrors(dummyFile)
}
def checkTypeWith(typez: ScTypeElement, additionText: String, manager: PsiManager): Boolean = {
val typeText = typez.getText
val text = removeDummy("class a { x:" + typeText + " " + additionText + "}")
val DUMMY = "dummy."
val dummyFile = PsiFileFactory.getInstance(manager.getProject).
createFileFromText(DUMMY + ScalaFileType.SCALA_FILE_TYPE.getDefaultExtension,
ScalaFileType.SCALA_FILE_TYPE, text).asInstanceOf[ScalaFile]
val value = !checkErrors(dummyFile)
value
}
def checkAnyTypeWith(typez: ScTypeElement, additionText: String, manager: PsiManager): Boolean = {
val typeText = typez.getText
val text = removeDummy("class a { val x:" + typeText + " " + additionText + "}")
val DUMMY = "dummy."
val dummyFile = PsiFileFactory.getInstance(manager.getProject).
createFileFromText(DUMMY + ScalaFileType.SCALA_FILE_TYPE.getDefaultExtension,
ScalaFileType.SCALA_FILE_TYPE, text).asInstanceOf[ScalaFile]
val value = !checkErrors(dummyFile)
value
}
def checkAnyWith(typez: PsiElement, additionText: String, manager: PsiManager): Boolean = {
val typeText = typez.getText
val text = removeDummy("class a { " + typeText + " " + additionText + "}")
val DUMMY = "dummy."
val dummyFile = PsiFileFactory.getInstance(manager.getProject).
createFileFromText(DUMMY + ScalaFileType.SCALA_FILE_TYPE.getDefaultExtension,
ScalaFileType.SCALA_FILE_TYPE, text).asInstanceOf[ScalaFile]
!checkErrors(dummyFile)
}
def removeDummy(text: String): String = {
replaceDummy(text, "")
}
def replaceDummy(text: String, to: String): String = {
if (text.indexOf(DUMMY_IDENTIFIER) != -1) {
text.replaceAll("\\w*" + DUMMY_IDENTIFIER, to)
} else text
}
def checkNewWith(news: ScNewTemplateDefinition, additionText: String, manager: PsiManager): Boolean = {
val newsText = news.getText
val text = removeDummy("class a { " + newsText + " " + additionText + "}")
val DUMMY = "dummy."
val dummyFile = PsiFileFactory.getInstance(manager.getProject).
createFileFromText(DUMMY + ScalaFileType.SCALA_FILE_TYPE.getDefaultExtension,
ScalaFileType.SCALA_FILE_TYPE, text).asInstanceOf[ScalaFile]
!checkErrors(dummyFile)
}
def checkReplace(elem: PsiElement, additionText: String, manager: PsiManager): Boolean = {
val typeText = elem.getText
var text = "class a { " + typeText + "}"
if (text.indexOf(DUMMY_IDENTIFIER) == -1) return false
text = replaceDummy(text, " "+ additionText+ " ")
val DUMMY = "dummy."
val dummyFile = PsiFileFactory.getInstance(manager.getProject).
createFileFromText(DUMMY + ScalaFileType.SCALA_FILE_TYPE.getDefaultExtension,
ScalaFileType.SCALA_FILE_TYPE, text).asInstanceOf[ScalaFile]
!checkErrors(dummyFile)
}
private def checkErrors(elem: PsiElement): Boolean = {
elem match {
case _: PsiErrorElement => return true
case _ =>
}
val iterator = elem.getChildren.iterator
while (iterator.hasNext) {
val child = iterator.next()
if (checkErrors(child)) return true
}
false
}
/**
* @param leaf Start PsiElement
* @return (End PsiElement, ContainingFile.isScriptFile)
*/
def processPsiLeafForFilter(leaf: PsiElement): (PsiElement, Boolean) = Option(leaf) map {
l => l.getContainingFile match {
case scriptFile: ScalaFile if scriptFile.isScriptFile() => (leaf.getParent, true)
case scalaFile: ScalaFile => (leaf, false)
case _ => (null, false)
}
} getOrElse (null, false)
}
| consulo/consulo-scala | src/org/jetbrains/plugins/scala/lang/completion/ScalaCompletionUtil.scala | Scala | apache-2.0 | 10,581 |
package com.peterpotts.gene
sealed abstract class AminoAcid(val code: String, val symbol: Char) extends Serializable
case object Alanine extends AminoAcid("Ala", 'A') with AminoAcid.Nonpolar
case object Arginine extends AminoAcid("Arg", 'R') with AminoAcid.Basic
case object Asparagine extends AminoAcid("Asn", 'N') with AminoAcid.Amide
case object AsparticAcid extends AminoAcid("Asp", 'D') with AminoAcid.Acidic
case object Cysteine extends AminoAcid("Cys", 'C') with AminoAcid.Disulfide
case object Glutamine extends AminoAcid("Gln", 'Q') with AminoAcid.Amide
case object GlutamicAcid extends AminoAcid("Glu", 'E') with AminoAcid.Acidic
case object Glycine extends AminoAcid("Gly", 'G') with AminoAcid.Hydrogen
case object Histidine extends AminoAcid("His", 'H') with AminoAcid.Basic
case object Isoleucine extends AminoAcid("Ile", 'I') with AminoAcid.Nonpolar
case object Leucine extends AminoAcid("Leu", 'L') with AminoAcid.Nonpolar
case object Lysine extends AminoAcid("Lys", 'K') with AminoAcid.Basic
case object Methionine extends AminoAcid("Met", 'M') with AminoAcid.Nonpolar
case object Phenylalanine extends AminoAcid("Phe", 'F') with AminoAcid.Nonpolar
case object Proline extends AminoAcid("Pro", 'P') with AminoAcid.Nitrogen
case object Serine extends AminoAcid("Ser", 'S') with AminoAcid.Hydroxy
case object Threonine extends AminoAcid("Thr", 'T') with AminoAcid.Hydroxy
case object Tryptophan extends AminoAcid("Trp", 'W') with AminoAcid.Nonpolar
case object Tyrosine extends AminoAcid("Tyr", 'Y') with AminoAcid.Hydroxy
case object Valine extends AminoAcid("Val", 'V') with AminoAcid.Nonpolar
case object Selenocysteine extends AminoAcid("Sec", 'U')
case object Pyrrolysine extends AminoAcid("Pyl", 'O')
object AminoAcid {
val list = List(
Alanine,
Arginine,
Asparagine,
AsparticAcid,
Cysteine,
Glutamine,
GlutamicAcid,
Glycine,
Histidine,
Isoleucine,
Leucine,
Lysine,
Methionine,
Phenylalanine,
Proline,
Serine,
Threonine,
Tryptophan,
Tyrosine,
Valine,
Selenocysteine,
Pyrrolysine)
private val codeToAminoAcid = list.map(aminoAcid => aminoAcid.code -> aminoAcid).toMap
def apply(code: String): AminoAcid = codeToAminoAcid(code)
def get(code: String): Option[AminoAcid] = codeToAminoAcid.get(code)
private val symbolToAminoAcid = list.map(aminoAcid => aminoAcid.symbol -> aminoAcid).toMap
def apply(symbol: Char): AminoAcid = symbolToAminoAcid(symbol)
def get(symbol: Char): Option[AminoAcid] = symbolToAminoAcid.get(symbol)
/**
* Hydrophobic means does not like water.
*/
trait Hydrophobic
/**
* Hydrophilic means does like water.
*/
trait Hydrophilic
trait Charged extends Hydrophilic
trait Positive extends Charged
trait Negative extends Charged
trait Uncharged
trait Neutral extends Uncharged
trait Nonpolar extends Neutral with Hydrophobic
trait Hydrogen extends Nonpolar
trait Nitrogen extends Nonpolar
trait Disulfide extends Nonpolar
trait Polar extends Neutral with Hydrophilic
trait Amide extends Polar
trait Hydroxy extends Polar
trait Acidic extends Negative
trait Basic extends Positive
}
| peterpotts/gene | src/main/scala/com/peterpotts/gene/AminoAcid.scala | Scala | mit | 3,220 |
package ar.com.crypticmind.basewebapp.misc
import scala.util.Random
import org.apache.commons.lang3.RandomStringUtils
import java.util.UUID
object IdGenerator {
val random = new Random()
def shortId: String = RandomStringUtils.randomAlphanumeric(8)
def uuid: String = new UUID(random.nextLong(), random.nextLong()).toString
}
| crypticmind/base-webapp | backend/src/main/scala/ar/com/crypticmind/basewebapp/misc/IdGenerator.scala | Scala | mit | 337 |
package org.littlewings.infinispan.jta.service
import scala.collection.JavaConverters._
import javax.ejb.{LocalBean, Stateless}
import javax.persistence.{EntityManager, PersistenceContext}
import org.littlewings.infinispan.jta.entity.User
@Stateless
@LocalBean
class UserService {
@PersistenceContext
private var em: EntityManager = _
def create(user: User): Unit =
em.persist(user)
def createFail(user: User): Unit = {
em.persist(user)
throw new RuntimeException("Oops!!")
}
def update(user: User): User =
em.merge(user)
def remove(user: User): Unit =
em.remove(em.merge(user))
def findById(id: Int): User =
em.find(classOf[User], id)
def findAllOrderById: Iterable[User] =
em
.createQuery("""|SELECT u
| FROM User u
| ORDER BY u.id ASC""".stripMargin)
.getResultList
.asScala
.asInstanceOf[Iterable[User]]
def removeAll(): Unit =
em
.createQuery("DELETE FROM User")
.executeUpdate()
}
| kazuhira-r/infinispan-examples | infinispan-jta/src/main/scala/org/littlewings/infinispan/jta/service/UserService.scala | Scala | mit | 1,031 |
/* *\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2015, Gary Keorkunian **
** **
\* */
package squants.mass
import org.scalatest.{ FlatSpec, Matchers }
import squants.QuantityParseException
import squants.space.{Hectares, SquareMeters}
/**
* @author garyKeorkunian
* @since 0.2.3
*
*/
class AreaDensitySpec extends FlatSpec with Matchers {
behavior of "AreaDensity and its Units of Measure"
it should "create values using UOM factories" in {
KilogramsPerSquareMeter(1).toKilogramsPerSquareMeter should be(1)
KilogramsPerHectare(1).toKilogramsPerHectare should be(1)
GramsPerSquareCentimeter(1).toGramsPerSquareCentimeter should be(1)
}
it should "create values from properly formatted Strings" in {
AreaDensity("10.22 kg/m²").get should be(KilogramsPerSquareMeter(10.22))
AreaDensity("10.45 zz").failed.get should be(QuantityParseException("Unable to parse AreaDensity", "10.45 zz"))
AreaDensity("zz kg/m²").failed.get should be(QuantityParseException("Unable to parse AreaDensity", "zz kg/m²"))
AreaDensity("10.33 kg/hectare").get should be(KilogramsPerHectare(10.33))
AreaDensity("10.19 g/cm²").get should be(GramsPerSquareCentimeter(10.19))
}
it should "properly convert to all supported Units of Measure" in {
val x = KilogramsPerSquareMeter(1)
x.toKilogramsPerSquareMeter should be(1)
x.toKilogramsPerHectare should be(1e4)
x.toGramsPerSquareCentimeter should be(0.1)
}
it should "return properly formatted strings for all supported Units of Measure" in {
KilogramsPerSquareMeter(1).toString should be("1.0 kg/m²")
KilogramsPerHectare(1).toString should be("1.0 kg/hectare")
GramsPerSquareCentimeter(1).toString should be("1.0 g/cm²")
}
it should "return Mass when multiplied by Volume" in {
KilogramsPerSquareMeter(1) * SquareMeters(1) should be(Kilograms(1))
KilogramsPerHectare(1) * Hectares(1) should be(Kilograms(1))
GramsPerSquareCentimeter(1000) * SquareMeters(1e-4) should be(Kilograms(1))
}
behavior of "AreaDensityConversion"
it should "provide aliases for single unit values" in {
import AreaDensityConversions._
kilogramPerSquareMeter should be(KilogramsPerSquareMeter(1))
kilogramPerHectare should be(KilogramsPerHectare(1))
gramPerSquareCentimeter should be(GramsPerSquareCentimeter(1))
}
it should "provide implicit conversion from Double" in {
import AreaDensityConversions._
val d = 10.22d
d.kilogramsPerSquareMeter should be(KilogramsPerSquareMeter(d))
d.kilogramsPerHectare should be(KilogramsPerHectare(d))
d.gramsPerSquareCentimeter should be(GramsPerSquareCentimeter(d))
}
it should "provide Numeric support" in {
import AreaDensityConversions.AreaDensityNumeric
val as = List(KilogramsPerSquareMeter(100), KilogramsPerSquareMeter(10))
as.sum should be(KilogramsPerSquareMeter(110))
}
}
| derekmorr/squants | shared/src/test/scala/squants/mass/AreaDensitySpec.scala | Scala | apache-2.0 | 3,337 |
import quoted._
def foo()(using QuoteContext) = {
type C
'[C] // error
}
| som-snytt/dotty | tests/neg/i7013c.scala | Scala | apache-2.0 | 78 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.v2
import java.util
import java.util.Collections
import scala.collection.JavaConverters._
import org.scalatest.BeforeAndAfter
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalog.v2.{Catalogs, Identifier, NamespaceChange, TableChange}
import org.apache.spark.sql.catalyst.analysis.{NamespaceAlreadyExistsException, NoSuchNamespaceException, NoSuchTableException, TableAlreadyExistsException}
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types.{DoubleType, IntegerType, LongType, StringType, StructField, StructType, TimestampType}
import org.apache.spark.sql.util.CaseInsensitiveStringMap
class V2SessionCatalogBaseSuite extends SparkFunSuite with SharedSparkSession with BeforeAndAfter {
val emptyProps: util.Map[String, String] = Collections.emptyMap[String, String]
val schema: StructType = new StructType()
.add("id", IntegerType)
.add("data", StringType)
val testNs: Array[String] = Array("db")
val defaultNs: Array[String] = Array("default")
val testIdent: Identifier = Identifier.of(testNs, "test_table")
def newCatalog(): V2SessionCatalog = {
val newCatalog = new V2SessionCatalog(spark.sessionState.catalog, spark.sessionState.conf)
newCatalog.initialize("test", CaseInsensitiveStringMap.empty())
newCatalog
}
}
class V2SessionCatalogTableSuite extends V2SessionCatalogBaseSuite {
import org.apache.spark.sql.catalog.v2.CatalogV2Implicits._
override protected def beforeAll(): Unit = {
super.beforeAll()
val catalog = newCatalog()
catalog.createNamespace(Array("db"), emptyProps)
catalog.createNamespace(Array("db2"), emptyProps)
catalog.createNamespace(Array("ns"), emptyProps)
catalog.createNamespace(Array("ns2"), emptyProps)
}
override protected def afterAll(): Unit = {
val catalog = newCatalog()
catalog.dropNamespace(Array("db"))
catalog.dropNamespace(Array("db2"))
catalog.dropNamespace(Array("ns"))
catalog.dropNamespace(Array("ns2"))
super.afterAll()
}
after {
newCatalog().dropTable(testIdent)
newCatalog().dropTable(testIdentNew)
}
private val testIdentNew = Identifier.of(testNs, "test_table_new")
test("listTables") {
val catalog = newCatalog()
val ident1 = Identifier.of(Array("ns"), "test_table_1")
val ident2 = Identifier.of(Array("ns"), "test_table_2")
val ident3 = Identifier.of(Array("ns2"), "test_table_1")
assert(catalog.listTables(Array("ns")).isEmpty)
catalog.createTable(ident1, schema, Array.empty, emptyProps)
assert(catalog.listTables(Array("ns")).toSet == Set(ident1))
assert(catalog.listTables(Array("ns2")).isEmpty)
catalog.createTable(ident3, schema, Array.empty, emptyProps)
catalog.createTable(ident2, schema, Array.empty, emptyProps)
assert(catalog.listTables(Array("ns")).toSet == Set(ident1, ident2))
assert(catalog.listTables(Array("ns2")).toSet == Set(ident3))
catalog.dropTable(ident1)
assert(catalog.listTables(Array("ns")).toSet == Set(ident2))
catalog.dropTable(ident2)
assert(catalog.listTables(Array("ns")).isEmpty)
assert(catalog.listTables(Array("ns2")).toSet == Set(ident3))
catalog.dropTable(ident3)
}
test("createTable") {
val catalog = newCatalog()
assert(!catalog.tableExists(testIdent))
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
val parsed = CatalystSqlParser.parseMultipartIdentifier(table.name)
assert(parsed == Seq("db", "test_table"))
assert(table.schema == schema)
assert(table.properties.asScala == Map())
assert(catalog.tableExists(testIdent))
}
test("createTable: with properties") {
val catalog = newCatalog()
val properties = new util.HashMap[String, String]()
properties.put("property", "value")
assert(!catalog.tableExists(testIdent))
val table = catalog.createTable(testIdent, schema, Array.empty, properties)
val parsed = CatalystSqlParser.parseMultipartIdentifier(table.name)
assert(parsed == Seq("db", "test_table"))
assert(table.schema == schema)
assert(table.properties == properties)
assert(catalog.tableExists(testIdent))
}
test("createTable: table already exists") {
val catalog = newCatalog()
assert(!catalog.tableExists(testIdent))
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
val exc = intercept[TableAlreadyExistsException] {
catalog.createTable(testIdent, schema, Array.empty, emptyProps)
}
assert(exc.message.contains(table.name()))
assert(exc.message.contains("already exists"))
assert(catalog.tableExists(testIdent))
}
test("tableExists") {
val catalog = newCatalog()
assert(!catalog.tableExists(testIdent))
catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(catalog.tableExists(testIdent))
catalog.dropTable(testIdent)
assert(!catalog.tableExists(testIdent))
}
test("loadTable") {
val catalog = newCatalog()
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
val loaded = catalog.loadTable(testIdent)
assert(table.name == loaded.name)
assert(table.schema == loaded.schema)
assert(table.properties == loaded.properties)
}
test("loadTable: table does not exist") {
val catalog = newCatalog()
val exc = intercept[NoSuchTableException] {
catalog.loadTable(testIdent)
}
assert(exc.message.contains(testIdent.quoted))
assert(exc.message.contains("not found"))
}
test("invalidateTable") {
val catalog = newCatalog()
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
catalog.invalidateTable(testIdent)
val loaded = catalog.loadTable(testIdent)
assert(table.name == loaded.name)
assert(table.schema == loaded.schema)
assert(table.properties == loaded.properties)
}
test("invalidateTable: table does not exist") {
val catalog = newCatalog()
assert(catalog.tableExists(testIdent) === false)
catalog.invalidateTable(testIdent)
}
test("alterTable: add property") {
val catalog = newCatalog()
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(table.properties.asScala == Map())
val updated = catalog.alterTable(testIdent, TableChange.setProperty("prop-1", "1"))
assert(updated.properties.asScala == Map("prop-1" -> "1"))
val loaded = catalog.loadTable(testIdent)
assert(loaded.properties.asScala == Map("prop-1" -> "1"))
assert(table.properties.asScala == Map())
}
test("alterTable: add property to existing") {
val catalog = newCatalog()
val properties = new util.HashMap[String, String]()
properties.put("prop-1", "1")
val table = catalog.createTable(testIdent, schema, Array.empty, properties)
assert(table.properties.asScala == Map("prop-1" -> "1"))
val updated = catalog.alterTable(testIdent, TableChange.setProperty("prop-2", "2"))
assert(updated.properties.asScala == Map("prop-1" -> "1", "prop-2" -> "2"))
val loaded = catalog.loadTable(testIdent)
assert(loaded.properties.asScala == Map("prop-1" -> "1", "prop-2" -> "2"))
assert(table.properties.asScala == Map("prop-1" -> "1"))
}
test("alterTable: remove existing property") {
val catalog = newCatalog()
val properties = new util.HashMap[String, String]()
properties.put("prop-1", "1")
val table = catalog.createTable(testIdent, schema, Array.empty, properties)
assert(table.properties.asScala == Map("prop-1" -> "1"))
val updated = catalog.alterTable(testIdent, TableChange.removeProperty("prop-1"))
assert(updated.properties.asScala == Map())
val loaded = catalog.loadTable(testIdent)
assert(loaded.properties.asScala == Map())
assert(table.properties.asScala == Map("prop-1" -> "1"))
}
test("alterTable: remove missing property") {
val catalog = newCatalog()
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(table.properties.asScala == Map())
val updated = catalog.alterTable(testIdent, TableChange.removeProperty("prop-1"))
assert(updated.properties.asScala == Map())
val loaded = catalog.loadTable(testIdent)
assert(loaded.properties.asScala == Map())
assert(table.properties.asScala == Map())
}
test("alterTable: add top-level column") {
val catalog = newCatalog()
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(table.schema == schema)
val updated = catalog.alterTable(testIdent, TableChange.addColumn(Array("ts"), TimestampType))
assert(updated.schema == schema.add("ts", TimestampType))
}
test("alterTable: add required column") {
val catalog = newCatalog()
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(table.schema == schema)
val updated = catalog.alterTable(testIdent,
TableChange.addColumn(Array("ts"), TimestampType, false))
assert(updated.schema == schema.add("ts", TimestampType, nullable = false))
}
test("alterTable: add column with comment") {
val catalog = newCatalog()
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(table.schema == schema)
val updated = catalog.alterTable(testIdent,
TableChange.addColumn(Array("ts"), TimestampType, false, "comment text"))
val field = StructField("ts", TimestampType, nullable = false).withComment("comment text")
assert(updated.schema == schema.add(field))
}
test("alterTable: add nested column") {
val catalog = newCatalog()
val pointStruct = new StructType().add("x", DoubleType).add("y", DoubleType)
val tableSchema = schema.add("point", pointStruct)
val table = catalog.createTable(testIdent, tableSchema, Array.empty, emptyProps)
assert(table.schema == tableSchema)
val updated = catalog.alterTable(testIdent,
TableChange.addColumn(Array("point", "z"), DoubleType))
val expectedSchema = schema.add("point", pointStruct.add("z", DoubleType))
assert(updated.schema == expectedSchema)
}
test("alterTable: add column to primitive field fails") {
val catalog = newCatalog()
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(table.schema == schema)
val exc = intercept[IllegalArgumentException] {
catalog.alterTable(testIdent, TableChange.addColumn(Array("data", "ts"), TimestampType))
}
assert(exc.getMessage.contains("Not a struct"))
assert(exc.getMessage.contains("data"))
// the table has not changed
assert(catalog.loadTable(testIdent).schema == schema)
}
test("alterTable: add field to missing column fails") {
val catalog = newCatalog()
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(table.schema == schema)
val exc = intercept[IllegalArgumentException] {
catalog.alterTable(testIdent,
TableChange.addColumn(Array("missing_col", "new_field"), StringType))
}
assert(exc.getMessage.contains("missing_col"))
assert(exc.getMessage.contains("Cannot find"))
}
test("alterTable: update column data type") {
val catalog = newCatalog()
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(table.schema == schema)
val updated = catalog.alterTable(testIdent, TableChange.updateColumnType(Array("id"), LongType))
val expectedSchema = new StructType().add("id", LongType).add("data", StringType)
assert(updated.schema == expectedSchema)
}
test("alterTable: update column data type and nullability") {
val catalog = newCatalog()
val originalSchema = new StructType()
.add("id", IntegerType, nullable = false)
.add("data", StringType)
val table = catalog.createTable(testIdent, originalSchema, Array.empty, emptyProps)
assert(table.schema == originalSchema)
val updated = catalog.alterTable(testIdent,
TableChange.updateColumnType(Array("id"), LongType, true))
val expectedSchema = new StructType().add("id", LongType).add("data", StringType)
assert(updated.schema == expectedSchema)
}
test("alterTable: update optional column to required fails") {
val catalog = newCatalog()
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(table.schema == schema)
val exc = intercept[IllegalArgumentException] {
catalog.alterTable(testIdent, TableChange.updateColumnType(Array("id"), LongType, false))
}
assert(exc.getMessage.contains("Cannot change optional column to required"))
assert(exc.getMessage.contains("id"))
}
test("alterTable: update missing column fails") {
val catalog = newCatalog()
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(table.schema == schema)
val exc = intercept[IllegalArgumentException] {
catalog.alterTable(testIdent,
TableChange.updateColumnType(Array("missing_col"), LongType))
}
assert(exc.getMessage.contains("missing_col"))
assert(exc.getMessage.contains("Cannot find"))
}
test("alterTable: add comment") {
val catalog = newCatalog()
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(table.schema == schema)
val updated = catalog.alterTable(testIdent,
TableChange.updateColumnComment(Array("id"), "comment text"))
val expectedSchema = new StructType()
.add("id", IntegerType, nullable = true, "comment text")
.add("data", StringType)
assert(updated.schema == expectedSchema)
}
test("alterTable: replace comment") {
val catalog = newCatalog()
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(table.schema == schema)
catalog.alterTable(testIdent, TableChange.updateColumnComment(Array("id"), "comment text"))
val expectedSchema = new StructType()
.add("id", IntegerType, nullable = true, "replacement comment")
.add("data", StringType)
val updated = catalog.alterTable(testIdent,
TableChange.updateColumnComment(Array("id"), "replacement comment"))
assert(updated.schema == expectedSchema)
}
test("alterTable: add comment to missing column fails") {
val catalog = newCatalog()
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(table.schema == schema)
val exc = intercept[IllegalArgumentException] {
catalog.alterTable(testIdent,
TableChange.updateColumnComment(Array("missing_col"), "comment"))
}
assert(exc.getMessage.contains("missing_col"))
assert(exc.getMessage.contains("Cannot find"))
}
test("alterTable: rename top-level column") {
val catalog = newCatalog()
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(table.schema == schema)
val updated = catalog.alterTable(testIdent, TableChange.renameColumn(Array("id"), "some_id"))
val expectedSchema = new StructType().add("some_id", IntegerType).add("data", StringType)
assert(updated.schema == expectedSchema)
}
test("alterTable: rename nested column") {
val catalog = newCatalog()
val pointStruct = new StructType().add("x", DoubleType).add("y", DoubleType)
val tableSchema = schema.add("point", pointStruct)
val table = catalog.createTable(testIdent, tableSchema, Array.empty, emptyProps)
assert(table.schema == tableSchema)
val updated = catalog.alterTable(testIdent,
TableChange.renameColumn(Array("point", "x"), "first"))
val newPointStruct = new StructType().add("first", DoubleType).add("y", DoubleType)
val expectedSchema = schema.add("point", newPointStruct)
assert(updated.schema == expectedSchema)
}
test("alterTable: rename struct column") {
val catalog = newCatalog()
val pointStruct = new StructType().add("x", DoubleType).add("y", DoubleType)
val tableSchema = schema.add("point", pointStruct)
val table = catalog.createTable(testIdent, tableSchema, Array.empty, emptyProps)
assert(table.schema == tableSchema)
val updated = catalog.alterTable(testIdent,
TableChange.renameColumn(Array("point"), "p"))
val newPointStruct = new StructType().add("x", DoubleType).add("y", DoubleType)
val expectedSchema = schema.add("p", newPointStruct)
assert(updated.schema == expectedSchema)
}
test("alterTable: rename missing column fails") {
val catalog = newCatalog()
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(table.schema == schema)
val exc = intercept[IllegalArgumentException] {
catalog.alterTable(testIdent,
TableChange.renameColumn(Array("missing_col"), "new_name"))
}
assert(exc.getMessage.contains("missing_col"))
assert(exc.getMessage.contains("Cannot find"))
}
test("alterTable: multiple changes") {
val catalog = newCatalog()
val pointStruct = new StructType().add("x", DoubleType).add("y", DoubleType)
val tableSchema = schema.add("point", pointStruct)
val table = catalog.createTable(testIdent, tableSchema, Array.empty, emptyProps)
assert(table.schema == tableSchema)
val updated = catalog.alterTable(testIdent,
TableChange.renameColumn(Array("point", "x"), "first"),
TableChange.renameColumn(Array("point", "y"), "second"))
val newPointStruct = new StructType().add("first", DoubleType).add("second", DoubleType)
val expectedSchema = schema.add("point", newPointStruct)
assert(updated.schema == expectedSchema)
}
test("alterTable: delete top-level column") {
val catalog = newCatalog()
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(table.schema == schema)
val updated = catalog.alterTable(testIdent,
TableChange.deleteColumn(Array("id")))
val expectedSchema = new StructType().add("data", StringType)
assert(updated.schema == expectedSchema)
}
test("alterTable: delete nested column") {
val catalog = newCatalog()
val pointStruct = new StructType().add("x", DoubleType).add("y", DoubleType)
val tableSchema = schema.add("point", pointStruct)
val table = catalog.createTable(testIdent, tableSchema, Array.empty, emptyProps)
assert(table.schema == tableSchema)
val updated = catalog.alterTable(testIdent,
TableChange.deleteColumn(Array("point", "y")))
val newPointStruct = new StructType().add("x", DoubleType)
val expectedSchema = schema.add("point", newPointStruct)
assert(updated.schema == expectedSchema)
}
test("alterTable: delete missing column fails") {
val catalog = newCatalog()
val table = catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(table.schema == schema)
val exc = intercept[IllegalArgumentException] {
catalog.alterTable(testIdent, TableChange.deleteColumn(Array("missing_col")))
}
assert(exc.getMessage.contains("missing_col"))
assert(exc.getMessage.contains("Cannot find"))
}
test("alterTable: delete missing nested column fails") {
val catalog = newCatalog()
val pointStruct = new StructType().add("x", DoubleType).add("y", DoubleType)
val tableSchema = schema.add("point", pointStruct)
val table = catalog.createTable(testIdent, tableSchema, Array.empty, emptyProps)
assert(table.schema == tableSchema)
val exc = intercept[IllegalArgumentException] {
catalog.alterTable(testIdent, TableChange.deleteColumn(Array("point", "z")))
}
assert(exc.getMessage.contains("z"))
assert(exc.getMessage.contains("Cannot find"))
}
test("alterTable: table does not exist") {
val catalog = newCatalog()
val exc = intercept[NoSuchTableException] {
catalog.alterTable(testIdent, TableChange.setProperty("prop", "val"))
}
assert(exc.message.contains(testIdent.quoted))
assert(exc.message.contains("not found"))
}
test("dropTable") {
val catalog = newCatalog()
assert(!catalog.tableExists(testIdent))
catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(catalog.tableExists(testIdent))
val wasDropped = catalog.dropTable(testIdent)
assert(wasDropped)
assert(!catalog.tableExists(testIdent))
}
test("dropTable: table does not exist") {
val catalog = newCatalog()
assert(!catalog.tableExists(testIdent))
val wasDropped = catalog.dropTable(testIdent)
assert(!wasDropped)
assert(!catalog.tableExists(testIdent))
}
test("renameTable") {
val catalog = newCatalog()
assert(!catalog.tableExists(testIdent))
assert(!catalog.tableExists(testIdentNew))
catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(catalog.tableExists(testIdent))
catalog.renameTable(testIdent, testIdentNew)
assert(!catalog.tableExists(testIdent))
assert(catalog.tableExists(testIdentNew))
}
test("renameTable: fail if table does not exist") {
val catalog = newCatalog()
val exc = intercept[NoSuchTableException] {
catalog.renameTable(testIdent, testIdentNew)
}
assert(exc.message.contains(testIdent.quoted))
assert(exc.message.contains("not found"))
}
test("renameTable: fail if new table name already exists") {
val catalog = newCatalog()
assert(!catalog.tableExists(testIdent))
assert(!catalog.tableExists(testIdentNew))
catalog.createTable(testIdent, schema, Array.empty, emptyProps)
catalog.createTable(testIdentNew, schema, Array.empty, emptyProps)
assert(catalog.tableExists(testIdent))
assert(catalog.tableExists(testIdentNew))
val exc = intercept[TableAlreadyExistsException] {
catalog.renameTable(testIdent, testIdentNew)
}
assert(exc.message.contains(testIdentNew.quoted))
assert(exc.message.contains("already exists"))
}
test("renameTable: fail if db does not match for old and new table names") {
val catalog = newCatalog()
val testIdentNewOtherDb = Identifier.of(Array("db2"), "test_table_new")
assert(!catalog.tableExists(testIdent))
assert(!catalog.tableExists(testIdentNewOtherDb))
catalog.createTable(testIdent, schema, Array.empty, emptyProps)
assert(catalog.tableExists(testIdent))
val exc = intercept[AnalysisException] {
catalog.renameTable(testIdent, testIdentNewOtherDb)
}
assert(exc.message.contains(testIdent.namespace.quoted))
assert(exc.message.contains(testIdentNewOtherDb.namespace.quoted))
assert(exc.message.contains("RENAME TABLE source and destination databases do not match"))
}
}
class V2SessionCatalogNamespaceSuite extends V2SessionCatalogBaseSuite {
import org.apache.spark.sql.catalog.v2.CatalogV2Implicits._
def checkMetadata(
expected: scala.collection.Map[String, String],
actual: scala.collection.Map[String, String]): Unit = {
// remove location and comment that are automatically added by HMS unless they are expected
val toRemove = V2SessionCatalog.RESERVED_PROPERTIES.filter(expected.contains)
assert(expected -- toRemove === actual)
}
test("listNamespaces: basic behavior") {
val catalog = newCatalog()
catalog.createNamespace(testNs, Map("property" -> "value").asJava)
assert(catalog.listNamespaces() === Array(testNs, defaultNs))
assert(catalog.listNamespaces(Array()) === Array(testNs, defaultNs))
assert(catalog.listNamespaces(testNs) === Array())
catalog.dropNamespace(testNs)
}
test("listNamespaces: fail if missing namespace") {
val catalog = newCatalog()
assert(catalog.namespaceExists(testNs) === false)
val exc = intercept[NoSuchNamespaceException] {
assert(catalog.listNamespaces(testNs) === Array())
}
assert(exc.getMessage.contains(testNs.quoted))
assert(catalog.namespaceExists(testNs) === false)
}
test("loadNamespaceMetadata: fail missing namespace") {
val catalog = newCatalog()
val exc = intercept[NoSuchNamespaceException] {
catalog.loadNamespaceMetadata(testNs)
}
assert(exc.getMessage.contains(testNs.quoted))
}
test("loadNamespaceMetadata: non-empty metadata") {
val catalog = newCatalog()
assert(catalog.namespaceExists(testNs) === false)
catalog.createNamespace(testNs, Map("property" -> "value").asJava)
val metadata = catalog.loadNamespaceMetadata(testNs)
assert(catalog.namespaceExists(testNs) === true)
checkMetadata(metadata.asScala, Map("property" -> "value"))
catalog.dropNamespace(testNs)
}
test("loadNamespaceMetadata: empty metadata") {
val catalog = newCatalog()
assert(catalog.namespaceExists(testNs) === false)
catalog.createNamespace(testNs, emptyProps)
val metadata = catalog.loadNamespaceMetadata(testNs)
assert(catalog.namespaceExists(testNs) === true)
checkMetadata(metadata.asScala, emptyProps.asScala)
catalog.dropNamespace(testNs)
}
test("createNamespace: basic behavior") {
val catalog = newCatalog()
val expectedPath = sqlContext.sessionState.catalog.getDefaultDBPath(testNs(0)).toString
catalog.createNamespace(testNs, Map("property" -> "value").asJava)
assert(expectedPath === spark.catalog.getDatabase(testNs(0)).locationUri.toString)
assert(catalog.namespaceExists(testNs) === true)
val metadata = catalog.loadNamespaceMetadata(testNs).asScala
checkMetadata(metadata, Map("property" -> "value"))
assert(expectedPath === metadata("location"))
catalog.dropNamespace(testNs)
}
test("createNamespace: initialize location") {
val catalog = newCatalog()
val expectedPath = "file:/tmp/db.db"
catalog.createNamespace(testNs, Map("location" -> expectedPath).asJava)
assert(expectedPath === spark.catalog.getDatabase(testNs(0)).locationUri.toString)
assert(catalog.namespaceExists(testNs) === true)
val metadata = catalog.loadNamespaceMetadata(testNs).asScala
checkMetadata(metadata, Map.empty)
assert(expectedPath === metadata("location"))
catalog.dropNamespace(testNs)
}
test("createNamespace: fail if namespace already exists") {
val catalog = newCatalog()
catalog.createNamespace(testNs, Map("property" -> "value").asJava)
val exc = intercept[NamespaceAlreadyExistsException] {
catalog.createNamespace(testNs, Map("property" -> "value2").asJava)
}
assert(exc.getMessage.contains(testNs.quoted))
assert(catalog.namespaceExists(testNs) === true)
checkMetadata(catalog.loadNamespaceMetadata(testNs).asScala, Map("property" -> "value"))
catalog.dropNamespace(testNs)
}
test("createNamespace: fail nested namespace") {
val catalog = newCatalog()
// ensure the parent exists
catalog.createNamespace(Array("db"), emptyProps)
val exc = intercept[IllegalArgumentException] {
catalog.createNamespace(Array("db", "nested"), emptyProps)
}
assert(exc.getMessage.contains("Invalid namespace name: db.nested"))
catalog.dropNamespace(Array("db"))
}
test("createTable: fail if namespace does not exist") {
val catalog = newCatalog()
assert(catalog.namespaceExists(testNs) === false)
val exc = intercept[NoSuchNamespaceException] {
catalog.createTable(testIdent, schema, Array.empty, emptyProps)
}
assert(exc.getMessage.contains(testNs.quoted))
assert(catalog.namespaceExists(testNs) === false)
}
test("dropNamespace: drop missing namespace") {
val catalog = newCatalog()
assert(catalog.namespaceExists(testNs) === false)
val ret = catalog.dropNamespace(testNs)
assert(ret === false)
}
test("dropNamespace: drop empty namespace") {
val catalog = newCatalog()
catalog.createNamespace(testNs, emptyProps)
assert(catalog.namespaceExists(testNs) === true)
val ret = catalog.dropNamespace(testNs)
assert(ret === true)
assert(catalog.namespaceExists(testNs) === false)
}
test("dropNamespace: fail if not empty") {
val catalog = newCatalog()
catalog.createNamespace(testNs, Map("property" -> "value").asJava)
catalog.createTable(testIdent, schema, Array.empty, emptyProps)
val exc = intercept[IllegalStateException] {
catalog.dropNamespace(testNs)
}
assert(exc.getMessage.contains(testNs.quoted))
assert(catalog.namespaceExists(testNs) === true)
checkMetadata(catalog.loadNamespaceMetadata(testNs).asScala, Map("property" -> "value"))
catalog.dropTable(testIdent)
catalog.dropNamespace(testNs)
}
test("alterNamespace: basic behavior") {
val catalog = newCatalog()
catalog.createNamespace(testNs, Map("property" -> "value").asJava)
catalog.alterNamespace(testNs, NamespaceChange.setProperty("property2", "value2"))
checkMetadata(
catalog.loadNamespaceMetadata(testNs).asScala,
Map("property" -> "value", "property2" -> "value2"))
catalog.alterNamespace(testNs,
NamespaceChange.removeProperty("property2"),
NamespaceChange.setProperty("property3", "value3"))
checkMetadata(
catalog.loadNamespaceMetadata(testNs).asScala,
Map("property" -> "value", "property3" -> "value3"))
catalog.alterNamespace(testNs, NamespaceChange.removeProperty("property3"))
checkMetadata(
catalog.loadNamespaceMetadata(testNs).asScala,
Map("property" -> "value"))
catalog.dropNamespace(testNs)
}
test("alterNamespace: update namespace location") {
val catalog = newCatalog()
val initialPath = sqlContext.sessionState.catalog.getDefaultDBPath(testNs(0)).toString
val newPath = "file:/tmp/db.db"
catalog.createNamespace(testNs, emptyProps)
assert(initialPath === spark.catalog.getDatabase(testNs(0)).locationUri.toString)
catalog.alterNamespace(testNs, NamespaceChange.setProperty("location", newPath))
assert(newPath === spark.catalog.getDatabase(testNs(0)).locationUri.toString)
catalog.dropNamespace(testNs)
}
test("alterNamespace: update namespace comment") {
val catalog = newCatalog()
val newComment = "test db"
catalog.createNamespace(testNs, emptyProps)
assert(spark.catalog.getDatabase(testNs(0)).description.isEmpty)
catalog.alterNamespace(testNs, NamespaceChange.setProperty("comment", newComment))
assert(newComment === spark.catalog.getDatabase(testNs(0)).description)
catalog.dropNamespace(testNs)
}
test("alterNamespace: fail if namespace doesn't exist") {
val catalog = newCatalog()
assert(catalog.namespaceExists(testNs) === false)
val exc = intercept[NoSuchNamespaceException] {
catalog.alterNamespace(testNs, NamespaceChange.setProperty("property", "value"))
}
assert(exc.getMessage.contains(testNs.quoted))
}
test("alterNamespace: fail to remove location") {
val catalog = newCatalog()
catalog.createNamespace(testNs, emptyProps)
val exc = intercept[UnsupportedOperationException] {
catalog.alterNamespace(testNs, NamespaceChange.removeProperty("location"))
}
assert(exc.getMessage.contains("Cannot remove reserved property: location"))
catalog.dropNamespace(testNs)
}
test("alterNamespace: fail to remove comment") {
val catalog = newCatalog()
catalog.createNamespace(testNs, Map("comment" -> "test db").asJava)
val exc = intercept[UnsupportedOperationException] {
catalog.alterNamespace(testNs, NamespaceChange.removeProperty("comment"))
}
assert(exc.getMessage.contains("Cannot remove reserved property: comment"))
catalog.dropNamespace(testNs)
}
}
| pgandhi999/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalogSuite.scala | Scala | apache-2.0 | 32,712 |
package util
import org.scalatest._
/**
* Test for PasswordDigestUtil.
*/
class PasswordDigestUtilSpec extends FunSpec with Matchers {
describe("createHashPassword") {
it("same value") {
val baseDate = CurrentDateUtil.nowDateTime
val beforeStr = PasswordDigestUtil.createHashPassword("hoge", baseDate)
val afterStr = PasswordDigestUtil.createHashPassword("hoge", baseDate)
beforeStr should be(afterStr)
}
}
}
| nemuzuka/vss-kanban | src/test/scala/util/PasswordDigestUtilSpec.scala | Scala | mit | 455 |
package de.dfki.cps.specific.sysml
trait TypedElement extends Element {
val typeAnnotation: TypeAnnotation
}
object Types {
abstract class Classifier(val name: String) extends Type with Namespace
abstract class DataType(name: String) extends Classifier(name)
case object Null extends DataType("nullType") {
def members = Seq.empty
}
sealed abstract class PrimitiveType[R](name: String) extends DataType(name) {
def members = Seq.empty
}
/**
* An instance of Integer is a value in the (infinite) set of integers
* (...-2, -1, 0, 1, 2...).
*/
case object Integer extends PrimitiveType[BigInt]("Integer")
/**
* An instance of Boolean is one of the predefined values true and false.
*/
case object Boolean extends PrimitiveType[scala.Boolean]("Boolean")
/**
* An instance of String defines a sequence of characters. Character sets may
* include non-Roman alphabets. The semantics of the string itself depends on
* its purpose; it can be a comment, computational language expression,
* OCL expression, etc.
*/
case object String extends PrimitiveType[java.lang.String]("String")
/**
* An instance of UnlimitedNatural is a value in the (infinite) set of
* natural numbers (0, 1, 2...) plus unlimited. The value of unlimited is
* shown using an asterisk (‘*’). UnlimitedNatural values are typically used
* to denote the upper bound of a range, such as a multiplicity; unlimited is
* used whenever the range is specified to have no upper bound.
*/
case object UnlimitedNatural extends
PrimitiveType[UnlimitedNatural]("UnlimitedNatural")
/**
* An instance of Real is a value in the (infinite) set of real numbers.
* Typically an implementation will internally represent Real numbers using a
* floating point standard such as ISO/IEC/IEEE 60559:2011 (whose content is
* identical to the predecessor IEEE 754 standard).
*/
case object Real extends PrimitiveType[BigDecimal]("Real")
case object Unit extends PrimitiveType[Unit]("Unit")
}
| DFKI-CPS/specific-sysml | src/main/scala/de/dfki/cps/specific/sysml/Types.scala | Scala | mit | 2,072 |
package org.jetbrains.plugins.scala
package codeInspection
package typeLambdaSimplify
import com.intellij.codeInspection._
import com.intellij.openapi.project.Project
import com.intellij.psi.PsiElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.base.types._
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScTypeAliasDefinition
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.PsiTypeParameterExt
import org.jetbrains.plugins.scala.lang.psi.api.{ScalaElementVisitor, ScalaFile}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.createTypeElementFromText
import org.jetbrains.plugins.scala.lang.psi.types.ScSubstitutor
import org.jetbrains.plugins.scala.lang.psi.{ScalaPsiElement, ScalaPsiUtil}
/**
* Inspection to simplify a type like:
*
* ({type l[a] = Either[String, a]})#l[Int]
*
* to
*
* Either[String, Int]
*
* Such a type can appear after override/implement when using type lambdas.
*
* For example:
* {{{
* trait Parent[M[_]] { def abstracto(m: M[Int]) }
*
* trait Child1 extends Parent[({type l[a]=Either[String,a]})#l] {
* // implement methods
* }
* }}}
*/
class AppliedTypeLambdaCanBeSimplifiedInspection extends LocalInspectionTool {
override def isEnabledByDefault: Boolean = true
override def getID: String = "ScalaAppliedTypeLambdaCanBeSimplified"
override def getDisplayName: String = InspectionBundle.message("applied.type.lambda.can.be.simplified")
override def buildVisitor(holder: ProblemsHolder, isOnTheFly: Boolean): PsiElementVisitor = {
if (!holder.getFile.isInstanceOf[ScalaFile]) return PsiElementVisitor.EMPTY_VISITOR
def addInfo(paramType: ScParameterizedTypeElement, replacementText: => String) = {
val fixes = Array[LocalQuickFix](new SimplifyAppliedTypeLambdaQuickFix(paramType, replacementText))
val problem = holder.getManager.createProblemDescriptor(paramType, getDisplayName, isOnTheFly,
fixes, ProblemHighlightType.GENERIC_ERROR_OR_WARNING)
holder.registerProblem(problem)
}
def inspectTypeProjection(typeProjection: ScTypeProjection, paramType: ScParameterizedTypeElement) = {
typeProjection.typeElement match {
case parenType: ScParenthesisedTypeElement => parenType.typeElement match {
case Some(ct: ScCompoundTypeElement) =>
(ct.components, ct.refinement) match {
case (Seq(), Some(refinement)) =>
(refinement.holders, refinement.types) match {
case (Seq(), Seq(typeAliasDefinition: ScTypeAliasDefinition)) =>
val name1 = typeProjection.nameId
val name2 = typeAliasDefinition.nameId
if (name1.getText == name2.getText) {
val params = typeAliasDefinition.typeParameters
val typeArgs = paramType.typeArgList.typeArgs
if (params.length == typeArgs.length) {
def simplified(): String = {
val aliased = typeAliasDefinition.aliasedType.getOrAny
val subst = params.zip(typeArgs).foldLeft(ScSubstitutor.empty) {
case (res, (param, arg)) =>
res.bindT(param.nameAndId, arg.calcType)
}
val substituted = subst.subst(aliased)
substituted.presentableText
}
addInfo(paramType, simplified())
}
}
case _ =>
}
case _ =>
}
case _ =>
}
case _ =>
}
}
new ScalaElementVisitor {
override def visitElement(elem: ScalaPsiElement): Unit = elem match {
case paramType: ScParameterizedTypeElement => paramType.typeElement match {
case typeProjection: ScTypeProjection => inspectTypeProjection(typeProjection, paramType)
case typeLambda: ScParameterizedTypeElement if ScalaPsiUtil.kindProjectorPluginEnabled(paramType) =>
//def a: λ[A => (A, A)][String]
// can be transformed into
//def a: (String, String)
typeLambda.computeDesugarizedType match {
case Some(typeProjection: ScTypeProjection) =>
inspectTypeProjection(typeProjection, paramType)
case _ =>
}
case _ =>
}
case _ =>
}
}
}
}
class SimplifyAppliedTypeLambdaQuickFix(paramType: ScParameterizedTypeElement, replacement: => String)
extends AbstractFixOnPsiElement(InspectionBundle.message("simplify.type"), paramType) {
def doApplyFix(project: Project): Unit = {
getElement.replace(createTypeElementFromText(replacement)(getElement.getManager))
}
}
| ilinum/intellij-scala | src/org/jetbrains/plugins/scala/codeInspection/typeLambdaSimplify/AppliedTypeLambdaCanBeSimplifiedInspection.scala | Scala | apache-2.0 | 4,870 |
package dao
import model.{Count, Record, RegistrationInfo}
import org.scalatest.{FlatSpecLike, ShouldMatchers}
/**
* Created by Scott on 9/6/16.
*/
class DAOmock extends DAO with FlatSpecLike with ShouldMatchers {
def insert(r:Record):Unit = {
r.density shouldBe > (0)
r.latitude shouldBe > (-180.0)
r.longitude shouldBe > (-180.0)
r.latitude shouldBe < (180.0)
r.longitude shouldBe < (180.0)
r.user_id shouldBe > (0)
}
def insertUser(registrationInfo: RegistrationInfo): Unit = {
registrationInfo.validate shouldEqual true
}
def getPasswdHash(user:String):Option[String] = Some("123456")
def getCount:Count = Count(0,0)
def getUserIDbyName(username:String):Int = 1
def getRecordByUserID(id: Int):List[model.Record] = Nil
def getRecordByUserID(id:Int, limit:Int):List[model.Record] = Nil
}
| GreenHunan/aircheck-server | test/dao/DAOmock.scala | Scala | gpl-2.0 | 844 |
package rpm4s.data
import rpm4s.codecs.ConvertingError
import cats.implicits._
case class EVR(
version: Version,
release: Option[Release] = None,
epoch: Epoch = Epoch.ZERO) {
def string: String = {
val e = if (epoch == Epoch.ZERO) "" else s"${epoch.value}:"
val r = release.map(r => s"-${r.value}").getOrElse("")
s"$e${version.string}$r"
}
}
object EVR {
implicit val ordering: Ordering[EVR] = new Ordering[EVR] {
override def compare(x: EVR, y: EVR): Int = {
val epochCmp = Ordering[Epoch].compare(x.epoch, y.epoch)
if (epochCmp != 0) {
epochCmp
} else {
val versionCmp = Ordering[Version].compare(x.version, y.version)
if (versionCmp != 0) versionCmp
else {
val releaseCmp = Ordering[Option[Release]].compare(x.release, y.release)
releaseCmp
}
}
}
}
//TODO: more validation for individual parts
//TODO: Document format
def parse(evr: String): Either[ConvertingError, EVR] = {
// we split at the last - which is consistent with how rpm does it
val relIdx = evr.lastIndexOf("-")
val (release, ev) = if (relIdx == -1) {
(None, evr)
} else {
(Some(evr.substring(relIdx + 1)), evr.substring(0, relIdx))
}
val epochIdx = ev.indexOf(":")
val (epoch, version) = if (epochIdx == -1) {
(None, ev)
} else {
(Some(ev.substring(0, epochIdx)), ev.substring(epochIdx + 1))
}
for {
ver <- Version.fromString(version)
rel <- release match {
case Some(r) => Release.fromString(r).map(Some(_))
case None => Right(None)
}
ep <- epoch match {
case Some(e) => Epoch.fromString(e)
case None => Right(Epoch.ZERO)
}
} yield EVR(ver, rel, ep)
}
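// Illustrative expectations, as a sketch only; this assumes Version.fromString,
// Release.fromString and Epoch.fromString accept these plain values, which is not
// shown in this file:
//   EVR.parse("1.2.3")          // Right(EVR(version "1.2.3", release = None, epoch = 0))
//   EVR.parse("2:1.2.3-4.el7")  // Right(EVR(version "1.2.3", release = Some("4.el7"), epoch = 2))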
}
| lucidd/rpm4s | shared/src/main/scala/rpm4s/data/EVR.scala | Scala | mit | 1,793 |
/**
* Copyright (C) 2011 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms
// All Ehcache-based caches for XForms support.
object Caches {
val stateCache = org.orbeon.oxf.cache.Caches.getOrElseThrow("xforms.state")
val resourcesCache = org.orbeon.oxf.cache.Caches.getOrElseThrow("xforms.resources")
}
|
brunobuzzi/orbeon-forms
|
xforms/jvm/src/main/scala/org/orbeon/oxf/xforms/Caches.scala
|
Scala
|
lgpl-2.1
| 912
|
package com.socrata.spandex.common.client
import scala.language.implicitConversions
import org.elasticsearch.action.support.WriteRequest
sealed trait RefreshPolicy
object RefreshPolicy {
implicit def toWriteRequestRefreshPolicy(policy: RefreshPolicy): WriteRequest.RefreshPolicy =
policy match {
case BeforeReturning => WriteRequest.RefreshPolicy.WAIT_UNTIL
case Eventually => WriteRequest.RefreshPolicy.NONE
case Immediately => WriteRequest.RefreshPolicy.IMMEDIATE
}
implicit def toDeleteRequestRefreshPolicy(policy: RefreshPolicy): Boolean =
policy match {
case BeforeReturning => true
case Immediately => true
case Eventually => false
}
}
case object Immediately extends RefreshPolicy
case object BeforeReturning extends RefreshPolicy
case object Eventually extends RefreshPolicy
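// A minimal sketch (not part of the original file) of how the implicit conversions
// above are meant to be used: ascribing the target type triggers the conversion.
object RefreshPolicyExample {
  def main(args: Array[String]): Unit = {
    val esPolicy: WriteRequest.RefreshPolicy = BeforeReturning // WAIT_UNTIL
    val deleteWaits: Boolean = Immediately                     // true
    println(esPolicy)
    println(deleteWaits)
  }
}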
|
socrata-platform/spandex
|
spandex-common/src/main/scala/com/socrata/spandex/common/client/RefreshPolicy.scala
|
Scala
|
apache-2.0
| 845
|
package org.openurp.edu.base.ws.code
import org.openurp.edu.base.code.model.{ CourseCategory, CourseType, ExamMode, ExamStatus, StdLabel, StdLabelType, StdType }
import org.openurp.code.edu.model.StudentStatus
class StdLabelWS extends AbstractWS[StdLabel]
class StdLabelTypeWS extends AbstractWS[StdLabelType]
class StdTypeWS extends AbstractWS[StdType]
class StdStatusWS extends AbstractWS[StudentStatus]
class CourseTypeWS extends AbstractWS[CourseType]
class CourseCategoryWS extends AbstractWS[CourseCategory]
class ExamModeWS extends AbstractWS[ExamMode]
class ExamStatusWS extends AbstractWS[ExamStatus]
|
openurp/edu-core
|
base/ws/src/main/scala/org/openurp/edu/base/ws/code/school.scala
|
Scala
|
gpl-3.0
| 618
|
package ildl
package benchmark
package gcd
import org.scalameter.api._
import org.scalameter.DSL._
//
// You can read about this benchmark on the following wiki page:
// https://github.com/miniboxing/ildl-plugin/wiki/Sample-%7E-Data-Encoding
//
/** The benchmark object */
object BenchmarkRunner extends PerformanceTest.Microbenchmark {
//
// The benchmark object. This object is the entry point into the current
// benchmark and customizes the ScalaMeter configuration.
//
// **Note:** In the ScalaIDE, some of the benchmarked methods will appear
// as not found. This is expected, and occurs since the presentation compiler
// (the fast one which performs syntax highlighting and quick error checking)
// is a stripped-down version of the Scala compiler and does not allow the
// ildl-plugin to transform the program before the typer phase (in the
  // `post-parser` phase). Nevertheless, compiling and running occur correctly.
//
// make sure we're running on the correct setup:
Platform.checkCompatibility()
import GreatestCommonDivisor._
  // For the meanings of the 4 labels, please see [[ildl.benchmark.gcd.GreatestCommonDivisor]]
val bench = Gen.enumeration("bench")("direct", "adrt_1", "adrt_2", "adrt_3")
override def aggregator = Aggregator.average
var data: List[(Double, Double)] = _
var slope: Double = _
var offset: Double = _
val eps = 1E-6
for (interp <- Seq(false))
measure method "gcd" in {
using(Gen.tupled(bench, Gen.single("jvm_interpreter")(interp))) config (
exec.independentSamples -> 5,
// you may notice that increasing the number of benchmark runs tends
// to even out the numbers (for 100 runs):
// ```
// Parameters(bench -> direct, jvm_interpreter -> false): 8.921889749999998
// Parameters(bench -> adrt_1, jvm_interpreter -> false): 2.9439812500000007
// Parameters(bench -> adrt_2, jvm_interpreter -> false): 5.823887280000001
// Parameters(bench -> adrt_3, jvm_interpreter -> false): 2.01347449
// ```
// We traced this to the fact that scalameter drops the runs where garbage
// collection occurs:
// ```
// Some GC time recorded, accepted: 20, ignored: 33
// ```
// This gives the non-transformed code an unfair advantage over the transformed
// code and thus skews the benchmark.
exec.benchRuns -> 20,
exec.jvmflags -> ("-Xmx100m -Xms100m " /* + "-verbose:gc " */ + flags(interp))
) setUp {
_ =>
// Result correctness checks:
val r1 = (10, 3)
val r2 = gcd_direct((544,185), (131,181))
// Note: It is expected that the method appears as "not found" in the IDE:
val r3 = gcd_adrt_1((544,185), (131,181))
val r4 = gcd_adrt_2((544,185), (131,181))
val r5 = gcd_adrt_3((544,185), (131,181))
assert(r2 == r1, r2.toString)
assert(r3 == r1, r3.toString)
assert(r4 == r1, r4.toString)
assert(r5 == r1, r5.toString)
System.gc()
} in {
case (bench, _) =>
// print("starting ")
// println(bench)
bench match {
case "direct" =>
var i = 10000
while (i > 0) {
gcd_direct((544,185), (131,181))
i -= 1
}
case "adrt_1" =>
var i = 10000
while (i > 0) {
// Note: It is expected that the method appears as "not found" in the IDE:
gcd_adrt_1((544,185), (131,181))
i -= 1
}
case "adrt_2" =>
var i = 10000
while (i > 0) {
// Note: It is expected that the method appears as "not found" in the IDE:
gcd_adrt_2((544,185), (131,181))
i -= 1
}
case "adrt_3" =>
var i = 10000
while (i > 0) {
// Note: It is expected that the method appears as "not found" in the IDE:
gcd_adrt_3((544,185), (131,181))
i -= 1
}
}
// print("stopped ")
// println(bench)
}
}
def flags(interp: Boolean): String = interp match {
case true => "-Xint"
case false => ""
}
}
|
miniboxing/ildl-plugin
|
tests/benchmarks/src/ildl/benchmark/gcd/Benchmark.scala
|
Scala
|
bsd-3-clause
| 4,421
|
package models
import play.api.db._
import play.api.Play.current
case class Task(id: Long, label: String)
object Task {
import anorm._
import anorm.SqlParser._
val parser: RowParser[Task] ={
long("id") ~ str("label") map {
case id ~ label => Task(id, label)
}
}
def all(todo: Todo): List[Task] = DB.withConnection { implicit c =>
SQL("select * from task where list_id = {list_id}")
.on('list_id -> todo.id)
.as(Task.parser.*)
}
def create(label: String, todo: Todo) = DB.withConnection { implicit c =>
SQL("""insert into task (label, list_id)
values ({label}, {list_id})""")
.on('label -> label, 'list_id -> todo.id)
.executeUpdate()
}
def delete(id: Long, todo: Todo) = DB.withConnection { implicit c =>
SQL("delete from task where id = {id} and list_id = list_id")
.on('id -> id, 'list_id -> todo.id)
.executeUpdate()
}
}
|
altos-research/todo
|
app/models/Task.scala
|
Scala
|
mit
| 924
|
package com.tuvistavie.xserver.backend.model
import com.tuvistavie.xserver.protocol.request.QueryExtensionRequest
import com.tuvistavie.xserver.protocol.reply.QueryExtensionReply
class Extension (
val majorOpcode: Int,
val firstEvent: Int,
val firstError: Int
)
object Extension {
private var availableExtensions: Map[String, Extension] = Map()
def register(name: String, extension: Extension) = availableExtensions += (name -> extension)
def getExtension(name: String) = availableExtensions(name)
def isAvailable(name: String) = availableExtensions.contains(name)
def generateQueryExtensionReply(request: QueryExtensionRequest, sequenceNumber: Int) = {
val name = request.name
if(isAvailable(name)) {
val ext = getExtension(name)
QueryExtensionReply(sequenceNumber, true, ext.majorOpcode, ext.firstEvent, ext.firstError)
} else QueryExtensionReply(sequenceNumber, false, 0, 0, 0)
}
}
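// A small usage sketch (not part of the original file); the extension name and the
// opcode/event/error numbers are made up purely for illustration.
object ExtensionExample {
  def main(args: Array[String]): Unit = {
    Extension.register("BIG-REQUESTS", new Extension(majorOpcode = 133, firstEvent = 0, firstError = 0))
    println(Extension.isAvailable("BIG-REQUESTS"))               // true
    println(Extension.getExtension("BIG-REQUESTS").majorOpcode)  // 133
  }
}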
|
tuvistavie/scala-x-server
|
backend/src/main/scala/com/tuvistavie/xserver/model/Extensions.scala
|
Scala
|
mit
| 934
|
package me.dribba.actors
import java.util.Date
import akka.actor._
import me.dribba.components.{DigitalSensorComponent, GrowBedComponent}
import me.dribba.models.{Status, DigitalSensorStatus}
import me.dribba.providers.{FlushingTookTooLong, SensorTookTooLong, GrowBedTimeoutMessage, GrowBedTimeoutProvider}
class GrowBedActor(
supervisor: ActorRef,
pumpActor: ActorRef,
growBed: GrowBedComponent,
flushSensorFactory: (ActorRef) => DigitalSensorComponent,
timeoutProvider: GrowBedTimeoutProvider
) extends Actor with ActorLogging {
import context._
override def preStart() = {
    growBed.turnWaterOff() // just in case it hung up trying to flush
flushSensor = flushSensorFactory(self)
}
var flushSensor: DigitalSensorComponent = null
def outOfService: Receive = {
case _ => sender() ! OutOfService
}
def receive = {
case m: GrowBedMessage => nonFlushingGrowBedMessages(m)
case m: DigitalSensorStatus =>
log.info("Got sensor status({}) while not flushing", m)
}
def flushing(timeout: Cancellable, lastOn: Option[Long]): Receive = {
case m: GrowBedMessage =>
flushingGrowBedMessages(m)
case m: GrowBedTimeoutMessage => m match {
case SensorTookTooLong | FlushingTookTooLong =>
// SensorTookTooLong: Water took too long to reach the sensor.
        // FlushingTookTooLong: Water is not flushing from the grow bed, or not flushing fast enough.
growBed.turnWaterOff()
pumpActor ! TurnPumpOff
log.info("Bed {} is going OOS because of {}", self.path.toString, m)
supervisor ! OutOfService
become(outOfService)
}
case DigitalSensorStatus(status) =>
status match {
case Status.On =>
// Level of water reached the sensor
timeout.cancel()
become(flushing(timeoutProvider.flushTimeout, Some(System.currentTimeMillis())))
case Status.Off =>
// TODO: Move this to the sensor logic
lastOn.map(System.currentTimeMillis() - _) match {
case Some(millis) if millis > 800 =>
// Bed is flushing
log.info("Sensor triggered off, last on: {} millis ago", millis)
timeout.cancel()
growBed.turnWaterOff()
pumpActor ! TurnPumpOff
log.info("Finished flushing in bed {}", self.path.toString)
become(receive)
case _ =>
log.info("Sensor triggered too fast")
}
}
}
val nonFlushingGrowBedMessages: PartialFunction[GrowBedMessage, Unit] = {
case Flush =>
log.info("Started flushing in bed {}", self.path.toString)
pumpActor ! TurnPumpOn
growBed.turnWaterOn()
become(flushing(timeoutProvider.sensorTimeout, None))
}
val flushingGrowBedMessages: PartialFunction[GrowBedMessage, Unit] = {
case Flush =>
log.warning("Got Flush message when already flushing, from: {}", sender().path.toString)
sender() ! AlreadyFlushing
}
}
object GrowBedActor {
def props(supervisor: ActorRef, pumpActor: ActorRef, growBed: GrowBedComponent,
flushSensorFactory: (ActorRef) => DigitalSensorComponent,
timeoutProvider: GrowBedTimeoutProvider) =
Props(classOf[GrowBedActor], supervisor, pumpActor, growBed, flushSensorFactory, timeoutProvider)
}
sealed trait GrowBedMessage
object Flush extends GrowBedMessage
object AlreadyFlushing extends GrowBedMessage
object OutOfService extends GrowBedMessage
|
dribba/akkaponics
|
src/main/scala/me/dribba/actors/GrowBedActor.scala
|
Scala
|
mit
| 3,489
|
/**
* Copyright 2012-2013 greencheek.org (www.greencheek.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.greencheek.jms.yankeedo.scenarioexecution.producer.message
import akka.camel.CamelMessage
/**
* User: dominictootell
* Date: 06/01/2013
* Time: 20:21
*/
trait CamelMessageSource {
def getMessage : CamelMessage
}
|
tootedom/yankeedo
|
yankeedo-core/src/main/scala/org/greencheek/jms/yankeedo/scenarioexecution/producer/message/CamelMessageSource.scala
|
Scala
|
apache-2.0
| 855
|
/**
* Copyright 2013, 2014, 2016 Gianluca Amato <gamato@unich.it>
*
* This file is part of JANDOM: JVM-based Analyzer for Numerical DOMains
* JANDOM is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JANDOM is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of a
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with JANDOM. If not, see <http://www.gnu.org/licenses/>.
*/
package it.unich.jandom.parsers
import scala.util.parsing.combinator.JavaTokenParsers
import it.unich.jandom.domains.numerical.LinearForm
import spire.math.Rational
/**
* A trait for parsing linear forms. To be inherited by real parsers. An implementation
* should define a parser ''variable'' of type ''Parser[Int]'' and provide a variable ''env''
* of type ''Environment''. The result of the `variable` parser should be the id of the
 * variable in the environment ''env''. It provides a parser ''linform'' for linear forms.
* @author Gianluca Amato <gamato@unich.it>
*/
trait LinearFormParser extends JavaTokenParsers {
/**
* Parser for variables.
*/
protected val variable: Parser[Int]
/**
* Parser for multiplication operator.
*/
protected val mulExpr: Parser[Any] = "*"
/**
* Parser for terms.
*/
private val term: Parser[LinearForm] =
(opt(wholeNumber <~ mulExpr) ~ variable) ^^ {
case Some(coeff) ~ v => LinearForm.sparse(0, v -> Rational(coeff))
case None ~ v => LinearForm.v(v)
} |
wholeNumber ^^ { case coeff => Rational(coeff) }
private val term_with_operator: Parser[LinearForm] =
"+" ~> term |
"-" ~> term ^^ { lf => -lf }
/**
* Parser for integer linear expressions.
*/
protected val linform: Parser[LinearForm] =
(term_with_operator | term) ~ rep(term_with_operator) ^^ {
case lf1 ~ lfarr => (lf1 /: lfarr) { (lfa, lfb) => lfa + lfb }
}
}
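// A minimal sketch (not part of the original file) of a concrete parser built on
// this trait. It assumes variable ids are simply indices into a fixed name list;
// real parsers would resolve names through an Environment instead.
object SimpleLinearFormParser extends LinearFormParser {
  private val varNames = Seq("x", "y", "z")
  // Resolve an identifier to its id (its index in varNames).
  protected val variable: Parser[Int] =
    ident ^? { case name if varNames.contains(name) => varNames.indexOf(name) }
  def parseLinearForm(input: String): ParseResult[LinearForm] = parseAll(linform, input)
  // e.g. parseLinearForm("3*x + y - 2") yields the linear form 3*x + y - 2,
  // with x and y mapped to variable ids 0 and 1.
}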
|
amato-gianluca/Jandom
|
core/src/main/scala/it/unich/jandom/parsers/LinearFormParser.scala
|
Scala
|
lgpl-3.0
| 2,232
|
package com.github.gdefacci.di.sample
import com.github.gdefacci.di.IOC
object Example8 extends App {
assert(IOC.get[String](Module8, Module8A) == "Foo Bar")
}
|
gdefacci/di
|
docs/slides/src/main/scala/com/github/gdefacci/di/sample/Example8.scala
|
Scala
|
mit
| 169
|
package frameless
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.{Column, FramelessInternals}
import shapeless.ops.record.Selector
import shapeless._
import scala.annotation.implicitNotFound
sealed trait UntypedExpression[T] {
def expr: Expression
}
/** Documentation marked "apache/spark" is thanks to apache/spark Contributors
* at https://github.com/apache/spark, licensed under Apache v2.0 available at
* http://www.apache.org/licenses/LICENSE-2.0
*/
sealed class TypedColumn[T, U](
val expr: Expression)(
implicit
val uencoder: TypedEncoder[U]
) extends UntypedExpression[T] { self =>
/** From an untyped Column to a [[TypedColumn]]
*
* @param column a spark.sql Column
* @param uencoder encoder of the resulting type U
*/
def this(column: Column)(implicit uencoder: TypedEncoder[U]) {
this(FramelessInternals.expr(column))
}
/** Fall back to an untyped Column
*/
def untyped: Column = new Column(expr)
/** Equality test.
* {{{
* df.filter( df.col('a) === 1 )
* }}}
*
* apache/spark
*/
def ===(other: U): TypedColumn[T, Boolean] = {
new TypedColumn[T, Boolean](untyped === other)
}
/** Equality test.
* {{{
* df.filter( df.col('a) === df.col('b) )
* }}}
*
* apache/spark
*/
def ===(other: TypedColumn[T, U]): TypedColumn[T, Boolean] = {
new TypedColumn[T, Boolean](untyped === other.untyped)
}
/** Sum of this expression and another expression.
* {{{
* // The following selects the sum of a person's height and weight.
* people.select( people.col('height) plus people.col('weight) )
* }}}
*
* apache/spark
*/
def plus(u: TypedColumn[T, U])(implicit n: CatalystNumeric[U]): TypedColumn[T, U] =
new TypedColumn[T, U](self.untyped.plus(u.untyped))
/** Sum of this expression and another expression.
* {{{
* // The following selects the sum of a person's height and weight.
* people.select( people.col('height) + people.col('weight) )
* }}}
*
* apache/spark
*/
def +(u: TypedColumn[T, U])(implicit n: CatalystNumeric[U]): TypedColumn[T, U] = plus(u)
/** Sum of this expression (column) with a constant.
* {{{
* // The following selects the sum of a person's height and weight.
* people.select( people('height) + 2 )
* }}}
*
* @param u a constant of the same type
* apache/spark
*/
def +(u: U)(implicit n: CatalystNumeric[U]): TypedColumn[T, U] = new TypedColumn[T, U](self.untyped.plus(u))
/** Unary minus, i.e. negate the expression.
* {{{
* // Select the amount column and negates all values.
* df.select( -df('amount) )
* }}}
*
* apache/spark
*/
def unary_-(implicit n: CatalystNumeric[U]): TypedColumn[T, U] = new TypedColumn[T, U](-self.untyped)
/** Subtraction. Subtract the other expression from this expression.
* {{{
* // The following selects the difference between people's height and their weight.
* people.select( people.col('height) minus people.col('weight) )
* }}}
*
* apache/spark
*/
def minus(u: TypedColumn[T, U])(implicit n: CatalystNumeric[U]): TypedColumn[T, U] =
new TypedColumn[T, U](self.untyped.minus(u.untyped))
/** Subtraction. Subtract the other expression from this expression.
* {{{
* // The following selects the difference between people's height and their weight.
* people.select( people.col('height) - people.col('weight) )
* }}}
*
* apache/spark
*/
def -(u: TypedColumn[T, U])(implicit n: CatalystNumeric[U]): TypedColumn[T, U] = minus(u)
/** Subtraction. Subtract the other expression from this expression.
* {{{
* // The following selects the difference between people's height and their weight.
* people.select( people('height) - 1 )
* }}}
*
* @param u a constant of the same type
* apache/spark
*/
def -(u: U)(implicit n: CatalystNumeric[U]): TypedColumn[T, U] = new TypedColumn[T, U](self.untyped.minus(u))
/** Multiplication of this expression and another expression.
* {{{
* // The following multiplies a person's height by their weight.
* people.select( people.col('height) multiply people.col('weight) )
* }}}
*
* apache/spark
*/
def multiply(u: TypedColumn[T, U])(implicit n: CatalystNumeric[U]): TypedColumn[T, U] =
new TypedColumn[T, U](self.untyped.multiply(u.untyped))
/** Multiplication of this expression and another expression.
* {{{
* // The following multiplies a person's height by their weight.
* people.select( people.col('height) * people.col('weight) )
* }}}
*
* apache/spark
*/
def *(u: TypedColumn[T, U])(implicit n: CatalystNumeric[U]): TypedColumn[T, U] = multiply(u)
  /** Multiplication of this expression and a constant.
    * {{{
    *   // The following multiplies a person's height by 2.
    *   people.select( people.col('height) * 2 )
* }}}
*
* apache/spark
*/
def *(u: U)(implicit n: CatalystNumeric[U]): TypedColumn[T, U] = new TypedColumn[T, U](self.untyped.multiply(u))
/**
    * Division of this expression by another expression.
* {{{
* // The following divides a person's height by their weight.
* people.select( people('height) / people('weight) )
* }}}
*
* @param u another column of the same type
* apache/spark
*/
def divide(u: TypedColumn[T, U])(implicit n: CatalystNumeric[U]): TypedColumn[T, Double] = new TypedColumn[T, Double](self.untyped.divide(u.untyped))
/**
    * Division of this expression by another expression.
* {{{
* // The following divides a person's height by their weight.
* people.select( people('height) / people('weight) )
* }}}
*
* @param u another column of the same type
* apache/spark
*/
def /(u: TypedColumn[T, U])(implicit n: CatalystNumeric[U]): TypedColumn[T, Double] = divide(u)
/**
    * Division of this expression by a constant.
    * {{{
    *   // The following divides a person's height by 2.
* people.select( people('height) / 2 )
* }}}
*
* @param u a constant of the same type
* apache/spark
*/
def /(u: U)(implicit n: CatalystNumeric[U]): TypedColumn[T, Double] = new TypedColumn[T, Double](self.untyped.divide(u))
/** Casts the column to a different type.
* {{{
* df.select(df('a).cast[Int])
* }}}
*/
def cast[A: TypedEncoder](implicit c: CatalystCast[U, A]): TypedColumn[T, A] =
new TypedColumn(self.untyped.cast(TypedEncoder[A].targetDataType))
}
sealed trait TypedAggregate[T, A] extends UntypedExpression[T] {
def expr: Expression
def aencoder: TypedEncoder[A]
}
sealed class TypedAggregateAndColumn[T, A, U](expr: Expression)(
implicit
val aencoder: TypedEncoder[A],
uencoder: TypedEncoder[U]
) extends TypedColumn[T, U](expr) with TypedAggregate[T, A] {
def this(column: Column)(implicit aencoder: TypedEncoder[A], uencoder: TypedEncoder[U]) {
this(FramelessInternals.expr(column))
}
}
object TypedColumn {
/**
* Evidence that type `T` has column `K` with type `V`.
*/
@implicitNotFound(msg = "No column ${K} of type ${V} in ${T}")
trait Exists[T, K, V]
@implicitNotFound(msg = "No columns ${K} of type ${V} in ${T}")
trait ExistsMany[T, K <: HList, V]
object ExistsMany {
implicit def deriveCons[T, KH, KT <: HList, V0, V1](
implicit
head: Exists[T, KH, V0],
tail: ExistsMany[V0, KT, V1]
): ExistsMany[T, KH :: KT, V1] = new ExistsMany[T, KH :: KT, V1] {}
implicit def deriveHNil[T, K, V](
implicit
head: Exists[T, K, V]
): ExistsMany[T, K :: HNil, V] = new ExistsMany[T, K :: HNil, V] {}
}
object Exists {
def apply[T, V](column: Witness)(
implicit
exists: Exists[T, column.T, V]
): Exists[T, column.T, V] = exists
implicit def deriveRecord[T, H <: HList, K, V](
implicit
lgen: LabelledGeneric.Aux[T, H],
selector: Selector.Aux[H, K, V]
): Exists[T, K, V] = new Exists[T, K, V] {}
}
}
|
bamine/frameless
|
dataset/src/main/scala/frameless/TypedColumn.scala
|
Scala
|
apache-2.0
| 8,172
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.columnar
import java.sql.Timestamp
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.expressions.{AttributeMap, Attribute, AttributeReference}
import org.apache.spark.sql.types._
private[sql] class ColumnStatisticsSchema(a: Attribute) extends Serializable {
val upperBound = AttributeReference(a.name + ".upperBound", a.dataType, nullable = true)()
val lowerBound = AttributeReference(a.name + ".lowerBound", a.dataType, nullable = true)()
val nullCount = AttributeReference(a.name + ".nullCount", IntegerType, nullable = false)()
val count = AttributeReference(a.name + ".count", IntegerType, nullable = false)()
val sizeInBytes = AttributeReference(a.name + ".sizeInBytes", LongType, nullable = false)()
val schema = Seq(lowerBound, upperBound, nullCount, count, sizeInBytes)
}
private[sql] class PartitionStatistics(tableSchema: Seq[Attribute]) extends Serializable {
val (forAttribute, schema) = {
val allStats = tableSchema.map(a => a -> new ColumnStatisticsSchema(a))
(AttributeMap(allStats), allStats.map(_._2.schema).foldLeft(Seq.empty[Attribute])(_ ++ _))
}
}
/**
* Used to collect statistical information when building in-memory columns.
*
* NOTE: we intentionally avoid using `Ordering[T]` to compare values here because `Ordering[T]`
* brings significant performance penalty.
*/
private[sql] sealed trait ColumnStats extends Serializable {
protected var count = 0
protected var nullCount = 0
protected var sizeInBytes = 0L
/**
* Gathers statistics information from `row(ordinal)`.
*/
def gatherStats(row: Row, ordinal: Int): Unit = {
if (row.isNullAt(ordinal)) {
nullCount += 1
// 4 bytes for null position
sizeInBytes += 4
}
count += 1
}
/**
   * Column statistics represented as a single row, currently including the closed lower bound,
   * the closed upper bound, the null count, the total count, and the size in bytes.
*/
def collectedStatistics: Row
}
/**
* A no-op ColumnStats only used for testing purposes.
*/
private[sql] class NoopColumnStats extends ColumnStats {
override def gatherStats(row: Row, ordinal: Int): Unit = super.gatherStats(row, ordinal)
override def collectedStatistics: Row = Row(null, null, nullCount, count, 0L)
}
private[sql] class BooleanColumnStats extends ColumnStats {
protected var upper = false
protected var lower = true
override def gatherStats(row: Row, ordinal: Int): Unit = {
super.gatherStats(row, ordinal)
if (!row.isNullAt(ordinal)) {
val value = row.getBoolean(ordinal)
if (value > upper) upper = value
if (value < lower) lower = value
sizeInBytes += BOOLEAN.defaultSize
}
}
override def collectedStatistics: Row = Row(lower, upper, nullCount, count, sizeInBytes)
}
private[sql] class ByteColumnStats extends ColumnStats {
protected var upper = Byte.MinValue
protected var lower = Byte.MaxValue
override def gatherStats(row: Row, ordinal: Int): Unit = {
super.gatherStats(row, ordinal)
if (!row.isNullAt(ordinal)) {
val value = row.getByte(ordinal)
if (value > upper) upper = value
if (value < lower) lower = value
sizeInBytes += BYTE.defaultSize
}
}
override def collectedStatistics: Row = Row(lower, upper, nullCount, count, sizeInBytes)
}
private[sql] class ShortColumnStats extends ColumnStats {
protected var upper = Short.MinValue
protected var lower = Short.MaxValue
override def gatherStats(row: Row, ordinal: Int): Unit = {
super.gatherStats(row, ordinal)
if (!row.isNullAt(ordinal)) {
val value = row.getShort(ordinal)
if (value > upper) upper = value
if (value < lower) lower = value
sizeInBytes += SHORT.defaultSize
}
}
override def collectedStatistics: Row = Row(lower, upper, nullCount, count, sizeInBytes)
}
private[sql] class LongColumnStats extends ColumnStats {
protected var upper = Long.MinValue
protected var lower = Long.MaxValue
override def gatherStats(row: Row, ordinal: Int): Unit = {
super.gatherStats(row, ordinal)
if (!row.isNullAt(ordinal)) {
val value = row.getLong(ordinal)
if (value > upper) upper = value
if (value < lower) lower = value
sizeInBytes += LONG.defaultSize
}
}
override def collectedStatistics: Row = Row(lower, upper, nullCount, count, sizeInBytes)
}
private[sql] class DoubleColumnStats extends ColumnStats {
protected var upper = Double.MinValue
protected var lower = Double.MaxValue
override def gatherStats(row: Row, ordinal: Int): Unit = {
super.gatherStats(row, ordinal)
if (!row.isNullAt(ordinal)) {
val value = row.getDouble(ordinal)
if (value > upper) upper = value
if (value < lower) lower = value
sizeInBytes += DOUBLE.defaultSize
}
}
override def collectedStatistics: Row = Row(lower, upper, nullCount, count, sizeInBytes)
}
private[sql] class FloatColumnStats extends ColumnStats {
protected var upper = Float.MinValue
protected var lower = Float.MaxValue
override def gatherStats(row: Row, ordinal: Int): Unit = {
super.gatherStats(row, ordinal)
if (!row.isNullAt(ordinal)) {
val value = row.getFloat(ordinal)
if (value > upper) upper = value
if (value < lower) lower = value
sizeInBytes += FLOAT.defaultSize
}
}
override def collectedStatistics: Row = Row(lower, upper, nullCount, count, sizeInBytes)
}
private[sql] class FixedDecimalColumnStats extends ColumnStats {
protected var upper: Decimal = null
protected var lower: Decimal = null
override def gatherStats(row: Row, ordinal: Int): Unit = {
super.gatherStats(row, ordinal)
if (!row.isNullAt(ordinal)) {
val value = row(ordinal).asInstanceOf[Decimal]
if (upper == null || value.compareTo(upper) > 0) upper = value
if (lower == null || value.compareTo(lower) < 0) lower = value
sizeInBytes += FIXED_DECIMAL.defaultSize
}
}
override def collectedStatistics: Row = Row(lower, upper, nullCount, count, sizeInBytes)
}
private[sql] class IntColumnStats extends ColumnStats {
protected var upper = Int.MinValue
protected var lower = Int.MaxValue
override def gatherStats(row: Row, ordinal: Int): Unit = {
super.gatherStats(row, ordinal)
if (!row.isNullAt(ordinal)) {
val value = row.getInt(ordinal)
if (value > upper) upper = value
if (value < lower) lower = value
sizeInBytes += INT.defaultSize
}
}
override def collectedStatistics: Row = Row(lower, upper, nullCount, count, sizeInBytes)
}
private[sql] class StringColumnStats extends ColumnStats {
protected var upper: UTF8String = null
protected var lower: UTF8String = null
override def gatherStats(row: Row, ordinal: Int): Unit = {
super.gatherStats(row, ordinal)
if (!row.isNullAt(ordinal)) {
val value = row(ordinal).asInstanceOf[UTF8String]
if (upper == null || value.compareTo(upper) > 0) upper = value
if (lower == null || value.compareTo(lower) < 0) lower = value
sizeInBytes += STRING.actualSize(row, ordinal)
}
}
override def collectedStatistics: Row = Row(lower, upper, nullCount, count, sizeInBytes)
}
private[sql] class DateColumnStats extends IntColumnStats
private[sql] class TimestampColumnStats extends ColumnStats {
protected var upper: Timestamp = null
protected var lower: Timestamp = null
override def gatherStats(row: Row, ordinal: Int): Unit = {
super.gatherStats(row, ordinal)
if (!row.isNullAt(ordinal)) {
val value = row(ordinal).asInstanceOf[Timestamp]
if (upper == null || value.compareTo(upper) > 0) upper = value
if (lower == null || value.compareTo(lower) < 0) lower = value
sizeInBytes += TIMESTAMP.defaultSize
}
}
override def collectedStatistics: Row = Row(lower, upper, nullCount, count, sizeInBytes)
}
private[sql] class BinaryColumnStats extends ColumnStats {
override def gatherStats(row: Row, ordinal: Int): Unit = {
super.gatherStats(row, ordinal)
if (!row.isNullAt(ordinal)) {
sizeInBytes += BINARY.actualSize(row, ordinal)
}
}
override def collectedStatistics: Row = Row(null, null, nullCount, count, sizeInBytes)
}
private[sql] class GenericColumnStats extends ColumnStats {
override def gatherStats(row: Row, ordinal: Int): Unit = {
super.gatherStats(row, ordinal)
if (!row.isNullAt(ordinal)) {
sizeInBytes += GENERIC.actualSize(row, ordinal)
}
}
override def collectedStatistics: Row = Row(null, null, nullCount, count, sizeInBytes)
}
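// A small illustration (not part of the original file) of how the statistics above
// accumulate: feed a few rows into an IntColumnStats and read back the bounds.
private[sql] object ColumnStatsExample {
  def main(args: Array[String]): Unit = {
    val stats = new IntColumnStats
    Seq(Row(3), Row(7), Row(null)).foreach(row => stats.gatherStats(row, 0))
    // Row(lower bound, upper bound, null count, count, size in bytes)
    // => [3,7,1,3,12]: two INTs of 4 bytes each plus 4 bytes for the null position.
    println(stats.collectedStatistics)
  }
}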
|
andrewor14/iolap
|
sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnStats.scala
|
Scala
|
apache-2.0
| 9,349
|
package com.coiney.akka.rabbit.actors
import akka.actor.{Props, Actor}
import akka.testkit.{TestActorRef, TestKit}
import com.coiney.akka.rabbit.RabbitSystem
import com.typesafe.config.ConfigFactory
import org.scalatest.BeforeAndAfterAll
trait RabbitSpec {
this: TestKit with BeforeAndAfterAll =>
override def afterAll(): Unit = {
system.shutdown()
}
object EchoProbe {
def apply(): EchoProbe = new EchoProbe
def props(): Props = Props(EchoProbe())
}
class EchoProbe extends Actor {
def receive: Actor.Receive = {
case msg => sender ! msg
}
}
val config = ConfigFactory.load()
val settings = new RabbitSystem.Settings(RabbitSystem.findClassLoader(), config)
}
|
Coiney/akka-rabbit
|
akka-rabbit-core/src/test/scala/com/coiney/akka/rabbit/actors/RabbitSpec.scala
|
Scala
|
bsd-3-clause
| 712
|
/*
Copyright 2014 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding.serialization.macros.impl
import scala.language.experimental.macros
import scala.reflect.macros.Context
import scala.util.Random
import com.twitter.scalding.serialization.OrderedSerialization
import com.twitter.scalding.serialization.macros.impl.ordered_serialization._
import com.twitter.scalding.serialization.macros.impl.ordered_serialization.providers._
object OrderedSerializationProviderImpl {
def normalizedDispatcher(c: Context)(buildDispatcher: => PartialFunction[c.Type, TreeOrderedBuf[c.type]]): PartialFunction[c.Type, TreeOrderedBuf[c.type]] = {
case tpe if (!tpe.toString.contains(ImplicitOrderedBuf.macroMarker) && !(tpe.normalize == tpe)) =>
buildDispatcher(tpe.normalize)
}
def scaldingBasicDispatchers(c: Context)(buildDispatcher: => PartialFunction[c.Type, TreeOrderedBuf[c.type]]): PartialFunction[c.Type, TreeOrderedBuf[c.type]] = {
val primitiveDispatcher = PrimitiveOrderedBuf.dispatch(c)
val optionDispatcher = OptionOrderedBuf.dispatch(c)(buildDispatcher)
val eitherDispatcher = EitherOrderedBuf.dispatch(c)(buildDispatcher)
val caseClassDispatcher = CaseClassOrderedBuf.dispatch(c)(buildDispatcher)
val productDispatcher = ProductOrderedBuf.dispatch(c)(buildDispatcher)
val stringDispatcher = StringOrderedBuf.dispatch(c)
val traversablesDispatcher = TraversablesOrderedBuf.dispatch(c)(buildDispatcher)
val unitDispatcher = UnitOrderedBuf.dispatch(c)
val byteBufferDispatcher = ByteBufferOrderedBuf.dispatch(c)
OrderedSerializationProviderImpl.normalizedDispatcher(c)(buildDispatcher)
.orElse(primitiveDispatcher)
.orElse(unitDispatcher)
.orElse(optionDispatcher)
.orElse(eitherDispatcher)
.orElse(stringDispatcher)
.orElse(byteBufferDispatcher)
.orElse(traversablesDispatcher)
.orElse(caseClassDispatcher)
.orElse(productDispatcher)
}
def fallbackImplicitDispatcher(c: Context): PartialFunction[c.Type, TreeOrderedBuf[c.type]] =
ImplicitOrderedBuf.dispatch(c)
private def dispatcher(c: Context): PartialFunction[c.Type, TreeOrderedBuf[c.type]] = {
import c.universe._
def buildDispatcher: PartialFunction[c.Type, TreeOrderedBuf[c.type]] = OrderedSerializationProviderImpl.dispatcher(c)
scaldingBasicDispatchers(c)(buildDispatcher).orElse(fallbackImplicitDispatcher(c)).orElse {
case tpe: Type => c.abort(c.enclosingPosition, s"""Unable to find OrderedSerialization for type ${tpe}""")
}
}
def apply[T](c: Context)(implicit T: c.WeakTypeTag[T]): c.Expr[OrderedSerialization[T]] = {
import c.universe._
val b: TreeOrderedBuf[c.type] = dispatcher(c)(T.tpe)
val res = TreeOrderedBuf.toOrderedSerialization[T](c)(b)
//println(res)
res
}
}
|
aposwolsky/scalding
|
scalding-serialization-macros/src/main/scala/com/twitter/scalding/serialization/macros/impl/OrderedBufferableProviderImpl.scala
|
Scala
|
apache-2.0
| 3,336
|
package io.github.mandar2812.dynaml.models.neuralnets
import breeze.numerics.sigmoid
/**
* @author mandar2812
*
* Object implementing the various transfer functions.
*/
object TransferFunctions {
/**
* Hyperbolic tangent function
* */
val tansig = math.tanh _
/**
* Sigmoid/Logistic function
*
* */
val logsig = (x:Double) => sigmoid(x)
/**
* Identity Function
* */
val lin = identity _
/**
* Function which returns
* the appropriate activation
* for a given string
* */
val getActivation = (func: String) =>
func match {
case "sigmoid" => logsig
case "logsig" => logsig
case "tansig" => tansig
case "linear" => lin
}
}
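// A short usage sketch (not part of the original file) of the transfer functions above.
object TransferFunctionsExample {
  import TransferFunctions._
  def main(args: Array[String]): Unit = {
    println(logsig(0.0)) // 0.5
    println(tansig(1.0)) // ~0.7616
  }
}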
|
Koldh/DynaML
|
src/main/scala/io/github/mandar2812/dynaml/models/neuralnets/TransferFunctions.scala
|
Scala
|
apache-2.0
| 712
|
package org.scalaide.util.internal.eclipse
import scala.reflect.io.AbstractFile
import scala.tools.refactoring.common.TextChange
import org.eclipse.core.resources.IFile
import org.eclipse.jface.text.IDocument
import org.eclipse.jface.text.IRegion
import org.eclipse.jface.text.ITextSelection
import org.eclipse.jface.text.TextSelection
import org.eclipse.ltk.core.refactoring.TextFileChange
import org.eclipse.text.edits.MultiTextEdit
import org.eclipse.text.edits.RangeMarker
import org.eclipse.text.edits.ReplaceEdit
import org.eclipse.text.edits.TextEdit
import org.eclipse.text.edits.UndoEdit
import org.eclipse.ui.texteditor.ITextEditor
import org.scalaide.util.eclipse.FileUtils
import org.scalaide.util.eclipse.RegionUtils
object TextEditUtils {
def applyRefactoringChangeToEditor(change: TextChange, editor: ITextEditor): UndoEdit = {
val edit = new ReplaceEdit(change.from, change.to - change.from, change.text)
val document = editor.getDocumentProvider.getDocument(editor.getEditorInput)
edit.apply(document)
}
/** Creates a `TextFileChange` which always contains a `MultiTextEdit`. */
def createTextFileChange(file: IFile, fileChanges: List[TextChange], leaveDirty: Boolean): TextFileChange = {
new TextFileChange(file.getName(), file) {
val fileChangeRootEdit = new MultiTextEdit
fileChanges map { change =>
new ReplaceEdit(change.from, change.to - change.from, change.text)
} foreach fileChangeRootEdit.addChild
if (leaveDirty) setSaveMode(TextFileChange.LEAVE_DIRTY)
else setSaveMode(TextFileChange.KEEP_SAVE_STATE)
setEdit(fileChangeRootEdit)
}
}
/**
* Applies a list of refactoring changes to a document and its underlying file.
* In contrast to `applyChangesToFileWhileKeepingSelection` this method is UI
* independent and therefore does not restore the correct selection in the editor.
* Instead it returns the new selection which then can be handled afterwards.
*
* `None` is returned if an error occurs while writing to the underlying file.
*
* @param document The document the changes are applied to.
* @param textSelection The currently selected area of the document.
* @param file The file that we're currently editing (the document alone isn't enough because we need to get an IFile).
* @param changes The changes that should be applied.
* @param leaveDirty Whether files should be left dirty after changes
*/
def applyChangesToFile(
document: IDocument,
textSelection: ITextSelection,
file: AbstractFile,
changes: List[TextChange],
leaveDirty: Boolean = false): Option[ITextSelection] = {
FileUtils.toIFile(file) map { f =>
createTextFileChange(f, changes, leaveDirty).getEdit match {
// we know that it is a MultiTextEdit because we created it above
case edit: MultiTextEdit =>
applyMultiTextEdit(document, textSelection, edit)
}
}
}
/**
* Applies a list of refactoring changes to a document. The current selection
* (or just the caret position) is tracked and restored after applying the changes.
*
* In contrast to `applyChangesToFile` this method is UI dependent.
*
* @param document The document the changes are applied to.
* @param textSelection The currently selected area of the document.
* @param file The file that we're currently editing (the document alone isn't enough because we need to get an IFile).
* @param changes The changes that should be applied.
* @param saveAfter Whether files should be saved after changes
*/
def applyChangesToFileWhileKeepingSelection(
document: IDocument,
textSelection: ITextSelection,
file: AbstractFile,
changes: List[TextChange],
saveAfter: Boolean = true): Unit = {
applyChangesToFile(document, textSelection, file, changes, saveAfter) foreach { selection =>
org.scalaide.util.eclipse.EditorUtils.doWithCurrentEditor { _.selectAndReveal(selection.getOffset(), selection.getLength()) }
}
}
/**
   * Non-UI logic that applies an `edit` to the underlying `document`.
* `textSelection` is the selection that should be preserved by this method.
*
* Returns a new text selection that describes the selection after the edit is
* applied.
*/
def applyMultiTextEdit(document: IDocument, textSelection: ITextSelection, edit: MultiTextEdit): ITextSelection = {
import RegionUtils._
val selStart = textSelection.start
val selLen = textSelection.length
val selEnd = textSelection.end
/**
* Checks if the selection overlaps with `region`.
*/
def selectionOverlapsRegion(region: IRegion): Boolean = {
val rStart = region.start
val rEnd = region.end
!(selStart < rStart && selEnd < rStart || selStart > rEnd && selEnd > rEnd)
}
/**
* Handles the case that the selection does not overlap with one of the
* regions.
*/
def handleNonOverlap = {
val currentPosition = new RangeMarker(selStart, selLen)
edit.addChild(currentPosition)
edit.apply(document)
new TextSelection(document, currentPosition.start, currentPosition.length)
}
/**
* Handles the case that the selection overlaps with some of the regions. We
* have to preserve the selection manually and can't rely on the behavior of
* `MultiTextEdit`.
*/
def handleOverlap(overlappingEdit: TextEdit) = {
val (newOffset, newLen) = {
val rStart = overlappingEdit.start
val rLen = overlappingEdit.length
val rEnd = overlappingEdit.end
def offsetInIntersection = rLen-(selStart-rStart)
/**
* In an overlapping region we either have to expand or shrink the
* selection. Furthermore, the selection needs only to be adjusted for
* changes that happen before its position whereas the changes
* afterwards don't affect its position. In case the selection
* intersects with a changed region there is only a subset of the
* whole region needed for which the selection needs to be moved
* forwards or backwards. This subset is described by
* `overlapToPreserve`.
*/
def adjustOffset(overlapToPreserve: Int) = {
val lenAfterSelection = edit.getChildren().collect {
case e if e.start > selStart =>
e match {
case e: ReplaceEdit => e.length-e.getText().length
case e => e.length
}
}.sum
val originalLength = document.length
edit.apply(document)
val modifiedLength = document.length-originalLength
selStart+modifiedLength+lenAfterSelection+overlapToPreserve
}
// ^ = selStart/selEnd, [ = rStart, ] = rEnd
// Don't need to be handled here:
// - case 1: ^ ^ [ ], ^ [ ]
// - case 6: [ ] ^ ^, [ ] ^
// case 2: ^ [ ^ ]
if (selStart < rStart && selEnd < rEnd)
(adjustOffset(0), selLen-(selEnd-rStart))
// case 3: ^ [ ] ^
else if (selStart < rStart && selEnd >= rEnd) {
val sub = overlappingEdit match {
case e: ReplaceEdit => e.length-e.getText().length
case e => e.length
}
(adjustOffset(0), selLen-sub)
}
// case 4: [^ ^], [ ^ ]
else if (selStart < rEnd && selEnd < rEnd)
(adjustOffset(offsetInIntersection), 0)
// case 5: [ ^ ] ^
else
(adjustOffset(offsetInIntersection), selLen-(rEnd-selStart))
}
new TextSelection(document, newOffset, newLen)
}
val overlappingEdit = edit.getChildren().find(e => selectionOverlapsRegion(e.getRegion()))
overlappingEdit map handleOverlap getOrElse handleNonOverlap
}
}
|
Kwestor/scala-ide
|
org.scala-ide.sdt.core/src/org/scalaide/util/internal/eclipse/TextEditUtils.scala
|
Scala
|
bsd-3-clause
| 7,847
|
package com.asto.dmp.shu.util
import com.asto.dmp.shu.base.Constants
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.Logging
import org.apache.spark.rdd.RDD
/**
 * Utility object for file-related operations.
*/
object FileUtils extends Logging {
private val conf = new Configuration()
conf.set("fs.defaultFS", Constants.Hadoop.DEFAULT_FS)
conf.set("mapreduce.jobtracker.address", Constants.Hadoop.JOBTRACKER_ADDRESS)
def deleteFilesInHDFS(paths: String*) = {
paths.foreach { path =>
val filePath = new Path(path)
val HDFSFilesSystem = filePath.getFileSystem(new Configuration())
if (HDFSFilesSystem.exists(filePath)) {
logInfo(Utils.logWrapper(s"删除目录:$filePath"))
HDFSFilesSystem.delete(filePath, true)
}
}
}
def saveAsTextFile[T <: Product](rdd: RDD[T], savePath: String) = {
deleteFilesInHDFS(savePath)
logInfo(Utils.logWrapper(s"往${savePath}中写入信息"))
rdd.map(_.productIterator.mkString(Constants.OutputPath.SEPARATOR)).coalesce(1).saveAsTextFile(savePath)
}
def saveAsTextFile(text: String, savePath: String) = {
deleteFilesInHDFS(savePath)
logInfo(Utils.logWrapper(s"往${savePath}中写入信息"))
val out = FileSystem.get(conf).create(new Path(savePath))
out.write(text.getBytes)
out.flush()
out.close()
}
}
|
zj-lingxin/Dmp_shu
|
src/main/scala/com/asto/dmp/shu/util/FileUtils.scala
|
Scala
|
mit
| 1,392
|
/*
* Copyright (c) 2014 Contributor. All rights reserved.
*/
package org.scalaide.debug.internal.expression.proxies
import org.scalaide.debug.internal.expression.context.JdiContext
import com.sun.jdi.StringReference
/**
* JdiProxy implementation for `java.lang.String`.
*/
case class StringJdiProxy(override val __context: JdiContext, override val __value: StringReference)
extends ObjectJdiProxy(__context, __value) {
override protected def callSpecialMethod(name: String, args: Seq[Any]): Option[JdiProxy] = (name, args) match {
case ("+", Seq(proxy: JdiProxy)) =>
Some(__context.invokeMethod(this, None, "+", Seq(proxy)))
case _ => None
}
def stringValue: String = __value.toString().drop(1).dropRight(1) // drop " at the beginning and end
}
object StringJdiProxy extends JdiProxyCompanion[StringJdiProxy, StringReference]
|
andrey-ilinykh/scala-ide
|
org.scala-ide.sdt.debug.expression/src/org/scalaide/debug/internal/expression/proxies/StringJdiProxy.scala
|
Scala
|
bsd-3-clause
| 860
|
/*
* Sonar Scoverage Plugin
* Copyright (C) 2013 Rado Buransky
* dev@sonar.codehaus.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02
*/
package com.buransky.plugins.scoverage.sensor
import java.io.File
import java.util
import com.buransky.plugins.scoverage.language.Scala
import com.buransky.plugins.scoverage.{FileStatementCoverage, DirectoryStatementCoverage, ProjectStatementCoverage, ScoverageReportParser}
import org.junit.runner.RunWith
import org.mockito.Mockito._
import org.scalatest.junit.JUnitRunner
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FlatSpec, Matchers}
import org.sonar.api.batch.fs.{FilePredicate, FilePredicates, FileSystem}
import org.sonar.api.config.Settings
import org.sonar.api.resources.Project
import org.sonar.api.resources.Project.AnalysisType
import org.sonar.api.scan.filesystem.PathResolver
import scala.collection.JavaConversions._
@RunWith(classOf[JUnitRunner])
class ScoverageSensorSpec extends FlatSpec with Matchers with MockitoSugar {
behavior of "shouldExecuteOnProject"
it should "succeed for Scala project" in new ShouldExecuteOnProject {
checkShouldExecuteOnProject(List("scala"), true)
}
it should "succeed for mixed projects" in new ShouldExecuteOnProject {
checkShouldExecuteOnProject(List("scala", "java"), true)
}
it should "fail for Java project" in new ShouldExecuteOnProject {
checkShouldExecuteOnProject(List("java"), false)
}
class ShouldExecuteOnProject extends ScoverageSensorScope {
protected def checkShouldExecuteOnProject(languages: Iterable[String], expectedResult: Boolean) {
// Setup
val project = mock[Project]
when(fileSystem.languages()).thenReturn(new util.TreeSet(languages))
// Execute & asser
shouldExecuteOnProject(project) should equal(expectedResult)
verify(fileSystem, times(1)).languages
}
}
behavior of "analyse for single project"
it should "set 0% coverage for a project without children" in new AnalyseScoverageSensorScope {
// Setup
val pathToScoverageReport = "#path-to-scoverage-report#"
val reportAbsolutePath = "#report-absolute-path#"
val projectStatementCoverage =
ProjectStatementCoverage("project-name", List(
DirectoryStatementCoverage(File.separator, List(
DirectoryStatementCoverage("home", List(
FileStatementCoverage("a.scala", 3, 2, Nil)
))
)),
DirectoryStatementCoverage("x", List(
FileStatementCoverage("b.scala", 1, 0, Nil)
))
))
val reportFile = mock[java.io.File]
val moduleBaseDir = mock[java.io.File]
val filePredicates = mock[FilePredicates]
when(reportFile.exists).thenReturn(true)
when(reportFile.isFile).thenReturn(true)
when(reportFile.getAbsolutePath).thenReturn(reportAbsolutePath)
when(settings.getString(SCOVERAGE_REPORT_PATH_PROPERTY)).thenReturn(pathToScoverageReport)
when(fileSystem.baseDir).thenReturn(moduleBaseDir)
when(fileSystem.predicates).thenReturn(filePredicates)
when(fileSystem.inputFiles(org.mockito.Matchers.any[FilePredicate]())).thenReturn(Nil)
when(pathResolver.relativeFile(moduleBaseDir, pathToScoverageReport)).thenReturn(reportFile)
when(scoverageReportParser.parse(reportAbsolutePath)).thenReturn(projectStatementCoverage)
// Execute
analyse(project, context)
verify(filePredicates).hasAbsolutePath("/home/a.scala")
verify(filePredicates).matchesPathPattern("**/x/b.scala")
}
class AnalyseScoverageSensorScope extends ScoverageSensorScope {
val project = mock[Project]
val context = new TestSensorContext
override protected lazy val scoverageReportParser = mock[ScoverageReportParser]
}
class ScoverageSensorScope extends {
val scala = new Scala
val settings = mock[Settings]
val pathResolver = mock[PathResolver]
val fileSystem = mock[FileSystem]
} with ScoverageSensor(settings, pathResolver, fileSystem, scala)
}
|
zenderol/sonar-scoverage-plugin
|
plugin/src/test/scala/com/buransky/plugins/scoverage/sensor/ScoverageSensorSpec.scala
|
Scala
|
lgpl-3.0
| 4,642
|
package cpup.mc.computers.content.network.impl.component
import java.util.UUID
import scala.reflect.ClassTag
import scala.reflect.runtime.{universe => ru}
import cpup.lib.reflect.ReflectUtil
import cpup.mc.computers.content.network.impl.Node
import cpup.mc.lib.inspecting.Registry.IDed
import net.minecraft.nbt.NBTTagCompound
trait Component extends IDed with Serializable {
def ownerNode: Node
def nodes: List[Node] = List()
def methods: Map[String, Method]
def writeToNBT(nbt: NBTTagCompound) {
nbt.setString("uuid", uuid.toString)
}
def readFromNBT(nbt: NBTTagCompound) {
changeUUID(UUID.fromString(nbt.getString("uuid")))
}
}
object Component {
def fromAnnotations[C](_node: Node, obj: C)(implicit tt: ru.TypeTag[C]) = {
val rm = ru.runtimeMirror(getClass.getClassLoader)
implicit val classTag = ClassTag[C](rm.runtimeClass(tt.tpe))
val mirror = rm.reflect(obj)
val tree = tt.tpe.typeSymbol.annotations.map(_.tree).find(_.tpe =:= ru.typeOf[ComponentAnnotation]).get
val data = ReflectUtil.annotation(tree)._2
val _nodes = tt.tpe.members.map(_.asTerm).filter { decl =>
ReflectUtil.findAnnotation[ComponentAnnotation.InternalNode](decl) match {
case Some(a) if decl.typeSignature.paramLists.isEmpty && decl.typeSignature.finalResultType <:< ru.typeOf[Node] => true
case _ => false
}
}.map { decl =>
if(decl.isMethod)
mirror.reflectMethod(decl.asMethod).apply()
else if(decl.isVar || decl.isVal)
mirror.reflectField(decl).get
else
throw new RuntimeException("bad")
}.toList.asInstanceOf[List[Node]]
for {
_decl <- tt.tpe.members if _decl.isTerm
decl = _decl.asTerm if decl.isVar && decl.typeSignature =:= ru.typeOf[Node]
anno <- ReflectUtil.findAnnotation[ComponentAnnotation.Node](decl)
m = mirror.reflectField(decl)
} {
m.set(_node)
}
val _methods = getMethods(obj)
new Component {
override def ownerNode = _node
override def typ = s"${data("mod").asInstanceOf[String]}:${data("name").asInstanceOf[String]}"
override def nodes = _nodes
override def methods = _methods
}
}
def getMethods[C](obj: C)(implicit tt: ru.TypeTag[C]) = tt.tpe.members.flatMap { decl =>
(decl, ReflectUtil.findAnnotation[ComponentAnnotation.Method](decl)) match {
case (d, Some(a)) => Some((decl.name.toString, Method.fromAnnotated(obj, d.name.toString)))
case _ => None
}
}.toMap
}
|
CoderPuppy/cpup-computers-mc
|
src/main/scala/cpup/mc/computers/content/network/impl/component/Component.scala
|
Scala
|
mit
| 2,390
|
/*
* Copyright © 2015-2019 the contributors (see Contributors.md).
*
* This file is part of Knora.
*
* Knora is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Knora is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public
* License along with Knora. If not, see <http://www.gnu.org/licenses/>.
*/
package org.knora.webapi
import akka.actor.{Actor, Props}
/**
* This trait is part of the cake pattern used in the creation of actors. This trait provides an implementation of the
 * makeActor method that creates actors as children of the current actor.
*/
trait LiveActorMaker extends ActorMaker {
this: Actor =>
def makeActor(props: Props, name: String) = context.actorOf(props, name)
}
|
musicEnfanthen/Knora
|
webapi/src/main/scala/org/knora/webapi/LiveActorMaker.scala
|
Scala
|
agpl-3.0
| 1,146
|
/* *\\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2015, Gary Keorkunian **
** **
\\* */
package squants.electro
import squants._
import squants.energy.Watts
import squants.time.{ Seconds, TimeDerivative }
/**
* Represents a quantity of electric current
*
* @author garyKeorkunian
* @since 0.1
*
 * @param value the amount of current in [[squants.electro.Amperes]]
*/
final class ElectricCurrent private (val value: Double, val unit: ElectricCurrentUnit)
extends Quantity[ElectricCurrent]
with TimeDerivative[ElectricCharge] {
def dimension = ElectricCurrent
protected[squants] def timeIntegrated = Coulombs(toAmperes)
protected[squants] def time = Seconds(1)
def *(that: ElectricalResistance): ElectricPotential = Volts(this.toAmperes * that.toOhms)
def *(that: ElectricPotential): Power = Watts(this.toAmperes * that.toVolts)
def *(that: Inductance): MagneticFlux = Webers(this.toAmperes * that.toHenry)
def /(that: ElectricPotential): ElectricalConductance = Siemens(this.toAmperes / that.toVolts)
def /(that: Length): MagneticFieldStrength = AmperesPerMeter(this.toAmperes / that.toMeters)
def /(that: Area): ElectricCurrentDensity = AmperesPerSquareMeter(this.toAmperes / that.toSquareMeters)
def toAmperes = to(Amperes)
def toMilliamperes = to(Milliamperes)
}
object ElectricCurrent extends Dimension[ElectricCurrent] with BaseDimension {
private[electro] def apply[A](n: A, unit: ElectricCurrentUnit)(implicit num: Numeric[A]) = new ElectricCurrent(num.toDouble(n), unit)
def apply = parse _
def name = "ElectricCurrent"
def primaryUnit = Amperes
def siUnit = Amperes
def units = Set(Amperes, Milliamperes)
def dimensionSymbol = "I"
}
/**
* Base trait for units of [[squants.electro.ElectricCurrent]]
*/
trait ElectricCurrentUnit extends UnitOfMeasure[ElectricCurrent] with UnitConverter {
def apply[A](n: A)(implicit num: Numeric[A]) = ElectricCurrent(n, this)
}
/**
* Amperes
*/
object Amperes extends ElectricCurrentUnit with PrimaryUnit with SiBaseUnit {
val symbol = "A"
}
/**
* Milliamperes
*/
object Milliamperes extends ElectricCurrentUnit with SiUnit {
val symbol = "mA"
val conversionFactor = MetricSystem.Milli
}
object ElectricCurrentConversions {
lazy val ampere = Amperes(1)
lazy val amp = Amperes(1)
lazy val milliampere = Milliamperes(1)
lazy val milliamp = Milliamperes(1)
implicit class ElectricCurrentConversions[A](n: A)(implicit num: Numeric[A]) {
def amperes = Amperes(n)
def amps = Amperes(n)
def A = Amperes(n)
def milliampers = Milliamperes(n)
def milliamps = Milliamperes(n)
def mA = Milliamperes(n)
}
implicit object ElectricCurrentNumeric
extends AbstractQuantityNumeric[ElectricCurrent](ElectricCurrent.primaryUnit)
}
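// A small usage sketch (not part of the original file) of the conversions DSL above.
object ElectricCurrentExample {
  import ElectricCurrentConversions._
  def main(args: Array[String]): Unit = {
    val i = 1.5.amperes + 250.milliamps
    println(i.toAmperes)        // 1.75
    println(i.toMilliamperes)   // 1750.0
  }
}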
|
underscorenico/squants
|
shared/src/main/scala/squants/electro/ElectricCurrent.scala
|
Scala
|
apache-2.0
| 3,214
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.CatalystTypeConverters
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
import org.apache.spark.sql.types.DataType
/**
 * A user-defined function (UDF) wrapped as a Catalyst expression.
 * @param function   the user-supplied Scala closure; its arity must match `children.size`
 * @param dataType   return type of the function
 * @param children   input expressions whose evaluated values are passed to the function
 * @param inputTypes expected input types used for implicit casting; `Nil` skips the check
 */
case class ScalaUDF(
function: AnyRef,
dataType: DataType,
children: Seq[Expression],
    // Nil is the empty List; :: prepends an element to the head of a list, producing a new list
inputTypes: Seq[DataType] = Nil)
extends Expression with ImplicitCastInputTypes with CodegenFallback {
override def nullable: Boolean = true
override def toString: String = s"UDF(${children.mkString(",")})"
// scalastyle:off
/** This method has been generated by this script
(1 to 22).map { x =>
val anys = (1 to x).map(x => "Any").reduce(_ + ", " + _)
    val childs = (0 to x - 1).map(x => s"val child$x = children($x)").reduce(_ + "\n " + _)
    val converters = (0 to x - 1).map(x => s"lazy val converter$x = CatalystTypeConverters.createToScalaConverter(child$x.dataType)").reduce(_ + "\n " + _)
    val evals = (0 to x - 1).map(x => s"converter$x(child$x.eval(input))").reduce(_ + ",\n " + _)
s"""case $x =>
val func = function.asInstanceOf[($anys) => Any]
$childs
$converters
(input: InternalRow) => {
func(
$evals)
}
"""
}.foreach(println)
*/
private[this] val f = children.size match {
case 0 =>
val func = function.asInstanceOf[() => Any]
(input: InternalRow) => {
func()
}
case 1 =>
val func = function.asInstanceOf[(Any) => Any]
val child0 = children(0)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)))
}
case 2 =>
val func = function.asInstanceOf[(Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)))
}
case 3 =>
val func = function.asInstanceOf[(Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)))
}
case 4 =>
val func = function.asInstanceOf[(Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)))
}
case 5 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)))
}
case 6 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)))
}
case 7 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)))
}
case 8 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)))
}
case 9 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)))
}
case 10 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)))
}
case 11 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)))
}
case 12 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)))
}
case 13 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)))
}
case 14 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)))
}
case 15 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)))
}
case 16 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
val child15 = children(15)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)),
converter15(child15.eval(input)))
}
case 17 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
val child15 = children(15)
val child16 = children(16)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
lazy val converter16 = CatalystTypeConverters.createToScalaConverter(child16.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)),
converter15(child15.eval(input)),
converter16(child16.eval(input)))
}
case 18 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
val child15 = children(15)
val child16 = children(16)
val child17 = children(17)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
lazy val converter16 = CatalystTypeConverters.createToScalaConverter(child16.dataType)
lazy val converter17 = CatalystTypeConverters.createToScalaConverter(child17.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)),
converter15(child15.eval(input)),
converter16(child16.eval(input)),
converter17(child17.eval(input)))
}
case 19 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
val child15 = children(15)
val child16 = children(16)
val child17 = children(17)
val child18 = children(18)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
lazy val converter16 = CatalystTypeConverters.createToScalaConverter(child16.dataType)
lazy val converter17 = CatalystTypeConverters.createToScalaConverter(child17.dataType)
lazy val converter18 = CatalystTypeConverters.createToScalaConverter(child18.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)),
converter15(child15.eval(input)),
converter16(child16.eval(input)),
converter17(child17.eval(input)),
converter18(child18.eval(input)))
}
case 20 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
val child15 = children(15)
val child16 = children(16)
val child17 = children(17)
val child18 = children(18)
val child19 = children(19)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
lazy val converter16 = CatalystTypeConverters.createToScalaConverter(child16.dataType)
lazy val converter17 = CatalystTypeConverters.createToScalaConverter(child17.dataType)
lazy val converter18 = CatalystTypeConverters.createToScalaConverter(child18.dataType)
lazy val converter19 = CatalystTypeConverters.createToScalaConverter(child19.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)),
converter15(child15.eval(input)),
converter16(child16.eval(input)),
converter17(child17.eval(input)),
converter18(child18.eval(input)),
converter19(child19.eval(input)))
}
case 21 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
val child15 = children(15)
val child16 = children(16)
val child17 = children(17)
val child18 = children(18)
val child19 = children(19)
val child20 = children(20)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
lazy val converter16 = CatalystTypeConverters.createToScalaConverter(child16.dataType)
lazy val converter17 = CatalystTypeConverters.createToScalaConverter(child17.dataType)
lazy val converter18 = CatalystTypeConverters.createToScalaConverter(child18.dataType)
lazy val converter19 = CatalystTypeConverters.createToScalaConverter(child19.dataType)
lazy val converter20 = CatalystTypeConverters.createToScalaConverter(child20.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)),
converter15(child15.eval(input)),
converter16(child16.eval(input)),
converter17(child17.eval(input)),
converter18(child18.eval(input)),
converter19(child19.eval(input)),
converter20(child20.eval(input)))
}
case 22 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
val child15 = children(15)
val child16 = children(16)
val child17 = children(17)
val child18 = children(18)
val child19 = children(19)
val child20 = children(20)
val child21 = children(21)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
lazy val converter16 = CatalystTypeConverters.createToScalaConverter(child16.dataType)
lazy val converter17 = CatalystTypeConverters.createToScalaConverter(child17.dataType)
lazy val converter18 = CatalystTypeConverters.createToScalaConverter(child18.dataType)
lazy val converter19 = CatalystTypeConverters.createToScalaConverter(child19.dataType)
lazy val converter20 = CatalystTypeConverters.createToScalaConverter(child20.dataType)
lazy val converter21 = CatalystTypeConverters.createToScalaConverter(child21.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)),
converter15(child15.eval(input)),
converter16(child16.eval(input)),
converter17(child17.eval(input)),
converter18(child18.eval(input)),
converter19(child19.eval(input)),
converter20(child20.eval(input)),
converter21(child21.eval(input)))
}
}
// scalastyle:on
private[this] val converter = CatalystTypeConverters.createToCatalystConverter(dataType)
override def eval(input: InternalRow): Any = converter(f(input))
}
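// --- Editor's sketch (not part of the Spark source above) ---
// A standalone, runnable version of the generator script quoted in the comment
// inside ScalaUDF: for a small arity range it prints the same `case n =>` blocks
// that were pasted into the match above. No Spark dependency is needed to run it.
object ScalaUDFCaseGenerator extends App {
  (1 to 3).foreach { x =>
    val anys       = (1 to x).map(_ => "Any").mkString(", ")
    val childs     = (0 until x).map(i => s"val child$i = children($i)").mkString("\n      ")
    val converters = (0 until x)
      .map(i => s"lazy val converter$i = CatalystTypeConverters.createToScalaConverter(child$i.dataType)")
      .mkString("\n      ")
    val evals      = (0 until x).map(i => s"converter$i(child$i.eval(input))").mkString(",\n        ")
    println(
      s"""case $x =>
         |  val func = function.asInstanceOf[($anys) => Any]
         |  $childs
         |  $converters
         |  (input: InternalRow) => {
         |    func(
         |      $evals)
         |  }
         |""".stripMargin)
  }
}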
|
tophua/spark1.52
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDF.scala
|
Scala
|
apache-2.0
| 48,460
|
package com.twitter.inject.module
import com.google.inject.AbstractModule
import com.google.inject.name.Names
import com.twitter.inject.TestBindingAnnotation
import com.twitter.inject.{MyServiceImpl, MyServiceInterface}
import java.util.Properties
import javax.inject.Singleton
import net.codingwell.scalaguice.ScalaModule
trait TestModule extends AbstractModule with ScalaModule {
override protected def configure(): Unit = {
bind[String].annotatedWith(Names.named("str1")).toInstance("string1")
bind[String].annotatedWith(Names.named("str2")).toInstance("string2")
bind[Int].toInstance(11)
bind[String].toInstance("default string")
bind[String].annotatedWith[TestBindingAnnotation].toInstance("prod string")
bind[MyServiceInterface].to[MyServiceImpl].in[Singleton]
val properties = new Properties()
properties.setProperty("name", "Steve")
Names.bindProperties(TestModule.super.binder(), properties)
}
}
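// --- Editor's usage sketch (not part of the Twitter test sources above) ---
// Shows how the bindings declared in TestModule resolve through a plain Guice
// injector. It assumes the com.twitter.inject test fixtures (MyServiceInterface,
// MyServiceImpl) are on the test classpath, as they are for TestModule itself.
object TestModuleUsageSketch extends App {
  import com.google.inject.{Guice, Key}
  import com.google.inject.name.Names
  import com.twitter.inject.MyServiceInterface

  val injector = Guice.createInjector(new TestModule {})
  println(injector.getInstance(Key.get(classOf[String], Names.named("str1")))) // string1
  println(injector.getInstance(classOf[String]))                               // default string
  println(injector.getInstance(classOf[MyServiceInterface]))                   // MyServiceImpl (singleton-scoped)
}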
|
twitter/util
|
util-inject/src/test/scala/com/twitter/util/inject/module/TestModule.scala
|
Scala
|
apache-2.0
| 950
|
/*
* Copyright 2017 Nicolas Rinaudo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kantan.mongodb
package ops
final class BsonValueEncoderOps[A: BsonValueEncoder](value: A) {
def encodeBson: BsonValue = BsonValueEncoder[A].encode(value)
}
trait ToBsonValueEncoderOps {
implicit def toBsonValueEncoderOps[A: BsonValueEncoder](a: A): BsonValueEncoderOps[A] = new BsonValueEncoderOps(a)
}
object bsonValueEncoder extends ToBsonValueEncoderOps
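// --- Editor's sketch (not part of the kantan.mongodb source above) ---
// A self-contained illustration of the same "ops class + ToXOps trait" enrichment
// pattern, using a hypothetical Show typeclass instead of BsonValueEncoder (whose
// constructors are not shown in this file). Importing the ops object brings the
// extension method into scope, just as `import kantan.mongodb.ops.bsonValueEncoder._`
// enables `value.encodeBson`.
object EnrichmentPatternSketch extends App {
  import scala.language.implicitConversions

  trait Show[A] { def show(a: A): String }
  object Show {
    def apply[A](implicit ev: Show[A]): Show[A] = ev
    implicit val intShow: Show[Int] = new Show[Int] { def show(a: Int): String = s"Int($a)" }
  }

  final class ShowOps[A: Show](value: A) {
    def showIt: String = Show[A].show(value)
  }
  trait ToShowOps {
    implicit def toShowOps[A: Show](a: A): ShowOps[A] = new ShowOps(a)
  }
  object show extends ToShowOps

  import show._
  println(42.showIt) // Int(42)
}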
|
nrinaudo/kantan.mongodb
|
core/src/main/scala/kantan/mongodb/ops/BsonValueEncoderOps.scala
|
Scala
|
apache-2.0
| 972
|
/*
* Copyright (c) 2016 SnappyData, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package io.snappydata
import scala.collection.JavaConverters._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.{SnappyContext, SparkSession}
class QueryTest extends SnappyFunSuite {
test("Test exists in select") {
val snContext = SnappyContext(sc)
snContext.sql("CREATE TABLE titles(title_id varchar(20), title varchar(80) " +
"not null, type varchar(12) not null, pub_id varchar(4), price int not null, " +
"advance int not null , royalty int , ytd_sales int,notes varchar(200))")
snContext.sql("insert into titles values ('1', 'Secrets', " +
"'popular_comp', '1389', 20, 8000, 10, 4095,'Note 1')")
snContext.sql("insert into titles values ('2', 'The', " +
"'business', '1389', 19, 5000, 10, 4095,'Note 2')")
snContext.sql("insert into titles values ('3', 'Emotional', " +
"'psychology', '0736', 7, 4000, 10, 3336,'Note 3')")
snContext.sql("insert into titles values ('4', 'Prolonged', " +
"'psychology', '0736', 19, 2000, 10, 4072,'Note 4')")
snContext.sql("insert into titles values ('5', 'With', " +
"'business', '1389', 11, 5000, 10, 3876,'Note 5')")
snContext.sql("insert into titles values ('6', 'Valley', " +
"'mod_cook', '0877', 9, 0, 12, 2032,'Note 6')")
snContext.sql("insert into titles values ('7', 'Any?', " +
"'trad_cook', '0877', 14, 8000, 10, 4095,'Note 7')")
snContext.sql("insert into titles values ('8', 'Fifty', " +
"'trad_cook', '0877', 11, 4000, 14, 1509,'Note 8')")
snContext.sql("CREATE TABLE sales(stor_id varchar(4) not null, " +
"ord_num varchar(20) not null, qty int not null, " +
"payterms varchar(12) not null,title_id varchar(80))")
snContext.sql("insert into sales values('1', 'QA7442.3', 75, 'ON Billing','1')")
snContext.sql("insert into sales values('2', 'D4482', 10, 'Net 60', '1')")
snContext.sql("insert into sales values('3', 'N914008', 20, 'Net 30', '2')")
snContext.sql("insert into sales values('4', 'N914014', 25, 'Net 30', '3')")
snContext.sql("insert into sales values('5', '423LL922', 15, 'ON Billing','3')")
snContext.sql("insert into sales values('6', '423LL930', 10, 'ON Billing','2')")
val df = snContext.sql("SELECT title, price FROM titles WHERE EXISTS (" +
"SELECT * FROM sales WHERE sales.title_id = titles.title_id AND qty >30)")
df.show()
}
test("SNAP-1159") {
val session = SnappyContext(sc).snappySession
session.sql(s"set ${SQLConf.COLUMN_BATCH_SIZE.key}=10")
session.sql(s"set ${SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key}=1")
val data1 = session.range(20).selectExpr("id")
val data2 = session.range(80).selectExpr("id", "cast ((id / 4) as long) as k",
"(case when (id % 4) < 2 then cast((id % 4) as long) else null end) as v")
data1.write.format("column").saveAsTable("t1")
data2.write.format("column").saveAsTable("t2")
SparkSession.clearActiveSession()
val spark = SparkSession.builder().getOrCreate()
val sdata1 = spark.range(20).selectExpr("id")
val sdata2 = spark.createDataFrame(data2.collect().toSeq.asJava, data2.schema)
sdata1.createOrReplaceTempView("t1")
sdata2.createOrReplaceTempView("t2")
val query = "select k, v from t1 inner join t2 where t1.id = t2.k order by k, v"
val df = session.sql(query)
val result1 = df.collect().mkString(" ")
val result2 = spark.sql(query).collect().mkString(" ")
if (result1 != result2) {
      fail(s"Expected result: $result2\nGot: $result1")
}
}
}
|
vjr/snappydata
|
cluster/src/test/scala/io/snappydata/QueryTest.scala
|
Scala
|
apache-2.0
| 4,268
|
package org.denigma.graphs.simple
import org.denigma.graphs.core.VisualEdge
import org.denigma.graphs.visual.EdgeView
import org.scalajs.dom
import rx.core.Var
class SimpleEdge(from: SimpleNode, to: SimpleNode, data: Var[String], view: EdgeView[Var[String]])
  extends VisualEdge[SimpleNode, Var[String], EdgeView[Var[String]]](from, to, data, view)
{
def id = data.now
override def receive:PartialFunction[Any,Unit] = {
case "mouseover"=>
//dom.console.log("mouse over works")
this.view.sprite.element.className = this.view.sprite.element.className.replace("tiny","small")
case "mouseout"=>
dom.console.log("mouse out works")
this.view.sprite.element.className = this.view.sprite.element.className.replace("small","tiny")
case other => dom.console.log(s"unknown message $other")
//nothing
}
}
|
antonkulaga/semantic-graph
|
graphs/src/main/scala/org/denigma/graphs/simple/SimpleEdge.scala
|
Scala
|
mpl-2.0
| 847
|
package mimir;
import java.io._
import java.sql.SQLException
import java.net.URL
import org.jline.terminal.{Terminal,TerminalBuilder}
import org.slf4j.{LoggerFactory}
import org.rogach.scallop._
import com.typesafe.scalalogging.LazyLogging
import scala.collection.JavaConverters._
import sparsity._
import fastparse.Parsed
import mimir.ctables._
import mimir.parser._
import mimir.sql._
import mimir.metadata._
import mimir.util.{Timer,ExperimentalOptions,LineReaderInputSource,PythonProcess,SqlUtils}
import mimir.data.staging.{ RawFileProvider, LocalFSRawFileProvider }
import mimir.algebra._
import mimir.statistics.{DetectSeries,DatasetShape}
import mimir.plot.Plot
import mimir.exec.{OutputFormat,DefaultOutputFormat,PrettyOutputFormat}
import mimir.exec.spark.MimirSpark
/**
* The primary interface to Mimir. Responsible for:
* - Parsing and processing command line arguments.
* - Initializing internal state (Database())
* - Providing a command-line prompt (if appropriate)
* - Invoking MimirJSqlParser and dispatching the
* resulting statements to Database()
*
* Database() handles all of the logical dispatching,
* Mimir provides a friendly command-line user
* interface on top of Database()
*/
object Mimir extends LazyLogging {
var db: Database = null;
lazy val terminal: Terminal = TerminalBuilder.terminal()
var output: OutputFormat = DefaultOutputFormat
def main(args: Array[String]) =
{
val conf = new MimirConfig(args);
// Prepare experiments
ExperimentalOptions.enable(conf.experimental())
val dataDir = new java.io.File(conf.dataDirectory())
if(!dataDir.exists())
dataDir.mkdirs()
val staging = new LocalFSRawFileProvider(dataDir)
// Set up the database connection(s)
MimirSpark.init(conf)
if(!conf.quiet()){
output.print("Connecting to metadata provider [" + conf.metadataBackend() + "://" + conf.dbname() + "]...")
}
val metadata = new JDBCMetadataBackend(conf.metadataBackend(), conf.dbname())
db = new Database(metadata, staging)
logger.debug("Opening Database")
db.open()
if(!ExperimentalOptions.isEnabled("SIMPLE-TERM")){
output = new PrettyOutputFormat(terminal)
}
if(!conf.quiet()){
output.print(" ... ready")
}
var finishByReadingFromConsole = true
conf.files.get match {
case None => {}
case Some(files) =>
for(file <- files){
logger.debug(s"Processing file '$file'")
if(file == "-"){
interactiveEventLoop()
finishByReadingFromConsole = false
} else {
val extensionRegexp = "([^.]+)$".r
val extension:String = extensionRegexp.findFirstIn(file) match {
case Some(e) => e
case None => {
throw new RuntimeException("Error: Unable to determine file format of "+file)
}
}
extension.toLowerCase match {
case "sql" => {
noninteractiveEventLoop(new FileReader(file))
finishByReadingFromConsole = false
}
case "csv" => {
output.print("Loading "+file+"...")
db.loader.loadTable(file)
}
case _ => {
throw new RuntimeException("Error: Unknown file format '"+extension+"' of "+file)
}
}
}
}
}
logger.debug("Checking if console needed")
if(finishByReadingFromConsole){
logger.debug("Starting interactive mode")
interactiveEventLoop()
}
db.close()
    if(!conf.quiet()) { output.print("\n\nDone. Exiting."); }
}
def interactiveEventLoop(): Unit =
{
eventLoop(
new LineReaderParser(terminal),
(parser: LineReaderParser) => {parser.flush(); parser}
)
}
def noninteractiveEventLoop(source: Reader): Unit =
{
eventLoop(
MimirCommand(source),
(_:Any)=>MimirCommand(source)
)
}
def eventLoop[I <: Iterator[Parsed[MimirCommand]]](initialParser: I, reset: (I => I)): Unit =
{
var parser = initialParser
logger.debug("Event Loop")
while(parser.hasNext){
logger.debug("next command!")
try {
parser.next() match {
case Parsed.Success(cmd:SlashCommand, _) =>
handleSlashCommand(cmd)
case Parsed.Success(SQLCommand(stmt), _) =>
stmt match {
case SQLStatement(sel: sparsity.statement.Select) =>
handleSelect(sel)
case SQLStatement(expl: sparsity.statement.Explain) =>
handleExplain(expl)
case analyze: Analyze =>
handleAnalyze(analyze)
case plot: DrawPlot =>
Plot.plot(plot, db, output)
case compare: Compare =>
handleCompare(compare)
case _ =>
db.update(stmt)
}
case f: Parsed.Failure =>
output.print(s"Parse Error: ${f.msg}")
}
} catch {
case e: EOFException =>
return
case e: FileNotFoundException =>
output.print(e.getMessage)
case e: SQLException =>
output.print("Error: "+e.getMessage)
          logger.debug(e.getMessage + "\n" + e.getStackTrace.map(_.toString).mkString("\n"))
case e: RAException =>
output.print("Error: "+e.getMessage)
          logger.debug(e.getMessage + "\n" + e.getStackTrace.map(_.toString).mkString("\n"))
case e: Throwable => {
output.print("An unknown error occurred...");
e.printStackTrace()
// The parser pops the input stream back onto the queue, so
// the next call to Statement() will throw the same exact
// Exception. To prevent this from happening, reset the parser:
parser = reset(parser)
}
}
}
}
def handleSelect(sel: sparsity.statement.Select): Unit =
{
val raw = db.sqlToRA(sel)
handleQuery(raw)
}
def handleQuery(raw:Operator) =
{
Timer.monitor("QUERY", output.print(_)) {
db.query(raw) { output.print(_) }
}
}
def handleCompare(comparison: Compare): Unit =
{
val target = db.sqlToRA(comparison.target)
val expected = db.sqlToRA(comparison.expected)
val facets = DatasetShape.detect(db, expected)
output.print("---- Comparison Dataset Features ----")
for(facet <- facets){
output.print(" > "+facet.description);
}
output.print("---- Target Differences ----")
for(facet <- facets){
for(difference <- facet.test(db, target)){
output.print(" > "+difference)
}
}
}
def handleExplain(explain: sparsity.statement.Explain): Unit =
{
val raw = db.sqlToRA(explain.query)
output.print("------ Raw Query ------")
output.print(raw.toString)
db.typechecker.schemaOf(raw) // <- discard results, just make sure it typechecks
val optimized = db.compiler.optimize(raw)
    output.print("\n--- Optimized Query ---")
output.print(optimized.toString)
db.typechecker.schemaOf(optimized) // <- discard results, just make sure it typechecks
    output.print("\n-------- SQL --------")
try {
output.print(db.raToSQL(optimized).toString)
} catch {
case e:Throwable =>
output.print("Unavailable: "+e.getMessage())
}
    output.print("\n------- Spark -------")
try {
output.print(
db.raToSpark.mimirOpToSparkOp(optimized).toString
)
} catch {
case e:Throwable =>
output.print("Unavailable: "+e.getMessage())
}
}
def handleAnalyzeFeatures(analyze: AnalyzeFeatures)
{
val query = db.sqlToRA(analyze.target)
output.print("==== Analyze Features ====")
for(facet <- DatasetShape.detect(db, query)) {
output.print(s" > ${facet.description}")
}
}
def handleAnalyze(analyze: Analyze)
{
val column = analyze.column // The column of the cell to analyze, or null if full row or table scan
val query = db.sqlToRA(analyze.target)
analyze.rowid match {
case None => {
output.print("==== Analyze Table ====")
logger.debug("Starting to Analyze Table")
val reasonSets = db.uncertainty.explainEverything(query)
logger.debug("Done Analyzing Table")
for(reasonSet <- reasonSets){
logger.debug(s"Expanding $reasonSet")
// Workaround for a bug: SQLite crashes if a UDA is run on an empty input
if(!reasonSet.isEmpty(db)){
            logger.debug(s"Not Empty: \n${reasonSet.argLookup}")
val count = reasonSet.size(db);
logger.debug(s"Size = $count")
val reasons = reasonSet.take(db, 5);
logger.debug(s"Got ${reasons.size} reasons")
printReasons(reasons);
if(count > reasons.size){
output.print(s"... and ${count - reasons.size} more like the last")
}
}
}
if(analyze.withAssignments) {
CTPrioritizer.prioritize(reasonSets.flatMap(x=>x.all(db)))
}
}
case Some(rowid) => {
val token = RowIdPrimitive(db.sqlToRA(rowid).asString)
analyze.column match {
case None => {
output.print("==== Analyze Row ====")
val explanation =
db.uncertainty.explainRow(query, token)
printReasons(explanation.reasons)
output.print("--------")
output.print("Row Probability: "+explanation.probability)
if(analyze.withAssignments) {
CTPrioritizer.prioritize(explanation.reasons)
}
}
case Some(column) => {
output.print("==== Analyze Cell ====")
val explanation =
db.uncertainty.explainCell(query, token, ID.upper(column))
printReasons(explanation.reasons)
output.print("--------")
output.print("Examples: "+explanation.examples.map(_.toString).mkString(", "))
if(analyze.withAssignments) {
CTPrioritizer.prioritize(explanation.reasons)
}
}
}
}
}
}
def printReasons(reasons: Iterable[Reason])
{
for(reason <- reasons.toSeq.sortBy( r => if(r.confirmed){ 1 } else { 0 } )){
val argString =
if(!reason.args.isEmpty){
" (" + reason.args.mkString(",") + ")"
} else { "" }
output.print(reason.reason)
reason match {
case _:DataWarningReason =>
if(!reason.confirmed) {
output.print(s" ... acknowledge with `FEEDBACK ${reason.model.name} ${reason.idx}$argString IS ${reason.repair.exampleString}`")
}
case _ =>
if(!reason.confirmed){
output.print(s" ... repair with `FEEDBACK ${reason.model.name} ${reason.idx}$argString IS ${ reason.repair.exampleString }`");
output.print(s" ... confirm with `FEEDBACK ${reason.model.name} ${reason.idx}$argString IS ${ reason.guess }`");
} else {
output.print(s"    ... amend with `FEEDBACK ${reason.model.name} ${reason.idx}$argString IS ${ reason.repair.exampleString }`");
}
}
output.print("")
}
}
val slashCommands: Map[String, SlashCommandDefinition] = Map(
MakeSlashCommand(
"help",
"",
"Show this help message",
{ case _ => {
val cmdWidth = slashCommands.values.map { _.name.length }.max+1
val argWidth = slashCommands.values.map { _.args.length }.max
output.print("")
for(cmd <- slashCommands.values){
output.print(
String.format(s" %${cmdWidth}s %-${argWidth}s %s",
"/"+cmd.name, cmd.args, cmd.description
)
)
}
output.print("")
}}
),
MakeSlashCommand(
"tables",
"",
"List all available tables",
{ case _ => handleQuery(db.catalog.tableView) }
),
MakeSlashCommand(
"show",
"table_name",
"Show the schema for a specified table",
{ case Seq(name) =>
db.catalog.tableSchema(Name(name)) match {
case None =>
output.print(s"'$name' is not a table")
case Some( schema ) =>
output.print("CREATE TABLE "+name+" (\n"+
schema.map { col => "  "+col._1+" "+col._2 }.mkString(",\n")
+"\n);")
}
}
),
MakeSlashCommand(
"log",
"unit [level]",
"Set the logging level for the specified unit (e.g., DEBUG)",
{ case Seq(loggerName) => setLogLevel(loggerName)
case Seq(loggerName, level) => setLogLevel(loggerName, level)
}
)
)
def handleSlashCommand(cmd: SlashCommand): Unit =
{
cmd.body.split(" +").toSeq match {
case Seq() =>
output.print("Empty command")
case cmd =>
slashCommands.get(cmd.head) match {
case None => output.print(s"Unknown command: /${cmd.head} (try /help)")
case Some(implementation) => implementation(cmd.tail, output)
}
}
}
def setLogLevel(loggerName: String, levelString: String = "DEBUG"): Unit =
{
val newLevel = internalSetLogLevel(LoggerFactory.getLogger(loggerName), levelString);
output.print(s"$loggerName <- $newLevel")
}
private def internalSetLogLevel(genericLogger: Object, levelString: String): String =
{
genericLogger match {
case logger: ch.qos.logback.classic.Logger =>
// base logger instance. Set the logger
val level = levelString.toUpperCase match {
case "TRACE" => ch.qos.logback.classic.Level.TRACE
case "DEBUG" => ch.qos.logback.classic.Level.DEBUG
case "INFO" => ch.qos.logback.classic.Level.INFO
case "WARN" => ch.qos.logback.classic.Level.WARN
case "ERROR" => ch.qos.logback.classic.Level.ERROR
case _ => throw new SQLException(s"Invalid log level: $levelString");
}
logger.setLevel(level)
return level.toString
case logger: com.typesafe.scalalogging.Logger =>
// SLF4J wraps existing loggers. Recur to get the real logger
return internalSetLogLevel(logger.underlying, levelString)
case _ => throw new SQLException(s"Don't know how to handle logger ${genericLogger.getClass().toString}")
}
}
}
class MimirConfig(arguments: Seq[String]) extends ScallopConf(arguments)
{
// val start = opt[Long]("start", default = Some(91449149))
// val end = opt[Long]("end", default = Some(99041764))
// val version_count = toggle("vcount", noshort = true, default = Some(false))
// val exclude = opt[Long]("xclude", default = Some(91000000))
// val summarize = toggle("summary-create", default = Some(false))
// val cleanSummary = toggle("summary-clean", default = Some(false))
// val sampleCount = opt[Int]("samples", noshort = true, default = None)
val metadataBackend = opt[String]("driver", descr = "Which metadata backend to use? ([sqlite])",
default = Some("sqlite"))
val precache = opt[String]("precache", descr = "Precache one or more lenses")
val rebuildBestGuess = opt[String]("rebuild-bestguess")
val quiet = toggle("quiet", default = Some(false))
val files = trailArg[List[String]](required = false)
val experimental = opt[List[String]]("X", default = Some(List[String]()))
val mimirHost = opt[String]("mimirHost", descr = "The IP or hostname of mimir",
default = Some("vizier-mimir.local"))
val sparkHost = opt[String]("sparkHost", descr = "The IP or hostname of the spark master",
default = Some("spark-master.local"))
val sparkPort = opt[String]("sparkPort", descr = "The port of the spark master",
default = Some("7077"))
val sparkDriverMem = opt[String]("sparkDriverMem", descr = "The memory for spark driver",
default = Some("8g"))
val sparkExecutorMem = opt[String]("sparkExecutorMem", descr = "The memory for spark executors",
default = Some("8g"))
val hdfsPort = opt[String]("hdfsPort", descr = "The port for hdfs",
default = Some("8020"))
val useHDFSHostnames = toggle("useHDFSHostnames", default = Some(Option(System.getenv("HDFS_CONF_dfs_client_use_datanode_hostname")).getOrElse("false").toBoolean),
descrYes = "use the hostnames for hdfs nodes",
descrNo = "use ip addresses for hdfs nodes")
val overwriteStagedFiles = toggle("overwriteStagedFiles", default = Some(false),
descrYes = "overwrites files sent to staging area (hdfs or s3)",
descrNo = "do not overwrite files sent to staging area (hdfs or s3)")
val overwriteJars = toggle("overwriteJars", default = Some(false),
descrYes = "overwrites jar files sent to hdfs",
descrNo = "do not overwrite jar files sent to hdfs")
val numPartitions = opt[Int]("numPartitions", descr = "number of partitions to use",
default = Some(8))
val dataStagingType = opt[String]("dataStagingType", descr = "where to stage data for spark: hdfs or s3",
default = Some("hdfs"))
val dataDirectory = opt[String]("dataDirectory", descr = "The directory to place data files",
default = Some(s"${System.getProperty("user.home")}/mimir-data"))
val googleSheetsCredentialPath = opt[String]("sheetCred", descr = "Credential file for google sheets",
default = Some("test/data/api-project-378720062738-5923e0b6125f"))
def dbname : ScallopOption[String] = {
opt[String]("db", descr = "Connect to the database with the specified name",
default = Some(dataDirectory() + "/mimir.db"))
}
def sparkJars : ScallopOption[String] = {
opt[String]("sparkJars", descr = "Folder with additional jars for spark to load",
default = Some(dataDirectory() + "/sparkJars"))
}
verify()
}
class SlashCommandDefinition(
val name: String,
val args: String,
val description: String,
val implementation: PartialFunction[Seq[String], Unit]
){
def apply(args: Seq[String], output: OutputFormat): Unit =
if(implementation.isDefinedAt(args)){ implementation(args) }
else { output.print(s"usage: $usage")}
def usage: String = s"/$name $args"
}
object MakeSlashCommand
{
def apply(
name: String,
args: String,
description: String,
implementation: PartialFunction[Seq[String], Unit]
) = { (name, new SlashCommandDefinition(name, args, description, implementation)) }
}
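// --- Illustrative usage sketch (not part of the original source) ---
// Shows how a command built with MakeSlashCommand above plugs into the same
// dispatch pattern used by handleSlashCommand. The `out` parameter and the
// "echo" command are hypothetical; only definitions from this file are used.
object SlashCommandExample {
  def demo(out: OutputFormat): Unit = {
    val (name, definition) = MakeSlashCommand(
      "echo", "text...", "Print the arguments back to the user",
      { case args if args.nonEmpty => out.print(args.mkString(" ")) }
    )
    val commands: Map[String, SlashCommandDefinition] = Map(name -> definition)
    commands(name)(Seq("hello", "world"), out) // prints "hello world"
    commands(name)(Seq(), out)                 // falls back to "usage: /echo text..."
  }
}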
|
UBOdin/mimir
|
src/main/scala/mimir/Mimir.scala
|
Scala
|
apache-2.0
| 18,473
|
package com.guidewire.tools.marathon.client.api.version1
import play.api.libs.json._
import com.guidewire.tools.marathon.client
case class Endpoint(
id : String
, ports : Seq[Int]
, instances: Seq[Task]
) extends client.Endpoint
object Endpoint {
implicit val fmt = Json.format[Endpoint]
}
|
Guidewire/marathon-client
|
src/main/scala/com/guidewire/tools/marathon/client/api/version1/Endpoints.scala
|
Scala
|
apache-2.0
| 313
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.display.angular
import java.io.PrintStream
import org.apache.zeppelin.annotation.ZeppelinApi
import org.apache.zeppelin.display.{AngularObjectWatcher, AngularObject}
import org.apache.zeppelin.interpreter.{InterpreterResult, InterpreterContext}
import scala.xml._
/**
* Element that is bound to an Angular object
*/
abstract class AbstractAngularElem(val interpreterContext: InterpreterContext,
val modelName: String,
val angularObjects: Map[String, AngularObject[Any]],
prefix: String,
label: String,
attributes1: MetaData,
scope: NamespaceBinding,
minimizeEmpty: Boolean,
child: Node*)
extends Elem(prefix, label, attributes1, scope, minimizeEmpty, child:_*) {
val uniqueId = java.util.UUID.randomUUID.toString.replaceAll("-", "_")
/**
* On click element
*
* @param callback
* @return
*/
@ZeppelinApi
def onClick(callback: () => Unit): AbstractAngularElem = {
onEvent("ng-click", callback)
}
/**
* On change event
*
* @param callback
* @return
*/
@ZeppelinApi
def onChange(callback: () => Unit): AbstractAngularElem = {
onEvent("ng-change", callback)
}
/**
* Bind angularObject to ng-model directive
*
* @param name name of angularObject
* @param value initialValue
* @return
*/
@ZeppelinApi
def model(name: String, value: Any): AbstractAngularElem = {
val registry = interpreterContext.getAngularObjectRegistry
// create AngularFunction in current paragraph
val elem = this % Attribute(None, "ng-model",
Text(s"${name}"),
Null)
val angularObject = addAngularObject(name, value)
.asInstanceOf[AngularObject[Any]]
newElem(
interpreterContext,
name,
angularObjects + (name -> angularObject),
elem)
}
@ZeppelinApi
def model(name: String): AbstractAngularElem = {
val registry = interpreterContext.getAngularObjectRegistry
// create AngularFunction in current paragraph
val elem = this % Attribute(None, "ng-model",
Text(s"${name}"),
Null)
newElem(
interpreterContext,
name,
angularObjects,
elem)
}
/**
* Retrieve value of model
*
* @return
*/
@ZeppelinApi
def model(): Any = {
if (angularObjects.contains(modelName)) {
angularObjects(modelName).get()
} else {
None
}
}
/**
* Register a callback for the given Angular event directive
* @param eventName angular directive like ng-click, ng-change, etc.
* @return
*/
@ZeppelinApi
def onEvent(eventName: String, callback: () => Unit): AbstractAngularElem = {
val registry = interpreterContext.getAngularObjectRegistry
// create AngularFunction in current paragraph
val functionName = eventName.replaceAll("-", "_") + "_" + uniqueId
val elem = this % Attribute(None, eventName,
Text(s"${functionName}=$$event.timeStamp"),
Null)
val angularObject = addAngularObject(functionName, "")
angularObject.addWatcher(new AngularObjectWatcher(interpreterContext) {
override def watch(oldObject: scala.Any, newObject: scala.Any, context: InterpreterContext)
:Unit = {
InterpreterContext.set(interpreterContext)
callback()
}
})
newElem(
interpreterContext,
modelName,
angularObjects + (eventName -> angularObject),
elem)
}
protected def addAngularObject(name: String, value: Any): AngularObject[Any]
protected def newElem(interpreterContext: InterpreterContext,
name: String,
angularObjects: Map[String, AngularObject[Any]],
elem: scala.xml.Elem): AbstractAngularElem
/**
* Disassociate this element and its children from the front-end
* by removing the associated AngularObjects
*/
@ZeppelinApi
def disassociate() = {
remove(this)
}
/**
* Remove all angularObject recursively
*
* @param node
*/
private def remove(node: Node): Unit = {
if (node.isInstanceOf[AbstractAngularElem]) {
node.asInstanceOf[AbstractAngularElem].angularObjects.values.foreach{ ao =>
interpreterContext.getAngularObjectRegistry.remove(ao.getName, ao.getNoteId, ao
.getParagraphId)
}
}
node.child.foreach(remove _)
}
/**
* Print into provided print stream
*
* @return
*/
@ZeppelinApi
def display(out: java.io.PrintStream): Unit = {
out.print(this.toString)
out.flush()
}
/**
* Print into InterpreterOutput
*/
@ZeppelinApi
def display(): Unit = {
val out = interpreterContext.out
out.setType(InterpreterResult.Type.ANGULAR)
out.write(this.toString())
out.flush()
}
}
|
spacewalkman/incubator-zeppelin
|
zeppelin-display/src/main/scala/org/apache/zeppelin/display/angular/AbstractAngularElem.scala
|
Scala
|
apache-2.0
| 5,729
|
package models
import akka.actor.ActorRef
import akka.stream.scaladsl.Flow
import play.api.http.websocket._
import play.api.libs.streams.AkkaStreams
import play.api.mvc.WebSocket.MessageFlowTransformer
case class MessageWithSender(contents : String, sender : ActorRef)
object MessageWithSender{
implicit val messageFlowTransformer = new MessageFlowTransformer[MessageWithSender, MessageWithSender] {
def transform(flow: Flow[MessageWithSender, MessageWithSender, _]) = {
AkkaStreams.bypassWith[Message, MessageWithSender, Message](Flow[Message] collect {
case TextMessage(text) => Left(MessageWithSender(text, ActorRef.noSender))
case BinaryMessage(_) =>
Right(CloseMessage(Some(CloseCodes.Unacceptable),
"This WebSocket only supports text frames"))
})(flow.map( msg => TextMessage(msg.contents)))
}
}
}
|
tgodzik/akkajs
|
app/models/MessageWithSender.scala
|
Scala
|
mit
| 868
|
package actors
import actors.FileActor.SaveFileMetadata
import akka.actor.{Actor, ActorLogging, ActorRef}
import akka.pattern._
import com.byteslounge.slickrepo.repository.Repository
import com.google.inject.Inject
import common.ActorNamed
import common.implicits.RichDBIO._
import models.FileMetadata
import scala.concurrent.ExecutionContext
object FileActor extends ActorNamed {
case class SaveFileMetadata(file: FileMetadata, sender: ActorRef)
final val name = "FileActor"
}
class FileActor @Inject()(fileMetadataRepository: Repository[FileMetadata, Int])(
implicit val executionContext: ExecutionContext
) extends Actor
with ActorLogging {
override def receive: Receive = {
case saveFileMetadata: SaveFileMetadata =>
log.info(s"Received a message: [ $saveFileMetadata ]")
fileMetadataRepository.save(saveFileMetadata.file).run.pipeTo(saveFileMetadata.sender)
}
}
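// --- Illustrative usage sketch (not part of the original source) ---
// Assumed wiring: a FileActor reference created elsewhere receives a
// SaveFileMetadata message and pipes the persisted entity back to `requester`.
// `fileActor`, `metadata` and `requester` are hypothetical caller-supplied values.
object FileActorUsageExample {
  def saveAndReply(fileActor: ActorRef, metadata: FileMetadata, requester: ActorRef): Unit =
    fileActor ! FileActor.SaveFileMetadata(metadata, requester)
}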
|
sysgears/apollo-universal-starter-kit
|
modules/upload/server-scala/src/main/scala/actors/FileActor.scala
|
Scala
|
mit
| 904
|
/**
* This file is part of the TA Buddy project.
* Copyright (c) 2014-2015 Alexey Aksenov ezh@ezh.msk.ru
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License version 3
* as published by the Free Software Foundation with the addition of the
* following permission added to Section 15 as permitted in Section 7(a):
* FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED
* BY Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS»,
* Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS» DISCLAIMS
* THE WARRANTY OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Public License for more details.
* You should have received a copy of the GNU Affero General Public License
* along with this program; if not, see http://www.gnu.org/licenses or write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA, 02110-1301 USA, or download the license from the following URL:
* http://www.gnu.org/licenses/agpl.html
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Public License.
*
* In accordance with Section 7(b) of the GNU Affero General Public License,
* you must retain the producer line in every report, form or document
* that is created or manipulated using TA Buddy.
*
* You can be released from the requirements of the license by purchasing
* a commercial license. Buying such a license is mandatory as soon as you
* develop commercial activities involving the TA Buddy software without
* disclosing the source code of your own applications.
* These activities include: offering paid services to customers,
* serving files in a web or/and network application,
* shipping TA Buddy with a closed source product.
*
* For more information, please contact Digimead Team at this
* address: ezh@ezh.msk.ru
*/
package org.digimead.tabuddy.desktop.view.modification.ui.action
import javax.inject.{ Inject, Named }
import org.digimead.digi.lib.aop.log
import org.digimead.digi.lib.log.api.XLoggable
import org.digimead.tabuddy.desktop.core.definition.Context
import org.digimead.tabuddy.desktop.core.support.App
import org.digimead.tabuddy.desktop.core.ui.definition.widget.VComposite
import org.digimead.tabuddy.desktop.logic.Logic
import org.digimead.tabuddy.desktop.view.modification.{ Messages, bundleId }
import org.eclipse.e4.core.contexts.Active
import org.eclipse.e4.core.di.annotations.Optional
import org.eclipse.jface.action.{ Action, IAction }
import org.eclipse.jface.util.{ IPropertyChangeListener, PropertyChangeEvent }
/**
* Hide system elements.
* unchecked - filter is enabled
* checked - filter is disabled
* by default - enabled
*/
class ActionToggleSystem @Inject extends Action(Messages.systemElements_text, IAction.AS_CHECK_BOX) with XLoggable {
setId(ActionToggleSystem.id)
setChecked(false)
setEnabled(false)
/** Last active content context. */
@volatile protected var contentContext = Option.empty[Context]
/** Default value. */
val default = java.lang.Boolean.TRUE
initialize()
def apply() = contentContext match {
case Some(contentContext) ⇒
Option(contentContext.getLocal(Logic.Id.stateOfToggleSystem)) match {
case Some(java.lang.Boolean.TRUE) ⇒
if (!isChecked())
contentContext.set(Logic.Id.stateOfToggleSystem, java.lang.Boolean.FALSE)
case Some(java.lang.Boolean.FALSE) ⇒
if (isChecked())
contentContext.set(Logic.Id.stateOfToggleSystem, java.lang.Boolean.TRUE)
case _ ⇒
contentContext.set(Logic.Id.stateOfToggleSystem, isChecked(): java.lang.Boolean)
}
case None ⇒
setChecked(!isChecked())
}
override def run() = apply()
protected def initialize() {
addPropertyChangeListener(PropertyChangeListener)
}
@Inject @Optional @log
protected def onStateChanged(@Active @Named(Logic.Id.stateOfToggleSystem) state: java.lang.Boolean) = App.exec {
if (isChecked() != state)
setChecked(state)
}
/** Invoked on view activation. */
@Inject @Optional @log
protected def onViewChanged(@Active vComposite: VComposite): Unit = App.exec {
if (vComposite.factory().features.contains(Logic.Feature.viewToggleSystem)) {
if (!isEnabled())
setEnabled(true)
this.contentContext = for (contentContext ← Option(vComposite).flatMap(_.getContentContext())) yield {
Option(contentContext.getLocal(Logic.Id.stateOfToggleSystem)) match {
case Some(java.lang.Boolean.TRUE) ⇒
if (isChecked())
setChecked(true)
case Some(java.lang.Boolean.FALSE) ⇒
if (!isChecked())
setChecked(false)
case _ ⇒
contentContext.set(Logic.Id.stateOfToggleSystem, default)
}
contentContext
}
} else {
this.contentContext = None
if (isEnabled())
setEnabled(false)
if (isChecked())
setChecked(false)
}
}
object PropertyChangeListener extends IPropertyChangeListener {
def propertyChange(e: PropertyChangeEvent) = contentContext.foreach { ctx ⇒
val value = e.getNewValue
if (ctx.getLocal(Logic.Id.stateOfToggleSystem) != value)
ctx.set(Logic.Id.stateOfToggleSystem, value)
}
}
}
object ActionToggleSystem {
val id = bundleId + "#ToggleSystemElements"
}
|
digimead/digi-TABuddy-desktop
|
part-view-modification/src/main/scala/org/digimead/tabuddy/desktop/view/modification/ui/action/ActionToggleSystem.scala
|
Scala
|
agpl-3.0
| 5,776
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.serializer
import java.io.{EOFException, OutputStream, InputStream}
import java.nio.ByteBuffer
import scala.reflect.ClassTag
/**
* A serializer implementation that always returns two elements in a deserialization stream.
*/
class TestSerializer extends Serializer {
override def newInstance(): TestSerializerInstance = new TestSerializerInstance
}
class TestSerializerInstance extends SerializerInstance {
override def serialize[T: ClassTag](t: T): ByteBuffer = throw new UnsupportedOperationException
override def serializeStream(s: OutputStream): SerializationStream =
throw new UnsupportedOperationException
override def deserializeStream(s: InputStream): TestDeserializationStream =
new TestDeserializationStream
override def deserialize[T: ClassTag](bytes: ByteBuffer): T =
throw new UnsupportedOperationException
override def deserialize[T: ClassTag](bytes: ByteBuffer, loader: ClassLoader): T =
throw new UnsupportedOperationException
}
class TestDeserializationStream extends DeserializationStream {
private var count = 0
override def readObject[T: ClassTag](): T = {
count += 1
if (count == 3) {
throw new EOFException
}
new Object().asInstanceOf[T]
}
override def close(): Unit = {}
}
|
tophua/spark1.52
|
core/src/test/scala/org/apache/spark/serializer/TestSerializer.scala
|
Scala
|
apache-2.0
| 2,174
|
trait Higher[F[_]]
trait Box[A]
object Box {
implicit def HigherBox: Higher[Box] = new Higher[Box] {}
}
object Foo {
val box = implicitly[Higher[Box]] // compiles fine !!!
type Bar[A] = Box[A]
val bar = implicitly[Higher[Bar]] // <-- this doesn't compile in 2.10.1-RC1, but does in 2.10.0 !!!
}
|
yusuke2255/dotty
|
tests/untried/pos/t7180.scala
|
Scala
|
bsd-3-clause
| 306
|
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.fs.mount
import org.specs2.scalaz.Spec
import scalaz.scalacheck.ScalazProperties
class MountingErrorSpec extends Spec {
import MountingErrorArbitrary._
checkAll(ScalazProperties.equal.laws[MountingError])
}
|
drostron/quasar
|
core/src/test/scala/quasar/fs/mount/MountingErrorSpec.scala
|
Scala
|
apache-2.0
| 834
|
/*
* Copyright © 2014 TU Berlin (emma@dima.tu-berlin.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.emmalanguage
package compiler.integration.graphs
import api._
import compiler.integration.BaseCompilerIntegrationSpec
import compiler.ir.ComprehensionSyntax._
import compiler.ir.DSCFAnnotations._
import lib.graphs._
class TransitiveClosureIntegrationSpec extends BaseCompilerIntegrationSpec {
import compiler._
import u.reify
// ---------------------------------------------------------------------------
// Program closure
// ---------------------------------------------------------------------------
// input parameters
val input: String = null
val output: String = null
val csv = CSV()
implicit val edgeCSVConverter = CSVConverter[Edge[Int]]
// ---------------------------------------------------------------------------
// Program representations
// ---------------------------------------------------------------------------
val sourceExpr = liftPipeline(reify {
// read an initial collection of edges
val edges = DataBag.readCSV[Edge[Int]](input, csv)
// compute the transitive closure of the edges
val closure = transitiveClosure(edges)
// write the results into a CSV file
closure.writeCSV(output, csv)
})
val coreExpr = anfPipeline(reify {
// read in a directed graph
val input: this.input.type = this.input
val csv$1: this.csv.type = this.csv
val readCSV = DataBag.readCSV[Edge[Int]](input, csv$1)
val paths$1 = readCSV.distinct
val count$1 = paths$1.size
@whileLoop def doWhile$1(added$3: Long, count$3: Long, paths$3: DataBag[Edge[Int]]): Unit = {
val closure = comprehension[Edge[Int], DataBag] {
val e1 = generator[Edge[Int], DataBag](paths$3)
val e2 = generator[Edge[Int], DataBag](paths$3)
guard(e1.dst == e2.src)
head {
Edge(e1.src, e2.dst)
}
}
val paths$2 = (paths$3 union closure).distinct
val added$2 = paths$2.size - count$3
val count$2 = paths$2.size
val isReady = added$2 > 0
@suffix def suffix$1(): Unit = {
val output: this.output.type = this.output
val csv$2: this.csv.type = this.csv
paths$2.writeCSV(output, csv$2)
}
if (isReady) doWhile$1(added$2, count$2, paths$2)
else suffix$1()
}
doWhile$1(0L, count$1, paths$1)
})
// ---------------------------------------------------------------------------
// Specs
// ---------------------------------------------------------------------------
"lifting" in {
sourceExpr shouldBe alphaEqTo(coreExpr)
}
}
|
emmalanguage/emma
|
emma-examples/src/test/scala/org/emmalanguage/compiler/integration/graphs/TransitiveClosureIntegrationSpec.scala
|
Scala
|
apache-2.0
| 3,169
|
package lore.compiler.poem
import lore.compiler.core.CompilationException
object Poem {
case class Register(id: Int) extends AnyVal {
override def toString: String = s"reg$id"
}
object Register {
/**
* Returns the maximum ID of the given registers.
*/
def max(registers: Vector[Register]): Int = registers.map(_.id).max
def max(registers: Register*): Int = max(registers.toVector)
def max(reg0: Register, registers: Vector[Register]): Int = max(reg0 +: registers: _*)
def max(reg0: Register, reg1: Register, registers: Vector[Register]): Int = max(reg0 +: reg1 +: registers: _*)
}
/**
* A label refers to a specific [[PoemInstruction]] and is used to resolve label locations into absolute locations.
*
* @param isPost Post labels aren't resolved to the instruction's location but to the location of the next
* instruction. This can be used to jump to the end of a block without knowing the next instruction.
*/
class Label(val isPost: Boolean = false)
/**
* A Location is either an unresolved label or an absolute program counter position. Label locations are resolved by
* [[lore.compiler.assembly.functions.LabelResolver]] and turned into absolute locations.
*/
sealed trait Location {
def forcePc: Int = this match {
case Poem.AbsoluteLocation(pc) => pc
case Poem.LabelLocation(_) => throw CompilationException("All label locations should have been resolved by now.")
}
}
case class LabelLocation(label: Label) extends Location {
override def toString: String = s"<$label>"
}
case class AbsoluteLocation(pc: Int) extends Location {
override def toString: String = pc.toString
}
/**
* The lowest integer value that may be passed through a `*Const` operation such as `IntConst`.
*/
val minDirectInteger: Long = Short.MinValue
/**
* The highest integer value that may be passed through a `*Const` operation such as `IntConst`.
*/
val maxDirectInteger: Long = Short.MaxValue
/**
* This can be used to attach sorted property fields to a poem entity.
*/
trait SortedProperties[A] {
def properties: Map[String, A]
private lazy val sortedEntries: Vector[(String, A)] = properties.toVector.sortBy(_._1)
lazy val sortedNames: Vector[String] = sortedEntries.map(_._1)
lazy val sortedProperties: Vector[A] = sortedEntries.map(_._2)
}
}
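// --- Illustrative usage sketch (not part of the original source) ---
// Assumed usage of Poem.SortedProperties: a hypothetical entity exposes its
// properties map and gets deterministic, name-sorted access via the trait above.
object PoemSortedPropertiesExample {
  case class DemoEntity(properties: Map[String, Int]) extends Poem.SortedProperties[Int]
  val entity = DemoEntity(Map("b" -> 2, "a" -> 1, "c" -> 3))
  // entity.sortedNames == Vector("a", "b", "c")
  // entity.sortedProperties == Vector(1, 2, 3)
}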
|
marcopennekamp/lore
|
compiler/src/lore/compiler/poem/Poem.scala
|
Scala
|
mit
| 2,428
|
/*
* Copyright ixias.net All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license
* For the full copyright and license information,
* please view the LICENSE file that was distributed with this source code.
*/
package ixias.persistence
import slick.jdbc.JdbcProfile
import ixias.model.{ @@, EntityModel }
import ixias.persistence.lifted._
import ixias.persistence.backend.SlickBackend
import ixias.persistence.action.SlickDBActionProvider
/**
* The profile for persistence with using the Slick library.
*/
trait SlickProfile[P <: JdbcProfile]
extends Profile with SlickDBActionProvider[P] { self =>
/** The type of slick driver */
type Driver = P
/** The type of database objects. */
type Database = P#Backend#Database
/** The back-end type required by this profile */
type Backend = SlickBackend[P]
/** The configured driver. */
protected val driver: Driver
/** The back-end implementation for this profile */
protected lazy val backend = SlickBackend(driver)
/** Database Action Helpers */
protected val DBAction = SlickDBAction
protected val RunDBAction = SlickRunDBAction
/**
* The API for using the utility methods with a single import statement.
* This provides the repository's implicits, the Database connections,
* and commonly types and objects.
*/
trait API extends super.API
with driver.API
with SlickQueryOps
with SlickColumnTypeOps[P]
with SlickRepOps[P] {
lazy val driver = self.driver
}
trait APIUnsafe extends API with SlickRepUnsafeOps[P]
val api: API = new API {}
val apiUnsafe: APIUnsafe = new APIUnsafe {}
}
/**
* The repository for persistence with using the Slick library.
*/
trait SlickRepository[K <: @@[_, _], M <: EntityModel[K], P <: JdbcProfile]
extends Repository[K, M] with SlickProfile[P] {
trait API extends super.API
with SlickDBIOActionOps[K, M]
override val api: API = new API {}
}
|
sp1rytus/ixias
|
framework/ixias-core/src/main/scala/ixias/persistence/SlickRepository.scala
|
Scala
|
mit
| 1,987
|
package com.joshcough.minecraft
import org.bukkit.block.Block
import org.bukkit.Material
import com.joshcough.minecraft.BukkitEnrichment._
import Cube._
// GIANT TODO
// GIANT TODO: force some of the streams in mirroring!
// GIANT TODO
object CubeModifier {
object PotentialChange {
def apply(c: Change) = new PotentialChange(c.b, c.oldM)
def apply(b: Block, m: Material) = new PotentialChange(b, m.andData)
}
/**
* Represents a change that might happen in the world at some later time.
* @param b
* @param newM
*/
case class PotentialChange(b: Block, newM: MaterialAndData){
val oldM = b.materialAndData
def run: Boolean = newM update b
}
type PotentialChanges = Stream[PotentialChange]
/**
* Represents a change that actually took place in the world.
* @param b The block that was changed.
* @param oldM The blocks previous material before it was changed
*/
case class Change(b: Block, oldM: MaterialAndData){
override def toString = s"Change(b:${b.loc.xyz} m:${oldM.m.name})"
}
/**
* Represents a number of changes that actually took place in the world.
* @param cs
*/
case class Changes(cs:Array[Change]){
override def toString = cs.toList.mkString(",")
def size = cs.length
def ++(cs: Changes) = Changes(this.cs ++ cs.cs)
}
/**
* Actually execute some PotentialChanges,
* handing back a Seq of all the changes that really took place.
* (A potential change might not happen if, for example, you try to change AIR to AIR.)
* @param newData
* @return
*/
def runChanges(newData: Seq[PotentialChange]): Changes =
Changes(newData.filter(_.run).map(p => Change(p.b, p.oldM)).toArray)
/**
* TODO: document me!
*/
def getTransformationChanges(cube: Cube[Block],
force: Boolean = false): Stream[PotentialChange] = {
val s = cube.toZippedStream.map{ case (c,b) =>
PotentialChange(cube.world(c.x, c.y, c.z), b.materialAndData)
}
if(force) s.force else s
}
def translateAll(cube: Cube[Block], force: Boolean = false): Changes =
runChanges(getTransformationChanges(cube, force))
/**
* Set all the blocks in this cube to the given Material
*/
def setAll(c: Cube[Block], newM: Material): Changes = setAll(c.blocks, newM.andData)
/**
* Set all the blocks in this stream to the given Material
*/
def setAll(bms: Stream[Block], newM: MaterialAndData) = runChanges(
bms.zip(Stream.continually(newM)).map{ case (b,n) => PotentialChange(b,n) }
)
/**
* Change all the blocks of the old material type to the new material type.
*/
def changeAll(c: Cube[Block], oldM: Material, newM: MaterialAndData): Changes =
setAll(c.blocks.filter(_ is oldM), newM)
/**
* Set all the blocks in this cube to air
* TODO: this really could be removed...
*/
def eraseAll(c: Cube[Block]): Changes = setAll(c.blocks, MaterialAndData.AIR)
}
// case class PotentialSwap(b1: Block, b2: Block){
// def run: Seq[Change] = {
// val oldB1M = b1.materialAndData
// val oldB2M = b2.materialAndData
// List(
// oldB1M.update(b2).toOption(Change(b2, oldB2M)),
// oldB2M.update(b1).toOption(Change(b1, oldB1M))
// ).flatten
// }
// }
// def runSwaps(swaps: Seq[PotentialSwap]): Changes = Changes(swaps.flatMap(_.run).toArray)
//
// def paste(newL1: Location): Changes = Changer.runChanges(run(c.paste(newL1.coor)))
// def pasteMirrorY(newL1: Location): Changes = Changer.runChanges(run(c.paste(newL1.coor).mirrorY))
// /**
// * We have to force these (.force), because if they are run lazily,
// * then a will be replaced with b, and later when b tries to get replaced with a,
// * a's material type is already what b is, so b just gets set to itself.
// * Forcing guarantees that we get the right data values in the list.
// * @return
// */
// def mirrorXChanges: Changes = Changer.runChanges(run(c.mirrorX).force)
// def mirrorYChanges: Changes = Changer.runChanges(run(c.mirrorY).force)
// def mirrorZChanges: Changes = Changer.runChanges(run(c.mirrorZ).force)
//
// /**
// * @param newL1
// */
// def move(newL1: Location): Changes = paste(newL1) ++ setAll(Material.AIR)
//import org.squeryl.{KeyedEntity, Schema}
//import org.squeryl.dsl.{OneToMany, ManyToOne}
//import org.squeryl.PrimitiveTypeMode._
|
JunctionAt/JunctionAPI
|
src/main/scala/com/joshcough/minecraft/CubeModifier.scala
|
Scala
|
agpl-3.0
| 4,411
|
package io.koff.hll.facade
/**
* Utility class for intersection
* @author coffius@gmail.com
*/
object HLLUtils {
/**
* Calculates the count of elements common to all HLLs in the sequence.<br/>
* Adaptation of com.twitter.algebird.HyperLogLogMonoid#intersectionSize.
*
* @param hlls hlls for calcs
* @return count of common elements
*/
def intersection(hlls: HLL*): Long = {
hlls.headOption.map{ head =>
val tail = hlls.tail
head.count + intersection(tail:_*) - intersection(tail.map(_ + head):_*)
}.getOrElse(0L)
}
}
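// --- Illustrative usage sketch (not part of the original source) ---
// The intersection above applies inclusion-exclusion recursively:
// |A ∩ B| = |A| + |B| - |A ∪ B|, generalised to any number of sketches.
// The HLL arguments are assumed to have been built elsewhere with this facade;
// as with the Algebird original, accuracy degrades as more sketches are intersected.
object HLLUtilsExample {
  /** Estimated number of elements present in all three (pre-built) sketches. */
  def commonAcrossThree(a: HLL, b: HLL, c: HLL): Long =
    HLLUtils.intersection(a, b, c)
}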
|
coffius/koffio-hll
|
src/main/scala/io/koff/hll/facade/HLLUtils.scala
|
Scala
|
mit
| 552
|
package mesosphere.marathon
package state
import com.wix.accord._
import com.wix.accord.dsl._
import mesosphere.marathon.api.v2.Validation.isTrue
import mesosphere.marathon.plugin
import scala.annotation.tailrec
import scala.collection.immutable.Seq
case class PathId(path: Seq[String], absolute: Boolean = true) extends Ordered[PathId] with plugin.PathId {
def root: String = path.headOption.getOrElse("")
def rootPath: PathId = PathId(path.headOption.map(_ :: Nil).getOrElse(Nil), absolute)
def tail: Seq[String] = path.tail
def isEmpty: Boolean = path.isEmpty
def isRoot: Boolean = path.isEmpty
def parent: PathId = path match {
case Nil => this
case head +: Nil => PathId(Nil, absolute)
case head +: rest => PathId(path.init, absolute)
}
def allParents: List[PathId] = if (isRoot) Nil else {
val p = parent
p :: p.allParents
}
def child: PathId = PathId(tail)
def append(id: PathId): PathId = PathId(path ++ id.path, absolute)
def append(id: String): PathId = append(PathId(id))
def /(id: String): PathId = append(id)
def restOf(parent: PathId): PathId = {
@tailrec def in(currentPath: Seq[String], parentPath: Seq[String]): Seq[String] = {
if (currentPath.isEmpty) Nil
else if (parentPath.isEmpty || currentPath.headOption != parentPath.headOption) currentPath
else in(currentPath.tail, parentPath.tail)
}
PathId(in(path, parent.path), absolute)
}
def canonicalPath(base: PathId = PathId(Nil, absolute = true)): PathId = {
require(base.absolute, "Base path is not absolute, canonical path can not be computed!")
@tailrec def in(remaining: Seq[String], result: Seq[String] = Nil): Seq[String] = remaining match {
case head +: tail if head == "." => in(tail, result)
case head +: tail if head == ".." => in(tail, if (result.nonEmpty) result.tail else Nil)
case head +: tail => in(tail, head +: result)
case Nil => result.reverse
}
if (absolute) PathId(in(path)) else PathId(in(base.path ++ path))
}
def safePath: String = {
require(absolute, "Path is not absolute. Can not create safe path.")
path.mkString("_")
}
def toHostname: String = path.reverse.mkString(".")
def includes(definition: plugin.PathId): Boolean = {
if (path.size < definition.path.size) return false
path.zip(definition.path).forall { case (left, right) => left == right }
}
override val toString: String = toString("/")
private def toString(delimiter: String): String = path.mkString(if (absolute) delimiter else "", delimiter, "")
override def compare(that: PathId): Int = {
import Ordering.Implicits._
val seqOrder = implicitly(Ordering[Seq[String]])
seqOrder.compare(canonicalPath().path, that.canonicalPath().path)
}
override def equals(obj: Any): Boolean = {
obj match {
case that: PathId => (that eq this) || (that.toString == toString)
case _ => false
}
}
override def hashCode(): Int = toString.hashCode()
}
object PathId {
def fromSafePath(in: String): PathId = {
if (in.isEmpty) PathId.empty
else PathId(in.split("_").toList, absolute = true)
}
def apply(in: String): PathId =
PathId(in.replaceAll("""(^/+)|(/+$)""", "").split("/").filter(_.nonEmpty).toList, in.startsWith("/"))
def empty: PathId = PathId(Nil)
implicit class StringPathId(val stringPath: String) extends AnyVal {
def toPath: PathId = PathId(stringPath)
def toRootPath: PathId = PathId(stringPath).canonicalPath()
}
/**
* This regular expression is used to validate each path segment of an ID.
*
* If you change this, please also change `pathType` in AppDefinition.json, `PathId` in stringTypes.raml, and
* notify the maintainers of the DCOS CLI.
*/
private[this] val ID_PATH_SEGMENT_PATTERN =
"^(([a-z0-9]|[a-z0-9][a-z0-9\\-]*[a-z0-9])\\.)*([a-z0-9]|[a-z0-9][a-z0-9\\-]*[a-z0-9])|(\\.|\\.\\.)$".r
private val validPathChars = new Validator[PathId] {
override def apply(pathId: PathId): Result = {
validate(pathId.path)(validator = pathId.path.each should matchRegexFully(ID_PATH_SEGMENT_PATTERN.pattern))
}
}
/**
* For external usage. Needed to overwrite the whole description, e.g. id.path -> id.
*/
implicit val pathIdValidator = validator[PathId] { path =>
path is childOf(path.parent)
path is validPathChars
}
/**
* Validate path with regards to some parent path.
* @param base Path of parent.
*/
def validPathWithBase(base: PathId): Validator[PathId] = validator[PathId] { path =>
path is childOf(base)
path is validPathChars
}
/**
* Make sure that the given path is a child of the defined parent path.
* Every relative path can be ignored.
*/
private def childOf(parent: PathId): Validator[PathId] = {
isTrue[PathId](s"Identifier is not child of $parent. Hint: use relative paths.") { child =>
!parent.absolute || (child.canonicalPath(parent).parent == parent)
}
}
/**
* Makes sure, the path is not only the root path and is not empty.
*/
val nonEmptyPath = isTrue[PathId]("Path must contain at least one path element") { _.path.nonEmpty }
/**
* Needed for AppDefinitionValidatorTest.testSchemaLessStrictForId.
*/
val absolutePathValidator = isTrue[PathId]("Path needs to be absolute") { _.absolute }
}
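// --- Illustrative usage sketch (not part of the original source) ---
// Assumed usage of PathId and the StringPathId enrichment defined above;
// the concrete paths are made up for illustration.
object PathIdExamples {
  import PathId.StringPathId
  val app: PathId = "/prod/db/mongo".toPath                    // absolute: prod / db / mongo
  val sibling: PathId = PathId("../redis").canonicalPath(app.parent)
  // sibling.toString == "/prod/redis"
  // app.safePath == "prod_db_mongo", app.toHostname == "mongo.db.prod"
}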
|
natemurthy/marathon
|
src/main/scala/mesosphere/marathon/state/PathId.scala
|
Scala
|
apache-2.0
| 5,371
|
package is.hail.expr.ir
import is.hail.HailContext
import is.hail.expr.JSONAnnotationImpex
import is.hail.expr.ir.agg._
import is.hail.expr.ir.functions.RelationalFunctions
import is.hail.types.TableType
import is.hail.types.virtual.{TArray, TInterval, TStream, Type}
import is.hail.utils.prettyPrint._
import is.hail.utils.richUtils.RichIterable
import is.hail.utils.{space => _, _}
import org.json4s.jackson.{JsonMethods, Serialization}
import scala.collection.mutable
object Pretty {
def apply(ir: BaseIR, width: Int = 100, ribbonWidth: Int = 50, elideLiterals: Boolean = true, maxLen: Int = -1, allowUnboundRefs: Boolean = false): String = {
val useSSA = HailContext.getFlag("use_ssa_logs") != null
val pretty = new Pretty(width, ribbonWidth, elideLiterals, maxLen, allowUnboundRefs, useSSA)
pretty(ir)
}
def sexprStyle(ir: BaseIR, width: Int = 100, ribbonWidth: Int = 50, elideLiterals: Boolean = true, maxLen: Int = -1, allowUnboundRefs: Boolean = false): String = {
val pretty = new Pretty(width, ribbonWidth, elideLiterals, maxLen, allowUnboundRefs, useSSA = false)
pretty(ir)
}
def ssaStyle(ir: BaseIR, width: Int = 100, ribbonWidth: Int = 50, elideLiterals: Boolean = true, maxLen: Int = -1, allowUnboundRefs: Boolean = false): String = {
val pretty = new Pretty(width, ribbonWidth, elideLiterals, maxLen, allowUnboundRefs, useSSA = true)
pretty(ir)
}
def prettyBooleanLiteral(b: Boolean): String =
if (b) "True" else "False"
def prettyClass(x: AnyRef): String =
x.getClass.getName.split("\\.").last
}
class Pretty(width: Int, ribbonWidth: Int, elideLiterals: Boolean, maxLen: Int, allowUnboundRefs: Boolean, useSSA: Boolean) {
def short(ir: BaseIR): String = {
val s = apply(ir)
if (s.length < maxLen) s else s.substring(0, maxLen) + "..."
}
def prettyStringLiteral(s: String, elide: Boolean = false, maxLen: Int = 1000): String = {
val esc = StringEscapeUtils.escapeString(s)
if (elide && esc.length > maxLen) {
s""""${esc.substring(0, maxLen)}...""""
} else {
s""""$esc""""
}
}
def fillList(docs: Iterable[Doc], indent: Int = 2): Doc =
group(lineAlt, nest(indent, prettyPrint.fillList(docs)))
def fillList(docs: Doc*): Doc = fillList(docs)
def list(docs: Iterable[Doc], indent: Int = 2): Doc =
nest(indent, prettyPrint.list(docs))
def list(docs: Doc*): Doc = list(docs)
def prettyStrings(xs: IndexedSeq[String]): Doc =
fillList(xs.view.map(x => text(prettyStringLiteral(x))))
def prettyStringsOpt(x: Option[IndexedSeq[String]]): Doc =
x.map(prettyStrings).getOrElse("None")
def prettyTypes(x: Seq[Type]): Doc =
fillList(x.view.map(typ => text(typ.parsableString())))
def prettySortFields(x: Seq[SortField]): Doc =
fillList(x.view.map(typ => text(typ.parsableString())))
def prettySortFieldsString(x: Seq[SortField]): String =
x.view.map(_.parsableString()).mkString("(", " ", ")")
val MAX_VALUES_TO_LOG: Int = 25
def prettyIntOpt(x: Option[Int]): String =
x.map(_.toString).getOrElse("None")
def prettyLongs(x: IndexedSeq[Long], elideLiterals: Boolean): Doc = {
val truncate = elideLiterals && x.length > MAX_VALUES_TO_LOG
val view = if (truncate) x.view else x.view(0, MAX_VALUES_TO_LOG)
val docs = view.map(i => text(i.toString))
concat(docs.intersperse[Doc](
"(",
softline,
if (truncate) s"... ${ x.length - MAX_VALUES_TO_LOG } more values... )" else ")"))
}
def prettyInts(x: IndexedSeq[Int], elideLiterals: Boolean): Doc = {
val truncate = elideLiterals && x.length > MAX_VALUES_TO_LOG
val view = if (truncate) x.view else x.view(0, MAX_VALUES_TO_LOG)
val docs = view.map(i => text(i.toString))
concat(docs.intersperse[Doc](
"(",
softline,
if (truncate) s"... ${ x.length - MAX_VALUES_TO_LOG } more values... )" else ")"))
}
def prettyIdentifiers(x: IndexedSeq[String]): Doc =
fillList(x.view.map(text))
def prettyAggStateSignatures(states: Seq[AggStateSig]): Doc =
list(states.view.map(prettyAggStateSignature))
def prettyAggStateSignature(state: AggStateSig): Doc = {
state match {
case FoldStateSig(resultEmitType, accumName, otherAccumName, combOpIR) =>
fillList(IndexedSeq(text(Pretty.prettyClass(state)), text(resultEmitType.typeWithRequiredness.canonicalPType.toString),
text(accumName), text(otherAccumName), text(apply(combOpIR))))
case _ =>
fillList(state.n match {
case None => text(Pretty.prettyClass(state)) +: state.t.view.map(typ => text(typ.canonicalPType.toString))
case Some(nested) => text(Pretty.prettyClass(state)) +: state.t.view.map(typ => text(typ.canonicalPType.toString)) :+ prettyAggStateSignatures(nested)
})
}
}
def prettyPhysicalAggSigs(aggSigs: Seq[PhysicalAggSig]): Doc =
list(aggSigs.view.map(prettyPhysicalAggSig))
def prettyPhysicalAggSig(aggSig: PhysicalAggSig): Doc = {
aggSig match {
case GroupedAggSig(t, nested) =>
fillList("Grouped", t.canonicalPType.toString, prettyPhysicalAggSigs(nested))
case ArrayLenAggSig(kl, nested) =>
fillList("ArrayLen", Pretty.prettyBooleanLiteral(kl), prettyPhysicalAggSigs(nested))
case AggElementsAggSig(nested) =>
fillList("AggElements", prettyPhysicalAggSigs(nested))
case PhysicalAggSig(op, state) =>
fillList(Pretty.prettyClass(op), prettyAggStateSignature(state))
}
}
def single(d: Doc): Iterable[Doc] = RichIterable.single(d)
def header(ir: BaseIR, elideBindings: Boolean = false): Iterable[Doc] = ir match {
case ApplyAggOp(initOpArgs, seqOpArgs, aggSig) => single(Pretty.prettyClass(aggSig.op))
case ApplyScanOp(initOpArgs, seqOpArgs, aggSig) => single(Pretty.prettyClass(aggSig.op))
case InitOp(i, args, aggSig) => FastSeq(i.toString, prettyPhysicalAggSig(aggSig))
case SeqOp(i, args, aggSig) => FastSeq(i.toString, prettyPhysicalAggSig(aggSig))
case CombOp(i1, i2, aggSig) => FastSeq(i1.toString, i2.toString, prettyPhysicalAggSig(aggSig))
case ResultOp(i, aggSig) => FastSeq(i.toString, prettyPhysicalAggSig(aggSig))
case AggStateValue(i, sig) => FastSeq(i.toString, prettyAggStateSignature(sig))
case InitFromSerializedValue(i, value, aggSig) =>
FastSeq(i.toString, prettyAggStateSignature(aggSig))
case CombOpValue(i, value, sig) => FastSeq(i.toString, prettyPhysicalAggSig(sig))
case SerializeAggs(i, i2, spec, aggSigs) =>
FastSeq(i.toString, i2.toString, prettyStringLiteral(spec.toString), prettyAggStateSignatures(aggSigs))
case DeserializeAggs(i, i2, spec, aggSigs) =>
FastSeq(i.toString, i2.toString, prettyStringLiteral(spec.toString), prettyAggStateSignatures(aggSigs))
case RunAgg(body, result, signature) => single(prettyAggStateSignatures(signature))
case RunAggScan(a, name, init, seq, res, signature) =>
FastSeq(prettyIdentifier(name), prettyAggStateSignatures(signature))
case I32(x) => single(x.toString)
case I64(x) => single(x.toString)
case F32(x) => single(x.toString)
case F64(x) => single(x.toString)
case Str(x) => single(prettyStringLiteral(if (elideLiterals && x.length > 13) x.take(10) + "..." else x))
case UUID4(id) => single(prettyIdentifier(id))
case Cast(_, typ) => single(typ.parsableString())
case CastRename(_, typ) => single(typ.parsableString())
case NA(typ) => single(typ.parsableString())
case Literal(typ, value) =>
FastSeq(typ.parsableString(),
if (!elideLiterals)
prettyStringLiteral(JsonMethods.compact(JSONAnnotationImpex.exportAnnotation(value, typ)))
else
"<literal value>")
case EncodedLiteral(codec, _) => single(codec.encodedVirtualType.parsableString())
case Let(name, _, _) if !elideBindings => single(prettyIdentifier(name))
case AggLet(name, _, _, isScan) => if (elideBindings)
single(Pretty.prettyBooleanLiteral(isScan))
else
FastSeq(prettyIdentifier(name), Pretty.prettyBooleanLiteral(isScan))
case TailLoop(name, args, _) if !elideBindings =>
FastSeq(prettyIdentifier(name), prettyIdentifiers(args.map(_._1).toFastIndexedSeq))
case Recur(name, _, t) => if (elideBindings)
single(t.parsableString())
else
FastSeq(prettyIdentifier(name), t.parsableString())
// case Ref(name, t) if t != null => FastSeq(prettyIdentifier(name), t.parsableString()) // For debug purposes
case Ref(name, _) => single(prettyIdentifier(name))
case RelationalRef(name, t) => if (elideBindings)
single(t.parsableString())
else
FastSeq(prettyIdentifier(name), t.parsableString())
case RelationalLet(name, _, _) if !elideBindings => single(prettyIdentifier(name))
case ApplyBinaryPrimOp(op, _, _) => single(Pretty.prettyClass(op))
case ApplyUnaryPrimOp(op, _) => single(Pretty.prettyClass(op))
case ApplyComparisonOp(op, _, _) => single(op.render())
case GetField(_, name) => single(prettyIdentifier(name))
case GetTupleElement(_, idx) => single(idx.toString)
case MakeTuple(fields) => FastSeq(prettyInts(fields.map(_._1).toFastIndexedSeq, elideLiterals))
case MakeArray(_, typ) => single(typ.parsableString())
case MakeStream(_, typ, requiresMemoryManagementPerElement) =>
FastSeq(typ.parsableString(), Pretty.prettyBooleanLiteral(requiresMemoryManagementPerElement))
case StreamIota(_, _, requiresMemoryManagementPerElement) => FastSeq(Pretty.prettyBooleanLiteral(requiresMemoryManagementPerElement))
case StreamRange(_, _, _, requiresMemoryManagementPerElement, errorID) => FastSeq(errorID.toString, Pretty.prettyBooleanLiteral(requiresMemoryManagementPerElement))
case ToStream(_, requiresMemoryManagementPerElement) => single(Pretty.prettyBooleanLiteral(requiresMemoryManagementPerElement))
case StreamMap(_, name, _) if !elideBindings => single(prettyIdentifier(name))
case StreamZip(_, names, _, behavior, errorID) => if (elideBindings)
FastSeq(errorID.toString, behavior match {
case ArrayZipBehavior.AssertSameLength => "AssertSameLength"
case ArrayZipBehavior.TakeMinLength => "TakeMinLength"
case ArrayZipBehavior.ExtendNA => "ExtendNA"
case ArrayZipBehavior.AssumeSameLength => "AssumeSameLength"
})
else
FastSeq(errorID.toString, behavior match {
case ArrayZipBehavior.AssertSameLength => "AssertSameLength"
case ArrayZipBehavior.TakeMinLength => "TakeMinLength"
case ArrayZipBehavior.ExtendNA => "ExtendNA"
case ArrayZipBehavior.AssumeSameLength => "AssumeSameLength"
}, prettyIdentifiers(names))
case StreamZipJoin(_, key, curKey, curVals, _) if !elideBindings =>
FastSeq(prettyIdentifiers(key), prettyIdentifier(curKey), prettyIdentifier(curVals))
case StreamMultiMerge(_, key) => single(prettyIdentifiers(key))
case StreamFilter(_, name, _) if !elideBindings => single(prettyIdentifier(name))
case StreamTakeWhile(_, name, _) if !elideBindings => single(prettyIdentifier(name))
case StreamDropWhile(_, name, _) if !elideBindings => single(prettyIdentifier(name))
case StreamFlatMap(_, name, _) if !elideBindings => single(prettyIdentifier(name))
case StreamFold(_, _, accumName, valueName, _) if !elideBindings => FastSeq(prettyIdentifier(accumName), prettyIdentifier(valueName))
case StreamFold2(_, acc, valueName, _, _) if !elideBindings => FastSeq(prettyIdentifiers(acc.map(_._1)), prettyIdentifier(valueName))
case StreamScan(_, _, accumName, valueName, _) if !elideBindings => FastSeq(prettyIdentifier(accumName), prettyIdentifier(valueName))
case StreamJoinRightDistinct(_, _, lKey, rKey, l, r, _, joinType) => if (elideBindings)
FastSeq(prettyIdentifiers(lKey), prettyIdentifiers(rKey), joinType)
else
FastSeq(prettyIdentifiers(lKey), prettyIdentifiers(rKey), prettyIdentifier(l), prettyIdentifier(r), joinType)
case StreamFor(_, valueName, _) if !elideBindings => single(prettyIdentifier(valueName))
case StreamAgg(a, name, query) if !elideBindings => single(prettyIdentifier(name))
case StreamAggScan(a, name, query) if !elideBindings => single(prettyIdentifier(name))
case StreamGroupByKey(a, key) => single(prettyIdentifiers(key))
case AggFold(_, _, _, accumName, otherAccumName, isScan) => if (elideBindings)
single(Pretty.prettyBooleanLiteral(isScan))
else
FastSeq(prettyIdentifier(accumName), prettyIdentifier(otherAccumName), Pretty.prettyBooleanLiteral(isScan))
case AggExplode(_, name, _, isScan) => if (elideBindings)
single(Pretty.prettyBooleanLiteral(isScan))
else
FastSeq(prettyIdentifier(name), Pretty.prettyBooleanLiteral(isScan))
case AggFilter(_, _, isScan) => single(Pretty.prettyBooleanLiteral(isScan))
case AggGroupBy(_, _, isScan) => single(Pretty.prettyBooleanLiteral(isScan))
case AggArrayPerElement(_, elementName, indexName, _, knownLength, isScan) => if (elideBindings)
FastSeq(Pretty.prettyBooleanLiteral(isScan), Pretty.prettyBooleanLiteral(knownLength.isDefined))
else
FastSeq(prettyIdentifier(elementName), prettyIdentifier(indexName), Pretty.prettyBooleanLiteral(isScan), Pretty.prettyBooleanLiteral(knownLength.isDefined))
case NDArrayMap(_, name, _) if !elideBindings => single(prettyIdentifier(name))
case NDArrayMap2(_, _, lName, rName, _, errorID) => if (elideBindings)
single(s"$errorID")
else
FastSeq(s"$errorID", prettyIdentifier(lName), prettyIdentifier(rName))
case NDArrayReindex(_, indexExpr) => single(prettyInts(indexExpr, elideLiterals))
case NDArrayConcat(_, axis) => single(axis.toString)
case NDArrayAgg(_, axes) => single(prettyInts(axes, elideLiterals))
case NDArrayRef(_, _, errorID) => single(s"$errorID")
case NDArrayReshape(_, _, errorID) => single(s"$errorID")
case NDArrayMatMul(_, _, errorID) => single(s"$errorID")
case NDArrayQR(_, mode, errorID) => FastSeq(errorID.toString, mode)
case NDArraySVD(_, fullMatrices, computeUV, errorID) => FastSeq(errorID.toString, fullMatrices.toString, computeUV.toString)
case NDArrayInv(_, errorID) => single(s"$errorID")
case ArraySort(_, l, r, _) if !elideBindings => FastSeq(prettyIdentifier(l), prettyIdentifier(r))
case ArrayRef(_,_, errorID) => single(s"$errorID")
case ApplyIR(function, typeArgs, _, errorID) => FastSeq(s"$errorID", prettyIdentifier(function), prettyTypes(typeArgs), ir.typ.parsableString())
case Apply(function, typeArgs, _, t, errorID) => FastSeq(s"$errorID", prettyIdentifier(function), prettyTypes(typeArgs), t.parsableString())
case ApplySeeded(function, _, seed, t) => FastSeq(prettyIdentifier(function), seed.toString, t.parsableString())
case ApplySpecial(function, typeArgs, _, t, errorID) => FastSeq(s"$errorID", prettyIdentifier(function), prettyTypes(typeArgs), t.parsableString())
case SelectFields(_, fields) => single(fillList(fields.view.map(f => text(prettyIdentifier(f)))))
case LowerBoundOnOrderedCollection(_, _, onKey) => single(Pretty.prettyBooleanLiteral(onKey))
case In(i, typ) => FastSeq(typ.toString, i.toString)
case Die(message, typ, errorID) => FastSeq(typ.parsableString(), errorID.toString)
case CollectDistributedArray(_, _, cname, gname, _, _) if !elideBindings =>
FastSeq(prettyIdentifier(cname), prettyIdentifier(gname))
case MatrixRead(typ, dropCols, dropRows, reader) =>
FastSeq(if (typ == reader.fullMatrixType) "None" else typ.parsableString(),
Pretty.prettyBooleanLiteral(dropCols),
Pretty.prettyBooleanLiteral(dropRows),
if (elideLiterals) reader.renderShort() else '"' + StringEscapeUtils.escapeString(JsonMethods.compact(reader.toJValue)) + '"')
case MatrixWrite(_, writer) =>
single('"' + StringEscapeUtils.escapeString(Serialization.write(writer)(MatrixWriter.formats)) + '"')
case MatrixMultiWrite(_, writer) =>
single('"' + StringEscapeUtils.escapeString(Serialization.write(writer)(MatrixNativeMultiWriter.formats)) + '"')
case BlockMatrixRead(reader) =>
single('"' + StringEscapeUtils.escapeString(JsonMethods.compact(reader.toJValue)) + '"')
case BlockMatrixWrite(_, writer) =>
single('"' + StringEscapeUtils.escapeString(Serialization.write(writer)(BlockMatrixWriter.formats)) + '"')
case BlockMatrixMultiWrite(_, writer) =>
single('"' + StringEscapeUtils.escapeString(Serialization.write(writer)(BlockMatrixWriter.formats)) + '"')
case BlockMatrixBroadcast(_, inIndexExpr, shape, blockSize) =>
FastSeq(prettyInts(inIndexExpr, elideLiterals),
prettyLongs(shape, elideLiterals),
blockSize.toString)
case BlockMatrixAgg(_, outIndexExpr) => single(prettyInts(outIndexExpr, elideLiterals))
case BlockMatrixSlice(_, slices) =>
single(fillList(slices.view.map(slice => prettyLongs(slice, elideLiterals))))
case ValueToBlockMatrix(_, shape, blockSize) =>
FastSeq(prettyLongs(shape, elideLiterals), blockSize.toString)
case BlockMatrixFilter(_, indicesToKeepPerDim) =>
single(fillList(indicesToKeepPerDim.toSeq.view.map(indices => prettyLongs(indices, elideLiterals))))
case BlockMatrixSparsify(_, sparsifier) =>
single(sparsifier.pretty())
case BlockMatrixRandom(seed, gaussian, shape, blockSize) =>
FastSeq(seed.toString,
Pretty.prettyBooleanLiteral(gaussian),
prettyLongs(shape, elideLiterals),
blockSize.toString)
case BlockMatrixMap(_, name, _, needsDense) => if (elideBindings)
single(Pretty.prettyBooleanLiteral(needsDense))
else
FastSeq(prettyIdentifier(name), Pretty.prettyBooleanLiteral(needsDense))
case BlockMatrixMap2(_, _, lName, rName, _, sparsityStrategy) => if (elideBindings)
single(Pretty.prettyClass(sparsityStrategy))
else
FastSeq(prettyIdentifier(lName), prettyIdentifier(rName), Pretty.prettyClass(sparsityStrategy))
case MatrixRowsHead(_, n) => single(n.toString)
case MatrixColsHead(_, n) => single(n.toString)
case MatrixRowsTail(_, n) => single(n.toString)
case MatrixColsTail(_, n) => single(n.toString)
case MatrixAnnotateRowsTable(_, _, uid, product) =>
FastSeq(prettyStringLiteral(uid), Pretty.prettyBooleanLiteral(product))
case MatrixAnnotateColsTable(_, _, uid) => single(prettyStringLiteral(uid))
case MatrixExplodeRows(_, path) => single(prettyIdentifiers(path))
case MatrixExplodeCols(_, path) => single(prettyIdentifiers(path))
case MatrixRepartition(_, n, strategy) => single(s"$n $strategy")
case MatrixChooseCols(_, oldIndices) => single(prettyInts(oldIndices, elideLiterals))
case MatrixMapCols(_, _, newKey) => single(prettyStringsOpt(newKey))
case MatrixUnionCols(l, r, joinType) => single(joinType)
case MatrixKeyRowsBy(_, keys, isSorted) =>
FastSeq(prettyIdentifiers(keys), Pretty.prettyBooleanLiteral(isSorted))
case TableRead(typ, dropRows, tr) =>
FastSeq(if (typ == tr.fullType) "None" else typ.parsableString(),
Pretty.prettyBooleanLiteral(dropRows),
if (elideLiterals) tr.renderShort() else '"' + StringEscapeUtils.escapeString(JsonMethods.compact(tr.toJValue)) + '"')
case TableWrite(_, writer) =>
single('"' + StringEscapeUtils.escapeString(Serialization.write(writer)(TableWriter.formats)) + '"')
case TableMultiWrite(_, writer) =>
single('"' + StringEscapeUtils.escapeString(Serialization.write(writer)(WrappedMatrixNativeMultiWriter.formats)) + '"')
case TableKeyBy(_, keys, isSorted) =>
FastSeq(prettyIdentifiers(keys), Pretty.prettyBooleanLiteral(isSorted))
case TableRange(n, nPartitions) => FastSeq(n.toString, nPartitions.toString)
case TableRepartition(_, n, strategy) => FastSeq(n.toString, strategy.toString)
case TableHead(_, n) => single(n.toString)
case TableTail(_, n) => single(n.toString)
case TableJoin(_, _, joinType, joinKey) => FastSeq(joinType, joinKey.toString)
case TableLeftJoinRightDistinct(_, _, root) => single(prettyIdentifier(root))
case TableIntervalJoin(_, _, root, product) =>
FastSeq(prettyIdentifier(root), Pretty.prettyBooleanLiteral(product))
case TableMultiWayZipJoin(_, dataName, globalName) =>
FastSeq(prettyStringLiteral(dataName), prettyStringLiteral(globalName))
case TableKeyByAndAggregate(_, _, _, nPartitions, bufferSize) =>
FastSeq(prettyIntOpt(nPartitions), bufferSize.toString)
case TableExplode(_, path) => single(prettyStrings(path))
case TableMapPartitions(_, g, p, _) => FastSeq(prettyIdentifier(g), prettyIdentifier(p))
case TableParallelize(_, nPartitions) => single(prettyIntOpt(nPartitions))
case TableOrderBy(_, sortFields) => single(prettySortFields(sortFields))
case CastMatrixToTable(_, entriesFieldName, colsFieldName) =>
FastSeq(prettyStringLiteral(entriesFieldName), prettyStringLiteral(colsFieldName))
case CastTableToMatrix(_, entriesFieldName, colsFieldName, colKey) =>
FastSeq(prettyIdentifier(entriesFieldName), prettyIdentifier(colsFieldName), prettyIdentifiers(colKey))
case MatrixToMatrixApply(_, function) =>
single(prettyStringLiteral(Serialization.write(function)(RelationalFunctions.formats)))
case MatrixToTableApply(_, function) =>
single(prettyStringLiteral(Serialization.write(function)(RelationalFunctions.formats)))
case TableToTableApply(_, function) =>
single(prettyStringLiteral(JsonMethods.compact(function.toJValue)))
case TableToValueApply(_, function) =>
single(prettyStringLiteral(Serialization.write(function)(RelationalFunctions.formats)))
case MatrixToValueApply(_, function) =>
single(prettyStringLiteral(Serialization.write(function)(RelationalFunctions.formats)))
case BlockMatrixToValueApply(_, function) =>
single(prettyStringLiteral(Serialization.write(function)(RelationalFunctions.formats)))
case BlockMatrixToTableApply(_, _, function) =>
single(prettyStringLiteral(Serialization.write(function)(RelationalFunctions.formats)))
case TableRename(_, rowMap, globalMap) =>
val rowKV = rowMap.toArray
val globalKV = globalMap.toArray
FastSeq(prettyStrings(rowKV.map(_._1)), prettyStrings(rowKV.map(_._2)),
prettyStrings(globalKV.map(_._1)), prettyStrings(globalKV.map(_._2)))
case MatrixRename(_, globalMap, colMap, rowMap, entryMap) =>
val globalKV = globalMap.toArray
val colKV = colMap.toArray
val rowKV = rowMap.toArray
val entryKV = entryMap.toArray
FastSeq(prettyStrings(globalKV.map(_._1)), prettyStrings(globalKV.map(_._2)),
prettyStrings(colKV.map(_._1)), prettyStrings(colKV.map(_._2)),
prettyStrings(rowKV.map(_._1)), prettyStrings(rowKV.map(_._2)),
prettyStrings(entryKV.map(_._1)), prettyStrings(entryKV.map(_._2)))
case TableFilterIntervals(child, intervals, keep) =>
FastSeq(
prettyStringLiteral(Serialization.write(
JSONAnnotationImpex.exportAnnotation(intervals, TArray(TInterval(child.typ.keyType)))
)(RelationalSpec.formats)),
Pretty.prettyBooleanLiteral(keep))
case MatrixFilterIntervals(child, intervals, keep) =>
FastSeq(
prettyStringLiteral(Serialization.write(
JSONAnnotationImpex.exportAnnotation(intervals, TArray(TInterval(child.typ.rowKeyStruct)))
)(RelationalSpec.formats)),
Pretty.prettyBooleanLiteral(keep))
case RelationalLetTable(name, _, _) => single(prettyIdentifier(name))
case RelationalLetMatrixTable(name, _, _) => single(prettyIdentifier(name))
case RelationalLetBlockMatrix(name, _, _) => single(prettyIdentifier(name))
case ReadPartition(_, rowType, reader) =>
FastSeq(rowType.parsableString(),
prettyStringLiteral(JsonMethods.compact(reader.toJValue)))
case WritePartition(value, writeCtx, writer) =>
single(prettyStringLiteral(JsonMethods.compact(writer.toJValue)))
case WriteMetadata(writeAnnotations, writer) =>
single(prettyStringLiteral(JsonMethods.compact(writer.toJValue), elide = elideLiterals))
case ReadValue(_, spec, reqType) =>
FastSeq(prettyStringLiteral(spec.toString), reqType.parsableString())
case WriteValue(_, _, spec) => single(prettyStringLiteral(spec.toString))
case MakeNDArray(_, _, _, errorId) => FastSeq(errorId.toString)
case _ => Iterable.empty
}
def apply(ir: BaseIR): String = if (useSSA)
ssaStyle(ir)
else
sexprStyle(ir)
def sexprStyle(ir: BaseIR): String = {
def prettySeq(xs: Seq[BaseIR]): Doc =
list(xs.view.map(pretty))
def pretty(ir: BaseIR): Doc = {
val body: Iterable[Doc] = ir match {
case MakeStruct(fields) =>
fields.view.map { case (n, a) =>
list(n, pretty(a))
}
case ApplyAggOp(initOpArgs, seqOpArgs, aggSig) =>
FastSeq(prettySeq(initOpArgs), prettySeq(seqOpArgs))
case ApplyScanOp(initOpArgs, seqOpArgs, aggSig) =>
FastSeq(prettySeq(initOpArgs), prettySeq(seqOpArgs))
case InitOp(i, args, aggSig) => single(prettySeq(args))
case SeqOp(i, args, aggSig) => single(prettySeq(args))
case InsertFields(old, fields, fieldOrder) =>
val fieldDocs = fields.view.map { case (n, a) =>
list(prettyIdentifier(n), pretty(a))
}
pretty(old) +: prettyStringsOpt(fieldOrder) +: fieldDocs
case _ => ir.children.view.map(pretty)
}
/*
val pt = ir match{
case ir: IR => if (ir._pType != null) single(ir.pType.toString)
case _ => Iterable.empty
}
list(fillSep(text(prettyClass(ir)) +: pt ++ header(ir, elideLiterals)) +: body)
*/
list(fillSep(text(Pretty.prettyClass(ir)) +: header(ir, elideLiterals)) +: body)
}
pretty(ir).render(width, ribbonWidth, maxLen)
}
def ssaStyle(ir: BaseIR): String = {
def childIsStrict(ir: BaseIR, i: Int): Boolean = blockArgs(ir, i).isEmpty
def blockArgs(ir: BaseIR, i: Int): Option[IndexedSeq[(String, String)]] = ir match {
case If(_, _, _) =>
if (i > 0) Some(FastIndexedSeq()) else None
case TailLoop(name, args, body) => if (i == args.length)
Some(args.map { case (name, ir) => name -> "loopvar" } :+
name -> "loop") else None
case StreamMap(a, name, _) =>
if (i == 1) Some(Array(name -> "elt")) else None
case StreamZip(as, names, _, _, _) =>
if (i == as.length) Some(names.map(_ -> "elt")) else None
case StreamZipJoin(as, key, curKey, curVals, _) =>
if (i == as.length)
Some(Array(curKey -> "key", curVals -> "elts"))
else
None
case StreamFor(a, name, _) =>
if (i == 1) Some(Array(name -> "elt")) else None
case StreamFlatMap(a, name, _) =>
if (i == 1) Some(Array(name -> "elt")) else None
case StreamFilter(a, name, _) =>
if (i == 1) Some(Array(name -> "elt")) else None
case StreamTakeWhile(a, name, _) =>
if (i == 1) Some(Array(name -> "elt")) else None
case StreamDropWhile(a, name, _) =>
if (i == 1) Some(Array(name -> "elt")) else None
case StreamFold(a, zero, accumName, valueName, _) =>
if (i == 2) Some(Array(accumName -> "accum", valueName -> "elt")) else None
case StreamFold2(a, accum, valueName, seq, result) =>
if (i <= accum.length)
None
else if (i < 2 * accum.length + 1)
Some(Array(valueName -> "elt") ++ accum.map { case (name, value) => name -> "accum" })
else
Some(accum.map { case (name, value) => name -> "accum" })
case RunAggScan(a, name, _, _, _, _) =>
if (i == 2 || i == 3) Some(Array(name -> "elt")) else None
case StreamScan(a, zero, accumName, valueName, _) =>
if (i == 2) Some(Array(accumName -> "accum", valueName -> "elt")) else None
case StreamAggScan(a, name, _) =>
if (i == 1) Some(FastIndexedSeq(name -> "elt")) else None
case StreamJoinRightDistinct(ll, rr, _, _, l, r, _, _) =>
if (i == 2) Some(Array(l -> "l_elt", r -> "r_elt")) else None
case ArraySort(a, left, right, _) =>
if (i == 1) Some(Array(left -> "l", right -> "r")) else None
case AggArrayPerElement(_, elementName, indexName, _, _, _) =>
if (i == 1) Some(Array(elementName -> "elt", indexName -> "idx")) else None
case AggFold(zero, seqOp, combOp, accumName, otherAccumName, _) => {
if (i == 1) Some(Array(accumName -> "accum"))
else if (i == 2) Some(Array(accumName -> "l", otherAccumName -> "r"))
else None
}
case NDArrayMap(nd, name, _) =>
if (i == 1) Some(Array(name -> "elt")) else None
case NDArrayMap2(l, r, lName, rName, _, _) => if (i == 2)
Some(Array(lName -> "l_elt", rName -> "r_elt"))
else
None
case CollectDistributedArray(contexts, globals, cname, gname, _, _) =>
if (i == 2) Some(Array(cname -> "ctx", gname -> "g")) else None
case TableAggregate(child, _) =>
if (i == 1) Some(Array("global" -> "g", "row" -> "row")) else None
case MatrixAggregate(child, _) =>
if (i == 1) Some(Array("global" -> "g", "sa" -> "col", "va" -> "row", "g" -> "entry")) else None
case TableFilter(child, _) =>
if (i == 1) Some(Array("global" -> "g", "row" -> "row")) else None
case TableMapGlobals(child, _) =>
if (i == 1) Some(Array("global" -> "g")) else None
case TableMapRows(child, _) =>
if (i == 1) Some(Array("global" -> "g", "row" -> "row")) else None
case TableAggregateByKey(child, _) =>
if (i == 1) Some(Array("global" -> "g", "row" -> "row")) else None
case TableKeyByAndAggregate(child, _, _, _, _) =>
if (i == 1 || i == 2)
Some(Array("global" -> "g", "row" -> "row"))
else None
case TableMapPartitions(child, g, p, _) =>
if (i == 1) Some(Array(g -> "g", p -> "part")) else None
case MatrixMapRows(child, _) =>
if (i == 1) Some(Array("global" -> "g", "va" -> "row", "sa" -> "col", "g" -> "entry", "n_cols" -> "n_cols")) else None
case MatrixFilterRows(child, _) =>
if (i == 1) Some(Array("global" -> "g", "va" -> "row")) else None
case MatrixMapCols(child, _, _) =>
if (i == 1) Some(Array("global" -> "g", "va" -> "row", "sa" -> "col", "g" -> "entry", "n_rows" -> "n_rows")) else None
case MatrixFilterCols(child, _) =>
if (i == 1) Some(Array("global" -> "g", "sa" -> "col")) else None
case MatrixMapEntries(child, _) =>
if (i == 1) Some(Array("global" -> "g", "sa" -> "col", "va" -> "row", "g" -> "entry")) else None
case MatrixFilterEntries(child, _) =>
if (i == 1) Some(Array("global" -> "g", "sa" -> "col", "va" -> "row", "g" -> "entry")) else None
case MatrixMapGlobals(child, _) =>
if (i == 1) Some(Array("global" -> "g")) else None
case MatrixAggregateColsByKey(child, _, _) =>
if (i == 1)
Some(Array("global" -> "g", "va" -> "row", "sa" -> "col", "g" -> "entry"))
else if (i == 2)
Some(Array("global" -> "g", "sa" -> "col"))
else
None
case MatrixAggregateRowsByKey(child, _, _) =>
if (i == 1)
Some(Array("global" -> "g", "va" -> "row", "sa" -> "col", "g" -> "entry"))
else if (i == 2)
Some(Array("global" -> "g", "va" -> "row"))
else
None
case BlockMatrixMap(_, eltName, _, _) =>
if (i == 1) Some(Array(eltName -> "elt")) else None
case BlockMatrixMap2(_, _, lName, rName, _, _) =>
if (i == 2) Some(Array(lName -> "l", rName -> "r")) else None
case AggLet(name, _, _, _) =>
if (i == 1) Some(Array(name -> "")) else None
case AggExplode(_, name, _, _) =>
if (i == 1) Some(Array(name -> "elt")) else None
case StreamAgg(_, name, _) =>
if (i == 1) Some(Array(name -> "elt")) else None
case _ =>
None
}
var identCounter: Int = 0
val idents = collection.mutable.Map.empty[String, Int]
def getIdentBase(ir: BaseIR): String = ir match {
case True() => "true"
case False() => "false"
case I32(i) => s"c$i"
case stream if stream.typ.isInstanceOf[TStream] => "s"
case table if table.typ.isInstanceOf[TableType] => "ht"
case mt if mt.typ.isInstanceOf[MatrixType] => "mt"
case _ => ""
}
def uniqueify(base: String): String = {
if (base.isEmpty) {
identCounter += 1
identCounter.toString
} else if (idents.contains(base)) {
idents(base) += 1
if (base.last.isDigit)
s"${base}_${idents(base)}"
else
s"${base}${idents(base)}"
} else {
idents(base) = 1
base
}
}
def prettyWithIdent(ir: BaseIR, bindings: Env[String], prefix: String): (Doc, String) = {
val (pre, body) = pretty(ir, bindings)
val ident = prefix + uniqueify(getIdentBase(ir))
val doc = vsep(pre, hsep(text(ident), "=", body))
(doc, ident)
}
def prettyBlock(ir: BaseIR, newBindings: IndexedSeq[(String, String)], bindings: Env[String]): Doc = {
val args = newBindings.map { case (name, base) => name -> s"%${uniqueify(base)}" }
val blockBindings = bindings.bindIterable(args)
val (pre, body) = pretty(ir, blockBindings)
val openBlock = if (args.isEmpty)
text("{")
else
concat("{", softline, args.map(_._2).mkString("(", ", ", ") =>"))
concat(openBlock, nest(2, vsep(pre, body)), line, "}")
}
def pretty(ir: BaseIR, bindings: Env[String]): (Doc, Doc) = ir match {
case Let(name, value, body) =>
val (valueDoc, valueIdent) = prettyWithIdent(value, bindings, "%")
val (bodyPre, bodyHead) = pretty(body, bindings.bind(name, valueIdent))
(vsep(valueDoc, bodyPre), bodyHead)
case RelationalLet(name, value, body) =>
val (valueDoc, valueIdent) = prettyWithIdent(value, bindings, "%")
val (bodyPre, bodyHead) = pretty(body, bindings.bind(name, valueIdent))
(vsep(valueDoc, bodyPre), bodyHead)
case RelationalLetTable(name, value, body) =>
val (valueDoc, valueIdent) = prettyWithIdent(value, bindings, "%")
val (bodyPre, bodyHead) = pretty(body, bindings.bind(name, valueIdent))
(vsep(valueDoc, bodyPre), bodyHead)
case RelationalLetMatrixTable(name, value, body) =>
val (valueDoc, valueIdent) = prettyWithIdent(value, bindings, "%")
val (bodyPre, bodyHead) = pretty(body, bindings.bind(name, valueIdent))
(vsep(valueDoc, bodyPre), bodyHead)
case RelationalLetBlockMatrix(name, value, body) =>
val (valueDoc, valueIdent) = prettyWithIdent(value, bindings, "%")
val (bodyPre, bodyHead) = pretty(body, bindings.bind(name, valueIdent))
(vsep(valueDoc, bodyPre), bodyHead)
case _ =>
val strictChildBodies = mutable.ArrayBuilder.make[Doc]()
val strictChildIdents = for {
i <- ir.children.indices
if childIsStrict(ir, i)
} yield {
val child = ir.children(i)
child match {
case Ref(name, _) =>
bindings.lookupOption(name).getOrElse(uniqueify("%undefined_ref"))
case _ =>
val (body, ident) = prettyWithIdent(ir.children(i), bindings, "!")
strictChildBodies += body
ident
}
}
val nestedBlocks = for {
i <- ir.children.indices
if !childIsStrict(ir, i)
} yield prettyBlock(ir.children(i), blockArgs(ir, i).get, bindings)
val attsIterable = header(ir, elideBindings = true)
val attributes = if (attsIterable.isEmpty) Iterable.empty else
RichIterable.single(concat(attsIterable.intersperse[Doc]("[", ", ", "]")))
def standardArgs = if (strictChildIdents.isEmpty)
""
else
strictChildIdents.mkString("(", ", ", ")")
val head = ir match {
case MakeStruct(fields) =>
val args = (fields.map(_._1), strictChildIdents).zipped.map { (field, value) =>
s"$field: $value"
}.mkString("(", ", ", ")")
hsep(text(Pretty.prettyClass(ir) + args) +: (attributes ++ nestedBlocks))
case InsertFields(_, fields, _) =>
val newFields = (fields.map(_._1), strictChildIdents.tail).zipped.map { (field, value) =>
s"$field: $value"
}.mkString("(", ", ", ")")
val args = s" ${strictChildIdents.head} $newFields"
hsep(text(Pretty.prettyClass(ir) + args) +: (attributes ++ nestedBlocks))
case ir: If =>
hsep(
text(s"${Pretty.prettyClass(ir)} ${strictChildIdents.head}"),
text("then"),
nestedBlocks(0),
text("else"),
nestedBlocks(1))
case _ =>
val args = strictChildIdents.mkString("(", ", ", ")")
hsep(text(Pretty.prettyClass(ir) + standardArgs) +: (attributes ++ nestedBlocks))
}
(hsep(strictChildBodies.result()), head)
}
val (pre, head) = pretty(ir, Env.empty)
vsep(pre, head, empty).render(width, ribbonWidth, maxLen)
}
}
|
hail-is/hail
|
hail/src/main/scala/is/hail/expr/ir/Pretty.scala
|
Scala
|
mit
| 37,353
|
package sms
import com.typesafe.config.{ConfigValue, Config}
import scala.reflect.runtime.{universe => u}
import scala.reflect._
import sms.core.io.ConfigLoader
import scala.util.Try
import scala.collection.convert.wrapAsScala._
import dispatch._
import scala.xml.XML
import scala.concurrent.{ExecutionContext, Promise, Future}
package object core {
implicit class RichConfig(config: Config) {
def extract[T : ClassTag : u.TypeTag]: T = ConfigLoader.extract[T](config)
def extract[T : ClassTag : u.TypeTag](rootName: String): T = ConfigLoader.extract[T](config, rootName)
def extractOpt[T : ClassTag : u.TypeTag]: Option[T] = Try(ConfigLoader.extract[T](config)).toOption
def extractOpt[T : ClassTag : u.TypeTag](rootName: String): Option[T] = Try(ConfigLoader.extract[T](config, rootName)).toOption
def iterator: Iterator[java.util.Map.Entry[String, ConfigValue]] = config.root().entrySet().iterator()
}
implicit class RichFuture[T](val future: Future[T]) extends Logging {
def lift(implicit executor: ExecutionContext): Future[Try[T]] = {
val promise = Promise[Try[T]]()
future.onComplete(promise success)
promise.future
}
def withFailReqLogging(implicit path: String, executor: ExecutionContext): concurrent.Future[T] = {
future.onFailure {
case cause => log.error(cause, "Can't complete request to {}", path)
}
future
}
def withFailDBLogging(implicit executor: ExecutionContext): concurrent.Future[T] = {
future.onFailure {
case cause => log.error(cause, "Can't complete request to database")
}
future
}
}
object dispatch {
object as {
object xml {
object Elem extends (Res => scala.xml.Elem) {
def apply(res: Res) =
XML.withSAXParser(factory.newSAXParser).loadString(res.getResponseBody("UTF-8"))
private lazy val factory = {
val spf = javax.xml.parsers.SAXParserFactory.newInstance()
spf.setNamespaceAware(false)
spf
}
}
}
}
}
}
|
kjanosz/stock-market-sherlock
|
core/src/main/scala/sms/core/package.scala
|
Scala
|
apache-2.0
| 2,087
|
package com.github.ldaniels528.trifecta.io.kafka
import com.github.ldaniels528.trifecta.io.zookeeper.ZKProxy
import com.github.ldaniels528.trifecta.messages.query.KQLSource
import com.github.ldaniels528.trifecta.messages.{KeyAndMessage, MessageInputSource}
import kafka.common.TopicAndPartition
/**
* Kafka Topic Input Source
* @author lawrence.daniels@gmail.com
*/
class KafkaTopicMessageInputSource(brokers: Seq[Broker], topic: String, partition: Int = 0, fetchSize: Int = 2048, correlationId: Int = 0)(implicit zk: ZKProxy)
extends MessageInputSource {
private val consumer = new KafkaMicroConsumer(TopicAndPartition(topic, partition), brokers)
private var offset_? : Option[Long] = consumer.getFirstOffset
/**
* Reads the given keyed-message from the underlying stream
* @return a [[KeyAndMessage]]
*/
override def read: Option[KeyAndMessage] = {
for {
offset <- offset_?
md <- consumer.fetch(offset)(fetchSize).headOption
} yield {
offset_? = offset_? map (_ + 1)
KeyAndMessage(md.key, md.message)
}
}
/**
* Returns a source for querying via Kafka Query Language (KQL)
* @return the option of a query source
*/
override def getQuerySource: Option[KQLSource] = Option(KafkaQuerySource(topic, brokers, correlationId))
/**
* Closes the underlying stream
*/
override def close() = consumer.close()
}
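/*
 * Illustrative usage sketch, not part of the original file: `read` returns one
 * keyed message per call and advances the internal offset, so a caller can
 * drain the currently available messages by polling until `None`. Constructing
 * the source itself requires concrete brokers and an implicit ZKProxy, which
 * are deployment-specific and therefore omitted here; the object name below is
 * hypothetical.
 */
object KafkaTopicMessageInputSourceExample {
  // Collects messages from an already-constructed source until `read` yields None.
  def drain(source: KafkaTopicMessageInputSource): List[KeyAndMessage] =
    Iterator.continually(source.read).takeWhile(_.isDefined).flatten.toList
}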
|
ldaniels528/trifecta
|
src/main/scala/com/github/ldaniels528/trifecta/io/kafka/KafkaTopicMessageInputSource.scala
|
Scala
|
apache-2.0
| 1,389
|
package ru.freefry.furniture_factory.rest
import akka.pattern.ask
import ru.freefry.furniture_factory.core.Factory.{Chair, Product, Table}
import ru.freefry.furniture_factory.core.{Core, Factory}
import spray.http.StatusCodes
import spray.http.StatusCodes.InternalServerError
import spray.httpx.SprayJsonSupport
import spray.httpx.marshalling.MetaMarshallers
import spray.json._
import spray.routing.HttpService
import scala.util.Success
/** Custom json protocol */
object FactoryJsonProtocol extends DefaultJsonProtocol with SprayJsonSupport {
implicit object ProductJsonFormat extends RootJsonFormat[Product] {
val product = "product"
val table = "table"
val chair = "chair"
def read(json: JsValue) = json.asJsObject.getFields(product) match {
case Seq(JsString(s)) => s match {
case `table` => Table
case `chair` => Chair
case _ => throw new DeserializationException("""Ordering product has to be "table" or "chair"""")
}
case _ => throw new DeserializationException("Product expected")
}
def write(p: Product) = p match {
case Table => JsObject(product -> JsString(table))
case Chair => JsObject(product -> JsString(chair))
}
}
}
/**
* Route for producing ordering product.
* Receives POST on /factory/order with product type as a JSON object
*/
trait OrderRoute extends HttpService {
this: Core with Factory =>
import ru.freefry.furniture_factory.rest.FactoryJsonProtocol._
implicit val futureMarshaller = MetaMarshallers.futureMarshaller
val orderRoute =
path("factory" / "order") {
post {
entity(as[Product]) { product =>
onComplete(factory.ref ? product) {
case Success(s: String) => complete(s)
case other: Any => {
system.log.error(s"Unhandled message: $other")
complete(InternalServerError)
}
}
}
}
}
}
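/*
 * Illustrative sketch, not part of the original file: the request body that
 * orderRoute expects for POST /factory/order, built with the same
 * FactoryJsonProtocol the route uses for unmarshalling. The object name is
 * hypothetical.
 */
object OrderRouteExample {
  import ru.freefry.furniture_factory.rest.FactoryJsonProtocol._

  // Serializes to {"product":"chair"}; posting this body yields the factory actor's reply.
  val chairOrderBody: String = (Chair: Product).toJson.compactPrint
}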
|
freefry/furniture-factory
|
src/main/scala/ru/freefry/furniture_factory/rest/routes.scala
|
Scala
|
apache-2.0
| 2,009
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.optimizer.joinReorder
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeMap}
import org.apache.spark.sql.catalyst.optimizer._
import org.apache.spark.sql.catalyst.plans.Inner
import org.apache.spark.sql.catalyst.plans.logical.{ColumnStat, LogicalPlan}
import org.apache.spark.sql.catalyst.rules.RuleExecutor
import org.apache.spark.sql.catalyst.statsEstimation.{StatsEstimationTestBase, StatsTestPlan}
import org.apache.spark.sql.internal.SQLConf._
class StarJoinCostBasedReorderSuite extends JoinReorderPlanTestBase with StatsEstimationTestBase {
object Optimize extends RuleExecutor[LogicalPlan] {
val batches =
Batch("Operator Optimizations", FixedPoint(100),
CombineFilters,
PushPredicateThroughNonJoin,
ReorderJoin,
PushPredicateThroughJoin,
ColumnPruning,
CollapseProject) ::
Batch("Join Reorder", FixedPoint(1),
CostBasedJoinReorder) :: Nil
}
var originalConfCBOEnabled = false
var originalConfJoinReorderEnabled = false
var originalConfJoinReorderDPStarFilter = false
override def beforeAll(): Unit = {
super.beforeAll()
originalConfCBOEnabled = conf.cboEnabled
originalConfJoinReorderEnabled = conf.joinReorderEnabled
originalConfJoinReorderDPStarFilter = conf.joinReorderDPStarFilter
conf.setConf(CBO_ENABLED, true)
conf.setConf(JOIN_REORDER_ENABLED, true)
conf.setConf(JOIN_REORDER_DP_STAR_FILTER, true)
}
override def afterAll(): Unit = {
try {
conf.setConf(CBO_ENABLED, originalConfCBOEnabled)
conf.setConf(JOIN_REORDER_ENABLED, originalConfJoinReorderEnabled)
conf.setConf(JOIN_REORDER_DP_STAR_FILTER, originalConfJoinReorderDPStarFilter)
} finally {
super.afterAll()
}
}
private val columnInfo: AttributeMap[ColumnStat] = AttributeMap(Seq(
// F1 (fact table)
attr("f1_fk1") -> rangeColumnStat(100, 0),
attr("f1_fk2") -> rangeColumnStat(100, 0),
attr("f1_fk3") -> rangeColumnStat(100, 0),
attr("f1_c1") -> rangeColumnStat(100, 0),
attr("f1_c2") -> rangeColumnStat(100, 0),
// D1 (dimension)
attr("d1_pk") -> rangeColumnStat(100, 0),
attr("d1_c2") -> rangeColumnStat(50, 0),
attr("d1_c3") -> rangeColumnStat(50, 0),
// D2 (dimension)
attr("d2_pk") -> rangeColumnStat(20, 0),
attr("d2_c2") -> rangeColumnStat(10, 0),
attr("d2_c3") -> rangeColumnStat(10, 0),
// D3 (dimension)
attr("d3_pk") -> rangeColumnStat(10, 0),
attr("d3_c2") -> rangeColumnStat(5, 0),
attr("d3_c3") -> rangeColumnStat(5, 0),
// T1 (regular table i.e. outside star)
attr("t1_c1") -> rangeColumnStat(20, 1),
attr("t1_c2") -> rangeColumnStat(10, 1),
attr("t1_c3") -> rangeColumnStat(10, 1),
// T2 (regular table)
attr("t2_c1") -> rangeColumnStat(5, 1),
attr("t2_c2") -> rangeColumnStat(5, 1),
attr("t2_c3") -> rangeColumnStat(5, 1),
// T3 (regular table)
attr("t3_c1") -> rangeColumnStat(5, 1),
attr("t3_c2") -> rangeColumnStat(5, 1),
attr("t3_c3") -> rangeColumnStat(5, 1),
// T4 (regular table)
attr("t4_c1") -> rangeColumnStat(5, 1),
attr("t4_c2") -> rangeColumnStat(5, 1),
attr("t4_c3") -> rangeColumnStat(5, 1),
// T5 (regular table)
attr("t5_c1") -> rangeColumnStat(5, 1),
attr("t5_c2") -> rangeColumnStat(5, 1),
attr("t5_c3") -> rangeColumnStat(5, 1),
// T6 (regular table)
attr("t6_c1") -> rangeColumnStat(5, 1),
attr("t6_c2") -> rangeColumnStat(5, 1),
attr("t6_c3") -> rangeColumnStat(5, 1)
))
private val nameToAttr: Map[String, Attribute] = columnInfo.map(kv => kv._1.name -> kv._1)
private val nameToColInfo: Map[String, (Attribute, ColumnStat)] =
columnInfo.map(kv => kv._1.name -> kv)
private val f1 = StatsTestPlan(
outputList = Seq("f1_fk1", "f1_fk2", "f1_fk3", "f1_c1", "f1_c2").map(nameToAttr),
rowCount = 1000,
size = Some(1000 * (8 + 4 * 5)),
attributeStats = AttributeMap(Seq("f1_fk1", "f1_fk2", "f1_fk3", "f1_c1", "f1_c2")
.map(nameToColInfo)))
// To control the layout of the join plans, keep the size for the non-fact tables constant
// and vary the rowcount and the number of distinct values of the join columns.
private val d1 = StatsTestPlan(
outputList = Seq("d1_pk", "d1_c2", "d1_c3").map(nameToAttr),
rowCount = 100,
size = Some(3000),
attributeStats = AttributeMap(Seq("d1_pk", "d1_c2", "d1_c3").map(nameToColInfo)))
private val d2 = StatsTestPlan(
outputList = Seq("d2_pk", "d2_c2", "d2_c3").map(nameToAttr),
rowCount = 20,
size = Some(3000),
attributeStats = AttributeMap(Seq("d2_pk", "d2_c2", "d2_c3").map(nameToColInfo)))
private val d3 = StatsTestPlan(
outputList = Seq("d3_pk", "d3_c2", "d3_c3").map(nameToAttr),
rowCount = 10,
size = Some(3000),
attributeStats = AttributeMap(Seq("d3_pk", "d3_c2", "d3_c3").map(nameToColInfo)))
private val t1 = StatsTestPlan(
outputList = Seq("t1_c1", "t1_c2", "t1_c3").map(nameToAttr),
rowCount = 50,
size = Some(3000),
attributeStats = AttributeMap(Seq("t1_c1", "t1_c2", "t1_c3").map(nameToColInfo)))
private val t2 = StatsTestPlan(
outputList = Seq("t2_c1", "t2_c2", "t2_c3").map(nameToAttr),
rowCount = 10,
size = Some(3000),
attributeStats = AttributeMap(Seq("t2_c1", "t2_c2", "t2_c3").map(nameToColInfo)))
private val t3 = StatsTestPlan(
outputList = Seq("t3_c1", "t3_c2", "t3_c3").map(nameToAttr),
rowCount = 10,
size = Some(3000),
attributeStats = AttributeMap(Seq("t3_c1", "t3_c2", "t3_c3").map(nameToColInfo)))
private val t4 = StatsTestPlan(
outputList = Seq("t4_c1", "t4_c2", "t4_c3").map(nameToAttr),
rowCount = 10,
size = Some(3000),
attributeStats = AttributeMap(Seq("t4_c1", "t4_c2", "t4_c3").map(nameToColInfo)))
private val t5 = StatsTestPlan(
outputList = Seq("t5_c1", "t5_c2", "t5_c3").map(nameToAttr),
rowCount = 10,
size = Some(3000),
attributeStats = AttributeMap(Seq("t5_c1", "t5_c2", "t5_c3").map(nameToColInfo)))
private val t6 = StatsTestPlan(
outputList = Seq("t6_c1", "t6_c2", "t6_c3").map(nameToAttr),
rowCount = 10,
size = Some(3000),
attributeStats = AttributeMap(Seq("t6_c1", "t6_c2", "t6_c3").map(nameToColInfo)))
test("Test 1: Star query with two dimensions and two regular tables") {
// d1 t1
// \ /
// f1
// / \
// d2 t2
//
// star: {f1, d1, d2}
// non-star: {t1, t2}
//
// level 0: (t2 ), (d2 ), (f1 ), (d1 ), (t1 )
// level 1: {f1 d1 }, {d2 f1 }
// level 2: {d2 f1 d1 }
// level 3: {t2 d1 d2 f1 }, {t1 d1 d2 f1 }
// level 4: {f1 t1 t2 d1 d2 }
//
// Number of generated plans: 11 (vs. 20 w/o filter)
val query =
f1.join(t1).join(t2).join(d1).join(d2)
.where((nameToAttr("f1_c1") === nameToAttr("t1_c1")) &&
(nameToAttr("f1_c2") === nameToAttr("t2_c1")) &&
(nameToAttr("f1_fk1") === nameToAttr("d1_pk")) &&
(nameToAttr("f1_fk2") === nameToAttr("d2_pk")))
val expected =
f1.join(d2, Inner, Some(nameToAttr("f1_fk2") === nameToAttr("d2_pk")))
.join(d1, Inner, Some(nameToAttr("f1_fk1") === nameToAttr("d1_pk")))
.join(t2, Inner, Some(nameToAttr("f1_c2") === nameToAttr("t2_c1")))
.join(t1, Inner, Some(nameToAttr("f1_c1") === nameToAttr("t1_c1")))
.select(outputsOf(f1, t1, t2, d1, d2): _*)
assertEqualJoinPlans(Optimize, query, expected)
}
test("Test 2: Star with a linear branch") {
//
// t1 d1 - t2 - t3
// \ /
// f1
// |
// d2
//
// star: {d1, f1, d2}
// non-star: {t2, t1, t3}
//
// level 0: (f1 ), (d2 ), (t3 ), (d1 ), (t1 ), (t2 )
// level 1: {t3 t2 }, {f1 d2 }, {f1 d1 }
// level 2: {d2 f1 d1 }
// level 3: {t1 d1 f1 d2 }, {t2 d1 f1 d2 }
// level 4: {d1 t2 f1 t1 d2 }, {d1 t3 t2 f1 d2 }
// level 5: {d1 t3 t2 f1 t1 d2 }
//
// Number of generated plans: 15 (vs 24)
val query =
d1.join(t1).join(t2).join(f1).join(d2).join(t3)
.where((nameToAttr("d1_pk") === nameToAttr("f1_fk1")) &&
(nameToAttr("t1_c1") === nameToAttr("f1_c1")) &&
(nameToAttr("d2_pk") === nameToAttr("f1_fk2")) &&
(nameToAttr("f1_fk2") === nameToAttr("d2_pk")) &&
(nameToAttr("d1_c2") === nameToAttr("t2_c1")) &&
(nameToAttr("t2_c2") === nameToAttr("t3_c1")))
val expected =
f1.join(d2, Inner, Some(nameToAttr("f1_fk2") === nameToAttr("d2_pk")))
.join(d1, Inner, Some(nameToAttr("f1_fk1") === nameToAttr("d1_pk")))
.join(t3.join(t2, Inner, Some(nameToAttr("t2_c2") === nameToAttr("t3_c1"))), Inner,
Some(nameToAttr("d1_c2") === nameToAttr("t2_c1")))
.join(t1, Inner, Some(nameToAttr("t1_c1") === nameToAttr("f1_c1")))
.select(outputsOf(d1, t1, t2, f1, d2, t3): _*)
assertEqualJoinPlans(Optimize, query, expected)
}
test("Test 3: Star with derived branches") {
// t3 t2
// | |
// d1 - t4 - t1
// |
// f1
// |
// d2
//
// star: (d1 f1 d2 )
// non-star: (t4 t1 t2 t3 )
//
// level 0: (t1 ), (t3 ), (f1 ), (d1 ), (t2 ), (d2 ), (t4 )
// level 1: {f1 d2 }, {t1 t4 }, {t1 t2 }, {f1 d1 }, {t3 t4 }
// level 2: {d1 f1 d2 }, {t2 t1 t4 }, {t1 t3 t4 }
// level 3: {t4 d1 f1 d2 }, {t3 t4 t1 t2 }
// level 4: {d1 f1 t4 d2 t3 }, {d1 f1 t4 d2 t1 }
// level 5: {d1 f1 t4 d2 t1 t2 }, {d1 f1 t4 d2 t1 t3 }
// level 6: {d1 f1 t4 d2 t1 t2 t3 }
//
// Number of generated plans: 22 (vs. 34)
val query =
d1.join(t1).join(t2).join(t3).join(t4).join(f1).join(d2)
.where((nameToAttr("t1_c1") === nameToAttr("t2_c1")) &&
(nameToAttr("t3_c1") === nameToAttr("t4_c1")) &&
(nameToAttr("t1_c2") === nameToAttr("t4_c2")) &&
(nameToAttr("d1_c2") === nameToAttr("t4_c3")) &&
(nameToAttr("f1_fk1") === nameToAttr("d1_pk")) &&
(nameToAttr("f1_fk2") === nameToAttr("d2_pk")))
val expected =
t3.join(t4, Inner, Some(nameToAttr("t3_c1") === nameToAttr("t4_c1")))
.join(t1.join(t2, Inner, Some(nameToAttr("t1_c1") === nameToAttr("t2_c1"))), Inner,
Some(nameToAttr("t1_c2") === nameToAttr("t4_c2")))
.join(f1
.join(d2, Inner, Some(nameToAttr("f1_fk2") === nameToAttr("d2_pk")))
.join(d1, Inner, Some(nameToAttr("f1_fk1") === nameToAttr("d1_pk"))))
.select(outputsOf(d1, t1, t2, t3, t4, f1, d2): _*)
assertEqualJoinPlans(Optimize, query, expected)
}
test("Test 4: Star with several branches") {
//
// d1 - t3 - t4
// |
// f1 - d3 - t1 - t2
// |
// d2 - t5 - t6
//
// star: {d1 f1 d2 d3 }
// non-star: {t5 t3 t6 t2 t4 t1}
//
// level 0: (t4 ), (d2 ), (t5 ), (d3 ), (d1 ), (f1 ), (t2 ), (t6 ), (t1 ), (t3 )
// level 1: {t5 t6 }, {t4 t3 }, {d3 f1 }, {t2 t1 }, {d2 f1 }, {d1 f1 }
// level 2: {d2 d1 f1 }, {d2 d3 f1 }, {d3 d1 f1 }
// level 3: {d2 d1 d3 f1 }
// level 4: {d1 t3 d3 f1 d2 }, {d1 d3 f1 t1 d2 }, {d1 t5 d3 f1 d2 }
// level 5: {d1 t5 d3 f1 t1 d2 }, {d1 t3 t4 d3 f1 d2 }, {d1 t5 t6 d3 f1 d2 },
// {d1 t5 t3 d3 f1 d2 }, {d1 t3 d3 f1 t1 d2 }, {d1 t2 d3 f1 t1 d2 }
// level 6: {d1 t5 t3 t4 d3 f1 d2 }, {d1 t3 t2 d3 f1 t1 d2 }, {d1 t5 t6 d3 f1 t1 d2 },
// {d1 t5 t3 d3 f1 t1 d2 }, {d1 t5 t2 d3 f1 t1 d2 }, ...
// ...
// level 9: {d1 t5 t3 t6 t2 t4 d3 f1 t1 d2 }
//
// Number of generated plans: 46 (vs. 82)
// TODO(SPARK-32687): find a way to make optimization result of `CostBasedJoinReorder`
// deterministic even if the input order is different.
val query =
d1.join(t3).join(t4).join(f1).join(d3).join(d2).join(t5).join(t6).join(t1).join(t2)
.where((nameToAttr("d1_c2") === nameToAttr("t3_c1")) &&
(nameToAttr("t3_c2") === nameToAttr("t4_c2")) &&
(nameToAttr("d1_pk") === nameToAttr("f1_fk1")) &&
(nameToAttr("f1_fk2") === nameToAttr("d2_pk")) &&
(nameToAttr("d2_c2") === nameToAttr("t5_c1")) &&
(nameToAttr("t5_c2") === nameToAttr("t6_c2")) &&
(nameToAttr("f1_fk3") === nameToAttr("d3_pk")) &&
(nameToAttr("d3_c2") === nameToAttr("t1_c1")) &&
(nameToAttr("t1_c2") === nameToAttr("t2_c2")))
val expected =
f1.join(d3, Inner, Some(nameToAttr("f1_fk3") === nameToAttr("d3_pk")))
.join(d2, Inner, Some(nameToAttr("f1_fk2") === nameToAttr("d2_pk")))
.join(d1, Inner, Some(nameToAttr("f1_fk1") === nameToAttr("d1_pk")))
.join(t4.join(t3, Inner, Some(nameToAttr("t3_c2") === nameToAttr("t4_c2"))), Inner,
Some(nameToAttr("d1_c2") === nameToAttr("t3_c1")))
.join(t2.join(t1, Inner, Some(nameToAttr("t1_c2") === nameToAttr("t2_c2"))), Inner,
Some(nameToAttr("d3_c2") === nameToAttr("t1_c1")))
.join(t5.join(t6, Inner, Some(nameToAttr("t5_c2") === nameToAttr("t6_c2"))), Inner,
Some(nameToAttr("d2_c2") === nameToAttr("t5_c1")))
.select(outputsOf(d1, t3, t4, f1, d3, d2, t5, t6, t1, t2): _*)
assertEqualJoinPlans(Optimize, query, expected)
}
test("Test 5: RI star only") {
// d1
// |
// f1
// / \
// d2 d3
//
// star: {f1, d1, d2, d3}
// non-star: {}
// level 0: (d1), (f1), (d2), (d3)
// level 1: {f1 d3 }, {f1 d2 }, {d1 f1 }
// level 2: {d1 f1 d2 }, {d2 f1 d3 }, {d1 f1 d3 }
// level 3: {d1 d2 f1 d3 }
// Number of generated plans: 11 (= 11)
val query =
d1.join(d2).join(f1).join(d3)
.where((nameToAttr("f1_fk1") === nameToAttr("d1_pk")) &&
(nameToAttr("f1_fk2") === nameToAttr("d2_pk")) &&
(nameToAttr("f1_fk3") === nameToAttr("d3_pk")))
val expected =
f1.join(d3, Inner, Some(nameToAttr("f1_fk3") === nameToAttr("d3_pk")))
.join(d2, Inner, Some(nameToAttr("f1_fk2") === nameToAttr("d2_pk")))
.join(d1, Inner, Some(nameToAttr("f1_fk1") === nameToAttr("d1_pk")))
.select(outputsOf(d1, d2, f1, d3): _*)
assertEqualJoinPlans(Optimize, query, expected)
}
test("Test 6: No RI star") {
//
// f1 - t1 - t2 - t3
//
// star: {}
// non-star: {f1, t1, t2, t3}
// level 0: (t1), (f1), (t2), (t3)
// level 1: {f1 t3 }, {f1 t2 }, {t1 f1 }
// level 2: {t1 f1 t2 }, {t2 f1 t3 }, {t1 f1 t3 }
// level 3: {t1 t2 f1 t3 }
// Number of generated plans: 11 (= 11)
val query =
t1.join(f1).join(t2).join(t3)
.where((nameToAttr("f1_fk1") === nameToAttr("t1_c1")) &&
(nameToAttr("f1_fk2") === nameToAttr("t2_c1")) &&
(nameToAttr("f1_fk3") === nameToAttr("t3_c1")))
val expected =
f1.join(t3, Inner, Some(nameToAttr("f1_fk3") === nameToAttr("t3_c1")))
.join(t2, Inner, Some(nameToAttr("f1_fk2") === nameToAttr("t2_c1")))
.join(t1, Inner, Some(nameToAttr("f1_fk1") === nameToAttr("t1_c1")))
.select(outputsOf(t1, f1, t2, t3): _*)
assertEqualJoinPlans(Optimize, query, expected)
}
}
|
witgo/spark
|
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/joinReorder/StarJoinCostBasedReorderSuite.scala
|
Scala
|
apache-2.0
| 16,053
|
package chat.tox.antox.activities
import java.io.{File, FileNotFoundException, FileOutputStream, IOException}
import java.util.Random
import android.content.DialogInterface.OnClickListener
import android.content.{Context, DialogInterface, Intent, SharedPreferences}
import android.graphics.drawable.ColorDrawable
import android.graphics.{Bitmap, BitmapFactory}
import android.net.Uri
import android.os.{Build, Bundle, Environment}
import android.preference.Preference.OnPreferenceClickListener
import android.preference.{ListPreference, Preference, PreferenceManager}
import android.support.v7.app.AlertDialog
import android.view.{MenuItem, View}
import android.widget.{ImageButton, Toast}
import chat.tox.QR.{Contents, QRCodeEncode}
import chat.tox.antox.R
import chat.tox.antox.activities.ProfileSettingsActivity._
import chat.tox.antox.data.State
import chat.tox.antox.fragments.{AvatarDialog, ColorPickerDialog}
import chat.tox.antox.theme.ThemeManager
import chat.tox.antox.tox.ToxSingleton
import chat.tox.antox.transfer.FileDialog
import chat.tox.antox.transfer.FileDialog.DirectorySelectedListener
import chat.tox.antox.wrapper.UserStatus
import com.google.zxing.{BarcodeFormat, WriterException}
import im.tox.tox4j.exceptions.ToxException
object ProfileSettingsActivity {
private val sBindPreferenceSummaryToValueListener: Preference.OnPreferenceChangeListener = new Preference.OnPreferenceChangeListener() {
override def onPreferenceChange(preference: Preference, value: AnyRef): Boolean = {
val stringValue = value.toString
preference match {
case lp: ListPreference =>
val index = lp.findIndexOfValue(stringValue)
preference.setSummary(if (index >= 0) lp.getEntries()(index) else null)
case _ =>
preference.setSummary(stringValue)
}
true
}
}
private def bindPreferenceSummaryToValue(preference: Preference) {
preference.setOnPreferenceChangeListener(sBindPreferenceSummaryToValueListener)
sBindPreferenceSummaryToValueListener.onPreferenceChange(preference, PreferenceManager.getDefaultSharedPreferences(preference.getContext)
.getString(preference.getKey, ""))
}
}
class ProfileSettingsActivity extends BetterPreferenceActivity {
private var avatarDialog: AvatarDialog = _
private var themeDialog: ColorPickerDialog = _
override def onCreate(savedInstanceState: Bundle) {
getDelegate.installViewFactory()
getDelegate.onCreate(savedInstanceState)
super.onCreate(savedInstanceState)
getSupportActionBar.setDisplayHomeAsUpEnabled(true)
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
getSupportActionBar.setBackgroundDrawable(new ColorDrawable(ThemeManager.primaryColor))
}
addPreferencesFromResource(R.xml.pref_profile)
themeDialog = new ColorPickerDialog(ProfileSettingsActivity.this, new ColorPickerDialog.Callback {
override def onColorSelection(index: Int, color: Int, darker: Int): Unit = {
ThemeManager.primaryColor = color
ThemeManager.darkerPrimaryColor = darker
val preferences = PreferenceManager.getDefaultSharedPreferences(ProfileSettingsActivity.this)
preferences.edit.putInt("theme_color", color).apply()
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
recreate()
}
}
})
avatarDialog = new AvatarDialog(ProfileSettingsActivity.this)
if (savedInstanceState != null) {
if (savedInstanceState.getBoolean("showing_avatar_dialog", false)) avatarDialog.show()
if (savedInstanceState.getBoolean("showing_theme_dialog", false)) showThemeDialog()
}
bindPreferenceSummaryToValue(findPreference("nickname"))
val passwordPreference = findPreference("password")
if (PreferenceManager.getDefaultSharedPreferences(passwordPreference.getContext)
.getString(passwordPreference.getKey, "").isEmpty) {
getPreferenceScreen.removePreference(passwordPreference)
} else {
bindPreferenceSummaryToValue(passwordPreference)
}
bindPreferenceSummaryToValue(findPreference("status"))
bindPreferenceSummaryToValue(findPreference("status_message"))
bindPreferenceSummaryToValue(findPreference("tox_id"))
bindPreferenceSummaryToValue(findPreference("active_account"))
val toxIDPreference = findPreference("tox_id")
toxIDPreference.setOnPreferenceClickListener(new Preference.OnPreferenceClickListener() {
override def onPreferenceClick(preference: Preference): Boolean = {
createToxIDDialog()
true
}
})
val avatarPreference = findPreference("avatar")
avatarPreference.setOnPreferenceClickListener(new Preference.OnPreferenceClickListener() {
override def onPreferenceClick(preference: Preference): Boolean = {
avatarDialog.show()
true
}
})
val exportProfile = findPreference("export")
val thisActivity = this
exportProfile.setOnPreferenceClickListener(new OnPreferenceClickListener {
override def onPreferenceClick(preference: Preference): Boolean = {
val fileDialog = new FileDialog(thisActivity, Environment.getExternalStorageDirectory, true)
fileDialog.addDirectoryListener(new DirectorySelectedListener {
override def directorySelected(directory: File): Unit = {
onExportDataFileSelected(directory)
}
})
fileDialog.showDialog()
true
}
})
val nospamPreference = findPreference("nospam")
nospamPreference.setOnPreferenceClickListener(new Preference.OnPreferenceClickListener() {
override def onPreferenceClick(preference: Preference): Boolean = {
val toxSingleton = ToxSingleton.getInstance()
val builder = new AlertDialog.Builder(ProfileSettingsActivity.this)
builder.setMessage(R.string.reset_tox_id_dialog_message)
.setTitle(R.string.reset_tox_id_dialog_title)
builder.setPositiveButton(getString(R.string.reset_tox_id_dialog_confirm), new OnClickListener {
override def onClick(dialog: DialogInterface, which: Int): Unit = {
try {
val random = new Random()
val nospam = random.nextInt(1234567890)
toxSingleton.tox.setNospam(nospam)
val preferences = PreferenceManager.getDefaultSharedPreferences(ProfileSettingsActivity.this)
val editor = preferences.edit()
editor.putString("tox_id", toxSingleton.tox.getAddress.toString)
editor.apply()
// Display toast to inform user of successful change
Toast.makeText(
getApplicationContext,
getApplicationContext.getResources.getString(R.string.tox_id_reset),
Toast.LENGTH_SHORT
).show()
} catch {
case e: ToxException[_] => e.printStackTrace()
}
}
})
builder.setNegativeButton(getString(R.string.button_cancel), null)
builder.show()
true
}
})
val themePreference = findPreference("theme_color")
themePreference.setOnPreferenceClickListener(new OnPreferenceClickListener {
override def onPreferenceClick(preference: Preference): Boolean = {
showThemeDialog()
true
}
})
}
def createToxIDDialog() {
val builder = new AlertDialog.Builder(ProfileSettingsActivity.this)
val inflater = ProfileSettingsActivity.this.getLayoutInflater
val view = inflater.inflate(R.layout.dialog_tox_id, null)
builder.setView(view)
builder.setPositiveButton(getString(R.string.button_ok), null)
builder.setNeutralButton(getString(R.string.dialog_tox_id), new DialogInterface.OnClickListener() {
def onClick(dialogInterface: DialogInterface, ID: Int) {
val sharedPreferences = PreferenceManager.getDefaultSharedPreferences(ProfileSettingsActivity.this)
val clipboard = ProfileSettingsActivity.this.getSystemService(Context.CLIPBOARD_SERVICE).asInstanceOf[android.text.ClipboardManager]
clipboard.setText(sharedPreferences.getString("tox_id", ""))
}
})
var file = new File(Environment.getExternalStorageDirectory.getPath + "/Antox/")
if (!file.exists()) {
file.mkdirs()
}
val noMedia = new File(Environment.getExternalStorageDirectory.getPath + "/Antox/", ".nomedia")
if (!noMedia.exists()) {
try {
noMedia.createNewFile()
} catch {
case e: IOException => e.printStackTrace()
}
}
file = new File(Environment.getExternalStorageDirectory.getPath + "/Antox/userkey_qr.png")
val pref = PreferenceManager.getDefaultSharedPreferences(ProfileSettingsActivity.this.getApplicationContext)
generateQR(pref.getString("tox_id", ""))
val bmp = BitmapFactory.decodeFile(file.getAbsolutePath)
val qrCode = view.findViewById(R.id.qr_image).asInstanceOf[ImageButton]
qrCode.setImageBitmap(bmp)
qrCode.setOnClickListener(new View.OnClickListener() {
override def onClick(v: View) {
val shareIntent = new Intent()
shareIntent.setAction(Intent.ACTION_SEND)
shareIntent.putExtra(Intent.EXTRA_STREAM, Uri.fromFile(new File(Environment.getExternalStorageDirectory.getPath + "/Antox/userkey_qr.png")))
shareIntent.setType("image/jpeg")
view.getContext.startActivity(Intent.createChooser(shareIntent, getResources.getString(R.string.share_with)))
}
})
builder.create().show()
}
def showThemeDialog(): Unit = {
val preferences = PreferenceManager.getDefaultSharedPreferences(ProfileSettingsActivity.this)
val currentColor = preferences.getInt("theme_color", -1)
themeDialog.show(currentColor match{
case -1 => None
case _ => Some(currentColor)
})
}
def onExportDataFileSelected(dest: File): Unit = {
try {
ToxSingleton.exportDataFile(dest)
Toast.makeText(getApplicationContext, "Exported data file to " + dest.getPath, Toast.LENGTH_LONG)
.show()
} catch {
case e: Exception =>
e.printStackTrace()
Toast.makeText(getApplicationContext, "Error: Could not export data file.", Toast.LENGTH_LONG).show()
}
}
private def generateQR(userKey: String) {
val qrData = "tox:" + userKey
val qrCodeSize = 400
val qrCodeEncoder = new QRCodeEncode(qrData, null, Contents.Type.TEXT, BarcodeFormat.QR_CODE.toString,
qrCodeSize)
var out: FileOutputStream = null
try {
val bitmap = qrCodeEncoder.encodeAsBitmap()
out = new FileOutputStream(Environment.getExternalStorageDirectory.getPath + "/Antox/userkey_qr.png")
bitmap.compress(Bitmap.CompressFormat.PNG, 90, out)
out.close()
} catch {
case e: WriterException => e.printStackTrace()
case e: FileNotFoundException => e.printStackTrace()
case e: IOException => e.printStackTrace()
}
}
override def onResume(): Unit = {
super.onResume()
getPreferenceScreen.getSharedPreferences.registerOnSharedPreferenceChangeListener(this)
}
override def onPause(): Unit = {
super.onPause()
getPreferenceScreen.getSharedPreferences.unregisterOnSharedPreferenceChangeListener(this)
}
override def onStop(): Unit = {
super.onStop()
avatarDialog.close()
themeDialog.close()
}
def onSharedPreferenceChanged(sharedPreferences: SharedPreferences, key: String) {
val userDb = State.userDb
val activeAccount = sharedPreferences.getString("active_account", "")
key match {
case "nickname" =>
val name = sharedPreferences.getString(key, "")
try {
println("Tox is " + ToxSingleton.tox)
ToxSingleton.tox.setName(name)
} catch {
case e: ToxException[_] => e.printStackTrace()
}
userDb.updateUserDetail(activeAccount, key, name)
case "password" =>
val password = sharedPreferences.getString(key, "")
userDb.updateUserDetail(activeAccount, key, password)
case "status" =>
val newStatusString = sharedPreferences.getString(key, "")
val newStatus = UserStatus.getToxUserStatusFromString(newStatusString)
try {
ToxSingleton.tox.setStatus(newStatus)
} catch {
case e: ToxException[_] => e.printStackTrace()
}
userDb.updateUserDetail(activeAccount, key, newStatusString)
case "status_message" =>
val statusMessage = sharedPreferences.getString(key, "")
try {
ToxSingleton.tox.setStatusMessage(statusMessage)
} catch {
case e: ToxException[_] => e.printStackTrace()
}
userDb.updateUserDetail(activeAccount, key, statusMessage)
case "logging_enabled" =>
val loggingEnabled = sharedPreferences.getBoolean(key, true)
userDb.updateUserDetail(activeAccount, key, loggingEnabled)
case "avatar" =>
val avatar = sharedPreferences.getString(key, "")
userDb.updateUserDetail(activeAccount, key, avatar)
case _ =>
}
}
override def onActivityResult(requestCode: Int, resultCode: Int, data: Intent): Unit = {
avatarDialog.onActivityResult(requestCode, resultCode, data)
avatarDialog.close()
avatarDialog.show()
}
override def onSaveInstanceState(savedInstanceState: Bundle): Unit = {
super.onSaveInstanceState(savedInstanceState)
savedInstanceState.putBoolean("showing_avatar_dialog", avatarDialog.isShowing)
savedInstanceState.putBoolean("showing_theme_dialog", themeDialog.isShowing)
}
override def onOptionsItemSelected(item: MenuItem): Boolean = item.getItemId match {
  case android.R.id.home =>
    finish()
    true
  case _ =>
    super.onOptionsItemSelected(item)
}
}
|
gale320/Antox
|
app/src/main/scala/chat/tox/antox/activities/ProfileSettingsActivity.scala
|
Scala
|
gpl-3.0
| 13,768
|
package jp.kenkov.smt.japanese.sen
import net.java.sen.{SenFactory}
import java.util.Locale
import scala.collection.JavaConverters._
class Keitaiso(val hyousoukei: String,
val genkei: String,
val pos: String,
val conjugationalForm: String,
val conjugationalType: String,
val pronunciations: List[String],
val readings: List[String],
val length: Int,
val start: Int,
val cost: Double) {
override def toString: String = {
"Keitaiso(" + List(hyousoukei, genkei, pos, conjugationalForm, conjugationalType,
pronunciations, readings, length, start, cost).mkString(", ") + ")"
}
override def equals(that: Any): Boolean = that match {
case other: Keitaiso =>
other.hyousoukei == hyousoukei &&
other.genkei == genkei &&
other.pos == pos &&
other.pronunciations == pronunciations &&
other.readings == readings &&
other.length == length
case _ => false
}
}
object Keitaiso {
type Words = List[String]
def stringToKeitaisos(input: String): List[Keitaiso] = {
val tagger = SenFactory.getStringTagger(null)
val token = tagger.analyze(input).asScala.toList
// for (t <- token) yield
token map { t =>
val mor = t.getMorpheme()
new Keitaiso(
t.getSurface(),
mor.getBasicForm(),
mor.getPartOfSpeech(),
mor.getConjugationalForm(),
mor.getConjugationalType(),
mor.getPronunciations().asScala.toList,
mor.getReadings().asScala.toList,
t.getLength(),
t.getStart(),
t.getCost())
}
}
def stringToWords(input: String): Words = {
val tagger = SenFactory.getStringTagger(null)
val token = tagger.analyze(input).asScala.toList
for (t <- token) yield t.toString
}
}
object SenTest {
def main(args: Array[String]) {
val words = List("今日は初めてなの。",
"やさしくしてねっ")
words foreach { x => println(Keitaiso.stringToKeitaisos(x)) }
words foreach { x => println(Keitaiso.stringToWords(x)) }
}
}
|
kenkov/smtscala
|
src/main/scala/jp/kenkov/smt/japanese/sen/Sen.scala
|
Scala
|
mit
| 2,179
|
package xyz.discretezoo.web.db.v1
import xyz.discretezoo.web.db.ZooPostgresProfile.api._
case class GraphSPX(zooid: Int, rSPX: Int, sSPX: Int)
class GraphsSPX(tag: Tag) extends Table[GraphSPX](tag, "graph_cvt") {
def zooid: Rep[Int] = column[Int]("zooid", O.PrimaryKey)
def rSPX = column[Int]("spx_r")
def sSPX = column[Int]("spx_s")
def * = (
zooid,
rSPX,
sSPX
) <> ((GraphSPX.apply _).tupled, GraphSPX.unapply)
}
|
DiscreteZOO/DiscreteZOO-web
|
src/main/scala/xyz/discretezoo/web/db/v1/GraphSPX.scala
|
Scala
|
mit
| 443
|
/*
* @author Bharath Kumar
* @author Philip Stutz
*
* Copyright 2015 iHealth Technologies
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.signalcollect
import akka.actor.{ActorRef, Props}
import akka.pattern.ask
import akka.remote.testkit.{MultiNodeConfig, MultiNodeSpec}
import akka.testkit.ImplicitSender
import akka.util.Timeout
import com.signalcollect.ClusterTestUtils._
import com.signalcollect.examples.{PageRankEdge, PageRankVertex}
import com.signalcollect.nodeprovisioning.cluster.{ClusterNodeProvisionerActor, RetrieveNodeActors}
import com.typesafe.config.ConfigFactory
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.time.{Millis, Seconds, Span}
import scala.concurrent.duration._
class ClusterPageRankSpecMultiJvmNode1 extends ClusterPageRankSpec
class ClusterPageRankSpecMultiJvmNode2 extends ClusterPageRankSpec
class ClusterPageRankSpecMultiJvmNode3 extends ClusterPageRankSpec
object ClusterPageRankConfig extends MultiNodeConfig {
val provisioner = role("provisioner")
val node1 = role("node1")
val node2 = role("node2")
val clusterName = "ClusterPageRankSpec"
val seedPort = 2558
nodeConfig(provisioner) {
MultiJvmConfig.provisionerCommonConfig(seedPort)
}
commonConfig {
val mappingsConfig =
"""akka.actor.kryo.mappings {
| "com.signalcollect.ModularAggregator" = 133,
| "com.signalcollect.ClusterPageRankSpec$$anonfun$2" = 134,
| "com.signalcollect.ClusterPageRankSpec$$anonfun$3" = 135,
| "com.signalcollect.ClusterPageRankSpec$$anonfun$4" = 136,
| "com.signalcollect.ClusterPageRankSpec$$anonfun$5" = 137
| }""".stripMargin
val largeTimeoutConfig =
"""
|akka {
| testconductor {
|
| # Timeout for joining a barrier: this is the maximum time any participant
| # waits for everybody else to join a named barrier.
| barrier-timeout = 100s
|
| # Timeout for interrogation of TestConductor’s Controller actor
| query-timeout = 20s
|
| # amount of time for the ClientFSM to wait for the connection to the conductor
| # to be successful
| connect-timeout = 60s
|
| # Number of connect attempts to be made to the conductor controller
| client-reconnects = 20
| }
|}
""".stripMargin
ConfigFactory.parseString(largeTimeoutConfig).withFallback(MultiJvmConfig.nodeCommonConfig(clusterName, seedPort, mappingsConfig))
}
}
class ClusterPageRankSpec extends MultiNodeSpec(ClusterPageRankConfig) with STMultiNodeSpec
with ImplicitSender with ScalaFutures {
import ClusterPageRankConfig._
override def initialParticipants = roles.size
val workers = roles.size
override def atStartup() = println("Starting")
override def afterTermination() = println("Terminated")
implicit override val patienceConfig =
PatienceConfig(timeout = scaled(Span(300, Seconds)), interval = scaled(Span(1000, Millis)))
def buildPageRankGraph(graph: Graph[Any, Any], edgeTuples: Traversable[Tuple2[Int, Int]]): Graph[Any, Any] = {
edgeTuples foreach {
case (sourceId: Int, targetId: Int) =>
graph.addVertex(new PageRankVertex(sourceId, 0.85))
graph.addVertex(new PageRankVertex(targetId, 0.85))
graph.addEdge(sourceId, new PageRankEdge(targetId))
}
graph
}
val pageRankFiveCycleVerifier: Vertex[_, _, _, _] => Boolean = v => {
val state = v.state.asInstanceOf[Double]
val expectedState = 1.0
val correct = (state - expectedState).abs < 0.01
if (!correct) {
System.err.println("Problematic vertex: id=" + v.id + ", expected state=" + expectedState + ", actual state=" + state)
}
correct
}
val pageRankFiveStarVerifier: (Vertex[_, _, _, _]) => Boolean = v => {
val state = v.state.asInstanceOf[Double]
val expectedState = if (v.id == 4.0) 0.66 else 0.15
val correct = (state - expectedState).abs < 0.00001
if (!correct) {
System.err.println("Problematic vertex: id=" + v.id + ", expected state=" + expectedState + ", actual state=" + state)
}
correct
}
val pageRankTwoOnTwoGridVerifier: Vertex[_, _, _, _] => Boolean = v => {
val state = v.state.asInstanceOf[Double]
val expectedState = 1.0
val correct = (state - expectedState).abs < 0.01
if (!correct) {
System.err.println("Problematic vertex: id=" + v.id + ", expected state=" + expectedState + ", actual state=" + state)
}
correct
}
val pageRankTorusVerifier: Vertex[_, _, _, _] => Boolean = v => {
val state = v.state.asInstanceOf[Double]
val expectedState = 1.0
val correct = (state - expectedState).abs < 0.01
if (!correct) {
System.err.println("Problematic vertex: id=" + v.id + ", expected state=" + expectedState + ", actual state=" + state)
}
correct
}
"PageRank algorithm" must {
implicit val timeout = Timeout(30.seconds)
"deliver correct results on a 5-cycle graph" in within(100.seconds) {
val prefix = TestConfig.prefix
val fiveCycleEdges = List((0, 1), (1, 2), (2, 3), (3, 4), (4, 0))
runOn(provisioner) {
val masterActor = system.actorOf(Props(classOf[ClusterNodeProvisionerActor], MultiJvmConfig.idleDetectionPropagationDelayInMilliseconds,
prefix, workers), "ClusterMasterBootstrap")
val nodeActorsFuture = (masterActor ? RetrieveNodeActors).mapTo[Array[ActorRef]]
whenReady(nodeActorsFuture) { nodeActors =>
assert(nodeActors.length == workers)
val computeGraphFactories: List[() => Graph[Any, Any]] = List(() => GraphBuilder.withActorSystem(system).withActorNamePrefix(prefix)
.withPreallocatedNodes(nodeActors).build)
test(graphProviders = computeGraphFactories, verify = pageRankFiveCycleVerifier, buildGraph = buildPageRankGraph(_, fiveCycleEdges),
signalThreshold = 0.001) shouldBe true
}
}
enterBarrier("PageRank - test1 done")
}
"deliver correct results on a 5-star graph" in {
val prefix = TestConfig.prefix
val fiveStarEdges = List((0, 4), (1, 4), (2, 4), (3, 4))
runOn(provisioner) {
val masterActor = system.actorOf(Props(classOf[ClusterNodeProvisionerActor], MultiJvmConfig.idleDetectionPropagationDelayInMilliseconds,
prefix, workers), "ClusterMasterBootstrap")
val nodeActorsFuture = (masterActor ? RetrieveNodeActors).mapTo[Array[ActorRef]]
whenReady(nodeActorsFuture) { nodeActors =>
assert(nodeActors.length == workers)
val computeGraphFactories: List[() => Graph[Any, Any]] = List(() => GraphBuilder.withActorSystem(system)
.withPreallocatedNodes(nodeActors).build)
test(graphProviders = computeGraphFactories, verify = pageRankFiveStarVerifier, buildGraph = buildPageRankGraph(_, fiveStarEdges)) shouldBe true
}
}
enterBarrier("PageRank - test2 done")
}
"deliver correct results on a 2*2 symmetric grid" in {
val prefix = TestConfig.prefix
val symmetricTwoOnTwoGridEdges = new Grid(2, 2)
runOn(provisioner) {
val masterActor = system.actorOf(Props(classOf[ClusterNodeProvisionerActor], MultiJvmConfig.idleDetectionPropagationDelayInMilliseconds,
prefix, workers), "ClusterMasterBootstrap")
val nodeActorsFuture = (masterActor ? RetrieveNodeActors).mapTo[Array[ActorRef]]
whenReady(nodeActorsFuture) { nodeActors =>
assert(nodeActors.length == workers)
val computeGraphFactories: List[() => Graph[Any, Any]] = List(() => GraphBuilder.withActorSystem(system)
.withPreallocatedNodes(nodeActors).build)
test(graphProviders = computeGraphFactories, verify = pageRankTwoOnTwoGridVerifier, buildGraph = buildPageRankGraph(_, symmetricTwoOnTwoGridEdges), signalThreshold = 0.001) shouldBe true
}
}
enterBarrier("PageRank - test3 done")
}
"deliver correct results on a 5*5 torus" in {
val prefix = TestConfig.prefix
val symmetricTorusEdges = new Torus(5, 5)
runOn(provisioner) {
val masterActor = system.actorOf(Props(classOf[ClusterNodeProvisionerActor], MultiJvmConfig.idleDetectionPropagationDelayInMilliseconds,
prefix, workers), "ClusterMasterBootstrap")
val nodeActorsFuture = (masterActor ? RetrieveNodeActors).mapTo[Array[ActorRef]]
whenReady(nodeActorsFuture) { nodeActors =>
assert(nodeActors.length == workers)
val computeGraphFactories: List[() => Graph[Any, Any]] = List(() => GraphBuilder.withActorSystem(system)
.withPreallocatedNodes(nodeActors).build)
test(graphProviders = computeGraphFactories, verify = pageRankTorusVerifier, buildGraph = buildPageRankGraph(_, symmetricTorusEdges), signalThreshold = 0.001) shouldBe true
}
}
enterBarrier("PageRank - test4 done")
}
}
enterBarrier("PageRank - all tests done")
}
|
hicolour/signal-collect
|
src/multi-jvm/scala/com.signalcollect/ClusterPageRankSpec.scala
|
Scala
|
apache-2.0
| 9,590
|
package org.ergoplatform.settings
import org.ergoplatform.ErgoScriptPredef
import org.ergoplatform.mining.emission.EmissionRules
import sigmastate.Values.ErgoTree
/**
 * Configuration for the monetary settings of the Ergo chain
*
* @see src/main/resources/application.conf for parameters description
*/
case class MonetarySettings(fixedRatePeriod: Int = 30 * 2 * 24 * 365,
epochLength: Int = 90 * 24 * 30,
fixedRate: Long = 75L * EmissionRules.CoinsInOneErgo,
oneEpochReduction: Long = 3L * EmissionRules.CoinsInOneErgo,
minerRewardDelay: Int = 720,
foundersInitialReward: Long = 75L * EmissionRules.CoinsInOneErgo / 10) {
val feeProposition: ErgoTree = ErgoScriptPredef.feeProposition(minerRewardDelay)
val feePropositionBytes: Array[Byte] = feeProposition.bytes
val emissionBoxProposition: ErgoTree = ErgoScriptPredef.emissionBoxProp(this)
val foundersBoxProposition: ErgoTree = ErgoScriptPredef.foundationScript(this)
}
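// Illustrative sketch (not part of the original file): assuming roughly 720 blocks per day, the
// defaults above correspond to about two years of fixed-rate emission and 90-day reduction epochs:
//   val settings = MonetarySettings()                    // all defaults
//   settings.fixedRate / EmissionRules.CoinsInOneErgo    // 75 ERG per block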
|
ScorexFoundation/sigmastate-interpreter
|
sigmastate/src/main/scala/org/ergoplatform/settings/MonetarySettings.scala
|
Scala
|
mit
| 1,079
|
package com.evojam.mongodb.evolutions.model.evolution
import scala.util.control.Exception.catching
import play.api.libs.json._
object State extends Enumeration {
type State = Value
val Applied, ApplyingUp, ApplyingDown = Value
def unapply(in: String) =
catching(classOf[NoSuchElementException])
.opt(State.withName(in))
implicit object Format extends Format[State] {
override def writes(in: State) = JsString(in.toString)
override def reads(in: JsValue) = in match {
case JsString(value) =>
unapply(value).map(JsSuccess(_, __))
.getOrElse(JsError(__, "validate.error.expected.State"))
case _ => JsError(__, "validate.error.expected.State")
}
}
}
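// Illustrative sketch (not part of the original file), assuming play-json in scope:
//   Json.toJson(State.Applied)                     // JsString("Applied")
//   JsString("ApplyingUp").validate[State.State]   // JsSuccess(State.ApplyingUp)
//   JsString("bogus").validate[State.State]        // JsError("validate.error.expected.State")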
|
evojam/mongodb-evolutions-scala
|
src/main/scala/com/evojam/mongodb/evolutions/model/evolution/State.scala
|
Scala
|
apache-2.0
| 714
|
package org.jetbrains.plugins.scala.debugger.evaluateExpression
import org.jetbrains.plugins.scala.debugger.{ScalaDebuggerTestCase, ScalaVersion_2_11, ScalaVersion_2_12_M2}
/**
* User: Alexander Podkhalyuzin
* Date: 14.11.11
*/
class NewTemplateEvaluationTest extends NewTemplateEvaluationTestBase with ScalaVersion_2_11
class NewTemplateEvaluationTest_2_12_M2 extends NewTemplateEvaluationTestBase with ScalaVersion_2_12_M2
abstract class NewTemplateEvaluationTestBase extends ScalaDebuggerTestCase {
def testJavaLib() {
addFileToProject("Sample.scala",
"""
|object Sample {
| def main(args: Array[String]) {
| "stop here"
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 2)
runDebugger("Sample") {
waitForBreakpoint()
      evalEquals("new StringBuilder(\"test\").append(23)", "test23")
evalEquals("new Array[Int](2)", "[0,0]")
}
}
def testInnerClass() {
addFileToProject("Sample.scala",
"""
|object Sample {
| class Expr {}
| def main(args: Array[String]) {
| "stop here"
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 3)
runDebugger("Sample") {
waitForBreakpoint()
evalStartsWith("new Expr", "Sample$Expr")
}
}
def testOverloadingClass() {
addFileToProject("Sample.scala",
"""
|object Sample {
| class Expr(s: String) {
| def this(t: Int) {
| this("test")
| }
| }
| def main(args: Array[String]) {
| "stop here"
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 7)
runDebugger("Sample") {
waitForBreakpoint()
      evalStartsWith("new Expr(\"\")", "Sample$Expr")
evalStartsWith("new Expr(2)", "Sample$Expr")
}
}
}
|
LPTK/intellij-scala
|
test/org/jetbrains/plugins/scala/debugger/evaluateExpression/NewTemplateEvaluationTest.scala
|
Scala
|
apache-2.0
| 1,866
|
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.geohash
import scala.annotation.tailrec
import scala.collection.mutable
import scala.util.Random
class BoundingBoxSampler(twoGh: TwoGeoHashBoundingBox) extends Iterator[GeoHash] {
val rand = new Random
val (latSteps, lonSteps) = GeoHash.getLatitudeLongitudeSpanCount(twoGh.ll, twoGh.ur, twoGh.prec)
val numPoints = latSteps*lonSteps
require(numPoints <= Integer.MAX_VALUE, "Bounding box too big, cannot sample.")
require(numPoints > 1, "Only one point in bounding box, cannot sample.")
private val used = mutable.HashSet[Integer]()
override def hasNext = used.size < numPoints
@tailrec
final override def next(): GeoHash = {
if(!hasNext) throw new NoSuchElementException("No more points available.")
var idx = rand.nextInt(numPoints)
while (used.contains(idx)) {
idx = rand.nextInt(numPoints)
}
used.add(idx)
val gh = GeoHash.composeGeoHashFromBitIndicesAndPrec(
GeoHash.gridIndexForLatitude(twoGh.ll) + idx / lonSteps,
GeoHash.gridIndexForLongitude(twoGh.ll) + idx % lonSteps,
twoGh.ll.prec)
if(twoGh.bbox.covers(gh.bbox)) gh else next
}
}
class WithReplacementBoundingBoxSampler(twoGh: TwoGeoHashBoundingBox) extends Iterator[GeoHash] {
var internalSampler = new BoundingBoxSampler(twoGh)
private def checkInternal() = if (!internalSampler.hasNext) {
internalSampler = new BoundingBoxSampler(twoGh)
}
override def hasNext = {
checkInternal()
internalSampler.hasNext //should always be true unless something has gone horribly wrong
}
final override def next() = {
checkInternal()
internalSampler.next()
}
}
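// Illustrative sketch (not part of the original file); `twoGh` is assumed to be an existing
// TwoGeoHashBoundingBox. The plain sampler draws grid cells without replacement and throws once
// exhausted, while the with-replacement variant silently restarts:
//   val sampler = new BoundingBoxSampler(twoGh)
//   val cells   = (1 to 10).map(_ => sampler.next())   // up to numPoints distinct geohashes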
|
ronq/geomesa
|
geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/geohash/BoundingBoxSampler.scala
|
Scala
|
apache-2.0
| 2,160
|
package antonkulaga.projects.expressions
import org.denigma.binding.binders.Events
import org.denigma.binding.extensions._
import org.denigma.binding.views.BindableView
import org.scalajs.dom
import org.scalajs.dom._
import org.scalajs.dom.html.Input
import org.scalajs.dom.raw.{FileList, FileReader}
import rx.core.Var
import scala.annotation.tailrec
import scala.concurrent.{Future, Promise}
/**
 * View for loading gene expression data
* @param elem
*/
class ExpressionsView(val elem: Element) extends BindableView {
val click = Var(Events.createMouseEvent())
click.onChange("onClick", uniqueValue = true, skipInitial = true){
case ev=>
println("onclick works")
}
@tailrec final def filesToList(f: FileList, acc: List[File] = Nil, num: Int = 0): List[File] = {
if (f.length <= num) acc.reverse else filesToList(f, f.item(num)::acc, num + 1)
}
implicit def filesAsList(f: FileList): List[File] = filesToList(f, Nil, 0)
val onUpload = Var(Events.createEvent())
onUpload.onChange("onUpload", uniqueValue = true, skipInitial = true){case ev =>
if(ev.target == ev.currentTarget){
ev.preventDefault()
println("on upload works!")
ev.target match {
case input: Input =>
val files: List[File] = input.files
for(f <- files) {
val reader = new FileReader()
reader.readAsText(f)
readText(f)
}
case null => println("null file input")
case _ => dom.console.error("not a file input")
}
}
}
protected def readText(f: File): Future[String] = {
val result = Promise[String]
val reader = new FileReader()
def onLoadEnd(ev: ProgressEvent): Any = {
result.success(reader.result.toString)
}
def onErrorEnd(ev: Event): Any = {
result.failure(new Exception("READING FAILURE " + ev.toString))
}
reader.onloadend = onLoadEnd _
reader.onerror = onErrorEnd _
reader.readAsText(f)
result.future
}
}
|
antonkulaga/personal
|
app/js/src/main/scala/antonkulaga/projects/expressions/ExpressionsView.scala
|
Scala
|
mpl-2.0
| 2,028
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gearpump.streaming.state.impl
import org.scalacheck.Gen
import org.scalatest.prop.PropertyChecks
import org.scalatest.{Matchers, PropSpec}
class InMemoryCheckpointStoreSpec extends PropSpec with PropertyChecks with Matchers {
property("InMemoryCheckpointStore should provide read / write checkpoint") {
val timestampGen = Gen.chooseNum[Long](1, 1000)
val checkpointGen = Gen.alphaStr.map(_.getBytes("UTF-8"))
forAll(timestampGen, checkpointGen) { (timestamp: Long, checkpoint: Array[Byte]) =>
val store = new InMemoryCheckpointStore
store.recover(timestamp) shouldBe None
store.persist(timestamp, checkpoint)
store.recover(timestamp) shouldBe Some(checkpoint)
}
}
}
|
manuzhang/incubator-gearpump
|
streaming/src/test/scala/org/apache/gearpump/streaming/state/impl/InMemoryCheckpointStoreSpec.scala
|
Scala
|
apache-2.0
| 1,540
|
package tu.host.console
import tu.coreservice.annotator.KBAnnotatorImpl
import tu.model.knowledge.communication.{TrainingRequest, Request}
import tu.model.knowledge.primitive.KnowledgeString
import tu.model.knowledge.{Constant, KnowledgeURI}
import tu.coreservice.thinkinglifecycle.ThinkingLifeCycleMinimal
import tu.dataservice.knowledgebaseserver.KBAdapter
import org.slf4j.LoggerFactory
/**
* Console application of the TU.
*
*/
object AppMain {
val annotator = new KBAnnotatorImpl()
val log = LoggerFactory.getLogger(this.getClass)
def main(args: Array[String]) {
var exitConsole = false
if (args.size > 0 && args(0) == "trainfile") {
val file = args(1)
log.info("Loading train data from file " + file)
val lines = scala.io.Source.fromFile(file).mkString
train(lines)
}
log.info("Starting... ")
if (args.length <= 0) {
while (!exitConsole) {
log.info("Please type: 'exit' to quit, 'request' to enter ")
log.info("request mode, 'train' to enter training mode")
log.info("==================================================================")
log.info(" :>")
val command: String = Console.readLine()
exitConsole = command == "exit" || command == "'exit'"
if (exitConsole) {
log.info("Exiting...")
exitConsole = true
}
else if (command == "train") {
var exitTraining = false
while (!exitTraining) {
log.info("Training mode: please type train phrase or exit to terminate->")
val cmd = Console.readLine()
if (cmd == "exit") {
exitTraining = true
}
else {
train(cmd)
}
}
}
else if (command == "clean") {
KBAdapter.cleanDatabase()
}
else if (command == "request") {
//extract sentence
var exitRequest = false
while (!exitRequest) {
log.info("Request mode: please type request phrase or exit to terminate ->")
val cmd = Console.readLine()
if (cmd == "exit") {
exitRequest = true
}
else {
val requestText = cmd
log.debug("Running thinking lifecycle:" + command)
val r = new Request(KnowledgeString(requestText, "inputtext"), KnowledgeURI("testRequest"), KnowledgeURI(Constant.defaultDomainName))
val t = new ThinkingLifeCycleMinimal()
val res = t(r)
log.info("End")
}
}
}
}
}
def train(st: String) {
val command = "train"
val requestText = st
log.debug("Running thinking lifecycle:" + command)
val r = new TrainingRequest(KnowledgeString(requestText, "inputtext"), KnowledgeURI("testRequest"), KnowledgeURI(Constant.defaultDomainName))
val t = new ThinkingLifeCycleMinimal()
val res = t(r)
log.info("End")
}
}
}
|
keskival/2
|
host.console/src/main/scala/tu/App.scala
|
Scala
|
gpl-3.0
| 3,024
|
package com.dys.chatwork4s
import com.dys.chatwork4s.beans.rooms.RoomInfoWithoutDescription
import com.dys.chatwork4s.beans.users.{Contact, IncomingRequest, Me}
import com.dys.chatwork4s.beans.{MyStatus, RoomId, Task}
import com.dys.chatwork4s.http.HttpMethod
import com.dys.chatwork4s.http.parameters.{CreateRoom, GetTask}
import com.dys.chatwork4s.utils.Sync._
import scala.concurrent.ExecutionContext.global
import scala.concurrent.duration.Duration
/**
* Created by dys on 2017/02/06.
*/
class ChatWork(httpMethod: HttpMethod) {
private val chatWorkAsync = new ChatWorkAsync(httpMethod)
implicit private val atMost: Duration = Duration.Inf
implicit private val ec = global
def close(): Unit = chatWorkAsync.close()
/**
   * API for a single chat room
*
* @param roomId roomId
* @return
*/
def room(roomId: Long): ChatWorkRoom = new ChatWorkRoom(roomId, httpMethod)
/**
   * Gets information about the authenticated user (yourself)
*
* @return
*/
def me(): Me = await[Me](chatWorkAsync.me())
/**
   * Returns your number of unread messages, unread mentions (To), and open tasks
*
* @return
*/
def myStatus(): MyStatus = await[MyStatus](chatWorkAsync.myStatus())
/**
   * Gets the list of your tasks. (Up to 100 items can be retrieved; pagination for retrieving more data is planned.)
*
* @param getTask parameters
* @return
*/
def myTasks(getTask: GetTask = GetTask.empty): Seq[Task] = await[Seq[Task]](chatWorkAsync.myTasks(getTask))
/**
   * Gets your contact list
*
* @return
*/
def contacts(): Seq[Contact] = await[Seq[Contact]](chatWorkAsync.contacts())
/**
   * Gets the list of your chats
*
* @return
*/
def rooms(): Seq[RoomInfoWithoutDescription] = await[Seq[RoomInfoWithoutDescription]](chatWorkAsync.rooms())
/**
   * Creates a new group chat
*
* @param createRoom parameters
* @return
*/
def createRoom(createRoom: CreateRoom): RoomId = await[RoomId](chatWorkAsync.createRoom(createRoom))
/**
   * Gets the list of pending contact approval requests sent to you. (Up to 100 items can be retrieved; pagination for retrieving more data is planned.)
*
* @return
*/
def incomingRequests(): Seq[IncomingRequest] = await[Seq[IncomingRequest]](chatWorkAsync.incomingRequests())
/**
   * Approves a contact approval request sent to you
*
* @param requestId requestId
* @return
*/
def approveRequest(requestId: Long): Contact = await[Contact](chatWorkAsync.approveRequest(requestId))
/**
   * Cancels (declines) a contact approval request sent to you
*
* @param requestId requestId
* @return
*/
def cancelRequest(requestId: Long): Unit = await[Unit](chatWorkAsync.cancelRequest(requestId))
}
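// Illustrative usage sketch (not part of the original file); `httpMethod` is assumed to be an
// HttpMethod already configured with the API token. All calls block via Sync.await:
//   val chatwork = new ChatWork(httpMethod)
//   val status   = chatwork.myStatus()        // unread counts and open tasks
//   val rooms    = chatwork.rooms()           // chat list
//   chatwork.close()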
|
kado-yasuyuki/chatwork4s
|
src/main/scala/com/dys/chatwork4s/ChatWork.scala
|
Scala
|
apache-2.0
| 2,976
|
/************************************************************************\\
** Project ____ __ __ __ ____ ______ **
** / __/ / / / / / / / __/ / __ / (c) 2013 **
** / /_ / / / / / / / /_ / /_/ / **
** / __/ / / / / / / / __/ / __ / Dusan.Kysel@gmail.com **
** / /__ / /_/ / / /__ / /__ / / | | **
** /____/ /_____/ /____/ /____/ /_/ |_| All rights reserved. **
** **
** Redistribution and use permitted under the BSD 2-Clause license. **
** **
\\************************************************************************/
// 932718654
import Lib._
object Euler038
{
def pandigitalMultiple(l : Long) : Option[Long] = {
val m = Array(1, 2, 3, 4, 5, 6, 7) map (_ * l) map (_.toString)
var n = 9
var i = 0
while (n > 0) {
n -= m(i).length
i += 1
}
if (n != 0) {
return None
}
val s = m.mkString.take(9)
val r = s.toLong
if (isPandigital(r)) Some(r) else None
}
def main(args: Array[String]) {
    var max = 0L
    for (l <- 2L until 10000L) {
pandigitalMultiple(l) match {
case Some(x) =>
if (max < x) {
max = x
print("max = ")
}
println(x)
case None =>
}
}
}
}
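// Worked example (not part of the original file): for l = 9327 the concatenation of
// 9327 * 1 and 9327 * 2 is "9327" + "18654" = 932718654, which is 1-9 pandigital and is
// the maximum printed by the search above (matching the answer noted in the header comment).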
|
dusank/euler
|
src/main/scala/Euler038.scala
|
Scala
|
bsd-2-clause
| 1,535
|
package xitrum.routing
private object IgnoredPackages {
/**
* @param relPath "/" separated path to the .class file that may contain
* Xitrum routes
*
* @return true for those (java/..., javax/... etc.) that should be ignored
* because they obviously don't contain Xitrum routes, to speed up the
* scanning of routes
*/
def isIgnored(relPath: String) = {
// Standard Java and Scala
relPath.startsWith("java/") ||
relPath.startsWith("javafx/") ||
relPath.startsWith("javax/") ||
relPath.startsWith("scala/") ||
relPath.startsWith("sun/") ||
relPath.startsWith("com/sun/") ||
// Others
relPath.startsWith("akka/") ||
relPath.startsWith("ch/qos/logback") ||
relPath.startsWith("com/beachape/filemanagement") ||
relPath.startsWith("com/codahale/metrics") ||
relPath.startsWith("com/esotericsoftware/kryo/") ||
relPath.startsWith("com/esotericsoftware/reflectasm/") ||
relPath.startsWith("com/fasterxml/jackson/") ||
relPath.startsWith("com/google/") ||
relPath.startsWith("com/thoughtworks/paranamer/") ||
relPath.startsWith("com/twitter/") ||
relPath.startsWith("com/typesafe/") ||
relPath.startsWith("glokka/") ||
relPath.startsWith("io/netty/") ||
relPath.startsWith("javassist/") ||
relPath.startsWith("nl/grons/metrics/") ||
relPath.startsWith("org/aopalliance/") ||
relPath.startsWith("org/apache/") ||
relPath.startsWith("org/codehaus/commons/") ||
relPath.startsWith("org/codehaus/janino/") ||
relPath.startsWith("org/cyberneko/html/") ||
relPath.startsWith("org/fusesource/hawtjni/") ||
relPath.startsWith("org/fusesource/leveldbjni/") ||
relPath.startsWith("org/fusesource/scalamd/") ||
relPath.startsWith("org/fusesource/scalate/") ||
relPath.startsWith("org/jboss/") ||
relPath.startsWith("org/json4s/") ||
relPath.startsWith("org/iq80/leveldb") ||
relPath.startsWith("org/mozilla/") ||
relPath.startsWith("org/objectweb/asm/") ||
relPath.startsWith("org/objenesis/") ||
relPath.startsWith("org/openid4java/") ||
relPath.startsWith("org/slf4j/") ||
relPath.startsWith("org/slf4s/") ||
relPath.startsWith("org/w3c/") ||
relPath.startsWith("org/xml/") ||
relPath.startsWith("org/uncommons/maths/") ||
relPath.startsWith("rx/")
}
}
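// Illustrative sketch (not part of the original file):
//   IgnoredPackages.isIgnored("scala/collection/immutable/List.class")   // true, standard library
//   IgnoredPackages.isIgnored("my/app/ArticleController.class")          // false, may contain routes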
|
caiiiycuk/xitrum
|
src/main/scala/xitrum/routing/IgnoredPackages.scala
|
Scala
|
mit
| 2,347
|
package com.twitter.benchmark
import com.twitter.finagle.httpx.Request
import com.twitter.finatra.http.routing.HttpRouter
import com.twitter.finatra.http.{Controller, HttpServer}
object FinatraBenchmarkServerMain extends FinatraBenchmarkServer
class FinatraBenchmarkServer extends HttpServer {
override def configureHttp(router: HttpRouter) {
router
.add(
new Controller {
get("/json") { request: Request =>
Map("message" -> "Hello, World!")
}
get("/json/:id") { request: Request =>
Map("message" -> ("Hello " + request.params("id")))
}
get("/hi") { request: Request =>
"Hello " + request.params.getOrElse("name", "unnamed")
}
})
}
}
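// Illustrative sketch (not part of the original file): with the server running locally,
// the routes above respond roughly as
//   GET /json          -> {"message":"Hello, World!"}
//   GET /json/42       -> {"message":"Hello 42"}
//   GET /hi?name=Finch -> Hello Finch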
|
deanh/finatra
|
examples/benchmark-server/src/main/scala/com/twitter/benchmark/FinatraBenchmarkServer.scala
|
Scala
|
apache-2.0
| 765
|
package com.larroy.milight
object CommandCode extends Enumeration {
type Enum = Value
/**
* The command code for "RGBW COLOR LED ALL OFF".
*/
  val ALL_OFF = Value(0x41)
/**
* The command code for "GROUP 1 ALL OFF".
*/
val GROUP_1_OFF = Value(0x46)
/**
* The command code for "GROUP 2 ALL OFF".
*/
val GROUP_2_OFF = Value(0x48)
/**
* The command code for "GROUP 3 ALL OFF".
*/
val GROUP_3_OFF = Value(0x4A)
/**
* The command code for "GROUP 4 ALL OFF".
*/
val GROUP_4_OFF = Value(0x4C)
/**
* The command code for "RGBW COLOR LED ALL ON".
*/
val ALL_ON = Value(0x42)
/**
* The command code for "GROUP 1 ALL ON".
*/
val GROUP_1_ON = Value(0x45)
/**
* The command code for "GROUP 2 ALL ON".
*/
val GROUP_2_ON = Value(0x47)
/**
* The command code for "GROUP 3 ALL ON".
*/
val GROUP_3_ON = Value(0x49)
/**
* The command code for "GROUP 4 ALL ON".
*/
val GROUP_4_ON = Value(0x4B)
/**
* The command code for "SET COLOR TO WHITE (GROUP ALL)". Send an "ON"
* command 100ms before.
*/
val ALL_WHITE = Value(0xC2)
/**
* The command code for "SET COLOR TO WHITE (GROUP 1)". Send an "ON" command
* 100ms before.
*/
val GROUP_1_WHITE = Value(0xC5)
/**
* The command code for "SET COLOR TO WHITE (GROUP 2)". Send an "ON" command
* 100ms before.
*/
val GROUP_2_WHITE = Value(0xC7)
/**
* The command code for "SET COLOR TO WHITE (GROUP 3)". Send an "ON" command
* 100ms before.
*/
val GROUP_3_WHITE = Value(0xC9)
/**
* The command code for "SET COLOR TO WHITE (GROUP 4)". Send an "ON" command
* 100ms before.
*/
val GROUP_4_WHITE = Value(0xCB)
/**
* The command code for "DISCO MODE".
*/
val DISCO = Value(0x4D)
/**
* The command code for "DISCO SPEED FASTER".
*/
val DISCO_FASTER = Value(0x44)
/**
* The command code for "DISCO SPEED SLOWER".
*/
val DISCO_SLOWER = Value(0x43)
/**
* The command code for "COLOR SETTING" (part of a two-byte * command).
*/
val COLOR = Value(0x40)
/**
* The maximum color value, starting at 0.
*/
val MAX_COLOR = Value(0xFF)
/**
* The command code for "DIRECT BRIGHTNESS SETTING" (part of a two-byte * command).
*/
val BRIGHTNESS = Value(0x4E)
/**
* The maximum brightness value, starting at 0.
*/
val MAX_BRIGHTNESS = Value(0x3B)
/**
* NIGHTMODE commands, send "OFF" before. Ex. 0x41 -> 0xC1
*/
val NIGHTMODE_ALL = Value(0xC1)
val GROUP_1_NIGHTMODE = Value(0xC6)
val GROUP_2_NIGHTMODE = Value(0xC8)
val GROUP_3_NIGHTMODE = Value(0xCA)
val GROUP_4_NIGHTMODE = Value(0xCC)
}
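// Illustrative sketch (not part of the original file): command codes are sent as single bytes,
// and the white/night-mode codes expect the matching "ON"/"OFF" command ~100ms earlier, e.g.
//   CommandCode.GROUP_1_ON.id.toByte      // 0x45
//   // wait ~100ms, as documented above
//   CommandCode.GROUP_1_WHITE.id.toByte   // 0xC5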
|
larroy/milight
|
src/main/scala/com/larroy/milight/CommandCode.scala
|
Scala
|
apache-2.0
| 2,566
|
/**
* Copyright (C) 2015 Matt Christiansen (matt@nikore.net)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.nikore.gozer.config
import com.google.inject.{Singleton, Provides, AbstractModule}
import com.typesafe.config.{ConfigFactory, Config}
class ConfigFileModule(val config: Config) extends AbstractModule {
def this() {
this(ConfigFactory.load())
}
override def configure(): Unit = {}
@Provides
@Singleton
def providesConfig: Config = config
}
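// Illustrative sketch (not part of the original file), assuming Guice on the classpath:
//   val injector = com.google.inject.Guice.createInjector(new ConfigFileModule())
//   val config   = injector.getInstance(classOf[Config])   // the singleton Config provided above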
|
nikore/gozer
|
config/src/main/scala/net/nikore/gozer/config/ConfigFileModule.scala
|
Scala
|
apache-2.0
| 999
|
package com.datayumyum.helloPOS
import java.net.URL
import org.json._
import scala.io.Source
case class Store(uuid: String, name: String, address: Address, phone: String, url: String, catalog: Map[String, List[Product]]) {
override def toString(): String = {
    name + "\n" + address.toString() + "\n" + phone + "\n" + url
}
}
object Store {
def from(jsonStr: String): Store = {
val jsonObject = new JSONObject(jsonStr)
val products = jsonObject.getJSONArray("products")
val catalog = jsonObject.getJSONArray("catalog")
def getUrl(p: JSONObject): String = {
var url: String = "http://www.com"
try {
url = p.getString("url")
} catch {
case e: JSONException => url = "http://www.flaticon.com/png/256/45787.png"
}
url
}
var myCatalog = collection.mutable.Map[String, List[Product]]()
(0 until catalog.length()).foreach { index =>
val category = catalog.getJSONObject(index)
val catName = category.getString("category/name")
val products = category.getJSONArray("products")
val productList = (0 until products.length()).map { index =>
val p = products.getJSONObject(index)
if (p.has("product/components")) {
val components = p.getJSONArray("product/components")
val ingredients = (0 until components.length()).map { index =>
val c = components.getJSONObject(index)
Product(uuid = c.getString("product/uuid"), name = c.getString("product/name"),
sku = c.getString("product/sku"),
imageURL = getUrl(c),
price = c.getDouble("product/price"), ingredients = None)
}.toList
Product(uuid = p.getString("product/uuid"), name = p.getString("product/name"),
sku = p.getString("product/sku"),
imageURL = getUrl(p),
price = p.getDouble("product/price"), ingredients = Some(ingredients))
} else {
Product(uuid = p.getString("product/uuid"), name = p.getString("product/name"),
sku = p.getString("product/sku"),
imageURL = getUrl(p),
price = p.getDouble("product/price"), ingredients = None)
}
}.toList
myCatalog(catName) = productList.sortWith { (item1, item2) => item1.sku < item2.sku}
}
val addr = jsonObject.getJSONObject("address")
val address = new Address(line1 = addr.getString("address/line1"), city = addr.getString("address/city"), state = addr.getString("address/state"), zip = addr.getString("address/zip"))
val store = Store(uuid = jsonObject.getString("store/uuid"), name = jsonObject.getString("store/name"),
address = address,
phone = jsonObject.getString("phone"),
url = jsonObject.getString("url"),
catalog = myCatalog.toMap)
store
}
def findById(id: String): Store = {
val storeJsonStr: String = Source.fromInputStream(new URL(s"http://hive.kaicode.com:3000/pos/store/${id}").openStream).mkString
Store.from(storeJsonStr)
}
}
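// Illustrative sketch (not part of the original file); the id is a hypothetical placeholder:
//   val store = Store.findById("some-store-uuid")   // fetches and parses the store JSON
//   store.catalog.keys                               // category names
//   store.catalog.values.flatten.map(_.sku)          // SKUs of all products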
|
sonwh98/helloPOS
|
src/main/scala/com/datayumyum/helloPOS/Store.scala
|
Scala
|
gpl-2.0
| 3,035
|
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.kudu.tools.stats
import com.beust.jcommander.Parameters
import org.locationtech.geomesa.kudu.data.KuduDataStore
import org.locationtech.geomesa.kudu.tools.KuduDataStoreCommand
import org.locationtech.geomesa.kudu.tools.KuduDataStoreCommand.KuduParams
import org.locationtech.geomesa.kudu.tools.stats.KuduStatsCountCommand.KuduStatsCountParams
import org.locationtech.geomesa.tools.RequiredTypeNameParam
import org.locationtech.geomesa.tools.stats.StatsCountCommand
import org.locationtech.geomesa.tools.stats.StatsCountCommand.StatsCountParams
class KuduStatsCountCommand extends StatsCountCommand[KuduDataStore] with KuduDataStoreCommand {
override val params = new KuduStatsCountParams
}
object KuduStatsCountCommand {
@Parameters(commandDescription = "Estimate or calculate feature counts in a GeoMesa feature type")
class KuduStatsCountParams extends StatsCountParams with KuduParams with RequiredTypeNameParam
}
|
locationtech/geomesa
|
geomesa-kudu/geomesa-kudu-tools/src/main/scala/org/locationtech/geomesa/kudu/tools/stats/KuduStatsCountCommand.scala
|
Scala
|
apache-2.0
| 1,424
|
package org.ensime.indexer
import org.scalatest._
class MemberNameSpec extends WordSpec with Matchers {
  "MemberName" should {
    "remove \"package\" from FQNs" in {
MemberName(ClassName(PackageName(List("org", "example")), "package"), "member").fqnString shouldBe "org.example.member"
MemberName(ClassName(PackageName(List("org", "example")), "package$"), "member").fqnString shouldBe "org.example.member"
}
}
}
|
jacobono/ensime-server
|
core/src/test/scala/org/ensime/indexer/MethodNameSpec.scala
|
Scala
|
gpl-3.0
| 435
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import org.apache.hadoop.fs.Path
import org.apache.spark.annotation.Since
import org.apache.spark.ml.{Estimator, Model}
import org.apache.spark.ml.linalg.{BLAS, Vector, Vectors, VectorUDT}
import org.apache.spark.ml.param._
import org.apache.spark.ml.param.shared._
import org.apache.spark.ml.util._
import org.apache.spark.mllib.feature
import org.apache.spark.mllib.linalg.VectorImplicits._
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types._
import org.apache.spark.util.{Utils, VersionUtils}
/**
* Params for [[Word2Vec]] and [[Word2VecModel]].
*/
private[feature] trait Word2VecBase extends Params
with HasInputCol with HasOutputCol with HasMaxIter with HasStepSize with HasSeed {
/**
* The dimension of the code that you want to transform from words.
* Default: 100
* @group param
*/
final val vectorSize = new IntParam(
this, "vectorSize", "the dimension of codes after transforming from words (> 0)",
ParamValidators.gt(0))
setDefault(vectorSize -> 100)
/** @group getParam */
def getVectorSize: Int = $(vectorSize)
/**
* The window size (context words from [-window, window]).
* Default: 5
* @group expertParam
*/
final val windowSize = new IntParam(
this, "windowSize", "the window size (context words from [-window, window]) (> 0)",
ParamValidators.gt(0))
setDefault(windowSize -> 5)
/** @group expertGetParam */
def getWindowSize: Int = $(windowSize)
/**
* Number of partitions for sentences of words.
* Default: 1
* @group param
*/
final val numPartitions = new IntParam(
this, "numPartitions", "number of partitions for sentences of words (> 0)",
ParamValidators.gt(0))
setDefault(numPartitions -> 1)
/** @group getParam */
def getNumPartitions: Int = $(numPartitions)
/**
* The minimum number of times a token must appear to be included in the word2vec model's
* vocabulary.
* Default: 5
* @group param
*/
final val minCount = new IntParam(this, "minCount", "the minimum number of times a token must " +
"appear to be included in the word2vec model's vocabulary (>= 0)", ParamValidators.gtEq(0))
setDefault(minCount -> 5)
/** @group getParam */
def getMinCount: Int = $(minCount)
/**
* Sets the maximum length (in words) of each sentence in the input data.
* Any sentence longer than this threshold will be divided into chunks of
* up to `maxSentenceLength` size.
* Default: 1000
* @group param
*/
final val maxSentenceLength = new IntParam(this, "maxSentenceLength", "Maximum length " +
"(in words) of each sentence in the input data. Any sentence longer than this threshold will " +
"be divided into chunks up to the size (> 0)", ParamValidators.gt(0))
setDefault(maxSentenceLength -> 1000)
/** @group getParam */
def getMaxSentenceLength: Int = $(maxSentenceLength)
setDefault(stepSize -> 0.025)
setDefault(maxIter -> 1)
/**
* Validate and transform the input schema.
*/
protected def validateAndTransformSchema(schema: StructType): StructType = {
val typeCandidates = List(new ArrayType(StringType, true), new ArrayType(StringType, false))
SchemaUtils.checkColumnTypes(schema, $(inputCol), typeCandidates)
SchemaUtils.appendColumn(schema, $(outputCol), new VectorUDT)
}
}
/**
* Word2Vec trains a model of `Map(String, Vector)`, i.e. transforms a word into a code for further
* natural language processing or machine learning process.
*/
@Since("1.4.0")
final class Word2Vec @Since("1.4.0") (
@Since("1.4.0") override val uid: String)
extends Estimator[Word2VecModel] with Word2VecBase with DefaultParamsWritable {
@Since("1.4.0")
def this() = this(Identifiable.randomUID("w2v"))
/** @group setParam */
@Since("1.4.0")
def setInputCol(value: String): this.type = set(inputCol, value)
/** @group setParam */
@Since("1.4.0")
def setOutputCol(value: String): this.type = set(outputCol, value)
/** @group setParam */
@Since("1.4.0")
def setVectorSize(value: Int): this.type = set(vectorSize, value)
/** @group expertSetParam */
@Since("1.6.0")
def setWindowSize(value: Int): this.type = set(windowSize, value)
/** @group setParam */
@Since("1.4.0")
def setStepSize(value: Double): this.type = set(stepSize, value)
/** @group setParam */
@Since("1.4.0")
def setNumPartitions(value: Int): this.type = set(numPartitions, value)
/** @group setParam */
@Since("1.4.0")
def setMaxIter(value: Int): this.type = set(maxIter, value)
/** @group setParam */
@Since("1.4.0")
def setSeed(value: Long): this.type = set(seed, value)
/** @group setParam */
@Since("1.4.0")
def setMinCount(value: Int): this.type = set(minCount, value)
/** @group setParam */
@Since("2.0.0")
def setMaxSentenceLength(value: Int): this.type = set(maxSentenceLength, value)
@Since("2.0.0")
override def fit(dataset: Dataset[_]): Word2VecModel = {
transformSchema(dataset.schema, logging = true)
val input = dataset.select($(inputCol)).rdd.map(_.getAs[Seq[String]](0))
val wordVectors = new feature.Word2Vec()
.setLearningRate($(stepSize))
.setMinCount($(minCount))
.setNumIterations($(maxIter))
.setNumPartitions($(numPartitions))
.setSeed($(seed))
.setVectorSize($(vectorSize))
.setWindowSize($(windowSize))
.setMaxSentenceLength($(maxSentenceLength))
.fit(input)
copyValues(new Word2VecModel(uid, wordVectors).setParent(this))
}
@Since("1.4.0")
override def transformSchema(schema: StructType): StructType = {
validateAndTransformSchema(schema)
}
@Since("1.4.1")
override def copy(extra: ParamMap): Word2Vec = defaultCopy(extra)
}
@Since("1.6.0")
object Word2Vec extends DefaultParamsReadable[Word2Vec] {
@Since("1.6.0")
override def load(path: String): Word2Vec = super.load(path)
}
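// Illustrative usage sketch (not part of the original file); column names are assumptions:
//   val w2v = new Word2Vec().setInputCol("tokens").setOutputCol("features").setVectorSize(50)
//   val model = w2v.fit(tokenizedDf)               // tokenizedDf: DataFrame with an array<string> column
//   val withVectors = model.transform(tokenizedDf) // averages the word vectors of each sentence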
/**
* Model fitted by [[Word2Vec]].
*/
@Since("1.4.0")
class Word2VecModel private[ml] (
@Since("1.4.0") override val uid: String,
@transient private val wordVectors: feature.Word2VecModel)
extends Model[Word2VecModel] with Word2VecBase with MLWritable {
import Word2VecModel._
/**
   * Returns a dataframe with two fields, "word" and "vector", with "word" being a String
   * and the vector being the DenseVector that it is mapped to.
*/
@Since("1.5.0")
@transient lazy val getVectors: DataFrame = {
val spark = SparkSession.builder().getOrCreate()
val wordVec = wordVectors.getVectors.mapValues(vec => Vectors.dense(vec.map(_.toDouble)))
spark.createDataFrame(wordVec.toSeq).toDF("word", "vector")
}
/**
* Find "num" number of words closest in similarity to the given word, not
* including the word itself.
* @return a dataframe with columns "word" and "similarity" of the word and the cosine
* similarities between the synonyms and the given word vector.
*/
@Since("1.5.0")
def findSynonyms(word: String, num: Int): DataFrame = {
val spark = SparkSession.builder().getOrCreate()
spark.createDataFrame(findSynonymsArray(word, num)).toDF("word", "similarity")
}
/**
* Find "num" number of words whose vector representation is most similar to the supplied vector.
* If the supplied vector is the vector representation of a word in the model's vocabulary,
* that word will be in the results.
* @return a dataframe with columns "word" and "similarity" of the word and the cosine
* similarities between the synonyms and the given word vector.
*/
@Since("2.0.0")
def findSynonyms(vec: Vector, num: Int): DataFrame = {
val spark = SparkSession.builder().getOrCreate()
spark.createDataFrame(findSynonymsArray(vec, num)).toDF("word", "similarity")
}
/**
* Find "num" number of words whose vector representation is most similar to the supplied vector.
* If the supplied vector is the vector representation of a word in the model's vocabulary,
* that word will be in the results.
   * @return an array of the words and the cosine similarities between the synonyms and the
   * given word vector.
*/
@Since("2.2.0")
def findSynonymsArray(vec: Vector, num: Int): Array[(String, Double)] = {
wordVectors.findSynonyms(vec, num)
}
/**
* Find "num" number of words closest in similarity to the given word, not
* including the word itself.
   * @return an array of the words and the cosine similarities between the synonyms and the
   * given word vector.
*/
@Since("2.2.0")
def findSynonymsArray(word: String, num: Int): Array[(String, Double)] = {
wordVectors.findSynonyms(word, num)
}
/** @group setParam */
@Since("1.4.0")
def setInputCol(value: String): this.type = set(inputCol, value)
/** @group setParam */
@Since("1.4.0")
def setOutputCol(value: String): this.type = set(outputCol, value)
/**
* Transform a sentence column to a vector column to represent the whole sentence. The transform
* is performed by averaging all word vectors it contains.
*/
@Since("2.0.0")
override def transform(dataset: Dataset[_]): DataFrame = {
transformSchema(dataset.schema, logging = true)
val vectors = wordVectors.getVectors
.mapValues(vv => Vectors.dense(vv.map(_.toDouble)))
.map(identity) // mapValues doesn't return a serializable map (SI-7005)
val bVectors = dataset.sparkSession.sparkContext.broadcast(vectors)
val d = $(vectorSize)
val word2Vec = udf { sentence: Seq[String] =>
if (sentence.isEmpty) {
Vectors.sparse(d, Array.empty[Int], Array.empty[Double])
} else {
val sum = Vectors.zeros(d)
sentence.foreach { word =>
bVectors.value.get(word).foreach { v =>
BLAS.axpy(1.0, v, sum)
}
}
BLAS.scal(1.0 / sentence.size, sum)
sum
}
}
dataset.withColumn($(outputCol), word2Vec(col($(inputCol))))
}
@Since("1.4.0")
override def transformSchema(schema: StructType): StructType = {
validateAndTransformSchema(schema)
}
@Since("1.4.1")
override def copy(extra: ParamMap): Word2VecModel = {
val copied = new Word2VecModel(uid, wordVectors)
copyValues(copied, extra).setParent(parent)
}
@Since("1.6.0")
override def write: MLWriter = new Word2VecModelWriter(this)
}
@Since("1.6.0")
object Word2VecModel extends MLReadable[Word2VecModel] {
private case class Data(word: String, vector: Array[Float])
private[Word2VecModel]
class Word2VecModelWriter(instance: Word2VecModel) extends MLWriter {
override protected def saveImpl(path: String): Unit = {
DefaultParamsWriter.saveMetadata(instance, path, sc)
val wordVectors = instance.wordVectors.getVectors
val dataSeq = wordVectors.toSeq.map { case (word, vector) => Data(word, vector) }
val dataPath = new Path(path, "data").toString
val bufferSizeInBytes = Utils.byteStringAsBytes(
sc.conf.get("spark.kryoserializer.buffer.max", "64m"))
val numPartitions = Word2VecModelWriter.calculateNumberOfPartitions(
bufferSizeInBytes, instance.wordVectors.wordIndex.size, instance.getVectorSize)
sparkSession.createDataFrame(dataSeq)
.repartition(numPartitions)
.write
.parquet(dataPath)
}
}
private[feature]
object Word2VecModelWriter {
/**
* Calculate the number of partitions to use in saving the model.
* [SPARK-11994] - We want to partition the model in partitions smaller than
* spark.kryoserializer.buffer.max
* @param bufferSizeInBytes Set to spark.kryoserializer.buffer.max
* @param numWords Vocab size
* @param vectorSize Vector length for each word
*/
def calculateNumberOfPartitions(
bufferSizeInBytes: Long,
numWords: Int,
vectorSize: Int): Int = {
val floatSize = 4L // Use Long to help avoid overflow
val averageWordSize = 15
// Calculate the approximate size of the model.
// Assuming an average word size of 15 bytes, the formula is:
// (floatSize * vectorSize + 15) * numWords
val approximateSizeInBytes = (floatSize * vectorSize + averageWordSize) * numWords
val numPartitions = (approximateSizeInBytes / bufferSizeInBytes) + 1
require(numPartitions < 10e8, s"Word2VecModel calculated that it needs $numPartitions " +
s"partitions to save this model, which is too large. Try increasing " +
s"spark.kryoserializer.buffer.max so that Word2VecModel can use fewer partitions.")
numPartitions.toInt
}
}
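  // Worked example (not part of the original file): with the default 64m kryo buffer
  // (67,108,864 bytes), a vocabulary of 1,000,000 words and vectorSize 100, the estimate is
  // (4 * 100 + 15) * 1,000,000 = 415,000,000 bytes, giving 415000000 / 67108864 + 1 = 7 partitions.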
private class Word2VecModelReader extends MLReader[Word2VecModel] {
private val className = classOf[Word2VecModel].getName
override def load(path: String): Word2VecModel = {
val spark = sparkSession
import spark.implicits._
val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
val (major, minor) = VersionUtils.majorMinorVersion(metadata.sparkVersion)
val dataPath = new Path(path, "data").toString
val oldModel = if (major < 2 || (major == 2 && minor < 2)) {
val data = spark.read.parquet(dataPath)
.select("wordIndex", "wordVectors")
.head()
val wordIndex = data.getAs[Map[String, Int]](0)
val wordVectors = data.getAs[Seq[Float]](1).toArray
new feature.Word2VecModel(wordIndex, wordVectors)
} else {
val wordVectorsMap = spark.read.parquet(dataPath).as[Data]
.collect()
.map(wordVector => (wordVector.word, wordVector.vector))
.toMap
new feature.Word2VecModel(wordVectorsMap)
}
val model = new Word2VecModel(metadata.uid, oldModel)
DefaultParamsReader.getAndSetParams(model, metadata)
model
}
}
@Since("1.6.0")
override def read: MLReader[Word2VecModel] = new Word2VecModelReader
@Since("1.6.0")
override def load(path: String): Word2VecModel = super.load(path)
}
|
SHASHANKB/spark
|
mllib/src/main/scala/org/apache/spark/ml/feature/Word2Vec.scala
|
Scala
|
apache-2.0
| 14,779
|
def cleanPath(s:String) = {
var r = s
while(r.exists(_=='/')) {
r = r.dropWhile(_!='/').drop(1)
}
r
}
def lineSplit(s:String) = {
s.map(x => if (x.isLetterOrDigit) x else ' ')
.split(" ").filter(!_.isEmpty)
}
def indexExample(inputPath:String, outputPath:String) = {
var r = sc.wholeTextFiles(inputPath).map(x => (cleanPath(x._1),x._2))
.flatMap{case (f,c) => lineSplit(c).map(x => ((x,f), 1))}
.reduceByKey(_+_)
.map{case ((w,f),v) => (w,(f,v))}
.groupByKey()
.filter(x => x._2 != null && !x._2.filter(_._2 > 20).isEmpty)
.map(x => (x._1,x._2.toList.sortBy(y => -y._2)))
.sortBy({case (k,v) => v.head._2}, false)
var output = scala.tools.nsc.io.File(outputPath)
  output.writeAll(r.collect().mkString("\n"))
}
indexExample("input/4-*", "output/Index.txt")
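// Illustrative note (not part of the original script): `sc` is the SparkContext provided by the
// Spark shell. Each output line pairs a word with its per-file counts, keeping only words that
// occur more than 20 times in at least one file, e.g. (hypothetical)
//   (word,List((4-a,57), (4-b,31)))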
|
gorkinovich/SGDI
|
Spark/ejemplos/Index2.scala
|
Scala
|
mit
| 867
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.storage
import org.scalatest.FunSuite
import org.apache.spark.Success
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler._
/**
* Test the behavior of StorageStatusListener in response to all relevant events.
*/
class StorageStatusListenerSuite extends FunSuite {
private val bm1 = BlockManagerId("big", "dog", 1)
private val bm2 = BlockManagerId("fat", "duck", 2)
private val taskInfo1 = new TaskInfo(0, 0, 0, 0, "big", "dog", TaskLocality.ANY, false)
private val taskInfo2 = new TaskInfo(0, 0, 0, 0, "fat", "duck", TaskLocality.ANY, false)
test("block manager added/removed") {
val listener = new StorageStatusListener
// Block manager add
assert(listener.executorIdToStorageStatus.size === 0)
listener.onBlockManagerAdded(SparkListenerBlockManagerAdded(1L, bm1, 1000L))
assert(listener.executorIdToStorageStatus.size === 1)
assert(listener.executorIdToStorageStatus.get("big").isDefined)
assert(listener.executorIdToStorageStatus("big").blockManagerId === bm1)
assert(listener.executorIdToStorageStatus("big").maxMem === 1000L)
assert(listener.executorIdToStorageStatus("big").numBlocks === 0)
listener.onBlockManagerAdded(SparkListenerBlockManagerAdded(1L, bm2, 2000L))
assert(listener.executorIdToStorageStatus.size === 2)
assert(listener.executorIdToStorageStatus.get("fat").isDefined)
assert(listener.executorIdToStorageStatus("fat").blockManagerId === bm2)
assert(listener.executorIdToStorageStatus("fat").maxMem === 2000L)
assert(listener.executorIdToStorageStatus("fat").numBlocks === 0)
// Block manager remove
listener.onBlockManagerRemoved(SparkListenerBlockManagerRemoved(1L, bm1))
assert(listener.executorIdToStorageStatus.size === 1)
assert(!listener.executorIdToStorageStatus.get("big").isDefined)
assert(listener.executorIdToStorageStatus.get("fat").isDefined)
listener.onBlockManagerRemoved(SparkListenerBlockManagerRemoved(1L, bm2))
assert(listener.executorIdToStorageStatus.size === 0)
assert(!listener.executorIdToStorageStatus.get("big").isDefined)
assert(!listener.executorIdToStorageStatus.get("fat").isDefined)
}
test("task end without updated blocks") {
val listener = new StorageStatusListener
listener.onBlockManagerAdded(SparkListenerBlockManagerAdded(1L, bm1, 1000L))
listener.onBlockManagerAdded(SparkListenerBlockManagerAdded(1L, bm2, 2000L))
val taskMetrics = new TaskMetrics
// Task end with no updated blocks
assert(listener.executorIdToStorageStatus("big").numBlocks === 0)
assert(listener.executorIdToStorageStatus("fat").numBlocks === 0)
listener.onTaskEnd(SparkListenerTaskEnd(1, 0, "obliteration", Success, taskInfo1, taskMetrics))
assert(listener.executorIdToStorageStatus("big").numBlocks === 0)
assert(listener.executorIdToStorageStatus("fat").numBlocks === 0)
listener.onTaskEnd(SparkListenerTaskEnd(1, 0, "obliteration", Success, taskInfo2, taskMetrics))
assert(listener.executorIdToStorageStatus("big").numBlocks === 0)
assert(listener.executorIdToStorageStatus("fat").numBlocks === 0)
}
test("task end with updated blocks") {
val listener = new StorageStatusListener
listener.onBlockManagerAdded(SparkListenerBlockManagerAdded(1L, bm1, 1000L))
listener.onBlockManagerAdded(SparkListenerBlockManagerAdded(1L, bm2, 2000L))
val taskMetrics1 = new TaskMetrics
val taskMetrics2 = new TaskMetrics
val block1 = (RDDBlockId(1, 1), BlockStatus(StorageLevel.DISK_ONLY, 0L, 100L, 0L))
val block2 = (RDDBlockId(1, 2), BlockStatus(StorageLevel.DISK_ONLY, 0L, 200L, 0L))
val block3 = (RDDBlockId(4, 0), BlockStatus(StorageLevel.DISK_ONLY, 0L, 300L, 0L))
taskMetrics1.updatedBlocks = Some(Seq(block1, block2))
taskMetrics2.updatedBlocks = Some(Seq(block3))
// Task end with new blocks
assert(listener.executorIdToStorageStatus("big").numBlocks === 0)
assert(listener.executorIdToStorageStatus("fat").numBlocks === 0)
listener.onTaskEnd(SparkListenerTaskEnd(1, 0, "obliteration", Success, taskInfo1, taskMetrics1))
assert(listener.executorIdToStorageStatus("big").numBlocks === 2)
assert(listener.executorIdToStorageStatus("fat").numBlocks === 0)
assert(listener.executorIdToStorageStatus("big").containsBlock(RDDBlockId(1, 1)))
assert(listener.executorIdToStorageStatus("big").containsBlock(RDDBlockId(1, 2)))
assert(listener.executorIdToStorageStatus("fat").numBlocks === 0)
listener.onTaskEnd(SparkListenerTaskEnd(1, 0, "obliteration", Success, taskInfo2, taskMetrics2))
assert(listener.executorIdToStorageStatus("big").numBlocks === 2)
assert(listener.executorIdToStorageStatus("fat").numBlocks === 1)
assert(listener.executorIdToStorageStatus("big").containsBlock(RDDBlockId(1, 1)))
assert(listener.executorIdToStorageStatus("big").containsBlock(RDDBlockId(1, 2)))
assert(listener.executorIdToStorageStatus("fat").containsBlock(RDDBlockId(4, 0)))
// Task end with dropped blocks
val droppedBlock1 = (RDDBlockId(1, 1), BlockStatus(StorageLevel.NONE, 0L, 0L, 0L))
val droppedBlock2 = (RDDBlockId(1, 2), BlockStatus(StorageLevel.NONE, 0L, 0L, 0L))
val droppedBlock3 = (RDDBlockId(4, 0), BlockStatus(StorageLevel.NONE, 0L, 0L, 0L))
taskMetrics1.updatedBlocks = Some(Seq(droppedBlock1, droppedBlock3))
taskMetrics2.updatedBlocks = Some(Seq(droppedBlock2, droppedBlock3))
listener.onTaskEnd(SparkListenerTaskEnd(1, 0, "obliteration", Success, taskInfo1, taskMetrics1))
assert(listener.executorIdToStorageStatus("big").numBlocks === 1)
assert(listener.executorIdToStorageStatus("fat").numBlocks === 1)
assert(!listener.executorIdToStorageStatus("big").containsBlock(RDDBlockId(1, 1)))
assert(listener.executorIdToStorageStatus("big").containsBlock(RDDBlockId(1, 2)))
assert(listener.executorIdToStorageStatus("fat").containsBlock(RDDBlockId(4, 0)))
listener.onTaskEnd(SparkListenerTaskEnd(1, 0, "obliteration", Success, taskInfo2, taskMetrics2))
assert(listener.executorIdToStorageStatus("big").numBlocks === 1)
assert(listener.executorIdToStorageStatus("fat").numBlocks === 0)
assert(!listener.executorIdToStorageStatus("big").containsBlock(RDDBlockId(1, 1)))
assert(listener.executorIdToStorageStatus("big").containsBlock(RDDBlockId(1, 2)))
assert(listener.executorIdToStorageStatus("fat").numBlocks === 0)
}
test("unpersist RDD") {
val listener = new StorageStatusListener
listener.onBlockManagerAdded(SparkListenerBlockManagerAdded(1L, bm1, 1000L))
val taskMetrics1 = new TaskMetrics
val taskMetrics2 = new TaskMetrics
val block1 = (RDDBlockId(1, 1), BlockStatus(StorageLevel.DISK_ONLY, 0L, 100L, 0L))
val block2 = (RDDBlockId(1, 2), BlockStatus(StorageLevel.DISK_ONLY, 0L, 200L, 0L))
val block3 = (RDDBlockId(4, 0), BlockStatus(StorageLevel.DISK_ONLY, 0L, 300L, 0L))
taskMetrics1.updatedBlocks = Some(Seq(block1, block2))
taskMetrics2.updatedBlocks = Some(Seq(block3))
listener.onTaskEnd(SparkListenerTaskEnd(1, 0, "obliteration", Success, taskInfo1, taskMetrics1))
listener.onTaskEnd(SparkListenerTaskEnd(1, 0, "obliteration", Success, taskInfo1, taskMetrics2))
assert(listener.executorIdToStorageStatus("big").numBlocks === 3)
// Unpersist RDD
listener.onUnpersistRDD(SparkListenerUnpersistRDD(9090))
assert(listener.executorIdToStorageStatus("big").numBlocks === 3)
listener.onUnpersistRDD(SparkListenerUnpersistRDD(4))
assert(listener.executorIdToStorageStatus("big").numBlocks === 2)
assert(listener.executorIdToStorageStatus("big").containsBlock(RDDBlockId(1, 1)))
assert(listener.executorIdToStorageStatus("big").containsBlock(RDDBlockId(1, 2)))
listener.onUnpersistRDD(SparkListenerUnpersistRDD(1))
assert(listener.executorIdToStorageStatus("big").numBlocks === 0)
}
}
|
Dax1n/spark-core
|
core/src/test/scala/org/apache/spark/storage/StorageStatusListenerSuite.scala
|
Scala
|
apache-2.0
| 8,709
|
package infra.piece.image
import infra.piece.core.Piece
/**
* @author alari (name.alari@gmail.com)
* @since 08.05.14 13:40
*/
case class ImagePiece(title: Option[String],
filename: String,
sizes: Map[String, ImageSize],
fileIds: Seq[String],
id: Option[String] = Piece.genSomeId(),
kind: String = "image") extends Piece
|
alari/play-content
|
module-code/app/infra/piece/image/ImagePiece.scala
|
Scala
|
mit
| 435
|
package com.twitter.finagle.redis.integration
import com.twitter.finagle.redis.ClientError
import com.twitter.finagle.redis.RedisClientTest
import com.twitter.finagle.redis.tags.{ClientTest, RedisTest}
import com.twitter.io.Buf
import com.twitter.util.Await
import org.junit.Ignore
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import scala.collection.{Set => CollectionSet}
@Ignore
@RunWith(classOf[JUnitRunner])
final class SetClientIntegrationSuite extends RedisClientTest {
private[this] val oneElemAdded = 1
private[this] val oneElemAddErrorMessage = "Could not add one element"
private[this] val key = Buf.Utf8("member")
test("Correctly add, then pop members of a set", RedisTest, ClientTest) {
withRedisClient { client =>
assert(Await.result(client.sAdd(key, List(bufBar))) == oneElemAdded, oneElemAddErrorMessage)
assert(Await.result(client.sPop(key)) == Some(bufBar), "Could not remove bufBar")
assert(Await.result(client.sAdd(key, List(bufBaz))) == oneElemAdded, oneElemAddErrorMessage)
assert(Await.result(client.sPop(key)) == Some(bufBaz), "Could not remove bufBaz")
}
}
test("Correctly add, then pop members from a set while counting them", RedisTest, ClientTest) {
withRedisClient { client =>
assert(Await.result(client.sCard(key)) == 0)
assert(Await.result(client.sAdd(key, List(bufBar))) == oneElemAdded, oneElemAddErrorMessage)
assert(Await.result(client.sCard(key)) == 1)
assert(Await.result(client.sAdd(key, List(bufBaz))) == oneElemAdded, oneElemAddErrorMessage)
assert(Await.result(client.sCard(key)) == 2)
Await.result(client.sPop(key))
assert(Await.result(client.sCard(key)) == 1)
Await.result(client.sPop(key))
assert(Await.result(client.sCard(key)) == 0)
}
}
test("Correctly add and pop members from a set, while looking at the set", RedisTest,
ClientTest) {
withRedisClient { client =>
assert(Await.result(client.sAdd(key, List(bufFoo))) == oneElemAdded, oneElemAddErrorMessage)
assert(Await.result(client.sIsMember(key, bufFoo)) == true, "Foo was not a member of the set")
assert(Await.result(client.sIsMember(key, bufBaz)) == false, "Baz was found in the set")
assert(Await.result(client.sAdd(key, List(bufBaz))) == oneElemAdded, oneElemAddErrorMessage)
assert(Await.result(client.sIsMember(key, bufBaz)) == true, "Baz was not found in the set")
assert(Await.result(client.sIsMember(key, bufFoo)) == true, "Foo was not a member of the set")
Await.result(client.sPop(key))
Await.result(client.sPop(key))
assert(Await.result(client.sIsMember(key, bufBaz)) == false, "Baz was found in the set")
assert(Await.result(client.sIsMember(key, bufFoo)) == false, "Foo was found in the set")
}
}
test("Correctly add, examine members of a set, then pop them off and reexamine", RedisTest,
ClientTest) {
withRedisClient { client =>
assert(Await.result(client.sAdd(key, List(bufMoo))) == oneElemAdded, oneElemAddErrorMessage)
assert(Await.result(client.sAdd(key, List(bufBoo))) == oneElemAdded, oneElemAddErrorMessage)
val strings: CollectionSet[String] = Await.result(client.sMembers(key)).map(b2s)
assert(strings == CollectionSet("moo", "boo"))
Await.result(client.sPop(key))
Await.result(client.sPop(key))
assert(Await.result(client.sMembers(key)) == CollectionSet(), "Collection set was not EMPTY")
}
}
test("Correctly add members to a set, then remove them", RedisTest, ClientTest) {
withRedisClient { client =>
assert(Await.result(client.sAdd(key, List(bufMoo))) == oneElemAdded, oneElemAddErrorMessage)
assert(Await.result(client.sAdd(key, List(bufBoo))) == oneElemAdded, oneElemAddErrorMessage)
assert(Await.result(client.sRem(key, List(bufMoo))) == 1, "Could not remove one Element")
assert(Await.result(client.sRem(key, List(bufBoo))) == 1, "Could not remove one Element")
assert(Await.result(client.sRem(key, List(bufMoo))) == 0, "Removed an element when it should "
+ "not have been possible")
}
}
test("Correctly add member to a set, and return random", RedisTest, ClientTest) {
withRedisClient { client =>
val allMembers = Seq(bufFoo, bufBar)
val empty = Await.result(client.sRandMember(key))
assert(empty.size == 0, "The empty set was not empty!")
allMembers.foreach(m => {
assert(Await.result(client.sAdd(key, List(m))) == oneElemAdded, oneElemAddErrorMessage)
})
val oneMember = Await.result(client.sRandMember(key))
assert(oneMember.size == 1, "The one member set had an incorrect number of members")
assert(allMembers.contains(oneMember.head) == true)
val twoMembers = Await.result(client.sRandMember(key, count = Some(2)))
assert(twoMembers.size == 2, "The two member set had an incorrect number of members")
assert(twoMembers.forall(allMembers.contains(_)) == true, "The two member set did not " +
"match the original Sequence")
val setMembers = Await.result(client.sRandMember(key, count = Some(5)))
assert(setMembers.size == 2)
assert(setMembers.forall(allMembers.contains(_)) == true, "The set members did not match " +
"the original Sequence")
val negMembers = Await.result(client.sRandMember(key, count = Some(-4)))
assert(negMembers.size == 4, "The set did not handle a negative member")
}
}
test("Correctly perform set intersection variations", RedisTest, ClientTest) {
withRedisClient { client =>
val a = Buf.Utf8("a")
val b = Buf.Utf8("b")
val c = Buf.Utf8("c")
val d = Buf.Utf8("d")
val e = Buf.Utf8("e")
val bufFooMembers = CollectionSet(a, b, c, d)
Await.result(client.sAdd(bufFoo, bufFooMembers.toList))
Await.result(client.sAdd(bufBoo, List(c)))
Await.result(client.sAdd(bufBaz, List(a, c, e)))
Await.result(client.sAdd(bufMoo, List(a, b)))
// Should intersect a single value
assert(Await.result(client.sInter(Seq(bufFoo, bufBoo, bufBaz))) == CollectionSet(c))
// Has no intersection
assert(Await.result(client.sInter(Seq(bufBoo, bufMoo))) == CollectionSet.empty)
// bufBar is not a known key
assert(Await.result(client.sInter(Seq(bufFoo, bufBar))) == CollectionSet.empty)
      // neither bufNum nor bufBar is a known key
assert(Await.result(client.sInter(Seq(bufNum, bufBar))) == CollectionSet.empty)
val bufFooInter = Await.result(client.sInter(Seq(bufFoo)))
// Only one key will give itself as intersection
assert(bufFooMembers forall (m => bufFooInter.contains(m)))
// At least one non-empty key is required
intercept[ClientError] {
Await.result(client.sInter(Seq()))
}
intercept[ClientError] {
Await.result(client.sInter(Seq(Buf.Empty)))
}
}
}
}
| adriancole/finagle | finagle-redis/src/test/scala/com/twitter/finagle/redis/commands/set/SetClientIntegrationSuite.scala | Scala | apache-2.0 | 6,984 |
/**
* Copyright (C) 2009-2011 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.scalate
import filter.FilterRequest
import introspector.Introspector
import support.RenderHelper
import util._
import util.Strings.isEmpty
import java.io.File
import java.text.{ DateFormat, NumberFormat }
import java.util.{ Locale, Date }
import xml.{ Node, PCData, NodeSeq, NodeBuffer }
import collection.mutable.{ ListBuffer, HashMap }
import reflect.ClassTag
import scala.language.implicitConversions
object RenderContext {
val threadLocal = new ThreadLocal[RenderContext]
def capture(body: => Unit) = apply().capture(body)
def captureNodeSeq(body: => Unit) = apply().captureNodeSeq(body)
def apply(): RenderContext = threadLocal.get
def using[T](that: RenderContext)(func: => T): T = {
val previous = threadLocal.get
try {
threadLocal.set(that)
func
} finally {
if (previous != null) {
threadLocal.set(previous)
} else {
threadLocal.remove
}
}
}
}
/**
* Provides helper methods for rendering templates and values and for working with attributes.
*
* @see DefaultRenderContext
* @see org.fusesource.scalate.servlet.ServletRenderContext
*/
trait RenderContext {
/**
* Default string used to output null values
*/
var nullString = ""
/**
* Default string used to output None values
*/
var noneString = ""
/**
   * Whether markup-sensitive characters for HTML/XML elements like & > < are escaped
*/
var escapeMarkup = true
/**
   * Whether we should wrap CSS in CDATA sections when using the CSS filter
*/
var wrapCssInCData = true
var currentTemplate: String = _
var viewPrefixes = List("")
var viewPostfixes = engine.codeGenerators.keysIterator.map(x => "." + x).toList
def engine: TemplateEngine
/**
* Renders the provided value and inserts it into the final rendered document without sanitizing the value.
*/
def <<(value: Any): Unit
/**
* Renders the provided value, sanitizes any XML special characters and inserts
* it into the final rendered document.
*/
def <<<(value: Any): Unit
/**
* Returns the request URI
*/
def requestUri: String
/**
* Returns the Resource of the request
*/
def requestResource: Option[Resource]
/**
* Returns the file for the given request resource
*/
def requestFile: Option[File]
/**
* Returns a local link to the given file which should be within the [sourceDirectories]
*/
def uri(file: File): Option[String] = {
for (s <- engine.sourceDirectories) {
if (Files.isDescendant(s, file)) {
return Some(uri("/" + Files.relativeUri(s, file)))
}
}
None
}
/**
   * Converts an absolute URL starting with "/" using the prefix of the web application, if any
*/
def uri(u: String): String = u
/**
* Loads the given template or file as a String for inclusion into the current page.
*
* Useful if you want to include some client side template, particularly with a .jade extension
*/
def load(uri: String): String = engine.resourceLoader.load(uri)
/**
* Access the attributes available in this context
*/
def attributes: AttributeMap
/**
* Sorted list of attribute keys
*/
def attributeKeys = attributes.keySet.toList.sortWith(_ < _)
/**
   * Returns the attribute of the given name and type, or throws a [[org.fusesource.scalate.NoValueSetException]] if it is not set
*/
def attribute[T](name: String): T =
attributeOrElse(name, throw new NoValueSetException(name))
/**
* Returns the attribute of the given name and type or the default value if it is not available
*/
def attributeOrElse[T](name: String, defaultValue: => T): T = {
attributes.get(name)
.getOrElse(defaultValue)
.asInstanceOf[T]
}
def setAttribute(name: String, value: Option[Any]) {
value match {
case Some(v) => attributes(name) = v
case None => attributes.remove(name)
}
}
/**
   * Captures the body of the function call, then sets it as the value of the given attribute
*/
def captureAttribute(name: String)(body: => Unit): Unit = {
val v = capture(body)
attributes(name) = v
}
/**
   * Captures the body of the function call, then appends it to the value of the given attribute
*/
def captureAttributeAppend(name: String)(body: => Unit): Unit = {
val text = capture(body)
val v = attributes.get(name) match {
case Some(t) => t + text
case _ => text
}
attributes(name) = v
}
/**
   * Creates an instance of the given type using dependency injection to inject the necessary values into
* the object
*/
def inject[T](implicit manifest: ClassTag[T]): T = {
val clazz = manifest.runtimeClass
Objects.tryInstantiate(clazz, List(this)) match {
case Some(t) => t.asInstanceOf[T]
case _ => throw new NoInjectionException(clazz)
}
}
/////////////////////////////////////////////////////////////////////
//
// Rendering API
//
  /////////////////////////////////////////////////////////////////////
def value(any: Any, shouldSanitize: Boolean = escapeMarkup): Any = {
def sanitize(text: String): Any = if (shouldSanitize) { Unescaped(RenderHelper.sanitize(text)) } else { text }
any match {
case u: Unit => ""
case null => sanitize(nullString)
case None => sanitize(noneString)
case Some(a) => value(a, shouldSanitize)
case Unescaped(text) => text
case f: Function0[_] => value(f(), shouldSanitize)
case v: String => sanitize(v)
case v: Date => sanitize(dateFormat.format(v))
case n: Double if n.isNaN => "NaN"
case n: Float if n.isNaN => "NaN"
case v: Double => sanitize(numberFormat.format(v))
case v: Float => sanitize(numberFormat.format(v))
case v: Number => sanitize(numberFormat.format(v))
      case f: FilterRequest =>
        // NOTE assume a filter does the correct sanitizing
        filter(f.filter, f.content.toString)
// No need to sanitize nodes as they are already sanitized
case s: NodeBuffer =>
// No need to sanitize nodes as they are already sanitized
(s.foldLeft(new StringBuilder) {
(rc, x) =>
x match {
case cd: PCData => rc.append(cd.data)
case _ => rc.append(x)
}
}).toString
case n: Node => n.toString
case x: Traversable[Any] =>
x.map(value(_, shouldSanitize)).mkString("")
// TODO for any should we use the renderView?
case v: Any => sanitize(v.toString)
}
}
def valueEscaped(any: Any) = value(any, true)
def valueUnescaped(any: Any) = value(any, false)
/**
* Ensures that the string value of the parameter is not markup escaped
*/
def unescape(v: Any): Unit = this << value(v, false)
/**
* Ensures that the string value of the parameter is always markup escaped
*/
def escape(v: Any): Unit = this << value(v, true)
def filter(name: String, content: String): String = {
val context = this
engine.filter(name) match {
case None => throw new NoSuchFilterException(name)
case Some(f) => f.filter(context, content)
}
}
def include(path: String): Unit = include(path, false)
def include(path: String, layout: Boolean): Unit = include(path, layout, Nil)
/**
* Includes the given template path
*
   * @param layout if true then the layout is applied to the included template
*/
def include(path: String, layout: Boolean, extraBindings: Traversable[Binding]): Unit = {
val uri = resolveUri(path)
withUri(uri) {
val template = engine.load(uri, extraBindings)
if (layout) {
engine.layout(template, this)
} else {
template.render(this)
}
}
}
protected def blankString: String = ""
/**
* Renders a collection of model objects with an optional separator
*/
def collection(objects: Traversable[AnyRef], viewName: String = "index", separator: => Any = blankString): Unit = {
var first = true
for (model <- objects) {
if (first) {
first = false
} else {
this << separator
}
view(model, viewName)
}
}
/**
* Renders the view of the given model object, looking for the view in
* packageName/className.viewName.ext
*/
def view(model: AnyRef, viewName: String = "index"): Unit = {
if (model == null) {
throw new NullPointerException("No model object given!")
}
val classSearchList = new ListBuffer[Class[_]]()
def buildClassList(clazz: Class[_]): Unit = {
if (clazz != null && clazz != classOf[Object] && !classSearchList.contains(clazz)) {
classSearchList.append(clazz)
buildClassList(clazz.getSuperclass)
for (i <- clazz.getInterfaces) {
buildClassList(i)
}
}
}
def viewForClass(clazz: Class[_]): String = {
for (prefix <- viewPrefixes; postfix <- viewPostfixes) {
val path = clazz.getName.replace('.', '/') + "." + viewName + postfix
val fullPath = if (isEmpty(prefix)) { "/" + path } else { "/" + prefix + "/" + path }
if (engine.resourceLoader.exists(fullPath)) {
return fullPath
}
}
null
}
def searchForView(): String = {
for (i <- classSearchList) {
val rc = viewForClass(i)
if (rc != null) {
return rc
}
}
null
}
buildClassList(model.getClass)
val templateUri = searchForView()
if (templateUri == null) {
throw new NoSuchViewException(model, viewName)
} else {
using(model) {
include(templateUri)
}
}
}
/**
   * Allows a symbol to be used with arguments to the { @link render } or { @link layout } methods, such as
   * <code>render("foo.ssp", 'foo -> 123, 'bar -> 456) {...}</code>
*/
implicit def toStringPair(entry: (Symbol, Any)): (String, Any) = (entry._1.name, entry._2)
/**
* Renders the given template with optional attributes
*/
def render(path: String, attributeMap: Map[String, Any] = Map()): Unit = {
// TODO should we call engine.layout() instead??
val uri = resolveUri(path)
val context = this
withAttributes(attributeMap) {
withUri(uri) {
engine.load(uri).render(context)
}
}
}
/**
* Renders the given template with optional attributes passing the body block as the *body* attribute
   * so that it can be laid out using the template.
*/
def layout(path: String, attrMap: Map[String, Any] = Map())(body: => Unit): Unit = {
val bodyText = capture(body)
render(path, attrMap + ("body" -> bodyText))
}
/**
   * Uses the new set of attributes for the given block, then restores the previous values
   * (and removes any newly defined attributes)
*/
def withAttributes[T](attrMap: Map[String, Any])(block: => T): T = {
val oldValues = new HashMap[String, Any]
// lets replace attributes, saving the old values
for ((key, value) <- attrMap) {
val oldValue = attributes.get(key)
if (oldValue.isDefined) {
oldValues.put(key, oldValue.get)
}
attributes(key) = value
}
val answer = block
// restore old values
for (key <- attrMap.keysIterator) {
val oldValue = oldValues.get(key)
if (removeOldAttributes || oldValue.isDefined) {
setAttribute(key, oldValue)
}
}
answer
}
/**
* Should we remove attributes from the context after we've rendered a child request?
*/
protected def removeOldAttributes = true
def withUri[T](uri: String)(block: => T): T = {
val original = currentTemplate
try {
currentTemplate = uri
// lets keep track of the templates
attributes("scalateTemplates") = uri :: attributeOrElse[List[String]]("scalateTemplates", List()).distinct
block
} finally {
currentTemplate = original
}
}
protected def resolveUri(path: String) = if (currentTemplate != null) {
engine.resourceLoader.resolve(currentTemplate, path)
} else {
path
}
protected def using[T](model: AnyRef)(op: => T): T = {
val original = attributes.get("it")
try {
attributes("it") = model
op
} finally {
setAttribute("it", original)
}
}
/**
* Evaluates the specified body capturing any output written to this context
* during the evaluation
*/
def capture(body: => Unit): String
/**
* Evaluates the template capturing any output written to this page context during the body evaluation
*/
def capture(template: Template): String
/**
* Captures the text of the body and then parses it as markup
*/
def captureNodeSeq(body: => Unit): NodeSeq = XmlHelper.textToNodeSeq(capture(body))
/**
* Captures the text of the template rendering and then parses it as markup
*/
def captureNodeSeq(template: Template): NodeSeq = XmlHelper.textToNodeSeq(capture(template))
/*
  Note: because implicit conversions applied to a `=> Unit` argument only capture the last
  statement of the block, as per this discussion:
  http://old.nabble.com/-scala--is-this-a-compiler-bug-or-just-a-surprising-language-quirk-%28or-newbie--lack-of-understanding-%3A%29-ts27917276.html
  we can no longer support this approach, which is a shame.
  So tags must take `=> Unit` as a parameter and then either take a RenderContext as the first parameter block
  or use RenderContext() to get the current active context for capturing.
implicit def bodyToStringFunction(body: => Unit): () => String = {
() => {
println("capturing the body....")
val answer = capture(body)
println("captured body: " + answer)
answer
}
}
implicit def toBody(body: => Unit): Body = new Body(this, body)
*/
/////////////////////////////////////////////////////////////////////
//
// introspection for dynamic templates or for archetype templates
//
/////////////////////////////////////////////////////////////////////
def introspect(aType: Class[_]) = Introspector(aType)
/////////////////////////////////////////////////////////////////////
//
// resource helpers/accessors
//
/////////////////////////////////////////////////////////////////////
private val resourceBeanAttribute = "it"
/**
   * Returns the JAXRS resource bean of the given type, or throws a
   * [[org.fusesource.scalate.NoValueSetException]] if it is not available
*/
def resource[T]: T = {
attribute[T](resourceBeanAttribute)
}
/**
* Returns the JAXRS resource bean of the given type or the default value if it is not available
*/
def resourceOrElse[T](defaultValue: T): T = {
attributeOrElse(resourceBeanAttribute, defaultValue)
}
/////////////////////////////////////////////////////////////////////
//
// custom object rendering
//
/////////////////////////////////////////////////////////////////////
private val _numberFormat = new Lazy(NumberFormat.getNumberInstance(locale))
private val _percentFormat = new Lazy(NumberFormat.getPercentInstance(locale))
private val _dateFormat = new Lazy(DateFormat.getDateInstance(DateFormat.FULL, locale))
/**
* Returns the formatted string using the locale of the users request or the default locale if not available
*/
def format(pattern: String, args: AnyRef*) = {
String.format(locale, pattern, args: _*)
}
def percent(number: Number) = percentFormat.format(number)
// Locale based formatters
// shame we can't use 'lazy var' for this cruft...
def numberFormat: NumberFormat = _numberFormat()
def numberFormat_=(value: NumberFormat): Unit = _numberFormat(value)
def percentFormat: NumberFormat = _percentFormat()
def percentFormat_=(value: NumberFormat): Unit = _percentFormat(value)
def dateFormat: DateFormat = _dateFormat()
def dateFormat_=(value: DateFormat): Unit = _dateFormat(value)
def locale: Locale = Locale.getDefault
/**
* Used to represent some text which does not need escaping
*/
case class Unescaped(text: String) {
override def toString = text
}
}
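// Hedged sketch (not part of the original file): the save/restore pattern that RenderContext.using
// and withUri rely on, reduced to a plain ThreadLocal so it can run on its own. The object name and
// string values are illustrative only.
object ThreadLocalSwapExample extends App {
  private val current = new ThreadLocal[String]
  // Sets `value` for the duration of `body`, then restores whatever was there before.
  def using[T](value: String)(body: => T): T = {
    val previous = current.get
    try {
      current.set(value)
      body
    } finally {
      if (previous != null) current.set(previous) else current.remove()
    }
  }
  current.set("outer")
  using("inner") { println(current.get) } // prints "inner"
  println(current.get)                    // prints "outer" again
}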
| maslovalex/scalate | scalate-core/src/main/scala/org/fusesource/scalate/RenderContext.scala | Scala | apache-2.0 | 16,913 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers.supervision
import org.joda.time.LocalDate
import connectors.DataCacheConnector
import controllers.{AmlsBaseController, CommonPlayDependencies}
import forms.{EmptyForm, Form2, InvalidForm, ValidForm}
import javax.inject.Inject
import models.supervision._
import play.api.mvc.MessagesControllerComponents
import utils.AuthAction
import views.html.supervision.supervision_start
import scala.concurrent.Future
class SupervisionStartController @Inject()(val dataCacheConnector: DataCacheConnector,
val authAction: AuthAction,
val ds: CommonPlayDependencies,
val cc: MessagesControllerComponents,
supervision_start: supervision_start) extends AmlsBaseController(ds, cc) {
def get(edit: Boolean = false) = authAction.async {
implicit request =>
dataCacheConnector.fetch[Supervision](request.credId, Supervision.key) map {
case Some(Supervision(anotherBody, _, _, _, _, _)) if getStartDate(anotherBody).isDefined
=> Ok(supervision_start(Form2[SupervisionStart](SupervisionStart(getStartDate(anotherBody).get)), edit))
case _ => Ok(supervision_start(EmptyForm, edit))
}
}
private def getStartDate(anotherBody: Option[AnotherBody]): Option[LocalDate] = {
anotherBody match {
case Some(body) if body.isInstanceOf[AnotherBodyYes] => body.asInstanceOf[AnotherBodyYes].startDate match {
case Some(sup) => Option(sup.startDate)
case _ => None
}
case _ => None
}
}
def post(edit: Boolean = false) = authAction.async {
implicit request =>
dataCacheConnector.fetch[Supervision](request.credId, Supervision.key) flatMap { supervision =>
def extraFields: Map[String, Seq[String]] = supervision match {
case Some(s) => getExtraFields(s)
case _ => Map()
}
def getExtraFields(s: Supervision): Map[String, Seq[String]] = {
s.anotherBody match {
case Some(data) if data.isInstanceOf[AnotherBodyYes] =>
Map("extraEndDate" -> Seq(data.asInstanceOf[AnotherBodyYes].endDate match {
case Some(date) => date.endDate.toString("yyyy-MM-dd")
case None => ""
}))
            case _ => Map() // covers None as well as a non-AnotherBodyYes value, avoiding a MatchError
}
}
Form2[SupervisionStart](request.body.asFormUrlEncoded.get ++ extraFields) match {
case f: InvalidForm => Future.successful(BadRequest(supervision_start(f, edit)))
case ValidForm(_, data) =>
dataCacheConnector.fetchAll(request.credId) flatMap {
optMap =>
val result = for {
cache <- optMap
supervision <- cache.getEntry[Supervision](Supervision.key)
anotherBody <- supervision.anotherBody
} yield {
dataCacheConnector.save[Supervision](request.credId, Supervision.key,
supervision.copy(anotherBody = Some(updateData(anotherBody, data)))) map {
_ => redirect(edit)
}
}
result getOrElse Future.failed(new Exception("Unable to retrieve sufficient data"))
}
}
}
}
private def updateData(anotherBody: AnotherBody, data: SupervisionStart): AnotherBody = {
val updatedAnotherBody = anotherBody match {
case a@AnotherBodyYes(_, _, _, _) => a.startDate(data)
case _ => throw new Exception("An UnknownException has occurred : SupervisionStartReasonsController")
}
updatedAnotherBody
}
private def redirect(edit: Boolean) = {
edit match {
case true => Redirect(routes.SummaryController.get)
case false => Redirect(routes.SupervisionEndController.get())
}
}
}
| hmrc/amls-frontend | app/controllers/supervision/SupervisionStartController.scala | Scala | apache-2.0 | 4,576 |
package com.owlandrews.nescala
trait Filter {
def Step(x:Int):Int
}
// First order filters are defined by the following parameters.
// y[n] = B0*x[n] + B1*x[n-1] - A1*y[n-1]
case class FirstOrderFilter(B0:Float, B1:Float, A1:Float) extends Filter {
private var prevX = 0F
private var prevY = 0F
def Step(x:Int):Int = {
val y = (B0 * x) + (B1 * prevX) - (A1 * prevY)
prevY = y
prevX = x
y.toInt
}
}
case class FilterChain(filters:List[Filter]){
def Step(x:Int):Int = filters.foldLeft(x)((y, filter) => filter.Step(y))
}
object FilterChain {
def apply(sampleRate:Int):FilterChain = sampleRate match {
case 0 => FilterChain(List.empty[Filter])
case _ => FilterChain(initializeFilters(sampleRate))
}
private def initializeFilters(sampleRate:Float) =
Filter.HighPassFilter(sampleRate, 90) ::
Filter.HighPassFilter(sampleRate, 440) ::
Filter.LowPassFilter(sampleRate, 14000) ::
Nil
}
object Filter {
// sampleRate: samples per second
// cutoffFreq: oscillations per second
def LowPassFilter(sampleRate: Float, cutoffFreq: Float): Filter = {
val c = sampleRate / math.Pi.toFloat / cutoffFreq
val a0i = 1 / (1 + c)
FirstOrderFilter(
B0 = a0i,
B1 = a0i,
A1 = (1 - c) * a0i
)
}
def HighPassFilter(sampleRate: Float, cutoffFreq: Float): Filter = {
val c = sampleRate / math.Pi.toFloat / cutoffFreq
val a0i = 1 / (1 + c)
FirstOrderFilter(
B0 = c * a0i,
B1 = -c * a0i,
A1 = (1 - c) * a0i
)
}
}
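// Hedged usage sketch (not part of the original file): shows how the first-order difference
// equation y[n] = B0*x[n] + B1*x[n-1] - A1*y[n-1] is applied sample by sample through the default
// chain built above. The object name, sample rate and sample values are illustrative only.
object FilterChainExample extends App {
  val chain = FilterChain(44100)           // high-pass 90 Hz, high-pass 440 Hz, low-pass 14 kHz
  val samples = List(0, 1000, 2000, 1000, 0, -1000)
  val filtered = samples.map(chain.Step)   // each Step folds the sample through every stateful filter
  println(filtered.mkString(", "))
}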
| hywelandrews/nescala | src/main/scala/com/owlandrews/nescala/Filter.scala | Scala | gpl-2.0 | 1,534 |
package dbx.api
import java.sql.Connection
import javax.inject.{Inject, Singleton}
import com.google.inject.{AbstractModule, Provider, TypeLiteral}
import dbx.api.Transactional.{TransactionSettings, TransactionSettingsBuilder}
import dbx.jdbc.{DataSourceTransactionManager, DataSourceUtils}
import dbx.transaction.PlatformTransactionManager
import play.api.Configuration
import play.api.Environment
import play.api.db.DBApi
/**
* A standard `Play` module to provide basic setup for DataSourceTransactionManager
*/
class SimpleModule extends AbstractModule {
override def configure = {
val transactionalKey = new TypeLiteral[Transactional[Connection]](){}
bind(transactionalKey).to(classOf[SimpleDBApiTransactional])
bind(classOf[TransactionManagerLookup]).to(classOf[SimpleDBApiTransactionManagerLookup])
val confProvider = binder().getProvider(classOf[Configuration])
val envProvider = binder().getProvider(classOf[Environment])
bind(classOf[TransactionSettings]).toProvider(new TransactionSettingsProvider(){
override def config = confProvider.get()
override def env = envProvider.get()
})
}
}
trait TransactionSettingsProvider extends Provider[TransactionSettings] {
def config: Configuration
def env: Environment
override def get(): TransactionSettings = {
config.getConfig("dbx.transactionSettings") match {
case Some(settings) =>
val builder = TransactionSettingsBuilder()
settings.getString("resource").foreach(builder.resource = _)
settings.getBoolean("readOnly").foreach(builder.readOnly = _)
settings.getString("isolation").foreach{ s => builder.isolation = Isolation.withName(s) }
settings.getString("propagation").foreach{ s => builder.propagation = Propagation.withName(s) }
settings.getInt("timeout").foreach(builder.timeout = _)
val cl = env.classLoader
settings.getStringSeq("rollbackFor").foreach { seq => builder.rollbackFor = seq.map(cl.loadClass(_)) }
settings.getStringSeq("noRollbackFor").foreach { seq => builder.noRollbackFor = seq.map(cl.loadClass(_)) }
builder.build()
case None => throw config.reportError("dbx.transactionSettings", "Can't load transactionSettings")
}
}
}
trait SimpleComponents {
def environment: Environment
def configuration: Configuration
def dbApi: DBApi
lazy val transactionManager: TransactionManagerLookup = new SimpleDBApiTransactionManagerLookup(dbApi)
lazy val transactionSettings: TransactionSettings = new TransactionSettingsProvider(){
override def config = configuration
override def env = environment
}.get()
lazy val transactional: Transactional[Connection] = new SimpleDBApiTransactional(dbApi, transactionManager, transactionSettings)
}
/**
* A simple Transactional function that uses jdbc Connection(obtained from DBApi and binding it to transaction
* execution context) as a transactional resource.
*/
@Singleton
class SimpleDBApiTransactional @Inject()(dbApi: DBApi, override val lookupTransactionManager: TransactionManagerLookup,
override val settings: TransactionSettings) extends Transactional[Connection] {
override def obtainResource(resource: String): Resource = {
DataSourceUtils.getConnection(dbApi.database(resource).dataSource)
}
override protected def releaseResource(resource: String, actualResource: Resource): Unit = {
DataSourceUtils.releaseConnection(actualResource, dbApi.database(resource).dataSource)
}
}
/**
* A simple TransactionManager lookuper that delegates all transaction management operations to the DataSourceTransactionManager
* for each DataSource looked up with `play.api.db.DBApi.database(name)`.
*/
@Singleton
class SimpleDBApiTransactionManagerLookup @Inject()(dbApi: DBApi) extends TransactionManagerLookup {
@volatile
private var managers = Map.empty[String, PlatformTransactionManager]
override def lookup(resource: String): PlatformTransactionManager = {
    // Re-check inside the lock so a manager created by a concurrent caller is reused rather than replaced
    managers.getOrElse(resource, synchronized {
      managers.getOrElse(resource, {
        val manager = new DataSourceTransactionManager(dbApi.database(resource).dataSource)
        managers = managers + (resource -> manager)
        manager
      })
    })
}
}
| lingcreative/play-dbx | src/main/scala/dbx/api/SimpleModule.scala | Scala | apache-2.0 | 4,308 |
package ch08
/*
* 1. Extend the following BankAccount class to a CheckingAccount
* class that charges $1 for every deposit and withdrawal.
*
* class BankAccount(initialBalance: Double) {
* private var balance = initialBalance
* def deposit(amount: Double) = { balance += amount; balance }
* def withdraw(amount: Double) = { balance -= amount; balance }
* }
*
*/
object ex01 extends App {
class BankAccount(initialBalance: Double) {
private var balance = initialBalance
def deposit(amount: Double) = { balance += amount; balance }
def withdraw(amount: Double) = { balance -= amount; balance }
}
class CheckingAccount(initialBalance: Double) extends BankAccount(initialBalance) {
    override def deposit(amount: Double) = { super.deposit(amount - 1) }
    override def withdraw(amount: Double) = { super.withdraw(amount + 1) }
}
val a1 = new CheckingAccount(100)
println(a1.deposit(10))
println(a1.withdraw(10))
}
| tuxdna/scala-for-the-impatient-exercises | src/main/scala/ch08/ex01.scala | Scala | apache-2.0 | 953 |
package net.sansa_stack.rdf.common.partition.layout
import scala.reflect.runtime.universe.{Type, typeOf}
import org.apache.jena.graph.Triple
import net.sansa_stack.rdf.common.partition.core.RdfPartitionerDefault
import net.sansa_stack.rdf.common.partition.schema.SchemaStringDecimal
object TripleLayoutDecimal
extends TripleLayout {
override def schema: Type = typeOf[SchemaStringDecimal]
override def fromTriple(t: Triple): SchemaStringDecimal = {
val s = t.getSubject
val o = t.getObject
val v = if (o.isLiteral && o.getLiteral.getDatatype.getJavaClass == classOf[java.math.BigDecimal]) {
o.getLiteralValue match {
case value: Integer => java.math.BigDecimal.valueOf(value.longValue()) // Jena parses e.g. 1.0 to Integer
case value: java.lang.Long => java.math.BigDecimal.valueOf(value.longValue())
case value: java.math.BigInteger => new java.math.BigDecimal(value)
case _ => o.getLiteralValue.asInstanceOf[java.math.BigDecimal]
}
} else throw new RuntimeException(s"Layout only for BigDecimal values: $t")
val sStr = RdfPartitionerDefault.getUriOrBNodeString(s)
SchemaStringDecimal(sStr, v)
}
}
| SANSA-Stack/SANSA-RDF | sansa-rdf/sansa-rdf-common/src/main/scala/net/sansa_stack/rdf/common/partition/layout/TripleLayoutDecimal.scala | Scala | apache-2.0 | 1,183 |
package figures.oop
abstract class Figure {
def area: Double
// Adding a new operation is hard:
def circumference: Double
}
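// Hedged illustration (not part of the original file): a hypothetical Circle subclass shows the
// trade-off the comment above refers to — adding a new subclass is easy, but adding a new
// operation such as circumference forces every existing subclass to change.
class Circle(radius: Double) extends Figure {
  def area: Double = math.Pi * radius * radius
  def circumference: Double = 2 * math.Pi * radius
}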
| zstoychev/fp-in-scala-presentation | examples/figures/oop/Figure.scala | Scala | mit | 132 |
/*
* Copyright 2010-2011 Vilius Normantas <code@norma.lt>
*
* This file is part of Crossbow library.
*
* Crossbow is free software: you can redistribute it and/or modify it under the terms of the GNU
* General Public License as published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* Crossbow is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with Crossbow. If not,
* see <http://www.gnu.org/licenses/>.
*/
package lt.norma.crossbow.ib
import com.ib.client._
trait IbListener extends EWrapper {
// Connection and Server
def currentTime(time: Long) { }
def error(id: Int, errorCode: Int, errorMsg: String) { }
def error(exception: java.lang.Exception) { }
def error(string: String) { }
def connectionClosed() { }
// Market Data
def tickPrice(tickerId: Int, field: Int, price: Double, canAutoExecute: Int) { }
def tickSize(tickerId: Int, field: Int, size: Int) { }
def tickOptionComputation(tickerId: Int, field: Int, impliedVol: Double, delta: Double,
optPrice: Double, pvDividend: Double, gamma: Double, vega: Double, theta: Double,
undPrice: Double) { }
def tickGeneric(tickerId: Int, tickType: Int, value: Double) { }
def tickString(tickerId: Int, tickType: Int, value: String) { }
def tickEFP(tickerId: Int, tickType: Int, basisPoints: Double, formattedBasisPoints: String,
impliedFuture: Double, holdDays: Int, futureExpiry: String, dividendImpact: Double,
dividendsToExpiry: Double) { }
def tickSnapshotEnd(reqId: Int) { }
def deltaNeutralValidation(reqId: Int, underComp: UnderComp) { }
// Orders
def orderStatus(orderId: Int, status: String, filled: Int, remaining: Int, avgFillPrice: Double,
permId: Int, parentId: Int, lastFillPrice: Double, clientId: Int, whyHeld: String) { }
def openOrder(orderId: Int, contract: Contract, order: Order, orderState: OrderState) { }
def nextValidId(orderId: Int) { }
def openOrderEnd() { }
// Account and Portfolio
def updateAccountValue(key: String, value: String, currency: String, accountName: String) { }
def updatePortfolio(contract: Contract, position: Int, marketPrice: Double, marketValue: Double,
averageCost: Double, unrealizedPNL: Double, realizedPNL: Double, accountName: String) { }
def updateAccountTime(timeStamp: String) { }
def accountDownloadEnd(accountName: String) { }
// Contract Details
def contractDetails(reqId: Int, contractDetails: ContractDetails) { }
def contractDetailsEnd(reqId: Int) { }
def bondContractDetails(reqId: Int, contractDetails: ContractDetails) { }
// Executions
def execDetails(reqId: Int, contract: Contract, execution: Execution) { }
def execDetailsEnd(reqId: Int) { }
// Market Depth
def updateMktDepth(tickerId: Int, position: Int, operation: Int, side: Int, price: Double,
size: Int) { }
def updateMktDepthL2(tickerId: Int, position: Int, marketMaker: String, operation: Int, side: Int,
price: Double, size: Int) { }
// News Bulletins
def updateNewsBulletin(msgId: Int, msgType: Int, message: String, origExchange: String) { }
// Financial Advisors
def managedAccounts(accountsList: String) { }
def receiveFA(faDataType: Int, xml: String) { }
// Historical Data
def historicalData(reqId: Int, date: String, open: Double, high: Double, low: Double,
close: Double, volume: Int, count: Int, WAP: Double, hasGaps: Boolean) { }
// Market Scanners
def scannerParameters(xml: String) { }
def scannerData(reqId: Int, rank: Int, contractDetails: ContractDetails, distance: String,
benchmark: String, projection: String, legsStr: String) { }
def scannerDataEnd(reqId: Int) { }
// Real Time Bars
def realtimeBar(reqId: Int, time: Long, open: Double, high: Double, low: Double, close: Double,
volume: Long, wap: Double, count: Int) { }
// Fundamental Data
def fundamentalData(reqId: Int, data: String) { }
}
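// Hedged sketch (not part of the original file): because every EWrapper callback above has a no-op
// default, a concrete listener only needs to override the callbacks it cares about. The class name
// is illustrative only.
class TickPriceLogger extends IbListener {
  override def tickPrice(tickerId: Int, field: Int, price: Double, canAutoExecute: Int) {
    println("tickPrice: tickerId=" + tickerId + " field=" + field + " price=" + price)
  }
}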
| ViliusN/Crossbow | crossbow-ib/src/lt/norma/crossbow/ib/IbListener.scala | Scala | gpl-3.0 | 4,180 |
package es.uvigo.ei.sing.biomsef.entity
import play.api.libs.json._
import org.scalacheck.Gen
import es.uvigo.ei.sing.biomsef.BaseSpec
import es.uvigo.ei.sing.biomsef.entity.Generators._
class SearchResultSpec extends BaseSpec {
private[this] lazy val searchResultGenerator = searchResultTupleGenerator map SearchResult.tupled
private[this] lazy val searchResultTupleGenerator = for {
document <- documentGenerator
keywordSet <- Gen.containerOf[Set, Keyword](keywordGenerator)
} yield (document, keywordSet)
private[this] def createJson(searchResult : SearchResult) =
Json.obj(
"document" -> Json.toJson(searchResult.document),
"keywords" -> JsArray((searchResult.keywords map { k => Json toJson k }).toSeq)
)
"The SearchResult entity" - {
"can be constructed with a Document and a Set of Keywords related to that Document" in {
forAll(searchResultTupleGenerator) { case (document, keywordSet) =>
SearchResult(document, keywordSet) should have (
'document (document),
'keywords (keywordSet)
)
}
}
"can be transformed to a JSON object" in {
forAll(searchResultGenerator) { searchResult : SearchResult =>
(Json toJson searchResult) should equal (createJson(searchResult))
}
}
}
}
| agjacome/biomsef | src/test/scala/entity/SearchResultSpec.scala | Scala | mit | 1,317 |
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package com.normation.rudder.repository.xml
import org.eclipse.jgit.lib.ObjectId
import org.eclipse.jgit.revwalk.RevTag
import com.normation.cfclerk.services.GitRepositoryProvider
import com.normation.cfclerk.services.GitRevisionProvider
import com.normation.rudder.domain.policies.Rule
import com.normation.rudder.repository._
import com.normation.rudder.services.marshalling.RuleUnserialisation
import com.normation.utils.Control._
import com.normation.utils.UuidRegex
import com.normation.utils.XmlUtils
import net.liftweb.common.Box
import net.liftweb.common.Loggable
import com.normation.rudder.migration.XmlEntityMigration
class GitParseRules(
ruleUnserialisation: RuleUnserialisation
, repo : GitRepositoryProvider
, xmlMigration : XmlEntityMigration
, rulesRootDirectory : String //relative name to git root file
) extends ParseRules with Loggable {
def getArchive(archiveId:GitCommitId) = {
for {
treeId <- GitFindUtils.findRevTreeFromRevString(repo.db, archiveId.value)
archive <- getArchiveForRevTreeId(treeId)
} yield {
archive
}
}
private[this] def getArchiveForRevTreeId(revTreeId:ObjectId) = {
val root = {
val p = rulesRootDirectory.trim
if(p.size == 0) ""
else if(p.endsWith("/")) p.substring(0, p.size-1)
else p
}
val directoryPath = root + "/"
//// BE CAREFUL: GIT DOES NOT LIST DIRECTORIES
val paths = GitFindUtils.listFiles(repo.db, revTreeId, List(root), List(".xml")).filter { p =>
p.size > directoryPath.size &&
p.startsWith(directoryPath) &&
p.endsWith(".xml") &&
UuidRegex.isValid(p.substring(directoryPath.size,p.size - 4))
}
for {
xmls <- sequence(paths.toSeq) { crPath =>
GitFindUtils.getFileContent(repo.db, revTreeId, crPath){ inputStream =>
XmlUtils.parseXml(inputStream, Some(crPath))
}
}
rules <- sequence(xmls) { xml =>
for {
ruleXml <- xmlMigration.getUpToDateXml(xml)
                   rule    <- ruleUnserialisation.unserialise(ruleXml)
} yield {
rule
}
}
} yield {
rules
}
}
}
| jooooooon/rudder | rudder-core/src/main/scala/com/normation/rudder/repository/xml/GitParseConfigurationRules.scala | Scala | agpl-3.0 | 4,047 |
package config
import play.api.Play.current
import play.api.libs.concurrent.Akka
import scala.concurrent.ExecutionContext
trait ControllerDefaults {
implicit val executionContext: ExecutionContext =
Akka.system.dispatchers.defaultGlobalDispatcher
}
| betygen/api | app/config/ControllerDefaults.scala | Scala | mit | 257 |
package lila.evalCache
import chess.variant.Variant
import play.api.libs.json._
import chess.format.FEN
import lila.socket._
import lila.user.User
final private class EvalCacheSocketHandler(
api: EvalCacheApi,
truster: EvalCacheTruster,
upgrade: EvalCacheUpgrade
)(implicit ec: scala.concurrent.ExecutionContext) {
def evalGet(
sri: Socket.Sri,
d: JsObject,
push: JsObject => Unit
): Unit =
for {
fen <- d str "fen" map FEN.apply
variant = Variant orDefault ~d.str("variant")
multiPv = (d int "mpv") | 1
path <- d str "path"
} {
def pushData(data: JsObject) = push(Socket.makeMessage("evalHit", data))
api.getEvalJson(variant, fen, multiPv) foreach {
_ foreach { json =>
pushData(json + ("path" -> JsString(path)))
}
}
if (d.value contains "up") upgrade.register(sri, variant, fen, multiPv, path)(pushData)
}
def untrustedEvalPut(sri: Socket.Sri, userId: User.ID, data: JsObject): Unit =
truster cachedTrusted userId foreach {
_ foreach { tu =>
JsonHandlers.readPutData(tu, data) foreach {
api.put(tu, _, sri)
}
}
}
}
| luanlv/lila | modules/evalCache/src/main/EvalCacheSocketHandler.scala | Scala | mit | 1,191 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.examples.wordspec.oneargtest
import org.scalatest.fixture
import java.io._
class ExampleSpec extends fixture.WordSpec {
case class FixtureParam(file: File, writer: FileWriter)
def withFixture(test: OneArgTest) = {
// create the fixture
val file = File.createTempFile("hello", "world")
val writer = new FileWriter(file)
val theFixture = FixtureParam(file, writer)
try {
writer.write("ScalaTest is ") // set up the fixture
withFixture(test.toNoArgTest(theFixture)) // "loan" the fixture to the test
}
finally writer.close() // clean up the fixture
}
"Testing" should {
"be easy" in { f =>
f.writer.write("easy!")
f.writer.flush()
assert(f.file.length === 18)
}
"be fun" in { f =>
f.writer.write("fun!")
f.writer.flush()
assert(f.file.length === 17)
}
}
}
| dotty-staging/scalatest | examples/src/test/scala/org/scalatest/examples/wordspec/oneargtest/ExampleSpec.scala | Scala | apache-2.0 | 1,487 |
/*
 * Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.matchers
// T is the type of the object that has a property to verify with an instance of this trait, P is the type of that particular property
// Since I should be able to pass
/**
* Trait extended by matcher objects, which may appear after the word <code>have</code>, that can match against a
* property of the type specified by the <code>HavePropertyMatcher</code>'s second type parameter <code>P</code>.
* <code>HavePropertyMatcher</code>'s first type parameter, <code>T</code>, specifies the type that declares the property. The match will succeed if and
* only if the value of the property equals the specified value.
* The object containing the property
* is passed to the <code>HavePropertyMatcher</code>'s
* <code>apply</code> method. The result is a <code>HavePropertyMatchResult[P]</code>.
* A <code>HavePropertyMatcher</code> is, therefore, a function from the specified type, <code>T</code>, to
* a <code>HavePropertyMatchResult[P]</code>.
*
* <p>
* Although <code>HavePropertyMatcher</code>
* and <code>Matcher</code> represent similar concepts, they have no inheritance relationship
* because <code>Matcher</code> is intended for use right after <code>should</code> or <code>must</code>
* whereas <code>HavePropertyMatcher</code> is intended for use right after <code>have</code>.
* </p>
*
* <p>
* A <code>HavePropertyMatcher</code> essentially allows you to write statically typed
* property assertions similar to the dynamic ones that use symbols:
* </p>
*
* <pre class="stHighlight">
* book should have ('title ("Moby Dick")) // dynamic: uses reflection
* book should have (title ("Moby Dick")) // type safe: only works on Books; no reflection used
* </pre>
*
* <p>
* One good way to organize custom matchers is to place them inside one or more traits that
* you can then mix into the suites or specs that need them. Here's an example that
* includes two methods that produce <code>HavePropertyMatcher</code>s:
* </p>
*
* <pre class="stHighlight">
* case class Book(val title: String, val author: String)
*
* trait CustomMatchers {
*
* def title(expectedValue: String) =
* new HavePropertyMatcher[Book, String] {
* def apply(book: Book) =
* HavePropertyMatchResult(
* book.title == expectedValue,
* "title",
* expectedValue,
* book.title
* )
* }
*
* def author(expectedValue: String) =
* new HavePropertyMatcher[Book, String] {
* def apply(book: Book) =
* HavePropertyMatchResult(
* book.author == expectedValue,
* "author",
* expectedValue,
* book.author
* )
* }
* }
* </pre>
*
* <p>
* Each time the <code>title</code> method is called, it returns a new <code>HavePropertyMatcher[Book, String]</code> that
* can be used to match against the <code>title</code> property of the <code>Book</code> passed to its <code>apply</code>
* method. Because the type parameter of these two <code>HavePropertyMatcher</code>s is <code>Book</code>, they
* can only be used with instances of that type. (The compiler will enforce this.) The match will succeed if the
* <code>title</code> property equals the value passed as <code>expectedValue</code>.
* If the match succeeds, the <code>matches</code> field of the returned <code>HavePropertyMatchResult</code> will be <code>true</code>.
* The second field, <code>propertyName</code>, is simply the string name of the property.
* The third and fourth fields, <code>expectedValue</code> and <code>actualValue</code> indicate the expected and actual
* values, respectively, for the property.
* Here's an example that uses these <code>HavePropertyMatchers</code>:
* </p>
*
* <pre class="stHighlight">
* class ExampleSpec extends Spec with ShouldMatchers with CustomMatchers {
*
* describe("A book") {
*
* it("should have the correct title and author") {
*
* val book = Book("Moby Dick", "Melville")
*
* book should have (
* title ("Moby Dick"),
* author ("Melville")
* )
* }
* }
* }
* </pre>
*
* <p>
* These matches should succeed, but if for example the first property, <code>title ("Moby Dick")</code>, were to fail, you would get an error message like:
* </p>
*
* <pre class="stExamples">
* The title property had value "A Tale of Two Cities", instead of its expected value "Moby Dick",
* on object Book(A Tale of Two Cities,Dickens)
* </pre>
*
* <p>
* For more information on <code>HavePropertyMatchResult</code> and the meaning of its fields, please
* see the documentation for <a href="HavePropertyMatchResult.html"><code>HavePropertyMatchResult</code></a>. To understand why <code>HavePropertyMatcher</code>
* is contravariant in its type parameter, see the section entitled "Matcher's variance" in the
* documentation for <a href="../Matcher.html"><code>Matcher</code></a>.
* </p>
*
* @author Bill Venners
*/
trait HavePropertyMatcher[-T, P] extends Function1[T, HavePropertyMatchResult[P]] {
thisHavePropertyMatcher =>
/**
* Check to see if a property on the specified object, <code>objectWithProperty</code>, matches its
* expected value, and report the result in
* the returned <code>HavePropertyMatchResult</code>. The <code>objectWithProperty</code> is
* usually the value to the left of a <code>should</code> or <code>must</code> invocation. For example, <code>book</code>
* would be passed as the <code>objectWithProperty</code> in:
*
* <pre class="stHighlight">
* book should have (title ("Moby Dick"))
* </pre>
*
* @param objectWithProperty the object with the property against which to match
* @return the <code>HavePropertyMatchResult</code> that represents the result of the match
*/
def apply(objectWithProperty: T): HavePropertyMatchResult[P]
/**
* Compose this <code>HavePropertyMatcher</code> with the passed function, returning a new <code>HavePropertyMatcher</code>.
*
* <p>
* This method overrides <code>compose</code> on <code>Function1</code> to
* return a more specific function type of <code>HavePropertyMatcher</code>.
* </p>
*/
override def compose[U](g: U => T): HavePropertyMatcher[U, P] =
new HavePropertyMatcher[U, P] {
def apply(u: U) = thisHavePropertyMatcher.apply(g(u))
}
}
/**
* Companion object for trait <code>HavePropertyMatcher</code> that provides a
* factory method that creates a <code>HavePropertyMatcher[T]</code> from a
* passed function of type <code>(T => HavePropertyMatchResult)</code>.
*
* @author Bill Venners
*/
object HavePropertyMatcher {
/**
* Factory method that creates a <code>HavePropertyMatcher[T]</code> from a
* passed function of type <code>(T => HavePropertyMatchResult)</code>.
*
* <p>
* This allows you to create a <code>HavePropertyMatcher</code> in a slightly
* more concise way, for example:
* </p>
*
* <pre class="stHighlight">
* case class Person(name: String)
* def name(expectedName: String) = {
* HavePropertyMatcher {
* (person: Person) => HavePropertyMatchResult(
* person.name == expectedName,
* "name",
* expectedName,
* person.name
* )
* }
 * }
 * </pre>
*
* @author Bill Venners
*/
def apply[T, P](fun: T => HavePropertyMatchResult[P])(implicit evT: Manifest[T], evP: Manifest[P]): HavePropertyMatcher[T, P] =
new HavePropertyMatcher[T, P] {
def apply(left: T) = fun(left)
override def toString: String = "HavePropertyMatcher[" + evT.erasure.getName + ", " + evP.erasure.getName + "](" + evT.erasure.getName + " => HavePropertyMatchResult[" + evP.erasure.getName + "])"
}
}
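// Hedged sketch (not part of the original file): the factory-method example from the Scaladoc
// above, completed so it compiles. Person, name(...) and the object name are illustrative only.
object HavePropertyMatcherDocExample {
  case class Person(name: String)
  def name(expectedName: String) =
    HavePropertyMatcher { (person: Person) =>
      HavePropertyMatchResult(
        person.name == expectedName,
        "name",
        expectedName,
        person.name
      )
    }
}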
| travisbrown/scalatest | src/main/scala/org/scalatest/matchers/HavePropertyMatcher.scala | Scala | apache-2.0 | 8,384 |
/*
* Copyright (C) 2017 HAT Data Exchange Ltd
* SPDX-License-Identifier: AGPL-3.0
*
* This file is part of the Hub of All Things project (HAT).
*
* HAT is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License
* as published by the Free Software Foundation, version 3 of
* the License.
*
* HAT is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
* the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General
* Public License along with this program. If not, see
* <http://www.gnu.org/licenses/>.
*
* Written by Andrius Aucinas <andrius.aucinas@hatdex.org>
* 11 / 2017
*/
package org.hatdex.hat.she.service
import java.security.MessageDigest
import javax.inject.Inject
import akka.Done
import org.hatdex.hat.api.json.{
ApplicationJsonProtocol,
DataFeedItemJsonProtocol
}
import org.hatdex.hat.api.models.{ EndpointData, Owner }
import org.hatdex.hat.api.service.UsersService
import org.hatdex.hat.api.service.richData.RichDataService
import org.hatdex.hat.she.models._
import org.hatdex.libs.dal.HATPostgresProfile.api._
import org.joda.time.DateTime
import org.hatdex.hat.dal.Tables._
import org.hatdex.hat.resourceManagement.HatServer
import play.api.Logger
import play.api.libs.json.Json
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{ ExecutionContext, Future }
import scala.concurrent.duration._
import scala.util.{ Failure, Success }
class FunctionService @Inject() (
functionRegistry: FunctionExecutableRegistry,
dataService: RichDataService,
usersService: UsersService
)(implicit
ec: ExecutionContext) {
private val logger = Logger(this.getClass)
private val functionExecutionTimeout: FiniteDuration = 5.minutes
private implicit def hatServer2db(implicit hatServer: HatServer): Database =
hatServer.db
protected def mergeRegisteredSaved(
registeredFunctions: Seq[FunctionConfiguration],
savedFunctions: Seq[FunctionConfiguration]
): Seq[FunctionConfiguration] = {
val registered: Map[String, FunctionConfiguration] =
registeredFunctions.map(r => r.id -> r).toMap
val saved: Map[String, FunctionConfiguration] =
savedFunctions.map(r => r.id -> r).toMap
val functions =
for ((k, v) <- saved ++ registered)
yield k -> (if ((saved contains k) && (registered contains k))
saved(k).update(v)
else v)
functions.values.toSeq
}
def all(
active: Boolean
)(implicit db: Database
): Future[Seq[FunctionConfiguration]] = {
for {
registeredFunctions <- Future.successful(
functionRegistry.getSeq[FunctionExecutable].map(_.configuration)
)
savedFunctions <- saved()
} yield {
mergeRegisteredSaved(registeredFunctions, savedFunctions)
.filter(f =>
!active || f.status.enabled && f.status.available
) // only return enabled ones if filtering is on
}
}
def get(
id: String
)(implicit db: Database
): Future[Option[FunctionConfiguration]] = {
val query = for {
(function, status) <-
SheFunction
.filter(_.id === id)
.joinLeft(SheFunctionStatus)
.on(_.id === _.id)
bundle <- function.dataBundlesFk
} yield (function, status, bundle)
for {
r <- Future.successful(
functionRegistry
.getSeq[FunctionExecutable]
.map(_.configuration)
.filter(_.id == id)
)
f <-
db.run(query.take(1).result)
.map(
_.map(f =>
FunctionConfiguration(
f._1,
f._2,
f._3,
available =
r.exists(rf => rf.id == f._1.id && rf.status.available)
)
)
)
} yield {
mergeRegisteredSaved(r, f).headOption
}
}
def saved()(implicit db: Database): Future[Seq[FunctionConfiguration]] = {
val query = for {
(function, status) <-
SheFunction.joinLeft(SheFunctionStatus).on(_.id === _.id)
bundle <- function.dataBundlesFk
} yield (function, status, bundle)
for {
r <- Future.successful(
functionRegistry.getSeq[FunctionExecutable].map(_.configuration)
)
f <-
db.run(query.result)
.map(
_.map(f =>
FunctionConfiguration(
f._1,
f._2,
f._3,
available =
r.exists(rf => rf.id == f._1.id && rf.status.available)
)
)
)
} yield f
}
def save(
configuration: FunctionConfiguration
)(implicit db: Database
): Future[FunctionConfiguration] = {
logger.debug(s"Save function configuration $configuration")
import org.hatdex.hat.api.json.RichDataJsonFormats.propertyQueryFormat
import org.hatdex.hat.she.models.FunctionConfigurationJsonProtocol.triggerFormat
import ApplicationJsonProtocol.formattedTextFormat
import DataFeedItemJsonProtocol.feedItemFormat
import ApplicationJsonProtocol.applicationGraphicsFormat
val functionRow = SheFunctionRow(
configuration.id,
Json.toJson(configuration.info.description),
Json.toJson(configuration.trigger),
configuration.dataBundle.name,
configuration.info.headline,
configuration.info.dataPreview.map(dp => Json.toJson(dp)),
configuration.info.dataPreviewEndpoint,
Json.toJson(configuration.info.graphics),
configuration.info.name,
configuration.info.version.toString(),
configuration.info.termsUrl,
configuration.developer.id,
configuration.developer.name,
configuration.developer.url,
configuration.developer.country,
configuration.info.versionReleaseDate,
configuration.info.supportContact
)
val bundleRow = DataBundlesRow(
configuration.dataBundle.name,
Json.toJson(configuration.dataBundle.bundle)
)
val statusRow = SheFunctionStatusRow(
configuration.id,
configuration.status.enabled,
configuration.status.lastExecution,
configuration.status.executionStarted
)
db.run(
DBIO
.seq(
DataBundles.insertOrUpdate(bundleRow),
SheFunction.insertOrUpdate(functionRow),
SheFunctionStatus.insertOrUpdate(statusRow)
)
.transactionally
).flatMap(_ => get(configuration.id))
.map(_.get)
}
def run(
configuration: FunctionConfiguration,
startTime: Option[DateTime],
useAll: Boolean
)(implicit hatServer: HatServer
): Future[Done] = {
val executionTime = DateTime.now()
logger.info(
s"[${hatServer.domain}] SHE function [${configuration.id}] run @$executionTime (previously $startTime)"
)
functionRegistry
.get[FunctionExecutable](configuration.id)
.map { function: FunctionExecutable =>
val fromDate = startTime.orElse(Some(DateTime.now().minusMonths(6)))
val untilDate = Some(DateTime.now().plusMonths(3))
val dataBundleTimeFilter = if (useAll) {
None
} else {
configuration.status.lastExecution
}
val executionResult = for {
_ <- markExecuting(configuration)
bundle <- function.bundleFilterByDate(fromDate, untilDate)
data <- dataService.bundleData(
bundle,
createdAfter = dataBundleTimeFilter
) // Get all bundle data from a specific date until now
response <-
function
.execute(
configuration,
Request(data, linkRecords = true)
) // Execute the function
.map(
removeDuplicateData
) // Remove duplicate data in case some records mapped onto the same values when transformed
// TODO handle cases when function runs for longer and connection to DB needs to be reestablished
owner <-
usersService
.getUserByRole(Owner())
.map(
_.head
) // Fetch the owner user - functions run on their behalf
_ <- dataService.saveDataGroups(
owner.userId,
response.map(r =>
(
r.data.map(
EndpointData(
s"${r.namespace}/${r.endpoint}",
None,
None,
None,
_,
None
)
),
r.linkedRecords
)
),
skipErrors = true
)
_ <- markCompleted(configuration)
} yield (data.values.map(_.length).sum, Done)
executionResult
.andThen({
case Success((totalRecords, _)) =>
logger.info(
s"[${hatServer.domain}] SHE function [${configuration.id}] finished, generated $totalRecords records"
)
case Failure(e) =>
logger.error(
s"[${hatServer.domain}] SHE function [${configuration.id}] error: ${e.getMessage}"
)
})
.map(_._2)
} getOrElse {
Future.failed(
SHEFunctionNotAvailableException(
"The requested function is not available"
)
)
}
}
private def markExecuting(
function: FunctionConfiguration
)(implicit hatServer: HatServer
): Future[Done] = {
val notExecuting = SheFunctionStatus
.filter(_.id === function.id)
.filter(s =>
s.lastExecution > DateTime
.now()
.minus(functionExecutionTimeout.toMillis)
)
.result
.flatMap(r =>
if (r.isEmpty) {
DBIO.successful("No current execution")
} else {
DBIO.failed(
SHEFunctionBusyExecutingException("The function is being executed")
)
}
)
val mark = SheFunctionStatus
.filter(_.id === function.id)
.map(s => s.executionStarted)
.update(Some(DateTime.now()))
hatServer.db
.run(DBIO.seq(notExecuting, mark).transactionally)
.map(_ => Done)
}
private def markCompleted(
function: FunctionConfiguration
)(implicit hatServer: HatServer
): Future[Done] = {
val now = DateTime.now()
logger.info(
s"[${hatServer.domain}] Successfully executed function ${function.id} at $now"
)
val update = SheFunctionStatus
.filter(_.id === function.id)
.map(s => (s.executionStarted, s.lastExecution))
.update((None, Some(now)))
hatServer.db
.run(update)
.map(_ => Done)
}
//TODO: expensive operation!
private def removeDuplicateData(response: Seq[Response]): Seq[Response] = {
val md = MessageDigest.getInstance("SHA-256")
response
.flatMap({ r =>
r.data.headOption.map { data =>
val digest = md.digest(data.toString().getBytes)
(BigInt(digest), r)
}
})
.sortBy(_._1)
.foldRight(Seq[(BigInt, Response)]())({
case (e, ls) if ls.isEmpty || ls.head._1 != e._1 => e +: ls
case (_, ls) => ls
})
.unzip
._2 // Drop the digest
}
}
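// Hedged sketch (not part of the original file): the dedup-by-digest idea used by
// removeDuplicateData above, reduced to plain strings so it can run on its own. Names and sample
// values are illustrative; the real method hashes the first data record of each Response.
object DedupByDigestExample extends App {
  import java.security.MessageDigest
  val md = MessageDigest.getInstance("SHA-256")
  val responses = Seq("a", "b", "a", "c")
  val deduped = responses
    .map(r => (BigInt(md.digest(r.getBytes("UTF-8"))), r)) // pair each value with its digest
    .sortBy(_._1)                                          // group equal digests together
    .foldRight(Seq.empty[(BigInt, String)]) {
      case (e, ls) if ls.isEmpty || ls.head._1 != e._1 => e +: ls // keep first of each digest run
      case (_, ls)                                     => ls      // drop duplicates
    }
    .unzip._2 // drop the digests
  println(deduped) // one entry per distinct value, ordered by digest
}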
| Hub-of-all-Things/HAT2.0 | hat/app/org/hatdex/hat/she/service/FunctionService.scala | Scala | agpl-3.0 | 11,556 |
package com.cloudray.scalapress.plugin.url.friendlyurl
import org.scalatest.{OneInstancePerTest, FunSuite}
import org.scalatest.mock.MockitoSugar
import com.cloudray.scalapress.item.Item
import com.cloudray.scalapress.folder.Folder
import com.cloudray.scalapress.util.UrlGenerator
/** @author Stephen Samuel */
class FriendlyUrlGeneratorTest extends FunSuite with MockitoSugar with OneInstancePerTest {
test("item friendly url happy path") {
val obj = new Item
obj.id = 1234
obj.name = "boro for the champo"
assert("/item-1234-boro-for-the-champo" === UrlGenerator.url(obj))
}
test("folder friendly url happy path") {
val f = new Folder
f.id = 55
f.name = "uefa cup final 2006"
assert("/folder-55-uefa-cup-final-2006" === UrlGenerator.url(f))
}
}
| vidyacraghav/scalapress | src/test/scala/com/cloudray/scalapress/plugin/url/friendlyurl/FriendlyUrlGeneratorTest.scala | Scala | apache-2.0 | 791 |