| code (stringlengths 5–1M) | repo_name (stringlengths 5–109) | path (stringlengths 6–208) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 5–1M) |
|---|---|---|---|---|---|
package com.rcirka.play.dynamodb.models.enums
import play.api.libs.json._
object AttributeType extends Enumeration {
type AttributeType = Value
val Numeric = Value("N")
val String = Value("S")
implicit val attributeTypeWrites = new Writes[AttributeType] {
def writes(value: AttributeType): JsValue = JsString(value.toString)
}
}
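A minimal usage sketch of the implicit `Writes` above; Play JSON on the classpath and the `AttributeTypeDemo` wrapper are assumptions, not part of the source file:

import play.api.libs.json.Json
import com.rcirka.play.dynamodb.models.enums.AttributeType

object AttributeTypeDemo extends App {
  // attributeTypeWrites is found via the implicit scope of AttributeType.Value.
  println(Json.toJson(AttributeType.Numeric)) // prints "N"
  println(Json.toJson(AttributeType.String))  // prints "S"
}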
| rcirka/Play-DynamoDB | src/main/scala/com/rcirka/play/dynamodb/models/enums/AttributeType.scala | Scala | mit | 347 |
package io.finch
import com.twitter.finagle.http.Request
import com.twitter.util.{Await, Future, Return, Throw}
import org.scalacheck.Prop.BooleanOperators
import org.scalatest.prop.Checkers
import org.scalatest.{FlatSpec, Matchers}
class RequestReaderValidationSpec extends FlatSpec with Matchers with Checkers {
val request = Request("foo" -> "6", "bar" -> "9")
val fooReader = param("foo").as[Int]
val barReader = param("bar").as[Int]
val beEven = ValidationRule[Int]("be even") { _ % 2 == 0 }
def beSmallerThan(value: Int) = ValidationRule[Int](s"be smaller than $value") { _ < value }
"A RequestReader" should "allow valid values" in {
val evenReader = fooReader.should("be even") { _ % 2 == 0 }
Await.result(evenReader(request)) shouldBe 6
}
it should "allow valid values based on negated rules" in {
val evenReader = barReader.shouldNot("be even") { _ % 2 == 0 }
Await.result(evenReader(request)) shouldBe 9
}
it should "raise a RequestReader error for invalid values" in {
val oddReader = fooReader.should("be odd") { _ % 2 != 0 }
an [Error] shouldBe thrownBy(Await.result(oddReader(request)))
}
it should "be lift-able into an optional reader that always succeeds" in {
val oddReader = fooReader.should("be odd") { _ % 2 != 0 }
Await.result(oddReader.lift(request)) shouldBe None
}
it should "allow valid values in a for-comprehension" in {
val readFoo: RequestReader[Int] = for {
foo <- fooReader if foo % 2 == 0
} yield foo
Await.result(readFoo(request)) shouldBe 6
}
it should "raise a RequestReader error for invalid values in a for-comprehension" in {
val readFoo: RequestReader[Int] = for {
foo <- fooReader if foo % 2 != 0
} yield foo
an [Error] shouldBe thrownBy(Await.result(readFoo(request)))
}
it should "be convertible to a single-member case class with a matching type" in {
case class Bar(i: Int)
val barReader = fooReader.as[Bar]
Await.result(barReader(request)) shouldBe Bar(6)
}
"A RequestReader with a predefined validation rule" should "allow valid values" in {
val evenReader = fooReader.should(beEven)
Await.result(evenReader(request)) shouldBe 6
}
it should "allow valid values based on negated rules" in {
val evenReader = barReader.shouldNot(beEven)
Await.result(evenReader(request)) shouldBe 9
}
it should "raise a RequestReader error for invalid values" in {
val oddReader = fooReader.shouldNot(beEven)
an [Error] shouldBe thrownBy(Await.result(oddReader(request)))
}
it should "allow valid values based on two rules combined with and" in {
val andReader = fooReader.should(beEven and beSmallerThan(7))
Await.result(andReader(request)) shouldBe 6
}
it should "raise a RequestReader error if one of two rules combined with and fails" in {
val andReader = fooReader.should(beEven and beSmallerThan(2))
an [Error] shouldBe thrownBy(Await.result(andReader(request)))
}
it should "allow valid values based on two rules combined with or" in {
val orReader = barReader.shouldNot(beEven or beSmallerThan(2))
Await.result(orReader(request)) shouldBe 9
}
it should "raise a RequestReader error if one of two rules combined with or in a negation fails" in {
val andReader = fooReader.shouldNot(beEven or beSmallerThan(12))
an [Error] shouldBe thrownBy(Await.result(andReader(request)))
}
it should "allow to reuse a validation rule with optional readers" in {
val optReader = paramOption("foo").as[Int].should(beEven)
Await.result(optReader(request)) shouldBe Some(6)
}
it should "raise a RequestReader error if a rule for a non-empty optional value fails" in {
val optReader = paramOption("bar").as[Int].should(beEven)
an [Error] shouldBe thrownBy(Await.result(optReader(request)))
}
it should "skip validation when applied to an empty optional value" in {
val optReader = paramOption("baz").as[Int].should(beEven)
Await.result(optReader(request)) shouldBe None
}
it should "work with predefined rules" in {
val intReader = param("foo").as[Int] should beGreaterThan(100)
val floatReader = param("bar").as[Float].should(beGreaterThan(100.0f))
val stringReader = param("baz").should(beLongerThan(10))
val optLongReader = paramOption("foo").as[Int] should beGreaterThan(100)
val ltIntReader = param("foo").as[Int] should beLessThan(100)
val stStringReader = param("baz").should(beShorterThan(10))
val req = Request("foo" -> "20", "bar" -> "20.0", "baz" -> "baz")
an [Error] shouldBe thrownBy(Await.result(intReader(req)))
an [Error] shouldBe thrownBy(Await.result(floatReader(req)))
an [Error] shouldBe thrownBy(Await.result(stringReader(req)))
an [Error] shouldBe thrownBy(Await.result(optLongReader(req)))
Await.result(ltIntReader(req)) shouldBe 20
Await.result(stStringReader(req)) shouldBe "baz"
}
it should "allow to use inline rules with optional params" in {
val optInt = paramOption("foo").as[Int].should("be greater than 50") { i: Int => i > 50 }
val optString = paramOption("bar").should("be longer than 5 chars") { s: String => s.length > 5 }
an [Error] shouldBe thrownBy(Await.result(optInt(request)))
an [Error] shouldBe thrownBy(Await.result(optString(request)))
}
"An empty optional param RequestReader" should "work correctly with inline rules" in {
val optInt = paramOption("baz").as[Int].should("be greater than 50") { i: Int => i > 50 }
Await.result(optInt(request)) shouldBe None
}
"A composite RequestReader" should "be convertible to an appropriately typed case class" in {
case class Qux(i: Int, j: Int)
val quxReader = (fooReader :: barReader).as[Qux]
Await.result(quxReader(request)) shouldBe Qux(6, 9)
}
it should "be convertible to a tuple" in {
val tupleReader = (fooReader :: barReader).asTuple
Await.result(tupleReader(request)).shouldBe((6, 9))
}
it should "correctly fail with a single error" in {
val firstBadReader = (fooReader.shouldNot(beEven) :: barReader.shouldNot(beEven)).asTuple
val secondBadReader = (fooReader.should(beEven) :: barReader.should(beEven)).asTuple
Await.result(firstBadReader(request).liftToTry) should matchPattern {
case Throw(Error.NotValid(_, _)) =>
}
Await.result(secondBadReader(request).liftToTry) should matchPattern {
case Throw(Error.NotValid(_, _)) =>
}
}
it should "correctly accumulate errors" in {
val tupleReader = (fooReader.shouldNot(beEven) :: barReader.should(beEven)).asTuple
Await.result(tupleReader(request).liftToTry) should matchPattern {
case Throw(Error.RequestErrors(Seq(Error.NotValid(_, _), Error.NotValid(_, _)))) =>
}
}
it should "be able to have a function with appropriate arity and types applied to it" in {
val add: (Int, Int) => Int = _ + _
val sumReader = (fooReader :: barReader) ~> add
check { (foo: Int, bar: Int) =>
val req = Request("foo" -> foo.toString, "bar" -> bar.toString)
Await.result(sumReader(req)) === foo + bar
}
}
it should "be able to have an appropriately-typed Future-returning function applied to it" in {
val div: (Int, Int) => Future[Int] = (x, y) => Future(x / y)
val divReader = (fooReader :: barReader) ~~> div
check { (foo: Int, bar: Int) =>
val req = Request("foo" -> foo.toString, "bar" -> bar.toString)
Await.result {
for {
result <- div(foo, bar).liftToTry
readResult <- divReader(req).liftToTry
} yield (readResult, result) match {
case (Return(r1), Return(r2)) => r1 == r2
case (Throw(e1), Throw(e2)) => e1.getMessage == e2.getMessage
case _ => false
}
}
}
}
"RequestReader's generic derivation" should "create valid readers for case classes" in {
case class User(id: Long, first: String, last: String)
val userReader = RequestReader.derive[User].fromParams
check { (id: Long, first: String, last: String) =>
(first.nonEmpty && last.nonEmpty) ==> {
val req = Request("id" -> id.toString, "first" -> first, "last" -> last)
Await.result(userReader(req)) === User(id, first, last)
}
}
}
}
| BenWhitehead/finch | core/src/test/scala/io/finch/RequestReaderValidationSpec.scala | Scala | apache-2.0 | 8,310 |
/**
* Copyright 2014 Dropbox, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package djinni
import djinni.ast.Record.DerivingType
import djinni.ast._
import djinni.generatorTools._
import djinni.meta._
import djinni.writer.IndentWriter
import scala.collection.mutable
class CxCppGenerator(spec: Spec) extends Generator(spec) {
val cxcppMarshal = new CxCppMarshal(spec)
val cxMarshal = new CxMarshal(spec)
val cppMarshal = new CppMarshal(spec)
val writeCxCppFile = writeCppFileGeneric(spec.cxcppOutFolder.get, spec.cxcppNamespace, spec.cppFileIdentStyle, spec.cxcppIncludePrefix, spec.cxcppExt, spec.cxcppHeaderExt) _
def writeHxFile(name: String, origin: String, includes: Iterable[String], fwds: Iterable[String], f: IndentWriter => Unit, f2: IndentWriter => Unit = (w => {})) =
writeHppFileGeneric(spec.cxcppHeaderOutFolder.get, spec.cxcppNamespace, spec.cppFileIdentStyle, spec.cxcppHeaderExt)(name, origin, includes, fwds, f, f2)
class CxCppRefs(name: String) {
var hx = mutable.TreeSet[String]()
var hxFwds = mutable.TreeSet[String]()
var cxcpp = mutable.TreeSet[String]()
def find(ty: TypeRef) { find(ty.resolved) }
def find(tm: MExpr) {
tm.args.foreach(find)
find(tm.base)
}
def find(m: Meta) = for(r <- cxcppMarshal.references(m)) r match {
case ImportRef(arg) => hx.add("#include " + arg)
case DeclRef(decl, Some(spec.cxcppNamespace)) => hxFwds.add(decl)
case DeclRef(_, _) =>
}
}
override def generateEnum(origin: String, ident: Ident, doc: Doc, e: Enum) {
//nothing required?
}
def generateHxConstants(w: IndentWriter, consts: Seq[Const]) = {
for (c <- consts) {
w.wl
writeDoc(w, c.doc)
w.wl(s"static ${cxcppMarshal.fieldType(c.ty)} const ${idCpp.const(c.ident)};")
}
}
def generateCxCppConstants(w: IndentWriter, consts: Seq[Const], selfName: String) = {
def writeCxCppConst(w: IndentWriter, ty: TypeRef, v: Any): Unit = v match {
case l: Long => w.w(l.toString)
case d: Double if cxcppMarshal.fieldType(ty) == "float" => w.w(d.toString + "f")
case d: Double => w.w(d.toString)
case b: Boolean => w.w(if (b) "true" else "false")
case s: String => w.w(s)
case e: EnumValue => w.w(cxcppMarshal.typename(ty) + "::" + idCpp.enum(e.ty.name + "_" + e.name))
case v: ConstRef => w.w(selfName + "::" + idCpp.const(v))
case z: Map[_, _] => { // Value is record
val recordMdef = ty.resolved.base.asInstanceOf[MDef]
val record = recordMdef.body.asInstanceOf[Record]
val vMap = z.asInstanceOf[Map[String, Any]]
w.wl(cxcppMarshal.typename(ty) + "(")
w.increase()
// Use exact sequence
val skipFirst = SkipFirst()
for (f <- record.fields) {
skipFirst {w.wl(",")}
writeCxCppConst(w, f.ty, vMap.apply(f.ident.name))
w.w(" /* " + idCpp.field(f.ident) + " */ ")
}
w.w(")")
w.decrease()
}
}
val skipFirst = SkipFirst()
for (c <- consts) {
skipFirst{ w.wl }
w.w(s"${cxcppMarshal.fieldType(c.ty)} const $selfName::${idCpp.const(c.ident)} = ")
writeCxCppConst(w, c.ty, c.value)
w.wl(";")
}
}
override def generateRecord(origin: String, ident: Ident, doc: Doc, params: Seq[TypeParam], r: Record) {
val refs = new CxCppRefs(ident.name)
for (c <- r.consts)
refs.find(c.ty)
for (f <- r.fields)
refs.find(f.ty)
val cxName = ident.name + (if (r.ext.cx) "_base" else "")
val cxSelf = cxMarshal.fqTypename(ident, r)
val cppSelf = cppMarshal.fqTypename(ident, r)
refs.hx.add("#include " + q(spec.cxcppIncludeCxPrefix + (if(r.ext.cx) "../" else "") + cxcppMarshal.headerName(ident)+ "." + spec.cxcppHeaderExt))
refs.hx.add("#include " + q(spec.cxIncludePrefix + (if(r.ext.cx) "../" else "") + spec.cxFileIdentStyle(ident) + "." + spec.cxHeaderExt))
refs.hx.add("#include " + q(spec.cxcppIncludeCppPrefix + (if(r.ext.cpp) "../" else "") + spec.cppFileIdentStyle(ident) + "." + spec.cppHeaderExt))
refs.cxcpp = refs.hx.clone()
refs.cxcpp.add("#include <cassert>")
def checkMutable(tm: MExpr): Boolean = tm.base match {
case MOptional => checkMutable(tm.args.head)
case MString => true
case MBinary => true
case _ => false
}
val self = cxcppMarshal.typename(ident, r)
writeHxFile(cxcppMarshal.headerName(cxName), origin, refs.hx, refs.hxFwds, w => {
w.wl
w.wl(s"struct $self")
w.bracedSemi {
w.wl(s"using CppType = $cppSelf;")
w.wl(s"using CxType = $cxSelf^;");
w.wl
w.wl(s"using Boxed = $self;")
w.wl
w.wl(s"static CppType toCpp(CxType cx);")
w.wl(s"static CxType fromCpp(const CppType& cpp);")
}
})
writeCxCppFile(cxcppMarshal.bodyName(self), origin, refs.cxcpp, w => {
w.wl(s"auto $self::toCpp(CxType cx) -> CppType")
w.braced {
w.wl("assert(cx);")
if(r.fields.isEmpty) w.wl("(void)cx; // Suppress warnings in relase builds for empty records")
writeAlignedCall(w, "return {", r.fields, "}", f => cxcppMarshal.toCpp(f.ty, "cx->" + idCx.field(f.ident)))
w.wl(";")
}
w.wl
w.wl(s"auto $self::fromCpp(const CppType& cpp) -> CxType")
w.braced {
if(r.fields.isEmpty) w.wl("(void)cpp; // Suppress warnings in relase builds for empty records")
writeAlignedCall(w, s"return ref new $cxSelf(", r.fields, ")", f=> cxcppMarshal.fromCpp(f.ty, "cpp." + idCpp.field(f.ident)))
w.wl(";")
}
})
}
override def generateInterface(origin: String, ident: Ident, doc: Doc, typeParams: Seq[TypeParam], i: Interface) {
val refs = new CxCppRefs(ident.name)
refs.hx.add("#include \\""+spec.cppIncludePrefix + spec.cppFileIdentStyle(ident.name) + "." + spec.cppHeaderExt+"\\"")
refs.hx.add("#include \\""+spec.cxIncludePrefix + spec.cxFileIdentStyle(ident.name) + "." + spec.cxHeaderExt+"\\"")
refs.hx.add("#include <memory>")
i.methods.foreach { m =>
m.params.foreach(p => refs.find(p.ty))
m.ret.foreach(refs.find)
}
i.consts.foreach(c => refs.find(c.ty))
refs.cxcpp = refs.hx.clone()
refs.cxcpp.add("#include \\"CxWrapperCache.h\\"")
val self = cxcppMarshal.typename(ident, i)
val cxSelf = cxMarshal.fqTypename(ident, i)
val cppSelf = cppMarshal.fqTypename(ident, i)
val helperClass = cxcppMarshal.helperClass(ident)
writeHxFile(cxcppMarshal.headerName(ident.name), origin, refs.hx, refs.hxFwds, w => {
w.wl
w.wl(s"class $self")
w.bracedSemi {
w.wlOutdent("public:")
w.wl(s"using CppType = std::shared_ptr<$cppSelf>;")
w.wl(s"using CxType = $cxSelf^;");
w.wl
w.wl(s"using Boxed = $self;")
w.wl
w.wl(s"static CppType toCpp(CxType cx);")
w.wl(s"static CxType fromCpp(const CppType& cpp);")
if (i.ext.cx) {
w.wl
w.wlOutdent("private:")
w.wl(s"class CxProxy;")
}
}
})
writeCxCppFile(cxcppMarshal.bodyName(ident.name), origin, refs.cxcpp, w => {
// Only interfaces implemented on the Cx side need proxy objects
if (i.ext.cx) {
w.wl(s"class $helperClass final : public $cppSelf, public ::djinni::CxWrapperCache<CxProxy>::Handle").bracedSemi {
w.wlOutdent("public:")
w.wl("using Handle::Handle;")
w.wl(s"using CxType = $cxSelf^;")
w.wl
w.wl("CxProxy(Platform::Object^ cx) : ::djinni::CxWrapperCache<CxProxy>::Handle{ cx } {}")
w.wl
//methods
for (m <- i.methods) {
w.wl
writeDoc(w, m.doc)
val ret = cppMarshal.fqReturnType(m.ret)
val params = m.params.map(p => cppMarshal.fqParamType(p.ty) + " " + idCpp.local(p.ident))
if (m.static) {
w.wl(s"static $ret ${idCpp.method(m.ident)}${params.mkString("(", ", ", ")")}")
} else {
val constFlag = if (m.const) " const" else ""
w.wl(s"$ret ${idCpp.method(m.ident)}${params.mkString("(", ", ", ")")}$constFlag override")
}
w.braced {
val retCall = if (m.ret.isEmpty) "" else "auto r = "
val call = retCall + (if (!m.static) s"static_cast<CxType>(Handle::get())->" else cppSelf + "::") + idCx.method(m.ident) + "("
writeAlignedCall(w, call, m.params, ")", p => cxcppMarshal.fromCpp(p.ty, idCpp.local(p.ident.name)))
w.wl(";")
m.ret.fold()(r => w.wl(s"return ${cxcppMarshal.toCpp(r, "r")};"))
}
}
}
w.wl
w.wl(s"auto $self::toCpp(CxType cx) -> CppType")
w.braced {
w.wl("if (!cx)").braced {
w.wl("return nullptr;")
}
w.wl("return ::djinni::CxWrapperCache<CxProxy>::getInstance()->get(cx);")
}
w.wl
w.wl(s"auto $self::fromCpp(const CppType& cpp) -> CxType")
w.braced {
w.braced {
w.wl("if (!cpp)").braced {
w.wl("return nullptr;")
}
w.wl("return static_cast<CxType>(dynamic_cast<CxProxy &>(*cpp).Handle::get());")
}
}
} else {
w.wl(s"auto $self::toCpp(CxType cx) -> CppType")
w.braced {
w.wl("return cx->m_cppRef.get();")
}
w.wl
w.wl(s"auto $self::fromCpp(const CppType& cpp) -> CxType")
w.braced {
w.wl(s"return (CxType)::djinni::CppWrapperCache<$cppSelf>::getInstance()->get(cpp, [](const std::shared_ptr<$cppSelf>& p)").bracedEnd(");") {
w.wl(s"return ref new $cxSelf(p);")
}
}
}
})
}
def writeCxCppTypeParams(w: IndentWriter, params: Seq[TypeParam]) {
if (params.isEmpty) return
w.wl("template " + params.map(p => "typename " + idCpp.typeParam(p.ident)).mkString("<", ", ", ">"))
}
}
| DEGoodmanWilson/djinni | src/source/CxCppGenerator.scala | Scala | apache-2.0 | 10,480 |
/*                     __                                               *\
**     ________ ___   / /  ___     Scala API                            **
**    / __/ __// _ | / /  / _ |    (c) 2003-2010, LAMP/EPFL             **
**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
** /____/\___/_/ |_/____/_/ | |                                         **
**                          |/                                          **
\*                                                                      */
package scala.collection
package immutable
import generic._
import mutable.Builder
/** A trait for traversable collections that are guaranteed immutable.
* $traversableInfo
* @define mutability immutable
*/
trait Traversable[+A] extends scala.collection.Traversable[A]
with GenericTraversableTemplate[A, Traversable]
with TraversableLike[A, Traversable[A]]
with Immutable {
override def companion: GenericCompanion[Traversable] = Traversable
}
/** $factoryInfo
* The current default implementation of a $Coll is a `List`.
* @define coll immutable traversable collection
* @define Coll immutable.Traversable
*/
object Traversable extends TraversableFactory[Traversable] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Traversable[A]] = new GenericCanBuildFrom[A]
def newBuilder[A]: Builder[A, Traversable[A]] = new mutable.ListBuffer
}
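A quick sketch of the factory at work (the demo object is hypothetical; requires Scala 2.12 or earlier, where `scala.collection.immutable.Traversable` still exists). Because `newBuilder` is a `ListBuffer`, the concrete result is a `List`:

object TraversableDemo extends App {
  val xs = scala.collection.immutable.Traversable(1, 2, 3)
  // apply builds through newBuilder (a ListBuffer), so the runtime type is List.
  println(xs)                       // List(1, 2, 3)
  println(xs.isInstanceOf[List[_]]) // true
}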
| cran/rkafkajars | java/scala/collection/immutable/Traversable.scala | Scala | apache-2.0 | 1,470 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding.commons.source
import cascading.flow.FlowDef
import cascading.pipe.Pipe
import cascading.scheme.Scheme
import cascading.tap.Tap
import cascading.tuple.Fields
import com.backtype.cascading.scheme.KeyValueByteScheme
import com.backtype.cascading.tap.VersionedTap
import com.backtype.cascading.tap.VersionedTap.TapMode
import com.twitter.algebird.Monoid
import com.twitter.bijection.Injection
import com.twitter.chill.Externalizer
import com.twitter.scalding.TDsl._
import com.twitter.scalding._
import com.twitter.scalding.source.{ CheckedInversion, MaxFailuresCheck }
import com.twitter.scalding.typed.KeyedListLike
import com.twitter.scalding.typed.TypedSink
import org.apache.hadoop.mapred.JobConf
/**
* Source used to write key-value pairs as byte arrays into a versioned store.
* Supports incremental updates via the monoid on V.
*/
object VersionedKeyValSource {
val defaultVersionsToKeep = 3
// TODO: keep two apply methods here for binary compatibility; clean this up in the next release.
def apply[K,V](path: String, sourceVersion: Option[Long] = None, sinkVersion: Option[Long] = None, maxFailures: Int = 0)
(implicit codec: Injection[(K,V),(Array[Byte],Array[Byte])]) = {
new VersionedKeyValSource[K,V](path, sourceVersion, sinkVersion, maxFailures, defaultVersionsToKeep)
}
def apply[K,V](path: String, sourceVersion: Option[Long], sinkVersion: Option[Long], maxFailures: Int, versionsToKeep: Int)
(implicit codec: Injection[(K,V),(Array[Byte],Array[Byte])]) =
new VersionedKeyValSource[K,V](path, sourceVersion, sinkVersion, maxFailures, versionsToKeep)
}
class VersionedKeyValSource[K,V](val path: String, val sourceVersion: Option[Long], val sinkVersion: Option[Long],
val maxFailures: Int, val versionsToKeep: Int)(
implicit @transient codec: Injection[(K,V),(Array[Byte],Array[Byte])]) extends Source with Mappable[(K,V)] with TypedSink[(K,V)] {
import Dsl._
val keyField = "key"
val valField = "value"
val fields = new Fields(keyField, valField)
val codecBox = Externalizer(codec)
override def converter[U >: (K, V)] = TupleConverter.asSuperConverter[(K, V), U](TupleConverter.of[(K, V)])
override def setter[U <: (K, V)] = TupleSetter.asSubSetter[(K, V), U](TupleSetter.of[(K,V)])
def hdfsScheme =
HadoopSchemeInstance(new KeyValueByteScheme(fields).asInstanceOf[Scheme[_, _, _, _, _]])
@deprecated("This method is deprecated", "0.1.6")
def this(path: String, sourceVersion: Option[Long], sinkVersion: Option[Long], maxFailures: Int)
(implicit @transient codec: Injection[(K,V),(Array[Byte],Array[Byte])]) =
this(path, sourceVersion, sinkVersion, maxFailures, VersionedKeyValSource.defaultVersionsToKeep)(codec)
def getTap(mode: TapMode) = {
val tap = new VersionedTap(path, hdfsScheme, mode).setVersionsToKeep(versionsToKeep)
if (mode == TapMode.SOURCE && sourceVersion.isDefined)
tap.setVersion(sourceVersion.get)
else if (mode == TapMode.SINK && sinkVersion.isDefined)
tap.setVersion(sinkVersion.get)
else
tap
}
val source = getTap(TapMode.SOURCE)
val sink = getTap(TapMode.SINK)
override def validateTaps(mode: Mode): Unit = {
// if a version is explicitly supplied, ensure that it exists
sourceVersion.foreach { version =>
mode match {
case hadoopMode: HadoopMode => {
val store = source.getStore(new JobConf(hadoopMode.jobConf))
if (!store.hasVersion(version)) {
throw new IllegalArgumentException(
"Version %s does not exist. Currently available versions are: %s"
.format(version, store.getAllVersions))
}
}
case _ => throw new IllegalArgumentException(
"VersionedKeyValSource does not support mode %s. Only HadoopMode is supported"
.format(mode))
}
}
}
def resourceExists(mode: Mode) =
mode match {
case Test(buffers) => {
buffers(this) map { !_.isEmpty } getOrElse false
}
case HadoopTest(conf, buffers) => {
buffers(this) map { !_.isEmpty } getOrElse false
}
case _ => {
val conf = new JobConf(mode.asInstanceOf[HadoopMode].jobConf)
source.resourceExists(conf)
}
}
override def createTap(readOrWrite: AccessMode)(implicit mode: Mode): Tap[_,_,_] = {
import com.twitter.scalding.CastHfsTap
mode match {
case Hdfs(_strict, _config) =>
readOrWrite match {
case Read => CastHfsTap(source)
case Write => CastHfsTap(sink)
}
case _ =>
TestTapFactory(this, hdfsScheme).createTap(readOrWrite)
}
}
// Override this for more control over decode failures
protected lazy val checkedInversion: CheckedInversion[(K,V), (Array[Byte],Array[Byte])] =
new MaxFailuresCheck(maxFailures)(codecBox.get)
override def sinkFields = fields
override def transformForRead(pipe: Pipe) = {
pipe.flatMap((keyField, valField) -> (keyField, valField)) { pair: (Array[Byte],Array[Byte]) =>
checkedInversion(pair)
}
}
override def transformForWrite(pipe: Pipe) = {
pipe.mapTo((0,1) -> (keyField, valField)) { pair: (K,V) =>
codecBox.get.apply(pair)
}
}
override def toString =
"%s path:%s,sourceVersion:%s,sinkVersion:%s".format(getClass(), path, sourceVersion, sinkVersion)
override def equals(other: Any) =
if (other.isInstanceOf[VersionedKeyValSource[_, _]]) {
val otherSrc = other.asInstanceOf[VersionedKeyValSource[K, V]]
otherSrc.path == path && otherSrc.sourceVersion == sourceVersion && otherSrc.sinkVersion == sinkVersion
} else {
false
}
override def hashCode = toString.hashCode
}
object RichPipeEx extends java.io.Serializable {
implicit def pipeToRichPipeEx(pipe: Pipe): RichPipeEx = new RichPipeEx(pipe)
implicit def typedPipeToRichPipeEx[K: Ordering, V: Monoid](pipe: TypedPipe[(K,V)]) =
new TypedRichPipeEx(pipe)
implicit def keyedListLikeToRichPipeEx[K: Ordering, V: Monoid, T[K, +V] <: KeyedListLike[K, V, T]](
kll: KeyedListLike[K, V, T]) = typedPipeToRichPipeEx(kll.toTypedPipe)
}
class TypedRichPipeEx[K: Ordering, V: Monoid](pipe: TypedPipe[(K,V)]) extends java.io.Serializable {
import Dsl._
import TDsl._
// Reads existing data from the `sourceVersion` (or the latest version) of
// `src`, merges in the K,V pairs from this pipe using an implicit
// `Monoid[V]`, and sinks all results into the `sinkVersion` (or a new
// version) of `src`.
def writeIncremental(src: VersionedKeyValSource[K,V], reducers: Int = 1)
(implicit flowDef: FlowDef, mode: Mode): TypedPipe[(K, V)] = {
val outPipe =
if (!src.resourceExists(mode))
pipe
else {
val oldPairs = TypedPipe
.from[(K,V)](src.read, (0,1))
.map { case (k, v) => (k, v, 0) }
val newPairs = pipe.sumByLocalKeys.map { case (k, v) => (k, v, 1) }
(oldPairs ++ newPairs)
.groupBy { _._1 }
.withReducers(reducers)
.sortBy { _._3 }
.mapValues { _._2 }
.sum
.toTypedPipe
}
outPipe.write(src)
}
}
class RichPipeEx(pipe: Pipe) extends java.io.Serializable {
import Dsl._
// VersionedKeyValSource always merges with the most recent complete
// version
def writeIncremental[K,V](src: VersionedKeyValSource[K,V], fields: Fields, reducers: Int = 1)
(implicit monoid: Monoid[V],
flowDef: FlowDef,
mode: Mode) = {
def appendToken(pipe: Pipe, token: Int) =
pipe.mapTo((0,1) -> ('key,'value,'isNew)) { pair: (K,V) => pair :+ token }
val outPipe =
if (!src.resourceExists(mode))
pipe
else {
val oldPairs = appendToken(src.read, 0)
val newPairs = appendToken(pipe, 1)
(oldPairs ++ newPairs)
.groupBy('key) { _.reducers(reducers).sortBy('isNew).sum[V]('value) }
.project(('key,'value))
.rename(('key, 'value) -> fields)
}
outPipe.write(src)
}
}
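A sketch of the incremental-update pattern these classes enable; everything here is hypothetical (object name, store path, reducer count), and the byte-array codec is left to the caller because constructing one depends on the chosen bijections:

import cascading.flow.FlowDef
import com.twitter.bijection.Injection
import com.twitter.scalding._
import com.twitter.scalding.commons.source.{RichPipeEx, VersionedKeyValSource}
import RichPipeEx._

object IncrementalWordCount {
  // Merges fresh (word, count) pairs into the latest version of a versioned
  // store; Monoid[Long] sums old and new counts per key, and the result is
  // written out as a new version.
  def update(newCounts: TypedPipe[(String, Long)], storePath: String)
            (implicit codec: Injection[(String, Long), (Array[Byte], Array[Byte])],
             flowDef: FlowDef, mode: Mode): TypedPipe[(String, Long)] =
    newCounts.writeIncremental(VersionedKeyValSource[String, Long](storePath), reducers = 4)
}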
| danosipov/scalding | scalding-commons/src/main/scala/com/twitter/scalding/commons/source/VersionedKeyValSource.scala | Scala | apache-2.0 | 8,633 |
package com.looker.logDataWebinar
import com.typesafe.config.Config
// Settings class exposing values read from application.conf
class Settings(config: Config) {
val location = config.getString("hdfs.location")
val file_format = config.getString("hdfs.file_format")
val output_type = config.getString("hdfs.output_type")
}
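A minimal wiring sketch; the demo object is hypothetical and assumes the `hdfs.*` keys exist in an `application.conf` on the classpath:

import com.looker.logDataWebinar.Settings
import com.typesafe.config.ConfigFactory

object SettingsDemo extends App {
  // ConfigFactory.load() reads application.conf from the classpath.
  val settings = new Settings(ConfigFactory.load())
  println(settings.location)
  println(settings.file_format)
  println(settings.output_type)
}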
| looker/spark_log_data | src/main/scala/Settings.scala | Scala | mit | 328 |
/*
* This file is part of Evo2DSim.
*
* Evo2DSim is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Evo2DSim is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Evo2DSim. If not, see <http://www.gnu.org/licenses/>.
*/
package org.vastness.evo2dsim.core.evolution.genomes
import org.vastness.evo2dsim.macros.utils.Enum
sealed trait NodeTag{
def name: String
}
object NodeTag extends Enum[NodeTag] {
case object Sensor extends NodeTag {
def name = "sensor"
}
case object Motor extends NodeTag {
def name = "motor"
}
case object Hidden extends NodeTag {
def name = "hidden"
}
}
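A small consumption sketch (the `NodeTagDemo` helper is hypothetical); because the trait is sealed, the compiler can check that the match below covers all three tags:

import org.vastness.evo2dsim.core.evolution.genomes.NodeTag

object NodeTagDemo {
  // Omitting any of the three cases would trigger an exhaustiveness warning.
  def describe(tag: NodeTag): String = tag match {
    case NodeTag.Sensor => s"input node (${tag.name})"
    case NodeTag.Motor  => s"output node (${tag.name})"
    case NodeTag.Hidden => s"internal node (${tag.name})"
  }
}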
| vchuravy/Evo2DSim | core/src/main/scala/org/vastness/evo2dsim/core/evolution/genomes/NodeTag.scala | Scala | mit | 1,085 |
/*
* IntegrateIHHEntity.scala for SparkXpehh
* Copyright (c) 2015-2016 Wei Zhou, Changchun Liu, Haibin Xie All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package com.ynu.entity
/**
* @author liucc
*
* Data structure for the IHH integration result.
*/
case class IntegrateIHHEntity(pos: Int, sampleIntegrateIHH: BigDecimal) {
override def equals(that: Any): Boolean = {
def strictEquals(other: IntegrateIHHEntity) =
this.pos.equals(other.pos)
that match {
case a: AnyRef if this eq a => true
case p: IntegrateIHHEntity => strictEquals(p)
case _ => false
}
}
/**
* Formats the result for output.
* @return
*/
override def toString = {
List(pos.toString(), sampleIntegrateIHH.toString()).mkString("\t")
}
}
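A brief sketch (demo object hypothetical) of the equality semantics defined above: two entities compare equal whenever `pos` matches, regardless of the integrated value. Note that the synthetic case-class `hashCode` still covers both fields, so hash-based collections will not see `a` and `b` as duplicates.

import com.ynu.entity.IntegrateIHHEntity

object IntegrateIHHEntityDemo extends App {
  val a = IntegrateIHHEntity(42, BigDecimal("1.5"))
  val b = IntegrateIHHEntity(42, BigDecimal("9.9"))
  println(a == b)     // true: the overridden equals compares only pos
  println(a.toString) // 42<TAB>1.5, tab-separated for downstream parsing
}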
| zhouweiyg/SparkXpehh | src/main/scala/com/ynu/entity/IntegrateIHHEntity.scala | Scala | mit | 1,830 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.sources
////////////////////////////////////////////////////////////////////////////////////////////////////
// This file defines all the filters that we can push down to the data sources.
////////////////////////////////////////////////////////////////////////////////////////////////////
/**
* A filter predicate for data sources.
*
* @since 1.3.0
*/
abstract class Filter
/**
* A filter that evaluates to `true` iff the attribute evaluates to a value
* equal to `value`.
*
* @since 1.3.0
*/
case class EqualTo(attribute: String, value: Any) extends Filter
/**
* Performs equality comparison, similar to [[EqualTo]]. However, this differs from [[EqualTo]]
* in that it returns `true` (rather than NULL) if both inputs are NULL, and `false`
* (rather than NULL) if one of the inputs is NULL and the other is not.
*
* @since 1.5.0
*/
case class EqualNullSafe(attribute: String, value: Any) extends Filter
/**
* A filter that evaluates to `true` iff the attribute evaluates to a value
* greater than `value`.
*
* @since 1.3.0
*/
case class GreaterThan(attribute: String, value: Any) extends Filter
/**
* A filter that evaluates to `true` iff the attribute evaluates to a value
* greater than or equal to `value`.
*
* @since 1.3.0
*/
case class GreaterThanOrEqual(attribute: String, value: Any) extends Filter
/**
* A filter that evaluates to `true` iff the attribute evaluates to a value
* less than `value`.
*
* @since 1.3.0
*/
case class LessThan(attribute: String, value: Any) extends Filter
/**
* A filter that evaluates to `true` iff the attribute evaluates to a value
* less than or equal to `value`.
*
* @since 1.3.0
*/
case class LessThanOrEqual(attribute: String, value: Any) extends Filter
/**
* A filter that evaluates to `true` iff the attribute evaluates to one of the values in the array.
*
* @since 1.3.0
*/
case class In(attribute: String, values: Array[Any]) extends Filter
/**
* A filter that evaluates to `true` iff the attribute evaluates to null.
*
* @since 1.3.0
*/
case class IsNull(attribute: String) extends Filter
/**
* A filter that evaluates to `true` iff the attribute evaluates to a non-null value.
*
* @since 1.3.0
*/
case class IsNotNull(attribute: String) extends Filter
/**
* A filter that evaluates to `true` iff both `left` and `right` evaluate to `true`.
*
* @since 1.3.0
*/
case class And(left: Filter, right: Filter) extends Filter
/**
* A filter that evaluates to `true` iff at least one of `left` or `right` evaluates to `true`.
*
* @since 1.3.0
*/
case class Or(left: Filter, right: Filter) extends Filter
/**
* A filter that evaluates to `true` iff `child` is evaluated to `false`.
*
* @since 1.3.0
*/
case class Not(child: Filter) extends Filter
/**
* A filter that evaluates to `true` iff the attribute evaluates to
* a string that starts with `value`.
*
* @since 1.3.1
*/
case class StringStartsWith(attribute: String, value: String) extends Filter
/**
* A filter that evaluates to `true` iff the attribute evaluates to
* a string that ends with `value`.
*
* @since 1.3.1
*/
case class StringEndsWith(attribute: String, value: String) extends Filter
/**
* A filter that evaluates to `true` iff the attribute evaluates to
* a string that contains the string `value`.
*
* @since 1.3.1
*/
case class StringContains(attribute: String, value: String) extends Filter
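A short composition sketch (built by hand here; in practice Spark constructs these trees during predicate pushdown and hands them to the data source):

import org.apache.spark.sql.sources._

object FilterDemo extends App {
  // Roughly: WHERE age >= 21 AND (name LIKE 'A%' OR name IS NULL)
  val pushed: Filter = And(
    GreaterThanOrEqual("age", 21),
    Or(StringStartsWith("name", "A"), IsNull("name"))
  )
  println(pushed)
}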
| ArvinDevel/onlineAggregationOnSparkV2 | sql/core/src/main/scala/org/apache/spark/sql/sources/filters.scala | Scala | apache-2.0 | 4,233 |
package nl.iljabooij.garmintrainer.parser.digester
import org.joda.time.DateTime
import org.joda.time.format.ISODateTimeFormat
import org.junit.Assert._
import org.mockito.Mockito._
import org.scalatest.junit.{JUnit3Suite,AssertionsForJUnit}
import org.scalatest.mock.MockitoSugar
import org.xml.sax.Attributes
class LapBuilderFactoryTest extends JUnit3Suite with AssertionsForJUnit
with MockitoSugar {
def testCreateObject {
val dateTime = new DateTime().withMillis(0)
val formatter = ISODateTimeFormat.dateTimeNoMillis
val dateTimeString = formatter.print(dateTime)
val attributes = mock[Attributes]
val attributeName = LapBuilderFactory.startTimeAttribute
when(attributes.getValue(attributeName)).thenReturn(dateTimeString)
val lapBuilderFactory = new LapBuilderFactory
val lapType = lapBuilderFactory.createObject(attributes)
assertEquals(dateTime, lapType.getStartTime)
verify(attributes, times(1)).getValue(attributeName)
}
}
| chmandrade/garmintrainer | src/test/scala/nl/iljabooij/garmintrainer/parser/digester/LapBuilderFactoryTest.scala | Scala | gpl-3.0 | 999 |
/*
* Copyright © 2015-2019 the contributors (see Contributors.md).
*
* This file is part of Knora.
*
* Knora is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Knora is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public
* License along with Knora. If not, see <http://www.gnu.org/licenses/>.
*/
package org.knora.webapi.responders.v2
import java.time.Instant
import java.util.UUID
import akka.actor.{ActorRef, Props}
import akka.testkit.ImplicitSender
import org.knora.webapi._
import org.knora.webapi.app.{APPLICATION_MANAGER_ACTOR_NAME, ApplicationActor}
import org.knora.webapi.messages.admin.responder.usersmessages.UserADM
import org.knora.webapi.messages.store.triplestoremessages._
import org.knora.webapi.messages.v2.responder._
import org.knora.webapi.messages.v2.responder.resourcemessages._
import org.knora.webapi.messages.v2.responder.searchmessages.GravsearchRequestV2
import org.knora.webapi.messages.v2.responder.standoffmessages._
import org.knora.webapi.messages.v2.responder.valuemessages._
import org.knora.webapi.responders.v2.search.gravsearch.GravsearchParser
import org.knora.webapi.store.iiif.MockSipiConnector
import org.knora.webapi.util.IriConversions._
import org.knora.webapi.util.date.{CalendarNameGregorian, DatePrecisionYear}
import org.knora.webapi.util.{MutableTestIri, PermissionUtilADM, SmartIri, StringFormatter}
import scala.concurrent.duration._
/**
* Tests [[ValuesResponderV2]].
*/
class ValuesResponderV2Spec extends CoreSpec() with ImplicitSender {
private implicit val stringFormatter: StringFormatter = StringFormatter.getGeneralInstance
private val zeitglöckleinIri = "http://rdfh.ch/0803/c5058f3a"
private val generationeIri = "http://rdfh.ch/0803/c3f913666f"
private val aThingIri = "http://rdfh.ch/0001/a-thing"
private val aThingPictureIri = "http://rdfh.ch/0001/a-thing-picture"
private val sierraIri = "http://rdfh.ch/0001/0C-0L1kORryKzJAJxxRyRQ"
private val incunabulaUser = SharedTestDataADM.incunabulaMemberUser
private val incunabulaCreatorUser = SharedTestDataADM.incunabulaCreatorUser
private val anythingUser1 = SharedTestDataADM.anythingUser1
private val anythingUser2 = SharedTestDataADM.anythingUser2
/* we need to run our app with the mocked sipi actor */
override lazy val appActor: ActorRef = system.actorOf(Props(new ApplicationActor with ManagersWithMockedSipi).withDispatcher(KnoraDispatchers.KnoraActorDispatcher), name = APPLICATION_MANAGER_ACTOR_NAME)
override lazy val rdfDataObjects = List(
RdfDataObject(path = "_test_data/responders.v2.ValuesResponderV2Spec/incunabula-data.ttl", name = "http://www.knora.org/data/0803/incunabula"),
RdfDataObject(path = "_test_data/demo_data/images-demo-data.ttl", name = "http://www.knora.org/data/00FF/images"),
RdfDataObject(path = "_test_data/all_data/anything-data.ttl", name = "http://www.knora.org/data/0001/anything")
)
// The default timeout for receiving reply messages from actors.
private val timeout = 30.seconds
private val firstIntValueVersionIri = new MutableTestIri
private val intValueIri = new MutableTestIri
private val intValueIriWithCustomPermissions = new MutableTestIri
private val zeitglöckleinCommentWithoutStandoffIri = new MutableTestIri
private val zeitglöckleinCommentWithStandoffIri = new MutableTestIri
private val zeitglöckleinCommentWithCommentIri = new MutableTestIri
private val zeitglöckleinSecondCommentWithStandoffIri = new MutableTestIri
private val lobComment1Iri = new MutableTestIri
private val lobComment2Iri = new MutableTestIri
private val decimalValueIri = new MutableTestIri
private val dateValueIri = new MutableTestIri
private val booleanValueIri = new MutableTestIri
private val geometryValueIri = new MutableTestIri
private val intervalValueIri = new MutableTestIri
private val listValueIri = new MutableTestIri
private val colorValueIri = new MutableTestIri
private val uriValueIri = new MutableTestIri
private val geonameValueIri = new MutableTestIri
private val linkValueIri = new MutableTestIri
private val standoffLinkValueIri = new MutableTestIri
private val stillImageFileValueIri = new MutableTestIri
private val sampleStandoff: Vector[StandoffTagV2] = Vector(
StandoffTagV2(
standoffTagClassIri = OntologyConstants.Standoff.StandoffBoldTag.toSmartIri,
startPosition = 0,
endPosition = 7,
uuid = UUID.randomUUID(),
originalXMLID = None,
startIndex = 0
),
StandoffTagV2(
standoffTagClassIri = OntologyConstants.Standoff.StandoffParagraphTag.toSmartIri,
startPosition = 0,
endPosition = 10,
uuid = UUID.randomUUID(),
originalXMLID = None,
startIndex = 1
)
)
private val sampleStandoffModified: Vector[StandoffTagV2] = Vector(
StandoffTagV2(
standoffTagClassIri = OntologyConstants.Standoff.StandoffBoldTag.toSmartIri,
startPosition = 1,
endPosition = 7,
uuid = UUID.randomUUID(),
originalXMLID = None,
startIndex = 0
),
StandoffTagV2(
standoffTagClassIri = OntologyConstants.Standoff.StandoffParagraphTag.toSmartIri,
startPosition = 0,
endPosition = 10,
uuid = UUID.randomUUID(),
originalXMLID = None,
startIndex = 1
)
)
private val sampleStandoffWithLink: Vector[StandoffTagV2] = Vector(
StandoffTagV2(
standoffTagClassIri = OntologyConstants.KnoraBase.StandoffLinkTag.toSmartIri,
dataType = Some(StandoffDataTypeClasses.StandoffLinkTag),
startPosition = 0,
endPosition = 7,
uuid = UUID.randomUUID(),
originalXMLID = None,
startIndex = 0,
attributes = Vector(StandoffTagIriAttributeV2(standoffPropertyIri = OntologyConstants.KnoraBase.StandoffTagHasLink.toSmartIri, value = aThingIri))
),
StandoffTagV2(
standoffTagClassIri = OntologyConstants.Standoff.StandoffParagraphTag.toSmartIri,
startPosition = 0,
endPosition = 10,
uuid = UUID.randomUUID(),
originalXMLID = None,
startIndex = 1
)
)
private var standardMapping: Option[MappingXMLtoStandoff] = None
private def getResourceWithValues(resourceIri: IRI,
propertyIrisForGravsearch: Seq[SmartIri],
requestingUser: UserADM): ReadResourceV2 = {
// Make a Gravsearch query from a template.
val gravsearchQuery: String = queries.gravsearch.txt.getResourceWithSpecifiedProperties(
resourceIri = resourceIri,
propertyIris = propertyIrisForGravsearch
).toString()
// Run the query.
val parsedGravsearchQuery = GravsearchParser.parseQuery(gravsearchQuery)
responderManager ! GravsearchRequestV2(
constructQuery = parsedGravsearchQuery,
targetSchema = ApiV2Complex,
schemaOptions = SchemaOptions.ForStandoffWithTextValues,
requestingUser = requestingUser
)
expectMsgPF(timeout) {
case searchResponse: ReadResourcesSequenceV2 =>
// Get the resource from the response.
resourcesSequenceToResource(
requestedresourceIri = resourceIri,
readResourcesSequence = searchResponse,
requestingUser = requestingUser
)
}
}
private def getValuesFromResource(resource: ReadResourceV2,
propertyIriInResult: SmartIri): Seq[ReadValueV2] = {
resource.values.getOrElse(propertyIriInResult, throw AssertionException(s"Resource <${resource.resourceIri}> does not have property <$propertyIriInResult>"))
}
private def getValueFromResource(resource: ReadResourceV2,
propertyIriInResult: SmartIri,
expectedValueIri: IRI): ReadValueV2 = {
val propertyValues: Seq[ReadValueV2] = getValuesFromResource(resource = resource, propertyIriInResult = propertyIriInResult)
propertyValues.find(_.valueIri == expectedValueIri).getOrElse(throw AssertionException(s"Property <$propertyIriInResult> of resource <${resource.resourceIri}> does not have value <$expectedValueIri>"))
}
private def checkValueIsDeleted(resourceIri: IRI,
maybePreviousLastModDate: Option[Instant],
propertyIriForGravsearch: SmartIri,
propertyIriInResult: SmartIri,
valueIri: IRI,
requestingUser: UserADM): Unit = {
val resource = getResourceWithValues(
resourceIri = resourceIri,
propertyIrisForGravsearch = Seq(propertyIriForGravsearch),
requestingUser = requestingUser
)
checkLastModDate(
resourceIri = resourceIri,
maybePreviousLastModDate = maybePreviousLastModDate,
maybeUpdatedLastModDate = resource.lastModificationDate
)
val propertyValues: Seq[ReadValueV2] = getValuesFromResource(resource = resource, propertyIriInResult = propertyIriInResult)
propertyValues.find(_.valueIri == valueIri) match {
case Some(_) => throw AssertionException(s"Value <$valueIri> was not deleted")
case None => ()
}
}
private def checkLastModDate(resourceIri: IRI, maybePreviousLastModDate: Option[Instant], maybeUpdatedLastModDate: Option[Instant]): Unit = {
maybeUpdatedLastModDate match {
case Some(updatedLastModDate) =>
maybePreviousLastModDate match {
case Some(previousLastModDate) => assert(updatedLastModDate.isAfter(previousLastModDate))
case None => ()
}
case None => throw AssertionException(s"Resource $resourceIri has no knora-base:lastModificationDate")
}
}
private def getValue(resourceIri: IRI,
maybePreviousLastModDate: Option[Instant],
propertyIriForGravsearch: SmartIri,
propertyIriInResult: SmartIri,
expectedValueIri: IRI,
requestingUser: UserADM,
checkLastModDateChanged: Boolean = true): ReadValueV2 = {
val resource = getResourceWithValues(
resourceIri = resourceIri,
propertyIrisForGravsearch = Seq(propertyIriForGravsearch),
requestingUser = requestingUser
)
if (checkLastModDateChanged) {
checkLastModDate(
resourceIri = resourceIri,
maybePreviousLastModDate = maybePreviousLastModDate,
maybeUpdatedLastModDate = resource.lastModificationDate
)
}
getValueFromResource(
resource = resource,
propertyIriInResult = propertyIriInResult,
expectedValueIri = expectedValueIri
)
}
private def resourcesSequenceToResource(requestedresourceIri: IRI, readResourcesSequence: ReadResourcesSequenceV2, requestingUser: UserADM): ReadResourceV2 = {
if (readResourcesSequence.numberOfResources == 0) {
throw AssertionException(s"Expected one resource, <$requestedresourceIri>, but no resources were returned")
}
if (readResourcesSequence.numberOfResources > 1) {
throw AssertionException(s"More than one resource returned with IRI <$requestedresourceIri>")
}
val resourceInfo = readResourcesSequence.resources.head
if (resourceInfo.resourceIri == SearchResponderV2Constants.forbiddenResourceIri) {
throw ForbiddenException(s"User ${requestingUser.email} does not have permission to view resource <${resourceInfo.resourceIri}>")
}
resourceInfo.toOntologySchema(ApiV2Complex)
}
private def getResourceLastModificationDate(resourceIri: IRI, requestingUser: UserADM): Option[Instant] = {
responderManager ! ResourcesPreviewGetRequestV2(resourceIris = Seq(resourceIri), targetSchema = ApiV2Complex, requestingUser = requestingUser)
expectMsgPF(timeout) {
case previewResponse: ReadResourcesSequenceV2 =>
val resourcePreview: ReadResourceV2 = resourcesSequenceToResource(
requestedresourceIri = resourceIri,
readResourcesSequence = previewResponse,
requestingUser = requestingUser
)
resourcePreview.lastModificationDate
}
}
private def getValueUUID(valueIri: IRI): Option[UUID] = {
val sparqlQuery =
s"""
|PREFIX knora-base: <http://www.knora.org/ontology/knora-base#>
|
|SELECT ?valueUUID WHERE {
| <$valueIri> knora-base:valueHasUUID ?valueUUID .
|}
""".stripMargin
storeManager ! SparqlSelectRequest(sparqlQuery)
expectMsgPF(timeout) {
case response: SparqlSelectResponse =>
val rows = response.results.bindings
if (rows.isEmpty) {
None
} else if (rows.size > 1) {
throw AssertionException(s"Expected one knora-base:valueHasUUID, got ${rows.size}")
} else {
Some(stringFormatter.base64DecodeUuid(rows.head.rowMap("valueUUID")))
}
}
}
private def getValuePermissions(valueIri: IRI): Option[String] = {
val sparqlQuery =
s"""
|PREFIX knora-base: <http://www.knora.org/ontology/knora-base#>
|
|SELECT ?valuePermissions WHERE {
| <$valueIri> knora-base:hasPermissions ?valuePermissions .
|}
""".stripMargin
storeManager ! SparqlSelectRequest(sparqlQuery)
expectMsgPF(timeout) {
case response: SparqlSelectResponse =>
val rows = response.results.bindings
if (rows.isEmpty) {
None
} else if (rows.size > 1) {
throw AssertionException(s"Expected one knora-base:hasPermissions, got ${rows.size}")
} else {
Some(rows.head.rowMap("valuePermissions"))
}
}
}
"Load test data" in {
responderManager ! GetMappingRequestV2(mappingIri = "http://rdfh.ch/standoff/mappings/StandardMapping", requestingUser = KnoraSystemInstances.Users.SystemUser)
expectMsgPF(timeout) {
case mappingResponse: GetMappingResponseV2 =>
standardMapping = Some(mappingResponse.mapping)
}
}
"The values responder" should {
"create an integer value" in {
// Add the value.
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val intValue = 4
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = intValue
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 =>
intValueIri.set(createValueResponse.valueIri)
firstIntValueVersionIri.set(createValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = intValueIri.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: IntegerValueContentV2 => savedValue.valueHasInteger should ===(intValue)
case _ => throw AssertionException(s"Expected integer value, got $valueFromTriplestore")
}
}
"create an integer value with custom permissions" in {
// Add the value.
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val intValue = 1
val permissions = "CR knora-admin:Creator|V http://rdfh.ch/groups/0001/thing-searcher"
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = intValue
),
permissions = Some(permissions)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 => intValueIriWithCustomPermissions.set(createValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = intValueIriWithCustomPermissions.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: IntegerValueContentV2 =>
savedValue.valueHasInteger should ===(intValue)
PermissionUtilADM.parsePermissions(valueFromTriplestore.permissions) should ===(PermissionUtilADM.parsePermissions(permissions))
case _ => throw AssertionException(s"Expected integer value, got $valueFromTriplestore")
}
}
"not create an integer value with syntactically invalid custom permissions" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val intValue = 1024
val permissions = "M knora-admin:Creator,V knora-admin:KnownUser"
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = intValue
),
permissions = Some(permissions)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[BadRequestException] should ===(true)
}
}
"not create an integer value with custom permissions referring to a nonexistent group" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val intValue = 1024
val permissions = "M knora-admin:Creator|V http://rdfh.ch/groups/0001/nonexistent-group"
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = intValue
),
permissions = Some(permissions)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[NotFoundException] should ===(true)
}
}
"not create a value if the user does not have modify permission on the resource" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val intValue = 5
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = intValue
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[ForbiddenException] should ===(true)
}
}
"not create a duplicate integer value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val intValue = 4
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = intValue
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure =>
msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"create a text value without standoff" in {
val valueHasString = "Comment 1a"
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book_comment".toSmartIri
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(zeitglöckleinIri, incunabulaUser)
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = zeitglöckleinIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some(valueHasString)
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 => zeitglöckleinCommentWithoutStandoffIri.set(createValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = zeitglöckleinIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = zeitglöckleinCommentWithoutStandoffIri.get,
requestingUser = incunabulaUser
)
valueFromTriplestore.valueContent match {
case savedValue: TextValueContentV2 => assert(savedValue.valueHasString.contains(valueHasString))
case _ => throw AssertionException(s"Expected text value, got $valueFromTriplestore")
}
}
"not create a duplicate text value without standoff" in {
val valueHasString = "Comment 1a"
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book_comment".toSmartIri
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = zeitglöckleinIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some(valueHasString)
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"create a text value with a comment" in {
val valueHasString = "this is a text value that has a comment"
val valueHasComment = "this is a comment"
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book_comment".toSmartIri
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(zeitglöckleinIri, incunabulaUser)
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = zeitglöckleinIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some(valueHasString),
comment = Some(valueHasComment)
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 => zeitglöckleinCommentWithCommentIri.set(createValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = zeitglöckleinIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = zeitglöckleinCommentWithCommentIri.get,
requestingUser = incunabulaUser
)
valueFromTriplestore.valueContent match {
case savedValue: TextValueContentV2 =>
assert(savedValue.valueHasString.contains(valueHasString))
savedValue.comment should ===(Some(valueHasComment))
case _ => throw AssertionException(s"Expected text value, got $valueFromTriplestore")
}
}
"create a text value with standoff" in {
val valueHasString = "Comment 1aa"
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book_comment".toSmartIri
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(zeitglöckleinIri, incunabulaUser)
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = zeitglöckleinIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some(valueHasString),
standoff = sampleStandoff,
mappingIri = Some("http://rdfh.ch/standoff/mappings/StandardMapping"),
mapping = standardMapping
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 => zeitglöckleinCommentWithStandoffIri.set(createValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = zeitglöckleinIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = zeitglöckleinCommentWithStandoffIri.get,
requestingUser = incunabulaUser
)
valueFromTriplestore.valueContent match {
case savedValue: TextValueContentV2 =>
assert(savedValue.valueHasString.contains(valueHasString))
savedValue.standoff should ===(sampleStandoff)
assert(savedValue.mappingIri.contains("http://rdfh.ch/standoff/mappings/StandardMapping"))
assert(savedValue.mapping == standardMapping)
case _ => throw AssertionException(s"Expected text value, got $valueFromTriplestore")
}
}
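// As the next test demonstrates, duplicate detection for text values considers only the
// text itself (valueHasString): resubmitting the same string with different standoff
// markup is still rejected as a duplicate.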
"not create a duplicate text value with standoff (even if the standoff is different)" in {
val valueHasString = "Comment 1aa"
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book_comment".toSmartIri
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = zeitglöckleinIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some(valueHasString),
standoff = sampleStandoffModified,
mappingIri = Some("http://rdfh.ch/standoff/mappings/StandardMapping"),
mapping = standardMapping
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"create a decimal value" in {
// Add the value.
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasDecimal".toSmartIri
val valueHasDecimal = BigDecimal("4.3")
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = DecimalValueContentV2(
ontologySchema = ApiV2Complex,
valueHasDecimal = valueHasDecimal
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 => decimalValueIri.set(createValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = decimalValueIri.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: DecimalValueContentV2 => savedValue.valueHasDecimal should ===(valueHasDecimal)
case _ => throw AssertionException(s"Expected decimal value, got $valueFromTriplestore")
}
}
"not create a duplicate decimal value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasDecimal".toSmartIri
val valueHasDecimal = BigDecimal("4.3")
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = DecimalValueContentV2(
ontologySchema = ApiV2Complex,
valueHasDecimal = valueHasDecimal
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"create a date value" in {
// Add the value.
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasDate".toSmartIri
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
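// Knora date values store their start and end points as Julian Day Numbers (JDN).
// A hedged aside: JDN 2264907 and 2265271 correspond approximately to 1 January and
// 31 December 1489, so with year precision this value represents the Gregorian year 1489.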
val submittedValueContent = DateValueContentV2(
ontologySchema = ApiV2Complex,
valueHasCalendar = CalendarNameGregorian,
valueHasStartJDN = 2264907,
valueHasStartPrecision = DatePrecisionYear,
valueHasEndJDN = 2265271,
valueHasEndPrecision = DatePrecisionYear
)
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = submittedValueContent
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 => dateValueIri.set(createValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = dateValueIri.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: DateValueContentV2 =>
savedValue.valueHasCalendar should ===(submittedValueContent.valueHasCalendar)
savedValue.valueHasStartJDN should ===(submittedValueContent.valueHasStartJDN)
savedValue.valueHasStartPrecision should ===(submittedValueContent.valueHasStartPrecision)
savedValue.valueHasEndJDN should ===(submittedValueContent.valueHasEndJDN)
savedValue.valueHasEndPrecision should ===(submittedValueContent.valueHasEndPrecision)
case _ => throw AssertionException(s"Expected date value, got $valueFromTriplestore")
}
}
"not create a duplicate date value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasDate".toSmartIri
val submittedValueContent = DateValueContentV2(
ontologySchema = ApiV2Complex,
valueHasCalendar = CalendarNameGregorian,
valueHasStartJDN = 2264907,
valueHasStartPrecision = DatePrecisionYear,
valueHasEndJDN = 2265271,
valueHasEndPrecision = DatePrecisionYear
)
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = submittedValueContent
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"create a boolean value" in {
// Add the value.
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasBoolean".toSmartIri
val valueHasBoolean = true
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = BooleanValueContentV2(
ontologySchema = ApiV2Complex,
valueHasBoolean = valueHasBoolean
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 => booleanValueIri.set(createValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = booleanValueIri.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: BooleanValueContentV2 => savedValue.valueHasBoolean should ===(valueHasBoolean)
case _ => throw AssertionException(s"Expected boolean value, got $valueFromTriplestore")
}
}
"create a geometry value" in {
// Add the value.
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasGeometry".toSmartIri
val valueHasGeometry = """{"status":"active","lineColor":"#ff3333","lineWidth":2,"points":[{"x":0.08098591549295775,"y":0.16741071428571427},{"x":0.7394366197183099,"y":0.7299107142857143}],"type":"rectangle","original_index":0}"""
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = GeomValueContentV2(
ontologySchema = ApiV2Complex,
valueHasGeometry = valueHasGeometry
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 => geometryValueIri.set(createValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = geometryValueIri.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: GeomValueContentV2 => savedValue.valueHasGeometry should ===(valueHasGeometry)
case _ => throw AssertionException(s"Expected geometry value, got $valueFromTriplestore")
}
}
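// A hedged note: valueHasGeometry is stored as a JSON string describing the shape, so
// duplicate detection for geometry values (next test) presumably compares these strings.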
"not create a duplicate geometry value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasGeometry".toSmartIri
val valueHasGeometry = """{"status":"active","lineColor":"#ff3333","lineWidth":2,"points":[{"x":0.08098591549295775,"y":0.16741071428571427},{"x":0.7394366197183099,"y":0.7299107142857143}],"type":"rectangle","original_index":0}"""
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = GeomValueContentV2(
ontologySchema = ApiV2Complex,
valueHasGeometry = valueHasGeometry
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"create an interval value" in {
// Add the value.
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInterval".toSmartIri
val valueHasIntervalStart = BigDecimal("1.2")
val valueHasIntervalEnd = BigDecimal("3")
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = IntervalValueContentV2(
ontologySchema = ApiV2Complex,
valueHasIntervalStart = valueHasIntervalStart,
valueHasIntervalEnd = valueHasIntervalEnd
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 => intervalValueIri.set(createValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = intervalValueIri.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: IntervalValueContentV2 =>
savedValue.valueHasIntervalStart should ===(valueHasIntervalStart)
savedValue.valueHasIntervalEnd should ===(valueHasIntervalEnd)
case _ => throw AssertionException(s"Expected interval value, got $valueFromTriplestore")
}
}
"not create a duplicate interval value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInterval".toSmartIri
val valueHasIntervalStart = BigDecimal("1.2")
val valueHasIntervalEnd = BigDecimal("3")
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = IntervalValueContentV2(
ontologySchema = ApiV2Complex,
valueHasIntervalStart = valueHasIntervalStart,
valueHasIntervalEnd = valueHasIntervalEnd
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"create a list value" in {
// Add the value.
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasListItem".toSmartIri
val valueHasListNode = "http://rdfh.ch/lists/0001/treeList03"
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = HierarchicalListValueContentV2(
ontologySchema = ApiV2Complex,
valueHasListNode = valueHasListNode
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 => listValueIri.set(createValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = listValueIri.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: HierarchicalListValueContentV2 =>
savedValue.valueHasListNode should ===(valueHasListNode)
case _ => throw AssertionException(s"Expected list value, got $valueFromTriplestore")
}
}
"not create a duplicate list value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasListItem".toSmartIri
val valueHasListNode = "http://rdfh.ch/lists/0001/treeList03"
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = HierarchicalListValueContentV2(
ontologySchema = ApiV2Complex,
valueHasListNode = valueHasListNode
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"not create a list value referring to a nonexistent list node" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasListItem".toSmartIri
val valueHasListNode = "http://rdfh.ch/lists/0001/nonexistent"
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = HierarchicalListValueContentV2(
ontologySchema = ApiV2Complex,
valueHasListNode = valueHasListNode
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[NotFoundException] should ===(true)
}
}
"create a color value" in {
// Add the value.
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasColor".toSmartIri
val valueHasColor = "#ff3333"
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = ColorValueContentV2(
ontologySchema = ApiV2Complex,
valueHasColor = valueHasColor
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 => colorValueIri.set(createValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = colorValueIri.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: ColorValueContentV2 =>
savedValue.valueHasColor should ===(valueHasColor)
case _ => throw AssertionException(s"Expected color value, got $valueFromTriplestore")
}
}
"not create a duplicate color value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasColor".toSmartIri
val valueHasColor = "#ff3333"
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = ColorValueContentV2(
ontologySchema = ApiV2Complex,
valueHasColor = valueHasColor
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"create a URI value" in {
// Add the value.
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasUri".toSmartIri
val valueHasUri = "https://www.knora.org"
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = UriValueContentV2(
ontologySchema = ApiV2Complex,
valueHasUri = valueHasUri
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 => uriValueIri.set(createValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = uriValueIri.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: UriValueContentV2 =>
savedValue.valueHasUri should ===(valueHasUri)
case _ => throw AssertionException(s"Expected URI value, got $valueFromTriplestore")
}
}
"not create a duplicate URI value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasUri".toSmartIri
val valueHasUri = "https://www.knora.org"
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = UriValueContentV2(
ontologySchema = ApiV2Complex,
valueHasUri = valueHasUri
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"create a geoname value" in {
// Add the value.
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasGeoname".toSmartIri
val valueHasGeonameCode = "2661604"
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = GeonameValueContentV2(
ontologySchema = ApiV2Complex,
valueHasGeonameCode = valueHasGeonameCode
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 => geonameValueIri.set(createValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = geonameValueIri.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: GeonameValueContentV2 =>
savedValue.valueHasGeonameCode should ===(valueHasGeonameCode)
case _ => throw AssertionException(s"Expected GeoNames value, got $valueFromTriplestore")
}
}
"not create a duplicate geoname value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasGeoname".toSmartIri
val valueHasGeonameCode = "2661604"
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = GeonameValueContentV2(
ontologySchema = ApiV2Complex,
valueHasGeonameCode = valueHasGeonameCode
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
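// The link tests below depend on the distinction between a link property
// (e.g. knora-api:hasLinkTo), which points directly at the target resource, and the
// corresponding link value property (knora-api:hasLinkToValue), which points at a
// reified LinkValue carrying metadata such as a reference count. API requests must use
// the link value property; submitting the link property itself is rejected, as a later
// test shows.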
"create a link between two resources" in {
val resourceIri: IRI = "http://rdfh.ch/0803/cb1a74e3e2f6"
val linkPropertyIri: SmartIri = OntologyConstants.KnoraApiV2Complex.HasLinkTo.toSmartIri
val linkValuePropertyIri: SmartIri = OntologyConstants.KnoraApiV2Complex.HasLinkToValue.toSmartIri
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, incunabulaUser)
val createValueRequest = CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
propertyIri = linkValuePropertyIri,
resourceClassIri = OntologyConstants.KnoraApiV2Complex.LinkObj.toSmartIri,
valueContent = LinkValueContentV2(
ontologySchema = ApiV2Complex,
referredResourceIri = zeitglöckleinIri
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
responderManager ! createValueRequest
expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 => linkValueIri.set(createValueResponse.valueIri)
}
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = linkPropertyIri,
propertyIriInResult = linkValuePropertyIri,
expectedValueIri = linkValueIri.get,
requestingUser = incunabulaUser
)
valueFromTriplestore match {
case readLinkValueV2: ReadLinkValueV2 =>
readLinkValueV2.valueContent.referredResourceIri should ===(zeitglöckleinIri)
readLinkValueV2.valueHasRefCount should ===(1)
case _ => throw AssertionException(s"Expected link value, got $valueFromTriplestore")
}
}
"not create a duplicate link" in {
val resourceIri: IRI = "http://rdfh.ch/0803/cb1a74e3e2f6"
val linkValuePropertyIri: SmartIri = OntologyConstants.KnoraApiV2Complex.HasLinkToValue.toSmartIri
val createValueRequest = CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = OntologyConstants.KnoraApiV2Complex.LinkObj.toSmartIri,
propertyIri = linkValuePropertyIri,
valueContent = LinkValueContentV2(
ontologySchema = ApiV2Complex,
referredResourceIri = zeitglöckleinIri
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
responderManager ! createValueRequest
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"not accept a link property in a request to create a link between two resources" in {
val resourceIri: IRI = "http://rdfh.ch/0803/cb1a74e3e2f6"
val linkPropertyIri: SmartIri = OntologyConstants.KnoraApiV2Complex.HasLinkTo.toSmartIri
val createValueRequest = CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = OntologyConstants.KnoraApiV2Complex.LinkObj.toSmartIri,
propertyIri = linkPropertyIri,
valueContent = LinkValueContentV2(
ontologySchema = ApiV2Complex,
referredResourceIri = zeitglöckleinIri
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
responderManager ! createValueRequest
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[BadRequestException] should ===(true)
}
}
"not create a standoff link directly" in {
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = zeitglöckleinIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = OntologyConstants.KnoraApiV2Complex.HasStandoffLinkToValue.toSmartIri,
valueContent = LinkValueContentV2(
ontologySchema = ApiV2Complex,
referredResourceIri = generationeIri
)
),
requestingUser = SharedTestDataADM.superUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[BadRequestException] should ===(true)
}
}
"not add a new value to a nonexistent resource" in {
val resourceIri: IRI = "http://rdfh.ch/0001/nonexistent"
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val intValue = 6
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = intValue
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure =>
msg.cause.isInstanceOf[NotFoundException] should ===(true)
}
}
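// A resource marked as deleted is treated like a nonexistent one: the next test also
// expects a NotFoundException.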
"not add a new value to a deleted resource" in {
val resourceIri: IRI = "http://rdfh.ch/0803/9935159f67"
val valueHasString = "Comment 2"
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book_comment".toSmartIri
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some(valueHasString)
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure =>
msg.cause.isInstanceOf[NotFoundException] should ===(true)
}
}
"not add a new value if the resource's rdf:type is not correctly given" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val intValue = 2048
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = intValue
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure =>
msg.cause.isInstanceOf[BadRequestException] should ===(true)
}
}
"not add a new value of the wrong type" in {
val resourceIri: IRI = "http://rdfh.ch/0803/21abac2162"
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#pubdate".toSmartIri
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some("this is not a date")
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[OntologyConstraintException] should ===(true)
}
}
"not add a new value that would violate a cardinality restriction" in {
val resourceIri: IRI = "http://rdfh.ch/0803/4f11adaf"
// The cardinality of incunabula:partOf in incunabula:page is 1, and page http://rdfh.ch/0803/4f11adaf is already part of a book.
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#page".toSmartIri,
propertyIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#partOfValue".toSmartIri,
valueContent = LinkValueContentV2(
ontologySchema = ApiV2Complex,
referredResourceIri = "http://rdfh.ch/0803/e41ab5695c"
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[OntologyConstraintException] should ===(true)
}
// The cardinality of incunabula:seqnum in incunabula:page is 0-1, and page http://rdfh.ch/0803/4f11adaf already has a seqnum.
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = "http://rdfh.ch/0803/4f11adaf",
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#page".toSmartIri,
propertyIri = "http://www.knora.org/ontology/0803/incunabula#seqnum".toSmartIri,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = 1
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure =>
msg.cause.isInstanceOf[OntologyConstraintException] should ===(true)
}
}
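// The next two tests exercise standoff link maintenance: when a text value contains a
// StandoffLinkTag referring to another resource, Knora maintains a hasStandoffLinkTo
// direct link plus a corresponding LinkValue whose reference count tracks how many text
// values currently refer to the target.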
"add a new text value containing a Standoff resource reference, and create a hasStandoffLinkTo direct link and a corresponding LinkValue" in {
val resourceIri: IRI = "http://rdfh.ch/0803/21abac2162"
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book_comment".toSmartIri
val valueHasString = "This comment refers to another resource"
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, incunabulaUser)
val standoff = Seq(StandoffTagV2(
dataType = Some(StandoffDataTypeClasses.StandoffLinkTag),
standoffTagClassIri = OntologyConstants.KnoraBase.StandoffLinkTag.toSmartIri,
startPosition = 31,
endPosition = 39,
attributes = Vector(StandoffTagIriAttributeV2(standoffPropertyIri = OntologyConstants.KnoraBase.StandoffTagHasLink.toSmartIri, value = zeitglöckleinIri)),
uuid = UUID.randomUUID(),
originalXMLID = None,
startIndex = 0
))
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some(valueHasString),
standoff = standoff,
mappingIri = Some("http://rdfh.ch/standoff/mappings/StandardMapping"),
mapping = standardMapping
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 => lobComment1Iri.set(createValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val updatedResource = getResourceWithValues(
resourceIri = resourceIri,
propertyIrisForGravsearch = Seq(propertyIri, OntologyConstants.KnoraApiV2Complex.HasStandoffLinkTo.toSmartIri),
requestingUser = incunabulaUser
)
checkLastModDate(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
maybeUpdatedLastModDate = updatedResource.lastModificationDate
)
val textValueFromTriplestore: ReadValueV2 = getValueFromResource(
resource = updatedResource,
propertyIriInResult = propertyIri,
expectedValueIri = lobComment1Iri.get
)
textValueFromTriplestore.valueContent match {
case savedTextValue: TextValueContentV2 =>
assert(savedTextValue.valueHasString.contains(valueHasString))
savedTextValue.standoff should ===(standoff)
assert(savedTextValue.mappingIri.contains("http://rdfh.ch/standoff/mappings/StandardMapping"))
savedTextValue.mapping should ===(standardMapping)
case _ => throw AssertionException(s"Expected text value, got $textValueFromTriplestore")
}
// Since this is the first standoff resource reference between the source and target resources, we should
// now have version 1 of a LinkValue (it should have no previous version), with a reference count of 1.
val linkValuesFromTriplestore: Seq[ReadValueV2] = getValuesFromResource(
resource = updatedResource,
propertyIriInResult = OntologyConstants.KnoraApiV2Complex.HasStandoffLinkToValue.toSmartIri
)
assert(linkValuesFromTriplestore.size == 1)
val linkValueFromTriplestore: ReadLinkValueV2 = linkValuesFromTriplestore.head match {
case readLinkValueV2: ReadLinkValueV2 => readLinkValueV2
case other => throw AssertionException(s"Expected link value, got $other")
}
linkValueFromTriplestore.previousValueIri.isEmpty should ===(true)
linkValueFromTriplestore.valueHasRefCount should ===(1)
linkValueFromTriplestore.valueContent.referredResourceIri should ===(zeitglöckleinIri)
standoffLinkValueIri.set(linkValueFromTriplestore.valueIri)
}
"add another new text value containing a Standoff resource reference, and make a new version of the LinkValue" in {
val resourceIri: IRI = "http://rdfh.ch/0803/21abac2162"
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book_comment".toSmartIri
val valueHasString = "This remark refers to another resource"
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, incunabulaUser)
val standoff = Seq(StandoffTagV2(
dataType = Some(StandoffDataTypeClasses.StandoffLinkTag),
standoffTagClassIri = OntologyConstants.KnoraBase.StandoffLinkTag.toSmartIri,
startPosition = 30,
endPosition = 38,
attributes = Vector(StandoffTagIriAttributeV2(standoffPropertyIri = OntologyConstants.KnoraBase.StandoffTagHasLink.toSmartIri, value = zeitglöckleinIri)),
uuid = UUID.randomUUID(),
originalXMLID = None,
startIndex = 0
))
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some(valueHasString),
standoff = standoff,
mappingIri = Some("http://rdfh.ch/standoff/mappings/StandardMapping"),
mapping = standardMapping
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 => lobComment2Iri.set(createValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val updatedResource = getResourceWithValues(
resourceIri = resourceIri,
propertyIrisForGravsearch = Seq(propertyIri, OntologyConstants.KnoraApiV2Complex.HasStandoffLinkTo.toSmartIri),
requestingUser = incunabulaUser
)
checkLastModDate(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
maybeUpdatedLastModDate = updatedResource.lastModificationDate
)
val textValueFromTriplestore: ReadValueV2 = getValueFromResource(
resource = updatedResource,
propertyIriInResult = propertyIri,
expectedValueIri = lobComment2Iri.get
)
textValueFromTriplestore.valueContent match {
case savedTextValue: TextValueContentV2 =>
assert(savedTextValue.valueHasString.contains(valueHasString))
savedTextValue.standoff should ===(standoff)
assert(savedTextValue.mappingIri.contains("http://rdfh.ch/standoff/mappings/StandardMapping"))
savedTextValue.mapping should ===(standardMapping)
case _ => throw AssertionException(s"Expected text value, got $textValueFromTriplestore")
}
// Now that we've added a different TextValue that refers to the same resource, we should have version 2
// of the LinkValue, with a reference count of 2. It should have a previousValue pointing to the previous
// version.
val linkValuesFromTriplestore: Seq[ReadValueV2] = getValuesFromResource(
resource = updatedResource,
propertyIriInResult = OntologyConstants.KnoraApiV2Complex.HasStandoffLinkToValue.toSmartIri
)
assert(linkValuesFromTriplestore.size == 1)
val linkValueFromTriplestore: ReadLinkValueV2 = linkValuesFromTriplestore.head match {
case readLinkValueV2: ReadLinkValueV2 => readLinkValueV2
case other => throw AssertionException(s"Expected link value, got $other")
}
linkValueFromTriplestore.previousValueIri.contains(standoffLinkValueIri.get) should ===(true)
linkValueFromTriplestore.valueHasRefCount should ===(2)
linkValueFromTriplestore.valueContent.referredResourceIri should ===(zeitglöckleinIri)
standoffLinkValueIri.set(linkValueFromTriplestore.valueIri)
}
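// The update tests below check value versioning: updating a value creates a new version
// with a new IRI, while the value's UUID and permissions are carried over to the new
// version and removed from the previous one.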
"update an integer value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
// Get the value before update.
val previousValueFromTriplestore: ReadValueV2 = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = intValueIri.get,
requestingUser = anythingUser1,
checkLastModDateChanged = false
)
// Update the value.
val intValue = 5
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = intValueIri.get,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = intValue
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case updateValueResponse: UpdateValueResponseV2 => intValueIri.set(updateValueResponse.valueIri)
}
// Read the value back to check that it was updated correctly.
val updatedValueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = intValueIri.get,
requestingUser = anythingUser1
)
updatedValueFromTriplestore.valueContent match {
case savedValue: IntegerValueContentV2 =>
savedValue.valueHasInteger should ===(intValue)
updatedValueFromTriplestore.permissions should ===(previousValueFromTriplestore.permissions)
updatedValueFromTriplestore.valueHasUUID should ===(previousValueFromTriplestore.valueHasUUID)
case _ => throw AssertionException(s"Expected integer value, got $updatedValueFromTriplestore")
}
// Check that the permissions and UUID were deleted from the previous version of the value.
assert(getValueUUID(previousValueFromTriplestore.valueIri).isEmpty)
assert(getValuePermissions(previousValueFromTriplestore.valueIri).isEmpty)
}
"not update a value if an outdated value IRI is given" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val intValue = 3
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = firstIntValueVersionIri.get,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = intValue
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[NotFoundException] should ===(true)
}
}
"not update a value if the user does not have modify permission on the value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val intValue = 9
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = intValueIri.get,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = intValue
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[ForbiddenException] should ===(true)
}
}
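// A hedged note on the permission literals used below: each entry is a permission code
// (e.g. CR = change rights, M = modify, V = view) followed by one or more group IRIs,
// and entries are separated by '|'. Commas are assumed to be valid only between group
// IRIs within a single entry, which is why a literal like
// "M knora-admin:Creator,V knora-admin:KnownUser" is rejected as syntactically invalid.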
"update a value with custom permissions" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val permissions = "CR knora-admin:Creator|V knora-admin:ProjectMember"
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
val intValue = 6
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = intValueIri.get,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = intValue
),
permissions = Some(permissions)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case updateValueResponse: UpdateValueResponseV2 => intValueIri.set(updateValueResponse.valueIri)
}
// Read the value back to check that it was updated correctly.
val updatedValueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = intValueIri.get,
requestingUser = anythingUser1
)
updatedValueFromTriplestore.valueContent match {
case savedValue: IntegerValueContentV2 =>
savedValue.valueHasInteger should ===(intValue)
updatedValueFromTriplestore.permissions should ===(permissions)
case _ => throw AssertionException(s"Expected integer value, got $updatedValueFromTriplestore")
}
}
"not update a value with custom permissions if the requesting user does not have ChangeRightsPermission on the value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val permissions = "CR knora-admin:Creator"
val intValue = 10
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = intValueIri.get,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = intValue
),
permissions = Some(permissions)
),
requestingUser = anythingUser2,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[ForbiddenException] should ===(true)
}
}
"not update a value with syntactically invalid custom permissions" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val permissions = "M knora-admin:Creator,V knora-admin:KnownUser"
val intValue = 7
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = intValueIri.get,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = intValue
),
permissions = Some(permissions)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[BadRequestException] should ===(true)
}
}
"not update a value with custom permissions referring to a nonexistent group" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val permissions = "M knora-admin:Creator|V http://rdfh.ch/groups/0001/nonexistent-group"
val intValue = 8
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = intValueIri.get,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = intValue
),
permissions = Some(permissions)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[NotFoundException] should ===(true)
}
}
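// Unlike UpdateValueContentV2, UpdateValuePermissionsV2 changes only a value's
// permissions, leaving its content untouched. As the next test shows, this still
// creates a new version of the value with a new IRI.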
"update a value, changing only its permissions" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val permissions = "CR knora-admin:Creator|V knora-admin:KnownUser"
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
val oldValueFromTriplestore: ReadValueV2 = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = None,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = intValueIri.get,
requestingUser = anythingUser1,
checkLastModDateChanged = false
)
responderManager ! UpdateValueRequestV2(
UpdateValuePermissionsV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = intValueIri.get,
valueType = OntologyConstants.KnoraApiV2Complex.IntValue.toSmartIri,
permissions = permissions
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case updateValueResponse: UpdateValueResponseV2 => intValueIri.set(updateValueResponse.valueIri)
}
// Read the value back to check that it was updated correctly.
val updatedValueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = intValueIri.get,
requestingUser = anythingUser1
)
updatedValueFromTriplestore.valueContent should ===(oldValueFromTriplestore.valueContent)
updatedValueFromTriplestore.permissions should ===(permissions)
}
"not update a value, changing only its permissions, if the requesting user does not have ChangeRightsPermission on the value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val permissions = "CR knora-admin:Creator"
responderManager ! UpdateValueRequestV2(
UpdateValuePermissionsV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = intValueIri.get,
valueType = OntologyConstants.KnoraApiV2Complex.IntValue.toSmartIri,
permissions = permissions
),
requestingUser = anythingUser2,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[ForbiddenException] should ===(true)
}
}
"not update a value, changing only its permissions, with syntactically invalid custom permissions" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val permissions = "M knora-admin:Creator,V knora-admin:KnownUser"
responderManager ! UpdateValueRequestV2(
UpdateValuePermissionsV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = intValueIri.get,
valueType = OntologyConstants.KnoraApiV2Complex.IntValue.toSmartIri,
permissions = permissions
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[BadRequestException] should ===(true)
}
}
"not update a value, changing only its permissions, with permissions referring to a nonexistent group" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val permissions = "M knora-admin:Creator|V http://rdfh.ch/groups/0001/nonexistent-group"
responderManager ! UpdateValueRequestV2(
UpdateValuePermissionsV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = intValueIri.get,
valueType = OntologyConstants.KnoraApiV2Complex.IntValue.toSmartIri,
permissions = permissions
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[NotFoundException] should ===(true)
}
}
"not update an integer value, giving it the same value as another integer value of the same property" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val intValue = 1
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = intValueIri.get,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = intValue
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"not update an integer value without changing it" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val intValue = 6
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = intValueIri.get,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = intValue
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
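// The following tests update text values. Updating a text value replaces its standoff
// entirely; when the new standoff contains a StandoffLinkTag (sampleStandoffWithLink
// below), the corresponding standoff link and LinkValue are adjusted accordingly.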
"update a text value (without submitting standoff)" in {
val valueHasString = "This updated comment has no standoff"
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book_comment".toSmartIri
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(zeitglöckleinIri, incunabulaUser)
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = zeitglöckleinIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueIri = zeitglöckleinCommentWithoutStandoffIri.get,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some(valueHasString)
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case updateValueResponse: UpdateValueResponseV2 => zeitglöckleinCommentWithoutStandoffIri.set(updateValueResponse.valueIri)
}
// Read the value back to check that it was updated correctly.
val valueFromTriplestore = getValue(
resourceIri = zeitglöckleinIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = zeitglöckleinCommentWithoutStandoffIri.get,
requestingUser = incunabulaUser
)
valueFromTriplestore.valueContent match {
case savedValue: TextValueContentV2 => assert(savedValue.valueHasString.contains(valueHasString))
case _ => throw AssertionException(s"Expected text value, got $valueFromTriplestore")
}
}
"update a text value (submitting standoff)" in {
val valueHasString = "Comment 1ab"
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book_comment".toSmartIri
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(zeitglöckleinIri, incunabulaUser)
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = zeitglöckleinIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueIri = zeitglöckleinCommentWithStandoffIri.get,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some(valueHasString),
standoff = sampleStandoffWithLink,
mappingIri = Some("http://rdfh.ch/standoff/mappings/StandardMapping"),
mapping = standardMapping
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case updateValueResponse: UpdateValueResponseV2 => zeitglöckleinCommentWithStandoffIri.set(updateValueResponse.valueIri)
}
// Read the value back to check that it was updated correctly.
val valueFromTriplestore = getValue(
resourceIri = zeitglöckleinIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = zeitglöckleinCommentWithStandoffIri.get,
requestingUser = incunabulaUser
)
valueFromTriplestore.valueContent match {
case savedValue: TextValueContentV2 =>
assert(savedValue.valueHasString.contains(valueHasString))
savedValue.standoff should ===(sampleStandoffWithLink)
assert(savedValue.mappingIri.contains("http://rdfh.ch/standoff/mappings/StandardMapping"))
savedValue.mapping should ===(standardMapping)
case _ => throw AssertionException(s"Expected text value, got $valueFromTriplestore")
}
// There should be a link value for a standoff link.
val resource = getResourceWithValues(
resourceIri = zeitglöckleinIri,
propertyIrisForGravsearch = Seq(OntologyConstants.KnoraApiV2Complex.HasStandoffLinkTo.toSmartIri),
requestingUser = incunabulaUser
)
val standoffLinkValues: Seq[ReadValueV2] = getValuesFromResource(
resource = resource,
propertyIriInResult = OntologyConstants.KnoraApiV2Complex.HasStandoffLinkToValue.toSmartIri
)
assert(standoffLinkValues.size == 1)
val standoffLinkValueFromTriplestore = standoffLinkValues.head
standoffLinkValueFromTriplestore.valueContent match {
case linkValueContentV2: LinkValueContentV2 =>
standoffLinkValueIri.set(standoffLinkValueFromTriplestore.valueIri)
assert(linkValueContentV2.referredResourceIri == aThingIri)
case _ => throw AssertionException(s"Expected a link value, got $standoffLinkValueFromTriplestore")
}
}
"not update a text value, duplicating an existing text value (without submitting standoff)" in {
val valueHasString = "this is a text value that has a comment"
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book_comment".toSmartIri
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = zeitglöckleinIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueIri = zeitglöckleinCommentWithoutStandoffIri.get,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some(valueHasString)
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
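// The expectMsgPF / Status.Failure pattern used above recurs in almost every
// negative test in this spec. A hypothetical helper like the following could
// express it once; this is an illustrative sketch, and `expectFailure` is an
// assumed name, not an existing Knora test utility:
def expectFailure[T <: Throwable]()(implicit tag: scala.reflect.ClassTag[T]): Unit =
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => assert(tag.runtimeClass.isInstance(msg.cause))
}
// Usage (hypothetical): expectFailure[DuplicateValueException]()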
"create a second text value with standoff" in {
val valueHasString = "Comment 1ac"
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book_comment".toSmartIri
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(zeitglöckleinIri, incunabulaUser)
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = zeitglöckleinIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some(valueHasString),
standoff = sampleStandoff,
mappingIri = Some("http://rdfh.ch/standoff/mappings/StandardMapping"),
mapping = standardMapping
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 => zeitglöckleinSecondCommentWithStandoffIri.set(createValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = zeitglöckleinIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = zeitglöckleinSecondCommentWithStandoffIri.get,
requestingUser = incunabulaUser
)
valueFromTriplestore.valueContent match {
case savedValue: TextValueContentV2 =>
assert(savedValue.valueHasString.contains(valueHasString))
savedValue.standoff should ===(sampleStandoff)
assert(savedValue.mappingIri.contains("http://rdfh.ch/standoff/mappings/StandardMapping"))
savedValue.mapping should ===(standardMapping)
case _ => throw AssertionException(s"Expected text value, got $valueFromTriplestore")
}
}
"not update a text value, duplicating an existing text value (submitting standoff)" in {
val valueHasString = "Comment 1ac"
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book_comment".toSmartIri
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = zeitglöckleinIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueIri = zeitglöckleinCommentWithStandoffIri.get,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some(valueHasString),
standoff = sampleStandoff,
mappingIri = Some("http://rdfh.ch/standoff/mappings/StandardMapping"),
mapping = standardMapping
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"update a text value, changing only the standoff" in {
val valueHasString = "Comment 1ac"
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book_comment".toSmartIri
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(zeitglöckleinIri, incunabulaUser)
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = zeitglöckleinIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueIri = zeitglöckleinSecondCommentWithStandoffIri.get,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some(valueHasString),
standoff = sampleStandoffModified,
mappingIri = Some("http://rdfh.ch/standoff/mappings/StandardMapping"),
mapping = standardMapping
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case updateValueResponse: UpdateValueResponseV2 => zeitglöckleinSecondCommentWithStandoffIri.set(updateValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = zeitglöckleinIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = zeitglöckleinSecondCommentWithStandoffIri.get,
requestingUser = incunabulaUser
)
valueFromTriplestore.valueContent match {
case savedValue: TextValueContentV2 =>
assert(savedValue.valueHasString.contains(valueHasString))
savedValue.standoff should ===(sampleStandoffModified)
assert(savedValue.mappingIri.contains("http://rdfh.ch/standoff/mappings/StandardMapping"))
savedValue.mapping should ===(standardMapping)
case _ => throw AssertionException(s"Expected text value, got $valueFromTriplestore")
}
}
"not update a text value so it differs only from an existing value in that it has different standoff" in {
val valueHasString = "Comment 1ac"
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book_comment".toSmartIri
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = zeitglöckleinIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueIri = zeitglöckleinCommentWithStandoffIri.get,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some(valueHasString),
standoff = sampleStandoffModified,
mappingIri = Some("http://rdfh.ch/standoff/mappings/StandardMapping"),
mapping = standardMapping
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
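// Taken together, the preceding tests show that standoff markup is part of a
// text value's identity: changing only the standoff of a value is a genuine
// update (it produces a new version), while submitting text and standoff that
// match another existing value is rejected as a duplicate.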
"not update a text value without changing it (without submitting standoff)" in {
val valueHasString = "This updated comment has no standoff"
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book_comment".toSmartIri
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = zeitglöckleinIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueIri = zeitglöckleinCommentWithoutStandoffIri.get,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some(valueHasString)
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"update a decimal value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasDecimal".toSmartIri
val valueHasDecimal = BigDecimal("3.1415926")
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = decimalValueIri.get,
valueContent = DecimalValueContentV2(
ontologySchema = ApiV2Complex,
valueHasDecimal = valueHasDecimal
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case updateValueResponse: UpdateValueResponseV2 => decimalValueIri.set(updateValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = decimalValueIri.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: DecimalValueContentV2 => savedValue.valueHasDecimal should ===(valueHasDecimal)
case _ => throw AssertionException(s"Expected decimal value, got $valueFromTriplestore")
}
}
"not update a decimal value without changing it" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasDecimal".toSmartIri
val valueHasDecimal = BigDecimal("3.1415926")
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = decimalValueIri.get,
valueContent = DecimalValueContentV2(
ontologySchema = ApiV2Complex,
valueHasDecimal = valueHasDecimal
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"update a date value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasDate".toSmartIri
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
val submittedValueContent = DateValueContentV2(
ontologySchema = ApiV2Complex,
valueHasCalendar = CalendarNameGregorian,
valueHasStartJDN = 2264908,
valueHasStartPrecision = DatePrecisionYear,
valueHasEndJDN = 2265272,
valueHasEndPrecision = DatePrecisionYear
)
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = dateValueIri.get,
valueContent = submittedValueContent
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case updateValueResponse: UpdateValueResponseV2 => dateValueIri.set(updateValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = dateValueIri.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: DateValueContentV2 =>
savedValue.valueHasCalendar should ===(submittedValueContent.valueHasCalendar)
savedValue.valueHasStartJDN should ===(submittedValueContent.valueHasStartJDN)
savedValue.valueHasStartPrecision should ===(submittedValueContent.valueHasStartPrecision)
savedValue.valueHasEndJDN should ===(submittedValueContent.valueHasEndJDN)
savedValue.valueHasEndPrecision should ===(submittedValueContent.valueHasEndPrecision)
case _ => throw AssertionException(s"Expected date value, got $valueFromTriplestore")
}
}
"not update a date value without changing it" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasDate".toSmartIri
val submittedValueContent = DateValueContentV2(
ontologySchema = ApiV2Complex,
valueHasCalendar = CalendarNameGregorian,
valueHasStartJDN = 2264908,
valueHasStartPrecision = DatePrecisionYear,
valueHasEndJDN = 2265272,
valueHasEndPrecision = DatePrecisionYear
)
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = dateValueIri.get,
valueContent = submittedValueContent
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
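// Background, visible in the DateValueContentV2 fields above: Knora models a
// date as a calendar name, a start/end precision, and a range of Julian Day
// Numbers (JDN). For reference, a JDN can be derived from a proleptic
// Gregorian date with java.time; this helper is an illustrative sketch and
// not part of the original spec:
def gregorianDateToJdn(year: Int, month: Int, day: Int): Long =
java.time.LocalDate.of(year, month, day).getLong(java.time.temporal.JulianFields.JULIAN_DAY)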
"update a boolean value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasBoolean".toSmartIri
val valueHasBoolean = false
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = booleanValueIri.get,
valueContent = BooleanValueContentV2(
ontologySchema = ApiV2Complex,
valueHasBoolean = valueHasBoolean
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case updateValueResponse: UpdateValueResponseV2 => booleanValueIri.set(updateValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = booleanValueIri.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: BooleanValueContentV2 => savedValue.valueHasBoolean should ===(valueHasBoolean)
case _ => throw AssertionException(s"Expected boolean value, got $valueFromTriplestore")
}
}
"not update a boolean value without changing it" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasBoolean".toSmartIri
val valueHasBoolean = false
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = booleanValueIri.get,
valueContent = BooleanValueContentV2(
ontologySchema = ApiV2Complex,
valueHasBoolean = valueHasBoolean
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"update a geometry value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasGeometry".toSmartIri
val valueHasGeometry = """{"status":"active","lineColor":"#ff3334","lineWidth":2,"points":[{"x":0.08098591549295775,"y":0.16741071428571427},{"x":0.7394366197183099,"y":0.7299107142857143}],"type":"rectangle","original_index":0}"""
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = geometryValueIri.get,
valueContent = GeomValueContentV2(
ontologySchema = ApiV2Complex,
valueHasGeometry = valueHasGeometry
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case updateValueResponse: UpdateValueResponseV2 => geometryValueIri.set(updateValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = geometryValueIri.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: GeomValueContentV2 => savedValue.valueHasGeometry should ===(valueHasGeometry)
case _ => throw AssertionException(s"Expected geometry value, got $valueFromTriplestore")
}
}
"not update a geometry value without changing it" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasGeometry".toSmartIri
val valueHasGeometry = """{"status":"active","lineColor":"#ff3334","lineWidth":2,"points":[{"x":0.08098591549295775,"y":0.16741071428571427},{"x":0.7394366197183099,"y":0.7299107142857143}],"type":"rectangle","original_index":0}"""
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = geometryValueIri.get,
valueContent = GeomValueContentV2(
ontologySchema = ApiV2Complex,
valueHasGeometry = valueHasGeometry
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
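// Note: valueHasGeometry is stored and compared as an opaque JSON string (a
// byte-for-byte resubmission is rejected as a duplicate above). The x/y
// values in the sample, all between 0 and 1, suggest coordinates normalized
// to the image dimensions; that reading is an assumption, since these tests
// do not interpret the format.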
"update an interval value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInterval".toSmartIri
val valueHasIntervalStart = BigDecimal("1.23")
val valueHasIntervalEnd = BigDecimal("3.45")
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = intervalValueIri.get,
valueContent = IntervalValueContentV2(
ontologySchema = ApiV2Complex,
valueHasIntervalStart = valueHasIntervalStart,
valueHasIntervalEnd = valueHasIntervalEnd
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case updateValueResponse: UpdateValueResponseV2 => intervalValueIri.set(updateValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = intervalValueIri.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: IntervalValueContentV2 =>
savedValue.valueHasIntervalStart should ===(valueHasIntervalStart)
savedValue.valueHasIntervalEnd should ===(valueHasIntervalEnd)
case _ => throw AssertionException(s"Expected interval value, got $valueFromTriplestore")
}
}
"not update an interval value without changing it" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInterval".toSmartIri
val valueHasIntervalStart = BigDecimal("1.23")
val valueHasIntervalEnd = BigDecimal("3.45")
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = intervalValueIri.get,
valueContent = IntervalValueContentV2(
ontologySchema = ApiV2Complex,
valueHasIntervalStart = valueHasIntervalStart,
valueHasIntervalEnd = valueHasIntervalEnd
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"update a list value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasListItem".toSmartIri
val valueHasListNode = "http://rdfh.ch/lists/0001/treeList02"
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = listValueIri.get,
valueContent = HierarchicalListValueContentV2(
ontologySchema = ApiV2Complex,
valueHasListNode = valueHasListNode
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case updateValueResponse: UpdateValueResponseV2 => listValueIri.set(updateValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = listValueIri.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: HierarchicalListValueContentV2 =>
savedValue.valueHasListNode should ===(valueHasListNode)
case _ => throw AssertionException(s"Expected list value, got $valueFromTriplestore")
}
}
"not update a list value without changing it" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasListItem".toSmartIri
val valueHasListNode = "http://rdfh.ch/lists/0001/treeList02"
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = listValueIri.get,
valueContent = HierarchicalListValueContentV2(
ontologySchema = ApiV2Complex,
valueHasListNode = valueHasListNode
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"not update a list value with the IRI of a nonexistent list node" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasListItem".toSmartIri
val valueHasListNode = "http://rdfh.ch/lists/0001/nonexistent"
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = listValueIri.get,
valueContent = HierarchicalListValueContentV2(
ontologySchema = ApiV2Complex,
valueHasListNode = valueHasListNode
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[NotFoundException] should ===(true)
}
}
"update a color value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasColor".toSmartIri
val valueHasColor = "#ff3334"
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = colorValueIri.get,
valueContent = ColorValueContentV2(
ontologySchema = ApiV2Complex,
valueHasColor = valueHasColor
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case updateValueResponse: UpdateValueResponseV2 => colorValueIri.set(updateValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = colorValueIri.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: ColorValueContentV2 =>
savedValue.valueHasColor should ===(valueHasColor)
case _ => throw AssertionException(s"Expected color value, got $valueFromTriplestore")
}
}
"not update a color value without changing it" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasColor".toSmartIri
val valueHasColor = "#ff3334"
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = colorValueIri.get,
valueContent = ColorValueContentV2(
ontologySchema = ApiV2Complex,
valueHasColor = valueHasColor
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"update a URI value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasUri".toSmartIri
val valueHasUri = "https://en.wikipedia.org"
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = uriValueIri.get,
valueContent = UriValueContentV2(
ontologySchema = ApiV2Complex,
valueHasUri = valueHasUri
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case updateValueResponse: UpdateValueResponseV2 => uriValueIri.set(updateValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = uriValueIri.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: UriValueContentV2 =>
savedValue.valueHasUri should ===(valueHasUri)
case _ => throw AssertionException(s"Expected URI value, got $valueFromTriplestore")
}
}
"not update a URI value without changing it" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasUri".toSmartIri
val valueHasUri = "https://en.wikipedia.org"
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = uriValueIri.get,
valueContent = UriValueContentV2(
ontologySchema = ApiV2Complex,
valueHasUri = valueHasUri
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"update a geoname value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasGeoname".toSmartIri
val valueHasGeonameCode = "2988507"
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = geonameValueIri.get,
valueContent = GeonameValueContentV2(
ontologySchema = ApiV2Complex,
valueHasGeonameCode = valueHasGeonameCode
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case updateValueResponse: UpdateValueResponseV2 => geonameValueIri.set(updateValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = geonameValueIri.get,
requestingUser = anythingUser1
)
valueFromTriplestore.valueContent match {
case savedValue: GeonameValueContentV2 =>
savedValue.valueHasGeonameCode should ===(valueHasGeonameCode)
case _ => throw AssertionException(s"Expected GeoNames value, got $valueFromTriplestore")
}
}
"not update a geoname value without changing it" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasGeoname".toSmartIri
val valueHasGeonameCode = "2988507"
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = geonameValueIri.get,
valueContent = GeonameValueContentV2(
ontologySchema = ApiV2Complex,
valueHasGeonameCode = valueHasGeonameCode
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
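// Note: valueHasGeonameCode is an identifier from geonames.org; "2988507"
// resolves to https://www.geonames.org/2988507 (Paris). Only the code itself
// is stored and compared, which is why resubmitting it is a duplicate.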
"update a link between two resources" in {
val resourceIri: IRI = "http://rdfh.ch/0803/cb1a74e3e2f6"
val linkPropertyIri: SmartIri = OntologyConstants.KnoraApiV2Complex.HasLinkTo.toSmartIri
val linkValuePropertyIri: SmartIri = OntologyConstants.KnoraApiV2Complex.HasLinkToValue.toSmartIri
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, incunabulaUser)
val updateValueRequest = UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = OntologyConstants.KnoraApiV2Complex.LinkObj.toSmartIri,
propertyIri = linkValuePropertyIri,
valueIri = linkValueIri.get,
valueContent = LinkValueContentV2(
ontologySchema = ApiV2Complex,
referredResourceIri = generationeIri
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
responderManager ! updateValueRequest
expectMsgPF(timeout) {
case updateValueResponse: UpdateValueResponseV2 => linkValueIri.set(updateValueResponse.valueIri)
}
val valueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = linkPropertyIri,
propertyIriInResult = linkValuePropertyIri,
expectedValueIri = linkValueIri.get,
requestingUser = incunabulaUser
)
valueFromTriplestore match {
case readLinkValueV2: ReadLinkValueV2 =>
readLinkValueV2.valueContent.referredResourceIri should ===(generationeIri)
readLinkValueV2.valueHasRefCount should ===(1)
case _ => throw AssertionException(s"Expected link value, got $valueFromTriplestore")
}
}
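// Note on valueHasRefCount: a LinkValue records how many times its link is
// referenced. A direct link like this one always has a reference count of 1;
// standoff links are shared, so their count grows with each referencing text
// value (it reaches 2 in the UUID-handling test at the end of this spec).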
"not update a link without changing it" in {
val resourceIri: IRI = "http://rdfh.ch/0803/cb1a74e3e2f6"
val linkValuePropertyIri: SmartIri = OntologyConstants.KnoraApiV2Complex.HasLinkToValue.toSmartIri
val updateValueRequest = UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = OntologyConstants.KnoraApiV2Complex.LinkObj.toSmartIri,
propertyIri = linkValuePropertyIri,
valueIri = linkValueIri.get,
valueContent = LinkValueContentV2(
ontologySchema = ApiV2Complex,
referredResourceIri = generationeIri
)
),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
responderManager ! updateValueRequest
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"not update a standoff link directly" in {
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = zeitglöckleinIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = OntologyConstants.KnoraApiV2Complex.HasStandoffLinkToValue.toSmartIri,
valueIri = zeitglöckleinCommentWithStandoffIri.get,
valueContent = LinkValueContentV2(
ontologySchema = ApiV2Complex,
referredResourceIri = generationeIri
)
),
requestingUser = SharedTestDataADM.superUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[BadRequestException] should ===(true)
}
}
"not update a still image file value without changing it" in {
val resourceIri: IRI = aThingPictureIri
stillImageFileValueIri.set("http://rdfh.ch/0001/a-thing-picture/values/goZ7JFRNSeqF-dNxsqAS7Q")
val valueContent = StillImageFileValueContentV2(
ontologySchema = ApiV2Complex,
fileValue = FileValueV2(
internalFilename = "B1D0OkEgfFp-Cew2Seur7Wi.jp2",
internalMimeType = "image/jp2",
originalFilename = Some("test.tiff"),
originalMimeType = Some("image/tiff")
),
dimX = 512,
dimY = 256
)
val updateValueRequest = UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#ThingPicture".toSmartIri,
propertyIri = OntologyConstants.KnoraApiV2Complex.HasStillImageFileValue.toSmartIri,
valueIri = stillImageFileValueIri.get,
valueContent = valueContent
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
responderManager ! updateValueRequest
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[DuplicateValueException] should ===(true)
}
}
"update a still image file value" in {
val resourceIri: IRI = aThingPictureIri
val propertyIri: SmartIri = OntologyConstants.KnoraApiV2Complex.HasStillImageFileValue.toSmartIri
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
// Get the value before update.
val previousValueFromTriplestore: ReadValueV2 = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = stillImageFileValueIri.get,
requestingUser = anythingUser1,
checkLastModDateChanged = false
)
// Update the value.
val valueContent = StillImageFileValueContentV2(
ontologySchema = ApiV2Complex,
fileValue = FileValueV2(
internalFilename = "updated-filename.jp2",
internalMimeType = "image/jp2",
originalFilename = Some("test.tiff"),
originalMimeType = Some("image/tiff")
),
dimX = 512,
dimY = 256
)
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#ThingPicture".toSmartIri,
propertyIri = propertyIri,
valueIri = stillImageFileValueIri.get,
valueContent = valueContent
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case updateValueResponse: UpdateValueResponseV2 => stillImageFileValueIri.set(updateValueResponse.valueIri)
}
// Read the value back to check that it was added correctly.
val updatedValueFromTriplestore = getValue(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
expectedValueIri = stillImageFileValueIri.get,
requestingUser = anythingUser1
)
updatedValueFromTriplestore.valueContent match {
case savedValue: StillImageFileValueContentV2 =>
savedValue should ===(valueContent)
updatedValueFromTriplestore.permissions should ===(previousValueFromTriplestore.permissions)
case _ => throw AssertionException(s"Expected still image file value, got $updatedValueFromTriplestore")
}
}
"not return a Sipi error if Sipi fails to delete a temporary file when Knora rejects a request" in {
val resourceIri: IRI = aThingPictureIri
val propertyIri: SmartIri = OntologyConstants.KnoraApiV2Complex.HasStillImageFileValue.toSmartIri
val valueContent = StillImageFileValueContentV2(
ontologySchema = ApiV2Complex,
fileValue = FileValueV2(
internalFilename = MockSipiConnector.FAILURE_FILENAME, // tells the mock Sipi responder to simulate failure
internalMimeType = "image/jp2",
originalFilename = Some("test.tiff"),
originalMimeType = Some("image/tiff")
),
dimX = 512,
dimY = 256
)
// Knora will reject this request.
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#ThingPicture".toSmartIri,
propertyIri = propertyIri,
valueIri = stillImageFileValueIri.get,
valueContent = valueContent
),
requestingUser = incunabulaUser, // this user doesn't have the necessary permission
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure =>
msg.cause.isInstanceOf[ForbiddenException] should ===(true)
}
}
"return a Sipi error if Sipi fails to move a file to permanent storage" in {
val resourceIri: IRI = aThingPictureIri
val propertyIri: SmartIri = OntologyConstants.KnoraApiV2Complex.HasStillImageFileValue.toSmartIri
val valueContent = StillImageFileValueContentV2(
ontologySchema = ApiV2Complex,
fileValue = FileValueV2(
internalFilename = MockSipiConnector.FAILURE_FILENAME, // tells the mock Sipi responder to simulate failure
internalMimeType = "image/jp2",
originalFilename = Some("test.tiff"),
originalMimeType = Some("image/tiff")
),
dimX = 512,
dimY = 256
)
// Knora will accept this request, but the mock Sipi responder will say it failed to move the file to permanent storage.
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#ThingPicture".toSmartIri,
propertyIri = propertyIri,
valueIri = stillImageFileValueIri.get,
valueContent = valueContent
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure =>
msg.cause.isInstanceOf[SipiException] should ===(true)
}
}
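// The two Sipi tests above use the same MockSipiConnector.FAILURE_FILENAME
// but expect different errors: when Knora itself rejects the request (here,
// for a missing permission), the simulated Sipi cleanup failure is
// deliberately not surfaced to the client; only when the request is otherwise
// valid does the failure to move the file to permanent storage come back as
// a SipiException.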
"not delete a value if the requesting user does not have DeletePermission on the value" in {
val resourceIri: IRI = aThingIri
// #toSmartIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
// #toSmartIri
responderManager ! DeleteValueRequestV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = intValueIri.get,
valueTypeIri = OntologyConstants.KnoraApiV2Complex.IntValue.toSmartIri,
deleteComment = Some("this value was incorrect"),
requestingUser = anythingUser2,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[ForbiddenException] should ===(true)
}
}
"delete an integer value" in {
val resourceIri: IRI = aThingIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasInteger".toSmartIri
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, anythingUser1)
responderManager ! DeleteValueRequestV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
propertyIri = propertyIri,
valueIri = intValueIri.get,
valueTypeIri = OntologyConstants.KnoraApiV2Complex.IntValue.toSmartIri,
deleteComment = Some("this value was incorrect"),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
expectMsgType[SuccessResponseV2](timeout)
checkValueIsDeleted(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
valueIri = intValueIri.get,
requestingUser = anythingUser1
)
}
"not delete a standoff link directly" in {
responderManager ! DeleteValueRequestV2(
resourceIri = zeitglöckleinIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = OntologyConstants.KnoraApiV2Complex.HasStandoffLinkToValue.toSmartIri,
valueIri = standoffLinkValueIri.get,
valueTypeIri = OntologyConstants.KnoraApiV2Complex.LinkValue.toSmartIri,
requestingUser = SharedTestDataADM.superUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[BadRequestException] should ===(true)
}
}
"delete a text value with a standoff link" in {
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book_comment".toSmartIri
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(zeitglöckleinIri, incunabulaUser)
responderManager ! DeleteValueRequestV2(
resourceIri = zeitglöckleinIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueIri = zeitglöckleinCommentWithStandoffIri.get,
valueTypeIri = OntologyConstants.KnoraApiV2Complex.TextValue.toSmartIri,
deleteComment = Some("this value was incorrect"),
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgType[SuccessResponseV2](timeout)
checkValueIsDeleted(
resourceIri = zeitglöckleinIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = propertyIri,
propertyIriInResult = propertyIri,
valueIri = zeitglöckleinCommentWithStandoffIri.get,
requestingUser = incunabulaUser
)
// There should be no standoff link values left in the resource.
val resource = getResourceWithValues(
resourceIri = zeitglöckleinIri,
propertyIrisForGravsearch = Seq(OntologyConstants.KnoraApiV2Complex.HasStandoffLinkTo.toSmartIri),
requestingUser = incunabulaUser
)
assert(resource.values.get(OntologyConstants.KnoraApiV2Complex.HasStandoffLinkToValue.toSmartIri).isEmpty)
}
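// Deleting the only text value that referenced aThingIri via standoff also
// removed the resource's standoff link value (its reference count dropped to
// zero); standoff link values are managed automatically, which is why the
// previous test could not delete one directly.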
"delete a link between two resources" in {
val resourceIri: IRI = "http://rdfh.ch/0803/cb1a74e3e2f6"
val linkPropertyIri: SmartIri = OntologyConstants.KnoraApiV2Complex.HasLinkTo.toSmartIri
val linkValuePropertyIri: SmartIri = OntologyConstants.KnoraApiV2Complex.HasLinkToValue.toSmartIri
val maybeResourceLastModDate: Option[Instant] = getResourceLastModificationDate(resourceIri, incunabulaUser)
responderManager ! DeleteValueRequestV2(
resourceIri = resourceIri,
resourceClassIri = OntologyConstants.KnoraApiV2Complex.LinkObj.toSmartIri,
propertyIri = linkValuePropertyIri,
valueIri = linkValueIri.get,
valueTypeIri = OntologyConstants.KnoraApiV2Complex.LinkValue.toSmartIri,
requestingUser = incunabulaUser,
apiRequestID = UUID.randomUUID
)
expectMsgType[SuccessResponseV2](timeout)
checkValueIsDeleted(
resourceIri = resourceIri,
maybePreviousLastModDate = maybeResourceLastModDate,
propertyIriForGravsearch = linkPropertyIri,
propertyIriInResult = linkValuePropertyIri,
valueIri = linkValueIri.get,
requestingUser = incunabulaUser
)
}
"not delete a value if the property's cardinality doesn't allow it" in {
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#title".toSmartIri
responderManager ! DeleteValueRequestV2(
resourceIri = zeitglöckleinIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/0803/incunabula/v2#book".toSmartIri,
propertyIri = propertyIri,
valueIri = "http://rdfh.ch/0803/c5058f3a/values/c3295339",
valueTypeIri = OntologyConstants.KnoraApiV2Complex.TextValue.toSmartIri,
requestingUser = incunabulaCreatorUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure => msg.cause.isInstanceOf[OntologyConstraintException] should ===(true)
}
}
"not accept custom value permissions that would give the requesting user a higher permission on a value than the default" in {
val resourceIri: IRI = stringFormatter.makeRandomResourceIri(SharedTestDataADM.imagesProject.shortcode)
val inputResource = CreateResourceV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/00FF/images/v2#bildformat".toSmartIri,
label = "test bildformat",
values = Map.empty,
projectADM = SharedTestDataADM.imagesProject,
permissions = Some("M knora-admin:ProjectMember")
)
responderManager ! CreateResourceRequestV2(
createResource = inputResource,
requestingUser = SharedTestDataADM.imagesUser01,
apiRequestID = UUID.randomUUID
)
expectMsgClass(timeout, classOf[ReadResourcesSequenceV2])
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/00FF/images/v2#stueckzahl".toSmartIri
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/00FF/images/v2#bildformat".toSmartIri,
propertyIri = propertyIri,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = 5,
comment = Some("this is the number five")
),
permissions = Some("CR knora-admin:Creator")
),
requestingUser = SharedTestDataADM.imagesReviewerUser,
apiRequestID = UUID.randomUUID
)
expectMsgPF(timeout) {
case msg: akka.actor.Status.Failure =>
msg.cause.isInstanceOf[ForbiddenException] should ===(true)
}
}
"accept custom value permissions that would give the requesting user a higher permission on a value than the default if the user is a system admin" in {
val resourceIri: IRI = stringFormatter.makeRandomResourceIri(SharedTestDataADM.imagesProject.shortcode)
val inputResource = CreateResourceV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/00FF/images/v2#bildformat".toSmartIri,
label = "test bildformat",
values = Map.empty,
projectADM = SharedTestDataADM.imagesProject,
permissions = Some("M knora-admin:ProjectMember")
)
responderManager ! CreateResourceRequestV2(
createResource = inputResource,
requestingUser = SharedTestDataADM.imagesUser01,
apiRequestID = UUID.randomUUID
)
expectMsgClass(timeout, classOf[ReadResourcesSequenceV2])
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/00FF/images/v2#stueckzahl".toSmartIri
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/00FF/images/v2#bildformat".toSmartIri,
propertyIri = propertyIri,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = 5,
comment = Some("this is the number five")
),
permissions = Some("CR knora-admin:Creator")
),
requestingUser = SharedTestDataADM.rootUser,
apiRequestID = UUID.randomUUID
)
expectMsgClass(classOf[CreateValueResponseV2])
}
"accept custom value permissions that would give the requesting user a higher permission on a value than the default if the user is a project admin" in {
val resourceIri: IRI = stringFormatter.makeRandomResourceIri(SharedTestDataADM.imagesProject.shortcode)
val inputResource = CreateResourceV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/00FF/images/v2#bildformat".toSmartIri,
label = "test bildformat",
values = Map.empty,
projectADM = SharedTestDataADM.imagesProject,
permissions = Some("M knora-admin:ProjectMember")
)
responderManager ! CreateResourceRequestV2(
createResource = inputResource,
requestingUser = SharedTestDataADM.imagesUser01,
apiRequestID = UUID.randomUUID
)
expectMsgClass(timeout, classOf[ReadResourcesSequenceV2])
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/00FF/images/v2#stueckzahl".toSmartIri
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = resourceIri,
resourceClassIri = "http://0.0.0.0:3333/ontology/00FF/images/v2#bildformat".toSmartIri,
propertyIri = propertyIri,
valueContent = IntegerValueContentV2(
ontologySchema = ApiV2Complex,
valueHasInteger = 5,
comment = Some("this is the number five")
),
permissions = Some("CR knora-admin:Creator")
),
requestingUser = SharedTestDataADM.imagesUser01,
apiRequestID = UUID.randomUUID
)
expectMsgClass(classOf[CreateValueResponseV2])
}
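// The three tests above pin down the rule for custom value permissions: a
// requesting user may not grant themselves a higher permission on a value
// than their default would allow (ForbiddenException), unless they are a
// system admin or an admin of the project the resource belongs to.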
"create and update text values with standoff links, managing value UUIDs correctly" in {
val resourceClassIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri
val propertyIri: SmartIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#hasRichtext".toSmartIri
// Create a text value with a standoff link.
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = sierraIri,
resourceClassIri = resourceClassIri,
propertyIri = propertyIri,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some("Comment 1 for UUID checking"),
standoff = sampleStandoffWithLink,
mappingIri = Some("http://rdfh.ch/standoff/mappings/StandardMapping"),
mapping = standardMapping
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
val textValue1Iri: IRI = expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 => createValueResponse.valueIri
}
val resourceVersion1: ReadResourceV2 = getResourceWithValues(
resourceIri = sierraIri,
propertyIrisForGravsearch = Seq(
propertyIri,
OntologyConstants.KnoraApiV2Complex.HasStandoffLinkTo.toSmartIri
),
requestingUser = anythingUser1
)
// Get the UUIDs of the text value and of the standoff link value.
val textValue1: ReadValueV2 = resourceVersion1.values(propertyIri).head
assert(textValue1.valueIri == textValue1Iri)
assert(getValueUUID(textValue1.valueIri).contains(textValue1.valueHasUUID))
val standoffLinkValueVersion1: ReadLinkValueV2 = resourceVersion1.values(OntologyConstants.KnoraApiV2Complex.HasStandoffLinkToValue.toSmartIri).head.asInstanceOf[ReadLinkValueV2]
assert(getValueUUID(standoffLinkValueVersion1.valueIri).contains(standoffLinkValueVersion1.valueHasUUID))
assert(standoffLinkValueVersion1.valueHasRefCount == 1)
// Create a second text value with the same standoff link.
responderManager ! CreateValueRequestV2(
CreateValueV2(
resourceIri = sierraIri,
resourceClassIri = resourceClassIri,
propertyIri = propertyIri,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some("Comment 2 for UUID checking"),
standoff = sampleStandoffWithLink,
mappingIri = Some("http://rdfh.ch/standoff/mappings/StandardMapping"),
mapping = standardMapping
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
val textValue2Version1Iri: IRI = expectMsgPF(timeout) {
case createValueResponse: CreateValueResponseV2 => createValueResponse.valueIri
}
val resourceVersion2: ReadResourceV2 = getResourceWithValues(
resourceIri = sierraIri,
propertyIrisForGravsearch = Seq(
propertyIri,
OntologyConstants.KnoraApiV2Complex.HasStandoffLinkTo.toSmartIri
),
requestingUser = anythingUser1
)
// Get the second text value's UUID.
val textValue2Version1: ReadValueV2 = resourceVersion2.values(propertyIri).find(_.valueIri == textValue2Version1Iri).getOrElse(throw AssertionException("Value not found"))
assert(getValueUUID(textValue2Version1.valueIri).contains(textValue2Version1.valueHasUUID))
// We should have a new version of the standoff link value, containing the UUID that was in the previous version.
val standoffLinkValueVersion2: ReadLinkValueV2 = resourceVersion2.values(OntologyConstants.KnoraApiV2Complex.HasStandoffLinkToValue.toSmartIri).head.asInstanceOf[ReadLinkValueV2]
assert(standoffLinkValueVersion2.previousValueIri.contains(standoffLinkValueVersion1.valueIri))
assert(standoffLinkValueVersion2.valueHasUUID == standoffLinkValueVersion1.valueHasUUID)
assert(getValueUUID(standoffLinkValueVersion2.valueIri).contains(standoffLinkValueVersion2.valueHasUUID))
assert(standoffLinkValueVersion2.valueHasRefCount == 2)
// The previous version of the standoff link value should have no UUID.
assert(getValueUUID(standoffLinkValueVersion1.valueIri).isEmpty)
// Update the second text value.
responderManager ! UpdateValueRequestV2(
UpdateValueContentV2(
resourceIri = sierraIri,
resourceClassIri = resourceClassIri,
propertyIri = propertyIri,
valueIri = textValue2Version1Iri,
valueContent = TextValueContentV2(
ontologySchema = ApiV2Complex,
maybeValueHasString = Some("Comment 3 for UUID checking"),
standoff = sampleStandoffWithLink,
mappingIri = Some("http://rdfh.ch/standoff/mappings/StandardMapping"),
mapping = standardMapping
)
),
requestingUser = anythingUser1,
apiRequestID = UUID.randomUUID
)
val textValue2Version2Iri = expectMsgPF(timeout) {
case updateValueResponse: UpdateValueResponseV2 => updateValueResponse.valueIri
}
val resourceVersion3: ReadResourceV2 = getResourceWithValues(
resourceIri = sierraIri,
propertyIrisForGravsearch = Seq(
propertyIri,
OntologyConstants.KnoraApiV2Complex.HasStandoffLinkTo.toSmartIri
),
requestingUser = anythingUser1
)
// We should now have a new version of the second text value, containing the UUID that was in the previous version.
val textValue2Version2: ReadValueV2 = resourceVersion3.values(propertyIri).find(_.valueIri == textValue2Version2Iri).getOrElse(throw AssertionException("Value not found"))
assert(getValueUUID(textValue2Version2.valueIri).contains(textValue2Version2.valueHasUUID))
assert(textValue2Version2.previousValueIri.contains(textValue2Version1.valueIri))
// The previous version of the second text value should have no UUID.
assert(getValueUUID(textValue2Version1.valueIri).isEmpty)
// We should not have a new version of the standoff link value.
assert(resourceVersion3.values(OntologyConstants.KnoraApiV2Complex.HasStandoffLinkToValue.toSmartIri).head.valueIri == standoffLinkValueVersion2.valueIri)
}
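// The assertions above pin down how value UUIDs are versioned: every value
// keeps one stable UUID across its versions, only the newest version carries
// the UUID in the triplestore (getValueUUID on an older version returns
// nothing), versions are chained via previousValueIri, and a shared standoff
// link value is only re-versioned when its reference count actually changes.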
}
}
|
musicEnfanthen/Knora
|
webapi/src/test/scala/org/knora/webapi/responders/v2/ValuesResponderV2Spec.scala
|
Scala
|
agpl-3.0
| 180,173
|
package com.wlangiewicz.workouttracker.dao
import com.github.nscala_time.time.Imports._
import com.wlangiewicz.workouttracker.WorkoutTrackerSpec
import com.wlangiewicz.workouttracker.domain._
import org.scalatest.BeforeAndAfter
class WorkoutDaoSpec extends WorkoutTrackerSpec with BeforeAndAfter {
before {
def cleanupWorkoutDao() = {
workoutDao.workouts = Set()
val testingWorkouts = Set(
Workout(UserId(1), WorkoutId(1), "morning run", 10000, 3700, new DateTime(2016, 2, 9, 11, 0, 0, 0)),
Workout(UserId(1), WorkoutId(2), "evening run", 10000, 3650, new DateTime(2016, 2, 9, 12, 0, 0, 0)),
Workout(UserId(1), WorkoutId(3), "morning run 2", 10000, 3600, new DateTime(2016, 2, 10, 12, 0, 0, 0)),
Workout(UserId(1), WorkoutId(4), "evening run 3", 10000, 3550, new DateTime(2016, 2, 15, 12, 0, 0, 0))
)
testingWorkouts.foreach(workoutDao.add)
}
cleanupWorkoutDao()
}
"WorkoutDao" should "find all workouts by given user" in {
val workoutFound = workoutDao.findAllByUser(testingUser.userId).find(_.workoutId == WorkoutId(1)).get
workoutFound.workoutId shouldBe WorkoutId(1)
workoutFound.userId shouldBe UserId(1)
workoutDao.findAllByUser(testingUser.userId).size shouldBe 4
}
it should "find workouts in given date range" in {
val date = new DateTime(2016, 2, 9, 11, 0, 0, 0)
val rangeStart = date - 10.years
val rangeEnd = date + 10.years
workoutDao.findInDateRangeByUser(testingUser.userId, rangeStart, rangeEnd).size shouldBe 4
}
it should "filter out workouts out of date range" in {
val date = new DateTime(2016, 2, 9, 11, 0, 0, 0)
val rangeStart = date
val rangeEnd = date + 1.minute
workoutDao.findInDateRangeByUser(testingUser.userId, rangeStart, rangeEnd).size shouldBe 1
}
it should "remove workouts when requested" in {
workoutDao.findAllByUser(testingUser.userId).size shouldBe 4
workoutDao.deleteWorkout(WorkoutId(1))
workoutDao.findAllByUser(testingUser.userId).size shouldBe 3
}
it should "edit workouts when requested" in {
val oldWorkout = workoutDao.findAllByUser(testingUser.userId).find(_.workoutId == WorkoutId(1)).get
workoutDao.editWorkout(oldWorkout.copy(distanceMeters = 5555))
val updatedWorkout = workoutDao.findAllByUser(testingUser.userId).find(_.workoutId == WorkoutId(1)).get
updatedWorkout.distanceMeters shouldBe 5555
}
it should "find workouts grouped by week" in {
val workoutsGrouped = workoutDao.findAllByUserGroupedWeekly(testingUser.userId)
workoutsGrouped.size shouldBe 2
workoutsGrouped("2016-6").size shouldBe 3
workoutsGrouped("2016-6").find(_.workoutId == WorkoutId(2)).get.name shouldBe "evening run"
}
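// The grouping key "2016-6" combines the weekyear with the week of the year.
// A minimal sketch of how such keys could be derived with the Joda-Time API
// that nscala-time wraps (an assumption about the DAO implementation, which
// is not shown in this spec):
def weekKey(date: DateTime): String = s"${date.getWeekyear}-${date.getWeekOfWeekyear}"
// e.g. weekKey(new DateTime(2016, 2, 9, 11, 0, 0, 0)) == "2016-6"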
it should "find workouts grouped by week for given range" in {
val date = new DateTime(2016, 2, 9, 11, 0, 0, 0)
val rangeStart = date
val rangeEnd = date + 1.day
val workoutsGrouped = workoutDao.findAllByUserInRangeGroupedWeekly(testingUser.userId, rangeStart, rangeEnd)
workoutsGrouped.size shouldBe 1
workoutsGrouped("2016-6").find(_.workoutId == WorkoutId(1)).get.distanceMeters shouldBe 10000
}
}
|
wlk/workout-tracker-akka-http
|
src/test/scala/com/wlangiewicz/workouttracker/dao/WorkoutDaoSpec.scala
|
Scala
|
mit
| 3,181
|
package uk.co.turingatemyhamster
package owl2
package ast
/**
* AST bindings for the OWL2 annotations module, tying the module's abstract
* `Annotation` type to the concrete case class defined in this file.
*
* @author Matthew Pocock
*/
trait AnnotationsModuleAst extends owl2.AnnotationsModule {
importedModules : owl2.EntitiesLiteralsAnonymousIndividualsModule with owl2.OntologyModule with owl2.IriModule =>
override final type Annotation = ast.Annotation
}
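// Note: `importedModules` above is a named self-type annotation (the cake
// pattern): AnnotationsModuleAst can only be mixed into a composition that
// also provides the entities/literals, ontology, and IRI modules, and it
// binds the module's abstract `Annotation` type to the case class below.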
trait AnnotationValue
case class Annotation(annotationAnnotations: List[Annotation] = Nil,
annotationProperty: AnnotationProperty,
annotationValue: AnnotationValue)
|
drdozer/owl2
|
core/src/main/scala/uk/co/turingatemyhamster/owl2/ast/AnnotationsModuleAst.scala
|
Scala
|
apache-2.0
| 544
|
/**
* Copyright 2015 Mohiva Organisation (license at mohiva dot com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mohiva.play.silhouette.test
import javax.inject.Inject
import com.mohiva.play.silhouette.api.{ Environment, LoginInfo, Silhouette }
import com.mohiva.play.silhouette.impl.authenticators._
import org.specs2.matcher.JsonMatchers
import play.api.libs.json.Json
import play.api.test.{ FakeRequest, PlaySpecification, WithApplication }
/**
* Test case for the [[com.mohiva.play.silhouette.test]] helpers.
*/
class FakesSpec extends PlaySpecification with JsonMatchers {
"The `retrieve` method of the `FakeIdentityService`" should {
"return the identity for the given login info" in {
val loginInfo = LoginInfo("test", "test")
val identity = FakeIdentity(loginInfo)
val service = new FakeIdentityService[FakeIdentity](loginInfo -> identity)
await(service.retrieve(loginInfo)) must beSome(identity)
}
"return None if no identity could be found for the given login info" in {
val loginInfo = LoginInfo("test", "test")
val service = new FakeIdentityService[FakeIdentity]()
await(service.retrieve(loginInfo)) must beNone
}
}
"The `find` method of the `FakeAuthenticatorDAO`" should {
"return an authenticator for the given ID" in {
val loginInfo = LoginInfo("test", "test")
val authenticator = new FakeAuthenticator(loginInfo, "test")
val dao = new FakeAuthenticatorDAO[FakeAuthenticator]()
await(dao.add(authenticator))
await(dao.find("test")) must beSome(authenticator)
}
"return None if no authenticator could be found for the given ID" in {
val dao = new FakeAuthenticatorDAO[FakeAuthenticator]()
await(dao.find("test")) must beNone
}
}
"The `add` method of the `FakeAuthenticatorDAO`" should {
"add an authenticator" in {
val loginInfo = LoginInfo("test", "test")
val authenticator = new FakeAuthenticator(loginInfo)
val dao = new FakeAuthenticatorDAO[FakeAuthenticator]()
await(dao.add(authenticator)) must be equalTo authenticator
}
}
"The `update` method of the `FakeAuthenticatorDAO`" should {
"update an authenticator" in {
val loginInfo = LoginInfo("test", "test")
val authenticator = new FakeAuthenticator(loginInfo)
val dao = new FakeAuthenticatorDAO[FakeAuthenticator]()
await(dao.update(authenticator)) must be equalTo authenticator
}
}
"The `remove` method of the `FakeAuthenticatorDAO`" should {
"remove an authenticator" in {
val loginInfo = LoginInfo("test", "test")
val authenticator = new FakeAuthenticator(loginInfo, "test")
val dao = new FakeAuthenticatorDAO[FakeAuthenticator]()
await(dao.add(authenticator))
await(dao.find("test")) must beSome(authenticator)
await(dao.remove("test"))
await(dao.find("test")) must beNone
}
}
"The `FakeAuthenticatorService` factory" should {
"return a `SessionAuthenticatorService`" in {
FakeAuthenticatorService[SessionAuthenticator]() must beAnInstanceOf[SessionAuthenticatorService]
}
"return a `CookieAuthenticatorService`" in new WithApplication {
FakeAuthenticatorService[CookieAuthenticator]() must beAnInstanceOf[CookieAuthenticatorService]
}
"return a `BearerTokenAuthenticatorService`" in {
FakeAuthenticatorService[BearerTokenAuthenticator]() must beAnInstanceOf[BearerTokenAuthenticatorService]
}
"return a `JWTAuthenticatorService`" in {
FakeAuthenticatorService[JWTAuthenticator]() must beAnInstanceOf[JWTAuthenticatorService]
}
"return a `DummyAuthenticatorService`" in {
FakeAuthenticatorService[DummyAuthenticator]() must beAnInstanceOf[DummyAuthenticatorService]
}
}
"The `FakeAuthenticator` factory" should {
"return a `SessionAuthenticator`" in new WithApplication {
val loginInfo = LoginInfo("test", "test")
val identity = FakeIdentity(loginInfo)
implicit val env = FakeEnvironment[FakeIdentity, SessionAuthenticator](Seq(loginInfo -> identity))
implicit val request = FakeRequest()
FakeAuthenticator[SessionAuthenticator](loginInfo) must beAnInstanceOf[SessionAuthenticator]
}
"return a `CookieAuthenticator`" in new WithApplication {
val loginInfo = LoginInfo("test", "test")
val identity = FakeIdentity(loginInfo)
implicit val env = FakeEnvironment[FakeIdentity, CookieAuthenticator](Seq(loginInfo -> identity))
implicit val request = FakeRequest()
FakeAuthenticator[CookieAuthenticator](loginInfo) must beAnInstanceOf[CookieAuthenticator]
}
"return a `BearerTokenAuthenticator`" in new WithApplication {
val loginInfo = LoginInfo("test", "test")
val identity = FakeIdentity(loginInfo)
implicit val env = FakeEnvironment[FakeIdentity, BearerTokenAuthenticator](Seq(loginInfo -> identity))
implicit val request = FakeRequest()
FakeAuthenticator[BearerTokenAuthenticator](loginInfo) must beAnInstanceOf[BearerTokenAuthenticator]
}
"return a `JWTAuthenticator`" in new WithApplication {
val loginInfo = LoginInfo("test", "test")
val identity = FakeIdentity(loginInfo)
implicit val env = FakeEnvironment[FakeIdentity, JWTAuthenticator](Seq(loginInfo -> identity))
implicit val request = FakeRequest()
FakeAuthenticator[JWTAuthenticator](loginInfo) must beAnInstanceOf[JWTAuthenticator]
}
"return a `DummyAuthenticator`" in new WithApplication {
val loginInfo = LoginInfo("test", "test")
val identity = FakeIdentity(loginInfo)
implicit val env = FakeEnvironment[FakeIdentity, DummyAuthenticator](Seq(loginInfo -> identity))
implicit val request = FakeRequest()
FakeAuthenticator[DummyAuthenticator](loginInfo) must beAnInstanceOf[DummyAuthenticator]
}
}
"The `securedAction` method of the `SecuredController`" should {
"return a 401 status code if no authenticator was found" in new WithApplication {
val loginInfo = LoginInfo("test", "test")
val identity = FakeIdentity(loginInfo)
val env = FakeEnvironment[FakeIdentity, CookieAuthenticator](Seq(loginInfo -> identity))
val request = FakeRequest()
val controller = new SecuredController(env)
val result = controller.securedAction(request)
status(result) must equalTo(UNAUTHORIZED)
}
"return a 401 status code if authenticator but no identity was found" in new WithApplication {
val loginInfo = LoginInfo("test", "test")
val identity = FakeIdentity(loginInfo)
implicit val env = FakeEnvironment[FakeIdentity, CookieAuthenticator](Seq(loginInfo -> identity))
val request = FakeRequest().withAuthenticator[CookieAuthenticator](LoginInfo("invalid", "invalid"))
val controller = new SecuredController(env)
val result = controller.securedAction(request)
status(result) must equalTo(UNAUTHORIZED)
}
"return a 200 status code if authenticator and identity was found" in new WithApplication {
val loginInfo = LoginInfo("test", "test")
val identity = FakeIdentity(loginInfo)
implicit val env = FakeEnvironment[FakeIdentity, CookieAuthenticator](Seq(loginInfo -> identity))
val request = FakeRequest().withAuthenticator[CookieAuthenticator](loginInfo)
val controller = new SecuredController(env)
val result = controller.securedAction(request)
status(result) must equalTo(OK)
contentAsString(result) must */("providerID" -> "test") and */("providerKey" -> "test")
}
}
"The `userAwareAction` method of the `SecuredController`" should {
"return a 401 status code if no authenticator was found" in new WithApplication {
val loginInfo = LoginInfo("test", "test")
val identity = FakeIdentity(loginInfo)
val env = FakeEnvironment[FakeIdentity, CookieAuthenticator](Seq(loginInfo -> identity))
val request = FakeRequest()
val controller = new SecuredController(env)
val result = controller.userAwareAction(request)
status(result) must equalTo(UNAUTHORIZED)
}
"return a 401 status code if authenticator but no identity was found" in new WithApplication {
val loginInfo = LoginInfo("test", "test")
val identity = FakeIdentity(loginInfo)
implicit val env = FakeEnvironment[FakeIdentity, CookieAuthenticator](Seq(loginInfo -> identity))
val request = FakeRequest().withAuthenticator[CookieAuthenticator](LoginInfo("invalid", "invalid"))
val controller = new SecuredController(env)
val result = controller.userAwareAction(request)
status(result) must equalTo(UNAUTHORIZED)
}
"return a 200 status code if authenticator and identity was found" in new WithApplication {
val loginInfo = LoginInfo("test", "test")
val identity = FakeIdentity(loginInfo)
implicit val env = FakeEnvironment[FakeIdentity, CookieAuthenticator](Seq(loginInfo -> identity))
val request = FakeRequest().withAuthenticator(loginInfo)
val controller = new SecuredController(env)
val result = controller.userAwareAction(request)
status(result) must equalTo(OK)
contentAsString(result) must */("providerID" -> "test") and */("providerKey" -> "test")
}
}
/**
* A secured controller implementation.
*
* @param env The Silhouette environment.
*/
class SecuredController @Inject() (
val env: Environment[FakeIdentity, CookieAuthenticator])
extends Silhouette[FakeIdentity, CookieAuthenticator] {
/**
* A secured action.
*
* @return The result to send to the client.
*/
def securedAction = SecuredAction { implicit request =>
Ok(Json.toJson(request.identity.loginInfo))
}
/**
* A user aware action.
*
* @return The result to send to the client.
*/
def userAwareAction = UserAwareAction { implicit request =>
request.identity match {
case Some(identity) => Ok(Json.toJson(identity.loginInfo))
case None => Unauthorized
}
}
}
}
|
rfranco/play-silhouette
|
silhouette-testkit/test/com/mohiva/play/silhouette/test/FakesSpec.scala
|
Scala
|
apache-2.0
| 10,644
|
package scala.lms
package epfl
package test14
import common._
import test1._
import util.OverloadHack
import java.io.{PrintWriter,StringWriter,FileOutputStream}
class TestCGen extends FileDiffSuite {
val prefix = home + "test-out/epfl/test14-"
trait DSL extends ScalaOpsPkg with TupledFunctions with UncheckedOps with LiftPrimitives with LiftString with LiftVariables {
// keep track of top level functions
case class TopLevel[A,B](name: String, mA: Typ[A], mB:Typ[B], f: Rep[A] => Rep[B])
val rec = new scala.collection.mutable.HashMap[String,TopLevel[_,_]]
def toplevel[A:Typ,B:Typ](name: String)(f: Rep[A] => Rep[B]): Rep[A] => Rep[B] = {
val g = (x: Rep[A]) => unchecked[B](name,"(",x,")")
rec.getOrElseUpdate(name, TopLevel(name, typ[A], typ[B], f))
g
}
}
trait Impl extends DSL with ScalaOpsPkgExp with TupledFunctionsRecursiveExp with UncheckedOpsExp { self =>
val codegen = new CCodeGenPkg with CGenVariables with CGenTupledFunctions with CGenUncheckedOps { val IR: self.type = self }
def emitAll(): Unit = {
assert(codegen ne null) //careful about initialization order
rec.foreach { case (k,x) =>
val stream = new PrintWriter(System.out)
stream.println("/* FILE: " + x.name + ".c */")
for ((_,v) <- rec) codegen.emitForwardDef(mtype(v.mA)::Nil, v.name, stream)(mtype(v.mB))
codegen.emitSource(x.f, x.name, stream)(mtype(x.mA), mtype(x.mB))
}
}
emitAll()
}
def testCGen1 = {
withOutFile(prefix+"cgen1") {
trait Prog extends DSL {
toplevel("main") { x: Rep[Int] =>
var i = 0
while (i < 10) {
printf("Hello, world! %d\\n", i)
i = i + 1
}
0
}
}
new Prog with Impl
}
assertFileEqualsCheck(prefix+"cgen1")
}
// the generated code will contain nested functions; it needs to be
// compiled with gcc -fnested-functions
def testCGen2 = {
withOutFile(prefix+"cgen2") {
trait Prog extends DSL {
toplevel("main") { x: Rep[Int] =>
def fac: Rep[((Int,Int))=>Int] = fun { (n, dummy) =>
if (n == 0) 1 else n * fac(n - 1, dummy)
}
printf("Hello, world! %d\\n", fac(4,0))
0
}
}
new Prog with Impl
}
assertFileEqualsCheck(prefix+"cgen2")
}
def testCGen3 = {
withOutFile(prefix+"cgen3") {
trait Prog extends DSL {
val main = toplevel("main") { x: Rep[Int] =>
printf("Hello, world: main\\n")
test1(x)
}
val test1 = toplevel("test1") { x: Rep[Int] =>
printf("Hello, world: test1\\n")
test2(x)
}
val test2 = toplevel("test2") { x: Rep[Int] =>
printf("Hello, world: test2\\n")
x
}
}
new Prog with Impl
}
assertFileEqualsCheck(prefix+"cgen3")
}
}
|
cedricbastin/copy-virtualization-lms-core
|
test-src/epfl/test14-scratch/TestCGen.scala
|
Scala
|
bsd-3-clause
| 2,932
|
/**
* Copyright 2009 Jorge Ortiz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**/
package org.scala_tools.javautils.s2j
import java.util.{Iterator => JIterator}
trait SIteratorWrapper[T] extends JIterator[T] with SWrapper {
type Wrapped <: Iterator[T]
def hasNext: Boolean =
underlying.hasNext
def next(): T =
underlying.next
def remove() =
throw new UnsupportedOperationException
}
|
jorgeortiz85/scala-javautils
|
src/main/scala/org/scala_tools/javautils/s2j/SIteratorWrapper.scala
|
Scala
|
apache-2.0
| 922
|
{{#hasEnums}}
// ----- enums
{{#enums}}
{{>enum}}
{{/enums}}
{{/hasEnums}}
|
elipoz/scrooge
|
scrooge-generator/src/main/resources/scalagen/enums.scala
|
Scala
|
apache-2.0
| 76
|
package anomalydetection.spec
import anomalydetection.util.UtilFunctions._
/**
* Created by Seif-Eddine Benkabou on 28/08/2017.
*/
trait L2GAD {
  /**
   * Computes per-point cluster weights from a distance matrix.
   * Rows of `DI` index points, columns index clusters; `beta` is a
   * fuzziness-style exponent and must not equal 1 (the code divides by beta - 1).
   * Returns the weight matrix together with the adjusted distance matrix.
   */
  def l2gad(DI: Array[Array[Double]], beta: Double): (Array[Array[Double]], Array[Array[Double]]) = {
    // shift every distance by the smallest positive Double so the divisions below never see zero
    val DI_adjusted = for (d <- DI) yield d.map(_ + Double.MinPositiveValue)
    val clusters = (0 until DI_adjusted(0).length).toArray
    val weights = (0 until DI_adjusted.length).foldLeft(Array[Array[Double]]())((acc, current) =>
      acc :+ {
        (0 until DI_adjusted(0).length).foldLeft(Array[Double]())((weight, cluster_id) =>
          weight :+ {
            // z sums the ratios d(current)^(1/(beta-1)) / d(now)^(1/(beta-1)) over the
            // indices that getSetOfIndex (from UtilFunctions) selects for this cluster
            val z = getSetOfIndex(clusters, cluster_id).foldLeft(0.0) {
              case (accumulator, now) =>
                accumulator +
                  Math.pow(DI_adjusted(current)(cluster_id), 1 / (beta - 1)) /
                    Math.pow(DI_adjusted(now)(cluster_id), 1 / (beta - 1))
            }
            // the weight is the inverse of that normalising sum
            Math.pow(z, -1)
          }
        )
      }
    )
    (weights, DI_adjusted)
  }
}
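// A minimal usage sketch (added; not part of the original file). It assumes
// getSetOfIndex, imported from UtilFunctions above, returns the row indices
// to normalise against for the given cluster column.
object L2GADDemo extends L2GAD {
  def main(args: Array[String]): Unit = {
    val di = Array(Array(0.5, 2.0), Array(1.5, 0.25)) // 2 points x 2 clusters
    val (weights, _) = l2gad(di, beta = 2.0)
    weights.foreach(row => println(row.mkString(", ")))
  }
}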
|
B-Seif/anomaly-detection-time-series
|
anomalydetection/spec/L2GAD.scala
|
Scala
|
gpl-3.0
| 1,003
|
package example
object Lists {
/**
* This method computes the sum of all elements in the list xs. There are
   * multiple techniques that can be used for implementing this method, which
   * you will learn about during the class.
*
* For this example assignment you can use the following methods in class
* `List`:
*
* - `xs.isEmpty: Boolean` returns `true` if the list `xs` is empty
* - `xs.head: Int` returns the head element of the list `xs`. If the list
* is empty an exception is thrown
   * - `xs.tail: List[Int]` returns the tail of the list `xs`, i.e. the
* list `xs` without its `head` element
*
* ''Hint:'' instead of writing a `for` or `while` loop, think of a recursive
* solution.
*
* @param xs A list of natural numbers
* @return The sum of all elements in `xs`
*/
def sum(xs: List[Int]): Int = {
def loop(xs: List[Int], acc: Int): Int = xs match {
case head :: tail => loop(tail, acc + head)
case _ => acc
}
loop(xs, 0)
}
/**
* This method returns the largest element in a list of integers. If the
* list `xs` is empty it throws a `java.util.NoSuchElementException`.
*
* You can use the same methods of the class `List` as mentioned above.
*
* ''Hint:'' Again, think of a recursive solution instead of using looping
* constructs. You might need to define an auxiliary method.
*
* @param xs A list of natural numbers
* @return The largest element in `xs`
* @throws java.util.NoSuchElementException if `xs` is an empty list
*/
def max(xs: List[Int]): Int = {
def loop(xs: List[Int], max: Int): Int = xs match {
case head :: tail => loop(tail, Math.max(max, head))
case _ => max
}
xs match { case x :: tail => loop(tail, x)
case _ => throw new java.util.NoSuchElementException()
}
}
}
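// A short usage sketch (added): both helpers make a single tail-recursive
// pass over the list, so they run in constant stack space.
object ListsDemo {
  def main(args: Array[String]): Unit = {
    println(Lists.sum(List(1, 2, 3))) // 6
    println(Lists.max(List(3, 7, 2))) // 7
  }
}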
|
huajianmao/learning
|
coursera/parprog1/week1/example/src/main/scala/example/Lists.scala
|
Scala
|
mit
| 1,898
|
object Test {
@annotation.tailrec
def bar : Nothing = {
try {
throw new RuntimeException
} catch {
case _: Throwable => bar
} finally {
bar
}
}
@annotation.tailrec
def baz : Nothing = {
try {
throw new RuntimeException
} catch {
case _: Throwable => baz
} finally {
???
}
}
@annotation.tailrec
def boz : Nothing = {
try {
throw new RuntimeException
} catch {
case _: Throwable => boz; ???
}
}
@annotation.tailrec
def bez : Nothing = {
try {
bez
} finally {
???
}
}
// the `liftedTree` local method will prevent a tail call here.
@annotation.tailrec
def bar(i : Int) : Int = {
if (i == 0) 0
else 1 + (try {
throw new RuntimeException
} catch {
case _: Throwable => bar(i - 1)
})
}
}
|
scala/scala
|
test/files/neg/t1672b.scala
|
Scala
|
apache-2.0
| 862
|
package coursier.benchmark
import java.io.ByteArrayInputStream
import java.nio.charset.StandardCharsets
import java.util.concurrent.TimeUnit
import coursier.maven.MavenRepository
import coursier.moduleString
import org.apache.maven.model.io.xpp3.MavenXpp3Reader
import org.openjdk.jmh.annotations._
import scala.concurrent.Await
import scala.concurrent.duration.Duration
@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.MILLISECONDS)
class ParseTests {
@Benchmark
def parseSparkParent(state: TestState): Unit = {
val t = state.repositories.head.find(
mod"org.apache.spark:spark-parent_2.12",
"2.4.0",
state.fetcher
).run
val e = Await.result(t.future()(state.ec), Duration.Inf)
assert(e.isRight)
}
@Benchmark
def parseSparkParentXmlDom(state: TestState): Unit = {
val content = state.inMemoryCache.fromCache(
"https://repo1.maven.org/maven2/org/apache/spark/spark-parent_2.12/2.4.0/spark-parent_2.12-2.4.0.pom"
)
val res = MavenRepository.parseRawPomDom(content)
assert(res.isRight)
}
@Benchmark
def parseSparkParentXmlSax(state: TestState): Unit = {
val content = state.inMemoryCache.fromCache(
"https://repo1.maven.org/maven2/org/apache/spark/spark-parent_2.12/2.4.0/spark-parent_2.12-2.4.0.pom"
)
val res = MavenRepository.parseRawPomSax(content)
assert(res.isRight)
}
@Benchmark
def parseApacheParent(state: TestState): Unit = {
val t = state.repositories.head.find(
mod"org.apache:apache",
"18",
state.fetcher
).run
val e = Await.result(t.future()(state.ec), Duration.Inf)
assert(e.isRight)
}
  @Benchmark
  def parseSparkParentMavenModel(state: TestState): org.apache.maven.model.Model = {
    val b = state
      .inMemoryCache
      .fromCache(
        "https://repo1.maven.org/maven2/org/apache/spark/spark-parent_2.12/2.4.0/spark-parent_2.12-2.4.0.pom"
      )
      .getBytes(StandardCharsets.UTF_8)
    val reader = new MavenXpp3Reader
    // return the parsed model so JMH cannot dead-code-eliminate the parse
    reader.read(new ByteArrayInputStream(b))
  }
}
|
alexarchambault/coursier
|
modules/benchmark/src/main/scala/coursier/benchmark/ParseTests.scala
|
Scala
|
apache-2.0
| 2,050
|
package actors
import akka.actor.{Actor, ActorLogging, ActorSystem}
import io.apibuilder.api.v0.models.{Diff, DiffBreaking, DiffNonBreaking, DiffUndefinedType, Publication, Version}
import io.apibuilder.internal.v0.models.{Task, TaskDataDiffVersion, TaskDataIndexApplication, TaskDataUndefinedType}
import db.{ApplicationsDao, Authorization, ChangesDao, OrganizationsDao, TasksDao, UsersDao, VersionsDao, WatchesDao}
import lib.{AppConfig, ServiceDiff, Text}
import java.util.UUID
import org.joda.time.DateTime
import scala.concurrent.duration._
import scala.util.{Failure, Success, Try}
object TaskActor {
object Messages {
case class Created(guid: UUID)
case object RestartDroppedTasks
case object PurgeOldTasks
case object NotifyFailed
}
}
@javax.inject.Singleton
class TaskActor @javax.inject.Inject() (
system: ActorSystem,
appConfig: AppConfig,
applicationsDao: ApplicationsDao,
changesDao: ChangesDao,
emails: Emails,
organizationsDao: OrganizationsDao,
search: Search,
tasksDao: TasksDao,
usersDao: UsersDao,
versionsDao: VersionsDao,
watchesDao: WatchesDao
) extends Actor with ActorLogging with ErrorHandler {
private[this] implicit val ec = system.dispatchers.lookup("task-actor-context")
private[this] val NumberDaysBeforePurge = 30
private[this] case class Process(guid: UUID)
system.scheduler.scheduleAtFixedRate(1.hour, 1.hour, self, TaskActor.Messages.RestartDroppedTasks)
system.scheduler.scheduleAtFixedRate(1.day, 1.day, self, TaskActor.Messages.NotifyFailed)
system.scheduler.scheduleAtFixedRate(1.day, 1.day, self, TaskActor.Messages.PurgeOldTasks)
def receive = {
case m @ TaskActor.Messages.Created(guid) => withVerboseErrorHandler(m) {
self ! Process(guid)
}
case m @ Process(guid) => withVerboseErrorHandler(m) {
tasksDao.findByGuid(guid).foreach { task =>
tasksDao.incrementNumberAttempts(usersDao.AdminUser, task)
task.data match {
case TaskDataDiffVersion(oldVersionGuid, newVersionGuid) => {
processTask(task, Try(diffVersion(oldVersionGuid, newVersionGuid)))
}
case TaskDataIndexApplication(applicationGuid) => {
processTask(task, Try(search.indexApplication(applicationGuid)))
}
case TaskDataUndefinedType(desc) => {
tasksDao.recordError(usersDao.AdminUser, task, "Task actor got an undefined data type: " + desc)
}
}
}
}
case m @ TaskActor.Messages.RestartDroppedTasks => withVerboseErrorHandler(m) {
tasksDao.findAll(
nOrFewerAttempts = Some(2),
createdOnOrBefore = Some(DateTime.now.minusMinutes(1))
).foreach { task =>
self ! Process(task.guid)
}
}
case m @ TaskActor.Messages.NotifyFailed => withVerboseErrorHandler(m) {
val errors = tasksDao.findAll(
nOrMoreAttempts = Some(2),
isDeleted = Some(false),
createdOnOrAfter = Some(DateTime.now.minusDays(3))
).map { task =>
val errorType = task.data match {
case TaskDataDiffVersion(a, b) => s"TaskDataDiffVersion($a, $b)"
case TaskDataIndexApplication(guid) => s"TaskDataIndexApplication($guid)"
case TaskDataUndefinedType(desc) => s"TaskDataUndefinedType($desc)"
}
val errorMsg = Text.truncate(task.lastError.getOrElse("No information on error"), 500)
s"$errorType task ${task.guid}: $errorMsg"
}
emails.sendErrors(
subject = "One or more tasks failed",
errors = errors
)
}
case m @ TaskActor.Messages.PurgeOldTasks => withVerboseErrorHandler(m) {
tasksDao.findAll(
isDeleted = Some(true),
deletedAtLeastNDaysAgo = Some(NumberDaysBeforePurge)
).foreach { task =>
tasksDao.purge(usersDao.AdminUser, task)
}
}
case m: Any => logUnhandledMessage(m)
}
private[this] def diffVersion(oldVersionGuid: UUID, newVersionGuid: UUID): Unit = {
versionsDao.findByGuid(Authorization.All, oldVersionGuid, isDeleted = None).foreach { oldVersion =>
versionsDao.findByGuid(Authorization.All, newVersionGuid).foreach { newVersion =>
ServiceDiff(oldVersion.service, newVersion.service).differences match {
case Nil => {
// No-op
}
case diffs => {
changesDao.upsert(
createdBy = usersDao.AdminUser,
fromVersion = oldVersion,
toVersion = newVersion,
differences = diffs
)
versionUpdated(newVersion, diffs)
}
}
}
}
}
private[this] def versionUpdated(
version: Version,
diffs: Seq[Diff]
): Unit = {
// Only send email if something has actually changed
if (diffs.nonEmpty) {
val breakingDiffs = diffs.flatMap { d =>
d match {
case d: DiffBreaking => Some(d.description)
case _: DiffNonBreaking => None
case d: DiffUndefinedType => Some(d.description)
}
}
val nonBreakingDiffs = diffs.flatMap { d =>
d match {
case _: DiffBreaking => None
case d: DiffNonBreaking => Some(d.description)
case _: DiffUndefinedType => None
}
}
applicationsDao.findAll(Authorization.All, version = Some(version), limit = 1).foreach { application =>
organizationsDao.findAll(Authorization.All, application = Some(application), limit = 1).foreach { org =>
emails.deliver(
context = Emails.Context.Application(application),
org = org,
publication = Publication.VersionsCreate,
subject = s"${org.name}/${application.name}:${version.version} Uploaded",
body = views.html.emails.versionCreated(
appConfig,
org,
application,
version,
breakingDiffs = breakingDiffs,
nonBreakingDiffs = nonBreakingDiffs
).toString
) { subscription =>
watchesDao.findAll(
Authorization.All,
application = Some(application),
userGuid = Some(subscription.user.guid),
limit = 1
).nonEmpty
}
}
}
}
}
def processTask[T](task: Task, attempt: Try[T]): Unit = {
attempt match {
case Success(_) => {
tasksDao.softDelete(usersDao.AdminUser, task)
}
case Failure(ex) => {
tasksDao.recordError(usersDao.AdminUser, task, ex)
}
}
}
}
|
gheine/apidoc
|
api/app/actors/TaskActor.scala
|
Scala
|
mit
| 6,588
|
/**
* (c) Copyright 2013 WibiData, Inc.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kiji.schema.shell
import org.kiji.annotations.ApiAudience
import org.kiji.annotations.ApiStability
import org.kiji.annotations.Inheritance
@ApiAudience.Public
@ApiStability.Evolving
@Inheritance.Sealed
class DDLException(val msg:String) extends RuntimeException(msg)
|
kijiproject/kiji-schema-shell
|
src/main/scala/org/kiji/schema/shell/DDLException.scala
|
Scala
|
apache-2.0
| 999
|
package org.jetbrains.plugins.scala
package lang.surroundWith.surrounders.scaladoc
/**
* User: Dmitry Naydanov
* Date: 3/8/12
*/
class ScalaDocWithSubscriptSurrounder extends ScalaDocWithSyntaxSurrounder {
def getSyntaxTag: String = ",,"
def getTemplateDescription: String = "Subscript: ,, ,,"
}
|
ilinum/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/surroundWith/surrounders/scaladoc/ScalaDocWithSubscriptSurrounder.scala
|
Scala
|
apache-2.0
| 306
|
package com.codesimples.simple.restapi.model
case class CalculateDeductionRequestObject(jsonObject:Map[String,Any]) {
def gender(): String = {
requestBody().getOrElse("gender", "").asInstanceOf[String]
}
def height(): Int = {
requestBody().getOrElse("height", 0).asInstanceOf[Int]
}
def weight(): Int = {
requestBody().getOrElse("weight", 0).asInstanceOf[Int]
}
  def age(): Int = {
    // default to 0 so a missing field cannot produce a ClassCastException
    // (passing the `Int` companion object as the default is not castable to Int)
    requestBody().getOrElse("age", 0).asInstanceOf[Int]
  }
  def shapeChest(): Int = {
    requestBody().getOrElse("shapeChest", 0).asInstanceOf[Int]
  }
  def shapeWaist(): Int = {
    requestBody().getOrElse("shapeWaist", 0).asInstanceOf[Int]
  }
  def shapeHip(): Int = {
    requestBody().getOrElse("shapeHip", 0).asInstanceOf[Int]
  }
def requestBody(): Map[String, Any] = {
jsonObject.getOrElse("request_body", Map[String,Any]()).asInstanceOf[Map[String,Any]]
}
def validate(): Boolean = {
this.gender().toLowerCase match {
case "m" => {
((40 to 199).contains(this.weight()) &&
(13 to 99).contains(this.age()) &&
(152 to 204).contains(this.height()))
} case "f" => {
((45 to 120).contains(this.weight()) &&
(14 to 99).contains(this.age()) &&
(145 to 190).contains(this.height()))
} case _ => throw new IllegalStateException("invalid gender!")
}
}
  def invalidJSON(): String = {
    // distinct "error_code"/"error_message" keys (names assumed) avoid
    // duplicate "code"/"message" entries within a single JSON object
    "{\"response_header\":{" +
      "\"timestamp\":" + System.currentTimeMillis() + "," +
      "\"code\":500," +
      "\"message\":\"Internal Server Error\"," +
      "\"error_code\":1," +
      "\"error_message\":\"Invalid Parameters\"" +
      "}}"
  }
}
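// A minimal usage sketch (added): the nested map stands in for a decoded
// JSON request body, with keys matching the accessors above.
object CalculateDeductionRequestObjectDemo {
  def main(args: Array[String]): Unit = {
    val request = CalculateDeductionRequestObject(Map(
      "request_body" -> Map[String, Any](
        "gender" -> "m", "height" -> 180, "weight" -> 80, "age" -> 30)))
    println(request.validate()) // true: all values fall inside the male ranges
  }
}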
|
agnaldo4j/estudos_arquitetura_limpa_scala
|
planner-restapi/src/main/scala/com/codesimples/simple/restapi/model/CalculateDeductionRequestObject.scala
|
Scala
|
mit
| 1,649
|
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import monix.eval.Task
import monix.execution.Ack.Continue
import monix.execution.FutureUtils.extensions._
import monix.execution.{Ack, Scheduler}
import monix.reactive.Observable.{empty, now}
import monix.execution.exceptions.DummyException
import monix.reactive.subjects.PublishSubject
import monix.reactive.{Observable, Observer}
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.{Failure, Random, Try}
object ConcatOneSuite extends BaseOperatorSuite {
def createObservable(sourceCount: Int) = Some {
val o = Observable.range(0, sourceCount)
.flatMap(i => Observable.now(i))
Sample(o, count(sourceCount), sum(sourceCount), waitFirst, waitNext)
}
def count(sourceCount: Int) =
sourceCount
def waitFirst = Duration.Zero
def waitNext = Duration.Zero
def observableInError(sourceCount: Int, ex: Throwable) =
if (sourceCount == 1) None else Some {
val o = createObservableEndingInError(Observable.range(0, sourceCount), ex)
.flatMap(i => Observable.now(i))
Sample(o, count(sourceCount), sum(sourceCount), waitFirst, waitNext)
}
def sum(sourceCount: Int) = {
sourceCount * (sourceCount - 1) / 2
}
def brokenUserCodeObservable(sourceCount: Int, ex: Throwable) = Some {
val o = Observable.range(0, sourceCount).flatMap { i =>
if (i == sourceCount-1)
throw ex
else
Observable.now(i)
}
Sample(o, count(sourceCount-1), sum(sourceCount-1), waitFirst, waitNext)
}
def toList[A](o: Observable[A])(implicit s: Scheduler) = {
o.foldLeft(Vector.empty[A])(_ :+ _).runAsyncGetLast
.map(_.getOrElse(Vector.empty))
}
override def cancelableObservables(): Seq[Sample] = {
val sample1 = Observable.range(1, 100)
.flatMap(x => Observable.now(x).delayExecution(1.second))
val sample2 = Observable.range(0, 100).delayOnNext(1.second)
.flatMap(x => Observable.now(x).delayExecution(1.second))
Seq(
Sample(sample1, 0, 0, 0.seconds, 0.seconds),
Sample(sample1, 1, 1, 1.seconds, 0.seconds),
Sample(sample2, 0, 0, 0.seconds, 0.seconds)
)
}
test("should work synchronously for synchronous observers") { implicit s =>
val sourceCount = Random.nextInt(300) + 100
var received = 0
var total = 0L
createObservable(sourceCount) match {
case Some(Sample(obs, count, sum, _, _)) =>
obs.unsafeSubscribeFn(new Observer[Long] {
private[this] var sum = 0L
def onNext(elem: Long): Ack = {
received += 1
sum += elem
Continue
}
def onError(ex: Throwable): Unit = throw new IllegalStateException()
def onComplete(): Unit = total = sum
})
assertEquals(received, count)
assertEquals(total, sum)
}
}
test("filter can be expressed in terms of flatMap") { implicit s =>
val obs1 = Observable.range(0, 100).filter(_ % 2 == 0)
val obs2 = Observable.range(0, 100).flatMap(x => if (x % 2 == 0) now(x) else empty)
val lst1 = toList(obs1)
val lst2 = toList(obs2)
s.tick()
assert(lst1.isCompleted && lst2.isCompleted)
assertEquals(lst1.value.get, lst2.value.get)
}
test("filterEval can be expressed in terms of flatMap") { implicit s =>
val obs1 = Observable.range(0, 100).filterEval(i => Task.pure(i % 2 == 0))
val obs2 = Observable.range(0, 100).flatMap(x => if (x % 2 == 0) now(x) else empty)
val lst1 = toList(obs1)
val lst2 = toList(obs2)
s.tick()
assert(lst1.isCompleted && lst2.isCompleted)
assertEquals(lst1.value.get, lst2.value.get)
}
test("filterEvalF can be expressed in terms of flatMap") { implicit s =>
val obs1 = Observable.range(0, 100).filterEvalF[Try](i => Try(i % 2 == 0))
val obs2 = Observable.range(0, 100).flatMap(x => if (x % 2 == 0) now(x) else empty)
val lst1 = toList(obs1)
val lst2 = toList(obs2)
s.tick()
assert(lst1.isCompleted && lst2.isCompleted)
assertEquals(lst1.value.get, lst2.value.get)
}
test("map can be expressed in terms of flatMap") { implicit s =>
val obs1 = Observable.range(0, 100).map(_ + 10)
val obs2 = Observable.range(0, 100).flatMap(x => now(x + 10))
val lst1 = toList(obs1)
val lst2 = toList(obs2)
s.tick()
assert(lst1.isCompleted && lst2.isCompleted)
assertEquals(lst1.value.get, lst2.value.get)
}
test("should wait the completion of the current, before subscribing to the next") { implicit s =>
var obs2WasStarted = false
var received = 0L
var wasCompleted = false
val obs1 = PublishSubject[Long]()
val obs2 = Observable.range(1, 100).map { x => obs2WasStarted = true; x }
Observable.fromIterable(Seq(obs1, obs2)).flatten.unsafeSubscribeFn(new Observer[Long] {
def onNext(elem: Long) = {
received += elem
if (elem == 1000)
Future.delayedResult(1.second)(Continue)
else
Continue
}
def onError(ex: Throwable) = ()
def onComplete() = wasCompleted = true
})
s.tickOne()
assertEquals(received, 0)
obs1.onNext(10)
assertEquals(received, 10)
val f = obs1.onNext(1000)
assertEquals(received, 1010)
f.onComplete(_ => obs1.onComplete())
s.tick()
assert(!obs2WasStarted)
s.tick(1.second)
assert(obs2WasStarted)
assertEquals(received, 1010 + 99 * 50)
assert(wasCompleted)
}
test("should interrupt the streaming on error") { implicit s =>
var obs1WasStarted = false
var obs2WasStarted = false
var wasThrown: Throwable = null
val sub = PublishSubject[Long]()
val obs1 = sub.doOnStart(_ => Task { obs1WasStarted = true })
val obs2 = Observable.range(1, 100).map { x => obs2WasStarted = true; x }
Observable.fromIterable(Seq(obs1, obs2)).flatten.unsafeSubscribeFn(new Observer[Long] {
def onNext(elem: Long) = Continue
def onError(ex: Throwable) = wasThrown = ex
def onComplete() = ()
})
s.tick()
sub.onNext(1)
assert(obs1WasStarted)
sub.onError(DummyException("dummy"))
s.tick()
assertEquals(wasThrown, DummyException("dummy"))
assert(!obs2WasStarted)
}
test("should not break the contract on user-level error #2") { implicit s =>
val dummy1 = DummyException("dummy1")
val dummy2 = DummyException("dummy2")
val source = Observable.now(1L).endWithError(dummy1)
val obs: Observable[Long] = source.flatMap { _ => Observable.raiseError(dummy2) }
var thrownError: Throwable = null
var received = 0
var onCompleteReceived = false
var onErrorReceived = 0
obs.unsafeSubscribeFn(new Observer[Long] {
def onNext(elem: Long): Ack = {
received += 1
Continue
}
def onComplete(): Unit =
onCompleteReceived = true
def onError(ex: Throwable): Unit = {
onErrorReceived += 1
thrownError = ex
}
})
s.tick()
assertEquals(received, 0)
assertEquals(thrownError, dummy2)
assert(!onCompleteReceived, "!onCompleteReceived")
assertEquals(onErrorReceived, 1)
}
test("should not break the contract on user-level error #3") { implicit s =>
val dummy1 = DummyException("dummy1")
val dummy2 = DummyException("dummy2")
val source = Observable.now(1L).endWithError(dummy1)
val obs: Observable[Long] = source.flatMap { _ =>
Observable.raiseError(dummy2).executeAsync
}
var thrownError: Throwable = null
var received = 0
var onCompleteReceived = false
var onErrorReceived = 0
obs.unsafeSubscribeFn(new Observer[Long] {
def onNext(elem: Long): Ack = {
received += 1
Continue
}
def onComplete(): Unit =
onCompleteReceived = true
def onError(ex: Throwable): Unit = {
onErrorReceived += 1
thrownError = ex
}
})
s.tick()
assertEquals(received, 0)
assertEquals(thrownError, dummy2)
assert(!onCompleteReceived, "!onCompleteReceived")
assertEquals(onErrorReceived, 1)
}
test("exceptions can be triggered synchronously by throw") { implicit s =>
val dummy = DummyException("dummy")
val source = Observable.now(1L).flatMap(_ => throw dummy)
val f = source.runAsyncGetLast
s.tick()
assertEquals(f.value, Some(Failure(dummy)))
assertEquals(s.state.lastReportedError, null)
}
test("exceptions can be triggered synchronously through raiseError") { implicit s =>
val dummy = DummyException("dummy")
val source = Observable.now(1L).flatMap(_ => Observable.raiseError(dummy))
val f = source.runAsyncGetLast
s.tick()
assertEquals(f.value, Some(Failure(dummy)))
assertEquals(s.state.lastReportedError, null)
}
}
|
Wogan/monix
|
monix-reactive/shared/src/test/scala/monix/reactive/internal/operators/ConcatOneSuite.scala
|
Scala
|
apache-2.0
| 9,472
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.evaluation
import org.apache.spark.annotation.Since
import org.apache.spark.ml.param.{DoubleParam, Param, ParamMap, ParamValidators}
import org.apache.spark.ml.param.shared.{HasLabelCol, HasPredictionCol, HasWeightCol}
import org.apache.spark.ml.util.{DefaultParamsReadable, DefaultParamsWritable, Identifiable, SchemaUtils}
import org.apache.spark.mllib.evaluation.MulticlassMetrics
import org.apache.spark.sql.{Dataset, Row}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.DoubleType
/**
* Evaluator for multiclass classification, which expects two input columns: prediction and label.
*/
@Since("1.5.0")
class MulticlassClassificationEvaluator @Since("1.5.0") (@Since("1.5.0") override val uid: String)
extends Evaluator with HasPredictionCol with HasLabelCol
with HasWeightCol with DefaultParamsWritable {
import MulticlassClassificationEvaluator.supportedMetricNames
@Since("1.5.0")
def this() = this(Identifiable.randomUID("mcEval"))
/**
   * param for metric name in evaluation (supports `"f1"` (default), `"accuracy"`,
   * `"weightedPrecision"`, `"weightedRecall"`, `"weightedTruePositiveRate"`,
   * `"weightedFalsePositiveRate"`, `"weightedFMeasure"`, `"truePositiveRateByLabel"`,
   * `"falsePositiveRateByLabel"`, `"precisionByLabel"`, `"recallByLabel"`,
   * `"fMeasureByLabel"`; see `supportedMetricNames`)
* @group param
*/
@Since("1.5.0")
val metricName: Param[String] = new Param(this, "metricName",
s"metric name in evaluation ${supportedMetricNames.mkString("(", "|", ")")}",
ParamValidators.inArray(supportedMetricNames))
/** @group getParam */
@Since("1.5.0")
def getMetricName: String = $(metricName)
/** @group setParam */
@Since("1.5.0")
def setMetricName(value: String): this.type = set(metricName, value)
setDefault(metricName -> "f1")
/** @group setParam */
@Since("1.5.0")
def setPredictionCol(value: String): this.type = set(predictionCol, value)
/** @group setParam */
@Since("1.5.0")
def setLabelCol(value: String): this.type = set(labelCol, value)
/** @group setParam */
@Since("3.0.0")
def setWeightCol(value: String): this.type = set(weightCol, value)
@Since("3.0.0")
final val metricLabel: DoubleParam = new DoubleParam(this, "metricLabel",
"The class whose metric will be computed in " +
s"${supportedMetricNames.filter(_.endsWith("ByLabel")).mkString("(", "|", ")")}. " +
"Must be >= 0. The default value is 0.",
ParamValidators.gtEq(0.0))
/** @group getParam */
@Since("3.0.0")
def getMetricLabel: Double = $(metricLabel)
/** @group setParam */
@Since("3.0.0")
def setMetricLabel(value: Double): this.type = set(metricLabel, value)
setDefault(metricLabel -> 0.0)
@Since("3.0.0")
final val beta: DoubleParam = new DoubleParam(this, "beta",
"The beta value, which controls precision vs recall weighting, " +
"used in (weightedFMeasure|fMeasureByLabel). Must be > 0. The default value is 1.",
ParamValidators.gt(0.0))
/** @group getParam */
@Since("3.0.0")
def getBeta: Double = $(beta)
/** @group setParam */
@Since("3.0.0")
def setBeta(value: Double): this.type = set(beta, value)
setDefault(beta -> 1.0)
@Since("2.0.0")
override def evaluate(dataset: Dataset[_]): Double = {
val schema = dataset.schema
SchemaUtils.checkColumnType(schema, $(predictionCol), DoubleType)
SchemaUtils.checkNumericType(schema, $(labelCol))
val predictionAndLabelsWithWeights =
dataset.select(col($(predictionCol)), col($(labelCol)).cast(DoubleType),
if (!isDefined(weightCol) || $(weightCol).isEmpty) lit(1.0) else col($(weightCol)))
.rdd.map {
case Row(prediction: Double, label: Double, weight: Double) => (prediction, label, weight)
}
val metrics = new MulticlassMetrics(predictionAndLabelsWithWeights)
$(metricName) match {
case "f1" => metrics.weightedFMeasure
case "accuracy" => metrics.accuracy
case "weightedPrecision" => metrics.weightedPrecision
case "weightedRecall" => metrics.weightedRecall
case "weightedTruePositiveRate" => metrics.weightedTruePositiveRate
case "weightedFalsePositiveRate" => metrics.weightedFalsePositiveRate
case "weightedFMeasure" => metrics.weightedFMeasure($(beta))
case "truePositiveRateByLabel" => metrics.truePositiveRate($(metricLabel))
case "falsePositiveRateByLabel" => metrics.falsePositiveRate($(metricLabel))
case "precisionByLabel" => metrics.precision($(metricLabel))
case "recallByLabel" => metrics.recall($(metricLabel))
case "fMeasureByLabel" => metrics.fMeasure($(metricLabel), $(beta))
}
}
@Since("1.5.0")
override def isLargerBetter: Boolean = {
$(metricName) match {
case "weightedFalsePositiveRate" => false
case "falsePositiveRateByLabel" => false
case _ => true
}
}
@Since("1.5.0")
override def copy(extra: ParamMap): MulticlassClassificationEvaluator = defaultCopy(extra)
}
@Since("1.6.0")
object MulticlassClassificationEvaluator
extends DefaultParamsReadable[MulticlassClassificationEvaluator] {
private val supportedMetricNames = Array("f1", "accuracy", "weightedPrecision", "weightedRecall",
"weightedTruePositiveRate", "weightedFalsePositiveRate", "weightedFMeasure",
"truePositiveRateByLabel", "falsePositiveRateByLabel", "precisionByLabel", "recallByLabel",
"fMeasureByLabel")
@Since("1.6.0")
override def load(path: String): MulticlassClassificationEvaluator = super.load(path)
}
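// A minimal usage sketch (added; the local SparkSession and the toy data
// are assumptions, not part of this file):
object MulticlassClassificationEvaluatorExample {
  def main(args: Array[String]): Unit = {
    val spark = org.apache.spark.sql.SparkSession.builder()
      .master("local[*]").appName("mc-eval-example").getOrCreate()
    import spark.implicits._
    // (prediction, label) pairs: three of the four predictions are correct
    val predictions = Seq((0.0, 0.0), (1.0, 1.0), (1.0, 1.0), (0.0, 1.0))
      .toDF("prediction", "label")
    val evaluator = new MulticlassClassificationEvaluator().setMetricName("accuracy")
    println(evaluator.evaluate(predictions)) // 0.75
    spark.stop()
  }
}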
|
pgandhi999/spark
|
mllib/src/main/scala/org/apache/spark/ml/evaluation/MulticlassClassificationEvaluator.scala
|
Scala
|
apache-2.0
| 6,135
|
/**
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt.protocol.testing.codec
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
trait TestItemDetailFormats { self: sbt.internal.testing.StatusFormats with sjsonnew.BasicJsonProtocol =>
implicit lazy val TestItemDetailFormat: JsonFormat[sbt.protocol.testing.TestItemDetail] = new JsonFormat[sbt.protocol.testing.TestItemDetail] {
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.protocol.testing.TestItemDetail = {
jsOpt match {
case Some(js) =>
unbuilder.beginObject(js)
val fullyQualifiedName = unbuilder.readField[String]("fullyQualifiedName")
val status = unbuilder.readField[sbt.testing.Status]("status")
val duration = unbuilder.readField[Option[Long]]("duration")
unbuilder.endObject()
sbt.protocol.testing.TestItemDetail(fullyQualifiedName, status, duration)
case None =>
deserializationError("Expected JsObject but found None")
}
}
override def write[J](obj: sbt.protocol.testing.TestItemDetail, builder: Builder[J]): Unit = {
builder.beginObject()
builder.addField("fullyQualifiedName", obj.fullyQualifiedName)
builder.addField("status", obj.status)
builder.addField("duration", obj.duration)
builder.endObject()
}
}
}
|
Duhemm/sbt
|
testing/src/main/contraband-scala/sbt/protocol/testing/codec/TestItemDetailFormats.scala
|
Scala
|
bsd-3-clause
| 1,396
|
package com.bigm.bot.engine
import akka.actor.{Actor, ActorLogging}
import com.bigm.bot.events.{IntentVote, Say, TextResponse}
import com.bigm.bot.services.ApiAiService
import com.google.inject.Inject
class ApiAiIntentActor @Inject()(service: ApiAiService)
extends Actor with ActorLogging {
import context.dispatcher
def receive = {
case ev@TextResponse(_, from, text, _) =>
log.debug("ApiAiIntentActor received TextResponse")
// avoid closing over mutable state
val currentSender = sender()
service.getIntent(text) map { response =>
if (response.getResult.getAction == "input.unknown") {
log.debug("unknown intent")
currentSender ! IntentVote(0.0, ev)
} else {
val message = response.getResult.getFulfillment.getSpeech
currentSender ! IntentVote(response.getResult.getScore, Say(from, text, message))
}
}
}
}
|
bigmlab/bigm-bot
|
src/main/scala/com/bigm/bot/engine/ApiAiIntentActor.scala
|
Scala
|
apache-2.0
| 924
|
package lila.analyse
import chess.format.pgn.{ Pgn, Tag, Turn, Move }
import chess.OpeningExplorer.Opening
import chess.{ Status, Color, Clock }
private[analyse] final class Annotator(netDomain: String) {
def apply(
p: Pgn,
analysis: Option[Analysis],
opening: Option[Opening],
winner: Option[Color],
status: Status,
clock: Option[Clock]): Pgn =
annotateStatus(winner, status) {
annotateOpening(opening) {
annotateTurns(p, analysis ?? (_.advices))
}.copy(
tags = p.tags :+ Tag("Annotator", netDomain)
)
}
import chess.{ Status => S }
private def annotateStatus(winner: Option[Color], status: Status)(p: Pgn) = (winner match {
case Some(color) =>
val loserName = (!color).toString.capitalize
status match {
case Status.Mate => s"$loserName is checkmated".some
case Status.Resign => s"$loserName resigns".some
case Status.Timeout => s"$loserName leaves the game".some
case Status.Outoftime => s"$loserName forfeits on time".some
case Status.Cheat => s"$loserName forfeits by computer assistance".some
case _ => none
}
case None => status match {
case Status.Aborted => "Game is aborted".some
case Status.Stalemate => "Stalemate".some
case Status.Draw => "Draw".some
case _ => none
}
}) match {
case Some(text) => p.updateLastPly(_.copy(result = text.some))
case None => p
}
private def annotateOpening(opening: Option[Opening])(p: Pgn) = opening.fold(p) { o =>
p.updatePly(o.size, _.copy(opening = o.name.some))
}
private def annotateTurns(p: Pgn, advices: List[Advice]): Pgn =
advices.foldLeft(p) {
case (pgn, advice) => pgn.updateTurn(advice.turn, turn =>
turn.update(advice.color, move =>
move.copy(
nag = advice.nag.code.some,
comment = advice.makeComment(true, true).some,
variation = makeVariation(turn, advice)
)
)
)
}
private def makeVariation(turn: Turn, advice: Advice): List[Turn] =
Turn.fromMoves(
advice.info.variation take 20 map { san => Move(san) },
turn plyOf advice.color
)
}
|
danilovsergey/i-bur
|
modules/analyse/src/main/Annotator.scala
|
Scala
|
mit
| 2,266
|
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.index.index
import java.time.ZonedDateTime
import org.locationtech.geomesa.curve.S2SFC
import org.locationtech.geomesa.filter.{Bounds, FilterValues}
import org.locationtech.jts.geom.Geometry
package object s3 {
case class S3IndexKey(bin: Short, s: Long, offset: Int) extends Ordered[S3IndexKey] {
override def compare(that: S3IndexKey): Int = {
val b = Ordering.Short.compare(bin, that.bin)
if (b != 0) { b } else {
Ordering.Long.compare(s, that.s)
}
}
}
case class S3IndexValues(
sfc: S2SFC,
maxTime: Int,
geometries: FilterValues[Geometry],
spatialBounds: Seq[(Double, Double, Double, Double)],
intervals: FilterValues[Bounds[ZonedDateTime]],
temporalBounds: Map[Short, Seq[(Int, Int)]],
temporalUnbounded: Seq[(Short, Short)])
}
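// A small example (added) of the ordering defined above: keys compare by
// time bin first, then by S2 cell id; `offset` never participates.
object S3IndexKeyOrderingExample {
  def main(args: Array[String]): Unit = {
    val a = s3.S3IndexKey(bin = 1.toShort, s = 10L, offset = 99)
    val b = s3.S3IndexKey(bin = 2.toShort, s = 0L, offset = 0)
    println(a < b) // true: bin 1 sorts before bin 2 regardless of s and offset
  }
}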
|
aheyne/geomesa
|
geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/index/s3/package.scala
|
Scala
|
apache-2.0
| 1,317
|
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.compute.spark
import com.typesafe.scalalogging.LazyLogging
import org.apache.hadoop.conf.Configuration
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.geotools.data._
import org.locationtech.geomesa.spark.accumulo.AccumuloSpatialRDDProvider
import org.locationtech.geomesa.utils.conf.GeoMesaSystemProperties
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
@deprecated
object GeoMesaSpark extends LazyLogging {
def init(conf: SparkConf, ds: DataStore): SparkConf = init(conf, ds.getTypeNames.map(ds.getSchema))
def init(conf: SparkConf, sfts: Seq[SimpleFeatureType]): SparkConf = {
import org.locationtech.geomesa.jobs.mapreduce.GeoMesaAccumuloInputFormat.SYS_PROP_SPARK_LOAD_CP
val typeOptions = GeoMesaSparkKryoRegistrator.systemProperties(sfts: _*)
typeOptions.foreach { case (k,v) => System.setProperty(k, v) }
val typeOpts = typeOptions.map { case (k,v) => s"-D$k=$v" }
val jarOpt = Option(GeoMesaSystemProperties.getProperty(SYS_PROP_SPARK_LOAD_CP)).map(v => s"-D$SYS_PROP_SPARK_LOAD_CP=$v")
val extraOpts = (typeOpts ++ jarOpt).mkString(" ")
val newOpts = if (conf.contains("spark.executor.extraJavaOptions")) {
conf.get("spark.executor.extraJavaOptions").concat(" ").concat(extraOpts)
} else {
extraOpts
}
conf.set("spark.executor.extraJavaOptions", newOpts)
// These configurations can be set in spark-defaults.conf
conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
conf.set("spark.kryo.registrator", classOf[GeoMesaSparkKryoRegistrator].getName)
}
def rdd(conf: Configuration, sc: SparkContext, params: Map[String, String], query: Query): RDD[SimpleFeature] =
new AccumuloSpatialRDDProvider().rdd(conf, sc, params, query)
/**
* Writes this RDD to a GeoMesa table.
* The type must exist in the data store, and all of the features in the RDD must be of this type.
*
   * @param rdd      the features to write
   * @param params   the data store connection parameters
   * @param typeName the name of the simple feature type being written
*/
def save(rdd: RDD[SimpleFeature], params: Map[String, String], typeName: String): Unit =
new AccumuloSpatialRDDProvider().save(rdd, params, typeName)
}
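// A hypothetical usage sketch (added) for this deprecated entry point;
// `ds` and `dsParams` stand for an already-configured Accumulo data store
// and its connection parameters:
//
//   val conf = GeoMesaSpark.init(new SparkConf().setAppName("demo"), ds)
//   val sc = new SparkContext(conf)
//   val features = GeoMesaSpark.rdd(new Configuration(), sc, dsParams, new Query("mySft"))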
|
elahrvivaz/geomesa
|
geomesa-accumulo/geomesa-accumulo-compute/src/main/scala/org/locationtech/geomesa/compute/spark/GeoMesaSpark.scala
|
Scala
|
apache-2.0
| 2,676
|
package com.sksamuel.elastic4s.cat
import com.sksamuel.elastic4s.RefreshPolicy
import com.sksamuel.elastic4s.testkit.DockerTests
import org.scalatest.{FlatSpec, Matchers}
class CatNodesTest extends FlatSpec with Matchers with DockerTests {
client.execute {
bulk(
indexInto("catnodes1/landmarks").fields("name" -> "hampton court palace"),
indexInto("catnodes2/landmarks").fields("name" -> "hampton court palace")
).refresh(RefreshPolicy.Immediate)
}.await
"cats nodes" should "return all nodes" in {
val result = client.execute {
catNodes()
}.await.result.head
result.load_1m > 0 shouldBe true
result.cpu > 0 shouldBe true
result.heapPercent > 0 shouldBe true
result.ramPercent > 0 shouldBe true
}
}
|
Tecsisa/elastic4s
|
elastic4s-tests/src/test/scala/com/sksamuel/elastic4s/cat/CatNodesTest.scala
|
Scala
|
apache-2.0
| 761
|
// These are meant to be typed into the REPL. You can also run
// scala -Xnojline < repl-session.scala to run them all at once.
def box(s : String) { // Look carefully: no =
val border = "-" * s.length + "--\n"
println("\n" + border + "|" + s + "|\n" + border)
}
box("Fred")
box("Wilma")
|
nmt1994/Scala-Practica
|
src/week1/codes/ch02/sec10/repl-session.scala
|
Scala
|
mit
| 295
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.utils
import kafka.api.LeaderAndIsr
import kafka.common.TopicAndPartition
import kafka.controller.{IsrChangeNotificationListener, LeaderIsrAndControllerEpoch}
import kafka.utils.ZkUtils._
import org.I0Itec.zkclient.ZkClient
import org.apache.zookeeper.data.Stat
import scala.collection._
object ReplicationUtils extends Logging {
private val IsrChangeNotificationPrefix = "isr_change_"
def updateLeaderAndIsr(zkUtils: ZkUtils, topic: String, partitionId: Int, newLeaderAndIsr: LeaderAndIsr, controllerEpoch: Int,
zkVersion: Int): (Boolean,Int) = {
debug("Updated ISR for partition [%s,%d] to %s".format(topic, partitionId, newLeaderAndIsr.isr.mkString(",")))
val path = getTopicPartitionLeaderAndIsrPath(topic, partitionId)
val newLeaderData = zkUtils.leaderAndIsrZkData(newLeaderAndIsr, controllerEpoch)
// use the epoch of the controller that made the leadership decision, instead of the current controller epoch
val updatePersistentPath: (Boolean, Int) = zkUtils.conditionalUpdatePersistentPath(path, newLeaderData, zkVersion, Some(checkLeaderAndIsrZkData))
updatePersistentPath
}
def propagateIsrChanges(zkUtils: ZkUtils, isrChangeSet: Set[TopicAndPartition]): Unit = {
val isrChangeNotificationPath: String = zkUtils.createSequentialPersistentPath(
ZkUtils.IsrChangeNotificationPath + "/" + IsrChangeNotificationPrefix,
generateIsrChangeJson(isrChangeSet))
debug("Added " + isrChangeNotificationPath + " for " + isrChangeSet)
}
def checkLeaderAndIsrZkData(zkUtils: ZkUtils, path: String, expectedLeaderAndIsrInfo: String): (Boolean,Int) = {
try {
val writtenLeaderAndIsrInfo = zkUtils.readDataMaybeNull(path)
val writtenLeaderOpt = writtenLeaderAndIsrInfo._1
val writtenStat = writtenLeaderAndIsrInfo._2
val expectedLeader = parseLeaderAndIsr(expectedLeaderAndIsrInfo, path, writtenStat)
writtenLeaderOpt match {
case Some(writtenData) =>
val writtenLeader = parseLeaderAndIsr(writtenData, path, writtenStat)
(expectedLeader,writtenLeader) match {
case (Some(expectedLeader),Some(writtenLeader)) =>
if(expectedLeader == writtenLeader)
return (true,writtenStat.getVersion())
case _ =>
}
case None =>
}
} catch {
case e1: Exception =>
}
(false,-1)
}
def getLeaderIsrAndEpochForPartition(zkUtils: ZkUtils, topic: String, partition: Int):Option[LeaderIsrAndControllerEpoch] = {
val leaderAndIsrPath = getTopicPartitionLeaderAndIsrPath(topic, partition)
val (leaderAndIsrOpt, stat) = zkUtils.readDataMaybeNull(leaderAndIsrPath)
leaderAndIsrOpt.flatMap(leaderAndIsrStr => parseLeaderAndIsr(leaderAndIsrStr, leaderAndIsrPath, stat))
}
private def parseLeaderAndIsr(leaderAndIsrStr: String, path: String, stat: Stat)
: Option[LeaderIsrAndControllerEpoch] = {
Json.parseFull(leaderAndIsrStr).flatMap {m =>
val leaderIsrAndEpochInfo = m.asInstanceOf[Map[String, Any]]
val leader = leaderIsrAndEpochInfo.get("leader").get.asInstanceOf[Int]
val epoch = leaderIsrAndEpochInfo.get("leader_epoch").get.asInstanceOf[Int]
val isr = leaderIsrAndEpochInfo.get("isr").get.asInstanceOf[List[Int]]
val controllerEpoch = leaderIsrAndEpochInfo.get("controller_epoch").get.asInstanceOf[Int]
val zkPathVersion = stat.getVersion
debug("Leader %d, Epoch %d, Isr %s, Zk path version %d for leaderAndIsrPath %s".format(leader, epoch,
isr.toString(), zkPathVersion, path))
Some(LeaderIsrAndControllerEpoch(LeaderAndIsr(leader, epoch, isr, zkPathVersion), controllerEpoch))}
}
private def generateIsrChangeJson(isrChanges: Set[TopicAndPartition]): String = {
val partitions = isrChanges.map(tp => Map("topic" -> tp.topic, "partition" -> tp.partition)).toArray
Json.encode(Map("version" -> IsrChangeNotificationListener.version, "partitions" -> partitions))
}
}
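// Example (added), assuming IsrChangeNotificationListener.version == 1:
// generateIsrChangeJson(Set(TopicAndPartition("t", 0))) yields
//   {"version":1,"partitions":[{"topic":"t","partition":0}]}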
|
Zhiqiang-He/kafka-0914-edit
|
core/src/main/scala/kafka/utils/ReplicationUtils.scala
|
Scala
|
apache-2.0
| 4,780
|
package com.github.shadowsocks
import android.content.Intent
import android.os.Bundle
import android.support.design.widget.Snackbar
import android.support.v7.app.{AlertDialog, AppCompatActivity}
import android.support.v7.widget.RecyclerView.ViewHolder
import android.support.v7.widget.Toolbar.OnMenuItemClickListener
import android.support.v7.widget.helper.ItemTouchHelper
import android.support.v7.widget.helper.ItemTouchHelper.SimpleCallback
import android.support.v7.widget.{DefaultItemAnimator, LinearLayoutManager, RecyclerView, Toolbar}
import android.text.style.TextAppearanceSpan
import android.text.{SpannableStringBuilder, Spanned}
import android.view.View.{OnAttachStateChangeListener, OnClickListener}
import android.view.{LayoutInflater, MenuItem, View, ViewGroup}
import android.widget.{CheckedTextView, ImageView, LinearLayout, Toast}
import com.github.shadowsocks.database.Profile
import com.github.shadowsocks.utils.{Parser, TrafficMonitor, Utils}
import com.google.zxing.integration.android.IntentIntegrator
import net.glxn.qrgen.android.QRCode
import scala.collection.mutable.ArrayBuffer
/**
* @author Mygod
*/
class ProfileManagerActivity extends AppCompatActivity with OnMenuItemClickListener {
private class ProfileViewHolder(val view: View) extends RecyclerView.ViewHolder(view) with View.OnClickListener {
private var item: Profile = _
private val text = itemView.findViewById(android.R.id.text1).asInstanceOf[CheckedTextView]
itemView.setOnClickListener(this)
{
val qrcode = itemView.findViewById(R.id.qrcode)
qrcode.setOnClickListener((v: View) => {
val image = new ImageView(ProfileManagerActivity.this)
image.setLayoutParams(new LinearLayout.LayoutParams(-1, -1))
val qrcode = QRCode.from(Parser.generate(item))
.withSize(Utils.dpToPx(ProfileManagerActivity.this, 250), Utils.dpToPx(ProfileManagerActivity.this, 250))
.asInstanceOf[QRCode].bitmap()
image.setImageBitmap(qrcode)
new AlertDialog.Builder(ProfileManagerActivity.this)
.setCancelable(true)
.setNegativeButton(R.string.close, null)
.setView(image)
.create()
.show()
})
qrcode.setOnLongClickListener((v: View) => {
Utils.positionToast(Toast.makeText(ProfileManagerActivity.this, R.string.qrcode, Toast.LENGTH_SHORT), qrcode,
getWindow, 0, Utils.dpToPx(ProfileManagerActivity.this, 8)).show
true
})
}
def updateText() {
val builder = new SpannableStringBuilder
builder.append(item.name)
if (item.tx != 0 || item.rx != 0) {
val start = builder.length
builder.append(getString(R.string.stat_profiles,
TrafficMonitor.formatTraffic(item.tx), TrafficMonitor.formatTraffic(item.rx)))
builder.setSpan(new TextAppearanceSpan(ProfileManagerActivity.this, android.R.style.TextAppearance_Small),
start + 1, builder.length, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE)
}
text.setText(builder)
}
def bind(item: Profile) {
this.item = item
updateText()
text.setChecked(item.id == ShadowsocksApplication.profileId)
}
def onClick(v: View) = {
ShadowsocksApplication.switchProfile(item.id)
finish
}
}
private class ProfilesAdapter extends RecyclerView.Adapter[ProfileViewHolder] {
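    // Removals are staged in recycleBin until the undo snackbar goes away:
    // undoRemoves restores the staged entries, commitRemoves deletes them for good.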
private val recycleBin = new ArrayBuffer[(Int, Profile)]
private var profiles = new ArrayBuffer[Profile]
profiles ++= ShadowsocksApplication.profileManager.getAllProfiles.getOrElse(List[Profile]())
def getItemCount = profiles.length
def onBindViewHolder(vh: ProfileViewHolder, i: Int) = vh.bind(profiles(i))
def onCreateViewHolder(vg: ViewGroup, i: Int) =
new ProfileViewHolder(LayoutInflater.from(vg.getContext).inflate(R.layout.layout_profiles_item, vg, false))
def add(item: Profile) {
removedSnackbar.dismiss
commitRemoves
val pos = getItemCount
profiles += item
notifyItemInserted(pos)
}
def remove(pos: Int) {
recycleBin.append((pos, profiles(pos)))
profiles.remove(pos)
notifyItemRemoved(pos)
}
def undoRemoves {
for ((index, item) <- recycleBin.reverseIterator) {
profiles.insert(index, item)
notifyItemInserted(index)
}
recycleBin.clear
}
def commitRemoves {
for ((index, item) <- recycleBin) {
ShadowsocksApplication.profileManager.delProfile(item.id)
if (item.id == ShadowsocksApplication.profileId) ShadowsocksApplication.profileId(-1)
}
recycleBin.clear
}
}
private lazy val profilesAdapter = new ProfilesAdapter
private var removedSnackbar: Snackbar = _
override def onCreate(savedInstanceState: Bundle) {
super.onCreate(savedInstanceState)
setContentView(R.layout.layout_profiles)
val toolbar = findViewById(R.id.toolbar).asInstanceOf[Toolbar]
toolbar.setTitle(R.string.profiles)
toolbar.setNavigationIcon(R.drawable.abc_ic_ab_back_mtrl_am_alpha)
toolbar.setNavigationOnClickListener((v: View) => {
val intent = getParentActivityIntent
if (intent == null) finish else navigateUpTo(intent)
})
toolbar.inflateMenu(R.menu.add_profile_methods)
toolbar.setOnMenuItemClickListener(this)
ShadowsocksApplication.profileManager.setProfileAddedListener(profilesAdapter.add)
val profilesList = findViewById(R.id.profilesList).asInstanceOf[RecyclerView]
profilesList.setLayoutManager(new LinearLayoutManager(this))
profilesList.setItemAnimator(new DefaultItemAnimator)
profilesList.setAdapter(profilesAdapter)
removedSnackbar = Snackbar.make(findViewById(android.R.id.content), R.string.removed, Snackbar.LENGTH_LONG)
.setAction(R.string.undo, ((v: View) => profilesAdapter.undoRemoves): OnClickListener)
removedSnackbar.getView.addOnAttachStateChangeListener(new OnAttachStateChangeListener {
def onViewDetachedFromWindow(v: View) = profilesAdapter.commitRemoves
def onViewAttachedToWindow(v: View) = ()
})
new ItemTouchHelper(new SimpleCallback(0, ItemTouchHelper.START | ItemTouchHelper.END) {
def onSwiped(viewHolder: ViewHolder, direction: Int) = {
profilesAdapter.remove(viewHolder.getAdapterPosition)
removedSnackbar.show
}
def onMove(recyclerView: RecyclerView, viewHolder: ViewHolder, target: ViewHolder) = false // TODO?
}).attachToRecyclerView(profilesList)
}
override def onDestroy {
super.onDestroy
ShadowsocksApplication.profileManager.setProfileAddedListener(null)
profilesAdapter.commitRemoves
}
override def onActivityResult(requestCode: Int, resultCode: Int, data: Intent) {
val scanResult = IntentIntegrator.parseActivityResult(requestCode, resultCode, data)
if (scanResult != null) Parser.parse(scanResult.getContents) match {
case Some(profile) => ShadowsocksApplication.profileManager.createProfile(profile)
case _ => // ignore
}
}
def onMenuItemClick(item: MenuItem) = item.getItemId match {
case R.id.scan_qr_code =>
val integrator = new IntentIntegrator(this)
val list = new java.util.ArrayList(IntentIntegrator.TARGET_ALL_KNOWN)
list.add("tw.com.quickmark")
integrator.setTargetApplications(list)
integrator.initiateScan()
true
case R.id.manual_settings =>
ShadowsocksApplication.profileManager.reload(-1)
ShadowsocksApplication.switchProfile(ShadowsocksApplication.profileManager.save.id)
finish
true
case _ => false
}
}
|
tenwx/shadowsocks-android
|
src/main/scala/com/github/shadowsocks/ProfileManagerActivity.scala
|
Scala
|
gpl-3.0
| 7,585
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.classification
import org.apache.spark.annotation.Since
import org.apache.spark.ml.linalg.Vector
import org.apache.spark.mllib.evaluation.{BinaryClassificationMetrics, MulticlassMetrics}
import org.apache.spark.sql.{DataFrame, Row}
import org.apache.spark.sql.functions.{col, lit}
import org.apache.spark.sql.types.DoubleType
/**
* Abstraction for multiclass classification results for a given model.
*/
private[classification] trait ClassificationSummary extends Serializable {
/**
* Dataframe output by the model's `transform` method.
*/
@Since("3.1.0")
def predictions: DataFrame
/** Field in "predictions" which gives the prediction of each class. */
@Since("3.1.0")
def predictionCol: String
/** Field in "predictions" which gives the true label of each instance (if available). */
@Since("3.1.0")
def labelCol: String
/** Field in "predictions" which gives the weight of each instance as a vector. */
@Since("3.1.0")
def weightCol: String
@transient private val multiclassMetrics = {
val weightColumn = if (predictions.schema.fieldNames.contains(weightCol)) {
col(weightCol).cast(DoubleType)
} else {
lit(1.0)
}
new MulticlassMetrics(
predictions.select(col(predictionCol), col(labelCol).cast(DoubleType), weightColumn)
.rdd.map {
case Row(prediction: Double, label: Double, weight: Double) => (prediction, label, weight)
})
}
/**
* Returns the sequence of labels in ascending order. This order matches the order used
* in metrics which are specified as arrays over labels, e.g., truePositiveRateByLabel.
*
   * Note: In most cases, it will be values {0.0, 1.0, ..., numClasses-1}. However, if the
* training set is missing a label, then all of the arrays over labels
* (e.g., from truePositiveRateByLabel) will be of length numClasses-1 instead of the
* expected numClasses.
*/
@Since("3.1.0")
def labels: Array[Double] = multiclassMetrics.labels
/** Returns true positive rate for each label (category). */
@Since("3.1.0")
def truePositiveRateByLabel: Array[Double] = recallByLabel
/** Returns false positive rate for each label (category). */
@Since("3.1.0")
def falsePositiveRateByLabel: Array[Double] = {
multiclassMetrics.labels.map(label => multiclassMetrics.falsePositiveRate(label))
}
/** Returns precision for each label (category). */
@Since("3.1.0")
def precisionByLabel: Array[Double] = {
multiclassMetrics.labels.map(label => multiclassMetrics.precision(label))
}
/** Returns recall for each label (category). */
@Since("3.1.0")
def recallByLabel: Array[Double] = {
multiclassMetrics.labels.map(label => multiclassMetrics.recall(label))
}
/** Returns f-measure for each label (category). */
@Since("3.1.0")
def fMeasureByLabel(beta: Double): Array[Double] = {
multiclassMetrics.labels.map(label => multiclassMetrics.fMeasure(label, beta))
}
/** Returns f1-measure for each label (category). */
@Since("3.1.0")
def fMeasureByLabel: Array[Double] = fMeasureByLabel(1.0)
/**
* Returns accuracy.
   * (the fraction of correctly classified instances
   * out of the total number of instances.)
*/
@Since("3.1.0")
def accuracy: Double = multiclassMetrics.accuracy
/**
* Returns weighted true positive rate.
   * (equal to weighted recall)
*/
@Since("3.1.0")
def weightedTruePositiveRate: Double = weightedRecall
/** Returns weighted false positive rate. */
@Since("3.1.0")
def weightedFalsePositiveRate: Double = multiclassMetrics.weightedFalsePositiveRate
/**
* Returns weighted averaged recall.
   * (per-label recall averaged using label weights)
*/
@Since("3.1.0")
def weightedRecall: Double = multiclassMetrics.weightedRecall
/** Returns weighted averaged precision. */
@Since("3.1.0")
def weightedPrecision: Double = multiclassMetrics.weightedPrecision
/** Returns weighted averaged f-measure. */
@Since("3.1.0")
def weightedFMeasure(beta: Double): Double = multiclassMetrics.weightedFMeasure(beta)
/** Returns weighted averaged f1-measure. */
@Since("3.1.0")
def weightedFMeasure: Double = multiclassMetrics.weightedFMeasure(1.0)
}
/**
* Abstraction for training results.
*/
private[classification] trait TrainingSummary {
/**
   * Objective function (scaled loss + regularization) at each iteration.
   * It contains one more element (the initial state) than the number of iterations.
*/
@Since("3.1.0")
def objectiveHistory: Array[Double]
/** Number of training iterations. */
@Since("3.1.0")
def totalIterations: Int = {
assert(objectiveHistory.length > 0, "objectiveHistory length should be greater than 0.")
objectiveHistory.length - 1
}
}
/**
* Abstraction for binary classification results for a given model.
*/
private[classification] trait BinaryClassificationSummary extends ClassificationSummary {
private val sparkSession = predictions.sparkSession
import sparkSession.implicits._
/**
* Field in "predictions" which gives the probability or rawPrediction of each class as a
* vector.
*/
def scoreCol: String = null
@transient private val binaryMetrics = {
val weightColumn = if (predictions.schema.fieldNames.contains(weightCol)) {
col(weightCol).cast(DoubleType)
} else {
lit(1.0)
}
// TODO: Allow the user to vary the number of bins using a setBins method in
// BinaryClassificationMetrics. For now the default is set to 1000.
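    // score(1), the positive-class probability (or raw prediction), is used as the ranking score.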
new BinaryClassificationMetrics(
predictions.select(col(scoreCol), col(labelCol).cast(DoubleType), weightColumn).rdd.map {
case Row(score: Vector, label: Double, weight: Double) => (score(1), label, weight)
}, 1000
)
}
/**
* Returns the receiver operating characteristic (ROC) curve,
* which is a Dataframe having two fields (FPR, TPR)
* with (0.0, 0.0) prepended and (1.0, 1.0) appended to it.
* See http://en.wikipedia.org/wiki/Receiver_operating_characteristic
*/
@Since("3.1.0")
@transient lazy val roc: DataFrame = binaryMetrics.roc().toDF("FPR", "TPR")
/**
* Computes the area under the receiver operating characteristic (ROC) curve.
*/
@Since("3.1.0")
lazy val areaUnderROC: Double = binaryMetrics.areaUnderROC()
/**
* Returns the precision-recall curve, which is a Dataframe containing
   * two fields (recall, precision) with (0.0, 1.0) prepended to it.
*/
@Since("3.1.0")
@transient lazy val pr: DataFrame = binaryMetrics.pr().toDF("recall", "precision")
/**
   * Returns the (threshold, F-Measure) curve as a dataframe, computed with beta = 1.0.
*/
@Since("3.1.0")
@transient lazy val fMeasureByThreshold: DataFrame = {
binaryMetrics.fMeasureByThreshold().toDF("threshold", "F-Measure")
}
/**
   * Returns the (threshold, precision) curve as a dataframe.
   * Every distinct probability produced by transforming the dataset is used
   * as a threshold when calculating the precision.
*/
@Since("3.1.0")
@transient lazy val precisionByThreshold: DataFrame = {
binaryMetrics.precisionByThreshold().toDF("threshold", "precision")
}
/**
   * Returns the (threshold, recall) curve as a dataframe.
   * Every distinct probability produced by transforming the dataset is used
   * as a threshold when calculating the recall.
*/
@Since("3.1.0")
@transient lazy val recallByThreshold: DataFrame = {
binaryMetrics.recallByThreshold().toDF("threshold", "recall")
}
}
|
ConeyLiu/spark
|
mllib/src/main/scala/org/apache/spark/ml/classification/ClassificationSummary.scala
|
Scala
|
apache-2.0
| 8,384
|
package org.bitcoins.core.number
import org.bitcoins.testkitcore.gen.NumberGenerator
import org.scalacheck.{Prop, Properties}
import scala.util.Try
/** Created by chris on 6/21/16.
*/
class Int64Spec extends Properties("Int64Spec") {
property("Symmetrical serialization") = Prop.forAll(NumberGenerator.int64s) {
int64: Int64 =>
Int64(int64.hex) == int64
}
property("Additive identity") = Prop.forAll(NumberGenerator.int64s) {
int64: Int64 =>
int64 + Int64.zero == int64
}
property("Add two arbitrary int64s") =
Prop.forAll(NumberGenerator.int64s, NumberGenerator.int64s) {
(num1: Int64, num2: Int64) =>
val result = num1.toBigInt + num2.toBigInt
if (result >= Int64.min.toLong && result <= Int64.max.toLong)
num1 + num2 == Int64(result)
else Try(num1 + num2).isFailure
}
property("Subtractive identity") = Prop.forAll(NumberGenerator.int64s) {
int64: Int64 =>
int64 - Int64.zero == int64
}
property("Subtract two arbitrary int64s") =
Prop.forAll(NumberGenerator.int64s, NumberGenerator.int64s) {
(num1: Int64, num2: Int64) =>
val result = num1.toBigInt - num2.toBigInt
if (result >= Int64.min.toLong && result <= Int64.max.toLong)
num1 - num2 == Int64(result)
else Try(num1 - num2).isFailure
}
property("Multiplying by zero") = Prop.forAll(NumberGenerator.int64s) {
int64: Int64 =>
int64 * Int64.zero == Int64.zero
}
property("Multiplicative identity") = Prop.forAll(NumberGenerator.int64s) {
int64: Int64 =>
int64 * Int64.one == int64
}
property("Multiply two arbitrary int64s") =
Prop.forAll(NumberGenerator.int64s, NumberGenerator.int64s) {
(num1: Int64, num2: Int64) =>
val result = num1.toBigInt * num2.toBigInt
if (result >= Int64.min.toLong && result <= Int64.max.toLong)
num1 * num2 == Int64(result)
else Try(num1 * num2).isFailure
}
property("<= & >") =
Prop.forAll(NumberGenerator.int64s, NumberGenerator.int64s) {
(num1: Int64, num2: Int64) =>
if (num1.toLong <= num2.toLong) num1 <= num2
else num1 > num2
}
property("< & =>") =
Prop.forAll(NumberGenerator.int64s, NumberGenerator.int64s) {
(num1: Int64, num2: Int64) =>
if (num1.toLong < num2.toLong) num1 < num2
else num1 >= num2
}
property("== & !=") =
Prop.forAll(NumberGenerator.int64s, NumberGenerator.int64s) {
(num1: Int64, num2: Int64) =>
if (num1.toLong == num2.toLong) num1 == num2
else num1 != num2
}
property("|") = Prop.forAll(NumberGenerator.int64s, NumberGenerator.int64s) {
(num1: Int64, num2: Int64) =>
Int64(num1.toLong | num2.toLong) == (num1 | num2)
}
property("&") = Prop.forAll(NumberGenerator.int64s, NumberGenerator.int64s) {
(num1: Int64, num2: Int64) =>
Int64(num1.toLong & num2.toLong) == (num1 & num2)
}
property("negation") = {
Prop.forAll(NumberGenerator.int64s) { int64 =>
-int64 == Int64(-int64.toLong)
}
}
}
|
bitcoin-s/bitcoin-s
|
core-test/src/test/scala/org/bitcoins/core/number/Int64Spec.scala
|
Scala
|
mit
| 3,086
|
package us.feliscat.ir.fulltext.indri.ja
import us.feliscat.converter.MultiLingualNgramSegmentator
import us.feliscat.converter.ja.JapaneseNgramSegmentator
import us.feliscat.ir.fulltext.indri.MultiLingualTrecTextFileFormatReviser
import us.feliscat.m17n.Japanese
import us.feliscat.sentence.ja.JapaneseSentenceSplitter
import us.feliscat.text.StringOption
import us.feliscat.text.analyzer.mor.mecab.UnidicMecab
/**
* <pre>
* Created on 2017/02/11.
* </pre>
*
* @author K.Sakamoto
*/
class JapaneseTrecTextFileFormatReviser(nGram: Int, isChar: Boolean)
extends MultiLingualTrecTextFileFormatReviser(nGram, isChar)
with Japanese {
override protected def normalizeSentences(line: String): String = {
val builder = new StringBuilder()
JapaneseSentenceSplitter.split(StringOption(line)) foreach {
sentence =>
builder.append(sentence.text)
}
builder.result
}
override protected val segmentator: MultiLingualNgramSegmentator = {
new JapaneseNgramSegmentator(nGram)
}
override protected def segment(text: StringOption, isContentWord: Boolean): String = {
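    // Content words are segmented on morphological tokens (UniDic MeCab);
    // everything else falls back to character n-grams.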
if (isContentWord) {
segmentator.segmentateWithToken(UnidicMecab.extractWords(text)).getOrElse("")
} else {
segmentator.asInstanceOf[JapaneseNgramSegmentator].segmentateWithCharacter(text).getOrElse("")
}
}
}
|
ktr-skmt/FelisCatusZero-multilingual
|
libraries/src/main/scala/us/feliscat/ir/fulltext/indri/ja/JapaneseTrecTextFileFormatReviser.scala
|
Scala
|
apache-2.0
| 1,349
|
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.internal.javadsl.cluster.typed
import akka.actor.ActorSystem
import akka.cluster.sharding.typed.javadsl.ClusterSharding
import javax.inject.Inject
import javax.inject.Provider
import play.api.Configuration
import play.api.Environment
import play.api.inject.Binding
import play.api.inject.Module
class ClusterShardingTypedModule extends Module {
override def bindings(environment: Environment, configuration: Configuration): Seq[Binding[_]] = Seq(
bind[ClusterSharding].toProvider[ClusterShardingTypedProvider]
)
}
private[lagom] class ClusterShardingTypedProvider @Inject() (system: ActorSystem) extends Provider[ClusterSharding] {
private val instance: ClusterSharding = {
import akka.actor.typed.scaladsl.adapter._
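    // Adapt the classic ActorSystem to a typed one to obtain the typed ClusterSharding extension.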
val actorSystemTyped = system.toTyped
ClusterSharding.get(actorSystemTyped)
}
override def get(): ClusterSharding = instance
}
|
lagom/lagom
|
cluster/javadsl/src/main/scala/com/lightbend/lagom/internal/javadsl/cluster/typed/ClusterShardingTypedModule.scala
|
Scala
|
apache-2.0
| 970
|
package net.defoo
import akka.actor.{Props, Actor}
import org.apache.commons.io.FileUtils
import java.nio.file.Files
import java.io.File
/**
* Created by derek on 23/02/14.
*/
class FileCopyActor extends Actor {
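  // Incremental backup: directories are simply recreated, while a regular file is
  // copied only when its CRC32 differs from the last backup; otherwise a hard link
  // to the previous copy is created instead of copying the data again.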
  def receive = {
    case BackupFile(jobId, srcFile, dest, Some(lastBackup)) =>
      if (srcFile.isDirectory) {
        dest.mkdirs()
        dest.setLastModified(srcFile.lastModified)
        sender ! BackupFileComplete(jobId = jobId, src = srcFile, dest = dest)
      } else {
        val srcChecksum = FileUtils.checksumCRC32(srcFile)
        val lastBackupChecksum = FileUtils.checksumCRC32(lastBackup)
        val newBackupChecksum = if (srcChecksum != lastBackupChecksum) {
          // changed: copy the file and checksum the fresh copy
          FileUtils.copyFile(srcFile, dest)
          FileUtils.checksumCRC32(dest)
        } else {
          // not changed: hard-link to the previous backup instead of copying
          createLinkToLastBackup(dest, lastBackup)
          lastBackupChecksum
        }
        assert(srcChecksum == newBackupChecksum)
        sender ! BackupFileComplete(jobId = jobId, src = srcFile, dest = dest, Option(srcChecksum), Option(newBackupChecksum), Option(lastBackupChecksum))
      }
    case BackupFile(jobId, srcFile, dest, None) =>
      if (srcFile.isDirectory) {
        dest.mkdirs()
        dest.setLastModified(srcFile.lastModified)
        sender ! BackupFileComplete(jobId = jobId, src = srcFile, dest = dest)
      } else {
        FileUtils.copyFile(srcFile, dest)
        val srcChecksum = FileUtils.checksumCRC32(srcFile)
        val newBackupChecksum = FileUtils.checksumCRC32(dest)
        assert(srcChecksum == newBackupChecksum)
        sender ! BackupFileComplete(jobId = jobId, src = srcFile, dest = dest, Option(srcChecksum), Option(newBackupChecksum), None)
      }
  }
def createLinkToLastBackup(dest: File, lastBackup: File) {
dest.getParentFile match {
case parent if parent != null => FileUtils.forceMkdir(parent)
case _ =>
}
Files.createLink(dest.toPath, lastBackup.toPath)
}
}
|
kcderek/scala-time-machine
|
src/main/scala/net/defoo/CopyFileActor.scala
|
Scala
|
apache-2.0
| 2,158
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.declaration
import jto.validation.{Invalid, Path, Valid, ValidationError}
import org.scalatestplus.mockito.MockitoSugar
import org.scalatestplus.play.PlaySpec
import play.api.libs.json.{JsPath, JsSuccess, Json}
class RenewRegistrationSpec extends PlaySpec with MockitoSugar {
"Form Validation" must {
"successfully validate given an enum value" in {
val data = Map("renewRegistration" -> Seq("false"))
RenewRegistration.formRule.validate(data) must
be(Valid(RenewRegistrationNo))
}
"successfully validate given an `Yes` value" in {
val data = Map("renewRegistration" -> Seq("true"))
RenewRegistration.formRule.validate(data) must
be(Valid(RenewRegistrationYes))
}
"fail to validate mandatory field" in {
RenewRegistration.formRule.validate(Map.empty) must
be(Invalid(Seq(
(Path \\ "renewRegistration") -> Seq(ValidationError("error.required.declaration.renew.registration"))
)))
}
"write correct data from enum value" in {
RenewRegistration.formWrites.writes(RenewRegistrationNo) must
be(Map("renewRegistration" -> Seq("false")))
}
"write correct data from `Yes` value" in {
RenewRegistration.formWrites.writes(RenewRegistrationYes) must
be(Map("renewRegistration" -> Seq("true")))
}
}
"JSON validation" must {
"successfully validate given an enum value" in {
Json.fromJson[RenewRegistration](Json.obj("renewRegistration" -> false)) must
be(JsSuccess(RenewRegistrationNo, JsPath))
}
"successfully validate given an `Yes` value" in {
val json = Json.obj("renewRegistration" -> true)
Json.fromJson[RenewRegistration](json) must
be(JsSuccess(RenewRegistrationYes))
}
"successfully validate given an `No` value" in {
val json = Json.obj("renewRegistration" -> false)
Json.fromJson[RenewRegistration](json) must
be(JsSuccess(RenewRegistrationNo))
}
"write the correct value" in {
Json.toJson(RenewRegistrationNo.asInstanceOf[RenewRegistration]) must
be(Json.obj("renewRegistration" -> false))
Json.toJson(RenewRegistrationYes.asInstanceOf[RenewRegistration]) must
be(Json.obj("renewRegistration" -> true))
}
}
}
|
hmrc/amls-frontend
|
test/models/declaration/RenewRegistrationSpec.scala
|
Scala
|
apache-2.0
| 2,904
|
package sbt
package internals
import Def._
/** This represents a `Setting` expression configured by the sbt DSL. */
sealed trait DslEntry {
/** Called by the parser. Sets the position where this entry was defined in the build.sbt file. */
def withPos(pos: RangePosition): DslEntry
}
object DslEntry {
implicit def fromSettingsDef(inc: SettingsDefinition): DslEntry =
DslSetting(inc)
implicit def fromSettingsDef(inc: Seq[Setting[_]]): DslEntry =
DslSetting(inc)
}
/** Represents a DSL entry which adds settings to the current project. */
sealed trait ProjectSettings extends DslEntry {
def toSettings: Seq[Setting[_]]
}
object ProjectSettings {
def unapply(e: DslEntry): Option[Seq[Setting[_]]] =
e match {
case e: ProjectSettings => Some(e.toSettings)
case _ => None
}
}
/** Represents a DSL entry which manipulates the current project. */
sealed trait ProjectManipulation extends DslEntry {
def toFunction: Project => Project
// TODO - Should we store this?
final def withPos(pos: RangePosition): DslEntry = this
}
object ProjectManipulation {
def unapply(e: DslEntry): Option[Project => Project] =
e match {
case e: ProjectManipulation => Some(e.toFunction)
case _ => None
}
}
/** this represents an actual Setting[_] or Seq[Setting[_]] configured by the sbt DSL. */
case class DslSetting(settings: SettingsDefinition) extends ProjectSettings {
def toSettings = settings.settings
final def withPos(pos: RangePosition): DslEntry = DslSetting(settings.settings.map(_.withPos(pos)))
}
/** this represents an `enablePlugins()` in the sbt DSL */
case class DslEnablePlugins(plugins: Seq[AutoPlugin]) extends ProjectManipulation {
override val toFunction: Project => Project = _.enablePlugins(plugins: _*)
}
/** this represents a `disablePlugins()` in the sbt DSL */
case class DslDisablePlugins(plugins: Seq[AutoPlugin]) extends ProjectManipulation {
override val toFunction: Project => Project = _.disablePlugins(plugins: _*)
}
/** Represents registering a set of configurations with the current project. */
case class DslConfigs(cs: Seq[Configuration]) extends ProjectManipulation {
override val toFunction: Project => Project = _.configs(cs: _*)
}
/** Represents registering an internal dependency for the current project */
case class DslDependsOn(cs: Seq[ClasspathDep[ProjectReference]]) extends ProjectManipulation {
override val toFunction: Project => Project = _.dependsOn(cs: _*)
}
/** this represents an `aggregate()` in the sbt DSL */
case class DslAggregate(refs: Seq[ProjectReference]) extends ProjectManipulation {
override val toFunction: Project => Project = _.aggregate(refs: _*)
}
|
som-snytt/xsbt
|
main/src/main/scala/sbt/internals/DslAst.scala
|
Scala
|
bsd-3-clause
| 2,725
|
package model
import java.io.File
import play.api.Play
import play.router.{DynamicPart, Parsers, StaticPart}
import scala.util.Try
/** Transforms the play Routes AST -> a representation of a model that we can use internally. */
object Transformer {
val DefaultEndpoint = "http://localhost:9000"
def <<<(routesFile: File): Try[Application] = {
Try {
val baseUrl = Play.maybeApplication.fold(DefaultEndpoint) {
app => app.configuration.getString("default.explorer.endpoint").getOrElse(DefaultEndpoint)
}
val routes = Parsers.parseRoutesFile(routesFile)
val endpoints: List[Endpoint] = for {
route <- routes
} yield {
// http verb
val verb = HttpMethod.withName(route.verb.toString())
val pathPattern = route.path
// path fragments
val pathFragments: Seq[PathFragment] = pathPattern.parts.flatMap {
case static: StaticPart =>
// remove the terminating "/" character
val value = if (static.value.endsWith("/")) static.value.substring(0, static.value.lastIndexOf('/')) else static.value
Some(StaticPathFragment(value))
case dynamic: DynamicPart =>
Some(DynamicPathFragment(dynamic.name, dynamic.constraint))
case _ => None
}
// package name
val packageName = route.call.packageName
// controller name
val controllerName = route.call.controller
// method name
val methodName = route.call.method
// parameters
val parameters = if (route.call.parameters.isDefined) {
val routeParameters = route.call.parameters.get
for {
parameter <- routeParameters
} yield {
val name = parameter.name
// param ?= "json" gives a value of "\\"json\\""
// so we have to remove the quotes surrounding the value
// the same happens for both fixed and default values
val default = parameter.default.map(removeQuotes)
val fixed = parameter.fixed.map(removeQuotes)
BasicParameter(name, fixed.getOrElse(""), required = false, default = default)
}
} else {
Seq.empty[Parameter]
}
val dynamicFragmentNames = pathFragments.flatMap {
case dynamic: DynamicPathFragment => Some(dynamic.identifier)
case _ => None
}.toSet
// remove parameters for matching fragments
val filteredParameters = parameters.filterNot {
parameter => dynamicFragmentNames.contains(parameter.name)
}
BasicEndpoint(packageName, controllerName, methodName, method = verb, fragments = pathFragments, parameters = filteredParameters)
}
BasicApplication(baseUrl, endpoints)
}
}
def transform(routesFile: File): Try[Application] = Transformer.<<<(routesFile)
/** This is to work around the quotes surrounding strings in parameter values inside the routes file. */
private[Transformer] def removeQuotes(in: String): String = {
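    // e.g. "\"json\"" becomes "json" (illustrative).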
if (in.startsWith("\\"")) in.substring(1, in.lastIndexOf("\\"")) else in
}
}
|
tikurahul/play-explorer
|
app/model/play.scala
|
Scala
|
mit
| 3,154
|
/*
* Shadowsocks - A shadowsocks client for Android
* Copyright (C) 2013 <max.c.lv@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*
 *                            ___====-_  _-====___
 *                      _--^^^#####//      \\#####^^^--_
 *                   _-^##########// (    ) \\##########^-_
 *                  -############//  |\^^/|  \\############-
 *                _/############//   (@::@)   \\############\_
 *               /#############((     \\//     ))#############\
 *              -###############\\    (oo)    //###############-
 *             -#################\\  / VV \  //#################-
 *            -###################\\/      \//###################-
 *           _#/|##########/\######(   /\   )######/\##########|\#_
 *           |/ |#/\#/\#/\/  \#/\##\  |  |  /##/\#/  \/\#/\#/\#| \|
 *           `  |/  V  V  `   V  \#\| |  | |/#/  V   '  V  V  \|  '
 *              `   `  `      `   / | |  | | \   '      '  '   '
 *                               (  | |  | |  )
 *                              __\ | |  | | /__
 *                             (vvv(VVV)(VVV)vvv)
*
* HERE BE DRAGONS
*
*/
package com.github.shadowsocks.database
import com.j256.ormlite.android.apptools.OrmLiteSqliteOpenHelper
import android.content.Context
import android.database.sqlite.SQLiteDatabase
import com.j256.ormlite.support.ConnectionSource
import com.j256.ormlite.table.TableUtils
import com.j256.ormlite.dao.Dao
object DBHelper {
val PROFILE = "profile.db"
}
class DBHelper(val context: Context)
extends OrmLiteSqliteOpenHelper(context, DBHelper.PROFILE, null, 11) {
lazy val profileDao: Dao[Profile, Int] = getDao(classOf[Profile])
def onCreate(database: SQLiteDatabase, connectionSource: ConnectionSource) {
TableUtils.createTable(connectionSource, classOf[Profile])
}
def onUpgrade(database: SQLiteDatabase, connectionSource: ConnectionSource, oldVersion: Int,
newVersion: Int) {
if (oldVersion != newVersion) {
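      // The branches below are mutually exclusive: only the first matching migration
      // runs, and the final else (a downgrade from a newer schema) drops and recreates the table.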
if (oldVersion < 8) {
profileDao.executeRaw("ALTER TABLE `profile` ADD COLUMN udpdns SMALLINT;")
profileDao.executeRaw("ALTER TABLE `profile` ADD COLUMN route VARCHAR;")
} else if (oldVersion < 9) {
profileDao.executeRaw("ALTER TABLE `profile` ADD COLUMN route VARCHAR;")
} else if (oldVersion < 10) {
profileDao.executeRaw("ALTER TABLE `profile` ADD COLUMN auth SMALLINT;")
} else if (oldVersion < 11) {
profileDao.executeRaw("ALTER TABLE `profile` ADD COLUMN ipv6 SMALLINT;")
} else {
profileDao.executeRaw("DROP TABLE IF EXISTS 'profile';")
onCreate(database, connectionSource)
}
}
}
}
|
Austinpb/shadowsocks-android
|
src/main/scala/com/github/shadowsocks/database/DBHelper.scala
|
Scala
|
gpl-3.0
| 3,304
|
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.hadoop.config
import com.twitter.util.Config
import com.twitter.zipkin.hadoop.WorstRuntimesPerTraceClient
class WorstRuntimesPerTraceClientConfig extends Config[WorstRuntimesPerTraceClient] {
val zipkinTraceUrl = "your.zipkin.url/traces"
def apply() = {
new WorstRuntimesPerTraceClient(zipkinTraceUrl)
}
}
|
jameswei/zipkin
|
zipkin-hadoop-job-runner/src/main/scala/com/twitter/zipkin/hadoop/config/WorstRuntimesPerTraceClientConfig.scala
|
Scala
|
apache-2.0
| 932
|
package com.sksamuel.elastic4s
import org.elasticsearch.action.admin.indices.recovery.{RecoveryRequest, RecoveryResponse}
import org.elasticsearch.client.Client
import scala.concurrent.Future
trait IndexRecoveryDsl {
implicit object IndexRecoveryDefinitionExecutable
extends Executable[IndexRecoveryDefinition, RecoveryResponse, RecoveryResponse] {
override def apply(c: Client, t: IndexRecoveryDefinition): Future[RecoveryResponse] = {
injectFuture(c.admin.indices.recoveries(t.build, _))
}
}
}
class IndexRecoveryDefinition(indices: Seq[String]) {
private def builder = new RecoveryRequest(indices: _*)
def build = builder
}
|
beni55/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/IndexRecoveryDsl.scala
|
Scala
|
apache-2.0
| 658
|
package xitrum.util
import java.util.{Collections, LinkedHashMap}
import java.util.Map.Entry
/**
* Non-threadsafe, non-distributed LRU cache.
*
* http://stackoverflow.com/questions/221525/how-would-you-implement-an-lru-cache-in-java-6
*/
private class NonThreadsafeLocalLruCache[K, V](capacity: Int) extends LinkedHashMap[K, V](capacity + 1, 1.0f, true) {
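  // accessOrder = true (the third constructor argument) makes iteration order
  // least-recently-accessed first, so evicting the eldest entry gives LRU behavior.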
protected override def removeEldestEntry(eldest: Entry[K, V]) = size > capacity
}
/**
* Threadsafe, non-distributed LRU cache.
*
* http://stackoverflow.com/questions/221525/how-would-you-implement-an-lru-cache-in-java-6
*
* Xitrum uses this for storing etags for static files. Each web server in a
* cluster has its own cache of (file path, mtime) -> etag.
*/
object LocalLruCache {
def apply[K, V](capacity: Int) = Collections.synchronizedMap(new NonThreadsafeLocalLruCache[K, V](capacity))
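  // Usage sketch: val cache = LocalLruCache[String, String](1000); cache.put("key", "etag")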
}
|
georgeOsdDev/xitrum
|
src/main/scala/xitrum/util/LocalLruCache.scala
|
Scala
|
mit
| 868
|
package navarrus
/**
* Represents a currency. Currencies are identified by their ISO 4217 currency codes.
*
* @param code the ISO 4217 code of the currency
* @param symbol the symbol of this currency (e.g. for the US Dollar, the symbol is "$")
* @param scale the default number of fraction digits used with this currency
*/
class Currency private (val code:String, val symbol:String, val scale:Int) {
override def equals(other: Any) = other match {
case that:Currency => this.code == that.code
case _ => false
}
override def toString = s"[$code, $symbol, $scale]"
}
object Currency {
private[this] val defaultLocale = java.util.Locale.getDefault()
private[this] val defaultCurrencyCode = java.util.Currency.getInstance(defaultLocale).getCurrencyCode
/**
* Create a Currency instance having the supplied currency code
*
* @param code the ISO 4217 currency code
* @return a Currency instance
*/
def apply(code:String = defaultCurrencyCode, locale: java.util.Locale = defaultLocale) = {
val jc = java.util.Currency.getInstance(code)
new Currency(
jc.getCurrencyCode,
      // symbol is locale-sensitive (e.g. it is "[GBP]" when locale is "US_*")
jc.getSymbol(locale),
jc.getDefaultFractionDigits
)
}
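  // Usage sketch: Currency("GBP") yields code "GBP"; the displayed symbol depends on the locale.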
}
|
angiolep/navarrus
|
src/main/scala/navarrus/Currency.scala
|
Scala
|
apache-2.0
| 1,283
|
package io.fintrospect.templating
import com.twitter.finagle.Service
import com.twitter.finagle.http.{Request, Status}
import com.twitter.io.Bufs
import com.twitter.util.Await.result
import io.fintrospect.formats.Html
import io.fintrospect.templating.View.Redirect
import org.scalatest.{FunSpec, Matchers}
class RenderViewTest extends FunSpec with Matchers {
describe("RenderView") {
val renderView = new RenderView(Html.ResponseBuilder, new TemplateRenderer {
override def toBuf(view: View) = Bufs.utf8Buf(view.template)
})
it("creates a standard View") {
val response = result(renderView(Request(), Service.const(OnClasspath(Nil))))
response.status shouldBe Status.Ok
response.contentString shouldBe "io/fintrospect/templating/OnClasspath"
}
it("creates a standard View with an overridden status") {
val response = result(renderView(Request(), Service.const(OnClasspath(Nil, Status.NotFound))))
response.status shouldBe Status.NotFound
response.contentString shouldBe "io/fintrospect/templating/OnClasspath"
}
it("creates redirect when passed a RenderView.Redirect") {
val response = result(renderView(Request(), Service.const(Redirect("newLocation", Status.BadGateway))))
response.status shouldBe Status.BadGateway
response.headerMap("Location") shouldBe "newLocation"
}
}
}
|
daviddenton/fintrospect
|
core/src/test/scala/io/fintrospect/templating/RenderViewTest.scala
|
Scala
|
apache-2.0
| 1,381
|
package edu.mit.csail.cap.query
import scala.collection.mutable
import util._
/** Cumulative counts */
trait Statistics {
def traces: List[Trace]
/** Group events by a function: event => T */
def eventsBy[T](f: PartialFunction[Event, T])(implicit top: Int = 20): List[(T, Int)] = {
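    // Tally f(e) over every event of every trace, then keep the `top` most frequent groups.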
val out = new mutable.OpenHashMap[T, Int]
for (
t <- traces;
e <- t
) {
if (f.isDefinedAt(e)) {
val i = f(e)
out.put(i, out.getOrElse(i, 0) + 1)
}
}
out.toList.sortBy(_._2).takeRight(top).reverse
}
def eventsByClass(implicit top: Int = 20) =
eventsBy { case e if e.caller.isDefined => e.parent.get.method.declarer }
def eventsByPkg(implicit top: Int = 20) =
eventsBy { case e if e.caller.isDefined => e.parent.get.method.declarer.packag }
def callsByCut(b: FrameworkBoundary) =
traces.flatMap(_.calls.collect { case (Some(e), f) if b.cut(e, f) => (e, f) })
/** Group calls by a function: (caller method, callee method) => T */
def callsBy[T](f: PartialFunction[(Method, Method), T])(implicit top: Int = 20): List[(T, Int)] = {
val out = new mutable.HashMap[T, Int]
for (
t <- traces;
(Some(caller), callee) <- t.calls
) {
val input = (caller.method, callee.method)
if (f.isDefinedAt(input)) {
val i = f(input)
out.put(i, out.getOrElse(i, 0) + 1)
}
}
out.toList.sortBy(_._2).takeRight(top).reverse
}
/** Count calls for same value of f */
def selfcalls[T](f: Method => T)(implicit top: Int = 20) =
callsBy { case (m1, m2) if f(m1) == f(m2) => f(m1) }
/** Count calls for different values of f */
def crosscalls[T, U](f: Method => T, g: (T, T) => U)(implicit top: Int = 20) =
callsBy { case (m1, m2) if f(m1) != f(m2) => g(f(m1), f(m2)) }
/** Self calls by package */
def selfByPkg(implicit top: Int = 20) =
selfcalls(_.declarer.packag)
/** Cross calls by package */
def crossByPkg(implicit top: Int = 20) =
crosscalls(_.declarer.packag, (x: String, y: String) => (x, y))
/** Cross calls by package target */
def crossByPkgDst(implicit top: Int = 20) =
crosscalls(_.declarer.packag, (_: String, y: String) => y)
/** Cross calls by package source */
def crossByPkgSrc(implicit top: Int = 20) =
crosscalls(_.declarer.packag, (x: String, _: String) => x)
/** Group events by their field and method */
def eventsByMember(implicit top: Int = 20): List[(Member, Int)] =
traces match {
case (t: DeclTrace) :: Nil =>
t.select(Enter || Read || Write) match {
case DeclTrace(c, where) =>
c.read(s"select id, COUNT(*) as x from LOG where ${where.sql} group by id order by x desc limit $top") {
rs => (t.meta.member(rs.getLong(1)), rs.getInt(2))
}
}
case (proxy: TraceProxy) :: Nil if proxy.t != proxy =>
proxy.t.eventsByMember
case _ =>
eventsBy {
case e: Enter => e.method
case e: Read => e.field
case e: Write => e.field
}
}
/** Group events by their type */
def eventsByKind: Map[Kind, Int] = {
var out: Map[Kind, Int] = Map()
for (t <- traces) {
val counts = t match {
case DeclTrace(c, where) =>
c.read(s"select event_type, COUNT(*) as x from LOG where ${where.sql} group by event_type order by x desc") {
rs => (Kinds.find(_.key == rs.getInt(1)).get, rs.getInt(2))
}
case proxy: TraceProxy if proxy.t != proxy =>
proxy.t.eventsByKind
case _ =>
debug(s"streaming from $t")
eventsBy {
case e => Kinds.find(_.apply(e)).get
}
}
for ((kind, count) <- counts)
out += kind -> (out.getOrElse(kind, 0) + count)
}
out
}
/** Group all objects in the connection by the type */
// TODO: only works for one trace
def objectsByType(implicit top: Int = 20): List[(Type, Int)] =
traces(0).c.read(s"select type, COUNT(*) as x from OBJECTS group by type order by x desc limit $top") {
rs => (traces(0).meta.typ(rs.getLong(1)), rs.getInt(2))
}
/** Graph of calls by declarer */
def classDependencies(): Digraph[ClassType, Int] = {
val out = new CountingDigraph[ClassType]
for (
t <- traces;
(Some(from), to) <- t.calls
) out.add(from.method.declarer, 1, to.method.definition.declarer)
out
}
/**
* Return a new graph whose vertices are package names.
* Source package is the concrete package.
* Destination package is the most abstract possible.
*/
def packageDependencies(): Digraph[Package, Int] = {
val out = new CountingDigraph[Package]
for (
t <- traces;
(Some(from), to) <- t.calls
) out.add(from.method.declarer.packag, 1, to.method.definition.declarer.packag)
out
}
def moduleDependencies(modules: List[ClassMask] = Nil): Digraph[ClassMask, Int] = {
// always add two special modules
val ms = PackageName("java.lang.reflect") :: PackageName("") :: modules
def module(t: ClassType): ClassMask =
ms.find(_(t)) match {
case Some(module) => module
case None => PackageName(t.packag)
}
val out = new CountingDigraph[ClassMask]
for (
t <- traces;
(Some(from), to) <- t.calls
) out.add(module(from.method.declarer), 1, module(to.method.definition.declarer))
out
}
def callsByPkg(from: ClassMask, to: ClassMask) =
traces.flatMap(_.calls.collect { case (Some(a), b) if from(a.method.declarer) && to(b.method.definition.declarer) => (a, b) })
def modules(modules: List[ClassMask]): Digraph[Set[ClassMask], Unit] = {
val out = moduleDependencies(modules).withoutSelfLoops
out.remove(PackageName(""))
out.remove(PackageName("java.lang.reflect"))
out.condensation.transitiveReduction
}
}
|
kyessenov/semeru
|
src/main/scala/Statistics.scala
|
Scala
|
gpl-3.0
| 5,879
|
package com.sksamuel.elastic4s.requests.searches.aggs
import com.sksamuel.elastic4s.requests.common.FetchSourceContext
import com.sksamuel.elastic4s.requests.script.Script
import com.sksamuel.elastic4s.requests.searches.sort.Sort
import com.sksamuel.elastic4s.ext.OptionImplicits._
case class TopHitsAggregation(name: String,
explain: Option[Boolean] = None,
fetchSource: Option[FetchSourceContext] = None,
size: Option[Int] = None,
from: Option[Int] = None,
sorts: Seq[Sort] = Nil,
trackScores: Option[Boolean] = None,
version: Option[Boolean] = None,
scripts: Map[String, Script] = Map.empty,
storedFields: Seq[String] = Nil,
subaggs: Seq[AbstractAggregation] = Nil,
metadata: Map[String, AnyRef] = Map.empty)
extends Aggregation {
type T = TopHitsAggregation
def explain(explain: Boolean): TopHitsAggregation = copy(explain = explain.some)
def fetchSource(includes: Array[String], excludes: Array[String]): TopHitsAggregation =
copy(fetchSource = FetchSourceContext(true, includes, excludes).some)
def fetchSource(fetchSource: Boolean): TopHitsAggregation =
copy(fetchSource = FetchSourceContext(fetchSource).some)
def size(size: Int): TopHitsAggregation = copy(size = size.some)
def from(from: Int): TopHitsAggregation = copy(from = from.some)
def sortBy(first: Sort, rest: Sort*): TopHitsAggregation = sortBy(first +: rest)
def sortBy(sorts: Iterable[Sort]): TopHitsAggregation = copy(sorts = sorts.toSeq)
def storedField(field: String): TopHitsAggregation = storedFields(field)
def storedFields(first: String, rest: String*): TopHitsAggregation = storedFields(first +: rest)
def storedFields(fields: Iterable[String]): TopHitsAggregation = copy(storedFields = fields.toSeq)
def version(version: Boolean): TopHitsAggregation = copy(version = version.some)
def trackScores(trackScores: Boolean): TopHitsAggregation = copy(trackScores = trackScores.some)
def script(name: String, script: Script): T = copy(scripts = scripts + (name -> script))
override def subAggregations(aggs: Iterable[AbstractAggregation]): T =
sys.error("Top Hits does not support sub aggregations")
override def metadata(map: Map[String, AnyRef]): T = copy(metadata = map)
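  // Usage sketch (illustrative): TopHitsAggregation("latest").size(3).version(true)
  // returns a new immutable copy on every builder call.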
}
|
sksamuel/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/searches/aggs/TopHitsAggregation.scala
|
Scala
|
apache-2.0
| 2,572
|
package com.azavea.opentransit.indicators.stations
import com.azavea.opentransit._
import com.azavea.opentransit.database._
import org.apache.commons.csv.CSVPrinter
import org.apache.commons.csv.CSVStrategy.DEFAULT_STRATEGY
import java.io.{ByteArrayOutputStream, OutputStreamWriter, FileOutputStream}
import java.util.UUID
class StationStatsCSV(
bufferDistance: Double,
commuteTime: Int,
jobId: Int,
csvStore: StationCSVStore = StationCSVDatabase,
outputStream: ByteArrayOutputStream = new ByteArrayOutputStream()
) extends CSVPrinter(new OutputStreamWriter(outputStream)) {
private val wrapper = this
def value: String = new String(outputStream.toByteArray)
def save(status: CSVStatus): Unit =
csvStore.set(CSVJob(status, bufferDistance, commuteTime, jobId, outputStream.toByteArray))
def writeFile(path: String): Unit = {
val fos = new FileOutputStream(path)
try {
fos.write(outputStream.toByteArray)
} finally {
fos.close()
}
}
case class StationStats(
id: String,
name: String,
proximalPop1: Int,
proximalPop2: Int,
proximalJobs: Int,
accessibleJobs: Int
) {
def write(): Unit = {
wrapper.print(id)
wrapper.print(name)
wrapper.print(proximalPop1.toString)
wrapper.print(proximalPop2.toString)
wrapper.print(proximalJobs.toString)
wrapper.print(accessibleJobs.toString)
wrapper.println()
}
}
private var header = false
def attachHeader(): Unit = {
if (!header) {
wrapper.print("stationID")
wrapper.print("stationName")
wrapper.print("proximalPop1")
wrapper.print("proximalPop2")
wrapper.print("proximalJobs")
wrapper.print("accessibleJobs")
wrapper.println()
header = true
}
}
}
object StationStatsCSV {
def apply(
bufferDistance: Double,
commuteTime: Int,
jobId: Int,
csvStore: StationCSVStore = StationCSVDatabase
): StationStatsCSV = {
val printer = new StationStatsCSV(bufferDistance, commuteTime, jobId)
csvStore.set(CSVJob(Processing, bufferDistance, commuteTime, jobId, Array.empty))
printer.setStrategy(DEFAULT_STRATEGY)
printer.attachHeader()
printer
}
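  // Usage sketch (values illustrative):
  //   val csv = StationStatsCSV(bufferDistance = 400.0, commuteTime = 60, jobId = 1)
  //   csv.StationStats("id1", "Central", 100, 200, 50, 75).write()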
}
|
flibbertigibbet/open-transit-indicators
|
scala/opentransit/src/main/scala/com/azavea/opentransit/indicators/stations/StationStats.scala
|
Scala
|
gpl-3.0
| 2,212
|
package wiro
package client.akkaHttp
import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context
trait ClientDerivationModule extends TypePathMacro {
def deriveClientContext[A]: RPCClientContext[A] = macro ClientDerivationMacro.deriveClientContextImpl[A]
}
object ClientDerivationMacro extends ClientDerivationModule {
def deriveClientContextImpl[A: c.WeakTypeTag](c: Context): c.Tree = {
import c.universe._
val tpe = weakTypeOf[A]
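    // Expands at compile time into an anonymous RPCClientContext whose metadata,
    // type path and HTTP path are all derived from the trait A.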
q"""
import wiro.{ OperationType, MethodMetaData }
new RPCClientContext[$tpe] {
override val methodsMetaData = deriveMetaData[$tpe]
override val tp = typePath[$tpe]
override val path = derivePath[$tpe]
}
"""
}
}
|
federico-pellegatta/wiro
|
clientAkkaHttp/src/main/scala/ClientDerivation.scala
|
Scala
|
mit
| 729
|
package example
import com.typesafe.scalalogging.slf4j.Logging
import com.typesafe.config.ConfigFactory
object Configurator extends Logging {
  val conf = ConfigFactory.load("~/testcluster")
    .withFallback(ConfigFactory.load("./testcluster"))
    .withFallback(ConfigFactory.load("testcluster"))
  conf.checkValid(ConfigFactory.load("testcluster"), "testcluster")
  logger.info(s"Server Mode ${conf.getString("cluster.api.runmode")} ")
}
|
johanprinsloo/akka-cassandra-cluster-test
|
src/main/scala/example/Configurator.scala
|
Scala
|
apache-2.0
| 450
|
package com.epicport.db
import scala.slick.driver.MySQLDriver.simple._
object Db {
val db = Database.forURL("jdbc:mysql://localhost/epicport?useUnicode=true&characterEncoding=UTF-8",
driver = "com.mysql.jdbc.Driver",
user = "root",
password = "simple")
}
trait Db {
implicit val db = Db.db
}
|
caiiiycuk/epicport
|
web/src/main/scala/com/epicport/db/Db.scala
|
Scala
|
gpl-2.0
| 310
|
package edu.gsu.cs.kgem.model
import java.lang.Math.min
import edu.gsu.cs.kgem.exec.log
import scala.collection.mutable
/**
* Created with IntelliJ IDEA.
* User: aartyomenko
* Date: 4/13/13
* Time: 1:43 PM
 * Class to wrap derandomized KGEM, i.e. initialization that maximizes the distance
 * between seeds and detects the population size via a distance threshold.
*/
object MaxDistanceSeedFinder extends SeedFinder {
/**
   * Find seeds by maximizing the Hamming distance between all pairs
   * of reads, subject to a pre-specified threshold. This is a 2-approximation to the
* metric k-center problem.
*
* @param reads
* Collection of reads
* @param k
* Maximum size of sample
* @param threshold
* Min hamming distance between seeds
* @return
*/
def findSeeds(reads: Iterable[Read], k: Int, threshold: Int, count_threshold: Int): Iterable[Genotype] = {
val readArr = reads.toArray
val first = getFirstSeed(readArr)
var seeds = new mutable.MutableList[Genotype]()
seeds += new Genotype(first.seq)
var distanceMap = readArr.view.filter(r => !r.equals(first)).map(r => r -> hammingDistance(first.seq, r.seq) * r.freq).toMap
var maxHD = Double.MaxValue
var count = maxHD
log("Count threshold: %d".format(count_threshold))
while (seeds.size < k && (maxHD >= threshold && count > count_threshold)) {
if (distanceMap.isEmpty) return seeds//.map(r => new Genotype(r.seq))
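      // Pick the remaining read farthest from the chosen seeds (distance scaled by
      // read frequency), restricted to reads that are at least 85% non-N.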
      val cur = distanceMap.filter(x => x._1.seq.count(_ != 'N') >= x._1.len * 0.85).maxBy(r => r._2 * r._1.freq)
maxHD = distanceMap.view.map(r => r._2 / r._1.freq).max
//if (maxHD < threshold) count = readArr.view.filter(r => hammingDistance(r, cur._1) < distanceMap(r) / r.freq).map(_.freq).sum
val voronoi = readArr.view.filter(r => hammingDistance(r, cur._1) < distanceMap(r) / r.freq).toList
val center = new Genotype(voronoi)
count = voronoi.map(_.freq).sum
val seed = seeds.find(s => hammingDistance(s.toIntegralString, center.toIntegralString) == 0)
log(seed.toString)
      if (seed.isEmpty) seeds += center
log("Read to center dist: %.2f".format(hammingDistance(cur._1.seq, center.toIntegralString)))
distanceMap = distanceMap.map(e => e._1 -> min(e._2, hammingDistance(cur._1, e._1) * e._1.freq)).toMap
}
log("Estimated k: %d".format(seeds.size))
log("Final max distance: %.0f count: %.0f".format(maxHD, count))
seeds//.map(r => new Genotype(r.seq))
}
/**
* Select first read randomly
* @param readArr
* Array with all reads
* @return
* one read
*/
private def getFirstSeed(readArr: Array[Read]) = {
val mc = readArr.map(r => r.seq.count(_ != 'N')).max
val candidates = readArr.filter(r => r.seq.count(_ != 'N') == mc)
candidates.maxBy(_.freq)
//val s = candidates.size
//val rnd = new Random()
//candidates(rnd.nextInt(s))
}
}
|
night-stalker/2SNV
|
src/main/edu/gsu/cs/2snv/model/MaxDistanceSeedFinder.scala
|
Scala
|
gpl-2.0
| 2,917
|
package io.eels.cli
import java.io.PrintStream
import io.eels.{Constants, SourceParser}
import io.eels.component.avro.AvroSchemaFn
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.hive.conf.HiveConf
object ShowSchemaMain {
implicit val fs = FileSystem.get(new Configuration)
implicit val hiveConf = new HiveConf
def apply(args: Seq[String], out: PrintStream = System.out): Unit = {
val parser = new scopt.OptionParser[Options]("eel") {
head("eel schema", Constants.EelVersion)
opt[String]("source") required() action { (source, o) =>
o.copy(source = source)
} text "specify source, eg hive:database:table or parquet:/path/to/file"
}
parser.parse(args, Options()) match {
case Some(options) =>
val builder = SourceParser(options.source).getOrElse(sys.error(s"Unsupported source ${options.source}"))
val source = builder()
val schema = source.schema
val avroSchema = AvroSchemaFn.toAvro(schema)
out.println(avroSchema)
case _ =>
}
}
case class Options(source: String = "")
}
|
sksamuel/hadoop-streams
|
eel-cli/src/main/scala/io/eels/cli/ShowSchemaMain.scala
|
Scala
|
apache-2.0
| 1,146
|
package polynomial
import algebra.ring.CommutativeRing
import polynomial.AdjoiningOperations._
import core.InfixOps._
trait PolynomialRing[A] extends CommutativeRing[Polynomial[A]] {
def parameter: FormalParameter
def coefficients: CommutativeRing[A]
private implicit val delegate = coefficients r_adjoin parameter
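  // Every ring operation below delegates to the ring obtained by adjoining the
  // formal parameter to the coefficient ring.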
override def zero: Polynomial[A] = delegate.zero
override def one: Polynomial[A] = delegate.one
override def plus(x: Polynomial[A], y: Polynomial[A]): Polynomial[A] = x + y
override def negate(x: Polynomial[A]): Polynomial[A] = x.negate
override def times(x: Polynomial[A], y: Polynomial[A]): Polynomial[A] = x * y
}
object PolynomialRing {
def apply[A](coefficientRing: CommutativeRing[A], p: FormalParameter): PolynomialRing[A] = new PolynomialRing[A] {
override def coefficients: CommutativeRing[A] = coefficientRing
override def parameter: FormalParameter = p
}
}
|
dkettlestrings/thunder
|
src/main/scala/polynomial/PolynomialRing.scala
|
Scala
|
gpl-3.0
| 926
|
// Copyright 2015 Willem Meints
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package nl.fizzylogic.reactivepi.gpio
import java.io._
/**
* Use the output pin class to configure a pin on the
* GPIO connector of your raspberry PI as output
* @param pinNumber Pin number to use
*/
class OutputPin(pinNumber: Int) extends GPIOPin(pinNumber, "out") {
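  // pinValueFilePath, inherited from GPIOPin, presumably points at the sysfs
  // "value" file for this pin (e.g. /sys/class/gpio/gpio18/value).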
/**
* Sets the output pin to a low state
*/
def low(): Unit = {
try {
val pinValueWriter = new PrintWriter(new File(pinValueFilePath))
pinValueWriter.write("0")
pinValueWriter.close()
} catch {
case e:IOException => throw new GPIOException(
s"Failed to set the pin state for pin ${pinNumber}: ${e.getMessage()}")
}
}
/**
* Sets the output pin to a high state
*/
def high(): Unit = {
try {
val pinValueWriter = new PrintWriter(new File(pinValueFilePath))
pinValueWriter.write("1")
pinValueWriter.close()
} catch {
case e:IOException => throw new GPIOException(
s"Failed to set the pin state for pin ${pinNumber}: ${e.getMessage()}")
}
}
}
|
wmeints/ReactivePI
|
core/src/main/scala/nl/fizzylogic/reactivepi/gpio/OutputPin.scala
|
Scala
|
apache-2.0
| 1,624
|
/*
Deduction Tactics
Copyright (C) 2012-2017 Raymond Dodge
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.rayrobdod.boardGame
package view
import scala.collection.immutable.BitSet
import scala.util.Random
/**
 * A tilesheet that makes solid color tile images, where each tile's color is
 * derived from the hashcode of its space's spaceClass.
*
* @group TrivialTilesheet
*
* @constructor
* Creates a HashcodeColorTilesheet
* @param transparentIcon a transparent icon
 * @param colorToIcon a function producing an icon that displays the specified color
* @param iconDimensions the size of each tile
* @tparam Index the index used to specify a space in a tiling
 * @tparam Dimension the dimensions of the tilesheet's tiles
* @tparam Icon the icon produced by this tilesheet
*/
final class HashcodeColorTilesheet[Index, Dimension, Icon](
transparentIcon:Function0[Icon]
, colorToIcon:Function2[java.awt.Color, Index, Icon]
, override val iconDimensions:Dimension
) extends Tilesheet[Any, Index, Dimension, Icon] {
override def getIconFor(
f:Tiling[_ <: Any, Index, _]
, idx:Index
, rng:Random
):TileLocationIcons[Icon] = TileLocationIcons(
colorToIcon(getColorFor(f,idx), idx)
, transparentIcon()
)
private[this] def getColorFor(f:Tiling[_ <: Any, Index, _], xy:Index):java.awt.Color = {
val hash = f.spaceClass(xy).map{_.hashCode}.getOrElse{0}
// reorder bits to make most colors not really close to black
val set1 = BitSet.fromBitMask(Array(hash))
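		// The low 12 bits of the hash land in the high nibbles of the red, green and
		// blue channels; the low nibbles stay zero, keeping most colors away from black.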
val color = Seq(
set1(0), set1(3), set1(6), set1(9), false,false,false,false,
set1(1), set1(4), set1(7), set1(10), false,false,false,false,
set1(2), set1(5), set1(8), set1(11), false,false,false,false
).reverse.zipWithIndex.filter{_._1}.map{_._2}.foldLeft(BitSet.empty){_ + _}.toBitMask.head.intValue
new java.awt.Color(color)
}
}
|
rayrobdod/boardGame
|
View/src/main/scala/HashcodeColorTilesheet.scala
|
Scala
|
gpl-3.0
| 2,431
|
package de.tu_berlin.formic.example
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.model.headers.BasicHttpCredentials
import akka.http.scaladsl.server.AuthenticationFailedRejection
import akka.http.scaladsl.testkit.{ScalatestRouteTest, WSProbe}
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Flow, Sink, Source}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
/**
* @author Ronny Bräunlich
*/
class NetworkRouteSpec extends WordSpecLike
with ScalatestRouteTest
with Matchers
with BeforeAndAfterAll {
override def afterAll(): Unit = {
super.afterAll()
}
"Network route" must {
"reject users without authentification" in {
implicit val materializer = ActorMaterializer()
val probe = WSProbe()
val route = new NetworkRoute()(system).route((_) => Flow.fromSinkAndSource(Sink.ignore, Source.empty))
WS("/formic", probe.flow) ~> route ~> check {
rejection shouldBe a[AuthenticationFailedRejection]
}
}
"accept users with unique username independent of password" in {
implicit val materializer = ActorMaterializer()
val probe = WSProbe()
val route = new NetworkRoute()(system).route((_) => Flow.fromSinkAndSource(Sink.ignore, Source.empty))
WS("/formic", probe.flow).addCredentials(BasicHttpCredentials("NetworkRoute", "")) ~> route ~> check {
isWebSocketUpgrade should be(true)
status should be(StatusCodes.SwitchingProtocols)
}
}
"reject users with duplicate username" in {
implicit val materializer = ActorMaterializer()
val probe = WSProbe()
val probe2 = WSProbe()
val route = new NetworkRoute()(system).route((_) => Flow.fromSinkAndSource(Sink.ignore, Source.empty))
WS("/formic", probe.flow).addCredentials(BasicHttpCredentials("NetworkRoute2", "")) ~> route ~> check {
isWebSocketUpgrade should be(true)
status should be(StatusCodes.SwitchingProtocols)
}
WS("/formic", probe2.flow).addCredentials(BasicHttpCredentials("NetworkRoute2", "")) ~> route ~> check {
rejection shouldBe a[AuthenticationFailedRejection]
}
}
}
}
|
rbraeunlich/formic
|
example/jvm/src/test/scala/de/tu_berlin/formic/example/NetworkRouteSpec.scala
|
Scala
|
apache-2.0
| 2,190
|
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.frontend.v2_3.ast
import org.neo4j.cypher.internal.frontend.v2_3.test_helpers.CypherTestSupport
import org.neo4j.cypher.internal.frontend.v2_3.{DummyPosition, InputPosition}
trait AstConstructionTestSupport extends CypherTestSupport {
protected val pos = DummyPosition(0)
implicit def withPos[T](expr: InputPosition => T): T = expr(pos)
def ident(name: String): Identifier = Identifier(name)(pos)
def hasLabels(identifier: String, label: String) =
HasLabels(ident(identifier), Seq(LabelName(label)(pos)))(pos)
def propEquality(identifier: String, propKey: String, intValue: Int) = {
val prop: Expression = Property(ident(identifier), PropertyKeyName(propKey)(pos))(pos)
val literal: Expression = SignedDecimalIntegerLiteral(intValue.toString)(pos)
Equals(prop, literal)(pos)
}
def literalInt(intValue: Int): SignedDecimalIntegerLiteral =
SignedDecimalIntegerLiteral(intValue.toString)(pos)
}
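// Usage sketch: a test class mixing in this trait can build ASTs concisely,
// e.g. the predicates `n:Label` and `n.prop = 42`:
//   val labelled = hasLabels("n", "Label")
//   val eq = propEquality("n", "prop", 42)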
|
HuangLS/neo4j
|
community/cypher/frontend-2.3/src/test/scala/org/neo4j/cypher/internal/frontend/v2_3/ast/AstConstructionTestSupport.scala
|
Scala
|
apache-2.0
| 1,763
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.clustering
import scala.collection.mutable
import org.apache.hadoop.fs.Path
import org.apache.spark.annotation.Since
import org.apache.spark.ml.{Estimator, Model, PipelineStage}
import org.apache.spark.ml.linalg.Vector
import org.apache.spark.ml.param._
import org.apache.spark.ml.param.shared._
import org.apache.spark.ml.util._
import org.apache.spark.ml.util.Instrumentation.instrumented
import org.apache.spark.mllib.clustering.{DistanceMeasure, KMeans => MLlibKMeans, KMeansModel => MLlibKMeansModel}
import org.apache.spark.mllib.linalg.{Vector => OldVector, Vectors => OldVectors}
import org.apache.spark.mllib.linalg.VectorImplicits._
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}
import org.apache.spark.sql.functions.udf
import org.apache.spark.sql.types.{IntegerType, StructType}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.util.VersionUtils.majorVersion
/**
* Common params for KMeans and KMeansModel
*/
private[clustering] trait KMeansParams extends Params with HasMaxIter with HasFeaturesCol
with HasSeed with HasPredictionCol with HasTol with HasDistanceMeasure {
/**
* The number of clusters to create (k). Must be > 1. Note that it is possible for fewer than
* k clusters to be returned, for example, if there are fewer than k distinct points to cluster.
* Default: 2.
* @group param
*/
@Since("1.5.0")
final val k = new IntParam(this, "k", "The number of clusters to create. " +
"Must be > 1.", ParamValidators.gt(1))
/** @group getParam */
@Since("1.5.0")
def getK: Int = $(k)
/**
* Param for the initialization algorithm. This can be either "random" to choose random points as
* initial cluster centers, or "k-means||" to use a parallel variant of k-means++
* (Bahmani et al., Scalable K-Means++, VLDB 2012). Default: k-means||.
* @group expertParam
*/
@Since("1.5.0")
final val initMode = new Param[String](this, "initMode", "The initialization algorithm. " +
"Supported options: 'random' and 'k-means||'.",
(value: String) => MLlibKMeans.validateInitMode(value))
/** @group expertGetParam */
@Since("1.5.0")
def getInitMode: String = $(initMode)
/**
* Param for the number of steps for the k-means|| initialization mode. This is an advanced
* setting -- the default of 2 is almost always enough. Must be > 0. Default: 2.
* @group expertParam
*/
@Since("1.5.0")
final val initSteps = new IntParam(this, "initSteps", "The number of steps for k-means|| " +
"initialization mode. Must be > 0.", ParamValidators.gt(0))
/** @group expertGetParam */
@Since("1.5.0")
def getInitSteps: Int = $(initSteps)
/**
* Validates and transforms the input schema.
* @param schema input schema
* @return output schema
*/
protected def validateAndTransformSchema(schema: StructType): StructType = {
SchemaUtils.validateVectorCompatibleColumn(schema, getFeaturesCol)
SchemaUtils.appendColumn(schema, $(predictionCol), IntegerType)
}
}
/**
* Model fitted by KMeans.
*
* @param parentModel a model trained by spark.mllib.clustering.KMeans.
*/
@Since("1.5.0")
class KMeansModel private[ml] (
@Since("1.5.0") override val uid: String,
private[clustering] val parentModel: MLlibKMeansModel)
extends Model[KMeansModel] with KMeansParams with GeneralMLWritable
with HasTrainingSummary[KMeansSummary] {
@Since("1.5.0")
override def copy(extra: ParamMap): KMeansModel = {
val copied = copyValues(new KMeansModel(uid, parentModel), extra)
copied.setSummary(trainingSummary).setParent(this.parent)
}
/** @group setParam */
@Since("2.0.0")
def setFeaturesCol(value: String): this.type = set(featuresCol, value)
/** @group setParam */
@Since("2.0.0")
def setPredictionCol(value: String): this.type = set(predictionCol, value)
@Since("2.0.0")
override def transform(dataset: Dataset[_]): DataFrame = {
transformSchema(dataset.schema, logging = true)
val predictUDF = udf((vector: Vector) => predict(vector))
dataset.withColumn($(predictionCol),
predictUDF(DatasetUtils.columnToVector(dataset, getFeaturesCol)))
}
@Since("1.5.0")
override def transformSchema(schema: StructType): StructType = {
validateAndTransformSchema(schema)
}
@Since("3.0.0")
def predict(features: Vector): Int = parentModel.predict(features)
@Since("2.0.0")
def clusterCenters: Array[Vector] = parentModel.clusterCenters.map(_.asML)
/**
* Returns a [[org.apache.spark.ml.util.GeneralMLWriter]] instance for this ML instance.
*
* For [[KMeansModel]], this does NOT currently save the training [[summary]].
* An option to save [[summary]] may be added in the future.
*
*/
@Since("1.6.0")
override def write: GeneralMLWriter = new GeneralMLWriter(this)
/**
* Gets summary of model on training set. An exception is
* thrown if `hasSummary` is false.
*/
@Since("2.0.0")
override def summary: KMeansSummary = super.summary
}
/** Helper class for storing model data */
private case class ClusterData(clusterIdx: Int, clusterCenter: Vector)
/** A writer for KMeans that handles the "internal" (or default) format */
private class InternalKMeansModelWriter extends MLWriterFormat with MLFormatRegister {
override def format(): String = "internal"
override def stageName(): String = "org.apache.spark.ml.clustering.KMeansModel"
override def write(path: String, sparkSession: SparkSession,
optionMap: mutable.Map[String, String], stage: PipelineStage): Unit = {
val instance = stage.asInstanceOf[KMeansModel]
val sc = sparkSession.sparkContext
// Save metadata and Params
DefaultParamsWriter.saveMetadata(instance, path, sc)
// Save model data: cluster centers
val data: Array[ClusterData] = instance.clusterCenters.zipWithIndex.map {
case (center, idx) =>
ClusterData(idx, center)
}
val dataPath = new Path(path, "data").toString
sparkSession.createDataFrame(data).repartition(1).write.parquet(dataPath)
}
}
/** A writer for KMeans that handles the "pmml" format */
private class PMMLKMeansModelWriter extends MLWriterFormat with MLFormatRegister {
override def format(): String = "pmml"
override def stageName(): String = "org.apache.spark.ml.clustering.KMeansModel"
override def write(path: String, sparkSession: SparkSession,
optionMap: mutable.Map[String, String], stage: PipelineStage): Unit = {
val instance = stage.asInstanceOf[KMeansModel]
val sc = sparkSession.sparkContext
instance.parentModel.toPMML(sc, path)
}
}
@Since("1.6.0")
object KMeansModel extends MLReadable[KMeansModel] {
@Since("1.6.0")
override def read: MLReader[KMeansModel] = new KMeansModelReader
@Since("1.6.0")
override def load(path: String): KMeansModel = super.load(path)
/**
   * This class stores model data written by Spark 1.6 and earlier, which kept all cluster
   * centers in a single row. A model can be loaded from such older data for backward compatibility.
*/
private case class OldData(clusterCenters: Array[OldVector])
private class KMeansModelReader extends MLReader[KMeansModel] {
/** Checked against metadata when loading model */
private val className = classOf[KMeansModel].getName
override def load(path: String): KMeansModel = {
// Import implicits for Dataset Encoder
val sparkSession = super.sparkSession
import sparkSession.implicits._
val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
val dataPath = new Path(path, "data").toString
val clusterCenters = if (majorVersion(metadata.sparkVersion) >= 2) {
val data: Dataset[ClusterData] = sparkSession.read.parquet(dataPath).as[ClusterData]
data.collect().sortBy(_.clusterIdx).map(_.clusterCenter).map(OldVectors.fromML)
} else {
// Loads KMeansModel stored with the old format used by Spark 1.6 and earlier.
sparkSession.read.parquet(dataPath).as[OldData].head().clusterCenters
}
val model = new KMeansModel(metadata.uid, new MLlibKMeansModel(clusterCenters))
metadata.getAndSetParams(model)
model
}
}
}
/**
* K-means clustering with support for k-means|| initialization proposed by Bahmani et al.
*
* @see <a href="https://doi.org/10.14778/2180912.2180915">Bahmani et al., Scalable k-means++.</a>
*/
@Since("1.5.0")
class KMeans @Since("1.5.0") (
@Since("1.5.0") override val uid: String)
extends Estimator[KMeansModel] with KMeansParams with DefaultParamsWritable {
setDefault(
k -> 2,
maxIter -> 20,
initMode -> MLlibKMeans.K_MEANS_PARALLEL,
initSteps -> 2,
tol -> 1e-4,
distanceMeasure -> DistanceMeasure.EUCLIDEAN)
@Since("1.5.0")
override def copy(extra: ParamMap): KMeans = defaultCopy(extra)
@Since("1.5.0")
def this() = this(Identifiable.randomUID("kmeans"))
/** @group setParam */
@Since("1.5.0")
def setFeaturesCol(value: String): this.type = set(featuresCol, value)
/** @group setParam */
@Since("1.5.0")
def setPredictionCol(value: String): this.type = set(predictionCol, value)
/** @group setParam */
@Since("1.5.0")
def setK(value: Int): this.type = set(k, value)
/** @group expertSetParam */
@Since("1.5.0")
def setInitMode(value: String): this.type = set(initMode, value)
/** @group expertSetParam */
@Since("2.4.0")
def setDistanceMeasure(value: String): this.type = set(distanceMeasure, value)
/** @group expertSetParam */
@Since("1.5.0")
def setInitSteps(value: Int): this.type = set(initSteps, value)
/** @group setParam */
@Since("1.5.0")
def setMaxIter(value: Int): this.type = set(maxIter, value)
/** @group setParam */
@Since("1.5.0")
def setTol(value: Double): this.type = set(tol, value)
/** @group setParam */
@Since("1.5.0")
def setSeed(value: Long): this.type = set(seed, value)
@Since("2.0.0")
override def fit(dataset: Dataset[_]): KMeansModel = instrumented { instr =>
transformSchema(dataset.schema, logging = true)
val handlePersistence = dataset.storageLevel == StorageLevel.NONE
val instances = DatasetUtils.columnToOldVector(dataset, getFeaturesCol)
if (handlePersistence) {
instances.persist(StorageLevel.MEMORY_AND_DISK)
}
instr.logPipelineStage(this)
instr.logDataset(dataset)
instr.logParams(this, featuresCol, predictionCol, k, initMode, initSteps, distanceMeasure,
maxIter, seed, tol)
val algo = new MLlibKMeans()
.setK($(k))
.setInitializationMode($(initMode))
.setInitializationSteps($(initSteps))
.setMaxIterations($(maxIter))
.setSeed($(seed))
.setEpsilon($(tol))
.setDistanceMeasure($(distanceMeasure))
val parentModel = algo.run(instances, Option(instr))
val model = copyValues(new KMeansModel(uid, parentModel).setParent(this))
val summary = new KMeansSummary(
model.transform(dataset),
$(predictionCol),
$(featuresCol),
$(k),
parentModel.numIter,
parentModel.trainingCost)
model.setSummary(Some(summary))
instr.logNamedValue("clusterSizes", summary.clusterSizes)
if (handlePersistence) {
instances.unpersist()
}
model
}
@Since("1.5.0")
override def transformSchema(schema: StructType): StructType = {
validateAndTransformSchema(schema)
}
}
@Since("1.6.0")
object KMeans extends DefaultParamsReadable[KMeans] {
@Since("1.6.0")
override def load(path: String): KMeans = super.load(path)
}
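// Usage sketch (illustrative): fit the estimator on a Dataset with a vector
// "features" column and inspect the fitted model.
//   val kmeans = new KMeans().setK(3).setSeed(1L).setMaxIter(20)
//   val model = kmeans.fit(dataset)
//   model.clusterCenters.foreach(println)
//   println(model.summary.trainingCost)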
/**
* Summary of KMeans.
*
* @param predictions `DataFrame` produced by `KMeansModel.transform()`.
* @param predictionCol Name for column of predicted clusters in `predictions`.
* @param featuresCol Name for column of features in `predictions`.
* @param k Number of clusters.
* @param numIter Number of iterations.
* @param trainingCost K-means cost (sum of squared distances to the nearest centroid for all
* points in the training dataset). This is equivalent to sklearn's inertia.
*/
@Since("2.0.0")
class KMeansSummary private[clustering] (
predictions: DataFrame,
predictionCol: String,
featuresCol: String,
k: Int,
numIter: Int,
@Since("2.4.0") val trainingCost: Double)
extends ClusteringSummary(predictions, predictionCol, featuresCol, k, numIter)
|
pgandhi999/spark
|
mllib/src/main/scala/org/apache/spark/ml/clustering/KMeans.scala
|
Scala
|
apache-2.0
| 13,166
|
trait A {
type R = Int
}
def fop[R](parameter: A): parameter./*resolved: true*/R = 0
def fop[R](parameter: A): parameter./*resolved: true*/R
|
LPTK/intellij-scala
|
testdata/resolve2/bug3/SCL3773.scala
|
Scala
|
apache-2.0
| 143
|
/*
* Copyright 2015 [See AUTHORS file for list of authors]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.concurrent.atomic.AtomicIntegerArray
import java.util.concurrent.Executors
import it.unimi.dsi.webgraph.BVGraph
import scala.util.Random
/**
* Example program that runs KwikCluster, C4 (BSP, async), ClusterWild! (BSP, async), and CDK on an input graph.
*/
object Example {
/**
* Main program
* @param args Set of optional command line arguments
*
* inputFile: pointer to the input graph (with no file extensions)
*
* randSeedOrd: random seed for generating random permutation
*
* randSeedCDK: random seed for CDK
*
* randSeedPCC: random seed to pass to parallel correlation clustering algorithms C4 and ClusterWild!
*
* nTrials: number of trials to run
*
* maxnNThreads: maximum number of threads to test with
*
* doSer: Boolean indicating whether to do the serial KwikCluster algorithm
*
* doC4: Boolean indicating whether to do C4
*
* doCW: Boolean indicating whether to do ClusterWild!
*
* doCDK: Boolean indicating whether to do CDK multiple times
*
* doCDKOnce: Boolean indicating whether to do CDK only once, with maxNThreads
*
* doBSP: Boolean indicating whether to do BSP for C4, ClusterWild!
*
* doAsync: Boolean indicating whether to do asynchronous for C4, ClusterWild!
*
* epsilon: Determines the sampling rate per round: epsilon / maxDegree of the remaining vertices are sampled.
*
* delta: Delta is used to compute the number of rounds required for max degree to halve, with probability 1-delta.
*/
def main(args: Array[String]) {
val argmap = args.map { a =>
val argPair = a.split("=")
val name = argPair(0).toLowerCase
val value = argPair(1)
(name, value)
}.toMap
// Default options
val inputFile = argmap.getOrElse("inputfile", "data/eswiki-2013_rand_symm")
val randSeedOrd = argmap.getOrElse("randseedord", "94720").toInt
val randSeedCDK = argmap.getOrElse("randseedcdk", "47").toInt
val randSeedPCC = argmap.getOrElse("randseedpcc", "53").toInt
val nTrials = argmap.getOrElse("ntrials", "10").toInt
val maxNThreads = argmap.getOrElse("maxnthreads", "8").toInt
val doSer = argmap.getOrElse("doser", "true").toBoolean
val doC4 = argmap.getOrElse("doc4", "true").toBoolean
val doCW = argmap.getOrElse("docw", "true").toBoolean
val doCDK = argmap.getOrElse("docdk", "false").toBoolean
val doCDKOnce = argmap.getOrElse("docdkonce", "true").toBoolean
val doBSP = argmap.getOrElse("dobsp", "true").toBoolean
val doAsync = argmap.getOrElse("doasync", "true").toBoolean
val epsilon = argmap.getOrElse("epsilon", "0.5").toDouble
val delta = argmap.getOrElse("delta", "0.1").toDouble
// Create graph
val graph: SparseGraph = new SparseGraph()
graph.loadFromBVGraph(BVGraph.load(inputFile))
val nVertices = graph.numNodes()
println(s"Graph has ${graph.numArcs()} edges, ${graph.numNodes()} vertices")
println(s"epsilon = $epsilon")
val orderingRand : Random = new Random(randSeedOrd)
val CDKRand : Random = new Random(randSeedCDK)
val PCCRand : Random = new Random(randSeedPCC)
// Cluster ids
val seqClusterID_int : Array[Int] = Array.fill(nVertices)(0)
val seqClusterID_atomic: AtomicIntegerArray = new AtomicIntegerArray(nVertices)
val c4aClusterID_int : Array[Int] = Array.fill(nVertices)(0)
val c4aClusterID_atomic: AtomicIntegerArray = new AtomicIntegerArray(nVertices)
val c4bClusterID_int : Array[Int] = Array.fill(nVertices)(0)
val c4bClusterID_atomic: AtomicIntegerArray = new AtomicIntegerArray(nVertices)
val cwaClusterID_int : Array[Int] = Array.fill(nVertices)(0)
val cwaClusterID_atomic: AtomicIntegerArray = new AtomicIntegerArray(nVertices)
val cwbClusterID_int : Array[Int] = Array.fill(nVertices)(0)
val cwbClusterID_atomic: AtomicIntegerArray = new AtomicIntegerArray(nVertices)
val cdkClusterID_int : Array[Int] = Array.fill(nVertices)(0)
val cdkClusterID_atomic: AtomicIntegerArray = new AtomicIntegerArray(nVertices)
// Sequential statistics
var seqStartTime: Long = 0
var seqEndTime: Long = 0
var seqEdgesExamined: Long = 0
var seqObjVal: Long = 0
// C4BS statistics
var c4bStartTime: Long = 0
var c4bEndTime: Long = 0
var c4bEdgesExamined: Long = 0
var c4bNumWaits: Int = 0
var c4bNumRejected: Int = 0
var c4bNumIterations: Int = 0
var c4bObjVal: Long = 0
// C4AS statistics
var c4aStartTime: Long = 0
var c4aEndTime: Long = 0
var c4aEdgesExamined: Long = 0
var c4aNumWaits: Int = 0
var c4aNumRejected: Int = 0
var c4aObjVal: Long = 0
// CWBS statistics
var cwbStartTime: Long = 0
var cwbEndTime: Long = 0
var cwbEdgesExamined: Long = 0
var cwbNumIterations: Int = 0
var cwbObjVal: Long = 0
// CWAS statistics
var cwaStartTime: Long = 0
var cwaEndTime: Long = 0
var cwaEdgesExamined: Long = 0
var cwaObjVal: Long = 0
// CDK statistics
var cdkStartTime: Long = 0
var cdkEndTime: Long = 0
var cdkEdgesExamined: Long = 0
var cdkNumIterations: Int = 0
var cdkObjVal: Long = 0
(0 until nTrials).foreach ( iter => {
// Ordering
val orderingThreads = Executors.newFixedThreadPool(maxNThreads)
val orderingRandSeed = orderingRand.nextInt()
val (ordering, invOrder) = AuxiliaryFunctions.parallelRandomPermutation(maxNThreads, orderingThreads, orderingRandSeed, nVertices)
// PCC rand
val pccRandSeed = PCCRand.nextLong()
val threads = Executors.newFixedThreadPool(maxNThreads)
(1 to maxNThreads).foreach(nThreads => {
// Run sequential
if (nThreads == 1 && doSer) {
(0 until 1).foreach { _ => {
System.gc()
(0 until nVertices).foreach(v => {
seqClusterID_int(v) = 0
seqClusterID_atomic.set(v, 0)
})
seqStartTime = System.currentTimeMillis()
val sqRunner: SequentialRandomGreedyPeeling = new SequentialRandomGreedyPeeling(graph)
val sqResults = sqRunner.run(1, null, ordering, invOrder, seqClusterID_int)
seqEndTime = System.currentTimeMillis()
seqEdgesExamined = sqResults._1
}
}
seqObjVal = AuxiliaryFunctions.computeObjective(maxNThreads, threads, graph, seqClusterID_int, seqClusterID_atomic)
println(s"[Trial $iter] Sequential KwikCluster with 1 threads:")
println("\\tTime: " + s"${seqEndTime - seqStartTime}")
println("\\t#edges examined: " + s"$seqEdgesExamined")
println("\\tObjective: " + s"$seqObjVal")
}
if (doC4 && doBSP) {
// Run C4BS
System.gc()
(0 until nVertices).foreach(v => {
c4bClusterID_atomic.set(v, 0)
c4bClusterID_int(v) = 0
})
c4bStartTime = System.currentTimeMillis()
val c4bRunner: ParallelCorrelationClustering = new ParallelCorrelationClustering(graph)
val c4bStats = c4bRunner.run(nThreads, threads, ordering, invOrder, c4bClusterID_int, c4bClusterID_atomic, doClusterWild = false, doBSP = true, epsilon = epsilon, delta = delta, randSeed = pccRandSeed)
c4bEndTime = System.currentTimeMillis()
c4bEdgesExamined = c4bStats._1
c4bNumWaits = c4bStats._2
c4bNumRejected = c4bStats._3
c4bNumIterations = c4bStats._4
c4bObjVal = AuxiliaryFunctions.computeObjective(maxNThreads, threads, graph, c4bClusterID_int, c4bClusterID_atomic)
println(s"[Trial $iter] C4 (BSP) with $nThreads threads:")
println("\\tTime: " + s"${c4bEndTime - c4bStartTime}")
println("\\t#edges examined: " + s"$c4bEdgesExamined")
println("\\tObjective: " + s"$c4bObjVal")
println("\\t#BSP rounds: " + s"$c4bNumIterations")
}
if (doC4 && doAsync) {
// Run C4AS
System.gc()
(0 until nVertices).foreach(v => {
c4aClusterID_atomic.set(v, 0)
c4aClusterID_int(v) = 0
})
c4aStartTime = System.currentTimeMillis()
val c4aRunner: ParallelCorrelationClustering = new ParallelCorrelationClustering(graph)
val c4aStats = c4aRunner.run(nThreads, threads, ordering, invOrder, c4aClusterID_int, c4aClusterID_atomic, doClusterWild = false, doBSP = false, epsilon = epsilon, randSeed = pccRandSeed)
c4aEndTime = System.currentTimeMillis()
c4aEdgesExamined = c4aStats._1
c4aNumWaits = c4aStats._2
c4aNumRejected = c4aStats._3
c4aObjVal = AuxiliaryFunctions.computeObjective(maxNThreads, threads, graph, c4aClusterID_int, c4aClusterID_atomic)
println(s"[Trial $iter] C4 (async) with $nThreads threads:")
println("\\tTime: " + s"${c4aEndTime - c4aStartTime}")
println("\\t#edges examined: " + s"$c4aEdgesExamined")
println("\\tObjective: " + s"$c4aObjVal")
}
if (doCW && doBSP) {
// Run CWBS
System.gc()
(0 until nVertices).foreach(v => {
cwbClusterID_atomic.set(v, 0)
cwbClusterID_int(v) = 0
})
cwbStartTime = System.currentTimeMillis()
val cwbRunner: ParallelCorrelationClustering = new ParallelCorrelationClustering(graph)
val cwbStats = cwbRunner.run(nThreads, threads, ordering, invOrder, cwbClusterID_int, cwbClusterID_atomic, doClusterWild = true, doBSP = true, epsilon = epsilon, delta = delta, randSeed = pccRandSeed)
cwbEndTime = System.currentTimeMillis()
cwbEdgesExamined = cwbStats._1
cwbNumIterations = cwbStats._4
cwbObjVal = AuxiliaryFunctions.computeObjective(maxNThreads, threads, graph, cwbClusterID_int, cwbClusterID_atomic)
println(s"[Trial $iter] ClusterWild! (BSP) with $nThreads threads:")
println("\\tTime: " + s"${cwbEndTime - cwbStartTime}")
println("\\t#edges examined: " + s"$cwbEdgesExamined")
println("\\tObjective: " + s"$cwbObjVal")
println("\\t#BSP rounds: " + s"$cwbNumIterations")
}
if (doCW && doAsync) {
// Run CWAS
System.gc()
(0 until nVertices).foreach(v => {
cwaClusterID_atomic.set(v, 0)
cwaClusterID_int(v) = 0
})
cwaStartTime = System.currentTimeMillis()
val cwaRunner: ParallelCorrelationClustering = new ParallelCorrelationClustering(graph)
val cwaStats = cwaRunner.run(nThreads, threads, ordering, invOrder, cwaClusterID_int, cwaClusterID_atomic, doClusterWild = true, doBSP = false, epsilon = epsilon, delta = delta, randSeed = pccRandSeed)
cwaEndTime = System.currentTimeMillis()
cwaEdgesExamined = cwaStats._1
cwaObjVal = AuxiliaryFunctions.computeObjective(maxNThreads, threads, graph, cwaClusterID_int, cwaClusterID_atomic)
println(s"[Trial $iter] ClusterWild! (async) with $nThreads threads:")
println("\\tTime: " + s"${cwaEndTime - cwaStartTime}")
println("\\t#edges examined: " + s"$cwaEdgesExamined")
println("\\tObjective: " + s"$cwaObjVal")
}
if (doCDK || (doCDKOnce && nThreads == maxNThreads)) {
// Run CDK
System.gc()
(0 until nVertices).foreach(v => {
cdkClusterID_atomic.set(v, 0)
cdkClusterID_int(v) = 0
})
cdkStartTime = System.currentTimeMillis()
val cdkRunner: ParallelCorrelationClustering_CDK = new ParallelCorrelationClustering_CDK(graph)
val cdkStats = cdkRunner.run(nThreads, threads, cdkClusterID_int, cdkClusterID_atomic, epsilon = epsilon, delta = delta, randSeed = CDKRand.nextLong())
cdkEndTime = System.currentTimeMillis()
cdkEdgesExamined = cdkStats._1
cdkNumIterations = cdkStats._2
cdkObjVal = AuxiliaryFunctions.computeObjective(maxNThreads, threads, graph, cdkClusterID_int, cdkClusterID_atomic)
println(s"[Trial $iter] CDK with $nThreads threads:")
println("\\tTime: " + s"${cdkEndTime - cdkStartTime}")
println("\\t#edges examined: " + s"$cdkEdgesExamined")
println("\\tObjective: " + s"$cdkObjVal")
println("\\t#BSP rounds: " + s"$cdkNumIterations")
}
// println(s"" +
// s"${nThreads}\\t" +
// s"${seqEndTime - seqStartTime}\\t" +
// s"${seqEdgesExamined}\\t" +
// s"${seqObjVal}\\t" +
// s"${c4bEndTime - c4bStartTime}\\t" +
// s"${c4bEdgesExamined}\\t" +
// s"${c4bNumWaits}\\t" +
// s"${c4bNumRejected}\\t" +
// s"${c4bNumIterations}\\t" +
// s"${c4aEndTime - c4aStartTime}\\t" +
// s"${c4aEdgesExamined}\\t" +
// s"${c4aNumWaits}\\t" +
// s"${c4aNumRejected}\\t" +
// s"${cwbEndTime - cwbStartTime}\\t" +
// s"${cwbEdgesExamined}\\t" +
// s"${cwbNumIterations}\\t" +
// s"${cwbObjVal}\\t" +
// s"${cwaEndTime - cwaStartTime}\\t" +
// s"${cwaEdgesExamined}\\t" +
// s"${cwaObjVal}\\t" +
// s"${cdkEndTime - cdkStartTime}\\t" +
// s"${cdkEdgesExamined}\\t" +
// s"${cdkNumIterations}\\t" +
// s"${cdkObjVal}\\t" +
// s"")
})
threads.shutdown()
orderingThreads.shutdown()
})
}
}
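// Example invocation (hypothetical paths and values); options are passed as
// name=value pairs, with names lower-cased by main() above:
//   Example inputFile=data/eswiki-2013_rand_symm nTrials=5 maxNThreads=4 doCDK=false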
|
pxinghao/ParallelCorrelationClustering
|
src/main/scala/Example.scala
|
Scala
|
apache-2.0
| 14,705
|
package com.github.mauricio.async.db.mysql
import org.specs2.mutable.Specification
import scala.concurrent.duration.Duration
import com.github.mauricio.async.db.RowData
class ZeroDatesSpec extends Specification with ConnectionHelper {
val createStatement =
"""CREATE TEMPORARY TABLE dates (
|`name` varchar (255) NOT NULL,
|`timestamp_column` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00',
|`date_column` date NOT NULL DEFAULT '0000-00-00',
|`datetime_column` datetime NOT NULL DEFAULT '0000-00-00 00:00:00',
|`time_column` time NOT NULL DEFAULT '00:00:00',
|`year_column` year NOT NULL DEFAULT '0000'
|)
|ENGINE=MyISAM DEFAULT CHARSET=utf8;""".stripMargin
val insertStatement = "INSERT INTO dates (name) values ('Joe')"
val selectStatement = "SELECT * FROM dates"
def matchValues( result : RowData ) = {
result("name") === "Joe"
result("timestamp_column") must beNull
result("datetime_column") must beNull
result("date_column") must beNull
result("year_column") === 0
result("time_column") === Duration.Zero
}
"client" should {
"correctly parse the MySQL zeroed dates as NULL values in text protocol" in {
withConnection {
connection =>
executeQuery(connection, createStatement)
executeQuery(connection, insertStatement)
matchValues(executeQuery(connection, selectStatement).rows.get(0))
}
}
"correctly parse the MySQL zeroed dates as NULL values in binary protocol" in {
withConnection {
connection =>
executeQuery(connection, createStatement)
executeQuery(connection, insertStatement)
matchValues(executePreparedStatement(connection, selectStatement).rows.get(0))
}
}
}
}
|
outbrain/postgresql-async
|
mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/ZeroDatesSpec.scala
|
Scala
|
apache-2.0
| 1,798
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.catalog.v2
import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.{mock, when}
import org.mockito.invocation.InvocationOnMock
import org.scalatest.Inside
import org.scalatest.Matchers._
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalog.v2.{CatalogManager, CatalogNotFoundException, CatalogPlugin, Identifier, LookupCatalog}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.util.CaseInsensitiveStringMap
private case class TestCatalogPlugin(override val name: String) extends CatalogPlugin {
override def initialize(name: String, options: CaseInsensitiveStringMap): Unit = ()
}
class LookupCatalogSuite extends SparkFunSuite with LookupCatalog with Inside {
import CatalystSqlParser._
private val catalogs = Seq("prod", "test").map(x => x -> TestCatalogPlugin(x)).toMap
override val catalogManager: CatalogManager = {
val manager = mock(classOf[CatalogManager])
when(manager.catalog(any())).thenAnswer((invocation: InvocationOnMock) => {
val name = invocation.getArgument[String](0)
catalogs.getOrElse(name, throw new CatalogNotFoundException(s"$name not found"))
})
when(manager.defaultCatalog).thenReturn(None)
manager
}
test("catalog object identifier") {
Seq(
("tbl", None, Seq.empty, "tbl"),
("db.tbl", None, Seq("db"), "tbl"),
("prod.func", catalogs.get("prod"), Seq.empty, "func"),
("ns1.ns2.tbl", None, Seq("ns1", "ns2"), "tbl"),
("prod.db.tbl", catalogs.get("prod"), Seq("db"), "tbl"),
("test.db.tbl", catalogs.get("test"), Seq("db"), "tbl"),
("test.ns1.ns2.ns3.tbl", catalogs.get("test"), Seq("ns1", "ns2", "ns3"), "tbl"),
("`db.tbl`", None, Seq.empty, "db.tbl"),
("parquet.`file:/tmp/db.tbl`", None, Seq("parquet"), "file:/tmp/db.tbl"),
("`org.apache.spark.sql.json`.`s3://buck/tmp/abc.json`", None,
Seq("org.apache.spark.sql.json"), "s3://buck/tmp/abc.json")).foreach {
case (sql, expectedCatalog, namespace, name) =>
inside(parseMultipartIdentifier(sql)) {
case CatalogObjectIdentifier(catalog, ident) =>
catalog shouldEqual expectedCatalog
ident shouldEqual Identifier.of(namespace.toArray, name)
}
}
}
test("table identifier") {
Seq(
("tbl", "tbl", None),
("db.tbl", "tbl", Some("db")),
("`db.tbl`", "db.tbl", None),
("parquet.`file:/tmp/db.tbl`", "file:/tmp/db.tbl", Some("parquet")),
("`org.apache.spark.sql.json`.`s3://buck/tmp/abc.json`", "s3://buck/tmp/abc.json",
Some("org.apache.spark.sql.json"))).foreach {
case (sql, table, db) =>
inside (parseMultipartIdentifier(sql)) {
case AsTableIdentifier(ident) =>
ident shouldEqual TableIdentifier(table, db)
}
}
Seq(
"prod.func",
"prod.db.tbl",
"ns1.ns2.tbl").foreach { sql =>
parseMultipartIdentifier(sql) match {
case AsTableIdentifier(_) =>
fail(s"$sql should not be resolved as TableIdentifier")
case _ =>
}
}
}
test("temporary table identifier") {
Seq(
("tbl", TableIdentifier("tbl")),
("db.tbl", TableIdentifier("tbl", Some("db"))),
("`db.tbl`", TableIdentifier("db.tbl")),
("parquet.`file:/tmp/db.tbl`", TableIdentifier("file:/tmp/db.tbl", Some("parquet"))),
("`org.apache.spark.sql.json`.`s3://buck/tmp/abc.json`",
TableIdentifier("s3://buck/tmp/abc.json", Some("org.apache.spark.sql.json")))).foreach {
case (sqlIdent: String, expectedTableIdent: TableIdentifier) =>
// when there is no catalog and the namespace has one part, the rule should match
inside(parseMultipartIdentifier(sqlIdent)) {
case AsTemporaryViewIdentifier(ident) =>
ident shouldEqual expectedTableIdent
}
}
Seq("prod.func", "prod.db.tbl", "test.db.tbl", "ns1.ns2.tbl", "test.ns1.ns2.ns3.tbl")
.foreach { sqlIdent =>
inside(parseMultipartIdentifier(sqlIdent)) {
case AsTemporaryViewIdentifier(_) =>
fail("AsTemporaryViewIdentifier should not match when " +
"the catalog is set or the namespace has multiple parts")
case _ =>
// expected
}
}
}
}
class LookupCatalogWithDefaultSuite extends SparkFunSuite with LookupCatalog with Inside {
import CatalystSqlParser._
private val catalogs = Seq("prod", "test").map(x => x -> TestCatalogPlugin(x)).toMap
override val catalogManager: CatalogManager = {
val manager = mock(classOf[CatalogManager])
when(manager.catalog(any())).thenAnswer((invocation: InvocationOnMock) => {
val name = invocation.getArgument[String](0)
catalogs.getOrElse(name, throw new CatalogNotFoundException(s"$name not found"))
})
when(manager.defaultCatalog).thenReturn(catalogs.get("prod"))
manager
}
test("catalog object identifier") {
Seq(
("tbl", catalogs.get("prod"), Seq.empty, "tbl"),
("db.tbl", catalogs.get("prod"), Seq("db"), "tbl"),
("prod.func", catalogs.get("prod"), Seq.empty, "func"),
("ns1.ns2.tbl", catalogs.get("prod"), Seq("ns1", "ns2"), "tbl"),
("prod.db.tbl", catalogs.get("prod"), Seq("db"), "tbl"),
("test.db.tbl", catalogs.get("test"), Seq("db"), "tbl"),
("test.ns1.ns2.ns3.tbl", catalogs.get("test"), Seq("ns1", "ns2", "ns3"), "tbl"),
("`db.tbl`", catalogs.get("prod"), Seq.empty, "db.tbl"),
("parquet.`file:/tmp/db.tbl`", catalogs.get("prod"), Seq("parquet"), "file:/tmp/db.tbl"),
("`org.apache.spark.sql.json`.`s3://buck/tmp/abc.json`", catalogs.get("prod"),
Seq("org.apache.spark.sql.json"), "s3://buck/tmp/abc.json")).foreach {
case (sql, expectedCatalog, namespace, name) =>
inside(parseMultipartIdentifier(sql)) {
case CatalogObjectIdentifier(catalog, ident) =>
catalog shouldEqual expectedCatalog
ident shouldEqual Identifier.of(namespace.toArray, name)
}
}
}
test("table identifier") {
Seq(
"tbl",
"db.tbl",
"`db.tbl`",
"parquet.`file:/tmp/db.tbl`",
"`org.apache.spark.sql.json`.`s3://buck/tmp/abc.json`",
"prod.func",
"prod.db.tbl",
"ns1.ns2.tbl").foreach { sql =>
parseMultipartIdentifier(sql) match {
case AsTableIdentifier(_) =>
fail(s"$sql should not be resolved as TableIdentifier")
case _ =>
}
}
}
test("temporary table identifier") {
Seq(
("tbl", TableIdentifier("tbl")),
("db.tbl", TableIdentifier("tbl", Some("db"))),
("`db.tbl`", TableIdentifier("db.tbl")),
("parquet.`file:/tmp/db.tbl`", TableIdentifier("file:/tmp/db.tbl", Some("parquet"))),
("`org.apache.spark.sql.json`.`s3://buck/tmp/abc.json`",
TableIdentifier("s3://buck/tmp/abc.json", Some("org.apache.spark.sql.json")))).foreach {
case (sqlIdent: String, expectedTableIdent: TableIdentifier) =>
// when there is no catalog and the namespace has one part, the rule should match
inside(parseMultipartIdentifier(sqlIdent)) {
case AsTemporaryViewIdentifier(ident) =>
ident shouldEqual expectedTableIdent
}
}
Seq("prod.func", "prod.db.tbl", "test.db.tbl", "ns1.ns2.tbl", "test.ns1.ns2.ns3.tbl")
.foreach { sqlIdent =>
inside(parseMultipartIdentifier(sqlIdent)) {
case AsTemporaryViewIdentifier(_) =>
fail("AsTemporaryViewIdentifier should not match when " +
"the catalog is set or the namespace has multiple parts")
case _ =>
// expected
}
}
}
}
|
techaddict/spark
|
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/v2/LookupCatalogSuite.scala
|
Scala
|
apache-2.0
| 8,636
|
// Copyright 2015,2016,2017,2018,2019,2020 Commonwealth Bank of Australia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package commbank.grimlock.test
import commbank.grimlock.framework.position._
import cascading.flow.FlowDef
import com.twitter.scalding.{ Config, Local }
import com.twitter.scalding.typed.{ IterablePipe, TypedPipe }
import org.apache.log4j.{ Level, Logger }
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.scalatest.{ FlatSpec, Matchers }
import scala.reflect.ClassTag
import shapeless.HList
trait TestGrimlock extends FlatSpec with Matchers {
implicit def positionOrdering[P <: HList] = Position.ordering[P]()
}
trait TestScala extends TestGrimlock {
implicit val ctx = commbank.grimlock.scala.environment.Context()
def toU[T](list: List[T]): List[T] = list
implicit def toList[T](list: List[T]): List[T] = list
}
trait TestScalding extends TestGrimlock {
private implicit val flow = new FlowDef
private implicit val mode = Local(true)
private implicit val config = Config.defaultFrom(mode)
implicit val ctx = commbank.grimlock.scalding.environment.Context()
def toU[T](list: List[T]): TypedPipe[T] = IterablePipe(list)
implicit def toList[T](pipe: TypedPipe[T]): List[T] = pipe
.toIterableExecution
.waitFor(config, mode)
.getOrElse(throw new Exception("couldn't get pipe as list"))
.toList
}
trait TestSpark extends TestGrimlock {
implicit val ctx = commbank.grimlock.spark.environment.Context(TestSpark.session)
def toU[T](list: List[T])(implicit ev: ClassTag[T]): RDD[T] = TestSpark.session.sparkContext.parallelize(list)
implicit def toList[T](rdd: RDD[T]): List[T] = rdd.toLocalIterator.toList
}
object TestSpark {
val session = SparkSession.builder().master("local").appName("Test Spark").getOrCreate()
Logger.getRootLogger().setLevel(Level.WARN)
}
|
CommBank/grimlock
|
grimlock-core/src/test/scala/commbank/grimlock/TestGrimlock.scala
|
Scala
|
apache-2.0
| 2,390
|
package jp.co.cyberagent.aeromock.server.http
import io.netty.channel.ChannelHandlerContext
import io.netty.handler.codec.http.{FullHttpRequest, HttpResponse, HttpResponseStatus}
import jp.co.cyberagent.aeromock.config.Project
import jp.co.cyberagent.aeromock.core.el.VariableHelper
import jp.co.cyberagent.aeromock.core.http.VariableManager
import jp.co.cyberagent.aeromock.data.{DataFileReaderFactory, DataPathResolver}
import jp.co.cyberagent.aeromock.helper._
import jp.co.cyberagent.aeromock.{AeromockApiNotFoundException, AeromockSystemException}
import scaldi.Injector
import scala.collection.JavaConverters._
/**
* [[jp.co.cyberagent.aeromock.server.http.HttpRequestProcessor]] for JSON API.
* @author stormcat24
*/
class JsonApiHttpRequestProcessor(implicit inj: Injector) extends HttpRequestProcessor with HttpResponseWriter {
val project = inject[Project]
override def process(request: FullHttpRequest)(implicit context: ChannelHandlerContext): HttpResponse = {
val ajax = project._ajax
val ajaxRoot = ajax.root
val naming = project._naming
val dataFile = DataPathResolver.resolve(ajaxRoot, request.parsedRequest, naming) match {
case None => throw new AeromockApiNotFoundException(request.parsedRequest.url)
case Some(file) => file
}
val dataMap = DataFileReaderFactory.create(dataFile) match {
case None => throw new AeromockSystemException(s"Cannot read Data file '${dataFile.toString}'")
case Some(reader) => reader.readFile(dataFile)
}
val variableHelper = new VariableHelper(VariableManager.getRequestMap ++ VariableManager.getOriginalVariableMap().asScala)
val responseWriter = JsonApiResponseWriterFactory.create(project, variableHelper, dataMap)
val response = responseWriter.write
ajax.jsonpCallbackName.flatMap(p => request.parsedRequest.queryParameters.get(p)) match {
case None => renderJson(response._1, HttpResponseStatus.OK, response._2)
case Some(callbackName) => renderJsonp(response._1, callbackName, HttpResponseStatus.OK, response._2)
}
}
}
|
CyberAgent/aeromock
|
aeromock-server/src/main/scala/jp/co/cyberagent/aeromock/server/http/JsonApiHttpRequestProcessor.scala
|
Scala
|
mit
| 2,077
|
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.impl.datasource.local
import slamdata.Predef._
import quasar.api.resource.ResourcePath
import quasar.concurrent.BlockingContext
import quasar.connector.{Datasource, MonadResourceErr, ParsableType, QueryResult}
import quasar.qscript.InterpretedRead
import java.nio.file.{Path => JPath}
import cats.effect.{ContextShift, Effect, Timer}
import fs2.{io, Stream}
import scalaz.syntax.tag._
object LocalDatasource {
/** @param readChunkSizeBytes the number of bytes per chunk to use when reading files. */
def apply[F[_]: ContextShift: Effect: MonadResourceErr: Timer](
root: JPath,
readChunkSizeBytes: Int,
blockingPool: BlockingContext)
: Datasource[F, Stream[F, ?], InterpretedRead[ResourcePath], QueryResult[F]] = {
import ParsableType.JsonVariant
EvaluableLocalDatasource[F](LocalType, root) { iRead =>
QueryResult.typed(
ParsableType.json(JsonVariant.LineDelimited, true),
io.file.readAll[F](iRead.path, blockingPool.unwrap, readChunkSizeBytes),
iRead.stages)
}
}
}
|
slamdata/slamengine
|
impl/src/main/scala/quasar/impl/datasource/local/LocalDatasource.scala
|
Scala
|
apache-2.0
| 1,667
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.monitoring.metrics
import com.typesafe.config.ConfigFactory
import io.prometheus.client.CollectorRegistry
import kamon.prometheus.PrometheusReporter
import org.apache.openwhisk.core.connector.{Activation, EventMessage}
import org.apache.openwhisk.core.entity.{ActivationId, ActivationResponse, Subject, UUID}
import org.apache.openwhisk.core.monitoring.metrics.OpenWhiskEvents.MetricConfig
import org.junit.runner.RunWith
import org.scalatest.BeforeAndAfterEach
import org.scalatest.junit.JUnitRunner
import pureconfig._
import pureconfig.generic.auto._
import scala.concurrent.duration._
@RunWith(classOf[JUnitRunner])
class PrometheusRecorderTests extends KafkaSpecBase with BeforeAndAfterEach with PrometheusMetricNames {
behavior of "PrometheusConsumer"
val namespaceDemo = "demo"
val namespaceGuest = "guest"
val actionWithCustomPackage = "apimgmt/createApiOne"
val actionWithDefaultPackage = "createApi"
val kind = "nodejs:10"
val memory = "256"
createCustomTopic(EventConsumer.userEventTopic)
it should "push user events to kamon" in {
CollectorRegistry.defaultRegistry.clear()
val metricConfig = loadConfigOrThrow[MetricConfig](system.settings.config, "user-events")
val metricRecorder = PrometheusRecorder(new PrometheusReporter, metricConfig)
val consumer = createConsumer(kafkaPort, system.settings.config, metricRecorder)
publishStringMessageToKafka(
EventConsumer.userEventTopic,
newActivationEvent(s"$namespaceDemo/$actionWithCustomPackage", kind, memory).serialize)
publishStringMessageToKafka(
EventConsumer.userEventTopic,
newActivationEvent(s"$namespaceDemo/$actionWithDefaultPackage", kind, memory).serialize)
publishStringMessageToKafka(
EventConsumer.userEventTopic,
newActivationEvent(s"$namespaceGuest/$actionWithDefaultPackage", kind, memory).serialize)
// Custom package
sleep(sleepAfterProduce, "sleeping post produce")
consumer.shutdown().futureValue
counterTotal(activationMetric, namespaceDemo, actionWithCustomPackage) shouldBe 1
counter(coldStartMetric, namespaceDemo, actionWithCustomPackage) shouldBe 1
counterStatus(statusMetric, namespaceDemo, actionWithCustomPackage, ActivationResponse.statusDeveloperError) shouldBe 1
histogramCount(waitTimeMetric, namespaceDemo, actionWithCustomPackage) shouldBe 1
histogramSum(waitTimeMetric, namespaceDemo, actionWithCustomPackage) shouldBe (0.03 +- 0.001)
histogramCount(initTimeMetric, namespaceDemo, actionWithCustomPackage) shouldBe 1
histogramSum(initTimeMetric, namespaceDemo, actionWithCustomPackage) shouldBe (433.433 +- 0.01)
histogramCount(durationMetric, namespaceDemo, actionWithCustomPackage) shouldBe 1
histogramSum(durationMetric, namespaceDemo, actionWithCustomPackage) shouldBe (1.254 +- 0.01)
gauge(memoryMetric, namespaceDemo, actionWithCustomPackage).intValue shouldBe 256
// Default package
counterTotal(activationMetric, namespaceDemo, actionWithDefaultPackage) shouldBe 1
// Blacklisted namespace should not be tracked
counterTotal(activationMetric, namespaceGuest, actionWithDefaultPackage) shouldBe (null)
}
it should "push user event to kamon with prometheus metrics tags relabel" in {
val httpPort = freePort()
val globalConfig = system.settings.config
val config = ConfigFactory.parseString(s"""
| whisk {
| user-events {
| port = $httpPort
| enable-kamon = false
| ignored-namespaces = ["guest"]
| rename-tags {
| namespace = "ow_namespace"
| }
| retry {
| min-backoff = 3 secs
| max-backoff = 30 secs
| random-factor = 0.2
| max-restarts = 10
| }
| }
| }
""".stripMargin)
CollectorRegistry.defaultRegistry.clear()
val metricConfig = loadConfigOrThrow[MetricConfig](config, "whisk.user-events")
val metricRecorder = PrometheusRecorder(new PrometheusReporter, metricConfig)
val consumer = createConsumer(kafkaPort, system.settings.config, metricRecorder)
publishStringMessageToKafka(
EventConsumer.userEventTopic,
newActivationEvent(s"$namespaceDemo/$actionWithCustomPackage", kind, memory).serialize)
sleep(sleepAfterProduce, "sleeping post produce")
consumer.shutdown().futureValue
CollectorRegistry.defaultRegistry.getSampleValue(
activationMetric,
Array("ow_namespace", "initiator", "action", "kind", "memory"),
Array(namespaceDemo, namespaceDemo, actionWithCustomPackage, kind, memory)) shouldBe 1
}
private def newActivationEvent(actionPath: String, kind: String, memory: String) =
EventMessage(
"test",
Activation(
actionPath,
ActivationId.generate().asString,
2,
1254.millis,
30.millis,
433433.millis,
kind,
false,
memory.toInt,
None),
Subject("testuser"),
actionPath.split("/")(0),
UUID("test"),
Activation.typeName)
private def gauge(metricName: String, namespace: String, action: String) =
CollectorRegistry.defaultRegistry.getSampleValue(
metricName,
Array("namespace", "initiator", "action"),
Array(namespace, namespace, action))
private def counter(metricName: String, namespace: String, action: String) =
CollectorRegistry.defaultRegistry.getSampleValue(
metricName,
Array("namespace", "initiator", "action"),
Array(namespace, namespace, action))
private def counterTotal(metricName: String, namespace: String, action: String) =
CollectorRegistry.defaultRegistry.getSampleValue(
metricName,
Array("namespace", "initiator", "action", "kind", "memory"),
Array(namespace, namespace, action, kind, memory))
private def counterStatus(metricName: String, namespace: String, action: String, status: String) =
CollectorRegistry.defaultRegistry.getSampleValue(
metricName,
Array("namespace", "initiator", "action", "status"),
Array(namespace, namespace, action, status))
private def histogramCount(metricName: String, namespace: String, action: String) =
CollectorRegistry.defaultRegistry.getSampleValue(
s"${metricName}_count",
Array("namespace", "initiator", "action"),
Array(namespace, namespace, action))
private def histogramSum(metricName: String, namespace: String, action: String) =
CollectorRegistry.defaultRegistry
.getSampleValue(
s"${metricName}_sum",
Array("namespace", "initiator", "action"),
Array(namespace, namespace, action))
.doubleValue()
}
|
akrabat/openwhisk
|
core/monitoring/user-events/src/test/scala/org/apache/openwhisk/core/monitoring/metrics/PrometheusRecorderTests.scala
|
Scala
|
apache-2.0
| 7,591
|
package edu.neu.coe.csye._7200
package animal
trait Animal {
def alive: Boolean
}
trait Dog extends Animal {
def name: String
def alive = true
}
case class CairnTerrier(name: String, var stripped: Boolean = false) extends Dog
case class Chuweenie(name: String) extends Dog
trait Grooming[A <: Dog, B >: Dog] extends (A=>B)
// see https://en.wikipedia.org/wiki/Cairn_Terrier#Grooming
class Stripping extends Grooming[CairnTerrier,Animal] {
def apply(x: CairnTerrier) = {x.stripped = true; x.asInstanceOf[Animal]}
}
object CairnTerrier extends App {
def apply(name: String): CairnTerrier = new CairnTerrier(name,false)
val cindy = CairnTerrier("Cindy")
val grooming = new Stripping()
grooming(cindy).alive
val bentley = Chuweenie("Bentley")
// grooming(bentley) does not compile because Bentley is not a CairnTerrier
// grooming(cindy).name does not compile because Animals don't have names
}
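// Variance note (illustrative): Grooming narrows its input to a Dog subtype
// and widens its output to a Dog supertype, so a Stripping can be used
// wherever a CairnTerrier => Animal function is expected.
object GroomingExample extends App {
  val groom: CairnTerrier => Animal = new Stripping()
  println(groom(CairnTerrier("Cindy")).alive) // prints: true
}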
|
rchillyard/Scalaprof
|
FunctionalProgramming/src/main/scala/edu/neu/coe/csye/_7200/Animal.scala
|
Scala
|
gpl-2.0
| 918
|
import sbt._
object Resolvers {
val boundlessResolver = Seq(
"Boundless Repository" at "http://repo.boundlessgeo.com/main")
}
|
jmarin/scale
|
project/Resolvers.scala
|
Scala
|
apache-2.0
| 133
|
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.tools.partest
import scala.reflect.{ classTag, ClassTag }
trait TestUtil {
/** Given function and block of code, evaluates code block,
* calls function with nanoseconds elapsed, and returns block result.
*/
def timed[T](f: Long => Unit)(body: => T): T = {
val start = System.nanoTime
val result = body
val end = System.nanoTime
f(end - start)
result
}
/** Times body and returns (nanos, result).
*/
def alsoNanos[T](body: => T): (Long, T) = {
var nanos = 0L
val result = timed(nanos = _)(body)
(nanos, result)
}
def nanos(body: => Unit): Long = alsoNanos(body)._1
def intercept[T <: Exception : ClassTag](code: => Unit): Unit =
try {
code
assert(false, "did not throw " + classTag[T])
} catch {
case ex: Exception if classTag[T].runtimeClass isInstance ex =>
}
}
// Used in tests.
object TestUtil extends TestUtil {
}
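// Usage sketch (illustrative, assuming TestUtil is in scope): time a block
// and get both the elapsed nanoseconds and the block's result.
object TestUtilExample extends TestUtil {
  def demo(): Unit = {
    val (elapsed, total) = alsoNanos((1 to 1000000).sum)
    println(s"summed to $total in $elapsed ns")
  }
}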
|
martijnhoekstra/scala
|
src/partest/scala/tools/partest/TestUtil.scala
|
Scala
|
apache-2.0
| 1,216
|
package com.mpakhomov.model
import java.sql.Timestamp
// This class represents a candlestick: it is what we parse incoming messages into.
// Candlesticks are aggregated and sent to the clients.
case class Candlestick(
ticker: String,
timestamp: Timestamp,
open: Double,
high: Double,
low: Double,
close: Double,
  volume: Long // another option is to use BigDecimal, but for the sake of this task `Long` is good enough
)
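// Construction sketch (hypothetical values): one candle for a ticker.
object CandlestickExample {
  val sample = Candlestick(
    ticker = "AAPL",
    timestamp = new Timestamp(System.currentTimeMillis()),
    open = 101.1, high = 101.5, low = 100.9, close = 101.3,
    volume = 1000L)
}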
|
mpakhomov/akka-io-demo
|
src/main/scala/com/mpakhomov/model/Candlestick.scala
|
Scala
|
apache-2.0
| 433
|
/* Copyright 2012 Christian Douven
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package almhirt.xml
import java.util.{ UUID ⇒ JUUID }
import scala.concurrent.duration._
import scalaz.syntax.validation._
import scalaz.std._
import _root_.java.time.{ ZonedDateTime, LocalDateTime }
import almhirt.common._
import almhirt.almvalidation.funs._
import almhirt.problem.inst._
import almhirt.syntax.almvalidation._
trait XmlFunctions {
import scala.xml.{ XML, Node, Elem, NodeSeq }
import scala.xml.XML
import scalaz.Validation.FlatMap._
import almhirt.problem.all._
def allElems(elem: Elem): Seq[Elem] =
elem.child flatMap { (n: Node) ⇒
n match {
case e: Elem ⇒ Some(e)
case _ ⇒ None
}
}
def elems(elem: Elem)(label: String): Seq[Elem] = allElems(elem) filter (_.label == label)
def tryGetChild(elem: Elem)(label: String): AlmValidation[Option[Elem]] =
elems(elem)(label) match {
case Seq() ⇒ None.success
case Seq(x) ⇒ Some(x).success
      case _ ⇒ UnspecifiedProblem(s"""More than one child element found for label "$label" in element "${elem.label}".""").failure
}
def getChild(elem: Elem)(label: String): AlmValidation[Elem] =
tryGetChild(elem)(label).flatMap(childOpt ⇒
option.cata(childOpt)(
e ⇒ e.success,
UnspecifiedProblem(s"""The element "${elem.label}" did not contain a child labeled "$label".""").failure))
def xmlFromString(xmlString: String): AlmValidation[Elem] = {
try {
XML.loadString(xmlString).success
} catch {
case err: Exception ⇒ BadDataProblem("Could not parse xml: %s".format(err.getMessage), cause = Some(err)).failure[Elem]
}
}
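  // Usage sketch: parse a document, pick a child element and read it as an
  // Int, staying inside AlmValidation the whole way:
  //   xmlFromString("<a><b>1</b></a>")
  //     .flatMap(getChild(_)("b"))
  //     .flatMap(intFromXmlNode)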
def stringFromXmlNode(node: Elem): AlmValidation[String] =
notEmptyOrWhitespace(node.text)
def optionalStringFromXmlNode(node: Elem): Option[String] =
notEmptyOrWhitespace(node.text).toOption
def booleanFromXmlNode(node: Elem): AlmValidation[Boolean] =
notEmptyOrWhitespace(node.text) flatMap (ne ⇒ parseBooleanAlm(ne)) bimap (f ⇒ f.withLabel(node.label), s ⇒ s)
def optionalBooleanFromXmlNode(node: Elem): AlmValidation[Option[Boolean]] =
if (node.text.trim.isEmpty)
None.success
else
booleanFromXmlNode(node) fold (_.failure, Some(_).success)
def byteFromXmlNode(node: Elem): AlmValidation[Byte] =
notEmptyOrWhitespace(node.text) flatMap (ne ⇒ parseByteAlm(ne)) bimap (f ⇒ f.withLabel(node.label), s ⇒ s)
def optionalByteFromXmlNode(node: Elem): AlmValidation[Option[Byte]] =
if (node.text.trim.isEmpty)
None.success
else
byteFromXmlNode(node) fold (_.failure, Some(_).success)
def shortFromXmlNode(node: Elem): AlmValidation[Short] =
notEmptyOrWhitespace(node.text) flatMap (ne ⇒ parseShortAlm(ne)) bimap (f ⇒ f.withLabel(node.label), s ⇒ s)
def optionalShortFromXmlNode(node: Elem): AlmValidation[Option[Short]] =
if (node.text.trim.isEmpty)
None.success
else
shortFromXmlNode(node) fold (_.failure, Some(_).success)
def intFromXmlNode(node: Elem): AlmValidation[Int] =
notEmptyOrWhitespace(node.text) flatMap (ne ⇒ parseIntAlm(ne)) bimap (f ⇒ f.withLabel(node.label), s ⇒ s)
def optionalIntFromXmlNode(node: Elem): AlmValidation[Option[Int]] =
if (node.text.trim.isEmpty)
None.success
else
intFromXmlNode(node) fold (_.failure, Some(_).success)
def longFromXmlNode(node: Elem): AlmValidation[Long] =
notEmptyOrWhitespace(node.text) flatMap (ne ⇒ parseLongAlm(ne)) bimap (f ⇒ f.withLabel(node.label), s ⇒ s)
def optionalLongFromXmlNode(node: Elem): AlmValidation[Option[Long]] =
if (node.text.trim.isEmpty)
None.success
else
longFromXmlNode(node) fold (_.failure, Some(_).success)
def bigIntFromXmlNode(node: Elem): AlmValidation[BigInt] =
notEmptyOrWhitespace(node.text) flatMap (ne ⇒ parseBigIntAlm(ne)) bimap (f ⇒ f.withLabel(node.label), s ⇒ s)
def optionalBigIntFromXmlNode(node: Elem): AlmValidation[Option[BigInt]] =
if (node.text.trim.isEmpty)
None.success
else
bigIntFromXmlNode(node) fold (_.failure, Some(_).success)
def floatFromXmlNode(node: Elem): AlmValidation[Float] =
notEmptyOrWhitespace(node.text) flatMap (ne ⇒ parseFloatAlm(ne)) bimap (f ⇒ f.withLabel(node.label), s ⇒ s)
def optionalFloatFromXmlNode(node: Elem): AlmValidation[Option[Float]] =
if (node.text.trim.isEmpty)
None.success
else
floatFromXmlNode(node) fold (_.failure, Some(_).success)
def doubleFromXmlNode(node: Elem): AlmValidation[Double] =
notEmptyOrWhitespace(node.text) flatMap (ne ⇒ parseDoubleAlm(ne)) bimap (f ⇒ f.withLabel(node.label), s ⇒ s)
def optionalDoubleFromXmlNode(node: Elem): AlmValidation[Option[Double]] =
if (node.text.trim.isEmpty)
None.success
else
doubleFromXmlNode(node) fold (_.failure, Some(_).success)
def decimalFromXmlNode(node: Elem): AlmValidation[BigDecimal] =
notEmptyOrWhitespace(node.text) flatMap (ne ⇒ parseDecimalAlm(ne)) bimap (f ⇒ f.withLabel(node.label), s ⇒ s)
def optionalDecimalFromXmlNode(node: Elem): AlmValidation[Option[BigDecimal]] =
if (node.text.trim.isEmpty)
None.success
else
decimalFromXmlNode(node) fold (_.failure, Some(_).success)
def dateTimeFromXmlNode(node: Elem): AlmValidation[ZonedDateTime] =
notEmptyOrWhitespace(node.text) flatMap (ne ⇒ parseDateTimeAlm(ne)) bimap (f ⇒ f.withLabel(node.label), s ⇒ s)
def optionalDateTimeFromXmlNode(node: Elem): AlmValidation[Option[ZonedDateTime]] =
if (node.text.trim.isEmpty)
None.success
else
dateTimeFromXmlNode(node) fold (_.failure, Some(_).success)
def localDateTimeFromXmlNode(node: Elem): AlmValidation[LocalDateTime] =
notEmptyOrWhitespace(node.text) flatMap (ne ⇒ parseLocalDateTimeAlm(ne)) bimap (f ⇒ f.withLabel(node.label), s ⇒ s)
def optionalLocalDateTimeFromXmlNode(node: Elem): AlmValidation[Option[LocalDateTime]] =
if (node.text.trim.isEmpty)
None.success
else
localDateTimeFromXmlNode(node) fold (_.failure, Some(_).success)
def durationFromXmlNode(node: Elem): AlmValidation[FiniteDuration] =
notEmptyOrWhitespace(node.text) flatMap (ne ⇒ parseDurationAlm(ne)) bimap (f ⇒ f.withLabel(node.label), s ⇒ s)
def optionalDurationFromXmlNode(node: Elem): AlmValidation[Option[FiniteDuration]] =
if (node.text.trim.isEmpty)
None.success
else
durationFromXmlNode(node) fold (_.failure, Some(_).success)
def uuidFromXmlNode(node: Elem): AlmValidation[JUUID] =
notEmptyOrWhitespace(node.text) flatMap (ne ⇒ parseUuidAlm(ne)) bimap (f ⇒ f.withLabel(node.label), s ⇒ s)
def optionalUuidFromXmlNode(node: Elem): AlmValidation[Option[JUUID]] =
if (node.text.trim.isEmpty)
None.success
else
uuidFromXmlNode(node) fold (_.failure, Some(_).success)
def uriFromXmlNode(node: Elem): AlmValidation[_root_.java.net.URI] =
notEmptyOrWhitespace(node.text) flatMap (ne ⇒ parseUriAlm(ne)) bimap (f ⇒ f.withLabel(node.label), s ⇒ s)
def optionalUriFromXmlNode(node: Elem): AlmValidation[Option[_root_.java.net.URI]] =
if (node.text.trim.isEmpty)
None.success
else
uriFromXmlNode(node) fold (_.failure, Some(_).success)
def isBooleanSetTrue(elem: Elem): AlmValidation[Boolean] =
if (elem.text.trim.isEmpty)
false.success
else
parseBooleanAlm(elem.text) bimap (f ⇒ f.withLabel(elem.label), s ⇒ s)
def firstChildNodeMandatory(node: Elem, label: String): AlmValidation[Elem] = {
elems(node)(label).toList match {
case Nil ⇒
BadDataProblem("Element '%s' not found.".format(label)).withLabel(label).failure[Elem]
case l :: ls ⇒ l.success
}
}
def getFirstChildNode(node: Elem): AlmValidation[Elem] = {
allElems(node).toList match {
case Nil ⇒ BadDataProblem(s"""Element "${node.label}" has no children.""").withLabel(node.label).failure
case l :: ls ⇒ l.success
}
}
def getFirstChildNodeExcluding(node: Elem, excludeLabel: String): AlmValidation[Elem] = {
allElems(node).filterNot { _.label == excludeLabel }.toList match {
case Nil ⇒ BadDataProblem(s"""Element "${node.label}" has no children.""").withLabel(node.label).failure
case l :: ls ⇒ l.success
}
}
def mapOptionalFirstChild[T](node: Elem, label: String, compute: Elem ⇒ AlmValidation[T]): AlmValidation[Option[T]] =
elems(node)(label).headOption match {
case Some(t) ⇒ compute(t) map { r ⇒ Some(r) }
case None ⇒ None.success
}
def flatMapOptionalFirstChild[T](node: Elem, label: String, compute: Elem ⇒ AlmValidation[Option[T]]): AlmValidation[Option[T]] =
elems(node)(label).headOption match {
case Some(t) ⇒ compute(t)
case None ⇒ None.success
}
def stringFromChild(node: Elem, label: String): AlmValidation[String] =
firstChildNodeMandatory(node, label)
.flatMap { node ⇒ notEmptyOrWhitespace(node.text) } bimap (f ⇒ f.withLabel(label), s ⇒ s)
def stringOptionFromChild(node: Elem, label: String): Option[String] =
elems(node)(label).headOption.flatMap(optionalStringFromXmlNode)
def shortFromChild(node: Elem, label: String): AlmValidation[Short] =
firstChildNodeMandatory(node, label)
.flatMap { node ⇒ parseShortAlm(node.text) } bimap (f ⇒ f.withLabel(label), s ⇒ s)
def shortOptionFromChild(node: Elem, label: String): AlmValidation[Option[Short]] =
flatMapOptionalFirstChild(node, label, n ⇒ emptyStringIsNone(n.text, s ⇒ parseShortAlm(s))) bimap (f ⇒ f.withLabel(label), s ⇒ s)
def intFromChild(node: Elem, label: String): AlmValidation[Int] =
firstChildNodeMandatory(node, label)
.flatMap { node ⇒ parseIntAlm(node.text) } bimap (f ⇒ f.withLabel(label), s ⇒ s)
def intOptionFromChild(node: Elem, label: String): AlmValidation[Option[Int]] =
flatMapOptionalFirstChild(node, label, n ⇒ emptyStringIsNone(n.text, s ⇒ parseIntAlm(s))) bimap (f ⇒ f.withLabel(label), s ⇒ s)
def longFromChild(node: Elem, label: String): AlmValidation[Long] =
firstChildNodeMandatory(node, label)
.flatMap { node ⇒ parseLongAlm(node.text) } bimap (f ⇒ f.withLabel(label), s ⇒ s)
def longOptionFromChild(node: Elem, label: String): AlmValidation[Option[Long]] =
flatMapOptionalFirstChild(node, label, n ⇒ emptyStringIsNone(n.text, s ⇒ parseLongAlm(s))) bimap (f ⇒ f.withLabel(label), s ⇒ s)
def doubleFromChild(node: Elem, label: String): AlmValidation[Double] =
firstChildNodeMandatory(node, label)
.flatMap { node ⇒ parseDoubleAlm(node.text) } bimap (f ⇒ f.withLabel(label), s ⇒ s)
def doubleOptionFromChild(node: Elem, label: String): AlmValidation[Option[Double]] =
flatMapOptionalFirstChild(node, label, n ⇒ emptyStringIsNone(n.text, s ⇒ parseDoubleAlm(s))) bimap (f ⇒ f.withLabel(label), s ⇒ s)
def dateTimeFromChild(node: Elem, label: String): AlmValidation[ZonedDateTime] =
firstChildNodeMandatory(node, label)
.flatMap { node ⇒ parseDateTimeAlm(node.text) } bimap (f ⇒ f.withLabel(label), s ⇒ s)
def dateTimeOptionFromChild(node: Elem, label: String): AlmValidation[Option[ZonedDateTime]] =
flatMapOptionalFirstChild(node, label, n ⇒ emptyStringIsNone(n.text, s ⇒ parseDateTimeAlm(s))) bimap (f ⇒ f.withLabel(label), s ⇒ s)
def uuidFromChild(node: Elem, label: String): AlmValidation[JUUID] =
firstChildNodeMandatory(node, label)
.flatMap { node ⇒ parseUuidAlm(node.text) } bimap (f ⇒ f.withLabel(label), s ⇒ s)
def uuidOptionFromChild(node: Elem, label: String): AlmValidation[Option[JUUID]] =
flatMapOptionalFirstChild(node, label, n ⇒ emptyStringIsNone(n.text, s ⇒ parseUuidAlm(s))) bimap (f ⇒ f.withLabel(label), s ⇒ s)
private def emptyStringIsNone[T](str: String, compute: String ⇒ AlmValidation[T]): AlmValidation[Option[T]] =
if (str.trim().isEmpty)
None.success
else
compute(str) fold (_.failure, Some(_).success)
def getAttributeValue(node: Elem, name: String): AlmValidation[String] =
(node \ s"@$name") match {
      case NodeSeq.Empty ⇒ NoSuchElementProblem(s"""Attribute "$name" not found on <${node.label} !$name!="...">...</${node.label}>""").withLabel(name).failure
case x ⇒ x.text.success
}
def getOptionalAttributeValue(node: Elem, name: String): Option[String] =
getAttributeValue(node, name).toOption
}
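// A minimal usage sketch (hypothetical, not part of the original file):
// mix the trait into an object and read a validated Int from a child element.
object XmlFunctionsExample extends XmlFunctions {
  import scala.xml.XML
  def demo(): Unit = {
    val elem = XML.loadString("<person><age>42</age></person>")
    println(intFromChild(elem, "age"))    // Success(42)
    println(intFromChild(elem, "height")) // a labelled failure: element "height" not found
  }
}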
|
chridou/almhirt
|
almhirt-common/src/main/scala/almhirt/xml/XmlFunctions.scala
|
Scala
|
apache-2.0
| 13,072
|
package spinoco.protocol.mgcp.mgcppackage
trait MGCPPackage
|
Spinoco/protocol
|
mgcp/src/main/scala/spinoco/protocol/mgcp/mgcppackage/MGCPPackage.scala
|
Scala
|
mit
| 62
|
package sample.standalone_app
import skinny.micro._
import skinny.micro.contrib.jackson.JSONSupport
import scala.util._
/**
* Simple JSON formatter application.
*
* How to run:
*
* sbt samples/run
*/
object OnlineJSONFormatter extends App {
WebServer.mount(
new AsyncWebApp with JSONSupport {
post("/prettify") { implicit ctx =>
contentType = "application/json"
fromJSONString[Map[String, Any]](request.body) match {
case Success(value) => Ok(toPrettyJSONString(value))
case _ => BadRequest(toJSONString(Map("error" -> "Failed to parse JSON string")))
}
}
}
).port(4567).start()
  println()
println("""curl -XPOST localhost:4567/prettify -H'Content-Type: application/json' -d'{"glossary":{"title":"example glossary","GlossDiv":{"title":"S","GlossList":{"GlossEntry":{"ID":"SGML","SortAs":"SGML","GlossTerm":"Standard Generalized Markup Language","Acronym":"SGML","Abbrev":"ISO 8879:1986","GlossDef":{"para":"A meta-markup language, used to create markup languages such as DocBook.","GlossSeeAlso":["GML","XML"]},"GlossSee":"markup"}}}}}'""")
  println()
/*
{
"glossary" : {
"title" : "example glossary",
"GlossDiv" : {
"title" : "S",
"GlossList" : {
"GlossEntry" : {
"ID" : "SGML",
"SortAs" : "SGML",
"GlossTerm" : "Standard Generalized Markup Language",
"Acronym" : "SGML",
"Abbrev" : "ISO 8879:1986",
"GlossDef" : {
"para" : "A meta-markup language, used to create markup languages such as DocBook.",
"GlossSeeAlso" : [ "GML", "XML" ]
},
"GlossSee" : "markup"
}
}
}
}
}
*/
}
|
xerial/skinny-micro
|
samples/src/main/scala/sample/standalone_app/OnlineJSONFormatter.scala
|
Scala
|
bsd-2-clause
| 1,713
|
/**
* Copyright (c) 2012-2013, Tomasz Kaczmarzyk.
*
* This file is part of BeanDiff.
*
* BeanDiff is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* BeanDiff is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with BeanDiff; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.beandiff.support
import java.lang.reflect.Field
import java.lang.reflect.Modifier
import org.beandiff.support.ClassSupport.RichClass
object FieldSupport {
implicit class RichField(val f: Field) extends AnyVal {
def isStatic = Modifier.isStatic(f.getModifiers())
def getFrom(src: Any): Option[Any] = {
if (src.getClass.hasField(f.getName))
Some(f.get(src))
else
None
}
}
}
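// A minimal usage sketch (hypothetical class, not part of the original file):
// reflectively reading a declared field through the RichField wrapper.
object FieldSupportExample {
  import FieldSupport.RichField
  class Point(val x: Int)
  def demo(): Unit = {
    val f = classOf[Point].getDeclaredField("x")
    f.setAccessible(true)
    println(f.isStatic)              // false
    println(f.getFrom(new Point(3))) // Some(3), assuming hasField finds the declared field
  }
}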
|
tkaczmarzyk/beandiff
|
src/main/scala/org/beandiff/support/FieldSupport.scala
|
Scala
|
lgpl-3.0
| 1,251
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util
import java.lang.management.ManagementFactory
import java.lang.reflect.{Field, Modifier}
import java.util.{IdentityHashMap, Random}
import scala.collection.mutable.ArrayBuffer
import scala.runtime.ScalaRunTime
import com.google.common.collect.MapMaker
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.Tests.TEST_USE_COMPRESSED_OOPS_KEY
import org.apache.spark.util.collection.OpenHashSet
/**
 * A trait that allows a class to give [[SizeEstimator]] more accurate size estimation.
 * When a class extends it, [[SizeEstimator]] will query `estimatedSize` and use the
 * returned value as the size of the object instead of doing the estimation work itself.
 * The difference between a [[KnownSizeEstimation]] and
 * [[org.apache.spark.util.collection.SizeTracker]] is that a
 * [[org.apache.spark.util.collection.SizeTracker]] still uses [[SizeEstimator]] to
 * estimate the size, whereas a [[KnownSizeEstimation]] can provide a better estimation
 * without using [[SizeEstimator]].
 */
private[spark] trait KnownSizeEstimation {
def estimatedSize: Long
}
/**
* :: DeveloperApi ::
* Estimates the sizes of Java objects (number of bytes of memory they occupy), for use in
* memory-aware caches.
*
* Based on the following JavaWorld article:
* http://www.javaworld.com/javaworld/javaqa/2003-12/02-qa-1226-sizeof.html
*/
@DeveloperApi
object SizeEstimator extends Logging {
/**
* Estimate the number of bytes that the given object takes up on the JVM heap. The estimate
* includes space taken up by objects referenced by the given object, their references, and so on
* and so forth.
*
* This is useful for determining the amount of heap space a broadcast variable will occupy on
* each executor or the amount of space each object will take when caching objects in
* deserialized form. This is not the same as the serialized size of the object, which will
* typically be much smaller.
*/
def estimate(obj: AnyRef): Long = estimate(obj, new IdentityHashMap[AnyRef, AnyRef])
// Sizes of primitive types
private val BYTE_SIZE = 1
private val BOOLEAN_SIZE = 1
private val CHAR_SIZE = 2
private val SHORT_SIZE = 2
private val INT_SIZE = 4
private val LONG_SIZE = 8
private val FLOAT_SIZE = 4
private val DOUBLE_SIZE = 8
// Fields can be primitive types, sizes are: 1, 2, 4, 8. Or fields can be pointers. The size of
// a pointer is 4 or 8 depending on the JVM (32-bit or 64-bit) and UseCompressedOops flag.
// The sizes should be in descending order, as we will use that information for fields placement.
private val fieldSizes = List(8, 4, 2, 1)
// Alignment boundary for objects
// TODO: Is this arch dependent ?
private val ALIGN_SIZE = 8
// A cache of ClassInfo objects for each class
// We use weakKeys to allow GC of dynamically created classes
private val classInfos = new MapMaker().weakKeys().makeMap[Class[_], ClassInfo]()
// Object and pointer sizes are arch dependent
private var is64bit = false
// Size of an object reference
// Based on https://wikis.oracle.com/display/HotSpotInternals/CompressedOops
private var isCompressedOops = false
private var pointerSize = 4
// Minimum size of a java.lang.Object
private var objectSize = 8
initialize()
// Sets object size, pointer size based on architecture and CompressedOops settings
// from the JVM.
private def initialize() {
val arch = System.getProperty("os.arch")
is64bit = arch.contains("64") || arch.contains("s390x")
isCompressedOops = getIsCompressedOops
objectSize = if (!is64bit) 8 else {
if (!isCompressedOops) {
16
} else {
12
}
}
pointerSize = if (is64bit && !isCompressedOops) 8 else 4
classInfos.clear()
classInfos.put(classOf[Object], new ClassInfo(objectSize, Nil))
}
private def getIsCompressedOops: Boolean = {
// This is only used by tests to override the detection of compressed oops. The test
// actually uses a system property instead of a SparkConf, so we'll stick with that.
if (System.getProperty(TEST_USE_COMPRESSED_OOPS_KEY) != null) {
return System.getProperty(TEST_USE_COMPRESSED_OOPS_KEY).toBoolean
}
// java.vm.info provides compressed ref info for IBM and OpenJ9 JDKs
val javaVendor = System.getProperty("java.vendor")
if (javaVendor.contains("IBM") || javaVendor.contains("OpenJ9")) {
return System.getProperty("java.vm.info").contains("Compressed Ref")
}
try {
val hotSpotMBeanName = "com.sun.management:type=HotSpotDiagnostic"
val server = ManagementFactory.getPlatformMBeanServer()
// NOTE: This should throw an exception in non-Sun JVMs
// scalastyle:off classforname
val hotSpotMBeanClass = Class.forName("com.sun.management.HotSpotDiagnosticMXBean")
val getVMMethod = hotSpotMBeanClass.getDeclaredMethod("getVMOption",
Class.forName("java.lang.String"))
// scalastyle:on classforname
val bean = ManagementFactory.newPlatformMXBeanProxy(server,
hotSpotMBeanName, hotSpotMBeanClass)
// TODO: We could use reflection on the VMOption returned ?
getVMMethod.invoke(bean, "UseCompressedOops").toString.contains("true")
} catch {
case e: Exception =>
// Guess whether they've enabled UseCompressedOops based on whether maxMemory < 32 GB
val guess = Runtime.getRuntime.maxMemory < (32L*1024*1024*1024)
val guessInWords = if (guess) "yes" else "not"
logWarning("Failed to check whether UseCompressedOops is set; assuming " + guessInWords)
return guess
}
}
/**
* The state of an ongoing size estimation. Contains a stack of objects to visit as well as an
* IdentityHashMap of visited objects, and provides utility methods for enqueueing new objects
* to visit.
*/
private class SearchState(val visited: IdentityHashMap[AnyRef, AnyRef]) {
val stack = new ArrayBuffer[AnyRef]
var size = 0L
def enqueue(obj: AnyRef) {
if (obj != null && !visited.containsKey(obj)) {
visited.put(obj, null)
stack += obj
}
}
def isFinished(): Boolean = stack.isEmpty
def dequeue(): AnyRef = {
val elem = stack.last
stack.trimEnd(1)
elem
}
}
/**
* Cached information about each class. We remember two things: the "shell size" of the class
* (size of all non-static fields plus the java.lang.Object size), and any fields that are
* pointers to objects.
*/
private class ClassInfo(
val shellSize: Long,
val pointerFields: List[Field]) {}
private def estimate(obj: AnyRef, visited: IdentityHashMap[AnyRef, AnyRef]): Long = {
val state = new SearchState(visited)
state.enqueue(obj)
while (!state.isFinished) {
visitSingleObject(state.dequeue(), state)
}
state.size
}
private def visitSingleObject(obj: AnyRef, state: SearchState) {
val cls = obj.getClass
if (cls.isArray) {
visitArray(obj, cls, state)
} else if (cls.getName.startsWith("scala.reflect")) {
// Many objects in the scala.reflect package reference global reflection objects which, in
// turn, reference many other large global objects. Do nothing in this case.
} else if (obj.isInstanceOf[ClassLoader] || obj.isInstanceOf[Class[_]]) {
// Hadoop JobConfs created in the interpreter have a ClassLoader, which greatly confuses
// the size estimator since it references the whole REPL. Do nothing in this case. In
// general all ClassLoaders and Classes will be shared between objects anyway.
} else {
obj match {
case s: KnownSizeEstimation =>
state.size += s.estimatedSize
case _ =>
val classInfo = getClassInfo(cls)
state.size += alignSize(classInfo.shellSize)
for (field <- classInfo.pointerFields) {
state.enqueue(field.get(obj))
}
}
}
}
// Estimate the size of arrays larger than ARRAY_SIZE_FOR_SAMPLING by sampling.
private val ARRAY_SIZE_FOR_SAMPLING = 400
private val ARRAY_SAMPLE_SIZE = 100 // should be lower than ARRAY_SIZE_FOR_SAMPLING
private def visitArray(array: AnyRef, arrayClass: Class[_], state: SearchState) {
val length = ScalaRunTime.array_length(array)
val elementClass = arrayClass.getComponentType()
// Arrays have object header and length field which is an integer
var arrSize: Long = alignSize(objectSize + INT_SIZE)
if (elementClass.isPrimitive) {
arrSize += alignSize(length.toLong * primitiveSize(elementClass))
state.size += arrSize
} else {
arrSize += alignSize(length.toLong * pointerSize)
state.size += arrSize
if (length <= ARRAY_SIZE_FOR_SAMPLING) {
var arrayIndex = 0
while (arrayIndex < length) {
state.enqueue(ScalaRunTime.array_apply(array, arrayIndex).asInstanceOf[AnyRef])
arrayIndex += 1
}
} else {
// Estimate the size of a large array by sampling elements without replacement.
// To exclude the shared objects that the array elements may link, sample twice
// and use the min one to calculate array size.
val rand = new Random(42)
val drawn = new OpenHashSet[Int](2 * ARRAY_SAMPLE_SIZE)
val s1 = sampleArray(array, state, rand, drawn, length)
val s2 = sampleArray(array, state, rand, drawn, length)
val size = math.min(s1, s2)
state.size += math.max(s1, s2) +
(size * ((length - ARRAY_SAMPLE_SIZE) / (ARRAY_SAMPLE_SIZE))).toLong
}
}
}
private def sampleArray(
array: AnyRef,
state: SearchState,
rand: Random,
drawn: OpenHashSet[Int],
length: Int): Long = {
var size = 0L
for (i <- 0 until ARRAY_SAMPLE_SIZE) {
var index = 0
do {
index = rand.nextInt(length)
} while (drawn.contains(index))
drawn.add(index)
val obj = ScalaRunTime.array_apply(array, index).asInstanceOf[AnyRef]
if (obj != null) {
size += SizeEstimator.estimate(obj, state.visited).toLong
}
}
size
}
private def primitiveSize(cls: Class[_]): Int = {
if (cls == classOf[Byte]) {
BYTE_SIZE
} else if (cls == classOf[Boolean]) {
BOOLEAN_SIZE
} else if (cls == classOf[Char]) {
CHAR_SIZE
} else if (cls == classOf[Short]) {
SHORT_SIZE
} else if (cls == classOf[Int]) {
INT_SIZE
} else if (cls == classOf[Long]) {
LONG_SIZE
} else if (cls == classOf[Float]) {
FLOAT_SIZE
} else if (cls == classOf[Double]) {
DOUBLE_SIZE
} else {
throw new IllegalArgumentException(
"Non-primitive class " + cls + " passed to primitiveSize()")
}
}
/**
* Get or compute the ClassInfo for a given class.
*/
private def getClassInfo(cls: Class[_]): ClassInfo = {
// Check whether we've already cached a ClassInfo for this class
val info = classInfos.get(cls)
if (info != null) {
return info
}
val parent = getClassInfo(cls.getSuperclass)
var shellSize = parent.shellSize
var pointerFields = parent.pointerFields
val sizeCount = Array.fill(fieldSizes.max + 1)(0)
// iterate through the fields of this class and gather information.
for (field <- cls.getDeclaredFields) {
if (!Modifier.isStatic(field.getModifiers)) {
val fieldClass = field.getType
if (fieldClass.isPrimitive) {
sizeCount(primitiveSize(fieldClass)) += 1
} else {
// Note: in Java 9+ this would be better with trySetAccessible and canAccess
try {
field.setAccessible(true) // Enable future get()'s on this field
pointerFields = field :: pointerFields
} catch {
// If the field isn't accessible, we can still record the pointer size
// but can't know more about the field, so ignore it
case _: SecurityException =>
// do nothing
// Java 9+ can throw InaccessibleObjectException but the class is Java 9+-only
case re: RuntimeException
if re.getClass.getSimpleName == "InaccessibleObjectException" =>
// do nothing
}
sizeCount(pointerSize) += 1
}
}
}
// Based on the simulated field layout code in Aleksey Shipilev's report:
// http://cr.openjdk.java.net/~shade/papers/2013-shipilev-fieldlayout-latest.pdf
// The code is in Figure 9.
// The simplified idea of field layout consists of 4 parts (see more details in the report):
//
// 1. field alignment: HotSpot lays out the fields aligned by their size.
// 2. object alignment: HotSpot rounds instance size up to 8 bytes
    // 3. consistent field layouts throughout the hierarchy: this means we should lay out the
    //    superclass first, and we can use the superclass's shellSize as a starting point to
    //    lay out the other fields in this class.
// 4. class alignment: HotSpot rounds field blocks up to HeapOopSize not 4 bytes, confirmed
// with Aleksey. see https://bugs.openjdk.java.net/browse/CODETOOLS-7901322
//
    // The real-world field layout is much more complicated: there are three kinds of field
    // ordering in Java 8, and we don't consider the @contended annotation introduced by Java 8.
// see the HotSpot classloader code, layout_fields method for more details.
// hg.openjdk.java.net/jdk8/jdk8/hotspot/file/tip/src/share/vm/classfile/classFileParser.cpp
var alignedSize = shellSize
for (size <- fieldSizes if sizeCount(size) > 0) {
val count = sizeCount(size).toLong
// If there are internal gaps, smaller field can fit in.
alignedSize = math.max(alignedSize, alignSizeUp(shellSize, size) + size * count)
shellSize += size * count
}
    // Choose the larger size as the new shellSize (clearly alignedSize >= shellSize), and
    // round up the instance field blocks.
shellSize = alignSizeUp(alignedSize, pointerSize)
// Create and cache a new ClassInfo
val newInfo = new ClassInfo(shellSize, pointerFields)
classInfos.put(cls, newInfo)
newInfo
}
private def alignSize(size: Long): Long = alignSizeUp(size, ALIGN_SIZE)
  /**
   * Compute aligned size. The alignSize must be 2^n, otherwise the result will be wrong.
   * When alignSize = 2^n, alignSize - 1 = 2^n - 1. The binary representation of (alignSize - 1)
   * has only n trailing 1s (0b00...01...1), so ~(alignSize - 1) is 0b11...10...0. Hence,
   * (size + alignSize - 1) & ~(alignSize - 1) clears the last n bits, which yields a
   * multiple of alignSize. For example, alignSizeUp(13, 8) = (13 + 7) & ~7 = 16.
   */
private def alignSizeUp(size: Long, alignSize: Int): Long =
(size + alignSize - 1) & ~(alignSize - 1)
}
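// A minimal usage sketch (hypothetical, not part of the original file):
// estimating the heap footprint of arrays through the public estimate() entry point.
object SizeEstimatorExample {
  def demo(): Unit = {
    // Roughly: object header + length field + 1024 * 8 bytes of longs, 8-byte aligned.
    println(SizeEstimator.estimate(new Array[Long](1024)))
    // Object arrays additionally follow each non-null element reference.
    println(SizeEstimator.estimate(Array.fill(16)(new Object)))
  }
}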
|
pgandhi999/spark
|
core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
|
Scala
|
apache-2.0
| 15,793
|
/**
* Copyright: Copyright (C) 2016, ATS Advanced Telematic Systems GmbH
* License: MPL-2.0
*/
package org.genivi.sota.core.campaigns
import akka.actor.ActorSystem
import cats.implicits._
import org.genivi.sota.common.DeviceRegistry
import org.genivi.sota.core.UpdateService
import org.genivi.sota.core.data.{Campaign, UpdateRequest}
import org.genivi.sota.core.db.{Campaigns, Packages, UpdateSpecs}
import org.genivi.sota.data.{Interval, Namespace, PackageId, Uuid}
import org.genivi.sota.messaging.MessageBusPublisher
import org.genivi.sota.messaging.Messages.{CampaignLaunched, UriWithSimpleEncoding}
import org.slf4j.LoggerFactory
import slick.driver.MySQLDriver.api._
import scala.concurrent.{ExecutionContext, Future}
object CampaignLauncher {
import Campaign._
import UpdateService._
private val log = LoggerFactory.getLogger(this.getClass)
def cancel(id: Campaign.Id)(implicit db: Database, ec: ExecutionContext): Future[Unit] = {
val dbIO = Campaigns.fetchGroups(id).flatMap { grps =>
val cancelIO = grps.collect {
case CampaignGroup(_, Some(ur)) =>
UpdateSpecs.cancelAllUpdatesByRequest(ur)
}
DBIO.sequence(cancelIO).map(_ => ())
}
db.run(dbIO.transactionally)
}
def resolve(devices: Seq[Uuid]): DependencyResolver = { pkg =>
Future.successful(devices.map(_ -> Set(pkg.id)).toMap)
}
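  // E.g. (hypothetical): resolve(Seq(d1, d2)) yields a resolver that maps every given
  // device to Set(pkg.id), i.e. each device receives exactly the launched package.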
def sendMsg(namespace: Namespace, devices: Set[Uuid], pkgId: PackageId, updateId: Uuid,
messageBus: MessageBusPublisher)
(implicit db: Database, system: ActorSystem, ec: ExecutionContext)
: Future[Unit] = {
import scala.async.Async._
async {
val pkg = await(db.run(Packages.byId(namespace, pkgId)))
val msg = CampaignLaunched(namespace, updateId, devices, UriWithSimpleEncoding(pkg.uri),
pkgId, pkg.size, pkg.checkSum)
await(messageBus.publish(msg))
}
}
def launch (deviceRegistry: DeviceRegistry, updateService: UpdateService, id: Campaign.Id, lc: LaunchCampaign,
messageBus: MessageBusPublisher)
(implicit db: Database, system: ActorSystem, ec: ExecutionContext)
: Future[List[Uuid]] = {
def updateUpdateRequest(ur: UpdateRequest): UpdateRequest = {
ur.copy(periodOfValidity = Interval(lc.startDate.getOrElse(ur.periodOfValidity.start),
lc.endDate.getOrElse(ur.periodOfValidity.end)),
priority = lc.priority.getOrElse(ur.priority),
signature = lc.signature.getOrElse(ur.signature),
description = lc.description,
requestConfirmation = lc.requestConfirmation.getOrElse(ur.requestConfirmation)
)
}
def launchGroup (ns: Namespace, pkgId: PackageId, campGrp: CampaignGroup): Future[Uuid] = {
val groupId = campGrp.group
for {
updateRequest <- updateService.updateRequest(ns, pkgId).map(updateUpdateRequest)
devices <- deviceRegistry.fetchDevicesInGroup(ns, groupId)
_ <- updateService.queueUpdate(ns, updateRequest, resolve(devices))
uuid = Uuid.fromJava(updateRequest.id)
_ <- db.run(Campaigns.setUpdateUuid(id, groupId, uuid))
_ <- sendMsg(ns, devices.toSet, pkgId, uuid, messageBus)
} yield uuid
}
for {
camp <- db.run(Campaigns.fetch(id))
updateRefs <- camp.groups.toList.traverse(campGrp =>
launchGroup(camp.meta.namespace, camp.packageId.get, campGrp))
_ <- db.run(Campaigns.setAsLaunch(id))
} yield updateRefs
}
}
|
PDXostc/rvi_sota_server
|
core/src/main/scala/org/genivi/sota/core/campaigns/CampaignLauncher.scala
|
Scala
|
mpl-2.0
| 3,618
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api
import _root_.java.lang.{Boolean => JBool}
import _root_.java.util.concurrent.atomic.AtomicInteger
import org.apache.calcite.plan.RelOptUtil
import org.apache.calcite.plan.hep.HepMatchOrder
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.`type`.{RelDataType, RelDataTypeField, RelDataTypeFieldImpl, RelRecordType}
import org.apache.calcite.sql2rel.RelDecorrelator
import org.apache.calcite.tools.{RuleSet, RuleSets}
import org.apache.flink.api.common.functions.MapFunction
import org.apache.flink.api.common.typeinfo.{SqlTimeTypeInfo, TypeInformation}
import org.apache.flink.api.common.typeutils.CompositeType
import org.apache.flink.api.java.tuple.{Tuple2 => JTuple2}
import org.apache.flink.api.java.typeutils.{RowTypeInfo, TupleTypeInfo}
import org.apache.flink.api.scala.typeutils.CaseClassTypeInfo
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.datastream.DataStream
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
import org.apache.flink.table.calcite.{FlinkTypeFactory, RelTimeIndicatorConverter}
import org.apache.flink.table.descriptors.{ConnectorDescriptor, StreamTableDescriptor}
import org.apache.flink.table.explain.PlanJsonParser
import org.apache.flink.table.expressions._
import org.apache.flink.table.plan.nodes.FlinkConventions
import org.apache.flink.table.plan.nodes.datastream.{DataStreamRel, UpdateAsRetractionTrait}
import org.apache.flink.table.plan.rules.FlinkRuleSets
import org.apache.flink.table.plan.schema._
import org.apache.flink.table.plan.util.UpdatingPlanChecker
import org.apache.flink.table.runtime.conversion._
import org.apache.flink.table.runtime.types.{CRow, CRowTypeInfo}
import org.apache.flink.table.runtime.{CRowMapRunner, OutputRowtimeProcessFunction}
import org.apache.flink.table.sinks._
import org.apache.flink.table.sources.{StreamTableSource, TableSource, TableSourceUtil}
import org.apache.flink.table.typeutils.{TimeIndicatorTypeInfo, TypeCheckUtils}
import _root_.scala.collection.JavaConverters._
/**
* The base class for stream TableEnvironments.
*
* A TableEnvironment can be used to:
* - convert [[DataStream]] to a [[Table]]
* - register a [[DataStream]] as a table in the catalog
* - register a [[Table]] in the catalog
* - scan a registered table to obtain a [[Table]]
* - specify a SQL query on registered tables to obtain a [[Table]]
* - convert a [[Table]] into a [[DataStream]]
*
* @param execEnv The [[StreamExecutionEnvironment]] which is wrapped in this
* [[StreamTableEnvironment]].
* @param config The [[TableConfig]] of this [[StreamTableEnvironment]].
*/
abstract class StreamTableEnvironment(
private[flink] val execEnv: StreamExecutionEnvironment,
config: TableConfig)
extends TableEnvironment(config) {
// a counter for unique table names
private val nameCntr: AtomicInteger = new AtomicInteger(0)
// the naming pattern for internally registered tables.
private val internalNamePattern = "^_DataStreamTable_[0-9]+$".r
override def queryConfig: StreamQueryConfig = new StreamQueryConfig
/**
* Checks if the chosen table name is valid.
*
* @param name The table name to check.
*/
override protected def checkValidTableName(name: String): Unit = {
val m = internalNamePattern.findFirstIn(name)
m match {
case Some(_) =>
throw new TableException(s"Illegal Table name. " +
s"Please choose a name that does not contain the pattern $internalNamePattern")
case None =>
}
}
/** Returns a unique table name according to the internal naming pattern. */
override protected def createUniqueTableName(): String =
"_DataStreamTable_" + nameCntr.getAndIncrement()
/**
* Registers an internal [[StreamTableSource]] in this [[TableEnvironment]]'s catalog without
* name checking. Registered tables can be referenced in SQL queries.
*
* @param name The name under which the [[TableSource]] is registered.
* @param tableSource The [[TableSource]] to register.
*/
override protected def registerTableSourceInternal(
name: String,
tableSource: TableSource[_])
: Unit = {
tableSource match {
// check for proper stream table source
case streamTableSource: StreamTableSource[_] =>
// check that event-time is enabled if table source includes rowtime attributes
if (TableSourceUtil.hasRowtimeAttribute(streamTableSource) &&
execEnv.getStreamTimeCharacteristic != TimeCharacteristic.EventTime) {
throw new TableException(
s"A rowtime attribute requires an EventTime time characteristic in stream " +
s"environment. But is: ${execEnv.getStreamTimeCharacteristic}")
}
// register
getTable(name) match {
// check if a table (source or sink) is registered
case Some(table: TableSourceSinkTable[_, _]) => table.tableSourceTable match {
// wrapper contains source
case Some(_: TableSourceTable[_]) =>
throw new TableException(s"Table '$name' already exists. " +
s"Please choose a different name.")
// wrapper contains only sink (not source)
case _ =>
val enrichedTable = new TableSourceSinkTable(
Some(new StreamTableSourceTable(streamTableSource)),
table.tableSinkTable)
replaceRegisteredTable(name, enrichedTable)
}
// no table is registered
case _ =>
val newTable = new TableSourceSinkTable(
Some(new StreamTableSourceTable(streamTableSource)),
None)
registerTableInternal(name, newTable)
}
// not a stream table source
case _ =>
throw new TableException("Only StreamTableSource can be registered in " +
"StreamTableEnvironment")
}
}
/**
* Creates a table source and/or table sink from a descriptor.
*
* Descriptors allow for declaring the communication to external systems in an
* implementation-agnostic way. The classpath is scanned for suitable table factories that match
* the desired configuration.
*
* The following example shows how to read from a Kafka connector using a JSON format and
* registering a table source "MyTable" in append mode:
*
* {{{
*
* tableEnv
* .connect(
* new Kafka()
* .version("0.11")
* .topic("clicks")
* .property("zookeeper.connect", "localhost")
* .property("group.id", "click-group")
* .startFromEarliest())
* .withFormat(
* new Json()
* .jsonSchema("{...}")
* .failOnMissingField(false))
* .withSchema(
* new Schema()
* .field("user-name", "VARCHAR").from("u_name")
* .field("count", "DECIMAL")
* .field("proc-time", "TIMESTAMP").proctime())
* .inAppendMode()
* .registerSource("MyTable")
* }}}
*
* @param connectorDescriptor connector descriptor describing the external system
*/
def connect(connectorDescriptor: ConnectorDescriptor): StreamTableDescriptor = {
new StreamTableDescriptor(this, connectorDescriptor)
}
/**
* Registers an external [[TableSink]] with given field names and types in this
* [[TableEnvironment]]'s catalog.
* Registered sink tables can be referenced in SQL DML statements.
*
* Example:
*
* {{{
* // create a table sink and its field names and types
* val fieldNames: Array[String] = Array("a", "b", "c")
* val fieldTypes: Array[TypeInformation[_]] = Array(Types.STRING, Types.INT, Types.LONG)
* val tableSink: StreamTableSink = new YourTableSinkImpl(...)
*
* // register the table sink in the catalog
   * tableEnv.registerTableSink("output_table", fieldNames, fieldTypes, tableSink)
*
* // use the registered sink
* tableEnv.sqlUpdate("INSERT INTO output_table SELECT a, b, c FROM sourceTable")
* }}}
*
* @param name The name under which the [[TableSink]] is registered.
* @param fieldNames The field names to register with the [[TableSink]].
* @param fieldTypes The field types to register with the [[TableSink]].
* @param tableSink The [[TableSink]] to register.
*/
def registerTableSink(
name: String,
fieldNames: Array[String],
fieldTypes: Array[TypeInformation[_]],
tableSink: TableSink[_]): Unit = {
checkValidTableName(name)
if (fieldNames == null) throw new TableException("fieldNames must not be null.")
if (fieldTypes == null) throw new TableException("fieldTypes must not be null.")
if (fieldNames.length == 0) throw new TableException("fieldNames must not be empty.")
if (fieldNames.length != fieldTypes.length) {
throw new TableException("Same number of field names and types required.")
}
val configuredSink = tableSink.configure(fieldNames, fieldTypes)
registerTableSinkInternal(name, configuredSink)
}
/**
* Registers an external [[TableSink]] with already configured field names and field types in
* this [[TableEnvironment]]'s catalog.
* Registered sink tables can be referenced in SQL DML statements.
*
* @param name The name under which the [[TableSink]] is registered.
* @param configuredSink The configured [[TableSink]] to register.
*/
def registerTableSink(name: String, configuredSink: TableSink[_]): Unit = {
registerTableSinkInternal(name, configuredSink)
}
private def registerTableSinkInternal(name: String, configuredSink: TableSink[_]): Unit = {
// validate
checkValidTableName(name)
if (configuredSink.getFieldNames == null || configuredSink.getFieldTypes == null) {
throw new TableException("Table sink is not configured.")
}
if (configuredSink.getFieldNames.length == 0) {
throw new TableException("Field names must not be empty.")
}
if (configuredSink.getFieldNames.length != configuredSink.getFieldTypes.length) {
throw new TableException("Same number of field names and types required.")
}
// register
configuredSink match {
      // check for proper stream table sink
case _: StreamTableSink[_] =>
// check if a table (source or sink) is registered
getTable(name) match {
// table source and/or sink is registered
case Some(table: TableSourceSinkTable[_, _]) => table.tableSinkTable match {
// wrapper contains sink
case Some(_: TableSinkTable[_]) =>
throw new TableException(s"Table '$name' already exists. " +
s"Please choose a different name.")
// wrapper contains only source (not sink)
case _ =>
val enrichedTable = new TableSourceSinkTable(
table.tableSourceTable,
Some(new TableSinkTable(configuredSink)))
replaceRegisteredTable(name, enrichedTable)
}
// no table is registered
case _ =>
val newTable = new TableSourceSinkTable(
None,
Some(new TableSinkTable(configuredSink)))
registerTableInternal(name, newTable)
}
// not a stream table sink
case _ =>
throw new TableException(
"Only AppendStreamTableSink, UpsertStreamTableSink, and RetractStreamTableSink can be " +
"registered in StreamTableEnvironment.")
}
}
/**
* Writes a [[Table]] to a [[TableSink]].
*
* Internally, the [[Table]] is translated into a [[DataStream]] and handed over to the
* [[TableSink]] to write it.
*
   * @param inputTable The [[Table]] to write.
* @param sink The [[TableSink]] to write the [[Table]] to.
* @param queryConfig The configuration for the query to generate.
* @tparam T The expected type of the [[DataStream]] which represents the [[Table]].
*/
override private[flink] def writeToSink[T](
inputTable: Table,
sink: TableSink[T],
queryConfig: QueryConfig): Unit = {
val table = inputTable.asInstanceOf[TableImpl]
// Check query configuration
val streamQueryConfig = queryConfig match {
case streamConfig: StreamQueryConfig => streamConfig
case _ =>
throw new TableException("StreamQueryConfig required to configure stream query.")
}
sink match {
case retractSink: RetractStreamTableSink[_] =>
// retraction sink can always be used
val outputType = sink.getOutputType
// translate the Table into a DataStream and provide the type that the TableSink expects.
val result: DataStream[T] =
translate(
table,
streamQueryConfig,
updatesAsRetraction = true,
withChangeFlag = true)(outputType)
// Give the DataStream to the TableSink to emit it.
retractSink.asInstanceOf[RetractStreamTableSink[Any]]
.emitDataStream(result.asInstanceOf[DataStream[JTuple2[JBool, Any]]])
case upsertSink: UpsertStreamTableSink[_] =>
// optimize plan
val optimizedPlan = optimize(table.getRelNode, updatesAsRetraction = false)
// check for append only table
val isAppendOnlyTable = UpdatingPlanChecker.isAppendOnly(optimizedPlan)
upsertSink.setIsAppendOnly(isAppendOnlyTable)
// extract unique key fields
val tableKeys: Option[Array[String]] = UpdatingPlanChecker.getUniqueKeyFields(optimizedPlan)
// check that we have keys if the table has changes (is not append-only)
tableKeys match {
case Some(keys) => upsertSink.setKeyFields(keys)
case None if isAppendOnlyTable => upsertSink.setKeyFields(null)
case None if !isAppendOnlyTable => throw new TableException(
"UpsertStreamTableSink requires that Table has full primary keys if it is updated.")
}
val outputType = sink.getOutputType
val resultType = getResultType(table.getRelNode, optimizedPlan)
// translate the Table into a DataStream and provide the type that the TableSink expects.
val result: DataStream[T] =
translate(
optimizedPlan,
resultType,
streamQueryConfig,
withChangeFlag = true)(outputType)
// Give the DataStream to the TableSink to emit it.
upsertSink.asInstanceOf[UpsertStreamTableSink[Any]]
.emitDataStream(result.asInstanceOf[DataStream[JTuple2[JBool, Any]]])
case appendSink: AppendStreamTableSink[_] =>
// optimize plan
val optimizedPlan = optimize(table.getRelNode, updatesAsRetraction = false)
// verify table is an insert-only (append-only) table
if (!UpdatingPlanChecker.isAppendOnly(optimizedPlan)) {
throw new TableException(
"AppendStreamTableSink requires that Table has only insert changes.")
}
val outputType = sink.getOutputType
val resultType = getResultType(table.getRelNode, optimizedPlan)
// translate the Table into a DataStream and provide the type that the TableSink expects.
val result: DataStream[T] =
translate(
optimizedPlan,
resultType,
streamQueryConfig,
withChangeFlag = false)(outputType)
// Give the DataStream to the TableSink to emit it.
appendSink.asInstanceOf[AppendStreamTableSink[T]].emitDataStream(result)
case _ =>
throw new TableException("Stream Tables can only be emitted by AppendStreamTableSink, " +
"RetractStreamTableSink, or UpsertStreamTableSink.")
}
}
/**
* Creates a final converter that maps the internal row type to external type.
*
* @param inputTypeInfo the input of the sink
* @param schema the input schema with correct field names (esp. for POJO field mapping)
* @param requestedTypeInfo the output type of the sink
   * @param functionName name of the map function. Does not have to be unique, but must be
   *                     a valid Java class identifier.
*/
protected def getConversionMapper[OUT](
inputTypeInfo: TypeInformation[CRow],
schema: RowSchema,
requestedTypeInfo: TypeInformation[OUT],
functionName: String)
: MapFunction[CRow, OUT] = {
val converterFunction = generateRowConverterFunction[OUT](
inputTypeInfo.asInstanceOf[CRowTypeInfo].rowType,
schema,
requestedTypeInfo,
functionName
)
converterFunction match {
case Some(func) =>
new CRowMapRunner[OUT](func.name, func.code, func.returnType)
case _ =>
new CRowToRowMapFunction().asInstanceOf[MapFunction[CRow, OUT]]
}
}
/**
* Creates a converter that maps the internal CRow type to Scala or Java Tuple2 with change flag.
*
* @param physicalTypeInfo the input of the sink
* @param schema the input schema with correct field names (esp. for POJO field mapping)
* @param requestedTypeInfo the output type of the sink.
   * @param functionName name of the map function. Does not have to be unique, but must be
   *                     a valid Java class identifier.
*/
private def getConversionMapperWithChanges[OUT](
physicalTypeInfo: TypeInformation[CRow],
schema: RowSchema,
requestedTypeInfo: TypeInformation[OUT],
functionName: String)
: MapFunction[CRow, OUT] = requestedTypeInfo match {
// Scala tuple
case t: CaseClassTypeInfo[_]
if t.getTypeClass == classOf[(_, _)] && t.getTypeAt(0) == Types.BOOLEAN =>
val reqType = t.getTypeAt[Any](1)
// convert Row into requested type and wrap result in Tuple2
val converterFunction = generateRowConverterFunction(
physicalTypeInfo.asInstanceOf[CRowTypeInfo].rowType,
schema,
reqType,
functionName
)
converterFunction match {
case Some(func) =>
new CRowToScalaTupleMapRunner(
func.name,
func.code,
requestedTypeInfo.asInstanceOf[TypeInformation[(Boolean, Any)]]
).asInstanceOf[MapFunction[CRow, OUT]]
case _ =>
new CRowToScalaTupleMapFunction().asInstanceOf[MapFunction[CRow, OUT]]
}
// Java tuple
case t: TupleTypeInfo[_]
if t.getTypeClass == classOf[JTuple2[_, _]] && t.getTypeAt(0) == Types.BOOLEAN =>
val reqType = t.getTypeAt[Any](1)
// convert Row into requested type and wrap result in Tuple2
val converterFunction = generateRowConverterFunction(
physicalTypeInfo.asInstanceOf[CRowTypeInfo].rowType,
schema,
reqType,
functionName
)
converterFunction match {
case Some(func) =>
new CRowToJavaTupleMapRunner(
func.name,
func.code,
requestedTypeInfo.asInstanceOf[TypeInformation[JTuple2[JBool, Any]]]
).asInstanceOf[MapFunction[CRow, OUT]]
case _ =>
new CRowToJavaTupleMapFunction().asInstanceOf[MapFunction[CRow, OUT]]
}
}
/**
* Registers a [[DataStream]] as a table under a given name in the [[TableEnvironment]]'s
* catalog.
*
* @param name The name under which the table is registered in the catalog.
* @param dataStream The [[DataStream]] to register as table in the catalog.
* @tparam T the type of the [[DataStream]].
*/
protected def registerDataStreamInternal[T](
name: String,
dataStream: DataStream[T]): Unit = {
val (fieldNames, fieldIndexes) = getFieldInfo[T](dataStream.getType)
val dataStreamTable = new DataStreamTable[T](
dataStream,
fieldIndexes,
fieldNames
)
registerTableInternal(name, dataStreamTable)
}
/**
* Registers a [[DataStream]] as a table under a given name with field names as specified by
* field expressions in the [[TableEnvironment]]'s catalog.
*
* @param name The name under which the table is registered in the catalog.
* @param dataStream The [[DataStream]] to register as table in the catalog.
* @param fields The field expressions to define the field names of the table.
* @tparam T The type of the [[DataStream]].
*/
protected def registerDataStreamInternal[T](
name: String,
dataStream: DataStream[T],
fields: Array[Expression])
: Unit = {
val streamType = dataStream.getType
val bridgedFields = fields.map(expressionBridge.bridge).toArray[Expression]
// get field names and types for all non-replaced fields
val (fieldNames, fieldIndexes) = getFieldInfo[T](streamType, bridgedFields)
// validate and extract time attributes
val (rowtime, proctime) = validateAndExtractTimeAttributes(streamType, bridgedFields)
// check if event-time is enabled
if (rowtime.isDefined && execEnv.getStreamTimeCharacteristic != TimeCharacteristic.EventTime) {
throw new TableException(
s"A rowtime attribute requires an EventTime time characteristic in stream environment. " +
s"But is: ${execEnv.getStreamTimeCharacteristic}")
}
// adjust field indexes and field names
val indexesWithIndicatorFields = adjustFieldIndexes(fieldIndexes, rowtime, proctime)
val namesWithIndicatorFields = adjustFieldNames(fieldNames, rowtime, proctime)
val dataStreamTable = new DataStreamTable[T](
dataStream,
indexesWithIndicatorFields,
namesWithIndicatorFields
)
registerTableInternal(name, dataStreamTable)
}
/**
* Checks for at most one rowtime and proctime attribute.
* Returns the time attributes.
*
* @return rowtime attribute and proctime attribute
*/
private def validateAndExtractTimeAttributes(
streamType: TypeInformation[_],
exprs: Array[Expression])
: (Option[(Int, String)], Option[(Int, String)]) = {
val (isRefByPos, fieldTypes) = streamType match {
case c: CompositeType[_] =>
// determine schema definition mode (by position or by name)
(isReferenceByPosition(c, exprs), (0 until c.getArity).map(i => c.getTypeAt(i)).toArray)
case t: TypeInformation[_] =>
(false, Array(t))
}
var fieldNames: List[String] = Nil
var rowtime: Option[(Int, String)] = None
var proctime: Option[(Int, String)] = None
def checkRowtimeType(t: TypeInformation[_]): Unit = {
if (!(TypeCheckUtils.isLong(t) || TypeCheckUtils.isTimePoint(t))) {
throw new TableException(
s"The rowtime attribute can only replace a field with a valid time type, " +
s"such as Timestamp or Long. But was: $t")
}
}
def extractRowtime(idx: Int, name: String, origName: Option[String]): Unit = {
if (rowtime.isDefined) {
throw new TableException(
"The rowtime attribute can only be defined once in a table schema.")
} else {
// if the fields are referenced by position,
// it is possible to replace an existing field or append the time attribute at the end
if (isRefByPos) {
// aliases are not permitted
if (origName.isDefined) {
throw new TableException(
s"Invalid alias '${origName.get}' because fields are referenced by position.")
}
// check type of field that is replaced
if (idx < fieldTypes.length) {
checkRowtimeType(fieldTypes(idx))
}
}
// check reference-by-name
else {
val aliasOrName = origName.getOrElse(name)
streamType match {
// both alias and reference must have a valid type if they replace a field
case ct: CompositeType[_] if ct.hasField(aliasOrName) =>
val t = ct.getTypeAt(ct.getFieldIndex(aliasOrName))
checkRowtimeType(t)
// alias could not be found
case _ if origName.isDefined =>
throw new TableException(s"Alias '${origName.get}' must reference an existing field.")
case _ => // ok
}
}
rowtime = Some(idx, name)
}
}
def extractProctime(idx: Int, name: String): Unit = {
if (proctime.isDefined) {
throw new TableException(
"The proctime attribute can only be defined once in a table schema.")
} else {
// if the fields are referenced by position,
// it is only possible to append the time attribute at the end
if (isRefByPos) {
// check that proctime is only appended
if (idx < fieldTypes.length) {
throw new TableException(
"The proctime attribute can only be appended to the table schema and not replace " +
s"an existing field. Please move '$name' to the end of the schema.")
}
}
// check reference-by-name
else {
streamType match {
// proctime attribute must not replace a field
case ct: CompositeType[_] if ct.hasField(name) =>
throw new TableException(
s"The proctime attribute '$name' must not replace an existing field.")
case _ => // ok
}
}
proctime = Some(idx, name)
}
}
exprs.zipWithIndex.foreach {
case (RowtimeAttribute(UnresolvedFieldReference(name)), idx) =>
extractRowtime(idx, name, None)
case (Alias(RowtimeAttribute(UnresolvedFieldReference(origName)), name, _), idx) =>
extractRowtime(idx, name, Some(origName))
case (ProctimeAttribute(UnresolvedFieldReference(name)), idx) =>
extractProctime(idx, name)
case (Alias(ProctimeAttribute(UnresolvedFieldReference(_)), name, _), idx) =>
extractProctime(idx, name)
case (UnresolvedFieldReference(name), _) => fieldNames = name :: fieldNames
case (Alias(UnresolvedFieldReference(_), name, _), _) => fieldNames = name :: fieldNames
case (e, _) =>
throw new TableException(s"Time attributes can only be defined on field references. " +
s"Rowtime attributes can replace existing fields, proctime attributes can not. " +
s"But was: $e")
}
if (rowtime.isDefined && fieldNames.contains(rowtime.get._2)) {
throw new TableException(
"The rowtime attribute may not have the same name as an another field.")
}
if (proctime.isDefined && fieldNames.contains(proctime.get._2)) {
throw new TableException(
"The proctime attribute may not have the same name as an another field.")
}
(rowtime, proctime)
}
/**
* Injects markers for time indicator fields into the field indexes.
*
* @param fieldIndexes The field indexes into which the time indicators markers are injected.
* @param rowtime An optional rowtime indicator
* @param proctime An optional proctime indicator
* @return An adjusted array of field indexes.
*/
private def adjustFieldIndexes(
fieldIndexes: Array[Int],
rowtime: Option[(Int, String)],
proctime: Option[(Int, String)]): Array[Int] = {
// inject rowtime field
val withRowtime = rowtime match {
case Some(rt) =>
fieldIndexes.patch(rt._1, Seq(TimeIndicatorTypeInfo.ROWTIME_STREAM_MARKER), 0)
case _ =>
fieldIndexes
}
// inject proctime field
val withProctime = proctime match {
case Some(pt) =>
withRowtime.patch(pt._1, Seq(TimeIndicatorTypeInfo.PROCTIME_STREAM_MARKER), 0)
case _ =>
withRowtime
}
withProctime
}
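  // A worked example (hypothetical values, not from the original file):
  // fieldIndexes = Array(0, 1, 2) with a rowtime attribute at position 1 becomes
  // Array(0, ROWTIME_STREAM_MARKER, 1, 2), since patch(idx, Seq(marker), 0)
  // inserts the marker at idx without replacing any existing field index.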
/**
* Injects names of time indicator fields into the list of field names.
*
* @param fieldNames The array of field names into which the time indicator field names are
* injected.
* @param rowtime An optional rowtime indicator
* @param proctime An optional proctime indicator
* @return An adjusted array of field names.
*/
private def adjustFieldNames(
fieldNames: Array[String],
rowtime: Option[(Int, String)],
proctime: Option[(Int, String)]): Array[String] = {
// inject rowtime field
val withRowtime = rowtime match {
case Some(rt) => fieldNames.patch(rt._1, Seq(rowtime.get._2), 0)
case _ => fieldNames
}
// inject proctime field
val withProctime = proctime match {
case Some(pt) => withRowtime.patch(pt._1, Seq(proctime.get._2), 0)
case _ => withRowtime
}
withProctime
}
/**
* Returns the decoration rule set for this environment
* including a custom RuleSet configuration.
*/
protected def getDecoRuleSet: RuleSet = {
val calciteConfig = config.getCalciteConfig
calciteConfig.getDecoRuleSet match {
case None =>
getBuiltInDecoRuleSet
case Some(ruleSet) =>
if (calciteConfig.replacesDecoRuleSet) {
ruleSet
} else {
RuleSets.ofList((getBuiltInDecoRuleSet.asScala ++ ruleSet.asScala).asJava)
}
}
}
/**
* Returns the built-in normalization rules that are defined by the environment.
*/
protected def getBuiltInNormRuleSet: RuleSet = FlinkRuleSets.DATASTREAM_NORM_RULES
/**
* Returns the built-in optimization rules that are defined by the environment.
*/
protected def getBuiltInPhysicalOptRuleSet: RuleSet = FlinkRuleSets.DATASTREAM_OPT_RULES
/**
* Returns the built-in decoration rules that are defined by the environment.
*/
protected def getBuiltInDecoRuleSet: RuleSet = FlinkRuleSets.DATASTREAM_DECO_RULES
/**
* Generates the optimized [[RelNode]] tree from the original relational node tree.
*
* @param relNode The root node of the relational expression tree.
* @param updatesAsRetraction True if the sink requests updates as retraction messages.
* @return The optimized [[RelNode]] tree
*/
private[flink] def optimize(relNode: RelNode, updatesAsRetraction: Boolean): RelNode = {
val convSubQueryPlan = optimizeConvertSubQueries(relNode)
val expandedPlan = optimizeExpandPlan(convSubQueryPlan)
val decorPlan = RelDecorrelator.decorrelateQuery(expandedPlan, getRelBuilder)
val planWithMaterializedTimeAttributes =
RelTimeIndicatorConverter.convert(decorPlan, getRelBuilder.getRexBuilder)
val normalizedPlan = optimizeNormalizeLogicalPlan(planWithMaterializedTimeAttributes)
val logicalPlan = optimizeLogicalPlan(normalizedPlan)
val physicalPlan = optimizePhysicalPlan(logicalPlan, FlinkConventions.DATASTREAM)
optimizeDecoratePlan(physicalPlan, updatesAsRetraction)
}
private[flink] def optimizeDecoratePlan(
relNode: RelNode,
updatesAsRetraction: Boolean): RelNode = {
val decoRuleSet = getDecoRuleSet
if (decoRuleSet.iterator().hasNext) {
val planToDecorate = if (updatesAsRetraction) {
relNode.copy(
relNode.getTraitSet.plus(new UpdateAsRetractionTrait(true)),
relNode.getInputs)
} else {
relNode
}
runHepPlannerSequentially(
HepMatchOrder.BOTTOM_UP,
decoRuleSet,
planToDecorate,
planToDecorate.getTraitSet)
} else {
relNode
}
}
/**
* Translates a [[Table]] into a [[DataStream]].
*
* The transformation involves optimizing the relational expression tree as defined by
* Table API calls and / or SQL queries and generating corresponding [[DataStream]] operators.
*
* @param table The root node of the relational expression tree.
* @param queryConfig The configuration for the query to generate.
* @param updatesAsRetraction Set to true to encode updates as retraction messages.
* @param withChangeFlag Set to true to emit records with change flags.
* @param tpe The [[TypeInformation]] of the resulting [[DataStream]].
* @tparam A The type of the resulting [[DataStream]].
* @return The [[DataStream]] that corresponds to the translated [[Table]].
*/
protected def translate[A](
table: Table,
queryConfig: StreamQueryConfig,
updatesAsRetraction: Boolean,
withChangeFlag: Boolean)(implicit tpe: TypeInformation[A]): DataStream[A] = {
val relNode = table.asInstanceOf[TableImpl].getRelNode
val dataStreamPlan = optimize(relNode, updatesAsRetraction)
val rowType = getResultType(relNode, dataStreamPlan)
translate(dataStreamPlan, rowType, queryConfig, withChangeFlag)
}
/**
* Translates a logical [[RelNode]] into a [[DataStream]].
*
* @param logicalPlan The root node of the relational expression tree.
* @param logicalType The row type of the result. Since the logicalPlan can lose the
* field naming during optimization we pass the row type separately.
* @param queryConfig The configuration for the query to generate.
* @param withChangeFlag Set to true to emit records with change flags.
* @param tpe The [[TypeInformation]] of the resulting [[DataStream]].
* @tparam A The type of the resulting [[DataStream]].
* @return The [[DataStream]] that corresponds to the translated [[Table]].
*/
protected def translate[A](
logicalPlan: RelNode,
logicalType: RelDataType,
queryConfig: StreamQueryConfig,
withChangeFlag: Boolean)
(implicit tpe: TypeInformation[A]): DataStream[A] = {
// if no change flags are requested, verify table is an insert-only (append-only) table.
if (!withChangeFlag && !UpdatingPlanChecker.isAppendOnly(logicalPlan)) {
throw new TableException(
"Table is not an append-only table. " +
"Use the toRetractStream() in order to handle add and retract messages.")
}
// get CRow plan
val plan: DataStream[CRow] = translateToCRow(logicalPlan, queryConfig)
val rowtimeFields = logicalType
.getFieldList.asScala
.filter(f => FlinkTypeFactory.isRowtimeIndicatorType(f.getType))
// convert the input type for the conversion mapper
// the input will be changed in the OutputRowtimeProcessFunction later
val convType = if (rowtimeFields.size > 1) {
throw new TableException(
s"Found more than one rowtime field: [${rowtimeFields.map(_.getName).mkString(", ")}] in " +
s"the table that should be converted to a DataStream.\\n" +
s"Please select the rowtime field that should be used as event-time timestamp for the " +
s"DataStream by casting all other fields to TIMESTAMP.")
} else if (rowtimeFields.size == 1) {
val origRowType = plan.getType.asInstanceOf[CRowTypeInfo].rowType
val convFieldTypes = origRowType.getFieldTypes.map { t =>
if (FlinkTypeFactory.isRowtimeIndicatorType(t)) {
SqlTimeTypeInfo.TIMESTAMP
} else {
t
}
}
CRowTypeInfo(new RowTypeInfo(convFieldTypes, origRowType.getFieldNames))
} else {
plan.getType
}
// convert CRow to output type
val conversion: MapFunction[CRow, A] = if (withChangeFlag) {
getConversionMapperWithChanges(
convType,
new RowSchema(logicalType),
tpe,
"DataStreamSinkConversion")
} else {
getConversionMapper(
convType,
new RowSchema(logicalType),
tpe,
"DataStreamSinkConversion")
}
val rootParallelism = plan.getParallelism
val withRowtime = if (rowtimeFields.isEmpty) {
// no rowtime field to set
plan.map(conversion)
} else {
// set the only rowtime field as event-time timestamp for DataStream
// and convert it to SQL timestamp
plan.process(new OutputRowtimeProcessFunction[A](conversion, rowtimeFields.head.getIndex))
}
withRowtime
.returns(tpe)
.name(s"to: ${tpe.getTypeClass.getSimpleName}")
.setParallelism(rootParallelism)
}
/**
* Translates a logical [[RelNode]] plan into a [[DataStream]] of type [[CRow]].
*
* @param logicalPlan The logical plan to translate.
* @param queryConfig The configuration for the query to generate.
* @return The [[DataStream]] of type [[CRow]].
*/
protected def translateToCRow(
logicalPlan: RelNode,
queryConfig: StreamQueryConfig): DataStream[CRow] = {
logicalPlan match {
case node: DataStreamRel =>
node.translateToPlan(this, queryConfig)
case _ =>
throw new TableException("Cannot generate DataStream due to an invalid logical plan. " +
"This is a bug and should not happen. Please file an issue.")
}
}
/**
* Returns the record type of the optimized plan with field names of the logical plan.
*/
private def getResultType(originRelNode: RelNode, optimizedPlan: RelNode): RelRecordType = {
// zip original field names with optimized field types
val fieldTypes = originRelNode.getRowType.getFieldList.asScala
.zip(optimizedPlan.getRowType.getFieldList.asScala)
// get name of original plan and type of optimized plan
.map(x => (x._1.getName, x._2.getType))
// add field indexes
.zipWithIndex
// build new field types
.map(x => new RelDataTypeFieldImpl(x._1._1, x._2, x._1._2))
// build a record type from list of field types
new RelRecordType(
fieldTypes.toList.asInstanceOf[List[RelDataTypeField]].asJava)
}
/**
* Returns the AST of the specified Table API and SQL queries and the execution plan to compute
* the result of the given [[Table]].
*
* @param table The table for which the AST and execution plan will be returned.
*/
def explain(table: Table): String = {
val ast = table.asInstanceOf[TableImpl].getRelNode
val optimizedPlan = optimize(ast, updatesAsRetraction = false)
val dataStream = translateToCRow(optimizedPlan, queryConfig)
val env = dataStream.getExecutionEnvironment
val jsonSqlPlan = env.getExecutionPlan
val sqlPlan = PlanJsonParser.getSqlExecutionPlan(jsonSqlPlan, false)
s"== Abstract Syntax Tree ==" +
System.lineSeparator +
s"${RelOptUtil.toString(ast)}" +
System.lineSeparator +
s"== Optimized Logical Plan ==" +
System.lineSeparator +
s"${RelOptUtil.toString(optimizedPlan)}" +
System.lineSeparator +
s"== Physical Execution Plan ==" +
System.lineSeparator +
s"$sqlPlan"
}
}
|
ueshin/apache-flink
|
flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/api/StreamTableEnvironment.scala
|
Scala
|
apache-2.0
| 39,271
|
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.fs.storage.converter
import java.util.regex.Pattern
import com.typesafe.scalalogging.LazyLogging
import org.locationtech.geomesa.fs.storage.api._
import org.locationtech.geomesa.fs.storage.converter.ConverterStorageFactory._
import org.locationtech.geomesa.utils.geotools.{SftArgResolver, SftArgs}
class ConverterMetadataFactory extends StorageMetadataFactory with LazyLogging {
import scala.collection.JavaConverters._
override def name: String = ConverterStorage.Encoding
override def load(context: FileSystemContext): Option[StorageMetadata] = {
if (!Option(context.conf.get(ConverterPathParam)).contains(context.root.getName)) { None } else {
try {
val sft = {
val sftArg = Option(context.conf.get(SftConfigParam))
.orElse(Option(context.conf.get(SftNameParam)))
            .getOrElse(throw new IllegalArgumentException("Must provide either simple feature type config or name"))
SftArgResolver.getArg(SftArgs(sftArg, null)) match {
case Left(e) => throw new IllegalArgumentException("Could not load SimpleFeatureType with provided parameters", e)
case Right(schema) => schema
}
}
val partitionSchemeOpts =
context.conf.getValByRegex(Pattern.quote(PartitionOptsPrefix) + ".*").asScala.map {
case (k, v) => k.substring(PartitionOptsPrefix.length) -> v
}
val scheme = {
val partitionSchemeName =
Option(context.conf.get(PartitionSchemeParam))
              .getOrElse(throw new IllegalArgumentException("Must provide partition scheme name"))
PartitionSchemeFactory.load(sft, NamedOptions(partitionSchemeName, partitionSchemeOpts.toMap))
}
val leafStorage = Option(context.conf.get(LeafStorageParam)).map(_.toBoolean).getOrElse {
val deprecated = partitionSchemeOpts.get("leaf-storage").map { s =>
logger.warn("Using deprecated leaf-storage partition-scheme option. Please define leaf-storage using " +
s"'$LeafStorageParam'")
s.toBoolean
}
deprecated.getOrElse(true)
}
Some(new ConverterMetadata(context, sft, scheme, leafStorage))
} catch {
case e: IllegalArgumentException => logger.warn(s"Couldn't create converter storage metadata: $e", e); None
}
}
}
override def create(context: FileSystemContext, config: Map[String, String], meta: Metadata): StorageMetadata =
throw new NotImplementedError("Converter storage is read only")
}
|
elahrvivaz/geomesa
|
geomesa-fs/geomesa-fs-storage/geomesa-fs-storage-convert/src/main/scala/org/locationtech/geomesa/fs/storage/converter/ConverterMetadataFactory.scala
|
Scala
|
apache-2.0
| 3,065
|
package com.twitter.scrooge.java_generator
import com.twitter.scrooge.ast._
import com.twitter.scrooge.frontend.ScroogeInternalException
// The `rendered` string is what gets printed to the output stream; `value` is what renderConstValue returns.
class ConstValue(val rendered: String, val value: String)
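// Example (illustrative, not from the original source): for an i64 literal 42,
// renderConstValue below returns ConstValue(rendered = null, value = "42L");
// for containers and structs it returns generated statements in `rendered`
// and a temporary variable name in `value`.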
class PrintConstController(
val name: String,
fieldType: FieldType,
value: RHS,
generator: ApacheJavaGenerator,
ns: Option[Identifier],
val in_static: Boolean = false,
val defval: Boolean = false)
extends BaseController(generator, ns) {
val field_type = new FieldTypeController(fieldType, generator)
def rendered_value = renderConstValue(value, fieldType).value
def map_values = {
val values = value.asInstanceOf[MapRHS]
val mapType = fieldType.asInstanceOf[MapType]
values.elems map { case (k, v) =>
val renderedKey = renderConstValue(k, mapType.keyType)
val renderedValue = renderConstValue(v, mapType.valueType)
Map("key" -> renderedKey.value, "value" -> renderedValue.value, "rendered_key" -> renderedKey.rendered,
"rendered_value" -> renderedValue.rendered)
}
}
def list_or_set_values = {
value match {
case SetRHS(elems) => {
val setType = fieldType.asInstanceOf[SetType]
elems.map { v =>
val renderedValue = renderConstValue(v, setType.eltType)
Map("value" -> renderedValue.value, "rendered_value" -> renderedValue.rendered)
}
}
case ListRHS(elems) => {
val listType = fieldType.asInstanceOf[ListType]
elems.map { v =>
val renderedValue = renderConstValue(v, listType.eltType)
Map("value" -> renderedValue.value, "rendered_value" -> renderedValue.rendered)
}
}
case _ => throw new ScroogeInternalException("Invalid state PrintConstController")
}
}
def struct_values: Seq[Map[String, String]] = {
value match {
case struct: StructRHS =>
        val values = struct.elems
val structType = fieldType.asInstanceOf[StructType]
for {
f <- structType.struct.fields
v <- values.get(f)
} yield {
val renderedValue = renderConstValue(v, f.fieldType)
Map(
"key" -> f.sid.name,
"value" -> renderedValue.value,
"rendered_value" -> renderedValue.rendered)
}
case union: UnionRHS =>
val renderedValue = renderConstValue(union.initializer, union.field.fieldType)
Seq(Map(
"key" -> union.field.sid.name,
"value" -> renderedValue.value,
"rendered_value" -> renderedValue.rendered))
      case _ => throw new ScroogeInternalException("Invalid state struct_values")
    }
}
private def renderConstValue(constant: RHS, fieldType: FieldType): ConstValue = {
fieldType match {
case TString => {
val constValue = constant.asInstanceOf[StringLiteral].value
new ConstValue(null, "\\"" + constValue + "\\"")
}
case TBool => {
constant match {
case intValue: IntLiteral =>
new ConstValue(null, if (intValue.value > 0) "true" else "false")
case bool: BoolLiteral =>
new ConstValue(null, if (bool.value) "true" else "false")
case _ => throw new ScroogeInternalException("BoolType has invalid value: " + constant)
}
}
case TByte => new ConstValue(null, "(byte)" + constant.asInstanceOf[IntLiteral].value)
case TI16 => new ConstValue(null, "(short)" + constant.asInstanceOf[IntLiteral].value)
case TI32 => new ConstValue(null, constant.asInstanceOf[IntLiteral].value.toString)
case TI64 => new ConstValue(null, constant.asInstanceOf[IntLiteral].value + "L")
case TDouble => {
constant match {
case DoubleLiteral(value) => {
// TODO: this is here to match apache code but probably can be removed.
if (value.floor == value) {
new ConstValue(null, value.toInt.toString)
} else {
new ConstValue(null, value.toString)
}
}
case IntLiteral(value) => new ConstValue(null, "(double)" + value.toString)
case _ => throw new ScroogeInternalException("Invalid state renderConstValue")
}
}
case EnumType(enumValue, scope) => {
val ordinalValue = constant match {
case intValue: IntLiteral => intValue.value.toInt
case enumValue: EnumRHS => enumValue.value.value
case _ => throw new ScroogeInternalException("Invalid state for renderConstValue")
}
val namedValue = enumValue.values filter { v =>
v.value == ordinalValue
}
if (namedValue.isEmpty) {
throw new ScroogeInternalException("Enum value not found")
} else {
val enumFqn = generator.qualifyNamedType(enumValue.sid, scope)
          val enumValueFqn = namedValue.head.sid.addScope(enumFqn)
new ConstValue(null, enumValueFqn.fullName)
}
}
case _ => {
val tmpVal = generator.tmp()
new ConstValue(generator.printConstValue(tmpVal, fieldType, constant, ns, in_static = true), tmpVal)
}
}
}
}
|
benhoyt/scrooge
|
scrooge-generator/src/main/scala/com/twitter/scrooge/java_generator/PrintConstController.scala
|
Scala
|
apache-2.0
| 5,192
|
package com.soteradefense.datawake.trails.topology.search.bolts
import java.sql.{Connection, DriverManager, PreparedStatement}
import java.util
import backtype.storm.task.TopologyContext
import backtype.storm.topology.{BasicOutputCollector, OutputFieldsDeclarer}
import backtype.storm.tuple.Tuple
import com.soteradefense.datawake.trails.bolts.HighLevelKafkaProducer
import com.soteradefense.datawake.trails.sql.SqlCredentials
import kafka.producer.KeyedMessage
import kafka.common.FailedToSendMessageException
class UpdateRankKafkaProducer(sqlCredentials: SqlCredentials, selectSql: String, topic: String, brokers: String) extends HighLevelKafkaProducer(brokers, topic) {
var connection: Connection = null
override def prepare(stormConf: util.Map[_, _], context: TopologyContext): Unit = {
super.prepare(stormConf, context)
Class.forName("com.mysql.jdbc.Driver")
connection = DriverManager.getConnection(sqlCredentials.jdbc, sqlCredentials.username, sqlCredentials.password)
}
override def execute(input: Tuple, collector: BasicOutputCollector): Unit = {
val org = input.getStringByField("updateOrg")
val domain = input.getStringByField("updateDomain")
val trail = input.getStringByField("updateTrail")
var selectAllUrls: PreparedStatement = null
try {
selectAllUrls = connection.prepareStatement(selectSql)
selectAllUrls.setString(1, org)
selectAllUrls.setString(2, domain)
selectAllUrls.setString(3, trail)
val urlSet = selectAllUrls.executeQuery()
val builder = new StringBuilder
while (urlSet.next()) {
val url = urlSet.getString("url")
builder.append(org)
builder.append("\\0")
builder.append(domain)
builder.append("\\0")
builder.append(trail)
builder.append("\\0")
builder.append(url)
val message = new KeyedMessage[String, String](topic, builder.toString())
try {
kafkaProducer.send(message)
} catch {
          case e: FailedToSendMessageException => logger.error("Error publishing " + message + " to topic: " + topic, e)
}
builder.setLength(0)
}
} finally {
if (selectAllUrls != null)
selectAllUrls.close()
}
}
  override def cleanup(): Unit = {
    // Close the JDBC connection opened in prepare() so it is not leaked.
    if (connection != null) connection.close()
    super.cleanup()
  }
override def declareOutputFields(declarer: OutputFieldsDeclarer): Unit = {
}
}
|
Sotera/datawake-prefetch
|
trail-specific-search/src/main/scala/com/soteradefense/datawake/trails/topology/search/bolts/UpdateRankKafkaProducer.scala
|
Scala
|
apache-2.0
| 2,388
|
/*
* Copyright 2012 Pellucid and Zenexity
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package datomisca
class AddDbFunction(
val ident: Keyword,
lang: String,
params: Seq[String],
code: String,
imports: String = "",
requires: String = "",
partition: Partition = Partition.USER
) extends TxData with KeywordIdentified {
def toTxData: AnyRef =
datomic.Util.map(
Namespace.DB / "id", DId(partition).toDatomicId,
Namespace.DB / "ident", ident,
Namespace.DB / "fn", datomic.Peer.function(
datomic.Util.map(
AddDbFunction.lang, lang,
AddDbFunction.imports, datomic.Util.read(imports),
AddDbFunction.requires, datomic.Util.read(requires),
AddDbFunction.params, datomic.Util.list(params: _*),
AddDbFunction.code, code
)
)
)
override def toString = toTxData.toString
}
abstract class TypedAddDbFunction(fn: AddDbFunction) extends TxData with KeywordIdentified {
override val ident = fn.ident
def toTxData: AnyRef = fn.toTxData
}
// TypedAddDbFunction 0-22 in managed source
/*
 * Construct a vanilla database function.
*/
object AddDbFunction {
private val lang: Keyword = clojure.lang.Keyword.intern(null, "lang")
private val imports: Keyword = clojure.lang.Keyword.intern(null, "imports")
private val requires: Keyword = clojure.lang.Keyword.intern(null, "requires")
private val params: Keyword = clojure.lang.Keyword.intern(null, "params")
private val code: Keyword = clojure.lang.Keyword.intern(null, "code")
def apply(kw: Keyword)
(params: String*)
(lang: String, partition: Partition = Partition.USER, imports: String = "", requires: String = "")
(code: String) =
new AddDbFunction(kw, lang, params, code, imports, requires, partition)
}
|
Enalmada/datomisca
|
core/src/main/scala/datomisca/dbFunctions.scala
|
Scala
|
apache-2.0
| 2,395
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.graphx.util
import org.apache.spark.SparkContext
import org.apache.spark.graphx.Graph
import org.apache.spark.storage.StorageLevel
import org.apache.spark.util.PeriodicCheckpointer
/**
* This class helps with persisting and checkpointing Graphs.
* Specifically, it automatically handles persisting and (optionally) checkpointing, as well as
* unpersisting and removing checkpoint files.
*
* Users should call update() when a new graph has been created,
* before the graph has been materialized. After updating [[PeriodicGraphCheckpointer]], users are
* responsible for materializing the graph to ensure that persisting and checkpointing actually
* occur.
*
* When update() is called, this does the following:
* - Persist new graph (if not yet persisted), and put in queue of persisted graphs.
* - Unpersist graphs from queue until there are at most 3 persisted graphs.
* - If using checkpointing and the checkpoint interval has been reached,
* - Checkpoint the new graph, and put in a queue of checkpointed graphs.
* - Remove older checkpoints.
*
* WARNINGS:
* - This class should NOT be copied (since copies may conflict on which Graphs should be
* checkpointed).
* - This class removes checkpoint files once later graphs have been checkpointed.
* However, references to the older graphs will still return isCheckpointed = true.
*
* Example usage:
* {{{
* val (graph1, graph2, graph3, ...) = ...
* val cp = new PeriodicGraphCheckpointer(2, sc)
 *  cp.update(graph1)
 *  graph1.vertices.count(); graph1.edges.count()
 *  // persisted: graph1
 *  cp.update(graph2)
 *  graph2.vertices.count(); graph2.edges.count()
 *  // persisted: graph1, graph2
 *  // checkpointed: graph2
 *  cp.update(graph3)
 *  graph3.vertices.count(); graph3.edges.count()
 *  // persisted: graph1, graph2, graph3
 *  // checkpointed: graph2
 *  cp.update(graph4)
 *  graph4.vertices.count(); graph4.edges.count()
 *  // persisted: graph2, graph3, graph4
 *  // checkpointed: graph4
 *  cp.update(graph5)
* graph5.vertices.count(); graph5.edges.count()
* // persisted: graph3, graph4, graph5
* // checkpointed: graph4
* }}}
*
* @param checkpointInterval Graphs will be checkpointed at this interval.
 *                           If this interval is set to -1, checkpointing is disabled.
* @tparam VD Vertex descriptor type
* @tparam ED Edge descriptor type
*
*/
private[spark] class PeriodicGraphCheckpointer[VD, ED](
checkpointInterval: Int,
sc: SparkContext)
extends PeriodicCheckpointer[Graph[VD, ED]](checkpointInterval, sc) {
override protected def checkpoint(data: Graph[VD, ED]): Unit = data.checkpoint()
override protected def isCheckpointed(data: Graph[VD, ED]): Boolean = data.isCheckpointed
override protected def persist(data: Graph[VD, ED]): Unit = {
if (data.vertices.getStorageLevel == StorageLevel.NONE) {
/* We need to use cache because persist does not honor the default storage level requested
* when constructing the graph. Only cache does that.
*/
data.vertices.cache()
}
if (data.edges.getStorageLevel == StorageLevel.NONE) {
data.edges.cache()
}
}
override protected def unpersist(data: Graph[VD, ED]): Unit = data.unpersist(blocking = false)
override protected def getCheckpointFiles(data: Graph[VD, ED]): Iterable[String] = {
data.getCheckpointFiles
}
}
|
bravo-zhang/spark
|
graphx/src/main/scala/org/apache/spark/graphx/util/PeriodicGraphCheckpointer.scala
|
Scala
|
apache-2.0
| 4,263
|
/***********************************************************************
* Copyright (c) 2017-2020 IBM
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.cassandra.index
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import org.locationtech.geomesa.cassandra.{ColumnSelect, NamedColumn, RowSelect}
import org.locationtech.geomesa.index.api._
import org.locationtech.geomesa.index.index.attribute.AttributeIndexKey
object SharedAttributeColumnMapper extends CassandraColumnMapper {
private val Index = NamedColumn("attrIdx", 0, "smallint", classOf[Short], partition = true)
private val Value = NamedColumn("attrVal", 1, "text", classOf[String])
private val Secondary = NamedColumn("secondary", 2, "blob", classOf[ByteBuffer])
private val FeatureId = CassandraColumnMapper.featureIdColumn(3)
private val Feature = CassandraColumnMapper.featureColumn(4)
override val columns: Seq[NamedColumn] = Seq(Index, Value, Secondary, FeatureId, Feature)
override def bind(value: SingleRowKeyValue[_]): Seq[AnyRef] = {
val AttributeIndexKey(i, v, _) = value.key
val secondary = ByteBuffer.wrap(value.tier)
val fid = new String(value.id, StandardCharsets.UTF_8)
val Seq(feature) = value.values.map(v => ByteBuffer.wrap(v.value))
Seq(Short.box(i), v, secondary, fid, feature)
}
override def bindDelete(value: SingleRowKeyValue[_]): Seq[AnyRef] = {
val AttributeIndexKey(i, v, _) = value.key
val secondary = ByteBuffer.wrap(value.tier)
val fid = new String(value.id, StandardCharsets.UTF_8)
Seq(Short.box(i), v, secondary, fid)
}
override def select(range: ScanRange[_], tieredKeyRanges: Seq[ByteRange]): Seq[RowSelect] = {
val primary = range.asInstanceOf[ScanRange[AttributeIndexKey]] match {
case SingleRowRange(row) =>
val i = Short.box(row.i)
val indexSelect = ColumnSelect(Index, i, i, startInclusive = true, endInclusive = true)
val valueSelect = ColumnSelect(Value, row.value, row.value, startInclusive = true, endInclusive = true)
Seq(indexSelect, valueSelect)
case BoundedRange(lo, hi) =>
val i = Short.box(lo.i) // note: should be the same for upper and lower
val indexSelect = ColumnSelect(Index, i, i, startInclusive = true, endInclusive = true)
val valueSelect = ColumnSelect(Value, lo.value, hi.value, lo.inclusive, hi.inclusive)
Seq(indexSelect, valueSelect)
case LowerBoundedRange(lo) =>
val i = Short.box(lo.i)
val indexSelect = ColumnSelect(Index, i, i, startInclusive = true, endInclusive = true)
val valueSelect = ColumnSelect(Value, lo.value, null, lo.inclusive, endInclusive = false)
Seq(indexSelect, valueSelect)
case UpperBoundedRange(hi) =>
val i = Short.box(hi.i)
val indexSelect = ColumnSelect(Index, i, i, startInclusive = true, endInclusive = true)
val valueSelect = ColumnSelect(Value, null, hi.value, startInclusive = false, hi.inclusive)
Seq(indexSelect, valueSelect)
case PrefixRange(prefix) =>
val i = Short.box(prefix.i)
val indexSelect = ColumnSelect(Index, i, i, startInclusive = true, endInclusive = true)
val valueSelect = ColumnSelect(Value, prefix.value, s"${prefix.value}zzzz", prefix.inclusive, endInclusive = false) // TODO ?
Seq(indexSelect, valueSelect)
case UnboundedRange(empty) =>
val i = Short.box(empty.i)
Seq(ColumnSelect(Index, i, i, startInclusive = true, endInclusive = true))
case _ => throw new IllegalArgumentException(s"Unexpected range type $range")
}
val clause = if (tieredKeyRanges.isEmpty) { primary } else {
val minTier = ByteRange.min(tieredKeyRanges)
val maxTier = ByteRange.max(tieredKeyRanges)
primary :+ ColumnSelect(Secondary, ByteBuffer.wrap(minTier), ByteBuffer.wrap(maxTier), startInclusive = true, endInclusive = true)
}
Seq(RowSelect(clause))
}
}
|
aheyne/geomesa
|
geomesa-cassandra/geomesa-cassandra-datastore/src/main/scala/org/locationtech/geomesa/cassandra/index/SharedAttributeColumnMapper.scala
|
Scala
|
apache-2.0
| 4,356
|
package hu.frankdavid.ranking.strategy.util
object RichDouble {
case class Precision(p: Double)
implicit class DoubleWithAlmostEquals(val d: Double) extends AnyVal {
def ~=(d2: Double)(implicit p: Precision) = (d - d2).abs < p.p
}
}
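// Illustrative usage (not part of the original file): with an implicit
// Precision in scope, `~=` compares doubles within the given tolerance.
object RichDoubleExample {
  import RichDouble._
  implicit val precision: Precision = Precision(1e-9)
  val nearlyEqual: Boolean = (0.1 + 0.2) ~= 0.3 // true within 1e-9
}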
|
frankdavid/ranking
|
src/main/scala/hu/frankdavid/ranking/strategy/util/RichDouble.scala
|
Scala
|
apache-2.0
| 245
|
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.http
import java.io.File
import akka.util.ByteString
import org.specs2.mutable.Specification
import play.api.libs.Files.TemporaryFile
import play.api.mvc.Codec
import play.api.mvc.MultipartFormData
import play.api.mvc.MultipartFormData.FilePart
import play.api.libs.Files.SingletonTemporaryFileCreator._
class WriteableSpec extends Specification {
"Writeable" in {
"of multipart" should {
"work for temporary files" in {
val multipartFormData = createMultipartFormData[TemporaryFile](
create(new File("src/test/resources/multipart-form-data-file.txt").toPath)
)
val contentType = Some("text/plain")
val codec = Codec.utf_8
val writeable = Writeable.writeableOf_MultipartFormData(codec, contentType)
val transformed: ByteString = writeable.transform(multipartFormData)
transformed.utf8String must contain("Content-Disposition: form-data; name=name")
transformed.utf8String must contain(
"""Content-Disposition: form-data; name="thefile"; filename="something.text""""
)
transformed.utf8String must contain("Content-Type: text/plain")
transformed.utf8String must contain("multipart-form-data-file")
}
"work composing with another writeable" in {
val multipartFormData = createMultipartFormData[String]("file part value")
val contentType = Some("text/plain")
val codec = Codec.utf_8
val writeable = Writeable.writeableOf_MultipartFormData(
codec,
Writeable[FilePart[String]]((f: FilePart[String]) => codec.encode(f.ref), contentType)
)
val transformed: ByteString = writeable.transform(multipartFormData)
transformed.utf8String must contain("Content-Disposition: form-data; name=name")
transformed.utf8String must contain(
"""Content-Disposition: form-data; name="thefile"; filename="something.text""""
)
transformed.utf8String must contain("Content-Type: text/plain")
transformed.utf8String must contain("file part value")
}
"use multipart/form-data content-type" in {
val contentType = Some("text/plain")
val codec = Codec.utf_8
val writeable = Writeable.writeableOf_MultipartFormData(
codec,
Writeable[FilePart[String]]((f: FilePart[String]) => codec.encode(f.ref), contentType)
)
writeable.contentType must beSome(startWith("multipart/form-data; boundary="))
}
}
"of urlEncodedForm" should {
"encode keys and values" in {
val codec = Codec.utf_8
val writeable = Writeable.writeableOf_urlEncodedForm(codec)
val transformed: ByteString = writeable.transform(Map("foo$bar" -> Seq("ba$z")))
transformed.utf8String must contain("foo%24bar=ba%24z")
}
}
}
def createMultipartFormData[A](ref: A): MultipartFormData[A] = {
MultipartFormData[A](
dataParts = Map(
"name" -> Seq("value")
),
files = Seq(
FilePart[A](
key = "thefile",
filename = "something.text",
contentType = Some("text/plain"),
ref = ref
)
),
badParts = Seq.empty
)
}
}
|
benmccann/playframework
|
core/play/src/test/scala/play/api/http/WriteableSpec.scala
|
Scala
|
apache-2.0
| 3,394
|
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v3
import uk.gov.hmrc.ct.box.{CtInteger, CtBoxIdentifier, Linked}
import uk.gov.hmrc.ct.computations.CP259
case class B170(value: Int) extends CtBoxIdentifier(name = "Bank, building society or other interest, and profits from non-trading loan relationships") with CtInteger
object B170 extends Linked[CP259, B170] {
override def apply(source: CP259): B170 = B170(source.value)
}
|
ahudspith-equalexperts/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/ct600/v3/B170.scala
|
Scala
|
apache-2.0
| 1,019
|
/*
* Copyright 2001-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalactic
/**
* An <code>Equality[T]</code> that offers a <code>hashCodeFor</code> method that
* can provide an <code>Int</code> hash code for a given <code>T</code> instance, whose
 * contract is constrained by that of <code>areEqual</code>.
*
* <p>
* The general contract of <code>hashCodeFor</code> is:
* </p>
*
* <ul>
* <li>
 * Whenever <code>hashCodeFor</code> is invoked on the same object more than once during an execution of an application,
* <code>hashCodeFor</code> must consistently return the same integer, provided no information used in <code>areEqual</code>
* comparisons on the object is modified. This integer need not remain consistent from one execution of an application
* to another execution of the same application.
* </li>
* <li>
* If two objects are equal according to the <code>areEqual</code> method, then passing either of those objects to
* the <code>hashCodeFor</code> method must produce the same integer result.
* </li>
* <li>
* It is not required that if two objects are unequal according to the <code>areEqual</code> method, then calling the
* <code>hashCodeFor</code> method on each of the two objects must produce distinct integer results. However, you should
* be aware that producing distinct integer results for unequal objects may improve the performance of hashtables.
 * </li>
 * </ul>
*
* <p>
* Trait <code>HashingEquality</code> is used by instances of <a href="EquaPath$EquaSet.html"><code>EquaPath#EquaSet</code></a> to implement hash sets based
* on custom equalities.
* </p>
*/
trait HashingEquality[T] extends Equality[T] {
/**
* Returns a hash code for the specified object that is consistent with <code>areEqual</code>.
*
* <p>
* See the main documentation of this trait for more detail on the contract of <code>hashCodeFor</code>.
* </p>
*/
def hashCodeFor(a: T): Int
}
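// Illustrative sketch (not part of the original source): a case-insensitive
// HashingEquality[String]; hashCodeFor is consistent with areEqual because
// strings that are equal ignoring case hash to the same value.
object CaseInsensitiveHashingEquality extends HashingEquality[String] {
  override def areEqual(a: String, b: Any): Boolean = b match {
    case s: String => a.toLowerCase == s.toLowerCase
    case _ => false
  }
  override def hashCodeFor(a: String): Int = a.toLowerCase.hashCode
}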
|
cheeseng/scalatest
|
scalactic/src/main/scala/org/scalactic/HashingEquality.scala
|
Scala
|
apache-2.0
| 2,478
|
package io.finch.internal
import java.nio.charset.Charset
import scala.annotation.implicitNotFound
import com.twitter.concurrent.AsyncStream
import com.twitter.finagle.http.{Response, Status, Version}
import com.twitter.io.Buf
import io.finch._
import shapeless._
/**
* Represents a conversion from `A` to [[Response]].
*/
trait ToResponse[A] {
type ContentType <: String
def apply(a: A, cs: Charset): Response
}
trait LowPriorityToResponseInstances {
@implicitNotFound(
"""An Endpoint you're trying to convert into a Finagle service is missing one or more encoders.
Make sure ${A} is one of the following:
* A com.twitter.finagle.Response
* A value of a type with an io.finch.Encode instance (with the corresponding content-type)
* A coproduct made up of some combination of the above
See https://github.com/finagle/finch/blob/master/docs/cookbook.md#fixing-the-toservice-compile-error
"""
)
type Aux[A, CT] = ToResponse[A] { type ContentType = CT }
def instance[A, CT <: String](fn: (A, Charset) => Response): Aux[A, CT] = new ToResponse[A] {
type ContentType = CT
def apply(a: A, cs: Charset): Response = fn(a, cs)
}
protected def asyncResponseBuilder[A, CT <: String](writer: (A, Charset) => Buf)(implicit
w: Witness.Aux[CT]
): Aux[AsyncStream[A], CT] = instance { (as, cs) =>
val rep = Response()
rep.setChunked(true)
val writable = rep.writer
as.foreachF(chunk => writable.write(writer(chunk, cs))).ensure(writable.close())
rep.contentType = w.value
rep
}
implicit def asyncBufToResponse[CT <: String](implicit
w: Witness.Aux[CT]
): Aux[AsyncStream[Buf], CT] = asyncResponseBuilder((a, _) => a)
}
trait HighPriorityToResponseInstances extends LowPriorityToResponseInstances {
private[this] val newLine: Buf = Buf.Utf8("\\n")
implicit def jsonAsyncStreamToResponse[A](implicit
e: Encode.Json[A]
): Aux[AsyncStream[A], Application.Json] =
asyncResponseBuilder((a, cs) => e(a, cs).concat(newLine))
implicit def textAsyncStreamToResponse[A](implicit
e: Encode.Text[A]
): Aux[AsyncStream[A], Text.Plain] =
asyncResponseBuilder((a, cs) => e(a, cs).concat(newLine))
implicit def responseToResponse[CT <: String]: Aux[Response, CT] = instance((r, _) => r)
implicit def valueToResponse[A, CT <: String](implicit
e: Encode.Aux[A, CT],
w: Witness.Aux[CT]
): Aux[A, CT] = instance { (a, cs) =>
val buf = e(a, cs)
val rep = Response()
if (!buf.isEmpty) {
rep.content = buf
rep.contentType = w.value
}
rep
}
}
object ToResponse extends HighPriorityToResponseInstances {
implicit def cnilToResponse[CT <: String]: Aux[CNil, CT] =
instance((_, _) => Response(Version.Http10, Status.NotFound))
implicit def coproductToResponse[H, T <: Coproduct, CT <: String](implicit
trH: ToResponse.Aux[H, CT],
trT: ToResponse.Aux[T, CT]
): Aux[H :+: T, CT] = instance {
case (Inl(h), cs) => trH(h, cs)
case (Inr(t), cs) => trT(t, cs)
}
}
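// Illustrative note (not in the original file): for a coproduct output such as
// Response :+: String :+: CNil, resolution recurses branch by branch:
// Inl(rep) passes through via responseToResponse, Inr(Inl(str)) is encoded by
// valueToResponse, and an exhausted CNil yields a 404 response.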
|
ilya-murzinov/finch
|
core/src/main/scala/io/finch/internal/ToResponse.scala
|
Scala
|
apache-2.0
| 3,016
|
package org.ngseq.metagenomics
import Codondict._
/**
* Created by davbzh on 2017-05-01.
*/
object ORF {
  //Reverse complement dna string
  def reverscomplement(seq: String): String = {
    val reversecomp = new StringBuilder
    val result = new StringBuilder
    // Complement each nucleotide; N maps to N, other characters are skipped.
    seq.foreach { nucleotide =>
      if (nucleotide == 'A') reversecomp.append('T')
      else if (nucleotide == 'T') reversecomp.append('A')
      else if (nucleotide == 'C') reversecomp.append('G')
      else if (nucleotide == 'G') reversecomp.append('C')
      else if (nucleotide == 'N') reversecomp.append('N')
    }
    val reverse_comp = reversecomp.toString
    // Reverse the complemented sequence, including index 0.
    for (i <- (reverse_comp.length - 1) to 0 by -1) {
      result.append(reverse_comp(i))
    }
    result.toString
  }
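  // Example (illustrative): reverscomplement("ATGC") == "GCAT", i.e. the
  // complement "TACG" read in reverse.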
  //DNA to protein translator
def dna2orf (id: String, dnaseq: String, framestart: Int, dir: String, minlength: Int): Array[String] = {
//Initiate necessary variables
val dna_sequence = dnaseq.toUpperCase
//Start end variables
var start = -1
var end = -1
var tmp_end = -1
var nrorf = 0
//result variables
val protein_orf = new StringBuilder
val dna_orf = new StringBuilder
val result = new Array[String](2)
//Codon dictionnary
val codon2aa = Map (
"ATA"->"I", "ATC"->"I", "ATT"->"I", "ATG"->"M",
"ACA"->"T", "ACC"->"T", "ACG"->"T", "ACT"->"T",
"AAC"->"N", "AAT"->"N", "AAA"->"K", "AAG"->"K",
"AGC"->"S", "AGT"->"S", "AGA"->"R", "AGG"->"R",
"CTA"->"L", "CTC"->"L", "CTG"->"L", "CTT"->"L",
"CCA"->"P", "CCC"->"P", "CCG"->"P", "CCT"->"P",
"CAC"->"H", "CAT"->"H", "CAA"->"Q", "CAG"->"Q",
"CGA"->"R", "CGC"->"R", "CGG"->"R", "CGT"->"R",
"GTA"->"V", "GTC"->"V", "GTG"->"V", "GTT"->"V",
"GCA"->"A", "GCC"->"A", "GCG"->"A", "GCT"->"A",
"GAC"->"D", "GAT"->"D", "GAA"->"E", "GAG"->"E",
"GGA"->"G", "GGC"->"G", "GGG"->"G", "GGT"->"G",
"TCA"->"S", "TCC"->"S", "TCG"->"S", "TCT"->"S",
"TTC"->"F", "TTT"->"F", "TTA"->"L", "TTG"->"L",
"TAC"->"Y", "TAT"->"Y", "TAA"->"*", "TAG"->"*",
"TGC"->"C", "TGT"->"C", "TGA"->"*", "TGG"->"W"
)
    //Loop through the dna sequence in codon windows of 3, stopping before any trailing partial codon
    for (i <- 0 until dna_sequence.length - 2 by 3) {
      //Try swallows codons missing from the dictionary (e.g. containing N) and
      //any end-of-sequence overruns from the inner loop, continuing silently
scala.util.Try {
//If start codon
val codon = dna_sequence.substring(i, i + 3)
//Using all alternative start codons
if (codon.equals("ATG") || codon.equals("TTG") || codon.equals("GTG") || codon.equals("CTG")){
start = i
var protein = new StringBuilder
for (from_start_codon <- start to dna_sequence.length by 3) {
protein.append(codon2aa(dna_sequence.substring(from_start_codon, from_start_codon + 3)))
if (codon2aa(dna_sequence.substring(from_start_codon, from_start_codon + 3)).equals("*")){
if ((from_start_codon + 3) - start >= minlength && start >= 0) {
if (nrorf == 0) {
end = from_start_codon + 3
tmp_end = end
nrorf += 1
protein_orf.append(s">${id}_${dir}_${framestart}_${nrorf}\\n${protein.dropRight(1)}\\n")
dna_orf.append(s">${id}_${dir}_${framestart}_${nrorf}_${start}-${end}\\n${dna_sequence.substring(start, end)}\\n")
protein.clear()
} else if (nrorf > 0 && start > tmp_end) {
end = from_start_codon + 3
tmp_end = end
protein_orf.append(s">${id}_${dir}_${framestart}_${nrorf}\\n${protein.dropRight(1)}\\n")
dna_orf.append(s">${id}_${dir}_${framestart}_${nrorf}_${start}-${end}\\n${dna_sequence.substring(start, end)}\\n")
end = -1
nrorf += 1
protein.clear()
}
} else {
start = -1
protein.clear()
}
}
}
}
}
}
//Add to result
result(0) = protein_orf.toString().trim()
result(1) = dna_orf.toString().trim()
    result
}
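  // Example (illustrative): dna2orf("id", "ATGAAATAG", 1, "forw", 9) finds one
  // ORF covering the whole sequence and returns
  //   result(0) == ">id_forw_1_1\nMK"               (protein, stop codon dropped)
  //   result(1) == ">id_forw_1_1_0-9\nATGAAATAG"    (the underlying DNA)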
  def dnaOrfGenerator(id: String, dnaseq: String, minlength: Int): (String, Array[Array[String]]) = {
    //There will be 6 frames:
    val all_orfs = new Array[Array[String]](6)
    //upper case
    val dna_sequence = dnaseq.toUpperCase
    //reverse complement
    val reverse_comp = reverscomplement(dna_sequence)
//Construct dna frames
val frame1 = dna_sequence
val frame2 = dna_sequence.substring(1, dnaseq.length-1)
val frame3 = dna_sequence.substring(2, dnaseq.length-2)
val revframe1 = reverse_comp
val revframe2 = reverse_comp.substring(1, reverse_comp.length-1)
val revframe3 = reverse_comp.substring(2, reverse_comp.length-2)
//Construct and Add all ORFs in a list
all_orfs(0) = dna2orf(id, frame1, 1, "forw", minlength)
all_orfs(1) = dna2orf(id, frame2, 2, "forw", minlength)
all_orfs(2) = dna2orf(id, frame3, 3, "forw", minlength)
all_orfs(3) = dna2orf(id, revframe1, 1, "rev", minlength)
all_orfs(4) = dna2orf(id, revframe2, 2, "rev", minlength)
all_orfs(5) = dna2orf(id, revframe3, 3, "rev", minlength)
    //Construct and return the result value
    (id, all_orfs)
}
}
|
NGSeq/ViraPipe
|
src/main/scala/org/ngseq/metagenomics/ORF.scala
|
Scala
|
mit
| 5,864
|
package coursier.ivy
import coursier.core._
import coursier.maven.{MavenAttributes, MavenComplete}
import coursier.util.{Artifact, EitherT, Monad}
import dataclass._
@data class IvyRepository(
pattern: Pattern,
metadataPatternOpt: Option[Pattern] = None,
changingOpt: Option[Boolean] = None,
withChecksums: Boolean = true,
withSignatures: Boolean = true,
withArtifacts: Boolean = true,
// hack for sbt putting infos in properties
dropInfoAttributes: Boolean = false,
authentication: Option[Authentication] = None,
@since
override val versionsCheckHasModule: Boolean = true
) extends Repository {
def withMetadataPattern(metadataPattern: Pattern): IvyRepository =
withMetadataPatternOpt(Some(metadataPattern))
def withChanging(changing: Boolean): IvyRepository =
withChangingOpt(Some(changing))
override def repr: String =
"ivy:" + pattern.string + metadataPatternOpt.fold("")("|" + _.string)
def metadataPattern: Pattern = metadataPatternOpt.getOrElse(pattern)
private[ivy] def patternUpTo(chunk: Pattern.Chunk): Option[Pattern] = {
val idx = metadataPattern.chunks.indexWhere(_ == chunk)
if (idx < 0)
None
else
Some(Pattern(metadataPattern.chunks.take(idx)))
}
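  // Illustrative example (not in the original source): for a metadata pattern
  // such as ".../[organisation]/[module]/[revision]/ivy-[revision].xml",
  // patternUpTo(Pattern.Chunk.Var("revision")) keeps only the chunks before the
  // first [revision], i.e. the directory prefix used to list revisions below.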
lazy val revisionListingPatternOpt: Option[Pattern] =
patternUpTo(Pattern.Chunk.Var("revision"))
import Repository._
private[ivy] def orgVariables(org: Organization): Map[String, String] =
Map(
"organization" -> org.value,
"organisation" -> org.value,
"orgPath" -> org.value.replace('.', '/')
)
// See http://ant.apache.org/ivy/history/latest-milestone/concept.html for a
// list of variables that should be supported.
// Some are missing (branch, conf, originalName).
private def variables(
module: Module,
versionOpt: Option[String],
`type`: Type,
artifact: String,
ext: Extension,
classifierOpt: Option[Classifier]
): Map[String, String] =
orgVariables(module.organization) ++
Seq(
"module" -> module.name.value,
"type" -> `type`.value,
"artifact" -> artifact,
"ext" -> ext.value
) ++
module.attributes ++
classifierOpt.map("classifier" -> _.value).toSeq ++
versionOpt.map("revision" -> _).toSeq
def artifacts(
dependency: Dependency,
project: Project,
overrideClassifiers: Option[Seq[Classifier]]
): Seq[(Publication, Artifact)] =
if (withArtifacts) {
val retained =
overrideClassifiers match {
case None =>
if (dependency.publication.name.nonEmpty) {
val tpe =
if (dependency.publication.`type`.isEmpty) Type.jar
else dependency.publication.`type`
val ext =
if (dependency.publication.ext.isEmpty) MavenAttributes.typeExtension(tpe)
else dependency.publication.ext
Seq(
dependency.publication.withType(tpe).withExt(ext)
)
}
else if (dependency.attributes.classifier.nonEmpty)
// FIXME We're ignoring dependency.attributes.`type` in this case
project.publications.collect {
case (_, p) if p.classifier == dependency.attributes.classifier =>
p
}
else if (dependency.attributes.`type`.nonEmpty)
project.publications.collect {
case (conf, p)
if (conf == Configuration.all ||
conf == dependency.configuration ||
project.allConfigurations.getOrElse(
dependency.configuration,
Set.empty
).contains(conf)) &&
(
p.`type` == dependency.attributes.`type` ||
(p.ext == dependency.attributes.`type`.asExtension && project.packagingOpt.toSeq.contains(
p.`type`
)) // wow
) =>
p
}
else
project.publications.collect {
case (conf, p)
if conf == Configuration.all ||
conf == dependency.configuration ||
project.allConfigurations.getOrElse(
dependency.configuration,
Set.empty
).contains(conf) =>
p
}
case Some(classifiers) =>
val classifiersSet = classifiers.toSet
project.publications.collect {
case (_, p) if classifiersSet(p.classifier) =>
p
}
}
val retainedWithUrl = retained.distinct.flatMap { p =>
pattern.substituteVariables(variables(
dependency.module,
Some(project.actualVersion),
p.`type`,
p.name,
p.ext,
Some(p.classifier).filter(_.nonEmpty)
)).toSeq.toList.map(p -> _) // FIXME Validation errors are ignored
}
retainedWithUrl.map {
case (p, url) =>
var artifact = artifactFor(
url,
changing = changingOpt.getOrElse(IvyRepository.isSnapshot(project.version))
)
if (withChecksums)
artifact = artifact.withDefaultChecksums
if (withSignatures)
artifact = artifact.withDefaultSignature
(p, artifact)
}
}
else
Nil
private def artifactFor(url: String, changing: Boolean, cacheErrors: Boolean = false) =
Artifact(
url,
Map.empty,
if (cacheErrors)
Map("cache-errors" -> Artifact(
"",
Map.empty,
Map.empty,
changing = false,
optional = false,
None
))
else
Map.empty,
changing = changing,
optional = false,
authentication
)
private[ivy] def listing[F[_]](
listingPatternOpt: Option[Pattern],
listingName: String,
variables: Map[String, String],
fetch: Repository.Fetch[F],
prefix: String
)(implicit
F: Monad[F]
): EitherT[F, String, Option[(String, Seq[String])]] =
listingPatternOpt match {
case None =>
EitherT(F.point(Right(None)))
case Some(listingPattern) =>
val listingUrl = listingPattern
.substituteVariables(variables)
.flatMap { s =>
if (s.endsWith("/"))
Right(s)
else
Left(s"Don't know how to list $listingName of ${metadataPattern.string}")
}
for {
url <- EitherT(F.point(listingUrl))
s <- fetch(artifactFor(url + ".links", changing = true, cacheErrors = true))
      } yield Some((url, MavenComplete.split0(s, '\n', prefix)))
}
private[ivy] def availableVersions[F[_]](
module: Module,
fetch: Repository.Fetch[F],
prefix: String
)(implicit
F: Monad[F]
): EitherT[F, String, Option[(String, Seq[Version])]] =
listing(
revisionListingPatternOpt,
"revisions",
variables(module, None, Type.ivy, "ivy", Extension("xml"), None),
fetch,
prefix
).map(_.map(t => t._1 -> t._2.map(Parse.version).collect { case Some(v) => v }))
override protected def fetchVersions[F[_]](
module: Module,
fetch: Repository.Fetch[F]
)(implicit
F: Monad[F]
): EitherT[F, String, (Versions, String)] =
availableVersions(module, fetch, "").map {
case Some((listingUrl, l)) if l.nonEmpty =>
val latest = l.max.repr
val release = {
val l0 = l.filter(!_.repr.endsWith("SNAPSHOT"))
if (l0.isEmpty)
""
else
l0.max.repr
}
val v = Versions(
latest,
release,
l.map(_.repr).toList,
None
)
(v, listingUrl)
case Some((listingUrl, _)) =>
(Versions.empty, listingUrl)
case None =>
(Versions.empty, "")
}
def find[F[_]](
module: Module,
version: String,
fetch: Repository.Fetch[F]
)(implicit
F: Monad[F]
): EitherT[F, String, (ArtifactSource, Project)] = {
val eitherArtifact: Either[String, Artifact] =
for {
url <- metadataPattern.substituteVariables(
variables(module, Some(version), Type.ivy, "ivy", Extension("xml"), None)
)
} yield {
var artifact = artifactFor(
url,
changing = changingOpt.getOrElse(IvyRepository.isSnapshot(version))
)
if (withChecksums)
artifact = artifact.withDefaultChecksums
if (withSignatures)
artifact = artifact.withDefaultSignature
artifact
}
for {
artifact <- EitherT(F.point(eitherArtifact))
ivy <- fetch(artifact)
proj0 <- EitherT(
F.point {
for {
xml <- compatibility.xmlParseDom(ivy)
_ <- (if (xml.label == "ivy-module") Right(()) else Left("Module definition not found"))
proj <- IvyXml.project(xml)
} yield proj
}
)
} yield {
val proj =
if (dropInfoAttributes)
proj0
.withModule(
proj0.module.withAttributes(
proj0.module.attributes.filter {
case (k, _) => !k.startsWith("info.")
}
)
)
.withDependencies(
proj0.dependencies.map {
case (config, dep0) =>
val dep = dep0.withModule(
dep0.module.withAttributes(
dep0.module.attributes.filter {
case (k, _) => !k.startsWith("info.")
}
)
)
config -> dep
}
)
else
proj0
this -> proj.withActualVersionOpt(Some(version))
}
}
override def completeOpt[F[_]: Monad](fetch: Fetch[F]): Some[Repository.Complete[F]] =
Some(IvyComplete(this, fetch, Monad[F]))
}
object IvyRepository {
def isSnapshot(version: String): Boolean =
version.endsWith("SNAPSHOT")
def parse(
pattern: String,
metadataPatternOpt: Option[String] = None,
changing: Option[Boolean] = None,
properties: Map[String, String] = Map.empty,
withChecksums: Boolean = true,
withSignatures: Boolean = true,
withArtifacts: Boolean = true,
// hack for sbt putting infos in properties
dropInfoAttributes: Boolean = false,
authentication: Option[Authentication] = None,
substituteDefault: Boolean = true
): Either[String, IvyRepository] =
for {
propertiesPattern <- PropertiesPattern.parse(pattern)
metadataPropertiesPatternOpt <- metadataPatternOpt
.fold[Either[String, Option[PropertiesPattern]]](Right(None))(
PropertiesPattern.parse(_).map(Some(_))
)
pattern <- propertiesPattern.substituteProperties(properties)
metadataPatternOpt <- metadataPropertiesPatternOpt
.fold[Either[String, Option[Pattern]]](Right(None))(
_.substituteProperties(properties).map(Some(_))
)
} yield IvyRepository(
if (substituteDefault) pattern.substituteDefault else pattern,
metadataPatternOpt.map(p => if (substituteDefault) p.substituteDefault else p),
changing,
withChecksums,
withSignatures,
withArtifacts,
dropInfoAttributes,
authentication
)
  // because of the compatibility apply method below, we can't give default values
  // to the default constructor of IvyRepository
  // this method accepts the same arguments as that constructor, with default values when possible
def fromPattern(
pattern: Pattern,
metadataPatternOpt: Option[Pattern] = None,
changing: Option[Boolean] = None,
withChecksums: Boolean = true,
withSignatures: Boolean = true,
withArtifacts: Boolean = true,
// hack for sbt putting infos in properties
dropInfoAttributes: Boolean = false,
authentication: Option[Authentication] = None
): IvyRepository =
IvyRepository(
pattern,
metadataPatternOpt,
changing,
withChecksums,
withSignatures,
withArtifacts,
dropInfoAttributes,
authentication
)
}
|
alexarchambault/coursier
|
modules/core/shared/src/main/scala/coursier/ivy/IvyRepository.scala
|
Scala
|
apache-2.0
| 12,419
|
/*
Copyright (c) 2016, Rice University
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of Rice University
nor the names of its contributors may be used to endorse or
promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.apache.spark.rdd.cl.tests
import java.util.LinkedList
import com.amd.aparapi.internal.writer.ScalaArrayParameter
import org.apache.spark.rdd.cl.CodeGenTest
import org.apache.spark.rdd.cl.CodeGenTests
import org.apache.spark.rdd.cl.AsyncCodeGenTest
import com.amd.aparapi.internal.model.HardCodedClassModels
object AsyncMapTest extends AsyncCodeGenTest[Int] {
def getExpectedException() : String = { return null }
def getExpectedKernel() : String = { getExpectedKernelHelper(getClass) }
def getExpectedNumInputs() : Int = {
1
}
def init() : HardCodedClassModels = { new HardCodedClassModels() }
def complete(params : LinkedList[ScalaArrayParameter]) { }
def getFunction() : Function0[Int] = {
val v : Int = 3
new Function0[Int] {
override def apply() : Int = {
v * 5
}
}
}
}
|
agrippa/spark-swat
|
swat/src/test/scala/org/apache/spark/rdd/cl/tests/AsyncMapTest.scala
|
Scala
|
bsd-3-clause
| 2,347
|
package io.hnfmr.chapter10
import cats.Semigroup
import cats.data.Validated.{Valid, Invalid}
import cats.data.Validated
import cats.syntax.semigroup._
import cats.syntax.apply._
sealed trait Check[E, A, B] {
def apply(a: A)(implicit s: Semigroup[E]): Validated[E, B]
def map[C](f: B => C): Check[E, A, C] = Map[E, A, B, C](this, f)
def flatMap[C](f: B => Check[E, A, C]): Check[E, A, C] = FlatMap[E, A, B, C](this, f)
def andThen[C](next: Check[E, B, C]): Check[E, A, C] = AndThen[E, A, B, C](this, next)
}
object Check {
def apply[E, A](pred: Predicate[E, A]): Check[E, A, A] =
PurePredicate(pred)
def apply[E, A, B](f: A => Validated[E, B]): Check[E, A, B] = Pure(f)
}
final case class Map[E, A, B, C]( check: Check[E, A, B], f: B => C) extends Check[E, A, C] {
def apply(a: A)(implicit s: Semigroup[E]): Validated[E, C] =
check(a).map(f)
}
final case class PurePredicate[E, A]( pred: Predicate[E, A]) extends Check[E, A, A] {
def apply(a: A)(implicit s: Semigroup[E]): Validated[E, A] =
pred(a)
}
final case class Pure[E, A, B]( f: A => Validated[E, B]) extends Check[E, A, B] {
def apply(a: A)(implicit s: Semigroup[E]): Validated[E, B] =
f(a)
}
final case class FlatMap[E, A, B, C]( check: Check[E, A, B], f: B => Check[E, A, C]) extends Check[E, A, C] {
def apply(a: A)(implicit s: Semigroup[E]): Validated[E, C] =
check(a) match {
case Valid(x) => f(x)(a)
case x @ Invalid(_) => x
}
}
final case class AndThen[E, A, B, C]( left: Check[E, A, B], right: Check[E, B, C]) extends Check[E, A, C] {
def apply(a: A)(implicit s: Semigroup[E]): Validated[E, C] =
left(a) match {
case Valid(x) => right(x)
case x @ Invalid(_) => x
}
}
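// Usage sketch (illustrative, not part of the original file): build a Check
// from a function and compose with map; applying a check needs a Semigroup
// for the error type, which cats provides for List.
object CheckExample {
  val nonBlank: Check[List[String], String, String] =
    Check((s: String) => if (s.trim.nonEmpty) Valid(s) else Invalid(List("blank")))
  val upper: Check[List[String], String, String] = nonBlank.map(_.toUpperCase)
  // upper("hi") == Valid("HI"); upper("  ") == Invalid(List("blank"))
}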
|
hnfmr/advanced-scala
|
src/main/scala/io/hnfmr/chapter10/Check.scala
|
Scala
|
mit
| 1,724
|
package us.feliscat.time
import us.feliscat.m17n.MultiLingual
import us.feliscat.text.StringOption
/**
* <pre>
* Created on 2017/02/09.
* </pre>
*
* @author K.Sakamoto
*/
trait MultiLingualJapaneseEraNameParser extends MultiLingual {
def convertToRomanCalendar(text: StringOption): StringOption
}
|
ktr-skmt/FelisCatusZero-multilingual
|
libraries/src/main/scala/us/feliscat/time/MultiLingualJapaneseEraNameParser.scala
|
Scala
|
apache-2.0
| 313
|
/*
* Copyright (C) 2007-2008 Artima, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Example code from:
*
* Programming in Scala (First Edition, Version 6)
* by Martin Odersky, Lex Spoon, Bill Venners
*
* http://booksites.artima.com/programming_in_scala
*/
/** The capitals example from Chapter 1, both with
* immutable and mutable collections.
*/
object Capitals {
def useImmut() {
var capital = Map("US" -> "Washington", "France" -> "Paris")
capital += ("Japan" -> "Tokyo")
println(capital("France"))
}
def useMut() {
import scala.collection.mutable.Map // only change needed!
var capital = Map("US" -> "Washington", "France" -> "Paris")
capital += ("Japan" -> "Tokyo")
println(capital("France"))
}
def main(args: Array[String]) {
useImmut()
useMut()
}
}
|
peachyy/scalastu
|
collections/Capitals.scala
|
Scala
|
apache-2.0
| 1,366
|