code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.yggdrasil
import quasar.precog.common._
import quasar.yggdrasil.bytecode.JType
/**
 * Companion-level constants shared by all [[TransSpecModule]] implementations.
 */
object TransSpecModule {
// Field names of the standard row layout: each row is an object of the
// form { "key": ..., "value": ..., "group": ..., "sortkey": ... }.
object paths {
val Key = CPathField("key")
val Value = CPathField("value")
val Group = CPathField("group")
val SortKey = CPathField("sortkey")
}
// Definedness policy used by FilterDefined: keep a row when *any* of the
// reference columns is defined, or only when *all* of them are.
sealed trait Definedness
case object AnyDefined extends Definedness
case object AllDefined extends Definedness
}
/**
 * Defines the TransSpec algebra: a small first-order language of row-level
 * table transformations (dereference, wrap, concatenation, equality,
 * filtering, ...) interpreted by the yggdrasil table implementation, plus
 * utilities for traversing, normalizing, and inverting ("rephrasing")
 * such specifications.
 */
trait TransSpecModule extends FNModule {
import TransSpecModule._
// Abstract members supplied by the concrete table implementation:
type GroupId // identifier for a grouping — NOTE(review): exact semantics defined by implementors
type Scanner // stateful scanLeft-style transformer consumed by Scan
type Mapper // table-level mapping function consumed by MapWith
object trans {
// A TransSpec is an AST node; the type parameter records which source(s)
// (a single table, or the left/right sides of a pair) its leaves may name.
sealed trait TransSpec[+A <: SourceType]
sealed trait SourceType
sealed trait ObjectSpec[+A <: SourceType] extends TransSpec[A]
sealed trait ArraySpec[+A <: SourceType] extends TransSpec[A]
sealed trait Source1 extends SourceType
case object Source extends Source1
sealed trait Source2 extends SourceType
case object SourceLeft extends Source2
case object SourceRight extends Source2
case class Leaf[+A <: SourceType](source: A) extends TransSpec[A]
case class Filter[+A <: SourceType](source: TransSpec[A], predicate: TransSpec[A]) extends TransSpec[A]
// Adds a column to the output in the manner of scanLeft
case class Scan[+A <: SourceType](source: TransSpec[A], scanner: Scanner) extends TransSpec[A]
case class MapWith[+A <: SourceType](source: TransSpec[A], mapper: Mapper) extends TransSpec[A]
case class Map1[+A <: SourceType](source: TransSpec[A], f: F1) extends TransSpec[A]
case class DeepMap1[+A <: SourceType](source: TransSpec[A], f: F1) extends TransSpec[A]
// apply a function to the cartesian product of the transformed left and right subsets of columns
case class Map2[+A <: SourceType](left: TransSpec[A], right: TransSpec[A], f: F2) extends TransSpec[A]
// apply a function to an array
case class MapN[+A <: SourceType](contents: TransSpec[A], f: FN) extends TransSpec[A]
// Perform the specified transformation on all the sources, and then create a new set of columns
// containing all the resulting columns.
case class InnerObjectConcat[+A <: SourceType](objects: TransSpec[A]*) extends ObjectSpec[A]
case class OuterObjectConcat[+A <: SourceType](objects: TransSpec[A]*) extends ObjectSpec[A]
case class ObjectDelete[+A <: SourceType](source: TransSpec[A], fields: Set[CPathField]) extends TransSpec[A]
case class InnerArrayConcat[+A <: SourceType](arrays: TransSpec[A]*) extends ArraySpec[A]
case class OuterArrayConcat[+A <: SourceType](arrays: TransSpec[A]*) extends ArraySpec[A]
// Take the output of the specified TransSpec and prefix all of the resulting selectors with the
// specified field.
case class WrapObject[+A <: SourceType](source: TransSpec[A], field: String) extends ObjectSpec[A]
case class WrapObjectDynamic[+A <: SourceType](left: TransSpec[A], right: TransSpec[A]) extends TransSpec[A]
case class WrapArray[+A <: SourceType](source: TransSpec[A]) extends ArraySpec[A]
case class DerefObjectStatic[+A <: SourceType](source: TransSpec[A], field: CPathField) extends TransSpec[A]
case class DerefMetadataStatic[+A <: SourceType](source: TransSpec[A], field: CPathMeta) extends TransSpec[A]
case class DerefObjectDynamic[+A <: SourceType](left: TransSpec[A], right: TransSpec[A]) extends TransSpec[A]
case class DerefArrayStatic[+A <: SourceType](source: TransSpec[A], element: CPathIndex) extends TransSpec[A]
case class DerefArrayDynamic[+A <: SourceType](left: TransSpec[A], right: TransSpec[A]) extends TransSpec[A]
case class ArraySwap[+A <: SourceType](source: TransSpec[A], index: Int) extends TransSpec[A]
// Filter out all the source columns whose selector and CType are not specified by the supplied JType
case class Typed[+A <: SourceType](source: TransSpec[A], tpe: JType) extends TransSpec[A]
// Filter out all the source columns whose selector and CType are not specified by the supplied JType
// if the set of columns does not cover the JType specified, this will return the empty slice.
case class TypedSubsumes[+A <: SourceType](source: TransSpec[A], tpe: JType) extends TransSpec[A]
// return a Boolean column
// returns true for a given row when all of the columns specified by the supplied JType are defined
case class IsType[+A <: SourceType](source: TransSpec[A], tpe: JType) extends TransSpec[A]
case class Equal[+A <: SourceType](left: TransSpec[A], right: TransSpec[A]) extends TransSpec[A]
case class EqualLiteral[+A <: SourceType](left: TransSpec[A], right: CValue, invert: Boolean) extends TransSpec[A]
// this has to be primitive because of how nutso equality is
case class Within[+A <: SourceType](item: TransSpec[A], in: TransSpec[A]) extends TransSpec[A]
// this has to be primitive because it produces an array
case class Range[+A <: SourceType](lower: TransSpec[A], upper: TransSpec[A]) extends TransSpec[A]
// target is the transspec that provides definedness information. The resulting table will be defined
// and have the constant value wherever a row provided by the target transspec has at least one member
// that is not undefined
case class ConstLiteral[+A <: SourceType](value: CValue, target: TransSpec[A]) extends TransSpec[A]
case class FilterDefined[+A <: SourceType](source: TransSpec[A], definedFor: TransSpec[A], definedness: Definedness) extends TransSpec[A]
case class Cond[+A <: SourceType](pred: TransSpec[A], left: TransSpec[A], right: TransSpec[A]) extends TransSpec[A]
type TransSpec1 = TransSpec[Source1]
object TransSpec {
import CPath._
// Rebuilds a nested object/array spec from a CPathTree whose leaves carry
// array indices: field nodes become WrapObject, index nodes become WrapArray
// (indices are assumed to arrive in order), and each leaf becomes a static
// array deref of `leaf`. Siblings are joined with the matching InnerConcat.
def concatChildren[A <: SourceType](tree: CPathTree[Int], leaf: TransSpec[A] = Leaf(Source)): TransSpec[A] = {
def createSpecs(trees: Seq[CPathTree[Int]]): Seq[TransSpec[A]] = trees.map { child =>
child match {
case node @ RootNode(seq) => concatChildren(node, leaf)
case node @ FieldNode(CPathField(name), _) => trans.WrapObject(concatChildren(node, leaf), name)
case node @ IndexNode(CPathIndex(_), _) => trans.WrapArray(concatChildren(node, leaf)) //assuming that indices received in order
case LeafNode(idx) => trans.DerefArrayStatic(leaf, CPathIndex(idx))
}
}
val initialSpecs = tree match {
case RootNode(children) => createSpecs(children)
case FieldNode(_, children) => createSpecs(children)
case IndexNode(_, children) => createSpecs(children)
case LeafNode(_) => Seq()
}
// siblings must be homogeneous: all objects or all arrays
val result = initialSpecs reduceOption { (t1, t2) =>
(t1, t2) match {
case (t1: ObjectSpec[_], t2: ObjectSpec[_]) => trans.InnerObjectConcat(t1, t2)
case (t1: ArraySpec[_], t2: ArraySpec[_]) => trans.InnerArrayConcat(t1, t2)
case _ => sys.error("cannot have this")
}
}
result getOrElse leaf
}
// Structurally rebuilds `spec`, replacing every Leaf source via `f`
// (e.g. retargeting a TransSpec1 at the left or right side of a join).
// Purely structural: all non-Leaf nodes are reconstructed unchanged.
def mapSources[A <: SourceType, B <: SourceType](spec: TransSpec[A])(f: A => B): TransSpec[B] = {
spec match {
case Leaf(source) => Leaf(f(source))
case trans.ConstLiteral(value, target) => trans.ConstLiteral(value, mapSources(target)(f))
case trans.Filter(source, pred) => trans.Filter(mapSources(source)(f), mapSources(pred)(f))
case trans.FilterDefined(source, definedFor, definedness) =>
trans.FilterDefined(mapSources(source)(f), mapSources(definedFor)(f), definedness)
case Scan(source, scanner) => Scan(mapSources(source)(f), scanner)
case MapWith(source, mapper) => MapWith(mapSources(source)(f), mapper)
case trans.Map1(source, f1) => trans.Map1(mapSources(source)(f), f1)
case trans.DeepMap1(source, f1) => trans.DeepMap1(mapSources(source)(f), f1)
case trans.Map2(left, right, f2) => trans.Map2(mapSources(left)(f), mapSources(right)(f), f2)
case trans.MapN(contents, f1) => trans.MapN(mapSources(contents)(f), f1)
case trans.OuterObjectConcat(objects @ _ *) => trans.OuterObjectConcat(objects.map(mapSources(_)(f)): _*)
case trans.InnerObjectConcat(objects @ _ *) => trans.InnerObjectConcat(objects.map(mapSources(_)(f)): _*)
case trans.ObjectDelete(source, fields) => trans.ObjectDelete(mapSources(source)(f), fields)
case trans.InnerArrayConcat(arrays @ _ *) => trans.InnerArrayConcat(arrays.map(mapSources(_)(f)): _*)
case trans.OuterArrayConcat(arrays @ _ *) => trans.OuterArrayConcat(arrays.map(mapSources(_)(f)): _*)
case trans.WrapObject(source, field) => trans.WrapObject(mapSources(source)(f), field)
case trans.WrapObjectDynamic(left, right) => trans.WrapObjectDynamic(mapSources(left)(f), mapSources(right)(f))
case trans.WrapArray(source) => trans.WrapArray(mapSources(source)(f))
case DerefMetadataStatic(source, field) => DerefMetadataStatic(mapSources(source)(f), field)
case DerefObjectStatic(source, field) => DerefObjectStatic(mapSources(source)(f), field)
case DerefObjectDynamic(left, right) => DerefObjectDynamic(mapSources(left)(f), mapSources(right)(f))
case DerefArrayStatic(source, element) => DerefArrayStatic(mapSources(source)(f), element)
case DerefArrayDynamic(left, right) => DerefArrayDynamic(mapSources(left)(f), mapSources(right)(f))
case trans.ArraySwap(source, index) => trans.ArraySwap(mapSources(source)(f), index)
case Typed(source, tpe) => Typed(mapSources(source)(f), tpe)
case TypedSubsumes(source, tpe) => TypedSubsumes(mapSources(source)(f), tpe)
case IsType(source, tpe) => IsType(mapSources(source)(f), tpe)
case trans.Equal(left, right) => trans.Equal(mapSources(left)(f), mapSources(right)(f))
case trans.EqualLiteral(source, value, invert) => trans.EqualLiteral(mapSources(source)(f), value, invert)
case trans.Within(item, in) => trans.Within(mapSources(item)(f), mapSources(in)(f))
// NOTE(review): binder names are swapped relative to Range(lower, upper),
// but both positions are mapped identically so behavior is unaffected
case trans.Range(upper, lower) => trans.Range(mapSources(upper)(f), mapSources(lower)(f))
case trans.Cond(pred, left, right) => trans.Cond(mapSources(pred)(f), mapSources(left)(f), mapSources(right)(f))
}
}
// Top-down rewrite: applies `f` at the outermost node where it is defined;
// nodes below a matched node are NOT visited further. Unmatched nodes are
// rebuilt with their children deep-mapped.
def deepMap[A <: SourceType](spec: TransSpec[A])(f: PartialFunction[TransSpec[A], TransSpec[A]]): TransSpec[A] = spec match {
case x if f isDefinedAt x => f(x)
case x @ Leaf(_) => x
case trans.ConstLiteral(value, target) => trans.ConstLiteral(value, deepMap(target)(f))
case trans.Filter(source, pred) => trans.Filter(deepMap(source)(f), deepMap(pred)(f))
case trans.FilterDefined(source, definedFor, definedness) =>
trans.FilterDefined(deepMap(source)(f), deepMap(definedFor)(f), definedness)
case Scan(source, scanner) => Scan(deepMap(source)(f), scanner)
case MapWith(source, mapper) => MapWith(deepMap(source)(f), mapper)
case trans.Map1(source, f1) => trans.Map1(deepMap(source)(f), f1)
case trans.DeepMap1(source, f1) => trans.DeepMap1(deepMap(source)(f), f1)
case trans.Map2(left, right, f2) => trans.Map2(deepMap(left)(f), deepMap(right)(f), f2)
case trans.MapN(contents, f1) => trans.MapN(deepMap(contents)(f), f1)
case trans.OuterObjectConcat(objects @ _ *) => trans.OuterObjectConcat(objects.map(deepMap(_)(f)): _*)
case trans.InnerObjectConcat(objects @ _ *) => trans.InnerObjectConcat(objects.map(deepMap(_)(f)): _*)
case trans.ObjectDelete(source, fields) => trans.ObjectDelete(deepMap(source)(f), fields)
case trans.InnerArrayConcat(arrays @ _ *) => trans.InnerArrayConcat(arrays.map(deepMap(_)(f)): _*)
case trans.OuterArrayConcat(arrays @ _ *) => trans.OuterArrayConcat(arrays.map(deepMap(_)(f)): _*)
case trans.WrapObject(source, field) => trans.WrapObject(deepMap(source)(f), field)
case trans.WrapObjectDynamic(source, right) => trans.WrapObjectDynamic(deepMap(source)(f), deepMap(right)(f))
case trans.WrapArray(source) => trans.WrapArray(deepMap(source)(f))
case DerefMetadataStatic(source, field) => DerefMetadataStatic(deepMap(source)(f), field)
case DerefObjectStatic(source, field) => DerefObjectStatic(deepMap(source)(f), field)
case DerefObjectDynamic(left, right) => DerefObjectDynamic(deepMap(left)(f), deepMap(right)(f))
case DerefArrayStatic(source, element) => DerefArrayStatic(deepMap(source)(f), element)
case DerefArrayDynamic(left, right) => DerefArrayDynamic(deepMap(left)(f), deepMap(right)(f))
case trans.ArraySwap(source, index) => trans.ArraySwap(deepMap(source)(f), index)
case Typed(source, tpe) => Typed(deepMap(source)(f), tpe)
case TypedSubsumes(source, tpe) => TypedSubsumes(deepMap(source)(f), tpe)
case IsType(source, tpe) => IsType(deepMap(source)(f), tpe)
case trans.Equal(left, right) => trans.Equal(deepMap(left)(f), deepMap(right)(f))
case trans.EqualLiteral(source, value, invert) => trans.EqualLiteral(deepMap(source)(f), value, invert)
case trans.Within(item, in) => trans.Within(deepMap(item)(f), deepMap(in)(f))
// NOTE(review): binder names are swapped relative to Range(lower, upper);
// harmless, as both positions receive the same treatment
case trans.Range(upper, lower) => trans.Range(deepMap(upper)(f), deepMap(lower)(f))
case trans.Cond(pred, left, right) => trans.Cond(deepMap(pred)(f), deepMap(left)(f), deepMap(right)(f))
}
// reduce the TransSpec to a "normal form", in which nested *Concats are flattened into
// single vararg calls and statically-known *DerefStatics are performed.
// `undef` is the spec this module uses to represent statically-known undefinedness
// (see TransSpec1.Undef / TransSpec2.Undef).
def normalize[A <: SourceType](ts: TransSpec[A], undef: TransSpec[A]): TransSpec[A] = {
import scalaz.syntax.std.option._, scalaz.std.option._
// Some(flattened) when `ts` is itself a *Concat; None otherwise, which
// sends us to the structural cases below.
def flattenConcats: Option[TransSpec[A]] = {
def flattenOuterArrayConcats[A <: SourceType](proj: TransSpec[A]): Option[List[TransSpec[A]]] = proj match {
case OuterArrayConcat(ls@_*) =>
Some(ls.toList.flatMap(a => flattenOuterArrayConcats(a).getOrElse(a :: Nil)))
case _ => None
}
def flattenOuterObjectConcats[A <: SourceType](proj: TransSpec[A]): Option[List[TransSpec[A]]] = proj match {
case OuterObjectConcat(ls@_*) =>
Some(ls.toList.flatMap(a => flattenOuterObjectConcats(a).getOrElse(a :: Nil)))
case _ => None
}
def flattenInnerArrayConcats[A <: SourceType](proj: TransSpec[A]): Option[List[TransSpec[A]]] = proj match {
case InnerArrayConcat(ls@_*) =>
Some(ls.toList.flatMap(a => flattenInnerArrayConcats(a).getOrElse(a :: Nil)))
case _ => None
}
def flattenInnerObjectConcats[A <: SourceType](proj: TransSpec[A]): Option[List[TransSpec[A]]] = proj match {
case InnerObjectConcat(ls@_*) =>
Some(ls.toList.flatMap(a => flattenInnerObjectConcats(a).getOrElse(a :: Nil)))
case _ => None
}
flattenOuterArrayConcats(ts).map(ks => OuterArrayConcat(ks.map(normalize(_, undef)): _*))
.orElse(flattenOuterObjectConcats(ts).map(ks => OuterObjectConcat(ks.map(normalize(_, undef)): _*)))
.orElse(flattenInnerArrayConcats(ts).map(ks => InnerArrayConcat(ks.map(normalize(_, undef)): _*)))
.orElse(flattenInnerObjectConcats(ts).map(ks => InnerObjectConcat(ks.map(normalize(_, undef)): _*)))
}
flattenConcats.getOrElse {
ts match {
case WrapArray(t) =>
WrapArray(normalize(t, undef))
case WrapObject(t, f) =>
WrapObject(normalize(t, undef), f)
case WrapObjectDynamic(t, f) =>
WrapObjectDynamic(normalize(t, undef), normalize(f, undef))
case ConstLiteral(t, f) =>
ConstLiteral(t, normalize(f, undef))
// statically resolve array derefs against known array constructions
case DerefArrayStatic(t, f@CPathIndex(i)) =>
normalize[A](t, undef) match {
case n@OuterArrayConcat(ks@_*) =>
if (ks.length < (i + 1) && ks.forall(_.isInstanceOf[WrapArray[_]])) {
undef
} else {
ks.foldLeft((0.some, none[TransSpec[A]])) {
case ((Some(a), b), WrapArray(nt)) =>
((a + 1).some, if (a == i) normalize(nt, undef).some else b)
case ((_, b), _) => (none, b)
}._2.getOrElse(DerefArrayStatic(n, f))
}
case WrapArray(k) =>
if (i == 0) {
k
} else {
undef
}
case `undef` => undef
case n@_ => DerefArrayStatic(n, f)
}
// statically resolve object derefs against known object constructions
case DerefObjectStatic(t, f@CPathField(k)) =>
normalize[A](t, undef) match {
// ks is reversed before being folded, because keys are overridden
// by operands on the right, not the left
case n@OuterObjectConcat(ks@_*) =>
ks.reverse.foldRight(undef) {
case (WrapObject(s, k2), o) =>
if (k == k2) {
normalize(s, undef)
} else {
o
}
case _ => DerefObjectStatic(n, f)
}
case WrapObject(s, `k`) => normalize(s, undef)
case WrapObject(_, _) => undef
case `undef` => undef
case n@_ => DerefObjectStatic(n, f)
}
case DerefMetadataStatic(t, f) =>
DerefMetadataStatic(normalize[A](t, undef), f)
// undefinedness propagates through dynamic derefs
case DerefObjectDynamic(s, f) =>
normalize(s, undef) match {
case `undef` => undef
case sn => normalize(f, undef) match {
case `undef` => undef
case fn => DerefObjectDynamic(sn, fn)
}
}
case DerefArrayDynamic(s, f) =>
normalize(s, undef) match {
case `undef` => undef
case sn => normalize(f, undef) match {
case `undef` => undef
case fn => DerefArrayDynamic(sn, fn)
}
}
case IsType(s, t) => IsType(normalize(s, undef), t)
case Equal(f, s) => Equal(normalize(f, undef), normalize(s, undef))
case EqualLiteral(f, v, i) => EqualLiteral(normalize(f, undef), v, i)
case Within(item, in) => Within(normalize(item, undef), normalize(in, undef))
case Range(upper, lower) => Range(normalize(upper, undef), normalize(lower, undef))
case Cond(p, l, r) => Cond(normalize(p, undef), normalize(l, undef), normalize(r, undef))
case Filter(s, t) => Filter(normalize(s, undef), normalize(t, undef))
case FilterDefined(s, df, t) => FilterDefined(normalize(s, undef), normalize(df, undef), t)
case Typed(s, t) => Typed(normalize(s, undef), t)
case TypedSubsumes(s, t) => TypedSubsumes(normalize(s, undef), t)
case Map1(s, fun) => Map1(normalize(s, undef), fun)
case DeepMap1(s, fun) => DeepMap1(normalize(s, undef), fun)
case Map2(s, f, fun) => Map2(normalize(s, undef), normalize(f, undef), fun)
case MapN(s, fun) => MapN(normalize(s, undef), fun)
case ArraySwap(s, i) => ArraySwap(normalize(s, undef), i)
case _ => ts
}
}
}
// rephrase(p, r)(p(a)) --> r(a), if this is possible.
// rephrase(p, r) "pulls back" r from p.
// selectors:
// rephrase(.a.b, .a.b.c)(x) --> x.c
// ==> rephrase(.a.b, .a.b.c)(x.a.b) --> x.a.b.c
// rephrase(.[0], .[0].[1])(x) --> [[undef] ++ [x]].[0].[1]
// ==> rephrase(.[0], .[0].[1])(x.[0]) --> x.[0].[1]
// rephrase({k: f}, f)(x) --> x.k
// ==> rephrase({k: f}, f)({k: f(x)}) --> f(x)
// arrays:
// rephrase([a] ++ [b] ++ [c] ++ [d], a)(x) --> x.[0]
// rephrase([a] ++ [b] ++ ([c] ++ [d], d)(x) --> x.[3]
// constants:
// rephrase(1, f) --> none
// rephrase(f, 1) --> 1
def rephrase[A <: SourceType](projection: TransSpec[A],
rootSource: A,
root: TransSpec1): Option[TransSpec1] = {
import scalaz.syntax.std.option._, scalaz.std.option._
// every iteration of peelInvert returns:
// - a change in the current index inside a nested *ArrayConcat, or None if the index information has been lost
// (e.g., by concatting with Leaf(Source))
// - a change in the set of keys known to be inside a nested *ObjectConcat, or None if the key information has been lost
// (e.g., by concatting with Leaf(Source))
// - a map of substitutions, which maps from subtrees of `root` that have been found in `projection`
// to the accumulated inverse of the TransSpec layers seen inside `projection` upwards of that point
final case class PeelState(delta: Option[Int], keys: Option[Set[String]], substitutions: Map[TransSpec1, TransSpec1])
val rootWithSourceReplaced = TransSpec.mapSources(root)(_ => rootSource)
// find all paths (subtrees) through a `TransSpec[A]`
// note we don't need to include `r` in the set of paths when `r` is a `WrapArray`, `WrapObject`, or `*Concat`,
// because actually substituting that subtree messes up the index handling while traversing `projection`.
def paths(r: TransSpec[A]): Set[TransSpec[A]] = r match {
case OuterArrayConcat(rs@_*) => rs.flatMap(paths).toSet
case InnerArrayConcat(rs@_*) => rs.flatMap(paths).toSet
case OuterObjectConcat(rs@_*) => rs.flatMap(paths).toSet
case InnerObjectConcat(rs@_*) => rs.flatMap(paths).toSet
case WrapArray(s) => paths(s)
case WrapObject(s, _) => paths(s)
case DerefObjectStatic(s, _) => paths(s) + r
case DerefObjectDynamic(f, s) => paths(f) ++ paths(s) + r
case DerefArrayStatic(s, _) => paths(s) + r
case DerefArrayDynamic(f, s) => paths(f) ++ paths(s) + r
case DerefMetadataStatic(s, _) => paths(s) + r
case ArraySwap(s, _) => paths(s)
case Cond(f, s, _) => paths(f) ++ paths(s) + r
case ConstLiteral(_, s) => paths(s) + r
case Equal(f, s) => paths(f) ++ paths(s) + r
case EqualLiteral(s, _, _) => paths(s) + r
case Within(item, in) => paths(item) ++ paths(in) + r
case Range(upper, lower) => paths(upper) ++ paths(lower) + r
case Filter(f, p) => paths(f) ++ paths(p) + r
case FilterDefined(s, p, _) => paths(s) ++ paths(p) + r
case IsType(s, _) => paths(s) + r
case Map1(s, _) => paths(s) + r
case Map2(f, s, _) => paths(f) ++ paths(s) + r
case MapN(s, _) => paths(s) + r
case DeepMap1(s, _) => paths(s) + r
case MapWith(s, _) => paths(s) + r
case ObjectDelete(s, _) => paths(s) + r
case Scan(s, _) => paths(s) + r
case Typed(s, _) => paths(s) + r
case TypedSubsumes(s, _) => paths(s) + r
case WrapObjectDynamic(f, s) => paths(f) ++ paths(s) + r
case Leaf(`rootSource`) => Set(r)
case Leaf(_) => sys.error("impossible")
}
// find all subtrees of `root`, so that they can be substituted with inverses of the
// outer layers of `projection` from inside `root` when they're encountered inside `projection`.
val allRootPaths = paths(rootWithSourceReplaced)
// peels layers off of `projection`, building up the inverses of every layer and substituting those layers for common
// occurrences of subtrees of `root` inside `projection` when they're reached.
def peelInvert(projection: TransSpec[A], currentIndex: Int, keys: Set[String], inverseLayers: TransSpec1 => TransSpec1): PeelState = {
// folds over every branch in a *ArrayConcat, from the *left*, collecting substitutions.
// the state carried along consists of:
// a) the index into the array formed by the *ArrayConcat (and outer *ArrayConcats, because it's passed into peelInvert)
// b) the Option[TransSpec1] returned by the first successfully rephrased branch of the *ArrayConcat.
// If the index returned by any rephrase call is none before a branch is successfully rephrased,
// we have insufficient information to continue searching for a successful branch,
// so we have to halt and return none. Otherwise, the first successfully rephrased branch is returned.
def arrayConcat(rs: Seq[TransSpec[A]]) = {
val (delta, substsOut) = rs.foldLeft((0.some, Map.empty[TransSpec1, TransSpec1])) {
case ((Some(i), substs), ts) =>
val PeelState(newDelta, _, newSubsts) = peelInvert(ts, currentIndex = currentIndex + i, keys = Set.empty, inverseLayers)
(newDelta.map(_ + i), newSubsts ++ substs)
case ((None, substs), _) => (none, substs)
}
PeelState(delta, Set.empty[String].some, substsOut)
}
// folds over every branch in a *ObjectConcat, from the *right*, collecting substitutions.
// the state carried along consists of:
// a) the set of keys which have been observed in the *ObjectConcat (and outer *ObjectConcats, because it's passed into peelInvert)
// b) the Option[TransSpec1] returned by the first successfully rephrased branch of the *ObjectConcat.
// if the set of keys returned by any rephrase call is none before a branch is successfully rephrased,
// we have insufficient information to continue searching for a successful branch,
// so we have to halt and return none. Otherwise, the first successfully rephrased branch is returned.
def objectConcat(rs: Seq[TransSpec[A]]) = {
val (resultKeys, out) = rs.foldRight((Set.empty[String].some, Map.empty[TransSpec1, TransSpec1])) {
case (ts, (Some(ks), substs)) =>
val PeelState(_, newKeys, newSubsts) = peelInvert(ts, currentIndex = 0, keys = keys ++ ks, inverseLayers)
(newKeys.map(_ ++ ks), newSubsts ++ substs)
case (_, (None, substs)) => (none, substs)
}
PeelState(0.some, resultKeys, out)
}
projection match {
// if the key has already been spotted (to the right in a nested *ObjectConcat of this WrapObject),
// the object resulting from the *ObjectConcat has had this WrapObject call's result overridden.
// so this branch's value is inaccessible, so we return none.
case WrapObject(s, k) =>
if (keys(k)) {
PeelState(none, Set.empty[String].some, Map.empty)
} else {
val PeelState(_, _, nestedSubstitutions) =
peelInvert(s, currentIndex = 0, Set.empty, inverseLayers andThen (DerefObjectStatic(_, CPathField(k))))
PeelState(0.some, Set(k).some, nestedSubstitutions)
}
case ObjectDelete(s, k) =>
val PeelState(_, nestedKeys, nestedSubstitutions) = peelInvert(s, currentIndex = 0, keys, inverseLayers)
PeelState(0.some, nestedKeys.map(_ -- k.map(_.name)), nestedSubstitutions)
// encountering a WrapArray inside a *ArrayConcat requires shifting the index by 1.
// however inside the WrapArray, the index is reset to 0.
case WrapArray(s) =>
val PeelState(_, _, nestedSubstitutions) = peelInvert(s, currentIndex = 0, Set.empty, inverseLayers andThen (DerefArrayStatic(_, CPathIndex(currentIndex))))
PeelState(1.some, none, nestedSubstitutions)
case OuterArrayConcat(rs@_*) => arrayConcat(rs)
case InnerArrayConcat(rs@_*) => arrayConcat(rs)
case OuterObjectConcat(rs@_*) => objectConcat(rs)
case InnerObjectConcat(rs@_*) => objectConcat(rs)
case ArraySwap(s, i) => peelInvert(s, currentIndex = 0, Set.empty, inverseLayers andThen (ArraySwap(_, i)))
// this branch of `projection` is a subtree of `root`, so we can substitute it with the inverted layers of
// `projection` we've encountered so far.
case rootSubtree if allRootPaths(rootSubtree) =>
val rootSubtreeAsTransSpec1 =
if (rootSource == Source) {
rootSubtree.asInstanceOf[TransSpec1]
} else {
TransSpec.mapSources(rootSubtree)(_ => Source)
}
PeelState(0.some, Set.empty[String].some, Map(rootSubtreeAsTransSpec1 -> inverseLayers(Leaf(Source))))
// this won't be Leaf(rootSource), because that would be a subtree of root
case Leaf(_) => PeelState(none, none, Map.empty)
case _ => PeelState(none, none, Map.empty)
}
}
val peelSubstitutions =
peelInvert(projection, currentIndex = 0, keys = Set.empty, inverseLayers = identity[TransSpec1]).substitutions
if (peelSubstitutions.isEmpty) {
// there wasn't even an occurrence of `Leaf(rootSource)` in `projection`,
// so `projection` has definitely destroyed the information necessary to get back to root
none
} else {
// deepMap takes care of running substitutions over the largest subtrees first
val substitutedRoot = TransSpec.deepMap(root)(peelSubstitutions)
// normalize the output, so that equivalent sort orders are more likely to be comparable
normalize(substitutedRoot, TransSpec1.Undef).some
}
}
}
object TransSpec1 {
import constants._
// the identity transform: passes rows through untouched
val Id: TransSpec1 = Leaf(Source)
// fakes an undefinedness literal by derefing an empty object
val Undef: TransSpec1 =
DerefObjectStatic(ConstLiteral(CEmptyObject, Id), CPathField("bogus"))
val DerefArray0 = DerefArrayStatic(Leaf(Source), CPathIndex(0))
val DerefArray1 = DerefArrayStatic(Leaf(Source), CPathIndex(1))
val DerefArray2 = DerefArrayStatic(Leaf(Source), CPathIndex(2))
// keeps only the standard "key"/"value" fields of a row
val PruneToKeyValue = InnerObjectConcat(WrapObject(SourceKey.Single, paths.Key.name), WrapObject(SourceValue.Single, paths.Value.name))
val DeleteKeyValue = ObjectDelete(Leaf(Source), Set(paths.Key, paths.Value))
}
type TransSpec2 = TransSpec[Source2]
object TransSpec2 {
val LeftId: TransSpec2 = Leaf(SourceLeft)
val RightId: TransSpec2 = Leaf(SourceRight)
// fakes an undefinedness literal by derefing an empty object
val Undef: TransSpec2 =
DerefObjectStatic(ConstLiteral(CEmptyObject, LeftId), CPathField("bogus"))
/** Flips all `SourceLeft`s to `SourceRight`s and vice versa. */
def flip(spec: TransSpec2): TransSpec2 = TransSpec.mapSources(spec) {
case SourceLeft => SourceRight
case SourceRight => SourceLeft
}
def DerefArray0(source: Source2) = DerefArrayStatic(Leaf(source), CPathIndex(0))
def DerefArray1(source: Source2) = DerefArrayStatic(Leaf(source), CPathIndex(1))
def DerefArray2(source: Source2) = DerefArrayStatic(Leaf(source), CPathIndex(2))
val DeleteKeyValueLeft = ObjectDelete(Leaf(SourceLeft), Set(paths.Key, paths.Value))
val DeleteKeyValueRight = ObjectDelete(Leaf(SourceRight), Set(paths.Key, paths.Value))
}
// Boolean combination tree of group-key definitions.
sealed trait GroupKeySpec
/**
 * Definition for a single (non-composite) key part.
 *
 * @param key The key which will be used by `merge` to access this particular tic-variable (which may be refined by more than one `GroupKeySpecSource`)
 * @param spec A transform which defines this key part as a function of the source table in `GroupingSource`.
 */
case class GroupKeySpecSource(key: CPathField, spec: TransSpec1) extends GroupKeySpec
case class GroupKeySpecAnd(left: GroupKeySpec, right: GroupKeySpec) extends GroupKeySpec
case class GroupKeySpecOr(left: GroupKeySpec, right: GroupKeySpec) extends GroupKeySpec
object GroupKeySpec {
// Rewrites a key spec into disjunctive normal form (an Or of Ands) by
// repeatedly distributing And over Or until a fixed point is reached.
def dnf(keySpec: GroupKeySpec): GroupKeySpec = {
keySpec match {
case GroupKeySpecSource(key, spec) => GroupKeySpecSource(key, spec)
case GroupKeySpecAnd(GroupKeySpecOr(ol, or), right) => GroupKeySpecOr(dnf(GroupKeySpecAnd(ol, right)), dnf(GroupKeySpecAnd(or, right)))
case GroupKeySpecAnd(left, GroupKeySpecOr(ol, or)) => GroupKeySpecOr(dnf(GroupKeySpecAnd(left, ol)), dnf(GroupKeySpecAnd(left, or)))
case gand @ GroupKeySpecAnd(left, right) =>
val leftdnf = dnf(left)
val rightdnf = dnf(right)
if (leftdnf == left && rightdnf == right) gand else dnf(GroupKeySpecAnd(leftdnf, rightdnf))
case gor @ GroupKeySpecOr(left, right) =>
val leftdnf = dnf(left)
val rightdnf = dnf(right)
if (leftdnf == left && rightdnf == right) gor else dnf(GroupKeySpecOr(leftdnf, rightdnf))
}
}
// Splits a (typically DNF) spec into its top-level disjuncts.
def toVector(keySpec: GroupKeySpec): Vector[GroupKeySpec] = {
keySpec match {
case GroupKeySpecOr(left, right) => toVector(left) ++ toVector(right)
case x => Vector(x)
}
}
}
// Convenience derefs of the standard { key, value } row structure for each source kind.
object constants {
import paths._
object SourceKey {
val Single = DerefObjectStatic(Leaf(Source), Key)
val Left = DerefObjectStatic(Leaf(SourceLeft), Key)
val Right = DerefObjectStatic(Leaf(SourceRight), Key)
}
object SourceValue {
val Single = DerefObjectStatic(Leaf(Source), Value)
val Left = DerefObjectStatic(Leaf(SourceLeft), Value)
val Right = DerefObjectStatic(Leaf(SourceRight), Value)
}
}
// Builds a TransSpec producing the given RValue as a constant, defined
// wherever `target` is defined. Scalar values become ConstLiteral; RArray
// and RObject are reassembled structurally via Wrap*/InnerConcat.
// NOTE(review): any other non-CValue-convertible RValue hits sys.error.
def transRValue[A <: SourceType](rvalue: RValue, target: TransSpec[A]): TransSpec[A] = {
RValue.toCValue(rvalue) map { cvalue =>
trans.ConstLiteral(cvalue, target)
} getOrElse {
rvalue match {
case RArray(elements) =>
InnerArrayConcat(elements map { element =>
trans.WrapArray(transRValue(element, target))
}: _*)
case RObject(fields) =>
InnerObjectConcat(fields.toSeq map {
case (key, value) => trans.WrapObject(transRValue(value, target), key)
}: _*)
case _ =>
sys.error("Can't handle RValue")
}
}
}
}
import trans._
type TableTransSpec[+A <: SourceType] = Map[CPathField, TransSpec[A]]
type TableTransSpec1 = TableTransSpec[Source1]
type TableTransSpec2 = TableTransSpec[Source2]
// Applies each per-field transform to the corresponding field of the source
// row (each transform's Leaf is rebound to `source.field`), deletes those
// fields from the source, and concatenates the transformed fields back on.
def makeTableTrans(tableTrans: TableTransSpec1): TransSpec1 = {
val wrapped = for ((key @ CPathField(fieldName), value) <- tableTrans) yield {
val mapped = TransSpec.deepMap(value) {
case Leaf(_) => DerefObjectStatic(Leaf(Source), key)
}
trans.WrapObject(mapped, fieldName)
}
wrapped.foldLeft[TransSpec1](ObjectDelete(Leaf(Source), Set(tableTrans.keys.toSeq: _*))) { (acc, ts) =>
trans.InnerObjectConcat(acc, ts)
}
}
// Applies `trans` to the "value" field only, leaving the other row fields intact.
def liftToValues(trans: TransSpec1): TransSpec1 =
makeTableTrans(Map(paths.Value -> trans))
// Wraps `source` as { key: [], value: source }: a constant empty-array key
// plus the original spec as the value.
def buildConstantWrapSpec[A <: SourceType](source: TransSpec[A]): TransSpec[A] = {
val bottomWrapped = trans.WrapObject(trans.ConstLiteral(CEmptyArray, source), paths.Key.name)
trans.InnerObjectConcat(bottomWrapped, trans.WrapObject(source, paths.Value.name))
}
}
| jedesah/Quasar | yggdrasil/src/main/scala/quasar/yggdrasil/TransSpecModule.scala | Scala | apache-2.0 | 37,173 |
/*
* slibexif - Scala library to parse JPEG EXIF data.
* Copyright (C) Niklas Grossmann
*
* This file is part of libexif.
*
* slibexif is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* slibexif is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser Public License for more details.
*
* You should have received a copy of the GNU Lesser Public License
* along with libexif. If not, see <http://www.gnu.org/licenses/>.
*/
package net.n12n.exif
/** An unsigned-semantics rational number numerator/denominator over 64-bit
 * integers. The value is not normalised, and a zero denominator is not
 * rejected (toDouble then yields Infinity or NaN).
 */
case class Rational(val numerator: Long, val denominator: Long) {
  /** Renders as "numerator/denominator", e.g. "3/4". */
  override def toString = s"$numerator/$denominator"
  /** Approximates the rational as a Double. */
  def toDouble() = numerator.toDouble / denominator.toDouble
}
/** A signed rational number numerator/denominator over 32-bit integers.
 * The value is not normalised, and a zero denominator is not rejected
 * (toDouble then yields Infinity or NaN).
 */
case class SignedRational(val numerator: Int, val denominator: Int) {
  /** Renders as "numerator/denominator", e.g. "-3/4". */
  override def toString = s"$numerator/$denominator"
  /** Approximates the rational as a Double. */
  def toDouble() = numerator.toDouble / denominator.toDouble
}
| ngrossmann/slibexif | src/main/scala/net/n12n/exif/Rational.scala | Scala | gpl-3.0 | 1,163 |
package fpinscala.datastructures
sealed trait List[+A] // `List` data type, parameterized on a type, `A`

case object Nil extends List[Nothing] // Data constructor representing the empty list.

/* Another data constructor, representing nonempty lists. Note that `tail` is
 * another `List[A]`, which may be `Nil` or another `Cons`. */
case class Cons[+A](head: A, tail: List[A]) extends List[A]

/** `List` companion object. Contains functions for creating and working with lists. */
object List {

  /** Sums a list of integers by structural recursion (not stack-safe). */
  def sum(ints: List[Int]): Int = ints match {
    case Nil => 0 // The sum of the empty list is 0.
    case Cons(x, xs) => x + sum(xs)
  }

  /** Multiplies a list of doubles; a leading 0.0 short-circuits the recursion. */
  def product(ds: List[Double]): Double = ds match {
    case Nil => 1.0
    case Cons(0.0, _) => 0.0
    case Cons(x, xs) => x * product(xs)
  }

  /** Variadic constructor: `List(1, 2, 3)` == `Cons(1, Cons(2, Cons(3, Nil)))`. */
  def apply[A](as: A*): List[A] =
    if (as.isEmpty) Nil
    else Cons(as.head, apply(as.tail: _*))

  // Exercise: this match expression evaluates to 3 (the third case fires
  // with x = 1 and y = 2).
  val x = List(1,2,3,4,5) match {
    case Cons(x, Cons(2, Cons(4, _))) => x
    case Nil => 42
    case Cons(x, Cons(y, Cons(3, Cons(4, _)))) => x + y
    case Cons(h, t) => h + sum(t)
    case _ => 101
  }

  /** Appends `a2` after `a1`. O(|a1|) time; not stack-safe. */
  def append[A](a1: List[A], a2: List[A]): List[A] =
    a1 match {
      case Nil => a2
      case Cons(h,t) => Cons(h, append(t, a2))
    }

  /* This implementation of foldRight is not tail-recursive because the
     recursive call is not in tail position. */
  def foldRight[A,B](as: List[A], z: B)(f: (A, B) => B): B =
    as match {
      case Nil => z
      case Cons(x, xs) => f(x, foldRight(xs, z)(f))
    }

  /** Sum expressed via foldRight. */
  def sum2(ns: List[Int]) =
    foldRight(ns, 0)((x,y) => x + y)

  /* product2 can't short-circuit on encountering a 0.0 because foldRight has
     no early-exit mechanism; it always traverses the whole list. */
  def product2(ns: List[Double]) =
    foldRight(ns, 1.0)(_ * _)

  /** All elements but the first. Throws (`???`) on the empty list by design. */
  def tail[A](l: List[A]): List[A] =
    l match {
      case Nil => ???
      case Cons(_, t) => t
    }

  /** Replaces the first element. Throws (`???`) on the empty list by design. */
  def setHead[A](l: List[A], h: A): List[A] =
    l match {
      case Nil => ???
      case Cons(_, t) => Cons(h, t)
    }

  /** Drops the first `n` elements; throws if the list is shorter than `n`. */
  def drop[A](l: List[A], n: Int): List[A] =
    if (n < 1) l else drop(tail(l), n - 1)

  /** Drops the longest prefix whose elements all satisfy `f`. */
  def dropWhile[A](l: List[A], f: A => Boolean): List[A] =
    l match {
      case Nil => Nil
      case Cons(h, t) =>
        if (f(h)) dropWhile(t, f)
        else l
    }

  /* init can't be implemented in constant time because it needs to traverse
     the entire list in order to find all elements but the last. Throws
     (`???`) on the empty list. */
  def init[A](l: List[A]): List[A] = {
    def reverse(l: List[A]): List[A] = {
      def go(l: List[A], accum: List[A]): List[A] =
        l match {
          case Nil => accum
          case Cons(h, t) => go(t, Cons(h, accum))
        }
      go(l, Nil)
    }
    // Accumulates all but the last element, in reverse order.
    def go(l: List[A], accum: List[A]): List[A] =
      l match {
        case Nil => ???
        case Cons(h, Nil) => Nil
        case Cons(h1, Cons(h2, Nil)) => Cons(h1, accum)
        case Cons(h, t) => go(t, Cons(h, accum))
      }
    reverse(go(l, Nil))
  }

  /** Length via foldRight (counts elements, ignoring their values). */
  def length[A](l: List[A]): Int = foldRight(l, 0) { (x, z) => z + 1 }

  /** Tail-recursive left fold; safe for long lists. */
  def foldLeft[A,B](l: List[A], z: B)(f: (B, A) => B): B =
    l match {
      case Nil => z
      case Cons(x, xs) => foldLeft(xs, f(z, x))(f)
    }

  def sum3(xs: List[Int]): Int = foldLeft(xs, 0)(_ + _)
  def product3(xs: List[Int]): Int = foldLeft(xs, 1)(_ * _)
  def length3(xs: List[Int]): Int = foldLeft(xs, 0) { (z, x) => z + 1 }

  /** Reverses a list in O(n) using a tail-recursive fold. */
  def reverse[A](l: List[A]): List[A] =
    foldLeft(l, Nil: List[A]) { (b, a) => Cons(a, b) }

  /* foldLeft expressed via foldRight: foldRight builds a function B => B
     that, when applied to the seed, folds from the left. */
  def foldLeft_using_foldRight[A, B](l: List[A], z: B)(f: (B, A) => B): B = {
    def foldF(l: List[A]) =
      foldRight(l, (b: B) => b) {
        (a: A, fz: B => B) => (b: B) => fz(f(b, a))
      }
    l match {
      case Nil => z
      case Cons(x, xs) => foldF(xs)(f(z, x))
    }
  }

  /** foldRight expressed via foldLeft over the reversed list (stack-safe). */
  def foldRight_using_foldLeft[A, B](l: List[A], z: B)(f: (A, B) => B): B =
    foldLeft(reverse(l), z) { (b, a) => f(a, b) }

  /* Fixed: the original combiner was `(a, as) => Cons(a, append(as, a2))`,
     which re-appended `a2` once per element of `a1` — e.g.
     append_using_foldRight(List(1), List(2)) evaluated to List(1, 2, 2).
     Folding with plain Cons over the seed `a2` is the correct formulation. */
  def append_using_foldRight[A](a1: List[A], a2: List[A]): List[A] =
    foldRight(a1, a2) { (a: A, as: List[A]) => Cons(a, as) }

  /** Flattens one level of nesting. */
  def concat[A](xss: List[List[A]]): List[A] =
    foldRight_using_foldLeft(xss, Nil: List[A])(append(_, _))

  /** Adds 1 to every element. */
  def add1(xs: List[Int]): List[Int] =
    foldRight_using_foldLeft(xs, Nil: List[Int]) {
      (x: Int, xs: List[Int]) => Cons(x + 1, xs)
    }

  /** Renders every double as its string representation. */
  def toString(xs: List[Double]): List[String] =
    foldRight_using_foldLeft(xs, Nil: List[String]) {
      (x: Double, xs: List[String]) => Cons(x.toString, xs)
    }

  /** Applies `f` to every element, preserving order. */
  def map[A,B](l: List[A])(f: A => B): List[B] =
    foldRight_using_foldLeft(l, Nil: List[B]) {
      (x: A, xs: List[B]) => Cons(f(x), xs)
    }

  /** Keeps only the elements satisfying `f`. */
  def filter[A](l: List[A])(f: A => Boolean): List[A] =
    foldRight_using_foldLeft(l, Nil: List[A]) {
      (x: A, xs: List[A]) => if (f(x)) Cons(x, xs) else xs
    }

  /** Maps each element to a list and concatenates the results. */
  def flatMap[A, B](as: List[A])(f: A => List[B]): List[B] =
    concat(map(as)(f))

  /** filter expressed via flatMap. */
  def filter_using_flatMap[A](l: List[A])(f: A => Boolean): List[A] =
    flatMap(l)(x => if (f(x)) List(x) else Nil)

  /** Element-wise sum; the result is as long as the shorter input. */
  def addIntLists(xs: List[Int], ys: List[Int]): List[Int] =
    (xs, ys) match {
      case (_, Nil) => Nil
      case (Nil, _) => Nil
      case (Cons(x, xs), Cons(y, ys)) => Cons(x + y, addIntLists(xs, ys))
    }

  /** Element-wise combination with `f`; truncates to the shorter input. */
  def zipWith[A, B, C](as: List[A], bs: List[B])(f: (A, B) => C): List[C] =
    (as, bs) match {
      case (_, Nil) => Nil
      case (Nil, _) => Nil
      case (Cons(a, as), Cons(b, bs)) =>
        Cons(f(a, b), zipWith(as, bs)(f))
    }

  /** True iff `subList` occurs as a contiguous run inside `superList` (the
   * empty list is a subsequence of every list).
   *
   * Fixed: on a head mismatch the original recursed with the tails of BOTH
   * lists (`hasSubsequence(xs, ys)`), and after a successful head match it
   * never retried later start positions. That made
   * `hasSubsequence(List(1, 1, 2), List(1, 2))` return false and
   * `hasSubsequence(List(1, 2, 3), List(2, 4))` return true. The fix tries a
   * full prefix match at every start position of `superList`. */
  def hasSubsequence[A](
    superList: List[A],
    subList: List[A]
  ): Boolean = {
    // True iff `ys` is a prefix of `xs`.
    def startsWith(xs: List[A], ys: List[A]): Boolean =
      (xs, ys) match {
        case (_, Nil) => true
        case (Nil, _) => false
        case (Cons(x, xs), Cons(y, ys)) if (x == y) =>
          startsWith(xs, ys)
        case _ => false
      }
    superList match {
      case Nil => subList == Nil
      case Cons(_, t) =>
        startsWith(superList, subList) || hasSubsequence(t, subList)
    }
  }
}
| yawaramin/fpinscala | exercises/src/main/scala/fpinscala/datastructures/List.scala | Scala | mit | 6,574 |
// HackerRank-style "Stock Prediction" solution. (The object name is missing
// an 'r' — "Pediction" — but is kept as-is because it is the program's
// entry-point name.) Reads the whole problem input from stdin.
object StockPediction extends App{
  // Input layout (assumed from the reads below — TODO confirm against the
  // problem statement): line 0 = element count, line 1 = the price array,
  // line 2 = query count, remaining lines = one "d margin" pair per query.
  val lines = io.Source.stdin.getLines.toList
  //val lines = Iterator.continually(Console.readLine).takeWhile(_.nonEmpty).map(_.trim).toList
  val arrayLength = lines(0).toInt // parsed but unused; consumed only to honour the input format
  val array = lines(1).split(" ").map(_.toInt)
  val queriesLength = lines(2).toInt // parsed but unused; consumed only to honour the input format
  val queries = lines.drop(3)
  queries.foreach {
    query =>
      val a = query.split(" ").map(_.toInt)
      val d = a(0) // index into `array` of the reference element
      val margin = a(1) // allowed increase above array(d)
      // array.take
      // NOTE(review): skips to the first element >= array(d) counting from
      // the head, then counts consecutive elements while they stay within
      // [array(d), array(d) + margin]; presumably the length of the
      // qualifying run — verify against the problem's expected window.
      println(array.drop(array.indexWhere(_ >= array(d))).takeWhile(stock(_, array(d), margin)).length)
  }
  // True iff value `a` lies in the inclusive band [d, d + margin].
  def stock(a: Int, d: Int, margin: Int): Boolean =
    a >= d && a <= d + margin
}
package com.github.tarao
package slickjdbc
package getresult
import scala.language.implicitConversions
import helper.{UnitSpec, TraitSingletonBehavior}
import org.scalamock.scalatest.MockFactory
import java.sql.ResultSet
import java.io.{
InputStream,
ByteArrayInputStream,
InputStreamReader,
BufferedReader,
Reader
}
class TypeBinderSpec extends UnitSpec with MockFactory {
// Shared behaviour: `binder` must read `expected` from column number `index` of `rs`.
def column[T](rs: ResultSet, index: Int, expected: T)(implicit
  binder: TypeBinder[T]
) = { binder(rs, index) should be (expected) }
// Shared behaviour: `binder` must read `expected` from the column named `field` of `rs`.
def column[T](rs: ResultSet, field: String, expected: T)(implicit
  binder: TypeBinder[T]
) = { binder(rs, field) should be (expected) }
// Shared behaviour: reading column number `index` via `binder` must throw NoSuchElementException.
def throwingFromColumn[T](rs: ResultSet, index: Int)(implicit
  binder: TypeBinder[T]
) = { a [NoSuchElementException] should be thrownBy binder(rs, index) }
// Shared behaviour: reading the column named `field` via `binder` must throw NoSuchElementException.
def throwingFromColumn[T](rs: ResultSet, field: String)(implicit
  binder: TypeBinder[T]
) = { a [NoSuchElementException] should be thrownBy binder(rs, field) }
describe("TypeBinder[String]") {
it("should be able to get a String value") {
val rs = mock[ResultSet]
(rs.getString(_: Int)).expects(1).twice.returning("foo bar")
(rs.getString(_: String)).expects("column1").twice.returning("foo bar")
it should behave like column(rs, 1, Option("foo bar"))
it should behave like column(rs, "column1", Option("foo bar"))
assertTypeError("""
it should behave like column(rs, 1, "foo bar")
""")
assertTypeError("""
it should behave like column(rs, "column1", "foo bar")
""")
import AutoUnwrapOption._
it should behave like column(rs, 1, "foo bar")
it should behave like column(rs, "column1", "foo bar")
}
it("should not be able to get a String from null") {
val rs = mock[ResultSet]
(rs.getString(_: Int)).expects(0).twice.returning(null)
(rs.getString(_: String)).expects("null").twice.returning(null)
it should behave like column[Option[String]](rs, 0, None)
it should behave like column[Option[String]](rs, "null", None)
import AutoUnwrapOption._
it should behave like throwingFromColumn[String](rs, 0)
it should behave like throwingFromColumn[String](rs, "null")
}
}
describe("TypeBinder[BigDecimal]") {
it("should be able to get a BigDecimal value") {
val rs = mock[ResultSet]
(rs.getBigDecimal(_: Int)).expects(1).repeat(2).returning(
new java.math.BigDecimal("1234567")
)
(rs.getBigDecimal(_: String)).expects("column1").repeat(2).returning(
new java.math.BigDecimal("1234567")
)
(rs.getBigDecimal(_: Int)).expects(2).repeat(2).returning(
new java.math.BigDecimal("12345678901234567")
)
(rs.getBigDecimal(_: String)).expects("column2").repeat(2).returning(
new java.math.BigDecimal("12345678901234567")
)
it should behave like column(rs, 1, Option(BigDecimal("1234567")))
it should behave like column(rs, "column1", Option(BigDecimal("1234567")))
it should behave like
column(rs, 2, Option(BigDecimal("12345678901234567")))
it should behave like
column(rs, "column2", Option(BigDecimal("12345678901234567")))
assertTypeError("""
it should behave like column(rs, 1, BigDecimal("1234567"))
""")
assertTypeError("""
it should behave like column(rs, "column1", BigDecimal("1234567"))
""")
import AutoUnwrapOption._
it should behave like column(rs, 1, BigDecimal("1234567"))
it should behave like column(rs, "column1", BigDecimal("1234567"))
it should behave like
column(rs, 2, BigDecimal("12345678901234567"))
it should behave like
column(rs, "column2", BigDecimal("12345678901234567"))
}
it("should not be able to get a BigDecimal from null") {
val rs = mock[ResultSet]
(rs.getBigDecimal(_: Int)).expects(0).repeat(2).returning(null)
(rs.getBigDecimal(_: String)).expects("null").repeat(2).returning(null)
it should behave like column[Option[BigDecimal]](rs, 0, None)
it should behave like column[Option[BigDecimal]](rs, "null", None)
import AutoUnwrapOption._
it should behave like throwingFromColumn[BigDecimal](rs, 0)
it should behave like throwingFromColumn[BigDecimal](rs, "null")
}
}
describe("TypeBinder[Boolean]") {
it("should be able to get a Boolean value") {
val rs = mock[ResultSet]
(rs.getObject(_: Int)).expects(1).repeat(2).returning(
new java.lang.Boolean(true)
)
(rs.getObject(_: String)).expects("column1").repeat(2).returning(
new java.lang.Boolean(true)
)
(rs.getObject(_: Int)).expects(2).repeat(2).returning(
new java.lang.Boolean(false)
)
(rs.getObject(_: String)).expects("column2").repeat(2).returning(
new java.lang.Boolean(false)
)
(rs.getObject(_: Int)).expects(3).repeat(2).returning(
java.math.BigDecimal.ONE
)
(rs.getObject(_: String)).expects("column3").repeat(2).returning(
java.math.BigDecimal.ONE
)
(rs.getObject(_: Int)).expects(4).repeat(2).returning(
java.math.BigDecimal.ZERO
)
(rs.getObject(_: String)).expects("column4").repeat(2).returning(
java.math.BigDecimal.ZERO
)
(rs.getObject(_: Int)).expects(5).repeat(2).returning(
new java.lang.Integer(1)
)
(rs.getObject(_: String)).expects("column5").repeat(2).returning(
new java.lang.Integer(1)
)
(rs.getObject(_: Int)).expects(6).repeat(2).returning(
new java.lang.Integer(0)
)
(rs.getObject(_: String)).expects("column6").repeat(2).returning(
new java.lang.Integer(0)
)
(rs.getObject(_: Int)).expects(7).repeat(2).returning(
new java.lang.Float(1.0)
)
(rs.getObject(_: String)).expects("column7").repeat(2).returning(
new java.lang.Float(1.0)
)
(rs.getObject(_: Int)).expects(8).repeat(2).returning(
new java.lang.Float(0.0)
)
(rs.getObject(_: String)).expects("column8").repeat(2).returning(
new java.lang.Float(0.0)
)
(rs.getObject(_: Int)).expects(9).repeat(2).returning(
new java.lang.String("1")
)
(rs.getObject(_: String)).expects("column9").repeat(2).returning(
new java.lang.String("1")
)
(rs.getObject(_: Int)).expects(10).repeat(2).returning(
new java.lang.String("0")
)
(rs.getObject(_: String)).expects("column10").repeat(2).returning(
new java.lang.String("0")
)
(rs.getObject(_: Int)).expects(11).repeat(2).returning(
new java.lang.String("hoge")
)
(rs.getObject(_: String)).expects("column11").repeat(2).returning(
new java.lang.String("hoge")
)
(rs.getObject(_: Int)).expects(12).repeat(2).returning(
new java.lang.String("")
)
(rs.getObject(_: String)).expects("column12").repeat(2).returning(
new java.lang.String("")
)
val N = 12
for (i <- 1 to N) {
val b = i % 2 != 0
it should behave like column(rs, i, Option(b))
it should behave like column(rs, "column"+i, Option(b))
}
assertTypeError("""
it should behave like column(rs, 1, true)
""")
assertTypeError("""
it should behave like column(rs, "column1", true)
""")
import AutoUnwrapOption._
for (i <- 1 to N) {
val b = i % 2 != 0
it should behave like column(rs, i, b)
it should behave like column(rs, "column"+i, b)
}
}
it("should not be able to get a Boolean value from null") {
val rs = mock[ResultSet]
(rs.getObject(_: Int)).expects(0).repeat(2).returning(null)
(rs.getObject(_: String)).expects("null").repeat(2).returning(null)
it should behave like column[Option[Boolean]](rs, 0, None)
it should behave like column[Option[Boolean]](rs, "null", None)
import AutoUnwrapOption._
it should behave like throwingFromColumn[Boolean](rs, 0)
it should behave like throwingFromColumn[Boolean](rs, "null")
}
}
describe("TypeBinder[Byte]") {
it("should be able to get a Byte value") {
val rs = mock[ResultSet]
(rs.getObject(_: Int)).expects(1).repeat(2).returning(
new java.lang.Byte(12.toByte)
)
(rs.getObject(_: String)).expects("column1").repeat(2).returning(
new java.lang.Byte(12.toByte)
)
(rs.getObject(_: Int)).expects(2).repeat(2).returning(
new java.lang.Byte(224.toByte)
)
(rs.getObject(_: String)).expects("column2").repeat(2).returning(
new java.lang.Byte(224.toByte)
)
(rs.getObject(_: Int)).expects(3).repeat(2).returning(
new java.lang.Integer(1281)
)
(rs.getObject(_: String)).expects("column3").repeat(2).returning(
new java.lang.Integer(1281)
)
(rs.getObject(_: Int)).expects(4).repeat(2).returning(
new java.lang.Integer(-1281)
)
(rs.getObject(_: String)).expects("column4").repeat(2).returning(
new java.lang.Integer(-1281)
)
(rs.getObject(_: Int)).expects(5).repeat(2).returning("12")
(rs.getObject(_: String)).expects("column5").repeat(2).returning("12")
(rs.getObject(_: Int)).expects(6).repeat(2).returning("-3")
(rs.getObject(_: String)).expects("column6").repeat(2).returning("-3")
(rs.getObject(_: Int)).expects(7).repeat(2).returning("010")
(rs.getObject(_: String)).expects("column7").repeat(2).returning("010")
it should behave like column(rs, 1, Option(12.toByte))
it should behave like column(rs, "column1", Option(12.toByte))
it should behave like column(rs, 2, Option(224.toByte))
it should behave like column(rs, "column2", Option(224.toByte))
it should behave like column(rs, 3, Option(1.toByte))
it should behave like column(rs, "column3", Option(1.toByte))
it should behave like column(rs, 4, Option(255.toByte))
it should behave like column(rs, "column4", Option(255.toByte))
it should behave like column(rs, 5, Option(12.toByte))
it should behave like column(rs, "column5", Option(12.toByte))
it should behave like column(rs, 6, Option(-3.toByte))
it should behave like column(rs, "column6", Option(-3.toByte))
it should behave like column(rs, 7, Option(10.toByte))
it should behave like column(rs, "column7", Option(10.toByte))
assertTypeError("""
it should behave like column(rs, 1, 12.toByte)
""")
assertTypeError("""
it should behave like column(rs, "column1", 12.toByte)
""")
import AutoUnwrapOption._
it should behave like column(rs, 1, 12.toByte)
it should behave like column(rs, "column1", 12.toByte)
it should behave like column(rs, 2, 224.toByte)
it should behave like column(rs, "column2", 224.toByte)
it should behave like column(rs, 3, 1.toByte)
it should behave like column(rs, "column3", 1.toByte)
it should behave like column(rs, 4, 255.toByte)
it should behave like column(rs, "column4", 255.toByte)
it should behave like column(rs, 5, 12.toByte)
it should behave like column(rs, "column5", 12.toByte)
it should behave like column(rs, 6, -3.toByte)
it should behave like column(rs, "column6", -3.toByte)
it should behave like column(rs, 7, 10.toByte)
it should behave like column(rs, "column7", 10.toByte)
}
it("should not be able to get a Byte value from an invalid rep.") {
val rs = mock[ResultSet]
(rs.getObject(_: Int)).expects(1).repeat(2).returning("")
(rs.getObject(_: String)).expects("column1").repeat(2).returning("")
(rs.getObject(_: Int)).expects(2).repeat(2).returning("1281")
(rs.getObject(_: String)).expects("column2").repeat(2).returning("1281")
(rs.getObject(_: Int)).expects(3).repeat(2).returning("foo")
(rs.getObject(_: String)).expects("column3").repeat(2).returning("foo")
val N = 3
for (i <- 1 to N) {
it should behave like column[Option[Byte]](rs, i, None)
it should behave like column[Option[Byte]](rs, "column"+i, None)
}
import AutoUnwrapOption._
for (i <- 1 to N) {
it should behave like throwingFromColumn[Byte](rs, i)
it should behave like throwingFromColumn[Byte](rs, "column"+i)
}
}
it("should not be able to get a Byte from null") {
val rs = mock[ResultSet]
(rs.getObject(_: Int)).expects(0).repeat(2).returning(null)
(rs.getObject(_: String)).expects("null").repeat(2).returning(null)
it should behave like column[Option[Byte]](rs, 0, None)
it should behave like column[Option[Byte]](rs, "null", None)
import AutoUnwrapOption._
it should behave like throwingFromColumn[Byte](rs, 0)
it should behave like throwingFromColumn[Byte](rs, "null")
}
}
describe("TypeBinder[Short]") {
it("should be able to get a Short value") {
val rs = mock[ResultSet]
(rs.getObject(_: Int)).expects(1).repeat(2).returning(
new java.lang.Short(12.toShort)
)
(rs.getObject(_: String)).expects("column1").repeat(2).returning(
new java.lang.Short(12.toShort)
)
(rs.getObject(_: Int)).expects(2).repeat(2).returning(
new java.lang.Short(38000.toShort)
)
(rs.getObject(_: String)).expects("column2").repeat(2).returning(
new java.lang.Short(38000.toShort)
)
(rs.getObject(_: Int)).expects(3).repeat(2).returning(
new java.lang.Integer(129780)
)
(rs.getObject(_: String)).expects("column3").repeat(2).returning(
new java.lang.Integer(129780)
)
(rs.getObject(_: Int)).expects(4).repeat(2).returning(
new java.lang.Integer(-129781)
)
(rs.getObject(_: String)).expects("column4").repeat(2).returning(
new java.lang.Integer(-129781)
)
(rs.getObject(_: Int)).expects(5).repeat(2).returning("12")
(rs.getObject(_: String)).expects("column5").repeat(2).returning("12")
(rs.getObject(_: Int)).expects(6).repeat(2).returning("-3")
(rs.getObject(_: String)).expects("column6").repeat(2).returning("-3")
(rs.getObject(_: Int)).expects(7).repeat(2).returning("010")
(rs.getObject(_: String)).expects("column7").repeat(2).returning("010")
it should behave like column(rs, 1, Option(12.toShort))
it should behave like column(rs, "column1", Option(12.toShort))
it should behave like column(rs, 2, Option(38000.toShort))
it should behave like column(rs, "column2", Option(38000.toShort))
it should behave like column(rs, 3, Option(64244.toShort))
it should behave like column(rs, "column3", Option(64244.toShort))
it should behave like column(rs, 4, Option(1291.toShort))
it should behave like column(rs, "column4", Option(1291.toShort))
it should behave like column(rs, 5, Option(12.toShort))
it should behave like column(rs, "column5", Option(12.toShort))
it should behave like column(rs, 6, Option(-3.toShort))
it should behave like column(rs, "column6", Option(-3.toShort))
it should behave like column(rs, 7, Option(10.toShort))
it should behave like column(rs, "column7", Option(10.toShort))
assertTypeError("""
it should behave like column(rs, 1, 12.toShort)
""")
assertTypeError("""
it should behave like column(rs, "column1", 12.toShort)
""")
import AutoUnwrapOption._
it should behave like column(rs, 1, 12.toShort)
it should behave like column(rs, "column1", 12.toShort)
it should behave like column(rs, 2, 38000.toShort)
it should behave like column(rs, "column2", 38000.toShort)
it should behave like column(rs, 3, 64244.toShort)
it should behave like column(rs, "column3", 64244.toShort)
it should behave like column(rs, 4, 1291.toShort)
it should behave like column(rs, "column4", 1291.toShort)
it should behave like column(rs, 5, 12.toShort)
it should behave like column(rs, "column5", 12.toShort)
it should behave like column(rs, 6, -3.toShort)
it should behave like column(rs, "column6", -3.toShort)
it should behave like column(rs, 7, 10.toShort)
it should behave like column(rs, "column7", 10.toShort)
}
it("should not be able to get a Short value from an invalid rep.") {
val rs = mock[ResultSet]
(rs.getObject(_: Int)).expects(1).repeat(2).returning("")
(rs.getObject(_: String)).expects("column1").repeat(2).returning("")
(rs.getObject(_: Int)).expects(2).repeat(2).returning("38000")
(rs.getObject(_: String)).expects("column2").repeat(2).returning("38000")
(rs.getObject(_: Int)).expects(3).repeat(2).returning("foo")
(rs.getObject(_: String)).expects("column3").repeat(2).returning("foo")
val N = 3
for (i <- 1 to N) {
it should behave like column[Option[Short]](rs, i, None)
it should behave like column[Option[Short]](rs, "column"+i, None)
}
import AutoUnwrapOption._
for (i <- 1 to N) {
it should behave like throwingFromColumn[Short](rs, i)
it should behave like throwingFromColumn[Short](rs, "column"+i)
}
}
it("should not be able to get a Short from null") {
val rs = mock[ResultSet]
(rs.getObject(_: Int)).expects(0).repeat(2).returning(null)
(rs.getObject(_: String)).expects("null").repeat(2).returning(null)
it should behave like column[Option[Short]](rs, 0, None)
it should behave like column[Option[Short]](rs, "null", None)
import AutoUnwrapOption._
it should behave like throwingFromColumn[Short](rs, 0)
it should behave like throwingFromColumn[Short](rs, "null")
}
}
describe("TypeBinder[Int]") {
it("should be able to get a Int value") {
val rs = mock[ResultSet]
(rs.getObject(_: Int)).expects(1).repeat(2).returning(
new java.lang.Integer(12)
)
(rs.getObject(_: String)).expects("column1").repeat(2).returning(
new java.lang.Integer(12)
)
(rs.getObject(_: Int)).expects(2).repeat(2).returning(
new java.lang.Integer(3000000000L.toInt)
)
(rs.getObject(_: String)).expects("column2").repeat(2).returning(
new java.lang.Integer(3000000000L.toInt)
)
(rs.getObject(_: Int)).expects(3).repeat(2).returning("12")
(rs.getObject(_: String)).expects("column3").repeat(2).returning("12")
(rs.getObject(_: Int)).expects(4).repeat(2).returning("-3")
(rs.getObject(_: String)).expects("column4").repeat(2).returning("-3")
(rs.getObject(_: Int)).expects(5).repeat(2).returning("010")
(rs.getObject(_: String)).expects("column5").repeat(2).returning("010")
it should behave like column(rs, 1, Option(12))
it should behave like column(rs, "column1", Option(12))
it should behave like column(rs, 2, Option(3000000000L.toInt))
it should behave like column(rs, "column2", Option(3000000000L.toInt))
it should behave like column(rs, 3, Option(12))
it should behave like column(rs, "column3", Option(12))
it should behave like column(rs, 4, Option(-3))
it should behave like column(rs, "column4", Option(-3))
it should behave like column(rs, 5, Option(10))
it should behave like column(rs, "column5", Option(10))
assertTypeError("""
it should behave like column(rs, 1, 12)
""")
assertTypeError("""
it should behave like column(rs, "column1", 12)
""")
import AutoUnwrapOption._
it should behave like column(rs, 1, 12)
it should behave like column(rs, "column1", 12)
it should behave like column(rs, 2, 3000000000L.toInt)
it should behave like column(rs, "column2", 3000000000L.toInt)
it should behave like column(rs, 3, 12)
it should behave like column(rs, "column3", 12)
it should behave like column(rs, 4, -3)
it should behave like column(rs, "column4", -3)
it should behave like column(rs, 5, 10)
it should behave like column(rs, "column5", 10)
}
it("should not be able to get a Int value from an invalid rep.") {
val rs = mock[ResultSet]
(rs.getObject(_: Int)).expects(1).repeat(2).returning("")
(rs.getObject(_: String)).expects("column1").repeat(2).returning("")
(rs.getObject(_: Int)).expects(2).repeat(2).returning("6000000000")
(rs.getObject(_: String)).expects("column2").repeat(2).returning("6000000000")
(rs.getObject(_: Int)).expects(3).repeat(2).returning("foo")
(rs.getObject(_: String)).expects("column3").repeat(2).returning("foo")
val N = 3
for (i <- 1 to N) {
it should behave like column[Option[Int]](rs, i, None)
it should behave like column[Option[Int]](rs, "column"+i, None)
}
import AutoUnwrapOption._
for (i <- 1 to N) {
it should behave like throwingFromColumn[Int](rs, i)
it should behave like throwingFromColumn[Int](rs, "column"+i)
}
}
it("should not be able to get a Int from null") {
val rs = mock[ResultSet]
(rs.getObject(_: Int)).expects(0).repeat(2).returning(null)
(rs.getObject(_: String)).expects("null").repeat(2).returning(null)
it should behave like column[Option[Int]](rs, 0, None)
it should behave like column[Option[Int]](rs, "null", None)
import AutoUnwrapOption._
it should behave like throwingFromColumn[Int](rs, 0)
it should behave like throwingFromColumn[Int](rs, "null")
}
}
describe("TypeBinder[Long]") {
it("should be able to get a Long value") {
val rs = mock[ResultSet]
(rs.getObject(_: Int)).expects(1).repeat(2).returning(
new java.lang.Long(12)
)
(rs.getObject(_: String)).expects("column1").repeat(2).returning(
new java.lang.Long(12)
)
(rs.getObject(_: Int)).expects(2).repeat(2).returning(
new java.lang.Long(6000000000L)
)
(rs.getObject(_: String)).expects("column2").repeat(2).returning(
new java.lang.Long(6000000000L)
)
(rs.getObject(_: Int)).expects(3).repeat(2).returning(
new java.math.BigInteger("1"+"0"*19)
)
(rs.getObject(_: String)).expects("column3").repeat(2).returning(
new java.math.BigInteger("1"+"0"*19)
)
(rs.getObject(_: Int)).expects(4).repeat(2).returning("12")
(rs.getObject(_: String)).expects("column4").repeat(2).returning("12")
(rs.getObject(_: Int)).expects(5).repeat(2).returning("-3")
(rs.getObject(_: String)).expects("column5").repeat(2).returning("-3")
(rs.getObject(_: Int)).expects(6).repeat(2).returning("010")
(rs.getObject(_: String)).expects("column6").repeat(2).returning("010")
it should behave like column(rs, 1, Option(12))
it should behave like column(rs, "column1", Option(12))
it should behave like column(rs, 2, Option(6000000000L))
it should behave like column(rs, "column2", Option(6000000000L))
it should behave like column(rs, 3, Option(BigInt("1"+"0"*19).longValue))
it should behave like column(rs, "column3", Option(BigInt("1"+"0"*19).longValue))
it should behave like column(rs, 4, Option(12))
it should behave like column(rs, "column4", Option(12))
it should behave like column(rs, 5, Option(-3))
it should behave like column(rs, "column5", Option(-3))
it should behave like column(rs, 6, Option(10))
it should behave like column(rs, "column6", Option(10))
assertTypeError("""
it should behave like column(rs, 1, 12)
""")
assertTypeError("""
it should behave like column(rs, "column1", 12)
""")
import AutoUnwrapOption._
it should behave like column(rs, 1, 12)
it should behave like column(rs, "column1", 12)
it should behave like column(rs, 2, 6000000000L)
it should behave like column(rs, "column2", 6000000000L)
it should behave like column(rs, 3, BigInt("1"+"0"*19).longValue)
it should behave like column(rs, "column3", BigInt("1"+"0"*19).longValue)
it should behave like column(rs, 4, 12)
it should behave like column(rs, "column4", 12)
it should behave like column(rs, 5, -3)
it should behave like column(rs, "column5", -3)
it should behave like column(rs, 6, 10)
it should behave like column(rs, "column6", 10)
}
it("should not be able to get a Long value from an invalid rep.") {
val rs = mock[ResultSet]
(rs.getObject(_: Int)).expects(1).repeat(2).returning("")
(rs.getObject(_: String)).expects("column1").repeat(2).returning("")
(rs.getObject(_: Int)).expects(2).repeat(2).returning("1"+"0"*19)
(rs.getObject(_: String)).expects("column2").repeat(2).returning("1"+"0"*19)
(rs.getObject(_: Int)).expects(3).repeat(2).returning("foo")
(rs.getObject(_: String)).expects("column3").repeat(2).returning("foo")
val N = 3
for (i <- 1 to N) {
it should behave like column[Option[Long]](rs, i, None)
it should behave like column[Option[Long]](rs, "column"+i, None)
}
import AutoUnwrapOption._
for (i <- 1 to N) {
it should behave like throwingFromColumn[Long](rs, i)
it should behave like throwingFromColumn[Long](rs, "column"+i)
}
}
it("should not be able to get a Long from null") {
val rs = mock[ResultSet]
(rs.getObject(_: Int)).expects(0).repeat(2).returning(null)
(rs.getObject(_: String)).expects("null").repeat(2).returning(null)
it should behave like column[Option[Long]](rs, 0, None)
it should behave like column[Option[Long]](rs, "null", None)
import AutoUnwrapOption._
it should behave like throwingFromColumn[Long](rs, 0)
it should behave like throwingFromColumn[Long](rs, "null")
}
}
  // Verifies TypeBinder[Float]: values bind from a boxed java.lang.Float or
  // from a numeric string; non-numeric strings and SQL NULL yield None (or an
  // exception once AutoUnwrapOption is in scope).
  describe("TypeBinder[Float]") {
    it("should be able to get a Float value") {
      val rs = mock[ResultSet]
      // Each column is read twice below: once as Option[Float] and once as a
      // bare Float via AutoUnwrapOption, hence repeat(2).
      (rs.getObject(_: Int)).expects(1).repeat(2).returning(
        new java.lang.Float(1.2f)
      )
      (rs.getObject(_: String)).expects("column1").repeat(2).returning(
        new java.lang.Float(1.2f)
      )
      // Columns 2-6 return string representations that must be parsed.
      (rs.getObject(_: Int)).expects(2).repeat(2).returning("1.2")
      (rs.getObject(_: String)).expects("column2").repeat(2).returning("1.2")
      (rs.getObject(_: Int)).expects(3).repeat(2).returning("-3.5")
      (rs.getObject(_: String)).expects("column3").repeat(2).returning("-3.5")
      (rs.getObject(_: Int)).expects(4).repeat(2).returning("010.8")
      (rs.getObject(_: String)).expects("column4").repeat(2).returning("010.8")
      (rs.getObject(_: Int)).expects(5).repeat(2).returning("30")
      (rs.getObject(_: String)).expects("column5").repeat(2).returning("30")
      (rs.getObject(_: Int)).expects(6).repeat(2).returning("1"+"0"*20)
      (rs.getObject(_: String)).expects("column6").repeat(2).returning("1"+"0"*20)
      it should behave like column(rs, 1, Option(1.2f))
      it should behave like column(rs, "column1", Option(1.2f))
      it should behave like column(rs, 2, Option(1.2f))
      it should behave like column(rs, "column2", Option(1.2f))
      it should behave like column(rs, 3, Option(-3.5f))
      it should behave like column(rs, "column3", Option(-3.5f))
      it should behave like column(rs, 4, Option(10.8f))
      it should behave like column(rs, "column4", Option(10.8f))
      it should behave like column(rs, 5, Option(30.0f))
      it should behave like column(rs, "column5", Option(30.0f))
      it should behave like column(rs, 6, Option(1.0e20f))
      it should behave like column(rs, "column6", Option(1.0e20f))
      // A bare Float expectation must not compile without AutoUnwrapOption.
      assertTypeError("""
        it should behave like column(rs, 1, 1.2f)
      """)
      assertTypeError("""
        it should behave like column(rs, "column1", 1.2f)
      """)
      import AutoUnwrapOption._
      it should behave like column(rs, 1, 1.2f)
      it should behave like column(rs, "column1", 1.2f)
      it should behave like column(rs, 2, 1.2f)
      it should behave like column(rs, "column2", 1.2f)
      it should behave like column(rs, 3, -3.5f)
      it should behave like column(rs, "column3", -3.5f)
      it should behave like column(rs, 4, 10.8f)
      it should behave like column(rs, "column4", 10.8f)
      it should behave like column(rs, 5, 30.0f)
      it should behave like column(rs, "column5", 30.0f)
      it should behave like column(rs, 6, 1.0e20f)
      it should behave like column(rs, "column6", 1.0e20f)
    }
    it("should not be able to get a Float value from an invalid rep.") {
      val rs = mock[ResultSet]
      (rs.getObject(_: Int)).expects(1).repeat(2).returning("")
      (rs.getObject(_: String)).expects("column1").repeat(2).returning("")
      (rs.getObject(_: Int)).expects(2).repeat(2).returning("foo")
      (rs.getObject(_: String)).expects("column2").repeat(2).returning("foo")
      val N = 2
      // Unparseable strings bind to None...
      for (i <- 1 to N) {
        it should behave like column[Option[Float]](rs, i, None)
        it should behave like column[Option[Float]](rs, "column"+i, None)
      }
      import AutoUnwrapOption._
      // ...and throw once the Option is auto-unwrapped.
      for (i <- 1 to N) {
        it should behave like throwingFromColumn[Float](rs, i)
        it should behave like throwingFromColumn[Float](rs, "column"+i)
      }
    }
    it("should not be able to get a Float from null") {
      val rs = mock[ResultSet]
      (rs.getObject(_: Int)).expects(0).repeat(2).returning(null)
      (rs.getObject(_: String)).expects("null").repeat(2).returning(null)
      it should behave like column[Option[Float]](rs, 0, None)
      it should behave like column[Option[Float]](rs, "null", None)
      import AutoUnwrapOption._
      it should behave like throwingFromColumn[Float](rs, 0)
      it should behave like throwingFromColumn[Float](rs, "null")
    }
  }
describe("TypeBinder[Double]") {
it("should be able to get a Double value") {
val rs = mock[ResultSet]
(rs.getObject(_: Int)).expects(1).repeat(2).returning(
new java.lang.Double(1.2)
)
(rs.getObject(_: String)).expects("column1").repeat(2).returning(
new java.lang.Double(1.2)
)
(rs.getObject(_: Int)).expects(2).repeat(2).returning("1.2")
(rs.getObject(_: String)).expects("column2").repeat(2).returning("1.2")
(rs.getObject(_: Int)).expects(3).repeat(2).returning("-3.5")
(rs.getObject(_: String)).expects("column3").repeat(2).returning("-3.5")
(rs.getObject(_: Int)).expects(4).repeat(2).returning("010.8")
(rs.getObject(_: String)).expects("column4").repeat(2).returning("010.8")
(rs.getObject(_: Int)).expects(5).repeat(2).returning("30")
(rs.getObject(_: String)).expects("column5").repeat(2).returning("30")
(rs.getObject(_: Int)).expects(6).repeat(2).returning("1"+"0"*20)
(rs.getObject(_: String)).expects("column6").repeat(2).returning("1"+"0"*20)
it should behave like column(rs, 1, Option(1.2))
it should behave like column(rs, "column1", Option(1.2))
it should behave like column(rs, 2, Option(1.2f))
it should behave like column(rs, "column2", Option(1.2))
it should behave like column(rs, 3, Option(-3.5f))
it should behave like column(rs, "column3", Option(-3.5))
it should behave like column(rs, 4, Option(10.8f))
it should behave like column(rs, "column4", Option(10.8))
it should behave like column(rs, 5, Option(30.0f))
it should behave like column(rs, "column5", Option(30.0))
it should behave like column(rs, 6, Option(1.0e20f))
it should behave like column(rs, "column6", Option(1.0e20))
assertTypeError("""
it should behave like column(rs, 1, 1.2f)
""")
assertTypeError("""
it should behave like column(rs, "column1", 1.2f)
""")
import AutoUnwrapOption._
it should behave like column(rs, 1, 1.2)
it should behave like column(rs, "column1", 1.2)
it should behave like column(rs, 2, 1.2)
it should behave like column(rs, "column2", 1.2)
it should behave like column(rs, 3, -3.5)
it should behave like column(rs, "column3", -3.5)
it should behave like column(rs, 4, 10.8)
it should behave like column(rs, "column4", 10.8)
it should behave like column(rs, 5, 30.0)
it should behave like column(rs, "column5", 30.0)
it should behave like column(rs, 6, 1.0e20)
it should behave like column(rs, "column6", 1.0e20)
}
it("should not be able to get a Double value from an invalid rep.") {
val rs = mock[ResultSet]
(rs.getObject(_: Int)).expects(1).repeat(2).returning("")
(rs.getObject(_: String)).expects("column1").repeat(2).returning("")
(rs.getObject(_: Int)).expects(2).repeat(2).returning("foo")
(rs.getObject(_: String)).expects("column2").repeat(2).returning("foo")
val N = 2
for (i <- 1 to N) {
it should behave like column[Option[Double]](rs, i, None)
it should behave like column[Option[Double]](rs, "column"+i, None)
}
import AutoUnwrapOption._
for (i <- 1 to N) {
it should behave like throwingFromColumn[Double](rs, i)
it should behave like throwingFromColumn[Double](rs, "column"+i)
}
}
it("should not be able to get a Double from null") {
val rs = mock[ResultSet]
(rs.getObject(_: Int)).expects(0).repeat(2).returning(null)
(rs.getObject(_: String)).expects("null").repeat(2).returning(null)
it should behave like column[Option[Double]](rs, 0, None)
it should behave like column[Option[Double]](rs, "null", None)
import AutoUnwrapOption._
it should behave like throwingFromColumn[Double](rs, 0)
it should behave like throwingFromColumn[Double](rs, "null")
}
}
describe("TypeBinder[java.net.URL]") {
import java.net.URL
it("should be able to get a URL value") {
val rs = mock[ResultSet]
(rs.getURL(_: Int)).expects(1).repeat(2).returning(
new URL("http://github.com/tarao/")
)
(rs.getURL(_: String)).expects("column1").repeat(2).returning(
new URL("http://github.com/tarao/")
)
it should behave like
column(rs, 1, Option(new URL("http://github.com/tarao/")))
it should behave like
column(rs, "column1", Option(new URL("http://github.com/tarao/")))
assertTypeError("""
it should behave like
column(rs, 1, new URL("http://github.com/tarao/"))
""")
assertTypeError("""
it should behave like
column(rs, "column1", new URL("http://github.com/tarao/"))
""")
import AutoUnwrapOption._
it should behave like
column(rs, 1, new URL("http://github.com/tarao/"))
it should behave like
column(rs, "column1", new URL("http://github.com/tarao/"))
}
it("should not be able to get a URL from null") {
val rs = mock[ResultSet]
(rs.getURL(_: Int)).expects(0).repeat(2).returning(null)
(rs.getURL(_: String)).expects("null").repeat(2).returning(null)
it should behave like column[Option[URL]](rs, 0, None)
it should behave like column[Option[URL]](rs, "null", None)
import AutoUnwrapOption._
it should behave like throwingFromColumn[URL](rs, 0)
it should behave like throwingFromColumn[URL](rs, "null")
}
}
describe("TypeBinder[java.sql.Date]") {
import java.sql.Date
it("should be able to get a Date value") {
val today = new Date(System.currentTimeMillis)
val rs = mock[ResultSet]
(rs.getDate(_: Int)).expects(1).repeat(2).returning(today)
(rs.getDate(_: String)).expects("column1").repeat(2).returning(today)
it should behave like column(rs, 1, Option(today))
it should behave like column(rs, "column1", Option(today))
assertTypeError("""
it should behave like column(rs, 1, today)
""")
assertTypeError("""
it should behave like column(rs, "column1", today)
""")
import AutoUnwrapOption._
it should behave like column(rs, 1, today)
it should behave like column(rs, "column1", today)
}
it("should not be able to get a Date from null") {
val rs = mock[ResultSet]
(rs.getDate(_: Int)).expects(0).repeat(2).returning(null)
(rs.getDate(_: String)).expects("null").repeat(2).returning(null)
it should behave like column[Option[Date]](rs, 0, None)
it should behave like column[Option[Date]](rs, "null", None)
import AutoUnwrapOption._
it should behave like throwingFromColumn[Date](rs, 0)
it should behave like throwingFromColumn[Date](rs, "null")
}
}
describe("TypeBinder[java.sql.Time]") {
import java.sql.Time
it("should be able to get a Time value") {
val now = new Time(System.currentTimeMillis)
val rs = mock[ResultSet]
(rs.getTime(_: Int)).expects(1).repeat(2).returning(now)
(rs.getTime(_: String)).expects("column1").repeat(2).returning(now)
it should behave like column(rs, 1, Option(now))
it should behave like column(rs, "column1", Option(now))
assertTypeError("""
it should behave like column(rs, 1, now)
""")
assertTypeError("""
it should behave like column(rs, "column1", now)
""")
import AutoUnwrapOption._
it should behave like column(rs, 1, now)
it should behave like column(rs, "column1", now)
}
it("should not be able to get a Time from null") {
val rs = mock[ResultSet]
(rs.getTime(_: Int)).expects(0).repeat(2).returning(null)
(rs.getTime(_: String)).expects("null").repeat(2).returning(null)
it should behave like column[Option[Time]](rs, 0, None)
it should behave like column[Option[Time]](rs, "null", None)
import AutoUnwrapOption._
it should behave like throwingFromColumn[Time](rs, 0)
it should behave like throwingFromColumn[Time](rs, "null")
}
}
describe("TypeBinder[java.sql.Timestamp]") {
import java.sql.Timestamp
it("should be able to get a Timestamp value") {
val now = new Timestamp(System.currentTimeMillis)
val rs = mock[ResultSet]
(rs.getTimestamp(_: Int)).expects(1).repeat(2).returning(now)
(rs.getTimestamp(_: String)).expects("column1").repeat(2).returning(now)
it should behave like column(rs, 1, Option(now))
it should behave like column(rs, "column1", Option(now))
assertTypeError("""
it should behave like column(rs, 1, now)
""")
assertTypeError("""
it should behave like column(rs, "column1", now)
""")
import AutoUnwrapOption._
it should behave like column(rs, 1, now)
it should behave like column(rs, "column1", now)
}
it("should not be able to get a Timestamp from null") {
val rs = mock[ResultSet]
(rs.getTimestamp(_: Int)).expects(0).repeat(2).returning(null)
(rs.getTimestamp(_: String)).expects("null").repeat(2).returning(null)
it should behave like column[Option[Timestamp]](rs, 0, None)
it should behave like column[Option[Timestamp]](rs, "null", None)
import AutoUnwrapOption._
it should behave like throwingFromColumn[Timestamp](rs, 0)
it should behave like throwingFromColumn[Timestamp](rs, "null")
}
}
  describe("TypeBinder[Array[Byte]]") {
    it("should be able to get an Array[Byte] value") {
      val rs = mock[ResultSet]
      (rs.getBytes(_: Int)).expects(1).repeat(2).returning(
        Array[Byte](1, 2, 3)
      )
      (rs.getBytes(_: String)).expects("column1").repeat(2).returning(
        Array[Byte](1, 2, 3)
      )
      (rs.getBytes(_: Int)).expects(2).repeat(2).returning(
        Array[Byte]()
      )
      (rs.getBytes(_: String)).expects("column2").repeat(2).returning(
        Array[Byte]()
      )
      // NOTE(review): the Option cases assert via the `be` matcher directly
      // rather than the shared `column` behavior — presumably because Array
      // `==` is reference equality while the matcher compares contents.
      implicitly[TypeBinder[Option[Array[Byte]]]].apply(rs, 1).get should
        be (Array[Byte](1, 2, 3))
      implicitly[TypeBinder[Option[Array[Byte]]]].apply(rs, "column1").get should
        be (Array[Byte](1, 2, 3))
      implicitly[TypeBinder[Option[Array[Byte]]]].apply(rs, 2).get should
        be (Array[Byte]())
      implicitly[TypeBinder[Option[Array[Byte]]]].apply(rs, "column2").get should
        be (Array[Byte]())
      // A bare Array[Byte] expectation must not compile without
      // AutoUnwrapOption in scope.
      assertTypeError("""
        it should behave like column(rs, 1, Array[Byte](1, 2, 3))
      """)
      assertTypeError("""
        it should behave like column(rs, "column1", Array[Byte](1, 2, 3))
      """)
      import AutoUnwrapOption._
      it should behave like column(rs, 1, Array[Byte](1, 2, 3))
      it should behave like column(rs, "column1", Array[Byte](1, 2, 3))
      it should behave like column(rs, 2, Array[Byte]())
      it should behave like column(rs, "column2", Array[Byte]())
    }
    it("should not be able to get an Array[Byte] from null") {
      val rs = mock[ResultSet]
      // SQL NULL surfaces as a null reference from the driver.
      (rs.getBytes(_: Int)).expects(0).repeat(2).returning(null)
      (rs.getBytes(_: String)).expects("null").repeat(2).returning(null)
      it should behave like column[Option[Array[Byte]]](rs, 0, None)
      it should behave like column[Option[Array[Byte]]](rs, "null", None)
      import AutoUnwrapOption._
      it should behave like throwingFromColumn[Array[Byte]](rs, 0)
      it should behave like throwingFromColumn[Array[Byte]](rs, "null")
    }
  }
class Reader2Seq(val r: Reader) {
def toSeq = {
val reader = new BufferedReader(r)
Iterator.continually{reader.read}.takeWhile(_ >= 0).toSeq
}
}
  // Implicitly enrich Reader with `toSeq` for the stream-binding tests below.
  implicit def reader2Seq(r: Reader) = new Reader2Seq(r)
class InputStream2Seq(val is: InputStream) {
def toSeq = {
val reader = new BufferedReader(new InputStreamReader(is))
Iterator.continually{reader.read}.takeWhile(_ >= 0).toSeq
}
}
implicit def inputStream2Seq(is: InputStream) =
new Reader2Seq(new InputStreamReader(is))
describe("TypeBinder[java.io.InputStream]") {
it("should be able to get an InputStream value") {
val rs = mock[ResultSet]
(rs.getBinaryStream(_: Int)).expects(1).returning(
new ByteArrayInputStream(Array[Byte](1, 2, 3))
)
(rs.getBinaryStream(_: String)).expects("column1").returning(
new ByteArrayInputStream(Array[Byte](1, 2, 3))
)
(rs.getBinaryStream(_: Int)).expects(2).returning(
new ByteArrayInputStream(Array[Byte](4, 5, 6))
)
(rs.getBinaryStream(_: String)).expects("column2").returning(
new ByteArrayInputStream(Array[Byte](4, 5, 6))
)
implicitly[TypeBinder[Option[InputStream]]].apply(rs, 1)
.get.toSeq should be (Seq(1, 2, 3))
implicitly[TypeBinder[Option[InputStream]]].apply(rs, "column1")
.get.toSeq should be (Seq(1, 2, 3))
assertTypeError("""
implicitly[TypeBinder[InputStream]].apply(rs, 2)
.toSeq should be (Seq(4, 5, 6))
""")
assertTypeError("""
implicitly[TypeBinder[InputStream]].apply(rs, "column2")
.toSeq should be (Seq(4, 5, 6))
""")
import AutoUnwrapOption._
implicitly[TypeBinder[InputStream]].apply(rs, 2)
.toSeq should be (Seq(4, 5, 6))
implicitly[TypeBinder[InputStream]].apply(rs, "column2")
.toSeq should be (Seq(4, 5, 6))
}
it("should not be be able to get an InputStream from null") {
val rs = mock[ResultSet]
(rs.getBinaryStream(_: Int)).expects(0).repeat(2).returning(null)
(rs.getBinaryStream(_: String)).expects("null").repeat(2).returning(null)
it should behave like column[Option[InputStream]](rs, 0, None)
it should behave like column[Option[InputStream]](rs, "null", None)
import AutoUnwrapOption._
it should behave like throwingFromColumn[InputStream](rs, 0)
it should behave like throwingFromColumn[InputStream](rs, "null")
}
}
describe("TypeBinder[java.io.Reader]") {
it("should be able to get an InputStream value") {
val rs = mock[ResultSet]
(rs.getCharacterStream(_: Int)).expects(1).returning(
new InputStreamReader(new ByteArrayInputStream("foo bar".getBytes))
)
(rs.getCharacterStream(_: String)).expects("column1").returning(
new InputStreamReader(new ByteArrayInputStream("foo bar".getBytes))
)
(rs.getCharacterStream(_: Int)).expects(2).returning(
new InputStreamReader(new ByteArrayInputStream("foo bar".getBytes))
)
(rs.getCharacterStream(_: String)).expects("column2").returning(
new InputStreamReader(new ByteArrayInputStream("foo bar".getBytes))
)
implicitly[TypeBinder[Option[Reader]]].apply(rs, 1)
.get.toSeq.map(_.asInstanceOf[Char]).mkString should be ("foo bar")
equal (Seq(1, 2, 3))
implicitly[TypeBinder[Option[Reader]]].apply(rs, "column1")
.get.toSeq.map(_.asInstanceOf[Char]).mkString should be ("foo bar")
assertTypeError("""
implicitly[TypeBinder[Reader]].apply(rs, 2)
.toSeq.map(_.asInstanceOf[Char]).mkString should be ("foo bar")
""")
assertTypeError("""
implicitly[TypeBinder[Reader]].apply(rs, "column2")
.toSeq.map(_.asInstanceOf[Char]).mkString should be ("foo bar")
""")
import AutoUnwrapOption._
implicitly[TypeBinder[Reader]].apply(rs, 2)
.toSeq.map(_.asInstanceOf[Char]).mkString should be ("foo bar")
implicitly[TypeBinder[Reader]].apply(rs, "column2")
.toSeq.map(_.asInstanceOf[Char]).mkString should be ("foo bar")
}
it("should not be be able to get an Reader from null") {
val rs = mock[ResultSet]
(rs.getCharacterStream(_: Int)).expects(0).repeat(2).returning(null)
(rs.getCharacterStream(_: String)).expects("null").repeat(2).returning(null)
it should behave like column[Option[Reader]](rs, 0, None)
it should behave like column[Option[Reader]](rs, "null", None)
import AutoUnwrapOption._
it should behave like throwingFromColumn[Reader](rs, 0)
it should behave like throwingFromColumn[Reader](rs, "null")
}
}
}
/** Verifies that the AutoUnwrapOption object re-exports its trait's methods. */
class AutoUnwrapOptionSpec extends UnitSpec
    with TraitSingletonBehavior {
  describe("object AutoUnwrapOption") {
    // (Typo fixed: description previously read "shuold inherit the trait".)
    it("should inherit the trait") {
      it should behave like exportingTheTraitMethods
        [AutoUnwrapOption](AutoUnwrapOption)
    }
  }
}
| tarao/slick-jdbc-extension-scala | src/test/scala/com/github/tarao/slickjdbc/getresult/TypeBinderSpec.scala | Scala | mit | 46,732 |
/*
* Copyright (C) 2009-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package play.filters.csp
import akka.util.ByteString
import javax.inject.Inject
import play.api.libs.streams.Accumulator
import play.api.mvc.request.RequestAttrKey
import play.api.mvc.{ EssentialAction, RequestHeader, Result }
/**
* A result processor that applies a CSPResult to a play request pipeline -- either an ActionBuilder or a Filter.
*/
trait CSPResultProcessor {
  /**
   * Applies CSP processing around `next` for the given request and returns
   * the accumulator that produces the final (CSP-header-decorated) result.
   */
  def apply(next: EssentialAction, request: RequestHeader): Accumulator[ByteString, Result]
}
object CSPResultProcessor {
  /** Creates the default processor backed by the given `CSPProcessor`. */
  def apply(processor: CSPProcessor): CSPResultProcessor = new DefaultCSPResultProcessor(processor)
}
/**
 * This processor is used by CSPActionBuilder and CSPFilter to apply the CSPResult to a
 * Play HTTP result as headers.
 *
 * Appends as `play.api.http.HeaderNames.CONTENT_SECURITY_POLICY` or
 * `play.api.http.HeaderNames.CONTENT_SECURITY_POLICY_REPORT_ONLY`,
 * depending on config.reportOnly.
 *
 * If `cspResult.nonceHeader` is true and a nonce is present then
 * `play.api.http.HeaderNames.X_CONTENT_SECURITY_POLICY_NONCE_HEADER`
 * is set as an additional header.
 */
class DefaultCSPResultProcessor @Inject() (cspProcessor: CSPProcessor)
  extends CSPResultProcessor {

  /**
   * Runs the CSP processor for `request`. When it yields a result, the nonce
   * (if any) is attached to the request as an attribute and the generated CSP
   * headers are appended to the response; otherwise the request passes
   * through `next` untouched.
   */
  def apply(next: EssentialAction, request: RequestHeader): Accumulator[ByteString, Result] = {
    cspProcessor
      .process(request)
      .map { cspResult =>
        // Expose the generated nonce to the action via a request attribute.
        val maybeNonceRequest = cspResult.nonce
          .map { nonce =>
            request.addAttr(RequestAttrKey.CSPNonce, nonce)
          }
          .getOrElse(request)
        next(maybeNonceRequest).map { result =>
          result.withHeaders(generateHeaders(cspResult): _*)
        }(play.core.Execution.trampoline)
      }
      .getOrElse {
        next(request)
      }
  }

  /**
   * Builds the header list for `cspResult`: the policy under the enforcing or
   * report-only header name, plus the nonce header when `nonceHeader` is
   * enabled and a nonce is present.
   */
  protected def generateHeaders(cspResult: CSPResult): Seq[(String, String)] = {
    import play.api.http.HeaderNames._
    val headerName =
      if (cspResult.reportOnly) CONTENT_SECURITY_POLICY_REPORT_ONLY
      else CONTENT_SECURITY_POLICY
    // `val`, not `var`: the base sequence is never reassigned — the nonce
    // case below returns an extended copy.
    val cspHeader = collection.immutable.Seq(headerName -> cspResult.directives)
    cspResult.nonce match {
      case Some(nonce) if cspResult.nonceHeader =>
        cspHeader :+ (X_CONTENT_SECURITY_POLICY_NONCE_HEADER -> nonce)
      case _ =>
        cspHeader
    }
  }
}
| Shenker93/playframework | framework/src/play-filters-helpers/src/main/scala/play/filters/csp/CSPResultProcessor.scala | Scala | apache-2.0 | 2,331 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.libs.ws.ahc.cache
import java.time.ZonedDateTime
import play.shaded.ahc.org.asynchttpclient._
import com.typesafe.play.cachecontrol.ResponseServeAction
import org.slf4j.Logger
import org.slf4j.LoggerFactory
import play.shaded.ahc.io.netty.handler.codec.http.HttpHeaders
import scala.concurrent.Await
import scala.concurrent.duration.Duration
/**
* An async handler that accumulates response data to place in cache with the given key.
*/
class AsyncCachingHandler[T](
    request: Request,
    handler: AsyncCompletionHandler[T],
    cache: AhcHttpCache,
    maybeAction: Option[ResponseServeAction],
    ahcConfig: AsyncHttpClientConfig
) extends AsyncHandler[T]
    with TimeoutResponse
    with Debug {
  // Name of the HTTP Date header; appended when the origin omits it.
  private val DATE = "Date"
  import com.typesafe.play.cachecontrol.HttpDate
  import AsyncCachingHandler._
  // Accumulates the status line, headers and body parts of the response.
  protected val builder = new CacheableResponseBuilder(ahcConfig)
  // When this exchange started; used for age/freshness calculations.
  protected val requestTime: ZonedDateTime = HttpDate.now
  // Cache key derived from the request.
  protected val key: EffectiveURIKey = EffectiveURIKey(request)
  // Upper bound for each blocking cache lookup performed below.
  protected val timeout: Duration = scala.concurrent.duration.Duration(1, "second")
  // Synthetic response served when a mandatory validation cannot happen
  // (see TimeoutResponse for how it is generated).
  protected lazy val timeoutResponse: CacheableResponse = generateTimeoutResponse(request, ahcConfig)
  /**
   * Invoked if something wrong happened inside the previous methods or when an I/O exception occurs.
   */
  override def onThrowable(t: Throwable): Unit = {
    import com.typesafe.play.cachecontrol.ResponseServeActions._
    maybeAction match {
      case Some(ValidateOrTimeout(reason)) =>
        logger.debug(s"onCompleted: returning timeout because $reason", t)
        // If no-cache or must-revalidate exist, then a
        // successful validation has to happen -- i.e. both stale AND fresh
        // cached responses may not be returned on disconnect.
        // https://tools.ietf.org/html/rfc7234#section-5.2.2.1
        // https://tools.ietf.org/html/rfc7234#section-5.2.2.2
        processTimeoutResponse()
      case other =>
        // If not, then sending a cached response on a disconnect is acceptable
        // as long as 110 and 112 warnings are sent along with it.
        // https://tools.ietf.org/html/rfc7234#section-4.2.4
        logger.debug(s"onCompleted: action = $other", t)
        processDisconnectedResponse()
    }
  }
  /**
   * Called when the status line has been processed.
   */
  override def onStatusReceived(responseStatus: HttpResponseStatus): AsyncHandler.State = {
    builder.accumulate(responseStatus)
    handler.onStatusReceived(responseStatus)
  }
  /**
   * Called when all of the response's headers have been processed.
   */
  override def onHeadersReceived(responseHeaders: HttpHeaders): AsyncHandler.State = {
    if (!responseHeaders.contains(DATE)) {
      /*
       A recipient with a clock that receives a response message without a
       Date header field MUST record the time it was received and append a
       corresponding Date header field to the message's header section if it
       is cached or forwarded downstream.
       https://tools.ietf.org/html/rfc7231#section-7.1.1.2
       */
      val currentDate = HttpDate.format(HttpDate.now)
      responseHeaders.add(DATE, currentDate)
    }
    builder.accumulate(responseHeaders)
    handler.onHeadersReceived(responseHeaders)
  }
  /**
   * Body parts have been received. This method can be invoked many times depending on the response's body size.
   */
  override def onBodyPartReceived(bodyPart: HttpResponseBodyPart): AsyncHandler.State = {
    builder.accumulate(bodyPart)
    handler.onBodyPartReceived(bodyPart)
  }
  /**
   * onCompleted: Invoked when the full response has been read, or if the processing get aborted (more on this below).
   */
  override def onCompleted(): T = {
    import com.typesafe.play.cachecontrol.ResponseServeActions._
    if (logger.isTraceEnabled) {
      logger.trace(s"onCompleted: this = $this")
    }
    val response = builder.build
    if (logger.isDebugEnabled) {
      logger.debug(s"onCompleted: response = ${debug(response)}")
    }
    // We got a response. First, invalidate if unsafe according to
    // https://tools.ietf.org/html/rfc7234#section-4.4
    cache.invalidateIfUnsafe(request, response)
    // "Handling a Validation Response"
    // https://tools.ietf.org/html/rfc7234#section-4.3.3
    if (cache.isNotModified(response)) {
      processNotModifiedResponse(response)
    } else if (cache.isError(response)) {
      // RFC 7234: "However, if a cache receives a 5xx (Server Error) response
      // while attempting to validate a response, it can either forward this
      // response to the requesting client, or act as if the server failed
      // to respond.  In the latter case, the cache MAY send a previously
      // stored response (see Section 4.2.4)."
      maybeAction match {
        case Some(Validate(reason, staleIfError)) if staleIfError =>
          processStaleResponse(response)
        case other =>
          processFullResponse(response)
      }
    } else {
      processFullResponse(response)
    }
  }
  /** Completes the wrapped handler with the synthetic timeout response. */
  protected def processTimeoutResponse(): Unit = {
    handler.onCompleted(timeoutResponse)
  }
  /**
   * On disconnect: serves a cached response when one exists (decorated via
   * addRevalidationFailed/addDisconnectHeader), otherwise the timeout
   * response. Blocks up to `timeout` on the cache lookup.
   */
  protected def processDisconnectedResponse(): T = {
    logger.debug(s"processDisconnectedResponse:")
    val result = Await.result(cache.get(key), timeout)
    val finalResponse = result match {
      case Some(entry) =>
        val currentAge = cache.calculateCurrentAge(request, entry, requestTime)
        val freshnessLifetime = cache.calculateFreshnessLifetime(request, entry)
        val isFresh = freshnessLifetime.isGreaterThan(currentAge)
        cache.addRevalidationFailed {
          cache.addDisconnectHeader {
            cache.generateCachedResponse(request, entry, currentAge, isFresh = isFresh)
          }
        }
      case None =>
        // Nothing in cache. Return the timeout.
        timeoutResponse
    }
    handler.onCompleted(finalResponse)
  }
  /**
   * Validation got a 5xx with stale-if-error allowed: serves the stale cached
   * entry (marked revalidation-failed) when present, otherwise the error
   * response itself. Blocks up to `timeout` on the cache lookup.
   */
  protected def processStaleResponse(response: CacheableResponse): T = {
    logger.debug(s"processCachedResponse: response = ${debug(response)}")
    val result = Await.result(cache.get(key), timeout)
    val finalResponse = result match {
      case Some(entry) =>
        val currentAge = cache.calculateCurrentAge(request, entry, requestTime)
        cache.addRevalidationFailed {
          cache.generateCachedResponse(request, entry, currentAge, isFresh = false)
        }
      case None =>
        // Nothing in cache. Return the error.
        response
    }
    handler.onCompleted(finalResponse)
  }
  /**
   * A complete origin response: caches it when the caching policy allows,
   * then passes it to the wrapped handler.
   */
  protected def processFullResponse(fullResponse: CacheableResponse): T = {
    logger.debug(s"processFullResponse: fullResponse = ${debug(fullResponse)}")
    import com.typesafe.play.cachecontrol.ResponseCachingActions._
    cache.cachingAction(request, fullResponse) match {
      case DoNotCacheResponse(reason) =>
        logger.debug(s"onCompleted: DO NOT CACHE, because $reason")
      case DoCacheResponse(reason) =>
        logger.debug(s"isCacheable: DO CACHE, because $reason")
        cache.cacheResponse(request, fullResponse)
    }
    handler.onCompleted(fullResponse)
  }
  /**
   * A 304 Not Modified: freshens the stored entry with the new headers and
   * re-caches it; falls back to the 304 itself when nothing is cached.
   * Blocks up to `timeout` on the cache lookup.
   */
  protected def processNotModifiedResponse(notModifiedResponse: CacheableResponse): T = {
    logger.trace(s"processNotModifiedResponse: notModifiedResponse = $notModifiedResponse")
    val result = Await.result(cache.get(key), timeout)
    logger.debug(s"processNotModifiedResponse: result = $result")
    // FIXME XXX Find the response which matches the secondary keys...
    val fullResponse = result match {
      case Some(entry) =>
        val newHeaders = notModifiedResponse.getHeaders
        val freshResponse = cache.freshenResponse(newHeaders, entry.response)
        cache.cacheResponse(request, freshResponse)
        freshResponse
      case None =>
        notModifiedResponse
    }
    handler.onCompleted(fullResponse)
  }
  override def toString = {
    s"CacheAsyncHandler(key = $key, requestTime = $requestTime, builder = $builder, asyncHandler = ${debug(handler)}})"
  }
}
object AsyncCachingHandler {
  // Shared logger for all handler instances.
  private val logger: Logger = LoggerFactory.getLogger("play.api.libs.ws.ahc.cache.AsyncCachingHandler")
}
| playframework/play-ws | play-ahc-ws-standalone/src/main/scala/play/api/libs/ws/ahc/cache/AsyncCachingHandler.scala | Scala | apache-2.0 | 8,245 |
package com.twitter.finagle.stream
import org.jboss.netty.channel._
import org.jboss.netty.handler.codec.http._
import com.twitter.concurrent.Offer
import com.twitter.util.{Try, Return, Throw}
import com.twitter.finagle.netty3.Conversions._
import com.twitter.finagle.channel.BrokerChannelHandler
/**
* Convert a StreamResponse into a chunked (streaming) HTTP response.
*/
class HttpChunker extends BrokerChannelHandler {
  // this process is always entered with downstream events already proxied.
  // Streams the messages of `res` to the channel as HTTP chunks. `ack`, when
  // present, is the offer for the previous chunk's write completion: no new
  // message is pulled from the stream until it fires (backpressure).
  private[this] def write(
    ctx: ChannelHandlerContext,
    res: StreamResponse,
    ack: Option[Offer[Try[Unit]]] = None)
  {
    // Release the stream, close the channel, and from then on drop stream
    // messages while still forwarding other upstream events.
    def close() {
      res.release()
      if (ctx.getChannel.isOpen) ctx.getChannel.close()
      upstreamEvent foreach {
        case Message(_, _) => /* drop */
        case e => e.sendUpstream()
      }
    }
    // Race: the pending write ack (or the next stream message), the stream's
    // error/termination offer, and upstream channel events.
    Offer.select(
      ack match {
        // if we're awaiting an ack, don't offer to synchronize
        // on messages. thus we exert backpressure.
        case Some(ack) =>
          ack {
            case Return(_) =>
              write(ctx, res, None)
            case Throw(_) =>
              close()
          }
        case None =>
          res.messages { bytes =>
            val chunk = new DefaultHttpChunk(bytes)
            val writeFuture = Channels.future(ctx.getChannel)
            Channels.write(ctx, writeFuture, chunk)
            write(ctx, res, Some(writeFuture.toTwitterFuture.toOffer))
          }
      },
      res.error { _ =>
        val future = Channels.future(ctx.getChannel)
        val trailer =
          new DefaultHttpChunkTrailer {
            override def isLast(): Boolean = res.httpResponse.isChunked
          }
        Channels.write(ctx, future, trailer)
        future {
          // Close only after we successfully write the trailer.
          // todo: can this be a source of resource leaks?
          case _ => close()
        }
      },
      upstreamEvent {
        case Message(_, _) =>
          // A pipelined request. We don't support this,
          // and will just drop it.
          write(ctx, res)
        case e@(Closed(_, _) | Disconnected(_, _)) =>
          e.sendUpstream()
          close()
        case e =>
          e.sendUpstream()
          write(ctx, res)
      }
    )
  }
  // Waits for the service to reply with a StreamResponse. `dead` becomes true
  // once a Close has been sent downstream; responses arriving after that are
  // released and dropped.
  private[this] def awaitResponse(dead: Boolean) {
    Offer.select(
      downstreamEvent {
        case e@WriteValue(res: StreamResponse, ctx) if !dead =>
          val httpRes = res.httpResponse
          // Chunk only HTTP/1.1 responses with no explicit Content-Length;
          // otherwise mark the connection for close.
          val chunked = httpRes.getProtocolVersion == HttpVersion.HTTP_1_1 && httpRes.getHeader(HttpHeaders.Names.CONTENT_LENGTH) == null
          httpRes.setChunked(chunked)
          if (chunked)
            HttpHeaders.setHeader(httpRes, HttpHeaders.Names.TRANSFER_ENCODING, HttpHeaders.Values.CHUNKED)
          else
            HttpHeaders.setHeader(httpRes, HttpHeaders.Names.CONNECTION, HttpHeaders.Values.CLOSE)
          val writeComplete = Channels.future(ctx.getChannel)
          Channels.write(ctx, writeComplete, httpRes)
          writeComplete.proxyTo(e.e.getFuture)
          proxyDownstream()
          write(ctx, res)
        case WriteValue(res: StreamResponse, _) if dead =>
          res.release()
          awaitResponse(dead)
        case WriteValue(invalid, ctx) =>
          Channels.fireExceptionCaught(ctx,
            new IllegalArgumentException(
              "Invalid reply \"%s\"".format(invalid)))
          awaitResponse(dead)
        case e@Close(_, _) =>
          e.sendDownstream()
          awaitResponse(true)
        case e =>
          e.sendDownstream()
          awaitResponse(dead)
      },
      upstreamEvent { e =>
        e.sendUpstream()
        awaitResponse(dead)
      }
    )
  }
  // Initial state: no response seen yet, channel alive.
  awaitResponse(false)
}
| firebase/finagle | finagle-stream/src/main/scala/com/twitter/finagle/stream/HttpChunker.scala | Scala | apache-2.0 | 3,740 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kudu.spark.kudu
import java.math.BigDecimal
import java.net.InetAddress
import java.sql.Timestamp
import scala.collection.JavaConverters._
import scala.util.Try
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.sources._
import org.apache.spark.sql.types._
import org.apache.spark.sql.{DataFrame, Row, SQLContext, SaveMode}
import org.apache.yetus.audience.InterfaceStability
import org.apache.kudu.client.KuduPredicate.ComparisonOp
import org.apache.kudu.client._
import org.apache.kudu.spark.kudu.SparkUtil._
/**
* Data source for integration with Spark's [[DataFrame]] API.
*
* Serves as a factory for [[KuduRelation]] instances for Spark. Spark will
* automatically look for a [[RelationProvider]] implementation named
* `DefaultSource` when the user specifies the path of a source during DDL
* operations through [[org.apache.spark.sql.DataFrameReader.format]].
*/
@InterfaceStability.Unstable
class DefaultSource extends RelationProvider with CreatableRelationProvider
with SchemaRelationProvider {
  // Required: name of the Kudu table to read/write.
  val TABLE_KEY = "kudu.table"
  // Kudu master address(es); defaults to defaultMasterAddrs when absent.
  val KUDU_MASTER = "kudu.master"
  // Write operation; defaults to "upsert" (see createRelation).
  val OPERATION = "kudu.operation"
  // "true"/"false": use the fault-tolerant scanner; defaults to false.
  val FAULT_TOLERANT_SCANNER = "kudu.faultTolerantScan"
  // Replica selection for scans; defaults to "closest_replica".
  val SCAN_LOCALITY = "kudu.scanLocality"
  // "true"/"false": forwarded to KuduWriteOptions — presumably skips null
  // columns on write; confirm against KuduWriteOptions.
  val IGNORE_NULL = "kudu.ignoreNull"
  // "true"/"false": forwarded to KuduWriteOptions; also implied by the
  // "insert-ignore" operation.
  val IGNORE_DUPLICATE_ROW_ERRORS = "kudu.ignoreDuplicateRowErrors"
  // Fallback master address when "kudu.master" is not supplied.
  def defaultMasterAddrs: String = InetAddress.getLocalHost.getCanonicalHostName
/**
* Construct a BaseRelation using the provided context and parameters.
*
* @param sqlContext SparkSQL context
* @param parameters parameters given to us from SparkSQL
* @return a BaseRelation Object
*/
override def createRelation(sqlContext: SQLContext,
parameters: Map[String, String]):
BaseRelation = {
val tableName = parameters.getOrElse(TABLE_KEY,
throw new IllegalArgumentException(
s"Kudu table name must be specified in create options using key '$TABLE_KEY'"))
val kuduMaster = parameters.getOrElse(KUDU_MASTER, defaultMasterAddrs)
val operationType = getOperationType(parameters.getOrElse(OPERATION, "upsert"))
val faultTolerantScanner = Try(parameters.getOrElse(FAULT_TOLERANT_SCANNER, "false").toBoolean)
.getOrElse(false)
val scanLocality = getScanLocalityType(parameters.getOrElse(SCAN_LOCALITY, "closest_replica"))
val ignoreDuplicateRowErrors = Try(parameters(IGNORE_DUPLICATE_ROW_ERRORS).toBoolean).getOrElse(false) ||
Try(parameters(OPERATION) == "insert-ignore").getOrElse(false)
val ignoreNull = Try(parameters.getOrElse(IGNORE_NULL, "false").toBoolean).getOrElse(false)
val writeOptions = new KuduWriteOptions(ignoreDuplicateRowErrors, ignoreNull)
new KuduRelation(tableName, kuduMaster, faultTolerantScanner,
scanLocality, operationType, None, writeOptions)(sqlContext)
}
/**
* Creates a relation and inserts data to specified table.
*
* @param sqlContext
* @param mode Only Append mode is supported. It will upsert or insert data
* to an existing table, depending on the upsert parameter
* @param parameters Necessary parameters for kudu.table, kudu.master, etc..
* @param data Dataframe to save into kudu
* @return returns populated base relation
*/
override def createRelation(sqlContext: SQLContext, mode: SaveMode,
parameters: Map[String, String], data: DataFrame): BaseRelation = {
val kuduRelation = createRelation(sqlContext, parameters)
mode match {
case SaveMode.Append => kuduRelation.asInstanceOf[KuduRelation].insert(data, false)
case _ => throw new UnsupportedOperationException("Currently, only Append is supported")
}
kuduRelation
}
override def createRelation(sqlContext: SQLContext, parameters: Map[String, String],
schema: StructType): BaseRelation = {
val tableName = parameters.getOrElse(TABLE_KEY,
throw new IllegalArgumentException(s"Kudu table name must be specified in create options " +
s"using key '$TABLE_KEY'"))
val kuduMaster = parameters.getOrElse(KUDU_MASTER, defaultMasterAddrs)
val operationType = getOperationType(parameters.getOrElse(OPERATION, "upsert"))
val faultTolerantScanner = Try(parameters.getOrElse(FAULT_TOLERANT_SCANNER, "false").toBoolean)
.getOrElse(false)
val scanLocality = getScanLocalityType(parameters.getOrElse(SCAN_LOCALITY, "closest_replica"))
new KuduRelation(tableName, kuduMaster, faultTolerantScanner,
scanLocality, operationType, Some(schema))(sqlContext)
}
private def getOperationType(opParam: String): OperationType = {
opParam.toLowerCase match {
case "insert" => Insert
case "insert-ignore" => Insert
case "upsert" => Upsert
case "update" => Update
case "delete" => Delete
case _ => throw new IllegalArgumentException(s"Unsupported operation type '$opParam'")
}
}
private def getScanLocalityType(opParam: String): ReplicaSelection = {
opParam.toLowerCase match {
case "leader_only" => ReplicaSelection.LEADER_ONLY
case "closest_replica" => ReplicaSelection.CLOSEST_REPLICA
case _ => throw new IllegalArgumentException(s"Unsupported replica selection type '$opParam'")
}
}
}
/**
 * Implementation of Spark BaseRelation.
 *
 * @param tableName Kudu table that we plan to read from
 * @param masterAddrs Kudu master addresses
 * @param faultTolerantScanner scanner type to be used. Fault tolerant if true,
 *                             otherwise, use non fault tolerant one
 * @param scanLocality the replica-selection policy used for scans (e.g.
 *                     CLOSEST_REPLICA reads from the replica closest to the task)
 * @param operationType The default operation type to perform when writing to the relation
 * @param userSchema A schema used to select columns for the relation
 * @param writeOptions Kudu write options
 * @param sqlContext SparkSQL context
 */
@InterfaceStability.Unstable
class KuduRelation(private val tableName: String,
                   private val masterAddrs: String,
                   private val faultTolerantScanner: Boolean,
                   private val scanLocality: ReplicaSelection,
                   private val operationType: OperationType,
                   private val userSchema: Option[StructType],
                   private val writeOptions: KuduWriteOptions = new KuduWriteOptions)(
                   val sqlContext: SQLContext)
  extends BaseRelation
  with PrunedFilteredScan
  with InsertableRelation {
  import KuduRelation._

  private val context: KuduContext = new KuduContext(masterAddrs, sqlContext.sparkContext)
  private val table: KuduTable = context.syncClient.openTable(tableName)

  // Filters that cannot be converted to Kudu predicates must be re-applied by Spark.
  override def unhandledFilters(filters: Array[Filter]): Array[Filter] =
    filters.filterNot(supportsFilter)

  /**
   * Generates a SparkSQL schema object so SparkSQL knows what is being
   * provided by this BaseRelation.
   *
   * @return schema generated from the Kudu table's schema
   */
  override def schema: StructType = {
    sparkSchema(table.getSchema, userSchema.map(_.fieldNames))
  }

  /**
   * Build the RDD to scan rows.
   *
   * @param requiredColumns columns that are being requested by the requesting query
   * @param filters filters that are being applied by the requesting query
   * @return RDD will all the results from Kudu
   */
  override def buildScan(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] = {
    val predicates = filters.flatMap(filterToPredicate)
    // 20 MiB — presumably the scanner batch size; confirm against KuduRDD's signature.
    new KuduRDD(context, 1024 * 1024 * 20, requiredColumns, predicates,
      table, faultTolerantScanner, scanLocality, sqlContext.sparkContext)
  }

  /**
   * Converts a Spark [[Filter]] to zero or more Kudu [[KuduPredicate]]s.
   * Unsupported filters yield an empty array (they are handled by Spark instead).
   *
   * @param filter the filter to convert
   * @return the converted filter
   */
  private def filterToPredicate(filter: Filter): Array[KuduPredicate] = {
    filter match {
      case EqualTo(column, value) =>
        Array(comparisonPredicate(column, ComparisonOp.EQUAL, value))
      case GreaterThan(column, value) =>
        Array(comparisonPredicate(column, ComparisonOp.GREATER, value))
      case GreaterThanOrEqual(column, value) =>
        Array(comparisonPredicate(column, ComparisonOp.GREATER_EQUAL, value))
      case LessThan(column, value) =>
        Array(comparisonPredicate(column, ComparisonOp.LESS, value))
      case LessThanOrEqual(column, value) =>
        Array(comparisonPredicate(column, ComparisonOp.LESS_EQUAL, value))
      case In(column, values) =>
        Array(inListPredicate(column, values))
      case StringStartsWith(column, prefix) =>
        // A prefix match is the half-open range [prefix, prefixInfimum(prefix)).
        prefixInfimum(prefix) match {
          case None => Array(comparisonPredicate(column, ComparisonOp.GREATER_EQUAL, prefix))
          case Some(inf) =>
            Array(comparisonPredicate(column, ComparisonOp.GREATER_EQUAL, prefix),
              comparisonPredicate(column, ComparisonOp.LESS, inf))
        }
      case IsNull(column) => Array(isNullPredicate(column))
      case IsNotNull(column) => Array(isNotNullPredicate(column))
      case And(left, right) => filterToPredicate(left) ++ filterToPredicate(right)
      case _ => Array()
    }
  }

  /**
   * Returns the smallest string s such that, if p is a prefix of t,
   * then t < s, if one exists.
   *
   * @param p the prefix
   * @return Some(the prefix infimum), or None if none exists.
   */
  private def prefixInfimum(p: String): Option[String] = {
    // Drop trailing Char.MaxValue chars (no successor exists for them), then
    // increment the last remaining character.
    p.reverse.dropWhile(_ == Char.MaxValue).reverse match {
      case "" => None
      case q => Some(q.slice(0, q.length - 1) + (q(q.length - 1) + 1).toChar)
    }
  }

  /**
   * Creates a new comparison predicate for the column, comparison operator, and comparison value.
   *
   * @param column the column name
   * @param operator the comparison operator
   * @param value the comparison value
   * @return the comparison predicate
   * @throws IllegalArgumentException if the value's type is not supported
   */
  private def comparisonPredicate(column: String,
                                  operator: ComparisonOp,
                                  value: Any): KuduPredicate = {
    val columnSchema = table.getSchema.getColumn(column)
    value match {
      case value: Boolean => KuduPredicate.newComparisonPredicate(columnSchema, operator, value)
      case value: Byte => KuduPredicate.newComparisonPredicate(columnSchema, operator, value)
      case value: Short => KuduPredicate.newComparisonPredicate(columnSchema, operator, value)
      case value: Int => KuduPredicate.newComparisonPredicate(columnSchema, operator, value)
      case value: Long => KuduPredicate.newComparisonPredicate(columnSchema, operator, value)
      case value: Timestamp => KuduPredicate.newComparisonPredicate(columnSchema, operator, value)
      case value: Float => KuduPredicate.newComparisonPredicate(columnSchema, operator, value)
      case value: Double => KuduPredicate.newComparisonPredicate(columnSchema, operator, value)
      case value: String => KuduPredicate.newComparisonPredicate(columnSchema, operator, value)
      case value: Array[Byte] => KuduPredicate.newComparisonPredicate(columnSchema, operator, value)
      case value: BigDecimal => KuduPredicate.newComparisonPredicate(columnSchema, operator, value)
      case _ =>
        // Previously an unsupported type surfaced as a bare MatchError; fail
        // with a descriptive message instead.
        throw new IllegalArgumentException(
          s"Unsupported value type ${value.getClass.getName} for comparison predicate " +
            s"on column '$column'")
    }
  }

  /**
   * Creates a new in list predicate for the column and values.
   *
   * @param column the column name
   * @param values the values
   * @return the in list predicate
   */
  private def inListPredicate(column: String, values: Array[Any]): KuduPredicate = {
    KuduPredicate.newInListPredicate(table.getSchema.getColumn(column), values.toList.asJava)
  }

  /**
   * Creates a new `IS NULL` predicate for the column.
   *
   * @param column the column name
   * @return the `IS NULL` predicate
   */
  private def isNullPredicate(column: String): KuduPredicate = {
    KuduPredicate.newIsNullPredicate(table.getSchema.getColumn(column))
  }

  /**
   * Creates a new `IS NOT NULL` predicate for the column.
   *
   * @param column the column name
   * @return the `IS NOT NULL` predicate
   */
  private def isNotNullPredicate(column: String): KuduPredicate = {
    KuduPredicate.newIsNotNullPredicate(table.getSchema.getColumn(column))
  }

  /**
   * Writes data into an existing Kudu table.
   *
   * If the `kudu.operation` parameter is set, the data will use that operation
   * type. If the parameter is unset, the data will be upserted.
   *
   * @param data [[DataFrame]] to be inserted into Kudu
   * @param overwrite must be false; otherwise, throws [[UnsupportedOperationException]]
   */
  override def insert(data: DataFrame, overwrite: Boolean): Unit = {
    if (overwrite) {
      throw new UnsupportedOperationException("overwrite is not yet supported")
    }
    context.writeRows(data, tableName, operationType, writeOptions)
  }
}
private[spark] object KuduRelation {
  /**
   * Returns `true` if the filter is able to be pushed down to Kudu.
   *
   * @param filter the filter to test
   */
  private def supportsFilter(filter: Filter): Boolean = filter match {
    case And(left, right) =>
      supportsFilter(left) && supportsFilter(right)
    case _: EqualTo | _: GreaterThan | _: GreaterThanOrEqual |
         _: LessThan | _: LessThanOrEqual | _: In |
         _: StringStartsWith | _: IsNull | _: IsNotNull =>
      true
    case _ =>
      false
  }
}
| andrwng/kudu | java/kudu-spark/src/main/scala/org/apache/kudu/spark/kudu/DefaultSource.scala | Scala | apache-2.0 | 14,314 |
package org.openurp.edu.eams.teach.election.service
import org.beangle.commons.dao.query.builder.Condition
import org.beangle.commons.event.Event
import org.beangle.commons.text.i18n.Message
import org.beangle.security.blueprint.User
import org.openurp.edu.eams.base.CourseUnit
import org.openurp.base.Semester
import org.openurp.code.person.Gender
import org.openurp.edu.base.Adminclass
import org.openurp.edu.base.Project
import org.openurp.edu.base.Student
import org.openurp.edu.base.Teacher
import org.openurp.edu.eams.system.msg.service.SystemMessageService
import org.openurp.edu.eams.teach.election.model.Enum.AssignStdType
import org.openurp.edu.eams.teach.election.service.context.CourseTakeStat
import org.openurp.edu.teach.lesson.CourseTake
import org.openurp.edu.teach.lesson.Lesson
import org.openurp.edu.eams.web.util.OutputObserver
/**
 * Service operations around course-taking (enrollment) records.
 *
 * NOTE(review): this is a pure interface; the summaries below are inferred
 * from signatures only — confirm against the concrete implementation.
 */
trait CourseTakeService {
  // Publishes an event — presumably to notify enrollment listeners; verify.
  def publish(event: Event): Unit
  // Assigns students to the given lessons for a semester, reporting progress
  // through the observer.
  def assignStds(lessons: Iterable[Lesson],
                 `type`: AssignStdType,
                 semester: Semester,
                 observer: OutputObserver): Unit
  def assignStds(tasks: Iterable[Lesson], semester: Semester, observer: OutputObserver): Unit
  // Lookups of course-take records by student(s)/semester(s); the `week`
  // overload presumably restricts to a specific teaching week.
  def getCourseTakes(students: Iterable[Student], semester: Semester): Map[Student, List[CourseTake]]
  def getCourseTakes(student: Student, semester: Semester, week: Int): List[CourseTake]
  def getCourseTakes(student: Student, semesters: Semester*): List[CourseTake]
  // Builds a timetable grid; the 2-D array is presumably indexed by
  // [day][course unit] — confirm against callers.
  def getCourseTable(student: Student,
                     semester: Semester,
                     week: Int,
                     units: List[CourseUnit]): Array[Array[List[CourseTake]]]
  def getCourseTable(courseTakes: List[CourseTake], units: List[CourseUnit]): Array[Array[List[CourseTake]]]
  // Enrolls student(s) into lessons; returned messages presumably describe
  // failures/conflicts. `unCheckTimeConflict` disables time-conflict checking.
  def election(student: Student,
               existedCourseTakes: Iterable[CourseTake],
               lessonCollection: Iterable[Lesson],
               unCheckTimeConflict: Boolean): List[Message]
  def election(students: Iterable[Student],
               electedCourseTakes: Iterable[CourseTake],
               lesson: Lesson,
               unCheckTimeConflict: Boolean): List[Message]
  // Removes the given course-take records on behalf of `sender`.
  def withdraw(courseTakes: List[CourseTake], sender: User): List[Message]
  def filter(amount: Int, takes: List[CourseTake], params: Map[String, Any]): List[Message]
  def getCourseTakesByTeacher(teacher: Teacher,
                              semester: Semester,
                              weekCondition: Condition,
                              project: Project): List[CourseTake]
  def getCourseTakesByAdminclass(semester: Semester,
                                 weekCondition: Condition,
                                 project: Project,
                                 adminclasses: Adminclass*): List[CourseTake]
  def getCourseTakesByAdminclass(semester: Semester,
                                 weekCondition: Condition,
                                 project: Project,
                                 adminclasses: Iterable[Adminclass]): List[CourseTake]
  def getCourseTakesByAdminclassId(semester: Semester,
                                   weekCondition: Condition,
                                   project: Project,
                                   adminclassIds: java.lang.Integer*): List[CourseTake]
  def getSystemMessageService(): SystemMessageService
  // Gender-based enrollment statistics, keyed by a String label.
  def stateGender(project: Project, semester: Semester, ids: java.lang.Long*): List[CourseTakeStat[String]]
  def stateGender(project: Project, semester: List[Semester], ids: java.lang.Long*): List[CourseTakeStat[String]]
  def stateGender(project: Project,
                  genders: List[Gender],
                  semesters: List[Semester],
                  ids: java.lang.Long*): List[CourseTakeStat[String]]
}
| openurp/edu-eams-webapp | election/src/main/scala/org/openurp/edu/eams/teach/election/service/CourseTakeService.scala | Scala | gpl-3.0 | 3,303 |
package no.uio.musit.models
import play.api.libs.json._
case class PagedResult[A](totalMatches: Int, matches: Seq[A])
object PagedResult {

  /**
   * JSON serializer for [[PagedResult]]: emits an object with the fields
   * "totalMatches" and "matches", the latter serialized with the element
   * writer in scope.
   */
  implicit def PageResultWrites[T](implicit fmt: Writes[T]): Writes[PagedResult[T]] =
    new Writes[PagedResult[T]] {
      override def writes(pr: PagedResult[T]) = {
        val serializedMatches = Json.toJson[Seq[T]](pr.matches)
        Json.obj(
          "totalMatches" -> pr.totalMatches,
          "matches" -> serializedMatches
        )
      }
    }
}
| MUSIT-Norway/musit | musit-models/src/main/scala/no/uio/musit/models/PagedResult.scala | Scala | gpl-2.0 | 427 |
package net.nomadicalien.twitter.service
import net.nomadicalien.twitter.models.{ApplicationError, Tweet}
import net.nomadicalien.twitter.repository.{ClassPathEmojiRepository, EmojiRepository}
import scala.annotation.tailrec
/** Computes per-emoji occurrence statistics for a tweet. */
trait TweetService {
  // Returns a map from emoji short-name to its occurrence count in the
  // tweet's text, or an ApplicationError from the underlying emoji source.
  // Emojis with zero occurrences are omitted (see TweetServiceInterpreter).
  def findEmojisCount(tweet: Tweet): Either[ApplicationError, Map[String, Int]]
}
private[service] trait TweetServiceInterpreter extends TweetService {
  import cats.implicits._

  /** Source of known emoji definitions (injected by the concrete object). */
  def emojiRepository: EmojiRepository

  /**
   * Counts occurrences of every known emoji in the tweet's text.
   * Emojis that do not occur are omitted from the result map.
   */
  def findEmojisCount(tweet: Tweet): Either[ApplicationError, Map[String, Int]] = {
    val tweetText = tweet.text
    for {
      textEmojis <- emojiRepository.getTextEmojis
    } yield {
      textEmojis.foldLeft(Map.empty[String, Int]) {
        case (emojiMap, emoji) =>
          val shortName = emoji.short_name
          // Emojis without a text representation count as zero occurrences.
          val emojiCount = emoji.text.map(findEmojiCount(tweetText, _)).getOrElse(0)
          if (emojiCount > 0)
            emojiMap.updated(shortName, emojiCount)
          else
            emojiMap
      }
    }
  }

  /**
   * Counts non-overlapping occurrences of `emoji` in `text`.
   *
   * Returns 0 for an empty `emoji`: `indexOf("")` matches at every index with
   * zero advance, which previously caused an infinite loop.
   */
  def findEmojiCount(text: String, emoji: String): Int = {
    val emojiLength = emoji.length
    if (emojiLength == 0) {
      0
    } else {
      @tailrec def loop(from: Int, count: Int): Int = {
        val found = text.indexOf(emoji, from)
        if (found < 0) count
        else loop(found + emojiLength, count + 1)
      }
      loop(0, 0)
    }
  }
}
// Default TweetService instance wired to the classpath-backed emoji repository.
object TweetService extends TweetServiceInterpreter {
  lazy val emojiRepository: EmojiRepository = ClassPathEmojiRepository
}
| BusyByte/twitter-statistics | src/main/scala/services.scala | Scala | apache-2.0 | 1,650 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding
import com.twitter.algebird.monad.Reader
import com.twitter.chill.config.{ ScalaAnyRefMapConfig, ConfiguredInstantiator }
import cascading.pipe.assembly.AggregateBy
import cascading.flow.{ Flow, FlowDef, FlowProps, FlowListener, FlowStepListener, FlowSkipStrategy, FlowStepStrategy }
import cascading.pipe.Pipe
import cascading.property.AppProps
import cascading.tuple.collect.SpillableProps
import cascading.stats.CascadingStats
import com.twitter.scalding.reducer_estimation.EstimatorConfig
import org.apache.hadoop.io.serializer.{ Serialization => HSerialization }
import org.apache.hadoop.mapred.JobConf
import org.slf4j.LoggerFactory
//For java -> scala implicits on collections
import scala.collection.JavaConversions._
import scala.concurrent.{ Future, Promise }
import scala.util.Try
import java.io.{ BufferedWriter, File, FileOutputStream, OutputStreamWriter }
import java.util.{ Calendar, UUID }
import java.util.concurrent.{ Executors, TimeUnit, ThreadFactory, Callable, TimeoutException }
import java.util.concurrent.atomic.AtomicInteger
object Job {
  /**
   * Use reflection to create the job by name. We use the thread's
   * context classloader so that classes in the submitted jar and any
   * jars included via -libjar can be found.
   *
   * Throws ClassNotFoundException if `jobName` is not on the context
   * classpath, and NoSuchMethodException if the class lacks a
   * single-Args constructor.
   */
  def apply(jobName: String, args: Args): Job = {
    Class.forName(jobName, true, Thread.currentThread().getContextClassLoader)
      .getConstructor(classOf[Args])
      .newInstance(args)
      .asInstanceOf[Job]
  }
}
/**
* Job is a convenience class to make using Scalding easier.
* Subclasses of Job automatically have a number of nice implicits to enable more concise
* syntax, including:
* conversion from Pipe, Source or Iterable to RichPipe
* conversion from Source or Iterable to Pipe
* conversion to collections or Tuple[1-22] to cascading.tuple.Fields
*
* Additionally, the job provides an implicit Mode and FlowDef so that functions that
* register starts or ends of a flow graph, specifically anything that reads or writes data
* on Hadoop, has the needed implicits available.
*
* If you want to write code outside of a Job, you will want to either:
*
* make all methods that may read or write data accept implicit FlowDef and Mode parameters.
*
* OR:
*
* write code that rather than returning values, it returns a (FlowDef, Mode) => T,
* these functions can be combined Monadically using algebird.monad.Reader.
*/
class Job(val args: Args) extends FieldConversions with java.io.Serializable {
  // Set specific Mode
  implicit def mode: Mode = Mode.getMode(args).getOrElse(sys.error("No Mode defined"))
  /**
   * Use this if a map or reduce phase takes a while before emitting tuples.
   */
  def keepAlive {
    val flowProcess = RuntimeStats.getFlowProcessForUniqueId(uniqueId)
    flowProcess.keepAlive
  }
  /**
   * you should never call this directly, it is here to make
   * the DSL work. Just know, you can treat a Pipe as a RichPipe
   * within a Job
   */
  implicit def pipeToRichPipe(pipe: Pipe): RichPipe = new RichPipe(pipe)
  /**
   * This implicit is to enable RichPipe methods directly on Source
   * objects, such as map/flatMap, etc...
   *
   * Note that Mappable is a subclass of Source, and Mappable already
   * has mapTo and flatMapTo BUT WITHOUT incoming fields used (see
   * the Mappable trait). This creates some confusion when using these methods
   * (this is an unfortunate mistake in our design that was not noticed until later).
   * To remove ambiguity, explicitly call .read on any Source that you begin
   * operating with a mapTo/flatMapTo.
   */
  implicit def sourceToRichPipe(src: Source): RichPipe = new RichPipe(src.read)
  // This converts an Iterable into a Pipe or RichPipe with index (int-based) fields
  implicit def toPipe[T](iter: Iterable[T])(implicit set: TupleSetter[T], conv: TupleConverter[T]): Pipe =
    IterableSource[T](iter)(set, conv).read
  implicit def iterableToRichPipe[T](iter: Iterable[T])(implicit set: TupleSetter[T], conv: TupleConverter[T]): RichPipe =
    RichPipe(toPipe(iter)(set, conv))
  // Override this if you want to change how the mapred.job.name is written in Hadoop
  def name: String = getClass.getName
  //This is the FlowDef used by all Sources this job creates
  // NOTE: @transient — the FlowDef does not survive serialization of the Job.
  @transient
  implicit protected val flowDef = {
    val fd = new FlowDef
    fd.setName(name)
    fd
  }
  // Do this before the job is submitted, because the flowDef is transient
  private[this] val uniqueId = UniqueID.getIDFor(flowDef)
  /**
   * Copy this job
   * By default, this uses reflection and the single argument Args constructor
   */
  def clone(nextargs: Args): Job =
    this.getClass
      .getConstructor(classOf[Args])
      .newInstance(Mode.putMode(mode, nextargs))
      .asInstanceOf[Job]
  /**
   * Implement this method if you want some other jobs to run after the current
   * job. These will not execute until the current job has run successfully.
   */
  def next: Option[Job] = None
  /**
   * Keep 100k tuples in memory by default before spilling
   * Turn this up as high as you can without getting OOM.
   *
   * This is ignored if there is a value set in the incoming jobConf on Hadoop
   */
  def defaultSpillThreshold: Int = 100 * 1000
  /** Override this to control how dates are parsed */
  implicit def dateParser: DateParser = DateParser.default
  // Generated the MD5 hex of the the bytes in the job classfile
  def classIdentifier: String = Config.md5Identifier(getClass)
  /**
   * This is the exact config that is passed to the Cascading FlowConnector.
   * By default:
   * if there are no spill thresholds in mode.config, we replace with defaultSpillThreshold
   * we overwrite io.serializations with ioSerializations
   * we overwrite cascading.tuple.element.comparator.default to defaultComparator
   * we add some scalding keys for debugging/logging
   *
   * Tip: override this method, call super, and ++ your additional
   * map to add or overwrite more options
   *
   * This returns Map[AnyRef, AnyRef] for compatibility with older code
   *
   * NOTE: calling this also mutates the global JVM system property
   * AppProps.APP_FRAMEWORKS as a side effect (see below).
   */
  def config: Map[AnyRef, AnyRef] = {
    val base = Config.empty
      .setListSpillThreshold(defaultSpillThreshold)
      .setMapSpillThreshold(defaultSpillThreshold)
      .setMapSideAggregationThreshold(defaultSpillThreshold)
    // This is setting a property for cascading/driven
    System.setProperty(AppProps.APP_FRAMEWORKS,
      String.format("scalding:%s", scaldingVersion))
    val modeConf = mode match {
      case h: HadoopMode => Config.fromHadoop(h.jobConf)
      case _ => Config.empty
    }
    val init = base ++ modeConf
    defaultComparator.map(init.setDefaultComparator)
      .getOrElse(init)
      .setSerialization(Right(classOf[serialization.KryoHadoop]), ioSerializations)
      .setScaldingVersion
      .setCascadingAppName(name)
      .setCascadingAppId(name)
      .setScaldingFlowClass(getClass)
      .setArgs(args)
      .maybeSetSubmittedTimestamp()._2
      .toMap.toMap // the second one is to lift from String -> AnyRef
  }
  /**
   * This is here so that Mappable.toIterator can find an implicit config
   */
  implicit protected def scaldingConfig: Config = Config.tryFrom(config).get
  def skipStrategy: Option[FlowSkipStrategy] = None
  /**
   * Specify a callback to run before the start of each flow step.
   *
   * Defaults to what Config.getReducerEstimator specifies.
   * @see ExecutionContext.buildFlow
   */
  def stepStrategy: Option[FlowStepStrategy[_]] = None
  private def executionContext: Try[ExecutionContext] =
    Config.tryFrom(config).map { conf =>
      ExecutionContext.newContext(conf)(flowDef, mode)
    }
  /**
   * combine the config, flowDef and the Mode to produce a flow
   */
  def buildFlow: Flow[_] =
    executionContext
      .flatMap(_.buildFlow)
      .map { flow =>
        listeners.foreach { flow.addListener(_) }
        stepListeners.foreach { flow.addStepListener(_) }
        skipStrategy.foreach { flow.setFlowSkipStrategy(_) }
        stepStrategy.foreach { flow.setFlowStepStrategy(_) }
        flow
      }
      .get
  // called before run
  // only override if you do not use flowDef
  def validate {
    FlowStateMap.validateSources(flowDef, mode)
  }
  // called after successfull run
  // only override if you do not use flowDef
  def clear {
    FlowStateMap.clear(flowDef)
  }
  // Persists flow stats (optionally to a JSON file) and prints custom counters.
  protected def handleStats(statsData: CascadingStats) {
    scaldingCascadingStats = Some(statsData)
    // TODO: Why the two ways to do stats? Answer: jank-den.
    if (args.boolean("scalding.flowstats")) {
      val statsFilename = args.getOrElse("scalding.flowstats", name + "._flowstats.json")
      val br = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(statsFilename), "utf-8"))
      br.write(JobStats(statsData).toJson)
      br.close
    }
    // Print custom counters unless --scalding.nocounters is used or there are no custom stats
    if (!args.boolean("scalding.nocounters")) {
      implicit val statProvider = statsData
      val jobStats = Stats.getAllCustomCounters
      if (!jobStats.isEmpty) {
        println("Dumping custom counters:")
        jobStats.foreach {
          case (counter, value) =>
            println("%s\\t%s".format(counter, value))
        }
      }
    }
  }
  // TODO design a better way to test stats.
  // This awful name is designed to avoid collision
  // with subclasses
  @transient
  private[scalding] var scaldingCascadingStats: Option[CascadingStats] = None
  /**
   * Save the Flow object after a run to allow clients to inspect the job.
   * @see HadoopPlatformJobTest
   */
  @transient
  private[scalding] var completedFlow: Option[Flow[_]] = None
  //Override this if you need to do some extra processing other than complete the flow
  // NOTE: blocks until the flow completes.
  def run: Boolean = {
    val flow = buildFlow
    flow.complete
    val statsData = flow.getFlowStats
    handleStats(statsData)
    completedFlow = Some(flow)
    statsData.isSuccessful
  }
  //override these to add any listeners you need
  def listeners: List[FlowListener] = Nil
  def stepListeners: List[FlowStepListener] = Nil
  /**
   * These are user-defined serializations IN-ADDITION to (but deduped)
   * with the required serializations
   */
  def ioSerializations: List[Class[_ <: HSerialization[_]]] = Nil
  /**
   * Override this if you want to customize comparisons/hashing for your job
   * the config method overwrites using this before sending to cascading
   * The one we use by default is needed used to make Joins in the
   * Fields-API more robust to Long vs Int differences.
   * If you only use the Typed-API, consider changing this to return None
   */
  def defaultComparator: Option[Class[_ <: java.util.Comparator[_]]] =
    Some(classOf[IntegralComparator])
  /**
   * This is implicit so that a Source can be used as the argument
   * to a join or other method that accepts Pipe.
   */
  implicit def read(src: Source): Pipe = src.read
  /**
   * This is only here for Java jobs which cannot automatically
   * access the implicit Pipe => RichPipe which makes: pipe.write( )
   * convenient
   */
  def write(pipe: Pipe, src: Source) { src.writeFrom(pipe) }
  /*
   * Need to be lazy to be used within pipes.
   * Single-threaded, so concurrent timeout calls are serialized.
   */
  private lazy val timeoutExecutor =
    Executors.newSingleThreadExecutor(new NamedPoolThreadFactory("job-timer", true))
  /*
   * Safely execute some operation within a deadline.
   *
   * TODO: once we have a mechanism to access FlowProcess from user functions, we can use this
   * function to allow long running jobs by notifying Cascading of progress.
   */
  def timeout[T](timeout: AbsoluteDuration)(t: => T): Option[T] = {
    val f = timeoutExecutor.submit(new Callable[Option[T]] {
      def call(): Option[T] = Some(t)
    });
    try {
      f.get(timeout.toMillisecs, TimeUnit.MILLISECONDS)
    } catch {
      case _: TimeoutException =>
        f.cancel(true)
        None
    }
  }
}
/**
 * A ThreadFactory that produces named, optionally-daemon threads at normal
 * priority. (Copied from util.core to avoid the dependency.)
 */
class NamedPoolThreadFactory(name: String, makeDaemons: Boolean) extends ThreadFactory {
  def this(name: String) = this(name, false)

  // All threads created by this factory live in a dedicated group nested
  // under the creating thread's group.
  val group = new ThreadGroup(Thread.currentThread().getThreadGroup(), name)
  // Monotonically increasing suffix: the first thread is named "<name>-1".
  val threadNumber = new AtomicInteger(1)

  def newThread(r: Runnable) = {
    val t = new Thread(group, r, s"$name-${threadNumber.getAndIncrement()}")
    t.setDaemon(makeDaemons)
    // Normalize priority in case the creating thread runs at a non-default one.
    if (t.getPriority != Thread.NORM_PRIORITY) t.setPriority(Thread.NORM_PRIORITY)
    t
  }
}
/**
 * Sets up an implicit dateRange to use in your sources and an implicit
 * timezone.
 * Example args: --date 2011-10-02 2011-10-04 --tz UTC
 * If no timezone is given, Pacific is assumed.
 */
trait DefaultDateRangeJob extends Job {
  //Get date implicits and PACIFIC and UTC vals.
  import DateOps._
  // Optionally take --tz argument, or use Pacific time. Derived classes may
  // override defaultTimeZone to change the default.
  def defaultTimeZone = PACIFIC
  implicit lazy val tz = args.optional("tz") match {
    case Some(tzn) => java.util.TimeZone.getTimeZone(tzn)
    case None => defaultTimeZone
  }
  // Optionally take a --period, which determines how many days each job runs over (rather
  // than over the whole date range)
  // --daily and --weekly are aliases for --period 1 and --period 7 respectively
  val period =
    if (args.boolean("daily"))
      1
    else if (args.boolean("weekly"))
      7
    else
      args.getOrElse("period", "0").toInt
  // Full range parsed from --date; lazy so `args` is populated first.
  lazy val (startDate, endDate) = {
    val DateRange(s, e) = DateRange.parse(args.list("date"))
    (s, e)
  }
  // When a period is given, this job only covers the first `period` days of
  // the full range; `next` then advances through the rest.
  implicit lazy val dateRange = DateRange(startDate, if (period > 0) startDate + Days(period) - Millisecs(1) else endDate)
  override def next: Option[Job] =
    if (period > 0) {
      val nextStartDate = startDate + Days(period)
      if (nextStartDate + Days(period - 1) > endDate)
        None // we're done
      else // return a new job with the new startDate
        Some(clone(args + ("date" -> List(nextStartDate.toString("yyyy-MM-dd"), endDate.toString("yyyy-MM-dd")))))
    } else
      None
}
// DefaultDateRangeJob with default time zone as UTC instead of Pacific.
// The --tz argument still overrides this default (see DefaultDateRangeJob.tz).
trait UtcDateRangeJob extends DefaultDateRangeJob {
  override def defaultTimeZone = DateOps.UTC
}
/**
 * This is a simple job that allows you to launch Execution[T]
 * instances using scalding.Tool and scald.rb. You cannot print
 * the graph.
 */
abstract class ExecutionJob[+T](args: Args) extends Job(args) {
  import scala.concurrent.{ Await, ExecutionContext => scEC }
  /**
   * To avoid serialization issues, this should not be a val, but a def,
   * and prefer to keep as much as possible inside the method.
   */
  def execution: Execution[T]
  /*
   * Override this to control the execution context used
   * to execute futures
   */
  protected def concurrentExecutionContext: scEC = scEC.global
  @transient private[this] val resultPromise: Promise[T] = Promise[T]()
  // Completed (at most once) with the outcome of run; see run below.
  def result: Future[T] = resultPromise.future
  override def buildFlow: Flow[_] =
    sys.error("ExecutionJobs do not have a single accessible flow. " +
      "You cannot print the graph as it may be dynamically built or recurrent")
  // Blocks (with no timeout) until the Execution completes; throws if it failed.
  final override def run = {
    val r = Config.tryFrom(config)
      .map { conf =>
        Await.result(execution.run(conf, mode)(concurrentExecutionContext),
          scala.concurrent.duration.Duration.Inf)
      }
    if (!resultPromise.tryComplete(r)) {
      // The test framework can call this more than once.
      println("Warning: run called more than once, should not happen in production")
    }
    // Force an exception if the run failed
    r.get
    true
  }
}
/*
 * this allows you to use ExecutionContext style, but wrap it in a job
 * val ecFn = { (implicit ec: ExecutionContext) =>
 * // do stuff here
 * };
 * class MyClass(args: Args) extends ExecutionContextJob(args) {
 * def job = ecFn
 * }
 * Now you can run it with Tool as a standard Job-framework style.
 * Only use this if you have an existing ExecutionContext style function
 * you want to run as a Job
 */
@deprecated("Use ExecutionJob", "2014-07-29")
abstract class ExecutionContextJob[+T](args: Args) extends Job(args) {
  /**
   * This can be assigned from a Function1:
   * def job = (ectxJob: (ExecutionContext => T))
   */
  def job: Reader[ExecutionContext, T]
  /**
   * This is the result of calling the job on the context for this job
   * you should NOT call this in the job Reader (or reference this class at all
   * in reader
   */
  @transient final lazy val result: Try[T] = ec.map(job(_)) // mutate the flowDef with the job
  private[this] final def ec: Try[ExecutionContext] =
    Config.tryFrom(config).map { conf => ExecutionContext.newContext(conf)(flowDef, mode) }
  // Forces `result` first so the job Reader has populated flowDef before planning.
  override def buildFlow: Flow[_] = {
    val forcedResult = result.get // make sure we have applied job once
    super.buildFlow
  }
}
/*
* Run a list of shell commands through bash in the given order. Return success
* when all commands succeed. Excution stops after the first failure. The
* failing command is printed to stdout.
*/
class ScriptJob(cmds: Iterable[String]) extends Job(Args("")) {
  /**
   * Runs each command through `bash -c` in order, stopping at the first one
   * that exits non-zero (which is printed to stdout). Returns true only when
   * every command succeeded; any thrown exception is printed and yields false.
   */
  override def run = {
    try {
      cmds.forall { cmd =>
        val exitStatus = new java.lang.ProcessBuilder("bash", "-c", cmd).start().waitFor()
        if (exitStatus != 0) {
          println(cmd + " failed, exitStatus: " + exitStatus)
          false
        } else {
          true
        }
      }
    } catch {
      case e: Exception =>
        e.printStackTrace
        false
    }
  }
}
| zirpins/scalding | scalding-core/src/main/scala/com/twitter/scalding/Job.scala | Scala | apache-2.0 | 18,263 |
package summingbird
import sbt._
import Keys._
import sbtassembly.Plugin._
import AssemblyKeys._
import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings
import com.typesafe.tools.mima.plugin.MimaKeys.previousArtifact
import scalariform.formatter.preferences._
import com.typesafe.sbt.SbtScalariform._
object SummingbirdBuild extends Build {
  /**
   * Rewrites the cross-version suffix used to resolve `dep`: 2.9.3 artifacts
   * are fetched under the 2.9.2 suffix and any 2.10.x under plain "2.10".
   */
  def withCross(dep: ModuleID) =
    dep cross CrossVersion.binaryMapped {
      case "2.9.3" => "2.9.2" // TODO: hack because twitter hasn't built things against 2.9.3
      case version if version startsWith "2.10" => "2.10" // TODO: hack because sbt is broken
      case x => x
    }
  /**
   * Picks the specs2 test dependency matching the Scala series.
   * NOTE(review): this match only covers 2.9.x and 2.10.x; any other
   * scalaVersion would throw a MatchError at project load time — confirm
   * before widening crossScalaVersions.
   */
  def specs2Import(scalaVersion: String) = scalaVersion match {
    case version if version startsWith "2.9" => "org.specs2" %% "specs2" % "1.12.4.1" % "test"
    case version if version startsWith "2.10" => "org.specs2" %% "specs2" % "1.13" % "test"
  }
def isScala210x(scalaVersion: String) = scalaVersion match {
case version if version startsWith "2.9" => false
case version if version startsWith "2.10" => true
}
  /**
   * Forces the full "2.10.3" suffix for `dep` when cross-building on any
   * 2.10.x Scala; other versions are left untouched.
   */
  def real210Version(dep: ModuleID) = dep cross CrossVersion.binaryMapped {
    case version if version startsWith "2.10" => "2.10.3"
    case x => x
  }
  // Base settings: sbt defaults plus MiMa binary-compat checks and scalariform.
  val extraSettings = Project.defaultSettings ++ mimaDefaultSettings ++ scalariformSettings

  // Settings applied to every summingbird sub-module (see module() below).
  val sharedSettings = extraSettings ++ Seq(
    organization := "com.twitter",
    version := "0.5.1",
    scalaVersion := "2.9.3",
    crossScalaVersions := Seq("2.9.3", "2.10.4"),
    // To support hadoop 1.x
    javacOptions ++= Seq("-source", "1.6", "-target", "1.6"),

    // javadoc does not understand -target, so strip the flag and its value
    // from the doc task's javac options.
    javacOptions in doc ~= { (options: Seq[String]) =>
      val targetPos = options.indexOf("-target")
      if(targetPos > -1) {
        options.take(targetPos) ++ options.drop(targetPos + 2)
      } else options
    },

    libraryDependencies ++= Seq(
      "junit" % "junit" % "4.11" % "test",
      "org.slf4j" % "slf4j-api" % slf4jVersion force(),
      "org.scalacheck" %% "scalacheck" % "1.10.0" % "test",
      // These satisfy scalding's log4j needs when in test mode
      "log4j" % "log4j" % "1.2.16" % "test",
      "org.slf4j" % "slf4j-log4j12" % slf4jVersion % "test"
    ),

    // specs2 version depends on the Scala series being built.
    libraryDependencies <+= scalaVersion(specs2Import(_)),

    libraryDependencies += "com.novocode" % "junit-interface" % "0.10" % "test",

    resolvers ++= Seq(
      Opts.resolver.sonatypeSnapshots,
      Opts.resolver.sonatypeReleases,
      "Clojars Repository" at "http://clojars.org/repo",
      "Conjars Repository" at "http://conjars.org/repo",
      "Twitter Maven" at "http://maven.twttr.com"
    ),

    parallelExecution in Test := true,

    scalacOptions ++= Seq(
      "-unchecked",
      "-deprecation",
      "-Xlint",
      "-Yresolve-term-conflict:package"
    ),

    // Publishing options:
    publishMavenStyle := true,
    publishArtifact in Test := false,
    pomIncludeRepository := { x => false },

    // Snapshots and releases go to the corresponding Sonatype repositories.
    publishTo <<= version { v =>
      Some(
        if (v.trim.toUpperCase.endsWith("SNAPSHOT"))
          Opts.resolver.sonatypeSnapshots
        else
          Opts.resolver.sonatypeStaging
          //"twttr" at "http://artifactory.local.twitter.com/libs-releases-local"
      )
    },

    pomExtra := (
      <url>https://github.com/twitter/summingbird</url>
      <licenses>
        <license>
          <name>Apache 2</name>
          <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
          <distribution>repo</distribution>
          <comments>A business-friendly OSS license</comments>
        </license>
      </licenses>
      <scm>
        <url>git@github.com:twitter/summingbird.git</url>
        <connection>scm:git:git@github.com:twitter/summingbird.git</connection>
      </scm>
      <developers>
        <developer>
          <id>oscar</id>
          <name>Oscar Boykin</name>
          <url>http://twitter.com/posco</url>
        </developer>
        <developer>
          <id>sritchie</id>
          <name>Sam Ritchie</name>
          <url>http://twitter.com/sritchie</url>
        </developer>
        <developer>
          <id>asinghal</id>
          <name>Ashutosh Singhal</name>
          <url>http://twitter.com/daashu</url>
        </developer>
      </developers>)
  )
  // Scalariform formatting rules shared across the build.
  lazy val formattingPreferences = {
    import scalariform.formatter.preferences._
    FormattingPreferences().
      setPreference(AlignParameters, false).
      setPreference(PreserveSpaceBeforeArguments, true)
  }
  // Root aggregate project: builds all sub-modules but runs no tests of its
  // own and publishes nothing itself.
  lazy val summingbird = Project(
    id = "summingbird",
    base = file("."),
    settings = sharedSettings ++ DocGen.publishSettings
  ).settings(
    test := { },
    publish := { }, // skip publishing for this root project.
    publishLocal := { }
  ).aggregate(
    summingbirdCore,
    summingbirdCoreJava,
    summingbirdBatch,
    summingbirdBatchHadoop,
    summingbirdOnline,
    summingbirdClient,
    summingbirdStorm,
    summingbirdStormTest,
    summingbirdStormJava,
    summingbirdScalding,
    summingbirdScaldingTest,
    summingbirdSpark,
    summingbirdBuilder,
    summingbirdChill,
    summingbirdExample
  )
  // Pinned versions of shared third-party dependencies, referenced by the
  // module definitions below.
  val dfsDatastoresVersion = "1.3.4"
  val bijectionVersion = "0.6.3"
  val algebirdVersion = "0.7.0"
  val scaldingVersion = "0.11.3rc1"
  val storehausVersion = "0.9.1"
  val utilVersion = "6.3.8"
  val chillVersion = "0.4.0"
  val tormentaVersion = "0.7.0"
  lazy val slf4jVersion = "1.6.6"
  /**
   * This returns the youngest jar we released that is compatible with
   * the current.
   */
  // Modules listed here are new and have no previous release to diff against.
  val unreleasedModules = Set[String]()

  // MiMa baseline: compare against the 0.5.0 release's _2.9.3 artifact of the
  // same sub-project, or None for unreleased modules.
  def youngestForwardCompatible(subProj: String) =
    Some(subProj)
      .filterNot(unreleasedModules.contains(_))
      .map { s => "com.twitter" % ("summingbird-" + s + "_2.9.3") % "0.5.0" }
  /**
   * Creates a sub-project "summingbird-<name>" rooted at the directory of the
   * same name, wired with the shared settings and its MiMa baseline artifact.
   */
  def module(name: String) = {
    val id = "summingbird-%s".format(name)
    Project(id = id, base = file(id), settings = sharedSettings ++ Seq(
      Keys.name := id,
      previousArtifact := youngestForwardCompatible(name))
    )
  }
  // summingbird-batch: depends on algebird, bijection and scalding-date.
  lazy val summingbirdBatch = module("batch").settings(
    libraryDependencies ++= Seq(
      "com.twitter" %% "algebird-core" % algebirdVersion,
      "com.twitter" %% "bijection-core" % bijectionVersion,
      "com.twitter" %% "scalding-date" % scaldingVersion
    )
  )

  // summingbird-chill: chill (kryo) serialization support, built on core + batch.
  lazy val summingbirdChill = module("chill").settings(
    libraryDependencies ++= Seq(
      "com.twitter" %% "chill" % chillVersion,
      "com.twitter" %% "chill-bijection" % chillVersion
    )
  ).dependsOn(
    summingbirdCore,
    summingbirdBatch
  )

  // summingbird-client: storehaus-based client, built on batch.
  lazy val summingbirdClient = module("client").settings(
    libraryDependencies ++= Seq(
      "com.twitter" %% "algebird-core" % algebirdVersion,
      "com.twitter" %% "algebird-util" % algebirdVersion,
      "com.twitter" %% "bijection-core" % bijectionVersion,
      "com.twitter" %% "storehaus-core" % storehausVersion,
      "com.twitter" %% "storehaus-algebra" % storehausVersion
    )
  ).dependsOn(summingbirdBatch)

  // summingbird-core: only needs algebird-core.
  lazy val summingbirdCore = module("core").settings(
    libraryDependencies += "com.twitter" %% "algebird-core" % algebirdVersion
  )

  // summingbird-core-java: reuses core's test sources as well as its classes.
  lazy val summingbirdCoreJava = module("core-java").dependsOn(
    summingbirdCore % "test->test;compile->compile"
  )
  // summingbird-online: storehaus/chill/util stack on top of core + batch.
  lazy val summingbirdOnline = module("online").settings(
    libraryDependencies ++= Seq(
      "com.twitter" %% "algebird-core" % algebirdVersion,
      "com.twitter" %% "algebird-util" % algebirdVersion,
      "com.twitter" %% "bijection-core" % bijectionVersion,
      "com.twitter" %% "storehaus-core" % storehausVersion,
      "com.twitter" %% "chill" % chillVersion,
      "com.twitter" %% "storehaus-algebra" % storehausVersion,
      withCross("com.twitter" %% "util-core" % utilVersion)
    )
  ).dependsOn(
    summingbirdCore % "test->test;compile->compile",
    summingbirdBatch
  )

  // summingbird-storm: storm itself is "provided" (supplied by the cluster);
  // tests are serialized since they spin up local topologies.
  lazy val summingbirdStorm = module("storm").settings(
    parallelExecution in Test := false,
    libraryDependencies ++= Seq(
      "com.twitter" %% "algebird-core" % algebirdVersion,
      "com.twitter" %% "bijection-core" % bijectionVersion,
      "com.twitter" %% "chill" % chillVersion,
      "com.twitter" % "chill-storm" % chillVersion,
      "com.twitter" %% "chill-bijection" % chillVersion,
      "com.twitter" %% "storehaus-core" % storehausVersion,
      "com.twitter" %% "storehaus-algebra" % storehausVersion,
      "com.twitter" %% "scalding-args" % scaldingVersion,
      "com.twitter" %% "tormenta-core" % tormentaVersion,
      withCross("com.twitter" %% "util-core" % utilVersion),
      "storm" % "storm" % "0.9.0-wip15" % "provided"
    )
  ).dependsOn(
    summingbirdCore % "test->test;compile->compile",
    summingbirdOnline,
    summingbirdChill,
    summingbirdBatch
  )

  // summingbird-storm-test: storm test helpers; also runs tests serially.
  lazy val summingbirdStormTest = module("storm-test").settings(
    parallelExecution in Test := false,
    libraryDependencies ++= Seq(
      "com.twitter" %% "algebird-core" % algebirdVersion,
      "com.twitter" %% "bijection-core" % bijectionVersion,
      "com.twitter" %% "storehaus-core" % storehausVersion,
      "com.twitter" %% "storehaus-algebra" % storehausVersion,
      "com.twitter" %% "tormenta-core" % tormentaVersion,
      withCross("com.twitter" %% "util-core" % utilVersion),
      "storm" % "storm" % "0.9.0-wip15" % "provided"
    )
  ).dependsOn(
    summingbirdCore % "test->test;compile->compile",
    summingbirdStorm
  )

  // summingbird-storm-java: java API over the storm platform.
  lazy val summingbirdStormJava = module("storm-java").settings(
    libraryDependencies ++= Seq(
      "storm" % "storm" % "0.9.0-wip15" % "provided"
    )
  ).dependsOn(
    summingbirdCore % "test->test;compile->compile",
    summingbirdCoreJava % "test->test;compile->compile",
    summingbirdStorm % "test->test;compile->compile"
  )
  // Dependencies common to summingbird-scalding on every Scala series.
  val scaldingDeps = Seq(
    "com.backtype" % "dfs-datastores" % dfsDatastoresVersion,
    "com.backtype" % "dfs-datastores-cascading" % dfsDatastoresVersion,
    "com.twitter" %% "algebird-core" % algebirdVersion,
    "com.twitter" %% "algebird-util" % algebirdVersion,
    "com.twitter" %% "algebird-bijection" % algebirdVersion,
    "com.twitter" %% "bijection-json" % bijectionVersion,
    "com.twitter" %% "chill" % chillVersion,
    "com.twitter" % "chill-hadoop" % chillVersion,
    "com.twitter" %% "chill-bijection" % chillVersion,
    "commons-lang" % "commons-lang" % "2.6",
    "com.twitter" %% "scalding-core" % scaldingVersion exclude("org.scalacheck", "scalacheck_2.10"),
    "com.twitter" %% "scalding-commons" % scaldingVersion exclude("ch.qos.logback", "logback-core") exclude("ch.qos.logback", "logback-classic")
  )

  // Extra storehaus/shapeless dependencies that only resolve on Scala 2.10.x.
  def scaldingDeps210Extras = Seq(
    "com.twitter" %% "storehaus-cascading" % storehausVersion,
    "com.twitter" %% "storehaus-cassandra" % storehausVersion exclude("ch.qos.logback", "logback-core") exclude("ch.qos.logback", "logback-classic"),
    real210Version("com.chuusai" %% "shapeless" % "2.0.0")
  )

  def buildScaldingDeps(scalaVersion: String) = if (!isScala210x(scalaVersion)) { scaldingDeps } else { scaldingDeps ++ scaldingDeps210Extras }

  // On non-2.10 builds the Storehaus-related sources are excluded from
  // compilation since their dependencies (scaldingDeps210Extras) are absent.
  def buildScaldingExcludeFilter(scalaVersion: String) = if (isScala210x(scalaVersion)) {
    HiddenFileFilter
  } else {
    HiddenFileFilter || "storehaus" || "Storehaus*.scala"
  }

  // summingbird-scalding: dependency list and source filter vary with Scala version.
  lazy val summingbirdScalding = module("scalding").settings(
    libraryDependencies ++= buildScaldingDeps(scalaVersion.value),
    excludeFilter in unmanagedSources := buildScaldingExcludeFilter(scalaVersion.value)
  ).dependsOn(
    summingbirdCore % "test->test;compile->compile",
    summingbirdChill,
    summingbirdBatchHadoop,
    summingbirdBatch
  )
  // summingbird-scalding-test: pins scalacheck to 1.10.0 via dependencyOverrides
  // so transitive versions cannot drift.
  lazy val summingbirdScaldingTest = module("scalding-test").settings(
    dependencyOverrides += "org.scalacheck" %% "scalacheck" % "1.10.0",
    libraryDependencies ++= Seq(
      "org.scalacheck" %% "scalacheck" % "1.10.0"
    )
  ).dependsOn(
    summingbirdCore % "test->test;compile->compile",
    summingbirdChill,
    summingbirdBatchHadoop,
    summingbirdScalding
  )

  // summingbird-batch-hadoop: dfs-datastores plus algebird/bijection/scalding-date.
  lazy val summingbirdBatchHadoop = module("batch-hadoop").settings(
    libraryDependencies ++= Seq(
      "com.backtype" % "dfs-datastores" % dfsDatastoresVersion,
      "com.twitter" %% "algebird-core" % algebirdVersion,
      "com.twitter" %% "bijection-json" % bijectionVersion,
      "com.twitter" %% "scalding-date" % scaldingVersion
    )
  ).dependsOn(
    summingbirdCore % "test->test;compile->compile",
    summingbirdBatch
  )

  // summingbird-builder: ties core, storm and scalding together; storm provided.
  lazy val summingbirdBuilder = module("builder").settings(
    libraryDependencies ++= Seq(
      "storm" % "storm" % "0.9.0-wip15" % "provided"
    )
  ).dependsOn(
    summingbirdCore,
    summingbirdStorm,
    summingbirdScalding
  )

  // summingbird-example: bundles storm (non-provided, with logging exclusions)
  // plus tormenta-twitter and storehaus-memcache for the demo.
  lazy val summingbirdExample = module("example").settings(
    libraryDependencies ++= Seq(
      "log4j" % "log4j" % "1.2.16",
      "org.slf4j" % "slf4j-log4j12" % slf4jVersion,
      "storm" % "storm" % "0.9.0-wip15" exclude("org.slf4j", "log4j-over-slf4j") exclude("ch.qos.logback", "logback-classic"),
      "com.twitter" %% "bijection-netty" % bijectionVersion,
      "com.twitter" %% "tormenta-twitter" % tormentaVersion,
      "com.twitter" %% "storehaus-memcache" % storehausVersion
    )
  ).dependsOn(summingbirdCore, summingbirdCoreJava, summingbirdStorm, summingbirdStormJava)
  // Assembly merge strategies for the spark fat jar: duplicate classes from
  // overlapping minlog/commons/jasper jars take the first copy found,
  // log4j.properties files are concatenated, xsd/dtd schemas take the first
  // copy, and everything else falls through to the previous strategy.
  lazy val sparkAssemblyMergeSettings = assemblySettings :+ {
    mergeStrategy in assembly <<= (mergeStrategy in assembly) { (old) =>
      {
        //case PathList("org", "w3c", xs @ _*) => MergeStrategy.first
        //case "about.html" => MergeStrategy.discard
        case PathList("com", "esotericsoftware", "minlog", xs @ _*) => MergeStrategy.first
        case PathList("org", "apache", "commons", "beanutils", xs @ _*) => MergeStrategy.first
        case PathList("org", "apache", "commons", "collections", xs @ _*) => MergeStrategy.first
        case PathList("org", "apache", "jasper", xs @ _*) => MergeStrategy.first
        case "log4j.properties" => MergeStrategy.concat
        case x if x.endsWith(".xsd") || x.endsWith(".dtd") => MergeStrategy.first
        case x => old(x)
      }
    }
  }

  // spark-core is "provided" (supplied by the spark runtime).
  val sparkDeps = Seq(
    "com.twitter" %% "algebird-core" % algebirdVersion,
    "com.twitter" %% "algebird-util" % algebirdVersion,
    "com.twitter" %% "algebird-bijection" % algebirdVersion,
    "com.twitter" %% "bijection-json" % bijectionVersion,
    "com.twitter" %% "chill" % chillVersion,
    "com.twitter" % "chill-hadoop" % chillVersion,
    "com.twitter" %% "chill-bijection" % chillVersion,
    "commons-lang" % "commons-lang" % "2.6",
    "commons-httpclient" % "commons-httpclient" % "3.1",
    "org.apache.spark" %% "spark-core" % "0.9.0-incubating" % "provided"
  )

  // Spark artifacts exist only for 2.10.x; on other versions no deps are added.
  def buildSparkDeps(scalaVersion: String) = if (isScala210x(scalaVersion)) sparkDeps else Seq()

  // summingbird-spark: compile/doc/test/publish are all disabled unless the
  // build is running on Scala 2.10.x.
  lazy val summingbirdSpark = module("spark").settings(
    resolvers += "Typesafe Repo" at "http://repo.typesafe.com/typesafe/releases/",
    skip in compile := !isScala210x(scalaVersion.value),
    skip in doc := !isScala210x(scalaVersion.value),
    skip in test := !isScala210x(scalaVersion.value),
    publishArtifact := isScala210x(scalaVersion.value),
    libraryDependencies ++= buildSparkDeps(scalaVersion.value)
  )
  .settings(sparkAssemblyMergeSettings:_*)
  .dependsOn(
    summingbirdCore % "test->test;compile->compile",
    summingbirdChill
  )
}
| zirpins/summingbird | project/Build.scala | Scala | apache-2.0 | 15,240 |
/*
* Copyright 2016 University of Basel, Graphics and Vision Research Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scalismo.color
import scalismo.geometry.{EuclideanVector, NDSpace}
import scala.annotation.tailrec
/** vector space operations for a pixel type, necessary for filtering */
/**
 * Vector space operations for a pixel type, necessary for filtering.
 *
 * @tparam A the pixel/color type; specialized for Double and Float to avoid boxing
 */
trait ColorSpaceOperations[@specialized(Double, Float) A] {

  /** blending using vector space operations: l*v + (1-l)*w */
  def blend(v: A, w: A, l: Double): A = add(scale(v, l), scale(w, 1.0 - l))

  /** add two pixels */
  def add(pix1: A, pix2: A): A

  /** scalar multiplication */
  def scale(pix: A, l: Double): A

  /** dot product */
  def dot(pix1: A, pix2: A): Double

  /** channel-wise multiplication */
  def multiply(pix1: A, pix2: A): A

  /** zero element */
  def zero: A

  /** check if zero (by equality with the zero element) */
  def isZero(pix: A): Boolean = zero == pix

  /** squared norm, derived from dot product */
  def normSq(pix: A): Double = dot(pix, pix)

  /** dimensionality of underlying vector space */
  def dimensionality: Int

  /**
   * linear combination of vectors; tail-recursive: each step folds the
   * accumulated sum (with coefficient 1.0) into the next weighted term
   */
  @tailrec
  final def linearCombination(first: (A, Double), rest: (A, Double)*): A = {
    val (v, f: Double) = first
    if (rest.nonEmpty) {
      val (v1, f1: Double) = rest.head
      val combined = add(scale(v, f), scale(v1, f1))
      linearCombination((combined, 1.0), rest.tail: _*) // Seq to varargs: _*
    } else {
      scale(v, f)
    }
  }
}
/** Predefined [[ColorSpaceOperations]] instances and operator syntax. */
object ColorSpaceOperations {

  /** implicit implementation of ColorSpaceOperations for plain Float */
  implicit val floatColorSpace: ColorSpaceOperations[Float] = new ColorSpaceOperations[Float] {
    override def add(pix1: Float, pix2: Float): Float = pix1 + pix2
    override def multiply(pix1: Float, pix2: Float): Float = pix1 * pix2
    override def dot(pix1: Float, pix2: Float): Double = pix1 * pix2
    override def scale(pix: Float, l: Double): Float = (pix * l).toFloat
    override val zero: Float = 0f
    override val dimensionality = 1
  }

  /** implicit implementation of ColorSpaceOperations for plain Double */
  implicit val doubleColorSpace: ColorSpaceOperations[Double] = new ColorSpaceOperations[Double] {
    override def add(pix1: Double, pix2: Double): Double = pix1 + pix2
    override def multiply(pix1: Double, pix2: Double): Double = pix1 * pix2
    override def dot(pix1: Double, pix2: Double): Double = pix1 * pix2
    override def scale(pix: Double, l: Double): Double = pix * l
    override val zero: Double = 0.0
    override val dimensionality = 1
  }

  /** implementation for vectors of arbitrary dimension */
  implicit def vecColorSpaceND[D: NDSpace]: ColorSpaceOperations[EuclideanVector[D]] =
    new ColorSpaceOperations[EuclideanVector[D]] {
      override def add(pix1: EuclideanVector[D], pix2: EuclideanVector[D]): EuclideanVector[D] = pix1 + pix2
      override def multiply(pix1: EuclideanVector[D], pix2: EuclideanVector[D]): EuclideanVector[D] = pix1 :* pix2
      override def dot(pix1: EuclideanVector[D], pix2: EuclideanVector[D]): Double = pix1 dot pix2
      override def scale(pix: EuclideanVector[D], l: Double): EuclideanVector[D] = pix * l
      override val zero: EuclideanVector[D] = EuclideanVector.zeros[D]
      override val dimensionality: Int = NDSpace[D].dimensionality
    }

  /**
   * implementation for type A wrapped in Option[A]:
   * operations on None propagate None; dot with a None operand is 0.0
   */
  implicit def optionSpace[A](implicit ops: ColorSpaceOperations[A]): ColorSpaceOperations[Option[A]] =
    new ColorSpaceOperations[Option[A]] {
      override def add(pix1: Option[A], pix2: Option[A]): Option[A] = for (p1 <- pix1; p2 <- pix2) yield ops.add(p1, p2)
      override def multiply(pix1: Option[A], pix2: Option[A]): Option[A] =
        for (p1 <- pix1; p2 <- pix2) yield ops.multiply(p1, p2)
      override def dot(pix1: Option[A], pix2: Option[A]): Double =
        (for (p1 <- pix1; p2 <- pix2) yield ops.dot(p1, p2)).getOrElse(0.0)
      override def scale(pix: Option[A], l: Double): Option[A] = for (p1 <- pix) yield ops.scale(p1, l)
      override val zero: Option[A] = Some(ops.zero)
      override val dimensionality: Int = ops.dimensionality
    }

  /** implicit conversions to work with infix operator notations for ColorSpaceOperations[A] */
  object implicits {
    import scala.language.implicitConversions

    implicit def toVector[A](color: A)(implicit space: ColorSpaceOperations[A]): ColorSpaceVector[A] =
      new ColorSpaceVector[A](color)

    implicit def toColor[A](vector: ColorSpaceVector[A]): A = vector.color

    /** operator wrapper delegating each symbol to the underlying space */
    class ColorSpaceVector[A](val color: A)(implicit space: ColorSpaceOperations[A]) {
      val dimensionality: Int = space.dimensionality
      def +(other: A): A = space.add(color, other)
      def -(other: A): A = space.add(color, space.scale(other, -1.0))
      def *(factor: Double): A = space.scale(color, factor)
      def /(factor: Double): A = space.scale(color, 1.0 / factor)
      def *:(factor: Double): A = space.scale(color, factor)
      def dot(other: A): Double = space.dot(color, other)
      def multiply(other: A): A = space.multiply(color, other)
      def x(other: A): A = space.multiply(color, other)
      def normSq: Double = space.normSq(color)
      def unary_- : A = space.scale(color, -1.0)
      def isZero: Boolean = space.isZero(color)
    }
  }
}
| unibas-gravis/scalismo | src/main/scala/scalismo/color/ColorSpaceOperations.scala | Scala | apache-2.0 | 5,790 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.common
/**
 * Indicates the client has requested a range no longer available on the server
 */
class OffsetOutOfRangeException(message: String) extends RuntimeException(message) {
  // No-detail constructor: message stays null, as with RuntimeException().
  def this() = this(null)
}
| unix1986/universe | tool/kafka-0.8.1.1-src/core/src/main/scala/kafka/common/OffsetOutOfRangeException.scala | Scala | bsd-2-clause | 1,028 |
package com.sksamuel.elastic4s.requests
import com.sksamuel.elastic4s.requests.mappings.{BasicField, CompletionField, DenseVectorField, GeoshapeField, JoinField, KeywordField, NestedField, ObjectField, RangeField, SearchAsYouTypeField, TextField}
import com.sksamuel.elastic4s.requests.script.{Script, ScriptField}
/**
 * DSL entry points for creating field definitions used in index mappings.
 * Each builder pairs a field name with its elasticsearch datatype string.
 */
trait TypesApi {

  // string datatypes
  def keywordField(name: String): KeywordField = KeywordField(name)
  def textField(name: String): TextField = TextField(name)
  def searchAsYouType(name: String): SearchAsYouTypeField = SearchAsYouTypeField(name)

  // numeric datatypes
  def byteField(name: String): BasicField = BasicField(name, "byte")
  def doubleField(name: String): BasicField = BasicField(name, "double")
  def floatField(name: String): BasicField = BasicField(name, "float")
  def halfFloatField(name: String): BasicField = BasicField(name, "half_float")
  def intField(name: String): BasicField = BasicField(name, "integer")
  def longField(name: String): BasicField = BasicField(name, "long")
  def scaledFloatField(name: String): BasicField = BasicField(name, "scaled_float")
  def shortField(name: String): BasicField = BasicField(name, "short")

  def booleanField(name: String): BasicField = BasicField(name, "boolean")
  def binaryField(name: String): BasicField = BasicField(name, "binary")
  def dateField(name: String): BasicField = BasicField(name, "date")

  // geo datatypes
  def geopointField(name: String): BasicField = BasicField(name, "geo_point")
  def geoshapeField(name: String): GeoshapeField = GeoshapeField(name)

  // range
  def integerRangeField(name: String): RangeField = RangeField(name, "integer_range")
  def floatRangeField(name: String): RangeField = RangeField(name, "float_range")
  def longRangeField(name: String): RangeField = RangeField(name, "long_range")
  def doubleRangeField(name: String): RangeField = RangeField(name, "double_range")
  def dateRangeField(name: String): RangeField = RangeField(name, "date_range")
  def ipRangeField(name: String): RangeField = RangeField(name, "ip_range")

  def flattenedField(name: String): BasicField = BasicField(name, "flattened")

  // complex datatypes
  def nestedField(name: String): NestedField = NestedField(name)
  def objectField(name: String): ObjectField = ObjectField(name)

  // specialized
  def completionField(name: String): CompletionField = CompletionField(name)
  def ipField(name: String): BasicField = BasicField(name, "ip")
  def tokenCountField(name: String): BasicField = BasicField(name, "token_count")
  def percolatorField(name: String): BasicField = BasicField(name, "percolator")
  def joinField(name: String): JoinField = JoinField(name)
  def denseVectorField(name: String, dims: Int): DenseVectorField = DenseVectorField(name, dims)

  def scriptField(name: String, script: String): ScriptField = ScriptField(name, script)
  def scriptField(name: String, script: Script): ScriptField = ScriptField(name, script)

  // Fixed: the deprecation messages previously read "scriptField(name script)"
  // (missing comma); they now point users at the two-argument overloads above.
  @deprecated("use scriptField(name, script)")
  def scriptField(name: String): ExpectsScript = ExpectsScript(name)

  /** Intermediate builder kept for source compatibility with the old DSL. */
  case class ExpectsScript(name: String) {
    @deprecated("use scriptField(name, script)")
    def script(script: String): ScriptField = ScriptField(name, script)
    @deprecated("use scriptField(name, script)")
    def script(script: Script): ScriptField = ScriptField(name, script)
  }
}
| stringbean/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/TypesApi.scala | Scala | apache-2.0 | 3,441 |
/**
* Copyright (C) 2014 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.ingestion.provider
object Provider extends ZkProviderComponent {

  // Configuration section and key names.
  val providerConfigName = "provider"
  val repositoryField = "repository"
  val pathField = "path"
  val overwriteField = "overwrite"

  // Fallbacks used when the configuration does not supply a value.
  val defaultRepositoryType = "zookeeper"
  val defaultPath = "/tmp/workflow"
  val defaultOverwrite = "false"

  /** Downloads the given workflow's files from the configured repository. */
  def downloadFiles(workflowId: String) = {
    val repo = repositoryType
    if (repo == "zookeeper") zkProvider.downloadFiles(workflowId, path, overwrite.toBoolean)
    else println(s"Repository $repo not supported")
  }

  /** Uploading is not implemented; always prints a notice. */
  def uploadFiles(workflowId: String) =
    println("Upload files is not supported yet")

  private def repositoryType: String = getFromProviderConfig(repositoryField, defaultRepositoryType)

  private def path: String = getFromProviderConfig(pathField, defaultPath)

  private def overwrite: String = getFromProviderConfig(overwriteField, defaultOverwrite)

  // Reads provider.<field> from the component configuration, falling back to
  // the supplied default when the section or key is absent.
  private def getFromProviderConfig(field: String, default: String): String = {
    val configured = config.getConfig(providerConfigName).flatMap(_.getString(field))
    configured.getOrElse(default)
  }
}
| Stratio/Ingestion | tools/src/main/scala/com/stratio/ingestion/provider/Provider.scala | Scala | apache-2.0 | 1,708 |
// Negative compiler test: each selection on the erased value `foo` below
// must be rejected (see the per-line "// error" markers).
object Test {
  erased def foo: Foo = new Foo
  foo.x() // error
  foo.y // error
  foo.z // error
}

class Foo {
  def x(): String = "abc"
  def y: String = "abc"
  val z: String = "abc"
}
/*
* Copyright 2015 the original author or authors.
* @https://github.com/scouter-project/scouter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scouter.server.tagcnt.next;
import java.io.File
import java.io.FileNotFoundException
import java.io.IOException
import java.io.RandomAccessFile
import scouter.io.DataInputX
import scouter.io.DataOutputX
import scouter.util.IClose;
import scouter.util.FileUtil
// Immutable view of one record read from the key file.
class ITEM(_pos24h: Array[Long], _key: Array[Byte], _link: Long, _next: Long, _count: Float) {
  val pos24h = _pos24h // 24 per-hour value-file positions
  val key = _key // raw key bytes
  val link = _link // position of the next record in the same hash chain
  val next = _next // file offset immediately after this record
  val count = _count // accumulated total count for this key
}
/**
 * Random-access record file holding tag-count keys.
 *
 * Record layout (offsets relative to the record position):
 *   bytes 0..4   : 5-byte link to the next record in the same hash chain
 *   bytes 5..8   : float total count for this key
 *   bytes 9..128 : 24 x 5-byte per-hour value positions
 *   bytes 129..  : 2-byte length-prefixed key bytes
 *
 * Each operation seeks before reading/writing, so every method holds the
 * monitor of `this` to keep its seek/IO pair atomic.
 */
class KeyFile(path: String) extends IClose {

  val file = new File(path + ".kfile")
  var raf = new RandomAccessFile(file, "rw");

  // A new file is stamped with a 2-byte magic header so no record can ever
  // live at offset 0 or 1 (see getFirstPos).
  if (this.raf.length() == 0) {
    this.raf.write(Array[Byte](0xCA.toByte, 0xFE.toByte))
  }

  /** Reads the complete record at `pos` (link, count, hourly positions, key). */
  def getRecord(pos: Long): ITEM = {
    this.synchronized {
      this.raf.seek(pos);
      val in = new DataInputX(this.raf);
      // fixed-size head: link(5) + count(4) + 24 positions of 5 bytes each
      val buf = in.read(5 + 4 + 24 * 5);
      val in2 = new DataInputX(buf);
      val link = in2.readLong5();
      val count = in2.readFloat();
      val pos24h = new Array[Long](24)
      for (i <- 0 to 23) {
        pos24h(i) = in2.readLong5();
      }
      val key = in.readShortBytes();
      val next = this.raf.getFilePointer();
      return new ITEM(pos24h, key, link, next, count);
    }
  }

  /** Reads only the hash-chain link of the record at `pos`. */
  def getHashLink(pos: Long): Long = {
    this.synchronized {
      this.raf.seek(pos);
      return new DataInputX(this.raf).readLong5();
    }
  }

  /** Reads only the key bytes of the record at `pos`. */
  def getKey(pos: Long): Array[Byte] = {
    this.synchronized {
      this.raf.seek(pos + 5 + 4 + 24 * 5);
      val in = new DataInputX(this.raf);
      return in.readShortBytes();
    }
  }

  /** Reads the 24 per-hour value positions of the record at `pos`. */
  def getVPos(pos: Long): Array[Long] = {
    this.synchronized {
      this.raf.seek(pos + 5 + 4);
      val bytes = new DataInputX(this.raf).read(24 * 5);
      //
      val in = new DataInputX(bytes);
      val value = new Array[Long](24)
      for (i <- 0 to 23) {
        value(i) = in.readLong5();
      }
      return value;
    }
  }

  /** Reads the accumulated total count of the record at `pos`. */
  def getTotalCount(pos: Long): Float = {
    this.synchronized {
      this.raf.seek(pos + 5);
      val in = new DataInputX(this.raf);
      return in.readFloat();
    }
  }

  /** Overwrites the hash-chain link of the record at `pos`. */
  def setHashLink(pos: Long, link: Long) {
    this.synchronized {
      this.raf.seek(pos);
      new DataOutputX(this.raf).writeLong5(link);
    }
  }

  /** Writes a complete record at `pos`. */
  def write(pos: Long, next: Long, key: Array[Byte], cntSum: Float, vpos: Array[Long]) {
    this.synchronized {
      val out = new DataOutputX();
      out.writeLong5(next);
      out.writeFloat(cntSum);
      for (i <- 0 to 23) {
        out.writeLong5(vpos(i));
      }
      out.writeShortBytes(key);
      this.raf.seek(pos);
      this.raf.write(out.toByteArray());
    }
  }

  /** Overwrites the value position of the given hour for the record at `pos`. */
  def update(pos: Long, hour: Int, vpos: Long) {
    this.synchronized {
      this.raf.seek(pos + 5 + 4 + hour * 5);
      this.raf.write(DataOutputX.toBytes5(vpos));
    }
  }

  /** Adds `totalCount` to the stored total of the record at `pos`; returns the new total. */
  def addTotalCount(pos: Long, totalCount: Float): Float = {
    this.synchronized {
      this.raf.seek(pos + 5);
      val old = new DataInputX(this.raf).readFloat();
      this.raf.seek(pos + 5);
      this.raf.write(DataOutputX.toBytes(old + totalCount));
      return old + totalCount;
    }
  }

  /** Appends a new record at the end of the file and returns its position. */
  def append(next: Long, key: Array[Byte], count: Float, vpos: Array[Long]): Long = {
    // Fixed: reading the file length and writing the record must be atomic;
    // previously two concurrent appends could obtain the same position and
    // overwrite each other's record. The nested lock in write() is reentrant.
    this.synchronized {
      val pos = this.raf.length();
      write(pos, next, key, count, vpos);
      return pos;
    }
  }

  /** Closes the underlying file; subsequent getLength() reports 0. */
  def close() {
    this.synchronized {
      if (this.raf != null) {
        FileUtil.close(this.raf);
        this.raf = null
      }
    }
  }

  // First valid record position: right after the 2-byte magic header.
  def getFirstPos() = 2L

  // Fixed: read raf under the lock so a concurrent close() cannot null it
  // between the check and the length() call.
  def getLength() = this.synchronized { if (raf == null) 0 else raf.length() }
}
| scouter-project/scouter | scouter.server/src/main/scala/scouter/server/tagcnt/next/KeyFile.scala | Scala | apache-2.0 | 4,684 |
package monocle.function
import monocle.MonocleSuite
// Demonstrates the `lastOption` optic: an Optional focusing on the last
// element of a sequence-like structure, or the last Char of a String.
class LastOptionExample extends MonocleSuite {
  test("lastOption creates a Traversal from a List, Stream or Vector to its optional last element") {
    (List(1, 2, 3) applyOptional lastOption getOption) shouldEqual Some(3)
    (Vector(1, 2, 3) applyOptional lastOption getOption) shouldEqual Some(3)

    (List.empty[Int] applyOptional lastOption getOption) shouldEqual None
    // modifying/setting through the optional on an empty list is a no-op
    (List.empty[Int] applyOptional lastOption modify (_ + 1)) shouldEqual Nil

    (List(1, 2, 3) applyOptional lastOption set 0) shouldEqual List(1, 2, 0)
  }

  test("lastOption creates a Traversal from a String to its optional last Char") {
    ("Hello" applyOptional lastOption getOption) shouldEqual Some('o')
    ("Hello" applyOptional lastOption set 'a') shouldEqual "Hella"
  }
}
| aoiroaoino/Monocle | example/src/test/scala/monocle/function/LastOptionExample.scala | Scala | mit | 820 |
package org.camunda.worker.akka
import java.text.SimpleDateFormat
import java.util.Date
object DateFormat {

  /**
   * Shared formatter for the "yyyy-MM-dd'T'HH:mm:ss" pattern.
   *
   * SimpleDateFormat is NOT thread-safe: concurrent parse/format calls on a
   * shared instance corrupt its internal calendar state. The helpers below
   * therefore lock the instance; code using this val directly must do the same.
   */
  val format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss")

  /** Parses a timestamp in the shared pattern into a Date (thread-safe). */
  def parse(date: String): Date = format.synchronized { format.parse(date) }

  /** Renders the given Date in the shared pattern (thread-safe). */
  def format(date: Date): String = format.synchronized { format.format(date) }
}
package org.littlewings.javaee7.beanvalidation
import javax.validation.{ConstraintViolation, Validation}
import org.scalatest.FunSpec
import org.scalatest.Matchers._
/**
 * Verifies that constraint annotations declared in annotation lists are applied,
 * both for the Default group and for the custom MyGroup.
 * Refactored: person construction and validation/sorting are extracted into
 * private helpers to remove the per-test boilerplate.
 */
class AnnotationListSpec extends FunSpec {

  /** Builds a Person fixture with the given names (Person exposes mutable fields). */
  private def person(firstName: String, lastName: String): Person = {
    val p = new Person
    p.firstName = firstName
    p.lastName = lastName
    p
  }

  /** Validates for the given groups and returns violations sorted by property path. */
  private def sortedViolations(target: Person, groups: Class[_]*): Array[ConstraintViolation[Any]] = {
    val validator = Validation.buildDefaultValidatorFactory.getValidator
    validator
      .validate(target, groups: _*)
      .toArray(Array.empty[ConstraintViolation[Any]])
      .sortWith(_.getPropertyPath.toString < _.getPropertyPath.toString)
  }

  describe("AnnotationList Spec") {
    it("Default Group, valid") {
      sortedViolations(person("カツオ", "磯野")) should be(empty)
    }

    it("Default Group, invalid case 1") {
      val constraintViolations = sortedViolations(person("カツオ", "磯の"))

      constraintViolations should have size (1)
      constraintViolations(0).getPropertyPath.toString should be("lastName")
      constraintViolations(0).getMessage should be("must match \".*野$\"")
    }

    it("Default Group, invalid case 2") {
      val constraintViolations = sortedViolations(person("カツオ?", "いそ野"))

      constraintViolations should have size (2)
      constraintViolations(0).getPropertyPath.toString should be("firstName")
      constraintViolations(0).getMessage should be("size must be between 3 and 3")
      constraintViolations(1).getPropertyPath.toString should be("lastName")
      constraintViolations(1).getMessage should be("must match \"^磯.*\"")
    }

    it("MyGroup, valid") {
      sortedViolations(person("katsuo", "isono"), classOf[MyGroup]) should be(empty)
    }

    it("MyGroup, invalid") {
      val constraintViolations = sortedViolations(person("isono katsuo", "isono"), classOf[MyGroup])

      constraintViolations should have size (1)
      constraintViolations(0).getPropertyPath.toString should be("firstName")
      constraintViolations(0).getMessage should be("size must be between 4 and 6")
    }
  }
}
| kazuhira-r/javaee7-scala-examples | bean-validation-list/src/test/scala/org/littlewings/javaee7/beanvalidation/AnnotationListSpec.scala | Scala | mit | 3,460 |
package com.twitter.util.jackson.caseclass
import com.fasterxml.jackson.annotation.{JsonIgnore, JsonProperty}
import com.fasterxml.jackson.databind.JavaType
import com.fasterxml.jackson.databind.annotation.JsonDeserialize
import com.fasterxml.jackson.databind.deser.std.NumberDeserializers.BigDecimalDeserializer
import com.twitter.util.jackson.{
JacksonScalaObjectMapperType,
ScalaObjectMapper,
TestInjectableValue,
_
}
import com.twitter.util.validation.ScalaValidator
import jakarta.validation.constraints.NotEmpty
import org.junit.runner.RunWith
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.junit.JUnitRunner
/**
 * Verifies that CaseClassDeserializer exposes constructor fields under the expected
 * names (honoring JsonProperty renames) and that annotations declared on inherited
 * trait members are merged into the corresponding constructor field.
 */
@RunWith(classOf[JUnitRunner])
class CaseClassFieldTest extends AnyFunSuite with Matchers {

  // Underlying Jackson mapper shared by all fixtures to build deserializers.
  private[this] val mapper: JacksonScalaObjectMapperType =
    ScalaObjectMapper().underlying

  test("CaseClassField.createFields have field name foo") {
    val fields = deserializerFor(classOf[WithEmptyJsonProperty]).fields
    fields.length should equal(1)
    fields.head.name should equal("foo")
  }

  test("CaseClassField.createFields also have field name foo") {
    val fields = deserializerFor(classOf[WithoutJsonPropertyAnnotation]).fields
    fields.length should equal(1)
    fields.head.name should equal("foo")
  }

  test("CaseClassField.createFields have field name bar") {
    // Non-empty @JsonProperty("bar") renames the field.
    val fields = deserializerFor(classOf[WithNonemptyJsonProperty]).fields
    fields.length should equal(1)
    fields.head.name should equal("bar")
  }

  test("CaseClassField.createFields sees inherited JsonProperty annotation") {
    val fields = deserializerFor(classOf[Aum]).fields
    fields.length should equal(2)

    val iField = fields.head
    iField.name should equal("i")
    iField.annotations.length should equal(1)
    iField.annotations.head.annotationType() should be(classOf[JsonProperty])

    val jField = fields.last
    jField.name should equal("j")
    jField.annotations.length should equal(1)
    jField.annotations.head.annotationType() should be(classOf[JsonProperty])
  }

  test("CaseClassField.createFields sees inherited JsonProperty annotation 2") {
    val fields = deserializerFor(classOf[FooBar]).fields
    fields.length should equal(1)

    val helloField = fields.head
    helloField.name should equal("helloWorld")
    helloField.annotations.length should equal(2)
    helloField.annotations.head.annotationType() should be(classOf[JsonProperty])
    helloField.annotations.exists(_.annotationType() == classOf[TestInjectableValue]) should be(
      true)
    helloField.annotations.last
      .asInstanceOf[TestInjectableValue].value() should be("accept") // from Bar
  }

  test("CaseClassField.createFields sees inherited JsonProperty annotation 3") {
    val fields = deserializerFor(classOf[TestTraitImpl]).fields
    /*
      in trait:
      ---------
      @JsonProperty("oldness")
      def age: Int
      @NotEmpty
      def name: String

      case class constructor:
      -----------------------
      @JsonProperty("ageness") age: Int, // should override inherited annotation from trait
      @TestInjectableValue name: String, // should have two annotations, one from trait and one here
      @TestInjectableValue dateTime: LocalDate,
      @JsonProperty foo: String, // empty JsonProperty should default to field name
      @JsonDeserialize(contentAs = classOf[BigDecimal], using = classOf[BigDecimalDeserializer])
      double: BigDecimal,
      @JsonIgnore ignoreMe: String
     */
    fields.length should equal(6)
    val fieldMap: Map[String, CaseClassField] =
      fields.map(field => field.name -> field).toMap

    val ageField = fieldMap("ageness")
    ageField.annotations.length should equal(1)
    ageField.annotations.exists(_.annotationType() == classOf[JsonProperty]) should be(true)
    ageField.annotations.head.asInstanceOf[JsonProperty].value() should equal("ageness")

    val nameField = fieldMap("name")
    nameField.annotations.length should equal(2)
    nameField.annotations.exists(_.annotationType() == classOf[NotEmpty]) should be(true)
    nameField.annotations.exists(_.annotationType() == classOf[TestInjectableValue]) should be(true)

    val dateTimeField = fieldMap("dateTime")
    dateTimeField.annotations.length should equal(1)
    dateTimeField.annotations.exists(_.annotationType() == classOf[TestInjectableValue]) should be(
      true)

    val fooField = fieldMap("foo")
    fooField.annotations.length should equal(1)
    fooField.annotations.exists(_.annotationType() == classOf[JsonProperty]) should be(true)
    fooField.annotations.head.asInstanceOf[JsonProperty].value() should equal("")

    val doubleField = fieldMap("double")
    doubleField.annotations.length should equal(1)
    doubleField.annotations.exists(_.annotationType() == classOf[JsonDeserialize]) should be(true)
    doubleField.annotations.head.asInstanceOf[JsonDeserialize].contentAs() should be(
      classOf[BigDecimal])
    doubleField.annotations.head.asInstanceOf[JsonDeserialize].using() should be(
      classOf[BigDecimalDeserializer])

    val ignoreMeField = fieldMap("ignoreMe")
    ignoreMeField.annotations.length should equal(1)
    ignoreMeField.annotations.exists(_.annotationType() == classOf[JsonIgnore]) should be(true)
  }

  test("CaseClassField.createFields sees inherited JsonProperty annotation 4") {
    val fields: Seq[CaseClassField] = deserializerFor(classOf[FooBaz]).fields
    fields.length should equal(1)

    val helloField: CaseClassField = fields.head
    helloField.annotations.length should equal(2)
    helloField.annotations.exists(_.annotationType() == classOf[JsonProperty]) should be(true)
    helloField.annotations.head
      .asInstanceOf[JsonProperty].value() should equal("goodbyeWorld") // from Baz
    helloField.annotations.exists(_.annotationType() == classOf[TestInjectableValue]) should be(
      true)
    helloField.annotations.last
      .asInstanceOf[TestInjectableValue].value() should be("accept") // from Bar
  }

  test("CaseClassField.createFields sees inherited JsonProperty annotation 5") {
    val fields = deserializerFor(classOf[FooBarBaz]).fields
    fields.length should equal(1)

    val helloField: CaseClassField = fields.head
    helloField.annotations.length should equal(2)
    helloField.annotations.exists(_.annotationType() == classOf[JsonProperty]) should be(true)
    helloField.annotations.head
      .asInstanceOf[JsonProperty].value() should equal("goodbye") // from BarBaz
    helloField.annotations.exists(_.annotationType() == classOf[TestInjectableValue]) should be(
      true)
    helloField.annotations.last
      .asInstanceOf[TestInjectableValue].value() should be("accept") // from Bar
  }

  test("CaseClassField.createFields sees inherited JsonProperty annotation 6") {
    val fields = deserializerFor(classOf[File]).fields
    fields.length should equal(1)

    val uriField: CaseClassField = fields.head
    uriField.annotations.length should equal(1)
    uriField.annotations.exists(_.annotationType() == classOf[JsonProperty]) should be(true)
    uriField.annotations.head.asInstanceOf[JsonProperty].value() should equal("file")
  }

  test("CaseClassField.createFields sees inherited JsonProperty annotation 7") {
    val fields = deserializerFor(classOf[Folder]).fields
    fields.length should equal(1)

    val uriField: CaseClassField = fields.head
    uriField.annotations.length should equal(1)
    uriField.annotations.exists(_.annotationType() == classOf[JsonProperty]) should be(true)
    uriField.annotations.head.asInstanceOf[JsonProperty].value() should equal("folder")
  }

  test("CaseClassField.createFields sees inherited JsonProperty annotation 8") {
    val fields = deserializerFor(classOf[LoadableFile]).fields
    fields.length should equal(1)

    val uriField: CaseClassField = fields.head
    uriField.annotations.length should equal(1)
    uriField.annotations.exists(_.annotationType() == classOf[JsonProperty]) should be(true)
    uriField.annotations.head.asInstanceOf[JsonProperty].value() should equal("file")
  }

  test("CaseClassField.createFields sees inherited JsonProperty annotation 9") {
    val fields = deserializerFor(classOf[LoadableFolder]).fields
    fields.length should equal(1)

    val uriField: CaseClassField = fields.head
    uriField.annotations.length should equal(1)
    uriField.annotations.exists(_.annotationType() == classOf[JsonProperty]) should be(true)
    uriField.annotations.head.asInstanceOf[JsonProperty].value() should equal("folder")
  }

  test("Seq[Long]") {
    // Scala Seq[Long] should surface as a primitive long array type in Jackson.
    val fields = deserializerFor(classOf[CaseClassWithArrayLong]).fields
    fields.length should equal(1)

    val arrayField: CaseClassField = fields.head
    arrayField.javaType.getTypeName should be(
      "[array type, component type: [simple type, class long]]")
  }

  /** Builds a CaseClassDeserializer for the given class using the shared mapper. */
  private[this] def deserializerFor(clazz: Class[_]): CaseClassDeserializer = {
    val javaType: JavaType = mapper.constructType(clazz)
    new CaseClassDeserializer(
      javaType = javaType,
      mapper.getDeserializationConfig,
      mapper.getDeserializationConfig.introspect(javaType),
      Some(ScalaValidator())
    )
  }
}
| twitter/util | util-jackson/src/test/scala/com/twitter/util/jackson/caseclass/CaseClassFieldTest.scala | Scala | apache-2.0 | 9,198 |
package io.flatmap.ml.som
import java.awt.Color
import breeze.linalg.DenseMatrix
import breeze.numerics.closeTo
import io.flatmap.ml.util.Plot
import org.apache.spark.mllib.linalg.DenseVector
import org.apache.spark.mllib.random.RandomRDDs
import org.scalatest._
import util.TestSparkContext
/** Checks that SOM training on RGB test data mutates the code book and reaches the expected error. */
class GaussianSelfOrganizingMapSpec extends FlatSpec with Matchers with BeforeAndAfterEach with TestSparkContext {

  "train" should "return a fitted SOM instance" in {
    // Load RGB triplets from the test resource and normalize each channel into [0, 1].
    val path = getClass.getResource("/rgb.csv").getPath
    val rgb = sparkSession.sparkContext
      .textFile(path)
      .map(_.split(",").map(_.toDouble / 255.0))
      .map(new DenseVector(_))
    val som = GaussianSelfOrganizingMap(6, 6, _sigma = 0.5, _learningRate = 0.3).initialize(rgb)
    val initialCodeBook = som.codeBook.copy
    // Maps a code-book vector (r, g, b in [0, 1]) back to a packed RGB double for plotting.
    val codeBookVectorToRGB: List[Double] => Double = {
      case red :: green :: blue :: Nil =>
        new Color((red*255.0).toInt, (green*255.0).toInt, (blue*255.0).toInt).getRGB.toDouble
      case _ => Color.white.getRGB.toDouble
    }
    Plot.som("Initial SOM", som.codeBook, "initial_som.png")(codeBookVectorToRGB)
    val (newSom, params) = som.train(rgb, 20)
    Plot.som(f"Trained SOM (error=${params.errors.head}%1.4f)", newSom.codeBook, "trained_som.png")(codeBookVectorToRGB)
    Plot.errors(params.errors.reverse)
    // Training must change the code book...
    newSom.codeBook should not equal initialCodeBook
    // ...and the final quantization error should be close to 0.15 (1% relative tolerance).
    assert(closeTo(params.errors.head, 0.15, relDiff = 1e-2))
  }
}
| ShokuninSan/som | src/test/scala/io/flatmap/ml/som/GaussianSelfOrganizingMapSpec.scala | Scala | mit | 1,455 |
/*******************************************************************************
* Copyright (c) 2019. Carl Minden
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package com.anathema_roguelike
package entities.characters.player.perks.abilities.shapeshifting
import com.anathema_roguelike.entities.characters.actions.TargetedAction
import com.anathema_roguelike.entities.characters.player.perks.abilities.Ability
import com.anathema_roguelike.entities.characters.Character
/** Bear form of the shapeshifting ability ("Shapeshift: Bear"). */
class ShapeshiftBear() extends Shapeshift("Shapeshift: Bear") with Ability {
  // Not implemented yet: throws NotImplementedError if invoked.
  override protected def createAction: TargetedAction[Character] = ??? // TODO Auto-generated method stub
}
/*
* Copyright 2017 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.unicomplex
import java.io._
import java.net.URL
import java.util.concurrent.{TimeUnit, TimeoutException}
import java.util.jar.JarFile
import java.util.{Timer, TimerTask}
import akka.actor._
import akka.pattern.ask
import akka.routing.FromConfig
import akka.util.Timeout
import com.typesafe.config._
import com.typesafe.scalalogging.LazyLogging
import org.squbs.lifecycle.ExtensionLifecycle
import org.squbs.pipeline.PipelineSetting
import org.squbs.unicomplex.{Extension => SqubsExtension}
import org.squbs.unicomplex.UnicomplexBoot.CubeInit
import org.squbs.util.ConfigUtil._
import scala.annotation.tailrec
import scala.collection.concurrent.TrieMap
import scala.collection.mutable
import scala.jdk.CollectionConverters._
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import scala.language.{postfixOps, existentials}
import scala.util.control.NonFatal
import scala.util.{Failure, Success, Try}
object UnicomplexBoot extends LazyLogging {
// Config keys locating external configuration and naming the actor system.
final val extConfigDirKey = "squbs.external-config-dir"
final val extConfigNameKey = "squbs.external-config-files"
final val actorSystemNameKey = "squbs.actorsystem-name"

// Overall startup timeout; overridable via the "startup.timeout" system property (milliseconds).
val defaultStartupTimeout: Timeout =
  Try(System.getProperty("startup.timeout").toLong) map { millis =>
    akka.util.Timeout(millis, TimeUnit.MILLISECONDS)
  } getOrElse (1 minute)
// Classifies the component declarations a cube may contain.
// NOTE(review): scala.Enumeration is discouraged, but kept for interface compatibility.
object StartupType extends Enumeration {
  type StartupType = Value
  val
  // Identifies extensions
  EXTENSIONS,
  // Identifies actors as startup type
  ACTORS,
  // Identifies service as startup type
  SERVICES = Value
}
/** A scanned cube: its metadata plus its declared components grouped by startup type. */
case class CubeInit(info: Cube, components: Map[StartupType.Value, Seq[Config]])
// Registry of all actor systems booted through UnicomplexBoot, keyed by system name.
val actorSystems = TrieMap.empty[String, ActorSystem]
/** Creates a boot instance whose configuration layers the given add-on config over the defaults. */
def apply(addOnConfig: Config): UnicomplexBoot = {
  val maybeAddOn = Option(addOnConfig)
  UnicomplexBoot(
    Timestamp(System.nanoTime, System.currentTimeMillis),
    maybeAddOn,
    getFullConfig(maybeAddOn))
}
/** Creates a boot instance with a custom ActorSystem factory and no add-on configuration. */
def apply(actorSystemCreator: (String, Config) => ActorSystem): UnicomplexBoot =
  UnicomplexBoot(
    Timestamp(System.nanoTime, System.currentTimeMillis),
    None,
    getFullConfig(None),
    actorSystemCreator)
/**
 * Builds the effective configuration. With an add-on config, it is layered over the
 * default `ConfigFactory.load()` result. Otherwise, external config files named by
 * `squbs.external-config-files` (plus "application") are parsed from
 * `squbs.external-config-dir` and layered over the base config.
 */
def getFullConfig(addOnConfig: Option[Config]): Config = {
  val baseConfig = ConfigFactory.load()
  // 1. See whether add-on config is there.
  addOnConfig match {
    case Some(config) =>
      ConfigFactory.load(config withFallback baseConfig)
    case None =>
      // Sorry, the configDir is used to read the file. So it cannot be read from this config file.
      val configDir = new File(baseConfig.getString(extConfigDirKey))
      val configNames = baseConfig.getStringList(extConfigNameKey)
      // NOTE(review): mutates the java.util.List returned by getStringList — assumes it is mutable; verify.
      configNames.add("application")
      val parseOptions = ConfigParseOptions.defaults().setAllowMissing(true)
      val addConfigs = configNames.asScala.map {
        name => ConfigFactory.parseFileAnySyntax(new File(configDir, name), parseOptions)
      }
      // Later files in the list take lower precedence; base config is the final fallback.
      if (addConfigs.isEmpty) baseConfig
      else ConfigFactory.load(addConfigs.foldRight(baseConfig){ _ withFallback _ })
  }
}
/** Scans the given jars/directories for squbs metadata and resolves their cubes into the boot. */
private[unicomplex] def scan(jarNames: Seq[String])(boot: UnicomplexBoot): UnicomplexBoot = {
  // Keep only the entries that actually carried a parsable squbs-meta config.
  val jarConfigs = for {
    jar <- jarNames
    cfg <- readConfigs(jar)
  } yield (jar, cfg)
  resolveCubes(jarConfigs, boot.copy(jarNames = jarNames))
}
/**
 * Scans explicit resource URLs (and, unless disabled, all META-INF/squbs-meta.*
 * entries found on the classpath) for cube metadata and resolves the cubes.
 */
private[unicomplex] def scanResources(resources: Seq[URL],
                                      withClassPath: Boolean = true)(boot: UnicomplexBoot): UnicomplexBoot = {
  val cpResources: Seq[URL] =
    if (withClassPath) {
      val loader = getClass.getClassLoader
      Seq("conf", "json", "properties").flatMap { ext => loader.getResources(s"META-INF/squbs-meta.$ext").asScala }
    } else Seq.empty

  // Dedup the resources, just in case. LinkedHashSet preserves first-seen order.
  val allResources = mutable.LinkedHashSet(cpResources ++ resources : _*).toSeq
  val jarConfigs = allResources map readConfigs collect { case Some(jarCfg) => jarCfg }
  resolveCubes(jarConfigs, boot)
}
// Turns per-jar configs into cube definitions, de-conflicts cube aliases, and records
// which configured listeners are demanded by the discovered services.
private[this] def resolveCubes(jarConfigs: Seq[(String, Config)], boot: UnicomplexBoot) = {
  val cubeList = resolveAliasConflicts(jarConfigs map { case (jar, config) => readCube(jar, config) } collect {
    case Some(cube) => cube
  })

  // Read listener and alias information.
  val (activeAliases, activeListeners, missingAliases) = findListeners(boot.config, cubeList)
  missingAliases foreach { name => logger.warn(s"Requested listener $name not found!") }
  boot.copy(cubes = cubeList, jarConfigs = jarConfigs, listeners = activeListeners, listenerAliases = activeAliases)
}
/** Returns a lookup that opens a UTF-8 reader for a relative path under `directory`, if it is a regular file. */
private def createReaderFromFS(directory: File): String => Option[Reader] = { filePath =>
  val candidate = new File(directory, filePath)
  if (candidate.isFile) Some(new InputStreamReader(new FileInputStream(candidate), "UTF-8"))
  else None
}
// Returns a lookup into a jar: the jar is opened once (memoized in the closure as a Try)
// and entries resolve to UTF-8 readers; a failed jar open is re-thrown on every lookup.
// NOTE(review): the JarFile is never closed here — it stays open for the closure's lifetime; confirm intended.
private def createReaderFromJarFile(file: File): String => Option[Reader] = {
  val triedJarFile = Try(new JarFile(file))
  (filePath: String) => triedJarFile match {
    case Success(jarFile) => Option(jarFile.getEntry(filePath)) collect {
      case configFile if !configFile.isDirectory => new InputStreamReader(jarFile.getInputStream(configFile), "UTF-8")
    }
    case Failure(e) => throw e
  }
}
// Finds the first readable META-INF/squbs-meta.{conf,json,properties} inside the given
// jar file or directory. Yields the (possibly absent) reader plus the relative path tried;
// read errors per extension are logged and the next extension is attempted.
private def getConfigReader(jarName: String): Option[(Option[Reader], String)] = {
  // Make it extra lazy, so that we do not create the next File if the previous one succeeds.
  val configExtensions = Iterator("conf", "json", "properties")
  val maybeConfFileReader = Option(new File(jarName)) collect {
    case file if file.isDirectory => createReaderFromFS(file)
    case file if file.isFile => createReaderFromJarFile(file)
  }
  maybeConfFileReader flatMap (fileReader => configExtensions map { ext =>
    val currentFile = s"META-INF/squbs-meta.$ext"
    Try(fileReader(currentFile)) match {
      case Failure(e) =>
        logger.info(s"${e.getClass.getName} reading configuration from $jarName : $currentFile.\n${e.getMessage}")
        None
      case Success(maybeReader) => Option(maybeReader, currentFile)
    }
  } find (_.isDefined) flatten)
}
// Reads and parses the squbs metadata config from a jar/classpath entry, if present.
// The reader is always closed; parse failures are logged and yield None.
private[this] def readConfigs(jarName: String): Option[Config] = {
  // The flatMap body is a Function2 tupled to fit the Option[(Option[Reader], String)] element type.
  getConfigReader(jarName) flatMap ((maybeReader: Option[Reader], fileName: String) => {
    val maybeConfig = Try(maybeReader map ConfigFactory.parseReader) match {
      case Failure(e) =>
        logger.info(s"${e.getClass.getName} reading configuration from $jarName : $fileName.\n${e.getMessage}")
        None
      case Success(cfg) => cfg
    }
    maybeReader foreach(_.close())
    maybeConfig
  }).tupled
}
/**
 * Reads the squbs metadata config from a resource URL, pairing it with a best-guess
 * name of the jar or classpath entry containing it. Returns None (with a warning)
 * when the resource cannot be parsed.
 * (Refactor: the duplicated "/META-INF/" stripping for the "file" and default
 * protocols is extracted into a local helper.)
 */
private[this] def readConfigs(resource: URL): Option[(String, Config)] = {

  // We assume the classpath entry ends just before the last /META-INF/ segment.
  def stripMetaInf(path: String): String = {
    val endIdx = path.lastIndexOf("/META-INF/")
    if (endIdx > 0) path.substring(0, endIdx) else path
  }

  // Taking the best guess at the jar name or classpath entry. Should work most of the time.
  val jarName = resource.getProtocol match {
    case "jar" =>
      val jarURL = new URL(resource.getPath.split('!')(0))
      jarURL.getProtocol match {
        case "file" => jarURL.getPath
        case _ => jarURL.toString
      }
    case "file" => stripMetaInf(resource.getPath)
    case _ => stripMetaInf(resource.toString)
  }

  try {
    val config = ConfigFactory.parseURL(resource, ConfigParseOptions.defaults().setAllowMissing(false))
    Some((jarName, config))
  } catch {
    case NonFatal(e) =>
      logger.warn(s"${e.getClass.getName} reading configuration from $jarName.\n ${e.getMessage}")
      None
  }
}
/**
 * Builds a CubeInit from one jar's metadata config. Requires "cube-name" and
 * "cube-version"; otherwise yields None. The cube alias is the last dotted segment
 * of the cube name.
 * (Fix: replaced `return None` inside catch blocks — a Scala anti-pattern — with
 * `Try(...).toOption` composed in a for-comprehension.)
 */
private[this] def readCube(jarPath: String, config: Config): Option[CubeInit] = {
  for {
    cubeName <- Try(config.getString("cube-name")).toOption
    cubeVersion <- Try(config.getString("cube-version")).toOption
  } yield {
    val cubeAlias = cubeName.substring(cubeName.lastIndexOf('.') + 1)

    // Collect the component sections that are actually present, keyed by startup type.
    val components = Seq(
      config.getOption[Seq[Config]]("squbs-actors") map ((StartupType.ACTORS, _)),
      config.getOption[Seq[Config]]("squbs-services") map ((StartupType.SERVICES, _)),
      config.getOption[Seq[Config]]("squbs-extensions") map ((StartupType.EXTENSIONS, _))
    ).collect { case Some((sType, configs)) => (sType, configs) }.toMap

    CubeInit(Cube(cubeAlias, cubeName, cubeVersion, jarPath), components)
  }
}
// Resolve cube alias conflict by making it longer on demand.
// Any cubes whose short aliases collide get one more dotted package segment of their
// full name pulled into the alias; this repeats until all aliases are unique.
// Two cubes sharing the same full name is a fatal error (sys.error).
@tailrec
private[unicomplex] def resolveAliasConflicts(cubeList: Seq[CubeInit]): Seq[CubeInit] = {

  // alias -> set of distinct full names currently mapped to it (only conflicts kept).
  val aliasConflicts = cubeList
    .map { cube =>
      (cube.info.name, cube.info.fullName)
    }
    .groupBy (_._1)
    .map { case (name, seq) => name -> (seq map (_._2)).toSet }
    .filter { _._2.size > 1 }

  if (aliasConflicts.isEmpty) cubeList
  else {

    var updated = false

    val newAliases = (aliasConflicts flatMap { case (alias, conflicts) =>
      conflicts.toSeq map { symName =>
        // Extend the alias by one more dotted segment, if one is available.
        val idx = symName.lastIndexOf('.', symName.length - alias.length - 2)
        if (idx > 0) {
          updated = true
          (symName, symName.substring(idx + 1))
        }
        else (symName, symName)
      }
    }).toSeq

    if (updated) {
      val updatedList = cubeList map { cube =>
        newAliases find { case (symName, alias) => symName == cube.info.fullName } match {
          case Some((symName, alias)) => cube.copy(info = cube.info.copy(name = alias))
          case None => cube
        }
      }
      resolveAliasConflicts(updatedList)
    }
    else sys.error("Duplicate cube names: " + (aliasConflicts flatMap (_._2) mkString ", "))
  }
}
/**
 * Starts one cube: registers it with the Unicomplex, spins up its declared actors
 * and service routes/flows under a fresh CubeSupervisor, and returns the
 * supervisor's "started" ack future plus the identities of components started.
 * Individual component failures are logged and reported as StartFailure to the
 * supervisor; they do not abort the rest of the cube.
 */
private[unicomplex] def startComponents(cube: CubeInit, aliases: Map[String, String])
                                       (implicit actorSystem: ActorSystem,
                                        timeout: Timeout = UnicomplexBoot.defaultStartupTimeout) = {
  import cube.components
  import cube.info.{fullName, jarPath, name, version}
  val cubeSupervisor = actorSystem.actorOf(Props[CubeSupervisor](), name)
  Unicomplex(actorSystem).uniActor ! CubeRegistration(cube.info, cubeSupervisor)

  // Starts one actor declared in squbs-actors; returns its identity, or None on failure.
  def startActor(actorConfig: Config): Option[(String, String, String, Class[_])] = {
    val className = actorConfig getString "class-name"
    val name = actorConfig.get[String]("name", className substring (className.lastIndexOf('.') + 1))
    val withRouter = actorConfig.get[Boolean]("with-router", false)
    val initRequired = actorConfig.get[Boolean]("init-required", false)

    try {
      val clazz = Class.forName(className, true, getClass.getClassLoader)
      clazz asSubclass classOf[Actor]

      // Create and the props for this actor to be started, optionally enabling the router.
      val props = if (withRouter) Props(clazz) withRouter FromConfig() else Props(clazz)
      // Send the props to be started by the cube.
      cubeSupervisor ! StartCubeActor(props, name, initRequired)
      Some((fullName, name, version, clazz))
    } catch {
      case NonFatal(e) =>
        val t = getRootCause(e)
        logger.warn(s"Can't load actor: $className.\n" +
          s"Cube: $fullName $version\n" +
          s"Path: $jarPath\n" +
          s"${t.getClass.getName}: ${t.getMessage}")
        t.printStackTrace()
        cubeSupervisor ! StartFailure(e)
        None
    }
  }

  // Tries each supported route/flow definition flavor (Scala and Java) in turn and
  // starts the matching route actor for the first one the class conforms to.
  def startServiceRoute(clazz: Class[_], webContext: String, listeners: Seq[String],
                        ps: PipelineSetting) = {
    Try {
      (clazz asSubclass classOf[RouteDefinition], classOf[RouteActor])
    } orElse Try {
      (clazz asSubclass classOf[FlowDefinition], classOf[FlowActor])
    } orElse Try {
      (clazz asSubclass classOf[AbstractRouteDefinition], classOf[JavaRouteActor])
    } orElse Try {
      (clazz asSubclass classOf[AbstractFlowDefinition], classOf[JavaFlowActor])
    } map { case (routeClass, routeActor) =>
      val props = Props(routeActor, webContext, routeClass)
      val className = clazz.getSimpleName
      val actorName =
        if (webContext.length > 0) s"${webContext.replace('/', '_')}-$className-route"
        else s"root-$className-route"
      cubeSupervisor ! StartCubeService(webContext, listeners, props, actorName, ps, initRequired = true)
      (fullName, name, version, clazz)
    }
  }

  // This same creator class is available in Akka's Props.scala but it is inaccessible to us.
  class TypedCreatorFunctionConsumer(clz: Class[_ <: Actor], creator: () => Actor) extends IndirectActorProducer {
    override def actorClass = clz
    override def produce() = creator()
  }

  // Fallback for plain Actor-based services; wraps instantiation so the actor sees its web context.
  def startServiceActor(clazz: Class[_], webContext: String, listeners: Seq[String],
                        ps: PipelineSetting, initRequired: Boolean) =
    Try {
      val actorClass = clazz asSubclass classOf[Actor]
      def actorCreator: Actor = WithWebContext(webContext) { actorClass.newInstance() }
      val props = Props(classOf[TypedCreatorFunctionConsumer], clazz, () => actorCreator)
      val className = clazz.getSimpleName
      val actorName =
        if (webContext.length > 0) s"${webContext.replace('/', '_')}-$className-handler"
        else s"root-$className-handler"
      cubeSupervisor ! StartCubeService(webContext, listeners, props, actorName, ps, initRequired)
      (fullName, name, version, actorClass)
    }

  // Starts one squbs-services entry: resolves its listeners through the alias map,
  // then tries route/flow flavors before falling back to a raw service actor.
  def startService(serviceConfig: Config): Option[(String, String, String, Class[_])] =
    Try {
      val className = serviceConfig.getString("class-name")
      val clazz = Class.forName(className, true, getClass.getClassLoader)
      val webContext = serviceConfig.getString("web-context")

      val pipeline = serviceConfig.getOption[String]("pipeline")
      val defaultFlowsOn = serviceConfig.getOption[Boolean]("defaultPipeline")
      val pipelineSettings = (pipeline, defaultFlowsOn)

      // "*" binds the service to every active listener; unknown names are warned and skipped.
      val listeners = serviceConfig.getOption[Seq[String]]("listeners").fold(Seq("default-listener")) { list =>
        if (list.contains("*")) aliases.values.toSeq.distinct
        else list flatMap { entry =>
          aliases.get(entry) match {
            case Some(listener) => Seq(listener)
            case None =>
              logger.warn(s"Listener $entry required by $fullName is not configured. Ignoring.")
              Seq.empty[String]
          }
        }
      }
      val service = startServiceRoute(clazz, webContext, listeners, pipelineSettings) orElse
        startServiceActor(clazz, webContext, listeners, pipelineSettings,
          serviceConfig.get[Boolean]("init-required", false))
      service match {
        case Success(svc) => svc
        case Failure(e) =>
          throw new IOException(s"Class $className is neither a RouteDefinition nor an Actor.", e)
      }
    } match {
      case Success(svc) => Some(svc)
      case Failure(e) =>
        val t = getRootCause(e)
        logger.warn(s"Can't load service definition $serviceConfig.\n" +
          s"Cube: $fullName $version\n" +
          s"Path: $jarPath\n" +
          s"${t.getClass.getName}: ${t.getMessage}")
        t.printStackTrace()
        cubeSupervisor ! StartFailure(e)
        None
    }

  val actorConfigs = components.getOrElse(StartupType.ACTORS, Seq.empty)
  val routeConfigs = components.getOrElse(StartupType.SERVICES, Seq.empty)

  val actorInfo = actorConfigs map startActor
  val routeInfo = routeConfigs map startService

  val startedF = cubeSupervisor ? Started // Tell the cube all actors to be started are started.
  logger.info(s"Started cube $fullName $version")

  val componentInfo = (actorInfo ++ routeInfo) collect { case Some(component) => component }
  (startedF, componentInfo)
}
// Collects all root-level config objects whose "type" is "squbs.listener";
// duplicate listener names are logged and dropped (first one wins).
def configuredListeners(config: Config): Map[String, Config] = {
  val listeners = config.root.asScala.toSeq.collect {
    case (n, v: ConfigObject) if v.toConfig.getOption[String]("type").contains("squbs.listener") => (n, v.toConfig)
  }
  resolveDuplicates[Config](listeners, (name, conf, c) =>
    logger.warn(s"Duplicate listener $name already declared. Ignoring.")
  )
}
// Maps every alias (and the listener's own name) back to the listener it belongs to;
// duplicate aliases are logged and dropped (first one wins).
def findListenerAliases(listeners: Map[String, Config]): Map[String, String] = {
  val aliases = for ((name, config) <- listeners) yield {
    val aliasNames = config.get[Seq[String]]("aliases", Seq.empty[String])
    (name, name) +: (aliasNames map ((_, name)))
  }
  resolveDuplicates[String](aliases.toSeq.flatten, (alias, listener, l) =>
    logger.warn(s"Duplicate alias $alias for listener $listener already declared for listener $l. Ignoring.")
  )
}
/**
 * Collapses entries by key, keeping the first value for each key and reporting every
 * subsequent duplicate through `duplicateHandler(key, duplicateValue, keptValue)`.
 * (Fix: the original matched `head :: tail`, whose extractor only matches `List`;
 * a non-List Seq (e.g. Vector/ArrayBuffer from `.toSeq`) containing duplicates
 * would throw a MatchError. This version works for any Seq.)
 */
def resolveDuplicates[T](in: Seq[(String, T)], duplicateHandler: (String, T, T) => Unit): Map[String, T] = {
  in.groupBy(_._1).map { case (key, entries) =>
    val kept = entries.head._2
    entries.tail foreach { case (k, dup) => duplicateHandler(k, dup, kept) }
    key -> kept
  }
}
// Determines which configured aliases/listeners are actually demanded by the cubes'
// service declarations, and which demanded names have no configuration at all.
// Returns (activeAliases, activeListeners, missingAliases).
def findListeners(config: Config, cubes: Seq[CubeInit]) = {
  // Each service's "listeners" entries, flattened to individual names ("*" excluded).
  val demandedListeners =
    for {
      routes <- cubes.map { _.components.get(StartupType.SERVICES) }.collect { case Some(routes) => routes }.flatten
      routeListeners <- routes.get[Seq[String]]("listeners", Seq("default-listener"))
      if routeListeners != "*" // Filter out wildcard listener bindings, not starting those.
    } yield {
      routeListeners
    }
  val listeners = configuredListeners(config)
  val aliases = findListenerAliases(listeners)
  val activeAliases = aliases filter { case (n, _) => demandedListeners contains n }
  val missingAliases = demandedListeners filterNot { l => activeAliases exists { case (n, _) => n == l } }
  val activeListenerNames = activeAliases.values
  val activeListeners = listeners filter { case (n, c) => activeListenerNames exists (_ == n) }
  (activeAliases, activeListeners, missingAliases)
}
/**
 * Starts the service infrastructure using timeouts from configuration:
 * `squbs.service-infra.timeout` (overall, default 60s) and
 * `squbs.service-infra.listener-timeout` (per listener, default 10s).
 */
def startServiceInfra(boot: UnicomplexBoot)(implicit actorSystem: ActorSystem): Unit = {
  // Reads an optional duration from config and converts it to a Timeout; must be positive.
  def getTimeout(keyRelPath: String): Option[Timeout] = {
    val key = s"squbs.service-infra.$keyRelPath"
    val timeoutDuration = actorSystem.settings.config.getOptionalDuration(key)
    timeoutDuration.map { d =>
      require(d.toMillis > 0, s"The config property, $key, must be greater than 0 milliseconds.")
      Timeout(d)
    }
  }
  val overallTimeout = getTimeout("timeout").getOrElse(Timeout(60.seconds))
  val listenerTimeout =
    getTimeout("listener-timeout")
      .getOrElse(Timeout(10.seconds))
  startServiceInfra(boot, overallTimeout, listenerTimeout)
}
/**
 * Starts all active listeners and blocks until they are bound or the overall
 * timeout lapses. Per-listener failures (timeout, generic, or StartFailure acks)
 * are logged as they complete.
 * (Fix: replaced `case Failure(t) if (t.isInstanceOf[TimeoutException])` with the
 * idiomatic typed pattern `case Failure(t: TimeoutException)` — same semantics.)
 *
 * @param timeout overall bound for all listeners to come up
 * @param listenerTimeout per-listener ask timeout
 */
def startServiceInfra(
  boot: UnicomplexBoot,
  timeout: Timeout,
  listenerTimeout: Timeout
)(implicit actorSystem: ActorSystem): Unit = {
  import actorSystem.dispatcher
  val startTime = System.nanoTime
  implicit val to = listenerTimeout
  val ackFutures =
    for ((listenerName, config) <- boot.listeners) yield {
      val responseFuture = Unicomplex(actorSystem).uniActor ? StartListener(listenerName, config)
      responseFuture.onComplete {
        case Failure(t: TimeoutException) =>
          logger.error(s"The Unicomplex could not start the listener, $listenerName, within $to.", t)
        case Failure(t) =>
          logger.error(s"The Unicomplex failed to start the listener, $listenerName.", t)
        case Success(StartFailure(t)) =>
          logger.error(s"The Unicomplex reported a start failure for the listener, $listenerName.", t)
        case _ =>
      }
      responseFuture
    }

  // Block for the web service to be started.
  Await.ready(Future.sequence(ackFutures), timeout.duration)
  val elapsed = (System.nanoTime - startTime) / 1000000
  logger.info(s"Web Service started in $elapsed milliseconds")
}
/** Walks the cause chain of `e` and returns the innermost (root) throwable.
  * Returns `e` itself when it has no cause. */
@tailrec
private[unicomplex] def getRootCause(e: Throwable): Throwable = {
  val cause = e.getCause
  if (cause == null) e else getRootCause(cause)
}
}
/** Immutable description of a squbs Unicomplex boot sequence.
  *
  * Builder-style methods (`scanComponents`, `scanResources`, `initExtensions`, ...)
  * each return an updated copy; `start()` finally creates the ActorSystem and
  * drives cube/listener startup. The message order sent to the Unicomplex actor
  * inside `start` is significant — do not reorder without understanding it.
  */
case class UnicomplexBoot private[unicomplex](startTime: Timestamp,
    addOnConfig: Option[Config] = None,
    config: Config,
    actorSystemCreator: (String, Config) => ActorSystem = { (name, config) => ActorSystem(name, config) },
    cubes: Seq[CubeInit] = Seq.empty,
    listeners: Map[String, Config] = Map.empty,
    listenerAliases: Map[String, String] = Map.empty,
    jarConfigs: Seq[(String, Config)] = Seq.empty,
    jarNames: Seq[String] = Seq.empty,
    actors: Seq[(String, String, String, Class[_])] = Seq.empty,
    extensions: Seq[SqubsExtension] = Seq.empty,
    started: Boolean = false,
    stopJVM: Boolean = false) extends LazyLogging {

  import UnicomplexBoot._

  // ActorSystem name, read from configuration.
  def actorSystemName = config.getString(actorSystemNameKey)

  // Looks up the (already started) ActorSystem in the global registry;
  // throws NoSuchElementException if `start()` has not run yet.
  def actorSystem = UnicomplexBoot.actorSystems(actorSystemName)

  def externalConfigDir = config.getString(extConfigDirKey)

  // Replaces the factory used to create the ActorSystem (e.g. for tests).
  def createUsing(actorSystemCreator: (String, Config) => ActorSystem) = copy(actorSystemCreator = actorSystemCreator)

  def scanComponents(jarNames: Seq[String]): UnicomplexBoot = scan(jarNames)(this)

  def scanComponents(jarNames: Array[String]): UnicomplexBoot = scan(jarNames.toSeq)(this)

  def scanResources(withClassPath: Boolean, resources: String*): UnicomplexBoot =
    UnicomplexBoot.scanResources(resources map (new File(_).toURI.toURL), withClassPath)(this)

  def scanResources(resources: String*): UnicomplexBoot =
    UnicomplexBoot.scanResources(resources map (new File(_).toURI.toURL))(this)

  // Java-friendly overloads.
  def scanResources(resources: java.util.List[String]): UnicomplexBoot = scanResources(resources.asScala.toSeq: _*)

  def scanResources(withClassPath: Boolean, resources: Array[String]): UnicomplexBoot =
    scanResources(withClassPath, resources.toIndexedSeq: _*)

  /** Loads, preInits and inits all extensions declared by the scanned cubes,
    * ordered by their "sequence" number (missing sequence sorts last via Int.MaxValue). */
  def initExtensions: UnicomplexBoot = {
    val initSeq = cubes.flatMap { cube =>
      cube.components.getOrElse(StartupType.EXTENSIONS, Seq.empty) map { config =>
        val className = config getString "class-name"
        val seqNo = config.get[Int]("sequence", Int.MaxValue)
        (seqNo, className, cube)
      }
    }.sortBy(_._1)
    // load extensions
    val extensions = initSeq map (loadExtension _).tupled
    // preInit extensions
    val preInitExtensions = extensions map extensionOp("preInit", _.preInit())
    // Init extensions
    val initExtensions = preInitExtensions map extensionOp("init", _.init())
    copy(extensions = initExtensions)
  }

  // Requests System.exit after extension shutdown (see registerExtensionShutdown).
  def stopJVMOnExit: UnicomplexBoot = copy(stopJVM = true)

  def start(): UnicomplexBoot = start(defaultStartupTimeout)

  /** Creates the ActorSystem and starts cubes, listeners and extensions.
    * Synchronized and single-shot: throws if already started. */
  def start(implicit timeout: Timeout): UnicomplexBoot = synchronized {
    if (started) throw new IllegalStateException("Unicomplex already started!")
    // Extensions may have changed the config. So we need to reload the config here.
    val newConfig = UnicomplexBoot.getFullConfig(addOnConfig)
    // NOTE(review): name is read from the pre-reload `config`, not `newConfig` —
    // presumably the actor system name cannot change during extension init; confirm.
    val newName = config.getString(UnicomplexBoot.actorSystemNameKey)
    implicit val actorSystem = {
      val system = actorSystemCreator(newName, newConfig)
      system.registerExtension(Unicomplex)
      Unicomplex(system).setScannedComponents(jarNames)
      system
    }
    // Track the system globally; deregister on termination.
    UnicomplexBoot.actorSystems += actorSystem.name -> actorSystem
    actorSystem.registerOnTermination {
      UnicomplexBoot.actorSystems -= actorSystem.name
    }
    registerExtensionShutdown(actorSystem)
    val uniActor = Unicomplex(actorSystem).uniActor
    // Send start time to Unicomplex
    uniActor ! startTime
    // Register extensions in Unicomplex actor
    uniActor ! Extensions(extensions)
    val startServices = listeners.nonEmpty && cubes.exists(_.components.contains(StartupType.SERVICES))
    // Notify Unicomplex that services will be started.
    if (startServices) uniActor ! PreStartWebService(listeners)
    // Signal started to Unicomplex.
    uniActor ! Started
    // preCubesInit runs after Started but before cube actors come up.
    val preCubesInitExtensions = extensions map extensionOp("preCubesInit", _.preCubesInit())
    uniActor ! Extensions(preCubesInitExtensions)
    // Start all actors
    val (futures, actorsUnflat) = cubes.map(startComponents(_, listenerAliases)).unzip
    val actors = actorsUnflat.flatten
    import actorSystem.dispatcher
    // Wait for every cube's actors to come up before starting services.
    Await.ready(Future.sequence(futures), timeout.duration)
    // Start the service infrastructure if services are enabled and registered.
    if (startServices) startServiceInfra(this)
    val postInitExtensions = preCubesInitExtensions map extensionOp("postInit", _.postInit())
    // Update the extension errors in Unicomplex actor, in case there are errors.
    uniActor ! Extensions(postInitExtensions)
    {
      // Tell Unicomplex we're done.
      val stateFuture = Unicomplex(actorSystem).uniActor ? Activate
      // On timeout, give Unicomplex a chance to report ActivateTimedOut before giving up.
      Try(Await.result(stateFuture, timeout.duration)) recoverWith { case _: TimeoutException =>
        val recoverFuture = Unicomplex(actorSystem).uniActor ? ActivateTimedOut
        Try(Await.result(recoverFuture, timeout.duration))
      } match {
        case Success(Active) => logger.info(s"[$actorSystemName] activated")
        case Success(Failed) => logger.info(s"[$actorSystemName] initialization failed.")
        case e => logger.warn(s"[$actorSystemName] awaiting confirmation, $e.")
      }
    }
    // Publish the final boot state (with the system's resolved config) for later lookup.
    val boot = copy(config = actorSystem.settings.config, actors = actors, extensions = postInitExtensions, started = true)
    Unicomplex(actorSystem).boot.set(boot)
    boot
  }

  /** Hooks extension shutdown into ActorSystem termination. When `stopJVM` is set,
    * a daemon timer force-exits the JVM if shutdown takes longer than 5 seconds. */
  def registerExtensionShutdown(actorSystem: ActorSystem): Unit = {
    if (extensions.nonEmpty) {
      actorSystem.registerOnTermination {
        // Run the shutdown in a different thread, not in the ActorSystem's onTermination thread.
        import scala.concurrent.Future
        // Kill the JVM if the shutdown takes longer than the timeout.
        if (stopJVM) {
          val shutdownTimer = new Timer(true)
          shutdownTimer.schedule(new TimerTask {
            def run(): Unit = {
              System.exit(0)
            }
          }, 5000)
        }
        // Then run the shutdown in the global execution context.
        import scala.concurrent.ExecutionContext.Implicits.global
        Future {
          // Shut extensions down in reverse init order.
          extensions.reverse foreach { e =>
            import e.info._
            e.extLifecycle foreach { elc =>
              logger.info(s"Shutting down extension ${elc.getClass.getName} in $fullName $version")
              elc.shutdown()
            }
          }
        } onComplete {
          case Success(result) =>
            logger.info(s"ActorSystem ${actorSystem.name} shutdown complete")
            if (stopJVM) System.exit(0)
          case Failure(e) =>
            logger.error(s"Error occurred during shutdown extensions: $e", e)
            if (stopJVM) System.exit(-1)
        }
      }
    }
  }

  /** Reflectively loads one extension class. On failure, logs the root cause and
    * returns an extension record carrying the error instead of throwing. */
  def loadExtension(seqNo: Int, className: String, cube: CubeInit): SqubsExtension = {
    try {
      val clazz = Class.forName(className, true, getClass.getClassLoader)
      val extLifecycle = ExtensionLifecycle(this) { clazz.asSubclass(classOf[ExtensionLifecycle]).newInstance }
      SqubsExtension(cube.info, seqNo, Some(extLifecycle), Seq.empty)
    } catch {
      case NonFatal(e) =>
        import cube.info._
        val t = getRootCause(e)
        logger.warn(s"Can't load extension $className.\n" +
          s"Cube: $fullName $version\n" +
          s"Path: $jarPath\n" +
          s"${t.getClass.getName}: ${t.getMessage}")
        t.printStackTrace()
        SqubsExtension(cube.info, seqNo, None, Seq("load" -> t))
    }
  }

  /** Applies one lifecycle operation (`preInit`, `init`, ...) to an extension,
    * logging the outcome and accumulating failures on the extension record. */
  def extensionOp(opName: String, opFn: ExtensionLifecycle => Unit)
                 (extension: SqubsExtension): SqubsExtension = {
    import extension.info._
    extension.extLifecycle match {
      case None => extension // Load already failed; nothing to run.
      case Some(l) =>
        try {
          opFn(l)
          logger.info(s"Success $opName extension ${l.getClass.getName} in $fullName $version")
          extension
        } catch {
          case NonFatal(e) =>
            val t = getRootCause(e)
            logger.warn(s"Error on $opName extension ${l.getClass.getName}\n" +
              s"Cube: $fullName $version\n" +
              s"${t.getClass.getName}: ${t.getMessage}")
            t.printStackTrace()
            extension.copy(exceptions = extension.exceptions :+ (opName -> t))
        }
    }
  }
}
| akara/squbs | squbs-unicomplex/src/main/scala/org/squbs/unicomplex/UnicomplexBoot.scala | Scala | apache-2.0 | 30,013 |
package bifrost.programBoxRegistry
import java.io.File
import java.util.UUID
import bifrost.NodeViewModifier.ModifierId
import bifrost.forging.ForgingSettings
import bifrost.transaction.bifrostTransaction.BifrostTransaction
import bifrost.transaction.box.StateBox
import com.google.common.primitives.Longs
import io.iohk.iodb.{ByteArrayWrapper, LSMStore}
import bifrost.utils.ScorexLogging
import scorex.crypto.hash.Sha256
import scala.util.Try
import scala.util.{Failure, Success}
//TODO remove
/** Legacy registry mapping state-box UUIDs to box ids, mirrored in memory and
  * persisted in a versioned key/value store. (Marked TODO-remove upstream.) */
class ProgramBoxRegistryOld(initialMap: Map[ByteArrayWrapper, ByteArrayWrapper], storage: PBRStorage) extends ScorexLogging {

  // In-memory mirror of the persisted mapping, keyed by uuid2baw(uuid).
  var UUID2BoxID = initialMap

  // Placeholder: intended to index state boxes created by a transaction (not implemented).
  def updateIfStateBoxTransaction(tx: BifrostTransaction) : Unit = {
    // tx.newBoxes.foreach(b => if b.isInstanceOf[StateBox])
  }

  /** Derives a deterministic (name-based) UUID from the value bytes, persists the
    * mapping under `modifierId`, and returns the new (uuid, value) pair. */
  def insertNewStateBox(modifierId: ModifierId, v: Array[Byte]): Try[(UUID, Array[Byte])] = Try {
    val k_uuid = UUID.nameUUIDFromBytes(v)
    update(modifierId, k_uuid, v)
    k_uuid -> v
  }

  /** Persists (k -> v) under `modifierId` and mirrors the entry in the in-memory map.
    *
    * Fix: a storage failure now throws (with the cause attached) and leaves the
    * in-memory map untouched. Previously the exception was constructed but never
    * thrown, so failures were silently swallowed and the cache drifted out of
    * sync with the underlying store.
    */
  def update(modifierId: ModifierId, k: UUID, v: Array[Byte]) : Unit = {
    val k_baw = ProgramBoxRegistryOld.uuid2baw(k)
    val v_baw = ByteArrayWrapper(v)
    storage.update(ByteArrayWrapper(modifierId), Seq((k_baw, v_baw))) match {
      case Success(_) => UUID2BoxID += (k_baw -> v_baw)
      case Failure(e) => throw new Exception("Unable to insert in StateBox registry", e)
    }
  }

  /** Looks up the box id for `k`, preferring the in-memory map and falling back
    * to storage. Wraps in Failure when the key is absent from both. */
  def get(k: UUID) : Try[(UUID, Array[Byte])] = Try {
    val k_baw = ProgramBoxRegistryOld.uuid2baw(k)
    k -> UUID2BoxID.getOrElse(k_baw, storage.get(k_baw).get).data
  }

  /** Records a version marker in storage so later rollbacks can target it. */
  def checkpoint(modifierId: ModifierId): Try[Unit] = Try { storage.checkpoint(ByteArrayWrapper(modifierId)) }

  /** Reverts storage to the state recorded for `modifierId`.
    * NOTE(review): the in-memory map is NOT rewound here — callers relying on it
    * after a rollback should rebuild it; confirm intended semantics. */
  def rollback(modifierId: ModifierId): Try[Unit] = Try { storage.rollback(ByteArrayWrapper(modifierId)) }
}
object ProgramBoxRegistryOld extends ScorexLogging {

  // Fixed byte widths used by the stored line layout.
  final val bytesInAUUID = 16
  final val bytesInABoxID = 32

  /** Creates a registry with an empty in-memory map over the given storage. */
  def apply(s: PBRStorage) : Try[ProgramBoxRegistryOld] = Try {
    new ProgramBoxRegistryOld(Map[ByteArrayWrapper, ByteArrayWrapper](), s)
  }

  /** Splits a stored line of (16 UUID bytes ++ 32 box-id bytes) into its parts.
    * Fails (inside Try) when `raw` is None or the payload is too short. */
  def parseLine(raw: Option[ByteArrayWrapper]) : Try[(UUID, Array[Byte])] = Try {
    val bytes = raw.get.data
    val uuidBytes = bytes.take(bytesInAUUID)
    val boxIdBytes = bytes.slice(bytesInAUUID, bytesInAUUID + bytesInABoxID)
    val mostSignificant = Longs.fromByteArray(uuidBytes.take(Longs.BYTES))
    val leastSignificant = Longs.fromByteArray(uuidBytes.slice(Longs.BYTES, Longs.BYTES * 2))
    (new UUID(mostSignificant, leastSignificant), boxIdBytes)
  }

  /** Encodes a UUID as a 32-byte key (its 16 bytes repeated twice) to satisfy the
    * fixed key-length requirement of the backing LSMStore. */
  def uuid2baw(v: UUID) : ByteArrayWrapper = {
    val most = ByteArrayWrapper.fromLong(v.getMostSignificantBits).data
    val least = ByteArrayWrapper.fromLong(v.getLeastSignificantBits).data
    ByteArrayWrapper(most ++ least ++ most ++ least)
  }

  /** Resolves the registry directory from settings and opens the registry there. */
  def readOrGenerate(settings: ForgingSettings): ProgramBoxRegistryOld = {
    val pbrDir = settings.pbrDirOpt.ensuring(_.isDefined, "programBoxRegistry dir must be specified").get
    readOrGenerate(pbrDir, settings.logDirOpt, settings)
  }

  /** Opens (or creates) the LSM-backed registry under `dataDir/map`, registering a
    * JVM shutdown hook that closes the store cleanly. */
  def readOrGenerate(dataDir: String, logDirOpt: Option[String], settings: ForgingSettings): ProgramBoxRegistryOld = {
    val storeDir = new File(s"$dataDir/map")
    storeDir.mkdirs()
    val pbrStore = new LSMStore(storeDir)
    Runtime.getRuntime.addShutdownHook(new Thread() {
      override def run(): Unit = {
        log.info("Closing programBoxRegistry storage...")
        pbrStore.close()
      }
    })
    ProgramBoxRegistryOld(new PBRStorage(pbrStore)).get
  }
}
| Topl/Project-Bifrost | src/main/scala/bifrost/programBoxRegistry/ProgramBoxRegistryOld.scala | Scala | mpl-2.0 | 4,177 |
package com.adamek.example.controller
import com.adamek.example.dao.SomeObjectDao
import com.adamek.example.model.SomeObject
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.http.MediaType
import org.springframework.web.bind.annotation._
@RestController
@RequestMapping(value = Array("/someobject"))
// REST CRUD controller delegating all operations to SomeObjectDao.
class SomeObjectController {

  // Field injection: Spring replaces the null at wiring time.
  // NOTE(review): field-injected val initialized to null is the usual
  // Spring-on-Scala pattern, but it NPEs if the controller is constructed
  // outside the container — confirm tests construct it via Spring.
  @Autowired
  private val dao: SomeObjectDao = null

  // GET /someobject — list all objects as JSON.
  @RequestMapping(
    method = Array(RequestMethod.GET),
    produces = Array(MediaType.APPLICATION_JSON_VALUE))
  def get = dao.getAll

  // GET /someobject/{id} — fetch a single object by id.
  @RequestMapping(
    value = Array("/{id}"),
    method = Array(RequestMethod.GET),
    produces = Array(MediaType.APPLICATION_JSON_VALUE))
  def get(@PathVariable id: Int) = dao.get(id)

  // POST /someobject — create a new object from the JSON body.
  @RequestMapping(
    method = Array(RequestMethod.POST),
    consumes = Array(MediaType.APPLICATION_JSON_VALUE),
    produces = Array(MediaType.APPLICATION_JSON_VALUE))
  def post(@RequestBody obj: SomeObject) = dao.create(obj)

  // PUT /someobject/{id} — update; note the id comes from the body, the
  // path variable is not bound here.
  @RequestMapping(
    value = Array("/{id}"),
    method = Array(RequestMethod.PUT),
    consumes = Array(MediaType.APPLICATION_JSON_VALUE),
    produces = Array(MediaType.APPLICATION_JSON_VALUE))
  def put(@RequestBody obj: SomeObject) = dao.update(obj)

  // DELETE /someobject/{id} — remove the object by id.
  @RequestMapping(
    value = Array("/{id}"),
    method = Array(RequestMethod.DELETE))
  def delete(@PathVariable id: Int) = dao.remove(id)
}
| marekadamek/spring4-scala-rest | src/main/scala/com/adamek/example/controller/SomeObjectController.scala | Scala | mit | 1,387 |
package pl.touk.nussknacker.engine.flink.api.process
import org.apache.flink.streaming.api.scala.DataStream
import pl.touk.nussknacker.engine.api.typed.typing.TypingResult
import pl.touk.nussknacker.engine.api.typed.{ReturningType, typing}
import pl.touk.nussknacker.engine.api.{Context, ValueWithContext}
/** Factory methods for building [[FlinkCustomStreamTransformation]] instances
  * from plain functions. */
object FlinkCustomStreamTransformation {

  /** Lifts a transformation that does not need the node context. */
  def apply(fun: DataStream[Context] => DataStream[ValueWithContext[AnyRef]]): FlinkCustomStreamTransformation
  = apply((data, _) => fun(data))

  /** Wraps a (stream, node context) function as a transformation instance. */
  def apply(fun: (DataStream[Context], FlinkCustomNodeContext) => DataStream[ValueWithContext[AnyRef]])
  : FlinkCustomStreamTransformation = new FlinkCustomStreamTransformation {
    override def transform(stream: DataStream[Context], nodeContext: FlinkCustomNodeContext)
    : DataStream[ValueWithContext[AnyRef]] = fun(stream, nodeContext)
  }

  /** As above, with the produced transformation additionally declaring its
    * return type through [[ReturningType]]. */
  def apply(fun: (DataStream[Context], FlinkCustomNodeContext) => DataStream[ValueWithContext[AnyRef]],
            rType: TypingResult): FlinkCustomStreamTransformation with ReturningType =
    new FlinkCustomStreamTransformation with ReturningType {
      override def transform(stream: DataStream[Context], nodeContext: FlinkCustomNodeContext)
      : DataStream[ValueWithContext[AnyRef]] = fun(stream, nodeContext)

      override def returnType: typing.TypingResult = rType
    }
}
/** A custom single-input stream transformation: maps a stream of contexts to a
  * stream of values paired with their context. */
trait FlinkCustomStreamTransformation {
  // TODO: To be consistent with ContextTransformation should return Context
  def transform(start: DataStream[Context], context: FlinkCustomNodeContext): DataStream[ValueWithContext[AnyRef]]
}
/**
  * Join functionality is not complete; many things are not implemented yet:
  * - validation context passed to both BranchExpression and JoinContextTransformationDef should be taken from incoming branches
  * - cannot test & generate test data from other branches
  *
  * Additionally, a lot of refactoring should be done:
  * - removing tree structures
  * - deciding whether CustomNode and Join should be different things in ConfigCreator
  *
  * Some important TODOs are marked with TODO JOIN
  */
trait FlinkCustomJoinTransformation {
  // TODO: To be consistent with ContextTransformation should return Context
  // Keys of `inputs` are branch ids; each branch contributes its own context stream.
  def transform(inputs: Map[String, DataStream[Context]], context: FlinkCustomNodeContext): DataStream[ValueWithContext[AnyRef]]
}
package chapter.two
// Exercise: what is the value of a "chained" assignment when the target has a
// user-defined setter? (Scala for the Impatient, chapter 2.)
object ExerciseThree extends App {

  object YAssignment {
    private var _y: Int = _                 // backing field for the y property
    def y = _y                              // getter
    def y_=(i: Int) = { _y = i; true }      // setter deliberately returns Boolean, not Unit
  }

  def testAssignment(int: Int): Boolean = {
    import YAssignment._
    // `y = int` is rewritten by the compiler to the setter call `y_=(int)`.
    // NOTE(review): `x` flows out as Boolean here (the setter's `true` result),
    // which is the point of the exercise — plain var assignments, by contrast,
    // have type Unit; confirm against your compiler version.
    val x = y = int
    x
  }
}
| deekim/impatient-scala | src/main/scala/chapter/two/ExerciseThree.scala | Scala | apache-2.0 | 272 |
package au.id.cxd.math.probability.regression
import breeze.linalg.{DenseMatrix, inv, DenseVector}
/**
* ##import MathJax
*
* Least squares is a simpler form of regression approximating $Y$ as
*
* $$
* \\hat{Y} = \\beta_0 + \\sum X_i \\beta_j
* $$
*
* $$
* \\hat{Y} = X\\beta
* $$
*
* The parameter $\\beta$ is estimated using the sample instances and the sample outputs as shown
*
* $$
* \\hat{\\beta} = (X'X)^{-1}X'Y
* $$
*
* $\\hat{\\beta}$ is assumed to have a normal distribution with mean $\\beta$
*
* and variance $Q \\sigma^2$
*
* where $Q = (X'X)^{-1}$.
*
* The residual squared error can be calculated as:
*
* $$
* RSS(\\beta) = \\sum (Y_i - \\hat{Y_i})^2
* $$
*
* The residuals $\\epsilon$ are assumed distributed as $N(0,\\sigma^2)$
*
* Inference on $\\beta$ can be performed using the standardised coefficient z-score for $\\beta$.
*
* $$
* z_j = \\frac{\\hat{\\beta_j}}{\\hat{\\sigma}\\sqrt{v_j}}
* $$
*
* The value for $v_j$ is derived from the $jth$ position on diagonal from the matrix $(X'X)^{-1}$.
*
* The Z-score from the normal distribution at a corresponding alpha level can be used to form
* the associated confidence interval for $\\beta$ at $p-value = z$ at the $1 - \\alpha$ level .
*
*
* $$
* \\hat{\\beta_j} \\pm z^{(1-\\alpha)} \\sqrt{v} \\hat{\\sigma}
* $$
*
* As $\\beta$ defines the coefficients of the $pth$ attribute in $X$ it is possible to test whether
* the $kth$ coefficient can be set to $0$ (in which case the contribution of $X_k$ to estimating $Y$ is not significant)
* by using an F-score.
* Let $k_1$ equal the $k$ parameters and $k_0$ be the a smaller model where $k_1 - k_0$ parameters are set to $0$
* the F-score can be calculated as
*
* $$
* F = \\frac{(RSS_0 - RSS_1)/(k_1 - k_0)}{RSS_1/(N - k_1 - 1)}
* $$
*
* this statistic can be used to determine if the residual sum of squares error is changed significantly
* by setting the $k_1 - k_0$ parameters to 0. If the RSS decreases, and the F-score can be tested
* against a corresponding p-value for an associated $\\alpha$ level to determine if the improvement
* is significant change. If so, the corresponding attributes contribution in determining $Y$ is marginal.
*
* For further details refer to
*
* Hastie, T. Tibshirani, R. Friedman, J. The Elements of Statistical Learning, Second Ed. Springer 2009.
*
* Created by cd on 18/10/14.
*/
/** ##import MathJax
  *
  * Ordinary least squares regression approximating $Y$ as
  *
  * $$ \\hat{Y} = X\\beta $$
  *
  * with the estimate $\\hat{\\beta} = (X'X)^{-1}X'Y$, assumed distributed
  * $N(\\beta, Q\\sigma^2)$ where $Q = (X'X)^{-1}$. The residuals are assumed
  * $N(0, \\sigma^2)$ with $\\sigma^2$ estimated as
  * $\\frac{1}{N - p - 1}\\sum(y_i - \\hat{y_i})^2$.
  *
  * For inference on $\\beta$ (z-scores, confidence intervals, F-tests on nested
  * models) see Hastie, Tibshirani, Friedman, The Elements of Statistical
  * Learning, 2nd ed., Springer 2009.
  *
  * @param X design matrix (one row per observation, one column per predictor)
  * @param Y response vector (length = X.rows)
  */
class LeastSquares(X: DenseMatrix[Double], Y: DenseVector[Double]) {

  /** Estimated coefficients $\\hat{\\beta}$, including the intercept in slot 0.
    * Fix: the pre-training placeholder is now sized X.cols + 1 (intercept +
    * one weight per predictor); it was previously sized Y.size, which made
    * `predict` fail on dimension mismatch if called before `train`. */
  var Beta: DenseVector[Double] = DenseVector.ones(X.cols + 1)

  /** $Var(\\beta) = Q\\sigma^2$ with $Q = (X'X)^{-1}$; computed by `train`.
    * Fix: placeholder sized (X.cols + 1) square to match Q's true dimensions
    * (the design matrix includes the bias column). */
  var betaVariance: DenseMatrix[Double] = DenseMatrix.zeros(X.cols + 1, X.cols + 1)

  /** Estimated residual variance $\\hat{\\sigma}^2$; computed by `train`. */
  var variance: Double = 0

  /** Design matrix with a leading column of ones for the intercept. */
  var X1: DenseMatrix[Double] = DenseMatrix.horzcat(DenseMatrix.ones[Double](X.rows, 1), X)

  /** Fits the model: computes $\\hat{\\beta} = (X'X)^{-1}X'Y$, the residual
    * variance estimate and $Var(\\beta)$. Returns the fitted coefficients. */
  def train() = {
    val C = inv(X1.t * X1)
    val D = X1.t * Y
    Beta = C * D
    val yHat = predict(X)
    // Residual sum of squares: sum of squared (y_i - yHat_i).
    val residualSS = (Y - yHat).foldLeft(0.0d) {
      (total, delta) => total + Math.pow(delta, 2.0)
    }
    // Unbiased estimate of sigma^2 with N - p - 1 degrees of freedom.
    variance = 1.0 / (Y.size - X.cols - 1) * residualSS
    betaVariance = variance * C
    Beta
  }

  /** Predicts $\\hat{Y} = x\\beta$ for a new observation matrix `x`
    * (same column count as the training X; any number of rows).
    *
    * @param x matrix of observations to score
    * @return predicted responses, one per row of `x`
    */
  def predict(x: DenseMatrix[Double]): DenseVector[Double] = {
    // Bug fix: the bias column must match the INPUT's row count (was X.rows),
    // which broke predictions on matrices with a different number of rows.
    val xBias = DenseMatrix.horzcat(DenseMatrix.ones[Double](x.rows, 1), x)
    val yHat = Beta.t * xBias
    yHat.inner
  }
}
// Convenience factory: LeastSquares(X, Y) instead of new LeastSquares(X, Y).
object LeastSquares {
  def apply(X: DenseMatrix[Double], Y: DenseVector[Double]) =
    new LeastSquares(X, Y)
}
import sbt._
object Dependencies {
lazy val kafka = "org.apache.kafka" % "kafka_2.12" % "2.2.1"
lazy val typesafeConfig = "com.typesafe" % "config" % "1.3.4"
lazy val scalaTest = "org.scalatest" %% "scalatest" % "3.0.8"
}
| ysden123/poc | pkafka/kafka-commit/project/Dependencies.scala | Scala | mit | 232 |
package org.orbeon.oxf.xml
import java.util
import org.xml.sax.{Attributes, Locator}
/** An XMLReceiver that forwards every SAX/lexical event, in order, to each of
  * the receivers it wraps. */
class TeeXMLReceiver extends XMLReceiver {

  // NOTE: Kept as an `Array`, as `List` and `Iterator` are less efficient (profiling)
  private var xmlReceivers: Array[XMLReceiver] = null

  def this(receivers: util.List[XMLReceiver]) = {
    this()
    xmlReceivers = new Array[XMLReceiver](receivers.size)
    receivers.toArray(xmlReceivers)
  }

  def this(xmlReceiver1: XMLReceiver, xmlReceiver2: XMLReceiver) = {
    this()
    xmlReceivers = Array(xmlReceiver1, xmlReceiver2)
  }

  def this(xmlReceiver1: XMLReceiver, xmlReceiver2: XMLReceiver, xmlReceiver3: XMLReceiver) = {
    this()
    xmlReceivers = Array(xmlReceiver1, xmlReceiver2, xmlReceiver3)
  }

  // Broadcasts one callback to every receiver, in registration order.
  // A `while` over the array keeps the hot path allocation-free.
  @inline private def broadcast(op: XMLReceiver => Unit): Unit = {
    var i = 0
    while (i < xmlReceivers.length) {
      op(xmlReceivers(i))
      i += 1
    }
  }

  def setDocumentLocator(locator: Locator): Unit = broadcast(_.setDocumentLocator(locator))

  def startDocument(): Unit = broadcast(_.startDocument())

  def endDocument(): Unit = broadcast(_.endDocument())

  def startPrefixMapping(prefix: String, uri: String): Unit = broadcast(_.startPrefixMapping(prefix, uri))

  def endPrefixMapping(prefix: String): Unit = broadcast(_.endPrefixMapping(prefix))

  def startElement(namespaceURI: String, localName: String, qName: String, atts: Attributes): Unit =
    broadcast(_.startElement(namespaceURI, localName, qName, atts))

  def endElement(namespaceURI: String, localName: String, qName: String): Unit =
    broadcast(_.endElement(namespaceURI, localName, qName))

  def characters(ch: Array[Char], start: Int, length: Int): Unit =
    broadcast(_.characters(ch, start, length))

  def ignorableWhitespace(ch: Array[Char], start: Int, length: Int): Unit =
    broadcast(_.ignorableWhitespace(ch, start, length))

  def processingInstruction(target: String, data: String): Unit =
    broadcast(_.processingInstruction(target, data))

  def skippedEntity(name: String): Unit = broadcast(_.skippedEntity(name))

  def startDTD(name: String, publicId: String, systemId: String): Unit =
    broadcast(_.startDTD(name, publicId, systemId))

  def endDTD(): Unit = broadcast(_.endDTD())

  def startEntity(name: String): Unit = broadcast(_.startEntity(name))

  def endEntity(name: String): Unit = broadcast(_.endEntity(name))

  def startCDATA(): Unit = broadcast(_.startCDATA())

  def endCDATA(): Unit = broadcast(_.endCDATA())

  def comment(ch: Array[Char], start: Int, length: Int): Unit =
    broadcast(_.comment(ch, start, length))
}
package org.jetbrains.plugins.scala.lang.optimize
package generated
// Generated test suite: each testX() runs a before/after import-optimization
// fixture from the "implicits/" data folder via doTest().
class OptimizeImportsImplicitsTest extends OptimizeImportsTestBase {
  //This class was generated by build script, please don't change this
  // (manual edits will be lost on regeneration)

  override def folderPath: String = super.folderPath + "implicits/"

  def testAssignment() = doTest()

  def testImplicitApply() = doTest()

  def testImplicitReference() = doTest()

  def testImplicitReference2() = doTest()

  def testImplicitReturnAndValVarAssignment() = doTest()

  def testImplicitlyConvertedAndProvidedArguments() = doTest()

  def testImplicitParamatersUsed() = doTest()

  def testImplicitParametersDeeperLevel() = doTest()

  def testImplicitsNewClass() = doTest()

  def testJConversions() = doTest()

  def testOverloadedImplicits() = doTest()

  def testSCL6783() = doTest()
}
package com.delprks.productservicesprototype.api.rejection
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.RejectionHandler
import com.delprks.productservicesprototype.api.directives.ErrorResponseDirectives.ErrorResponseData
import com.delprks.productservicesprototype.api.directives.ErrorResponseDirectives
import com.delprks.productservicesprototype.config.Config
// Mixin installing an implicit akka-http RejectionHandler that renders all
// rejections in the service's standard JSON error format.
trait ProductServicesPrototypeRejectionHandler extends ErrorResponseDirectives
  with Config {

  implicit val rejectionHandler: RejectionHandler = RejectionHandler.newBuilder()
    // Custom rejections carry their own status code and message.
    .handle { case productServicesPrototypeRejection: ProductServicesPrototypeRejection =>
      completeWithError(errorSchemaUrl, errorDocumentationUrl) {
        ErrorResponseData(productServicesPrototypeRejection.statusCode, productServicesPrototypeRejection.message)
      }
    }
    // Unmatched routes become a uniform 404 error response.
    .handleNotFound {
      completeWithError(errorSchemaUrl, errorDocumentationUrl) {
        ErrorResponseData(StatusCodes.NotFound, "Not found")
      }
    }
    .result()
}
| delprks/product-services-prototype | src/main/scala/com/delprks/productservicesprototype/api/rejection/ProductServicesPrototypeRejectionHandler.scala | Scala | mit | 1,038 |
/**
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.appjet.oui;
import org.mozilla.javascript.{Context,Scriptable,ScriptableObject};
import net.sf.json.util.JSONStringer;
import net.sf.json.{JSONObject,JSONArray};
import scala.util.matching.Regex;
/** Entry points for converting between JSON text and Rhino object graphs. */
object FastJSON {
  // Pattern used to recognize date strings when parseDate is enabled.
  val dateRe = """^(\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d{3}Z)$""".r

  /** Serializes a Rhino scriptable graph to a JSON string. */
  def stringify(rhinoObj: Scriptable): String =
    FastJSONStringify.stringify(rhinoObj)

  /** Parses JSON text into Rhino objects within the given execution context;
    * when `parseDate` is set, strings matching `dateRe` become Date objects. */
  def parse(exctx: ExecutionContext, source: String, parseDate: Boolean = false): Scriptable =
    new FastJSONParser(exctx, dateRe, parseDate).parse(source)
}
//----------------------------------------------------------------
// FastJSONStringify
//----------------------------------------------------------------
/** Serializes Rhino object graphs to JSON using json-lib's JSONStringer.
  * Undefined values are skipped; whole-valued doubles are emitted as integers. */
object FastJSONStringify {
  def stringify(rhinoObj: Scriptable): String = {
    val stringer = new JSONStringer();
    stringerizeScriptable(stringer, rhinoObj);
    return stringer.toString();
  }

  // Emits one value of any supported kind; silently skips `undefined`.
  private def stringerize(s: JSONStringer, v: Object) {
    if (v == Context.getUndefinedValue) {
      return;
    }
    v match {
      case (o:Scriptable) => stringerizeScriptable(s, o);
      case (o:Number) => {
        val d = o.doubleValue;
        // Emit whole numbers without a fractional part (e.g. 3 rather than 3.0).
        if (d.toLong.toDouble == d) {
          s.value(d.toLong);
        }
        else {
          s.value(o);
        }
      }
      case o => s.value(o);
    }
  }

  // Dispatches on the Rhino class name: Date and Array get special handling,
  // everything else is serialized as a plain object.
  private def stringerizeScriptable(stringer: JSONStringer, rhinoObj: Scriptable) {
    if (rhinoObj.getClassName() == "Date") {
      stringerizeDate(stringer, rhinoObj);
    } else if (rhinoObj.getClassName() == "Array") {
      stringerizeArray(stringer, rhinoObj);
    } else {
      stringerizeObj(stringer, rhinoObj);
    }
  }

  // Emits a JSON object from the scriptable's own (String or Number) ids,
  // skipping missing, null and undefined values.
  private def stringerizeObj(stringer: JSONStringer, rhinoObj: Scriptable) {
    stringer.`object`();
    for (id <- rhinoObj.getIds()) {
      val k = id.toString();
      var v:Object = null;
      id match {
        case (s:String) => { v = rhinoObj.get(s, rhinoObj); }
        case (n:Number) => { v = rhinoObj.get(n.intValue, rhinoObj); }
        case _ => {}
      }
      if (v != null && v != Scriptable.NOT_FOUND && v != Context.getUndefinedValue) {
        stringer.key(k);
        stringerize(stringer, v);
      }
    }
    stringer.endObject();
  }

  // Emits a Date by delegating to the JavaScript object's own toJSON method.
  private def stringerizeDate(stringer: JSONStringer, rhinoObj: Scriptable) {
    val v:Object = ScriptableObject.callMethod(rhinoObj, "toJSON", Array());
    if (v != null && v != Scriptable.NOT_FOUND && v != Scriptable.NOT_FOUND && v != Context.getUndefinedValue) {
      stringerize(stringer, v);
    }
  }

  // Emits a JSON array; gaps between numeric indices are filled with nulls
  // so positions are preserved.
  private def stringerizeArray(stringer: JSONStringer, rhinoArray: Scriptable) {
    stringer.`array`();
    val ids:Array[Object] = rhinoArray.getIds();
    var x = 0;
    for (i <- 0 until ids.length) {
      // we ignore string keys on js arrays. crockford's "official"
      // json library does this as well.
      if (ids(i).isInstanceOf[Number]) {
        val id:Int = ids(i).asInstanceOf[Number].intValue;
        while (x < id) {
          stringer.value(null);
          x += 1;
        }
        val v:Object = rhinoArray.get(id, rhinoArray);
        stringerize(stringer, v);
        x += 1;
      }
    }
    stringer.endArray();
  }
}
//----------------------------------------------------------------
// FastJSONParse
//----------------------------------------------------------------
/** Parses JSON text (via json-lib) into Rhino objects created in the execution
  * context's global scope. When `parseDate` is set, strings matching `dateRe`
  * become JavaScript Date objects. */
class FastJSONParser(val ctx:ExecutionContext, val dateRe:Regex, val parseDate:Boolean) {

  /** Entry point: a leading '[' selects array parsing, anything else object parsing. */
  def parse(source: String): Scriptable =
    if (source(0) == '[') jsonToRhino(JSONArray.fromObject(source)).asInstanceOf[Scriptable]
    else jsonToRhino(JSONObject.fromObject(source)).asInstanceOf[Scriptable]

  // Constructs a JavaScript Date in the runner's global scope from an ISO string.
  private def newDate(dateString: String): Scriptable =
    Context.getCurrentContext().newObject(ctx.runner.globalScope, "Date", Array(dateString))

  private def newObj(): Scriptable =
    Context.getCurrentContext().newObject(ctx.runner.globalScope)

  private def newArray(): Scriptable =
    Context.getCurrentContext().newArray(ctx.runner.globalScope, 0)

  // Recursively converts a json-lib value into its Rhino equivalent.
  // Note: the null-object check must precede the general JSONObject case.
  private def jsonToRhino(json: Object): Object = json match {
    case (o:JSONArray) => jsonArrayToRhino(o)
    case (o:JSONObject) if (o.isNullObject()) => null
    case (o:JSONObject) => jsonObjectToRhino(o)
    case (o:String) if (parseDate && dateRe.pattern.matcher(o).matches) => newDate(o)
    case o => o
  }

  private def jsonArrayToRhino(json: JSONArray): Scriptable = {
    val result = newArray()
    var index = 0
    val size = json.size()
    while (index < size) {
      result.put(index, result, jsonToRhino(json.get(index)))
      index += 1
    }
    result
  }

  private def jsonObjectToRhino(json: JSONObject): Scriptable = {
    val result = newObj()
    val names:Array[Object] = json.names().toArray()
    if (names != null) {
      for (name <- names) {
        val key = name.asInstanceOf[String]
        val converted = jsonToRhino(json.get(key))
        // Keys that parse as integers become numeric indices, mirroring
        // JavaScript property semantics; all other keys stay strings.
        val asIndex = try Some(key.toInt) catch { case _: NumberFormatException => None }
        asIndex match {
          case Some(i) => result.put(i, result, converted)
          case None => result.put(key, result, converted)
        }
      }
    }
    result
  }
}
| whackpad/whackpad | infrastructure/net.appjet.oui/FastJSON.scala | Scala | apache-2.0 | 5,794 |
// CC#9248 is conspicuously absent from the printed trees at every phase.
// Minimal reproduction for the checker failure described above: a method with
// a higher-kinded type parameter whose bound (`Traversable[X]`) carries the
// type param X. The checker output below reports an owner mismatch for X
// (CC#9248 expected vs CC#9246 found). Do not "clean up" this class — its
// exact shape is what triggers the reported behavior.
class A {
  def f[A, CC[X] <: Traversable[X]](): Unit = ()
}
// % work/check all -uniqid -Xprint:typer work/fail2.scala
//
// TypeStack init: REFERENCE(type AnyRef#2783)
// [Not checkable: parser]
// [Not checkable: namer]
// [Not checkable: packageobjects]
// [[syntax trees at end of typer]]// Scala source: fail2.scala
// package <empty>#3 {
// class A#9239 extends java.lang.Object#2488 with ScalaObject#1481 {
// def this#9243(): A#9239 = {
// A#9239.super.this#5850();
// ()
// };
// def f#9244[A#9245 >: Nothing#5846 <: Any#46, CC#9246[X#11055 >: Nothing#5846 <: Any#46] >: [X#11055]Nothing#5846 <: [X#11055]Traversable#3199[X#11055]](): Unit#3819 = ()
// }
// }
//
// [Now checking: typer]
// [check: typer] work/fail2.scala:3: Expected owner type CC#9248, found type CC#9246: Trees$TypeDef / type X#11055>: Nothing#5846 <: Any#46
// [Now checking: superaccessors]
// [check: superaccessors] work/fail2.scala:3: Expected owner type CC#9248, found type CC#9246: Trees$TypeDef / type X#11055>: Nothing#5846 <: Any#46
// [Now checking: pickler]
// [check: pickler] work/fail2.scala:3: Expected owner type CC#9248, found type CC#9246: Trees$TypeDef / type X#11055>: Nothing#5846 <: Any#46
// [Now checking: refchecks]
// [check: refchecks] work/fail2.scala:3: Expected owner type CC#9248, found type CC#9246: Trees$TypeDef / type X#11055>: Nothing#5846 <: Any#46
// [Now checking: selectiveanf]
// [check: selectiveanf] work/fail2.scala:3: Expected owner type CC#9248, found type CC#9246: Trees$TypeDef / type X#11055>: Nothing#5846 <: Any#46
// [Now checking: liftcode]
// [check: liftcode] work/fail2.scala:3: Expected owner type CC#9248, found type CC#9246: Trees$TypeDef / type X#11055>: Nothing#5846 <: Any#46
// [Now checking: selectivecps]
// [check: selectivecps] work/fail2.scala:3: Expected owner type CC#9248, found type CC#9246: Trees$TypeDef / type X#11055>: Nothing#5846 <: Any#46
// [Now checking: uncurry]
// [check: uncurry] work/fail2.scala:3: Expected owner type CC#9248, found type CC#9246: Trees$TypeDef / type X#11055>: Nothing#5846 <: Any#46
// [Now checking: tailcalls]
// [check: tailcalls] work/fail2.scala:3: Expected owner type CC#9248, found type CC#9246: Trees$TypeDef / type X#11055>: Nothing#5846 <: Any#46
// [Not checkable: specialize]
// [Not checkable: explicitouter]
// [Now checking: erasure]
// [Now checking: lazyvals]
// [Now checking: lambdalift]
// [Now checking: constructors]
// [Now checking: flatten]
// [Now checking: mixin]
// [Now checking: cleanup]
// ... | felixmulder/scala | test/checker-tests/fail2.scala | Scala | bsd-3-clause | 2,615 |
/*
* Copyright 2013-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package laika.rewrite
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import laika.tree.Elements._
import laika.tree.helper.ModelBuilder
/** Behavioral tests for the document-tree rewrite facility: element
  * replacement, element removal, and the instance-sharing guarantees the
  * rewriter makes when nothing (or only part of the tree) changes.
  */
class RewriteSpec extends FlatSpec with Matchers with ModelBuilder {

  "The rewriter" should "replace the first element of the children in a container" in {
    val doc = root(p("a"), p("b"), p("c"))
    doc.rewrite { case Paragraph(Seq(Text("a",_)),_) => Some(p("x")) } shouldBe root(p("x"), p("b"), p("c"))
  }

  it should "replace an element in the middle of the list of children in a container" in {
    val doc = root(p("a"), p("b"), p("c"))
    doc.rewrite { case Paragraph(Seq(Text("b",_)),_) => Some(p("x")) } shouldBe root(p("a"), p("x"), p("c"))
  }

  it should "replace the last element of the children in a container" in {
    val doc = root(p("a"), p("b"), p("c"))
    doc.rewrite { case Paragraph(Seq(Text("c",_)),_) => Some(p("x")) } shouldBe root(p("a"), p("b"), p("x"))
  }

  it should "remove the first element of the children in a container" in {
    val doc = root(p("a"), p("b"), p("c"))
    doc.rewrite { case Paragraph(Seq(Text("a",_)),_) => None } shouldBe root(p("b"), p("c"))
  }

  it should "remove an element in the middle of the list of children in a container" in {
    val doc = root(p("a"), p("b"), p("c"))
    doc.rewrite { case Paragraph(Seq(Text("b",_)),_) => None } shouldBe root(p("a"), p("c"))
  }

  it should "remove the last element of the children in a container" in {
    val doc = root(p("a"), p("b"), p("c"))
    doc.rewrite { case Paragraph(Seq(Text("c",_)),_) => None } shouldBe root(p("a"), p("b"))
  }

  it should "replace the header of a section, which is not part of the content list" in {
    val doc = root(Section(h(1, txt("Title")), List(p("Text"))))
    doc.rewrite { case Header(1, content, _) => Some(Header(2, content)) } shouldBe root(Section(h(2, txt("Title")), List(p("Text"))))
  }

  it should "return the same instance if no rewrite rule matches" in {
    val doc = root(p("a"), p("b"), p("c"))
    doc.rewrite { case Paragraph(Seq(Text("f",_)),_) => None } should be theSameInstanceAs (doc)
  }

  it should "return the same instance if the rewrite rule always returns the same instance" in {
    val doc = root(p("a"), p("b"), p("c"))
    doc.rewrite { case element => Some(element) } should be theSameInstanceAs (doc)
  }

  it should "return a new instance for a branch in the document tree that contains one or more modified children" in {
    val original = root(quote(p("a")), quote(p("b")), quote(p("c")))
    val rewritten = original.rewrite { case Paragraph(Seq(Text("a",_)),_) => Some(p("x")) }
    original.content(0) should not be theSameInstanceAs (rewritten.content(0))
  }

  it should "return the same instance for a branch in the document tree that does not contain any modified children" in {
    val original = root(quote(p("a")), quote(p("b")), quote(p("c")))
    val rewritten = original.rewrite { case Paragraph(Seq(Text("a",_)),_) => Some(p("x")) }
    original.content(1) should be theSameInstanceAs (rewritten.content(1))
  }

  it should "throw an exception when a rewrite rule produces a new element that violates the contract of its parent element" in {
    val doc = root(Section(h(1,"Title"), Nil))
    an [doc.RewriteException] should be thrownBy { doc.rewrite { case Header(_,_,_) => Some(em("x")) }}
  }

}
| amuramatsu/Laika | core/src/test/scala/laika/rewrite/RewriteSpec.scala | Scala | apache-2.0 | 4,163 |
/*
Copyright (c) 2017-2021, Robby, Kansas State University
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.sireum
object Z extends $ZCompanion[Z] {
  type Index = Z

  // BigInt images of the scala.Long bounds, cached once; used by
  // MP.BigInt.pack to decide when an arbitrary-precision value fits back
  // into the compact Long representation.
  val longMin = scala.BigInt(scala.Long.MinValue)
  val longMax = scala.BigInt(scala.Long.MaxValue)
  /** Arbitrary-precision integer backend with two representations: a compact
    * `Long` form for values that fit in a machine word, and a `BigInt` form
    * for everything else.  Arithmetic on the compact form checks for 64-bit
    * overflow and transparently upgrades to `BigInt`; results of `/` and `%`
    * computed in `BigInt` are demoted back via `pack` when they fit.
    */
  object MP {
    val zero: Z = MP.Long(0)
    val one: Z = MP.Long(1)
    // minus one
    val mone: Z = MP.Long(-1)
    /** Compact representation: the value fits in a scala.Long. */
    final case class Long(value: scala.Long) extends Z {
      override def toBigInt: scala.BigInt = scala.BigInt(value)
      override def toIntOpt: scala.Option[scala.Int] =
        if (scala.Int.MinValue <= value && value <= scala.Int.MaxValue) scala.Some(value.toInt)
        else scala.None
      override def toLongOpt: scala.Option[scala.Long] = scala.Some(value)
      override def toString: Predef.String = value.toString
      override def toInt: scala.Int = value.toInt
      override def toLong: scala.Long = value.toLong
      override def hashCode: scala.Int = value.toInt
    }
    /** Arbitrary-precision representation for values outside the Long range
      * (or not yet demoted via `pack`). */
    final case class BigInt(value: scala.BigInt) extends Z {
      override def toBigInt: scala.BigInt = value
      override def toIntOpt: scala.Option[scala.Int] =
        if (scala.Int.MinValue <= value && value <= scala.Int.MaxValue) scala.Some(value.toInt)
        else scala.None
      override def toLongOpt: scala.Option[scala.Long] =
        if (scala.Long.MinValue <= value && value <= scala.Long.MaxValue) scala.Some(value.toLong)
        else scala.None
      override def toInt: scala.Int = value.toInt
      override def toLong: scala.Long = value.toLong
      override def toString: Predef.String = value.toString
      // Demote to the compact Long form when the value fits, preserving the
      // invariant that in-range values normally use MP.Long.
      def pack: Z =
        if ((value.compareTo(Z.longMin) >= 0) &&
          (value.compareTo(Z.longMax) <= 0)) Long(value.longValue)
        else this
      override def hashCode: scala.Int = value.toInt
    }
    @inline def unsupported(op: Predef.String, other: Z): Nothing =
      halt(s"Unsupported Z operation '$op' with ${other.Name}")
    // Negation: scala.Long.MinValue has no 64-bit negation, so that one case
    // falls through to BigInt.
    @inline def unary_-(n: Z): Z = {
      n match {
        case Long(m) =>
          if (m != scala.Long.MinValue)
            return Long(-m)
        case _: Z =>
      }
      BigInt(-n.toBigInt)
    }
    // Addition with the classic sign-bit overflow test: overflow occurred iff
    // both operands' signs differ from the result's sign (same check as
    // java.lang.Math.addExact); on overflow, recompute in BigInt.
    @inline def +(n: Z, other: Z): Z = {
      (n, other) match {
        case (Long(n1), Long(n2)) =>
          val r = n1 + n2
          if (((n1 ^ r) & (n2 ^ r)) >= 0L)
            return Long(r)
        case (_: Z, _: Z) =>
      }
      BigInt(n.toBigInt + other.toBigInt)
    }
    // Subtraction is addition of the negation; unary_- already handles the
    // Long.MinValue upgrade.
    @inline def -(n: Z, other: Z): Z = this.+(n, -other)
    // Multiplication with overflow detection via division round-trip (same
    // strategy as java.lang.Math.multiplyExact).
    @inline def *(n: Z, other: Z): Z = {
      (n, other) match {
        case (Long(n1), Long(n2)) =>
          val r = n1 * n2
          if (r == 0) return zero
          var upgrade = false
          if (n2 > n1) {
            if (((n2 == -1) && (n1 == scala.Long.MinValue)) || (r / n2 != n1))
              upgrade = true
          } else {
            if (((n1 == -1) && (n2 == scala.Long.MinValue)) || (r / n1 != n2))
              upgrade = true
          }
          if (!upgrade) return Long(r)
        case (_: Z, _: Z) =>
      }
      BigInt(n.toBigInt * other.toBigInt)
    }
    // Division: the single overflowing case Long.MinValue / -1 goes through
    // BigInt (then packs).  Division by zero throws ArithmeticException in
    // both representations.
    @inline def /(n: Z, other: Z): Z = {
      (n, other) match {
        case (Long(n1), Long(n2)) =>
          val r = n1 / n2
          if (!((n1 == scala.Long.MinValue) && (n2 == -1)))
            return Long(r)
        case (_: Z, _: Z) =>
      }
      BigInt(n.toBigInt / other.toBigInt).pack
    }
    // Remainder: Long % cannot overflow (MinValue % -1 == 0), so no guard.
    @inline def %(n: Z, other: Z): Z = {
      (n, other) match {
        case (Long(n1), Long(n2)) => return Long(n1 % n2)
        case (_: Z, _: Z) =>
      }
      BigInt(n.toBigInt % other.toBigInt).pack
    }
    @inline def >(n: Z, other: Z): B = (n, other) match {
      case (Long(n1), Long(n2)) => n1 > n2
      case (_: Z, _: Z) => n.toBigInt > other.toBigInt
    }
    @inline def >=(n: Z, other: Z): B = (n, other) match {
      case (Long(n1), Long(n2)) => n1 >= n2
      case (_: Z, _: Z) => n.toBigInt >= other.toBigInt
    }
    @inline def <(n: Z, other: Z): B = (n, other) match {
      case (Long(n1), Long(n2)) => n1 < n2
      case (_: Z, _: Z) => n.toBigInt < other.toBigInt
    }
    @inline def <=(n: Z, other: Z): B = (n, other) match {
      case (Long(n1), Long(n2)) => n1 <= n2
      case (_: Z, _: Z) => n.toBigInt <= other.toBigInt
    }
    // Mixed-representation equality falls back to BigInt comparison (a packed
    // invariant would make that case impossible for equal values, but this is
    // safe either way).
    @inline def isEqual(n: Z, other: Z): B = (n, other) match {
      case (n: Long, other: Long) => n.value == other.value
      case (_: Z, _: Z) => n.toBigInt == other.toBigInt
    }
    @inline def apply(n: scala.Int): Z = MP.Long(n)
    @inline def apply(n: scala.Long): Z = MP.Long(n)
    @inline def apply(n: scala.BigInt): Z = MP.BigInt(n).pack
    @inline def apply(n: _root_.java.math.BigInteger): Z = MP(scala.BigInt(n))
    // String parsing: normalizes via helper.normNum (project helper), then
    // accepts an optional "0x"/"0X" prefix for hexadecimal input.
    @inline def apply(s: String): Z = {
      val ns = helper.normNum(s.value)
      if (ns.length > 2 && ns.head == '0' && ns.charAt(1).toLower == 'x') MP(scala.BigInt(ns.substring(2), 16))
      else MP(scala.BigInt(ns))
    }
  }
  /** Boxer implementations bridging boxed bit-vector / range values and raw
    * JVM primitive arrays.  Each trait implements the `$internal.Boxer`
    * contract (defined elsewhere — presumably box/unbox plus array
    * create/lookup/store/copy; confirm against $internal).  The `match`
    * expressions are deliberately non-exhaustive: any unexpected runtime type
    * is a programmer error and surfaces as a MatchError.
    */
  object Boxer {
    /** Boxer over scala.Byte payloads (8-bit bit vectors and byte ranges). */
    trait Byte extends $internal.Boxer {
      def box[T](o: scala.Any): T = o match {
        case o: scala.Byte => make(o).asInstanceOf[T]
      }
      // Accepts either a bit-vector box or a range box whose Int value is
      // narrowed to a byte.
      def unbox(o: scala.Any): scala.Byte = o match {
        case o: BV.Byte[_] => o.value
        case o: Range[_] =>
          val v: scala.Int = o.value
          v.toByte
      }
      override def copyMut(src: AnyRef, srcPos: Index, dest: AnyRef, destPos: Index, length: Index): Unit =
        copy(src, srcPos, dest, destPos, length)
      override def create(length: Z): scala.AnyRef = new Array[scala.Byte](length)
      override def lookup[T](a: scala.AnyRef, i: Z): T = a match {
        case a: Array[scala.Byte] => box(a(i))
      }
      override def store(a: scala.AnyRef, i: Z, v: scala.Any): Unit = a match {
        case a: Array[scala.Byte] => a(i) = unbox(v)
      }
      // Concrete subtypes supply the boxed wrapper for a raw byte.
      def make(o: scala.Byte): scala.Any
    }
    /** Boxer over scala.Short payloads (16-bit bit vectors and short ranges). */
    trait Short extends $internal.Boxer {
      def box[T](o: scala.Any): T = o match {
        case o: scala.Short => make(o).asInstanceOf[T]
      }
      def unbox(o: scala.Any): scala.Short = o match {
        case o: BV.Short[_] => o.value
        case o: Range[_] =>
          val v: scala.Int = o.value
          v.toShort
      }
      override def copyMut(src: AnyRef, srcPos: Index, dest: AnyRef, destPos: Index, length: Index): Unit =
        copy(src, srcPos, dest, destPos, length)
      override def create(length: Z): scala.AnyRef = new Array[scala.Short](length)
      override def lookup[T](a: scala.AnyRef, i: Z): T = a match {
        case a: Array[scala.Short] => box(a(i))
      }
      override def store(a: scala.AnyRef, i: Z, v: scala.Any): Unit = a match {
        case a: Array[scala.Short] => a(i) = unbox(v)
      }
      def make(o: scala.Short): scala.Any
    }
    /** Boxer over scala.Int payloads (32-bit bit vectors and int ranges). */
    trait Int extends $internal.Boxer {
      def box[T](o: scala.Any): T = o match {
        case o: scala.Int => make(o).asInstanceOf[T]
      }
      def unbox(o: scala.Any): scala.Int = o match {
        case o: BV.Int[_] => o.value
        case o: Range[_] => o.value
      }
      override def copyMut(src: AnyRef, srcPos: Index, dest: AnyRef, destPos: Index, length: Index): Unit =
        copy(src, srcPos, dest, destPos, length)
      override def create(length: Z): scala.AnyRef = new Array[scala.Int](length)
      override def lookup[T](a: scala.AnyRef, i: Z): T = a match {
        case a: Array[scala.Int] => box(a(i))
      }
      override def store(a: scala.AnyRef, i: Z, v: scala.Any): Unit = a match {
        case a: Array[scala.Int] => a(i) = unbox(v)
      }
      def make(o: scala.Int): scala.Any
    }
    /** Boxer over scala.Long payloads (64-bit bit vectors and long ranges). */
    trait Long extends $internal.Boxer {
      def box[T](o: scala.Any): T = o match {
        case o: scala.Long => make(o).asInstanceOf[T]
      }
      def unbox(o: scala.Any): scala.Long = o match {
        case o: BV.Long[_] => o.value
        case o: Range[_] => o.value
      }
      override def copyMut(src: AnyRef, srcPos: Index, dest: AnyRef, destPos: Index, length: Index): Unit =
        copy(src, srcPos, dest, destPos, length)
      override def create(length: Z): scala.AnyRef = new Array[scala.Long](length)
      override def lookup[T](a: scala.AnyRef, i: Z): T = a match {
        case a: Array[scala.Long] => box(a(i))
      }
      override def store(a: scala.AnyRef, i: Z, v: scala.Any): Unit = a match {
        case a: Array[scala.Long] => a(i) = unbox(v)
      }
      def make(o: scala.Long): scala.Any
    }
    /** Boxer for Z itself: compact MP.Long values box as-is; scala.BigInt
      * values are wrapped in MP.BigInt, and unboxing reverses that. */
    object Z extends $internal.Boxer {
      def box[T](o: scala.Any): T = o match {
        case o: MP.Long => o.asInstanceOf[T]
        case o: scala.BigInt => MP.BigInt(o).asInstanceOf[T]
      }
      def unbox(o: scala.Any): scala.Any = o match {
        case o: MP.Long => o
        case o: MP.BigInt => o.value
      }
      override def copyMut(src: AnyRef, srcPos: Index, dest: AnyRef, destPos: Index, length: Index): Unit =
        copy(src, srcPos, dest, destPos, length)
    }
  }
object U {
object _8 {
def apply(value: scala.Byte): _8 = new _8(value)
def apply(value: scala.Int): _8 = new _8(value.toByte)
}
class _8(val value: scala.Byte) extends AnyVal {
def unary_- : _8 = _8(-value)
def unary_~ : _8 = _8(~value)
def +(other: _8): _8 = _8(value + other.value)
def -(other: _8): _8 = _8(value - other.value)
def *(other: _8): _8 = _8(value * other.value)
def /(other: _8): _8 = _8(toInt / other.toInt)
def %(other: _8): _8 = _8(toInt % other.toInt)
def <<(n: scala.Int): _8 = _8((value & 0xff) << (n & 7))
def >>(n: scala.Int): _8 = _8((value & 0xff) >>> (n & 7))
def >>>(n: scala.Int): _8 = _8((value & 0xff) >>> (n & 7))
def &(other: _8): _8 = _8((value & 0xff) & (other.value & 0xff))
def |(other: _8): _8 = _8((value & 0xff) | (other.value & 0xff))
def ^(other: _8): _8 = _8((value & 0xff) ^ (other.value & 0xff))
def <(other: _8): scala.Boolean = toInt < other.toInt
def <=(other: _8): scala.Boolean = toInt <= other.toInt
def >(other: _8): scala.Boolean = toInt > other.toInt
def >=(other: _8): scala.Boolean = toInt >= other.toInt
def toInt: scala.Int = value & 0xff
override def toString: Predef.String = f"$value%02X"
}
object _16 {
def apply(value: scala.Short): _16 = new _16(value)
def apply(value: scala.Int): _16 = new _16(value.toShort)
}
class _16(val value: scala.Short) extends AnyVal {
def unary_- : _16 = _16(-value)
def unary_~ : _16 = _16(~value)
def +(other: _16): _16 = _16(value + other.value)
def -(other: _16): _16 = _16(value - other.value)
def *(other: _16): _16 = _16(value * other.value)
def /(other: _16): _16 = _16(toInt / other.toInt)
def %(other: _16): _16 = _16(toInt % other.toInt)
def <<(n: scala.Int): _16 = _16((value & 0xFFFF) << (n & 15))
def >>(n: scala.Int): _16 = _16((value & 0xFFFF) >>> (n & 15))
def >>>(n: scala.Int): _16 = _16((value & 0xFFFF) >>> (n & 15))
def &(other: _16): _16 = _16((value & 0xFFFF) & (other.value & 0xFFFF))
def |(other: _16): _16 = _16((value & 0xFFFF) | (other.value & 0xFFFF))
def ^(other: _16): _16 = _16((value & 0xFFFF) ^ (other.value & 0xFFFF))
def <(other: _16): scala.Boolean = toInt < other.toInt
def <=(other: _16): scala.Boolean = toInt <= other.toInt
def >(other: _16): scala.Boolean = toInt > other.toInt
def >=(other: _16): scala.Boolean = toInt >= other.toInt
def toInt: scala.Int = value & 0xFFFF
override def toString: Predef.String = f"$value%04X"
}
object _32 {
def apply(value: scala.Int): _32 = new _32(value)
def apply(value: scala.Long): _32 = new _32(value.toInt)
}
class _32(val value: scala.Int) extends AnyVal {
def unary_- : _32 = _32(-value)
def unary_~ : _32 = _32(~value)
def +(other: _32): _32 = _32(value + other.value)
def -(other: _32): _32 = _32(value - other.value)
def *(other: _32): _32 = _32(value * other.value)
def /(other: _32): _32 = _32(_root_.java.lang.Integer.divideUnsigned(value, other.value))
def %(other: _32): _32 = _32(_root_.java.lang.Integer.remainderUnsigned(value, other.value))
def <<(n: scala.Int): _32 = _32(value << n)
def >>(n: scala.Int): _32 = _32(value >> n)
def >>>(n: scala.Int): _32 = _32(value >>> n)
def &(other: _32): _32 = _32(value & other.value)
def |(other: _32): _32 = _32(value | other.value)
def ^(other: _32): _32 = _32(value ^ other.value)
def <(other: _32): scala.Boolean = _root_.java.lang.Integer.compareUnsigned(value, other.value) < 0
def <=(other: _32): scala.Boolean = _root_.java.lang.Integer.compareUnsigned(value, other.value) <= 0
def >(other: _32): scala.Boolean = _root_.java.lang.Integer.compareUnsigned(value, other.value) > 0
def >=(other: _32): scala.Boolean = _root_.java.lang.Integer.compareUnsigned(value, other.value) >= 0
def toLong: scala.Long = value & 0xFFFFFFFFL
override def toString: Predef.String = f"$value%08X"
}
object _64 {
val NumValues: scala.BigInt = scala.BigInt(1) << 64
def apply(value: scala.Long): _64 = new _64(value)
}
class _64(val value: scala.Long) extends AnyVal {
def unary_- : _64 = _64(-value)
def unary_~ : _64 = _64(~value)
def +(other: _64): _64 = _64(value + other.value)
def -(other: _64): _64 = _64(value - other.value)
def *(other: _64): _64 = _64(value * other.value)
def /(other: _64): _64 = _64(_root_.java.lang.Long.divideUnsigned(value, other.value))
def %(other: _64): _64 = _64(_root_.java.lang.Long.remainderUnsigned(value, other.value))
def <<(n: scala.Int): _64 = _64(value << n)
def >>(n: scala.Int): _64 = _64(value >> n)
def >>>(n: scala.Int): _64 = _64(value >>> n)
def &(other: _64): _64 = _64(value & other.value)
def |(other: _64): _64 = _64(value | other.value)
def ^(other: _64): _64 = _64(value ^ other.value)
def <(other: _64): scala.Boolean = _root_.java.lang.Long.compareUnsigned(value, other.value) < 0
def <=(other: _64): scala.Boolean = _root_.java.lang.Long.compareUnsigned(value, other.value) <= 0
def >(other: _64): scala.Boolean = _root_.java.lang.Long.compareUnsigned(value, other.value) > 0
def >=(other: _64): scala.Boolean = _root_.java.lang.Long.compareUnsigned(value, other.value) >= 0
def toBigInt: scala.BigInt =
if (value < 0) _64.NumValues + value else scala.BigInt(value)
override def toString: Predef.String = f"$value%016X"
}
}
object BV {
    /** 8-bit bit-vector integer.
      *
      * Every operation dispatches on policy flags fixed by the concrete type:
      *  - isWrapped: wrapped types use modular 8-bit machine arithmetic;
      *    non-wrapped types compute in arbitrary precision (toMP) and
      *    range-check the result against Min/Max in make(Z);
      *  - isSigned: signed machine arithmetic uses the JVM byte/int path,
      *    unsigned arithmetic is delegated to U._8;
      *  - Index / isZeroIndex: base subtracted when converting to a
      *    zero-based index (toIndex).
      */
    trait Byte[T <: Byte[T]] extends Any with ZLike[T] with $internal.HasBoxer {
      this: T =>
      final def isBitVector: scala.Boolean = true
      final def hasMin: scala.Boolean = true
      final def hasMax: scala.Boolean = true
      def value: scala.Byte
      def make(value: scala.Byte): T
      def Min: T
      def Max: T
      def Index: T
      def isZeroIndex: scala.Boolean
      def isWrapped: scala.Boolean
      def BitWidth: scala.Int
      @inline private final def toByte: scala.Byte = value
      @inline private final def toU8: U._8 = U._8(toByte)
      // Range-checked narrowing from arbitrary precision; used on the
      // non-wrapped path so out-of-range results fail fast.
      @inline private final def make(value: Z): T = {
        assert(Min.toMP <= value, s"$value should not be less than $Name.Min ($Min)")
        assert(value <= Max.toMP, s"$value should not be greater than $Name.Max ($Max)")
        make(value match {
          case MP.Long(n) => n.toByte
          case MP.BigInt(n) => n.toByte
        })
      }
      @inline private final def umake(value: U._8): T = make(value.value)
      // Truncating construction from an Int intermediate (wrapped path).
      @inline private final def makeByte(value: scala.Int): T =
        if (isSigned) make(value.toByte) else make(U._8(value).value)
      final def unary_- : T =
        if (!isWrapped) make(-toMP)
        else if (isSigned) makeByte(-toByte)
        else umake(-toU8)
      final def +(other: T): T = {
        if (!isWrapped) make(toMP + other.toMP)
        else if (isSigned) makeByte(toByte + other.toByte)
        else umake(toU8 + other.toU8)
      }
      final def -(other: T): T = {
        if (!isWrapped) make(toMP - other.toMP)
        else if (isSigned) makeByte(toByte - other.toByte)
        else umake(toU8 - other.toU8)
      }
      final def *(other: T): T = {
        if (!isWrapped) make(toMP * other.toMP)
        else if (isSigned) makeByte(toByte * other.toByte)
        else umake(toU8 * other.toU8)
      }
      final def /(other: T): T = {
        if (!isWrapped) make(toMP / other.toMP)
        else if (isSigned) makeByte(toByte / other.toByte)
        else umake(toU8 / other.toU8)
      }
      final def %(other: T): T = {
        if (!isWrapped) make(toMP % other.toMP)
        else if (isSigned) makeByte(toByte % other.toByte)
        else umake(toU8 % other.toU8)
      }
      final def >(other: T): B = {
        if (isSigned) toByte > other.toByte
        else toU8 > other.toU8
      }
      final def >=(other: T): B = {
        if (isSigned) toByte >= other.toByte
        else toU8 >= other.toU8
      }
      final def <(other: T): B = {
        if (isSigned) toByte < other.toByte
        else toU8 < other.toU8
      }
      final def <=(other: T): B = {
        if (isSigned) toByte <= other.toByte
        else toU8 <= other.toU8
      }
      // Signed >> is arithmetic; for unsigned types >> and >>> coincide.
      final def >>(other: T): T = {
        if (isSigned) makeByte(toByte >> other.toByte)
        else this >>> other
      }
      final def >>>(other: T): T = {
        if (isSigned) makeByte(toByte >>> other.toByte)
        else umake(toU8 >>> other.toU8.toInt)
      }
      final def <<(other: T): T = {
        if (isSigned) makeByte(toByte << other.toByte)
        else umake(toU8 << other.toU8.toInt)
      }
      final def &(other: T): T = {
        if (isSigned) makeByte(toByte & other.toByte)
        else umake(toU8 & other.toU8)
      }
      final def |(other: T): T = {
        if (isSigned) makeByte(toByte | other.toByte)
        else umake(toU8 | other.toU8)
      }
      // `|^` is bitwise xor in this API.
      final def |^(other: T): T = {
        if (isSigned) makeByte(toByte ^ other.toByte)
        else umake(toU8 ^ other.toU8)
      }
      final def unary_~ : T =
        if (isSigned) makeByte(~toByte)
        else umake(~toU8)
      // NOTE(review): increase/decrease always use machine arithmetic — on a
      // non-wrapped type the subsequent make(Byte) does not range-check;
      // confirm whether wrap-around at the bounds is intended here.
      final def increase: T =
        if (isSigned) makeByte(toByte + 1)
        else umake(toU8 + U._8(1))
      final def decrease: T =
        if (isSigned) makeByte(toByte - 1)
        else umake(toU8 - U._8(1))
      final override def toString: Predef.String =
        if (isSigned) toByte.toString
        else toU8.toString
      final override def toBigInt: scala.BigInt =
        if (isSigned) scala.BigInt(toByte)
        else toU8.toInt.toBigInt
      final override def toMP: Z =
        if (isSigned) MP(toByte)
        else MP(toU8.toInt)
      final override def toIndex: Z.Index =
        if (isZeroIndex) toMP else toMP - Index.toMP
    }
    /** 16-bit bit-vector integer; same policy-flag dispatch as [[Byte]]
      * (isWrapped → modular vs range-checked arithmetic, isSigned → JVM short
      * path vs unsigned U._16 path, Index/isZeroIndex → toIndex base). */
    trait Short[T <: Short[T]] extends Any with ZLike[T] with $internal.HasBoxer {
      this: T =>
      final def isBitVector: scala.Boolean = true
      final def hasMin: scala.Boolean = true
      final def hasMax: scala.Boolean = true
      def value: scala.Short
      def make(value: scala.Short): T
      def Min: T
      def Max: T
      def Index: T
      def isZeroIndex: scala.Boolean
      def isWrapped: scala.Boolean
      @inline private final def toShort: scala.Short = value
      @inline private final def toU16: U._16 = U._16(toShort)
      // Range-checked narrowing from arbitrary precision (non-wrapped path).
      @inline private final def make(value: Z): T = {
        assert(Min.toMP <= value, s"$value should not be less than $Name.Min ($Min)")
        assert(value <= Max.toMP, s"$value should not be greater than $Name.Max ($Max)")
        make(value match {
          case MP.Long(n) => n.toShort
          case MP.BigInt(n) => n.toShort
        })
      }
      @inline private final def umake(value: U._16): T = make(value.value)
      // Truncating construction from an Int intermediate (wrapped path).
      @inline private final def makeShort(value: scala.Int): T =
        if (isSigned) make(value.toShort) else make(U._16(value).value)
      final def unary_- : T =
        if (!isWrapped) make(-toMP)
        else if (isSigned) makeShort(-toShort)
        else umake(-toU16)
      final def +(other: T): T = {
        if (!isWrapped) make(toMP + other.toMP)
        else if (isSigned) makeShort(toShort + other.toShort)
        else umake(toU16 + other.toU16)
      }
      final def -(other: T): T = {
        if (!isWrapped) make(toMP - other.toMP)
        else if (isSigned) makeShort(toShort - other.toShort)
        else umake(toU16 - other.toU16)
      }
      final def *(other: T): T = {
        if (!isWrapped) make(toMP * other.toMP)
        else if (isSigned) makeShort(toShort * other.toShort)
        else umake(toU16 * other.toU16)
      }
      final def /(other: T): T = {
        if (!isWrapped) make(toMP / other.toMP)
        else if (isSigned) makeShort(toShort / other.toShort)
        else umake(toU16 / other.toU16)
      }
      final def %(other: T): T = {
        if (!isWrapped) make(toMP % other.toMP)
        else if (isSigned) makeShort(toShort % other.toShort)
        else umake(toU16 % other.toU16)
      }
      final def >(other: T): B = {
        if (isSigned) toShort > other.toShort
        else toU16 > other.toU16
      }
      final def >=(other: T): B = {
        if (isSigned) toShort >= other.toShort
        else toU16 >= other.toU16
      }
      final def <(other: T): B = {
        if (isSigned) toShort < other.toShort
        else toU16 < other.toU16
      }
      final def <=(other: T): B = {
        if (isSigned) toShort <= other.toShort
        else toU16 <= other.toU16
      }
      // Signed >> is arithmetic; for unsigned types >> and >>> coincide.
      final def >>(other: T): T = {
        if (isSigned) makeShort(toShort >> other.toShort)
        else this >>> other
      }
      final def >>>(other: T): T = {
        if (isSigned) makeShort(toShort >>> other.toShort)
        else umake(toU16 >>> other.toU16.toInt)
      }
      final def <<(other: T): T = {
        if (isSigned) makeShort(toShort << other.toShort)
        else umake(toU16 << other.toU16.toInt)
      }
      final def &(other: T): T = {
        if (isSigned) makeShort(toShort & other.toShort)
        else umake(toU16 & other.toU16)
      }
      final def |(other: T): T = {
        if (isSigned) makeShort(toShort | other.toShort)
        else umake(toU16 | other.toU16)
      }
      // `|^` is bitwise xor in this API.
      final def |^(other: T): T = {
        if (isSigned) makeShort(toShort ^ other.toShort)
        else umake(toU16 ^ other.toU16)
      }
      final def unary_~ : T =
        if (isSigned) makeShort(~toShort)
        else umake(~toU16)
      final def increase: T =
        if (isSigned) makeShort(toShort + 1)
        else umake(toU16 + U._16(1))
      final def decrease: T =
        if (isSigned) makeShort(toShort - 1)
        else umake(toU16 - U._16(1))
      final override def toString: Predef.String =
        if (isSigned) toShort.toString
        else toU16.toString
      final override def toBigInt: scala.BigInt =
        if (isSigned) scala.BigInt(toShort)
        else toU16.toInt.toBigInt
      final override def toMP: Z =
        if (isSigned) MP(toShort)
        else MP(toU16.toInt)
      final override def toIndex: Z.Index =
        if (isZeroIndex) toMP else toMP - Index.toMP
    }
    /** 32-bit bit-vector integer; same policy-flag dispatch as [[Byte]]
      * (isWrapped → modular vs range-checked arithmetic, isSigned → JVM int
      * path vs unsigned U._32 path, Index/isZeroIndex → toIndex base).
      * Unlike the narrower traits, no widening intermediate is needed: the
      * payload is already a machine Int. */
    trait Int[T <: Int[T]] extends Any with ZLike[T] with $internal.HasBoxer {
      this: T =>
      final def isBitVector: scala.Boolean = true
      final def hasMin: scala.Boolean = true
      final def hasMax: scala.Boolean = true
      def value: scala.Int
      def make(value: scala.Int): T
      def Min: T
      def Max: T
      def Index: T
      def isZeroIndex: scala.Boolean
      def isWrapped: scala.Boolean
      @inline private final def toU32: U._32 = U._32(value)
      // Range-checked narrowing from arbitrary precision (non-wrapped path).
      @inline private final def make(value: Z): T = {
        assert(Min.toMP <= value, s"$value should not be less than $Name.Min ($Min)")
        assert(value <= Max.toMP, s"$value should not be greater than $Name.Max ($Max)")
        make(value match {
          case MP.Long(n) => n.toInt
          case MP.BigInt(n) => n.toInt
        })
      }
      @inline private final def umake(value: U._32): T = make(value.value)
      final def unary_- : T =
        if (!isWrapped) make(-toMP)
        else if (isSigned) make(-value)
        else umake(-toU32)
      final def +(other: T): T = {
        if (!isWrapped) make(toMP + other.toMP)
        else if (isSigned) make(value + other.value)
        else umake(toU32 + other.toU32)
      }
      final def -(other: T): T = {
        if (!isWrapped) make(toMP - other.toMP)
        else if (isSigned) make(value - other.value)
        else umake(toU32 - other.toU32)
      }
      final def *(other: T): T = {
        if (!isWrapped) make(toMP * other.toMP)
        else if (isSigned) make(value * other.value)
        else umake(toU32 * other.toU32)
      }
      final def /(other: T): T = {
        if (!isWrapped) make(toMP / other.toMP)
        else if (isSigned) make(value / other.value)
        else umake(toU32 / other.toU32)
      }
      final def %(other: T): T = {
        if (!isWrapped) make(toMP % other.toMP)
        else if (isSigned) make(value % other.value)
        else umake(toU32 % other.toU32)
      }
      final def >(other: T): B = {
        if (isSigned) value > other.value
        else toU32 > other.toU32
      }
      final def >=(other: T): B = {
        if (isSigned) value >= other.value
        else toU32 >= other.toU32
      }
      final def <(other: T): B = {
        if (isSigned) value < other.value
        else toU32 < other.toU32
      }
      final def <=(other: T): B = {
        if (isSigned) value <= other.value
        else toU32 <= other.toU32
      }
      // Signed >> is arithmetic; for unsigned types >> and >>> coincide.
      final def >>(other: T): T = {
        if (isSigned) make(value >> other.value)
        else this >>> other
      }
      final def >>>(other: T): T = {
        if (isSigned) make(value >>> other.value)
        else umake(toU32 >>> other.value)
      }
      final def <<(other: T): T = {
        if (isSigned) make(value << other.value)
        else umake(toU32 << other.value)
      }
      final def &(other: T): T = {
        if (isSigned) make(value & other.value)
        else umake(toU32 & other.toU32)
      }
      final def |(other: T): T = {
        if (isSigned) make(value | other.value)
        else umake(toU32 | other.toU32)
      }
      // `|^` is bitwise xor in this API.
      final def |^(other: T): T = {
        if (isSigned) make(value ^ other.value)
        else umake(toU32 ^ other.toU32)
      }
      final def unary_~ : T =
        if (isSigned) make(~value)
        else umake(~toU32)
      final def increase: T =
        if (isSigned) make(value + 1)
        else umake(toU32 + U._32(1))
      final def decrease: T =
        if (isSigned) make(value - 1)
        else umake(toU32 - U._32(1))
      final override def toString: Predef.String =
        if (isSigned) value.toString
        else toU32.toString
      final override def toBigInt: scala.BigInt =
        if (isSigned) scala.BigInt(value)
        else toU32.toLong.toBigInt
      final override def toMP: Z =
        if (isSigned) MP(value)
        else MP(toU32.toLong)
      final override def toIndex: Z.Index =
        if (isZeroIndex) toMP else toMP - Index.toMP
    }
/**
 * Common implementation for 64-bit bit-vector integer types (signed or unsigned,
 * wrapping or range-checked), backed by a raw `scala.Long` bit pattern.
 */
trait Long[T <: Long[T]] extends Any with ZLike[T] with $internal.HasBoxer {
  this: T =>
  // All Long-backed types are bit vectors with both bounds defined.
  final def isBitVector: scala.Boolean = true
  final def hasMin: scala.Boolean = true
  final def hasMax: scala.Boolean = true
  /** Underlying two's-complement 64-bit representation. */
  def value: scala.Long
  /** Constructs the concrete type from a raw Long bit pattern. */
  def make(value: scala.Long): T
  def Min: T
  def Max: T
  def Index: T
  def isZeroIndex: scala.Boolean
  /** True when arithmetic wraps around rather than being range-checked via `toMP`. */
  def isWrapped: scala.Boolean
  // Reinterprets the raw bits as an unsigned 64-bit value.
  @inline private final def toU64: U._64 = U._64(value)
  // Range-checked construction from an arbitrary-precision value.
  @inline private final def make(value: Z): T = {
    assert(Min.toMP <= value, s"$value should not be less than $Name.Min ($Min)")
    assert(value <= Max.toMP, s"$value should not be greater than $Name.Max ($Max)")
    make(value match {
      case MP.Long(n) => n
      case MP.BigInt(n) => n.toLong
    })
  }
  @inline private final def umake(value: U._64): T = make(value.value)
  // Arithmetic: non-wrapping types compute in arbitrary precision so that `make(Z)`
  // can range-check the result; wrapping types compute directly on the 64-bit
  // representation (signed or unsigned as appropriate).
  final def unary_- : T =
    if (!isWrapped) make(-toMP)
    else if (isSigned) make(-value)
    else umake(-toU64)
  final def +(other: T): T = {
    if (!isWrapped) make(toMP + other.toMP)
    else if (isSigned) make(value + other.value)
    else umake(toU64 + other.toU64)
  }
  final def -(other: T): T = {
    if (!isWrapped) make(toMP - other.toMP)
    else if (isSigned) make(value - other.value)
    else umake(toU64 - other.toU64)
  }
  final def *(other: T): T = {
    if (!isWrapped) make(toMP * other.toMP)
    else if (isSigned) make(value * other.value)
    else umake(toU64 * other.toU64)
  }
  final def /(other: T): T = {
    if (!isWrapped) make(toMP / other.toMP)
    else if (isSigned) make(value / other.value)
    else umake(toU64 / other.toU64)
  }
  final def %(other: T): T = {
    if (!isWrapped) make(toMP % other.toMP)
    else if (isSigned) make(value % other.value)
    else umake(toU64 % other.toU64)
  }
  // Comparisons: unsigned types compare through the unsigned 64-bit view.
  final def >(other: T): B = {
    if (isSigned) value > other.value
    else toU64 > other.toU64
  }
  final def >=(other: T): B = {
    if (isSigned) value >= other.value
    else toU64 >= other.toU64
  }
  final def <(other: T): B = {
    if (isSigned) value < other.value
    else toU64 < other.toU64
  }
  final def <=(other: T): B = {
    if (isSigned) value <= other.value
    else toU64 <= other.toU64
  }
  // Arithmetic right shift for signed values; >> is the logical shift for unsigned.
  final def >>(other: T): T = {
    if (isSigned) make(value >> other.value)
    else this >>> other
  }
  // Logical right shift; the unsigned path masks the shift amount to 0..63.
  final def >>>(other: T): T = {
    if (isSigned) make(value >>> other.value)
    else umake(toU64 >>> (other.value & 0x3F).toInt)
  }
  final def <<(other: T): T = {
    if (isSigned) make(value << other.value)
    else umake(toU64 << (other.value & 0x3F).toInt)
  }
  // Bitwise operations; |^ is exclusive-or.
  final def &(other: T): T = {
    if (isSigned) make(value & other.value)
    else umake(toU64 & other.toU64)
  }
  final def |(other: T): T = {
    if (isSigned) make(value | other.value)
    else umake(toU64 | other.toU64)
  }
  final def |^(other: T): T = {
    if (isSigned) make(value ^ other.value)
    else umake(toU64 ^ other.toU64)
  }
  final def unary_~ : T =
    if (isSigned) make(~value)
    else umake(~toU64)
  // Successor / predecessor.
  final def increase: T =
    if (isSigned) make(value + 1)
    else umake(toU64 + U._64(1))
  final def decrease: T =
    if (isSigned) make(value - 1)
    else umake(toU64 - U._64(1))
  // Conversions render/convert the logical (signed or unsigned) numeric value.
  final override def toString: Predef.String =
    if (isSigned) value.toString
    else toU64.toString
  final override def toBigInt: scala.BigInt =
    if (isSigned) scala.BigInt(value)
    else toU64.toBigInt
  final override def toMP: Z =
    if (isSigned) MP(value)
    else MP(toU64.toBigInt)
  // Index-space value: identity for zero-indexed types, otherwise offset by Index.
  final override def toIndex: Z.Index =
    if (isZeroIndex) toMP else toMP - Index.toMP
}
}
/**
 * An arbitrary-precision integer type constrained to a [Min, Max] range.
 * Unlike the bit-vector types, range types have no bit width; all arithmetic
 * delegates to the underlying Z value and `make` re-applies the range check.
 */
trait Range[T <: Range[T]] extends Any with ZLike[T] with $internal.HasBoxer {
  this: T =>

  /** Underlying arbitrary-precision value. */
  def value: Z
  /** Constructs (and range-checks) a new instance of the concrete type. */
  def make(n: Z): T
  def Min: T
  def Max: T
  def Index: T
  def isZeroIndex: scala.Boolean

  // Range types are not bit vectors, so bit-width queries are unsupported.
  @inline final def isBitVector: scala.Boolean = false
  @inline final def BitWidth: Int = unsupported("BitWidth")
  @inline final def toMP: Z = value

  // Arithmetic delegates to the underlying Z value.
  @inline final def unary_- : T = make(-value)
  @inline final def +(other: T): T = make(value + other.value)
  @inline final def -(other: T): T = make(value - other.value)
  @inline final def *(other: T): T = make(value * other.value)
  @inline final def /(other: T): T = make(value / other.value)
  @inline final def %(other: T): T = make(value % other.value)

  // Comparisons operate directly on the underlying Z values.
  @inline final def <(other: T): B = value < other.value
  @inline final def <=(other: T): B = value <= other.value
  @inline final def >(other: T): B = value > other.value
  @inline final def >=(other: T): B = value >= other.value

  @inline final def decrease: T = make(value - MP.one)
  @inline final def increase: T = make(value + MP.one)
  @inline final override def toBigInt: BigInt = value.toBigInt
  @inline final def toIndex: Z.Index = if (isZeroIndex) value else value - Index.value
  @inline final override def toString: Predef.String = value.toString

  @inline private final def unsupported(op: Predef.String): Nothing =
    halt(s"Unsupported $Name operation '$op'.")
}
/** Identity coercion from Z, satisfying the $ZCompanion contract. */
def apply(n: Z): Z = n match {
  case n: Z => n
  // NOTE(review): the type pattern above always matches a non-null Z, so this
  // branch appears unreachable for normal inputs — confirm it is intentional.
  case _ => halt(s"Unsupported Z creation from ${n.Name}.")
}
/**
 * Parses a decimal string into a Z, yielding None() when the string is not a
 * valid number. Only non-fatal exceptions are converted to None; fatal JVM
 * errors (OutOfMemoryError, etc.) are allowed to propagate instead of being
 * silently swallowed as the previous `case _: Throwable` did.
 */
def apply(s: String): Option[Z] =
  try Some(Z.$String(s.value))
  catch {
    case scala.util.control.NonFatal(_) => None[Z]()
  }
/** Conversion/extraction between Z and scala.Int; unapply succeeds only when the value fits. */
object Int extends $ZCompanionInt[Z] {
  @inline def apply(n: scala.Int): Z = MP(n)
  def unapply(n: Z): scala.Option[scala.Int] = n match {
    case n: Z => n.toIntOpt
    // a type pattern does not match null, so this branch also guards a null argument
    case _ => scala.None
  }
}
/** Conversion/extraction between Z and scala.Long; unapply succeeds only when the value fits. */
object Long extends $ZCompanionLong[Z] {
  @inline def apply(n: scala.Long): Z = MP(n)
  def unapply(n: Z): scala.Option[scala.Long] = n match {
    case n: Z => n.toLongOpt
    // a type pattern does not match null, so this branch also guards a null argument
    case _ => scala.None
  }
}
/** Conversion/extraction between Z and its decimal string form. */
object $String extends $ZCompanionString[Z] {
  @inline def apply(s: Predef.String): Z = MP(s)
  def unapply(n: Z): scala.Option[Predef.String] = n match {
    case n: Z => scala.Some(n.toString)
    // a type pattern does not match null, so this branch also guards a null argument
    case _ => scala.None
  }
}
/** Conversion/extraction between Z and scala.BigInt (always succeeds for non-null Z). */
object BigInt extends $ZCompanionBigInt[Z] {
  @inline def apply(n: scala.BigInt): Z = MP(n)
  def unapply(n: Z): scala.Option[scala.BigInt] = n match {
    case n: Z => scala.Some(n.toBigInt)
    // a type pattern does not match null, so this branch also guards a null argument
    case _ => scala.None
  }
}
// Z is an arbitrary-precision signed integer: zero-indexed, with no bit width
// and no Min/Max bounds (bound queries halt).
val Name: Predef.String = "Z"
val isBitVector: scala.Boolean = false
val isSigned: scala.Boolean = true
val isZeroIndex: scala.Boolean = true
val Index: Z = MP.zero
val hasMin: scala.Boolean = false
val hasMax: scala.Boolean = false
def Min: Z = halt(s"Unsupported $Name operation 'Min'")
def Max: Z = halt(s"Unsupported $Name operation 'Max'")
def BitWidth: scala.Int = halt(s"Unsupported $Name operation 'BitWidth'")
/**
 * Returns a random non-negative Z whose bit length is itself randomly chosen
 * (up to ~1023 bits). NOTE(review): allocates a fresh, time-seeded Random on
 * every call.
 */
def random: Z = {
  val r = new scala.util.Random
  MP(scala.BigInt(numbits = r.nextInt(r.nextInt(1024) + 1), rnd = r))
}
/**
 * Returns a random Z derived deterministically from the given seed; the seed is
 * reduced modulo 2^64 before being narrowed to a Long.
 * NOTE(review): if `seed.toMP % 2^64` exceeds Long.MaxValue, `toLongOpt` may
 * return None and `.get` would throw — confirm toLongOpt's accepted range.
 */
def randomSeed(seed: Z): Z = {
  val r = new scala.util.Random((seed.toMP % Z.MP(U._64(-1).toBigInt + 1)).toLongOpt.get)
  MP(scala.BigInt(numbits = r.nextInt(r.nextInt(1024) + 1), rnd = r))
}
import scala.language.implicitConversions
// Implicit widening conversions from Scala's standard numeric types into Z.
@inline implicit def apply(n: scala.Int): Z = Int(n)
@inline implicit def apply(n: scala.Long): Z = Long(n)
@inline implicit def apply(n: scala.BigInt): Z = BigInt(n)
}
/**
 * Companion-object interface shared by every Sireum integral type: static
 * metadata (name, signedness, indexing), bounds, conversions from Scala
 * numeric types, and random-value generation.
 */
trait $ZCompanion[T] {
  def Name: Predef.String
  def isBitVector: scala.Boolean
  def isSigned: scala.Boolean
  def isZeroIndex: scala.Boolean
  def Index: T
  // Bound queries; Min/Max are only meaningful when hasMin/hasMax hold.
  def hasMin: scala.Boolean
  def hasMax: scala.Boolean
  def Min: T
  def Max: T
  def BitWidth: scala.Int
  def random: T
  def randomSeed(seed: Z): T
  // Conversion/extraction sub-companions for the Scala numeric types.
  def Int: $ZCompanionInt[T]
  def Long: $ZCompanionLong[T]
  def $String: $ZCompanionString[T]
  def BigInt: $ZCompanionBigInt[T]
  def apply(n: Z): T
}
/** Conversion/extraction between an integral type T and scala.Int. */
trait $ZCompanionInt[T] {
  def apply(n: scala.Int): T
  def unapply(n: T): scala.Option[scala.Int]
}
/** Conversion/extraction between an integral type T and scala.Long. */
trait $ZCompanionLong[T] {
  def apply(n: scala.Long): T
  def unapply(n: T): scala.Option[scala.Long]
}
/** Conversion/extraction between an integral type T and its decimal string form. */
trait $ZCompanionString[T] {
  def apply(s: Predef.String): T
  def unapply(n: T): scala.Option[Predef.String]
}
/** Conversion/extraction between an integral type T and scala.BigInt.
  * NOTE(review): the apply parameter is named `s` while sibling companions use `n`. */
trait $ZCompanionBigInt[T] {
  def apply(s: scala.BigInt): T
  def unapply(n: T): scala.Option[scala.BigInt]
}
/**
 * Operations common to every Sireum integral value: arbitrary-precision Z,
 * bit-vector types, and range-constrained types.
 */
trait ZLike[T <: ZLike[T]] extends Any with Number with Comparable[T] {
  this: T =>
  // Per-instance views of the companion metadata.
  def Name: Predef.String
  def isBitVector: scala.Boolean
  def isSigned: scala.Boolean
  def isZeroIndex: scala.Boolean
  def Index: T
  def hasMin: scala.Boolean
  def hasMax: scala.Boolean
  def Min: T
  def Max: T
  def BitWidth: scala.Int
  def <(other: T): B
  def <=(other: T): B
  def >(other: T): B
  def >=(other: T): B
  // Sireum-style equality operators returning B.
  @inline def ===(other: Z): B = this == other
  @inline def =!=(other: Z): B = this != other
  def +(other: T): T
  def -(other: T): T
  def *(other: T): T
  def /(other: T): T
  def %(other: T): T
  def increase: T
  def decrease: T
  def unary_- : T
  /** Value shifted into zero-based index space (offset by Index when not zero-indexed). */
  def toIndex: Z.Index
  final override def string: String = toString
  def toBigInt: scala.BigInt
  def toMP: Z
  // `to` is inclusive and `until` exclusive; both use an always-true element
  // condition (the bare T/F arguments are Sireum's boolean literals, not the
  // type parameter).
  def to(n: T): ZRange[T] = ZRange[T](T, this, n, 1, new ZRange.CondIncDec[T] {
    @pure def cond(i: T): B = T
    @pure def increase(i: T): T = i.increase
    @pure def decrease(i: T): T = i.decrease
  })
  def until(n: T): ZRange[T] = ZRange[T](F, this, n, 1, new ZRange.CondIncDec[T] {
    @pure def cond(i: T): B = T
    @pure def increase(i: T): T = i.increase
    @pure def decrease(i: T): T = i.decrease
  })
  def compareTo(other: T): scala.Int =
    if (this < other) -1 else if (this > other) 1 else 0
}
/**
 * Arbitrary-precision signed integer. Concrete representations are provided by
 * Z.MP (Long-backed or BigInt-backed); all arithmetic delegates to Z.MP, which
 * handles promotion between the two.
 */
sealed trait Z extends ZLike[Z] with $internal.HasBoxer {
  // Metadata delegates to the companion.
  final def Name: Predef.String = Z.Name
  final def isBitVector: scala.Boolean = Z.isBitVector
  final def isSigned: scala.Boolean = Z.isSigned
  final def Index: Z = Z.Index
  final def isZeroIndex: scala.Boolean = Z.isZeroIndex
  final def hasMin: scala.Boolean = Z.hasMin
  final def hasMax: scala.Boolean = Z.hasMax
  final def Min: Z = Z.Min
  final def Max: Z = Z.Max
  final def BitWidth: Int = Z.BitWidth
  // Z is zero-indexed, so toIndex is the identity.
  final def toIndex: Z.Index = this
  final def unary_- : Z = Z.MP.unary_-(this)
  final def +(other: Z): Z = Z.MP.+(this, other)
  final def -(other: Z): Z = Z.MP.-(this, other)
  final def *(other: Z): Z = Z.MP.*(this, other)
  final def /(other: Z): Z = Z.MP./(this, other)
  final def %(other: Z): Z = Z.MP.%(this, other)
  final def >(other: Z): B = Z.MP.>(this, other)
  final def >=(other: Z): B = Z.MP.>=(this, other)
  final def <(other: Z): B = Z.MP.<(this, other)
  final def <=(other: Z): B = Z.MP.<=(this, other)
  final def increase: Z = this + Z.MP.one
  final def decrease: Z = this - Z.MP.one
  // Numeric equality also accepts Scala and Java boxed integer types.
  final override def equals(other: scala.Any): scala.Boolean = other match {
    case other: Z => if (this eq other) true else Z.MP.isEqual(this, other).value
    case other: scala.Int => Z.MP.isEqual(this, other)
    case other: scala.Long => Z.MP.isEqual(this, other)
    case other: scala.BigInt => Z.MP.isEqual(this, other)
    case other: _root_.java.lang.Integer => Z.MP.isEqual(this, other.intValue)
    case other: _root_.java.lang.Long => Z.MP.isEqual(this, other.longValue)
    case other: _root_.java.math.BigInteger => Z.MP.isEqual(this, scala.BigInt(other))
    case _ => false
  }
  final def toMP: Z = this
  // Narrowing conversions; the Opt variants fail (None) when the value does not fit.
  def toIntOpt: scala.Option[scala.Int]
  def toLongOpt: scala.Option[scala.Long]
  def toInt: scala.Int
  def toLong: scala.Long
  def boxer: $internal.Boxer = Z.Boxer.Z
}
object ZRange {
  /** Per-element behavior of a range: a filter condition plus the step functions. */
  trait CondIncDec[I] {
    @pure def cond(i: I): B
    @pure def increase(i: I): I
    @pure def decrease(i: I): I
  }
}
/**
 * A (possibly filtered) arithmetic range over any ZLike element type, supporting
 * for-comprehension syntax via foreach/map/flatMap/withFilter.
 *
 * @param isInclusive whether `to` itself is a member of the range
 * @param init        first element
 * @param to          terminal bound
 * @param by          step size (negative for descending ranges; zero yields an empty range)
 * @param cid         element condition and step functions
 */
final case class ZRange[I](
  isInclusive: B,
  init: I,
  to: I,
  by: Z,
  cid: ZRange.CondIncDec[I]
) {
  /**
   * Core traversal shared by foreach/map/flatMap: walks from `init` toward `to`
   * in steps of `by`, invoking `visit` on each element accepted by `cid.cond`.
   */
  private def iterate(visit: I => Unit): Unit = {
    val toZ = to.asInstanceOf[ZLike[_]].toMP
    var i = init
    var iZ = init.asInstanceOf[ZLike[_]].toMP
    // Continue while the cursor has not passed `to` in the direction of travel;
    // a zero step satisfies neither disjunct, terminating immediately.
    def loopCond: B = {
      if (isInclusive) (iZ <= toZ && by > 0) || (iZ >= toZ && by < 0)
      else (iZ < toZ && by > 0) || (iZ > toZ && by < 0)
    }
    while (loopCond) {
      if (cid.cond(i)) {
        visit(i)
      }
      // Advance |by| single steps in the appropriate direction.
      var j = Z.MP.zero
      if (by < 0) {
        val n = -by
        while (j < n) {
          i = cid.decrease(i)
          j = j + 1
        }
      } else {
        val n = by
        while (j < n) {
          i = cid.increase(i)
          j = j + 1
        }
      }
      iZ = i.asInstanceOf[ZLike[_]].toMP
    }
  }

  /** Applies `f` to each element (result of `f` is discarded). */
  def foreach[V](f: I => V): Unit = iterate(i => f(i))

  /** Collects `f` applied to each element into an ISZ. */
  @pure def map[V](f: I => V): ISZ[V] = {
    var r = ISZ[V]()
    iterate(i => r = r :+ f(i))
    r
  }

  /** Concatenates `f` applied to each element into an ISZ. */
  @pure def flatMap[V](f: I => ISZ[V]): ISZ[V] = {
    var r = ISZ[V]()
    iterate(i => r = r ++ f(i))
    r
  }

  /** Returns a copy of this range with a different step size. */
  @pure def by(n: Z): ZRange[I] = ZRange(isInclusive, init, to, n, cid)

  /** Composes an additional element predicate (for-comprehension guard support). */
  def withFilter(filter: I => B): ZRange[I] =
    ZRange(isInclusive, init, to, by, new ZRange.CondIncDec[I] {
      @pure def cond(i: I): B = cid.cond(i) && filter(i)
      @pure def increase(i: I): I = cid.increase(i)
      @pure def decrease(i: I): I = cid.decrease(i)
    })

  /** Reverses the direction of the range; an exclusive endpoint is first stepped
    * inward so the reversed range starts at the last visited element. */
  @pure def reverse: ZRange[I] =
    ZRange(T, if (isInclusive) to else if (by > 0) cid.decrease(to) else cid.increase(to), init, -by, cid)
}
| sireum/v3-logika-runtime | library/shared/src/main/scala/org/sireum/Z.scala | Scala | bsd-2-clause | 43,579 |
// Guice-style binding annotation: Service's default implementation is Provider.
@ImplementedBy(classOf[Provider])
trait Service {
  def someMethod(): Unit
}
class Provider
  extends Service
{
  // test enumeration java annotations
  @Ann(Days.Friday) def someMethod() = ()
  // #2103: Java annotation with a named enum argument combined with @BeanProperty
  @scala.beans.BeanProperty
  @Ann(value = Days.Sunday)
  val t2103 = "test"
}
| yusuke2255/dotty | tests/pos/java-interop/t1782/Test_1.scala | Scala | bsd-3-clause | 287 |
package io.digitalmagic.akka.dsl
import java.util.concurrent.ExecutionException
import scalaz._
import Scalaz._
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try
/**
 * A lazily-running `Future`: wraps a function from `ExecutionContext` to
 * `Future[T]`, so no work starts until an execution context is supplied via
 * `apply`. Every combinator composes the wrapped function without executing it,
 * mirroring the corresponding `Future` operation.
 */
case class LazyFuture[T](f: ExecutionContext => Future[T]) extends AnyVal {
  /** Runs the deferred computation on the given execution context. */
  def apply(ec: ExecutionContext): Future[T] = f(ec)
  /** Transforms the successful result with `f`. */
  def map[S](f: T => S): LazyFuture[S] =
    LazyFuture(implicit ec => this(ec).map(f))
  /** Chains a dependent lazy computation off the successful result. */
  def flatMap[S](f: T => LazyFuture[S]): LazyFuture[S] =
    LazyFuture(implicit ec => this(ec).flatMap(f(_)(ec)))
  /** Recovers from matching failures with a plain value. */
  def recover[U >: T](pf: PartialFunction[Throwable, U]): LazyFuture[U] =
    LazyFuture(implicit ec => this(ec).recover(pf))
  /** Recovers from matching failures with another lazy computation. */
  def recoverWith[U >: T](pf: PartialFunction[Throwable, LazyFuture[U]]): LazyFuture[U] =
    LazyFuture(implicit ec => this(ec).recoverWith { case t if pf.isDefinedAt(t) => pf(t)(ec) } )
  /** Falls back to `that` if this computation fails. */
  def fallbackTo[U >: T](that: LazyFuture[U]): LazyFuture[U] =
    LazyFuture(implicit ec => this(ec).fallbackTo(that(ec)))
  /** Maps the success with `s` and the failure with `f`.
    * (Explicit result type added — public API members should not rely on inference.) */
  def transform[S](s: T => S, f: Throwable => Throwable): LazyFuture[S] =
    LazyFuture(implicit ec => this(ec).transform(s, f))
  /** Transforms the completed `Try` result into another lazy computation. */
  def transformWith[S](f: Try[T] => LazyFuture[S]): LazyFuture[S] =
    LazyFuture(implicit ec => this(ec).transformWith(t => f(t)(ec)))
  /** Maps only the failure of this computation. */
  def mapError(f: Throwable => Throwable): LazyFuture[T] =
    transform(identity, f)
  /** Implicit bridge: usable where a `Future` is expected, given an implicit execution context. */
  implicit def toFuture(implicit ec: ExecutionContext): Future[T] = f(ec)
}
object LazyFuture {
  /** scalaz MonadError instance so LazyFuture composes with generic monadic code. */
  implicit val lazyFutureInstances: MonadError[LazyFuture, Throwable] = new MonadError[LazyFuture, Throwable] {
    override def map[A, B](fa: LazyFuture[A])(f: A => B): LazyFuture[B] = fa.map(f)
    override def point[A](a: => A): LazyFuture[A] = LazyFuture(implicit ec => Future(a))
    override def ap[A, B](fa: => LazyFuture[A])(f: => LazyFuture[A => B]): LazyFuture[B] = LazyFuture(implicit ec => fa(ec) <*> f(ec))
    override def bind[A, B](fa: LazyFuture[A])(f: A => LazyFuture[B]): LazyFuture[B] = fa.flatMap(f)
    override def raiseError[A](e: Throwable): LazyFuture[A] = LazyFuture.failed(e)
    // Unwrap ExecutionException so handlers see the underlying cause rather than the wrapper.
    override def handleError[A](fa: LazyFuture[A])(f: Throwable => LazyFuture[A]): LazyFuture[A] = fa.recoverWith {
      case ee: ExecutionException if ee.getCause != null => f(ee.getCause)
      case t => f(t)
    }
  }
  /** A LazyFuture that always fails with the given exception. */
  def failed[T](exception: Throwable): LazyFuture[T] = LazyFuture(_ => Future.failed(exception))
  /** A LazyFuture that always succeeds with the given (already computed) value. */
  def successful[T](result: T): LazyFuture[T] = LazyFuture(_ => Future.successful(result))
}
| digital-magic-io/akka-cqrs-dsl | akka-cqrs-dsl-core/src/main/scala/io/digitalmagic/akka/dsl/LazyFuture.scala | Scala | apache-2.0 | 2,413 |
package com.mentatlabs.nsa
package scalac
package options
/* -Xverify
* ========
* 2.9.1 - 2.9.3: Verify generic signatures in generated bytecode.
* 2.10.0 - 2.11.8: Verify generic signatures in generated bytecode (asm backend only.)
* 2.12.0: Verify generic signatures in generated bytecode.
*/
/** Boolean scalac option `-Xverify` (verify generic signatures in generated bytecode), available since Scala 2.9.1. */
case object ScalacXVerify
  extends ScalacOptionBoolean("-Xverify", ScalacVersions.`2.9.1`)
| melezov/sbt-nsa | nsa-core/src/main/scala/com/mentatlabs/nsa/scalac/options/advanced/ScalacXVerify.scala | Scala | bsd-3-clause | 413 |
//
// Copyright 2014-2020 Paytronix Systems, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package com.paytronix.utils.interchange.format.json
import java.io.{
InputStream, OutputStream, Reader, Writer,
ByteArrayInputStream, ByteArrayOutputStream, StringReader, StringWriter
}
import scala.annotation.{StaticAnnotation, implicitNotFound, tailrec}
import scala.collection.mutable.Queue
import com.fasterxml.jackson.core.{JsonEncoding, JsonFactory, JsonGenerator, JsonLocation, JsonParser, JsonParseException, JsonToken}
import scalaz.BijectionT
import com.paytronix.utils.interchange.base.{
Coder, CoderFailure, CoderResult, Decoder, Encoder, Format, Receiver, atTerminal, formatFailedPath, terminal
}
import com.paytronix.utils.scala.resource.{Closeable, withResource}
import com.paytronix.utils.scala.result.{FailedG, Okay, Result, tryCatchValue, tryCatchResult, tryCatchResultG}
/** Closeable typeclass instances so Jackson generators/parsers can be managed by `withResource`. */
object closeables {
    implicit object JsonGeneratorCloseable extends Closeable[JsonGenerator] {
        def close(generator: JsonGenerator) = generator.close()
    }
    implicit object JsonParserCloseable extends Closeable[JsonParser] {
        def close(parser: JsonParser) = parser.close()
    }
}
import closeables.{JsonGeneratorCloseable, JsonParserCloseable}
object InterchangeJsonGenerator {
    /**
     * Trait of objects which filter objects as they're written, for example to write additional
     * fields at the beginning or end, or watch what fields are created
     */
    trait ObjectFilter {
        /** When true, the surrounding { and } are not emitted (used to flatten nested objects). */
        val suppressStartAndEnd: Boolean = false
        /** Called just after the object is started. */
        def beginning(): CoderResult[Unit] = Okay.unit
        /** Called for each field name written within the object. */
        def fieldName(name: String): CoderResult[Unit] = Okay.unit
        /** Called just before the object is ended. */
        def end(): CoderResult[Unit] = Okay.unit
    }

    object ObjectFilter {
        /** Filter that does nothing. */
        val zero = new ObjectFilter {}
        /** Filter that only suppresses the { and } delimiters. */
        val flatten = new ObjectFilter {
            override val suppressStartAndEnd = true
        }
        // The operator >> as used in this function is basically a flatmap,
        // and the lines return Okay.unit if both things happen successfully
        def combineFilters(a: ObjectFilter, b: ObjectFilter): ObjectFilter = new ObjectFilter {
            override val suppressStartAndEnd = a.suppressStartAndEnd || b.suppressStartAndEnd
            override def beginning(): CoderResult[Unit] = a.beginning() >> b.beginning()
            override def fieldName(name: String): CoderResult[Unit] = a.fieldName(name) >> b.fieldName(name)
            override def end(): CoderResult[Unit] = a.end() >> b.end()
        }
    }
}
/** Interface of something which can sink JSON events from the encoder hierarchy, usually `InterchangeJacksonJsonGenerator` */
trait InterchangeJsonGenerator {
    import InterchangeJsonGenerator._

    /** Writes null unless a field name is pending or missing-value omission was requested. */
    def writeNothingOrNull(): CoderResult[Unit]
    /** Latches a field name to be written when (and if) a value follows. */
    def writeFieldName(name: String): CoderResult[Unit]
    /** Writes a field name immediately, bypassing the missing-value machinery. */
    def writeFieldNameNotMissing(name: String): CoderResult[Unit]
    /** Requests that a missing next value be omitted entirely rather than written as null. */
    def omitNextMissing(): Unit
    /** Registers a filter to apply to the next object started. */
    def filterNextObject(filter: ObjectFilter): Unit

    // Scalar value emission.
    def writeBoolean(b: Boolean): CoderResult[Unit]
    def writeNumber(s: Short): CoderResult[Unit]
    def writeNumber(i: Int): CoderResult[Unit]
    def writeNumber(l: Long): CoderResult[Unit]
    def writeNumber(f: Float): CoderResult[Unit]
    def writeNumber(d: Double): CoderResult[Unit]
    def writeNumber(bi: java.math.BigInteger): CoderResult[Unit]
    def writeNull(): CoderResult[Unit]
    def writeString(s: String): CoderResult[Unit]

    // Structural emission.
    def writeStartArray(): CoderResult[Unit]
    def writeEndArray(): CoderResult[Unit]
    def writeStartObject(): CoderResult[Unit]
    def writeEndObject(): CoderResult[Unit]
}
/**
* Wrapper around a `JsonGenerator` which includes additional state for writing out objects correctly.
*
* In particular, some encoders want the option of writing no value (not `null`, but really no value) and if the enclosing
* coder has already called `writeFieldName` then it's too late.
*
* So, the `InterchangeJsonGenerator` keeps track of the intended field name so the enclosed encoder can trigger
* writing the field name or not.
*
* This class essentially encapsulates all the messiness that results from generating JSON incrementally and is not intended
* for prettiness.
*/
final class InterchangeJacksonJsonGenerator(generator: JsonGenerator) extends InterchangeJsonGenerator {
    import InterchangeJsonGenerator._

    // Field name latched by writeFieldName but not yet emitted; null when none is pending.
    private var _fieldName: String = null
    // When true, a missing value is omitted silently rather than emitted as null.
    private var _omitNextMissing: Boolean = false
    // Filter latched for the next object started, if any.
    private var _nextObjectFilter: ObjectFilter = null
    // Stack of filters for the currently open objects.
    private var _objectFilters: List[ObjectFilter] = Nil

    /** Clears all per-value latched state. */
    private def clearLatched(): Unit = {
        _fieldName = null
        _omitNextMissing = false
        _nextObjectFilter = null
    }

    /** Emits the pending field name (if any) prior to writing a value token. */
    private def aboutToWriteToken(): CoderResult[Unit] =
        if (_fieldName != null) {
            writeFieldNameNotMissing(_fieldName)
        } else Okay.unit

    def writeNothingOrNull(): CoderResult[Unit] =
        if (_fieldName == null && !_omitNextMissing) {
            // No field is pending and omission was not requested, so emit an explicit null.
            try {
                generator.writeNull()
                clearLatched()
                Okay.unit
            } catch {
                case e: Exception => FailedG(e, CoderFailure.terminal)
            }
        } else {
            // A pending field with no value (or requested omission): drop it entirely.
            clearLatched()
            Okay.unit
        }

    def writeFieldName(name: String): CoderResult[Unit] = { _fieldName = name; Okay.unit }

    def writeFieldNameNotMissing(name: String): CoderResult[Unit] =
        tryCatchResultG(terminal) { clearLatched(); generator.writeFieldName(name); Okay.unit } >>
        (_objectFilters match {
            case filter :: _ => filter.fieldName(name)
            case _           => Okay.unit
        })

    def omitNextMissing(): Unit = { _omitNextMissing = true }

    def filterNextObject(newFilter: ObjectFilter): Unit = {
        _nextObjectFilter = Option(_nextObjectFilter) match {
            case Some(oldFilter) => ObjectFilter.combineFilters(oldFilter, newFilter)
            case None => newFilter
        }
    }

    def writeBoolean(b: Boolean): CoderResult[Unit] =
        aboutToWriteToken() >> tryCatchResultG(terminal) { generator.writeBoolean(b); Okay.unit }
    def writeNumber(s: Short): CoderResult[Unit] =
        aboutToWriteToken() >> tryCatchResultG(terminal) { generator.writeNumber(s); Okay.unit }
    def writeNumber(i: Int): CoderResult[Unit] =
        aboutToWriteToken() >> tryCatchResultG(terminal) { generator.writeNumber(i); Okay.unit }
    def writeNumber(l: Long): CoderResult[Unit] =
        aboutToWriteToken() >> tryCatchResultG(terminal) { generator.writeNumber(l); Okay.unit }
    def writeNumber(f: Float): CoderResult[Unit] =
        aboutToWriteToken() >> tryCatchResultG(terminal) { generator.writeNumber(f); Okay.unit }
    def writeNumber(d: Double): CoderResult[Unit] =
        aboutToWriteToken() >> tryCatchResultG(terminal) { generator.writeNumber(d); Okay.unit }
    def writeNumber(bi: java.math.BigInteger): CoderResult[Unit] =
        aboutToWriteToken() >> tryCatchResultG(terminal) { generator.writeNumber(bi); Okay.unit }
    def writeNull(): CoderResult[Unit] =
        aboutToWriteToken() >> tryCatchResultG(terminal) { generator.writeNull(); Okay.unit }
    def writeString(s: String): CoderResult[Unit] =
        aboutToWriteToken() >> tryCatchResultG(terminal) { generator.writeString(s); Okay.unit }
    def writeStartArray(): CoderResult[Unit] =
        aboutToWriteToken() >> tryCatchResultG(terminal) { generator.writeStartArray(); Okay.unit }
    def writeEndArray(): CoderResult[Unit] =
        aboutToWriteToken() >> tryCatchResultG(terminal) { generator.writeEndArray(); Okay.unit }

    def writeStartObject(): CoderResult[Unit] = {
        // Capture the latched filter first: aboutToWriteToken may clear it via clearLatched.
        val nextObjectFilter = _nextObjectFilter
        // FIX: the result of aboutToWriteToken was previously discarded here (every other
        // write method chains it with >>), silently swallowing field-name write failures.
        aboutToWriteToken() >> tryCatchResultG(terminal) {
            if (nextObjectFilter != null) {
                _nextObjectFilter = null
                _objectFilters ::= nextObjectFilter
                if (!nextObjectFilter.suppressStartAndEnd) generator.writeStartObject()
                nextObjectFilter.beginning()
            } else if (_objectFilters.nonEmpty) {
                // push a null object filter so that the current object filter doesn't get calls until the current object is left
                _objectFilters ::= ObjectFilter.zero
                generator.writeStartObject()
                Okay.unit
            } else {
                generator.writeStartObject()
                Okay.unit
            }
        }
    }

    def writeEndObject(): CoderResult[Unit] =
        tryCatchResultG(terminal) {
            clearLatched()
            _objectFilters match {
                case filter :: tail =>
                    // Pop the current filter, let it write trailing fields, then close the object.
                    _objectFilters = tail
                    filter.end() >> tryCatchResultG(terminal) {
                        if (!filter.suppressStartAndEnd) generator.writeEndObject()
                        Okay.unit
                    }
                case _ =>
                    generator.writeEndObject()
                    Okay.unit
            }
        }
}
/** Interface of things which can provide tokens from a source of JSON, usually `InterchangeJacksonJsonParser` */
trait InterchangeJsonParser {
    /** Opaque token used by `mark`/`rewind` to save and restore a position in the token stream. */
    type Mark

    /** Yields false when the current value has been flagged absent via `currentValueIsMissing`. */
    def hasValue: Boolean
    /** Signals that the value about to be decoded is absent (e.g. a missing object field). */
    def currentValueIsMissing(): Unit
    def currentToken: JsonToken
    def currentLocation: JsonLocation
    /** Advances to the next token; the following decoder must check `hasValue`. */
    def advanceToken(): CoderResult[Unit]
    /** Advances to the next token without requiring a `hasValue` check to follow. */
    def advanceTokenUnguarded(): CoderResult[Unit]
    def mark(): Mark
    def rewind(m: Mark): Unit

    // Accessors for the current token's value; valid only for the matching token type.
    def byteValue: Byte
    def shortValue: Short
    def intValue: Int
    def longValue: Long
    def bigIntegerValue: java.math.BigInteger
    def floatValue: Float
    def doubleValue: Double
    def bigDecimalValue: java.math.BigDecimal
    def stringValue: String
    def fieldName: String

    /** Perform some excursion into future JSON data, rewinding back to the beginning of the excursion when the enclosed function returns */
    def excursion[A](f: => A): A = {
        val m = mark()
        try f finally rewind(m)
    }

    /** Peek at the fields of the current object (expecting `currentToken` to be `START_OBJECT`) extracting the string value of each */
    def peekFields(names: Array[String]): CoderResult[Array[Option[String]]] =
        excursion {
            val out = Array.fill(names.length)(None: Option[String])
            var found = 0
            // sentinel failure used to abort the scan early once every requested field was seen
            val skip = FailedG("done peeking fields", CoderFailure.terminal)
            foreachFields { name =>
                names.indexOf(name) match {
                    case -1 => skipToEndOfValue()
                    case n =>
                        if (out(n) == None) found += 1
                        out(n) = Some(stringValue)
                        if (found == names.length) skip
                        else skipToEndOfValue()
                }
            } match {
                // the sentinel is not a real failure, so treat it like success
                case (_: Okay[_])|`skip` =>
                    Okay(out)
                case failed: FailedG[_] =>
                    failed
            }
        }

    /** Skip past the current value, even if it's complicated */
    def skipToEndOfValue(): CoderResult[Unit] = {
        //println(s"skipToEndOfValue: currentToken=$currentToken, currentLocation=$currentLocation")
        tryCatchResultG(terminal) {
            // depth counts open objects/arrays; at depth 0 a scalar is fully consumed in place
            def go(depth: Int): Unit = {
                //println(s"go: depth=$depth, currentToken=$currentToken, currentLocation=$currentLocation")
                currentToken match {
                    case JsonToken.START_OBJECT|JsonToken.START_ARRAY =>
                        advanceTokenUnguarded().orThrow
                        go(depth+1)
                    case JsonToken.END_OBJECT|JsonToken.END_ARRAY if depth == 1 =>
                        ()
                    case JsonToken.END_OBJECT|JsonToken.END_ARRAY =>
                        advanceTokenUnguarded().orThrow
                        go(depth-1)
                    case _ if depth == 0 =>
                        ()
                    case null =>
                        sys.error("EOF reached while skipping")
                    case _ =>
                        advanceTokenUnguarded().orThrow
                        go(depth)
                }
            }
            go(0)
            Okay.unit
        }
    }

    /**
     * While positioned at the start of an object, call the function for each field in the enclosed object.
     * If any invocation of the function fails, abort the parse
     */
    def foreachFields(f: String => CoderResult[Unit]): CoderResult[Unit] =
        require(JsonToken.START_OBJECT) >>
        {
            @tailrec
            def iterate(): CoderResult[Unit] = {
                var atEnd = false
                val fieldResult = advanceTokenUnguarded() >> {
                    currentToken match {
                        case JsonToken.FIELD_NAME =>
                            // position on the field's value before invoking the callback
                            val n = fieldName
                            advanceTokenUnguarded() >> f(n)
                        case JsonToken.END_OBJECT =>
                            atEnd = true
                            Okay.unit
                        case _ =>
                            unexpectedToken("field name")
                    }
                }
                fieldResult match {
                    case _: Okay[_] if !atEnd =>
                        iterate()
                    case other =>
                        other
                }
            }
            iterate()
        }

    /** Fail if the current token is not the given one */
    def require(token: JsonToken): CoderResult[Unit] =
        if (!hasValue || currentToken != token) unexpectedToken(if (token != null) token.asString else "<eof>")
        else Okay.unit

    /** Fail if the current token is not the given one */
    def requireValue: CoderResult[Unit] =
        if (!hasValue) unexpectedMissingValue
        else Okay.unit

    /** Format an error indicating that the current token was unexpected */
    def unexpectedToken(expectedWhat: String): CoderResult[Unit] = {
        val what =
            currentToken match {
                case null                         => "EOF"
                case JsonToken.FIELD_NAME         => s""" field "$fieldName" """.trim
                case JsonToken.VALUE_FALSE        => s"false"
                case JsonToken.VALUE_TRUE         => s"true"
                case JsonToken.VALUE_NULL         => s"null"
                case JsonToken.VALUE_NUMBER_INT   => s"integer ($bigIntegerValue)"
                case JsonToken.VALUE_NUMBER_FLOAT => s"decimal ($bigDecimalValue)"
                case JsonToken.VALUE_STRING       => s"string ($stringValue)"
                case JsonToken.START_OBJECT       => s"start of object ({)"
                case JsonToken.END_OBJECT         => s"end of object (})"
                case JsonToken.START_ARRAY        => s"start of array ([)"
                case JsonToken.END_ARRAY          => s"end of array (])"
                case other                        => other.toString
            }
        if (!hasValue) FailedG(s"required but missing. Expected $expectedWhat, but found nothing.", terminal)
        else FailedG(s"expected $expectedWhat but instead got $what", terminal)
    }

    /** Format an error indicating a value was expected but missing */
    def unexpectedMissingValue: CoderResult[Unit] =
        FailedG("required but missing", terminal)

    /** Compute the path name and source position of the JSON parser for use in error messages */
    def sourceLocation: Option[String] =
        currentLocation match {
            case loc if loc == JsonLocation.NA => None
            case loc => Some(s"${loc.getLineNr}:${loc.getColumnNr}")
        }

    /** An `orElse` failure processing function which sets the source location appropriately for the location of the parser */
    def noteSourceLocation: FailedG[CoderFailure] => FailedG[CoderFailure] =
        { case FailedG(t, p) => FailedG(t, p.copy(sourceLocation=this.sourceLocation)) }

    /** A terminal `CoderFailure` at the current parsing location */
    def terminal: CoderFailure =
        sourceLocation map CoderFailure.terminalAt getOrElse CoderFailure.terminal
}
object InterchangeJacksonJsonParser {
    /**
     * Recorded parse event along with any value.
     *
     * Recording a long stream of events can cause a bunch of allocs and slowdown as they will be stored in a pretty
     * inefficient form (BigInteger / BigDecimal / String) to make sure we have the full fidelity version since we won't
     * know how it will be used later.
     */
    sealed abstract class RecordedParseEvent {
        val token: JsonToken
    }
    final case class RecordedInteger(value: java.math.BigInteger) extends RecordedParseEvent {
        val token = JsonToken.VALUE_NUMBER_INT
    }
    final case class RecordedDecimal(value: java.math.BigDecimal) extends RecordedParseEvent {
        val token = JsonToken.VALUE_NUMBER_FLOAT
    }
    final case class RecordedString(value: String) extends RecordedParseEvent {
        val token = JsonToken.VALUE_STRING
    }
    final case class RecordedFieldName(field: String) extends RecordedParseEvent {
        val token = JsonToken.FIELD_NAME
    }
    // Valueless structural/constant events.
    case object RecordedStartObject extends RecordedParseEvent { val token = JsonToken.START_OBJECT }
    case object RecordedEndObject   extends RecordedParseEvent { val token = JsonToken.END_OBJECT }
    case object RecordedStartArray  extends RecordedParseEvent { val token = JsonToken.START_ARRAY }
    case object RecordedEndArray    extends RecordedParseEvent { val token = JsonToken.END_ARRAY }
    case object RecordedTrue        extends RecordedParseEvent { val token = JsonToken.VALUE_TRUE }
    case object RecordedFalse       extends RecordedParseEvent { val token = JsonToken.VALUE_FALSE }
    case object RecordedNull        extends RecordedParseEvent { val token = JsonToken.VALUE_NULL }

    // Integer bounds used during decoding. The MIN values are the signed minima while the
    // MAX values are the unsigned maxima — presumably so either signed or unsigned
    // encodings are accepted for each width; confirm against the decoders that use them.
    val MIN_BYTE  = new java.math.BigInteger("-128")
    val MAX_BYTE  = new java.math.BigInteger("255")
    val MIN_SHORT = new java.math.BigInteger("-32768")
    val MAX_SHORT = new java.math.BigInteger("65535")
    val MIN_INT   = new java.math.BigInteger("-2147483648")
    val MAX_INT   = new java.math.BigInteger("4294967295")
    val MIN_LONG  = new java.math.BigInteger("-9223372036854775808")
    val MAX_LONG  = new java.math.BigInteger("18446744073709551615")

    /** Saved position in the recorded token stream; `consumed` is set once it has been rewound to. */
    final class Mark private[json] (val point: RecordBuffer) {
        var consumed: Boolean = false
        override def toString = s"Mark($point, consumed=$consumed)"
    }

    /** Singly-linked buffer node holding one recorded event and its source location. */
    private[json] final class RecordBuffer(val event: RecordedParseEvent, val location: JsonLocation) {
        var next: RecordBuffer = null
        override def toString = {
            // count the remaining nodes for diagnostic output only
            var i = 0
            var b = next
            while (b != null) {
                i += 1
                b = b.next
            }
            s"RecordBuffer($event, <location $location>, next=<$i more>)"
        }
    }
}
/**
* Wrapper around a `JsonParser` which includes additional state to support missing values and record/rewind.
*
* Because the object decoding incrementally walks through field names and so knows which fields are missing by the end
* but things like `nullableJsonDecoder` and `optionJsonDecoder` are what handles missing values, those decoders need to
* be run but have it signalled to them that the value is missing.
*
* Unions are encoded intensionally with an additional field indicating which union alternative was encoded, so for decoding
* those the union decoder needs to scan ahead into the object of find the discriminant, then rewind and replay the visited
* tokens back to the alternate decoder.
*/
final class InterchangeJacksonJsonParser(parser: JsonParser) extends InterchangeJsonParser {
    import InterchangeJacksonJsonParser._

    type Mark = InterchangeJacksonJsonParser.Mark

    // State machine notes:
    //  - `_replaying` non-null means tokens are served from the recorded buffer chain, not the parser.
    //  - `_recording` points at the most recently recorded node while any mark is active.
    //  - `_activeMarks` holds all marks that have not yet been rewound to.
    private var _currentValueIsMissing = false
    private var _didCheckMissingValue = false // make sure to fail fast if anybody doesn't call `hasValue`
    private var _replaying: RecordBuffer = null
    private var _recording: RecordBuffer = null
    private var _activeMarks: List[Mark] = Nil

    /** Yield `true` if the current value is missing and `currentToken` should not be called */
    def hasValue: Boolean = {
        _didCheckMissingValue = true
        !_currentValueIsMissing
    }

    /** Signal that the current value is missing and that `currentToken` should fail because there is no valid token to give */
    def currentValueIsMissing(): Unit = {
        _didCheckMissingValue = false
        _currentValueIsMissing = true
    }

    /** Yield the current (i.e. most recently read) token */
    def currentToken: JsonToken =
        if (!_didCheckMissingValue) sys.error("decoder should have checked whether a value was present prior to calling currentToken")
        else if (_replaying == null) parser.getCurrentToken
        else _replaying.event.token

    /** Yield the location of the current token in the source material or `JsonLocation.NA` if not available */
    def currentLocation: JsonLocation =
        if (_replaying == null) parser.getCurrentLocation
        else _replaying.location

    /** Move the parser forward to the next token from the input. */
    def advanceToken(): CoderResult[Unit] = {
        _didCheckMissingValue = false
        _currentValueIsMissing = false
        _advance()
    }

    /**
     * Move the parser forward to the next token from the input but don't require a call to `hasValue` to follow.
     * This is the "real" form of `advanceToken` without the missing value protection.
     */
    def advanceTokenUnguarded(): CoderResult[Unit] = {
        _didCheckMissingValue = true
        _currentValueIsMissing = false
        _advance()
    }

    /** Mark the current location in the token stream and allow resuming to that point with `rewind` */
    def mark(): Mark = {
        if (!_didCheckMissingValue) sys.error("decoder should have checked whether a value was present prior to calling mark")
        val point =
            if (_replaying != null) {
                // marking while replaying, so just snap another copy of the replay point
                _replaying
            } else if (_recording != null) {
                // we're already recording, so just mark the current recording point
                _recording
            } else {
                // marking while reading from the parser, so set the recording point and use that
                val buf = _record()
                _recording = buf
                buf
            }
        val m = new Mark(point)
        _activeMarks ::= m
        //println(s"mark(): _recording = ${_recording}, _replaying = ${_replaying}, _activeMarks = ${_activeMarks}, mark = $m")
        m
    }

    /** Rewind to a given marked location */
    def rewind(m: Mark): Unit =
        if (m.consumed) sys.error("attempted to rewind back to consumed mark")
        else {
            _replaying = m.point
            m.consumed = true
            _activeMarks = _activeMarks.filterNot(_ == m)
            _didCheckMissingValue = true // mark blows up if you haven't checked
            _currentValueIsMissing = false
            //println(s"rewind($m): _replaying = ${_replaying}, _activeMarks = ${_activeMarks}, _recording = ${_recording}")
        }

    // Advance one token, reading from the replay chain when available, otherwise the parser.
    // Invariant assumed here: whenever `_activeMarks` is non-empty, `_recording` is non-null
    // (it is set in `mark()` before the mark is pushed). NOTE(review): verify this also holds
    // when a mark taken during replay is outlived by the replay chain being exhausted.
    private def _advance(): CoderResult[Unit] = {
        //println(s"_advance(): _replaying = ${_replaying}, _activeMarks = ${_activeMarks}, _recording = ${_recording}")
        if (_replaying == null || _replaying.next == null) {
            _replaying = null
            // not replaying or there is no more to replay, so read from the underlying parser
            val res = tryCatchResultG(terminal) {
                parser.nextToken()
                Okay.unit
            }
            if (_activeMarks.nonEmpty) {
                // but if there are active marks we should record what we just read and advance the recording point
                val buf = _record()
                _recording.next = buf
                //println(s"_advance: appended new buf = $buf, _recording = ${_recording}")
                _recording = buf
            } else {
                // if no active marks, no reason to record so make sure to clear that
                _recording = null
            }
            res
        } else {
            // _replaying is non-null and has more links, so just advance to the next link and call it a day
            _replaying = _replaying.next
            Okay.unit
        }
    }

    // Snapshot the parser's current token into a new (unlinked) RecordBuffer node.
    // Must only be called while reading directly from the parser (i.e. not replaying).
    private def _record(): RecordBuffer =
        _replaying match {
            case null =>
                val event = parser.getCurrentToken match {
                    case JsonToken.VALUE_NULL => RecordedNull
                    case JsonToken.VALUE_TRUE => RecordedTrue
                    case JsonToken.VALUE_FALSE => RecordedFalse
                    case JsonToken.VALUE_STRING => RecordedString(parser.getText)
                    case JsonToken.VALUE_NUMBER_INT => RecordedInteger(parser.getBigIntegerValue)
                    case JsonToken.VALUE_NUMBER_FLOAT => RecordedDecimal(parser.getDecimalValue)
                    case JsonToken.START_ARRAY => RecordedStartArray
                    case JsonToken.END_ARRAY => RecordedEndArray
                    case JsonToken.START_OBJECT => RecordedStartObject
                    case JsonToken.END_OBJECT => RecordedEndObject
                    case JsonToken.FIELD_NAME => RecordedFieldName(parser.getCurrentName)
                    case null => null // null means EOF in the parser, so this is acceptable
                    case other => sys.error(s"unexpected JSON token $other that shouldn't show up for normal JSON parser")
                }
                val buf = new RecordBuffer(event, parser.getCurrentLocation)
                //println(s"_record(): new buf = $buf")
                buf
            case buf =>
                sys.error("_record() called when not reading from the parser")
        }

    // The event being replayed, or null when reading straight from the parser.
    private def replayingEvent: RecordedParseEvent =
        if (_replaying == null) null
        else _replaying.event

    // Inclusive range check for the replayed-integer accessors below.
    private def inBound(bi: java.math.BigInteger, min: java.math.BigInteger, max: java.math.BigInteger): Boolean =
        bi.compareTo(min) >= 0 && bi.compareTo(max) <= 0

    /** Yield the current integer value as a `Byte`, throwing `JsonParseException` if the current value is out of bounds or not an integer */
    def byteValue: Byte =
        replayingEvent match {
            case null => parser.getByteValue
            case RecordedInteger(bi) if !inBound(bi, MIN_BYTE, MAX_BYTE) => throw new JsonParseException("number out of bounds", currentLocation)
            case RecordedInteger(bi) => bi.byteValue
            case _ => throw new JsonParseException("not an integer", currentLocation)
        }

    /** Yield the current integer value as a `Short`, throwing `JsonParseException` if the current value is out of bounds or not an integer */
    def shortValue: Short =
        replayingEvent match {
            case null => parser.getShortValue
            case RecordedInteger(bi) if !inBound(bi, MIN_SHORT, MAX_SHORT) => throw new JsonParseException("number out of bounds", currentLocation)
            case RecordedInteger(bi) => bi.shortValue
            case _ => throw new JsonParseException("not an integer", currentLocation)
        }

    /** Yield the current integer value as a `Int`, throwing `JsonParseException` if the current value is out of bounds or not an integer */
    def intValue: Int =
        replayingEvent match {
            case null => parser.getIntValue
            case RecordedInteger(bi) if !inBound(bi, MIN_INT, MAX_INT) => throw new JsonParseException("number out of bounds", currentLocation)
            case RecordedInteger(bi) => bi.intValue
            case _ => throw new JsonParseException("not an integer", currentLocation)
        }

    /** Yield the current integer value as a `Long`, throwing `JsonParseException` if the current value is out of bounds or not an integer */
    def longValue: Long =
        replayingEvent match {
            case null => parser.getLongValue
            case RecordedInteger(bi) if !inBound(bi, MIN_LONG, MAX_LONG) => throw new JsonParseException("number out of bounds", currentLocation)
            case RecordedInteger(bi) => bi.longValue
            case _ => throw new JsonParseException("not an integer", currentLocation)
        }

    /** Yield the current integer value as a `BigInteger`, throwing `JsonParseException` if the current value is not an integer */
    def bigIntegerValue: java.math.BigInteger =
        replayingEvent match {
            case null => parser.getBigIntegerValue
            case RecordedInteger(bi) => bi
            case _ => throw new JsonParseException("not an integer", currentLocation)
        }

    /** Yield the current decimal value as a `Float`, throwing `JsonParseException` if the current value is not a decimal */
    def floatValue: Float =
        replayingEvent match {
            case null => parser.getFloatValue
            case RecordedDecimal(bd) => bd.floatValue
            case _ => throw new JsonParseException("not a decimal", currentLocation)
        }

    /** Yield the current decimal value as a `Double`, throwing `JsonParseException` if the current value is not a decimal */
    def doubleValue: Double =
        replayingEvent match {
            case null => parser.getDoubleValue
            case RecordedDecimal(bd) => bd.doubleValue
            case _ => throw new JsonParseException("not a decimal", currentLocation)
        }

    /** Yield the current decimal value as a `BigDecimal`, throwing `JsonParseException` if the current value is not a decimal */
    def bigDecimalValue: java.math.BigDecimal =
        replayingEvent match {
            case null => parser.getDecimalValue
            case RecordedDecimal(bd) => bd
            case _ => throw new JsonParseException("not a decimal", currentLocation)
        }

    /** Yield the current string value, throwing `JsonParseException` if the current value is not a string */
    def stringValue: String =
        replayingEvent match {
            case null => parser.getText
            case RecordedString(s) => s
            case _ => throw new JsonParseException("not a string", currentLocation)
        }

    /** Yield the current field name, throwing `JsonParseException` if the current value is not a field */
    def fieldName: String =
        replayingEvent match {
            case null => parser.getCurrentName
            case RecordedFieldName(s) => s
            case _ => throw new JsonParseException("not a field name", currentLocation)
        }
}
/** Format which consumes and produces JSON via the Jackson streaming parser/generator. */
object JsonFormat extends Format {
    type Source = InterchangeJsonParser
    type Sink = InterchangeJsonGenerator

    /**
     * Default `JsonFactory` used by `JsonEncoder` and `JsonDecoder` methods where some other factory has not been
     * given explicitly or implicitly.
     *
     * While there's nothing to prevent it, it's highly recommended to not tweak settings of this factory, as it may
     * cause confusing side effects with other code that is expecting the default settings.
     */
    lazy val defaultFactory: JsonFactory = {
        val factory = new JsonFactory
        // Callers own their streams/readers; don't let Jackson close them.
        factory.disable(JsonParser.Feature.AUTO_CLOSE_SOURCE)
        factory.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET)
        factory
    }
}
object JsonEncoder extends JsonEncoderLPI {
    /** Materializer for `JsonEncoder`: summons the instance from implicit scope, e.g. `JsonEncoder[Int]`. */
    def apply[A](implicit encoder: JsonEncoder[A]): JsonEncoder[A] = encoder
}
/** Low-priority implicits: derive a `JsonEncoder` from any in-scope `JsonCoder` when no direct encoder exists. */
trait JsonEncoderLPI {
    implicit def fromCoder[A](implicit coder: JsonCoder[A]): JsonEncoder[A] = coder.encode
}
/** Common attributes of both `JsonEncoder` and `JsonDecoder` */
trait JsonEncoderOrDecoder {
    /**
     * Whether this coder might encode to `null` and therefore need a wrapper.
     * The primary use case of this is stacked `Option`s - in the case of `Option[Option[String]]` for example, it's impossible to tell
     * whether `null` means `None` or `Some(None)` so this bit is investigated by the outer `Option` coder and if `true` then the outer coder
     * will wrap a `Some` with a one-element array, so that `Some(Some("foo"))` encodes as `["foo"]`, `Some(None)` encodes as `[]` and `None`
     * encodes as nothing or `null` like `Option[String]` would.
     */
    val mightBeNull: Boolean

    /**
     * Whether this coder will encode/decode from an object.
     * Lets coders that want to add fields to (or inspect fields of) an object encoded by this coder do so —
     * for example, union coders using an intensional discriminant field ("type" field).
     */
    val codesAsObject: Boolean
}
/** Encoder which encodes JSON incrementally via a Jackson `JsonGenerator` */
@implicitNotFound(msg="No JsonEncoder for ${A} found in the implicit scope. Perhaps you forgot to import something from com.paytronix.utils.interchange.format.json.coders, or need to add some @derive annotations")
trait JsonEncoder[A] extends Encoder[A, JsonFormat.type] with JsonEncoderOrDecoder { outer =>
    /** Map a function over this encoder, yielding an encoder which takes in values of type `B` */
    def map[B](f: B => A): JsonEncoder[B] = mapKleisli(b => Okay(f(b)))

    /** Map a kleisli (function which may fail gracefully) over this encoder, yielding an encoder which takes in values of type `B` */
    def mapKleisli[B](k: B => Result[A]): JsonEncoder[B] = new JsonEncoder[B] {
        val mightBeNull = outer.mightBeNull
        val codesAsObject = outer.codesAsObject
        def run(b: B, sink: InterchangeJsonGenerator) = atTerminal(k(b)) >>= { outer.run(_, sink) }
    }

    /** Encode a value to a byte array using the given encoding (UTF-8 by default) */
    def toBytes(in: A, enc: JsonEncoding = JsonEncoding.UTF8, pretty: Boolean = false)
               (implicit jsonFactory: JsonFactory = JsonFormat.defaultFactory): Result[Array[Byte]] =
        withResource(new ByteArrayOutputStream()) { baos =>
            // Bug fix: `enc` was previously dropped here, so any non-UTF-8 encoding requested
            // by the caller was silently ignored and the UTF-8 default used instead.
            toOutputStream(in, baos, enc, pretty=pretty) map { _ => baos.toByteArray }
        }

    /**
     * Encode a value to an `OutputStream` with the specified encoding.
     * When using the default `JsonFactory` the underlying output stream will NOT be closed by this method completing
     */
    def toOutputStream(in: A, out: OutputStream, enc: JsonEncoding = JsonEncoding.UTF8, pretty: Boolean = false)
                      (implicit jsonFactory: JsonFactory = JsonFormat.defaultFactory): Result[Unit] =
        tryCatchResult {
            withResource(jsonFactory.createGenerator(out, enc)) { gen =>
                toGenerator(in, gen, pretty=pretty)
            }
        }

    /** Encode a value to a String */
    def toString(in: A, pretty: Boolean = false)(implicit jsonFactory: JsonFactory = JsonFormat.defaultFactory): Result[String] =
        withResource(new StringWriter()) { sw =>
            toWriter(in, sw, pretty=pretty) map { _ => sw.toString }
        }

    /**
     * Encode a value to a `Writer`.
     * When using the default `JsonFactory` the underlying writer will NOT be closed by this method completing
     */
    def toWriter(in: A, out: Writer, pretty: Boolean = false)(implicit jsonFactory: JsonFactory = JsonFormat.defaultFactory): Result[Unit] =
        tryCatchResult {
            withResource(jsonFactory.createGenerator(out)) { gen =>
                toGenerator(in, gen, pretty=pretty)
            }
        }

    /** Encode a value to a `JsonGenerator`. This differs from `run` in that it returns `Result[Unit]` not `ResultG[FailedPath, Unit]` */
    def toGenerator(in: A, gen: JsonGenerator, pretty: Boolean = false): Result[Unit] = {
        if (pretty) gen.useDefaultPrettyPrinter()
        formatFailedPath(run(in, new InterchangeJacksonJsonGenerator(gen)))
    }
}
object JsonDecoder extends JsonDecoderLPI {
    /** Materializer for `JsonDecoder`: summons the instance from implicit scope, e.g. `JsonDecoder[Int]`. */
    def apply[A](implicit decoder: JsonDecoder[A]): JsonDecoder[A] = decoder
}
/** Low-priority implicits: derive a `JsonDecoder` from any in-scope `JsonCoder` when no direct decoder exists. */
trait JsonDecoderLPI {
    implicit def fromCoder[A](implicit coder: JsonCoder[A]): JsonDecoder[A] = coder.decode
}
/** Decoder which consumes JSON via a the Jackson `JsonParser`, as wrapped with a `InterchangeJsonParser` which manages Interchange parsing state */
@implicitNotFound(msg="No JsonDecoder for ${A} found in the implicit scope. Perhaps you forgot to import something from com.paytronix.utils.interchange.format.json.coders, or need to add some @derive annotations")
trait JsonDecoder[A] extends Decoder[A, JsonFormat.type] with JsonEncoderOrDecoder { outer =>
    /** Map a function over this decoder, yielding a decoder which produces values of type `B` by transforming `A`s using the given function */
    def map[B](f: A => B): JsonDecoder[B] = mapKleisli(a => Okay(f(a)))

    /** Map a kleisli (function which may fail gracefully) over this decoder, yielding a decoder which produces values of type `B` */
    def mapKleisli[B](k: A => Result[B]): JsonDecoder[B] = new JsonDecoder[B] {
        val mightBeNull = outer.mightBeNull
        val codesAsObject = outer.codesAsObject
        def run(source: InterchangeJsonParser, outB: Receiver[B]) = {
            val outA = new Receiver[A]
            outer.run(source, outA) match {
                case _: Okay[_] =>
                    // Decode succeeded; apply the kleisli and, on failure, attach the terminal position.
                    k(outA.value) match {
                        case Okay(b) => outB(b)
                        case failed: FailedG[_] => failed.mapFailure { _ => source.terminal }
                    }
                case failed => failed
            }
        }
    }

    /** Wrap this decoder with a `defaultJsonDecoder` to provide a default in the case where the value is missing or null */
    def default(value: A): JsonDecoder[A] =
        container.defaultJsonDecoder(value)(this)

    /** Attempt conversion of a byte array to a value of the mapped type */
    def fromBytes(in: Array[Byte])(implicit jsonFactory: JsonFactory = JsonFormat.defaultFactory): Result[A] =
        fromBytesRange(in, 0, in.length)

    /** Attempt conversion of a byte array to a value of the mapped type */
    def fromBytesRange(in: Array[Byte], offset: Int, length: Int)(implicit jsonFactory: JsonFactory = JsonFormat.defaultFactory): Result[A] =
        tryCatchResult {
            withResource(jsonFactory.createParser(in, offset, length))(fromParser)
        }

    /** Attempt conversion of a character array to a value of the mapped type */
    def fromChars(in: Array[Char])(implicit jsonFactory: JsonFactory = JsonFormat.defaultFactory): Result[A] = fromCharsRange(in, 0, in.length)

    /** Attempt conversion of a character array to a value of the mapped type */
    def fromCharsRange(in: Array[Char], offset: Int, length: Int)(implicit jsonFactory: JsonFactory = JsonFormat.defaultFactory): Result[A] =
        tryCatchResult {
            withResource(jsonFactory.createParser(in, offset, length))(fromParser)
        }

    /** Attempt conversion of an input stream to a value of the mapped type */
    def fromInputStream(in: InputStream)(implicit jsonFactory: JsonFactory = JsonFormat.defaultFactory): Result[A] =
        tryCatchResult {
            withResource(jsonFactory.createParser(in))(fromParser)
        }

    /** Attempt conversion of JSON string to a value of the mapped type */
    def fromString(in: String)(implicit jsonFactory: JsonFactory = JsonFormat.defaultFactory): Result[A] =
        withResource(new StringReader(in)) { sr =>
            fromReader(sr)
        }

    /** Attempt conversion of an input reader to a value of the mapped type */
    def fromReader(in: Reader)(implicit jsonFactory: JsonFactory = JsonFormat.defaultFactory): Result[A] =
        tryCatchResult {
            withResource(jsonFactory.createParser(in))(fromParser)
        }

    /**
     * Decode a value from a `JsonParser`.
     * This differs from `run` in that it yields `Result[A]` rather than taking a `Receiver[A]` and yielding a `ResultG[FailedPath, Unit]`.
     */
    def fromParser(in: JsonParser): Result[A] = {
        val receiver = new Receiver[A]
        val ijp = new InterchangeJacksonJsonParser(in)
        // Prime the parser with the first token before running the decoder.
        tryCatchValue(ijp.advanceToken()) >> formatFailedPath(run(ijp, receiver)).map { _ => receiver.value }
    }
}
object JsonCoder {
    /** Materializer for `JsonCoder`: summons the instance from implicit scope. Equivalent to `implicitly[JsonCoder[A]]`. */
    def apply[A](implicit coder: JsonCoder[A]): JsonCoder[A] = coder

    /** Assemble a `JsonCoder` out of a `JsonEncoder` / `JsonDecoder` pair. */
    def make[A](implicit encoder: JsonEncoder[A], decoder: JsonDecoder[A]): JsonCoder[A] =
        new JsonCoder[A] {
            val encode = encoder
            val decode = decoder
        }
}
/** Module of a JSON streaming encoder and decoder pair */
@implicitNotFound(msg="No JsonCoder for ${A} found in the implicit scope. Perhaps you forgot to import something from com.paytronix.utils.interchange.format.json.coders, or need to add some @derive annotations")
trait JsonCoder[A] extends Coder[JsonEncoder, JsonDecoder, A, JsonFormat.type] {
    /** Encoder for the type `A` */
    val encode: JsonEncoder[A]

    /** Decoder for the type `A` */
    val decode: JsonDecoder[A]

    /**
     * Map a bijection (pair of kleislis `A => Result[B]` and `B => Result[A]`) over this coder pair, yielding a new coder pair for some
     * type `B` which can be bijected to the underlying type `A`.
     *
     * For example, to make a `JsonCoder` that is encoded in JSON as a string but decodes to Scala as a sequence of space separated tokens:
     *
     *    val tokenizedStringJsonCoder = scalar.stringJsonCoder.mapBijection(bijection (
     *        (tokens: Seq[String]) => Okay(tokens.mkString(" ")),
     *        (s: String) => Okay(s.split(' '))
     *    ))
     */
    def mapBijection[B](bijection: BijectionT[Result, Result, B, A]) =
        JsonCoder.make(encode.mapKleisli(bijection.to), decode.mapKleisli(bijection.from))

    /** Wrap the decoder with a `defaultJsonDecoder` to provide a default in the case where the value is missing or null */
    def default(value: A): JsonCoder[A] =
        JsonCoder.make(encode, decode.default(value))
}
/**
* Specify that a field's value should "flatten" into the enclosing object rather than being a discrete field.
*
* For example, the usual coding of:
*
* @derive.structure.implicitCoder
* final case class Foo(a: Int, b: Bar)
* @derive.structure.implicitCoder
* final case class Bar(c: Int, d: Int)
*
* would be:
*
* {
* "a": 123,
* "b": { "c": 123, "d": 123 }
* }
*
* however if flattening is turned on for Bar:
*
* @derive.structure.implicitCoder
* final case class Foo(a: Int, @flatten b: Bar)
* @derive.structure.implicitCoder
* final case class Bar(c: Int, d: Int)
*
 * then that coding becomes:
*
* {
* "a": 123,
* "c": 123, "d": 123
* }
*/
class flatten extends StaticAnnotation
| paytronix/utils-open | interchange/json/src/main/scala/com/paytronix/utils/interchange/format/json/package.scala | Scala | apache-2.0 | 44,711 |
package breeze.stats.distributions
/*
Copyright 2009 David Hall, Daniel Ramage
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import breeze.optimize.DiffFunction
import breeze.linalg._
import breeze.math.{TensorSpace, MutableCoordinateSpace}
import breeze.numerics._
import breeze.numerics
/**
 * A multinomial distribution over elements of type `I`, parameterized by a (not necessarily
 * normalized) non-negative weight tensor.
 *
 * @author dlwh
 */
case class Multinomial[T,I](params: T)(implicit ev: T=>QuasiTensor[I, Double], rand: RandBasis=Rand) extends DiscreteDistr[I] {
  /** Total (unnormalized) mass; used as the normalizer for probabilities and sampling. */
  val sum = params.sum
  require(sum != 0.0, "There's no mass!")

  // Representation check: reject any negative weight up front.
  for ((k,v) <- params.iterator) {
    if (v < 0) {
      throw new IllegalArgumentException("Multinomial has negative mass at index "+k)
    }
  }

  /** Draw a sample by walking the active weights until the scaled uniform draw is used up. */
  def draw():I = {
    var remaining = rand.uniform.get() * sum
    assert(!remaining.isNaN, "NaN Probability!")
    val entries = params.activeIterator
    while (entries.hasNext) {
      val (elem, weight) = entries.next()
      remaining -= weight
      if (remaining <= 0) return elem
    }
    // Floating-point slack can leave a sliver of unclaimed mass; fall back to an active key.
    params.activeKeysIterator.next()
  }

  def probabilityOf(e : I) = params(e) / sum
  override def unnormalizedProbabilityOf(e:I) = params(e)

  override def toString = ev(params).activeIterator.mkString("Multinomial{",",","}")
}
/**
 * Provides routines to create Multinomials
 * @author dlwh
 */
object Multinomial {
  /**
   * Exponential-family view of the multinomial, with a Dirichlet conjugate prior.
   * Parameters (`Parameter = T`) are kept in log space: see `mle` (returns `log`) and
   * `distribution` (exponentiates before constructing the `Multinomial`).
   */
  class ExpFam[T,I](exemplar: T)(implicit space: TensorSpace[T, I, Double]) extends ExponentialFamily[Multinomial[T,I],I] with HasConjugatePrior[Multinomial[T,I],I] {
    import space._
    type ConjugatePrior = Dirichlet[T,I]
    val conjugateFamily = new Dirichlet.ExpFam[T,I](exemplar)

    def predictive(parameter: conjugateFamily.Parameter) = new Polya(parameter)

    // Dirichlet posterior update: add one pseudo-count per observed event.
    def posterior(prior: conjugateFamily.Parameter, evidence: TraversableOnce[I]) = {
      val copy : T = space.copy(prior)
      for( e <- evidence) {
        copy(e) += 1.0
      }
      copy
    }

    type Parameter = T
    // Sufficient statistics are per-element event counts; they form a weighted monoid.
    case class SufficientStatistic(t: T) extends breeze.stats.distributions.SufficientStatistic[SufficientStatistic] {
      def +(tt: SufficientStatistic) = SufficientStatistic(t + tt.t)
      def *(w: Double) = SufficientStatistic(t * w)
    }

    def emptySufficientStatistic = SufficientStatistic(zeros(exemplar))

    // Statistic for a single observation: a one-hot count vector.
    def sufficientStatisticFor(t: I) = {
      val r = zeros(exemplar)
      r(t) = 1.0
      SufficientStatistic(r)
    }

    def mle(stats: SufficientStatistic) = log(stats.t)

    // Negative log-likelihood (and its gradient) of log-space parameters `x` given counts.
    def likelihoodFunction(stats: SufficientStatistic) = new DiffFunction[T] {
      def calculate(x: T) = {
        val nn: T = logNormalize(x)
        val lp = nn dot stats.t
        val sum = stats.t.sum
        val exped = numerics.exp(nn)
        val grad = exped * sum - stats.t
        (-lp,grad)
      }
    }

    def distribution(p: Parameter) = {
      new Multinomial(numerics.exp(p))
    }
  }
}
| tjhunter/scalanlp-core | learn/src/main/scala/breeze/stats/distributions/Multinomial.scala | Scala | apache-2.0 | 3,263 |
package models
import scalikejdbc._
import skinny.orm.{Alias, SkinnyCRUDMapperWithId}
case class Item(id: Long, name: String, detail: Option[ItemDetail] = None)
/** Skinny-ORM mapper for [[Item]], keyed by `Long` id. */
object Item extends SkinnyCRUDMapperWithId[Long, Item] {
  import Aliases.{i, id}

  override val defaultAlias: Alias[Item] = createAlias("i")

  // "detail" is excluded from auto-construction; it is merged in by the association below.
  override def extract(rs: WrappedResultSet, n: ResultName[Item]): Item = autoConstruct(rs, n, "detail")

  override def idToRawValue(id: Long): Any = id
  override def rawValueToId(value: Any): Long = value.toString.toLong

  // Associate ItemDetail (alias `id`) by matching ids and merge it into Item.detail;
  // `byDefault` makes the join part of the default query.
  belongsToWithFkAndJoinCondition[ItemDetail](
    right = ItemDetail,
    fk = "id",
    on = sqls.eq(i.id, id.id),
    merge = (i, d) => i.copy(detail = d)
  ).byDefault

  /** Insert a new item with the given name and return the generated id. */
  def create(name: String)(implicit session: DBSession): Long =
    createWithAttributes('name -> name)

  def create(i: Item)(implicit session: DBSession): Long = create(i.name)

  /** Fetch the name of the item with the given id; throws if no such row exists. */
  def name(id: Long)(implicit session: DBSession = AutoSession): String =
    findById(id).map(_.name).get
}
object PersistItem {
  /** Return the id of the item with this name, creating the item first if it does not exist. */
  def getItemId(name: String)(implicit session: DBSession): Long = {
    import Aliases.i
    Item.findBy(sqls.eq(i.name, name)).fold(Item.create(name))(_.id)
  }
}
| ponkotuy/FactorioRecipe | app/models/Item.scala | Scala | apache-2.0 | 1,252 |
package jgo.tools.compiler
package parser
package exprs
import interm.types._
import interm.codeseq._
import interm.expr._
import interm.expr.Combinators._
/**
 * Implicit adapters that lift position-aware combinator functions (of shape
 * `args => Pos => Err[R]`) into functions over the parser's `~`-joined result
 * tuples, sequencing the intermediate `Err` values before applying the
 * combinator at the recorded position.
 */
trait ExprUtils {
  self: Base =>

  // Lift a curried (A, Pos) function to accept the parser's tuple form.
  protected implicit def convTuple[A, R](f: (A, Pos) => R): ((A, Pos)) => R =
    f.tupled

  // Prefix operator: position precedes the operand.
  protected implicit def convPrefix[A, R](f: A => Pos => Err[R]): (Pos ~ Err[A]) => Err[R] = {
    case p ~ aErr =>
      for {
        a <- aErr
        res <- f(a)(p)
      } yield res
  }

  // Suffix operator: operand precedes the position.
  protected implicit def convSuffix[A, R](f: A => Pos => Err[R]): (Err[A] ~ Pos) => Err[R] = {
    case aErr ~ p =>
      for {
        a <- aErr
        res <- f(a)(p)
      } yield res
  }

  // Binary operator: operand, position (of the operator), operand.
  protected implicit def convBinary[A, B, R](f: (A, B) => Pos => Err[R])
      : (Err[A] ~ Pos ~ Err[B]) => Err[R] = {
    case aErr ~ p ~ bErr =>
      for {
        (a, b) <- (aErr, bErr)
        res <- f(a, b)(p)
      } yield res
  }

  // Ternary combinator over three Err operands at one position.
  protected implicit def convTernary[A, B, C, R](f: (A, B, C) => Pos => Err[R])
      : (Err[A] ~ Pos ~ Err[B] ~ Err[C]) => Err[R] = {
    case aErr ~ p ~ bErr ~ cErr =>
      for {
        (a, b, c) <- (aErr, bErr, cErr)
        res <- f(a, b, c)(p)
      } yield res
  }

  // Built-in function invocation: first argument is already a plain value.
  protected implicit def convBfuncInvoke[A, B, R](f: (A, B) => Pos => Err[R])
      : (A ~ Pos ~ Err[B]) => Err[R] = {
    case a ~ p ~ bErr =>
      for {
        b <- bErr
        res <- f(a, b)(p)
      } yield res
  }

  // Built-in function invocation with a type argument.
  protected implicit def convBfuncTypeInvoke[A, B, C, R](f: (A, B, C) => Pos => Err[R])
      : (A ~ Pos ~ Err[B] ~ Err[C]) => Err[R] = {
    case a ~ p ~ bErr ~ cErr =>
      for {
        (b, c) <- (bErr, cErr)
        res <- f(a, b, c)(p)
      } yield res
  }

  // Member selection: expression, position of the dot, selected name.
  protected implicit def convSelect(f: (Expr, String) => Pos => Err[Expr])
      : (Err[Expr] ~ Pos ~ String) => Err[Expr] = {
    case eErr ~ p ~ str =>
      for {
        e <- eErr
        res <- f(e, str)(p)
      } yield res
  }

  // Slice expression: target plus optional low/high bounds, lifted out of Option.
  protected implicit def convSlice(f: (Expr, Option[Expr], Option[Expr]) => Pos => Err[Expr])
      : (Err[Expr] ~ Pos ~ Option[Err[Expr]] ~ Option[Err[Expr]]) => Err[Expr] = {
    case e1Err ~ p ~ e2Ugly ~ e3Ugly =>
      val e2Err = Err.liftOpt(e2Ugly)
      val e3Err = Err.liftOpt(e3Ugly)
      for {
        (e1, e2, e3) <- (e1Err, e2Err, e3Err)
        res <- f(e1, e2, e3)(p)
      } yield res
  }

  // Map a plain function over an Err value.
  protected def map[A, B](f: A => B): Err[A] => Err[B] = _ map f
}
| thomasmodeneis/jgo | src/src/main/scala/jgo/tools/compiler/parser/exprs/ExprUtils.scala | Scala | gpl-3.0 | 2,398 |
package org.tuubes.core.engine.messages
import org.tuubes.core.engine.ExecutionGroup
/**
 * Message carrying the [[ExecutionGroup]] the recipient should move to.
 *
 * @author TheElectronWill
 */
final case class MoveToGroup(newGroup: ExecutionGroup) extends EngineMessage
| mcphoton/Photon-Server | core/src/main/scala/org/tuubes/core/engine/messages/MoveToGroup.scala | Scala | lgpl-3.0 | 202 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala.swing
object GridPanel {
  /** Pass for `rows0`/`cols0` to let the grid adapt: `java.awt.GridLayout` treats 0 as "any number of" rows or columns. */
  val Adapt = 0
}
/**
 * A panel that lays out its contents in a uniform grid.
 *
 * @see java.awt.GridLayout
 */
class GridPanel(rows0: Int, cols0: Int) extends Panel with SequentialContainer.Wrapper {
  override lazy val peer =
    new javax.swing.JPanel(new java.awt.GridLayout(rows0, cols0)) with SuperMixin

  // The peer's layout manager, which this class delegates all grid properties to.
  private def gridLayout = peer.getLayout.asInstanceOf[java.awt.GridLayout]

  def rows: Int = gridLayout.getRows
  def rows_=(n: Int): Unit = { gridLayout.setRows(n) }

  def columns: Int = gridLayout.getColumns
  def columns_=(n: Int): Unit = { gridLayout.setColumns(n) }

  /** Vertical gap between rows, in pixels. */
  def vGap: Int = gridLayout.getVgap
  def vGap_=(n: Int): Unit = { gridLayout.setVgap(n) }

  /** Horizontal gap between columns, in pixels. */
  def hGap: Int = gridLayout.getHgap
  def hGap_=(n: Int): Unit = { gridLayout.setHgap(n) }
}
| benhutchison/scala-swing | src/main/scala/scala/swing/GridPanel.scala | Scala | bsd-3-clause | 1,911 |
/*
* La Trobe University - Distributed Deep Learning System
* Copyright 2016 Matthias Langer (t3l@threelights.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package edu.latrobe.blaze.regularizers
import edu.latrobe.blaze._
/**
 * Dependent regularizers control the execution of child regularizers.
 */
abstract class ComplexRegularizer[TBuilder <: ComplexRegularizerBuilder[_]]
  extends RegularizerEx[TBuilder] {

  // Eagerly build one regularizer per child builder, propagating this
  // regularizer's platform hint and seed to each child.
  final val children
  : Seq[Regularizer] = builder.children.map(
    _.build(platformHint, seed)
  )
}
/** Builder counterpart of [[ComplexRegularizer]]; supplies the child regularizer builders. */
abstract class ComplexRegularizerBuilder[TThis <: ComplexRegularizerBuilder[_]]
  extends RegularizerExBuilder[TThis] {

  /** Builders for the child regularizers to be constructed alongside this one. */
  def children
  : Seq[RegularizerBuilder]
}
| bashimao/ltudl | blaze/src/main/scala/edu/latrobe/blaze/regularizers/ComplexRegularizer.scala | Scala | apache-2.0 | 1,216 |
package org.jetbrains.plugins.scala.lang.parser.parsing.types
import org.jetbrains.plugins.scala.ScalaBundle
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.ScalaElementType
import org.jetbrains.plugins.scala.lang.parser.parsing.ParsingRule
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
/**
 * TypeCaseClause ::= ‘case’ InfixType ‘=>’ Type [nl]
 */
object TypeCaseClause extends ParsingRule {
  override def parse(implicit builder: ScalaPsiBuilder): Boolean = {
    val marker = builder.mark()
    builder.getTokenType match {
      case ScalaTokenTypes.kCASE =>
        builder.advanceLexer()
        // Newlines are not significant inside the clause.
        builder.disableNewlines()
      case _ =>
        // Not a case clause: drop the marker without consuming anything.
        marker.drop()
        return false
    }
    if (!InfixType()) builder.error(ScalaBundle.message("wrong.type"))
    builder.getTokenType match {
      case ScalaTokenTypes.tFUNTYPE =>
        builder.advanceLexer()
        builder.restoreNewlinesState()
      case _ =>
        // Missing `=>`: report the error but still complete a (partial)
        // TYPE_CASE_CLAUSE node so downstream consumers see the clause.
        builder.restoreNewlinesState()
        builder.error(ScalaBundle.message("fun.sign.expected"))
        marker.done(ScalaElementType.TYPE_CASE_CLAUSE)
        return true
    }
    if (!Type()) builder.error(ScalaBundle.message("wrong.type"))
    marker.done(ScalaElementType.TYPE_CASE_CLAUSE)
    true
  }
}
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/parser/parsing/types/TypeCaseClause.scala | Scala | apache-2.0 | 1,345 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.utils.tf
import com.intel.analytics.bigdl.dllib.utils.TestUtils.processPath
import java.io.{OutputStream, File => JFile}
import java.nio.ByteOrder
import com.google.protobuf.CodedOutputStream
import com.intel.analytics.bigdl.dllib.nn.Module
import com.intel.analytics.bigdl.dllib.nn.abstractnn.Activity
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.{NumericWildCard, TensorNumeric}
import com.intel.analytics.bigdl.dllib.utils.{BigDLSpecHelper, FileWriter, T}
import com.intel.analytics.bigdl.dllib.utils.tf.Tensorflow.const
import com.intel.analytics.bigdl.dllib.utils._
import org.tensorflow.framework.{GraphDef, NodeDef}
import scala.language.postfixOps
import scala.reflect.ClassTag
import scala.sys.process._
import scala.util.control.NonFatal
/**
 * Base spec for checking a single TensorFlow op against BigDL's implementation.
 * The op (with its inputs serialised as const nodes) is written to a GraphDef
 * file, executed both by BigDL (`Module.loadTF`) and by a helper python script,
 * and the resulting tensors are compared element-wise.
 */
abstract class TensorflowSpecHelper extends BigDLSpecHelper {

  /**
   * Cancels (rather than fails) the test when python/tensorflow is unavailable
   * or the Tensorflow models path is not on `sys.path`.
   */
  protected def tfCheck(): Unit = {
    var exitValue : String = ""
    try {
      // NOTE(review): `print ','.join(...)` is python-2 statement syntax; under
      // a python-3 interpreter this throws and the test is cancelled -- confirm
      // whether python 3 should be supported here.
      exitValue = ((Seq("python", "-c", "import sys; print ','.join(sys.path)"))!!)
      ((Seq("python", "-c", "import tensorflow"))!!)
    } catch {
      case NonFatal(e) => cancel("python or tensorflow is not installed", e)
    }
    if (!exitValue.contains("models")) {
      cancel("Tensorflow models path is not exported")
    }
  }

  /** Runs `python <cmd>`; returns true iff the process exits with status 0. */
  protected def runPython(cmd: String): Boolean = {
    try {
      logger.info("run command\\n" + cmd)
      val proc = s"python $cmd".run
      // exitValue() blocks until the spawned process terminates.
      return proc.exitValue() == 0
    } catch {
      case NonFatal(e) => false
    }
  }

  /** Invokes the bundled tf/save_test.py script against a serialised graph. */
  protected def runPythonSaveTest(graphPath: String, outputSuffix: String) : Boolean = {
    val resource = getClass().getClassLoader().getResource("tf")
    val path = processPath(resource.getPath()) + JFile.separator +
      s"save_test.py $graphPath $outputSuffix"
    runPython(path)
  }

  /**
   * Compare the output from tf operation and BigDL
   * @param nodeDefBuilder builder for the op under test; its name identifies the output node
   * @param inputs input tensors, serialised as const nodes feeding the op
   * @param outputIndex start from 0
   * @param delta error tolerant
   */
  protected def compare[T: ClassTag](nodeDefBuilder: NodeDef.Builder,
    inputs: Seq[Tensor[_]], outputIndex: Int,
    delta: Double = 1e-5)(implicit ev: TensorNumeric[T])
  : Unit = {
    val graphFile = saveGraph(nodeDefBuilder, inputs)
    val bigdlOutput = runGraphBigDL[T](graphFile, nodeDefBuilder.getName)
    val bigdlOutputTensor = if (bigdlOutput.isTensor) {
      require(outputIndex == 0, s"invalid output index $outputIndex")
      bigdlOutput.asInstanceOf[Tensor[_]]
    } else {
      // Table outputs are 1-based, hence the +1.
      bigdlOutput.toTable.apply[Tensor[_]](outputIndex + 1)
    }
    val tfOutput = runGraphTF[T](graphFile, nodeDefBuilder.getName + s":$outputIndex")
    bigdlOutputTensor.asInstanceOf[Tensor[NumericWildCard]]
      .almostEqual(tfOutput.asInstanceOf[Tensor[NumericWildCard]], delta) should be(true)
  }

  /** Runs [[compare]] once per requested output index on fresh builder clones. */
  protected def compare[T: ClassTag](nodeDefBuilder: NodeDef.Builder,
    inputs: Seq[Tensor[_]], outputIndexes: Seq[Int],
    delta: Double)(implicit ev: TensorNumeric[T])
  : Unit = {
    outputIndexes.foreach(compare(nodeDefBuilder.clone(), inputs, _, delta))
  }

  /**
   * Returns the (BigDL, TensorFlow) output pair for the given op without
   * asserting on it, so callers can perform custom comparisons.
   */
  protected def getResult[T: ClassTag, D](nodeDefBuilder: NodeDef.Builder, inputs: Seq[Tensor[_]],
    outputIndex: Int)(implicit ev: TensorNumeric[T]): (Tensor[D], Tensor[D]) = {
    val graphFile = saveGraph(nodeDefBuilder, inputs)
    val bigdlOutput = runGraphBigDL[T](graphFile, nodeDefBuilder.getName)
    val bigdlOutputTensor = if (bigdlOutput.isTensor) {
      require(outputIndex == 0, s"invalid output index $outputIndex")
      bigdlOutput.asInstanceOf[Tensor[D]]
    } else {
      bigdlOutput.toTable.apply[Tensor[D]](outputIndex + 1)
    }
    val tfOutput = runGraphTF(graphFile, nodeDefBuilder.getName + s":$outputIndex")
    (bigdlOutputTensor, tfOutput.asInstanceOf[Tensor[D]])
  }

  /**
   * Serialises the op plus const-wrapped inputs to a temporary GraphDef file
   * and returns the file path.
   */
  private def saveGraph(nodeDefBuilder: NodeDef.Builder, inputs: Seq[Tensor[_]]): String = {
    var i = 0
    val inputConsts = inputs.map(input => {
      i += 1
      const(input, s"TensorflowLoaderSpecInput_$i", ByteOrder.LITTLE_ENDIAN)
    })
    inputConsts.foreach(p => nodeDefBuilder.addInput(p.getName))
    val graphBuilder = GraphDef.newBuilder()
    graphBuilder.addNode(nodeDefBuilder.build())
    inputConsts.foreach(graphBuilder.addNode(_))
    var fw: FileWriter = null
    var out: OutputStream = null
    try {
      val file = createTmpFile()
      fw = FileWriter(file.getAbsolutePath)
      out = fw.create(true)
      val output = CodedOutputStream.newInstance(out)
      val graph = graphBuilder.build()
      graph.writeTo(output)
      output.flush()
      out.flush()
      file.getAbsolutePath
    } finally {
      // Both resources are closed even if serialisation throws.
      if (out != null) out.close()
      if (fw != null) fw.close()
    }
  }

  /** Loads the graph into BigDL and evaluates the named output node. */
  private def runGraphBigDL[T: ClassTag](graph: String, output: String)
    (implicit ev: TensorNumeric[T]): Activity = {
    val m = Module.loadTF[T](graph, Seq(), Seq(output))
    m.forward(null)
  }

  /**
   * Evaluates the named output with the run-graph.py helper (which re-serialises
   * the result as a const node "result"), then loads that back through BigDL.
   */
  private def runGraphTF[T: ClassTag](graph: String, output: String)
    (implicit ev: TensorNumeric[T]): Tensor[_] = {
    tfCheck()
    val outputFile = createTmpFile()
    val outputFolder = getFileFolder(outputFile.getAbsolutePath())
    val outputFileName = getFileName(outputFile.getAbsolutePath())
    val resource = getClass().getClassLoader().getResource("tf")
    val path = processPath(resource.getPath()) + JFile.separator +
      s"run-graph.py $graph $output $outputFolder $outputFileName result"
    runPython(path)
    val m = Module.loadTF[T](outputFile.getAbsolutePath, Seq(), Seq("result"))
    m.forward(null).asInstanceOf[Tensor[_]]
  }
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/utils/tf/TensorflowSpecHelper.scala | Scala | apache-2.0 | 6,302 |
package mau.mauannotation
import scala.concurrent.ExecutionContext.Implicits.global
import mau._
import mau.test._
/**
 * Exercises the @customIndex macro annotation against a live Redis instance
 * (via MauRedisSpec): saving, finding, deleting and counting entities through
 * custom single-value and tuple-keyed indexes.
 *
 * NOTE(review): `findByFirstLetter`, `deleteByFirstLetter`, `countByFirstLetter`
 * and `findByFirstLetterAge` are not defined in this file -- presumably they are
 * generated by the @mauModel/@customIndex macros from the index names below;
 * confirm against the macro implementation.
 */
class CustomIndexAnnotationTest extends MauRedisSpec("CustomIndexAnnotationTest", true) {

  describe("@customIndex annotation") {
    it("should allow to find instances") {
      val personMauRepo = Person.mauRepo
      val person = Person(None, "Hans", 27)
      val savedPerson = await(personMauRepo.save(person))
      val id = savedPerson.id.get
      val retrievedPeople = await(personMauRepo.findByFirstLetter('H'))
      retrievedPeople should be(Seq(savedPerson))
      val retrievedPerson = retrievedPeople(0)
      retrievedPerson.name should be(person.name)
    }

    it("should allow to delete instances") {
      val personMauRepo = Person.mauRepo
      val person = Person(None, "Hans", 27)
      val savedPerson = await(personMauRepo.save(person))
      val id = savedPerson.id.get
      // deleteBy* returns the number of deleted entities.
      val deleteResult = await(personMauRepo.deleteByFirstLetter('H'))
      deleteResult should be(1)
      val retrievedPerson = await(personMauRepo.get(id))
      retrievedPerson should be(None)
    }

    it("should allow to count instances") {
      val personMauRepo = Person.mauRepo
      val person = Person(None, "Hans", 27)
      val savedPerson = await(personMauRepo.save(person))
      val id = savedPerson.id.get
      val countResult = await(personMauRepo.countByFirstLetter('H'))
      countResult should be(1)
    }
  }

  describe("@customIndex annotation (tuple)") {
    it("should allow to find instances") {
      val personMauRepo = Person.mauRepo
      val person = Person(None, "Hans", 27)
      val savedPerson = await(personMauRepo.save(person))
      val id = savedPerson.id.get
      // Tuple-keyed lookup: both components must match the saved entity.
      val retrievedPeople = await(personMauRepo.findByFirstLetterAge(('H', 27)))
      retrievedPeople should be(Seq(savedPerson))
      val retrievedPerson = retrievedPeople(0)
      retrievedPerson.name should be(person.name)
    }
  }

  // Test fixture: a Redis-backed model with JSON serialisation.
  @mauModel("Mau:Test:CustomIndexAnnotationTest", true)
  @sprayJson
  case class Person(
    id: Option[Id],
    name: String,
    age: Int)

  object Person {

    // Index keyed by the first letter of the name; save/get functions must
    // produce matching key strings for lookups to find saved entities.
    @customIndex("FirstLetter")
    private val firstLetterIndex = CustomIndexDeclaration[Person, Char](
      keySaveFunction =
        (person: Person) ⇒ Set(s"firstLetterOfName=${person.name.headOption.getOrElse("")}"),
      keyGetFunction =
        (char: Char) ⇒ Set(s"firstLetterOfName=$char")
    )

    // Composite index keyed by (first letter, age).
    @customIndex("FirstLetterAge")
    private val firstLetterAgeIndex = CustomIndexDeclaration[Person, (Char, Int)](
      keySaveFunction =
        (person: Person) ⇒ Set(s"firstLetter=${person.name.headOption.getOrElse("")}:age=${person.age}"),
      keyGetFunction =
        (charAge: (Char, Int)) ⇒ Set(s"firstLetter=${charAge._1}:age=${charAge._2}")
    )
  }
}
| ExNexu/mau | mau-annotation/src/test/scala/mau/annotation/CustomIndexAnnotationTest.scala | Scala | apache-2.0 | 2,807 |
// Factory producing Hello5 instances whose `name` is fixed to the factory's
// constructor parameter via an early initializer.
class Hello5Factory(paramName: String) {
  // NOTE(review): inside the early-initializer block, `this` denotes the
  // Hello5Name instance under construction, not the enclosing factory --
  // confirm that `this.paramName` resolves as intended here;
  // `Hello5Factory.this.paramName` may be what was meant.
  class Hello5Name extends { val name = this.paramName } with Hello5
  def create = new Hello5Name
}
| grzegorzbalcerek/scala-book-examples | examples/TraitParams7.scala | Scala | mit | 142 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mnemonic.spark
import java.io.{File, FileOutputStream}
import java.util.concurrent.TimeUnit
import scala.reflect.ClassTag
import scala.collection.JavaConverters._
import org.apache.mnemonic.ConfigurationException
import org.apache.mnemonic.DurableType
import org.apache.mnemonic.EntityFactoryProxy
import org.apache.mnemonic.NonVolatileMemAllocator
import org.apache.mnemonic.Utils
import org.apache.mnemonic.collections.DurableSinglyLinkedList
import org.apache.mnemonic.collections.DurableSinglyLinkedListFactory
import org.apache.mnemonic.sessions.{DurableInputSession, ObjectCreator, SessionIterator, TransformFunction}
// Mutable per-iterator cursor: `position` indexes the next memory-pool file to
// open; `streamMode` caches whether the session reads from the upstream
// iterator (Some(true)) or from persisted durable pools (Some(false)), and is
// None until first determined.
case class SessionIteratorState(var position: Int, var streamMode: Option[Boolean])
/**
 * Input session that yields durable objects either by transforming an upstream
 * iterator on the fly ("stream mode") or by restoring previously persisted
 * durable memory pools ("durable mode"). The mode is decided at most once per
 * session by racing to create `claimfile`: the partition that creates it first
 * streams (and persists via `outsess`); all others read the persisted pools.
 *
 * @param serviceName          name of the non-volatile memory allocator service
 * @param durableTypes         durable type descriptors for list restoration
 * @param entityFactoryProxies entity factory proxies for list restoration
 * @param slotKeyId            allocator slot holding the list's root handler
 * @param memPoolListGen       supplier of the persisted memory-pool files (durable mode)
 * @param outsess              output session used to persist streamed elements
 * @param srciterator          upstream source iterator (stream mode)
 * @param f                    transform from a source element to a durable object
 * @param claimfile            marker file used to claim stream mode
 */
private[spark] class MneDurableInputSession[V: ClassTag, T: ClassTag] (
    serviceName: String,
    durableTypes: Array[DurableType],
    entityFactoryProxies: Array[EntityFactoryProxy],
    slotKeyId: Long,
    var memPoolListGen: ()=>Option[Array[File]],
    outsess: MneDurableOutputSession[V], /*would be closed once no more element from source iterator*/
    srciterator: Iterator[T],
    f: (T, ObjectCreator[V, NonVolatileMemAllocator]) => Option[V],
    claimfile: File)
  extends DurableInputSession[V, NonVolatileMemAllocator, T, SessionIteratorState] {

  // At most one live iterator per session; a new call to `iterator` force-closes it.
  var iteratorHolder: SessionIterator[V, NonVolatileMemAllocator, T, SessionIteratorState] = null
  var streamMode: Option[Boolean] = None

  initialize

  /** Pushes the constructor arguments into the Java base class's setters. */
  def initialize: Unit = {
    setServiceName(serviceName)
    setDurableTypes(durableTypes)
    setEntityFactoryProxies(entityFactoryProxies)
    setSlotKeyId(slotKeyId)
    setOutSession(outsess)
    setTransformFunction(new TransformFunction[V, NonVolatileMemAllocator, T] {
      override def transform(value:T, objcreator:ObjectCreator[V, NonVolatileMemAllocator]):V = {
        // None is mapped to null, matching the Java-side contract.
        f(value, objcreator).getOrElse(null).asInstanceOf[V]
      }
    })
  }

  /**
   * Determines (and caches) the candidate mode from what was supplied:
   * stream mode needs source iterator + output session + transform + claim file;
   * durable mode needs a pool-list generator. Throws if neither is satisfied.
   */
  def updateMode: Option[Boolean] = {
    if (null != srciterator && null != getOutSession &&
      null != getTransformFunction && null != claimfile) {
      streamMode = Option(true)
    } else if (null != memPoolListGen) {
      streamMode = Option(false)
    } else {
      throw new ConfigurationException("Cannot determine which mode Input Session works on")
    }
    streamMode
  }

  /**
   * Eagerly drains the source iterator through the transform, persisting each
   * produced object via the output session (only if this session wins the
   * claim). Afterwards the session is left in durable mode.
   */
  def transformAhead(): Unit = {
    if (updateMode.get) {
      streamMode = Option(tryClaim)
    }
    if (streamMode.get) {
      try {
        for (item <- srciterator) {
          f(item, outsess) match {
            case Some(res) => outsess.post(res)
            case None =>
          }
        }
      } finally {
        // The output session is single-use: close it once the source is drained.
        outsess.close
        setOutSession(null)
        setTransformFunction(null)
      }
      streamMode = Option(false)
    }
  }

  /** Attaches a fresh cursor state to a newly created session iterator. */
  override def init(sessiter: SessionIterator[V, NonVolatileMemAllocator, T, SessionIteratorState]): Boolean = {
    sessiter.setState(SessionIteratorState(0, streamMode))
    null != sessiter.getState
  }

  /** Blocks until the writer's "<pool>.lck" lock file disappears. */
  def waitNextPool(file: File): Unit = {
    val lckfile: File = new File(file.toString + ".lck")
    while (lckfile.exists) {
      TimeUnit.SECONDS.sleep(1)
    }
  }

  /**
   * Attempts to claim stream mode by creating the claim file.
   * NOTE(review): exists-then-create is not atomic; concurrent claimers could
   * both "win" -- confirm callers guarantee exclusion at a higher level.
   */
  def tryClaim: Boolean = {
    if (null == claimfile) {
      false
    } else {
      if (claimfile.exists) {
        false
      } else {
        new FileOutputStream(claimfile).close
        true
      }
    }
  }

  /**
   * Prepares the iterator's next data source: in durable mode, opens the next
   * memory-pool file and restores its singly linked list; in stream mode,
   * attaches the upstream source iterator. Returns false when exhausted.
   */
  override def initNextPool(sessiter: SessionIterator[V, NonVolatileMemAllocator, T, SessionIteratorState]): Boolean = {
    var ret: Boolean = false
    var flist: Array[File] = null
    val state: SessionIteratorState = sessiter.getState
    // Release the allocator of the previously visited pool, if any.
    if (null != sessiter.getAllocator) {
      sessiter.getAllocator.close
      sessiter.setAllocator(null)
    }
    flist = memPoolListGen().getOrElse(null)
    if (state.streamMode.isEmpty) {
      state.streamMode = Option(tryClaim)
      if (state.streamMode.get) {
        println(s"Input Session run in stream mode with ${claimfile}")
      } else {
        println(s"Input Session run in durable mode with ${claimfile}")
      }
    }
    if (!state.streamMode.get) {
      if (null != flist && flist.length > state.position) {
        waitNextPool(flist(state.position))
        sessiter.setAllocator(new NonVolatileMemAllocator(Utils.getNonVolatileMemoryAllocatorService(
          getServiceName), 1024000L, flist(state.position).toString, false))
        assert(null != sessiter.getAllocator)
        sessiter.setHandler(sessiter.getAllocator.getHandler(getSlotKeyId))
        if (0L != sessiter.getHandler) {
          val dsllist: DurableSinglyLinkedList[V] = DurableSinglyLinkedListFactory.restore(
            sessiter.getAllocator, getEntityFactoryProxies, getDurableTypes, sessiter.getHandler, false)
          if (null != dsllist) {
            sessiter.setIterator(dsllist.iterator)
            ret = null != sessiter.getIterator
            if (ret) {
              println(s"Input Session obtained the iterator of ${flist(state.position)}")
            }
          } else {
            throw new ConfigurationException("The singly linked list cannot be restored")
          }
        } else {
          throw new ConfigurationException("The value of slot handler is null")
        }
        state.position += 1
      }
    } else {
      sessiter.setSourceIterator(srciterator.asJava)
      ret = null != sessiter.getSourceIterator
      if (ret) {
        println(s"Input Session obtained the iterator of upstream with ${claimfile}")
      }
    }
    ret
  }

  /** Returns a new session iterator, force-closing any previous one. */
  override def iterator: SessionIterator[V, NonVolatileMemAllocator, T, SessionIteratorState] = {
    if (null != iteratorHolder) {
      iteratorHolder.close
      println("force to close an input session iterator prematurely")
    }
    iteratorHolder = super.iterator
    iteratorHolder
  }

  /** Releases the output session and any live iterator in addition to the base resources. */
  override def close: Unit = {
    super.close();
    if (null != getOutSession) {
      getOutSession.close
      setOutSession(null)
    }
    if (null != iteratorHolder) {
      iteratorHolder.close
      iteratorHolder = null
    }
  }
}
object MneDurableInputSession {

  /**
   * Durable-mode-only factory: the session can only restore persisted memory
   * pools (no upstream iterator, output session, or claim file is supplied).
   */
  def apply[V: ClassTag](
    serviceName: String,
    durableTypes: Array[DurableType],
    entityFactoryProxies: Array[EntityFactoryProxy],
    slotKeyId: Long,
    memPoolListGen: ()=>Option[Array[File]]): MneDurableInputSession[V, Nothing] = {
    val ret = new MneDurableInputSession[V, Nothing] (
      serviceName, durableTypes, entityFactoryProxies,
      slotKeyId, memPoolListGen, null, null, null, null)
    ret
  }

  /**
   * Full factory: the session may either stream from `srciterator` (persisting
   * through `outsess` if it wins the `claimfile` race) or restore pools.
   */
  def apply[V: ClassTag, T: ClassTag](
    serviceName: String,
    durableTypes: Array[DurableType],
    entityFactoryProxies: Array[EntityFactoryProxy],
    slotKeyId: Long,
    memPoolListGen: ()=>Option[Array[File]],
    outsess: MneDurableOutputSession[V],
    srciterator: Iterator[T],
    f: (T, ObjectCreator[V, NonVolatileMemAllocator]) => Option[V],
    claimfile: File):
      MneDurableInputSession[V, T] = {
    val ret = new MneDurableInputSession[V, T] (
      serviceName, durableTypes, entityFactoryProxies,
      slotKeyId, memPoolListGen, outsess, srciterator, f, claimfile)
    ret
  }
}
| lql5083psu/incubator-mnemonic | mnemonic-spark/mnemonic-spark-core/src/main/scala/org/apache/mnemonic/spark/MneDurableInputSession.scala | Scala | apache-2.0 | 8,229 |
/**
* This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt.internal.bsp
/**
 * Test Result
 * @param originId An optional request id to know the origin of this report.
 * @param statusCode A status code for the execution.
 */
final class TestResult private (
  val originId: Option[String],
  val statusCode: Int) extends Serializable {
  // Generated by sbt-contraband (see file header) -- regenerate rather than
  // editing by hand. Constructor is private; build via the companion's
  // apply/with* methods.
  override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match {
    case x: TestResult => (this.originId == x.originId) && (this.statusCode == x.statusCode)
    case _ => false
  })
  // Standard 17/37 polynomial hash over the type name and both fields.
  override def hashCode: Int = {
    37 * (37 * (37 * (17 + "sbt.internal.bsp.TestResult".##) + originId.##) + statusCode.##)
  }
  override def toString: String = {
    "TestResult(" + originId + ", " + statusCode + ")"
  }
  private[this] def copy(originId: Option[String] = originId, statusCode: Int = statusCode): TestResult = {
    new TestResult(originId, statusCode)
  }
  def withOriginId(originId: Option[String]): TestResult = {
    copy(originId = originId)
  }
  // Convenience overload: a plain String is wrapped null-safely via Option(_).
  def withOriginId(originId: String): TestResult = {
    copy(originId = Option(originId))
  }
  def withStatusCode(statusCode: Int): TestResult = {
    copy(statusCode = statusCode)
  }
}
object TestResult {
  // Factory overloads: one taking an Option directly, one wrapping a plain
  // String null-safely via Option(_). Generated code -- do not edit by hand.
  def apply(originId: Option[String], statusCode: Int): TestResult = new TestResult(originId, statusCode)
  def apply(originId: String, statusCode: Int): TestResult = new TestResult(Option(originId), statusCode)
}
| xuwei-k/xsbt | protocol/src/main/contraband-scala/sbt/internal/bsp/TestResult.scala | Scala | apache-2.0 | 1,526 |
/*
* Copyright (c) 2013, Scodec
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package scodec
package codecs
import scodec.bits.*
/** Tests for `fixedSizeBits`: encoding/decoding values in an exactly-sized bit field. */
class FixedSizeCodecTest extends CodecSuite:

  test("roundtrip") {
    roundtrip(fixedSizeBits(32, utf8), "test")
    roundtrip(fixedSizeBits(8, uint8), 12)
    roundtrip(fixedSizeBits(16, uint8), 12)
  }

  test("pad appropriately") {
    // An 8-bit value in a 16-bit field is right-padded with zeros: 0x0c00.
    assertEquals(fixedSizeBits(16, uint8).encode(12).require, BitVector(hex"0c00"))
  }

  test("fail encoding when value is too large to be encoded by size codec") {
    val encoded = utf8.encode("test").require
    // NOTE(review): this first assertion verifies that decoding a 32-bit field
    // leaves trailing bits as remainder -- it looks unrelated to this test's
    // name; confirm whether it belongs in a separate decode test.
    assertEquals(
      fixedSizeBits(32, utf8).decode(encoded ++ BitVector.low(48)),
      Attempt.successful(
        DecodeResult("test", BitVector.low(48))
      )
    )
    // "test" needs 32 bits, so a 24-bit fixed field must reject it.
    assertEquals(
      fixedSizeBits(24, utf8).encode("test"),
      Attempt.failure(
        Err("[test] requires 32 bits but field is fixed size of 24 bits")
      )
    )
  }
| scodec/scodec | unitTests/src/test/scala/scodec/codecs/FixedSizeCodecTest.scala | Scala | bsd-3-clause | 2,432 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package services
import connectors.PersonalDetailsValidationConnector
import javax.inject.{Inject, Singleton}
import models.PersonalDetails
import uk.gov.hmrc.http.HeaderCarrier
import scala.concurrent.Future
@Singleton
class PersonalDetailsValidationService @Inject()(personalDetailsValidationConnector: PersonalDetailsValidationConnector) {

  /**
   * Looks up the outcome of a previously submitted personal-details validation.
   * Thin delegation to [[PersonalDetailsValidationConnector]].
   *
   * @param validationId identifier returned when the validation was initiated
   * @param hc           carrier propagating headers/session to the downstream call
   */
  def retrieveValidationResult(validationId: String)(implicit hc: HeaderCarrier): Future[PersonalDetails] = {
    personalDetailsValidationConnector.retrieveValidationResult(validationId)
  }
}
| hmrc/vat-registration-frontend | app/services/PersonalDetailsValidationService.scala | Scala | apache-2.0 | 1,145 |
/** Default (Template) Project
*
* Copyright (c) 2017 Hugo Firth
* Email: <me@hugofirth.com/>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.ncl.la.soar.db
/**
 * Top-level configuration for the glance project; currently wraps only the
 * database settings.
 *
 * Note: investigate the compiler warning that advises against defining simple
 * case classes like this inside a package object.
 */
case class Config(database: DatabaseConfig)
| NewcastleComputingScience/student-outcome-accelerator | db/src/main/scala/uk/ac/ncl/la/soar/db/Config.scala | Scala | apache-2.0 | 954 |
package com.lightreporter.registration
/**
 * Created by y28yang on 4/5/2016.
 *
 * Callback implemented by components that must react when the set of users
 * changes.
 */
trait UserChangedNotifiable {
  // NOTE(review): elements are presumably user names or ids -- confirm with callers.
  def userChanged(users: Iterable[String])
}
| wjingyao2008/firsttry | lightreporter/src/main/scala/com/lightreporter/Registration/UserChangedNotifiable.scala | Scala | apache-2.0 | 162 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import org.apache.spark.annotation.DeveloperApi
/**
* :: DeveloperApi ::
* Exception thrown when a task is explicitly killed (i.e., task failure is expected).
*/
@DeveloperApi
class TaskKilledException(val reason: String) extends RuntimeException(reason) {
  // Propagate `reason` to RuntimeException so getMessage/toString report why
  // the task was killed instead of returning null.

  /** Kept for source/binary compatibility with callers that supply no reason. */
  def this() = this("unknown reason")
}
| aokolnychyi/spark | core/src/main/scala/org/apache/spark/TaskKilledException.scala | Scala | apache-2.0 | 1,121 |
package com.hyenawarrior.OldNorseGrammar.grammar.morphophonology
import com.hyenawarrior.OldNorseGrammar.grammar.Syllable.Length.SHORT
import com.hyenawarrior.OldNorseGrammar.grammar.{Syllable, Syllables}
import com.hyenawarrior.OldNorseGrammar.grammar.morphophonology.ProductiveTransforms.VowelLengthening
import com.hyenawarrior.OldNorseGrammar.grammar.phonology.Consonant
import com.hyenawarrior.OldNorseGrammar.grammar.phonology.Consonant._
import com.hyenawarrior.auxiliary.&
/**
 * Created by HyenaWarrior on 2017.10.20..
 *
 * Reversible string transformations between Old Norse verb roots and stems.
 * Each transformation offers `apply` (root -> transformed stem) and `unapply`
 * (transformed stem -> root), returning None when the rule does not apply.
 */
object StemTransform {

  /** A reversible stem rewrite; `unapply` is intended to invert `apply`. */
  trait Transformation {

    def apply(stemStr: String): Option[String]
    def unapply(stemStr: String): Option[String]
  }

  /** Rewrite that only replaces the syllable nucleus (SRC <-> DST). */
  trait NucleusTransformation extends Transformation {

    protected val SRC_NUCLEUS: String
    protected val DST_NUCLEUS: String

    final def apply(stemStr: String): Option[String] = transform(stemStr, SRC_NUCLEUS, DST_NUCLEUS)
    final def unapply(stemStr: String): Option[String] = transform(stemStr, DST_NUCLEUS, SRC_NUCLEUS)

    protected def transform(stemStr: String, nucleus: String, newNucleus: String): Option[String]
  }

  /** Breaking: nucleus 'e' <-> 'ja', subject to the onset/coda conditions below. */
  object Breaking extends NucleusTransformation {

    // assume that a stem has only one syllable (?)
    val SRC_NUCLEUS: String = "e"
    val DST_NUCLEUS: String = "ja"

    override def transform(stemStr: String, nucleus: String, newNucleus: String): Option[String] = {
      val idxOfJa = stemStr indexOf nucleus
      if(idxOfJa == -1) return None

      val idxOfNucleusEnd = idxOfJa + nucleus.length
      val onset = stemStr.substring(0, idxOfJa)
      val coda = stemStr substring idxOfNucleusEnd

      if (!isEligible(onset, coda)) return None

      Some(onset + newNucleus + coda)
    }

    /**
      https://lrc.la.utexas.edu/eieol/norol/20#grammar_1398
      https://lrc.la.utexas.edu/eieol/norol/60#grammar_1454

      > This rule only applies to [the infinitive and present plural forms]* of verbs whose stem ends in
        a consonant cluster beginning with l or r.
      > Fracture does not occur at all if *e is preceded by v, l, or r, e.g. verða, leðr.

      * Sg 1-3 has I-umlaut, that reverses -ja- to -e- with the help of semivowel-deletion.
        So I assume that it's applied to the whole present stem.
      */
    private def isEligible(onset: String, coda: String): Boolean = {
      // NOTE(review): assumes `coda` has at least two characters; a shorter
      // coda throws StringIndexOutOfBoundsException -- confirm callers
      // guarantee this.
      val prevCons = onset.lastOption.getOrElse(' ')
      val clusterFirstCons = coda.charAt(0)
      val clusterSecondCons = coda.charAt(1)

      val firstIsVLR = "vlr" contains prevCons
      val secondIsLR = "lr" contains clusterFirstCons
      val thirdIsCons = isConsonant(clusterSecondCons)

      !firstIsVLR && secondIsLR && thirdIsCons
    }
  }

  // TODO: Should it be renamed as A-mutation?
  /** Nucleus 'jú' <-> 'jó' when the nucleus is followed by a dental consonant. */
  object JuToJo extends NucleusTransformation {

    protected val SRC_NUCLEUS: String = "jú"
    protected val DST_NUCLEUS: String = "jó"

    protected def transform(stemStr: String, nucleus: String, newNucleus: String): Option[String] = {
      val idxOfNucleus = stemStr.indexOf(nucleus)
      val idxOfNucleusEnd = idxOfNucleus + nucleus.length

      if(isEligible(stemStr, idxOfNucleusEnd)) {
        val onset = stemStr.substring(0, idxOfNucleus)
        val coda = stemStr.substring(idxOfNucleusEnd)

        Some(onset + newNucleus + coda)
      } else None
    }

    private def isEligible(stemStr: String, idxOfNucleusEnd: Int): Boolean = (stemStr.length > idxOfNucleusEnd) && {
      val nextLetter = stemStr.charAt(idxOfNucleusEnd)
      Consonant.isDental(nextLetter)
    }
  }

  /** Raising: 'e' <-> 'i' before an 'n'-initial coda or a 'j'-final coda. */
  object Raising extends NucleusTransformation {

    val SRC_NUCLEUS: String = "e"
    val DST_NUCLEUS: String = "i"

    override def transform(stemStr: String, nucleus: String, newNucleus: String): Option[String] = {
      // idxOfE > 0 also rejects stems that start with the nucleus.
      val idxOfE = stemStr.indexOf(nucleus)
      val coda = stemStr.substring(idxOfE + nucleus.length)

      if(idxOfE > 0 && (coda.startsWith("n") || coda.endsWith("j"))) {
        val onset = stemStr.substring(0, idxOfE)
        Some(onset + newNucleus + coda)

      } else None
    }
  }

  // TODO: make the code generic
  /** Raising in the perfect: 'o' <-> 'u' before an 'n'-initial coda. */
  object PerfectRaising extends NucleusTransformation {

    val SRC_NUCLEUS: String = "o"
    val DST_NUCLEUS: String = "u"

    override def transform(stemStr: String, nucleus: String, newNucleus: String): Option[String] = {
      val idxOfE = stemStr.indexOf(nucleus)
      val coda = stemStr.substring(idxOfE + nucleus.length)

      if(idxOfE > 0 && coda.startsWith("n")) {
        val onset = stemStr.substring(0, idxOfE)
        Some(onset + newNucleus + coda)

      } else None
    }
  }

  /** Nasal + voiced stop <-> devoiced geminate (e.g. final "nd" -> "tt"). */
  object NasalAssimilation extends Transformation {

    override def apply(stemStr: String): Option[String] = {
      val (prefix, lastChars) = split(stemStr)

      val newSuffix = lastChars.toSeq match {
        case _ :+ c :+ d if isNasal(c) && isVoicedStop(d) => Some(s"${devoice(d)}" * 2)
        case _ => None
      }

      newSuffix.map(prefix + _)
    }

    // Splits off the last two characters of the stem.
    private def split(stemStr: String) = stemStr splitAt stemStr.length - 2

    override def unapply(stemStr: String): Option[String] = {
      val (prefix, lastChars) = split(stemStr)

      val suffix = lastChars.toSeq match {
        case _ :+ c :+ d if c==d && isVoicelessStop(c) =>
          // 'pp' came from 'mb'; other geminates came from 'n' + stop.
          val n = if(c=='p') 'm' else 'n'
          Some(s"$n${voice(d)}")

        case _ => None
      }

      suffix.map(prefix + _)
    }
  }

  /** Final "ld" <-> "lt": devoicing of a stop after a lateral. */
  object DevoiceAfterLateral extends Transformation {

    override def apply(stemStr: String): Option[String] = swap(stemStr, "ld", "lt")
    override def unapply(stemStr: String): Option[String] = swap(stemStr, "lt", "ld")

    private def swap(stemStr: String, from: String, to: String): Option[String] = {
      val (prefix, lastChars) = stemStr splitAt stemStr.length - 2

      if(lastChars == from) Some(prefix + to) else None
    }
  }

  /** Drops a stem-final 'g' with compensatory vowel lengthening; always succeeds. */
  object ReduceStemFinalG extends Transformation {

    private val reduceFinalG = "^(.+?)g$".r

    override def apply(stemStr: String): Option[String] = Some {
      stemStr match {
        case reduceFinalG(reducedStemStr) => VowelLengthening(reducedStemStr)
        case _ => stemStr
      }
    }

    override def unapply(stemStr: String): Option[String] = Some {
      val VowelLengthening(shortenedStemStr) = stemStr

      // Only re-attach 'g' after a vowel; a consonant-final stem never lost one.
      if (!isConsonant(shortenedStemStr.last))
        shortenedStemStr + "g"
      else
        shortenedStemStr
    }
  }

  // root <-> stem
  /** J-augmentation: geminate a single final 'g', append 'j', then apply Raising. */
  object JAugment extends Transformation {

    // Negative lookbehind: match a final 'g' only when it is not already doubled.
    private val singleG = "(?<!g)g$".r
    private val doubleG = "gg$".r

    override def apply(stemStr: String): Option[String] = {
      val hasE = stemStr.indexOf('e') != -1

      if(hasE) {
        val augmentedStem = singleG.replaceFirstIn(stemStr, "gg") + "j"
        Raising(augmentedStem)

      } else {
        None
      }
    }

    override def unapply(stemStr: String): Option[String] = stemStr match {
      case Raising(unRaisedStemStr) =>
        val withOutAugment = unRaisedStemStr stripSuffix "j"
        Some(doubleG.replaceAllIn(withOutAugment, "g"))

      case _ => None
    }
  }

  /**
    * restore the J-augmented stem
    * The only purpose of it, is to undo the semivowel deletion
    */
  object FixJAugmentation {

    def unapply(stemStr: String): Option[String] = {
      val Syllables(sys @ (sy :: _)) = stemStr

      val endsSingleCons = sys.length == 1 && sys.last.coda.length == 1

      sy.nucleus match {
        case "i" | "í" if endsSingleCons || stemStr.endsWith("gg") => Some(if(stemStr endsWith "j") stemStr else stemStr + "j")
        case _ => None
      }
    }
  }

  /**
    * restore the V-augmented stem
    * The only purpose of it, is to undo the semivowel deletion
    *
    * [https://lrc.la.utexas.edu/eieol/norol/10#grammar_1389]
    * The w (v in the orthography) of the stem remained only when it followed
    *   - a short syllable,
    *   - a g, or a k,
    *   - and preceded a or u.
    *
    * <SHORT|"[:alpha:]+[kg]">w<"[au][:alpha:]*">
    */
  @deprecated(message = "Use the split versions below")
  object FixVAugmentation {

    // this regex also prevent to add a final -v to a stem that already has augmentation
    private val velarEnd = "^(.+(?:ng|gg|kk))$".r

    def unapply(stemStr: String): Option[String] = stemStr match {
      // first syllable is affected by (V-augmented) U-Umlaut and stem ends in a velar consonant
      case Syllables(Syllable(_, "a" | "y", _, _, _) :: _) & velarEnd(_) => Some(stemStr + "v")

      // the last syllable of the stem is short
      case Syllables(_ :+ Syllable(_, _, _, _, SHORT)) => Some(stemStr + "v")

      case _ => None
    }
  }

  /** Re-appends 'v' after a short last syllable (split-out half of FixVAugmentation). */
  object FixVAugmentatAfterShortSyllable {

    def unapply(stemStr: String): Option[String] = stemStr match {
      // Already augmented: pass through unchanged.
      case s if s.endsWith("v") => Some(stemStr)

      // the last syllable of the stem is short
      case Syllables(_ :+ Syllable(_, "a" | "i" | "e", _, _, SHORT)) => Some(stemStr + "v")

      case _ => None
    }
  }

  /** Re-appends 'v' after a velar-final stem (split-out half of FixVAugmentation). */
  object FixVAugmentatAfterVelar {

    // this regex also prevent to add a final -v to a stem that already has augmentation
    private val velarEnd = "^(.+(?:ng|gg|kk))$".r

    def unapply(stemStr: String): Option[String] = stemStr match {
      // Already augmented: pass through unchanged.
      case s if s.endsWith("v") => Some(stemStr)

      // first syllable is affected by (V-augmented) U-Umlaut and stem ends in a velar consonant
      case Syllables(Syllable(_, "a" | "i" | "e", _, _, _) :: _) & velarEnd(_) => Some(stemStr + "v")

      case _ => None
    }
  }

  /** Applies I-umlaut only to stems ending in a velar consonant. */
  object VelarIUmlaut extends Transformation {

    private def stemEndsInVelar(stemStr: String) = Consonant isVelar stemStr.last

    override def apply(stemStr: String): Option[String] = if(stemEndsInVelar(stemStr)) I_Umlaut(stemStr) else None

    override def unapply(stemStr: String): Option[String] = stemStr match {
      case I_Umlaut(normalizedStem) if stemEndsInVelar(stemStr) => Some(normalizedStem)
      case _ => None
    }
  }
}
| HyenaSoftware/IG-Dictionary | OldNorseGrammarEngine/src/main/scala/com/hyenawarrior/OldNorseGrammar/grammar/morphophonology/StemTransform.scala | Scala | lgpl-3.0 | 9,943 |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* */
package io.github.mandar2812.dynaml.algebra
import breeze.linalg.NumericOps
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import scala.collection.immutable.NumericRange
/**
* @author mandar2812 date: 28/09/2016.
*
* A distributed vector backed by a spark [[RDD]]
*/
class SparkVector(baseVector: RDD[(Long, Double)], size: Long = -1L, sanityChecks: Boolean = true)
    extends SparkMatrix(baseVector.map(c => ((c._1, 0L), c._2)), size, 1L, sanityChecks)
    with NumericOps[SparkVector] with SparkVectorLike[Double] {

  // Backing RDD of (index, value) pairs; a var so persist/unpersist act on the
  // same reference (exposed as `_vector` — presumably via SparkVectorLike).
  protected var vector = baseVector

  // A vector is modelled as a single-column matrix.
  override lazy val cols = 1L

  override def repr: SparkVector = this

  // Transpose of a column vector: the corresponding (dual) row vector.
  override def t: DualSparkVector = new DualSparkVector(_vector)

  // Slice by a Long index range; kept entries are re-indexed to start at 0.
  def apply(r: NumericRange[Long]): SparkVector =
    new SparkVector(_vector.filterByRange(r.min, r.max).map(e => (e._1 - r.min, e._2)))

  // Same slicing for an Int Range.
  def apply(r: Range): SparkVector =
    new SparkVector(_vector.filterByRange(r.min, r.max).map(e => (e._1 - r.min, e._2)))

  // Cache the backing RDD in memory, spilling to disk when it does not fit.
  override def persist: Unit = {
    vector.persist(StorageLevel.MEMORY_AND_DISK)
  }

  // Drop the cached copies of the backing RDD.
  override def unpersist: Unit = {
    vector.unpersist()
  }
}
object SparkVector {

  /**
   * Tabulate a [[SparkVector]]: evaluates `eval` at every index in `list`.
   * Sanity checks are skipped because the indices come straight from `list`.
   */
  def apply(list: RDD[Long])(eval: (Long) => Double) = new SparkVector(
    list.map(e => (e, eval(e))),
    sanityChecks = false)

  /**
   * Concatenate the given vectors vertically, shifting each vector's indices
   * by the total length of the vectors preceding it.
   *
   * NOTE(review): `sizes.slice(0, i).sum` makes this O(n^2) in the number of
   * vectors, and `reduce` throws on an empty argument list — confirm callers
   * always pass at least one vector.
   */
  def vertcat(vectors: SparkVector*): SparkVector = {
    val sizes = vectors.map(_.rows)
    new SparkVector(vectors.zipWithIndex.map(couple => {
      val offset = sizes.slice(0, couple._2).sum
      couple._1._vector.map(c => (c._1+offset, c._2))
    }).reduce((a,b) => a.union(b)))
  }
} | transcendent-ai-labs/DynaML | dynaml-core/src/main/scala/io/github/mandar2812/dynaml/algebra/SparkVector.scala | Scala | apache-2.0 | 2405 |
// Copyright (C) 2011-2012 the original author or authors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.scalastyle.scalariform
import org.junit.Test
import org.scalastyle.file.CheckerTest
import org.scalatest.junit.AssertionsForJUnit
// scalastyle:off magic.number
/** Tests for [[MagicNumberChecker]]: literals assigned to `var`s (and passed as
 *  arguments) are flagged as magic numbers, while `val` assignments and the
 *  default ignore list (-1, 0, 1, 2) are not.
 */
class MagicNumberCheckerTest extends AssertionsForJUnit with CheckerTest {
  val key = "magic.number"
  val classUnderTest = classOf[MagicNumberChecker]

  // val assignments are never reported, regardless of value
  @Test def testVal(): Unit = {
    val source = """
package foobar

class Foobar {
  val foo0 = -2
  val foo1 = -1
  val foo2 = 0
  val foo3 = 1
  val foo4 = 2
  val foo5 = 3
  val foo6 = 4
}
"""

    assertErrors(List(), source)
  }

  // var assignments outside the default ignore list (-1, 0, 1, 2) are reported
  @Test def testVar(): Unit = {
    val source = """
package foobar

class Foobar {
  var foo0 = -2
  var foo1 = -1
  var foo2 = 0
  var foo3 = 1
  var foo4 = 2
  var foo5 = 3
  var foo6 = 4
}
"""

    assertErrors(List(columnError(5, 13), columnError(10, 13), columnError(11, 13)), source)
  }

  // signed literals and literals used as method arguments are reported too
  @Test def testVar2(): Unit = {
    val source = """
package foobar

class Foobar {
  var foo6 = 4
  var foo7 = +4
  var foo8 = -4
  var bar1 = fn(7, -5)
  var bar2 = fn(1, -5)

  def fn(i: Int, j: Int) = i + j
}
"""

    assertErrors(List(columnError(5, 13), columnError(6, 13), columnError(7, 13), columnError(8, 16), columnError(8, 19), columnError(9, 19)), source)
  }

  // same as testVal, with Long literals
  @Test def testValLong(): Unit = {
    val source = """
package foobar

class Foobar {
  val foo0 = -2L
  val foo1 = -1L
  val foo2 = 0L
  val foo3 = 1L
  val foo4 = 2L
  val foo5 = 3L
  val foo6 = 4L
}
"""

    assertErrors(List(), source)
  }

  // same as testVar, with Long literals
  @Test def testVarLong(): Unit = {
    val source = """
package foobar

class Foobar {
  var foo0 = -2L
  var foo1 = -1L
  var foo2 = 0L
  var foo3 = 1L
  var foo4 = 2L
  var foo5 = 3L
  var foo6 = 4L
}
"""

    assertErrors(List(columnError(5, 13), columnError(10, 13), columnError(11, 13)), source)
  }

  // same as testVar2, with Long literals
  @Test def testVar2Long(): Unit = {
    val source = """
package foobar

class Foobar {
  var foo6 = 4L
  var foo7 = +4L
  var foo8 = -4L
  var bar1 = fn(7L, -5L)
  var bar2 = fn(1L, -5L)

  def fn(i: Int, j: Int) = i + j
}
"""

    assertErrors(List(columnError(5, 13), columnError(6, 13), columnError(7, 13), columnError(8, 16), columnError(8, 20), columnError(9, 20)), source)
  }

  // a trailing space in the comma-separated "ignore" parameter must be tolerated
  @Test def testIgnoreParamShouldTolerateSpaces(): Unit = {
    val source = """
package foobar

class Foobar {
  var fooOk: Long = 1
  var fooFail: Long = 100L
}
"""

    assertErrors(List(), source, params = Map("ignore" -> "-1,0,1,2,100 "))
  }
}
| scalastyle/scalastyle | src/test/scala/org/scalastyle/scalariform/MagicNumberCheckerTest.scala | Scala | apache-2.0 | 3,178 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.internal.scaladsl.persistence.cassandra
import akka.actor.ActorSystem
import akka.event.Logging
import akka.persistence.cassandra.query.scaladsl.CassandraReadJournal
import akka.persistence.query.PersistenceQuery
import akka.persistence.query.scaladsl.EventsByTagQuery
import com.lightbend.lagom.internal.persistence.cassandra.CassandraKeyspaceConfig
import com.lightbend.lagom.internal.scaladsl.persistence.AbstractPersistentEntityRegistry
/**
* Internal API
*/
private[lagom] final class CassandraPersistentEntityRegistry(system: ActorSystem)
    extends AbstractPersistentEntityRegistry(system) {

  private val logger = Logging.getLogger(system, getClass)

  // Fail fast at construction time if either Cassandra plugin's keyspace
  // configuration is invalid (journal first, then snapshot store).
  for (pluginId <- Seq("cassandra-journal", "cassandra-snapshot-store"))
    CassandraKeyspaceConfig.validateKeyspace(pluginId, system.settings.config, logger)

  // Route read-side event queries through the Cassandra read journal.
  override protected val queryPluginId = Some(CassandraReadJournal.Identifier)
}
| rstento/lagom | persistence-cassandra/scaladsl/src/main/scala/com/lightbend/lagom/internal/scaladsl/persistence/cassandra/CassandraPersistentEntityRegistry.scala | Scala | apache-2.0 | 1,041 |
package tryp
package state
package core
import shapeless.{HList, Nat}
import shapeless.ops.{hlist, nat}
import shapeless.tag.@@
import cats.{Eval, Monad}
import cats.data.{StateT, WriterT}
// NOTE(review): @exportTypes/@exportNames/@exportVals/@export are project
// macro annotations that generate the actual aliases — confirm behaviour there.

// Re-exported type aliases.
@exportTypes(CState, InternalMessage, StateIO, Message, FastMessage, Fast, TransResultElem, LoopSendable)
trait Types
{
  // State transition over the list of cell states, producing IO effects.
  type SLR = cats.data.StateT[cats.Eval, tryp.state.core.CStates, tryp.state.core.IOs]
}

// Re-exported names, plus the shapeless HNil type/value pair.
@exportNames(Pristine, Cell, Parcel, ControlIO, IOStateIO, StateIO, Restart, LogError, Comm)
trait Names
{
  type HNil = shapeless.HNil
  val HNil: shapeless.HNil = shapeless.HNil
}

// Re-exported values.
@exportVals(replaceCell, replaceCellFast, Loop, Exit, transition, transitionFast, process, processAll, ExitLoop,
  NopMessage, LoopData, NopIO, InstallComm, InitCell, RestartAsync)
trait Vals

// Aggregates the three export traits into a single importable mixin.
@export
trait Exports
extends Types
with Names
with Vals
{
  val exit = tryp.state.core.ExitLoop
}

// Aggregates the syntax (ops) traits.
trait All
extends ToMessageOps
with ToLoopSendableTaskOps
object `package`
extends tryp.AllSyntax
{
  val HNil: shapeless.HNil = shapeless.HNil
  type HNil = shapeless.HNil

  // Collections of IO effects, messages and cell states threaded through transitions.
  type IOs = List[StateIO]
  type Msgs = List[Message]
  type CStates = List[CState]

  // Writer over Eval accumulating IO effects alongside a value (optionally absent).
  type CellW[A] = WriterT[Eval, IOs, A]
  type CellWO[A] = WriterT[Eval, IOs, Option[A]]

  // State transformers stacked on the IO-accumulating writer.
  type S[Ss, A] = StateT[CellW, Ss, A]
  type SO[Ss, A] = StateT[CellW, Ss, Option[A]]
  type CellS[A] = S[CState, A]

  // Transitions whose result is a bundle of outgoing messages (MsgsOut).
  type SM[Ss, A <: HList] = S[Ss, MsgsOut[A]]
  type CellSMO[A <: HList] = SO[CState, MsgsOut[A]]

  // Plain (writer-free) state transitions yielding IO effects directly.
  type SR[Ss] = StateT[Eval, Ss, IOs]
  type SLR = StateT[Eval, CStates, IOs]

  // Lift a plain value into the writer / state-writer stacks.
  def cellW[A](a: A): CellW[A] = WriterT.value(a)
  def cellWS[A, Ss](a: A): S[Ss, A] = StateT.lift(cellW(a))

  // Neutral (empty-output) constants used as identity transition results.
  val hnilWriter: CellW[MsgsOut[HNil]] = cellW(MsgsOut(HNil))
  def hnilSM[Ss]: SM[Ss, HNil] = cellWS(MsgsOut(HNil))
  def nilSR[Ss]: SR[Ss] = StateT.pure(Nil)
  val hnilCSM: CellS[MsgsOut[HNil]] = hnilSM[CState]
  def noneCSMO[A <: HList]: CellSMO[A] = cellWS[Option[MsgsOut[A]], CState](None)

  def exit = tryp.state.core.ExitLoop

  // Monad instance for the state-writer stack.
  // The `?` placeholders are kind-projector compiler-plugin syntax.
  implicit def Functor_S[Ss]: Monad[S[Ss, ?]] = StateT.catsDataMonadStateForStateT[cats.data.WriterT[Eval, IOs, ?], Ss]

  // Phantom marker: a FastMessage is a Message tagged with Fast via shapeless `@@`.
  trait Fast
  type FastMessage = Message @@ Fast
}
// Type class exposing the statically-known length of a type (e.g. an HList).
trait Length[A]
{
  def length: Int
}

object Length
{
  // Derives Length for any HList whose size is known at compile time by
  // converting the type-level Nat to a runtime Int.
  implicit def hlistLength[A <: HList, L <: Nat](implicit l: hlist.Length.Aux[A, L], toInt: nat.ToInt[L]): Length[A] =
    new Length[A] {
      def length = toInt()
    }
}
| tek/pulsar | state-core/src/package.scala | Scala | mit | 2,373 |
package org.scalaide.ui.internal.preferences
import org.eclipse.core.runtime.preferences.AbstractPreferenceInitializer
import org.eclipse.jdt.internal.ui.preferences.OverlayPreferenceStore
import org.eclipse.jface.layout.TableColumnLayout
import org.eclipse.jface.preference.PreferencePage
import org.eclipse.jface.text.IDocument
import org.eclipse.jface.viewers.CheckStateChangedEvent
import org.eclipse.jface.viewers.CheckboxTableViewer
import org.eclipse.jface.viewers.ColumnWeightData
import org.eclipse.jface.viewers.IStructuredContentProvider
import org.eclipse.jface.viewers.SelectionChangedEvent
import org.eclipse.jface.viewers.TableViewerColumn
import org.eclipse.jface.viewers.Viewer
import org.eclipse.swt.SWT
import org.eclipse.swt.events.ModifyEvent
import org.eclipse.swt.layout.GridData
import org.eclipse.swt.layout.GridLayout
import org.eclipse.swt.widgets.Composite
import org.eclipse.swt.widgets.Control
import org.eclipse.swt.widgets.Table
import org.eclipse.swt.widgets.Text
import org.eclipse.ui.IWorkbench
import org.eclipse.ui.IWorkbenchPreferencePage
import org.scalaide.core.IScalaPlugin
import org.scalaide.core.internal.extensions.SaveActions
import org.scalaide.extensions.SaveActionSetting
import org.scalaide.util.eclipse.SWTUtils._
/** This class is referenced through plugin.xml */
/** Preference page listing all contributed save actions. Users can enable or
 *  disable each action and configure the global save action timeout.
 */
class SaveActionsPreferencePage extends PreferencePage with IWorkbenchPreferencePage {

  /** Lower bound (in milliseconds) accepted for the save action timeout. */
  private val MinSaveActionTimeout = 100

  // Widgets and preview documents, created in createContents and used by the
  // callbacks below.
  private var textBefore: IDocument = _
  private var textAfter: IDocument = _
  private var descriptionArea: Text = _
  private var timeoutValue: Text = _
  private var viewer: CheckboxTableViewer = _

  // All contributed save actions, displayed in the table.
  private val settings = SaveActions.saveActionSettings.toArray

  // Overlay store: buffers edits and writes them back to the plugin store only
  // when the page is applied (performOk).
  private val prefStore = {
    val ps = IScalaPlugin().getPreferenceStore
    import OverlayPreferenceStore._
    val keys = new OverlayKey(STRING, SaveActions.SaveActionTimeoutId) +: settings.map { s =>
      new OverlayKey(BOOLEAN, s.id)
    }
    val store = new OverlayPreferenceStore(ps, keys)
    store.load()
    store
  }

  /** Builds the page UI: timeout field, save action table, description area
   *  and the before/after preview editors. */
  override def createContents(parent: Composite): Control = {
    val base = new Composite(parent, SWT.NONE)
    base.setLayout(new GridLayout(2, true))

    mkLabel(base, "Save actions are executed for open editors whenever a save event occurs for one of them.", columnSize = 2)

    val timeout = new Composite(base, SWT.NONE)
    timeout.setLayoutData(new GridData(SWT.NONE, SWT.FILL, true, false, 2, 1))
    timeout.setLayout(new GridLayout(2, false))

    timeoutValue = new Text(timeout, SWT.BORDER | SWT.SINGLE)
    timeoutValue.setText(prefStore.getString(SaveActions.SaveActionTimeoutId))
    timeoutValue.addModifyListener { e: ModifyEvent =>
      // Validate on every keystroke; an invalid timeout disables OK/Apply.
      // Fix: `error` is side-effecting, so declare and invoke it with parentheses.
      def error(): Unit = {
        setValid(false)
        setErrorMessage(s"Timeout value needs to be >= $MinSaveActionTimeout ms")
      }
      util.Try(timeoutValue.getText().toInt) match {
        case util.Success(millis) =>
          if (millis >= MinSaveActionTimeout) {
            setValid(true)
            setErrorMessage(null)
          } else
            error()
        case util.Failure(_) =>
          error()
      }
    }
    // Fix: corrected the "Timout" typo in this user-visible label.
    mkLabel(timeout, "Timeout in milliseconds (this is the time the IDE waits for a result of the save action)")

    val tableComposite = new Composite(base, SWT.NONE)
    tableComposite.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 2, 1))

    val table = new Table(tableComposite, SWT.CHECK | SWT.BORDER | SWT.SINGLE | SWT.FULL_SELECTION | SWT.V_SCROLL)
    table.setHeaderVisible(true)
    table.setLinesVisible(true)

    val tcl = new TableColumnLayout
    tableComposite.setLayout(tcl)

    viewer = new CheckboxTableViewer(table)
    viewer.setContentProvider(ContentProvider)
    // Show the selected save action's description and preview.
    viewer.addSelectionChangedListener { e: SelectionChangedEvent =>
      table.getSelection().headOption foreach { item =>
        selectSaveAction(item.getData().asInstanceOf[SaveActionSetting])
      }
    }
    // Persist the enabled flag (into the overlay store) when a checkbox is toggled.
    viewer.addCheckStateListener { e: CheckStateChangedEvent =>
      prefStore.setValue(e.getElement.asInstanceOf[SaveActionSetting].id, e.getChecked)
    }

    val columnEnabled = new TableViewerColumn(viewer, SWT.NONE)
    columnEnabled.getColumn().setText("Name")
    columnEnabled onLabelUpdate { _.asInstanceOf[SaveActionSetting].name }
    tcl.setColumnData(columnEnabled.getColumn(), new ColumnWeightData(1, true))

    viewer.setInput(settings.sortBy(_.name))
    viewer.setAllChecked(false)
    viewer.setCheckedElements(settings.filter(isEnabled).asInstanceOf[Array[AnyRef]])

    mkLabel(base, "Description:", columnSize = 2)
    descriptionArea = mkTextArea(base, lineHeight = 3, initialText = "", columnSize = 2)

    mkLabel(base, "Before:")
    mkLabel(base, "After:")

    val previewTextBefore = createPreviewer(base) {
      textBefore = _
    }
    previewTextBefore.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true))

    val previewTextAfter = createPreviewer(base) {
      textAfter = _
    }
    previewTextAfter.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true))

    base
  }

  override def init(workbench: IWorkbench): Unit = ()

  /** Writes the timeout into the overlay store and propagates all buffered
   *  changes to the underlying plugin store. */
  override def performOk(): Boolean = {
    prefStore.setValue(SaveActions.SaveActionTimeoutId, timeoutValue.getText())
    prefStore.propagate()
    super.performOk()
  }

  /** Restores the default timeout and unchecks every save action. */
  override def performDefaults(): Unit = {
    timeoutValue.setText(SaveActionsPreferenceInitializer.SaveActionDefaultTimeout.toString())
    viewer.setAllChecked(false)
    prefStore.loadDefaults()
    super.performDefaults()
  }

  // Creates a read-only, wrapping text area spanning `columnSize` grid columns
  // and `lineHeight` text lines.
  private def mkTextArea(parent: Composite, lineHeight: Int, initialText: String, columnSize: Int): Text = {
    val t = new Text(parent, SWT.MULTI | SWT.BORDER | SWT.V_SCROLL | SWT.WRAP | SWT.READ_ONLY)
    t.setText(initialText)
    t.setLayoutData({
      val gd = new GridData(SWT.FILL, SWT.FILL, true, false, columnSize, 1)
      gd.heightHint = lineHeight * t.getLineHeight()
      gd
    })
    t
  }

  // A save action is enabled when its (buffered) boolean preference is set.
  private def isEnabled(saveAction: SaveActionSetting): Boolean =
    prefStore.getBoolean(saveAction.id)

  // Updates the description and the before/after previews for the selection.
  private def selectSaveAction(saveAction: SaveActionSetting) = {
    textBefore.set(saveAction.codeExample)
    textAfter.set("Previewing the behavior of the save action is not yet implemented.")
    descriptionArea.setText(saveAction.description)
  }

  // Creates a Scala source previewer and hands its document to `f`.
  private def createPreviewer(parent: Composite)(f: IDocument => Unit): Control = {
    val previewer = new PreviewerFactory(ScalaPreviewerFactoryConfiguration).createPreviewer(parent, IScalaPlugin().getPreferenceStore, "")
    f(previewer.getDocument())
    previewer.getControl
  }

  // Feeds the settings array (set via viewer.setInput) to the table viewer.
  private object ContentProvider extends IStructuredContentProvider {
    override def dispose(): Unit = ()

    override def getElements(input: Any): Array[AnyRef] = {
      input.asInstanceOf[Array[AnyRef]]
    }

    override def inputChanged(viewer: Viewer, oldInput: Any, newInput: Any): Unit = ()
  }
}
object SaveActionsPreferenceInitializer {

  /** Default save action timeout in milliseconds. */
  final val SaveActionDefaultTimeout: Int = 200
}
/** This class is referenced through plugin.xml */
class SaveActionsPreferenceInitializer extends AbstractPreferenceInitializer {
  import SaveActionsPreferenceInitializer._

  /** Registers the default for every save action toggle (disabled) and for
   *  the global save action timeout. */
  override def initializeDefaultPreferences(): Unit = {
    for (setting <- SaveActions.saveActionSettings)
      IScalaPlugin().getPreferenceStore().setDefault(setting.id, false)

    IScalaPlugin().getPreferenceStore().setDefault(SaveActions.SaveActionTimeoutId, SaveActionDefaultTimeout)
  }
}
| stephenh/scala-ide | org.scala-ide.sdt.core/src/org/scalaide/ui/internal/preferences/SaveActionsPreferencePage.scala | Scala | bsd-3-clause | 7,484 |
package java.util
import scala.annotation.tailrec
import java.lang.Math
import scala.scalanative.native.stdlib
/** Linear-congruential PRNG compatible with `java.util.Random`:
 *  48-bit state, multiplier 0x5DEECE66DL, increment 0xB.
 */
class Random(seed_in: Long) extends AnyRef with java.io.Serializable {

  private var seed: Long = _

  // Box-Muller produces two gaussians per round; the spare one is cached here.
  // see nextGaussian()
  private var nextNextGaussian: Double = _
  private var haveNextNextGaussian: Boolean = false

  setSeed(seed_in)

  def this() = this(Random.randomSeed())

  /** Scrambles and installs the new seed and discards any cached gaussian. */
  def setSeed(seed_in: Long): Unit = {
    seed = (seed_in ^ 0x5DEECE66DL) & ((1L << 48) - 1)
    haveNextNextGaussian = false
  }

  /** Core generator step: advances the LCG and returns its top `bits` bits. */
  protected def next(bits: Int): Int = {
    seed = (seed * 0x5DEECE66DL + 0xBL) & ((1L << 48) - 1)
    (seed >>> (48 - bits)).toInt
  }

  /** Uniform double in [0, 1), built from 53 random bits. */
  def nextDouble(): Double =
    ((next(26).toLong << 27) + next(27)) / (1L << 53).toDouble

  def nextBoolean(): Boolean = next(1) != 0

  def nextInt(): Int = next(32)

  /** Uniform int in [0, n); rejection sampling removes the modulo bias. */
  def nextInt(n: Int): Int = {
    if (n <= 0)
      throw new IllegalArgumentException("n must be positive");

    if ((n & -n) == n) // i.e., n is a power of 2
      ((n * next(31).toLong) >> 31).toInt
    else {
      @tailrec
      def loop(): Int = {
        val bits = next(31)
        val value = bits % n
        // reject samples from the incomplete last block to keep uniformity
        if (bits - value + (n - 1) < 0) loop()
        else value
      }

      loop()
    }
  }

  def nextLong(): Long = (next(32).toLong << 32) + next(32)

  /** Uniform float in [0, 1), built from 24 random bits. */
  def nextFloat(): Float = next(24) / (1 << 24).toFloat

  /** Fills `bytes` with random bytes, consuming one generator step per four bytes. */
  def nextBytes(bytes: Array[Byte]): Unit = {
    var i = 0
    while (i < bytes.length) {
      var rnd = nextInt()
      var n = Math.min(bytes.length - i, 4)
      while (n > 0) {
        bytes(i) = rnd.toByte
        rnd >>= 8
        n -= 1
        i += 1
      }
    }
  }

  /** Standard-normal deviate via the polar (Box-Muller) method.
   *  See http://www.protonfish.com/jslib/boxmuller.shtml
   */
  def nextGaussian(): Double = {
    /* The Box-Muller algorithm produces two random numbers at once. We save
     * the second one in `nextNextGaussian` to be used by the next call to
     * nextGaussian().
     */
    if (haveNextNextGaussian) {
      haveNextNextGaussian = false
      return nextNextGaussian
    }

    var x, y, rds: Double = 0

    /* Get two random numbers from -1 to 1 and reject the pair unless it falls
     * strictly inside the unit circle (and is not the origin). Rejection
     * sampling throws away about 20% of the pairs.
     * Fix: reject rds >= 1 (not only rds > 1), matching java.util.Random;
     * rds == 1 would yield a degenerate multiplier of 0.
     */
    do {
      x = nextDouble() * 2 - 1
      y = nextDouble() * 2 - 1
      rds = x * x + y * y
    } while (rds == 0 || rds >= 1)

    val c = Math.sqrt(-2 * Math.log(rds) / rds)

    // Save y*c for next time
    nextNextGaussian = y * c
    haveNextNextGaussian = true

    // And return x*c
    x * c
  }
}
object Random {

  /** Generate a random long from the C standard library RNG to seed a new Random. */
  private def randomSeed(): Long =
    (randomInt().toLong << 32) | (randomInt().toLong & 0xffffffffL)

  // NOTE(review): stdlib.rand() returns an int in [0, RAND_MAX], not a double
  // in [0, 1) — this formula looks ported from Scala.js' Math.random() and
  // likely does not produce the intended distribution; verify against
  // scalanative's stdlib binding.
  private def randomInt(): Int =
    (Math.floor(stdlib.rand() * 4294967296.0) - 2147483648.0).toInt
}
| cedricviaccoz/scala-native | javalib/src/main/scala/java/util/Random.scala | Scala | bsd-3-clause | 2,879 |
object Test extends App {
  import scala.reflect.runtime.universe._

  // baseClasses.head is the type's own symbol, so index 1 is its first parent:
  // the java.lang.Enum[E] base class of the Java enum.
  val enum = typeOf[JavaSimpleEnumeration_1].baseClasses(1).asClass

  // make sure that the E's in Enum<E extends Enum<E>> are represented by the same symbol
  val e1 = enum.typeParams(0).asType
  // Destructure the upper bound Enum[E] and pull out the E it refers to.
  val TypeBounds(_, TypeRef(_, _, List(TypeRef(_, e2: TypeSymbol, _)))) = e1.info
  println(e1, e2, e1 eq e2)
} | som-snytt/dotty | tests/disabled/reflect/run/reflection-java-crtp/Main_2.scala | Scala | apache-2.0 | 374 |
package com.campudus.tableaux.database.model.structure
import com.campudus.tableaux.database._
import com.campudus.tableaux.database.domain._
import com.campudus.tableaux.database.model.TableauxModel._
import com.campudus.tableaux.helper.ResultChecker._
import org.vertx.scala.core.json._
import scala.concurrent.Future
/** Factory for [[TableGroupModel]] instances. */
object TableGroupModel {

  def apply(connection: DatabaseConnection): TableGroupModel =
    new TableGroupModel(connection)
}
class TableGroupModel(val connection: DatabaseConnection) extends DatabaseQuery {

  /** Creates a new table group plus its localized display information, all in
   *  one transaction; returns the created group. */
  def create(displayInfos: Seq[DisplayInfo]): Future[TableGroup] = {
    connection.transactional { t =>
      for {
        (t, result) <- t.query(s"INSERT INTO system_tablegroup(id) VALUES(DEFAULT) RETURNING id")
        id = insertNotNull(result).head.get[TableGroupId](0)
        (t, _) <- createTableDisplayInfos(t, id, displayInfos)
      } yield (t, TableGroup(id, displayInfos))
    }
  }

  // Inserts the display info rows for a group; skips the query entirely (and
  // yields an empty JSON result) when there is nothing to insert.
  private def createTableDisplayInfos(
      t: connection.Transaction,
      tableGroupId: TableGroupId,
      displayInfos: Seq[DisplayInfo]
  ): Future[(connection.Transaction, JsonObject)] = {
    if (displayInfos.nonEmpty) {
      val (statement, binds) = TableGroupDisplayInfos(tableGroupId, displayInfos).createSql
      for {
        (t, result) <- t.query(statement, Json.arr(binds: _*))
      } yield (t, result)
    } else {
      Future.successful((t, Json.obj()))
    }
  }

  /** Retrieves a single table group by id. */
  def retrieve(id: TableGroupId): Future[TableGroup] = {
    for {
      table <- retrieveWithDisplayInfos(id)
    } yield table
  }

  /** Retrieves all table groups. */
  def retrieveAll(): Future[Seq[TableGroup]] = {
    for {
      table <- retrieveAllWithDisplayInfos()
    } yield table
  }

  // Loads the display info rows for one group id. selectNotNull presumably
  // fails the future when no row exists, which also makes the .head below safe.
  private def retrieveWithDisplayInfos(id: TableGroupId): Future[TableGroup] = {
    for {
      displayInfoResult <- connection
        .query("SELECT id, langtag, name, description FROM system_tablegroup_lang WHERE id = ?", Json.arr(id))
      _ = selectNotNull(displayInfoResult)
    } yield {
      mapDisplayInfosIntoTableGroup(displayInfoResult).head
    }
  }

  private def retrieveAllWithDisplayInfos(): Future[Seq[TableGroup]] = {
    for {
      displayInfoResult <- connection.query("SELECT id, langtag, name, description FROM system_tablegroup_lang")
    } yield {
      mapDisplayInfosIntoTableGroup(displayInfoResult)
    }
  }

  // Groups the raw (id, langtag, name, description) rows by group id and turns
  // each group's rows into DisplayInfo objects.
  private def mapDisplayInfosIntoTableGroup(result: JsonObject): Seq[TableGroup] = {
    val displayInfoTable = resultObjectToJsonArray(result)
      .groupBy(_.getLong(0).longValue())
      // NOTE(review): mapValues returns a lazy view in Scala 2.12; it is only
      // evaluated by the map/toList below.
      .mapValues(
        // keep only rows that carry at least a name or a description
        _.filter(arr => Option(arr.getString(2)).isDefined || Option(arr.getString(3)).isDefined)
          .map(arr => DisplayInfos.fromString(arr.getString(1), arr.getString(2), arr.getString(3)))
      )

    displayInfoTable
      .map({
        case (id, displayInfos) =>
          TableGroup(id, displayInfos)
      })
      .toList
  }

  /** Deletes a table group in its own transaction; deleteNotNull presumably
   *  fails the future when no row was removed. */
  def delete(tableGroupId: TableGroupId): Future[Unit] = {
    for {
      t <- connection.begin()
      (t, result) <- t.query("DELETE FROM system_tablegroup WHERE id = ?", Json.arr(tableGroupId))
      _ = deleteNotNull(result)
      _ <- t.commit()
    } yield ()
  }

  /** Replaces the display information of a table group; a None value leaves
   *  everything untouched (but still opens/commits a transaction). */
  def change(tableGroupId: TableGroupId, displayInfos: Option[Seq[DisplayInfo]]): Future[Unit] = {
    for {
      t <- connection.begin()
      t <- insertOrUpdateTableDisplayInfo(t, tableGroupId, displayInfos)
      _ <- t.commit()
    } yield ()
  }

  // Upserts each display info entry sequentially (foldLeft chains the futures):
  // UPDATE when a row for the (id, langtag) pair already exists, INSERT otherwise.
  private def insertOrUpdateTableDisplayInfo(
      t: connection.Transaction,
      tableGroupId: TableGroupId,
      optDisplayInfos: Option[Seq[DisplayInfo]]
  ): Future[connection.Transaction] = {
    optDisplayInfos match {
      case Some(displayInfos) =>
        val dis = TableGroupDisplayInfos(tableGroupId, displayInfos)
        dis.entries.foldLeft(Future.successful(t)) {
          case (future, di) =>
            for {
              t <- future
              (t, select) <- t.query("SELECT COUNT(*) FROM system_tablegroup_lang WHERE id = ? AND langtag = ?",
                Json.arr(tableGroupId, di.langtag))
              count = select.getJsonArray("results").getJsonArray(0).getLong(0)

              (statement, binds) = if (count > 0) {
                dis.updateSql(di.langtag)
              } else {
                dis.insertSql(di.langtag)
              }

              (t, _) <- t.query(statement, Json.arr(binds: _*))
            } yield t
        }

      case None => Future.successful(t)
    }
  }
}
| campudus/tableaux | src/main/scala/com/campudus/tableaux/database/model/structure/TableGroupModel.scala | Scala | apache-2.0 | 4,458 |
package djinni
import djinni.ast._
import djinni.generatorTools._
import djinni.meta._
import scala.language.implicitConversions
// Generate code for marshalling a specific type from/to C++ including header and type names.
// This only generates information relevant to a single language interface.
// This means the C++ Marshal generates only C++ types and includes, but not JNI or ObjC++.
// As a consequence a typical code generator needs two Marshals: one for C++ and one for the destination, e.g. JNI.
abstract class Marshal(spec: Spec) {

  /** Typename string used to declare a type or template parameter, without
   *  namespace or package, except for extern types which are always fully
   *  qualified. */
  def typename(tm: MExpr): String
  def typename(ty: TypeRef): String = typename(ty.resolved)

  /** Same as typename() but always fully namespace- or package-qualified. */
  def fqTypename(tm: MExpr): String
  def fqTypename(ty: TypeRef): String = fqTypename(ty.resolved)

  /** Type signature for a function parameter. */
  def paramType(tm: MExpr): String
  def paramType(ty: TypeRef): String = paramType(ty.resolved)

  /** Fully-qualified variant of paramType. */
  def fqParamType(tm: MExpr): String
  def fqParamType(ty: TypeRef): String = fqParamType(ty.resolved)

  /** Type signature for a return value (None represents void). */
  def returnType(ret: Option[TypeRef]): String
  def fqReturnType(ret: Option[TypeRef]): String

  /** Type signature for a record field. */
  def fieldType(tm: MExpr): String
  def fieldType(ty: TypeRef): String = fieldType(ty.resolved)

  /** Fully-qualified variant of fieldType. */
  def fqFieldType(tm: MExpr): String
  def fqFieldType(ty: TypeRef): String = fqFieldType(ty.resolved)

  /** Code for an expression converting `expr` of the non-C++ type `tm` to its
   *  C++ counterpart; the default produces no conversion code. */
  def toCpp(tm: MExpr, expr: String): String = ""
  def toCpp(ty: TypeRef, expr: String): String = toCpp(ty.resolved, expr)

  /** Inverse of toCpp: converts a C++ expression to the target language. */
  def fromCpp(tm: MExpr, expr: String): String = ""
  def fromCpp(ty: TypeRef, expr: String): String = fromCpp(ty.resolved, expr)

  // Lets an Ident be used wherever a plain String is expected.
  implicit def identToString(ident: Ident): String = ident.name

  // Identifier styles for each target language, taken from the generator spec.
  protected val idCpp = spec.cppIdentStyle
  protected val idJava = spec.javaIdentStyle
  protected val idObjc = spec.objcIdentStyle
  protected val idPython = spec.pyIdentStyle

  // Qualifies `t` with a C++ namespace: None leaves it unqualified,
  // Some("") anchors it at the global scope (::t), Some(ns) yields ::ns::t.
  protected def withNs(namespace: Option[String], t: String) = namespace match {
    case None => t
    case Some("") => "::" + t
    case Some(s) => "::" + s + "::" + t
  }

  // Qualifies `t` with the configured C++ namespace.
  protected def withCppNs(t: String) = withNs(Some(spec.cppNamespace), t)
}
| trafi/djinni | src/source/Marshal.scala | Scala | apache-2.0 | 2,517 |
package io.dylemma.spac.impl
import io.dylemma.spac.Transformer
/** Transformer emitting the running fold of its inputs (in the manner of
 *  `scanLeft`): each pushed input updates the accumulator with `op`, and the
 *  new accumulator value is emitted downstream. */
case class TransformerScan[In, Out](init: Out, op: (Out, In) => Out) extends Transformer[In, Out] {
  def newHandler = new Transformer.Handler[In, Out] {
    // running accumulator, starting at `init`
    private var acc = init

    def push(in: In, out: Transformer.HandlerWrite[Out]) = {
      val updated = op(acc, in)
      acc = updated
      out.push(updated)
    }

    // a scan has no pending output at end-of-stream
    def finish(out: Transformer.HandlerWrite[Out]): Unit = ()
  }
}
| dylemma/xml-spac | core/src/main/scala/io/dylemma/spac/impl/TransformerScan.scala | Scala | mit | 418 |
package org.helgoboss.domino
import org.osgi.framework.{ BundleContext, BundleActivator }
/**
* A bundle activator which contains empty `start` and `stop` methods. This is the basis
* for the trait [[OsgiContext]] which hooks into the `start` and `stop` methods.
*
* @see [[http://www.artima.com/scalazine/articles/stackable_trait_pattern.html Stackable Trait Pattern]]
*/
trait EmptyBundleActivator extends BundleActivator {

  // No-op lifecycle hooks; stackable traits (see OsgiContext) override these
  // and call super. Fix: deprecated procedure syntax replaced with explicit
  // `: Unit = ()` bodies (behavior unchanged).
  def start(context: BundleContext): Unit = ()

  def stop(context: BundleContext): Unit = ()
}
package reactivemongo.api
/**
* The [[https://docs.mongodb.com/manual/reference/write-concern/index.html write concern]].
*
* {{{
* import scala.concurrent.ExecutionContext
* import reactivemongo.api.{ DB, WriteConcern }
* import reactivemongo.api.bson.BSONDocument
*
* def foo(db: DB)(implicit ec: ExecutionContext) =
* db.collection("myColl").
* insert(ordered = false, WriteConcern.Acknowledged).
* one(BSONDocument("foo" -> "bar"))
* }}}
*/
final class WriteConcern private[api] (
    _w: WriteConcern.W,
    _j: Boolean,
    _fsync: Boolean,
    _wtimeout: Option[Int]) {

  /** The acknowledgment specification. */
  @inline def w: WriteConcern.W = _w

  /** The journal flag. */
  @inline def j: Boolean = _j

  /** The fsync flag. */
  @inline def fsync: Boolean = _fsync

  /** The time limit, in milliseconds
   *  (only applicable for `w` values greater than 1). */
  @inline def wtimeout: Option[Int] = _wtimeout

  @SuppressWarnings(Array("VariableShadowing"))
  def copy(
      w: WriteConcern.W = _w,
      j: Boolean = _j,
      fsync: Boolean = _fsync,
      wtimeout: Option[Int] = _wtimeout): WriteConcern =
    new WriteConcern(w, j, fsync, wtimeout)

  // Equality, hashing and rendering all delegate to this property tuple.
  private lazy val tupled = (w, j, fsync, wtimeout)

  override def equals(that: Any): Boolean = that match {
    case wc: WriteConcern => tupled == wc.tupled
    case _                => false
  }

  override def hashCode: Int = tupled.hashCode

  override def toString = s"WriteConcern${tupled.toString}"
}

/** Factories and acknowledgment specifications for [[WriteConcern]]. */
object WriteConcern {

  def apply(
      w: WriteConcern.W,
      j: Boolean,
      fsync: Boolean,
      wtimeout: Option[Int]): WriteConcern =
    new WriteConcern(w, j, fsync, wtimeout)

  // ---

  /** Acknowledgment specification (the `w` option). */
  sealed trait W

  /** Majority acknowledgment. */
  object Majority extends W {
    override def toString = "Majority"
  }

  /** Acknowledgment by the members of a custom (tagged) write concern. */
  final class TagSet private[api] (val tag: String) extends W {
    override def equals(that: Any): Boolean = that match {
      case other: TagSet =>
        if (tag == null) other.tag == null else tag == other.tag

      case _ =>
        false
    }

    override def hashCode: Int = tag.hashCode

    override def toString = s"TagSet($tag)"
  }

  object TagSet {
    @inline def apply(tag: String): TagSet = new TagSet(tag)

    private[api] def unapply(that: W): Option[String] = that match {
      case ts: TagSet => Option(ts.tag)
      case _          => None
    }
  }

  /** Requests acknowledgment by at least `i` nodes. */
  final class WaitForAcknowledgments private[api] (val i: Int) extends W {
    override def equals(that: Any): Boolean = that match {
      case other: WaitForAcknowledgments => other.i == i
      case _                             => false
    }

    override def hashCode: Int = i

    override def toString = s"WaitForAcknowledgments($i)"
  }

  object WaitForAcknowledgments {
    @inline def apply(i: Int): WaitForAcknowledgments =
      new WaitForAcknowledgments(i)

    private[api] def unapply(that: W): Option[Int] = that match {
      case wfa: WaitForAcknowledgments => Some(wfa.i)
      case _                           => None
    }
  }

  /** [[WriteConcern]] with no acknowledgment required. */
  val Unacknowledged: WriteConcern =
    new WriteConcern(new WaitForAcknowledgments(0), false, false, None)

  /** [[WriteConcern]] with one acknowledgment required. */
  val Acknowledged: WriteConcern =
    new WriteConcern(new WaitForAcknowledgments(1), false, false, None)

  /** [[WriteConcern]] with one acknowledgment and the operation written to
   *  the on-disk journal. */
  val Journaled: WriteConcern =
    new WriteConcern(new WaitForAcknowledgments(1), true, false, None)

  /** Acknowledgment by at least two replicas (`n` is raised to 2 when lower);
   *  a non-positive `timeout` means no time limit. */
  @SuppressWarnings(Array("MethodNames"))
  def ReplicaAcknowledged(n: Int, timeout: Int, journaled: Boolean): WriteConcern = {
    val atLeastTwo = if (n < 2) 2 else n
    val limit = if (timeout <= 0) None else Some(timeout)

    new WriteConcern(new WaitForAcknowledgments(atLeastTwo), journaled, false, limit)
  }

  /** Acknowledgment by the replicas matching the given tag;
   *  a non-positive `timeout` means no time limit. */
  @SuppressWarnings(Array("MethodNames"))
  def TagReplicaAcknowledged(tag: String, timeout: Int, journaled: Boolean): WriteConcern = {
    val limit = if (timeout <= 0) None else Some(timeout)

    new WriteConcern(new TagSet(tag), journaled, false, limit)
  }

  /** The default [[WriteConcern]] ([[Acknowledged]]). */
  @SuppressWarnings(Array("MethodNames"))
  def Default: WriteConcern = Acknowledged
}
| ReactiveMongo/ReactiveMongo | driver/src/main/scala/api/WriteConcern.scala | Scala | apache-2.0 | 4,802 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.rdd.read
import java.util.UUID
import org.bdgenomics.adam.models.{ RecordGroup, RecordGroupDictionary }
import org.bdgenomics.adam.rdd.ADAMContext._
import org.bdgenomics.adam.util.ADAMFunSuite
import org.bdgenomics.formats.avro.{ AlignmentRecord, Contig }
/**
 * Tests for duplicate-read marking: the best-scoring read (or pair) at each
 * position must survive while the rest are flagged as duplicates.
 */
class MarkDuplicatesSuite extends ADAMFunSuite {

  // All mapped reads built below belong to this single record group; the name
  // must match the group set in createMappedRead ("machine foo").
  val rgd = new RecordGroupDictionary(Seq(
    new RecordGroup("sammy sample",
      "machine foo",
      library = Some("library bar"))))

  /** Builds a minimal unmapped read; unmapped reads must never be marked duplicate. */
  def createUnmappedRead() = {
    AlignmentRecord.newBuilder()
      .setReadMapped(false)
      .build()
  }

  /**
   * Builds a mapped 100 bp read.
   *
   * @param referenceName contig the read is aligned to
   * @param start alignment start position
   * @param end alignment end position
   * @param readName read name (defaults to a random UUID)
   * @param avgPhredScore phred score given to every base; must lie in [10, 50]
   * @param numClippedBases number of leading soft-clipped bases in the CIGAR
   * @param isPrimaryAlignment whether this is the primary alignment
   * @param isNegativeStrand whether the read maps to the reverse strand
   */
  def createMappedRead(referenceName: String, start: Long, end: Long,
                       readName: String = UUID.randomUUID().toString, avgPhredScore: Int = 20,
                       numClippedBases: Int = 0, isPrimaryAlignment: Boolean = true,
                       isNegativeStrand: Boolean = false) = {
    assert(avgPhredScore >= 10 && avgPhredScore <= 50)
    // 100 quality characters at the requested score; 33 is the phred+33 ASCII
    // offset. Note this must build an actual 100-character string (the previous
    // `(for ... yield c).toString()` rendered "Vector(...)" instead), matching
    // the construction used in the "quality scores" test below.
    val qual = (avgPhredScore + 33).toChar.toString * 100
    val cigar = if (numClippedBases > 0) "%dS%dM".format(numClippedBases, 100 - numClippedBases) else "100M"
    val contig = Contig.newBuilder
      .setContigName(referenceName)
      .build
    AlignmentRecord.newBuilder()
      .setContig(contig)
      .setStart(start)
      .setQual(qual)
      .setCigar(cigar)
      .setEnd(end)
      .setReadMapped(true)
      .setPrimaryAlignment(isPrimaryAlignment)
      .setReadName(readName)
      .setRecordGroupName("machine foo")
      .setDuplicateRead(false)
      .setReadNegativeStrand(isNegativeStrand)
      .build()
  }

  /**
   * Builds a proper read pair: first of pair on the forward strand, second of
   * pair on the reverse strand, each pointing at its mate's position.
   */
  def createPair(firstReferenceName: String, firstStart: Long, firstEnd: Long,
                 secondReferenceName: String, secondStart: Long, secondEnd: Long,
                 readName: String = UUID.randomUUID().toString,
                 avgPhredScore: Int = 20): Seq[AlignmentRecord] = {
    val firstContig = Contig.newBuilder
      .setContigName(firstReferenceName)
      .build
    val secondContig = Contig.newBuilder
      .setContigName(secondReferenceName)
      .build
    val firstOfPair = createMappedRead(firstReferenceName, firstStart, firstEnd,
      readName = readName, avgPhredScore = avgPhredScore)
    firstOfPair.setReadInFragment(0)
    firstOfPair.setMateMapped(true)
    firstOfPair.setMateContig(secondContig)
    firstOfPair.setMateAlignmentStart(secondStart)
    firstOfPair.setReadPaired(true)
    val secondOfPair = createMappedRead(secondReferenceName, secondStart, secondEnd,
      readName = readName, avgPhredScore = avgPhredScore, isNegativeStrand = true)
    secondOfPair.setReadInFragment(1)
    secondOfPair.setMateMapped(true)
    secondOfPair.setMateContig(firstContig)
    secondOfPair.setMateAlignmentStart(firstStart)
    secondOfPair.setReadPaired(true)
    Seq(firstOfPair, secondOfPair)
  }

  // Runs duplicate marking over the given reads and collects the result locally.
  private def markDuplicates(reads: AlignmentRecord*) = {
    sc.parallelize(reads).adamMarkDuplicates(rgd).collect()
  }

  sparkTest("single read") {
    val read = createMappedRead("0", 100, 200)
    val marked = markDuplicates(read)
    // Can't have duplicates with a single read, should return the read unchanged.
    assert(marked(0) == read)
  }

  sparkTest("reads at different positions") {
    val read1 = createMappedRead("0", 42, 142)
    val read2 = createMappedRead("0", 43, 143)
    val marked = markDuplicates(read1, read2)
    // Reads shouldn't be modified
    assert(marked.contains(read1) && marked.contains(read2))
  }

  sparkTest("reads at the same position") {
    // Ten low-quality reads plus one higher-quality read at the same locus:
    // only the best read survives unmarked.
    val poorReads = for (i <- 0 until 10) yield {
      createMappedRead("1", 42, 142, avgPhredScore = 20, readName = "poor%d".format(i))
    }
    val bestRead = createMappedRead("1", 42, 142, avgPhredScore = 30, readName = "best")
    val marked = markDuplicates(List(bestRead) ++ poorReads: _*)
    val (dups, nonDup) = marked.partition(p => p.getDuplicateRead)
    assert(nonDup.size == 1 && nonDup(0) == bestRead)
    assert(dups.forall(p => p.getReadName.startsWith("poor")))
  }

  sparkTest("reads at the same position with clipping") {
    // Soft clipping shifts the alignment start; clipped and unclipped reads at
    // the same unclipped position must still be grouped together.
    val poorClippedReads = for (i <- 0 until 5) yield {
      createMappedRead("1", 44, 142, numClippedBases = 2, avgPhredScore = 20, readName = "poorClipped%d".format(i))
    }
    val poorUnclippedReads = for (i <- 0 until 5) yield {
      createMappedRead("1", 42, 142, avgPhredScore = 20, readName = "poorUnclipped%d".format(i))
    }
    val bestRead = createMappedRead("1", 42, 142, avgPhredScore = 30, readName = "best")
    val marked = markDuplicates(List(bestRead) ++ poorClippedReads ++ poorUnclippedReads: _*)
    val (dups, nonDup) = marked.partition(p => p.getDuplicateRead)
    assert(nonDup.size == 1 && nonDup(0) == bestRead)
    assert(dups.forall(p => p.getReadName.startsWith("poor")))
  }

  sparkTest("reads on reverse strand") {
    val poorReads = for (i <- 0 until 7) yield {
      createMappedRead("10", 42, 142, isNegativeStrand = true, avgPhredScore = 20, readName = "poor%d".format(i))
    }
    val bestRead = createMappedRead("10", 42, 142, isNegativeStrand = true, avgPhredScore = 30, readName = "best")
    val marked = markDuplicates(List(bestRead) ++ poorReads: _*)
    val (dups, nonDup) = marked.partition(p => p.getDuplicateRead)
    assert(nonDup.size == 1 && nonDup(0) == bestRead)
    assert(dups.forall(p => p.getReadName.startsWith("poor")))
  }

  sparkTest("unmapped reads") {
    val unmappedReads = for (i <- 0 until 10) yield createUnmappedRead()
    val marked = markDuplicates(unmappedReads: _*)
    assert(marked.size == unmappedReads.size)
    // Unmapped reads should never be marked duplicates
    assert(marked.forall(p => !p.getDuplicateRead))
  }

  sparkTest("read pairs") {
    val poorPairs = for (
      i <- 0 until 10;
      read <- createPair("0", 10, 110, "0", 110, 210, avgPhredScore = 20, readName = "poor%d".format(i))
    ) yield read
    val bestPair = createPair("0", 10, 110, "0", 110, 210, avgPhredScore = 30, readName = "best")
    val marked = markDuplicates(bestPair ++ poorPairs: _*)
    val (dups, nonDups) = marked.partition(_.getDuplicateRead)
    assert(nonDups.size == 2 && nonDups.forall(p => p.getReadName.toString == "best"))
    assert(dups.forall(p => p.getReadName.startsWith("poor")))
  }

  sparkTest("read pairs with fragments") {
    val fragments = for (i <- 0 until 10) yield {
      createMappedRead("2", 33, 133, avgPhredScore = 40, readName = "fragment%d".format(i))
    }
    // Even though the phred score is lower, pairs always score higher than fragments
    val pairs = createPair("2", 33, 133, "2", 100, 200, avgPhredScore = 20, readName = "pair")
    val marked = markDuplicates(fragments ++ pairs: _*)
    val (dups, nonDups) = marked.partition(_.getDuplicateRead)
    assert(nonDups.size == 2 && nonDups.forall(p => p.getReadName.toString == "pair"))
    assert(dups.size == 10 && dups.forall(p => p.getReadName.startsWith("fragment")))
  }

  test("quality scores") {
    // The ascii value 53 is equal to a phred score of 20
    val qual = 53.toChar.toString * 100
    val record = AlignmentRecord.newBuilder().setQual(qual).build()
    // 100 bases * phred 20 = 2000
    assert(MarkDuplicates.score(record) == 2000)
  }

  sparkTest("read pairs that cross chromosomes") {
    val poorPairs = for (
      i <- 0 until 10;
      read <- createPair("ref0", 10, 110, "ref1", 110, 210, avgPhredScore = 20, readName = "poor%d".format(i))
    ) yield read
    val bestPair = createPair("ref0", 10, 110, "ref1", 110, 210, avgPhredScore = 30, readName = "best")
    val marked = markDuplicates(bestPair ++ poorPairs: _*)
    val (dups, nonDups) = marked.partition(_.getDuplicateRead)
    assert(nonDups.size == 2 && nonDups.forall(p => p.getReadName.toString == "best"))
    assert(dups.forall(p => p.getReadName.startsWith("poor")))
  }
}
| rnpandya/adam | adam-core/src/test/scala/org/bdgenomics/adam/rdd/read/MarkDuplicatesSuite.scala | Scala | apache-2.0 | 8,596 |
package sttp.client3.impl.fs2
import cats.effect.kernel.{Concurrent, Ref}
import cats.effect.kernel.syntax.monadCancel._
import fs2.{Pipe, Stream}
import sttp.ws.{WebSocket, WebSocketClosed, WebSocketFrame}
object Fs2WebSockets {

  /** Handle the websocket through a [[Pipe]] which receives the incoming events and produces the messages to be sent
    * to the server. Note that by the nature of a [[Pipe]], there is no need for these two streams to be coupled. Just
    * make sure to consume the input, as otherwise the receiving buffer might overflow (use [[Stream.drain]] if you
    * want to discard).
    * @param ws
    *   the websocket to handle
    * @param pipe
    *   the pipe to handle the socket
    * @tparam F
    *   the effect type
    * @return
    *   a Unit effect describing the full run of the websocket through the pipe
    */
  def handleThroughPipe[F[_]: Concurrent](
      ws: WebSocket[F]
  )(pipe: Pipe[F, WebSocketFrame.Data[_], WebSocketFrame]): F[Unit] = {
    Stream
      .eval(Ref.of[F, Option[WebSocketFrame.Close]](None))
      .flatMap { closeRef =>
        Stream
          .repeatEval(ws.receive()) // read incoming messages
          .flatMap[F, Option[WebSocketFrame.Data[_]]] {
            case WebSocketFrame.Close(code, reason) =>
              // remember the server's Close so it can be echoed back after the pipe finishes
              Stream.eval(closeRef.set(Some(WebSocketFrame.Close(code, reason)))).as(None)
            case WebSocketFrame.Ping(payload) =>
              // answer pings transparently; pings are not forwarded to the pipe
              Stream.eval(ws.send(WebSocketFrame.Pong(payload))).drain
            case WebSocketFrame.Pong(_) =>
              Stream.empty // ignore
            case in: WebSocketFrame.Data[_] => Stream.emit(Some(in))
          }
          .handleErrorWith {
            // a closed socket is normal termination: clear any stored Close and stop
            case _: WebSocketClosed => Stream.eval(closeRef.set(None)).as(None)
            case e => Stream.eval(Concurrent[F].raiseError(e))
          }
          .unNoneTerminate // terminate once we got a Close
          .through(pipe)
          // end with matching Close or user-provided Close or no Close at all
          .append(Stream.eval(closeRef.get).unNone) // A Close isn't a continuation
          .evalMap(ws.send(_)) // send messages
      }
      .compile
      .drain
      .guarantee(ws.close())
  }

  /** Lifts a text-to-frame function into a pipe over whole (reassembled) text messages. */
  def fromTextPipe[F[_]]: (String => WebSocketFrame) => fs2.Pipe[F, WebSocketFrame, WebSocketFrame] =
    f => fromTextPipeF(_.map(f))

  /** Precomposes the given string pipe with [[combinedTextFrames]], so it sees complete text messages. */
  def fromTextPipeF[F[_]]: fs2.Pipe[F, String, WebSocketFrame] => fs2.Pipe[F, WebSocketFrame, WebSocketFrame] =
    p => p.compose(combinedTextFrames)

  /** Reassembles fragmented text frames: emits one String per complete text message,
    * concatenating the payloads of all fragments up to each final fragment.
    * Non-text frames are dropped.
    */
  def combinedTextFrames[F[_]]: fs2.Pipe[F, WebSocketFrame, String] = { input =>
    input
      .collect { case tf: WebSocketFrame.Text => tf }
      .flatMap { tf =>
        if (tf.finalFragment) {
          // split a final fragment into its payload plus an empty marker (finalFragment
          // still true) so the `split` below cuts exactly after each complete message
          Stream(tf.copy(finalFragment = false), tf.copy(payload = ""))
        } else {
          Stream(tf)
        }
      }
      .split(_.finalFragment)
      .map(chunks => chunks.map(_.payload).toList.mkString)
  }
}
| softwaremill/sttp | effects/fs2/src/main/scala/sttp/client3/impl/fs2/Fs2WebSockets.scala | Scala | apache-2.0 | 2,926 |
package com.datamountaineer.streamreactor.connect.pulsar
import com.datamountaineer.kcql.Kcql
import com.typesafe.scalalogging.StrictLogging
import org.apache.pulsar.client.api.{ConsumerConfiguration, SubscriptionType}
/**
* Created by andrew@datamountaineer.com on 22/01/2018.
* stream-reactor
*/
object ConsumerConfigFactory extends StrictLogging {

  /**
    * Builds one Pulsar [[ConsumerConfiguration]] per KCQL statement, keyed by
    * the statement's source topic.
    *
    * @param name consumer name applied to every configuration
    * @param kcqls the KCQL statements to build configurations for
    */
  def apply(name: String, kcqls: Set[Kcql]): Map[String, ConsumerConfiguration] = {
    kcqls.map { kcql =>
      val config = new ConsumerConfiguration
      // only override the receiver queue size when a batch size was configured
      if (kcql.getBatchSize > 0) config.setReceiverQueueSize(kcql.getBatchSize)
      config.setSubscriptionType(getSubscriptionType(kcql))
      config.setConsumerName(name)
      (kcql.getSource, config)
    }.toMap
  }

  /**
    * Resolves the Pulsar subscription type from the KCQL WITHSUBSCRIPTION
    * clause. Unknown or missing values default to [[SubscriptionType.Failover]].
    */
  def getSubscriptionType(kcql: Kcql): SubscriptionType = {
    if (kcql.getWithSubscription != null) {
      kcql.getWithSubscription.toUpperCase.trim match {
        case "EXCLUSIVE" =>
          SubscriptionType.Exclusive
        case "FAILOVER" =>
          SubscriptionType.Failover
        case "SHARED" =>
          SubscriptionType.Shared
        case other =>
          // log the value actually being matched (previously this logged the
          // unrelated WITHTYPE clause via kcql.getWithType)
          logger.error(s"Unsupported subscription type $other set in WITHSUBSCRIPTION. Defaulting to Failover")
          SubscriptionType.Failover
      }
    } else {
      logger.info("Defaulting to failover subscription type")
      SubscriptionType.Failover
    }
  }
}
}
| datamountaineer/stream-reactor | kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/ConsumerConfigFactory.scala | Scala | apache-2.0 | 1,391 |
package com.mentatlabs.nsa
package scalac
package options
/**
 * One-off code generator: prints a backtick-named `def` delegating to
 * `apply(length)` for every length from 72 to 255 — presumably for pasting
 * into the XMaxClassfileName scalac-option wrapper (TODO confirm).
 * Run manually; output goes to stdout.
 */
object XMaxClassfileNameGenerator extends App {
  val sb = new StringBuilder
  // one generated method per candidate classfile-name length
  for (length <- 72 to 255) {
    sb ++= s""" def \`$length\` = apply($length)""" += '\\n'
  }
  println(sb)
}
| melezov/sbt-nsa | nsa-core/src/misc/XMaxClassfileNameGenerator.scala | Scala | bsd-3-clause | 247 |
package org.locationtech.geomesa.convert.xml
import com.typesafe.config.ConfigFactory
import org.junit.runner.RunWith
import org.locationtech.geomesa.convert.SimpleFeatureConverters
import org.locationtech.geomesa.convert.Transformers.{DefaultCounter, EvaluationContext}
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
/**
 * Tests for the XML SimpleFeature converter: multiple features per document,
 * nested feature paths, XPath functions, hashed feature IDs, and XSD
 * validation / rejection.
 */
class XMLConverterTest extends Specification {

  // Target feature type: four flat attributes (no geometry).
  val sftConf = ConfigFactory.parseString(
    """{ type-name = "xmlFeatureType"
      | attributes = [
      | {name = "number", type = "Integer"}
      | {name = "color", type = "String"}
      | {name = "weight", type = "Double"}
      | {name = "source", type = "String"}
      | ]
      |}
    """.stripMargin)

  val sft = SimpleFeatureTypes.createType(sftConf)

  "XML Converter" should {

    "parse multiple features out of a single document" >> {
      // NOTE(review): the second <physical .../> line below is missing its '|'
      // margin marker, so stripMargin keeps its raw indentation in the string.
      // Harmless to the XML parser, but likely unintended.
      val xml =
        """<doc>
          |  <DataSource>
          |    <name>myxml</name>
          |  </DataSource>
          |  <Feature>
          |    <number>123</number>
          |    <color>red</color>
          |    <physical weight="127.5" height="5'11"/>
          |  </Feature>
          |  <Feature>
          |    <number>456</number>
          |    <color>blue</color>
               <physical weight="150" height="h2"/>
          |  </Feature>
          |</doc>
        """.stripMargin

      val parserConf = ConfigFactory.parseString(
        """
          | converter = {
          |   type         = "xml"
          |   id-field     = "uuid()"
          |   feature-path = "Feature" // can be any xpath - relative to the root, or absolute
          |   fields = [
          |     // paths can be any xpath - relative to the feature-path, or absolute
          |     { name = "number", path = "number",           transform = "$0::integer" }
          |     { name = "color",  path = "color",            transform = "trim($0)" }
          |     { name = "weight", path = "physical/@weight", transform = "$0::double" }
          |     { name = "source", path = "/doc/DataSource/name/text()" }
          |   ]
          | }
        """.stripMargin)

      val converter = SimpleFeatureConverters.build[String](sft, parserConf)
      val features = converter.processInput(Iterator(xml)).toList
      features must haveLength(2)
      features.head.getAttribute("number").asInstanceOf[Integer] mustEqual 123
      features.head.getAttribute("color").asInstanceOf[String] mustEqual "red"
      features.head.getAttribute("weight").asInstanceOf[Double] mustEqual 127.5
      features.head.getAttribute("source").asInstanceOf[String] mustEqual "myxml"
      features(1).getAttribute("number").asInstanceOf[Integer] mustEqual 456
      features(1).getAttribute("color").asInstanceOf[String] mustEqual "blue"
      features(1).getAttribute("weight").asInstanceOf[Double] mustEqual 150
      features(1).getAttribute("source").asInstanceOf[String] mustEqual "myxml"
    }

    "parse nested feature nodes" >> {
      // Features are wrapped in intermediate <IgnoreMe> elements; the absolute
      // feature-path must still locate them.
      val xml =
        """<doc>
          |  <DataSource>
          |    <name>myxml</name>
          |  </DataSource>
          |  <IgnoreMe>
          |    <Feature>
          |      <number>123</number>
          |      <color>red</color>
          |      <physical weight="127.5" height="5'11"/>
          |    </Feature>
          |  </IgnoreMe>
          |  <IgnoreMe>
          |    <Feature>
          |      <number>456</number>
          |      <color>blue</color>
          |      <physical weight="150" height="h2"/>
          |    </Feature>
          |  </IgnoreMe>
          |</doc>
        """.stripMargin

      val parserConf = ConfigFactory.parseString(
        """
          | converter = {
          |   type         = "xml"
          |   id-field     = "uuid()"
          |   feature-path = "/doc/IgnoreMe/Feature" // can be any xpath - relative to the root, or absolute
          |   fields = [
          |     // paths can be any xpath - relative to the feature-path, or absolute
          |     { name = "number", path = "number",           transform = "$0::integer" }
          |     { name = "color",  path = "color",            transform = "trim($0)" }
          |     { name = "weight", path = "physical/@weight", transform = "$0::double" }
          |     { name = "source", path = "/doc/DataSource/name/text()" }
          |   ]
          | }
        """.stripMargin)

      val converter = SimpleFeatureConverters.build[String](sft, parserConf)
      val features = converter.processInput(Iterator(xml)).toList
      features must haveLength(2)
      features.head.getAttribute("number").asInstanceOf[Integer] mustEqual 123
      features.head.getAttribute("color").asInstanceOf[String] mustEqual "red"
      features.head.getAttribute("weight").asInstanceOf[Double] mustEqual 127.5
      features.head.getAttribute("source").asInstanceOf[String] mustEqual "myxml"
      features(1).getAttribute("number").asInstanceOf[Integer] mustEqual 456
      features(1).getAttribute("color").asInstanceOf[String] mustEqual "blue"
      features(1).getAttribute("weight").asInstanceOf[Double] mustEqual 150
      features(1).getAttribute("source").asInstanceOf[String] mustEqual "myxml"
    }

    "apply xpath functions" >> {
      // floor() is applied inside the weight path: 127.5 -> 127.
      val xml =
        """<doc>
          |  <DataSource>
          |    <name>myxml</name>
          |  </DataSource>
          |  <Feature>
          |    <number>123</number>
          |    <color>red</color>
          |    <physical weight="127.5" height="5'11"/>
          |  </Feature>
          |</doc>
        """.stripMargin

      val parserConf = ConfigFactory.parseString(
        """
          | converter = {
          |   type         = "xml"
          |   id-field     = "uuid()"
          |   feature-path = "Feature" // can be any xpath - relative to the root, or absolute
          |   fields = [
          |     // paths can be any xpath - relative to the feature-path, or absolute
          |     { name = "number", path = "number",                  transform = "$0::integer" }
          |     { name = "color",  path = "color",                   transform = "trim($0)" }
          |     { name = "weight", path = "floor(physical/@weight)", transform = "$0::double" }
          |     { name = "source", path = "/doc/DataSource/name/text()" }
          |   ]
          | }
        """.stripMargin)

      val converter = SimpleFeatureConverters.build[String](sft, parserConf)
      val features = converter.processInput(Iterator(xml)).toList
      features must haveLength(1)
      features.head.getAttribute("number").asInstanceOf[Integer] mustEqual 123
      features.head.getAttribute("color").asInstanceOf[String] mustEqual "red"
      features.head.getAttribute("weight").asInstanceOf[Double] mustEqual 127
      features.head.getAttribute("source").asInstanceOf[String] mustEqual "myxml"
    }

    "use an ID hash for each node" >> {
      // NOTE(review): as in the first test, the second <physical .../> line is
      // missing its '|' margin marker — the MD5 IDs asserted below depend on
      // the exact serialized bytes, so do not "fix" the string casually.
      val xml =
        """<doc>
          |  <DataSource>
          |    <name>myxml</name>
          |  </DataSource>
          |  <Feature>
          |    <number>123</number>
          |    <color>red</color>
          |    <physical weight="127.5" height="5'11"/>
          |  </Feature>
          |  <Feature>
          |    <number>456</number>
          |    <color>blue</color>
               <physical weight="150" height="h2"/>
          |  </Feature>
          |</doc>
        """.stripMargin

      val parserConf = ConfigFactory.parseString(
        """
          | converter = {
          |   type         = "xml"
          |   id-field     = "md5(string2bytes(xml2string($0)))"
          |   feature-path = "Feature" // can be any xpath - relative to the root, or absolute
          |   fields = [
          |     // paths can be any xpath - relative to the feature-path, or absolute
          |     { name = "number", path = "number",           transform = "$0::integer" }
          |     { name = "color",  path = "color",            transform = "trim($0)" }
          |     { name = "weight", path = "physical/@weight", transform = "$0::double" }
          |     { name = "source", path = "/doc/DataSource/name/text()" }
          |   ]
          | }
        """.stripMargin)

      val converter = SimpleFeatureConverters.build[String](sft, parserConf)
      val features = converter.processInput(Iterator(xml)).toList
      features must haveLength(2)
      features.head.getAttribute("number").asInstanceOf[Integer] mustEqual 123
      features.head.getAttribute("color").asInstanceOf[String] mustEqual "red"
      features.head.getAttribute("weight").asInstanceOf[Double] mustEqual 127.5
      features.head.getAttribute("source").asInstanceOf[String] mustEqual "myxml"
      features(1).getAttribute("number").asInstanceOf[Integer] mustEqual 456
      features(1).getAttribute("color").asInstanceOf[String] mustEqual "blue"
      features(1).getAttribute("weight").asInstanceOf[Double] mustEqual 150
      features(1).getAttribute("source").asInstanceOf[String] mustEqual "myxml"
      features.head.getID mustEqual "441dd9114a1a345fe59f0dfe461f01ca"
      features(1).getID mustEqual "42aae6286c7204c3aa1aa99a4e8dae35"
    }

    "validate with an xsd" >> {
      // Namespaced document matching xml-feature.xsd: one feature expected.
      val xml =
        """<f:doc xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:f="http://geomesa.org/test-feature">
          |  <f:DataSource>
          |    <f:name>myxml</f:name>
          |  </f:DataSource>
          |  <f:Feature>
          |    <f:number>123</f:number>
          |    <f:color>red</f:color>
          |    <f:physical weight="127.5" height="5'11"/>
          |  </f:Feature>
          |</f:doc>
        """.stripMargin

      val parserConf = ConfigFactory.parseString(
        """
          | converter = {
          |   type         = "xml"
          |   id-field     = "uuid()"
          |   feature-path = "Feature" // can be any xpath - relative to the root, or absolute
          |   xsd          = "xml-feature.xsd" // looked up by class.getResource
          |   fields = [
          |     // paths can be any xpath - relative to the feature-path, or absolute
          |     { name = "number", path = "number",           transform = "$0::integer" }
          |     { name = "color",  path = "color",            transform = "trim($0)" }
          |     { name = "weight", path = "physical/@weight", transform = "$0::double" }
          |     { name = "source", path = "/doc/DataSource/name/text()" }
          |   ]
          | }
        """.stripMargin)

      val converter = SimpleFeatureConverters.build[String](sft, parserConf)
      val features = converter.processInput(Iterator(xml)).toList
      features must haveLength(1)
      features.head.getAttribute("number").asInstanceOf[Integer] mustEqual 123
      features.head.getAttribute("color").asInstanceOf[String] mustEqual "red"
      features.head.getAttribute("weight").asInstanceOf[Double] mustEqual 127.5
      features.head.getAttribute("source").asInstanceOf[String] mustEqual "myxml"
    }

    "invalidate with an xsd" >> {
      // Root element <f:doc2> does not match the schema: no features produced.
      val xml =
        """<f:doc2 xmlns:f="http://geomesa.org/test-feature" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
          |  <f:DataSource>
          |    <f:name>myxml</f:name>
          |  </f:DataSource>
          |  <f:Feature>
          |    <f:number>123</f:number>
          |    <f:color>red</f:color>
          |    <f:physical weight="127.5" height="5'11"/>
          |  </f:Feature>
          |</f:doc2>
        """.stripMargin

      val parserConf = ConfigFactory.parseString(
        """
          | converter = {
          |   type         = "xml"
          |   id-field     = "uuid()"
          |   feature-path = "Feature" // can be any xpath - relative to the root, or absolute
          |   xsd          = "xml-feature.xsd" // looked up by class.getResource
          |   fields = [
          |     // paths can be any xpath - relative to the feature-path, or absolute
          |     { name = "number", path = "number",           transform = "$0::integer" }
          |     { name = "color",  path = "color",            transform = "trim($0)" }
          |     { name = "weight", path = "physical/@weight", transform = "$0::double" }
          |     { name = "source", path = "/doc/DataSource/name/text()" }
          |   ]
          | }
        """.stripMargin)

      val converter = SimpleFeatureConverters.build[String](sft, parserConf)
      val features = converter.processInput(Iterator(xml)).toList
      features must haveLength(0)
    }
  }
}
| vpipkt/geomesa | geomesa-convert/geomesa-convert-xml/src/test/scala/org/locationtech/geomesa/convert/xml/XMLConverterTest.scala | Scala | apache-2.0 | 12,618 |
/*
* Copyright 2015-2016 IBM Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package packages
import common._
import org.junit.runner.RunWith
import org.scalatest.Matchers
import org.scalatest.junit.JUnitRunner
import spray.json._
import spray.json.DefaultJsonProtocol.StringJsonFormat
import scala.collection.immutable.HashMap
import org.scalatest.FlatSpecLike
@RunWith(classOf[JUnitRunner])
class MessageHubTests extends TestHelpers with WskTestHelpers with Matchers {

  implicit val wskprops = WskProps()
  val wsk = new Wsk()

  // Message Hub service credentials read from the VCAP services binding.
  val credentials = TestUtils.getVCAPcredentials("messagehub")
  val restUrl = credentials.get("restUrl")
  val restPort = credentials.get("restPort")
  val apikey = credentials.get("apikey")
  val serviceEndpoint = credentials.get("serviceEndpoint")

  // Parameters shared by every messagehub action invocation below.
  val baseParams = HashMap(
    "restUrl" -> restUrl.toJson,
    "restPort" -> restPort.toJson,
    "apikey" -> apikey.toJson)

  behavior of "MessageHub Package"

  "get topic action" should "return all topics" in {
    val actionName = "/whisk.system/messagehub/getTopics"
    withActivation(wsk.activation, wsk.action.invoke(actionName, baseParams)) {
      _.fields("response").toString should include(""""markedForDeletion":""")
    }
  }

  "create topic action" should "return, create successfull" in {
    val actionName = "/whisk.system/messagehub/createTopic"
    val params = baseParams + ("topic" -> "testTopic".toJson)
    withActivation(wsk.activation, wsk.action.invoke(actionName, params)) {
      _.fields("response").toString should include("""successfully""")
    }
  }

  "publish message action" should "return, publish successfull" in {
    val actionName = "/whisk.system/messagehub/publish"
    val params = baseParams +
      ("topic" -> "testTopic".toJson) +
      ("message" -> "test message input".toJson)
    withActivation(wsk.activation, wsk.action.invoke(actionName, params)) {
      _.fields("response").toString should include("""offsets""")
    }
  }

  "delete topic action" should "return, deletion successfull" in {
    val actionName = "/whisk.system/messagehub/deleteTopic"
    val params = baseParams + ("topic" -> "testTopic".toJson)
    withActivation(wsk.activation, wsk.action.invoke(actionName, params)) {
      _.fields("response").toString should include("""successfully""")
    }
  }
}
| saschoff91/wsk-pkg-messagehub | tests/src/MessageHubTests.scala | Scala | apache-2.0 | 3,333 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.convert.avro
import java.io.ByteArrayInputStream
import com.typesafe.config.ConfigFactory
import org.junit.runner.RunWith
import org.locationtech.geomesa.convert.SimpleFeatureConverters
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
/** Tests converting Avro GenericRecords (raw bytes and streams) into SimpleFeatures. */
class AvroSimpleFeatureConverterTest extends Specification with AvroUtils {

  sequential

  "Avro2SimpleFeature should" should {

    // Converter config: selects the TObj branch of the record's content union,
    // then extracts dtg/lat/lon from its key-value map via avroPath and builds
    // a point geometry from lon/lat.
    val conf = ConfigFactory.parseString(
      """
        | {
        |   type   = "avro"
        |   schema-file = "/schema.avsc"
        |   sft    = "testsft"
        |   id-field = "uuid()"
        |   fields = [
        |     { name = "tobj", transform = "avroPath($1, '/content$type=TObj')" },
        |     { name = "dtg",  transform = "date('YYYY-MM-dd', avroPath($tobj, '/kvmap[$k=dtg]/v'))" },
        |     { name = "lat",  transform = "avroPath($tobj, '/kvmap[$k=lat]/v')" },
        |     { name = "lon",  transform = "avroPath($tobj, '/kvmap[$k=lon]/v')" },
        |     { name = "geom", transform = "point($lon, $lat)" }
        |   ]
        | }
      """.stripMargin)

    "properly convert a GenericRecord to a SimpleFeature" >> {
      val sft = SimpleFeatureTypes.createType(ConfigFactory.load("sft_testsft.conf"))
      val converter = SimpleFeatureConverters.build[Array[Byte]](sft, conf)
      val ec = converter.createEvaluationContext()
      // `bytes` is the serialized test record provided by AvroUtils
      val sf = converter.processInput(Iterator.apply[Array[Byte]](bytes), ec).next()
      sf.getAttributeCount must be equalTo 2
      sf.getAttribute("dtg") must not beNull

      ec.counter.getFailure mustEqual 0L
      ec.counter.getSuccess mustEqual 1L
      ec.counter.getLineCount mustEqual 1L  // only 1 record passed in itr
    }

    "properly convert an input stream" >> {
      val sft = SimpleFeatureTypes.createType(ConfigFactory.load("sft_testsft.conf"))
      val converter = SimpleFeatureConverters.build[Array[Byte]](sft, conf)
      val ec = converter.createEvaluationContext()
      val sf = converter.process(new ByteArrayInputStream(bytes), ec).next()
      sf.getAttributeCount must be equalTo 2
      sf.getAttribute("dtg") must not beNull

      ec.counter.getFailure mustEqual 0L
      ec.counter.getSuccess mustEqual 1L
      ec.counter.getLineCount mustEqual 1L  // zero indexed so this is 2 records
    }
  }
}
| mdzimmerman/geomesa | geomesa-convert/geomesa-convert-avro/src/test/scala/org/locationtech/geomesa/convert/avro/AvroSimpleFeatureConverterTest.scala | Scala | apache-2.0 | 2,906 |
package com.github.jmccrae.yuzu
import com.github.jmccrae.sqlutils._
import com.github.jmccrae.yuzu.YuzuSettings._
import com.github.jmccrae.yuzu.YuzuUserText._
import com.github.jmccrae.yuzu.ql.{PrefixCCLookup, QueryBuilder, YuzuQLSyntax}
import com.hp.hpl.jena.graph.{Node, NodeFactory, Triple}
import com.hp.hpl.jena.query.{QueryExecutionFactory, QueryFactory}
import com.hp.hpl.jena.rdf.model.{AnonId, Model, ModelFactory}
import com.hp.hpl.jena.sparql.core.Quad
import com.hp.hpl.jena.vocabulary._
import java.io.File
import java.net.URI
import java.sql.DriverManager
import java.util.regex.Pattern
import org.apache.jena.atlas.web.TypedInputStream
import org.apache.jena.riot.system.{StreamRDF, StreamRDFBase}
import org.apache.jena.riot.{Lang, RDFDataMgr}
object UnicodeEscape {

  /** Fix unicode escape characters: replaces each literal `\uXXXX` sequence
    * with the character it denotes, in place. Malformed sequences (non-hex
    * digits) are reported on stderr and left untouched. The loop guard
    * requires six characters from the backslash onwards, so a truncated
    * escape at the very end of the string is never processed.
    */
  def unescape(str : String) : String = {
    val sb = new StringBuilder(str)
    var i = sb.indexOf('\\')
    while(i >= 0 && i < sb.length - 5) {
      if(sb.charAt(i) == '\\' && sb.charAt(i+1) == 'u') {
        try {
          // replace the six-character escape with the single decoded character
          sb.replace(i,i+6,
            Integer.parseInt(sb.slice(i+2,i+6).toString, 16).toChar.toString) }
        catch {
          case x : NumberFormatException =>
            // leave the malformed escape in place and continue scanning
            System.err.println("Bad unicode string %s" format sb.slice(i,i+6)) }}
      i = sb.indexOf('\\', i + 1) }
    sb.toString }

  // Percent-encodes each occurrence of the dangerous characters
  // " < > { } [ ] | \ and any Unicode whitespace (via URLEncoder, so a
  // space becomes '+').
  // NOTE(review): the safeURI scaladoc below also lists '^' as unsafe, but
  // the pattern here does not include it — confirm whether '^' should be
  // encoded as well.
  private def encodeDangerous(s : String) = {
    val p = Pattern.compile("([\"<>{}\\[\\]|\\\\\\p{IsWhite_Space}])")
    val m = p.matcher(s)
    val sb = new StringBuffer()
    while(m.find()) {
      m.appendReplacement(sb, java.net.URLEncoder.encode(m.group(1), "UTF-8"))
    }
    m.appendTail(sb)
    sb }

  private def doubleEncode(s : CharSequence) = {
    // Double encode already encoded special characters to avoid
    // creating invalid URIs
    // (escaping the '%' keeps these sequences encoded after the single
    // decode performed in safeURI below)
    val p = Pattern.compile(
      "(%23|%25|%2F|%3B|%3F|%2B|%22|%3C|%3E|%7B|%7D|%5C|%5E|%5B|%5D|" +
      "%C2%A0|%E1%9A%80|%E1%A0%8E|%E2%80%8[0-9AB]|" +
      "%E2%80%AF|%E2%81%9F|%E3%80%80|%EF%BB%BF)", Pattern.CASE_INSENSITIVE)
    val m = p.matcher(s)
    val sb = new StringBuffer()
    while(m.find()) {
      m.appendReplacement(sb, m.group(1).replaceAll("%", "%25")) }
    m.appendTail(sb)
    sb.toString }

  /**
   * Make a URI safe in that it avoids all of the most unsafe characters.
   * The following character are unsafe and should always be
   * encoded
   * " < > { } | \ ^ [ ]
   * Anything matching \p{IsWhite_Space}
   * The following should never be decoded to avoid ambiguity
   * %23 (#) %2F (/) %3B (;) %3F (?) %2B (+) %25 (%) */
  // Flow: encode dangerous characters, double-encode reserved
  // percent-sequences, decode once (which also turns '+' into ' '), then
  // restore any spaces back to '+'.
  def safeURI(uri : String) =
    java.net.URLDecoder.decode(
      doubleEncode(
        encodeDangerous(uri)), "UTF-8").replaceAll(" ", "+")

  // Re-creates URI nodes with safeURI applied; blank nodes and literals pass
  // through unchanged.
  def fixURI(n : Node) = if(n.isURI()) {
    NodeFactory.createURI(safeURI(n.getURI())) }
  else { n }

  /**
   * Make a path safe by encoding all dangerous characters
   */
  def safePath(s : String) = encodeDangerous(s).toString()
}
object N3 {

  /** Convert an N3 string to a node.
    *
    * Recognised forms, tried in order:
    *  - `<uri>`          → URI node
    *  - `_:id`           → blank node with the given id
    *  - `"lex"^^<type>`  → typed literal (lexical form unescaped via [[UnicodeEscape.unescape]])
    *  - `"lex"@lang`     → language-tagged literal
    *  - `"lex"`          → plain literal
    * Anything else raises an IllegalArgumentException. The `^^` branch is
    * deliberately checked before the `@` branch — the order is significant.
    */
  def fromN3(n3 : String) = if(n3.startsWith("<") && n3.endsWith(">")) {
    NodeFactory.createURI(n3.drop(1).dropRight(1)) }
  else if(n3.startsWith("_:")) {
    NodeFactory.createAnon(AnonId.create(n3.drop(2))) }
  else if(n3.startsWith("\"") && n3.contains("^^")) {
    val Array(lit, typ) = n3.split("\"\\^\\^",2)
    NodeFactory.createLiteral(UnicodeEscape.unescape(lit.drop(1)), NodeFactory.getType(typ.drop(1).dropRight(1))) }
  else if(n3.startsWith("\"") && n3.contains("\"@")) {
    val Array(lit, lang) = n3.split("\"@", 2)
    NodeFactory.createLiteral(UnicodeEscape.unescape(lit.drop(1)), lang, false) }
  else if(n3.startsWith("\"") && n3.endsWith("\"")) {
    NodeFactory.createLiteral(UnicodeEscape.unescape(n3.drop(1).dropRight(1))) }
  else {
    throw new IllegalArgumentException("Not N3: %s" format n3) }

  /** Convert a node to an N3 String: the inverse of [[fromN3]]. URI nodes
    * render as `<uri>`, blank nodes as `_:id`, language-tagged literals as
    * `"lex"@lang`, typed literals as `"lex"^^<dt>` and plain literals as
    * `"lex"`; double quotes inside lexical forms are backslash-escaped.
    */
  def toN3(node : Node) : String = if(node.isURI()) {
    "<%s>" format node.getURI() }
  else if(node.isBlank()) {
    "_:%s" format node.getBlankNodeId().toString() }
  else if(node.getLiteralLanguage() != "") {
    "\"%s\"@%s" format (
      node.getLiteralLexicalForm().toString().replaceAll("\"","\\\\\""),
      node.getLiteralLanguage()) }
  else if(node.getLiteralDatatypeURI() != null) {
    "\"%s\"^^<%s>" format (
      node.getLiteralLexicalForm().toString().replaceAll("\"","\\\\\""),
      node.getLiteralDatatypeURI()) }
  else {
    "\"%s\"" format (
      node.getLiteralLexicalForm().toString().replaceAll("\"","\\\\\"")) }
}
/**
* Standard 3-column SQL implementation of a triple store, with foreign keys
* for N3 form of the triple
*/
class TripleBackend(db : String) extends Backend {
import UnicodeEscape._
import N3._
  // Eagerly load the SQLite JDBC driver so a missing driver surfaces at
  // construction time rather than on first query.
  try {
    Class.forName("org.sqlite.JDBC") }
  catch {
    case x : ClassNotFoundException => throw new RuntimeException("No Database Driver", x) }

  /** Create a connection */
  // NOTE(review): a new connection is opened on every access of `conn`;
  // confirm that all call sites close the connection they obtain.
  private def conn = DriverManager.getConnection("jdbc:sqlite:" + db)
  /** To make many of the queries easier */
  // GetResult instance parsing an N3-encoded text column into a Jena Node.
  object GetNode extends GetResult[Node] {
    def apply(rs : java.sql.ResultSet, index : Int) = {
      fromN3(rs.getString(index)) }
  }
  // GetResult instance wrapping an integer column as an xsd:integer literal node.
  object GetIntAsNode extends GetResult[Node] {
    def apply(rs : java.sql.ResultSet, index : Int) = {
      NodeFactory.createLiteral(rs.getInt(index).toString, NodeFactory.getType(XSD.integer.getURI())) }
  }
  /** The ID cache */
  // Maps an N3 term string to its integer id in the `ids` table, inserting a
  // new row on a cache miss and then re-querying for the generated id.
  private def cache(implicit session : Session) = {
    new SimpleCache {
      val size = 1000000
      def load(key : String) = {
        sql"""SELECT id FROM ids WHERE n3=$key""".as1[Int].headOption match {
          case Some(id) =>
            id
          case None =>
            // NOTE(review): two values are bound here but the statement declares
            // only one placeholder — this likely should read VALUES (?, ?);
            // confirm against the sqlutils insert semantics.
            sql"""INSERT INTO ids (n3, main) VALUES (?)""".insert(key, pageN3(key))
            sql"""SELECT id FROM ids WHERE n3=$key""".as1[Int].head }}}}
  /** The database schema.
    *
    * `ids` interns every distinct N3 term (with its page and optional label);
    * `tripids` stores triples as id references; `triples` is a view joining
    * the two; `free_text` is an FTS4 index over objects; `links` and
    * `value_cache` hold precomputed statistics filled at the end of load().
    *
    * NOTE(review): only `ids` and `tripids` use IF NOT EXISTS — the indexes,
    * view, virtual table and remaining tables do not, so running this against
    * an already-initialised database will fail.  Confirm load() is run once. */
  private def createTables(implicit session : Session) = {
    sql"""CREATE TABLE IF NOT EXISTS ids (id integer primary key,
                                          n3 text not null,
                                          main text not null,
                                          label text, unique(n3))""".execute
    sql"""CREATE INDEX n3s on ids (n3)""".execute
    sql"""CREATE TABLE IF NOT EXISTS tripids (sid integer not null,
                                              pid integer not null,
                                              oid integer not null,
                                              page text,
                                              head boolean,
                                              foreign key (sid) references ids,
                                              foreign key (pid) references ids,
                                              foreign key (oid) references ids)""".execute
    sql"""CREATE INDEX subjects ON tripids(sid)""".execute
    sql"""CREATE INDEX properties ON tripids(pid)""".execute
    sql"""CREATE INDEX objects ON tripids(oid)""".execute
    sql"""CREATE INDEX pages ON tripids(page)""".execute
    sql"""CREATE VIEW triples AS SELECT page, sid, pid, oid,
            subj.n3 AS subject, subj.label AS subj_label,
            prop.n3 AS property, prop.label AS prop_label,
            obj.n3 AS object, obj.label AS obj_label, head
          FROM tripids
          JOIN ids AS subj ON tripids.sid=subj.id
          JOIN ids AS prop ON tripids.pid=prop.id
          JOIN ids AS obj ON tripids.oid=obj.id""".execute
    sql"""CREATE VIRTUAL TABLE free_text USING fts4(sid integer, pid integer,
      object TEXT NOT NULL)""".execute
    sql"""CREATE TABLE links (count integer, target text)""".execute
    sql"""CREATE TABLE value_cache (object text not null,
                                    obj_label text,
                                    count int,
                                    property text not null)""".execute }
/** Work out the page for a node (assuming the node is in the base namespace */
def node2page(n : Node) = uri2page(n.getURI())
def uri2page(uri : String) =
if(uri.contains('#')) {
uri.take(uri.indexOf('#')).drop(BASE_NAME.size) }
else { uri.drop(BASE_NAME.size) }
  /** Parse an N-Triples stream line by line, feeding each triple to the
    * given StreamRDF handler.
    *
    * Each line is split on spaces: token 0 is the subject, token 1 the
    * property, and tokens 2..n-2 (rejoined with spaces) form the object —
    * the final token is the terminating "." and is dropped.
    *
    * @param ignoreErrors when true, malformed lines are logged and skipped;
    *                     otherwise the first parse failure is rethrown */
  def readNTriples(handler : StreamRDF, inputStream : java.io.InputStream,
      ignoreErrors : Boolean) {
    for(line <- io.Source.fromInputStream(inputStream).getLines()) {
      val elems = line.split(" ")
      try {
        handler.triple(new Triple(
          fromN3(elems(0)),
          fromN3(elems(1)),
          fromN3(elems.slice(2, elems.size - 1).mkString(" ")))) }
      catch {
        case x : Exception =>
          if(ignoreErrors) {
            x.printStackTrace() }
          else {
            throw x }}}}
def pageN3(n3 : String) = {
if(n3.startsWith("<") && n3.contains("#")) {
n3.take(n3.indexOf("#")) + ">" }
else {
n3 }}
  /** Bulk-insert interned terms into the `ids` table.
    *
    * Keys are inserted in ascending id order (sorted by their mapped value)
    * so that SQLite's auto-assigned rowids line up with the in-memory ids.
    * The prepared batch is flushed every 10000 rows and once at the end. */
  def dumpMap(map : Map[Node, Int])(implicit session : Session) {
    val keys = map.keys.toSeq.sortBy(map(_))
    val insertKey = sql"""INSERT OR IGNORE INTO ids (n3, main) VALUES (?, ?)""".
      insert2[String, String]
    var i = 0
    for(key <- keys) {
      insertKey(toN3(key), pageN3(toN3(key)))
      i += 1
      if(i % 10000 == 0) {
        insertKey.execute }}
    insertKey.execute }
def fromN3orInt(s : String) = if(s.startsWith("<") || s.startsWith("_") ||
s.startsWith("\"")) {
Left(fromN3(s)) }
else {
Right(fromIntN3(s)) }
  /** Parse an "<id>=<n3>" pair produced by the preprocessing pass in load().
    * Splits at the first '='; the left part is the integer id, the right
    * part (minus the '=') is parsed as N3.  On any failure the offending
    * input is dumped to stderr before rethrowing. */
  def fromIntN3(s : String) =
    try {
      val (d, n3) = s.splitAt(s.indexOf("="))
      (d.toInt, fromN3(n3.drop(1))) }
    catch {
      case x : Exception => {
        System.err.println(s)
        throw x }}
def removeFrag(uriStr : String) = try {
val uri = URI.create(uriStr)
new URI(uri.getScheme(), uri.getHost(),
uri.getPath(), null).toString
} catch {
case x : IllegalArgumentException =>
System.err.println("Bad uri: " + uriStr)
uriStr
}
  /** Load an N-Triples dump into the database.
    *
    * Phase 1 (preprocessing): repeatedly rewrites the input to temp files,
    * replacing each term with "<id>=<n3>" once it has been interned.  At most
    * `maxCache` new terms are interned per pass; if the cache fills, `eof`
    * stays false and another pass is made over the partially rewritten file,
    * skipping lines already fully processed.  Interned terms are flushed to
    * the `ids` table via dumpMap after every pass.
    *
    * Phase 2 (insertion): streams the fully id-annotated file and fills
    * `tripids`, `free_text`, labels, link counts and `value_cache`.
    *
    * @param inputStream  by-name stream over the N-Triples source (re-read
    *                     only once; later passes read the temp files)
    * @param ignoreErrors when true, malformed lines are logged and skipped
    * @param maxCache     maximum number of term ids held in memory per pass
    */
  def load(inputStream : => java.io.InputStream, ignoreErrors : Boolean,
      maxCache : Int = 1000000) {
    val c = conn
    c.setAutoCommit(false)
    withSession(c) { implicit session =>
      createTables
      var stream = inputStream
      var skip = 0
      var offset = 1
      var oldOutFile : Option[File] = None
      var outFile : File = null
      var eof = true
      var first = true
      // Phase 1: keep rewriting until every term is replaced by "<id>=<n3>".
      do {
        eof = true
        var read = 0
        outFile = File.createTempFile("yuzu", ".nt")
        outFile.deleteOnExit()
        val out = new java.io.PrintWriter(outFile)
        val known = collection.mutable.Map[Node, Int]()
        for(line <- io.Source.fromInputStream(stream).getLines()) {
          try {
            read += 1
            if(read < skip) {
              // already fully rewritten on a previous pass; copy through
              out.println(line) }
            else {
              val elems = line.split(" ")
              val subj = fromN3orInt(elems(0))
              val prop = fromN3orInt(elems(1))
              val obj = fromN3orInt(elems.slice(2, elems.size - 1).mkString(" "))
              for(e <- Seq(subj, prop, obj)) {
                e match {
                  case Left(n2) =>
                    // terms go through fixURI (defined elsewhere) on the
                    // first pass only
                    val n = if(first) { fixURI(n2) } else { n2 }
                    known.get(n) match {
                      case Some(i) => out.print("%d=%s" format(i, toN3(n)))
                      case None => if(known.size < maxCache) {
                        val v = offset + known.size
                        known.put(n, v)
                        out.print("%d=%s" format(v, toN3(n))) }
                      else {
                        // cache full: remember where to resume and force
                        // another pass
                        if(eof) {
                          System.err.println("Preprocessed to %d" format (read))
                          skip = read }
                        eof = false
                        out.print(toN3(n)) }}
                  case Right((v, n)) => out.print("%d=%s" format(v, toN3(n))) }
                out.print(" ") }
              out.println(". ") }}
          catch {
            case x : Exception =>
              if(ignoreErrors) {
                x.printStackTrace() }
              else {
                throw x }}}
        out.flush()
        out.close()
        oldOutFile.foreach(_.delete())
        stream = new java.io.FileInputStream(outFile)
        oldOutFile = Some(outFile)
        offset += known.size
        dumpMap(known.toMap)
        c.commit()
        first = false
      } while(!eof)
      System.err.println("Preprocessing done")
      // Phase 2: stream the fully id-annotated dump into the tables.
      val insertTriples = sql"""INSERT INTO tripids VALUES (?, ?, ?, ?, ?)""".
        insert5[Int, Int, Int, String, Boolean]
      val insertFreeText = sql"""INSERT INTO free_text VALUES (?, ?, ?)""".
        insert3[Int, Int, String]
      val updateLabel = sql"""UPDATE ids SET label=? WHERE id=?""".
        insert2[String, Int]
      var linkCounts = collection.mutable.Map[String, Int]()
      var n = 0
      for(line <- io.Source.fromFile(outFile).getLines) {
        try {
          val elems = line.split(" ")
          val (sid, subj) = fromIntN3(elems(0))
          val (pid, prop) = fromIntN3(elems(1))
          val (oid, obj) = fromIntN3(elems.slice(2, elems.size - 1).mkString(" "))
          if(subj.isURI()) {
            if(subj.getURI().startsWith(BASE_NAME)) {
              // in-namespace subject: a "head" triple when it has no fragment
              val page = node2page(subj)
              insertTriples(sid, pid, oid, page, !subj.getURI().contains("#"))
              //if(FACETS.exists(_("uri") == prop.getURI())) {
              if(obj.isLiteral()) {
                insertFreeText(sid, pid, obj.getLiteralLexicalForm()) }
              else {
                insertFreeText(sid, pid, obj.toString) }//}
              if(LABELS.contains("<" + prop.getURI() + ">") && !subj.getURI().contains('#') && obj.isLiteral()) {
                updateLabel(obj.getLiteralLexicalForm(), sid) }
              if(obj.isURI()) {
                // count outbound links to external datasets
                try {
                  val objUri = URI.create(obj.getURI())
                  if(!(NOT_LINKED :+ BASE_NAME).exists(obj.getURI().startsWith(_)) &&
                     obj.getURI().startsWith("http")) {
                    val target = LINKED_SETS.find(obj.getURI().startsWith(_)) match {
                      case Some(l) => l
                      case None => new URI(
                        objUri.getScheme(),
                        objUri.getUserInfo(),
                        objUri.getHost(),
                        objUri.getPort(),
                        "/", null, null).toString }
                    if(linkCounts.contains(target)) {
                      linkCounts(target) += 1 }
                    else {
                      linkCounts(target) = 1 }}}
                catch {
                  case x : Exception => // oh well
                }}}
            else {
              if(LABELS.contains("<" + prop.getURI() + ">") && !subj.getURI().contains('#') && obj.isLiteral()) {
                updateLabel(obj.getLiteralLexicalForm(), sid) }}
            // also index the triple under the object's page for inverse lookup
            if(obj.isURI() && obj.getURI().startsWith(BASE_NAME) &&
               !NO_INVERSE.contains(removeFrag(obj.getURI()))) {
              val page = node2page(obj)
              insertTriples(sid, pid, oid, page, false) }}
          else {
            insertTriples(sid, pid, oid, "<BLANK>", false) }
          n += 1
          if(n % 100000 == 0) {
            // flush prepared-statement batches periodically
            System.err.print(".")
            System.err.flush()
            insertTriples.execute
            insertFreeText.execute
            updateLabel.execute }}
        catch {
          case x : Exception =>
            if(ignoreErrors) {
              x.printStackTrace() }
            else {
              throw x }}}
      insertTriples.execute
      insertFreeText.execute
      updateLabel.execute
      c.commit()
      System.err.println("")
      val insertLinkCount = sql"""INSERT INTO links VALUES (?, ?)""".insert2[Int, String]
      linkCounts.foreach { case (target, count) => if(count >= MIN_LINKS) {
        insertLinkCount(count, target) }}
      insertLinkCount.execute
      sql"""INSERT INTO value_cache
            SELECT DISTINCT object, obj_label, count(*), property FROM triples
            WHERE head=1 GROUP BY oid""".execute
      sql"""INSERT INTO free_text
            SELECT sid, pid, label FROM tripids
            JOIN ids on oid=id
            WHERE label != "" """.execute
      c.commit() } }
  /** Look up a single page: builds a Jena model of all triples stored under
    * the page, recursively pulling in blank-node sub-structures.  Returns
    * None when the page has no triples at all. */
  def lookup(page : String) = withSession(conn) { implicit session =>
    val model = ModelFactory.createDefaultModel()
    var found = false
    sql"""SELECT subject, property, object FROM triples WHERE page=$page""".
      as3[String, String, String].
      foreach {
        case (s, p, o) =>
          found = true
          val subj = fromN3(s)
          val prop = fromN3(p)
          val obj = fromN3(o)
          model.add(
            model.createStatement(
              model.getRDFNode(subj).asResource(),
              model.getProperty(prop.getURI()),
              model.getRDFNode(obj)))
          // blank-node objects carry nested structure; inline it
          if(obj.isBlank()) {
            addBlanks(obj, model) }}
    if(found) {
      Some(model) }
    else {
      None }}
  /** Build a short summary model for a page: at most 20 statements whose
    * property appears in the FACETS configuration. */
  def summarize(page : String) = withSession(conn) { implicit session =>
    val model = ModelFactory.createDefaultModel()
    val subject = "<%s%s>" format (BASE_NAME, page)
    var added = 0
    sql"""SELECT subject, property, object FROM triples WHERE subject=$subject""".
      as3[String, String, String].
      foreach {
        // p.drop(1).dropRight(1) strips the surrounding angle brackets
        case (s, p, o) if added < 20 && FACETS.exists(_("uri") == p.drop(1).dropRight(1)) =>
          added += 1
          model.add(
            model.createStatement(
              model.getRDFNode(fromN3(s)).asResource(),
              model.getProperty(fromN3(p).getURI()),
              model.getRDFNode(fromN3(o))))
        case _ =>
      }
    model }
  /** Add all blank nodes that have this subject.
    *
    * Recurses into nested blank nodes.  NOTE(review): a cyclic blank-node
    * structure would recurse without termination — confirm the loader can
    * never produce cycles. */
  private def addBlanks(subj : Node, model : Model)(implicit session : Session) {
    val s = toN3(subj)
    sql"""SELECT property, object FROM triples WHERE subject=$s""".
      as2[String, String].
      foreach {
        case (p, o) =>
          val prop = fromN3(p)
          val obj = fromN3(o)
          model.add(
            model.createStatement(
              model.getRDFNode(subj).asResource(),
              model.getProperty(prop.getURI()),
              model.getRDFNode(obj)))
          if(obj.isBlank()) {
            addBlanks(obj, model) }}}
  /** List all pages, optionally filtered by property and/or object.
    *
    * Fetches limit+1 rows so the first tuple element reports whether more
    * results exist beyond this page.  Pages without a usable label fall back
    * to DISPLAYER-rendered URIs. */
  def listResources(offset : Int, limit : Int, prop : Option[String] = None,
      obj : Option[String] = None) = {
    withSession(conn) { implicit session =>
      val limit2 = limit + 1
      val results = prop match {
        case Some(p) =>
          obj match {
            case Some(o) =>
              sql"""SELECT DISTINCT page, subj_label FROM triples
                    WHERE property=$p AND object=$o AND page!="<BLANK>"
                    AND head=1
                    LIMIT $limit2 OFFSET $offset""".as2[String, String]
            case None =>
              sql"""SELECT DISTINCT page, subj_label FROM triples
                    WHERE property=$p AND page!="<BLANK>"
                    AND head=1
                    LIMIT $limit2 OFFSET $offset""".as2[String, String] }
        case None =>
          sql"""SELECT DISTINCT page, subj_label FROM triples
                WHERE page!="<BLANK>" AND head=1
                LIMIT $limit2 OFFSET $offset""".as2[String, String] }
      val results2 = results.toVector
      (results2.size > limit,
        results2.map {
          // label may be NULL or empty in the database
          case (s, null) => SearchResult(CONTEXT + "/" + s,
            DISPLAYER.uriToStr(BASE_NAME + s), s)
          case (s, "") => SearchResult(CONTEXT + "/" + s,
            DISPLAYER.uriToStr(BASE_NAME + s), s)
          case (s, l) => SearchResult(CONTEXT + "/" + s, UnicodeEscape.unescape(l), s) })}}
  /** List all distinct values of a property, with usage counts, ordered by
    * count descending.  Served from the precomputed value_cache table
    * (the commented-out query shows the original on-the-fly version).
    * Fetches one extra row to report whether more results exist. */
  def listValues(offset : Int , limit2 : Int, prop : String) = {
    withSession(conn) { implicit session =>
      val limit = limit2 + 1
      //val results = sql"""SELECT DISTINCT object, obj_label, count(*) FROM triples
      //      WHERE property=$prop AND head=1
      //      GROUP BY oid ORDER BY count(*) DESC
      //      LIMIT $limit OFFSET $offset""".as3[String, String, Int].toVector
      val results = sql"""SELECT object, obj_label, count FROM value_cache
            WHERE property=$prop
            ORDER BY count DESC
            LIMIT $limit OFFSET $offset""".as3[String, String, Int].toVector
      (results.size > limit2,
        results.map {
          // obj_label may be NULL or empty
          case (s, null, c) => SearchResultWithCount(s, DISPLAYER(fromN3(s)), s, c)
          case (s, "", c) => SearchResultWithCount(s, DISPLAYER(fromN3(s)), s, c)
          case (s, l, c) => SearchResultWithCount(s, UnicodeEscape.unescape(l), s, c) })}}
  /** Free text search over the FTS4 index, optionally restricted to a single
    * property.  Shorter matching objects are ranked first.  Each hit's page
    * is derived from the subject's "main" column (an N3 URI) and decorated
    * with its stored label when one exists. */
  def search(query : String, property : Option[String], offset : Int,
      limit : Int) = {
    withSession(conn) { implicit session =>
      val result = property match {
        case Some(p) =>
          sql"""SELECT DISTINCT subj.main FROM free_text
                JOIN ids AS subj ON free_text.sid=subj.id
                JOIN ids AS prop ON free_text.pid=prop.id
                WHERE prop.n3=$p AND object MATCH $query
                ORDER BY length(object) asc
                LIMIT $limit OFFSET $offset""".as1[String]
//          sql"""SELECT DISTINCT subj.n3, subj.label FROM free_text
//                JOIN ids AS subj ON free_text.sid=subj.id
//                JOIN ids AS prop ON free_text.pid=prop.id
//                WHERE prop.n3=$p and object match $query
//                LIMIT $limit OFFSET $offset""".as2[String, String]
        case None =>
          sql"""SELECT DISTINCT subj.main FROM free_text
                JOIN ids AS subj ON free_text.sid=subj.id
                WHERE object MATCH $query
                ORDER BY length(object) asc
                LIMIT $limit OFFSET $offset""".as1[String]}
//          sql"""SELECT DISTINCT subj.n3, subj.label FROM free_text
//                JOIN ids AS subj ON free_text.sid=subj.id
//                WHERE object match $query
//                LIMIT $limit OFFSET $offset""".as2[String, String] }
      // strip the surrounding angle brackets, then map URI -> page name
      def n32page(s : String) = uri2page(s.drop(1).dropRight(1))
      result.toVector.map { n3 =>
        val page = n32page(n3)
        getLabel(page) match {
          case Some("") => SearchResult(CONTEXT + "/" + page, DISPLAYER.uriToStr(page), page)
          case Some(null) => SearchResult(CONTEXT + "/" + page, DISPLAYER.uriToStr(page), page)
          case Some(l) => SearchResult(CONTEXT + "/" + page, UnicodeEscape.unescape(l), page)
          case None => SearchResult(CONTEXT + "/" + page, DISPLAYER.uriToStr(page), page) }}}}
  /** Public wrapper around getLabel that manages its own session. */
  def label(page : String) = withSession(conn) { implicit session =>
    getLabel(page)
  }
  /** Look up the stored label for a page.  Returns None when the page's URI
    * is not interned at all; note the label column may itself be NULL, so
    * callers also handle Some(null) (see search above). */
  def getLabel(page : String)(implicit session : Session) = {
    val n3 = "<%s%s>" format (BASE_NAME, page)
    sql"""SELECT label FROM ids WHERE n3=$n3""".as1[String].headOption
  }
  /** Get link counts for DataID */
  def linkCounts = withSession(conn) { implicit session =>
    sql"""SELECT target, count FROM links""".as2[String, Int].toVector }
  /** Get the size of the dataset for DataID */
  def tripleCount = withSession(conn) { implicit session =>
    sql"""SELECT count(*) FROM tripids""".as1[Int].head }
  /** Assemble the prefix mapping used when parsing YuzuQL queries: the nine
    * configured site prefixes plus the standard RDF vocabularies. */
  private def buildPrefixMapping = {
    val lookup = new PrefixCCLookup()
    lookup.set(PREFIX1_QN, PREFIX1_URI)
    lookup.set(PREFIX2_QN, PREFIX2_URI)
    lookup.set(PREFIX3_QN, PREFIX3_URI)
    lookup.set(PREFIX4_QN, PREFIX4_URI)
    lookup.set(PREFIX5_QN, PREFIX5_URI)
    lookup.set(PREFIX6_QN, PREFIX6_URI)
    lookup.set(PREFIX7_QN, PREFIX7_URI)
    lookup.set(PREFIX8_QN, PREFIX8_URI)
    lookup.set(PREFIX9_QN, PREFIX9_URI)
    lookup.set("rdf", RDF.getURI())
    lookup.set("rdfs", RDFS.getURI())
    lookup.set("owl", OWL.getURI())
    lookup.set("dc", DC_11.getURI())
    lookup.set("dct", DCTerms.getURI())
    lookup.set("xsd", XSD.getURI())
    lookup }
/** Answer a SPARQL or YuzuQL query */
def query(query : String, mimeType : ResultType, defaultGraphURI : Option[String],
timeout : Int = 10) = {
try {
val select = YuzuQLSyntax.parse(query, buildPrefixMapping)
if(select.limit < 0 || (select.limit >= YUZUQL_LIMIT &&
YUZUQL_LIMIT >= 0)) {
ErrorResult(YZ_QUERY_LIMIT_EXCEEDED format YUZUQL_LIMIT) }
val builder = new QueryBuilder(select)
val sqlQuery = builder.build
val vars = builder.vars
withSession(conn) { implicit session =>
val results = SQLQuery(sqlQuery).as { rs =>
(for((v, idx) <- vars.zipWithIndex) yield {
if(v == "count(*)") {
v -> GetIntAsNode(rs, idx + 1) }
else {
v -> GetNode(rs, idx + 1) }}).toMap }
TableResult(ResultSet(vars, results.toVector)) }}
catch {
case x : IllegalArgumentException =>
SPARQL_ENDPOINT match {
case Some(endpoint) => {
val q = defaultGraphURI match {
case Some(uri) => QueryFactory.create(query, uri)
case None => QueryFactory.create(query) }
val qx = QueryExecutionFactory.sparqlService(endpoint, q)
if(q.isAskType()) {
val r = qx.execAsk()
BooleanResult(r)
} else if(q.isConstructType()) {
val model2 = ModelFactory.createDefaultModel()
val r = qx.execConstruct(model2)
ModelResult(model2)
} else if(q.isDescribeType()) {
val model2 = ModelFactory.createDefaultModel()
val r = qx.execDescribe(model2)
ModelResult(model2)
} else if(q.isSelectType()) {
val r = qx.execSelect()
TableResult(ResultSet(r))
} else {
ErrorResult("Unsupported query type")
}
}
case None =>
ErrorResult("Query not valid in YuzuQL: " + x.getMessage()) } } }
}
/** A bounded key/id cache with FIFO eviction.  Implementors supply the
  * capacity (`size`) and a `load` function used to compute a missing entry. */
trait SimpleCache {
  // cached entries, plus a queue recording insertion order for eviction
  private val entries = collection.mutable.Map[String, Int]()
  private val insertionOrder = collection.mutable.Queue[String]()
  /** Maximum number of entries held before eviction kicks in. */
  def size : Int
  /** Compute the value for a key that is not cached. */
  def load(key : String) : Int
  /** Return the cached id, loading and caching it on a miss. */
  def get(key : String) = entries.getOrElse(key, {
    val id = load(key)
    put(key, id)
    id
  })
  /** Insert an entry, evicting the oldest-inserted key when over capacity. */
  def put(key : String, id : Int) {
    entries(key) = id
    insertionOrder.enqueue(key)
    if(entries.size > size) {
      entries.remove(insertionOrder.dequeue()) }}
}
| jmccrae/yuzu | scala/src/main/scala/yuzu/triple_backend.scala | Scala | apache-2.0 | 26,748 |
/**
* Copyright: Copyright (C) 2016, ATS Advanced Telematic Systems GmbH
* License: MPL-2.0
*/
package org.genivi.sota.marshalling
import de.heikoseeberger.akkahttpcirce.CirceSupport
/** Single import point for JSON (un)marshalling: combines akka-http-circe's
  * marshallers (CirceSupport) with this project's codec instances
  * (CirceInstances). */
object CirceMarshallingSupport extends CirceSupport with CirceInstances
| PDXostc/rvi_sota_server | common/src/main/scala/org/genivi/sota/marshalling/CirceMarshallingSupport.scala | Scala | mpl-2.0 | 261 |
/*
* Copyright 2013 Eike Kettner
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eknet.county.plot.xchart
import com.xeiam.xchart.StyleManager.ChartType
import com.xeiam.xchart.{Chart, ChartBuilder}
import org.eknet.county.{CounterKey, TimeKey}
/**
* @author Eike Kettner eike.kettner@gmail.com
* @since 25.03.13 20:10
*/
/** Rendering options for an XChart chart built from county counter data.
  *
  * @param width      chart width in pixels
  * @param height     chart height in pixels
  * @param chart      the XChart chart type
  * @param title      chart title
  * @param xaxis      x-axis title
  * @param yaxis      y-axis title
  * @param seriesName derives a series name from a counter key (defaults to
  *                   the head of the key's last segment)
  * @param range      optional inclusive (from, to) time window
  * @param resolution optional consolidation of keys to a coarser resolution
  * @param compact    whether to render compactly
  * @param customizer hook for arbitrary post-configuration of the chart
  */
final case class ChartOptions(width: Int = 600,
                       height: Int = 500,
                       chart: ChartType = ChartType.Bar,
                       title: String = "",
                       xaxis: String = "",
                       yaxis: String = "",
                       seriesName: CounterKey => String = k => k.lastSegment.head,
                       range: Option[(TimeKey, TimeKey)] = None,
                       resolution: Option[TimeKey => TimeKey] = None,
                       compact: Boolean = true,
                       customizer: Chart => Unit = c => ()) {

  /** An XChart builder pre-configured with these options. */
  def toBuilder = {
    new ChartBuilder().chartType(chart)
      .height(height)
      .width(width)
      .title(title)
      .xAxisTitle(xaxis)
      .yAxisTitle(yaxis)
  }

  /** Applies the resolution function, if any, to a single key. */
  def consolidate(key: TimeKey) = resolution match {
    case Some(res) => res(key)
    case _ => key
  }

  /** True if the key lies in the configured range (or no range is set). */
  def inRange(key: TimeKey) = range match {
    case Some(r) => key >= r._1 && key <= r._2
    case _ => true
  }

  /** Restricts keys to the configured range, then consolidates them to the
    * configured resolution (dropping duplicates). */
  def transform(keys: Iterable[TimeKey]) = {
    val k0 = range map { r => keys.filter(k => k >= r._1 && k <= r._2) } getOrElse(keys)
    // FIX: fall back to the range-filtered keys (k0), not the raw input —
    // the previous getOrElse(keys) silently dropped the range filter
    // whenever no resolution function was configured.
    resolution map { r => k0.map(r).toList.distinct } getOrElse(k0)
  }
}
| eikek/county | xchart/src/main/scala/org/eknet/county/plot/xchart/ChartOptions.scala | Scala | apache-2.0 | 2,139 |
package archery
import scala.collection.mutable.ArrayBuffer
import scala.util.Try
import org.scalacheck.Arbitrary._
import org.scalatest._
import prop._
import org.scalacheck._
import Arbitrary.arbitrary
import Check._
/** Property-based tests for the archery R-tree: insertion, removal, spatial
  * search (containment and intersection), nearest-neighbour queries,
  * counting, mapping and structural equality.  Helpers such as `build`,
  * `bound`, `shuffle`, `gaussianPoint` and the Arbitrary instances come from
  * Check._ . */
class RTreeCheck extends PropSpec with Matchers with GeneratorDrivenPropertyChecks {

  property("rtree.insert works") {
    forAll { (tpls: List[(Point, Int)]) =>
      val es = tpls.map { case (p, n) => Entry(p, n) }
      val rt1 = build(es)
      rt1.entries.toSet shouldBe es.toSet
      rt1.values.toSet shouldBe tpls.map(_._2).toSet

      // bulk build and incremental insert must agree
      val rt2 = tpls.foldLeft(RTree.empty[Int]) { case (rt, (p, n)) =>
        rt.insert(p.x, p.y, n)
      }
      rt1 shouldBe rt2
    }
  }

  property("rtree.insertAll works") {
    forAll { (es1: List[Entry[Int]], es2: List[Entry[Int]]) =>
      val rt1 = build(es1 ++ es2)
      val rt2 = build(es1).insertAll(es2)
      rt1 shouldBe rt2
    }
  }

  property("rtree.removeAll works") {
    forAll { (es1: List[Entry[Int]], es2: List[Entry[Int]]) =>
      val rt1 = build(es1)
      val rt2 = build(es1 ++ es2).removeAll(es2)
      rt1 shouldBe rt2
    }
  }

  property("rtree.contains works") {
    forAll { (es: List[Entry[Int]], e: Entry[Int]) =>
      val rt = build(es)
      es.forall(rt.contains) shouldBe true
      es.forall {
        case Entry(Point(x, y), v) => rt.contains(x, y, v)
        case _ => true
      } shouldBe true

      rt.contains(e) shouldBe es.contains(e)
    }
  }

  property("rtree.remove works") {
    forAll { (es: List[Entry[Int]]) =>
      var rt = build(es)
      var size = rt.size

      // very small chance of failing
      rt shouldBe rt.remove(Entry(Point(1234F, 5678F), 0xbad))

      es.foreach { e =>
        rt = rt.remove(e)
        size -= 1
        rt.size shouldBe size
      }
    }
  }

  property("rtree.remove out-of-order") {
    forAll { (es: List[Entry[Int]]) =>
      val buf = ArrayBuffer(es: _*)
      shuffle(buf)
      var rt = build(es)
      while (buf.nonEmpty) {
        buf.toSet shouldBe rt.entries.toSet
        val x = buf.remove(0)
        rt = rt.remove(x)
      }
      buf.toSet shouldBe rt.entries.toSet
    }
  }

  property("rtree.search/count ignores nan/inf") {
    forAll { (rt: RTree[Int]) =>
      // a degenerate query box must match nothing, not crash
      val nil = Seq.empty[Entry[Int]]
      rt.search(Box(Float.PositiveInfinity, 3F, 9F, 9F)) shouldBe nil
      rt.search(Box(2F, Float.NaN, 9F, 9F)) shouldBe nil
      rt.search(Box(2F, 3F, Float.NegativeInfinity, 9F)) shouldBe nil
      rt.search(Box(2F, 3F, 9F, Float.NaN)) shouldBe nil

      rt.count(Box(Float.PositiveInfinity, 3F, 9F, 9F)) shouldBe 0
      rt.count(Box(2F, Float.NaN, 9F, 9F)) shouldBe 0
      rt.count(Box(2F, 3F, Float.NegativeInfinity, 9F)) shouldBe 0
      rt.count(Box(2F, 3F, 9F, Float.NaN)) shouldBe 0
    }
  }

  property("rtree.search works") {
    forAll { (es: List[Entry[Int]], p: Point) =>
      val rt = build(es)

      val box1 = bound(p, 10)
      val results1 = rt.search(box1).toSet
      results1 shouldBe es.filter(e => box1.contains(e.geom)).toSet

      // filtered search must agree with filtering afterwards
      val f = (e: Entry[Int]) => e.value % 2 == 0
      val results1f = rt.search(box1, f).toSet
      results1f shouldBe es.filter(e => box1.contains(e.geom) && f(e)).toSet
      results1f shouldBe results1.filter(f)

      // foldSearch must agree with folding over the search results
      val g = (n: Long, e: Entry[Int]) => n + e.value
      es.foreach { e =>
        val box2 = bound(e.geom, 10)
        val results = rt.search(box2)
        results.toSet shouldBe es.filter(e => box2.contains(e.geom)).toSet
        val x = results.foldLeft(0L)(g)
        val y = rt.foldSearch(box2, 0L)(g)
        if (x != y) {
          println(box2)
          println(rt.pretty)
          println(rt.root.searchIterator(box2, _ => true).toList)
        }
        x shouldBe y
      }
    }
  }

  property("rtree.searchIntersection works") {
    forAll { (es: List[Entry[Int]], p: Point) =>
      val rt = build(es)

      val box1 = bound(p, 10)
      rt.searchIntersection(box1).toSet shouldBe es.filter(e => box1.intersects(e.geom)).toSet

      val f = (e: Entry[Int]) => e.value % 3 != 1
      rt.searchIntersection(box1, f).toSet shouldBe es.filter(e => box1.intersects(e.geom) && f(e)).toSet

      es.foreach { e =>
        val box2 = bound(e.geom, 10)
        rt.searchIntersection(box2).toSet shouldBe es.filter(e => box2.intersects(e.geom)).toSet
      }
    }
  }

  property("rtree.nearest works") {
    forAll { (es: List[Entry[Int]], p: Point) =>
      val rt = build(es)
      if (es.isEmpty) {
        rt.nearest(p) shouldBe None
      } else {
        val e = es.min(Ordering.by((e: Entry[Int]) => e.geom.distance(p)))
        val d = e.geom.distance(p)
        // it's possible that several points are tied for closest
        // in these cases, the distances still must be equal.
        rt.nearest(p).map(_.geom.distance(p)) shouldBe Some(d)
      }
    }
  }

  property("rtree.nearestK works") {
    forAll { (es: List[Entry[Int]], p: Point, k0: Int) =>
      val k = (k0 % 1000).abs
      val rt = build(es)

      val as = es.map(_.geom.distance(p)).sorted.take(k).toVector
      val bs = rt.nearestK(p, k).map(_.geom.distance(p))
      as shouldBe bs
    }
  }

  property("rtree.count works") {
    // deterministic (non-forAll) check against a dense gaussian cloud
    val es = (1 to 10000).map(n => Entry(gaussianPoint, n))
    val rt = build(es)

    val box = gaussianBox
    rt.count(box) shouldBe es.filter(e => box.contains(e.geom)).size

    val box2 = Box(1e10F, 1e10F, 1e11F, 1e11F)
    rt.count(box2) shouldBe es.filter(e => box2.contains(e.geom)).size
  }

  property("rtree.map works") {
    forAll { (es: List[Entry[Int]]) =>
      val f = (x: Int) => x + 1
      val rt = build(es)
      val es2 = es.map(e => Entry(e.geom, f(e.value)))
      rt.map(f) shouldBe build(es2)
    }
  }

  property("rtree equals/hashCode work") {
    forAll { (es1: List[Entry[Int]], e: Entry[Int]) =>
      // equality must be independent of insertion order
      val es2 = ArrayBuffer(es1: _*)
      shuffle(es2)
      val (rt1, rt2) = (build(es1), build(es2))
      rt1 shouldBe rt2
      rt1.hashCode shouldBe rt2.hashCode
      rt1 should not be (999)

      val rt3 = rt1.insert(e)
      rt3 should not be (rt1)
      // this should only have a very small chance of failing,
      // assuming RTree#hashCode is a good hashing function.
      rt3.hashCode should not be (rt1.hashCode)
    }
  }

  property("dense rtree") {
    val es = (1 to 100000).map(n => Entry(gaussianPoint, n))
    val rt = build(es)
    rt.size shouldBe es.size
    es.forall(rt.contains) shouldBe true

    var rt0 = rt
    var size = rt.size
    es.foreach { e =>
      rt0 = rt0.remove(e)
      size -= 1
      rt0.size shouldBe size
    }
  }

  property("sane toString/pretty-printing") {
    forAll { (rt: RTree[Int]) =>
      rt.toString.length should be < 20
      Try(rt.pretty).isSuccess shouldBe true
    }
  }

  // A randomised sequence of inserts/removes applied in parallel to an RTree
  // ("test") and a plain List ("control"); both must always agree.
  sealed trait Action {
    def test(rt: RTree[Int]): RTree[Int]
    def control(es: List[Entry[Int]]): List[Entry[Int]]
  }

  object Action {
    def run(rt: RTree[Int], es: List[Entry[Int]])(as: List[Action]): Unit =
      as match {
        case a :: as =>
          val rt2 = a.test(rt)
          val es2 = a.control(es)
          rt2.entries.toSet shouldBe es2.toSet
          run(rt2, es2)(as)
        case Nil =>
          ()
      }
  }

  case class Insert(e: Entry[Int]) extends Action {
    def test(rt: RTree[Int]): RTree[Int] =
      rt.insert(e)
    def control(es: List[Entry[Int]]): List[Entry[Int]] =
      e :: es
  }

  case class Remove(e: Entry[Int]) extends Action {
    def test(rt: RTree[Int]): RTree[Int] =
      if (rt.contains(e)) rt.remove(e) else rt
    // removes only the FIRST occurrence, matching RTree.remove semantics
    def control(es: List[Entry[Int]]): List[Entry[Int]] =
      es match {
        case Nil => Nil
        case `e` :: t => t
        case h :: t => h :: control(t)
      }
  }

  implicit val arbaction: Arbitrary[Action] =
    Arbitrary(for {
      e <- arbitrary[Entry[Int]]
      b <- arbitrary[Boolean]
    } yield {
      val a: Action = if (b) Insert(e) else Remove(e)
      a
    })

  property("ad-hoc rtree") {
    forAll { (es: List[Entry[Int]], as: List[Action]) =>
      Action.run(build(es), es)(as)
    }
  }

  property("prove coverage of inlined constant") {
    Constants.MaxEntries shouldBe 50
  }
}
| arunma/archery | core/src/test/scala/archery/RTreeCheck.scala | Scala | mit | 8,167 |
package breeze.linalg
/** A Set view of the (row, column) key pairs of a two-level counter map.
  * Membership and iteration are backed directly by the underlying mutable
  * map; incl/excl escape to an immutable Set snapshot. */
private class Counter2KeySet[K1, K2, V](data: scala.collection.mutable.Map[K1, Counter[K2, V]]) extends Set[(K1, K2)] {
  def contains(elem: (K1, K2)): Boolean =
    data.get(elem._1).exists(_.contains(elem._2))
  def incl(elem: (K1, K2)): Set[(K1, K2)] = iterator.toSet + elem
  def excl(elem: (K1, K2)): Set[(K1, K2)] = iterator.toSet - elem
  def iterator: Iterator[(K1, K2)] =
    data.iterator.flatMap { case (k1, counter) => counter.keysIterator.map((k1, _)) }
}
| scalanlp/breeze | math/src/main/scala_2.13+/breeze/linalg/Counter2KeySet.scala | Scala | apache-2.0 | 490 |
/*
* This file is part of the regex project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.cmu.feature.vregex.vm
import scala.collection.immutable.Queue
/** An immutable queue of VM threads that merges threads sharing the same
  * program counter: when a thread is enqueued and an existing thread has the
  * same pc and matched state, their contexts are OR-ed together instead of
  * adding a duplicate.  (The original hashset of pcs was dropped; existence
  * is now checked by a linear scan over the queue — see the commented-out
  * members below.)
  *
  * @author Lucas Satabin
  */
class VThreadQueue(val threads: Queue[VThread]) {

  /** Enqueue a thread, OR-ing its context into any existing thread with the
    * same pc and matched flag; appends it otherwise. */
  def enqueue(newt: VThread): VThreadQueue = {
    var foundPC = false;
    var q = threads.map(t => if (t.pc == newt.pc && t.matched==newt.matched) {foundPC =true; VThread(t.startIdx, t.pc, t.ctx or newt.ctx, t.saved)} else t)
    if (!foundPC)
      q=q.enqueue(newt)
    new VThreadQueue(q)
  }

//  def dequeue: (VThread, VThreadQueue) = {
//    val (thread, rest) = threads.dequeue
//    (thread, new VThreadQueue(rest, pcs - thread.pc))
//  }

//  def isEmpty: Boolean =
//    threads.isEmpty
//
//  def nonEmpty: Boolean =
//    threads.nonEmpty
//
//  def contains(t: VThread): Boolean =
//    pcs.contains(t.pc)

  /** True if at least one queued thread has not matched yet. */
  def hasUnmatched: Boolean = threads.filterNot(_.isMatched).nonEmpty

  /** All queued threads that have matched. */
  def matched = threads.filter(_.isMatched)
}
object VThreadQueue {
  /** An empty thread queue. */
  def apply(): VThreadQueue =
    new VThreadQueue(Queue.empty)
}
| ckaestne/regex | src/main/scala/edu/cmu/feature/vregex/vm/VThreadQueue.scala | Scala | apache-2.0 | 1,696 |
package domain.modelgeneration
/**
* GraPHPizer source code analytics engine
* Copyright (C) 2015 Martin Helmich <kontakt@martin-helmich.de>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import domain.model.AstEdgeType._
import org.neo4j.graphdb.traversal.{Evaluation, Evaluator, Uniqueness}
import org.neo4j.graphdb.{Direction, Node, Path}
import persistence.BackendInterface
import play.api.Logger
import scala.collection.JavaConversions._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
import scala.language.postfixOps
import persistence.NodeWrappers._
class NamespaceResolver(backend: BackendInterface) {
protected val logger = Logger("namespace-resolver")
def run(): Unit = {
val f1 = Future { treatNamespacedNodes() }
val f2 = Future { treatUnnamespacedNodes() }
Await.result(f1, Duration.Inf)
Await.result(f2, Duration.Inf)
}
protected def treatUnnamespacedNodes(): Unit = {
backend transactional { (b, t) =>
val cypher = """MATCH (c:Collection:File)-[:HAS]->(ns)-[:SUB|HAS*]->(n:Name)
WHERE NOT ns:Stmt_Namespace
SET n.fullName = n.allParts"""
b.execute(cypher).run().close()
}
}
protected def treatNamespacedNodes(): Future[Unit] = {
backend transactional { (b, t) =>
b.execute("MATCH (name:Name_FullyQualified) SET name.fullName = name.allParts").run().close()
b.execute("MATCH (ns:Stmt_Namespace)-[:SUB {type: \"name\"}]->(name) SET name.fullName = name.allParts").run().close()
val label = backend createLabel "Name"
val evaluator = (path: Path) => {
if (path.end ? label) {
(!(path.endNode ? "fullName"), false)
} else (false, true)
}
val cypher = """MATCH (ns:Stmt_Namespace)
OPTIONAL MATCH (ns)-[:SUB {type: "stmts"}]->(s)-->(:Stmt_Use)-->()-->(u:Stmt_UseUse)
RETURN ns, collect(u) AS imports"""
val traversal = backend
.traversal
.depthFirst()
.relationships(SUB, Direction.OUTGOING)
.relationships(HAS, Direction.OUTGOING)
.evaluator(evaluator)
.uniqueness(Uniqueness.NODE_GLOBAL)
b execute cypher foreach { (ns: Node, imports: java.util.List[Node]) =>
val Some(namespaceName) = ns[String]("name")
val knownImports = imports map { p => (p.property[String]("alias").get, p.property[String]("name").get) } toMap
ns >--> SUB filter { r =>
r("type") match { case Some("stmts") => true case _ => false }
} map { _.getEndNode } foreach { root =>
traversal traverse root map { _.endNode } filter { _ ? "allParts" } foreach { nameNode =>
val Some(name) = nameNode[String]("allParts")
knownImports get name match {
case Some(s: String) => nameNode("fullName") = s
case _ => nameNode("fullName") = s"$namespaceName\\$name"
}
}
}
}
null
}
}
}
| martin-helmich/graphpizer-server | app/domain/modelgeneration/NamespaceResolver.scala | Scala | gpl-3.0 | 3,692 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.sources.wmstrategies
import org.apache.flink.streaming.api.watermark.Watermark
import org.apache.flink.types.Row
/**
* Provides a strategy to generate watermarks for a rowtime attribute.
*
* A watermark strategy is either a [[PeriodicWatermarkAssigner]] or
* [[PunctuatedWatermarkAssigner]].
*
*/
sealed abstract class WatermarkStrategy extends Serializable
/** A periodic watermark assigner.
 *
 * Callers push event timestamps via [[nextTimestamp]] and read the resulting
 * watermark via [[getWatermark]] (presumably on a periodic schedule, per the
 * class name — confirm against the runtime caller).
 */
abstract class PeriodicWatermarkAssigner extends WatermarkStrategy {
  /**
   * Updates the assigner with the next timestamp.
   *
   * @param timestamp The next timestamp to update the assigner.
   */
  def nextTimestamp(timestamp: Long): Unit
  /**
   * Returns the current watermark.
   *
   * @return The current watermark.
   */
  def getWatermark: Watermark
}
/** A punctuated watermark assigner.
 *
 * Unlike [[PeriodicWatermarkAssigner]], a watermark is derived per row rather
 * than from accumulated state.
 */
abstract class PunctuatedWatermarkAssigner extends WatermarkStrategy {
  /**
   * Returns the watermark for the current row or null if no watermark should be generated.
   *
   * @param row The current row.
   * @param timestamp The value of the timestamp attribute for the row.
   * @return The watermark for this row or null if no watermark should be generated.
   */
  def getWatermark(row: Row, timestamp: Long): Watermark
}
/** A strategy which indicates the watermarks should be preserved from the underlying datastream. */
class PreserveWatermarks extends WatermarkStrategy
object PreserveWatermarks {
  /** Shared instance; the class carries no state, so a single instance suffices. */
  val INSTANCE: PreserveWatermarks = new PreserveWatermarks
}
| zimmermatt/flink | flink-libraries/flink-table/src/main/scala/org/apache/flink/table/sources/wmstrategies/watermarkStrategies.scala | Scala | apache-2.0 | 2,344 |
// These are meant to be typed into the REPL. You can also run
// scala -Xnojline < repl-session.scala to run them all at once.
class Person {
  // Mutable backing fields, filled in by whichever constructor runs.
  private var name = ""
  private var age = 0

  /** Auxiliary constructor: sets only the name. */
  def this(name: String) {
    // Every auxiliary constructor must start by invoking an earlier one;
    // this one delegates to the primary constructor.
    this()
    this.name = name
  }

  /** Auxiliary constructor: sets name and age by chaining through the one above. */
  def this(name: String, age: Int) {
    this(name)
    this.age = age
  }

  /** Human-readable summary of this person. */
  def description = s"$name is $age years old"
}
val p1 = new Person // Primary constructor: name = "", age = 0
val p2 = new Person("Fred") // First auxiliary constructor: age stays 0
val p3 = new Person("Fred", 42) // Second auxiliary constructor: both fields set
// Each call below formats "<name> is <age> years old" for the instance.
p1.description
p2.description
p3.description
| nmt1994/Scala-Practica | src/week1/codes/ch05/sec06/repl-session.scala | Scala | mit | 730 |
/*
* Copyright 2017 Datamountaineer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datamountaineer.streamreactor.connect.elastic7.indexname
import scala.annotation.tailrec
/** Signals a malformed custom index-name string (e.g. a closing date-time marker with no matching opening one). */
class InvalidCustomIndexNameException(message: String) extends RuntimeException(message)
case class CustomIndexName(fragments: Vector[IndexNameFragment]) {
  /** Renders the full index name by concatenating every fragment's text, in order. */
  override def toString: String = {
    val renderedParts = fragments.map(fragment => fragment.getFragment)
    renderedParts.mkString
  }
}
object CustomIndexName {

  /**
   * Recursively splits the input characters into text and date-time fragments.
   *
   * Plain characters accumulate in `currentFragment` until a
   * [[DateTimeFragment.OpeningChar]] is found; the characters up to the next
   * [[DateTimeFragment.ClosingChar]] become a [[DateTimeFragment]]. Empty
   * fragments are dropped via the project's `toOption` extension on String
   * (assumed to map "" to None — TODO confirm against the package object).
   *
   * @param remainingChars input still to be consumed
   * @param currentFragment text accumulated since the last fragment boundary
   * @param results fragments parsed so far (None entries are filtered at the end)
   * @throws InvalidCustomIndexNameException if a closing marker appears without
   *         a preceding opening one, or an opening marker is never closed
   */
  @tailrec
  private def parseIndexName(remainingChars: Vector[Char], currentFragment: StringBuilder, results: Vector[Option[IndexNameFragment]]): Vector[IndexNameFragment] =
    remainingChars match {
      case head +: rest => head match {
        case DateTimeFragment.OpeningChar =>
          val (dateTimeFormat, afterDateTimeFormatIncludingClosingChar) = rest.span { _ != DateTimeFragment.ClosingChar }
          // Guard: span found no closing character, i.e. the date-time section
          // is unterminated. Without this check, `.tail` below would fail with
          // an unhelpful UnsupportedOperationException on the empty Vector.
          if (afterDateTimeFormatIncludingClosingChar.isEmpty)
            throw new InvalidCustomIndexNameException(
              s"Found opening '${DateTimeFragment.OpeningChar}' but no closing character")
          val afterDateTimeFormat = afterDateTimeFormatIncludingClosingChar.tail
          val maybeCurrentFragment = currentFragment.mkString.toOption
          val maybeDateTimeFormat = dateTimeFormat.mkString.toOption
          val newResultsWithDateTimeFragment = results :+ maybeCurrentFragment.map(TextFragment.apply) :+ maybeDateTimeFormat.map(DateTimeFragment(_))
          parseIndexName(afterDateTimeFormat, new StringBuilder, newResultsWithDateTimeFragment)
        case DateTimeFragment.ClosingChar => throw new InvalidCustomIndexNameException(s"Found closing '${DateTimeFragment.ClosingChar}' but no opening character")
        case anyOtherChar => parseIndexName(rest, currentFragment.append(anyOtherChar), results)
      }
      case Vector() =>
        // End of input: flush any pending text fragment and drop None entries.
        val maybeCurrentFragment = currentFragment.mkString.toOption
        (results :+ maybeCurrentFragment.map(TextFragment.apply)).flatten
    }

  /** Parses a raw index-name string into a [[CustomIndexName]]. */
  def parseIndexName(indexName: String): CustomIndexName =
    CustomIndexName(parseIndexName(indexName.toVector, new StringBuilder, Vector.empty))
}
| datamountaineer/stream-reactor | kafka-connect-elastic7/src/main/scala/com/datamountaineer/streamreactor/connect/elastic7/indexname/CustomIndexName.scala | Scala | apache-2.0 | 2,484 |
/* ___ _ ___ _ _ *\\
** / __| |/ (_) | | The SKilL Generator **
** \\__ \\ ' <| | | |__ (c) 2013-16 University of Stuttgart **
** |___/_|\\_\\_|_|____| see LICENSE **
\\* */
package de.ust.skill.parser
import java.io.File
import scala.collection.JavaConversions.asScalaBuffer
import scala.language.implicitConversions
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
/**
 * Non-generic front-end tests for the SKilL parser, mostly regressions.
 * @author Timm Felden
 */
@RunWith(classOf[JUnitRunner])
class ParserTest extends FunSuite {
  // Resolves a bare resource path against the front-end test-resource directory.
  implicit private def basePath(path : String) : File = new File("src/test/resources/frontend" + path);
  // A specification parses successfully iff it yields at least one type name.
  private def check(filename : String) = {
    assert(0 != Parser.process(filename).allTypeNames.size)
  }
  test("bad hints") {
    // An unknown hint name must be rejected with a precise error message.
    val e = intercept[de.ust.skill.ir.ParseException] { check("/ParseException/badHints.skill") }
    assert("NotAHint is not the name of a hint." === e.getMessage())
  }
  test("restrictions") {
    check("/restrictions.skill")
  }
  // An empty specification yields no user types once special declarations are removed.
  test("empty")(assert(0 === Parser.process("/empty.skill").removeSpecialDeclarations().getUsertypes().size))
  test("strict type ordered IR") {
    // The IR must list user types in type order (see assertion message below).
    val IR = Parser.process("/typeOrderIR.skill").getUsertypes()
    val order = IR.map(_.getSkillName).mkString("")
    assert(order == "abdc", order + " is not in type order!")
  }
  test("regression: casing of user types") {
    val ir = Parser.process("/regressionCasing.skill").getUsertypes
    assert(2 === ir.size)
    // note: this is a valid test, because IR has to be in type order
    assert(ir.get(0).getSkillName === "message")
    assert(ir.get(1).getSkillName === "datedmessage")
  }
  test("regression: report missing types") {
    val e = intercept[de.ust.skill.ir.ParseException] {
      Parser.process("/ParseException/missingTypeCausedBySpelling.skill", false, false).allTypeNames.size
    }
    // Path separators in the message are normalized before the comparison.
    assert("""The type "MessSage" parent of DatedMessage is unknown!
Declaration in src/test/resources/frontend/ParseException/missingTypeCausedBySpelling.skill.
Did you forget to include MessSage.skill?
Known types are: Message, DatedMessage""" === e.getMessage().replaceAll("\\\\\\\\", "/"))
  }
  test("regression: comments - declaration") {
    // A type comment must be re-formatted into the expected canonical layout.
    val d = Parser.process("/comments.skill").getUsertypes.get(0)
    assert(d.getComment.format("/**\\n", " *", 120, " */") === """/**
 * this is a class comment with ugly formatting but completely legal. We want to have this in a single line.
 */""")
  }
  test("regression: comments - field") {
    // Field comments are checked per field, selected by camel-cased field name.
    val d = Parser.process("/comments.skill").getUsertypes.get(0)
    for (f ← d.getFields()) {
      println(f.getComment)
      if (f.getName().camel == "commentWithStar") assert(f.getComment.format("/**\\n", " *", 120, " */") === """/**
 * * <- the only real star here!
 */""")
      else if (f.getName().camel == "commentWithoutStars") assert(f.getComment.format("/**\\n", " *", 120, " */") === """/**
 * funny formated comment .
 */""")
    }
  }
}
| skill-lang/skill | src/test/scala/de/ust/skill/parser/ParserTest.scala | Scala | bsd-3-clause | 3,227 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.streaming
import java.io.File
import java.sql.Date
import org.apache.commons.io.FileUtils
import org.scalatest.BeforeAndAfterAll
import org.scalatest.exceptions.TestFailedException
import org.apache.spark.SparkException
import org.apache.spark.api.java.function.FlatMapGroupsWithStateFunction
import org.apache.spark.sql.Encoder
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{GenericInternalRow, UnsafeProjection, UnsafeRow}
import org.apache.spark.sql.catalyst.plans.logical.FlatMapGroupsWithState
import org.apache.spark.sql.catalyst.plans.physical.UnknownPartitioning
import org.apache.spark.sql.catalyst.streaming.InternalOutputModes._
import org.apache.spark.sql.execution.RDDScanExec
import org.apache.spark.sql.execution.streaming._
import org.apache.spark.sql.execution.streaming.state.{FlatMapGroupsWithStateExecHelper, MemoryStateStore, StateStore, StateStoreId, StateStoreMetrics, UnsafeRowPair}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.util.StreamManualClock
import org.apache.spark.sql.types.{DataType, IntegerType}
import org.apache.spark.util.Utils
/** Class to check custom state types */
case class RunningCount(count: Long)
// Key/count pair; declared alongside RunningCount for state-type tests
// (not referenced in the visible portion of this file).
case class Result(key: Long, count: Int)
class FlatMapGroupsWithStateSuite extends StateStoreMetricsTest {
import testImplicits._
import GroupStateImpl._
import GroupStateTimeout._
import FlatMapGroupsWithStateSuite._
test("GroupState - get, exists, update, remove") {
var state: GroupStateImpl[String] = null
def testState(
expectedData: Option[String],
shouldBeUpdated: Boolean = false,
shouldBeRemoved: Boolean = false): Unit = {
if (expectedData.isDefined) {
assert(state.exists)
assert(state.get === expectedData.get)
} else {
assert(!state.exists)
intercept[NoSuchElementException] {
state.get
}
}
assert(state.getOption === expectedData)
assert(state.hasUpdated === shouldBeUpdated)
assert(state.hasRemoved === shouldBeRemoved)
}
// === Tests for state in streaming queries ===
// Updating empty state
state = GroupStateImpl.createForStreaming(
None, 1, 1, NoTimeout, hasTimedOut = false, watermarkPresent = false)
testState(None)
state.update("")
testState(Some(""), shouldBeUpdated = true)
// Updating exiting state
state = GroupStateImpl.createForStreaming(
Some("2"), 1, 1, NoTimeout, hasTimedOut = false, watermarkPresent = false)
testState(Some("2"))
state.update("3")
testState(Some("3"), shouldBeUpdated = true)
// Removing state
state.remove()
testState(None, shouldBeRemoved = true, shouldBeUpdated = false)
state.remove() // should be still callable
state.update("4")
testState(Some("4"), shouldBeRemoved = false, shouldBeUpdated = true)
// Updating by null throw exception
intercept[IllegalArgumentException] {
state.update(null)
}
}
test("GroupState - setTimeout - with NoTimeout") {
for (initValue <- Seq(None, Some(5))) {
val states = Seq(
GroupStateImpl.createForStreaming(
initValue, 1000, 1000, NoTimeout, hasTimedOut = false, watermarkPresent = false),
GroupStateImpl.createForBatch(NoTimeout, watermarkPresent = false)
)
for (state <- states) {
// for streaming queries
testTimeoutDurationNotAllowed[UnsupportedOperationException](state)
testTimeoutTimestampNotAllowed[UnsupportedOperationException](state)
// for batch queries
testTimeoutDurationNotAllowed[UnsupportedOperationException](state)
testTimeoutTimestampNotAllowed[UnsupportedOperationException](state)
}
}
}
test("GroupState - setTimeout - with ProcessingTimeTimeout") {
// for streaming queries
var state: GroupStateImpl[Int] = GroupStateImpl.createForStreaming(
None, 1000, 1000, ProcessingTimeTimeout, hasTimedOut = false, watermarkPresent = false)
assert(state.getTimeoutTimestamp === NO_TIMESTAMP)
state.setTimeoutDuration("-1 month 31 days 1 second")
assert(state.getTimeoutTimestamp === 2000)
state.setTimeoutDuration(500)
assert(state.getTimeoutTimestamp === 1500) // can be set without initializing state
testTimeoutTimestampNotAllowed[UnsupportedOperationException](state)
state.update(5)
assert(state.getTimeoutTimestamp === 1500) // does not change
state.setTimeoutDuration(1000)
assert(state.getTimeoutTimestamp === 2000)
state.setTimeoutDuration("2 second")
assert(state.getTimeoutTimestamp === 3000)
testTimeoutTimestampNotAllowed[UnsupportedOperationException](state)
state.remove()
assert(state.getTimeoutTimestamp === 3000) // does not change
state.setTimeoutDuration(500) // can still be set
assert(state.getTimeoutTimestamp === 1500)
testTimeoutTimestampNotAllowed[UnsupportedOperationException](state)
// for batch queries
state = GroupStateImpl.createForBatch(
ProcessingTimeTimeout, watermarkPresent = false).asInstanceOf[GroupStateImpl[Int]]
assert(state.getTimeoutTimestamp === NO_TIMESTAMP)
state.setTimeoutDuration(500)
testTimeoutTimestampNotAllowed[UnsupportedOperationException](state)
state.update(5)
state.setTimeoutDuration(1000)
state.setTimeoutDuration("2 second")
testTimeoutTimestampNotAllowed[UnsupportedOperationException](state)
state.remove()
state.setTimeoutDuration(500)
testTimeoutTimestampNotAllowed[UnsupportedOperationException](state)
}
test("GroupState - setTimeout - with EventTimeTimeout") {
var state: GroupStateImpl[Int] = GroupStateImpl.createForStreaming(
None, 1000, 1000, EventTimeTimeout, false, watermarkPresent = true)
assert(state.getTimeoutTimestamp === NO_TIMESTAMP)
testTimeoutDurationNotAllowed[UnsupportedOperationException](state)
state.setTimeoutTimestamp(5000)
assert(state.getTimeoutTimestamp === 5000) // can be set without initializing state
state.update(5)
assert(state.getTimeoutTimestamp === 5000) // does not change
state.setTimeoutTimestamp(10000)
assert(state.getTimeoutTimestamp === 10000)
state.setTimeoutTimestamp(new Date(20000))
assert(state.getTimeoutTimestamp === 20000)
testTimeoutDurationNotAllowed[UnsupportedOperationException](state)
state.remove()
assert(state.getTimeoutTimestamp === 20000)
state.setTimeoutTimestamp(5000)
assert(state.getTimeoutTimestamp === 5000) // can be set after removing state
testTimeoutDurationNotAllowed[UnsupportedOperationException](state)
// for batch queries
state = GroupStateImpl.createForBatch(EventTimeTimeout, watermarkPresent = false)
.asInstanceOf[GroupStateImpl[Int]]
assert(state.getTimeoutTimestamp === NO_TIMESTAMP)
testTimeoutDurationNotAllowed[UnsupportedOperationException](state)
state.setTimeoutTimestamp(5000)
state.update(5)
state.setTimeoutTimestamp(10000)
state.setTimeoutTimestamp(new Date(20000))
testTimeoutDurationNotAllowed[UnsupportedOperationException](state)
state.remove()
state.setTimeoutTimestamp(5000)
testTimeoutDurationNotAllowed[UnsupportedOperationException](state)
}
test("GroupState - illegal params to setTimeout") {
var state: GroupStateImpl[Int] = null
// Test setTimeout****() with illegal values
def testIllegalTimeout(body: => Unit): Unit = {
intercept[IllegalArgumentException] {
body
}
assert(state.getTimeoutTimestamp === NO_TIMESTAMP)
}
state = GroupStateImpl.createForStreaming(
Some(5), 1000, 1000, ProcessingTimeTimeout, hasTimedOut = false, watermarkPresent = false)
testIllegalTimeout {
state.setTimeoutDuration(-1000)
}
testIllegalTimeout {
state.setTimeoutDuration(0)
}
testIllegalTimeout {
state.setTimeoutDuration("-2 second")
}
testIllegalTimeout {
state.setTimeoutDuration("-1 month")
}
testIllegalTimeout {
state.setTimeoutDuration("1 month -31 day")
}
state = GroupStateImpl.createForStreaming(
Some(5), 1000, 1000, EventTimeTimeout, hasTimedOut = false, watermarkPresent = false)
testIllegalTimeout {
state.setTimeoutTimestamp(-10000)
}
testIllegalTimeout {
state.setTimeoutTimestamp(10000, "-3 second")
}
testIllegalTimeout {
state.setTimeoutTimestamp(10000, "-1 month")
}
testIllegalTimeout {
state.setTimeoutTimestamp(10000, "1 month -32 day")
}
testIllegalTimeout {
state.setTimeoutTimestamp(new Date(-10000))
}
testIllegalTimeout {
state.setTimeoutTimestamp(new Date(-10000), "-3 second")
}
testIllegalTimeout {
state.setTimeoutTimestamp(new Date(-10000), "-1 month")
}
testIllegalTimeout {
state.setTimeoutTimestamp(new Date(-10000), "1 month -32 day")
}
}
test("GroupState - hasTimedOut") {
for (timeoutConf <- Seq(NoTimeout, ProcessingTimeTimeout, EventTimeTimeout)) {
// for streaming queries
for (initState <- Seq(None, Some(5))) {
val state1 = GroupStateImpl.createForStreaming(
initState, 1000, 1000, timeoutConf, hasTimedOut = false, watermarkPresent = false)
assert(state1.hasTimedOut === false)
val state2 = GroupStateImpl.createForStreaming(
initState, 1000, 1000, timeoutConf, hasTimedOut = true, watermarkPresent = false)
assert(state2.hasTimedOut)
}
// for batch queries
assert(
GroupStateImpl.createForBatch(timeoutConf, watermarkPresent = false).hasTimedOut === false)
}
}
test("GroupState - getCurrentWatermarkMs") {
def streamingState(timeoutConf: GroupStateTimeout, watermark: Option[Long]): GroupState[Int] = {
GroupStateImpl.createForStreaming(
None, 1000, watermark.getOrElse(-1), timeoutConf,
hasTimedOut = false, watermark.nonEmpty)
}
def batchState(timeoutConf: GroupStateTimeout, watermarkPresent: Boolean): GroupState[Any] = {
GroupStateImpl.createForBatch(timeoutConf, watermarkPresent)
}
def assertWrongTimeoutError(test: => Unit): Unit = {
val e = intercept[UnsupportedOperationException] { test }
assert(e.getMessage.contains(
"Cannot get event time watermark timestamp without setting watermark"))
}
for (timeoutConf <- Seq(NoTimeout, EventTimeTimeout, ProcessingTimeTimeout)) {
// Tests for getCurrentWatermarkMs in streaming queries
assertWrongTimeoutError { streamingState(timeoutConf, None).getCurrentWatermarkMs() }
assert(streamingState(timeoutConf, Some(1000)).getCurrentWatermarkMs() === 1000)
assert(streamingState(timeoutConf, Some(2000)).getCurrentWatermarkMs() === 2000)
// Tests for getCurrentWatermarkMs in batch queries
assertWrongTimeoutError {
batchState(timeoutConf, watermarkPresent = false).getCurrentWatermarkMs()
}
assert(batchState(timeoutConf, watermarkPresent = true).getCurrentWatermarkMs() === -1)
}
}
test("GroupState - getCurrentProcessingTimeMs") {
def streamingState(
timeoutConf: GroupStateTimeout,
procTime: Long,
watermarkPresent: Boolean): GroupState[Int] = {
GroupStateImpl.createForStreaming(
None, procTime, -1, timeoutConf, hasTimedOut = false, watermarkPresent = false)
}
def batchState(timeoutConf: GroupStateTimeout, watermarkPresent: Boolean): GroupState[Any] = {
GroupStateImpl.createForBatch(timeoutConf, watermarkPresent)
}
for (timeoutConf <- Seq(NoTimeout, EventTimeTimeout, ProcessingTimeTimeout)) {
for (watermarkPresent <- Seq(false, true)) {
// Tests for getCurrentProcessingTimeMs in streaming queries
assert(streamingState(timeoutConf, NO_TIMESTAMP, watermarkPresent)
.getCurrentProcessingTimeMs() === -1)
assert(streamingState(timeoutConf, 1000, watermarkPresent)
.getCurrentProcessingTimeMs() === 1000)
assert(streamingState(timeoutConf, 2000, watermarkPresent)
.getCurrentProcessingTimeMs() === 2000)
// Tests for getCurrentProcessingTimeMs in batch queries
val currentTime = System.currentTimeMillis()
assert(batchState(timeoutConf, watermarkPresent).getCurrentProcessingTimeMs >= currentTime)
}
}
}
test("GroupState - primitive type") {
var intState = GroupStateImpl.createForStreaming[Int](
None, 1000, 1000, NoTimeout, hasTimedOut = false, watermarkPresent = false)
intercept[NoSuchElementException] {
intState.get
}
assert(intState.getOption === None)
intState = GroupStateImpl.createForStreaming[Int](
Some(10), 1000, 1000, NoTimeout, hasTimedOut = false, watermarkPresent = false)
assert(intState.get == 10)
intState.update(0)
assert(intState.get == 0)
intState.remove()
intercept[NoSuchElementException] {
intState.get
}
}
// Values used for testing InputProcessor
val currentBatchTimestamp = 1000
val currentBatchWatermark = 1000
val beforeTimeoutThreshold = 999
val afterTimeoutThreshold = 1001
// Tests for InputProcessor.processNewData() when timeout = NoTimeout
for (priorState <- Seq(None, Some(0))) {
val priorStateStr = if (priorState.nonEmpty) "prior state set" else "no prior state"
val testName = s"NoTimeout - $priorStateStr - "
testStateUpdateWithData(
testName + "no update",
stateUpdates = state => {
assert(state.getCurrentProcessingTimeMs() === currentBatchTimestamp)
intercept[Exception] { state.getCurrentWatermarkMs() } // watermark not specified
/* no updates */
},
timeoutConf = GroupStateTimeout.NoTimeout,
priorState = priorState,
expectedState = priorState) // should not change
testStateUpdateWithData(
testName + "state updated",
stateUpdates = state => { state.update(5) },
timeoutConf = GroupStateTimeout.NoTimeout,
priorState = priorState,
expectedState = Some(5)) // should change
testStateUpdateWithData(
testName + "state removed",
stateUpdates = state => { state.remove() },
timeoutConf = GroupStateTimeout.NoTimeout,
priorState = priorState,
expectedState = None) // should be removed
}
// Tests for InputProcessor.processTimedOutState() when timeout != NoTimeout
for (priorState <- Seq(None, Some(0))) {
for (priorTimeoutTimestamp <- Seq(NO_TIMESTAMP, 1000)) {
var testName = ""
if (priorState.nonEmpty) {
testName += "prior state set, "
if (priorTimeoutTimestamp == 1000) {
testName += "prior timeout set"
} else {
testName += "no prior timeout"
}
} else {
testName += "no prior state"
}
for (timeoutConf <- Seq(ProcessingTimeTimeout, EventTimeTimeout)) {
testStateUpdateWithData(
s"$timeoutConf - $testName - no update",
stateUpdates = state => {
assert(state.getCurrentProcessingTimeMs() === currentBatchTimestamp)
intercept[Exception] { state.getCurrentWatermarkMs() } // watermark not specified
/* no updates */
},
timeoutConf = timeoutConf,
priorState = priorState,
priorTimeoutTimestamp = priorTimeoutTimestamp,
expectedState = priorState, // state should not change
expectedTimeoutTimestamp = NO_TIMESTAMP) // timestamp should be reset
testStateUpdateWithData(
s"$timeoutConf - $testName - state updated",
stateUpdates = state => { state.update(5) },
timeoutConf = timeoutConf,
priorState = priorState,
priorTimeoutTimestamp = priorTimeoutTimestamp,
expectedState = Some(5), // state should change
expectedTimeoutTimestamp = NO_TIMESTAMP) // timestamp should be reset
testStateUpdateWithData(
s"$timeoutConf - $testName - state removed",
stateUpdates = state => { state.remove() },
timeoutConf = timeoutConf,
priorState = priorState,
priorTimeoutTimestamp = priorTimeoutTimestamp,
expectedState = None) // state should be removed
}
// Tests with ProcessingTimeTimeout
if (priorState == None) {
testStateUpdateWithData(
s"ProcessingTimeTimeout - $testName - timeout updated without initializing state",
stateUpdates = state => { state.setTimeoutDuration(5000) },
timeoutConf = ProcessingTimeTimeout,
priorState = None,
priorTimeoutTimestamp = priorTimeoutTimestamp,
expectedState = None,
expectedTimeoutTimestamp = currentBatchTimestamp + 5000)
}
testStateUpdateWithData(
s"ProcessingTimeTimeout - $testName - state and timeout duration updated",
stateUpdates =
(state: GroupState[Int]) => { state.update(5); state.setTimeoutDuration(5000) },
timeoutConf = ProcessingTimeTimeout,
priorState = priorState,
priorTimeoutTimestamp = priorTimeoutTimestamp,
expectedState = Some(5), // state should change
expectedTimeoutTimestamp = currentBatchTimestamp + 5000) // timestamp should change
testStateUpdateWithData(
s"ProcessingTimeTimeout - $testName - timeout updated after state removed",
stateUpdates = state => { state.remove(); state.setTimeoutDuration(5000) },
timeoutConf = ProcessingTimeTimeout,
priorState = priorState,
priorTimeoutTimestamp = priorTimeoutTimestamp,
expectedState = None,
expectedTimeoutTimestamp = currentBatchTimestamp + 5000)
// Tests with EventTimeTimeout
if (priorState == None) {
testStateUpdateWithData(
s"EventTimeTimeout - $testName - setting timeout without init state not allowed",
stateUpdates = state => {
state.setTimeoutTimestamp(10000)
},
timeoutConf = EventTimeTimeout,
priorState = None,
priorTimeoutTimestamp = priorTimeoutTimestamp,
expectedState = None,
expectedTimeoutTimestamp = 10000)
}
testStateUpdateWithData(
s"EventTimeTimeout - $testName - state and timeout timestamp updated",
stateUpdates =
(state: GroupState[Int]) => { state.update(5); state.setTimeoutTimestamp(5000) },
timeoutConf = EventTimeTimeout,
priorState = priorState,
priorTimeoutTimestamp = priorTimeoutTimestamp,
expectedState = Some(5), // state should change
expectedTimeoutTimestamp = 5000) // timestamp should change
testStateUpdateWithData(
s"EventTimeTimeout - $testName - timeout timestamp updated to before watermark",
stateUpdates =
(state: GroupState[Int]) => {
state.update(5)
intercept[IllegalArgumentException] {
state.setTimeoutTimestamp(currentBatchWatermark - 1) // try to set to < watermark
}
},
timeoutConf = EventTimeTimeout,
priorState = priorState,
priorTimeoutTimestamp = priorTimeoutTimestamp,
expectedState = Some(5), // state should change
expectedTimeoutTimestamp = NO_TIMESTAMP) // timestamp should not update
testStateUpdateWithData(
s"EventTimeTimeout - $testName - setting timeout with state removal not allowed",
stateUpdates = state => {
state.remove(); state.setTimeoutTimestamp(10000)
},
timeoutConf = EventTimeTimeout,
priorState = priorState,
priorTimeoutTimestamp = priorTimeoutTimestamp,
expectedState = None,
expectedTimeoutTimestamp = 10000)
}
}
// Tests for InputProcessor.processTimedOutState()
val preTimeoutState = Some(5)
for (timeoutConf <- Seq(ProcessingTimeTimeout, EventTimeTimeout)) {
testStateUpdateWithTimeout(
s"$timeoutConf - should not timeout",
stateUpdates = state => { assert(false, "function called without timeout") },
timeoutConf = timeoutConf,
priorTimeoutTimestamp = afterTimeoutThreshold,
expectedState = preTimeoutState, // state should not change
expectedTimeoutTimestamp = afterTimeoutThreshold) // timestamp should not change
testStateUpdateWithTimeout(
s"$timeoutConf - should timeout - no update/remove",
stateUpdates = state => {
assert(state.getCurrentProcessingTimeMs() === currentBatchTimestamp)
intercept[Exception] { state.getCurrentWatermarkMs() } // watermark not specified
/* no updates */
},
timeoutConf = timeoutConf,
priorTimeoutTimestamp = beforeTimeoutThreshold,
expectedState = preTimeoutState, // state should not change
expectedTimeoutTimestamp = NO_TIMESTAMP) // timestamp should be reset
testStateUpdateWithTimeout(
s"$timeoutConf - should timeout - update state",
stateUpdates = state => { state.update(5) },
timeoutConf = timeoutConf,
priorTimeoutTimestamp = beforeTimeoutThreshold,
expectedState = Some(5), // state should change
expectedTimeoutTimestamp = NO_TIMESTAMP) // timestamp should be reset
testStateUpdateWithTimeout(
s"$timeoutConf - should timeout - remove state",
stateUpdates = state => { state.remove() },
timeoutConf = timeoutConf,
priorTimeoutTimestamp = beforeTimeoutThreshold,
expectedState = None, // state should be removed
expectedTimeoutTimestamp = NO_TIMESTAMP)
}
testStateUpdateWithTimeout(
"ProcessingTimeTimeout - should timeout - timeout duration updated",
stateUpdates = state => { state.setTimeoutDuration(2000) },
timeoutConf = ProcessingTimeTimeout,
priorTimeoutTimestamp = beforeTimeoutThreshold,
expectedState = preTimeoutState, // state should not change
expectedTimeoutTimestamp = currentBatchTimestamp + 2000) // timestamp should change
testStateUpdateWithTimeout(
"ProcessingTimeTimeout - should timeout - timeout duration and state updated",
stateUpdates = state => { state.update(5); state.setTimeoutDuration(2000) },
timeoutConf = ProcessingTimeTimeout,
priorTimeoutTimestamp = beforeTimeoutThreshold,
expectedState = Some(5), // state should change
expectedTimeoutTimestamp = currentBatchTimestamp + 2000) // timestamp should change
testStateUpdateWithTimeout(
"EventTimeTimeout - should timeout - timeout timestamp updated",
stateUpdates = state => { state.setTimeoutTimestamp(5000) },
timeoutConf = EventTimeTimeout,
priorTimeoutTimestamp = beforeTimeoutThreshold,
expectedState = preTimeoutState, // state should not change
expectedTimeoutTimestamp = 5000) // timestamp should change
testStateUpdateWithTimeout(
"EventTimeTimeout - should timeout - timeout and state updated",
stateUpdates = state => { state.update(5); state.setTimeoutTimestamp(5000) },
timeoutConf = EventTimeTimeout,
priorTimeoutTimestamp = beforeTimeoutThreshold,
expectedState = Some(5), // state should change
expectedTimeoutTimestamp = 5000) // timestamp should change
testWithAllStateVersions("flatMapGroupsWithState - streaming") {
// Function to maintain running count up to 2, and then remove the count
// Returns the data and the count if state is defined, otherwise does not return anything
val stateFunc = (key: String, values: Iterator[String], state: GroupState[RunningCount]) => {
assertCanGetProcessingTime { state.getCurrentProcessingTimeMs() >= 0 }
assertCannotGetWatermark { state.getCurrentWatermarkMs() }
val count = state.getOption.map(_.count).getOrElse(0L) + values.size
if (count == 3) {
state.remove()
Iterator.empty
} else {
state.update(RunningCount(count))
Iterator((key, count.toString))
}
}
val inputData = MemoryStream[String]
val result =
inputData.toDS()
.groupByKey(x => x)
.flatMapGroupsWithState(Update, GroupStateTimeout.NoTimeout)(stateFunc)
testStream(result, Update)(
AddData(inputData, "a"),
CheckNewAnswer(("a", "1")),
assertNumStateRows(total = 1, updated = 1),
AddData(inputData, "a", "b"),
CheckNewAnswer(("a", "2"), ("b", "1")),
assertNumStateRows(total = 2, updated = 2),
StopStream,
StartStream(),
AddData(inputData, "a", "b"), // should remove state for "a" and not return anything for a
CheckNewAnswer(("b", "2")),
assertNumStateRows(total = 1, updated = 2),
StopStream,
StartStream(),
AddData(inputData, "a", "c"), // should recreate state for "a" and return count as 1 and
CheckNewAnswer(("a", "1"), ("c", "1")),
assertNumStateRows(total = 3, updated = 2)
)
}
test("flatMapGroupsWithState - streaming + func returns iterator that updates state lazily") {
// Function to maintain running count up to 2, and then remove the count
// Returns the data and the count if state is defined, otherwise does not return anything
// Additionally, it updates state lazily as the returned iterator get consumed
val stateFunc = (key: String, values: Iterator[String], state: GroupState[RunningCount]) => {
values.flatMap { _ =>
val count = state.getOption.map(_.count).getOrElse(0L) + 1
if (count == 3) {
state.remove()
None
} else {
state.update(RunningCount(count))
Some((key, count.toString))
}
}
}
val inputData = MemoryStream[String]
val result =
inputData.toDS()
.groupByKey(x => x)
.flatMapGroupsWithState(Update, GroupStateTimeout.NoTimeout)(stateFunc)
testStream(result, Update)(
AddData(inputData, "a", "a", "b"),
CheckNewAnswer(("a", "1"), ("a", "2"), ("b", "1")),
StopStream,
StartStream(),
AddData(inputData, "a", "b"), // should remove state for "a" and not return anything for a
CheckNewAnswer(("b", "2")),
StopStream,
StartStream(),
AddData(inputData, "a", "c"), // should recreate state for "a" and return count as 1 and
CheckNewAnswer(("a", "1"), ("c", "1"))
)
}
testWithAllStateVersions("flatMapGroupsWithState - streaming + aggregation") {
  // Function to maintain running count up to 2, and then remove the count
  // Returns the data and the count (-1 if count reached beyond 2 and state was just removed)
  val stateFunc = (key: String, values: Iterator[String], state: GroupState[RunningCount]) => {
    // values is consumed eagerly (values.size); safe because a single element
    // is emitted for the whole group.
    val count = state.getOption.map(_.count).getOrElse(0L) + values.size
    if (count == 3) {
      state.remove()
      Iterator(key -> "-1")
    } else {
      state.update(RunningCount(count))
      Iterator(key -> count.toString)
    }
  }

  val inputData = MemoryStream[String]
  // Append-mode flatMapGroupsWithState feeding a downstream aggregation, so the
  // overall query runs in Complete output mode.
  val result =
    inputData.toDS()
      .groupByKey(x => x)
      .flatMapGroupsWithState(Append, GroupStateTimeout.NoTimeout)(stateFunc)
      .groupByKey(_._1)
      .count()

  testStream(result, Complete)(
    AddData(inputData, "a"),
    CheckNewAnswer(("a", 1)),
    AddData(inputData, "a", "b"),
    // mapGroups generates ("a", "2"), ("b", "1"); so increases counts of a and b by 1
    CheckNewAnswer(("a", 2), ("b", 1)),
    StopStream,
    StartStream(),
    AddData(inputData, "a", "b"),
    // mapGroups should remove state for "a" and generate ("a", "-1"), ("b", "2") ;
    // so increment a and b by 1
    CheckNewAnswer(("a", 3), ("b", 2)),
    StopStream,
    StartStream(),
    AddData(inputData, "a", "c"),
    // mapGroups should recreate state for "a" and generate ("a", "1"), ("c", "1") ;
    // so increment a and c by 1
    CheckNewAnswer(("a", 4), ("b", 2), ("c", 1))
  )
}
test("flatMapGroupsWithState - batch") {
  // Batch execution has no prior state: verify processing time is available,
  // watermark access is not, and state.exists is always false.
  // The function emits the number of values seen for each key.
  val stateFunc = (key: String, values: Iterator[String], state: GroupState[RunningCount]) => {
    assertCanGetProcessingTime { state.getCurrentProcessingTimeMs() > 0 }
    assertCannotGetWatermark { state.getCurrentWatermarkMs() }
    if (state.exists) throw new IllegalArgumentException("state.exists should be false")
    Iterator((key, values.size))
  }
  val df = Seq("a", "a", "b").toDS
    .groupByKey(x => x)
    .flatMapGroupsWithState(Update, GroupStateTimeout.NoTimeout)(stateFunc).toDF
  checkAnswer(df, Seq(("a", 2), ("b", 1)).toDF)
}
testWithAllStateVersions("flatMapGroupsWithState - streaming with processing time timeout") {
  // Function to maintain the count as state and set the proc. time timeout delay of 10 seconds.
  // It returns the count if changed, or -1 if the state was removed by timeout.
  val stateFunc = (key: String, values: Iterator[String], state: GroupState[RunningCount]) => {
    assertCanGetProcessingTime { state.getCurrentProcessingTimeMs() >= 0 }
    assertCannotGetWatermark { state.getCurrentWatermarkMs() }

    if (state.hasTimedOut) {
      state.remove()
      Iterator((key, "-1"))
    } else {
      val count = state.getOption.map(_.count).getOrElse(0L) + values.size
      state.update(RunningCount(count))
      state.setTimeoutDuration("10 seconds")
      Iterator((key, count.toString))
    }
  }

  // A manual clock makes the processing-time timeouts deterministic.
  val clock = new StreamManualClock
  val inputData = MemoryStream[String]
  val result =
    inputData.toDS()
      .groupByKey(x => x)
      .flatMapGroupsWithState(Update, ProcessingTimeTimeout)(stateFunc)

  testStream(result, Update)(
    StartStream(Trigger.ProcessingTime("1 second"), triggerClock = clock),
    AddData(inputData, "a"),
    AdvanceManualClock(1 * 1000),
    CheckNewAnswer(("a", "1")),
    assertNumStateRows(total = 1, updated = 1),

    AddData(inputData, "b"),
    AdvanceManualClock(1 * 1000),
    CheckNewAnswer(("b", "1")),
    assertNumStateRows(total = 2, updated = 1),

    AddData(inputData, "b"),
    // Advancing by 10s exceeds "a"'s timeout, so "a" is removed while "b" updates.
    AdvanceManualClock(10 * 1000),
    CheckNewAnswer(("a", "-1"), ("b", "2")),
    assertNumStateRows(total = 1, updated = 2),

    StopStream,
    StartStream(Trigger.ProcessingTime("1 second"), triggerClock = clock),

    AddData(inputData, "c"),
    AdvanceManualClock(11 * 1000),
    CheckNewAnswer(("b", "-1"), ("c", "1")),
    assertNumStateRows(total = 1, updated = 2),

    AdvanceManualClock(12 * 1000),
    AssertOnQuery { _ => clock.getTimeMillis() == 35000 },
    // Wait until the batch triggered purely by the timeout (no new data) completes.
    Execute { q =>
      failAfter(streamingTimeout) {
        while (q.lastProgress.timestamp != "1970-01-01T00:00:35.000Z") {
          Thread.sleep(1)
        }
      }
    },
    CheckNewAnswer(("c", "-1")),
    assertNumStateRows(total = 0, updated = 1)
  )
}
testWithAllStateVersions("flatMapGroupsWithState - streaming w/ event time timeout + watermark") {
  // Function to maintain the max event time as state and set the timeout timestamp based on the
  // current max event time seen. It returns the max event time in the state, or -1 if the state
  // was removed by timeout.
  val stateFunc = (key: String, values: Iterator[(String, Long)], state: GroupState[Long]) => {
    assertCanGetProcessingTime { state.getCurrentProcessingTimeMs() >= 0 }
    // The watermark is -1 before the first batch completes, hence ">= -1".
    assertCanGetWatermark { state.getCurrentWatermarkMs() >= -1 }

    val timeoutDelaySec = 5
    if (state.hasTimedOut) {
      state.remove()
      Iterator((key, -1))
    } else {
      val valuesSeq = values.toSeq
      val maxEventTimeSec = math.max(valuesSeq.map(_._2).max, state.getOption.getOrElse(0L))
      val timeoutTimestampSec = maxEventTimeSec + timeoutDelaySec
      state.update(maxEventTimeSec)
      // setTimeoutTimestamp takes milliseconds.
      state.setTimeoutTimestamp(timeoutTimestampSec * 1000)
      Iterator((key, maxEventTimeSec.toInt))
    }
  }
  val inputData = MemoryStream[(String, Int)]
  val result =
    inputData.toDS
      .select($"_1".as("key"), $"_2".cast("timestamp").as("eventTime"))
      .withWatermark("eventTime", "10 seconds")
      .as[(String, Long)]
      .groupByKey(_._1)
      .flatMapGroupsWithState(Update, EventTimeTimeout)(stateFunc)

  testStream(result, Update)(
    StartStream(),

    AddData(inputData, ("a", 11), ("a", 13), ("a", 15)),
    // Max event time = 15. Timeout timestamp for "a" = 15 + 5 = 20. Watermark = 15 - 10 = 5.
    CheckNewAnswer(("a", 15)), // Output = max event time of a

    AddData(inputData, ("a", 4)), // Add data older than watermark for "a"
    CheckNewAnswer(), // No output as data should get filtered by watermark

    AddData(inputData, ("a", 10)), // Add data newer than watermark for "a"
    CheckNewAnswer(("a", 15)), // Max event time is still the same
    // Timeout timestamp for "a" is still 20 as max event time for "a" is still 15.
    // Watermark is still 5 as max event time for all data is still 15.

    AddData(inputData, ("b", 31)), // Add data newer than watermark for "b", not "a"
    // Watermark = 31 - 10 = 21, so "a" should be timed out as timeout timestamp for "a" is 20.
    CheckNewAnswer(("a", -1), ("b", 31)) // State for "a" should timeout and emit -1
  )
}
test("flatMapGroupsWithState - uses state format version 2 by default") {
  // Simple per-key counter; the interesting part of this test is the plan check below.
  val countingFunc = (key: String, values: Iterator[String], state: GroupState[RunningCount]) => {
    val newCount = values.size + state.getOption.fold(0L)(_.count)
    state.update(RunningCount(newCount))
    Iterator(key -> newCount.toString)
  }

  val input = MemoryStream[String]
  val query = input
    .toDS()
    .groupByKey(identity)
    .flatMapGroupsWithState(Update, GroupStateTimeout.NoTimeout)(countingFunc)

  testStream(query, Update)(
    AddData(input, "a"),
    CheckNewAnswer(("a", "1")),
    Execute { q =>
      // With no explicit conf set, the physical plan must use state format version 2.
      val stateExecs = q.lastExecution.executedPlan.collect {
        case e: FlatMapGroupsWithStateExec => e
      }
      assert(stateExecs.size == 1)
      assert(stateExecs.head.stateFormatVersion == 2)
    }
  )
}
test("flatMapGroupsWithState - recovery from checkpoint uses state format version 1") {
  // Function to maintain the max event time as state and set the timeout timestamp based on the
  // current max event time seen. It returns the max event time in the state, or -1 if the state
  // was removed by timeout.
  val stateFunc = (key: String, values: Iterator[(String, Long)], state: GroupState[Long]) => {
    assertCanGetProcessingTime { state.getCurrentProcessingTimeMs() >= 0 }
    assertCanGetWatermark { state.getCurrentWatermarkMs() >= -1 }

    val timeoutDelaySec = 5
    if (state.hasTimedOut) {
      state.remove()
      Iterator((key, -1))
    } else {
      val valuesSeq = values.toSeq
      val maxEventTimeSec = math.max(valuesSeq.map(_._2).max, state.getOption.getOrElse(0L))
      val timeoutTimestampSec = maxEventTimeSec + timeoutDelaySec
      state.update(maxEventTimeSec)
      state.setTimeoutTimestamp(timeoutTimestampSec * 1000)
      Iterator((key, maxEventTimeSec.toInt))
    }
  }
  val inputData = MemoryStream[(String, Int)]
  val result =
    inputData.toDS
      .select($"_1".as("key"), $"_2".cast("timestamp").as("eventTime"))
      .withWatermark("eventTime", "10 seconds")
      .as[(String, Long)]
      .groupByKey(_._1)
      .flatMapGroupsWithState(Update, EventTimeTimeout)(stateFunc)

  val resourceUri = this.getClass.getResource(
    "/structured-streaming/checkpoint-version-2.3.1-flatMapGroupsWithState-state-format-1/").toURI

  val checkpointDir = Utils.createTempDir().getCanonicalFile
  // Copy the checkpoint to a temp dir to prevent changes to the original.
  // Not doing this will lead to the test passing on the first run, but fail subsequent runs.
  FileUtils.copyDirectory(new File(resourceUri), checkpointDir)

  inputData.addData(("a", 11), ("a", 13), ("a", 15))
  inputData.addData(("a", 4))

  testStream(result, Update)(
    // Although the conf requests format version 2, recovery must keep using the
    // version 1 layout persisted in the Spark 2.3.1 checkpoint (verified below).
    StartStream(
      checkpointLocation = checkpointDir.getAbsolutePath,
      additionalConfs = Map(SQLConf.FLATMAPGROUPSWITHSTATE_STATE_FORMAT_VERSION.key -> "2")),
    /*
      Note: The checkpoint was generated using the following input in Spark version 2.3.1
      AddData(inputData, ("a", 11), ("a", 13), ("a", 15)),
      // Max event time = 15. Timeout timestamp for "a" = 15 + 5 = 20. Watermark = 15 - 10 = 5.
      CheckNewAnswer(("a", 15)), // Output = max event time of a
      AddData(inputData, ("a", 4)), // Add data older than watermark for "a"
      CheckNewAnswer(), // No output as data should get filtered by watermark
    */
    AddData(inputData, ("a", 10)), // Add data newer than watermark for "a"
    CheckNewAnswer(("a", 15)), // Max event time is still the same
    // Timeout timestamp for "a" is still 20 as max event time for "a" is still 15.
    // Watermark is still 5 as max event time for all data is still 15.
    Execute { query =>
      // Verify state format = 1
      val f = query.lastExecution.executedPlan.collect { case f: FlatMapGroupsWithStateExec => f }
      assert(f.size == 1)
      assert(f.head.stateFormatVersion == 1)
    },
    AddData(inputData, ("b", 31)), // Add data newer than watermark for "b", not "a"
    // Watermark = 31 - 10 = 21, so "a" should be timed out as timeout timestamp for "a" is 20.
    CheckNewAnswer(("a", -1), ("b", 31)) // State for "a" should timeout and emit -1
  )
}
test("mapGroupsWithState - streaming") {
  // Function to maintain running count up to 2, and then remove the count
  // Returns the data and the count (-1 if count reached beyond 2 and state was just removed)
  val stateFunc = (key: String, values: Iterator[String], state: GroupState[RunningCount]) => {
    assertCanGetProcessingTime { state.getCurrentProcessingTimeMs() >= 0 }
    assertCannotGetWatermark { state.getCurrentWatermarkMs() }

    val count = state.getOption.map(_.count).getOrElse(0L) + values.size
    if (count == 3) {
      state.remove()
      (key, "-1")
    } else {
      state.update(RunningCount(count))
      (key, count.toString)
    }
  }

  val inputData = MemoryStream[String]
  val result =
    inputData.toDS()
      .groupByKey(x => x)
      .mapGroupsWithState(stateFunc) // Types = State: RunningCount, Out: (String, String)

  testStream(result, Update)(
    AddData(inputData, "a"),
    CheckNewAnswer(("a", "1")),
    assertNumStateRows(total = 1, updated = 1),
    AddData(inputData, "a", "b"),
    CheckNewAnswer(("a", "2"), ("b", "1")),
    assertNumStateRows(total = 2, updated = 2),
    StopStream,
    StartStream(),
    AddData(inputData, "a", "b"), // should remove state for "a" and return count as -1
    CheckNewAnswer(("a", "-1"), ("b", "2")),
    assertNumStateRows(total = 1, updated = 2),
    StopStream,
    StartStream(),
    AddData(inputData, "a", "c"), // should recreate state for "a" and return count as 1
    CheckNewAnswer(("a", "1"), ("c", "1")),
    assertNumStateRows(total = 3, updated = 2)
  )
}
test("mapGroupsWithState - batch") {
  // Test the following
  // - no initial state
  // - timeouts operations work, does not throw any error [SPARK-20792]
  // - works with primitive state type
  // - can get processing time
  val stateFunc = (key: String, values: Iterator[String], state: GroupState[Int]) => {
    assertCanGetProcessingTime { state.getCurrentProcessingTimeMs() > 0 }
    assertCannotGetWatermark { state.getCurrentWatermarkMs() }
    if (state.exists) throw new IllegalArgumentException("state.exists should be false")
    // In batch mode these calls must succeed as no-ops (SPARK-20792); the state
    // is discarded after the batch, so only (key, count) appears in the answer.
    state.setTimeoutTimestamp(0, "1 hour")
    state.update(10)
    (key, values.size)
  }

  checkAnswer(
    spark.createDataset(Seq("a", "a", "b"))
      .groupByKey(x => x)
      .mapGroupsWithState(EventTimeTimeout)(stateFunc)
      .toDF,
    spark.createDataset(Seq(("a", 2), ("b", 1))).toDF)
}
testQuietly("StateStore.abort on task failure handling") {
  // Verifies that a failing task aborts its state store updates: when the state
  // function throws, the stored count must not advance.
  val stateFunc = (key: String, values: Iterator[String], state: GroupState[RunningCount]) => {
    if (FlatMapGroupsWithStateSuite.failInTask) throw new Exception("expected failure")
    val count = state.getOption.map(_.count).getOrElse(0L) + values.size
    state.update(RunningCount(count))
    (key, count)
  }

  val inputData = MemoryStream[String]
  val result =
    inputData.toDS()
      .groupByKey(x => x)
      .mapGroupsWithState(stateFunc) // Types = State: RunningCount, Out: (String, Long)

  // Flips the global failure flag from within the stream-test action sequence.
  def setFailInTask(value: Boolean): AssertOnQuery = AssertOnQuery { q =>
    FlatMapGroupsWithStateSuite.failInTask = value
    true
  }

  testStream(result, Update)(
    setFailInTask(false),
    AddData(inputData, "a"),
    CheckNewAnswer(("a", 1L)),
    AddData(inputData, "a"),
    CheckNewAnswer(("a", 2L)),
    setFailInTask(true),
    AddData(inputData, "a"),
    ExpectFailure[SparkException](), // task should fail but should not increment count
    setFailInTask(false),
    StartStream(),
    CheckNewAnswer(("a", 3L)) // task should not fail, and should show correct count
  )
}
test("output partitioning is unknown") {
  // mapGroupsWithState makes no partitioning guarantee on its output.
  val keyEcho = (key: String, _: Iterator[String], _: GroupState[RunningCount]) => key
  val input = MemoryStream[String]
  val query = input.toDS.groupByKey(identity).mapGroupsWithState(keyEcho)
  testStream(query, Update)(
    AddData(input, "a"),
    CheckNewAnswer("a"),
    AssertOnQuery(_.lastExecution.executedPlan.outputPartitioning === UnknownPartitioning(0))
  )
}
test("disallow complete mode") {
  // Complete is not a valid *function* output mode for flatMapGroupsWithState;
  // both the Scala and the Java API entry points must reject it eagerly.
  val stateFunc = (key: String, values: Iterator[String], state: GroupState[Int]) => {
    Iterator[String]()
  }

  var e = intercept[IllegalArgumentException] {
    MemoryStream[String].toDS().groupByKey(x => x).flatMapGroupsWithState(
      OutputMode.Complete, GroupStateTimeout.NoTimeout)(stateFunc)
  }
  assert(e.getMessage === "The output mode of function should be append or update")

  val javaStateFunc = new FlatMapGroupsWithStateFunction[String, String, Int, String] {
    import java.util.{Iterator => JIterator}
    override def call(
        key: String,
        values: JIterator[String],
        state: GroupState[Int]): JIterator[String] = { null }
  }
  e = intercept[IllegalArgumentException] {
    MemoryStream[String].toDS().groupByKey(x => x).flatMapGroupsWithState(
      javaStateFunc, OutputMode.Complete,
      implicitly[Encoder[Int]], implicitly[Encoder[String]], GroupStateTimeout.NoTimeout)
  }
  assert(e.getMessage === "The output mode of function should be append or update")
}
// Regression test for SPARK-20714: combining a watermark with
// flatMapGroupsWithState must not fail the query.
// NOTE(review): timeoutConf only affects the test name; the query always uses
// ProcessingTimeTimeout — confirm this matches the intended coverage.
def testWithTimeout(timeoutConf: GroupStateTimeout): Unit = {
  test("SPARK-20714: watermark does not fail query when timeout = " + timeoutConf) {
    // Function to maintain running count up to 2, and then remove the count
    // Returns the data and the count (-1 if count reached beyond 2 and state was just removed)
    val stateFunc =
      (key: String, values: Iterator[(String, Long)], state: GroupState[RunningCount]) => {
        if (state.hasTimedOut) {
          state.remove()
          Iterator((key, "-1"))
        } else {
          val count = state.getOption.map(_.count).getOrElse(0L) + values.size
          state.update(RunningCount(count))
          state.setTimeoutDuration("10 seconds")
          Iterator((key, count.toString))
        }
      }

    val clock = new StreamManualClock
    val inputData = MemoryStream[(String, Long)]
    val result =
      inputData.toDF().toDF("key", "time")
        .selectExpr("key", "cast(time as timestamp) as timestamp")
        .withWatermark("timestamp", "10 second")
        .as[(String, Long)]
        .groupByKey(x => x._1)
        .flatMapGroupsWithState(Update, ProcessingTimeTimeout)(stateFunc)

    testStream(result, Update)(
      StartStream(Trigger.ProcessingTime("1 second"), triggerClock = clock),
      AddData(inputData, ("a", 1L)),
      AdvanceManualClock(1 * 1000),
      CheckNewAnswer(("a", "1"))
    )
  }
}
// Run the SPARK-20714 regression test for both timeout configurations.
testWithTimeout(NoTimeout)
testWithTimeout(ProcessingTimeTimeout)
/**
 * Registers a test that drives InputProcessor with new input data and verifies
 * the resulting state/timeout (or expected exception). Silently skipped when a
 * prior timeout timestamp is given without prior state, an impossible combination.
 */
def testStateUpdateWithData(
    testName: String,
    stateUpdates: GroupState[Int] => Unit,
    timeoutConf: GroupStateTimeout,
    priorState: Option[Int],
    priorTimeoutTimestamp: Long = NO_TIMESTAMP,
    expectedState: Option[Int] = None,
    expectedTimeoutTimestamp: Long = NO_TIMESTAMP,
    expectedException: Class[_ <: Exception] = null): Unit = {

  if (priorState.isEmpty && priorTimeoutTimestamp != NO_TIMESTAMP) {
    return // there can be no prior timestamp, when there is no prior state
  }
  test(s"InputProcessor - process new data - $testName") {
    val mapGroupsFunc = (key: Int, values: Iterator[Int], state: GroupState[Int]) => {
      assert(state.hasTimedOut === false, "hasTimedOut not false")
      assert(values.nonEmpty, "Some value is expected")
      stateUpdates(state)
      Iterator.empty
    }
    testStateUpdate(
      testTimeoutUpdates = false, mapGroupsFunc, timeoutConf,
      priorState, priorTimeoutTimestamp,
      expectedState, expectedTimeoutTimestamp, expectedException)
  }
}
/**
 * Registers a test that drives InputProcessor with a timed-out state entry
 * (no input values) and verifies the resulting state/timeout.
 * Uses `preTimeoutState` (defined elsewhere in this suite) as the prior state.
 */
def testStateUpdateWithTimeout(
    testName: String,
    stateUpdates: GroupState[Int] => Unit,
    timeoutConf: GroupStateTimeout,
    priorTimeoutTimestamp: Long,
    expectedState: Option[Int],
    expectedTimeoutTimestamp: Long = NO_TIMESTAMP): Unit = {

  test(s"InputProcessor - process timed out state - $testName") {
    val mapGroupsFunc = (key: Int, values: Iterator[Int], state: GroupState[Int]) => {
      assert(state.hasTimedOut, "hasTimedOut not true")
      assert(values.isEmpty, "values not empty")
      stateUpdates(state)
      Iterator.empty
    }

    testStateUpdate(
      testTimeoutUpdates = true, mapGroupsFunc, timeoutConf = timeoutConf,
      preTimeoutState, priorTimeoutTimestamp, expectedState, expectedTimeoutTimestamp, null)
  }
}
/**
 * Drives FlatMapGroupsWithStateExec's InputProcessor directly against a fresh
 * in-memory state store, then verifies the stored state object and timeout
 * timestamp — or, when expectedException is set, the thrown exception type.
 */
def testStateUpdate(
    testTimeoutUpdates: Boolean,
    mapGroupsFunc: (Int, Iterator[Int], GroupState[Int]) => Iterator[Int],
    timeoutConf: GroupStateTimeout,
    priorState: Option[Int],
    priorTimeoutTimestamp: Long,
    expectedState: Option[Int],
    expectedTimeoutTimestamp: Long,
    expectedException: Class[_ <: Exception]): Unit = {

  val store = newStateStore()
  val mapGroupsSparkPlan = newFlatMapGroupsWithStateExec(
    mapGroupsFunc, timeoutConf, currentBatchTimestamp)
  val inputProcessor = new mapGroupsSparkPlan.InputProcessor(store)
  val stateManager = mapGroupsSparkPlan.stateManager
  val key = intToRow(0)
  // Prepare store with prior state configs
  if (priorState.nonEmpty || priorTimeoutTimestamp != NO_TIMESTAMP) {
    stateManager.putState(store, key, priorState.orNull, priorTimeoutTimestamp)
  }

  // Call updating function to update state store
  def callFunction() = {
    val returnedIter = if (testTimeoutUpdates) {
      inputProcessor.processTimedOutState()
    } else {
      inputProcessor.processNewData(Iterator(key))
    }
    returnedIter.size // consume the iterator to force state updates
  }
  if (expectedException != null) {
    // Call function and verify the exception type
    val e = intercept[Exception] { callFunction() }
    assert(e.getClass === expectedException, "Exception thrown but of the wrong type")
  } else {
    // Call function to update and verify updated state in store
    callFunction()
    val updatedState = stateManager.getState(store, key)
    assert(Option(updatedState.stateObj).map(_.toString.toInt) === expectedState,
      "final state not as expected")
    assert(updatedState.timeoutTimestamp === expectedTimeoutTimestamp,
      "final timeout timestamp not as expected")
  }
}
/**
 * Builds a FlatMapGroupsWithStateExec physical node over an empty RDD, reusing
 * the analyzed logical plan from a throwaway streaming Dataset.
 * NOTE(review): batchTimestampMs is not referenced in the body — the node uses
 * currentBatchTimestamp instead; confirm whether the parameter is intentional.
 */
def newFlatMapGroupsWithStateExec(
    func: (Int, Iterator[Int], GroupState[Int]) => Iterator[Int],
    timeoutType: GroupStateTimeout = GroupStateTimeout.NoTimeout,
    batchTimestampMs: Long = NO_TIMESTAMP): FlatMapGroupsWithStateExec = {
  val stateFormatVersion = spark.conf.get(SQLConf.FLATMAPGROUPSWITHSTATE_STATE_FORMAT_VERSION)
  val emptyRdd = spark.sparkContext.emptyRDD[InternalRow]
  MemoryStream[Int]
    .toDS
    .groupByKey(x => x)
    .flatMapGroupsWithState[Int, Int](Append, timeoutConf = timeoutType)(func)
    .logicalPlan.collectFirst {
      case FlatMapGroupsWithState(f, k, v, g, d, o, s, m, _, t, _) =>
        FlatMapGroupsWithStateExec(
          f, k, v, g, d, o, None, s, stateFormatVersion, m, t,
          Some(currentBatchTimestamp), Some(currentBatchWatermark),
          RDDScanExec(g, emptyRdd, "rdd"))
    }.get
}
def testTimeoutDurationNotAllowed[T <: Exception: Manifest](state: GroupStateImpl[_]): Unit = {
  // Every setTimeoutDuration overload must throw T and leave the stored
  // timeout timestamp untouched.
  val before = state.getTimeoutTimestamp
  val illegalCalls: Seq[() => Unit] = Seq(
    () => state.setTimeoutDuration(1000),
    () => state.setTimeoutDuration("2 second")
  )
  illegalCalls.foreach { call =>
    intercept[T] { call() }
    assert(state.getTimeoutTimestamp === before)
  }
}
def testTimeoutTimestampNotAllowed[T <: Exception: Manifest](state: GroupStateImpl[_]): Unit = {
  // Every setTimeoutTimestamp overload must throw T and leave the stored
  // timeout timestamp untouched.
  val before = state.getTimeoutTimestamp
  val illegalCalls: Seq[() => Unit] = Seq(
    () => state.setTimeoutTimestamp(2000),
    () => state.setTimeoutTimestamp(2000, "1 second"),
    () => state.setTimeoutTimestamp(new Date(2000)),
    () => state.setTimeoutTimestamp(new Date(2000), "1 second")
  )
  illegalCalls.foreach { call =>
    intercept[T] { call() }
    assert(state.getTimeoutTimestamp === before)
  }
}
// Fresh in-memory state store for direct InputProcessor tests.
def newStateStore(): StateStore = new MemoryStateStore()

// Projection used to convert Int keys into UnsafeRows and back.
val intProj = UnsafeProjection.create(Array[DataType](IntegerType))

def intToRow(i: Int): UnsafeRow = {
  // copy() detaches the row from the projection's reused buffer.
  intProj.apply(new GenericInternalRow(Array[Any](i))).copy()
}

def rowToInt(row: UnsafeRow): Int = row.getInt(0)
// Registers the given test once per supported state format version, running the
// body with the corresponding SQL conf set.
def testWithAllStateVersions(name: String)(func: => Unit): Unit = {
  FlatMapGroupsWithStateExecHelper.supportedVersions.foreach { ver =>
    test(s"$name - state format version $ver") {
      withSQLConf(SQLConf.FLATMAPGROUPSWITHSTATE_STATE_FORMAT_VERSION.key -> ver.toString) {
        func
      }
    }
  }
}
}
object FlatMapGroupsWithStateSuite {

  // Toggled by tests to simulate a task failure inside the state function.
  var failInTask = true

  /** Fails the test if processing time could not be obtained. */
  def assertCanGetProcessingTime(predicate: => Boolean): Unit = {
    if (!predicate) throw new TestFailedException("Could not get processing time", 20)
  }

  /** Fails the test if the watermark could not be obtained. */
  def assertCanGetWatermark(predicate: => Boolean): Unit = {
    // Fixed message: previously said "Could not get processing time" (copy-paste error).
    if (!predicate) throw new TestFailedException("Could not get watermark", 20)
  }

  /**
   * Asserts that `func` fails with UnsupportedOperationException, i.e. the
   * watermark must NOT be accessible. Any other exception, or successful
   * completion, fails the test.
   */
  def assertCannotGetWatermark(func: => Unit): Unit = {
    try {
      func
    } catch {
      case u: UnsupportedOperationException =>
        return
      case _: Throwable =>
        throw new TestFailedException("Unexpected exception when trying to get watermark", 20)
    }
    throw new TestFailedException("Could get watermark when not expected", 20)
  }
}
| skonto/spark | sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateSuite.scala | Scala | apache-2.0 | 52,976 |
package slick.migration.dialect
import slick.migration.ast._
import scala.slick.ast.FieldSymbol
import scala.slick.model.ForeignKeyAction
/**
 * Slick-migration dialect for Microsoft SQL Server.
 *
 * SQL Server specifics handled here:
 *  - renames (tables, columns, indexes) go through the sp_rename procedure;
 *  - auto-increment columns use IDENTITY;
 *  - ForeignKeyAction.Restrict is emitted as NO ACTION (no RESTRICT keyword);
 *  - altering a column default requires dropping the existing system-named
 *    default constraint first, hence the dynamic-SQL batch in alterColumnDefault.
 */
class SQLServerDialect(driver: SQLServerDriver) extends Dialect[SQLServerDriver](driver) {

  override def autoInc(ci: ColumnInfo) = if (ci.autoInc) " IDENTITY" else ""

  override def renameTable(table: TableInfo, to: String) =
    s"sp_rename ${quoteTableName(table)}, ${quoteIdentifier(to)}"

  override def createForeignKey(sourceTable: TableInfo, name: String, sourceColumns: Seq[FieldSymbol], targetTable: TableInfo, targetColumns: Seq[FieldSymbol], onUpdate: ForeignKeyAction, onDelete: ForeignKeyAction): String =
    s"""alter table ${quoteTableName(sourceTable)}
       | add constraint ${quoteIdentifier(name)}
       | foreign key ${columnList(sourceColumns: _*)}
       | references ${quoteTableName(targetTable)}
       | (${quotedColumnNames(targetColumns: _*) mkString ", "})
       | on update
       | ${if (onUpdate == ForeignKeyAction.Restrict) "NO ACTION" else onUpdate.action}
       | on delete
       | ${if (onDelete == ForeignKeyAction.Restrict) "NO ACTION" else onDelete.action}""".stripMargin

  // 'INDEX' is the sp_rename object-type argument; the old name is table-qualified.
  override def renameIndex(old: IndexInfo, newName: String): Seq[String] = List(
    s"""sp_rename ${quoteName(old.table, old.name)}, ${quoteIdentifier(newName)}, 'INDEX'"""
  )

  override def dropIndex(index: IndexInfo) =
    s"drop index ${quoteIdentifier(index.name)} on ${quoteTableName(index.table)}"

  override def addColumn(table: TableInfo, column: ColumnInfo) =
    s"""alter table ${quoteTableName(table)} add ${columnSql(column, false)}"""

  override def renameColumn(table: TableInfo, from: ColumnInfo, to: String) =
    s"""sp_rename ${quoteName(table, from.name)}, ${quoteIdentifier(to)}, 'COLUMN'"""

  // SQL Server sets type and nullability in the same ALTER COLUMN statement,
  // so type changes are delegated to alterColumnNullability.
  override def alterColumnType(table: TableInfo, column: ColumnInfo): Seq[String] =
    List(alterColumnNullability(table, column))

  // Looks up the auto-named default constraint via the system catalog, drops it
  // with dynamic SQL, then adds a deterministic one named <column>_DEFAULT.
  // NOTE(review): the new constraint name is interpolated unquoted — confirm
  // column names are always plain identifiers here.
  override def alterColumnDefault(table: TableInfo, column: ColumnInfo) =
    s"""BEGIN
       | declare @TableName nvarchar(256)
       | declare @ColumnName nvarchar(256)
       | set @TableName = N'${table.tableName}'
       | set @ColumnName = N'${column.name}'
       | declare @ConstraintName nvarchar(256)
       |
       | select @ConstraintName = d.name
       | from sys.tables t
       | join sys.default_constraints d
       | on d.parent_object_id = t.object_id
       | join
       | sys.columns c
       | on c.object_id = t.object_id
       | and c.column_id = d.parent_column_id
       | where t.name = @TableName
       | and c.name = @ColumnName
       |
       | declare @SqlCmd nvarchar(256)
       | SET @SqlCmd = N'ALTER TABLE ' + @TableName + N' DROP CONSTRAINT ' + @ConstraintName
       | EXEC sp_executesql @SqlCmd
       |
       | ALTER TABLE ${quoteTableName(table)}
       | ADD CONSTRAINT ${column.name}_DEFAULT DEFAULT (${column.default getOrElse "null"})
       | FOR ${quoteIdentifier(column.name)}
       |
       | END """.stripMargin

  override def alterColumnNullability(table: TableInfo, column: ColumnInfo) =
    s"""alter table ${quoteTableName(table)}
       | alter column ${quoteIdentifier(column.name)}
       | ${column.sqlType.get}
       | ${if (column.notNull) "NOT NULL" else "NULL"}""".stripMargin

  // Builds a quoted, schema-qualified "<table>.<name>" identifier for sp_rename.
  private def quoteName(t: TableInfo, name: String) = {
    val tableName = (t.schemaName match {
      case Some(s) => s + "." + t.tableName
      case None => t.tableName
    }) + "."
    quoteIdentifier(tableName + name)
  }
} | itryapitsin/slick-migration | drivers/sqlserver/src/main/scala/slick/migration/dialect/SQLServerDialect.scala | Scala | apache-2.0 | 3,411
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.zoo.pipeline.api.keras.serializer
import java.io.File
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity}
import com.intel.analytics.bigdl.utils.RandomGenerator._
import com.intel.analytics.bigdl.utils.serializer.{ModuleLoader, ModulePersister}
import com.intel.analytics.zoo.pipeline.api.keras.ZooSpecHelper
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
// Runs the serialization round-trip test for every module class the helper expects.
class SerializerSpec extends SerializerSpecHelper {
  runTests(getExpectedTests())
}
/**
 * Base class for module serialization tests: serializes a module to a temp
 * file, loads it back, and checks the loaded module produces the same forward
 * result on the given input.
 */
private[zoo] abstract class ModuleSerializationTest
  extends FlatSpec with Matchers with BeforeAndAfterAll{

  // Suffix for the temporary serialization files.
  val postFix = "analytics-zoo"

  /** Each concrete serialization test implements this entry point. */
  def test(): Unit

  protected def createTmpDir() = {
    ZooSpecHelper.createTmpDir()
  }

  /** Convenience overload that records a single covered class. */
  protected def runSerializationTest(
      module: AbstractModule[_, _, Float],
      input: Activity, cls: Class[_] = null) : Unit = {
    runSerializationTestWithMultiClass(module, input,
      if (cls == null) Array(module.getClass) else Array(cls))
  }

  /**
   * Round-trips `module` through file serialization and compares forward results.
   * NOTE(review): `classes` is not used in this body; presumably consumed by a
   * coverage tracker elsewhere — confirm.
   */
  protected def runSerializationTestWithMultiClass(
      module: AbstractModule[_, _, Float],
      input: Activity, classes: Array[Class[_]]) : Unit = {
    val name = module.getName
    val serFile = File.createTempFile(name, postFix)
    try {
      val originForward = module.evaluate().forward(input)

      ModulePersister.saveToFile[Float](serFile.getAbsolutePath, null, module.evaluate(), true)
      RNG.setSeed(1000)
      val loadedModule = ModuleLoader.loadFromFile[Float](serFile.getAbsolutePath)

      val afterLoadForward = loadedModule.forward(input)
      afterLoadForward should be (originForward)
    } finally {
      // Clean up the temp file even when serialization or the comparison fails
      // (the original version leaked the file on any exception).
      if (serFile.exists) {
        serFile.delete
      }
    }
  }
}
| intel-analytics/analytics-zoo | zoo/src/test/scala/com/intel/analytics/zoo/pipeline/api/keras/serializer/SerializerSpec.scala | Scala | apache-2.0 | 2,270 |
package org.sisioh.aws4s.cfn.model
import com.amazonaws.services.cloudformation.model.{ EstimateTemplateCostRequest, Parameter }
import org.sisioh.aws4s.PimpedType
import scala.collection.JavaConverters._
// Factory for empty EstimateTemplateCostRequest instances.
object EstimateTemplateCostRequestFactory {

  def create(): EstimateTemplateCostRequest = new EstimateTemplateCostRequest()
}
/**
 * Value-class wrapper around the AWS SDK's [[EstimateTemplateCostRequest]],
 * exposing Option-based accessors (None <-> null at the Java boundary) and
 * Scala collections for the parameter list.
 */
class RichEstimateTemplateCostRequest(val underlying: EstimateTemplateCostRequest)
  extends AnyVal with PimpedType[EstimateTemplateCostRequest] {

  def templateBodyOpt: Option[String] = Option(underlying.getTemplateBody)

  def templateBodyOpt_=(value: Option[String]): Unit =
    underlying.setTemplateBody(value.orNull)

  def withTemplateBodyOpt(value: Option[String]): EstimateTemplateCostRequest =
    underlying.withTemplateBody(value.orNull)

  // ---

  def templateURLOpt: Option[String] = Option(underlying.getTemplateURL)

  def templateURLOpt_=(value: Option[String]): Unit =
    underlying.setTemplateURL(value.orNull)

  def withTemplateURLOpt(value: Option[String]): EstimateTemplateCostRequest =
    underlying.withTemplateURL(value.orNull)

  // ---

  // Copies the underlying Java list into an immutable Vector on each access.
  def parameters: Seq[Parameter] = underlying.getParameters.asScala.toVector

  def parameters_=(value: Seq[Parameter]): Unit =
    underlying.setParameters(value.asJava)

  def withParameters(value: Seq[Parameter]): EstimateTemplateCostRequest =
    underlying.withParameters(value.asJava)
}
| everpeace/aws4s | aws4s-cfn/src/main/scala/org/sisioh/aws4s/cfn/model/RichEstimateTemplateCostRequest.scala | Scala | mit | 1,401 |
/**
* Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com>
*/
package achilles.backend.services
/*
* comments like //#<tag> are there for inclusion into docs, please don’t remove
*/
import akka.actor.{ActorSystem, ActorLogging, Actor, ActorRef, Props}
import akka.kernel.Bootable
import akka.pattern._
import akka.util.Timeout
import com.typesafe.config.ConfigFactory
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}
import scala.concurrent.Future
//#actor
// Fronts the streaming and database recommenders: both are queried in parallel
// and the original sender gets the streaming result, falling back to the
// database result when the streaming ask fails or times out.
class MainServerActor(streamActor: ActorRef, dbActor: ActorRef) extends Actor with ActorLogging {
  def receive = {
    case QueryRecom(uid, content, location) =>
      log.info("Query recommendation from Rec Actor")
      implicit val timeout = Timeout(1.second)
      // Both asks are issued up front, so the DB query is already in flight if
      // the streaming path fails.
      val streamFuture = streamActor ? QueryRecom(uid, content, location)
      val dbFuture = dbActor ? QueryRecom(uid, content, location)
      // Capture sender() before the future callbacks run on another thread.
      val requester = sender
      streamFuture map {
        requester.!
      } recover {
        // NOTE(review): the fallback builds a nested Future that is never awaited;
        // its own failure is only logged. Presumably best-effort by design — confirm.
        case _ => dbFuture map {
          requester.!
        } recover {
          case _ => log.info("Time out both in streaming and database recommendation")
        }
      }
  }
}
//#actor
// Bootable kernel wrapper hosting the main server's actor system.
class ServerActorApp extends Bootable {
  //#setup
  // Actor system configured from the "mainserver" section of the config.
  val system = ActorSystem("ServerActorApp",
    ConfigFactory.load.getConfig("mainserver"))
  // NOTE(review): MainServerActor's constructor takes (streamActor, dbActor) but
  // Props[MainServerActor] supplies no arguments — actor creation would fail at
  // runtime; confirm whether a no-arg constructor exists elsewhere.
  val actor = system.actorOf(Props[MainServerActor], "ServerActorApp")
  //#setup

  def startup() {
  }

  def shutdown() {
    system.shutdown()
  }
}
/** Standalone entry point that boots the main server's actor system. */
object RecApp {
  def main(args: Array[String]) {
    new ServerActorApp
    // Fixed message: previously said "Started Calculator Application", a
    // leftover from the Akka sample this file was derived from.
    println("Started main server application - waiting for messages")
  }
}
| yinxusen/achilles | src/main/scala/achilles/backend/services/MainServer.scala | Scala | apache-2.0 | 1,699 |
package controllers
import anorm.Row
import play.api.libs.json.{JsNumber, JsObject, JsValue}
object LeadRecord extends REST {
val tableName: String = "lead_records"
val parameters: Map[String, (String, String) => ValidationResult] = Map(
"number" -> validateFloat,
"stat_record_id" -> validateInt
)
protected def single(row: Row): JsValue = {
row match {
case Row(id: Long, number: Float, stat_record_id: Long) => {
JsObject(
"id" -> JsNumber(id) ::
"number" -> JsNumber(number) ::
"stat_record_id" -> JsNumber(stat_record_id) ::
Nil
)
}
case _ => throw new IllegalArgumentException("Row provided is invalid!")
}
}
} | ishakir/PokeStat | app/controllers/LeadRecord.scala | Scala | mit | 751 |
package cwe.scala.library.runtime.test
import cwe.scala.library.visitor._
import cwe.scala.library.boxes.ExceptionMatcher
import cwe.scala.library.runtime.ServiceProvider
import cwe.scala.library.audit.AuditServiceProvider
abstract class ScalaTest {
// Test runner injected by the harness; null until setTestRunner is called.
private var testRunner: TestRunner = null
private var _printAssertions: Boolean = false

// Dependency injection
def setTestRunner(tr: TestRunner) = testRunner = tr
protected def getTestRunner(): TestRunner = testRunner

// Test sandbox
protected def printAssertions: Boolean = _printAssertions
protected def printAssertions_=(x: Boolean) = _printAssertions = x

private def tr_printAssertions = getTestRunner().printAssertions
// NOTE(review): tr_forcePrintAssertionsOnRun is never read in the visible code —
// confirm it is used further down the class.
private var tr_forcePrintAssertionsOnRun: Boolean = false
/**
 * Prints a message through the injected test runner.
 */
protected def log(msg: String): Unit = getTestRunner log msg

/**
 * Prints a message only when the condition is true.
 */
protected def log(cond: Boolean, msg: String) = if (cond) getTestRunner log msg

/**
 * Logs the string conversion of an arbitrary object.
 */
protected def log(obj: Any): Unit = this.log(obj.toString())
/**
 * Inspects the memory size of an object which is marked as Visitable and
 * computes the number of steps to build it. The lazy argument is first
 * evaluated under a profiler, then walked with a contextual visitor that
 * accumulates element counts and estimated byte sizes per context; the
 * combined statistics and profiling results are logged as XML.
 *
 * @param obj     by-name expression producing the Visitable to inspect
 * @param objName name used for the root XML tag (defaults to "obj")
 */
protected def inspect(obj: => Visitable)(implicit objName: String = "obj") = {
// Object statistics
// Per-context accumulator: element count, estimated size, and an XML fragment.
class Statistics(_name: String) {
def name: String = _name
val xmlB = ServiceProvider.createXmlBuilder()
var nElements: Long = 0
var sizeByte: Long = 0
/**
 * Folds a child context's totals into this one and appends its XML:
 * a nested tag when the child produced inner XML, a closed tag otherwise.
 */
def add(s: Statistics) = {
// updates statistics
this.nElements += s.nElements
this.sizeByte += s.sizeByte
// writes xml
if (s.xmlB.length > 0) xmlB.writeStartTag(s.name, "nElements", s.nElements, "sizeByte", s.sizeByte).writeXml(s.xmlB.toString()).writeEndTag(s.name)
else xmlB.writeClosedTag(s.name, "nElements", s.nElements, "sizeByte", s.sizeByte)
}
}
// Contextual Visitor
// Walks the Visitable; a fresh Statistics is created for each named context.
val cv = new ContextualVisitor[Statistics](objName, { (n: String) => new Statistics(n) }) {
// Counts an anonymous value inside the current context.
def observeInContext(currentContextName: String, currentContext: Statistics, value: Any): VisitorCommand = {
currentContext.nElements += 1
currentContext.sizeByte += estimateSize(value)
CONTINUE
}
// Counts a named leaf value and emits a closed XML tag for it.
def observeSingleValueInContext(currentContextName: String, currentContext: Statistics, name: String, value: Any): VisitorCommand = {
currentContext.nElements += 1
val size = estimateSize(value)
currentContext.sizeByte += size
// adds xml
currentContext.xmlB.writeClosedTag(name, "nElements", 1, "sizeByte", size)
CONTINUE
}
// Nothing to record when entering a context; totals are folded in on exit.
def startNewContextForObj(newContextName: String, newContext: Statistics, obj: Any): VisitorCommand = {
CONTINUE
}
def endCurrentContext(endingContextName: String, endingContext: Statistics): VisitorCommand = {
// reports subtotal
this.currentContext.add(endingContext)
CONTINUE
}
/**
 * Estimates the size in bytes of an object
 * (it is a theoric estimation, used for comparison, not an absolute runtime value)
 */
def estimateSize(value: Any): Long = {
value match {
case _: Boolean => 1 // we assume a boolean is coded on one Byte
case _: Byte => 1
case _: Short => 2
case _: Int => 4
case _: Long => 8
case _: Float => 4
case _: Double => 8
case _: Char => 2
case s: String => 2 * s.length
case _ => value.toString.length * 2 // fallback: size of the string rendering
}
}
}
// profiles the expression
val profiler = AuditServiceProvider.createProfiler()
val evaluatedObj = profiler.profile(obj).asInstanceOf[Visitable]
// computes the memory statistics
evaluatedObj.accept(cv)
// extracts the memory statistics
val s = cv.currentContext
// writes xml
val xmlB = ServiceProvider.createXmlBuilder()
xmlB.writeStartTag(s.name, "nElements", s.nElements, "sizeByte", s.sizeByte)
/* inject profiling result */ .writeXml(profiler.getResultsAsXml("profiling"))
.writeXml(s.xmlB.toString())
.writeEndTag(s.name)
this.log(xmlB.toString())
}
/** True when a passing assertion should also be echoed to the log. */
private def logPassingAssertions: Boolean =
  tr_forcePrintAssertionsOnRun || (tr_printAssertions && printAssertions)

/** Asserts `cond` holds, failing the test with `elseMsg` otherwise. */
protected def assert(cond: Boolean, elseMsg: String) = {
  if (!cond) throw new TestException("ASSERT", elseMsg)
  else if (logPassingAssertions) log("OK: " + elseMsg)
}

/** Alias of [[assert]]: the condition must be true. */
protected def assertTrue(cond: Boolean, elseMsg: String) = assert(cond, elseMsg)

/** Asserts `cond` is false, failing the test with `elseMsg` otherwise. */
protected def assertFalse(cond: Boolean, elseMsg: String) = assert(!cond, elseMsg)

/** Fails with `msg` unless `a == b`. */
protected def assertEqual(a: Any, b: Any, msg: String) = {
  if (a != b) throw new TestException(s"ASSERT EQUAL ($a=$b)", msg)
  else if (logPassingAssertions) log("OK: " + msg)
}

/** Fails with `msg` unless `a != b`. */
protected def assertNotEqual(a: Any, b: Any, msg: String) = {
  if (a == b) throw new TestException(s"ASSERT NOT EQUAL ($a!=$b)", msg)
  else if (logPassingAssertions) log("OK: " + msg)
}

/** Fails with `msg` unless `a` is null. */
protected def assertNull(a: Any, msg: String) = {
  if (a != null) throw new TestException(s"ASSERT NULL ($a)", msg)
  else if (logPassingAssertions) log("OK: " + msg)
}

/** Fails with `msg` unless `a` is non-null. */
protected def assertNotNull(a: Any, msg: String) = {
  if (a == null) throw new TestException("ASSERT NOT NULL", msg)
  else if (logPassingAssertions) log("OK: " + msg)
}

/** Unconditionally fails the test with `msg`. */
protected def fail(msg: String) = throw new TestException("FORCE FAILURE", msg)
// Test execution

/** Executes the test body; implemented by concrete test cases. */
def run(): Unit

/** Executes the test, optionally forcing passing assertions to be logged. */
def run(forcePrintAssertions: Boolean): Unit = {
  tr_forcePrintAssertionsOnRun = forcePrintAssertions
  run()
}
/**
 * Runs a piece of code which should throw an exception in this testing case;
 * if the expected exception is not thrown, then the test fails.
 *
 * @param code         by-name block expected to throw an exception of type T
 * @param assertionMsg message used for the final assertion
 * @param eMatcher     implicit matcher that accepts a matching exception
 *                     (setting the success flag) or rethrows a non-matching one
 */
def runAndCatchException[T <: Exception](code: => Unit, assertionMsg: String)(implicit eMatcher: ExceptionMatcher[T]) {
var ok = false
try {
code
} catch {
// Matching exceptions flip ok to true; others are rethrown by the matcher.
case e: Exception => eMatcher.catchOrThrow(e, { () => ok = true })
}
// Fails here when the code block completed without throwing.
this.assert(ok, assertionMsg)
}
/**
 * Runs another test within this test's execution context, propagating the
 * runner, the logging preference, and the force-print flag.
 */
def runOtherTest(test: ScalaTest) = {
  test.setTestRunner(getTestRunner())
  test.printAssertions = printAssertions
  test.run(tr_forcePrintAssertionsOnRun)
}
} | wwwigii-system/research | cwe-scala-library/src/cwe/scala/library/runtime/test/ScalaTest.scala | Scala | gpl-3.0 | 6,738 |
/*
* Copyright 2001-2011 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.prop.TableDrivenPropertyChecks
/**
 * Verifies the null-argument contract and accessor behavior shared by
 * StackDepthException and its TestFailedException subclass, using a
 * table-driven shared-behavior function.
 */
class StackDepthExceptionSpec extends FunSpec with ShouldMatchers with TableDrivenPropertyChecks {
// Concrete subclass exercising the function-based (lazy message/depth) constructor.
class FunException(
messageFun: StackDepthException => Option[String],
cause: Option[Throwable],
failedCodeStackDepthFun: StackDepthException => Int
) extends StackDepthException(messageFun, cause, failedCodeStackDepthFun) {
// Returns a copy whose stack trace starts at the failing frame.
def severedAtStackDepth: FunException = {
val truncated = getStackTrace.drop(failedCodeStackDepth)
val e = new FunException(messageFun, cause, e => 0)
e.setStackTrace(truncated)
e
}
}
// Concrete subclass exercising the plain-value constructor.
class NoFunException(
message: Option[String],
cause: Option[Throwable],
failedCodeStackDepth: Int
) extends StackDepthException(message, cause, failedCodeStackDepth) {
// Returns a copy whose stack trace starts at the failing frame.
def severedAtStackDepth: NoFunException = {
val truncated = getStackTrace.drop(failedCodeStackDepth)
val e = new NoFunException(message, cause, 0)
e.setStackTrace(truncated)
e
}
}
// Argument combinations for the function-based constructor that must throw NPE:
// each row has exactly one null (or Some(null)) argument.
val invalidFunCombos =
Table[StackDepthException => Option[String], Option[Throwable], StackDepthException => Int](
("messageFun", "cause", "failedCodeStackDepthFun"),
(null, Some(new Exception), e => 17),
(e => Some("hi"), null, e => 17),
(e => Some("hi"), Some(null), e => 17),
(e => Some("hi"), Some(new Exception), null)
)
// Invalid (message, cause) pairs for the plain-value constructor.
val invalidNoFunCombos =
Table(
("message", "cause"),
(null, Some(new Exception)),
(Some(null), Some(new Exception)),
(Some("hi"), null),
(Some("hi"), Some(null))
)
// Runs the shared behavior against the StackDepthException test subclasses.
describe("A StackDepthException") {
it should behave like aStackDepthExceptionWhenGivenNulls(
(message, cause, failedCodeStackDepth) => new NoFunException(message, cause, failedCodeStackDepth),
(messageFun, cause, failedCodeStackDepthFun) => new FunException(messageFun, cause, failedCodeStackDepthFun)
)
}
// Runs the same shared behavior against TestFailedException's two constructors.
describe("A TestFailedException") {
it should behave like aStackDepthExceptionWhenGivenNulls(
(message, cause, failedCodeStackDepth) => new TestFailedException(message, cause, failedCodeStackDepth),
(messageFun, cause, failedCodeStackDepthFun) => new TestFailedException(messageFun, cause, failedCodeStackDepthFun)
)
}
/**
 * Shared behavior: registers tests parameterized by two factories, one per
 * constructor flavor (plain values vs. functions).
 *
 * @param newSDE    factory using (message, cause, depth) values
 * @param newFunSDE factory using (messageFun, cause, depthFun) functions
 */
def aStackDepthExceptionWhenGivenNulls(
newSDE: (Option[String], Option[Throwable], Int) => StackDepthException,
newFunSDE: (StackDepthException => Option[String], Option[Throwable], StackDepthException => Int) => StackDepthException
) {
it("should throw NPE if passed nulls or Some(null)s") {
forAll (invalidFunCombos) { (msgFun, cause, fcsdFun) =>
evaluating {
newFunSDE(msgFun, cause, fcsdFun)
} should produce [NullPointerException]
}
forAll (invalidNoFunCombos) { (msg, cause) =>
evaluating {
newSDE(msg, cause, 17)
} should produce [NullPointerException]
}
}
it("should produce the Some(message) from getMessage, or null if message was None") {
val eDefined = newSDE(Some("howdy!"), None, 17)
eDefined.getMessage should be ("howdy!")
val eEmpty = newSDE(None, None, 17)
eEmpty.getMessage should be (null)
}
it("should produce the Some(cause) from getCause, or null if cause was None") {
val e = new Exception
val eDefined = newSDE(Some("howdy!"), Some(e), 17)
eDefined.getCause should be (e)
val eEmpty = newSDE(Some("howdy!"), None, 17)
eEmpty.getCause should be (null)
}
it("should produce the Some(message) from message, or None if message was None") {
val eDefined = newSDE(Some("howdy!"), None, 17)
eDefined.message should be (Some("howdy!"))
val eEmpty = newSDE(None, None, 17)
eEmpty.message should be (None)
}
it("should produce the Some(cause) from cause, or None if cause was None") {
val e = new Exception
val eDefined = newSDE(Some("howdy!"), Some(e), 17)
eDefined.cause should be (Some(e))
val eEmpty = newSDE(Some("howdy!"), None, 17)
eEmpty.cause should be (None)
}
}
}
| yyuu/scalatest | src/test/scala/org/scalatest/StackDepthExceptionSpec.scala | Scala | apache-2.0 | 4,886 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of code snippets matching specific criteria, giving a quick overview of the dataset's contents.