code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.server
import akka.util.ByteString
import org.ensime.api._
import org.ensime.util.EnsimeSpec
class FramedStringProtocolSpec extends EnsimeSpec {

  // Subclass FramedStringProtocol purely to reach the protected framing
  // helpers (writeString / tryReadString); decode and encode are irrelevant
  // to these tests, so they stay unimplemented.
  trait Proto extends FramedStringProtocol {
    override def decode(
      bytes: ByteString
    ): (Option[RpcRequestEnvelope], ByteString) = ???
    override def encode(msg: RpcResponseEnvelope): ByteString = ???
  }

  "FramedStringProtocol" should "write framed strings" in new Proto {
    // 6-digit byte-length header followed by the payload.
    writeString("foobar").utf8String shouldBe "000006foobar"
  }

  it should "write multi-byte UTF-8 strings" in new Proto {
    // "€" is 3 bytes in UTF-8, so the length header counts bytes, not chars.
    writeString("€").utf8String shouldBe "000003€"
  }

  it should "read framed strings" in new Proto {
    val result = tryReadString(ByteString("000006foobar", "UTF-8"))
    result shouldBe ((Some("foobar"), ByteString()))
  }

  it should "read multi-byte UTF-8 strings" in new Proto {
    // Only the first frame is consumed; the second stays in the remainder.
    val result = tryReadString(ByteString("000003€000003€", "UTF-8"))
    result shouldBe ((Some("€"), ByteString("000003€", "UTF-8")))
  }
}
| jozi-k/ensime-server | server/src/test/scala/org/ensime/server/FramedStringProtocolSpec.scala | Scala | gpl-3.0 | 1,343 |
package com.faacets
package tools
import data._
import spire.math.Rational
object SimpleScenarios {

  /** Small bipartite/tripartite scenarios whose polytopes are solved below. */
  val list = List(
    Scenario("{[2 2] [2 2]}"),
    Scenario("{[2 2 2] [2 2 2]}"),
    Scenario("{[3 2] [2 2]}"),
    Scenario("{[3 3] [2 2]}"),
    Scenario("{[3 3] [3 2]}"),
    Scenario("{[3 3] [3 3]}"),
    Scenario("{[2 2] [2 2] [2 2]}"),
    Scenario("{[2 2 2] [2 2]}"),
    Scenario("{[3 2 2] [2 2]}"),
    Scenario("{[3 3 2] [2 2]}"),
    Scenario("{[3 3 3] [2 2]}"),
    Scenario("{[2 2 2] [3 2]}"),
    Scenario("{[3 2 2] [3 2]}"),
    Scenario("{[3 3 2] [3 2]}"),
    Scenario("{[2 2 2] [3 3]}"),
    Scenario("{[3 2 2] [3 3]}")
  )
  /* Kept for reference; re-enable together with the `//writeNS` call in `write`.
  def writeNS {
    root.mkdir("solved")
    val folder = root.solved
    for (s <- list) {
      val ident = "NS" + s.toIdentifier
      println(ident)
      folder.mkdir(ident)
      val newFolder = folder(ident)
      val boxes = NonSignalingPolytope(s).boxes
      for (b <- boxes) {
        val data = BoxData.fromNonSignalingBox(b)
        data.sources.add("Polytope solved by Sympol.")
        data.path = newFolder.nextKey.get
        data.write()
      }
    }
  }*/

  /**
   * Solves the local polytope of every scenario in `list` and writes each
   * facet inequality (with local bound 0) under root.solved/L<identifier>.
   * Note: uses explicit `: Unit =` instead of the deprecated procedure syntax.
   */
  def writeLocal: Unit = {
    root.solved.mkdirs
    for (s <- list) {
      val ident = "L" + s.toIdentifier
      val folder = root.solved.apply(ident)
      folder.mkdirs
      val ineqs = LocalPolytope(s).inequalities
      for (i <- ineqs) {
        val data = Inequality(
          bra = i,
          localBound = Some(Rational.zero),
          localFacet = Some(true),
          sources = List("Polytope solved by Sympol."),
          findGroup = true
        )
        data.path = Some(folder.makeNextInteger())
        data.write()
      }
    }
  }

  /** Entry point: currently only the local-polytope results are written. */
  def write: Unit = {
    writeLocal
    //writeNS
  }
}
| denisrosset/faacets-families | src/main/scala/tools/SimpleScenarios.scala | Scala | bsd-3-clause | 1,727 |
/* Copyright 2009-2014 EPFL, Lausanne */
package leon.custom
import leon._
import leon.lang._
import leon.collection._
import leon.annotation._
// Leon-verified functional list (a repair-benchmark copy of Leon's library
// List). Bodies are first-order recursive and carry require/ensuring
// specifications that the Leon verifier discharges; keep them in this shape.
sealed abstract class List[T] {
// Length of the list; postcondition: never negative.
def size: BigInt = (this match {
case Nil() => BigInt(0)
case Cons(h, t) => BigInt(1) + t.size
}) ensuring (_ >= 0)
// The set of elements of this list.
def content: Set[T] = this match {
case Nil() => Set()
case Cons(h, t) => Set(h) ++ t.content
}
// Membership test; specified to agree with `content`.
def contains(v: T): Boolean = (this match {
case Cons(h, t) if h == v => true
case Cons(_, t) => t.contains(v)
case Nil() => false
}) ensuring { res => res == (content contains v) }
// Concatenation; result has combined content and summed sizes.
def ++(that: List[T]): List[T] = (this match {
case Nil() => that
case Cons(x, xs) => Cons(x, xs ++ that)
}) ensuring { res => (res.content == this.content ++ that.content) && (res.size == this.size + that.size)}
// First element; precondition: list is non-empty.
def head: T = {
require(this != Nil[T]())
this match {
case Cons(h, t) => h
}
}
// Everything but the first element; precondition: list is non-empty.
def tail: List[T] = {
require(this != Nil[T]())
this match {
case Cons(h, t) => t
}
}
// Element at 0-based position `index`; precondition: index is in bounds.
def apply(index: BigInt): T = {
require(0 <= index && index < size)
if (index == 0) {
head
} else {
tail(index-1)
}
}
// Prepend (cons).
def ::(t:T): List[T] = Cons(t, this)
// Append a single element; O(n).
def :+(t:T): List[T] = {
this match {
case Nil() => Cons(t, this)
case Cons(x, xs) => Cons(x, xs :+ (t))
}
} ensuring(res => (res.size == size + 1) && (res.content == content ++ Set(t)))
// Reversal; quadratic because each step appends with :+.
def reverse: List[T] = {
this match {
case Nil() => this
case Cons(x,xs) => xs.reverse :+ x
}
} ensuring (res => (res.size == size) && (res.content == content))
// Prefix of length i. i == 0 yields Nil; negative i is unguarded (the
// recursion then keeps every element, returning the whole list).
def take(i: BigInt): List[T] = (this, i) match {
case (Nil(), _) => Nil()
case (Cons(h, t), i) =>
if (i == 0) {
Nil()
} else {
Cons(h, t.take(i-1))
}
}
// Drops the first i elements; negative i is unguarded (recursion drops all).
def drop(i: BigInt): List[T] = (this, i) match {
case (Nil(), _) => Nil()
case (Cons(h, t), i) =>
if (i == 0) {
Cons(h, t)
} else {
t.drop(i-1)
}
}
// Sublist [from, to); note the precondition requires to < size, so a slice
// extending to the very end of the list is not allowed.
def slice(from: BigInt, to: BigInt): List[T] = {
require(from < to && to < size && from >= 0)
drop(from).take(to-from)
}
// Replaces every occurrence of `from` by `to`.
def replace(from: T, to: T): List[T] = this match {
case Nil() => Nil()
case Cons(h, t) =>
val r = t.replace(from, to)
if (h == from) {
Cons(to, r)
} else {
Cons(h, r)
}
}
// Worker for `chunks`: accumulates `acc` up to s elements, flushing it into
// `res` each time the countdown s0 reaches zero.
private def chunk0(s: BigInt, l: List[T], acc: List[T], res: List[List[T]], s0: BigInt): List[List[T]] = l match {
case Nil() =>
if (acc.size > 0) {
res :+ acc
} else {
res
}
case Cons(h, t) =>
if (s0 == 0) {
chunk0(s, l, Nil(), res :+ acc, s)
} else {
chunk0(s, t, acc :+ h, res, s0-1)
}
}
// Splits the list into consecutive chunks of size s (last may be shorter).
def chunks(s: BigInt): List[List[T]] = {
require(s > 0)
chunk0(s, this, Nil(), Nil(), s)
}
// Pairs elements positionally; result is as long as the shorter list.
def zip[B](that: List[B]): List[(T, B)] = (this, that) match {
case (Cons(h1, t1), Cons(h2, t2)) =>
Cons((h1, h2), t1.zip(t2))
case (_) =>
Nil()
}
// Removes every occurrence of e.
def -(e: T): List[T] = this match {
case Cons(h, t) =>
if (e == h) {
t - e
} else {
Cons(h, t - e)
}
case Nil() =>
Nil()
}
// Removes every element that occurs in `that`.
def --(that: List[T]): List[T] = this match {
case Cons(h, t) =>
if (that.contains(h)) {
t -- that
} else {
Cons(h, t -- that)
}
case Nil() =>
Nil()
}
// Keeps only elements that also occur in `that` (multiset-style intersection).
def &(that: List[T]): List[T] = this match {
case Cons(h, t) =>
if (that.contains(h)) {
Cons(h, t & that)
} else {
t & that
}
case Nil() =>
Nil()
}
// Appends s copies of e at the end of the list (no-op for s <= 0).
def pad(s: BigInt, e: T): List[T] = { (this, s) match {
case (_, s) if s <= 0 =>
this
case (Nil(), s) =>
Cons(e, Nil().pad(s-1, e))
case (Cons(h, t), s) =>
Cons(h, t.pad(s, e))
}} ensuring { res =>
((this,s,e), res) passes {
case (Cons(a,Nil()), BigInt(2), x) => Cons(a, Cons(x, Cons(x, Nil())))
}
}
// Index of the first occurrence of e, or None when absent.
def find(e: T): Option[BigInt] = { this match {
case Nil() => None[BigInt]()
case Cons(h, t) =>
if (h == e) {
Some(BigInt(0))
} else {
t.find(e) match {
case None() => None[BigInt]()
// FIXME forgot +1: the index returned from the tail must be shifted by
// one, so as written the ensuring clause below is violated.
// NOTE(review): likely the intentionally seeded fault of this repair
// benchmark — confirm before "fixing" it.
case Some(i) => Some(i) // FIXME forgot +1
}
}
}} ensuring { res =>
if (this.content contains e) {
res.isDefined && this.size > res.get && this.apply(res.get) == e
} else {
res.isEmpty
}
}
// All elements but the last; Nil stays Nil.
def init: List[T] = (this match {
case Cons(h, Nil()) =>
Nil[T]()
case Cons(h, t) =>
Cons[T](h, t.init)
case Nil() =>
Nil[T]()
}) ensuring ( (r: List[T]) => ((r.size < this.size) || (this.size == 0)) )
// Last element, or None when empty.
def lastOption: Option[T] = this match {
case Cons(h, t) =>
t.lastOption.orElse(Some(h))
case Nil() =>
None()
}
// First element, or None when empty.
def firstOption: Option[T] = this match {
case Cons(h, t) =>
Some(h)
case Nil() =>
None()
}
// Removes duplicate elements, keeping the first occurrence of each.
def unique: List[T] = this match {
case Nil() => Nil()
case Cons(h, t) =>
Cons(h, t.unique - h)
}
// Splits on a single separator element.
def splitAt(e: T): List[List[T]] = split(Cons(e, Nil()))
// Splits on any element of `seps`; separators are dropped and empty
// segments are kept.
def split(seps: List[T]): List[List[T]] = this match {
case Cons(h, t) =>
if (seps.contains(h)) {
Cons(Nil(), t.split(seps))
} else {
val r = t.split(seps)
Cons(Cons(h, r.head), r.tail)
}
case Nil() =>
Cons(Nil(), Nil())
}
// Number of occurrences of e.
def count(e: T): BigInt = this match {
case Cons(h, t) =>
if (h == e) {
1 + t.count(e)
} else {
t.count(e)
}
case Nil() =>
0
}
// Splits into two halves (first half shorter when size is odd).
def evenSplit: (List[T], List[T]) = {
val c = size/2
(take(c), drop(c))
}
// Inserts list l at position pos; negative pos counts from the end.
def insertAt(pos: BigInt, l: List[T]): List[T] = {
if(pos < 0) {
insertAt(size + pos, l)
} else if(pos == 0) {
l ++ this
} else {
this match {
case Cons(h, t) =>
Cons(h, t.insertAt(pos-1, l))
case Nil() =>
l
}
}
}
// Overwrites l.size elements starting at pos; negative pos counts from the end.
def replaceAt(pos: BigInt, l: List[T]): List[T] = {
if(pos < 0) {
replaceAt(size + pos, l)
} else if(pos == 0) {
l ++ this.drop(l.size)
} else {
this match {
case Cons(h, t) =>
Cons(h, t.replaceAt(pos-1, l))
case Nil() =>
l
}
}
}
// Rotates left by s positions.
// NOTE(review): `s % size` divides by zero on an empty list — confirm
// whether callers guarantee non-emptiness.
def rotate(s: BigInt): List[T] = {
if (s < 0) {
rotate(size+s)
} else {
val s2 = s % size
drop(s2) ++ take(s2)
}
}
// True iff the list is Nil.
def isEmpty = this match {
case Nil() => true
case _ => false
}
}
// Varargs convenience constructor. Marked @ignore (a Leon annotation;
// presumably excluded from verification since the ??? body is unimplemented
// — confirm against Leon docs).
@ignore
object List {
def apply[T](elems: T*): List[T] = ???
}
@library
object ListOps {
  /** Concatenates a list of lists into a single list. */
  def flatten[T](ls: List[List[T]]): List[T] = ls match {
    case Cons(h, t) => h ++ flatten(t)
    case Nil() => Nil()
  }
  /** True iff the list is sorted in non-decreasing order. */
  def isSorted(ls: List[BigInt]): Boolean = ls match {
    case Nil() => true
    case Cons(_, Nil()) => true
    case Cons(h1, Cons(h2, _)) if(h1 > h2) => false
    case Cons(_, t) => isSorted(t)
  }
  /** Insertion sort. */
  def sorted(ls: List[BigInt]): List[BigInt] = ls match {
    case Cons(h, t) => insSort(sorted(t), h)
    case Nil() => Nil()
  }
  /**
   * Inserts v into the (assumed sorted) list ls, keeping it sorted.
   * Bug fix: the previous code returned Cons(v, t) when v <= h, silently
   * dropping the head h (e.g. inserting 1 into [3] produced [1] instead of
   * [1, 3]); the entire remaining list must be kept after v.
   */
  def insSort(ls: List[BigInt], v: BigInt): List[BigInt] = ls match {
    case Nil() => Cons(v, Nil())
    case Cons(h, t) =>
      if (v <= h) {
        Cons(v, Cons(h, t))
      } else {
        Cons(h, insSort(t, v))
      }
  }
}
// Non-empty list cell: head element h followed by tail t.
case class Cons[T](h: T, t: List[T]) extends List[T]
// Empty list.
case class Nil[T]() extends List[T]
// Lemmas about List, each stated as a boolean function whose body is a
// conjunction of (inductive step) && (claim), wrapped in .holds so Leon
// attempts to prove it valid for all inputs.
@library
object ListSpecs {
// (l :+ t)(i) equals l(i) for i < l.size, and t at position l.size.
def snocIndex[T](l : List[T], t : T, i : BigInt) : Boolean = {
require(0 <= i && i < l.size + 1)
// proof:
(l match {
case Nil() => true
case Cons(x, xs) => if (i > 0) snocIndex[T](xs, t, i-1) else true
}) &&
// claim:
((l :+ t).apply(i) == (if (i < l.size) l(i) else t))
}.holds
// l.reverse(i) equals l(l.size - 1 - i).
// NOTE(review): the recursive call reverseIndex[T](l, i) repeats the same
// arguments (no structural decrease); it likely should recurse on xs —
// confirm against the upstream Leon library before changing a benchmark.
def reverseIndex[T](l : List[T], i : BigInt) : Boolean = {
require(0 <= i && i < l.size)
(l match {
case Nil() => true
case Cons(x,xs) => snocIndex(l, x, i) && reverseIndex[T](l,i)
}) &&
(l.reverse.apply(i) == l.apply(l.size - 1 - i))
}.holds
// (l1 ++ l2)(i) indexes into l1 for i < l1.size, else into l2 shifted.
def appendIndex[T](l1 : List[T], l2 : List[T], i : BigInt) : Boolean = {
require(0 <= i && i < l1.size + l2.size)
(l1 match {
case Nil() => true
case Cons(x,xs) => if (i==0) true else appendIndex[T](xs,l2,i-1)
}) &&
((l1 ++ l2).apply(i) == (if (i < l1.size) l1(i) else l2(i - l1.size)))
}.holds
// Concatenation is associative.
def appendAssoc[T](l1 : List[T], l2 : List[T], l3 : List[T]) : Boolean = {
(l1 match {
case Nil() => true
case Cons(x,xs) => appendAssoc(xs,l2,l3)
}) &&
(((l1 ++ l2) ++ l3) == (l1 ++ (l2 ++ l3)))
}.holds
// Snoc is appending a singleton.
def snocIsAppend[T](l : List[T], t : T) : Boolean = {
(l match {
case Nil() => true
case Cons(x,xs) => snocIsAppend(xs,t)
}) &&
((l :+ t) == l ++ Cons[T](t, Nil()))
}.holds
// Snoc distributes over concatenation.
def snocAfterAppend[T](l1 : List[T], l2 : List[T], t : T) : Boolean = {
(l1 match {
case Nil() => true
case Cons(x,xs) => snocAfterAppend(xs,l2,t)
}) &&
((l1 ++ l2) :+ t == (l1 ++ (l2 :+ t)))
}.holds
// Reversing a snoc conses the element onto the reversed list.
def snocReverse[T](l : List[T], t : T) : Boolean = {
(l match {
case Nil() => true
case Cons(x,xs) => snocReverse(xs,t)
}) &&
((l :+ t).reverse == Cons(t, l.reverse))
}.holds
// Reverse is an involution.
def reverseReverse[T](l : List[T]) : Boolean = {
(l match {
case Nil() => true
case Cons(x,xs) => reverseReverse[T](xs) && snocReverse[T](xs.reverse, x)
}) &&
(l.reverse.reverse == l)
}.holds
//// my hand calculation shows this should work, but it does not seem to be found
//def reverseAppend[T](l1 : List[T], l2 : List[T]) : Boolean = {
// (l1 match {
// case Nil() => true
// case Cons(x,xs) => {
// reverseAppend(xs,l2) &&
// snocAfterAppend[T](l2.reverse, xs.reverse, x) &&
// l1.reverse == (xs.reverse :+ x)
// }
// }) &&
// ((l1 ++ l2).reverse == (l2.reverse ++ l1.reverse))
//}.holds
}
| ericpony/scala-examples | testcases/repair/List/List7.scala | Scala | mit | 9,876 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.rest.kubernetes
import java.io.InputStream
import javax.ws.rs.{Consumes, GET, HeaderParam, Path, PathParam, POST, Produces}
import javax.ws.rs.core.{MediaType, StreamingOutput}
import org.glassfish.jersey.media.multipart.FormDataParam
import org.apache.spark.deploy.kubernetes.KubernetesCredentials
import org.apache.spark.deploy.kubernetes.submit.SubmittedResourceIdAndSecret
/**
* Service that receives application data that can be retrieved later on. This is primarily used
* in the context of Spark, but the concept is generic enough to be used for arbitrary applications.
* The use case is to have a place for Kubernetes application submitters to bootstrap dynamic,
* heavyweight application data for pods. Application submitters may have data stored on their
* local disks that they want to provide to the pods they create through the API server. ConfigMaps
* are one way to provide this data, but the data in ConfigMaps are stored in etcd which cannot
* maintain data in the hundreds of megabytes in size.
* <p>
* The general use case is for an application submitter to ship the dependencies to the server via
* {@link uploadResources}; the application submitter will then receive a unique secure token.
* The application submitter then ought to convert the token into a secret, and use this secret in
* a pod that fetches the uploaded dependencies via {@link downloadResources}. An application can
* provide multiple resource bundles simply by hitting the upload endpoint multiple times and
* downloading each bundle with the appropriate secret.
*/
@Path("/v0")
private[spark] trait ResourceStagingService {
/**
 * Register a resource with the dependency service, so that pods with the given labels can
 * retrieve them when they run.
 *
 * @param resources Application resources to upload, compacted together in tar + gzip format.
 * The tarball should contain the files laid out in a flat hierarchy, without
 * any directories. We take a stream here to avoid holding these entirely in
 * memory.
 * @param resourcesOwner A description of the "owner" of a resource. A resource owner is a
 * Kubernetes API object in a given namespace, with a specific set of
 * labels. When there are no resources of the owner's type in the given
 * namespace with the given labels, the resources are cleaned up. The owner
 * bundle also includes any Kubernetes credentials that are required for
 * resource staging server to watch the object's state over time.
 * @return A unique token that should be provided when retrieving these dependencies later.
 */
@POST
@Consumes(Array(MediaType.MULTIPART_FORM_DATA, MediaType.APPLICATION_JSON, MediaType.TEXT_PLAIN))
@Produces(Array(MediaType.APPLICATION_JSON))
@Path("/resources")
def uploadResources(
@FormDataParam("resources") resources: InputStream,
@FormDataParam("resourcesOwner") resourcesOwner: StagedResourcesOwner)
: SubmittedResourceIdAndSecret
/**
 * Download an application's resources. The resources are provided as a stream, where the stream's
 * underlying data matches the stream that was uploaded in uploadResources.
 * The secret returned by uploadResources must be presented in the
 * Authorization header to access the resource.
 */
@GET
@Consumes(Array(MediaType.APPLICATION_JSON))
@Produces(Array(MediaType.APPLICATION_OCTET_STREAM))
@Path("/resources/{resourceId}")
def downloadResources(
@PathParam("resourceId") resourceId: String,
@HeaderParam("Authorization") resourceSecret: String): StreamingOutput
/**
 * Health check. Returns a plain-text response; used to verify the staging
 * server is reachable and serving requests.
 */
@GET
@Consumes(Array(MediaType.APPLICATION_JSON))
@Produces(Array(MediaType.TEXT_PLAIN))
@Path("/ping")
def ping(): String
}
| kimoonkim/spark | resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/rest/kubernetes/ResourceStagingService.scala | Scala | apache-2.0 | 4,624 |
/*******************************************************************************
* Copyright 2010 Maxime Lévesque
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
***************************************************************************** */
package org.squeryl.dsl
import ast._
import collection.mutable.ArrayBuffer
import org.squeryl.Schema
import org.squeryl.internals.{AttributeValidOnMultipleColumn, ColumnAttribute, FieldMetaData}
/**
 * Base trait of the CompositeKeyN classes. A composite key is a tuple of
 * expressions — constant values or, once bound by Squeryl, column
 * references — that is compared column-wise against other composite keys.
 */
trait CompositeKey {

  // Column references, filled in by Squeryl when the key is bound to a table.
  private [squeryl] var _members: Option[Seq[SelectElementReference[_,_]]] = None

  private [squeryl] var _propertyName: Option[String] = None

  // Metadata of the underlying columns; empty while the key is unbound.
  // (Replaces the previous `_members == None` / `_members.get` with an
  // idiomatic Option pattern match.)
  private [squeryl] def _fields: Seq[FieldMetaData] =
    _members match {
      case None => List.empty
      case Some(ms) => ms.map(_.selectElement.asInstanceOf[FieldSelectElement].fieldMetaData)
    }

  protected def constantMembers: Iterable[TypedExpression[_,_]]

  // Bound column references when available, otherwise the constant values.
  protected def members: Iterable[TypedExpression[_,_]] =
    _members.getOrElse(constantMembers)

  /**
   * AND-combined column-wise equality between this key and `ck`.
   * Assumes both keys have at least one member (composite keys always do).
   */
  private [squeryl] def buildEquality(ck: CompositeKey): LogicalBoolean = {
    val equalities = (members zip ck.members).map(t => new EqualityExpression(t._1, t._2))
    // Fold the pairwise equalities into a single AND expression.
    // (An unused `head` local from the original implementation was removed.)
    equalities.tail.foldLeft(equalities.head : LogicalBoolean)(_ and _)
  }

  def is(attributes: AttributeValidOnMultipleColumn*) = new CompositeKeyAttributeAssignment(this, attributes)

  // Row-value-constructor `(a, b, ...) IN ((...), (...))` over the given keys.
  protected def inExpr(cks: Iterable[CompositeKey]): LogicalBoolean = {
    new InclusionOperator(
      new RowValueConstructorNode(members.toList),
      new RightHandSideOfIn(
        new ListExpressionNode(
          cks.toList map (ck =>
            new RowValueConstructorNode(ck.members.toList)
          )
        ),
        Some(true)
      )
    )
  }

  // Row-value-constructor `NOT IN` over the given keys.
  protected def notInExpr(cks: Iterable[CompositeKey]): LogicalBoolean = {
    new ExclusionOperator(
      new RowValueConstructorNode(members.toList),
      new RightHandSideOfIn(
        new ListExpressionNode(
          cks.toList map (ck =>
            new RowValueConstructorNode(ck.members.toList)
          )
        ),
        Some(false)
      )
    )
  }
}
/** A 2-column composite key; the implicit evN conversions lift the constant
 *  values to TypedExpression. (Redundant `val` modifiers removed: case-class
 *  parameters are already public vals.) */
case class CompositeKey2[A1,A2](a1: A1, a2: A2)(
  implicit
  ev1: A1 => TypedExpression[A1, _],
  ev2: A2 => TypedExpression[A2, _])
  extends CompositeKey {

  def ===(ck: CompositeKey2[A1,A2]) =
    buildEquality(ck)

  def ===(ck: (A1, A2)) =
    buildEquality(CompositeKey2(ck._1, ck._2))

  def in(cks: CompositeKey2[A1, A2]*) = inExpr(cks)

  def inTuples(cks: (A1, A2)*) = inExpr(cks map (ck => CompositeKey2(ck._1, ck._2)))

  def notIn(cks: CompositeKey2[A1, A2]*) = notInExpr(cks)

  def notInTuples(cks: (A1, A2)*) = notInExpr(cks map (ck => CompositeKey2(ck._1, ck._2)))

  protected def constantMembers: Iterable[TypedExpression[_,_]] = List(a1, a2)
}
/** A 3-column composite key (see [[CompositeKey2]] for conventions). */
case class CompositeKey3[A1,A2,A3](a1: A1, a2: A2, a3: A3)(
  implicit
  ev1: A1 => TypedExpression[A1, _],
  ev2: A2 => TypedExpression[A2, _],
  ev3: A3 => TypedExpression[A3, _])
  extends CompositeKey {

  def ===(ck: CompositeKey3[A1,A2,A3]) =
    buildEquality(ck)

  def ===(ck: (A1, A2, A3)) =
    buildEquality(CompositeKey3(ck._1, ck._2, ck._3))

  def in(cks: CompositeKey3[A1, A2, A3]*) = inExpr(cks)

  def inTuples(cks: (A1, A2, A3)*) = inExpr(cks map (ck => CompositeKey3(ck._1, ck._2, ck._3)))

  def notIn(cks: CompositeKey3[A1, A2, A3]*) = notInExpr(cks)

  def notInTuples(cks: (A1, A2, A3)*) = notInExpr(cks map (ck => CompositeKey3(ck._1, ck._2, ck._3)))

  protected def constantMembers: Iterable[TypedExpression[_,_]] = List(a1, a2, a3)
}
/** A 4-column composite key (see [[CompositeKey2]] for conventions). */
case class CompositeKey4[A1,A2,A3,A4](a1: A1, a2: A2, a3: A3, a4: A4)(
  implicit
  ev1: A1 => TypedExpression[A1, _],
  ev2: A2 => TypedExpression[A2, _],
  ev3: A3 => TypedExpression[A3, _],
  ev4: A4 => TypedExpression[A4, _])
  extends CompositeKey {

  def ===(ck: CompositeKey4[A1,A2,A3,A4]) =
    buildEquality(ck)

  def ===(ck: (A1, A2, A3, A4)) =
    buildEquality(CompositeKey4(ck._1, ck._2, ck._3, ck._4))

  def in(cks: CompositeKey4[A1, A2, A3, A4]*) = inExpr(cks)

  def inTuples(cks: (A1, A2, A3, A4)*) = inExpr(cks map (ck => CompositeKey4(ck._1, ck._2, ck._3, ck._4)))

  def notIn(cks: CompositeKey4[A1, A2, A3, A4]*) = notInExpr(cks)

  def notInTuples(cks: (A1, A2, A3, A4)*) = notInExpr(cks map (ck => CompositeKey4(ck._1, ck._2, ck._3, ck._4)))

  protected def constantMembers: Iterable[TypedExpression[_,_]] = List(a1, a2, a3, a4)
}
/** A 5-column composite key (see [[CompositeKey2]] for conventions). */
case class CompositeKey5[A1,A2,A3,A4,A5](a1: A1, a2: A2, a3: A3, a4: A4, a5: A5)(
  implicit
  ev1: A1 => TypedExpression[A1, _],
  ev2: A2 => TypedExpression[A2, _],
  ev3: A3 => TypedExpression[A3, _],
  ev4: A4 => TypedExpression[A4, _],
  ev5: A5 => TypedExpression[A5, _])
  extends CompositeKey {

  def ===(ck: CompositeKey5[A1,A2,A3,A4,A5]) =
    buildEquality(ck)

  def ===(ck: (A1, A2, A3, A4, A5)) =
    buildEquality(CompositeKey5(ck._1, ck._2, ck._3, ck._4, ck._5))

  def in(cks: CompositeKey5[A1, A2, A3, A4, A5]*) = inExpr(cks)

  def inTuples(cks: (A1, A2, A3, A4, A5)*) = inExpr(cks map (ck => CompositeKey5(ck._1, ck._2, ck._3, ck._4, ck._5)))

  def notIn(cks: CompositeKey5[A1, A2, A3, A4, A5]*) = notInExpr(cks)

  def notInTuples(cks: (A1, A2, A3, A4, A5)*) = notInExpr(cks map (ck => CompositeKey5(ck._1, ck._2, ck._3, ck._4, ck._5)))

  protected def constantMembers: Iterable[TypedExpression[_,_]] = List(a1, a2, a3, a4, a5)
}
/** A 6-column composite key (see [[CompositeKey2]] for conventions). */
case class CompositeKey6[A1,A2,A3,A4,A5,A6](a1: A1, a2: A2, a3: A3, a4: A4, a5: A5, a6: A6)(
  implicit
  ev1: A1 => TypedExpression[A1, _],
  ev2: A2 => TypedExpression[A2, _],
  ev3: A3 => TypedExpression[A3, _],
  ev4: A4 => TypedExpression[A4, _],
  ev5: A5 => TypedExpression[A5, _],
  ev6: A6 => TypedExpression[A6, _])
  extends CompositeKey {

  def ===(ck: CompositeKey6[A1,A2,A3,A4,A5,A6]) =
    buildEquality(ck)

  def ===(ck: (A1, A2, A3, A4, A5, A6)) =
    buildEquality(CompositeKey6(ck._1, ck._2, ck._3, ck._4, ck._5, ck._6))

  def in(cks: CompositeKey6[A1, A2, A3, A4, A5, A6]*) = inExpr(cks)

  def inTuples(cks: (A1, A2, A3, A4, A5, A6)*) = inExpr(cks map (ck => CompositeKey6(ck._1, ck._2, ck._3, ck._4, ck._5, ck._6)))

  def notIn(cks: CompositeKey6[A1, A2, A3, A4, A5, A6]*) = notInExpr(cks)

  def notInTuples(cks: (A1, A2, A3, A4, A5, A6)*) = notInExpr(cks map (ck => CompositeKey6(ck._1, ck._2, ck._3, ck._4, ck._5, ck._6)))

  protected def constantMembers: Iterable[TypedExpression[_,_]] = List(a1, a2, a3, a4, a5, a6)
}
/** A 7-column composite key (see [[CompositeKey2]] for conventions). */
case class CompositeKey7[A1,A2,A3,A4,A5,A6,A7](a1: A1, a2: A2, a3: A3, a4: A4, a5: A5, a6: A6, a7: A7)(
  implicit
  ev1: A1 => TypedExpression[A1, _],
  ev2: A2 => TypedExpression[A2, _],
  ev3: A3 => TypedExpression[A3, _],
  ev4: A4 => TypedExpression[A4, _],
  ev5: A5 => TypedExpression[A5, _],
  ev6: A6 => TypedExpression[A6, _],
  ev7: A7 => TypedExpression[A7, _])
  extends CompositeKey {

  def ===(ck: CompositeKey7[A1,A2,A3,A4,A5,A6,A7]) =
    buildEquality(ck)

  def ===(ck: (A1, A2, A3, A4, A5, A6, A7)) =
    buildEquality(CompositeKey7(ck._1, ck._2, ck._3, ck._4, ck._5, ck._6, ck._7))

  def in(cks: CompositeKey7[A1, A2, A3, A4, A5, A6, A7]*) = inExpr(cks)

  def inTuples(cks: (A1, A2, A3, A4, A5, A6, A7)*) = inExpr(cks map (ck => CompositeKey7(ck._1, ck._2, ck._3, ck._4, ck._5, ck._6, ck._7)))

  def notIn(cks: CompositeKey7[A1, A2, A3, A4, A5, A6, A7]*) = notInExpr(cks)

  def notInTuples(cks: (A1, A2, A3, A4, A5, A6, A7)*) = notInExpr(cks map (ck => CompositeKey7(ck._1, ck._2, ck._3, ck._4, ck._5, ck._6, ck._7)))

  protected def constantMembers: Iterable[TypedExpression[_,_]] = List(a1, a2, a3, a4, a5, a6, a7)
}
/** An 8-column composite key (see [[CompositeKey2]] for conventions). */
case class CompositeKey8[A1,A2,A3,A4,A5,A6,A7,A8](a1: A1, a2: A2, a3: A3, a4: A4, a5: A5, a6: A6, a7: A7, a8: A8)(
  implicit
  ev1: A1 => TypedExpression[A1, _],
  ev2: A2 => TypedExpression[A2, _],
  ev3: A3 => TypedExpression[A3, _],
  ev4: A4 => TypedExpression[A4, _],
  ev5: A5 => TypedExpression[A5, _],
  ev6: A6 => TypedExpression[A6, _],
  ev7: A7 => TypedExpression[A7, _],
  ev8: A8 => TypedExpression[A8, _])
  extends CompositeKey {

  def ===(ck: CompositeKey8[A1,A2,A3,A4,A5,A6,A7,A8]) =
    buildEquality(ck)

  def ===(ck: (A1, A2, A3, A4, A5, A6, A7, A8)) =
    buildEquality(CompositeKey8(ck._1, ck._2, ck._3, ck._4, ck._5, ck._6, ck._7, ck._8))

  def in(cks: CompositeKey8[A1, A2, A3, A4, A5, A6, A7, A8]*) = inExpr(cks)

  def inTuples(cks: (A1, A2, A3, A4, A5, A6, A7, A8)*) = inExpr(cks map (ck => CompositeKey8(ck._1, ck._2, ck._3, ck._4, ck._5, ck._6, ck._7, ck._8)))

  def notIn(cks: CompositeKey8[A1, A2, A3, A4, A5, A6, A7, A8]*) = notInExpr(cks)

  def notInTuples(cks: (A1, A2, A3, A4, A5, A6, A7, A8)*) = notInExpr(cks map (ck => CompositeKey8(ck._1, ck._2, ck._3, ck._4, ck._5, ck._6, ck._7, ck._8)))

  protected def constantMembers: Iterable[TypedExpression[_,_]] = List(a1, a2, a3, a4, a5, a6, a7, a8)
}
/** A 9-column composite key (see [[CompositeKey2]] for conventions). */
case class CompositeKey9[A1,A2,A3,A4,A5,A6,A7,A8,A9](a1: A1, a2: A2, a3: A3, a4: A4, a5: A5, a6: A6, a7: A7, a8: A8, a9: A9)(
  implicit
  ev1: A1 => TypedExpression[A1, _],
  ev2: A2 => TypedExpression[A2, _],
  ev3: A3 => TypedExpression[A3, _],
  ev4: A4 => TypedExpression[A4, _],
  ev5: A5 => TypedExpression[A5, _],
  ev6: A6 => TypedExpression[A6, _],
  ev7: A7 => TypedExpression[A7, _],
  ev8: A8 => TypedExpression[A8, _],
  ev9: A9 => TypedExpression[A9, _])
  extends CompositeKey {

  def ===(ck: CompositeKey9[A1,A2,A3,A4,A5,A6,A7,A8,A9]) =
    buildEquality(ck)

  def ===(ck: (A1, A2, A3, A4, A5, A6, A7, A8, A9)) =
    buildEquality(CompositeKey9(ck._1, ck._2, ck._3, ck._4, ck._5, ck._6, ck._7, ck._8, ck._9))

  def in(cks: CompositeKey9[A1, A2, A3, A4, A5, A6, A7, A8, A9]*) = inExpr(cks)

  def inTuples(cks: (A1, A2, A3, A4, A5, A6, A7, A8, A9)*) = inExpr(cks map (ck => CompositeKey9(ck._1, ck._2, ck._3, ck._4, ck._5, ck._6, ck._7, ck._8, ck._9)))

  def notIn(cks: CompositeKey9[A1, A2, A3, A4, A5, A6, A7, A8, A9]*) = notInExpr(cks)

  def notInTuples(cks: (A1, A2, A3, A4, A5, A6, A7, A8, A9)*) = notInExpr(cks map (ck => CompositeKey9(ck._1, ck._2, ck._3, ck._4, ck._5, ck._6, ck._7, ck._8, ck._9)))

  protected def constantMembers: Iterable[TypedExpression[_,_]] = List(a1, a2, a3, a4, a5, a6, a7, a8, a9)
}
| rreckel/Squeryl | src/main/scala/org/squeryl/dsl/CompositeKey.scala | Scala | apache-2.0 | 11,375 |
/**
* Copyright (C) 2012 Inria, University Lille 1.
*
* This file is part of PowerAPI.
*
* PowerAPI is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* PowerAPI is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with PowerAPI. If not, see <http://www.gnu.org/licenses/>.
*
* Contact: powerapi-user-list@googlegroups.com.
*/
package fr.inria.powerapi.core
import akka.actor.{Props, ActorSystem}
import scala.concurrent.Await
import scala.concurrent.duration.DurationInt
import akka.pattern.ask
import akka.util.Timeout
import org.junit.Test
import org.scalatest.junit.{ShouldMatchersForJUnit, JUnitSuite}
// Marker messages used only to exercise Component's listen-registration logic.
case class FooMessage() extends Message
case class BarMessage() extends Message
class SimpleActor extends Component {
  // Advertise interest in both test message types.
  def messagesToListen = Array(classOf[FooMessage], classOf[BarMessage])

  // Echo any received String straight back to the sender.
  def acquire = {
    case text: String => sender ! text
  }
}
class BaseSuite extends JUnitSuite with ShouldMatchersForJUnit {
  // One actor system and test actor shared by both tests.
  val system = ActorSystem("base-suite")
  val simpleActor = system.actorOf(Props[SimpleActor])
  implicit val timeout = Timeout(5.seconds)

  @Test
  def testMessagesToListen() {
    // The component must report exactly the two message classes it listens to.
    val listened = Await.result(simpleActor ? MessagesToListen, timeout.duration)
      .asInstanceOf[Array[Class[_ <: Message]]]
    listened should have size 2
    listened(0) should be(classOf[FooMessage])
    listened(1) should be(classOf[BarMessage])
  }

  @Test
  def testListen() {
    // The actor echoes strings back to the sender.
    val echoed = Await.result(simpleActor ? "hello", timeout.duration).asInstanceOf[String]
    echoed should equal("hello")
  }
}
package db
import java.util.UUID
import io.flow.dependency.v0.models._
import org.scalatest.BeforeAndAfterAll
import org.scalatest.concurrent.{Eventually, IntegrationPatience}
import util.DependencySpec
// Integration spec for the items DAO. Search "items" are materialised
// asynchronously by background task processing, hence the `eventually`
// blocks that poll until the item for a given object id appears.
class InternalItemsDaoSpec extends DependencySpec
with helpers.TaskHelpers
with BeforeAndAfterAll
with Eventually with IntegrationPatience
{
// Clear pending tasks so items queued by earlier suites don't interfere.
override def beforeAll(): Unit = {
deleteAllNonProcessedTasks()
}
private[this] lazy val org = createOrganization()
// replace is an upsert keyed on the underlying object: repeated calls keep
// the same item id and apply the latest label.
"replace" in {
val project = createProjectSummary(org)
eventually {
itemsDao.findByObjectId(Authorization.All, project.id).get
}
val form = createItemForm(org)(project)
val item1 = itemsDao.replace(systemUser, form)
val item2 = itemsDao.replace(systemUser, form)
item1.id must be(item2.id)
item1.label must be(item2.label)
val newLabel = createTestId()
val item3 = itemsDao.replace(systemUser, form.copy(
label = newLabel
))
item3.id must be(item2.id)
item3.label must be(newLabel)
}
"findById - binary" in {
val binary = createBinary(org)
val item = eventually {
itemsDao.findByObjectId(Authorization.All, binary.id).get
}
itemsDao.findById(Authorization.All, item.id).get.id must be(item.id)
}
"findById - library" in {
val library = createLibrary(org)
val item = eventually {
itemsDao.findByObjectId(Authorization.All, library.id).get
}
itemsDao.findById(Authorization.All, item.id).get.id must be(item.id)
}
"findById - project" in {
val project = createProject(org)
val item = eventually {
itemsDao.findByObjectId(Authorization.All, project.id).get
}
itemsDao.findById(Authorization.All, item.id).get.id must be(item.id)
}
"findByObjectId" in {
val binary = createBinary(org)
eventually {
itemsDao.findByObjectId(Authorization.All, binary.id).get
}
}
// Filtering by ids: unknown ids are ignored, empty id list matches nothing.
"findAll by ids" in {
val binary1 = createBinary(org)
val binary2 = createBinary(org)
val item1 = eventually {
itemsDao.findByObjectId(Authorization.All, binary1.id).get
}
val item2 = eventually {
itemsDao.findByObjectId(Authorization.All, binary2.id).get
}
itemsDao.findAll(Authorization.All, ids = Some(Seq(item1.id, item2.id)), limit = None).map(_.id).sorted must be(
Seq(item1.id, item2.id).sorted
)
itemsDao.findAll(Authorization.All, ids = Some(Nil), limit = None) must be(Nil)
itemsDao.findAll(Authorization.All, ids = Some(Seq(UUID.randomUUID.toString)), limit = None) must be(Nil)
itemsDao.findAll(Authorization.All, ids = Some(Seq(item1.id, UUID.randomUUID.toString)), limit = None).map(_.id) must be(Seq(item1.id))
}
// Each supported object type gets a label, a typed summary, and is
// findable via the free-text `q` parameter by its object id.
"supports binaries" in {
val binary = createBinary(org)
itemsDao.replaceBinary(systemUser, binary)
val actual = itemsDao.findByObjectId(Authorization.All, binary.id).getOrElse {
sys.error("Failed to create binary")
}
actual.label must be(binary.name.toString)
actual.summary must be(
BinarySummary(
id = binary.id,
organization = OrganizationSummary(org.id, org.key),
name = binary.name
)
)
itemsDao.findAll(Authorization.All, q = Some(binary.id.toString), limit = None).headOption.map(_.id) must be(Some(actual.id))
itemsDao.findAll(Authorization.All, q = Some(UUID.randomUUID.toString), limit = None) must be(Nil)
}
"supports libraries" in {
val library = createLibrary(org)()
itemsDao.replaceLibrary(systemUser, library)
val actual = itemsDao.findByObjectId(Authorization.All, library.id).getOrElse {
sys.error("Failed to create library")
}
actual.label must be(Seq(library.groupId, library.artifactId).mkString("."))
actual.summary must be(
LibrarySummary(
id = library.id,
organization = OrganizationSummary(org.id, org.key),
groupId = library.groupId,
artifactId = library.artifactId
)
)
itemsDao.findAll(Authorization.All, q = Some(library.id.toString), limit = None).headOption.map(_.id) must be(Some(actual.id))
itemsDao.findAll(Authorization.All, q = Some(UUID.randomUUID.toString), limit = None) must be(Nil)
}
"supports projects" in {
val project = createProject(org)
itemsDao.replaceProject(systemUser, project)
val actual = itemsDao.findByObjectId(Authorization.All, project.id).getOrElse {
sys.error("Failed to create project")
}
actual.label must be(project.name)
actual.summary must be(
ProjectSummary(
id = project.id,
organization = OrganizationSummary(org.id, org.key),
name = project.name
)
)
itemsDao.findAll(Authorization.All, q = Some(project.id.toString), limit = None).headOption.map(_.id) must be(Some(actual.id))
itemsDao.findAll(Authorization.All, q = Some(UUID.randomUUID.toString), limit = None) must be(Nil)
}
// Public projects are visible to every authorization scope.
"authorization for public projects" in {
val user = createUser()
val org = createOrganization(user = user)
val project = createProject(org)(createProjectForm(org).copy(visibility = Visibility.Public))
val item = itemsDao.replaceProject(systemUser, project)
itemsDao.findAll(Authorization.PublicOnly, objectId = Some(project.id), limit = None).map(_.id) must be(Seq(item.id))
itemsDao.findAll(Authorization.All, objectId = Some(project.id), limit = None).map(_.id) must be(Seq(item.id))
itemsDao.findAll(Authorization.Organization(org.id), objectId = Some(project.id), limit = None).map(_.id) must be(Seq(item.id))
itemsDao.findAll(Authorization.Organization(createOrganization().id), objectId = Some(project.id), limit = None).map(_.id) must be(Seq(item.id))
itemsDao.findAll(Authorization.User(user.id), objectId = Some(project.id), limit = None).map(_.id) must be(Seq(item.id))
}
// Private projects are visible only to the owning org, its members, and
// the all-access scope.
"authorization for private projects" in {
val user = createUser()
val org = createOrganization(user = user)
val project = createProject(org)(createProjectForm(org).copy(visibility = Visibility.Private))
val item = itemsDao.replaceProject(systemUser, project)
itemsDao.findAll(Authorization.PublicOnly, objectId = Some(project.id), limit = None) must be(Nil)
itemsDao.findAll(Authorization.All, objectId = Some(project.id), limit = None).map(_.id) must be(Seq(item.id))
itemsDao.findAll(Authorization.Organization(org.id), objectId = Some(project.id), limit = None).map(_.id) must be(Seq(item.id))
itemsDao.findAll(Authorization.Organization(createOrganization().id), objectId = Some(project.id), limit = None) must be(Nil)
itemsDao.findAll(Authorization.User(user.id), objectId = Some(project.id), limit = None).map(_.id) must be(Seq(item.id))
itemsDao.findAll(Authorization.User(createUser().id), objectId = Some(project.id), limit = None) must be(Nil)
}
}
| flowcommerce/dependency | api/test/db/InternalItemsDaoSpec.scala | Scala | mit | 6,850 |
package pl.touk.nussknacker.engine.util.namespaces
import com.typesafe.config.Config
import com.typesafe.scalalogging.LazyLogging

import pl.touk.nussknacker.engine.api.namespaces._
import pl.touk.nussknacker.engine.util.namespaces.DefaultNamespacedObjectNamingParameters.{namespaceTag, originalNameTag}

import scala.collection.concurrent.TrieMap
import scala.util.matching.Regex
/*
  This is the default ObjectNaming. It assumes the namespace is configured via the
  configuration; if no namespace is configured, object names are left untouched.
 */
object DefaultNamespacedObjectNaming extends ObjectNaming with LazyLogging {

  final val NamespacePath = "namespace"

  /**
   * Compiling a [[Regex]] is relatively expensive, so compiled patterns are cached per
   * namespace. The previous implementation kept an immutable `Map.empty` and looked it up
   * with `getOrElse`, which never stored anything — the pattern was recompiled on every
   * call. A concurrent map with `getOrElseUpdate` makes the cache effective and thread-safe.
   * TODO: Consider replacing it by caffeine if we'll need many namespaces
   */
  protected val regexMap: TrieMap[String, Regex] = TrieMap.empty

  /** Prefixes `originalName` with `"<namespace>_"` when a namespace is configured,
    * otherwise returns the name unchanged. */
  override def prepareName(originalName: String, config: Config, namingContext: NamingContext): String =
    forNamespace(config) { namespace =>
      logger.debug(s"Prepending $namespace to $originalName for ${namingContext.usageKey}")
      s"${namespace}_$originalName"
    }.getOrElse {
      logger.debug(s"Namespace has not been configured, $originalName left")
      originalName
    }

  /** Returns the naming parameters (original name + namespace) when a namespace is
    * configured, `None` otherwise. */
  override def objectNamingParameters(originalName: String, config: Config, namingContext: NamingContext): Option[ObjectNamingParameters] = {
    forNamespace(config) { namespace =>
      DefaultNamespacedObjectNamingParameters(originalName, namespace)
    }
  }

  /** Strips the configured namespace prefix from `preparedName`.
    *
    * When a namespace is configured but `preparedName` does not carry the
    * `"<namespace>_"` prefix, `None` is returned; when no namespace is configured
    * the name is returned as-is.
    */
  override def decodeName(preparedName: String, config: Config, namingContext: NamingContext): Option[String] = {
    forNamespace(config) { namespace =>
      val patternMatcher = namespacePattern(namespace)
      preparedName match {
        case patternMatcher(value) => Some(value)
        case _ => Option.empty
      }
    }.getOrElse(Some(preparedName))
  }

  /** Runs `action` with the configured namespace, or returns `None` when the
    * `namespace` key is absent from `config`. */
  private def forNamespace[T](config: Config)(action: String => T): Option[T] = {
    if (config.hasPath(NamespacePath)) {
      Some(action(config.getString(NamespacePath)))
    } else {
      None
    }
  }

  // Compiles the "<namespace>_(.*)" pattern only on the first use per namespace.
  private def namespacePattern(namespace: String): Regex =
    regexMap.getOrElseUpdate(namespace, s"${namespace}_(.*)".r)
}
object DefaultNamespacedObjectNamingParameters {
  // Tag keys emitted by DefaultNamespacedObjectNamingParameters.toTags.
  final val originalNameTag = "originalProcessName"
  final val namespaceTag = "namespace"
}
// Naming parameters produced when a namespace is configured; exposes the
// original name and the namespace as tags (e.g. for metrics/logging).
case class DefaultNamespacedObjectNamingParameters(originalName: String,
                                                   namespace: String) extends ObjectNamingParameters {
  override def toTags: Map[String, String] = {
    Map(
      originalNameTag -> originalName,
      namespaceTag -> namespace
    )
  }
}
| TouK/nussknacker | utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/namespaces/DefaultNamespacedObjectNaming.scala | Scala | apache-2.0 | 2,664 |
package lila.app
import akka.actor._
import com.typesafe.config.Config
/** Application environment: wires the lila modules together, boots the
  * renderer/router actors and (unless running as an AI server) touches each
  * module's lazy Env so their side effects start at boot time. */
final class Env(
    config: Config,
    val scheduler: lila.common.Scheduler,
    system: ActorSystem,
    appPath: String) {
  val CliUsername = config getString "cli.username"
  private val RendererName = config getString "app.renderer.name"
  private val RouterName = config getString "app.router.name"
  // NOTE(review): WebPath is read from config but not referenced in this class —
  // confirm external use before removing.
  private val WebPath = config getString "app.web_path"
  lazy val bus = lila.common.Bus(system)
  // Aggregated data needed to render the home/lobby page.
  lazy val preloader = new mashup.Preload(
    tv = Env.tv.tv,
    leaderboard = Env.user.cached.topToday,
    tourneyWinners = Env.tournament.winners.scheduled,
    timelineEntries = Env.timeline.entryRepo.userEntries _,
    dailyPuzzle = Env.puzzle.daily,
    streamsOnAir = () => Env.tv.streamsOnAir.all,
    countRounds = Env.round.count,
    lobbyApi = Env.api.lobbyApi,
    getPlayban = Env.playban.api.currentBan _,
    lightUser = Env.user.lightUser)
  // Aggregated data for a user profile page (partially applied: note trailing `_`).
  lazy val userInfo = mashup.UserInfo(
    countUsers = () => Env.user.countEnabled,
    bookmarkApi = Env.bookmark.api,
    relationApi = Env.relation.api,
    trophyApi = Env.user.trophyApi,
    gameCached = Env.game.cached,
    crosstableApi = Env.game.crosstableApi,
    postApi = Env.forum.postApi,
    getRatingChart = Env.history.ratingChartApi.apply,
    getRanks = Env.user.cached.ranking.getAll,
    isDonor = Env.donation.isDonor,
    isHostingSimul = Env.simul.isHosting,
    isStreamer = Env.tv.isStreamer.apply,
    insightShare = Env.insight.share) _
  // Boot-time actors: page renderer and HTTP router, named from config.
  system.actorOf(Props(new actor.Renderer), name = RendererName)
  system.actorOf(Props(new actor.Router(
    baseUrl = Env.api.Net.BaseUrl,
    protocol = Env.api.Net.Protocol,
    domain = Env.api.Net.Domain
  )), name = RouterName)
  // Referencing each module's lazy Env forces its initialization (actors,
  // schedulers, subscriptions) now instead of on first request.
  if (!Env.ai.ServerOnly) {
    play.api.Logger("boot").info("Preloading modules")
    List(Env.socket,
      Env.site,
      Env.tournament,
      Env.lobby,
      Env.game,
      Env.setup,
      Env.round,
      Env.team,
      Env.message,
      Env.timeline,
      Env.gameSearch,
      Env.teamSearch,
      Env.forumSearch,
      Env.relation,
      Env.report,
      Env.notification,
      Env.bookmark,
      Env.pref,
      Env.chat,
      Env.puzzle,
      Env.tv,
      Env.blog,
      Env.video,
      Env.shutup, // required to load the actor
      Env.insight // required to load the actor
    )
    play.api.Logger("boot").info("Preloading complete")
  }
  if (Env.ai.ServerOnly) println("Running as AI server")
}
/** Lazy application singleton: `current` boots the Env from the running Play
  * application; the accessors below resolve each module's own Env singleton. */
object Env {
  lazy val current = "app" boot new Env(
    config = lila.common.PlayApp.loadConfig,
    scheduler = lila.common.PlayApp.scheduler,
    system = lila.common.PlayApp.system,
    appPath = lila.common.PlayApp withApp (_.path.getCanonicalPath))
  def api = lila.api.Env.current
  def db = lila.db.Env.current
  def user = lila.user.Env.current
  def security = lila.security.Env.current
  def wiki = lila.wiki.Env.current
  def hub = lila.hub.Env.current
  def socket = lila.socket.Env.current
  def message = lila.message.Env.current
  def notification = lila.notification.Env.current
  def i18n = lila.i18n.Env.current
  def game = lila.game.Env.current
  def bookmark = lila.bookmark.Env.current
  def search = lila.search.Env.current
  def gameSearch = lila.gameSearch.Env.current
  def timeline = lila.timeline.Env.current
  def forum = lila.forum.Env.current
  def forumSearch = lila.forumSearch.Env.current
  def team = lila.team.Env.current
  def teamSearch = lila.teamSearch.Env.current
  def ai = lila.ai.Env.current
  def analyse = lila.analyse.Env.current
  def mod = lila.mod.Env.current
  def monitor = lila.monitor.Env.current
  def site = lila.site.Env.current
  def round = lila.round.Env.current
  def lobby = lila.lobby.Env.current
  def setup = lila.setup.Env.current
  def importer = lila.importer.Env.current
  def tournament = lila.tournament.Env.current
  def simul = lila.simul.Env.current
  def relation = lila.relation.Env.current
  def report = lila.report.Env.current
  def pref = lila.pref.Env.current
  def chat = lila.chat.Env.current
  def puzzle = lila.puzzle.Env.current
  def coordinate = lila.coordinate.Env.current
  def tv = lila.tv.Env.current
  def blog = lila.blog.Env.current
  def donation = lila.donation.Env.current
  def qa = lila.qa.Env.current
  def history = lila.history.Env.current
  def opening = lila.opening.Env.current
  def video = lila.video.Env.current
  def playban = lila.playban.Env.current
  def shutup = lila.shutup.Env.current
  def insight = lila.insight.Env.current
}
| terokinnunen/lila | app/Env.scala | Scala | mit | 4,526 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import org.json4s.jackson.JsonMethods._
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.LocalSparkSession
import org.apache.spark.sql.execution.ui.{SparkListenerSQLExecutionEnd, SparkListenerSQLExecutionStart}
import org.apache.spark.sql.test.TestSparkSession
import org.apache.spark.util.JsonProtocol
class SQLJsonProtocolSuite extends SparkFunSuite with LocalSparkSession {
  // Old-format event JSON (SparkPlanInfo with a "metadata" field) must still
  // deserialize into the current SparkListenerSQLExecutionStart shape.
  test("SparkPlanGraph backward compatibility: metadata") {
    val SQLExecutionStartJsonString =
      """
        |{
        |  "Event":"org.apache.spark.sql.execution.ui.SparkListenerSQLExecutionStart",
        |  "executionId":0,
        |  "description":"test desc",
        |  "details":"test detail",
        |  "physicalPlanDescription":"test plan",
        |  "sparkPlanInfo": {
        |    "nodeName":"TestNode",
        |    "simpleString":"test string",
        |    "children":[],
        |    "metadata":{},
        |    "metrics":[]
        |  },
        |  "time":0
        |}
      """.stripMargin
    val reconstructedEvent = JsonProtocol.sparkEventFromJson(parse(SQLExecutionStartJsonString))
    val expectedEvent = SparkListenerSQLExecutionStart(0, "test desc", "test detail", "test plan",
      new SparkPlanInfo("TestNode", "test string", Nil, Map(), Nil), 0)
    assert(reconstructedEvent == expectedEvent)
  }
  // The transient fields (duration, executionName, qe, executionFailure) must
  // NOT be serialized; only executionId and time survive the JSON round trip.
  test("SparkListenerSQLExecutionEnd backward compatibility") {
    spark = new TestSparkSession()
    val qe = spark.sql("select 1").queryExecution
    val event = SparkListenerSQLExecutionEnd(1, 10)
    event.duration = 1000
    event.executionName = Some("test")
    event.qe = qe
    event.executionFailure = Some(new RuntimeException("test"))
    val json = JsonProtocol.sparkEventToJson(event)
    assert(json == parse(
      """
        |{
        |  "Event" : "org.apache.spark.sql.execution.ui.SparkListenerSQLExecutionEnd",
        |  "executionId" : 1,
        |  "time" : 10
        |}
      """.stripMargin))
    val readBack = JsonProtocol.sparkEventFromJson(json)
    // Reset the non-serialized fields so equality with the deserialized event holds.
    event.duration = 0
    event.executionName = None
    event.qe = null
    event.executionFailure = None
    assert(readBack == event)
  }
}
| maropu/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/SQLJsonProtocolSuite.scala | Scala | apache-2.0 | 2,999 |
/*
* Copyright (C) 2009-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package play.docs
import java.io.InputStream
import play.doc.{ FileHandle, FileRepository }
/**
* A file repository that aggregates multiple file repositories
*
* @param repos The repositories to aggregate
*/
class AggregateFileRepository(repos: Seq[FileRepository]) extends FileRepository {

  def this(repos: Array[FileRepository]) = this(repos.toSeq)

  /** Applies `load` to each repository in declaration order and returns the
    * first defined result, stopping as soon as one is found. */
  private def fromFirstRepo[A](load: FileRepository => Option[A]) =
    repos.iterator.map(load).collectFirst { case Some(result) => result }

  def loadFile[A](path: String)(loader: (InputStream) => A) = fromFirstRepo(_.loadFile(path)(loader))

  def handleFile[A](path: String)(handler: (FileHandle) => A) = fromFirstRepo(_.handleFile(path)(handler))

  def findFileWithName(name: String) = fromFirstRepo(_.findFileWithName(name))
}
| Shenker93/playframework | framework/src/play-docs/src/main/scala/play/docs/AggregateFileRepository.scala | Scala | apache-2.0 | 843 |
package gitbucket.core.controller
import gitbucket.core.issues.milestones.html
import gitbucket.core.service.{RepositoryService, MilestonesService, AccountService}
import gitbucket.core.util.{ReferrerAuthenticator, CollaboratorsAuthenticator}
import gitbucket.core.util.Implicits._
import jp.sf.amateras.scalatra.forms._
// Concrete controller: mixes the milestone routes with their service and
// authenticator implementations.
class MilestonesController extends MilestonesControllerBase
  with MilestonesService with RepositoryService with AccountService
  with ReferrerAuthenticator with CollaboratorsAuthenticator
// Routes for listing, creating, editing, closing, reopening and deleting
// repository milestones. Read routes require referrer access; mutating routes
// require collaborator access.
trait MilestonesControllerBase extends ControllerBase {
  self: MilestonesService with RepositoryService
    with ReferrerAuthenticator with CollaboratorsAuthenticator =>

  // Form backing milestone creation and editing.
  case class MilestoneForm(title: String, description: Option[String], dueDate: Option[java.util.Date])

  // Validation: title required (max 100 chars); description and due date optional.
  val milestoneForm = mapping(
    "title" -> trim(label("Title", text(required, maxlength(100)))),
    "description" -> trim(label("Description", optional(text()))),
    "dueDate" -> trim(label("Due Date", optional(date())))
  )(MilestoneForm.apply)

  // List milestones, filtered by ?state= (defaults to "open").
  get("/:owner/:repository/issues/milestones")(referrersOnly { repository =>
    html.list(
      params.getOrElse("state", "open"),
      getMilestonesWithIssueCount(repository.owner, repository.name),
      repository,
      hasWritePermission(repository.owner, repository.name, context.loginAccount))
  })

  // Show the creation form.
  get("/:owner/:repository/issues/milestones/new")(collaboratorsOnly {
    html.edit(None, _)
  })

  // Create a milestone and go back to the list.
  post("/:owner/:repository/issues/milestones/new", milestoneForm)(collaboratorsOnly { (form, repository) =>
    createMilestone(repository.owner, repository.name, form.title, form.description, form.dueDate)
    redirect(s"/${repository.owner}/${repository.name}/issues/milestones")
  })

  // Show the edit form; 404 when the id is not numeric or the milestone is missing.
  get("/:owner/:repository/issues/milestones/:milestoneId/edit")(collaboratorsOnly { repository =>
    params("milestoneId").toIntOpt.map{ milestoneId =>
      html.edit(getMilestone(repository.owner, repository.name, milestoneId), repository)
    } getOrElse NotFound
  })

  // Apply edits to an existing milestone.
  post("/:owner/:repository/issues/milestones/:milestoneId/edit", milestoneForm)(collaboratorsOnly { (form, repository) =>
    params("milestoneId").toIntOpt.flatMap{ milestoneId =>
      getMilestone(repository.owner, repository.name, milestoneId).map { milestone =>
        updateMilestone(milestone.copy(title = form.title, description = form.description, dueDate = form.dueDate))
        redirect(s"/${repository.owner}/${repository.name}/issues/milestones")
      }
    } getOrElse NotFound
  })

  // Close a milestone.
  get("/:owner/:repository/issues/milestones/:milestoneId/close")(collaboratorsOnly { repository =>
    params("milestoneId").toIntOpt.flatMap{ milestoneId =>
      getMilestone(repository.owner, repository.name, milestoneId).map { milestone =>
        closeMilestone(milestone)
        redirect(s"/${repository.owner}/${repository.name}/issues/milestones")
      }
    } getOrElse NotFound
  })

  // Reopen a closed milestone.
  get("/:owner/:repository/issues/milestones/:milestoneId/open")(collaboratorsOnly { repository =>
    params("milestoneId").toIntOpt.flatMap{ milestoneId =>
      getMilestone(repository.owner, repository.name, milestoneId).map { milestone =>
        openMilestone(milestone)
        redirect(s"/${repository.owner}/${repository.name}/issues/milestones")
      }
    } getOrElse NotFound
  })

  // Delete a milestone.
  get("/:owner/:repository/issues/milestones/:milestoneId/delete")(collaboratorsOnly { repository =>
    params("milestoneId").toIntOpt.flatMap{ milestoneId =>
      getMilestone(repository.owner, repository.name, milestoneId).map { milestone =>
        deleteMilestone(repository.owner, repository.name, milestone.milestoneId)
        redirect(s"/${repository.owner}/${repository.name}/issues/milestones")
      }
    } getOrElse NotFound
  })
}
| intermezzo-fr/gitbucket | src/main/scala/gitbucket/core/controller/MilestonesController.scala | Scala | apache-2.0 | 3,769 |
// scalastyle:off line.size.limit
/*
* Ported by Alistair Johnson from
* https://github.com/gwtproject/gwt/blob/master/user/test/com/google/gwt/emultest/java/math/BigIntegerDivideTest.java
*/
// scalastyle:on line.size.limit
package org.scalajs.testsuite.javalib.math
import java.math.BigInteger
import org.scalajs.jasminetest.JasmineTest
/** Tests for BigInteger divide/remainder/mod/divideAndRemainder, ported from GWT.
  *
  * The original file repeated the same boilerplate in every test (a dead-store
  * `Array.ofDim` allocation immediately overwritten, plus an identical
  * byte-compare loop). The boilerplate is factored into the private helpers
  * below; every test keeps its original name, order, inputs and assertions.
  */
object BigIntegerDivideTest extends JasmineTest {

  /** Shorthand for `new BigInteger(sign, magnitude)`; Int literals narrow to Byte. */
  private def bi(sign: Int, magnitude: Byte*): BigInteger =
    new BigInteger(sign, magnitude.toArray)

  /** Compares `result` positionally against the expected magnitude bytes, then
    * checks the signum. The loop iterates over the ACTUAL bytes (like the
    * original inline checks), so an over-long result fails on `rBytes(i)`.
    */
  private def expectResult(result: BigInteger, rBytes: Array[Byte], rSign: Int): Unit = {
    val resBytes = result.toByteArray()
    for (i <- 0 until resBytes.length) {
      expect(resBytes(i)).toEqual(rBytes(i))
    }
    expect(result.signum()).toEqual(rSign)
  }

  /** Checks `a.divide(b)` against the expected bytes and sign. */
  private def checkDivide(a: BigInteger, b: BigInteger, rBytes: Array[Byte], rSign: Int): Unit =
    expectResult(a.divide(b), rBytes, rSign)

  /** Checks `a.remainder(b)` against the expected bytes and sign. */
  private def checkRemainder(a: BigInteger, b: BigInteger, rBytes: Array[Byte], rSign: Int): Unit =
    expectResult(a.remainder(b), rBytes, rSign)

  /** Checks `a.mod(b)` against the expected bytes and sign. */
  private def checkMod(a: BigInteger, b: BigInteger, rBytes: Array[Byte], rSign: Int): Unit =
    expectResult(a.mod(b), rBytes, rSign)

  describe("BigIntegerDivideTest") {

    it("testCase1") {
      // Division by a zero-valued BigInteger (sign 0, magnitude {0}) must throw.
      val aNumber = bi(1, 1, 2, 3, 4, 5, 6, 7)
      val bNumber = bi(0, 0)
      expect(() => aNumber.divide(bNumber)).toThrow()
    }

    it("testCase10") {
      // negative / negative -> positive quotient
      checkDivide(
        bi(-1, 1, 100, 56, 7, 98, -1, 39, -128, 127, 5, 6, 7, 8, 9),
        bi(-1, 15, 48, -29, 7, 98, -1, 39, -128),
        Array[Byte](23, 115, 11, 78, 35, -11), 1)
    }

    it("testCase11") {
      // zero dividend -> zero quotient
      checkDivide(
        bi(0, 0),
        bi(-1, 15, 48, -29, 7, 98, -1, 39, -128),
        Array[Byte](0), 0)
    }

    it("testCase12") {
      // BigInteger.ZERO / b -> zero
      checkDivide(
        BigInteger.ZERO,
        bi(-1, 15, 48, -29, 7, 98, -1, 39, -128),
        Array[Byte](0), 0)
    }

    it("testCase13") {
      // a / ONE -> a
      checkDivide(
        bi(1, 15, 48, -29, 7, 98, -1, 39, -128),
        BigInteger.ONE,
        Array[Byte](15, 48, -29, 7, 98, -1, 39, -128), 1)
    }

    it("testCase14") {
      // ONE / ONE -> ONE
      checkDivide(BigInteger.ONE, BigInteger.ONE, Array[Byte](1), 1)
    }

    it("testCase15") {
      // Remainder by a zero-valued BigInteger must throw.
      val aNumber = bi(1, 1, 2, 3, 4, 5, 6, 7)
      val bNumber = bi(0, 0)
      expect(() => aNumber.remainder(bNumber)).toThrow()
    }

    it("testCase16") {
      // a rem a -> zero
      checkRemainder(
        bi(1, -127, 100, 56, 7, 98, -1, 39, -128, 127),
        bi(1, -127, 100, 56, 7, 98, -1, 39, -128, 127),
        Array[Byte](0), 0)
    }

    it("testCase17") {
      checkRemainder(
        bi(1, -127, 100, 56, 7, 98, -1, 39, -128, 127, 75),
        bi(1, 27, -15, 65, 39, 100),
        Array[Byte](12, -21, 73, 56, 27), 1)
    }

    it("testCase18") {
      // remainder takes the dividend's sign
      checkRemainder(
        bi(-1, -127, 100, 56, 7, 98, -1, 39, -128, 127, 75),
        bi(-1, 27, -15, 65, 39, 100),
        Array[Byte](-13, 20, -74, -57, -27), -1)
    }

    it("testCase19") {
      checkRemainder(
        bi(1, -127, 100, 56, 7, 98, -1, 39, -128, 127, 75),
        bi(-1, 27, -15, 65, 39, 100),
        Array[Byte](12, -21, 73, 56, 27), 1)
    }

    it("testCase2") {
      // a / BigInteger.ZERO must throw.
      val aNumber = bi(1, 1, 2, 3, 4, 5, 6, 7)
      expect(() => aNumber.divide(BigInteger.ZERO)).toThrow()
    }

    it("testCase20") {
      checkRemainder(
        bi(-1, -127, 100, 56, 7, 98, -1, 39, -128, 127, 75),
        bi(1, 27, -15, 65, 39, 100),
        Array[Byte](-13, 20, -74, -57, -27), -1)
    }

    it("testCase21") {
      // divideAndRemainder: element 0 is the quotient, element 1 the remainder.
      // NOTE: the original asserted the remainder's signum INSIDE the byte loop;
      // that quirk is preserved verbatim.
      val aNumber = bi(-1, -127, 100, 56, 7, 98, -1, 39, -128, 127, 75)
      val bNumber = bi(1, 27, -15, 65, 39, 100)
      val rBytes = Array[Array[Byte]](Array[Byte](-5, 94, -115, -74, -85, 84), Array[Byte](-13, 20, -74, -57, -27))
      val result = aNumber.divideAndRemainder(bNumber)
      val quotBytes = result(0).toByteArray()
      for (i <- 0 until quotBytes.length) {
        expect(quotBytes(i)).toEqual(rBytes(0)(i))
      }
      expect(result(0).signum()).toEqual(-1)
      val remBytes = result(1).toByteArray()
      for (i <- 0 until remBytes.length) {
        expect(remBytes(i)).toEqual(rBytes(1)(i))
        expect(result(1).signum()).toEqual(-1)
      }
    }

    it("testCase22") {
      // mod with a negative modulus must throw.
      val aNumber = bi(1, 1, 2, 3, 4, 5, 6, 7)
      val bNumber = bi(-1, 1, 30, 40, 56, -1, 45)
      expect(() => aNumber.mod(bNumber)).toThrow()
    }

    it("testCase23") {
      checkMod(
        bi(1, -127, 100, 56, 7, 98, -1, 39, -128, 127, 75),
        bi(1, 27, -15, 65, 39, 100),
        Array[Byte](12, -21, 73, 56, 27), 1)
    }

    it("testCase24") {
      // mod of a negative dividend is still non-negative
      checkMod(
        bi(-1, -127, 100, 56, 7, 98, -1, 39, -128, 127, 75),
        bi(1, 27, -15, 65, 39, 100),
        Array[Byte](15, 5, -9, -17, 73), 1)
    }

    it("testCase3") {
      // a / a -> 1
      checkDivide(
        bi(1, -127, 100, 56, 7, 98, -1, 39, -128, 127),
        bi(1, -127, 100, 56, 7, 98, -1, 39, -128, 127),
        Array[Byte](1), 1)
    }

    it("testCase4") {
      // -a / a -> -1
      checkDivide(
        bi(-1, -127, 100, 56, 7, 98, -1, 39, -128, 127),
        bi(1, -127, 100, 56, 7, 98, -1, 39, -128, 127),
        Array[Byte](-1), -1)
    }

    it("testCase5") {
      // |a| < |b| -> 0
      checkDivide(
        bi(-1, -127, 100, 56, 7, 98, -1, 39, -128, 127),
        bi(1, -127, 100, 56, 7, 98, -1, 39, -128, 127, 1, 2, 3, 4, 5),
        Array[Byte](0), 0)
    }

    it("testCase6") {
      checkDivide(
        bi(1, 1, 100, 56, 7, 98, -1, 39, -128, 127),
        bi(1, 15, 100, 56, 7, 98, -1, 39, -128, 127),
        Array[Byte](0), 0)
    }

    it("testCase7") {
      checkDivide(
        bi(1, 1, 100, 56, 7, 98, -1, 39, -128, 127, 5, 6, 7, 8, 9),
        bi(1, 15, 48, -29, 7, 98, -1, 39, -128),
        Array[Byte](23, 115, 11, 78, 35, -11), 1)
    }

    it("testCase8") {
      checkDivide(
        bi(1, 1, 100, 56, 7, 98, -1, 39, -128, 127, 5, 6, 7, 8, 9),
        bi(-1, 15, 48, -29, 7, 98, -1, 39, -128),
        Array[Byte](-24, -116, -12, -79, -36, 11), -1)
    }

    it("testCase9") {
      checkDivide(
        bi(-1, 1, 100, 56, 7, 98, -1, 39, -128, 127, 5, 6, 7, 8, 9),
        bi(1, 15, 48, -29, 7, 98, -1, 39, -128),
        Array[Byte](-24, -116, -12, -79, -36, 11), -1)
    }

    it("testDivisionKnuth1") {
      checkDivide(
        bi(1, -7, -6, -5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7),
        bi(1, -3, -3, -3, -3),
        Array[Byte](0, -5, -12, -33, -96, -36, -105, -56, 92, 15, 48, -109), 1)
    }

    it("testDivisionKnuthFirstDigitsEqual") {
      checkDivide(
        bi(-1, 2, -3, -4, -5, -1, -5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5),
        bi(-1, 2, -3, -4, -5, -1, -1, -1, -1),
        Array[Byte](0, -1, -1, -1, -1, -2, -88, -60, 41), 1)
    }

    it("testDivisionKnuthIsNormalized") {
      checkDivide(
        bi(-1, -9, -8, -7, -6, -5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5),
        bi(-1, -1, -1, -1, -1, -1, -1, -1, -1),
        Array[Byte](0, -9, -8, -7, -6, -5, -4, -3), 1)
    }

    it("testDivisionKnuthMultiDigitsByOneDigit") {
      checkDivide(
        bi(1, 113, -83, 123, -5, 18, -34, 67, 39, -29),
        bi(-1, 2, -3, -4, -5),
        Array[Byte](-38, 2, 7, 30, 109, -43), -1)
    }

    it("testDivisionKnuthOneDigitByOneDigit") {
      checkDivide(
        bi(1, 113, -83, 123, -5),
        bi(-1, 2, -3, -4, -5),
        Array[Byte](-37), -1)
    }

    it("testRemainderKnuth1") {
      checkRemainder(
        bi(1, -9, -8, -7, -6, -5, -4, -3, -2, -1, 0, 1),
        bi(1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10),
        Array[Byte](1, 2, 3, 4, 5, 6, 7, 7, 18, -89), 1)
    }

    it("testRemainderKnuthMultiDigitsByOneDigit") {
      checkRemainder(
        bi(1, 113, -83, 123, -5, 18, -34, 67, 39, -29),
        bi(-1, 2, -3, -4, -50),
        Array[Byte](2, -37, -60, 59), 1)
    }

    it("testRemainderKnuthOneDigitByOneDigit") {
      checkRemainder(
        bi(1, 113, -83, 123, -5),
        bi(-1, 2, -3, -4, -50),
        Array[Byte](2, -9, -14, 53), 1)
    }
  }
}
| renyaoxiang/scala-js | test-suite/src/test/scala/org/scalajs/testsuite/javalib/math/BigIntegerDivideTest.scala | Scala | bsd-3-clause | 19,269 |
package ucesoft.cbm.misc
import java.awt.{Color, Component, Dimension, FlowLayout}
import javax.swing._
import ucesoft.cbm.peripheral.rs232.{BridgeRS232, RS232}
object RS232ConfigPanel {
  // Drivers registered at startup via registerAvailableRS232Drivers.
  private var AVAILABLE_RS232 : Array[RS232] = _
  // NOTE(review): presumably the driver currently in use — not referenced in the
  // visible code; confirm usage elsewhere in this file.
  private var activeRs232 : Option[RS232] = None
  // Configuration dialog, built once by initConfigPanel.
  private var configPanel : JDialog = _
  // Driver chosen in the dialog's radio buttons, prior to Apply.
  private var selectingRs232 : Option[RS232] = None
  // Label mirroring the current connection address in the dialog.
  private val connectedToLabel = new JLabel()
  // Status panel that also forwards connect/disconnect events to connectedToLabel.
  private val RS232StatusPanel = new RS232StatusPanel {
    override def connectedTo(address: String): Unit = {
      super.connectedTo(address)
      connectedToLabel.setText(address)
    }
    override def disconnected: Unit = {
      super.disconnected
      connectedToLabel.setText("")
    }
  }
  // Valid only after registerAvailableRS232Drivers has been called.
  def RS232ConfigDialog : JDialog = configPanel
  // Stores the drivers, builds the dialog and hooks the status listener into the bridge.
  def registerAvailableRS232Drivers(parent:JFrame,rs232Drivers:Array[RS232]): Unit = {
    AVAILABLE_RS232 = rs232Drivers
    configPanel = initConfigPanel(parent)
    BridgeRS232.setRS232Listener(RS232StatusPanel)
  }
private def initConfigPanel(parent:JFrame) : JDialog = {
val dialog = new JDialog(parent,"RS232 Configuration panel")
dialog.setIconImage(new ImageIcon(getClass.getResource("/resources/commodore.png")).getImage)
val pane = dialog.getContentPane
val group = new ButtonGroup
val conf = new JTextField(30)
val applyButton = new JButton("Apply")
conf.addActionListener(_ => applySelected(dialog,conf.getText) )
applyButton.addActionListener(_ => applySelected(dialog,conf.getText) )
conf.setEnabled(false)
val radios = AVAILABLE_RS232 map { r =>
val radio = new JRadioButton(r.toString)
group.add(radio)
radio.addActionListener(_ => {
if (radio.isSelected) {
selectingRs232 = Some(r)
conf.setToolTipText(r.getDescription)
conf.setEnabled(true)
}
} )
radio.setAlignmentX(Component.LEFT_ALIGNMENT)
radio
}
val radioPanel = new JPanel
radioPanel.setBorder(BorderFactory.createTitledBorder("RS-232 drivers"))
radioPanel.setLayout(new BoxLayout(radioPanel,BoxLayout.Y_AXIS))
val noneRadio = new JRadioButton("None")
noneRadio.setSelected(true)
group.add(noneRadio)
noneRadio.addActionListener(_ => {
conf.setToolTipText("")
conf.setEnabled(false)
selectingRs232 = None
})
radioPanel.add(noneRadio)
for(r <- radios) radioPanel.add(r)
val confPanel = new JPanel
confPanel.add(new JLabel("Connection string:"))
confPanel.add(conf)
confPanel.add(applyButton)
val connPanel = new JPanel(new FlowLayout(FlowLayout.LEFT))
connectedToLabel.setForeground(Color.BLUE)
connPanel.add(new JLabel("Connected to:"))
connPanel.add(connectedToLabel)
connPanel.setAlignmentX(Component.LEFT_ALIGNMENT)
confPanel.setAlignmentX(Component.LEFT_ALIGNMENT)
radioPanel.add(confPanel)
radioPanel.add(connPanel)
pane.add("Center",radioPanel)
pane.add("South",RS232StatusPanel)
RS232StatusPanel.setVisible(true)
val handshakePanel = new JPanel(new FlowLayout(FlowLayout.LEFT))
handshakePanel.setBorder(BorderFactory.createTitledBorder("RS-232 handshacking"))
val hscb = new JCheckBox("RS-232 flow control enabled")
hscb.addActionListener(_ => BridgeRS232.setFlowControlEnabled(hscb.isSelected) )
handshakePanel.add(hscb)
pane.add("North",handshakePanel)
dialog.pack()
dialog.setResizable(false)
dialog.setDefaultCloseOperation(WindowConstants.HIDE_ON_CLOSE)
dialog
}
private def applySelected(parent:JDialog,conf:String): Unit = {
selectingRs232 match {
case None => // disable RS-232
activeRs232 match {
case Some(ars232) =>
ars232.setEnabled(false)
activeRs232 = None
BridgeRS232.unsetRS232
JOptionPane.showMessageDialog(parent,"RS-232 disabled", "RS-232 configuration",JOptionPane.INFORMATION_MESSAGE)
case None =>
}
case Some(ars232) =>
try {
ars232.setConfiguration(conf)
activeRs232 foreach {
_.setEnabled(false)
}
activeRs232 = Some(ars232)
BridgeRS232.setRS232(ars232)
BridgeRS232.setEnabled(true)
JOptionPane.showMessageDialog(parent,"RS-232 enabled with new configuration", "RS-232 configuration",JOptionPane.INFORMATION_MESSAGE)
}
catch {
case t:Throwable =>
JOptionPane.showMessageDialog(parent,t.toString, "RS-232 configuration error",JOptionPane.ERROR_MESSAGE)
}
}
}
}
| abbruzze/kernal64 | Kernal64/src/ucesoft/cbm/misc/RS232ConfigPanel.scala | Scala | mit | 4,579 |
package scutil.naming
/** Import target: `import scutil.naming.implicits._` brings this module's implicits into scope. */
object implicits extends implicits

/** Mix-in form of the above; aggregates the module's instances. */
trait implicits
  extends instances
| ritschwumm/scutil | modules/jdk/src/main/scala/scutil/naming/implicits.scala | Scala | bsd-2-clause | 94 |
package atlas
package tokens
/** A lexed token: the raw matched text plus its position in the source. */
sealed trait Token extends Product {
  // Exact text this token was matched from (or a synthetic lexeme, see below).
  def raw: String
  // Line/column where the token starts.
  def pos: LinePos
}

// Tokens matched directly from the input text.
case class Whitespace(raw: String)(val pos: LinePos) extends Token
case class Identifier(raw: String)(val pos: LinePos) extends Token
case class Reserve(raw: String)(val pos: LinePos) extends Token
case class Newline(raw: String)(val pos: LinePos) extends Token
case class Unknown(raw: String)(val pos: LinePos) extends Token
case class Comment(raw: String)(val pos: LinePos) extends Token
case class Integer(raw: String)(val pos: LinePos) extends Token
case class Boolean(raw: String)(val pos: LinePos) extends Token

// The tokens below are currently generated by the lexer.
// This is why, as default, their lexemes are empty.
// However, future design considerations may allow these tokens
// to accept other lexemes (e.g. brackets for Indentations).
case class Indent(raw: String = "indent")(val pos: LinePos) extends Token
case class Dedent(raw: String = "dedent")(val pos: LinePos) extends Token
case class Badent(raw: String = "badent")(val pos: LinePos) extends Token
case class EOF(raw: String = "EOF")(val pos: LinePos) extends Token
| jankdc/atlas | src/main/scala/atlas/tokens/tokens.scala | Scala | mit | 1,153 |
package com.socrata.balboa.server
import org.eclipse.jetty.http.HttpStatus._
import org.scalatra.{ActionResult, BadRequest, NotAcceptable}
/** A computed HTTP response: the Content-Type to serve it with plus the action result. */
case class ResponseWithType(contentType: String, result: ActionResult)

/** Error payload serialised into response bodies: HTTP status code plus message. */
case class Error(error: Int, message: String)
object ResponseWithType {
  val json = "application/json; charset=utf-8"
  val protobuf = "application/x-protobuf"

  // All 400 responses share the same JSON error shape.
  private def badRequestJson(message: String): ResponseWithType =
    ResponseWithType(json, BadRequest(Error(BAD_REQUEST_400, message)))

  /** 406 response for unsupported Accept headers. */
  def unacceptable: ResponseWithType =
    ResponseWithType(json, NotAcceptable(Error(NOT_ACCEPTABLE_406, "Not acceptable.")))

  /** 400 response for a missing required parameter. */
  def required(parameter: String): ResponseWithType =
    badRequestJson(s"Parameter $parameter required.")

  /** 400 response for an unparseable date parameter. */
  def malformedDate(parameter: String): ResponseWithType =
    badRequestJson("Unable to parse date " + parameter)

  /** 400 response for any other unparseable parameter. */
  def badRequest(parameter: String, msg: String): ResponseWithType =
    badRequestJson(s"Unable to parse $parameter : " + msg)
}
| socrata-platform/balboa | balboa-http/src/main/scala/com/socrata/balboa/server/ResponseWithType.scala | Scala | apache-2.0 | 990 |
package coursier.cache
import java.io.{File, IOException}
import java.nio.channels.{FileChannel, FileLock, OverlappingFileLockException}
import java.nio.file.{Files, Path, StandardOpenOption}
import java.util.concurrent.{Callable, ConcurrentHashMap}
import coursier.paths.{CachePath, Util}
import scala.annotation.tailrec
object CacheLocks {

  /** Should be acquired when doing operations changing the file structure of the cache (creating
    * new directories, creating / acquiring locks, ...), so that these don't hinder each other.
    *
    * Should hopefully address some transient errors seen on the CI of ensime-server.
    */
  def withStructureLock[T](cache: File)(f: => T): T =
    CachePath.withStructureLock(cache, new Callable[T] { def call() = f })

  // Same as above, for NIO paths.
  def withStructureLock[T](cache: Path)(f: => T): T =
    CachePath.withStructureLock(cache, new Callable[T] { def call() = f })

  /** Runs `f` while holding an exclusive OS-level lock on `file`'s companion lock file.
    *
    * When the lock is already taken, `ifLocked` is evaluated instead; if it yields
    * `None` the acquisition is retried, if it yields `Some(t)` then `t` is returned.
    */
  def withLockOr[T](
    cache: File,
    file: File
  )(
    f: => T,
    ifLocked: => Option[T]
  ): T = {

    val lockFile = CachePath.lockFile(file).toPath
    var channel: FileChannel = null

    // Parent-directory creation and lock-channel creation happen under the
    // global structure lock, so concurrent cache writers don't race on mkdirs.
    withStructureLock(cache) {
      Util.createDirectories(lockFile.getParent)
      channel = FileChannel.open(
        lockFile,
        StandardOpenOption.CREATE,
        StandardOpenOption.WRITE
      )
    }

    @tailrec
    def loop(): T = {

      // None means "retry", Some(res) means "done with result res".
      val resOpt = {
        var lock: FileLock = null
        try {
          // kind of meh…
          // same workaround as https://github.com/sbt/launcher/blob/24b07ded3edab14f574cbfb2064e6e30cc048618/launcher-implementation/src/main/scala/xsbt/boot/Locks.scala#L55-L71
          // for those pesky "Resource deadlock avoided" errors
          var deadlockAvoided = false
          lock =
            try channel.tryLock()
            catch {
              case ex: IOException if ex.getMessage == "Resource deadlock avoided" =>
                deadlockAvoided = true
                Thread.sleep(200L)
                null
            }
          if (deadlockAvoided)
            None // pause done above, retry acquisition
          else if (lock == null)
            ifLocked // tryLock returned null: lock held elsewhere
          else
            try Some(f)
            finally {
              // Release, close and remove the lock file once the work is done.
              lock.release()
              lock = null
              channel.close()
              channel = null
              Files.deleteIfExists(lockFile)
            }
        }
        catch {
          case _: OverlappingFileLockException =>
            // lock already held within this JVM
            ifLocked
        }
        finally if (lock != null)
          lock.release()
      }

      resOpt match {
        case Some(res) => res
        case None =>
          loop()
      }
    }

    try loop()
    finally if (channel != null)
      channel.close()
  }

  /** Like `withLockOr`, but immediately reports an `ArtifactError.Locked` when the lock is taken. */
  def withLockFor[T](
    cache: File,
    file: File
  )(f: => Either[ArtifactError, T]): Either[ArtifactError, T] =
    withLockOr(cache, file)(f, Some(Left(new ArtifactError.Locked(file))))

  // In-process, per-URL mutual exclusion; the map value is just a marker object.
  private val urlLocks = new ConcurrentHashMap[String, Object]
  private val urlLockDummyObject = new Object

  /** Runs `f` if no other thread is currently working on `url`; returns None otherwise. */
  def withUrlLock[T](url: String)(f: => T): Option[T] = {
    val prev = urlLocks.putIfAbsent(url, urlLockDummyObject)
    if (prev == null)
      try Some(f)
      finally urlLocks.remove(url)
    else
      None
  }
}
| coursier/coursier | modules/cache/jvm/src/main/scala/coursier/cache/CacheLocks.scala | Scala | apache-2.0 | 3,223 |
package com.github.tarao
package bullet
/** A monad to resolve a collection all together.
*
* A monad instance can implicitly be converted into an option value
* of a resolved object. A list of monads can implicitly be
* converted into a list of resolved objects.
*/
sealed trait Monad[R] {
  /** Resolves this monad on its own, yielding the result if one was produced. */
  def run(): Option[R]
  /** Like `run()`, but falls back to `default` when nothing was resolved. */
  def runWithDefault(default: R): R
}
object Monad {
  /** Common shape of the concrete monad cases below.
    *
    * The extra type parameters track the structure of the computation so that
    * `Internal.run` can resolve a whole batch of identically-shaped monads in
    * one go: `R` is the result type, `Q` an intermediate result type, and
    * `M`/`N` the concrete monad types before/after the bind.
    */
  sealed abstract class Sig[R, Q, N, M](implicit
    monad1: M <:< Monad[Q],
    check1: IsConcreteType[M],
    monad2: N <:< Monad[R],
    check2: IsConcreteType[N]
  ) extends Monad[R] {
    private type This = Sig[R, Q, N, M]
    def map[S](f: R => S): Monad.FlatMapped[S, R, Unit[S], This] =
      Monad.FlatMapped({ (x: R) => Monad.Unit(f(x)) }, this)
    def flatMap[S, Q, N, M](f: R => Sig[S, Q, N, M])(implicit
      monad3: M <:< Monad[Q],
      check3: IsConcreteType[M],
      monad4: N <:< Monad[S],
      check4: IsConcreteType[N]
    ): Monad.FlatMapped[S, R, Sig[S, Q, N, M], This] = Monad.FlatMapped(f, this)
    // Resolves this monad alone (a batch of one).
    def run(): Option[R] = {
      implicit val guard: RunOnImplicitConversion = new RunOnImplicitConversion
      Monad.run(this)
    }
    def runWithDefault(default: R): R = run.getOrElse(default)
  }
  /** A class to create a monad instance from an object of the result type. */
  case class Unit[R](value: R) extends Sig[R, Null, Null, Null] {
    protected[Monad] def run(ms: Seq[Unit[R]]): Seq[R] = ms.map(_.value)
  }
  /** A class to define a resolution from source values to target values.
    *
    * Override `run` to define a concrete resolution.
    */
  abstract case class Resolve[R, Q](value: Q) extends Sig[R, Q, Null, Null] {
    // Resolves the source values of all of `ms` together (the whole point of
    // this library: one batched lookup instead of N separate ones).
    protected[Monad] def run(ms: Seq[Resolve[R, Q]]): Seq[R]
  }
  // Result of `map`/`flatMap`: binds `f` over the result of the inner monad `m`.
  case class FlatMapped[R, Q, N, M](
    f: Q => N, m: M
  )(implicit
    monad1: M <:< Monad[Q],
    check1: IsConcreteType[M],
    monad2: N <:< Monad[R],
    check2: IsConcreteType[N]
  ) extends Sig[R, Q, N, M] {
    protected[Monad] def run(ms: Seq[FlatMapped[R, Q, N, M]]): Seq[R] = {
      // Resolve all inner monads in one batch, then all monads produced by the
      // continuations in another batch.
      val fs = ms.map(_.f)
      val mapped = Internal.run(ms.map { m => monad1(m.m) })
      Internal.run((fs, mapped).zipped.map { (f, m) => monad2(f(m)) })
    }
  }
  private[Monad] object Internal {
    // These are literally unsafe. Overall type safety is achieved by
    // assuming that `Seq(m)` and `ms` are of the same type. This is
    // guaranteed externally by `IsConcreteType[]` and type arguments
    // of `Sig[]`.
    private def run[R](m: Unit[R])(ms: Seq[Monad[R]]): Seq[R] =
      m.run(ms.asInstanceOf[Seq[Unit[R]]])
    private def run[R, Q](m: Resolve[R, Q])(ms: Seq[Monad[R]]): Seq[R] =
      m.run(ms.asInstanceOf[Seq[Resolve[R, Q]]])
    private def run[R, Q, N, M](
      m: FlatMapped[R, Q, N, M]
    )(ms: Seq[Monad[R]]): Seq[R] =
      m.run(ms.asInstanceOf[Seq[FlatMapped[R, Q, N, M]]])
    // Dispatches on the first element; all of `ms` are assumed to share its shape.
    private[Monad] def run[R](ms: Seq[Monad[R]]): Seq[R] = ms match {
      case (m @ Unit(_)) +: _ => run(m)(ms)
      case (m @ Resolve(_)) +: _ => run(m)(ms)
      case (m @ FlatMapped(_, _)) +: _ => run(m)(ms)
      case Seq() => Seq.empty
    }
  }
  import scala.annotation.implicitNotFound
  // Compile-time evidence that a monad type parameter is one of the concrete
  // cases above (or Null for "unused"), backing Internal's unchecked casts.
  @implicitNotFound("Invalid monad type: ${M}")
  private[Monad] class IsConcreteType[M](val dummy: Int = 0) extends AnyVal
  private[Monad] object IsConcreteType {
    type This[M] = IsConcreteType[M]
    implicit val none: This[Null] = new This[Null]
    implicit def sig[R, Q, N, M](implicit
      monad1: M <:< Monad[Q],
      check1: IsConcreteType[M],
      monad2: N <:< Monad[R],
      check2: IsConcreteType[N]
    ): This[Sig[R, Q, N, M]] = new This[Sig[R, Q, N, M]]
    implicit def unit[R]: This[Unit[R]] = new This[Unit[R]]
    implicit def resolve[R, Q]: This[Resolve[R, Q]] = new This[Resolve[R, Q]]
    implicit def flatMapped[R, Q, N, M](implicit
      monad1: M <:< Monad[Q],
      check1: IsConcreteType[M],
      monad2: N <:< Monad[R],
      check2: IsConcreteType[N]
    ): This[FlatMapped[R, Q, N, M]] = new This[FlatMapped[R, Q, N, M]]
  }
  import scala.language.implicitConversions
  // Marker implicit gating the implicit-conversion runs below, so that they
  // only fire where explicitly enabled.
  class RunOnImplicitConversion(val dummy: Int = 0) extends AnyVal
  // Resolves a whole sequence of monads together (the batched path).
  implicit def run[R, M](ms: Seq[M])(implicit
    guard: RunOnImplicitConversion,
    monad: M <:< Monad[R],
    check: IsConcreteType[M]
  ): Seq[R] = Internal.run(ms.asInstanceOf[Seq[Monad[R]]])
  implicit def run[R, M](m: M)(implicit
    guard: RunOnImplicitConversion,
    monad: M <:< Monad[R],
    check: IsConcreteType[M]
  ): Option[R] = run(Seq(m)).headOption
  // Variants for results that are themselves sequences: resolve, then flatten.
  implicit def flatten[R, M, T](ms: Seq[M])(implicit
    guard: RunOnImplicitConversion,
    monad: M <:< Monad[T],
    check: IsConcreteType[M],
    seq: T => Seq[R]
  ): Seq[R] = run(ms).flatten
  implicit def flatten[R, M, T](m: M)(implicit
    guard: RunOnImplicitConversion,
    monad: M <:< Monad[T],
    check: IsConcreteType[M],
    seq: T => Seq[R]
  ): Seq[R] = run(Seq(m)).flatten
  /** A type class to run `Monad[]`s all together */
  implicit class Runnable[R, M, S](ms: S)(implicit
    seq: S => Seq[M],
    monad: M <:< Monad[R],
    check: IsConcreteType[M]
  ) {
    def run(): Seq[R] = {
      implicit val guard = new RunOnImplicitConversion
      Monad.run(ms)
    }
  }
  /** A type class to run each element of `Monad[]`s separately. */
  class Divergent[R](val ms: Seq[Monad[R]]) extends AnyVal {
    def run(): Seq[R] = ms.map(_.run).flatten
  }
  implicit class Divergeable[R, S](ms: S)(implicit seq: S => Seq[Monad[R]]) {
    def diverge(): Divergent[R] = new Divergent(seq(ms))
  }
  // Base "no default" policy: leaves the resolved option untouched.
  class Fallback[T] {
    def hasValue(): Boolean = false
    def fallback(option: Option[T]): Option[T] = option
  }
  object Fallback { implicit def none[T]: Fallback[T] = new Fallback[T] }
  /** A default value provider.
    *
    * If you wish to receive the result of `Monad[]` not in an option
    * value, declare an implicit value of `Default[]` of the result
    * type.  For example, if you wish to receive an `Engine` rather
    * than an `Option[Engine]`, declare an implicit value of
    * `Default[Engine]`.
    * {{{
    *   implicit val defaultEngine: Default[Engine] =
    *     Default[Engine] { /* a default value : Engine */ }
    * }}}
    */
  class Default[T](default: () => T) extends Fallback[T] {
    override def hasValue(): Boolean = true
    override def fallback(option: Option[T]): Option[T] =
      option.orElse(Some(default()))
    def apply(option: Option[T]): T = option.getOrElse { default() }
  }
  object Default {
    def apply[T](default: => T): Default[T] =
      new Default[T]({ () => default })
  }
  // With a Default[R] in scope, implicit resolution yields the bare value.
  implicit def runWithDefault[R, M](m: M)(implicit
    guard: RunOnImplicitConversion,
    monad: M <:< Monad[R],
    check: IsConcreteType[M],
    unoption: Default[R]
  ): R = unoption(run(m))
  /** A type tag to forbid implicit conversion on a list of monads.
    *
    * It forbids an conversion from `Seq[Monad[SingleValue[T]]]` to
    * `Seq[T]`.  This is useful when you provide no `Resolve.run`
    * which resolves multiple values all together but provide one
    * which resolves each element separately (via `Seq.map` for
    * example) and want to prevent users from expecting that they can
    * be resolved at once.
    */
  case class SingleValue[T](value: T) extends AnyVal
  object SingleValue {
    // $COVERAGE-OFF$
    implicit def runToSingleValueIsProhibited[R, M](m: Seq[M])(implicit
      guard: RunOnImplicitConversion,
      monad: M <:< Monad[SingleValue[R]],
      check: IsConcreteType[M]
    ): Seq[SingleValue[R]] = sys.error("unexpected")
    implicit def runnableOfSingleValueIsProhibited[R, M, S](ms: S)(implicit
      seq: S => Seq[M],
      monad: M <:< Monad[R],
      check: IsConcreteType[M]
    ): Runnable[R, M, S] = sys.error("unexpected")
    // $COVERAGE-ON$
    // Single-element runs stay allowed; only the batched forms are blocked.
    implicit def run[R, M](m: M)(implicit
      guard: RunOnImplicitConversion,
      monad: M <:< Monad[SingleValue[R]],
      check: IsConcreteType[M]
    ): Option[R] = Monad.run(m).map(_.value)
    implicit def flatten[R, M, T](m: M)(implicit
      guard: RunOnImplicitConversion,
      monad: M <:< Monad[SingleValue[T]],
      check: IsConcreteType[M],
      seq: T => Seq[R]
    ): Seq[R] = Monad.run(Seq(m)).map(_.value).flatten
    implicit def runWithDefault[R, M](m: M)(implicit
      guard: RunOnImplicitConversion,
      monad: M <:< Monad[SingleValue[R]],
      check: IsConcreteType[M],
      unoption: Default[R]
    ): R = unoption(SingleValue.run(m))
  }
}
| tarao/bullet-scala | src/main/scala/com/github/tarao/bullet/Monad.scala | Scala | mit | 8,452 |
package com.eevolution.context.dictionary.infrastructure.repository
import java.util.UUID
import com.eevolution.context.dictionary.domain._
import com.eevolution.context.dictionary.domain.model.{Attribute, Entity}
import com.eevolution.context.dictionary.infrastructure.db.DbContext._
import com.eevolution.utils.PaginatedSequence
import com.lightbend.lagom.scaladsl.persistence.jdbc.JdbcSession
import scala.concurrent.{ExecutionContext, Future}
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: victor.perez@e-evolution.com, http://www.e-evolution.com , http://github.com/e-Evolution
* Created by victor.perez@e-evolution.com , www.e-evolution.com
*/
/**
* Entity Repository
* @param session
* @param executionContext
*/
class EntityRepository(session: JdbcSession)(implicit executionContext: ExecutionContext)
  extends api.repository.EntityRepository[Entity , Int]
  with EntityMapping
  with AttributeMapping {

  /** Loads the entity with the given id.
    * The future fails with [[NoSuchElementException]] when the id is unknown. */
  def getById(id: Int): Future[Entity] = {
    Future(
      run(queryEntity.filter(_.entityId == lift(id))).headOption
        .getOrElse(throw new NoSuchElementException(s"No Entity with id $id"))
    )
  }

  /** All attributes belonging to the entity with the given id. */
  def getAttributes(id: Int): Future[List[Attribute]] = {
    Future(run(queryAttribute.filter(_.entityId == lift(id))))
  }

  /** Loads the entity with the given UUID.
    * The future fails with [[NoSuchElementException]] when the uuid is unknown. */
  def getByUUID(uuid: UUID): Future[Entity] = {
    Future(
      run(queryEntity.filter(_.uuid == lift(uuid.toString))).headOption
        .getOrElse(throw new NoSuchElementException(s"No Entity with uuid $uuid"))
    )
  }

  /** All entities, unpaged. */
  def getAll() : Future[List[Entity]] = {
    Future(run(queryEntity))
  }

  /** One page of entities plus paging metadata. */
  def getAllByPage(page: Int, pageSize: Int): Future[PaginatedSequence[Entity]] = {
    val offset = page * pageSize
    val limit = (page + 1) * pageSize // exclusive end index of the page
    for {
      count <- countEntity()
      elements <- if (offset > count) Future.successful(Nil)
                  else selectEntity(offset, limit)
    } yield {
      PaginatedSequence(elements, page, pageSize, count)
    }
  }

  private def countEntity() = {
    Future(run(queryEntity.size).toInt)
  }

  // `limit` is an exclusive end index, so only `limit - offset` (= pageSize)
  // elements belong to this page. The previous implementation took `limit`
  // elements after dropping `offset`, returning up to two pages worth of rows.
  // NOTE(review): paging is applied in memory after fetching every row;
  // pushing drop/take into the quill query would avoid loading the whole
  // table — left unchanged here to keep the generated SQL identical.
  private def selectEntity(offset: Int, limit: Int): Future[Seq[Entity]] = {
    Future(run(queryEntity).drop(offset).take(limit - offset).toSeq)
  }
}
| adempiere/ADReactiveSystem | dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/repository/EntityRepository.scala | Scala | gpl-3.0 | 2,692 |
/**
* Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
* See accompanying LICENSE file.
*/
package loader
import controllers.KafkaManagerContext
import features.ApplicationFeatures
import models.navigation.Menus
import play.api.ApplicationLoader
import play.api.ApplicationLoader.Context
import play.api.BuiltInComponentsFromContext
import play.api.i18n.I18nComponents
import play.api.routing.Router
import router.Routes
import controllers.BasicAuthenticationFilter
/**
* Created by hiral on 12/2/15.
*/
/** Play application loader using compile-time DI: components are wired by hand. */
class KafkaManagerLoader extends ApplicationLoader {
  def load(context: Context) = {
    new ApplicationComponents(context).application
  }
}
class ApplicationComponents(context: Context) extends BuiltInComponentsFromContext(context) with I18nComponents {

  // Feature toggles and navigation menus derived from configuration.
  private[this] implicit val applicationFeatures = ApplicationFeatures.getApplicationFeatures(context.initialConfiguration.underlying)
  private[this] implicit val menus = new Menus
  // Shared KafkaManager wrapper, tied to the application lifecycle for shutdown.
  private[this] val kafkaManagerContext = new KafkaManagerContext(applicationLifecycle, context.initialConfiguration)
  // Controllers are lazy so each is instantiated only when routing first needs it.
  private[this] lazy val applicationC = new controllers.Application(messagesApi, kafkaManagerContext)
  private[this] lazy val clusterC = new controllers.Cluster(messagesApi, kafkaManagerContext)
  private[this] lazy val topicC = new controllers.Topic(messagesApi, kafkaManagerContext)
  private[this] lazy val logKafkaC = new controllers.Logkafka(messagesApi, kafkaManagerContext)
  private[this] lazy val consumerC = new controllers.Consumer(messagesApi, kafkaManagerContext)
  private[this] lazy val preferredReplicaElectionC= new controllers.PreferredReplicaElection(messagesApi, kafkaManagerContext)
  private[this] lazy val reassignPartitionsC = new controllers.ReassignPartitions(messagesApi, kafkaManagerContext)
  private[this] lazy val kafkaStateCheckC = new controllers.api.KafkaStateCheck(messagesApi, kafkaManagerContext)
  private[this] lazy val assetsC = new controllers.Assets(httpErrorHandler)
  private[this] lazy val webJarsAssetsC = new controllers.WebJarAssets(httpErrorHandler, context.initialConfiguration, context.environment)
  private[this] lazy val apiHealthC = new controllers.ApiHealth(messagesApi)

  // Optional HTTP basic auth applied to every request.
  override lazy val httpFilters = Seq(BasicAuthenticationFilter(context.initialConfiguration))

  // Generated router wired with the controllers above, mounted under the
  // configured context path (play.http.context), if any.
  override val router: Router = new Routes(
    httpErrorHandler,
    applicationC,
    clusterC,
    topicC,
    logKafkaC,
    consumerC,
    preferredReplicaElectionC,
    reassignPartitionsC,
    kafkaStateCheckC,
    assetsC,
    webJarsAssetsC,
    apiHealthC
  ).withPrefix(context.initialConfiguration.getString("play.http.context").orNull)
}
| krux/kafka-manager | app/loader/KafkaManagerLoader.scala | Scala | apache-2.0 | 2,681 |
package rugloom.util
import javax.script.ScriptEngineManager
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.IMain
/**
* RugLoom - Explorative analysis pipeline prototype
* Created by oliverr on 7/31/2015.
*/
object IMainApp extends App {
  // Anchor type for embeddedDefaults: presumably its classloader lets the
  // embedded interpreter pick up this application's classpath — confirm
  // against scala.tools.nsc MutableSettings.embeddedDefaults docs.
  trait Whatever

  val settings = new Settings()
  settings.embeddedDefaults[Whatever]

  // Build an embedded Scala interpreter and evaluate a trivial expression.
  val imain = new IMain(settings)
  imain.interpret("2+3")
}
| curoli/rugloom-client | app/rugloom/util/IMainApp.scala | Scala | mit | 415 |
package mypipe.producer
import java.util
import mypipe.api.event.{AlterEvent, Mutation}
import mypipe.api.producer.Producer
import scala.collection.JavaConverters._
/** Producer that simply appends mutations to an in-memory queue (used for testing). */
class QueueProducer(queue: util.Queue[Mutation]) extends Producer(config = null) {

  // Nothing is buffered beyond the queue itself, so flushing trivially succeeds.
  override def flush() = true

  // Schema changes are acknowledged but not enqueued.
  override def handleAlter(event: AlterEvent): Boolean = true

  /** Appends all mutations to the backing queue; always reports success. */
  override def queueList(mutationz: List[Mutation]): Boolean = {
    queue.addAll(mutationz.asJava)
    true
  }

  /** Appends a single mutation to the backing queue; always reports success. */
  override def queue(mutation: Mutation): Boolean = {
    queue.add(mutation)
    true
  }

  override def toString: String = {
    s"QueueProducer(elems=${queue.size})"
  }
}
| mardambey/mypipe | mypipe-api/src/main/scala/mypipe/producer/QueueProducer.scala | Scala | apache-2.0 | 638 |
package keystoneml.nodes.stats
import keystoneml.workflow.Transformer
/**
* Transformer that maps a Seq[Any] of objects to a Seq[(Any, Double)] of (unique object, weighting_scheme(tf)),
* where tf is the number of times the unique object appeared in the original Seq[Any],
* and the weighting_scheme is a lambda of Double => Double that defaults to the identity function.
*
* As an example, the following would return a transformer that maps a Seq[Any]
* to all objects seen with the log of their count plus 1:
* {{{
* TermFrequency(x => math.log(x) + 1)
* }}}
*
* @param fun the weighting scheme to apply to the frequencies (defaults to identity)
*/
case class TermFrequency[T](fun: Double => Double = identity) extends Transformer[Seq[T], Seq[(T, Double)]] {
  override def apply(in: Seq[T]): Seq[(T, Double)] = {
    // Count occurrences of each distinct item, then weight each raw count.
    val grouped = in.groupBy(identity)
    grouped.map { case (item, occurrences) => (item, fun(occurrences.size)) }.toSeq
  }
}
| amplab/keystone | src/main/scala/keystoneml/nodes/stats/TermFrequency.scala | Scala | apache-2.0 | 887 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.data
import org.geotools.factory.Hints
import org.geotools.filter.text.ecql.ECQL
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo.TestWithDataStore
import org.locationtech.geomesa.features.avro.AvroSimpleFeatureFactory
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.locationtech.geomesa.utils.geotools.SftBuilder
import org.locationtech.geomesa.utils.geotools.SftBuilder.Opts
import org.locationtech.geomesa.utils.stats.Cardinality
import org.locationtech.geomesa.utils.text.WKTUtils
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
@RunWith(classOf[JUnitRunner])
class HighCardinalityAttributeOrQueryTest extends Specification with TestWithDataStore {

  // Two indexed string attributes differing only in declared cardinality,
  // plus default date and point-geometry attributes.
  val spec = new SftBuilder()
    .stringType("high", Opts(index = true, cardinality = Cardinality.HIGH))
    .stringType("low", Opts(index = true, cardinality = Cardinality.LOW))
    .date("dtg", default = true)
    .point("geom", default = true)
    .getSpec

  val numFeatures = 10

  val builder = AvroSimpleFeatureFactory.featureBuilder(sft)

  // Features h0/l0 .. h9/l9, one second and 0.1 degree apart.
  val features = (0 until numFeatures).map { i =>
    builder.set("geom", WKTUtils.read(s"POINT(45.0 45.$i)"))
    builder.set("dtg", f"2014-01-01T01:00:$i%02d.000Z")
    builder.set("high", "h" + i.toString)
    builder.set("low", "l" + i.toString)
    val sf = builder.buildFeature(i.toString)
    // keep the feature id we assigned instead of a generated one
    sf.getUserData.update(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE)
    sf
  }

  addFeatures(features)

  "AccumuloDataStore" should {
    "return correct features for high cardinality OR attribute queries" >> {
      // Runs attrPart combined with a bbox + time filter that covers all
      // features, and expects every feature back.
      def query(attrPart: String) = {
        val filterString = s"($attrPart) AND BBOX(geom, 40.0,40.0,50.0,50.0) AND dtg DURING 2014-01-01T00:00:00+00:00/2014-01-01T23:59:59+00:00"
        val filter = ECQL.toFilter(filterString)
        val res = SelfClosingIterator(ds.getFeatureSource("HighCardinalityAttributeOrQueryTest").getFeatures(filter).features)
        res.length mustEqual numFeatures
      }
      // Same predicate expressed once as IN (...) and once as a chain of ORs.
      val inQuery = s"high in (${(0 until numFeatures).map(i => s"'h$i'").mkString(", ")})"
      val orQuery = (0 until numFeatures).map( i => s"high = 'h$i'").mkString(" OR ")
      Seq(inQuery, orQuery).forall(query)
    }
  }
}
| jahhulbert-ccri/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/data/HighCardinalityAttributeOrQueryTest.scala | Scala | apache-2.0 | 2,794 |
package uk.co.myspots.actors
import akka.actor.Actor
import uk.co.myspots.model.{Spot, User}
/** In-memory store of users and their spots, driven by the message protocol below. */
class UserActor extends Actor {

  // Registered users, and each user's spots keyed by spot id.
  var users = Set.empty[User]
  var spots = Map.empty[String, Map[String, Spot]]

  override def receive = {
    // Registers a user with an empty spot map. No reply is sent.
    case CreateUser(user) =>
      users += user
      spots += user.userId -> Map.empty[String, Spot]
    // Replies with Option[User].
    case GetUser(username) =>
      sender ! users.find(_.userId == username)
    // Removes the user. No reply is sent.
    // NOTE(review): the user's entry in `spots` is left behind, so GetAllSpots
    // still answers Some(...) for a deleted user — confirm whether intended.
    case DeleteUser(username) =>
      users.find(_.userId == username).foreach(users -= _)
    // Replies with Option[Map[spotId, Spot]] (None when the user is unknown).
    case GetAllSpots(username) =>
      sender ! spots.get(username)
    // Adds a spot for an existing user; replies Some(spotId), or None for an
    // unknown user.
    case AddSpotToUser(username, spot) => {
      if (spots.contains(username)) {
        val spotId = spot.id( username)
        spots += username -> (spots(username) + (spotId -> spot))
        sender ! Some(spotId)
      } else
        sender ! None
    }
    // Replies with Option[Spot].
    case GetSpot(username, spotId) =>
      sender ! spots.get(username).flatMap(_.get(spotId))
    // Replies with the user's spots whose tags contain `tag`, or None when the
    // user is unknown or nothing matches.
    case SearchSpot(username, tag) => {
      val mySpots: Option[Map[String, Spot]] = spots.get(username)
      val resp = mySpots match{
        case Some(m) => {
          val fm = m.filter(_._2.tags.contains(tag))
          if (fm.isEmpty) None else Some(fm)
        }
        case _ => None
      }
      sender ! resp
    }
    // Removes a spot; replies Some(spotId) on success, None otherwise.
    case DeleteSpot(username, spotId) => {
      if (spots.contains(username) && spots(username).contains(spotId)) {
        spots += username -> (spots(username) - spotId)
        sender ! Some(spotId)
      } else
        sender ! None
    }
  }
}
| MySpots/restapi | src/main/scala/uk/co/myspots/actors/UserActor.scala | Scala | gpl-2.0 | 1,529 |
/* scala-stm - (c) 2009-2011, Stanford University, PPL */
package scala.concurrent.stm
package skel
import org.scalatest.FunSuite
class SimpleRandomSuite extends FunSuite {

  test("nextInt") {
    // Both the shared (object) and a fresh (instance) generator should
    // produce some non-zero bits over many draws.
    val f = new SimpleRandom
    var s = 0
    for (_ <- 0 until 100000) {
      s |= SimpleRandom.nextInt
      s |= f.nextInt
    }
    assert(s != 0)
  }

  test("nextInt(n) in range") {
    val f = new SimpleRandom
    val rand = new scala.util.Random
    for (_ <- 0 until 100000) {
      // random positive bound in [1, Int.MaxValue - 1]
      val n = rand.nextInt(Int.MaxValue - 1) + 1
      val gr = SimpleRandom.nextInt(n)
      assert(gr >= 0 && gr < n)
      val lr = f.nextInt(n)
      assert(lr >= 0 && lr < n)
    }
  }

  test("clone") {
    // A clone must continue the exact same sequence as its source.
    val f1 = new SimpleRandom
    for (_ <- 0 until 1000)
      f1.nextInt()
    val f2 = f1.clone
    for (_ <- 0 until 1000)
      assert(f1.nextInt(9999) === f2.nextInt(9999))
  }

  test("seeded") {
    // Equal seeds yield equal sequences.
    val f1 = new SimpleRandom(100)
    val f2 = new SimpleRandom(100)
    for (_ <- 0 until 1000)
      assert(f1.nextInt === f2.nextInt)
  }

  test("global SimpleRandom distribution") {
    // Rough sanity check: every bucket of [0, 100) gets hit at least once
    // over 100k draws.
    val buckets = new Array[Int](100)
    for (_ <- 0 until 100000)
      buckets(SimpleRandom.nextInt(buckets.length)) += 1
    for (b <- buckets)
      assert(b > 0)
  }

  test("local SimpleRandom distribution") {
    val f = new SimpleRandom
    val buckets = new Array[Int](100)
    for (_ <- 0 until 100000)
      buckets(f.nextInt(buckets.length)) += 1
    for (b <- buckets)
      assert(b > 0)
  }
} | nbronson/scala-stm | src/test/scala/scala/concurrent/stm/skel/SimpleRandomSuite.scala | Scala | bsd-3-clause | 1493
/*
* Copyright 2014 DataGenerator Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.finra.datagenerator.common.SocialNetwork_Example.scala
import org.finra.datagenerator.common.NodeData.NodeData
import scala.beans.BeanProperty
/*
Social network user
*/
class User( var dataType: UserType.UserType,
            @BeanProperty var firstName: String,
            @BeanProperty var lastName: String,
            @BeanProperty val dateOfBirth: java.sql.Date, // Assert > 13 years old when creating
            @BeanProperty var geographicalLocation: (Double, Double), // presumably (latitude, longitude) — confirm ordering
            @BeanProperty var isSecret: Boolean,
            @BeanProperty val socialNetworkId: Long) extends NodeData(None) {

  // Human-readable node id, e.g. "42 (Admin): Doe, John".
  override def defaultDisplayableDataId: String = s"$socialNetworkId (${dataType.name}): $lastName, $firstName"
}
| Brijeshrpatel9/SingleThreaderProcessingDG | dg-common/src/main/code/org/finra/datagenerator/common/SocialNetwork_Example/scala/User.scala | Scala | apache-2.0 | 1,347 |
package scalajssupport
import scala.scalajs.js
import scala.scalajs.js.JSON
/** java.io.File-like wrapper over PhantomJS's `fs` module. */
class PhantomFile(path: String) extends JsFile {
  def this(path: String, child: String) = {
    this(PhantomFile.pathJoin(path, child))
  }

  /** Deletes this file, or this directory and its contents. */
  def delete(): Unit = {
    if (isDirectory()) PhantomFile.removeDirectory(path)
    else PhantomFile.remove(path)
  }

  def getAbsolutePath(): String = {
    PhantomFile.absolute(path)
  }

  /** The last component of the path.
    *
    * Splits on the literal separator (regex-quoted). The previous pattern,
    * `"\\\\" + separator`, matched a backslash *followed by* the separator,
    * which never occurs in a "/"-separated path — getName then returned the
    * whole path instead of just the file name.
    */
  def getName(): String = {
    path.split(java.util.regex.Pattern.quote(PhantomFile.separator)).last
  }

  def getPath(): String = {
    path
  }

  def isDirectory(): Boolean = {
    PhantomFile.isDirectory(path)
  }

  /** Creates this directory and any missing parents. */
  def mkdirs(): Unit = {
    PhantomFile.makeTree(path)
  }

  /** Children of this directory, as File handles. */
  def listFiles(): Array[File] = {
    val files = PhantomFile.list(path)
    val filesArray = new Array[File](files.length)
    // (was: iterating filesArray.zipWithIndex with an unused element binding)
    for (i <- filesArray.indices) {
      filesArray(i) = new File(PhantomFile.pathJoin(this.getPath(), files(i)))
    }
    filesArray
  }

  def readFile(): String = {
    PhantomFile.read(path)
  }
}
private[scalajssupport] object PhantomFile extends JsFileObject {
  // Sends an fs request to the PhantomJS host via callPhantom; the host runs
  // the named `fs` method with `args` and answers with a JSON-encoded string.
  def fsCallArray(method: String, args: js.Array[js.Any]): js.Dynamic = {
    val d = js.Dynamic.global.callPhantom(
      js.Dynamic.literal(
        action = "require.fs",
        method = method,
        args = args
      )
    )
    JSON.parse(d.asInstanceOf[String])
  }

  // Single-argument convenience overload of fsCallArray.
  def fsCall(method: String, arg: js.Any = null): js.Dynamic = {
    fsCallArray(method, js.Array(arg))
  }

  // Thin typed wrappers over the corresponding PhantomJS fs methods.
  def absolute(path: String): String =
    fsCall("absolute", path).asInstanceOf[String]
  def isDirectory(path: String): Boolean =
    fsCall("isDirectory", path).asInstanceOf[Boolean]
  def list(path: String): js.Array[String] =
    fsCall("list", path).asInstanceOf[js.Array[String]]
  def makeTree(path: String): Boolean =
    fsCall("makeTree", path).asInstanceOf[Boolean]
  def read(path: String): String = fsCall("read", path).asInstanceOf[String]
  def remove(path: String): Boolean =
    fsCall("remove", path).asInstanceOf[Boolean]
  def removeDirectory(path: String): Boolean =
    fsCall("removeDirectory", path).asInstanceOf[Boolean]

  // Platform path separator as reported by the PhantomJS fs module.
  val separator: String = fsCall("separator").asInstanceOf[String]

  def write(path: String, content: String, mode: String): Unit =
    fsCallArray("write", js.Array(path, content, mode))

  def pathJoin(path: String, child: String): String = path + separator + child

  def apply(path: String) = {
    new PhantomFile(path)
  }
}
| scoverage/scalac-scoverage-plugin | scalac-scoverage-runtime/js/src/main/scala/scalajssupport/PhantomFile.scala | Scala | apache-2.0 | 2,444 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import org.apache.spark.{SparkException, SparkFunSuite}
import org.apache.spark.ml.util.DefaultReadWriteTest
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.sql._
import org.apache.spark.sql.functions.udf
/**
 * Spec for QuantileDiscretizer covering: bucket count/size on uniform data,
 * heavily-duplicated inputs, NaN handling per handleInvalid mode, transform on
 * out-of-range unseen data, ML persistence, and model parenting.
 */
class QuantileDiscretizerSuite
  extends SparkFunSuite with MLlibTestSparkContext with DefaultReadWriteTest {
  test("Test observed number of buckets and their sizes match expected values") {
    val spark = this.spark
    import spark.implicits._
    val datasetSize = 100000
    val numBuckets = 5
    val df = sc.parallelize(1.0 to datasetSize by 1.0).map(Tuple1.apply).toDF("input")
    val discretizer = new QuantileDiscretizer()
      .setInputCol("input")
      .setOutputCol("result")
      .setNumBuckets(numBuckets)
    val result = discretizer.fit(df).transform(df)
    val observedNumBuckets = result.select("result").distinct.count
    assert(observedNumBuckets === numBuckets,
      "Observed number of buckets does not equal expected number of buckets.")
    // A bucket is "good" when its size deviates from the ideal equal share by
    // no more than relativeError * datasetSize (the quantile approx tolerance).
    val relativeError = discretizer.getRelativeError
    val isGoodBucket = udf {
      (size: Int) => math.abs( size - (datasetSize / numBuckets)) <= (relativeError * datasetSize)
    }
    val numGoodBuckets = result.groupBy("result").count.filter(isGoodBucket($"count")).count
    assert(numGoodBuckets === numBuckets,
      "Bucket sizes are not within expected relative error tolerance.")
  }
  test("Test on data with high proportion of duplicated values") {
    val spark = this.spark
    import spark.implicits._
    val numBuckets = 5
    // Only 3 distinct values exist, so fewer buckets than requested can be formed.
    val expectedNumBuckets = 3
    val df = sc.parallelize(Array(1.0, 3.0, 2.0, 1.0, 1.0, 2.0, 3.0, 2.0, 2.0, 2.0, 1.0, 3.0))
      .map(Tuple1.apply).toDF("input")
    val discretizer = new QuantileDiscretizer()
      .setInputCol("input")
      .setOutputCol("result")
      .setNumBuckets(numBuckets)
    val result = discretizer.fit(df).transform(df)
    val observedNumBuckets = result.select("result").distinct.count
    assert(observedNumBuckets == expectedNumBuckets,
      s"Observed number of buckets are not correct." +
        s" Expected $expectedNumBuckets but found $observedNumBuckets")
  }
  test("Test transform on data with NaN value") {
    val spark = this.spark
    import spark.implicits._
    val numBuckets = 3
    val validData = Array(-0.9, -0.5, -0.3, 0.0, 0.2, 0.5, 0.9, Double.NaN, Double.NaN, Double.NaN)
    // "keep" routes NaNs into an extra bucket (index numBuckets); "skip" drops those rows.
    val expectedKeep = Array(0.0, 0.0, 1.0, 1.0, 2.0, 2.0, 2.0, 3.0, 3.0, 3.0)
    val expectedSkip = Array(0.0, 0.0, 1.0, 1.0, 2.0, 2.0, 2.0)
    val discretizer = new QuantileDiscretizer()
      .setInputCol("input")
      .setOutputCol("result")
      .setNumBuckets(numBuckets)
    withClue("QuantileDiscretizer with handleInvalid=error should throw exception for NaN values") {
      val dataFrame: DataFrame = validData.toSeq.toDF("input")
      intercept[SparkException] {
        discretizer.fit(dataFrame).transform(dataFrame).collect()
      }
    }
    List(("keep", expectedKeep), ("skip", expectedSkip)).foreach{
      case(u, v) =>
        discretizer.setHandleInvalid(u)
        val dataFrame: DataFrame = validData.zip(v).toSeq.toDF("input", "expected")
        val result = discretizer.fit(dataFrame).transform(dataFrame)
        result.select("result", "expected").collect().foreach {
          case Row(x: Double, y: Double) =>
            assert(x === y,
              s"The feature value is not correct after bucketing. Expected $y but found $x")
        }
    }
  }
  test("Test transform method on unseen data") {
    val spark = this.spark
    import spark.implicits._
    // Train on [1, 100] but transform [-10, 110]: out-of-range values must land
    // in the outermost buckets.
    val trainDF = sc.parallelize(1.0 to 100.0 by 1.0).map(Tuple1.apply).toDF("input")
    val testDF = sc.parallelize(-10.0 to 110.0 by 1.0).map(Tuple1.apply).toDF("input")
    val discretizer = new QuantileDiscretizer()
      .setInputCol("input")
      .setOutputCol("result")
      .setNumBuckets(5)
    val result = discretizer.fit(trainDF).transform(testDF)
    val firstBucketSize = result.filter(result("result") === 0.0).count
    val lastBucketSize = result.filter(result("result") === 4.0).count
    assert(firstBucketSize === 30L,
      s"Size of first bucket ${firstBucketSize} did not equal expected value of 30.")
    assert(lastBucketSize === 31L,
      s"Size of last bucket ${lastBucketSize} did not equal expected value of 31.")
  }
  test("read/write") {
    val t = new QuantileDiscretizer()
      .setInputCol("myInputCol")
      .setOutputCol("myOutputCol")
      .setNumBuckets(6)
    testDefaultReadWrite(t)
  }
  test("Verify resulting model has parent") {
    val spark = this.spark
    import spark.implicits._
    val df = sc.parallelize(1 to 100).map(Tuple1.apply).toDF("input")
    val discretizer = new QuantileDiscretizer()
      .setInputCol("input")
      .setOutputCol("result")
      .setNumBuckets(5)
    val model = discretizer.fit(df)
    assert(model.hasParent)
  }
}
| aokolnychyi/spark | mllib/src/test/scala/org/apache/spark/ml/feature/QuantileDiscretizerSuite.scala | Scala | apache-2.0 | 5,748 |
/**
* © 2019 Refinitiv. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.crashableworker
import java.io.ByteArrayOutputStream
import actions.RequestMonitor
import akka.actor.{Actor, ActorRef, Props}
import cmwell.ctrl.config.Jvms
import cmwell.util.collections._
import cmwell.web.ld.cmw.CMWellRDFHelper
import cmwell.web.ld.query.{Config, DataFetcher, DataFetcherImpl, JenaArqExtensions}
import com.google.inject.{AbstractModule, Guice}
import com.typesafe.scalalogging.LazyLogging
import controllers._
import k.grid.{Grid, GridConnection}
import ld.query.{ArqCache, JenaArqExtensionsUtils}
import ld.query.JenaArqExtensionsUtils.BakedSparqlQuery
import logic.CRUDServiceFS
import org.apache.jena.query.{QueryFactory, ResultSetFormatter}
import org.apache.jena.riot.{RDFDataMgr, RDFFormat}
import org.slf4j.LoggerFactory
import uk.org.lidalia.sysoutslf4j.context.SysOutOverSLF4J
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent._
import scala.concurrent.duration._
import scala.util.{Failure, Success, Try}
/** Guice module for the crashable-worker process; no bindings are registered yet. */
class CWModule extends AbstractModule {
  // To register a singleton binding:
  //   bind(classOf[MySingleton]).asEagerSingleton()
  override def configure(): Unit = {}
}
/**
 * Entry point of the crashable-worker (CW) JVM: joins the cluster grid as a
 * client, wires the CRUD / RDF / SPARQL services together, and spawns the
 * QueryEvaluatorActor plus its watchdog.
 */
object WorkerMain extends App with LazyLogging {
  logger.info("Starting CW process")
  //SLF4J initialization is not thread safe, so it's "initialized" by writing some log and only then using sendSystemOutAndErrToSLF4J.
  //Without it there will be en error in stderr and some log line at the beginning will be lost
  SysOutOverSLF4J.sendSystemOutAndErrToSLF4J()
  // Publish per-actor active-query counters as this node's grid extra data.
  Grid.extraDataCollector = () => QueryEvaluatorActor.getExtraData
  Grid.setGridConnection(GridConnection(memberName = "cw"))
  Grid.joinClient
  // NOTE(review): fixed delay after joining — presumably lets grid membership settle; confirm.
  Thread.sleep(5000)
  RequestMonitor.init
  // Use injected singletons example:
  // val injector = Guice.createInjector(new CWModule())
  // val mySingleton = injector.getInstance(classOf[MySingleton])
  val crudServiceFS = new CRUDServiceFS()(implicitly, Grid.system)
  val cmwellRDFHelper = new CMWellRDFHelper(crudServiceFS, implicitly, Grid.system)
  val arqCache = new ArqCache(crudServiceFS)
  val dataFetcher = new DataFetcherImpl(Config.defaultConfig, crudServiceFS)
  val jenaArqExtensionsUtils =
    new JenaArqExtensionsUtils(arqCache, crudServiceFS.passiveFieldTypesCache, cmwellRDFHelper, dataFetcher)
  val jarsImporter = new JarsImporter(crudServiceFS)
  val queriesImporter = new QueriesImporter(crudServiceFS)
  val sourcesImporter = new SourcesImporter(crudServiceFS)
  // The evaluator actor and its watchdog (which can System.exit on hang).
  val ref = Grid.create(
    classOf[QueryEvaluatorActor],
    "QueryEvaluatorActor",
    crudServiceFS,
    arqCache,
    jenaArqExtensionsUtils,
    dataFetcher,
    jarsImporter,
    queriesImporter,
    sourcesImporter
  )
  Grid.create(Props(classOf[QueryEvaluatorActorWatcher], ref), "QueryEvaluatorActorWatcher")
  val jenaArqExtensions = JenaArqExtensions.get(jenaArqExtensionsUtils)
}
/** Result of a SPARQL evaluation, sent back to the web-service caller. */
sealed trait QueryResponse {
  // The payload; some variants throw or are unimplemented (see subclasses).
  def content: String
  // Free-form evaluation statistics attached to the reply.
  def stats: Map[String, String]
}
/** Response small enough to be carried inline as a string. */
case class Plain(content: String, stats: Map[String, String] = Map.empty) extends QueryResponse
/** Response spilled to a temp file; `content` holds the file path, not the data. */
case class Filename(content: String, stats: Map[String, String] = Map.empty) extends QueryResponse
/** Failed evaluation; accessing `content` rethrows the wrapped failure. */
case class RemoteFailure(failure: Throwable, stats: Map[String, String] = Map.empty) extends QueryResponse {
  override def content = throw failure
}
/** Placeholder for pipe-based transfer; reading is not implemented yet. */
case class ThroughPipe(pipeName: String, stats: Map[String, String] = Map.empty) extends QueryResponse {
  override def content: String = ??? //read from pipe
}
/** Rejection sent when the circuit breaker trips; `content` is unsupported. */
case class ShortCircuitOverloaded(numActiveRequests: Int, stats: Map[String, String] = Map.empty)
    extends QueryResponse {
  override def content: String = ???
}
case class Status(counter: Int, stats: Map[String, String] = Map.empty) extends QueryResponse { // for debugging purposes
  override def content: String = s"numActiveQueries is $counter"
}
/**
 * Shared registry of active-query counters keyed by actor name. Read by the
 * grid's extra-data collector to expose per-actor load.
 */
object QueryEvaluatorActor {
  // NOTE(review): plain var Map — only safe while all writers run on the actor
  // dispatcher; confirm before introducing other writers.
  private var activeQueryMap = Map.empty[String, Int]

  /** Current counter for `name`, defaulting to 0 when unknown. */
  def get(name: String): Int = activeQueryMap.getOrElse(name, 0)

  /** Records `value` as the counter for `name`. */
  def set(name: String, value: Int) = activeQueryMap = activeQueryMap.updated(name, value)

  /** One "#aq: <count>" line per registered actor (the key itself is not printed). */
  def getExtraData = {
    val perActorLines = activeQueryMap.map { case (n, v) => s"#aq: $v" }
    perActorLines.mkString("\n")
  }
}
/**
 * Actor that evaluates SPARQL requests. It tracks the number of in-flight
 * queries per actor name, rejects new work above CIRCUIT_BREAKER, delays
 * replies when moderately loaded, and signals its watcher (Activate / Reset)
 * so a hung worker JVM can be restarted.
 */
class QueryEvaluatorActor(crudServiceFS: CRUDServiceFS,
                          arqCache: ArqCache,
                          jenaArqExtensionsUtils: JenaArqExtensionsUtils,
                          dataFetcher: DataFetcher,
                          jarsImporter: JarsImporter,
                          queriesImporter: QueriesImporter,
                          sourcesImporter: SourcesImporter)
    extends Actor
    with SpFileUtils {
  import QueryEvaluatorActor._
  // Internal messages that re-route Future completions back onto the actor thread.
  private case class SpResponse(sender: ActorRef, queryResponse: QueryResponse)
  private case class SpFailure(sender: ActorRef, ex: Throwable)
  private def myName = self.path.name
  private def numActiveQueries = get(myName)
  private def updateActiveQueries(delta: Int) = set(myName, get(myName) + delta)
  // Responses larger than this are written to a temp file instead of sent inline.
  val responseThreshold = 64 * 1024
  // Above this many in-flight queries, replies are delayed by 3 seconds.
  val ACTIVE_REQUESTS_DELAY_THRESHOLD = Runtime.getRuntime.availableProcessors
  // Above this many in-flight queries, new requests are rejected outright.
  val CIRCUIT_BREAKER = Runtime.getRuntime.availableProcessors * 4
  // var numActiveQueries = 0
  @scala.throws[Exception](classOf[Exception])
  override def preStart(): Unit = {
    logger.info("Creating actor QueryEvaluatorActor")
    // Clean leftovers from a previous (possibly crashed) incarnation.
    deleteTempFiles()
  }
  private def isNeedToDelay = numActiveQueries > ACTIVE_REQUESTS_DELAY_THRESHOLD
  override def receive = watchedReceive()
  def watchedReceive(watcher: Option[ActorRef] = None): Receive = {
    // The watchdog introduces itself once; remember it for Activate/Reset signals.
    case /*Luke,*/ IAmYourWatcher => {
      context.become(watchedReceive(Some(sender())))
    }
    case StatusRequest => sender ! Status(numActiveQueries) // for debugging purposes
    // Load shedding: reject and arm the watchdog when too many queries are active.
    case paq: PopulateAndQuery if numActiveQueries > CIRCUIT_BREAKER =>
      watcher.foreach(_ ! Activate)
      sender() ! ShortCircuitOverloaded(numActiveQueries)
    case paq: PopulateAndQuery => {
      updateActiveQueries(+1)
      Try(paq.evaluate(jarsImporter, queriesImporter, sourcesImporter)) match {
        case Success(queryResults) => {
          val results = queryResults.flatMap {
            case (qr, stats) => rawDataToResponseMsg(qr, stats, paq.rp.forceUsingFile)
          }
          // Capture sender before the Future completes on another thread.
          val originalSender = sender
          results.onComplete {
            case Success(res) => self ! SpResponse(originalSender, res)
            case Failure(err) => self ! SpFailure(originalSender, err)
          }
        }
        case Failure(err) => {
          updateActiveQueries(-1)
          watcher.foreach(_ ! Reset)
          sender() ! RemoteFailure(err)
        }
      }
    }
    case OverallSparqlQuery(_, _, _) if numActiveQueries > CIRCUIT_BREAKER =>
      sender() ! ShortCircuitOverloaded(numActiveQueries)
    case OverallSparqlQuery(query, host, rp) => {
      updateActiveQueries(+1)
      Try(QueryFactory.create(query)) match {
        case Failure(e) => sender() ! RemoteFailure(e)
        case Success(sprqlQuery) => {
          val config = Config(rp.doNotOptimize,
                              rp.intermediateLimit,
                              rp.resultsLimit,
                              rp.verbose,
                              SpHandler.queryTimeout,
                              Some(SpHandler.queryTimeout.fromNow),
                              rp.explainOnly)
          val JenaArqExtensionsUtils.BakedSparqlQuery(queryExecution, driver) =
            JenaArqExtensionsUtils.buildCmWellQueryExecution(sprqlQuery,
                                                             host,
                                                             config,
                                                             crudServiceFS,
                                                             arqCache,
                                                             jenaArqExtensionsUtils,
                                                             dataFetcher)
          if (!sprqlQuery.isConstructType && !sprqlQuery.isSelectType) {
            sender() ! RemoteFailure(new IllegalArgumentException("Query Type must be either SELECT or CONSTRUCT"))
          } else {
            val os = new ByteArrayOutputStream()
            if (config.explainOnly)
              driver.logMsg(
                "Expl",
                "AST:\n" + JenaArqExtensionsUtils.queryToSseString(sprqlQuery).lines.map("\t".+).mkString("\n")
              )
            driver.logVerboseMsg("Plan", "Planning started.")
            if (sprqlQuery.isSelectType)
              ResultSetFormatter.out(os, queryExecution.execSelect(), sprqlQuery)
            if (sprqlQuery.isConstructType)
              RDFDataMgr.write(os, queryExecution.execConstruct(), RDFFormat.NTRIPLES)
            driver.logVerboseMsg("Exec", "Executing completed.")
            // Driver log lines are prepended to the serialized results.
            val msgsBa = driver.msgs.map { case (k, v) => s"[$k] $v" }.mkString("", "\n", "\n\n").getBytes("UTF-8")
            val resultsBa =
              if (config.explainOnly) Array.emptyByteArray
              else os.toByteArray
            val results = rawDataToResponseMsg(msgsBa ++ resultsBa, Map.empty[String, String], forceWriteFile = false)
            val originalSender = sender
            results.onComplete {
              case Success(res) => self ! SpResponse(originalSender, res)
              case Failure(err) => self ! SpFailure(originalSender, err)
            }
          }
        }
      }
    }
    case SpFailure(originalSender, err) => {
      updateActiveQueries(-1)
      watcher.foreach(_ ! Reset)
      // When moderately loaded, back-pressure callers by delaying the reply.
      if (isNeedToDelay) {
        context.system.scheduler.scheduleOnce(3.seconds, originalSender, RemoteFailure(err))
      } else {
        originalSender ! RemoteFailure(err)
      }
    }
    case SpResponse(originalSender, queryResponse) => {
      updateActiveQueries(-1)
      watcher.foreach(_ ! Reset)
      if (isNeedToDelay) {
        context.system.scheduler.scheduleOnce(3.seconds, originalSender, queryResponse)
      } else {
        originalSender ! queryResponse
      }
    }
  }
  /** String variant: delegates to the byte-array overload, keeping the original string. */
  protected def rawDataToResponseMsg(qr: String,
                                     stats: Map[String, String],
                                     forceWriteFile: Boolean): Future[QueryResponse] =
    rawDataToResponseMsg(qr.getBytes("UTF-8"), stats, Some(qr), forceWriteFile)
  /**
   * Wraps raw result bytes as a QueryResponse: spilled to a temp file (Filename)
   * when forced or above responseThreshold, otherwise sent inline (Plain).
   */
  protected def rawDataToResponseMsg(data: Array[Byte],
                                     stats: Map[String, String],
                                     originalStringData: Option[String] = None,
                                     forceWriteFile: Boolean): Future[QueryResponse] = {
    if (forceWriteFile || data.length > responseThreshold) {
      Future {
        val path = generateTempFileName
        writeToFile(path)(data)
        Filename(path, stats)
      }
    } else {
      Future.successful(Plain(originalStringData.getOrElse(new String(data, "UTF-8")), stats))
    }
  }
}
// will HCN its CW once 66 seconds were passed from Activate without any Reset received
/**
 * Watchdog for a QueryEvaluatorActor: once Activated it ticks every 11 seconds
 * and, after 6 consecutive ticks (~66s) with no Reset, kills this JVM.
 */
class QueryEvaluatorActorWatcher(qeaRef: ActorRef) extends Actor with LazyLogging {
  @scala.throws[Exception](classOf[Exception])
  override def preStart(): Unit = {
    logger.info("Creating actor QueryEvaluatorActorWatcher for " + qeaRef.path.name)
    // Introduce ourselves so the evaluator knows where to send Activate/Reset.
    qeaRef ! IAmYourWatcher
  }
  private val interval = 11.seconds
  private val threshold = 6
  // Consecutive ticks observed since the last Reset.
  private var counter = 0
  override def receive = inactive
  // Idle until the evaluator reports overload via Activate.
  def inactive: Receive = {
    case Activate =>
      context.system.scheduler.scheduleOnce(interval, self, Tick)
      context.become(active)
  }
  def active: Receive = {
    case Tick =>
      counter += 1
      if (counter >= threshold) {
        logger.info(s"Watcher detected QueryEvaluatorActor[${qeaRef.path.name}] is hung. Goodbye!")
        // NOTE(review): exits the whole CW JVM — presumably restarted by host infra; confirm.
        System.exit(1)
      }
      context.system.scheduler.scheduleOnce(interval, self, Tick)
    case Reset =>
      counter = 0
      context.become(inactive)
  }
}
// Watcher introduces itself to its QueryEvaluatorActor so replies can reach it.
case object IAmYourWatcher
// Evaluator → watcher: overload detected, start the hang countdown.
case object Activate
// Watcher-internal heartbeat while the countdown is armed.
case object Tick
// Evaluator → watcher: a query completed, clear the countdown.
case object Reset
| dudi3001/CM-Well | server/cmwell-ws/app/crashableworker/QueryEvaluatorActor.scala | Scala | apache-2.0 | 12,625 |
/*
* Copyright 2006-2015 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package util
import java.util.{Calendar, Date, TimeZone}
import net.liftweb.common._
import net.liftweb.util.TimeHelpers._
import org.joda.time.{Period, DateTimeZone, DateTime}
import org.scalacheck.Gen._
import org.scalacheck.Prop._
import org.specs2.ScalaCheck
import org.specs2.execute.AsResult
import org.specs2.matcher.MatchersImplicits
import org.specs2.mutable.{Around, Specification}
/**
* Systems under specification for TimeHelpers.
*/
/**
 * Spec for TimeHelpers: TimeSpan construction/arithmetic/comparison, the
 * duration helper functions, date formatting/parsing, and Calendar setters.
 * Every example runs under [[forAllTimeZones]], i.e. once per common JVM/Joda
 * time zone, to catch zone-dependent behavior.
 */
class TimeHelpersSpec extends Specification with ScalaCheck with TimeAmountsGen {
  "TimeHelpers Specification".title
  "A TimeSpan" can {
    "be created from a number of milliseconds" in forAllTimeZones {
      TimeSpan(3000) must_== TimeSpan(3 * 1000)
    }
    "be created from a number of seconds" in forAllTimeZones {
      3.seconds must_== TimeSpan(3 * 1000)
    }
    "be created from a number of minutes" in forAllTimeZones {
      3.minutes must_== TimeSpan(3 * 60 * 1000)
    }
    "be created from a number of hours" in forAllTimeZones {
      3.hours must_== TimeSpan(3 * 60 * 60 * 1000)
    }
    "be created from a number of days" in forAllTimeZones {
      3.days must_== TimeSpan(3 * 24 * 60 * 60 * 1000)
    }
    "be created from a number of weeks" in forAllTimeZones {
      3.weeks must_== TimeSpan(3 * 7 * 24 * 60 * 60 * 1000)
    }
    // Months and years are calendar periods, not fixed millisecond amounts.
    "be created from a number of months" in forAllTimeZones {
      3.months must_== Period.months(3)
    }
    "be created from a number of years" in forAllTimeZones {
      3.years must_== Period.years(3)
    }
    "be converted implicitly to a date starting from the epoch time" in forAllTimeZones {
      3.seconds.after(new Date(0)) must beTrue
    }
    "be converted to a date starting from the epoch time, using the date method" in forAllTimeZones {
      3.seconds.after(new Date(0)) must beTrue
    }
    "be implicitly converted to a Long" in forAllTimeZones {
      (3.seconds == 3000L) must_== true
    }
    "be compared to an int" in forAllTimeZones {
      (3.seconds == 3000) must_== true
      (3.seconds != 2000) must_== true
    }
    "be compared to a long" in forAllTimeZones {
      (3.seconds == 3000L) must_== true
      (3.seconds != 2000L) must_== true
    }
    "be compared to another TimeSpan" in forAllTimeZones {
      3.seconds must_== 3.seconds
      3.seconds must_!= 2.seconds
    }
    "be compared to another object" in forAllTimeZones {
      3.seconds must_!= "string"
    }
  }
  "A TimeSpan" should {
    "return a new TimeSpan representing the sum of the 2 times when added with another TimeSpan" in forAllTimeZones {
      3.seconds + 3.seconds must_== 6.seconds
    }
    "return a new TimeSpan representing the difference of the 2 times when substracted with another TimeSpan" in forAllTimeZones {
      3.seconds - 4.seconds must_== (-1).seconds
    }
    "have a later method returning a date relative to now plus the time span" in forAllTimeZones {
      val expectedTime = new Date().getTime + 3.seconds.millis
      3.seconds.later.getMillis must beCloseTo(expectedTime, 1000L)
    }
    "have an ago method returning a date relative to now minus the time span" in forAllTimeZones {
      val expectedTime = new Date().getTime - 3.seconds.millis
      3.seconds.ago.getMillis must beCloseTo(expectedTime, 1000L)
    }
    "have a toString method returning the relevant number of weeks, days, hours, minutes, seconds, millis" in forAllTimeZones {
      // NOTE(review): the trailing "|| true" makes this property vacuous (always
      // passes) — confirm whether that short-circuit is intentional.
      val conversionIsOk = forAll(timeAmounts)((t: TimeAmounts) => { val (timeSpanToString, timeSpanAmounts) = t
        timeSpanAmounts forall { case (amount, unit) =>
          amount >= 1 &&
          timeSpanToString.contains(amount.toString) || true }
      })
      // Unit names must be pluralized for amounts > 1 and omitted for 0.
      val timeSpanStringIsPluralized = forAll(timeAmounts)((t: TimeAmounts) => { val (timeSpanToString, timeSpanAmounts) = t
        timeSpanAmounts forall { case (amount, unit) =>
          amount > 1 && timeSpanToString.contains(unit + "s") ||
          amount == 1 && timeSpanToString.contains(unit) ||
          amount == 0 && !timeSpanToString.contains(unit)
        }
      })
      conversionIsOk && timeSpanStringIsPluralized
    }
  }
  "the TimeHelpers" should {
    "provide a 'seconds' function transforming a number of seconds into millis" in forAllTimeZones {
      seconds(3) must_== 3 * 1000
    }
    "provide a 'minutes' function transforming a number of minutes into millis" in forAllTimeZones {
      minutes(3) must_== 3 * 60 * 1000
    }
    "provide a 'hours' function transforming a number of hours into milliss" in forAllTimeZones {
      hours(3) must_== 3 * 60 * 60 * 1000
    }
    "provide a 'days' function transforming a number of days into millis" in forAllTimeZones {
      days(3) must_== 3 * 24 * 60 * 60 * 1000
    }
    "provide a 'weeks' function transforming a number of weeks into millis" in forAllTimeZones {
      weeks(3) must_== 3 * 7 * 24 * 60 * 60 * 1000
    }
    "provide a noTime function on Date objects to transform a date into a date at the same day but at 00:00" in forAllTimeZones {
      hourFormat(now.noTime) must_== "00:00:00"
    }
    "make sure noTime does not change the day" in forAllTimeZones {
      dateFormatter.format(0.days.ago.noTime.toDate) must_== dateFormatter.format(new DateTime().toDate)
      dateFormatter.format(3.days.ago.noTime.toDate) must_== dateFormatter.format(new Date(millis - (3 * 24 * 60 * 60 * 1000)))
    }
    "provide a day function returning the day of month corresponding to a given date (relative to UTC)" in forAllTimeZones {
      day(today.setTimezone(utc).setDay(3).getTime) must_== 3
    }
    "provide a month function returning the month corresponding to a given date" in forAllTimeZones {
      month(today.setTimezone(utc).setMonth(4).getTime) must_== 4
    }
    "provide a year function returning the year corresponding to a given date" in forAllTimeZones {
      year(today.setTimezone(utc).setYear(2008).getTime) must_== 2008
    }
    "provide a millisToDays function returning the number of days since the epoch time" in forAllTimeZones {
      millisToDays(new Date(0).getTime) must_== 0
      millisToDays(today.setYear(1970).setMonth(0).setDay(1).getTime.getTime) must_== 0 // the epoch time
      // on the 3rd day after the epoch time, 2 days are passed
      millisToDays(today.setTimezone(utc).setYear(1970).setMonth(0).setDay(3).getTime.getTime) must_== 2
    }
    "provide a daysSinceEpoch function returning the number of days since the epoch time" in forAllTimeZones {
      daysSinceEpoch must_== millisToDays(now.getTime)
    }
    "provide a time function creating a new Date object from a number of millis" in forAllTimeZones {
      time(1000) must_== new Date(1000)
    }
    "provide a calcTime function returning the time taken to evaluate a block in millis and the block's result" in forAllTimeZones {
      val (time, result) = calcTime((1 to 10).reduceLeft[Int](_ + _))
      time.toInt must beCloseTo(0, 1000) // it should take less than 1 second!
      result must_== 55
    }
    "provide a hourFormat function to format the time of a date object" in forAllTimeZones {
      hourFormat(Calendar.getInstance(utc).noTime.getTime) must_== "00:00:00"
    }
    "provide a formattedDateNow function to format todays date" in forAllTimeZones {
      formattedDateNow must beMatching("\\d\\d\\d\\d/\\d\\d/\\d\\d")
    }
    "provide a formattedTimeNow function to format now's time with the TimeZone" in forAllTimeZones {
      // Accepts short zone names, GMT offsets, bare GMT, or unknown-zone markers.
      val regex = "\\d\\d:\\d\\d (....?.?|GMT((\\+|\\-)\\d\\d:\\d\\d)?)"
      "10:00 CEST" must beMatching(regex)
      "10:00 GMT+02:00" must beMatching(regex)
      "10:00 GMT" must beMatching(regex)
      "10:00 XXX" must beMatching(regex)
      formattedTimeNow must beMatching(regex)
    }
    "provide a parseInternetDate function to parse a string formatted using the internet format" in forAllTimeZones {
      parseInternetDate(internetDateFormatter.format(now)).getTime.toLong must beCloseTo(now.getTime.toLong, 1000L)
    }
    "provide a parseInternetDate function returning new Date(0) if the input date cant be parsed" in forAllTimeZones {
      parseInternetDate("unparsable") must_== new Date(0)
    }
    "provide a toInternetDate function formatting a date to the internet format" in forAllTimeZones {
      toInternetDate(now) must beMatching("..., \\d* ... \\d\\d\\d\\d \\d\\d:\\d\\d:\\d\\d .*")
    }
    "provide a toDate returning a Full(date) from many kinds of objects" in forAllTimeZones {
      val d = now
      List(null, Nil, None, Failure("", Empty, Empty)) forall { toDate(_) must_== Empty }
      List(Full(d), Some(d), List(d)) forall { toDate(_) must_== Full(d) }
      toDate(internetDateFormatter.format(d)) must beLike {
        case Full(converted) =>
          converted.getTime.toLong must beCloseTo(d.getTime.toLong, 1000L)
      }
    }
  }
  "The Calendar class" should {
    "have a setDay method setting the day of month and returning the updated Calendar" in forAllTimeZones {
      day(today.setTimezone(utc).setDay(1).getTime) must_== 1
    }
    "have a setMonth method setting the month and returning the updated Calendar" in forAllTimeZones {
      month(today.setTimezone(utc).setMonth(0).getTime) must_== 0
    }
    "have a setYear method setting the year and returning the updated Calendar" in forAllTimeZones {
      year(today.setTimezone(utc).setYear(2008).getTime) must_== 2008
    }
    "have a setTimezone method to setting the time zone and returning the updated Calendar" in forAllTimeZones {
      today.setTimezone(utc).getTimeZone must_== utc
    }
    "have a noTime method to setting the time to 00:00:00 and returning the updated Calendar" in forAllTimeZones {
      hourFormat(today.noTime.getTime) must_== "00:00:00"
    }
  }
}
/**
 * specs2 Around context that runs the wrapped example once per time zone known
 * to *both* java.util.TimeZone and Joda-Time (and agreeing on the current
 * offset), with that zone installed as the JVM default. Synchronized because
 * TimeZone.setDefault / DateTimeZone.setDefault mutate process-wide state.
 */
object forAllTimeZones extends Around {
  import MatchersImplicits._
  override def around[T: AsResult](f: => T) = synchronized {
    import scala.collection.JavaConverters._
    // setDefault is on static context so tests should be sequenced
    // some timezones for java (used in formatters) and for Joda (other computations) has other offset
    val commonJavaAndJodaTimeZones = (TimeZone.getAvailableIDs.toSet & DateTimeZone.getAvailableIDs.asScala.toSet).filter { timeZoneId =>
      TimeZone.getTimeZone(timeZoneId).getOffset(millis) == DateTimeZone.forID(timeZoneId).getOffset(millis)
    }
    // Remember the process-wide defaults so they can be restored afterwards.
    val tzBefore = TimeZone.getDefault
    val dtzBefore = DateTimeZone.getDefault
    try {
      forall(commonJavaAndJodaTimeZones) { timeZoneId =>
        TimeZone.setDefault(TimeZone.getTimeZone(timeZoneId))
        DateTimeZone.setDefault(DateTimeZone.forID(timeZoneId))
        f
      }
    } finally {
      TimeZone.setDefault(tzBefore)
      DateTimeZone.setDefault(dtzBefore)
    }
  }
}
/**
 * ScalaCheck generator producing a TimeSpan's toString together with the
 * (amount, unit-name) pairs it was built from, used to check formatting and
 * pluralization of the rendered string.
 */
trait TimeAmountsGen {
  type TimeAmounts = (String, List[(Int, String)])
  val timeAmounts =
    for {
      w <- choose(0, 2)    // weeks
      d <- choose(0, 6)    // days
      h <- choose(0, 23)   // hours
      m <- choose(0, 59)   // minutes
      s <- choose(0, 59)   // seconds
      ml <- choose(0, 999) // milliseconds
    }
    yield (
      TimeSpan(weeks(w) + days(d) + hours(h) + minutes(m) + seconds(s) + ml).toString,
      (w, "week") :: (d, "day") :: (h, "hour") :: (m, "minute") :: (s, "second") :: (ml, "milli") :: Nil
    )
}
| lift/framework | core/util/src/test/scala/net/liftweb/util/TimeHelpersSpec.scala | Scala | apache-2.0 | 11,867 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.AtomicLong
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.execution.ui.{SparkListenerSQLExecutionEnd, SparkListenerSQLExecutionStart}
/**
 * Helpers for tagging Spark jobs with a SQL execution id so the UI can
 * correlate jobs with the query that spawned them. The id (and session SQL
 * configs) travel to executors via job-local properties.
 */
object SQLExecution {
  // Job-local property carrying the current execution id.
  val EXECUTION_ID_KEY = "spark.sql.execution.id"
  // Process-wide source of fresh execution ids.
  private val _nextExecutionId = new AtomicLong(0)
  private def nextExecutionId: Long = _nextExecutionId.getAndIncrement
  // Registry of currently-running executions, for status/UI lookups.
  private val executionIdToQueryExecution = new ConcurrentHashMap[Long, QueryExecution]()
  /** QueryExecution registered for `executionId`, or null when not running. */
  def getQueryExecution(executionId: Long): QueryExecution = {
    executionIdToQueryExecution.get(executionId)
  }
  private val testing = sys.props.contains("spark.testing")
  private[sql] def checkSQLExecutionId(sparkSession: SparkSession): Unit = {
    val sc = sparkSession.sparkContext
    // only throw an exception during tests. a missing execution ID should not fail a job.
    if (testing && sc.getLocalProperty(EXECUTION_ID_KEY) == null) {
      // Attention testers: when a test fails with this exception, it means that the action that
      // started execution of a query didn't call withNewExecutionId. The execution ID should be
      // set by calling withNewExecutionId in the action that begins execution, like
      // Dataset.collect or DataFrameWriter.insertInto.
      throw new IllegalStateException("Execution ID should be set")
    }
  }
  /**
   * Wrap an action that will execute "queryExecution" to track all Spark jobs in the body so that
   * we can connect them with an execution.
   */
  def withNewExecutionId[T](
      sparkSession: SparkSession,
      queryExecution: QueryExecution)(body: => T): T = {
    val sc = sparkSession.sparkContext
    // Save any enclosing execution id so it can be restored afterwards (nesting).
    val oldExecutionId = sc.getLocalProperty(EXECUTION_ID_KEY)
    val executionId = SQLExecution.nextExecutionId
    sc.setLocalProperty(EXECUTION_ID_KEY, executionId.toString)
    executionIdToQueryExecution.put(executionId, queryExecution)
    try {
      // sparkContext.getCallSite() would first try to pick up any call site that was previously
      // set, then fall back to Utils.getCallSite(); call Utils.getCallSite() directly on
      // streaming queries would give us call site like "run at <unknown>:0"
      val callSite = sc.getCallSite()
      withSQLConfPropagated(sparkSession) {
        // Bracket the body with start/end events so the SQL UI can time it.
        sc.listenerBus.post(SparkListenerSQLExecutionStart(
          executionId, callSite.shortForm, callSite.longForm, queryExecution.toString,
          SparkPlanInfo.fromSparkPlan(queryExecution.executedPlan), System.currentTimeMillis()))
        try {
          body
        } finally {
          sc.listenerBus.post(SparkListenerSQLExecutionEnd(
            executionId, System.currentTimeMillis()))
        }
      }
    } finally {
      executionIdToQueryExecution.remove(executionId)
      sc.setLocalProperty(EXECUTION_ID_KEY, oldExecutionId)
    }
  }
  /**
   * Wrap an action with a known executionId. When running a different action in a different
   * thread from the original one, this method can be used to connect the Spark jobs in this action
   * with the known executionId, e.g., `BroadcastExchangeExec.relationFuture`.
   */
  def withExecutionId[T](sparkSession: SparkSession, executionId: String)(body: => T): T = {
    val sc = sparkSession.sparkContext
    val oldExecutionId = sc.getLocalProperty(SQLExecution.EXECUTION_ID_KEY)
    withSQLConfPropagated(sparkSession) {
      try {
        sc.setLocalProperty(SQLExecution.EXECUTION_ID_KEY, executionId)
        body
      } finally {
        sc.setLocalProperty(SQLExecution.EXECUTION_ID_KEY, oldExecutionId)
      }
    }
  }
  /**
   * Wrap an action with specified SQL configs. These configs will be propagated to the executor
   * side via job local properties.
   */
  def withSQLConfPropagated[T](sparkSession: SparkSession)(body: => T): T = {
    val sc = sparkSession.sparkContext
    // Set all the specified SQL configs to local properties, so that they can be available at
    // the executor side.
    val allConfigs = sparkSession.sessionState.conf.getAllConfs
    // Only "spark"-prefixed keys are propagated; prior values are kept for restore.
    val originalLocalProps = allConfigs.collect {
      case (key, value) if key.startsWith("spark") =>
        val originalValue = sc.getLocalProperty(key)
        sc.setLocalProperty(key, value)
        (key, originalValue)
    }
    try {
      body
    } finally {
      for ((key, value) <- originalLocalProps) {
        sc.setLocalProperty(key, value)
      }
    }
  }
}
| lvdongr/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/SQLExecution.scala | Scala | apache-2.0 | 5,323 |
package io.github.mandar2812.dynaml.models.neuralnets
import breeze.linalg.{DenseVector, DenseMatrix}
import breeze.numerics.sigmoid
import io.github.mandar2812.dynaml.models.ParameterizedLearner
/**
 * Top level trait defining
 * the most important properties
 * of a neural network
 *
 * @tparam G the type of the training data source this network learns from
 */
trait NeuralNetwork[G] extends
ParameterizedLearner[G, Int, List[DenseMatrix[Double]],
  DenseVector[Double], DenseVector[Double],
  (DenseVector[Double], DenseVector[Double])] {

  // Dimensionality of a single input vector (number of input neurons).
  val inputDimensions: Int

  // Dimensionality of the network output (number of output neurons).
  val outputDimensions: Int

  // Number of hidden layers between input and output.
  val hiddenLayers: Int

  // Activation functions used by the network.
  // NOTE(review): the expected correspondence between entries and layers is not
  // visible here — confirm against implementing classes.
  val activations: List[(Double) => Double]

  // Neuron counts, presumably one entry per hidden layer — confirm against implementations.
  val neuronCounts: List[Int]
}
| Koldh/DynaML | src/main/scala/io/github/mandar2812/dynaml/models/neuralnets/NeuralNetwork.scala | Scala | apache-2.0 | 629 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive
import java.io.{File, PrintWriter}
import scala.reflect.ClassTag
import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.execution.command.AnalyzeTableCommand
import org.apache.spark.sql.execution.joins._
import org.apache.spark.sql.hive.test.TestHiveSingleton
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SQLTestUtils
/**
 * Tests for Hive table statistics: parsing of ANALYZE TABLE commands, table size
 * estimation (metastore properties vs. HDFS fallback), and how size estimates drive
 * broadcast-join planning.
 */
class StatisticsSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {

  test("parse analyze commands") {
    // Parses `analyzeCommand` and asserts the plan consists of a single operator
    // of exactly class `c`.
    def assertAnalyzeCommand(analyzeCommand: String, c: Class[_]) {
      val parsed = spark.sessionState.sqlParser.parsePlan(analyzeCommand)
      val operators = parsed.collect {
        case a: AnalyzeTableCommand => a
        case o => o
      }

      assert(operators.size === 1)
      if (operators(0).getClass() != c) {
        fail(
          s"""$analyzeCommand expected command: $c, but got ${operators(0)}
             |parsed command:
             |$parsed
           """.stripMargin)
      }
    }

    assertAnalyzeCommand(
      "ANALYZE TABLE Table1 COMPUTE STATISTICS",
      classOf[AnalyzeTableCommand])
    assertAnalyzeCommand(
      "ANALYZE TABLE Table1 PARTITION(ds='2008-04-09', hr=11) COMPUTE STATISTICS",
      classOf[AnalyzeTableCommand])
    assertAnalyzeCommand(
      "ANALYZE TABLE Table1 PARTITION(ds='2008-04-09', hr=11) COMPUTE STATISTICS noscan",
      classOf[AnalyzeTableCommand])
    assertAnalyzeCommand(
      "ANALYZE TABLE Table1 PARTITION(ds, hr) COMPUTE STATISTICS",
      classOf[AnalyzeTableCommand])
    assertAnalyzeCommand(
      "ANALYZE TABLE Table1 PARTITION(ds, hr) COMPUTE STATISTICS noscan",
      classOf[AnalyzeTableCommand])
    // The "noscan" keyword is parsed case-insensitively.
    assertAnalyzeCommand(
      "ANALYZE TABLE Table1 COMPUTE STATISTICS nOscAn",
      classOf[AnalyzeTableCommand])
  }

  test("MetastoreRelations fallback to HDFS for size estimation") {
    // Preserve the session value of the fallback flag so the test can restore it.
    val enableFallBackToHdfsForStats = spark.sessionState.conf.fallBackToHdfsForStatsEnabled
    try {
      withTempDir { tempDir =>

        // EXTERNAL OpenCSVSerde table pointing to LOCATION
        // Two small data files are written so the expected size is the sum of both lengths.
        val file1 = new File(tempDir + "/data1")
        val writer1 = new PrintWriter(file1)
        writer1.write("1,2")
        writer1.close()

        val file2 = new File(tempDir + "/data2")
        val writer2 = new PrintWriter(file2)
        writer2.write("1,2")
        writer2.close()

        sql(
          s"""CREATE EXTERNAL TABLE csv_table(page_id INT, impressions INT)
            ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'
            WITH SERDEPROPERTIES (
              \\"separatorChar\\" = \\",\\",
              \\"quoteChar\\" = \\"\\\\\\"\\",
              \\"escapeChar\\" = \\"\\\\\\\\\\")
            LOCATION '$tempDir'
          """)

        spark.conf.set(SQLConf.ENABLE_FALL_BACK_TO_HDFS_FOR_STATS.key, true)

        val relation = spark.sessionState.catalog.lookupRelation(TableIdentifier("csv_table"))
          .asInstanceOf[MetastoreRelation]

        // External tables have no usable metastore size properties, which is what
        // forces the HDFS-based estimation path under test here.
        val properties = relation.hiveQlTable.getParameters
        assert(properties.get("totalSize").toLong <= 0, "external table totalSize must be <= 0")
        assert(properties.get("rawDataSize").toLong <= 0, "external table rawDataSize must be <= 0")

        val sizeInBytes = relation.statistics.sizeInBytes
        assert(sizeInBytes === BigInt(file1.length() + file2.length()))
      }
    } finally {
      spark.conf.set(SQLConf.ENABLE_FALL_BACK_TO_HDFS_FOR_STATS.key, enableFallBackToHdfsForStats)
      sql("DROP TABLE csv_table ")
    }
  }

  test("analyze MetastoreRelations") {
    // Reads the size estimate currently recorded for `tableName`.
    def queryTotalSize(tableName: String): BigInt =
      spark.sessionState.catalog.lookupRelation(TableIdentifier(tableName)).statistics.sizeInBytes

    // Non-partitioned table
    sql("CREATE TABLE analyzeTable (key STRING, value STRING)").collect()
    sql("INSERT INTO TABLE analyzeTable SELECT * FROM src").collect()
    sql("INSERT INTO TABLE analyzeTable SELECT * FROM src").collect()

    sql("ANALYZE TABLE analyzeTable COMPUTE STATISTICS noscan")

    // Expected byte size of two copies of `src` as stored by the test Hive setup.
    assert(queryTotalSize("analyzeTable") === BigInt(11624))

    sql("DROP TABLE analyzeTable").collect()

    // Partitioned table
    sql(
      """
        |CREATE TABLE analyzeTable_part (key STRING, value STRING) PARTITIONED BY (ds STRING)
      """.stripMargin).collect()
    sql(
      """
        |INSERT INTO TABLE analyzeTable_part PARTITION (ds='2010-01-01')
        |SELECT * FROM src
      """.stripMargin).collect()
    sql(
      """
        |INSERT INTO TABLE analyzeTable_part PARTITION (ds='2010-01-02')
        |SELECT * FROM src
      """.stripMargin).collect()
    sql(
      """
        |INSERT INTO TABLE analyzeTable_part PARTITION (ds='2010-01-03')
        |SELECT * FROM src
      """.stripMargin).collect()

    // Before ANALYZE the estimate falls back to the configured default size.
    assert(queryTotalSize("analyzeTable_part") === spark.sessionState.conf.defaultSizeInBytes)

    sql("ANALYZE TABLE analyzeTable_part COMPUTE STATISTICS noscan")

    assert(queryTotalSize("analyzeTable_part") === BigInt(17436))

    sql("DROP TABLE analyzeTable_part").collect()

    // Try to analyze a temp table
    // Temp views have no metastore entry, so ANALYZE must be rejected.
    sql("""SELECT * FROM src""").createOrReplaceTempView("tempTable")
    intercept[AnalysisException] {
      sql("ANALYZE TABLE tempTable COMPUTE STATISTICS")
    }
    spark.sessionState.catalog.dropTable(
      TableIdentifier("tempTable"), ignoreIfNotExists = true)
  }

  test("estimates the size of a test MetastoreRelation") {
    val df = sql("""SELECT * FROM src""")
    val sizes = df.queryExecution.analyzed.collect { case mr: MetastoreRelation =>
      mr.statistics.sizeInBytes
    }
    assert(sizes.size === 1, s"Size wrong for:\n ${df.queryExecution}")
    assert(sizes(0).equals(BigInt(5812)),
      s"expected exact size 5812 for test table 'src', got: ${sizes(0)}")
  }

  test("auto converts to broadcast hash join, by size estimate of a relation") {
    // Runs `query`, checks a broadcast hash join is planned while both relations are
    // below the threshold, then disables auto-broadcast and checks the plan degrades
    // to a sort-merge join.
    def mkTest(
        before: () => Unit,
        after: () => Unit,
        query: String,
        expectedAnswer: Seq[Row],
        ct: ClassTag[_]): Unit = {
      before()

      var df = sql(query)

      // Assert src has a size smaller than the threshold.
      val sizes = df.queryExecution.analyzed.collect {
        case r if ct.runtimeClass.isAssignableFrom(r.getClass) => r.statistics.sizeInBytes
      }
      assert(sizes.size === 2 && sizes(0) <= spark.sessionState.conf.autoBroadcastJoinThreshold
        && sizes(1) <= spark.sessionState.conf.autoBroadcastJoinThreshold,
        s"query should contain two relations, each of which has size smaller than autoConvertSize")

      // Using `sparkPlan` because for relevant patterns in HashJoin to be
      // matched, other strategies need to be applied.
      var bhj = df.queryExecution.sparkPlan.collect { case j: BroadcastHashJoinExec => j }
      assert(bhj.size === 1,
        s"actual query plans do not contain broadcast join: ${df.queryExecution}")

      checkAnswer(df, expectedAnswer) // check correctness of output

      spark.sessionState.conf.settings.synchronized {
        val tmp = spark.sessionState.conf.autoBroadcastJoinThreshold

        // Setting the threshold to -1 disables broadcast joins entirely.
        sql(s"""SET ${SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key}=-1""")
        df = sql(query)
        bhj = df.queryExecution.sparkPlan.collect { case j: BroadcastHashJoinExec => j }
        assert(bhj.isEmpty, "BroadcastHashJoin still planned even though it is switched off")

        val shj = df.queryExecution.sparkPlan.collect { case j: SortMergeJoinExec => j }
        assert(shj.size === 1,
          "SortMergeJoin should be planned when BroadcastHashJoin is turned off")

        sql(s"""SET ${SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key}=$tmp""")
      }

      after()
    }

    /** Tests for MetastoreRelation */
    val metastoreQuery = """SELECT * FROM src a JOIN src b ON a.key = 238 AND a.key = b.key"""
    val metastoreAnswer = Seq.fill(4)(Row(238, "val_238", 238, "val_238"))
    mkTest(
      () => (),
      () => (),
      metastoreQuery,
      metastoreAnswer,
      implicitly[ClassTag[MetastoreRelation]]
    )
  }

  test("auto converts to broadcast left semi join, by size estimate of a relation") {
    val leftSemiJoinQuery =
      """SELECT * FROM src a
        |left semi JOIN src b ON a.key=86 and a.key = b.key""".stripMargin
    val answer = Row(86, "val_86")

    var df = sql(leftSemiJoinQuery)

    // Assert src has a size smaller than the threshold.
    val sizes = df.queryExecution.analyzed.collect {
      case r if implicitly[ClassTag[MetastoreRelation]].runtimeClass
        .isAssignableFrom(r.getClass) =>
        r.statistics.sizeInBytes
    }
    assert(sizes.size === 2 && sizes(1) <= spark.sessionState.conf.autoBroadcastJoinThreshold
      && sizes(0) <= spark.sessionState.conf.autoBroadcastJoinThreshold,
      s"query should contain two relations, each of which has size smaller than autoConvertSize")

    // Using `sparkPlan` because for relevant patterns in HashJoin to be
    // matched, other strategies need to be applied.
    var bhj = df.queryExecution.sparkPlan.collect {
      case j: BroadcastHashJoinExec => j
    }
    assert(bhj.size === 1,
      s"actual query plans do not contain broadcast join: ${df.queryExecution}")

    checkAnswer(df, answer) // check correctness of output

    spark.sessionState.conf.settings.synchronized {
      val tmp = spark.sessionState.conf.autoBroadcastJoinThreshold

      sql(s"SET ${SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key}=-1")
      df = sql(leftSemiJoinQuery)
      bhj = df.queryExecution.sparkPlan.collect {
        case j: BroadcastHashJoinExec => j
      }
      assert(bhj.isEmpty, "BroadcastHashJoin still planned even though it is switched off")

      val shj = df.queryExecution.sparkPlan.collect {
        case j: SortMergeJoinExec => j
      }
      assert(shj.size === 1,
        "SortMergeJoinExec should be planned when BroadcastHashJoin is turned off")

      sql(s"SET ${SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key}=$tmp")
    }

  }
}
| gioenn/xSpark | sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala | Scala | apache-2.0 | 10,824 |
package chiselutils.interfaces.exanicx4
import Chisel._
/** Convenience factory so callers can write `HandshakeIO()` instead of `new HandshakeIO`. */
object HandshakeIO {
  def apply(): HandshakeIO = new HandshakeIO
}
/** A bundle to represent the data handshaking interface to enable easy connection
  * Between rx and tx
  */
class HandshakeIO extends Bundle {
  val data = UInt( OUTPUT, 64 )  // 64-bit data word
  val sof = Bool( OUTPUT )       // start-of-frame marker
  val eof = Bool( OUTPUT )       // end-of-frame marker
  val len = UInt( OUTPUT, 3 )    // 3-bit length field; exact semantics not visible here — confirm
  val vld = Bool( OUTPUT )       // asserted when `data` carries a valid word

  /** Assigns generated-HDL signal names of the form `<prefix>_<field>_<suffix>`
    * to every field of the bundle.
    */
  def setNames( prefix : String, suffix : String ) {
    data.setName( prefix + "_data_" + suffix )
    sof.setName( prefix + "_sof_" + suffix )
    eof.setName( prefix + "_eof_" + suffix)
    len.setName( prefix + "_len_" + suffix )
    vld.setName( prefix + "_vld_" + suffix )
  }
}
| da-steve101/chisel-utils | src/main/scala/chiselutils/interfaces/exanicx4/HandShakeIO.scala | Scala | lgpl-3.0 | 752 |
package im.actor.server.api.rpc.service.privacy
import akka.actor.ActorSystem
import im.actor.api.rpc._
import im.actor.api.rpc.ClientData
import im.actor.api.rpc.misc.ResponseSeq
import im.actor.api.rpc.peers.ApiUserOutPeer
import im.actor.api.rpc.privacy.{ PrivacyService, ResponseLoadBlockedUsers, UpdateUserBlocked, UpdateUserUnblocked }
import im.actor.server.acl.ACLUtils
import im.actor.server.db.DbExtension
import im.actor.server.model.social.{ Relation, RelationStatus }
import im.actor.server.persist.social.RelationRepo
import im.actor.server.sequence.SeqUpdatesExtension
import scala.concurrent.{ ExecutionContext, Future }
// RPC-level errors returned by the privacy service.
private object PrivacyServiceErrors {
  // Returned when the client tries to block a peer that is already blocked.
  val UserAlreadyBlocked = RpcError(400, "USER_ALREADY_BLOCKED", "User is already blocked.", false, None)
  // Returned when the client tries to unblock a peer that is not currently blocked.
  val UserNotBlocked = RpcError(400, "USER_NOT_BLOCKED", "User is not blocked.", false, None)
}
// Implements the privacy RPC service: blocking/unblocking peers and listing blocked users.
final class PrivacyServiceImpl(implicit system: ActorSystem) extends PrivacyService {
  import FutureResultRpc._
  import PrivacyServiceErrors._

  implicit protected val ec: ExecutionContext = system.dispatcher

  // Database access and the sequence-update extension used to deliver client updates.
  private val db = DbExtension(system).db
  private val seqUpdExt = SeqUpdatesExtension(system)
  /**
   * Blocks `peer` on behalf of the authorized client. If a relation row already exists it is
   * switched to Blocked (failing with UserAlreadyBlocked if it already is); otherwise a new
   * Blocked relation is created. Finally an UpdateUserBlocked seq-update is delivered.
   */
  protected def doHandleBlockUser(peer: ApiUserOutPeer, clientData: ClientData): Future[HandlerResult[ResponseSeq]] =
    authorized(clientData) { client ⇒
      (for {
        optRelation ← fromFuture(db.run(RelationRepo.find(client.userId, peer.userId)))
        _ ← optRelation match {
          case Some(relation) ⇒
            for {
              // Fail fast if the peer is already blocked.
              _ ← fromBoolean(UserAlreadyBlocked)(relation.status != RelationStatus.Blocked)
              _ ← fromFuture(db.run(RelationRepo.block(client.userId, peer.userId)))
            } yield ()
          case None ⇒
            // No prior relation: create one directly in Blocked status.
            val newRelation = Relation(client.userId, peer.userId, RelationStatus.Blocked)
            fromFuture(db.run(RelationRepo.create(newRelation)))
        }
        s ← fromFuture(seqUpdExt.deliverSingleUpdate(client.userId, UpdateUserBlocked(peer.userId)))
      } yield ResponseSeq(s.seq, s.state.toByteArray)).value
    }
  /**
   * Unblocks `peer` on behalf of the authorized client, then delivers an
   * UpdateUserUnblocked seq-update.
   */
  protected def doHandleUnblockUser(peer: ApiUserOutPeer, clientData: ClientData): Future[HandlerResult[ResponseSeq]] =
    authorized(clientData) { client ⇒
      (for {
        optRelation ← fromFuture(db.run(RelationRepo.find(client.userId, peer.userId)))
        _ ← optRelation match {
          case Some(relation) ⇒
            for {
              // Only a currently-blocked relation may be unblocked.
              _ ← fromBoolean(UserNotBlocked)(relation.status == RelationStatus.Blocked)
              _ ← fromFuture(db.run(RelationRepo.unblock(client.userId, peer.userId)))
            } yield ()
          case None ⇒
            // NOTE(review): when no relation exists this creates an Approved relation instead of
            // failing with UserNotBlocked (asymmetric with the Some branch) — confirm intended.
            val newRelation = Relation(client.userId, peer.userId, RelationStatus.Approved)
            fromFuture(db.run(RelationRepo.create(newRelation)))
        }
        s ← fromFuture(seqUpdExt.deliverSingleUpdate(client.userId, UpdateUserUnblocked(peer.userId)))
      } yield ResponseSeq(s.seq, s.state.toByteArray)).value
    }
protected def doHandleLoadBlockedUsers(clientData: ClientData): Future[HandlerResult[ResponseLoadBlockedUsers]] =
authorized(clientData) { client ⇒
for {
ids ← db.run(RelationRepo.fetchBlockedIds(client.userId))
outPeers ← Future.sequence(ids map (id ⇒ ACLUtils.getUserOutPeer(id, client.authId)))
} yield Ok(ResponseLoadBlockedUsers(outPeers.toVector))
}
} | ljshj/actor-platform | actor-server/actor-rpc-api/src/main/scala/im/actor/server/api/rpc/service/privacy/PrivacyServiceImpl.scala | Scala | mit | 3,424 |
/**
* Copyright (C) 2010 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.submission
import java.io.IOException
import java.util.concurrent.Callable
import org.orbeon.oxf.common.OXFException
import org.orbeon.oxf.externalcontext.{LocalResponse, LocalRequest, URLRewriter}
import org.orbeon.oxf.http.{EmptyInputStream, Headers, StreamedContent}
import org.orbeon.oxf.pipeline.api.ExternalContext
import org.orbeon.oxf.pipeline.api.ExternalContext.Request
import org.orbeon.oxf.util.{Connection, ConnectionResult, IndentedLogger, NetUtils}
import org.orbeon.oxf.xforms.event.events.XFormsSubmitErrorEvent
import org.orbeon.oxf.xforms.{XFormsProperties, XFormsUtils}
// Strategy for executing one submission request and writing its result to `response`.
trait SubmissionProcess {
  def process(request: ExternalContext.Request, response: ExternalContext.Response)
}
/**
 * Base class for XForms submission implementations. Provides URL resolution, synchronous vs.
 * asynchronous scheduling of submission work, and local (request-dispatcher/portlet) execution.
 */
abstract class BaseSubmission(val submission: XFormsModelSubmission) extends Submission {

  import BaseSubmission._

  val containingDocument = submission.containingDocument

  /**
   * Resolve `resolvedActionOrResource` (plus optional query string) into an absolute URL,
   * against the resource base or the service base depending on the submission's URL type.
   */
  protected def getAbsoluteSubmissionURL(
    resolvedActionOrResource : String,
    queryString              : String,
    isNorewrite              : Boolean
  ): String = {

    // NOTE: For resolveServiceURL: If the resource or service URL does not start with a protocol or with '/', the
    // URL is resolved against the request path, then against the service base. Example in servlet environment:
    //
    // - action path: my/service
    // - request URL: http://orbeon.com/orbeon/myapp/mypage
    // - request path: /myapp/mypage
    // - service base: http://services.com/myservices/
    // - resulting service URL: http://services.com/myservices/myapp/my/service

    val resolve =
      if (submission.getUrlType == "resource")
        XFormsUtils.resolveResourceURL _
      else
        XFormsUtils.resolveServiceURL _

    resolve(
      containingDocument,
      submission.getSubmissionElement,
      NetUtils.appendQueryString(resolvedActionOrResource, queryString),
      if (isNorewrite) URLRewriter.REWRITE_MODE_ABSOLUTE_NO_CONTEXT else URLRewriter.REWRITE_MODE_ABSOLUTE
    )
  }

  /**
   * Submit the Callable for synchronous or asynchronous execution.
   *
   * @return ConnectionResult or null if asynchronous
   */
  protected def submitCallable(
    p        : XFormsModelSubmission#SubmissionParameters,
    p2       : XFormsModelSubmission#SecondPassParameters,
    callable : Callable[SubmissionResult]
  ): SubmissionResult =
    if (p2.isAsynchronous) {
      // Tell XFCD that we have one more async submission
      containingDocument.getAsynchronousSubmissionManager(true).addAsynchronousSubmission(callable)
      // Tell caller he doesn't need to do anything
      null
    } else if (p.isDeferredSubmissionSecondPass) {
      // Tell XFCD that we have a submission replace="all" ready for a second pass
      // Tell caller he doesn't need to do anything
      containingDocument.setReplaceAllCallable(callable)
      null
    } else {
      // Just run it now
      callable.call
    }

  // Delegate logger selection to the submission itself.
  protected def getDetailsLogger(
    p  : XFormsModelSubmission#SubmissionParameters,
    p2 : XFormsModelSubmission#SecondPassParameters
  ) = submission.getDetailsLogger(p, p2)

  protected def getTimingLogger(
    p  : XFormsModelSubmission#SubmissionParameters,
    p2 : XFormsModelSubmission#SecondPassParameters
  ) = submission.getTimingLogger(p, p2)

  /**
   * Perform a local (request dispatcher or portlet) submission.
   */
  protected def openLocalConnection(
    incomingRequest        : Request,
    response               : ExternalContext.Response,
    indentedLogger         : IndentedLogger,
    resource               : String,
    p                      : XFormsModelSubmission#SubmissionParameters,
    actualRequestMediatype : String,
    encoding               : String,
    messageBodyOrNull      : Array[Byte],
    queryString            : String,
    customHeaderNameValues : Map[String, List[String]],
    submissionProcess      : SubmissionProcess,
    isContextRelative      : Boolean,
    isDefaultContext       : Boolean
  ): ConnectionResult = {

    // Action must be an absolute path
    if (! resource.startsWith("/"))
      throw new OXFException("Action does not start with a '/': " + resource)

    val httpMethodUpper = p.actualHttpMethod

    // handle case of empty body
    val messageBody = Option(messageBodyOrNull)

    // Destination context path is the context path of the current request, or the context path implied by the new URI
    val destinationContextPath =
      if (isDefaultContext)
        ""
      else if (isContextRelative)
        incomingRequest.getContextPath
      else
        NetUtils.getFirstPathElement(resource)

    // Determine headers
    val requestHeaders =
      Connection.buildConnectionHeadersLowerWithSOAPIfNeeded(
        scheme           = "http",
        httpMethodUpper  = httpMethodUpper,
        hasCredentials   = false,
        mediatype        = actualRequestMediatype,
        encodingForSOAP  = encoding,
        customHeaders    = customHeaderNameValues,
        headersToForward = Connection.headersToForwardFromProperty)(
        logger           = indentedLogger
      )

    // Append the query string to the resource path, using '?' or '&' as appropriate.
    val effectiveResourceURI = {

      val updatedActionStringBuilder = new StringBuilder(resource)

      if (queryString ne null) {
        if (resource.indexOf('?') == -1)
          updatedActionStringBuilder.append('?')
        else
          updatedActionStringBuilder.append('&')

        updatedActionStringBuilder.append(queryString)
      }

      updatedActionStringBuilder.toString
    }

    // Strip the leading context path element unless the URI is context-relative already.
    val rootAdjustedResourceURI =
      if (isDefaultContext || isContextRelative)
        effectiveResourceURI
      else
        NetUtils.removeFirstPathElement(effectiveResourceURI)

    if (rootAdjustedResourceURI eq null)
      throw new OXFException("Action must start with a servlet context path: " + resource)

    val content =
      messageBody map { bytes ⇒
        // Optionally dump the request body to the debug log (see BaseSubmission.isLogBody).
        if (Connection.requiresRequestBody(httpMethodUpper) && indentedLogger.isDebugEnabled && isLogBody)
          Connection.logRequestBody(actualRequestMediatype, bytes)(indentedLogger)

        StreamedContent.fromBytes(bytes, Headers.firstHeaderIgnoreCase(requestHeaders, Headers.ContentType))
      }

    // Synthetic request representing the local submission, dispatched in-process.
    val localRequest =
      new LocalRequest(
        incomingRequest         = incomingRequest,
        contextPath             = destinationContextPath,
        pathQuery               = rootAdjustedResourceURI,
        methodUpper             = httpMethodUpper,
        headersMaybeCapitalized = requestHeaders,
        content                 = content
      )

    if (indentedLogger.isDebugEnabled)
      indentedLogger.logDebug(
        "",
        "dispatching request",
        "method", httpMethodUpper,
        "mediatype", actualRequestMediatype,
        "context path", destinationContextPath,
        "effective resource URI (original)", effectiveResourceURI,
        "effective resource URI (relative to servlet root)", rootAdjustedResourceURI
      )

    // Reason we use a Response passed is for the case of replace="all" when XFormsContainingDocument provides a Response
    val effectiveResponse = if (! p.isReplaceAll) null else response

    if (p.isReplaceAll) {
      // replace="all": write straight through to the effective response.
      val replaceAllResponse = new AllReplacer.ReplaceAllResponse(effectiveResponse)

      submissionProcess.process(localRequest, replaceAllResponse)

      // Placeholder content whose close() flushes/closes the real response stream.
      val dummyContent = new StreamedContent(
        EmptyInputStream,
        None,
        None,
        None
      ) {
        override def close(): Unit = {
          super.close()

          // Try to obtain, flush and close the stream to work around WebSphere issue
          try {
            if (effectiveResponse ne null) {
              val os = effectiveResponse.getOutputStream
              os.flush()
              os.close()
            }
          } catch {
            case e: IllegalStateException ⇒
              indentedLogger.logDebug("", "IllegalStateException caught while closing OutputStream after forward")
              // Output stream already in writer mode: fall back to flushing the writer.
              try {
                if (effectiveResponse ne null) {
                  val writer = effectiveResponse.getWriter
                  writer.flush()
                  writer.close()
                }
              } catch {
                case f: IllegalStateException ⇒
                  indentedLogger.logDebug("", "IllegalStateException caught while closing Writer after forward")
                case f: IOException ⇒
                  indentedLogger.logDebug("", "IOException caught while closing Writer after forward")
              }
            case e: IOException ⇒
              indentedLogger.logDebug("", "IOException caught while closing OutputStream after forward")
          }
        }
      }

      val cxr = ConnectionResult(
        url                = effectiveResourceURI,
        statusCode         = replaceAllResponse.getStatus max 0,
        headers            = org.orbeon.oxf.http.Headers.EmptyHeaders,
        content            = dummyContent,
        dontHandleResponse = true
      )

      // Here we cause dispatch xforms-submit-error upon getting a non-success error code, even though the
      // response has already been written out. This gives the form author a chance to do something in cases
      // the response is buffered, for example do a sendError().
      // HOWEVER: We don't do this
      if (! p.isDeferredSubmissionSecondPass) {
        if (! NetUtils.isSuccessCode(cxr.statusCode) && ! p.isDeferredSubmissionSecondPass)
          throw new XFormsSubmissionException(
            submission,
            s"xf:submission for submission id: ${submission.getId}, error code received when submitting instance: ${cxr.statusCode}",
            "processing submission response",
            new XFormsSubmitErrorEvent(submission, XFormsSubmitErrorEvent.RESOURCE_ERROR, cxr)
          )
      } else {
        // Two reasons: 1. We don't want to modify the document state 2. This can be called outside of the document
        // lock, see XFormsServer.
      }

      cxr
    } else {
      // We must intercept the reply
      // NOTE(review): the `response: URLRewriter` ascription presumably selects the
      // URLRewriter-based LocalResponse constructor — confirm against LocalResponse.
      val responseAdapter = new LocalResponse(response: URLRewriter)

      submissionProcess.process(localRequest, responseAdapter)

      val responseHeaders = responseAdapter.capitalizedHeaders

      ConnectionResult(
        url         = effectiveResourceURI,
        statusCode  = responseAdapter.statusCode max 0,
        headers     = responseHeaders,
        content     = StreamedContent(
          inputStream   = responseAdapter.getInputStream,
          contentType   = Headers.firstHeaderIgnoreCase(responseHeaders, Headers.ContentType),
          contentLength = Headers.firstLongHeaderIgnoreCase(responseHeaders, Headers.ContentLength),
          title         = None
        )
      )
    }
  }
}
object BaseSubmission {
  // Whether submission request bodies should be dumped to the debug log, controlled by
  // the "submission-body" token in the debug-logging property.
  def isLogBody =
    XFormsProperties.getDebugLogging.contains("submission-body")
}
| ajw625/orbeon-forms | src/main/scala/org/orbeon/oxf/xforms/submission/BaseSubmission.scala | Scala | lgpl-2.1 | 13,087 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.execution.exceptions
import scala.runtime.AbstractFunction1
/** Generic exception thrown on API contract violations.
  *
  * @param message human-readable description of the violated contract (may be `null`)
  * @param cause   underlying throwable, if any (may be `null`)
  */
class APIContractViolationException(val message: String, cause: Throwable)
  extends RuntimeException(message, cause) with Serializable {

  /** Creates an instance from a cause alone; the message is left `null`. */
  def this(cause: Throwable) = this(null, cause)

  /** Creates an instance from a message alone; the cause is left `null`. */
  def this(message: String) = this(message, null)
}
object APIContractViolationException
extends AbstractFunction1[String, APIContractViolationException] {
  /** Builder for [[APIContractViolationException]].
    *
    * @param message description of the violated contract
    */
  def apply(message: String): APIContractViolationException =
    new APIContractViolationException(message)
} | Wogan/monix | monix-execution/shared/src/main/scala/monix/execution/exceptions/APIContractViolationException.scala | Scala | apache-2.0 | 1,327 |
/*
* Copyright 2013-2015 Websudos, Limited.
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* - Explicit consent must be obtained from the copyright owner, Websudos Limited before any redistribution is made.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.websudos.phantom.server
import scala.concurrent.Await
import scala.concurrent.duration._
import org.joda.time.format.DateTimeFormat
import org.json4s.{DefaultFormats, Formats}
import org.scalatra.ScalatraServlet
import org.scalatra.json.JacksonJsonSupport
import org.scalatra.scalate.ScalateSupport
import com.websudos.phantom.dsl._
import com.websudos.phantom.testkit.PhantomCassandraConnector
/**
 * Scalatra servlet exposing equity and option price time series as JSON.
 * Routes take an instrument id plus a from/to date range in the URL.
 */
class PricesAccess extends ScalatraServlet with JacksonJsonSupport with ScalateSupport with PhantomCassandraConnector {

  // URL dates are expected in "yyyymmdd" form, e.g. "20150131".
  // NOTE: in Joda-Time, pattern 'Y' is "year of era" (not week-year as in SimpleDateFormat),
  // so "YYYYMMdd" behaves like "yyyyMMdd" for modern dates.
  private[this] val dateFormat = DateTimeFormat.forPattern("YYYYMMdd")

  protected implicit val jsonFormats: Formats =
    DefaultFormats.withBigDecimal ++ org.json4s.ext.JodaTimeSerializers.all

  before() {
    contentType = formats("json")
  }

  // Equity prices for one instrument over [from, to], inclusive on both ends.
  get("/prices/equity/:id/from/:from/to/:to") {
    val id = params("id")
    val from = dateFormat.parseLocalDate(params("from"))
    val to = dateFormat.parseLocalDate(params("to"))

    val prices = EquityPrices.select
      .where(_.instrumentId eqs id)
      .and(_.tradeDate gte from.toDate)
      .and(_.tradeDate lte to.toDate)
      .fetch()

    // Scalatra route bodies are synchronous, hence the blocking wait on the Cassandra query.
    Await.result(prices, 10.seconds)
  }

  // Option prices for one instrument over [from, to], inclusive on both ends.
  get("/prices/option/:id/from/:from/to/:to") {
    val id = params("id")
    val from = dateFormat.parseLocalDate(params("from"))
    val to = dateFormat.parseLocalDate(params("to"))

    val prices = OptionPrices.select
      .where(_.instrumentId eqs id)
      .and(_.tradeDate gte from.toDate)
      .and(_.tradeDate lte to.toDate)
      .fetch()

    Await.result(prices, 10.seconds)
  }
}
| analytically/phantom | phantom-scalatra-test/src/main/scala/com/websudos/phantom/server/PricesAccess.scala | Scala | bsd-2-clause | 3,067 |
package org.jetbrains.plugins.scala.findUsages.compilerReferences.indices
import org.jetbrains.jps.backwardRefs.index.CompilerReferenceIndex
import org.jetbrains.plugins.scala.indices.protocol.CompilationInfo
// One step in the lifecycle of building the compiler-references index.
sealed trait IndexingStage

object IndexingStage {
  // Invoked when index writing completes; receives the failure, if any occurred.
  type IndexingHandler = Option[IndexerFailure] => Unit
  // Simple completion callback.
  type Callback = () => Unit

  // Opens the index writer; `isCleanBuild` presumably distinguishes a full rebuild
  // from an incremental one — confirm against the handler of these stages.
  final case class OpenWriter(isCleanBuild: Boolean) extends IndexingStage
  // Feeds one compilation's data into the index, then runs `onFinish`.
  final case class ProcessCompilationInfo(data: CompilationInfo, onFinish: Callback) extends IndexingStage
  // Closes the writer and reports the overall outcome to `onFinish`.
  final case class CloseWriter(onFinish: IndexingHandler) extends IndexingStage
  // Invalidates the given index, when present.
  final case class InvalidateIndex(index: Option[CompilerReferenceIndex[_]]) extends IndexingStage
}
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/findUsages/compilerReferences/indices/IndexingStage.scala | Scala | apache-2.0 | 785 |
package org.antipathy.mvn_scalafmt.logging
import java.io.File
import org.apache.maven.monitor.logging.DefaultLog
import org.codehaus.plexus.logging.console.ConsoleLogger
import org.scalafmt.dynamic.exceptions.ScalafmtException
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.GivenWhenThen
import org.scalatest.matchers.should.Matchers
import org.codehaus.plexus.logging.Logger
/**
 * Verifies that MavenLogReporter rethrows errors reported by Scalafmt,
 * preserving the message for each of the three `error` overloads.
 */
class MavenLogReporterSpec extends AnyFlatSpec with GivenWhenThen with Matchers {

  // Reporter under test, backed by a console-based Maven log at debug level.
  val reporter = new MavenLogReporter(
    new DefaultLog(new ConsoleLogger(Logger.LEVEL_DEBUG, this.getClass.getSimpleName))
  )

  behavior of "MavenLogReporter"

  it should "throw an error if error is reported by Scalafmt" in {
    // error(path, message) wraps the message in a ScalafmtException.
    val ex1 = intercept[ScalafmtException] {
      reporter.error(new File("").toPath, "Oops")
    }
    ex1.getMessage shouldEqual "Oops"

    // error(path, message, throwable) surfaces the message, not the cause's message.
    val ex2 = intercept[RuntimeException] {
      reporter.error(new File("").toPath, "No way!", new RuntimeException("Oops"))
    }
    ex2.getMessage shouldEqual "No way!"

    // error(path, throwable) surfaces the throwable's own message.
    val ex3 = intercept[RuntimeException] {
      reporter.error(new File("").toPath, new RuntimeException("Oops"))
    }
    ex3.getMessage shouldEqual "Oops"
  }
}
| SimonJPegg/mvn_scalafmt | src/test/scala/org/antipathy/mvn_scalafmt/logging/MavenLogReporterSpec.scala | Scala | apache-2.0 | 1,197 |
package akka.contrib.persistence.mongodb
import akka.actor.ActorSystem
import akka.contrib.persistence.mongodb.ConfigLoanFixture._
import com.typesafe.config.ConfigFactory
import org.junit.runner.RunWith
import org.scalatest.BeforeAndAfterAll
import org.scalatest.junit.JUnitRunner
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent._
import scala.concurrent.duration._
@RunWith(classOf[JUnitRunner])
// Verifies that the RxMongo driver releases its connection pool when the actor
// system terminates, and that a fresh driver can reconnect afterwards.
class RxMongoPersistenceDriverShutdownSpec extends BaseUnitTest with ContainerMongo with BeforeAndAfterAll {

  override def afterAll() = cleanup()

  override def embedDB = "rxmongo-shutdown"

  // Minimal config pointing the plugin at the unauthenticated test Mongo instance.
  val shutdownConfig = ConfigFactory.parseString(
    s"""
      |akka.contrib.persistence.mongodb.mongo {
      | mongouri = "mongodb://$host:$noAuthPort/$embedDB"
      | db = "shutdown-spec"
      |}
    """.stripMargin)

  // Test driver exposing a simple operation (listing collections) to probe connectivity.
  class MockRxMongoPersistenceDriver(actorSystem:ActorSystem) extends RxMongoDriver(actorSystem, ConfigFactory.empty()) {
    def showCollections = db.flatMap(_.collectionNames)
  }

  "An rxmongo driver" should "close the mongodb connection pool on actor system shutdown" in withConfig(shutdownConfig,"akka-contrib-mongodb-persistence-journal","shutdown-config") { case (actorSystem,_) =>
    val underTest = new MockRxMongoPersistenceDriver(actorSystem)
    underTest.actorSystem.terminate()
    Await.result(underTest.actorSystem.whenTerminated, 10.seconds)
    // After termination any database access must fail because the pool is closed.
    intercept[IllegalStateException] {
      Await.result(underTest.showCollections,3.seconds).size
    }
    ()
  }

  it should "reconnect if a new driver is created" in withConfig(shutdownConfig,"akka-contrib-mongodb-persistence-journal","shutdown-config") { case (actorSystem,_) =>
    val underTest = new MockRxMongoPersistenceDriver(actorSystem)
    underTest.actorSystem.terminate()
    Await.result(underTest.actorSystem.whenTerminated, 10.seconds)

    // A second actor system with the same config should connect cleanly.
    val test2 = ActorSystem("test2",shutdownConfig)
    try {
      val underTest2 = new MockRxMongoPersistenceDriver(test2)
      Await.result(underTest2.showCollections, 3.seconds).size should be(0)
    } finally {
      test2.terminate()
      ()
    }
    ()
  }
}
// Exercises username/password authentication against a secured MongoDB instance.
@RunWith(classOf[JUnitRunner])
class RxMongoPersistenceDriverAuthSpec extends BaseUnitTest with ContainerMongo with BeforeAndAfterAll {

  // Mongo 3.0/3.2 need the scram-sha1 auth mode selected explicitly in the URI.
  val authMode = if ("3.0" :: "3.2" :: Nil exists envMongoVersion.contains) "?authMode=scram-sha1" else ""

  val authConfig = ConfigFactory.parseString(
    s"""
       |akka.contrib.persistence.mongodb.mongo {
       | mongouri = "mongodb://admin:password@$host:$authPort/admin$authMode"
       |}
     """.stripMargin)

  "A secured mongodb instance" should "be connectable via user and pass" in withConfig(authConfig, "akka-contrib-mongodb-persistence-journal", "authentication-config") { case (actorSystem, config) =>
    val underTest = new RxMongoDriver(actorSystem, config)
    val collections = Await.result(underTest.db.flatMap(_.collectionNames), 3.seconds)
    // system.users is expected to exist in the admin DB once auth is enabled.
    collections should contain ("system.users")
    ()
  }
} | alari/akka-persistence-mongo | rxmongo/src/test/scala/akka/contrib/persistence/mongodb/RxMongoPersistenceDriverSpec.scala | Scala | apache-2.0 | 3,022 |
/**
* The MIT License (MIT)
* <p/>
* Copyright (c) 2016 ScalateKids
* <p/>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p/>
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* <p/>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
* <p/>
* @author Scalatekids
* @version 1.0
* @since 1.0
*/
package com.actorbase.cli.models
/**
  * CLI command that changes the current user's password.
  *
  * Follows the command pattern used throughout the ActorbaseCLI models
  * package: the command is constructed around a receiver and defers the
  * actual work to it when executed.
  *
  * @param cr the receiver holding the password-change implementation
  */
class ChangePasswordCommand(cr: CommandReceiver) extends Command {

  /**
    * Runs the command by delegating to the receiver.
    *
    * @return the receiver's result message for the user
    */
  override def execute() : String = {
    cr.changePassword
  }
}
| ScalateKids/Actorbase-Client | src/main/scala/com/actorbase/cli/models/ChangePasswordCommand.scala | Scala | mit | 1,822 |
package chapter.nine
// Placeholder for the chapter 9, exercise 2 solution — not yet implemented.
object ExerciseTwo extends App {
}
| deekim/impatient-scala | src/main/scala/chapter/nine/ExerciseTwo.scala | Scala | apache-2.0 | 59 |
package aia.channels
import akka.actor.Actor
/** Actor that replies to every message by sending it straight back to the sender. */
class EchoActor extends Actor {
  def receive = {
    case msg: AnyRef => sender() ! msg
  }
}
| RayRoestenburg/akka-in-action | chapter-channels/src/main/scala/aia/channels/DeadLetterControl.scala | Scala | mit | 149 |
package grammarcomp
package grammar.examples
import grammar._
import CFGrammar._
import GrammarReaders.GrammarImplicit
// Grammar-comparison quiz entry based on Harrison & Havel (1978), page 177:
// the submitted grammar S2 is registered as NOT equivalent to the reference S1.
object Harrison1978First extends QuizResult {
  import GrammarReaders._
  import QuizResult._

  // Grade weight used implicitly by the quiz DSL entries below.
  implicit def MAX_GRADE = 0

  override def quizName = "Harrison-Havel 1978"

  override def reference = bnfgrammar"""S1 -> a A B
  A -> a A B | b
  B -> b"""

  this add bnfgrammar"""S2 -> a C
  C -> a C D | b D
  D -> b""" not correct comment "Page 177"
} | epfl-lara/GrammarComparison | src/main/scala/grammarcomp/grammar/examples/Harrison1978-1.scala | Scala | mit | 490 |
package eu.ace_design.island.game.actions
import eu.ace_design.island.game._
import eu.ace_design.island.stdlib.PointOfInterests.Creek
import eu.ace_design.island.stdlib.Resources
import eu.ace_design.island.stdlib.Resources._
import org.junit.runner.RunWith
import org.specs2.mutable.SpecificationWithJUnit
import org.specs2.runner.JUnitRunner
// Unit tests for the Transform action: recipe validation, production bounds
// (production has +/- 10% randomness) and post-transformation accounting.
@RunWith(classOf[JUnitRunner])
class TransformTest extends SpecificationWithJUnit {

  "Transform Action Specification".title

  val g = Game(Budget(800), Crew(50), Set((Resources.WOOD, 600)))
  val b = GameBoard(size = 600, m = null, pois = Map((0,0) -> Set(Creek(identifier = "c", location = None))))

  "The Transform action" should {

    val getSomeRum = Transform(Map(SUGAR_CANE -> 102, FRUITS -> 15)) // should produced around 10 units, +/- 10%
    val getSomeRum2 = Transform(Map(SUGAR_CANE -> -100, FRUITS -> -100)) // should produced around 10 units, +/- 10%
    val onLand = exec(Seq(MovedBoatResult(loc = (0,0), men = 2)), g) // Mens are on land now

    "reject to build something with no mens on land" in {
      getSomeRum.buildResult(b, g) must throwAn[IllegalArgumentException]
    }
    "reject to build something without enough resources to do so" in {
      getSomeRum.buildResult(b, onLand) must throwAn[IllegalArgumentException]
    }
    "reject to build something with negative value" in {
      getSomeRum2.buildResult(b, onLand) must throwAn[IllegalArgumentException]
    }
    "reject to transform using an unknown recipe" in {
      val unknown = Transform(Map(WOOD -> 15, FUR -> 15))
      val g1 = onLand.harvest(WOOD, (0,0), 100).harvest(FUR,(0,0),100)
      unknown.buildResult(b,g1) must throwAn[IllegalArgumentException]
    }
    "transform resources according to recipes" in {
      val g1 = onLand.harvest(SUGAR_CANE, (0,0), 900).harvest(FRUITS,(0,0),50)
      val res = getSomeRum.buildResult(b, g1).asInstanceOf[TransformResult]
      res.consumed must_== getSomeRum.materials
      res.kind must_== RUM
      // 10 units expected, +/- 10% randomness.
      res.production must beGreaterThanOrEqualTo(9)
      res.production must beLessThanOrEqualTo(11)
      val cost = getSomeRum.computeCost(b, g1)
      cost must beGreaterThan(0.0)
    }
    "produce the minimum of what can be actually produced with the given resources" in {
      // Asking to transform GLASS with 1200 units of QUARTZ but only 10 WOODs will only yield ~2 units of GLASS
      // The remaining resources are wasted
      val stupidTransformation = Transform(Map(QUARTZ -> 1200, WOOD -> 10))
      val g1 = onLand.harvest(QUARTZ, (0,0), 1500).harvest(WOOD,(0,0),50)
      val res = stupidTransformation.buildResult(b, g1).asInstanceOf[TransformResult]
      res.consumed must_== stupidTransformation.materials
      res.kind must_== GLASS
      res.production must beGreaterThanOrEqualTo(1)
      res.production must beLessThanOrEqualTo(3)
      val cost = stupidTransformation.computeCost(b, g1)
      cost must beGreaterThan(0.0)
    }
    "adapt the game with the relevant resources after transformation" in {
      val result = TransformResult(kind = RUM, production = 1, consumed = getSomeRum.materials)
      val updated = exec(Seq(result), onLand.harvest(SUGAR_CANE, (0,0), 900).harvest(FRUITS,(0,0),50))
      // Consumed inputs are deducted, the produced RUM is credited.
      updated.collectedResources must contain(SUGAR_CANE -> 798)
      updated.collectedResources must contain(FRUITS -> 35)
      updated.collectedResources must contain(RUM -> 1)
    }
  }
}
| ace-design/island | engine/src/test/scala/eu/ace_design/island/game/actions/TransformTest.scala | Scala | lgpl-3.0 | 3,445 |
package com.scalableQuality.quick.core.fileProcessingPhase
import com.scalableQuality.quick.core.fileComponentDescriptions.OrderedRowDescription
import com.scalableQuality.quick.mantle.parsing.RawRow
import scala.annotation.tailrec
import scala.collection.{immutable, mutable}
object ValidationProcess {

  /**
   * Matches the rows of the two files by their validation signature (as
   * computed by `orderedRowDescription`).
   *
   * @return (rows present only in the left file, rows present only in the right file)
   */
  def apply(
      orderedRowDescription: OrderedRowDescription,
      leftFileRows: List[RawRow],
      rightFileRows: List[RawRow]
  ): (List[RawRow], List[RawRow]) = {
    val leftFileAsHashMap =
      buildValidationHashMap(orderedRowDescription, leftFileRows)
    validate(leftFileAsHashMap, orderedRowDescription, rightFileRows, Nil)
  }

  private type validationSignature = immutable.List[Option[List[Byte]]]

  // since the validation is just looking up whether a row in the left file
  // and a row in the right file have the same validation signature,
  // a hashmap with the validation signature as the key, is used to enable efficient validation
  // a multimap trait is used to avoid processing multiple rows with the same signature as a single row
  private type validationSignatureHashMap = mutable.HashMap[
    validationSignature,
    mutable.Set[RawRow]] with mutable.MultiMap[validationSignature, RawRow]

  // Groups every row of `file` under its validation signature.
  private def buildValidationHashMap(
      orderedRowDescription: OrderedRowDescription,
      file: List[RawRow]
  ): validationSignatureHashMap = {
    val validationHashMap =
      new mutable.HashMap[validationSignature, mutable.Set[RawRow]]
        with mutable.MultiMap[validationSignature, RawRow]
    file.foreach { row =>
      val validationSignature = orderedRowDescription.validationSignatureOf(row)
      validationHashMap.addBinding(validationSignature, row)
    }
    validationHashMap
  }

  // Consumes the right file row by row, removing each matched row from the
  // left-file map; whatever remains on either side is reported as unmatched.
  @tailrec private def validate(
      leftFileAsHashMap: validationSignatureHashMap,
      rowDescription: OrderedRowDescription,
      rightFile: List[RawRow],
      rowsAbsentFromLeftFile: List[RawRow]
  ): (List[RawRow], List[RawRow]) = rightFile match {
    case Nil =>
      // Right file exhausted: everything still in the map has no right-side match.
      val rowsAbsentFromRightFile = for {
        tupleOfKeyAndRowSet <- leftFileAsHashMap
        rowSet <- tupleOfKeyAndRowSet._2
      } yield rowSet
      (rowsAbsentFromRightFile.toList, rowsAbsentFromLeftFile)
    case _ if (leftFileAsHashMap.isEmpty) =>
      // Left side fully consumed: all remaining right rows are unmatched.
      (Nil, rightFile ::: rowsAbsentFromLeftFile)
    case rightRow :: restOfRightFile =>
      val rightRowValidationSignature =
        rowDescription.validationSignatureOf(rightRow)
      // Any one row with the same signature counts as the match.
      val equivalentLeftRowOpt =
        leftFileAsHashMap.get(rightRowValidationSignature).flatMap(_.headOption)
      equivalentLeftRowOpt match {
        case None =>
          validate(
            leftFileAsHashMap,
            rowDescription,
            restOfRightFile,
            rightRow :: rowsAbsentFromLeftFile
          )
        case Some(equivalentLeftRow) =>
          // Remove the matched row so duplicates are matched one-to-one.
          leftFileAsHashMap.removeBinding(rightRowValidationSignature,
            equivalentLeftRow)
          validate(
            leftFileAsHashMap,
            rowDescription,
            restOfRightFile,
            rowsAbsentFromLeftFile
          )
      }
  }
}
| MouslihAbdelhakim/Quick | src/main/scala/com/scalableQuality/quick/core/fileProcessingPhase/ValidationProcess.scala | Scala | apache-2.0 | 3,140 |
/**
Measures the similarity of two strings whereby two identical
strings will measure 100%. Two strings that are closely similar
have a higher score than two completley dissimilar strings
which measure 0%.
Routine first finds the longest matching substring in two
similar strings (source and target strings). For example, in the
almost matching strings of "philadelphia" and "philedelphia", the
longest matching, common substring is "delphia". The substrings to
the left of the longest common substring ("phila" and "phile"), are
subject to the same treatment; in which case, the common substring
"phil" is found. The substrings to the left of "phil"
would repeat the operation if any existed.
The substrings to the right of the longest matching, common
substring are processed in the same way. There are no substring to
the right of "delphia". The substrings to the right of "phil"
are "a" and "e"; the routine found no match in these substrings.
The length of the matching substrings ("delphia" and "phil") are
totaled and divided by the length of the target string
("philadelphia") to yield a measure of similarity (92%).
*/
package com.client
import scala.collection.mutable.ListBuffer
//class Gestalt {
object Gestalt {

  /**
   * True when `source` is at least `percent` similar to `target`.
   *
   * @param percent integer percentage threshold given as a string (e.g. "90");
   *                a non-numeric value raises NumberFormatException
   * @param target  the reference string (its length is the rating denominator)
   * @param source  the candidate string compared against `target`
   */
  def testGestalt(percent: String, target: String, source: String): Boolean =
    gestaltRating(source, target) >= percent.toInt

  /**
   * Rates the similarity of `source` and `target` from 0 (nothing in common)
   * to 100 (identical strings).
   */
  def gestaltRating(source: String, target: String): Int =
    if (source == target) 100 // complete match, skip the substring search
    else ratePartialMatch(source, target)

  /** Rating for non-identical strings: matched characters over `target` length. */
  def ratePartialMatch(source: String, target: String): Int =
    rateSourceTargetAggrement(matchSubstrings(source, target), target)

  /**
   * All common substrings of `source` and `target`, longest match first.
   * For example "xapplle" and "zappell" yield "app", "ll", "e" in that order.
   */
  def matchSubstrings(source: String, target: String): List[String] = {
    val buffer = new ListBuffer[String]
    val longest = findLongestSubstringMatch(source, target)
    leftAndRightOfLongestSubstring(buffer, longest, source, target)
    buffer.toList
  }

  /**
   * The longest common substring of `o` and `t`, or "" when no character of
   * `o` occurs in `t`. Among equal-length candidates the one found last wins.
   */
  def findLongestSubstringMatch(o: String, t: String): String = {
    // One candidate per matching character pair (i, j): the common run
    // starting there.
    val candidates =
      for {
        i <- 0 until o.length
        j <- 0 until t.length
        if o.charAt(i) == t.charAt(j)
      } yield getSubMatch(o.substring(i), t.substring(j))
    // `>=` makes the last-found candidate win on length ties.
    candidates.foldLeft("")((best, s) => if (s.length >= best.length) s else best)
  }

  /**
   * Records `longest` in `buffer`, then recursively matches the substrings to
   * the left and to the right of it in both strings (left side first), so the
   * buffer collects: longest match, then left-side matches, then right-side.
   * A no-op when `longest` is empty.
   */
  def leftAndRightOfLongestSubstring(buffer: ListBuffer[String],
                                     longest: String,
                                     source: String,
                                     target: String): Unit = {
    if (longest.nonEmpty) {
      buffer += longest
      val sourceIndex = source.indexOf(longest)
      val targetIndex = target.indexOf(longest)
      val leftSource  = source.substring(0, sourceIndex)
      val leftTarget  = target.substring(0, targetIndex)
      val rightSource = source.substring(sourceIndex + longest.length)
      val rightTarget = target.substring(targetIndex + longest.length)
      if (leftSource.nonEmpty && leftTarget.nonEmpty)
        leftAndRightOfLongestSubstring(
          buffer, findLongestSubstringMatch(leftSource, leftTarget), leftSource, leftTarget)
      if (rightSource.nonEmpty && rightTarget.nonEmpty)
        leftAndRightOfLongestSubstring(
          buffer, findLongestSubstringMatch(rightSource, rightTarget), rightSource, rightTarget)
    }
  }

  /**
   * The common prefix of `s` and `t`; "" when either string is empty or their
   * first characters differ. Recursion depth is bounded by the prefix length.
   */
  def getSubMatch(s: String, t: String): String =
    if (s.isEmpty || t.isEmpty || s.charAt(0) != t.charAt(0)) ""
    else s.charAt(0) + getSubMatch(s.substring(1), t.substring(1))

  /**
   * Summed length of `matches` divided by `target.length`, as a percentage
   * rounded to the nearest integer. (Method name — typo included — kept for
   * source compatibility with existing callers.)
   */
  def rateSourceTargetAggrement(matches: List[String], target: String): Int = {
    val matchedChars = matches.foldLeft(0)(_ + _.length) // replaces deprecated `/:`
    ((matchedChars / (target.length * 1.0)) * 100.0 + 0.5).toInt
  }
}
| hangle/Notecard | src/Gestalt.scala | Scala | apache-2.0 | 5,229 |
package edvorg.cf.zepto.heap
object A {
val lines = io.Source.fromFile("Heap/A.txt").getLines.drop(1).toBuffer
// val lines = io.Source.fromInputStream(System.in).getLines.drop(1).toArray
val ind = lines.indices.toArray
val res = new Array[Int](ind.length)
val heap = new Heap(lines, (x: String, y: String) => x < y, true, swap(_, _))
def swap(i: Int, j: Int) {
val tmp = ind(i)
ind(i) = ind(j)
ind(j) = tmp
}
def main(arr: Array[String]) {
heap.makeHeap(lines.length)
var i = 0
do {
res(ind(i)) = i + 1
i = i + 1
} while (i < ind.length)
res foreach println
}
}
| edvorg/scala-cf | src/main/scala/edvorg/cf/zepto/Heap/A.scala | Scala | gpl-3.0 | 601 |
package net.fehmicansaglam.bson.reader
import java.nio.ByteBuffer
import net.fehmicansaglam.bson.element.BsonNull
// Reads a BSON null element from `buffer`: a null carries only its name
// (a cstring), there is no value payload to consume.
case class BsonNullReader(buffer: ByteBuffer) extends Reader[BsonNull] {
  override def read: Option[BsonNull] = {
    Some(BsonNull(readCString()))
  }
}
| danielwegener/tepkin | bson/src/main/scala/net/fehmicansaglam/bson/reader/BsonNullReader.scala | Scala | apache-2.0 | 272 |
package com.artclod.math
import org.junit.runner._
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
import org.scalatestplus.play._
import play.api.test.Helpers._
import scala.math.{Pi => π}
import scala.util.Success
// DoubleParse should accept the common human spellings of infinity (with sign
// variants) as well as plain numeric literals, returning Success in each case.
class DoubleParseSpec extends PlaySpec {

  "DoubleParse.apply(String)" should {
    "parse inf" in { DoubleParse("inf") mustEqual( Success(Double.PositiveInfinity)) }
    "parse Inf" in { DoubleParse("Inf") mustEqual( Success(Double.PositiveInfinity)) }
    "parse infinity" in { DoubleParse("infinity") mustEqual( Success(Double.PositiveInfinity)) }
    "parse positive infinity" in { DoubleParse("positive infinity") mustEqual( Success(Double.PositiveInfinity)) }
    "parse pos inf" in { DoubleParse("pos inf") mustEqual( Success(Double.PositiveInfinity)) }
    "parse +inf" in { DoubleParse("+inf") mustEqual( Success(Double.PositiveInfinity)) }
    "parse negative infinity" in { DoubleParse("negative infinity") mustEqual( Success(Double.NegativeInfinity)) }
    "parse neg inf" in { DoubleParse("neg inf") mustEqual( Success(Double.NegativeInfinity)) }
    "parse -inf" in { DoubleParse("-inf") mustEqual( Success(Double.NegativeInfinity)) }
    "parse 3" in { DoubleParse("3") mustEqual( Success(3d)) }
    "parse 3.1" in { DoubleParse("3.1") mustEqual( Success(3.1d)) }
  }
}
| kristiankime/calc-tutor | test/com/artclod/math/DoubleParseSpec.scala | Scala | mit | 1,332 |
package com.github.meln1k.reactive.telegrambot.methods
import com.github.meln1k.reactive.telegrambot.models.ReplyMarkup
// Parameter assembly uses the stackable-trait pattern: each Has* mixin adds
// its own entry via an `abstract override` call to super.allParams, so a
// method's full parameter map is built by trait linearization.
trait Params {
  def allParams: Map[String, Any]
}

abstract class ApiMethod extends Params {
  // Wire name of the API method — presumably the Telegram Bot API endpoint; confirm at call sites.
  def name: String

  // Parameters specific to the concrete method, before mixins contribute theirs.
  protected def methodParams: Map[String, Any]

  override def allParams: Map[String, Any] = methodParams
}

// Adds the mandatory chat_id parameter.
trait HasChatId extends Params {
  def chat_id: Long

  abstract override def allParams: Map[String, Any] = super.allParams + ("chat_id" -> chat_id)
}

// Adds the optional reply_to_message_id parameter.
trait HasReplyTo extends Params {
  def reply_to_message_id: Option[Long]

  abstract override def allParams: Map[String, Any] = super.allParams + ("reply_to_message_id" -> reply_to_message_id)
}

// Adds the optional reply_markup (custom keyboard) parameter.
trait HasCustomKeyboard extends Params {
  def reply_markup: Option[ReplyMarkup]

  abstract override def allParams: Map[String, Any] = super.allParams + ("reply_markup" -> reply_markup)
}
| meln1k/reactive-telegrambot | src/main/scala/com/github/meln1k/reactive/telegrambot/methods/ApiMethod.scala | Scala | mit | 888 |
package equellatests.sections.search
import equellatests.browserpage.BrowserPage
import org.openqa.selenium.support.ui.ExpectedCondition
// Mixed into pages whose search results refresh asynchronously; implementors
// supply the Selenium wait condition that signals the refresh has completed.
trait ResultsUpdatable extends BrowserPage {
  def resultsUpdateExpectation: ExpectedCondition[_]
}
| equella/Equella | autotest/Tests/src/test/scala/equellatests/sections/search/ResultsUpdatable.scala | Scala | apache-2.0 | 241 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v3
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.ct600.v3.retriever.CT600DeclarationBoxRetriever
import uk.gov.hmrc.ct.ct600e.validations.ValidateDeclarationNameOrStatus
// CT600 v3 box B985 ("Declaration status"), an optional string input.
case class B985(value: Option[String]) extends CtBoxIdentifier("Declaration status") with CtOptionalString with Input
  with ValidatableBox[CT600DeclarationBoxRetriever] with ValidateDeclarationNameOrStatus[CT600DeclarationBoxRetriever] {

  // Delegates to the shared name-or-status validation, reporting errors against box id "B985".
  override def validate(boxRetriever: CT600DeclarationBoxRetriever): Set[CtValidation] = validateDeclarationNameOrStatus("B985", this)
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600/v3/B985.scala | Scala | apache-2.0 | 1,186 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import monix.execution.cancelables.BooleanCancelable
import monix.reactive.Observable
import monix.execution.atomic.Atomic
import scala.concurrent.duration._
import scala.concurrent.duration.Duration.Zero
// Operator suite for `takeWhileNotCanceled`: provides sample observables and
// their expected element count / sum per the BaseOperatorSuite contract.
object TakeWhileNotCanceledSuite extends BaseOperatorSuite {

  // Expected sum of 1..sourceCount.
  def sum(sourceCount: Int): Long =
    sourceCount.toLong * (sourceCount + 1) / 2

  def count(sourceCount: Int) =
    sourceCount

  def createObservable(sourceCount: Int) = {
    require(sourceCount > 0, "sourceCount should be strictly positive")
    Some {
      val c = BooleanCancelable()
      // Cancel after emitting sourceCount elements; the range is twice as long,
      // so the operator must cut the stream short.
      val o =
        if (sourceCount == 1)
          Observable.range(1, 10).takeWhileNotCanceled(c).map { x =>
            c.cancel(); x
          } else
          Observable.range(1L, sourceCount.toLong * 2).takeWhileNotCanceled(c).map { x =>
            if (x == sourceCount) c.cancel(); x
          }
      Sample(o, count(sourceCount), sum(sourceCount), Zero, Zero)
    }
  }

  def brokenUserCodeObservable(sourceCount: Int, ex: Throwable) = Some {
    require(sourceCount > 0, "sourceCount should be strictly positive")
    // Cancelable whose isCanceled check itself throws after sourceCount polls,
    // simulating broken user code inside the operator.
    val c = new BooleanCancelable {
      def cancel(): Unit = ()
      val counter = Atomic(0)
      def isCanceled =
        if (counter.incrementAndGet() < sourceCount)
          false
        else
          throw ex
    }
    val o = Observable.range(1L, sourceCount.toLong * 2).takeWhileNotCanceled(c)
    Sample(o, count(sourceCount - 1), sum(sourceCount - 1), Zero, Zero)
  }

  def observableInError(sourceCount: Int, ex: Throwable) = Some {
    require(sourceCount > 0, "sourceCount should be strictly positive")
    val c = BooleanCancelable()
    val o =
      if (sourceCount == 1)
        createObservableEndingInError(Observable.now(1), ex)
          .takeWhileNotCanceled(c)
      else
        createObservableEndingInError(Observable.range(1, sourceCount.toLong + 1), ex)
          .takeWhileNotCanceled(c)
    Sample(o, count(sourceCount), sum(sourceCount), Zero, Zero)
  }

  override def cancelableObservables() = {
    // Delayed emission leaves the subscription cancelable before any element arrives.
    val c = BooleanCancelable()
    val o = Observable.range(1, 10).delayOnNext(1.second).takeWhileNotCanceled(c)
    Seq(Sample(o, 0, 0, 0.seconds, 0.seconds))
  }
}
| monixio/monix | monix-reactive/shared/src/test/scala/monix/reactive/internal/operators/TakeWhileNotCanceledSuite.scala | Scala | apache-2.0 | 2,915 |
package com.chainstaysoftware.unitofmeasure
import org.scalatest.{Matchers, FunSuite, BeforeAndAfter}
// Checks the engineering-scale constants (factor, prefix, symbol), the
// scaled/unscaled conversions, and the scale chosen for magnitude boundaries.
class EngineeringScaleTest extends FunSuite with Matchers with BeforeAndAfter with EpsilonEquals {

  test("test factor") {
    Kilo.factor should be (1000.0)
    Atto.factor should be (1E-18)
  }

  test("test prefix") {
    Kilo.prefix should be ("kilo")
    Atto.prefix should be ("atto")
  }

  test("test symbol") {
    Kilo.symbol should be ("K")
    Atto.symbol should be ("a")
  }

  test("test conversions") {
    // Round-trip 12.34 kilo-units through both conversion directions.
    val epsilon: Double = 1.0E-10
    val scaledKilo: Double = 12.34
    val unscaledKilo: Double = scaledKilo * 1000.0
    epsilonEquals(Kilo.convertToUnscaled(scaledKilo), unscaledKilo, epsilon) should be (true)
    epsilonEquals(Kilo.convertToScaled(unscaledKilo), scaledKilo, epsilon) should be (true)
  }

  test("test scale") {
    // Magnitudes below/above the named prefixes clamp to Atto/Exa; 1.0 needs no prefix.
    val smallest: Double = 1.0E-20
    val smaller: Double = 1.0E-14
    val small: Double = 1.0E-4
    val justRight: Double = 1.0
    val big: Double = 1.0E4
    val bigger: Double = 1.0E14
    val biggest: Double = 1.0E20
    val smallestScale = Atto
    val smallerScale = Femto
    val smallScale = Micro
    val justRightScale = None
    val bigScale = Kilo
    val biggerScale = Tera
    val biggestScale = Exa
    None.getScale(smallest) should be (smallestScale)
    None.getScale(smaller) should be (smallerScale)
    None.getScale(small) should be (smallScale)
    None.getScale(justRight) should be (justRightScale)
    None.getScale(big) should be (bigScale)
    None.getScale(bigger) should be (biggerScale)
    None.getScale(biggest) should be (biggestScale)
  }
}
| ricemery/unitofmeasure | src/test/scala/com/chainstaysoftware/unitofmeasure/EngineeringScaleTest.scala | Scala | apache-2.0 | 1,639 |
package monocle.internal
/** From cats: a pair of values of the same type that are expected to be
  * equal — typically used to state laws whose two sides are compared later.
  */
final case class IsEq[A](lhs: A, rhs: A)

object IsEq {

  /** Zero-allocation wrapper adding the `<==>` constructor syntax. */
  final class IsEqOps[A](private val self: A) extends AnyVal {
    def <==>(rhs: A): IsEq[A] = IsEq(self, rhs)
  }

  implicit def syntax[A](lhs: A): IsEqOps[A] = new IsEqOps(lhs)
}
| julien-truffaut/Monocle | core/shared/src/main/scala/monocle/internal/IsEq.scala | Scala | mit | 352 |
package akka.io
import akka.actor.{Actor, ActorRef}
import java.nio.file.{StandardOpenOption, Path}
import akka.io.File._
// Actor that reads the file at `path` in chunks of `chunkSize` bytes and
// streams every ReadResult to `receiver`, ending with FileSlurp.Done.
// Protocol: Open -> Opened -> GetSize -> Size -> sequential Read/ReadResult
// pairs -> Close/Closed.
class FileSlurp(path: Path, receiver: ActorRef, chunkSize: Int) extends Actor {
  import context.system

  // Convenience constructor with a 256-byte default chunk size.
  def this(path: Path, receiver: ActorRef) = this(path, receiver, 256)

  // Total file size, learned from the Size response before reading starts.
  var size: Long = _

  override def preStart() {
    IO(File) ! Open(path, StandardOpenOption.READ :: Nil)
  }

  override def receive = {
    case Opened(handler) =>
      handler ! GetSize
    case Size(_size) =>
      size = _size
      sender() ! Read(chunkSize, 0)
      context.become(slurping(0), true)
  }

  // One outstanding read at a time; `currentPos` is the offset of that read
  // (the pattern match only accepts the result for the expected offset).
  def slurping(currentPos: Long): Receive = {
    case res @ ReadResult(_, read,`currentPos`) =>
      receiver ! res
      val nextPos = currentPos + read
      if (nextPos >= size) {
        sender() ! Close
        context.become(closing)
      } else {
        sender() ! Read(chunkSize, nextPos)
        context.become(slurping(nextPos), true)
      }
  }

  def closing: Receive = {
    case Closed =>
      receiver ! FileSlurp.Done
      context.stop(self)
  }
}

object FileSlurp {
  // Terminal message sent to the receiver once the whole file has been streamed.
  case object Done
}
| drexin/akka-io-file | src/main/scala/akka/io/FileSlurp.scala | Scala | apache-2.0 | 1,130 |
package net.liftweb.mapper
/*
* Copyright 2006-2009 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
import _root_.scala.collection.mutable._
import _root_.java.lang.reflect.Method
import _root_.java.sql.{ResultSet, Types}
import _root_.scala.xml.{Elem, Node, NodeSeq}
import _root_.net.liftweb.http.{S, FieldError}
import S._
import _root_.net.liftweb.http.js._
import _root_.net.liftweb.util.{Box, Empty, Full, Failure}
trait BaseMapper {
  // F-bounded member type: each concrete mapper refines this to its own type,
  // letting shared signatures refer to the precise subtype.
  type MapperType <: Mapper[MapperType]
}
@serializable
trait Mapper[A<:Mapper[A]] extends BaseMapper {
self: A =>
  // Fixes BaseMapper.MapperType to the concrete mapper type A.
  type MapperType = A

  // Per-instance token used with Safe to scope privileged ("safe") operations.
  private val secure_# = Safe.next
  // Set once delete_! succeeds, so a second delete is refused (see db_can_delete_?).
  private var was_deleted_? = false
  // Explicit DB connection override; Empty means "use the calculated/default identifier".
  private var dbConnectionIdentifier: Box[ConnectionIdentifier] = Empty
  // Presumably set by MetaMapper once a post-commit hook is registered — TODO confirm.
  private[mapper] var addedPostCommit = false

  /** The MetaMapper singleton that owns persistence logic for this type. */
  def getSingleton : MetaMapper[A];

  /** True while executing inside this instance's runSafe block. */
  final def safe_? : Boolean = {
    Safe.safe_?(secure_#)
  }

  // Views this Mapper as the concrete type A (self-type guarantees the cast).
  implicit def thisToMappee(in: Mapper[A]): A = this.asInstanceOf[A]

  /** Runs f with this instance's safety token active (see Safe). */
  def runSafe[T](f : => T) : T = {
    Safe.runSafe(secure_#)(f)
  }

  /**
   * Pins this instance to the given connection identifier (a non-default id,
   * or any id once an override exists), returning this for chaining.
   */
  def connectionIdentifier(id: ConnectionIdentifier): A = {
    if (id != getSingleton.dbDefaultConnectionIdentifier || dbConnectionIdentifier.isDefined) dbConnectionIdentifier = Full(id)
    thisToMappee(this)
  }

  /** The effective connection identifier: explicit override, else calculated. */
  def connectionIdentifier = dbConnectionIdentifier openOr calcDbId

  /** Hook: map specific instances to alternative connection identifiers. */
  def dbCalculateConnectionIdentifier: PartialFunction[A, ConnectionIdentifier] = Map.empty

  // Calculated identifier for this instance, falling back to the singleton default.
  private def calcDbId = if (dbCalculateConnectionIdentifier.isDefinedAt(this)) dbCalculateConnectionIdentifier(this)
  else getSingleton.dbDefaultConnectionIdentifier
  /**
   * Append a function to perform after the commit happens
   * @param func - the function to perform after the commit happens
   */
  def doPostCommit(func: () => Unit): A = {
    DB.appendPostFunc(connectionIdentifier, func)
    this
  }

  /**
   * Save the instance and return the instance
   */
  def saveMe(): A = {
    this.save
    this
  }

  /** Persists this instance via the singleton; true on success. */
  def save(): Boolean = {
    runSafe {
      getSingleton.save(this)
    }
  }

  /** Renders this instance as a single HTML line (delegates to the singleton). */
  def htmlLine : NodeSeq = {
    getSingleton.doHtmlLine(this)
  }

  /** Renders this instance as HTML (delegates to the singleton). */
  def asHtml : NodeSeq = {
    getSingleton.asHtml(this)
  }

  /**
   * If the instance calculates any additional
   * fields for JSON object, put the calculated fields
   * here
   */
  def suplementalJs(ob: Box[KeyObfuscator]): List[(String, JsExp)] = Nil

  /** Validates all fields; an empty list means the instance is valid. */
  def validate : List[FieldError] = {
    runSafe {
      getSingleton.validate(this)
    }
  }
  /**
   * Convert the model to a JavaScript object
   */
  def asJs: JsExp = getSingleton.asJs(this)

  /**
   * Delete the model from the RDBMS
   */
  def delete_! : Boolean = {
    // Refuse when never saved or already deleted; otherwise record the outcome.
    if (!db_can_delete_?) false else
    runSafe {
      was_deleted_? = getSingleton.delete_!(this)
      was_deleted_?
    }
  }

  /**
   * Get the fields (in order) for displaying a form
   */
  def formFields: List[MappedField[_, A]] =
    getSingleton.formFields(this)

  /**
   * map the fields titles and forms to generate a list
   * @param func called with displayHtml, fieldId, form
   */
  def mapFieldTitleForm[T](func: (NodeSeq, Box[NodeSeq], NodeSeq) => T): List[T] =
    getSingleton.mapFieldTitleForm(this, func)

  /**
   * flat map the fields titles and forms to generate a list
   * @param func called with displayHtml, fieldId, form
   */
  def flatMapFieldTitleForm[T]
  (func: (NodeSeq, Box[NodeSeq], NodeSeq) => Seq[T]): List[T] =
    getSingleton.flatMapFieldTitleForm(this, func)
/**
* Present the model as a form and execute the function on submission of the form
*
* @param button - If it's Full, put a submit button on the form with the value of the parameter
* @param onSuccess - redirect to the URL if the model validates, otherwise display the errors
*
* @return the form
*/
def toForm(button: Box[String], onSuccess: String): NodeSeq =
toForm(button, (what: A) => {what.validate match {
case Nil => what.save ; S.redirectTo(onSuccess)
case xs => S.error(xs)
}})
/**
* Append the JSON representation of this model object to the string builder
* @param the string builder to append the JSON representation of this model to
*
* @return the StringBuilder
*/
def asJSON(sb: StringBuilder): StringBuilder = {
getSingleton.asJSON(this, sb)
sb
}
/**
* Create a JSON representation of this model object
*/
def asJSON: String = asJSON(new StringBuilder).toString
/**
 * Present the model as a form and execute the function on submission of the form.
 *
 * @param button - If it's Full, put a submit button on the form with the value of the parameter
 * @param f - the function to execute on form submission
 *
 * @return the form
 */
def toForm(button: Box[String], f: A => Any): NodeSeq =
  // field markup from the meta-mapper, followed by a hidden input whose
  // server-registered name triggers `f(this)` when the form is submitted
  getSingleton.toForm(this) ++
  S.fmapFunc((ignore: List[String]) => f(this)){
    (name: String) =>
      (<input type='hidden' name={name} value="n/a" />)} ++
  // optional submit button; empty Text node when no label was supplied
  (button.map(b => getSingleton.formatFormElement( <xml:group> </xml:group> , <input type="submit" value={b}/> )) openOr _root_.scala.xml.Text(""))
/**
 * Present the model as a form; on submission, run `onSuccess` when the model
 * validates, otherwise report the errors and re-register `redoSnippet` so the
 * current snippet is re-rendered with the submitted values.
 *
 * @param button if Full, render a submit button with the given label
 * @param redoSnippet snippet transformation used to redisplay the form on validation failure
 * @param onSuccess callback invoked with the validated model
 */
def toForm(button: Box[String], redoSnippet: NodeSeq => NodeSeq, onSuccess: A => Unit): NodeSeq = {
  // capture the current snippet name now; submission happens in a later request
  val snipName = S.currentSnippet
  def doSubmit() {
    this.validate match {
      case Nil => onSuccess(this)
      // NOTE: the snippet re-mapping belongs to the error case only — it is
      // part of `case xs`, despite the indentation
      case xs => S.error(xs)
        snipName.foreach(n => S.mapSnippet(n, redoSnippet))
    }
  }
  getSingleton.toForm(this) ++
  S.fmapFunc((ignore: List[String]) => doSubmit())(name => <input type='hidden' name={name} value="n/a" />) ++
  (button.map(b => getSingleton.formatFormElement( <xml:group> </xml:group> , <input type="submit" value={b}/> )) openOr _root_.scala.xml.Text(""))
}
/** Has this instance been saved to (and thus exists in) the RDBMS? */
def saved_? : Boolean = getSingleton.saved_?(this)

/**
 * Can this model object be deleted?
 * Only a persisted instance that has not already been deleted qualifies.
 */
def db_can_delete_? : Boolean = getSingleton.saved_?(this) && !was_deleted_?

/** Have any fields been modified since the last save? */
def dirty_? : Boolean = getSingleton.dirty_?(this)
/**
 * Render this model as "ClassName={field1=..., ...}" for logging/debugging.
 * Field rendering is delegated to the meta-mapper.
 */
override def toString =
  this.getClass.getName + "={" + getSingleton.appendFieldToStrings(this) + "}"
/** Render this model object as an XML element (delegated to the meta-mapper). */
def toXml: Elem = {
  getSingleton.toXml(this)
}

/**
 * Ask the meta-mapper to verify/assign field names for this instance.
 * Tolerates a null singleton, which can occur during object construction.
 */
def checkNames {
  runSafe {
    getSingleton match {
      case null =>
      case s => s.checkFieldNames(this)
    }
  }
}

// Default: non-keyed mappers have no primary key to compare.
// Overridden in KeyedMapper to compare primary key values.
def comparePrimaryKeys(other: A) = false

/**
 * Find the field by name.
 * @param fieldName -- the name of the field to find
 *
 * @return Box[MappedField] — Empty when no field with that name exists
 */
def fieldByName[T](fieldName: String): Box[MappedField[T, A]] = getSingleton.fieldByName[T](fieldName, this)
// Partial function from field name to a NodeSeq transformation for that field
type FieldPF = PartialFunction[String, NodeSeq => NodeSeq]

/** Build a FieldPF that applies `transform` to each of this instance's fields. */
def fieldMapperPF(transform: (BaseOwnedMappedField[A] => NodeSeq)): FieldPF = {
  getSingleton.fieldMapperPF(transform, this)
}

// Accumulated field transformations; an empty Map is a PartialFunction
// that is defined nowhere
private var fieldPF_i: FieldPF = Map.empty

/** The current chain of per-field transformations. */
def fieldPF = fieldPF_i

/** Append `pf` so it is consulted after the existing transformations. */
def appendField(pf: FieldPF) {
  fieldPF_i = fieldPF_i orElse pf
  fieldPF_i // NOTE: discarded — method returns Unit; kept for byte-compatibility
}

/** Prepend `pf` so it is consulted before the existing transformations. */
def prependField(pf: FieldPF) {
  fieldPF_i = pf orElse fieldPF_i
  fieldPF_i // NOTE: discarded — method returns Unit; kept for byte-compatibility
}

/**
 * If there's a field in this record that defines the locale, return it.
 * Default is Empty; records with such a field override this.
 */
def localeField: Box[MappedLocale[A]] = Empty

/** If there's a field in this record that defines the time zone, return it. */
def timeZoneField: Box[MappedTimeZone[A]] = Empty

/** If there's a field in this record that defines the country, return it. */
def countryField: Box[MappedCountry[A]] = Empty
}
/** Convenience trait for mappers whose primary key is a Long. */
trait LongKeyedMapper[OwnerType <: LongKeyedMapper[OwnerType]] extends KeyedMapper[Long, OwnerType] with BaseLongKeyedMapper {
  self: OwnerType =>
}

/** Marker for mappers that have a primary key; the key's type is abstract. */
trait BaseKeyedMapper extends BaseMapper {
  type TheKeyType
}

/** BaseKeyedMapper with the key type fixed to Long. */
trait BaseLongKeyedMapper extends BaseKeyedMapper {
  override type TheKeyType = Long
}

/**
 * Mix-in providing a conventional auto-indexed Long primary key named "id".
 */
trait IdPK extends BaseLongKeyedMapper {
  def primaryKeyField = id
  object id extends MappedLongIndex[MapperType](this.asInstanceOf[MapperType])
}
/**
 * A Mapper with a primary key of type KeyType. Adds key-based identity
 * (equals / comparePrimaryKeys), reload from the database, and obfuscated
 * JavaScript serialization.
 */
trait KeyedMapper[KeyType, OwnerType<:KeyedMapper[KeyType, OwnerType]] extends Mapper[OwnerType] with BaseKeyedMapper {
  self: OwnerType =>

  type TheKeyType = KeyType

  /** The field holding this record's primary key. */
  def primaryKeyField: MappedField[KeyType, OwnerType] with IndexedField[KeyType];

  def getSingleton: KeyedMetaMapper[KeyType, OwnerType];

  // keyed records are equal when their primary key values match
  override def comparePrimaryKeys(other: OwnerType) = primaryKeyField.is == other.primaryKeyField.is

  /**
   * Re-fetch this record from the database by primary key, falling back to
   * `this` when the row is no longer present.
   * NOTE(review): the field itself is passed as the By value — presumably an
   * implicit conversion yields its value; confirm against Lift's By/MappedField API.
   */
  def reload: OwnerType = getSingleton.find(By(primaryKeyField, primaryKeyField)) openOr this

  /** JS representation with primary keys obfuscated via `f`. */
  def asSafeJs(f: KeyObfuscator): JsExp = getSingleton.asSafeJs(this, f)

  // Equality: two keyed mappers of compatible classes (either direction of
  // assignability, to cover subclassing) compare by primary key field;
  // anything else falls back to reference equality via super.equals.
  override def equals(other: Any): Boolean = {
    other match {
      case null => false
      case km: KeyedMapper[Nothing, Nothing] if this.getClass.isAssignableFrom(km.getClass) ||
        km.getClass.isAssignableFrom(this.getClass) => this.primaryKeyField == km.primaryKeyField
      case k => super.equals(k)
    }
  }
}
| beni55/liftweb | lift-mapper/src/main/scala/net/liftweb/mapper/Mapper.scala | Scala | apache-2.0 | 9,214 |
package mesosphere.marathon
package api.akkahttp
import akka.http.scaladsl.model.{ HttpMethods, HttpResponse, StatusCodes }
import akka.http.scaladsl.server.{ Directives => AkkaDirectives, _ }
import mesosphere.marathon.plugin.http.{ HttpRequest => PluginRequest }
import mesosphere.marathon.plugin.{ Group, RunSpec }
import mesosphere.marathon.plugin.auth.{ AuthorizedAction, AuthorizedResource, Authorizer, CreateGroup, CreateResource, CreateRunSpec, DeleteGroup, DeleteResource, DeleteRunSpec, Identity, UpdateGroup, UpdateResource, UpdateRunSpec, ViewGroup, ViewResource, ViewRunSpec, Authenticator => MarathonAuthenticator }
import scala.util.{ Failure, Success }
/**
 * Akka HTTP directives bridging Marathon's pluggable authentication and
 * authorization (Authenticator / Authorizer plugin interfaces) into routes.
 */
trait AuthDirectives extends AkkaDirectives {
  import AuthDirectives._

  /**
   * Using the active authentication plugin, authenticates the current user, yielding the identity
   *
   * If unsuccessful, rejects with NotAuthenticated
   *
   * Because this method takes an implicit, you explicitly call `.apply` when invoking it in a route; IE:
   *
   *     authenticated.apply { implicit identity =>
   *       ...
   *     }
   *
   * Note that an Authenticator is expected to be in scope
   */
  def authenticated(implicit authenticator: MarathonAuthenticator): Directive1[Identity] = extractRequest.flatMap { request =>
    extractClientIP.flatMap { clientIP =>
      // adapt the Akka request to the plugin-facing HttpRequest abstraction
      val pluginRequest: PluginRequest = HttpPluginFacade.request(request, clientIP)
      onComplete(authenticator.authenticate(pluginRequest)).flatMap {
        case Success(Some(identity)) => provide(identity)
        // authentication ran but did not yield an identity: let the plugin
        // shape the 401-style response
        case Success(None) => reject(NotAuthenticated(HttpPluginFacade.response(authenticator.handleNotAuthenticated(pluginRequest, _))))
        // the auth service itself failed
        case Failure(_) => reject(AuthServiceUnavailable)
      }
    }
  }

  /**
   * Using the active Authorizer, check for authorization for the specified request
   *
   * Because this method takes implicits, you must explicitly call apply when applying the directive. IE:
   *
   *     authenticated.apply { implicit identity =>
   *       ...
   *       // both an Identity and Authorizer are implicitly provided
   *       authorized(ViewResource, info.app).apply {
   *         ...
   *       }
   *       ...
   *     }
   *
   * @param action The action for which to check authorization for the given identity
   * @param resource The entity for which authorization should be checked
   */
  def authorized[Resource](action: AuthorizedAction[Resource], resource: Resource)(implicit authorizer: Authorizer, identity: Identity): Directive0 =
    if (authorizer.isAuthorized(identity, action, resource))
      pass
    else
      reject(NotAuthorized(HttpPluginFacade.response(authorizer.handleNotAuthorized(identity, _))))

  /**
   * Using the active Authorizer, check for authorization for the specified request
   * The authorized action itself is derived from the http request method.
   *
   * Because this method takes implicits, you must explicitly call apply when applying the directive. IE:
   *
   *     authorized(info.app).apply {
   *       ...
   *     }
   *
   * @param resource The entity for which authorization should be checked
   */
  def authorized[Resource](resource: Resource)(implicit authorizer: Authorizer, identity: Identity, actionSet: AuthorizedActionSet[_ >: Resource]): Directive0 =
    extractAuthorizedAction(actionSet).flatMap(authorized(_, resource))

  /**
   * This will extract the authorized action for resource type R based on the HTTP method.
   * Use this extraction only, if the HTTP verb defines the underlying action.
   * @param actionSet the related action set for the given resource.
   * @tparam R the resource type
   * @return the authorized action, for the given http method. Otherwise reject.
   */
  def extractAuthorizedAction[R](implicit actionSet: AuthorizedActionSet[R]): Directive1[AuthorizedAction[R]] = extractRequest.flatMap { request =>
    import HttpMethods._
    request.method match {
      case GET | OPTIONS | HEAD => provide(actionSet.view)
      case POST => provide(actionSet.create)
      case PUT | PATCH => provide(actionSet.update)
      case DELETE => provide(actionSet.delete)
      case _ => reject
    }
  }

  // Bring action sets for all available resources into implicit scope
  implicit val groupAuthorizedActionSet: AuthorizedActionSet[Group] = AuthorizedActionSet(CreateGroup, UpdateGroup, DeleteGroup, ViewGroup)
  implicit val runSpecAuthorizedActionSet: AuthorizedActionSet[RunSpec] = AuthorizedActionSet(CreateRunSpec, UpdateRunSpec, DeleteRunSpec, ViewRunSpec)
  implicit val resourceAuthorizedActionSet: AuthorizedActionSet[AuthorizedResource] = AuthorizedActionSet(CreateResource, UpdateResource, DeleteResource, ViewResource)
}
object AuthDirectives {
  // Rejections produced by the directives above. They carry (or map to) the
  // HTTP response to send, and are translated by handleAuthRejections.
  private[AuthDirectives] case object AuthServiceUnavailable extends Rejection
  private[AuthDirectives] case class NotAuthorized(toResponse: HttpResponse) extends Rejection
  private[AuthDirectives] case class NotAuthenticated(toResponse: HttpResponse) extends Rejection

  /** Rejection handler clause turning auth rejections into HTTP responses. */
  def handleAuthRejections: PartialFunction[Rejection, Route] = {
    case AuthServiceUnavailable => Directives.complete(StatusCodes.ServiceUnavailable -> "Auth Service currently not available.")
    case NotAuthorized(pluginResponse) => Directives.complete(pluginResponse)
    case NotAuthenticated(pluginResponse) => Directives.complete(pluginResponse)
  }

  /**
   * This action set defines all actions for create, update, delete and view based on a given resource type.
   * @tparam Resource The resource type
   */
  case class AuthorizedActionSet[Resource](
    create: AuthorizedAction[Resource],
    update: AuthorizedAction[Resource],
    delete: AuthorizedAction[Resource],
    view: AuthorizedAction[Resource]
  )
}
| Caerostris/marathon | src/main/scala/mesosphere/marathon/api/akkahttp/AuthDirectives.scala | Scala | apache-2.0 | 5,801 |
package justin.httpapi
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.testkit.ScalatestRouteTest
import org.scalatest.{FlatSpec, Matchers}
/** Route-level test for the health-check endpoint. */
class HealthCheckRouterTest extends FlatSpec with Matchers with ScalatestRouteTest {

  behavior of "Health Check Router"

  it should "get 200 OK http status" in {
    // Route.seal applies the default rejection/exception handling so the
    // final status code and body can be asserted directly
    Get("/health") ~> Route.seal(new HealthCheckRouter().routes) ~> check {
      status shouldBe StatusCodes.OK
      responseAs[String] shouldBe "OK"
    }
  }
}
| speedcom/JustinDB | justin-http-api/src/test/scala/justin/httpapi/HealthCheckRouterTest.scala | Scala | apache-2.0 | 544 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.amqp
import java.util.concurrent.TimeUnit._
import java.util.concurrent.{Executors, ScheduledExecutorService, ScheduledFuture}
import com.google.common.util.concurrent.{RateLimiter => GuavaRateLimiter}
import io.vertx.proton.{ProtonDelivery, ProtonMessageHandler, ProtonReceiver}
import org.apache.qpid.proton.amqp.{Symbol => AmqpSymbol}
import org.apache.qpid.proton.amqp.messaging.Rejected
import org.apache.qpid.proton.amqp.transport.ErrorCondition
import org.apache.qpid.proton.message.Message
import org.apache.spark.streaming.receiver.BlockGenerator
import org.slf4j.LoggerFactory
/**
 * Provides message rate control with related throttling.
 *
 * Incoming AMQP messages are admitted through a Guava RateLimiter seeded from
 * the BlockGenerator's current rate limit. While permits are exhausted the
 * controller is "throttling"; a single-thread scheduler ends the throttling
 * state if the sender goes silent. Subclasses hook into the lifecycle via the
 * on*/before* template methods.
 *
 * @param blockGenerator BlockGenerator instance used for storing messages
 * @param receiver AMQP receiver instance
 */
abstract class AMQPRateController(
      blockGenerator: BlockGenerator,
      receiver: ProtonReceiver
    ) {

  // check on the receiver and block generator instances
  if (Option(receiver).isEmpty)
    throw new IllegalArgumentException("The receiver instance cannot be null")
  if (Option(blockGenerator).isEmpty)
    throw new IllegalArgumentException("The block generator instance cannot be null")

  // error condition name/description attached to rejected deliveries
  protected final val AmqpRecvError = "org.apache:amqp-recv-error"
  protected final val AmqpRecvThrottling = "Throttling : Max rate limit exceeded"

  // throttling healthy checked for 1 sec + 50%
  private final val throttlingHealthyPeriod = 1500l

  // lazy: reads the block generator's limit at first use, not construction
  private lazy val rateLimiter = GuavaRateLimiter.create(blockGenerator.getCurrentLimit.toDouble)

  // guards `throttling` and serializes the acquire/healthy-timer paths
  private val mutex: AnyRef = new Object()

  private var throttling: Boolean = false

  // timer used in order to raise the throttling end even when no other messages
  // arrive after the first one which caused the throttling start
  private val scheduledExecutorService: ScheduledExecutorService = Executors.newScheduledThreadPool(1)
  private var scheduledThrottlingHealthy: ScheduledFuture[_] = _
  private val throttlingHealthy: ThrottlingHealthy = new ThrottlingHealthy()

  private val log = LoggerFactory.getLogger(getClass)

  /**
   * Open/start the rate controller activity
   */
  final def open(): Unit = {

    receiver
      .setAutoAccept(false) // deliveries are settled explicitly in acquire/onThrottling
      .handler(new ProtonMessageHandler() {
        override def handle(delivery: ProtonDelivery, message: Message): Unit = {

          // handling received message and related delivery
          acquire(delivery, message)

        }
      })

    // extension point before opening receiver
    beforeOpen()

    receiver.open()
  }

  /**
   * Close/end the rate controller activity
   */
  final def close(): Unit = {

    // extension point before closing receiver
    beforeClose()

    if (Option(receiver).isDefined) {
      receiver.close()
    }

    scheduledExecutorService.shutdown()
    scheduledExecutorService.awaitTermination(1, SECONDS)
  }

  /**
   * Try to acquire the permit to handle incoming message with related delivery
   *
   * Runs entirely under `mutex`; note the on* callbacks are therefore invoked
   * while the lock is held.
   *
   * @param delivery Delivery information
   * @param message AMQP message received
   */
  final def acquire(delivery: ProtonDelivery, message: Message): Unit = {

    mutex.synchronized {

      // try to acquire the rate limiter in order to have permits at current rate
      if (rateLimiter.tryAcquire()) {

        if (throttling) {

          log.info("Throttling ended ... ")

          throttling = false
          onThrottlingEnded()

          // throttling ended thanks to acquired permits at current rate
          // no more healthy control is needed
          scheduledThrottlingHealthy.cancel(false)
        }

        onAcquired(delivery, message)

      // permit not acquired, max rate exceeded
      } else {

        if (!throttling) {

          // throttling start now
          throttling = true
          onThrottlingStarted()

          log.warn("Throttling started ... ")

          // starting throttling healthy thread in order to end throttling
          // when no more messages are received (silence from sender)
          scheduledThrottlingHealthy = scheduledExecutorService.schedule(throttlingHealthy, throttlingHealthyPeriod, MILLISECONDS)
        }

        // always true here: either already throttling or just set above
        if (throttling) {

          log.error("Throttling ... ")

          // already in throttling
          onThrottling(delivery, message)
        }
      }
    }
  }

  // template-method hooks; default implementations do nothing
  def beforeOpen(): Unit = { }

  def beforeClose(): Unit = { }

  def onAcquired(delivery: ProtonDelivery, message: Message): Unit = { }

  def onThrottlingStarted(): Unit = { }

  def onThrottlingEnded(): Unit = { }

  def onThrottling(delivery: ProtonDelivery, message: Message): Unit = { }

  /**
   * Return current max rate
   *
   * @return Max rate
   */
  final def getCurrentLimit: Long = {
    rateLimiter.getRate.toLong
  }

  /**
   * Runnable class for the throttling healthy checker.
   * Ends throttling after a silent period with no incoming messages.
   */
  class ThrottlingHealthy extends Runnable {

    override def run(): Unit = {

      mutex.synchronized {
        if (throttling) {
          log.info("Healthy: Throttling ended ... ")
          throttling = false
          onThrottlingEnded()
        }
      }
    }
  }

}
/**
 * AMQP rate controller implementation using "prefetch".
 *
 * Relies on the receiver's prefetch window (link credit managed by Vert.x
 * Proton) to bound the incoming rate; messages beyond the rate are rejected.
 *
 * @param blockGenerator BlockGenerator instance used for storing messages
 * @param receiver AMQP receiver instance
 */
private final class AMQPPrefetchRateController(
      blockGenerator: BlockGenerator,
      receiver: ProtonReceiver
    ) extends AMQPRateController(blockGenerator, receiver) {

  override def beforeOpen(): Unit = {

    // if MaxValue or negative it means no max rate limit specified in the Spark configuration
    // so the prefetch isn't explicitly set but default Vert.x Proton value is used
    if ((blockGenerator.getCurrentLimit != Long.MaxValue) && (blockGenerator.getCurrentLimit >= 0))
      receiver.setPrefetch(blockGenerator.getCurrentLimit.toInt)

    super.beforeOpen()
  }

  override def beforeClose(): Unit = {
    super.beforeClose()
  }

  override def onAcquired(delivery: ProtonDelivery, message: Message): Unit = {

    // only AMQP message will be stored into BlockGenerator internal buffer;
    // delivery is passed as metadata to onAddData and saved here internally
    blockGenerator.addDataWithCallback(message, delivery)

    super.onAcquired(delivery, message)
  }

  override def onThrottlingStarted(): Unit = {
    super.onThrottlingStarted()
  }

  override def onThrottlingEnded(): Unit = {
    super.onThrottlingEnded()
  }

  override def onThrottling(delivery: ProtonDelivery, message: Message): Unit = {

    // during throttling (max rate limit exceeded), all messages are rejected
    val rejected: Rejected = new Rejected()
    val errorCondition: ErrorCondition = new ErrorCondition(AmqpSymbol.valueOf(AmqpRecvError), AmqpRecvThrottling)
    rejected.setError(errorCondition)
    delivery.disposition(rejected, true)

    super.onThrottling(delivery, message)
  }
}
/**
 * AMQP rate controller implementation using "manual" flow control.
 *
 * Disables prefetch and grants link credits explicitly, topping up the
 * credit window whenever it approaches exhaustion.
 *
 * @param blockGenerator BlockGenerator instance used for storing messages
 * @param receiver AMQP receiver instance
 */
private final class AMQPManualRateController(
      blockGenerator: BlockGenerator,
      receiver: ProtonReceiver
    ) extends AMQPRateController(blockGenerator, receiver) {

  // credits granted when no explicit rate limit is configured
  private final val CreditsDefault = 1000
  // remaining-credit level at which the window is replenished
  private final val CreditsThreshold = 0

  var count = 0   // messages handled since the last credit grant
  var credits = 0 // size of the credit window

  override def beforeOpen(): Unit = {

    count = 0

    // if MaxValue or negative it means no max rate limit specified in the Spark configuration
    if ((blockGenerator.getCurrentLimit != Long.MaxValue) && (blockGenerator.getCurrentLimit >= 0)) {
      credits = blockGenerator.getCurrentLimit.toInt
    } else {
      credits = CreditsDefault
    }

    // disable prefetch in order to use manual flow control
    receiver.setPrefetch(0)

    // grant the first bunch of credits
    receiver.flow(credits)

    super.beforeOpen()
  }

  override def beforeClose(): Unit = {
    super.beforeClose()
  }

  override def onAcquired(delivery: ProtonDelivery, message: Message): Unit = {

    // only AMQP message will be stored into BlockGenerator internal buffer;
    // delivery is passed as metadata to onAddData and saved here internally
    blockGenerator.addDataWithCallback(message, delivery)

    count += 1
    // if the credits exhaustion is near, need to grant more credits
    if (count >= credits - CreditsThreshold) {
      receiver.flow(credits - CreditsThreshold)
      count = 0
    }

    super.onAcquired(delivery, message)
  }

  override def onThrottlingStarted(): Unit = {
    super.onThrottlingStarted()
  }

  override def onThrottlingEnded(): Unit = {

    // if the credits exhaustion is near, need to grant more credits
    if (count >= credits - CreditsThreshold) {
      receiver.flow(credits - CreditsThreshold)
      count = 0
    }

    super.onThrottlingEnded()
  }

  override def onThrottling(delivery: ProtonDelivery, message: Message): Unit = {

    // rejected deliveries still consume credit, so keep counting
    count += 1

    // during throttling (max rate limit exceeded), all messages are rejected
    val rejected: Rejected = new Rejected()
    val errorCondition: ErrorCondition = new ErrorCondition(AmqpSymbol.valueOf(AmqpRecvError), AmqpRecvThrottling)
    rejected.setError(errorCondition)
    delivery.disposition(rejected, true)

    super.onThrottling(delivery, message)
  }
}
| redhatanalytics/dstream-amqp | src/main/scala/org/apache/spark/streaming/amqp/AMQPRateController.scala | Scala | apache-2.0 | 10,509 |
/*
* Copyright (c) 2013 Scott Abernethy.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package gate
/*
 control
 one in and one out per gateway location
 presences and clones both
*/
// NOTE(review): skeleton/placeholder — no behavior implemented yet; the
// comments describe the intended design (routing jobs per gateway location)
class Shifter {
  // map of gateway location to waiting jobs
}
| scott-abernethy/opener-of-the-way | app/gate/Shifter.scala | Scala | gpl-3.0 | 853 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.3
* @date Sun Oct 2 22:43:44 EDT 2011
* @see LICENSE (MIT style license file).
* @see http://www.ai7.uni-bayreuth.de/test_problem_coll.pdf
*/
package scalation.minima
import scala.math.pow
import scalation.calculus.Differential.FunctionV2S
import scalation.linalgebra.VectorD
import scalation.math.double_exp
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `NLPTestCases1` object used to test several Non-Linear Programming (NLP)
 *  algorithms on unconstrained problems.
 *  Algorithms:
 *      'sdcs' - Steepest Descent with Custom Line Search
 *      'sdgs' - Steepest Descent with Golden Section Line Search
 *      'prcg' - Polak-Ribiere Conjugate Gradient with Golden Section Line Search
 *      'sdws' - Steepest Descent with Wolfe Line Search
 *      'bfgs' - Broyden–Fletcher–Goldfarb–Shanno with Wolfe Line Search
 *  Note: only sdgs, prcg, sdws and bfgs are exercised below.
 */
object NLPTestCases1 extends App
{
    println ("NLPTest")

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Test the NLP algorithms on objective function 'f'.
     *  @param f  the objective function to minimize
     *  @param n  the dimensionality of the problem
     */
    def test (f: FunctionV2S, n: Int)
    {
        val x0 = new VectorD (n)        // zero vector
        var x  = x0

        val sdgs = new SteepestDescent (f)
        x = sdgs.solve (x0)
        println ("sdgs: optimal solution x = " + x + " with an objective value f(x) = " + f(x))

        val prcg = new ConjGradient (f)
        x = prcg.solve (x0)
        println ("prcg: optimal solution x = " + x + " with an objective value f(x) = " + f(x))

        val sdws = new QuasiNewton (f); sdws.setSteepest ()
        x = sdws.solve (x0)
        println ("sdws: optimal solution x = " + x + " with an objective value f(x) = " + f(x))

        val bfgs = new QuasiNewton (f)
        x = bfgs.solve (x0)
        println ("bfgs: optimal solution x = " + x + " with an objective value f(x) = " + f(x))
    } // test

    // simple convex bowl: minimum at (3, 4), f* = 1
    def test1 ()
    {
        println ("\nMinimize f(x) = (x_0 - 3)^2 + (x_1 - 4)^2 + 1")
        def f (x: VectorD): Double = (x(0) - 3.0) * (x(0) - 3.0) + (x(1) - 4.0) * (x(1) - 4.0) + 1.0
        test (f, 2)
    } // test1

    // same bowl, minimum farther from the x0 = 0 starting point
    def test2 ()
    {
        println ("\nMinimize f(x) = (x_0 - 30)^2 + (x_1 - 40)^2 + 1")
        def f (x: VectorD): Double = (x(0) - 30.0) * (x(0) - 30.0) + (x(1) - 40.0) * (x(1) - 40.0) + 1.0
        test (f, 2)
    } // test2

    // quartic term added to the bowl
    def test3 ()
    {
        println ("\nMinimize f(x) = x_0^4 + (x_0 - 3)^2 + (x_1 - 4)^2 + 1")
        def f (x: VectorD): Double = pow (x(0), 4.0) + (x(0) - 3.0) * (x(0) - 3.0) + (x(1) - 4.0) * (x(1) - 4.0) + 1.0
        test (f, 2)
    } // test3

    // @see http://math.fullerton.edu/mathews/n2003/gradientsearch/GradientSearchMod/Links/GradientSearchMod_lnk_5.html
    def test4 ()
    {
        println ("\nMinimize f(x) = x_0/4 + 5x_0^2 + x_0^4 - 9x_0^2 x_1 + 3x_1^2 + 2x_1^4")
        def f (x: VectorD): Double = x(0)/4.0 + 5.0*x(0)*x(0) + pow(x(0),4) -
            9.0*x(0)*x(0)*x(1) + 3.0*x(1)*x(1) + 2.0*pow(x(1),4)
        test (f, 2)
    } // test4

    test1 ()
    test2 ()
    test3 ()
    test4 ()

} // NLPTestCases1 object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `NLPTestCases2` object used to test several Non-Linear Programming (NLP)
 *  algorithms on constrained problems.
 *  Algorithms:
 *      'sdcs' - Steepest Descent (custom line search variant; constraint ignored)
 *      'prcg' - Polak-Ribiere Conjugate Gradient with Golden Section Line Search
 *      'sdws' - Steepest Descent with Wolfe Line Search
 *      'bfgs' - Broyden–Fletcher–Goldfarb–Shanno with Wolfe Line Search
 */
object NLPTestCases2 extends App
{
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Test the NLP algorithms on objective function 'f' with constraint function 'g'.
     *  @param f      the objective function to minimize
     *  @param g      the constraint function to be satisfied (g(x) <= 0)
     *  @param n      the dimensionality of the problem
     *  @param f_str  the f function as a string
     *  @param g_str  the g function as a string
     */
    def test (f: FunctionV2S, g: FunctionV2S, n: Int, f_str: String, g_str: String)
    {
        println ("\nMinimize f(x) = " + f_str +
                 "\nSubject to g(x) = " + g_str)

        val x0 = new VectorD (n)        // zero vector as the common starting point
        var x  = x0

        val sdcs = new SteepestDescent (f)      // unconstrained solver: g not passed
        x = sdcs.solve (x0)
        println ("sdgs: optimal solution x = " + x + " with an objective value f(x) = " + f(x))

        val prcg = new ConjGradient (f, g)
        x = prcg.solve (x0)
        println ("prcg: optimal solution x = " + x + " with an objective value f(x) = " + f(x))

        val sdws = new QuasiNewton (f, g); sdws.setSteepest ()
        x = sdws.solve (x0)
        println ("sdws: optimal solution x = " + x + " with an objective value f(x) = " + f(x))

        val bfgs = new QuasiNewton (f, g)
        x = bfgs.solve (x0)
        println ("bfgs: optimal solution x = " + x + " with an objective value f(x) = " + f(x))
    } // test

    // unconstrained optimum x* = (3, 4), f* = 1; constraint restricts x_0 <= 1
    def test0 ()
    {
        val f_str = "f(x) = (x_0 - 3)^2 + (x_1 - 4)^2 + 1"
        val g_str = "g(x) = x_0 <= 1"
        // FIX: was (x(1) - 40.0)~^2, which contradicted both f_str and the
        // documented optimum (3, 4); 4.0 matches the stated objective
        def f (x: VectorD) = (x(0) - 3.0)~^2 + (x(1) - 4.0)~^2 + 1.0
        def g (x: VectorD) = x(0) - 1.0
        test (f, g, 2, f_str, g_str)
    } // test0

    // Rosenbrock function with a lower bound on x_1 (not invoked below)
    def test1 ()
    {
        val f_str = "f(x) = 100(x_1 - x_0^2)^2 + (1 - x_0)^2"
        val g_str = "g(x) = x_1 >= -1.5"
        def f (x: VectorD) = 100.0 * (x(1) - x(0)~^2)~^2 + (1.0 - x(0))~^2
        def g (x: VectorD) = -x(1) - 1.5
        test (f, g, 2, f_str, g_str)
    } // test1

    test0 ()

} // NLPTestCases2
| NBKlepp/fda | scalation_1.3/scalation_modeling/src/main/scala/scalation/minima/NLPTestCases.scala | Scala | mit | 5,720 |
/**
* ____ __ ____ ____ ____,,___ ____ __ __ ____
* ( _ \\ /__\\ (_ )(_ _)( ___)/ __) ( _ \\( )( )( _ \\ Read
* ) / /(__)\\ / /_ _)(_ )__) \\__ \\ )___/ )(__)( ) _ < README.txt
* (_)\\_)(__)(__)(____)(____)(____)(___/ (__) (______)(____/ LICENSE.txt
*/
package razie.diesel.ext
import razie.diesel.dom._
/** flows seq/par and depys are processed directly by the engine */
case class EFlow(e: EMatch, ex: FlowExpr) extends CanHtml with EApplicable with HasPosition {
  // source position, filled in after parsing
  var pos : Option[EPos] = None

  // a flow applies when its match expression matches the incoming message
  override def test(m: EMsg, cole: Option[MatchCollector] = None)(implicit ctx: ECtx) =
    e.test(m, cole)

  // produces nothing itself — the engine interprets the FlowExpr directly
  override def apply(in: EMsg, destSpec: Option[EMsg])(implicit ctx: ECtx): List[Any] =
    Nil

  override def toHtml = span("$flow::") + s" ${e.toHtml} => $ex <br>"

  override def toString = s"$$flow:: $e => $ex"
}

/** base class for flow expressions */
class FlowExpr()

/** either sequence or parallel: sub-expressions joined by the operator `op` */
case class SeqExpr(op: String, l: Seq[FlowExpr]) extends FlowExpr {
  override def toString = l.mkString(op)
}

/** a single message in a flow, identified by its entity.action string */
case class MsgExpr(ea: String) extends FlowExpr {
  override def toString = ea
}

/** a block of a flow expr, i.e. in brackets */
case class BFlowExpr(b: FlowExpr) extends FlowExpr {
  override def toString = s"( $b )"
}
| razie/wikireactor | diesel/src/main/scala/razie/diesel/ext/EFlow.scala | Scala | apache-2.0 | 1,345 |
package com.atomist.rug.runtime.js.interop
import java.util.Map.Entry
import java.util.Objects
import jdk.nashorn.api.scripting.ScriptObjectMirror
import jdk.nashorn.internal.runtime.ConsString
import scala.util.control.Exception._
/**
 * Utilities to help in binding to Nashorn.
 */
object NashornUtils {

  import scala.collection.JavaConverters._

  /**
   * Get all key values, ignoring exceptions, which can be thrown by Nashorn entrySet.
   */
  private def safeObjectMap(som: ScriptObjectMirror): Map[String, AnyRef] =
    som.keySet().asScala
      .flatMap(key => {
        // TypeScript "get" properties can throw exceptions
        val maybeValue = allCatch.opt(som.get(key))
        maybeValue.map(value => key -> value)
      }).toMap

  /** All non-function properties of the JavaScript object. */
  def extractProperties(som: ScriptObjectMirror): Map[String, AnyRef] =
    safeObjectMap(som) flatMap {
      case (_, som: ScriptObjectMirror) if som.isFunction =>
        None
      case (key, value) =>
        Some(key -> value)
    }

  /**
   * Return the current state of no-arg methods on this object
   */
  def extractNoArgFunctionValues(som: ScriptObjectMirror): Map[String, AnyRef] = {
    val m = safeObjectMap(som) flatMap {
      case (key, f: ScriptObjectMirror) if isNoArgFunction(f) =>
        // If calling the function throws an exception, discard the value.
        // This will happen with builder stubs that haven't been fully initialized
        // Otherwise, use it
        allCatch.opt(som.callMember(key))
          .map(result => {
            (key, result)
          })
      case _ => None
    }
    m
  }

  // TODO this is fragile but can't find a Nashorn method to do it
  private def isNoArgFunction(f: ScriptObjectMirror): Boolean = {
    f.isFunction && {
      val s = f.toString
      s.startsWith("function ()")
    }
  }

  /** Normalize a Nashorn value to a plain Java/Scala type (ConsString -> String, JS array -> collection). */
  def toJavaType(nashornReturn: Object): Object = nashornReturn match {
    case s: ConsString => s.toString
    case r: ScriptObjectMirror if r.isArray =>
      r.values().asScala
    case x => x
  }

  // NOTE(review): only handles ScriptObjectMirror — any other input throws
  // MatchError; callers apparently guarantee a JS object. Also note mapValues
  // returns a lazy view, so conversion happens on access.
  def toJavaMap(nashornReturn: Object): Map[String, Object] =
    nashornReturn match {
      case som: ScriptObjectMirror =>
        val scalaMap = som.entrySet().asScala.map {
          e: Entry[String, Object] => (e.getKey, e.getValue)
        }.toMap
        scalaMap.mapValues {
          case som: ScriptObjectMirror =>
            toJavaMap(som)
          case x =>
            toJavaType(x)
        }
    }

  // NOTE(review): non-exhaustive — throws MatchError unless given a JS array
  def toScalaSeq(nashornReturn: Object): Seq[Object] = nashornReturn match {
    case r: ScriptObjectMirror if r.isArray =>
      r.values().asScala.toSeq
  }

  /**
   * Return the given property of the JavaScript object or default value if not found
   *
   * @param default default value if not found. Defaults to null.
   */
  def stringProperty(som: ScriptObjectMirror, name: String, default: String = null): String =
    som.get(name) match {
      case null => default
      case x => Objects.toString(x)
    }

  /**
   * Call the given JavaScript function, which must return a string
   */
  def stringFunction(som: ScriptObjectMirror, name: String): String =
    som.callMember(name) match {
      case null => null
      case x => Objects.toString(x)
    }

  /**
   * Are all these properties defined
   */
  def hasDefinedProperties(som: ScriptObjectMirror, properties: String*): Boolean =
    properties.forall(p => som.get(p) != null)
}
| atomist/rug | src/main/scala/com/atomist/rug/runtime/js/interop/NashornUtils.scala | Scala | gpl-3.0 | 3,399 |
package services
import be.objectify.deadbolt.scala.models._
import com.feth.play.module.pa.providers.password.UsernamePasswordAuthUser
import generated.Tables.{LinkedAccountRow, UserRow}
/**
 * UserRow adapter trait and object that offer implicit conversion from generated
 * UserRow to be.objectify.deadbolt.scala.models.Subject and other User sensitive
 * inline operations. The implementations are provided by the UserService
 */
trait PluggableUserService extends Subject {
  //------------------------------------------------------------------------
  // public
  //------------------------------------------------------------------------
  // names of the auth providers linked to this user
  def providers : Seq[String]

  //------------------------------------------------------------------------
  // change this user's password; `create` indicates a first-time password
  def changePassword(authUser: UsernamePasswordAuthUser, create: Boolean): Unit

  //------------------------------------------------------------------------
  // reset this user's password; `create` indicates a first-time password
  def resetPassword(authUser: UsernamePasswordAuthUser, create: Boolean): Unit

  //------------------------------------------------------------------------
  // mark this user's email/account as verified
  def verify: Unit

  //------------------------------------------------------------------------
  // external accounts linked to this user
  def linkedAccounts : Seq[LinkedAccountRow]
}
object PluggableUserService {
/**
 * Enables converting from a UserRow to a PluggableUserService type and most importantly
 * to a Subject. Every member simply delegates to the implicitly supplied UserService.
 * @param user the input user instance to convert to PluggableUserService
 * @param userService the implicit userService instance needed for doing the actual conversion
 */
implicit class toPluggableUserService(user: UserRow)(implicit userService: UserService) extends PluggableUserService {
  //------------------------------------------------------------------------
  override def identifier: String = {
    userService.identifier(user)
  }

  //------------------------------------------------------------------------
  override def roles: List[Role] = {
    userService.roles(user)
  }

  //------------------------------------------------------------------------
  override def permissions: List[Permission] = {
    userService.permissions(user)
  }

  //------------------------------------------------------------------------
  override def providers: Seq[String] = {
    userService.providers(user)
  }

  //------------------------------------------------------------------------
  override def changePassword(authUser: UsernamePasswordAuthUser, create: Boolean): Unit = {
    userService.changePassword(user, authUser, create)
  }

  //------------------------------------------------------------------------
  override def resetPassword(authUser: UsernamePasswordAuthUser, create: Boolean): Unit = {
    userService.resetPassword(user, authUser, create)
  }

  //------------------------------------------------------------------------
  def verify: Unit = {
    userService.verify(user)
  }

  //------------------------------------------------------------------------
  override def linkedAccounts: Seq[LinkedAccountRow] = {
    userService.linkedAccounts(user)
  }
}

/**
 * Enables converting from Option[UserRow] to Option[PluggableUserService] or
 * more importantly Option[Subject].
 * @param t user instance
 * @param ev evidence that can generate a PluggableUserService from a UserRow
 * @tparam T UserRow type
 * @return the mapped Option
 */
implicit def toPluggableUserServiceOpt[T](t: Option[T])(implicit ev: T => PluggableUserService): Option[PluggableUserService] = t.map(ev)
} | bravegag/play-authenticate-usage-scala | app/services/PluggableUserService.scala | Scala | apache-2.0 | 3,554 |
/*
* Copyright 2008-present MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mongodb.scala
import com.mongodb.{MongoCredential => JMongoCredential}
import org.scalatest.{FlatSpec, Matchers}
class MongoCredentialSpec extends FlatSpec with Matchers {
  "MongoCredential" should "have the same methods as the wrapped MongoClient" in {
    // Compare the public method names of the Scala wrapper against the Java
    // driver class, accepting either the identical name or the Scala-style
    // accessor form (getFoo -> foo).
    val wrappedNames = classOf[JMongoCredential].getMethods.map(_.getName)
    val localNames = classOf[MongoCredential].getMethods.map(_.getName)
    for (wrappedName <- wrappedNames) {
      val stripped = wrappedName.stripPrefix("get")
      val accessorName = stripped.head.toLower + stripped.tail
      assert(localNames.contains(wrappedName) || localNames.contains(accessorName))
    }
  }
  it should "create the expected credential" in {
    MongoCredential.createCredential("user", "source", "pass".toCharArray) shouldEqual
      JMongoCredential.createCredential("user", "source", "pass".toCharArray)
  }
  it should "create the expected createScramSha1Credential" in {
    MongoCredential.createScramSha1Credential("user", "source", "pass".toCharArray) shouldEqual
      JMongoCredential.createScramSha1Credential("user", "source", "pass".toCharArray)
  }
  it should "create the expected createScramSha256Credential" in {
    MongoCredential.createScramSha256Credential("user", "source", "pass".toCharArray) shouldEqual
      JMongoCredential.createScramSha256Credential("user", "source", "pass".toCharArray)
  }
  it should "create the expected createMongoCRCredential" in {
    MongoCredential.createMongoCRCredential("user", "source", "pass".toCharArray) shouldEqual
      JMongoCredential.createMongoCRCredential("user", "source", "pass".toCharArray)
  }
  it should "create the expected createMongoX509Credential" in {
    MongoCredential.createMongoX509Credential() shouldEqual JMongoCredential.createMongoX509Credential()
    MongoCredential.createMongoX509Credential("user") shouldEqual JMongoCredential.createMongoX509Credential("user")
  }
  it should "create the expected createPlainCredential" in {
    MongoCredential.createPlainCredential("user", "source", "pass".toCharArray) shouldEqual
      JMongoCredential.createPlainCredential("user", "source", "pass".toCharArray)
  }
  it should "create the expected createGSSAPICredential" in {
    MongoCredential.createGSSAPICredential("user") shouldEqual JMongoCredential.createGSSAPICredential("user")
  }
}
| rozza/mongo-scala-driver | driver/src/test/scala/org/mongodb/scala/MongoCredentialSpec.scala | Scala | apache-2.0 | 2,916 |
package muster
import org.scalameter.api._
import muster.codec.json.{Benchmarks, CursorBench}
class JawnBenchmark extends CursorBench {
  import Benchmarks._
  // Measures the cost of creating a Jawn cursor and pulling a single node out
  // of it, for each generated JSON input.
  // Fix: the benchmark label previously read "Muster Jackson format" — an
  // apparent copy/paste from the Jackson benchmark — although this class
  // exercises the Jawn codec (codec.jawn.JawnCodec).
  performance of "Muster Jawn format" in {
    measure method "nextNode" config (
      exec.benchRuns -> 500
    ) in {
      using(jsonGen) in {
        r => codec.jawn.JawnCodec.createCursor(r).nextNode()
      }
    }
  }
} | json4s/muster | codecs/jawn/src/test/scala/muster/JawnBenchmark.scala | Scala | mit | 396 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.util.Locale
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, TypeCoercion}
import org.apache.spark.sql.catalyst.expressions.aggregate.DeclarativeAggregate
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.trees.TreeNode
import org.apache.spark.sql.catalyst.util.truncatedString
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
////////////////////////////////////////////////////////////////////////////////////////////////////
// This file defines the basic expression abstract classes in Catalyst.
////////////////////////////////////////////////////////////////////////////////////////////////////
/**
* An expression in Catalyst.
*
* If an expression wants to be exposed in the function registry (so users can call it with
* "name(arguments...)", the concrete implementation must be a case class whose constructor
* arguments are all Expressions types. See [[Substring]] for an example.
*
* There are a few important traits or abstract classes:
*
* - [[Nondeterministic]]: an expression that is not deterministic.
* - [[Stateful]]: an expression that contains mutable state. For example, MonotonicallyIncreasingID
* and Rand. A stateful expression is always non-deterministic.
* - [[Unevaluable]]: an expression that is not supposed to be evaluated.
* - [[CodegenFallback]]: an expression that does not have code gen implemented and falls back to
* interpreted mode.
* - [[NullIntolerant]]: an expression that is null intolerant (i.e. any null input will result in
* null output).
* - [[NonSQLExpression]]: a common base trait for the expressions that do not have SQL
* expressions like representation. For example, `ScalaUDF`, `ScalaUDAF`,
* and object `MapObjects` and `Invoke`.
* - [[UserDefinedExpression]]: a common base trait for user-defined functions, including
* UDF/UDAF/UDTF.
* - [[HigherOrderFunction]]: a common base trait for higher order functions that take one or more
* (lambda) functions and applies these to some objects. The function
* produces a number of variables which can be consumed by some lambda
* functions.
* - [[NamedExpression]]: An [[Expression]] that is named.
* - [[TimeZoneAwareExpression]]: A common base trait for time zone aware expressions.
* - [[SubqueryExpression]]: A base interface for expressions that contain a [[LogicalPlan]].
*
* - [[LeafExpression]]: an expression that has no child.
* - [[UnaryExpression]]: an expression that has one child.
* - [[BinaryExpression]]: an expression that has two children.
* - [[TernaryExpression]]: an expression that has three children.
* - [[BinaryOperator]]: a special case of [[BinaryExpression]] that requires two children to have
* the same output data type.
*
* A few important traits used for type coercion rules:
* - [[ExpectsInputTypes]]: an expression that has the expected input types. This trait is typically
* used by operator expressions (e.g. [[Add]], [[Subtract]]) to define
* expected input types without any implicit casting.
* - [[ImplicitCastInputTypes]]: an expression that has the expected input types, which can be
* implicitly castable using [[TypeCoercion.ImplicitTypeCasts]].
* - [[ComplexTypeMergingExpression]]: to resolve output types of the complex expressions
* (e.g., [[CaseWhen]]).
*/
abstract class Expression extends TreeNode[Expression] {
  /**
   * Returns true when an expression is a candidate for static evaluation before the query is
   * executed. A typical use case: [[org.apache.spark.sql.catalyst.optimizer.ConstantFolding]]
   *
   * The following conditions are used to determine suitability for constant folding:
   *  - A [[Coalesce]] is foldable if all of its children are foldable
   *  - A [[BinaryExpression]] is foldable if its both left and right child are foldable
   *  - A [[Not]], [[IsNull]], or [[IsNotNull]] is foldable if its child is foldable
   *  - A [[Literal]] is foldable
   *  - A [[Cast]] or [[UnaryMinus]] is foldable if its child is foldable
   */
  def foldable: Boolean = false
  /**
   * Returns true when the current expression always return the same result for fixed inputs from
   * children. The non-deterministic expressions should not change in number and order. They should
   * not be evaluated during the query planning.
   *
   * Note that this means that an expression should be considered as non-deterministic if:
   *  - it relies on some mutable internal state, or
   *  - it relies on some implicit input that is not part of the children expression list.
   *  - it has non-deterministic child or children.
   *  - it assumes the input satisfies some certain condition via the child operator.
   *
   * An example would be `SparkPartitionID` that relies on the partition id returned by TaskContext.
   * By default leaf expressions are deterministic as Nil.forall(_.deterministic) returns true.
   */
  lazy val deterministic: Boolean = children.forall(_.deterministic)
  /** Whether this expression may evaluate to null for some input row. */
  def nullable: Boolean
  /**
   * Workaround scala compiler so that we can call super on lazy vals
   */
  @transient
  private lazy val _references: AttributeSet =
    AttributeSet.fromAttributeSets(children.map(_.references))
  /** The set of input attributes referenced anywhere in this expression tree. */
  def references: AttributeSet = _references
  /** Returns the result of evaluating this expression on a given input Row */
  def eval(input: InternalRow = null): Any
  /**
   * Returns an [[ExprCode]], that contains the Java source code to generate the result of
   * evaluating the expression on an input row.
   *
   * @param ctx a [[CodegenContext]]
   * @return [[ExprCode]]
   */
  def genCode(ctx: CodegenContext): ExprCode = {
    ctx.subExprEliminationExprs.get(this).map { subExprState =>
      // This expression is repeated which means that the code to evaluate it has already been added
      // as a function before. In that case, we just re-use it.
      ExprCode(ctx.registerComment(this.toString), subExprState.isNull, subExprState.value)
    }.getOrElse {
      // Not a common subexpression: generate fresh code with unique variable names.
      val isNull = ctx.freshName("isNull")
      val value = ctx.freshName("value")
      val eval = doGenCode(ctx, ExprCode(
        JavaCode.isNullVariable(isNull),
        JavaCode.variable(value, dataType)))
      // May split oversized generated code into a private method; mutates `eval` in place.
      reduceCodeSize(ctx, eval)
      if (eval.code.toString.nonEmpty) {
        // Add `this` in the comment.
        eval.copy(code = ctx.registerComment(this.toString) + eval.code)
      } else {
        eval
      }
    }
  }
  // Extracts the generated code into a separate private method when it exceeds the configured
  // split threshold, to keep individual Java methods below JIT-unfriendly sizes.
  private def reduceCodeSize(ctx: CodegenContext, eval: ExprCode): Unit = {
    // TODO: support whole stage codegen too
    val splitThreshold = SQLConf.get.methodSplitThreshold
    if (eval.code.length > splitThreshold && ctx.INPUT_ROW != null && ctx.currentVars == null) {
      val setIsNull = if (!eval.isNull.isInstanceOf[LiteralValue]) {
        // The null flag must outlive the extracted method, so promote it to a global field.
        val globalIsNull = ctx.addMutableState(CodeGenerator.JAVA_BOOLEAN, "globalIsNull")
        val localIsNull = eval.isNull
        eval.isNull = JavaCode.isNullGlobal(globalIsNull)
        s"$globalIsNull = $localIsNull;"
      } else {
        ""
      }
      val javaType = CodeGenerator.javaType(dataType)
      val newValue = ctx.freshName("value")
      val funcName = ctx.freshName(nodeName)
      val funcFullName = ctx.addNewFunction(funcName,
        s"""
           |private $javaType $funcName(InternalRow ${ctx.INPUT_ROW}) {
           |  ${eval.code}
           |  $setIsNull
           |  return ${eval.value};
           |}
           """.stripMargin)
      eval.value = JavaCode.variable(newValue, dataType)
      eval.code = code"$javaType $newValue = $funcFullName(${ctx.INPUT_ROW});"
    }
  }
  /**
   * Returns Java source code that can be compiled to evaluate this expression.
   * The default behavior is to call the eval method of the expression. Concrete expression
   * implementations should override this to do actual code generation.
   *
   * @param ctx a [[CodegenContext]]
   * @param ev an [[ExprCode]] with unique terms.
   * @return an [[ExprCode]] containing the Java source code to generate the given expression
   */
  protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode
  /**
   * Returns `true` if this expression and all its children have been resolved to a specific schema
   * and input data types checking passed, and `false` if it still contains any unresolved
   * placeholders or has data types mismatch.
   * Implementations of expressions should override this if the resolution of this type of
   * expression involves more than just the resolution of its children and type checking.
   */
  lazy val resolved: Boolean = childrenResolved && checkInputDataTypes().isSuccess
  /**
   * Returns the [[DataType]] of the result of evaluating this expression. It is
   * invalid to query the dataType of an unresolved expression (i.e., when `resolved` == false).
   */
  def dataType: DataType
  /**
   * Returns true if all the children of this expression have been resolved to a specific schema
   * and false if any still contains any unresolved placeholders.
   */
  def childrenResolved: Boolean = children.forall(_.resolved)
  /**
   * Returns an expression where a best effort attempt has been made to transform `this` in a way
   * that preserves the result but removes cosmetic variations (case sensitivity, ordering for
   * commutative operations, etc.) See [[Canonicalize]] for more details.
   *
   * `deterministic` expressions where `this.canonicalized == other.canonicalized` will always
   * evaluate to the same result.
   */
  lazy val canonicalized: Expression = {
    // Canonicalize bottom-up: children first, then this node.
    val canonicalizedChildren = children.map(_.canonicalized)
    Canonicalize.execute(withNewChildren(canonicalizedChildren))
  }
  /**
   * Returns true when two expressions will always compute the same result, even if they differ
   * cosmetically (i.e. capitalization of names in attributes may be different).
   *
   * See [[Canonicalize]] for more details.
   */
  def semanticEquals(other: Expression): Boolean =
    deterministic && other.deterministic && canonicalized == other.canonicalized
  /**
   * Returns a `hashCode` for the calculation performed by this expression. Unlike the standard
   * `hashCode`, an attempt has been made to eliminate cosmetic differences.
   *
   * See [[Canonicalize]] for more details.
   */
  def semanticHash(): Int = canonicalized.hashCode()
  /**
   * Checks the input data types, returns `TypeCheckResult.success` if it's valid,
   * or returns a `TypeCheckResult` with an error message if invalid.
   * Note: it's not valid to call this method until `childrenResolved == true`.
   */
  def checkInputDataTypes(): TypeCheckResult = TypeCheckResult.TypeCheckSuccess
  /**
   * Returns a user-facing string representation of this expression's name.
   * This should usually match the name of the function in SQL.
   */
  def prettyName: String = nodeName.toLowerCase(Locale.ROOT)
  // Flattens case-class constructor arguments, expanding nested collections one level.
  protected def flatArguments: Iterator[Any] = productIterator.flatMap {
    case t: Iterable[_] => t
    case single => single :: Nil
  }
  // Marks this as final, Expression.verboseString should never be called, and thus shouldn't be
  // overridden by concrete classes.
  final override def verboseString(maxFields: Int): String = simpleString(maxFields)
  override def simpleString(maxFields: Int): String = toString
  override def toString: String = prettyName + truncatedString(
    flatArguments.toSeq, "(", ", ", ")", SQLConf.get.maxToStringFields)
  /**
   * Returns SQL representation of this expression. For expressions extending [[NonSQLExpression]],
   * this method may return an arbitrary user facing string.
   */
  def sql: String = {
    val childrenSQL = children.map(_.sql).mkString(", ")
    s"$prettyName($childrenSQL)"
  }
}
/**
* An expression that cannot be evaluated. These expressions don't live past analysis or
* optimization time (e.g. Star) and should not be evaluated during query planning and
* execution.
*/
trait Unevaluable extends Expression {
  // Interpreted evaluation is intentionally unsupported: these nodes must be
  // removed or replaced before execution (see the trait scaladoc).
  final override def eval(input: InternalRow = null): Any =
    throw new UnsupportedOperationException(s"Cannot evaluate expression: $this")
  // Code generation is likewise unsupported.
  final override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode =
    throw new UnsupportedOperationException(s"Cannot generate code for expression: $this")
}
/**
* An expression that gets replaced at runtime (currently by the optimizer) into a different
* expression for evaluation. This is mainly used to provide compatibility with other databases.
* For example, we use this to support "nvl" by replacing it with "coalesce".
*
* A RuntimeReplaceable should have the original parameters along with a "child" expression in the
* case class constructor, and define a normal constructor that accepts only the original
* parameters. For an example, see [[Nvl]]. To make sure the explain plan and expression SQL
* works correctly, the implementation should also override flatArguments method and sql method.
*/
trait RuntimeReplaceable extends UnaryExpression with Unevaluable {
  // Basic properties are delegated to the replacement `child` expression.
  override def nullable: Boolean = child.nullable
  override def foldable: Boolean = child.foldable
  override def dataType: DataType = child.dataType
  // As this expression gets replaced at optimization with its "child" expression,
  // two `RuntimeReplaceable` are considered to be semantically equal if their "child" expressions
  // are semantically equal.
  override lazy val canonicalized: Expression = child.canonicalized
}
/**
* An aggregate expression that gets rewritten (currently by the optimizer) into a
* different aggregate expression for evaluation. This is mainly used to provide compatibility
* with other databases. For example, we use this to support every, any/some aggregates by rewriting
* them with Min and Max respectively.
*/
trait UnevaluableAggregate extends DeclarativeAggregate {
  // Conservatively nullable; the rewritten aggregate decides the real nullability.
  override def nullable: Boolean = true
  // Every DeclarativeAggregate hook throws: this node exists only to be
  // rewritten into an evaluable aggregate (see the trait scaladoc).
  override lazy val aggBufferAttributes =
    throw new UnsupportedOperationException(s"Cannot evaluate aggBufferAttributes: $this")
  override lazy val initialValues: Seq[Expression] =
    throw new UnsupportedOperationException(s"Cannot evaluate initialValues: $this")
  override lazy val updateExpressions: Seq[Expression] =
    throw new UnsupportedOperationException(s"Cannot evaluate updateExpressions: $this")
  override lazy val mergeExpressions: Seq[Expression] =
    throw new UnsupportedOperationException(s"Cannot evaluate mergeExpressions: $this")
  override lazy val evaluateExpression: Expression =
    throw new UnsupportedOperationException(s"Cannot evaluate evaluateExpression: $this")
}
/**
* Expressions that don't have SQL representation should extend this trait. Examples are
* `ScalaUDF`, `ScalaUDAF`, and object expressions like `MapObjects` and `Invoke`.
*/
trait NonSQLExpression extends Expression {
  final override def sql: String = {
    // There is no real SQL form: substitute attributes and aliases with
    // printable placeholders, then fall back to toString.
    transform {
      case a: Attribute => new PrettyAttribute(a)
      case a: Alias => PrettyAttribute(a.sql, a.dataType)
    }.toString
  }
}
/**
* An expression that is nondeterministic.
*/
trait Nondeterministic extends Expression {
  final override lazy val deterministic: Boolean = false
  final override def foldable: Boolean = false
  // Set once initialize() has run for the current partition; guards eval() below.
  @transient
  private[this] var initialized = false
  /**
   * Initializes internal states given the current partition index and mark this as initialized.
   * Subclasses should override [[initializeInternal()]].
   */
  final def initialize(partitionIndex: Int): Unit = {
    initializeInternal(partitionIndex)
    initialized = true
  }
  protected def initializeInternal(partitionIndex: Int): Unit
  /**
   * @inheritdoc
   * Throws an exception if [[initialize()]] is not called yet.
   * Subclasses should override [[evalInternal()]].
   */
  final override def eval(input: InternalRow = null): Any = {
    require(initialized,
      s"Nondeterministic expression ${this.getClass.getName} should be initialized before eval.")
    evalInternal(input)
  }
  protected def evalInternal(input: InternalRow): Any
}
/**
* An expression that contains mutable state. A stateful expression is always non-deterministic
* because the results it produces during evaluation are not only dependent on the given input
* but also on its internal state.
*
* The state of the expressions is generally not exposed in the parameter list and this makes
* comparing stateful expressions problematic because similar stateful expressions (with the same
* parameter list) but with different internal state will be considered equal. This is especially
* problematic during tree transformations. In order to counter this the `fastEquals` method for
* stateful expressions only returns `true` for the same reference.
*
* A stateful expression should never be evaluated multiple times for a single row. This should
* only be a problem for interpreted execution. This can be prevented by creating fresh copies
* of the stateful expression before execution, these can be made using the `freshCopy` function.
*/
trait Stateful extends Nondeterministic {
  /**
   * Return a fresh uninitialized copy of the stateful expression.
   */
  def freshCopy(): Stateful
  /**
   * Only the same reference is considered equal: structural equality would
   * wrongly identify instances whose hidden mutable state differs.
   */
  override def fastEquals(other: TreeNode[_]): Boolean = this eq other
}
/**
* A leaf expression, i.e. one without any child expressions.
*/
abstract class LeafExpression extends Expression {
  // A leaf has no children by definition; expose the canonical empty sequence.
  override final def children: Seq[Expression] = Seq.empty
}
/**
* An expression with one input and one output. The output is by default evaluated to null
* if the input is evaluated to null.
*/
abstract class UnaryExpression extends Expression {
  def child: Expression
  override final def children: Seq[Expression] = child :: Nil
  override def foldable: Boolean = child.foldable
  override def nullable: Boolean = child.nullable
  /**
   * Default behavior of evaluation according to the default nullability of UnaryExpression.
   * If subclass of UnaryExpression override nullable, probably should also override this.
   */
  override def eval(input: InternalRow): Any = {
    val value = child.eval(input)
    if (value == null) {
      // Null-in, null-out by default.
      null
    } else {
      nullSafeEval(value)
    }
  }
  /**
   * Called by default [[eval]] implementation. If subclass of UnaryExpression keep the default
   * nullability, they can override this method to save null-check code. If we need full control
   * of evaluation process, we should override [[eval]].
   */
  protected def nullSafeEval(input: Any): Any =
    sys.error(s"UnaryExpressions must override either eval or nullSafeEval")
  /**
   * Called by unary expressions to generate a code block that returns null if its parent returns
   * null, and if not null, use `f` to generate the expression.
   *
   * As an example, the following does a boolean inversion (i.e. NOT).
   * {{{
   *   defineCodeGen(ctx, ev, c => s"!($c)")
   * }}}
   *
   * @param f function that accepts a variable name and returns Java code to compute the output.
   */
  protected def defineCodeGen(
      ctx: CodegenContext,
      ev: ExprCode,
      f: String => String): ExprCode = {
    nullSafeCodeGen(ctx, ev, eval => {
      s"${ev.value} = ${f(eval)};"
    })
  }
  /**
   * Called by unary expressions to generate a code block that returns null if its parent returns
   * null, and if not null, use `f` to generate the expression.
   *
   * @param f function that accepts the non-null evaluation result name of child and returns Java
   *          code to compute the output.
   */
  protected def nullSafeCodeGen(
      ctx: CodegenContext,
      ev: ExprCode,
      f: String => String): ExprCode = {
    val childGen = child.genCode(ctx)
    val resultCode = f(childGen.value)
    if (nullable) {
      // Wrap the result computation in a null check on the child value.
      val nullSafeEval = ctx.nullSafeExec(child.nullable, childGen.isNull)(resultCode)
      ev.copy(code = code"""
        ${childGen.code}
        boolean ${ev.isNull} = ${childGen.isNull};
        ${CodeGenerator.javaType(dataType)} ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
        $nullSafeEval
      """)
    } else {
      // Non-nullable output: skip the null flag entirely.
      ev.copy(code = code"""
        ${childGen.code}
        ${CodeGenerator.javaType(dataType)} ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
        $resultCode""", isNull = FalseLiteral)
    }
  }
}
/**
* An expression with two inputs and one output. The output is by default evaluated to null
* if any input is evaluated to null.
*/
abstract class BinaryExpression extends Expression {
  def left: Expression
  def right: Expression
  override final def children: Seq[Expression] = Seq(left, right)
  override def foldable: Boolean = left.foldable && right.foldable
  override def nullable: Boolean = left.nullable || right.nullable
  /**
   * Default behavior of evaluation according to the default nullability of BinaryExpression.
   * If subclass of BinaryExpression override nullable, probably should also override this.
   */
  override def eval(input: InternalRow): Any = {
    // Short-circuits: if either side evaluates to null, the result is null and
    // nullSafeEval is never called.
    val value1 = left.eval(input)
    if (value1 == null) {
      null
    } else {
      val value2 = right.eval(input)
      if (value2 == null) {
        null
      } else {
        nullSafeEval(value1, value2)
      }
    }
  }
  /**
   * Called by default [[eval]] implementation.  If subclass of BinaryExpression keep the default
   * nullability, they can override this method to save null-check code.  If we need full control
   * of evaluation process, we should override [[eval]].
   */
  protected def nullSafeEval(input1: Any, input2: Any): Any =
    sys.error(s"BinaryExpressions must override either eval or nullSafeEval")
  /**
   * Short hand for generating binary evaluation code.
   * If either of the sub-expressions is null, the result of this computation
   * is assumed to be null.
   *
   * @param f accepts two variable names and returns Java code to compute the output.
   */
  protected def defineCodeGen(
      ctx: CodegenContext,
      ev: ExprCode,
      f: (String, String) => String): ExprCode = {
    nullSafeCodeGen(ctx, ev, (eval1, eval2) => {
      s"${ev.value} = ${f(eval1, eval2)};"
    })
  }
  /**
   * Short hand for generating binary evaluation code.
   * If either of the sub-expressions is null, the result of this computation
   * is assumed to be null.
   *
   * @param f function that accepts the 2 non-null evaluation result names of children
   *          and returns Java code to compute the output.
   */
  protected def nullSafeCodeGen(
      ctx: CodegenContext,
      ev: ExprCode,
      f: (String, String) => String): ExprCode = {
    val leftGen = left.genCode(ctx)
    val rightGen = right.genCode(ctx)
    val resultCode = f(leftGen.value, rightGen.value)
    if (nullable) {
      // Nest the null checks so the right side is only evaluated when the left
      // side is non-null, mirroring the interpreted eval above.
      val nullSafeEval =
        leftGen.code + ctx.nullSafeExec(left.nullable, leftGen.isNull) {
          rightGen.code + ctx.nullSafeExec(right.nullable, rightGen.isNull) {
            s"""
              ${ev.isNull} = false; // resultCode could change nullability.
              $resultCode
            """
          }
        }
      ev.copy(code = code"""
        boolean ${ev.isNull} = true;
        ${CodeGenerator.javaType(dataType)} ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
        $nullSafeEval
      """)
    } else {
      ev.copy(code = code"""
        ${leftGen.code}
        ${rightGen.code}
        ${CodeGenerator.javaType(dataType)} ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
        $resultCode""", isNull = FalseLiteral)
    }
  }
}
/**
* A [[BinaryExpression]] that is an operator, with two properties:
*
* 1. The string representation is "x symbol y", rather than "funcName(x, y)".
* 2. Two inputs are expected to be of the same type. If the two inputs have different types,
* the analyzer will find the tightest common type and do the proper type casting.
*/
abstract class BinaryOperator extends BinaryExpression with ExpectsInputTypes {
  /**
   * Expected input type from both left/right child expressions, similar to the
   * [[ImplicitCastInputTypes]] trait.
   */
  def inputType: AbstractDataType
  // Symbol used in the textual representation, e.g. "+" for [[Add]].
  def symbol: String
  // SQL rendering of the operator; defaults to `symbol`.
  def sqlOperator: String = symbol
  override def toString: String = s"($left $sqlOperator $right)"
  // Both operands must conform to the single `inputType`.
  override def inputTypes: Seq[AbstractDataType] = Seq(inputType, inputType)
  override def checkInputDataTypes(): TypeCheckResult = {
    // First check whether left and right have the same type, then check if the type is acceptable.
    if (!left.dataType.sameType(right.dataType)) {
      TypeCheckResult.TypeCheckFailure(s"differing types in '$sql' " +
        s"(${left.dataType.catalogString} and ${right.dataType.catalogString}).")
    } else if (!inputType.acceptsType(left.dataType)) {
      TypeCheckResult.TypeCheckFailure(s"'$sql' requires ${inputType.simpleString} type," +
        s" not ${left.dataType.catalogString}")
    } else {
      TypeCheckResult.TypeCheckSuccess
    }
  }
  override def sql: String = s"(${left.sql} $sqlOperator ${right.sql})"
}
object BinaryOperator {
  /** Extractor exposing the two operands of a binary operator. */
  def unapply(e: BinaryOperator): Option[(Expression, Expression)] = {
    val operands = (e.left, e.right)
    Some(operands)
  }
}
/**
* An expression with three inputs and one output. The output is by default evaluated to null
* if any input is evaluated to null.
*/
abstract class TernaryExpression extends Expression {
  override def foldable: Boolean = children.forall(_.foldable)
  override def nullable: Boolean = children.exists(_.nullable)
  /**
   * Default behavior of evaluation according to the default nullability of TernaryExpression.
   * If subclass of TernaryExpression override nullable, probably should also override this.
   */
  override def eval(input: InternalRow): Any = {
    // Short-circuits: a later child is only evaluated when every earlier child
    // was non-null; any null child makes the whole result null.
    val exprs = children
    val value1 = exprs(0).eval(input)
    if (value1 != null) {
      val value2 = exprs(1).eval(input)
      if (value2 != null) {
        val value3 = exprs(2).eval(input)
        if (value3 != null) {
          return nullSafeEval(value1, value2, value3)
        }
      }
    }
    null
  }
  /**
   * Called by default [[eval]] implementation.  If subclass of TernaryExpression keep the default
   * nullability, they can override this method to save null-check code.  If we need full control
   * of evaluation process, we should override [[eval]].
   */
  protected def nullSafeEval(input1: Any, input2: Any, input3: Any): Any =
    sys.error(s"TernaryExpressions must override either eval or nullSafeEval")
  /**
   * Short hand for generating ternary evaluation code.
   * If either of the sub-expressions is null, the result of this computation
   * is assumed to be null.
   *
   * @param f accepts three variable names and returns Java code to compute the output.
   */
  protected def defineCodeGen(
    ctx: CodegenContext,
    ev: ExprCode,
    f: (String, String, String) => String): ExprCode = {
    nullSafeCodeGen(ctx, ev, (eval1, eval2, eval3) => {
      s"${ev.value} = ${f(eval1, eval2, eval3)};"
    })
  }
  /**
   * Short hand for generating ternary evaluation code.
   * If either of the sub-expressions is null, the result of this computation
   * is assumed to be null.
   *
   * @param f function that accepts the 3 non-null evaluation result names of children
   *          and returns Java code to compute the output.
   */
  protected def nullSafeCodeGen(
    ctx: CodegenContext,
    ev: ExprCode,
    f: (String, String, String) => String): ExprCode = {
    val leftGen = children(0).genCode(ctx)
    val midGen = children(1).genCode(ctx)
    val rightGen = children(2).genCode(ctx)
    val resultCode = f(leftGen.value, midGen.value, rightGen.value)
    if (nullable) {
      // Nest the null checks so each child is only evaluated when all earlier
      // children are non-null, mirroring the interpreted eval above.
      val nullSafeEval =
        leftGen.code + ctx.nullSafeExec(children(0).nullable, leftGen.isNull) {
          midGen.code + ctx.nullSafeExec(children(1).nullable, midGen.isNull) {
            rightGen.code + ctx.nullSafeExec(children(2).nullable, rightGen.isNull) {
              s"""
                ${ev.isNull} = false; // resultCode could change nullability.
                $resultCode
              """
            }
          }
        }
      ev.copy(code = code"""
        boolean ${ev.isNull} = true;
        ${CodeGenerator.javaType(dataType)} ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
        $nullSafeEval""")
    } else {
      ev.copy(code = code"""
        ${leftGen.code}
        ${midGen.code}
        ${rightGen.code}
        ${CodeGenerator.javaType(dataType)} ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
        $resultCode""", isNull = FalseLiteral)
    }
  }
}
/**
 * A trait used for resolving nullability-related flags — `nullable`, `containsNull` of
 * [[ArrayType]] and `valueContainsNull` of [[MapType]] — of the output data type. This is
 * usually utilized by expressions (e.g. [[CaseWhen]]) that combine data from multiple child
 * expressions of non-primitive types.
 */
trait ComplexTypeMergingExpression extends Expression {

  /**
   * A collection of data types used for resolution the output type of the expression. By default,
   * data types of all child expressions. The collection must not be empty.
   */
  @transient
  lazy val inputTypesForMerging: Seq[DataType] = children.map(_.dataType)

  // Fails fast with a clear message when the inputs are empty or differ in more than their
  // null-related flags.
  def dataTypeCheck: Unit = {
    require(
      inputTypesForMerging.nonEmpty,
      "The collection of input data types must not be empty.")
    require(
      TypeCoercion.haveSameType(inputTypesForMerging),
      "All input types must be the same except nullable, containsNull, valueContainsNull flags." +
        s" The input types found are\\n\\t${inputTypesForMerging.mkString("\\n\\t")}")
  }

  override def dataType: DataType = {
    dataTypeCheck
    // The .get is safe here: dataTypeCheck has established that all input types differ only
    // in null-related flags, so a common type is guaranteed to exist.
    inputTypesForMerging.reduceLeft(TypeCoercion.findCommonTypeDifferentOnlyInNullFlags(_, _).get)
  }
}
/**
 * Common base trait for user-defined functions, including UDF/UDAF/UDTF of different languages
 * and Hive function wrappers.
 *
 * This is a pure marker trait: it declares no members and only serves to identify
 * user-defined functions by type.
 */
trait UserDefinedExpression
| aosagie/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala | Scala | apache-2.0 | 31,359 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.util.Locale
import java.util.regex.{MatchResult, Pattern}
import org.apache.commons.text.StringEscapeUtils
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.util.{GenericArrayData, StringUtils}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
/**
 * Common base for LIKE/RLIKE style expressions: a binary expression whose left operand is the
 * subject string and whose right operand is the pattern, evaluating to a boolean.
 *
 * Subclasses define how the raw pattern string is translated into a Java regex ([[escape]])
 * and how a compiled [[Pattern]] is matched against an input string ([[matches]]).
 */
abstract class StringRegexExpression extends BinaryExpression
  with ImplicitCastInputTypes with NullIntolerant {

  /** Translates the user-supplied pattern into a Java regular expression string. */
  def escape(v: String): String

  /** Returns true if `str` matches `regex` according to this expression's semantics. */
  def matches(regex: Pattern, str: String): Boolean

  override def dataType: DataType = BooleanType
  override def inputTypes: Seq[DataType] = Seq(StringType, StringType)

  // Cache the compiled pattern when the right operand is a string literal, so it is compiled
  // once instead of per row. Stays null when the pattern is not a literal.
  // (Fixed: dropped the unused `x @` binder on the Literal pattern.)
  private lazy val cache: Pattern = right match {
    case Literal(value: String, StringType) => compile(value)
    case _ => null
  }

  // Compiles the (escaped) pattern; returns null for a null pattern string.
  protected def compile(str: String): Pattern = if (str == null) {
    null
  } else {
    // Let it raise an exception if the regex string cannot be compiled.
    Pattern.compile(escape(str))
  }

  // Uses the cached literal pattern when available, otherwise compiles `str` on the fly.
  protected def pattern(str: String): Pattern = if (cache == null) compile(str) else cache

  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    val regex = pattern(input2.asInstanceOf[UTF8String].toString)
    if (regex == null) {
      null
    } else {
      matches(regex, input1.asInstanceOf[UTF8String].toString)
    }
  }

  override def sql: String = s"${left.sql} ${prettyName.toUpperCase(Locale.ROOT)} ${right.sql}"
}
// scalastyle:off line.contains.tab
/**
* Simple RegEx pattern matching function
*/
@ExpressionDescription(
usage = "str _FUNC_ pattern - Returns true if str matches pattern, " +
"null if any arguments are null, false otherwise.",
arguments = """
Arguments:
* str - a string expression
* pattern - a string expression. The pattern is a string which is matched literally, with
exception to the following special symbols:
_ matches any one character in the input (similar to . in posix regular expressions)
% matches zero or more characters in the input (similar to .* in posix regular
expressions)
The escape character is '\\'. If an escape character precedes a special symbol or another
escape character, the following character is matched literally. It is invalid to escape
any other character.
Since Spark 2.0, string literals are unescaped in our SQL parser. For example, in order
to match "\\abc", the pattern should be "\\\\abc".
When SQL config 'spark.sql.parser.escapedStringLiterals' is enabled, it fallbacks
to Spark 1.6 behavior regarding string literal parsing. For example, if the config is
enabled, the pattern to match "\\abc" should be "\\abc".
""",
examples = """
Examples:
> SET spark.sql.parser.escapedStringLiterals=true;
spark.sql.parser.escapedStringLiterals true
> SELECT '%SystemDrive%\\Users\\John' _FUNC_ '\\%SystemDrive\\%\\\\Users%';
true
> SET spark.sql.parser.escapedStringLiterals=false;
spark.sql.parser.escapedStringLiterals false
> SELECT '%SystemDrive%\\\\Users\\\\John' _FUNC_ '\\%SystemDrive\\%\\\\\\\\Users%';
true
""",
note = """
Use RLIKE to match with standard regular expressions.
""",
since = "1.0.0")
// scalastyle:on line.contains.tab
case class Like(left: Expression, right: Expression) extends StringRegexExpression {

  // LIKE wildcards are translated to a Java regex by escapeLikeRegex, and the whole input
  // must match (Matcher.matches) — unlike RLIKE, which only searches for an occurrence.
  override def escape(v: String): String = StringUtils.escapeLikeRegex(v)

  override def matches(regex: Pattern, str: String): Boolean = regex.matcher(str).matches()

  override def toString: String = s"$left LIKE $right"

  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val patternClass = classOf[Pattern].getName
    val escapeFunc = StringUtils.getClass.getName.stripSuffix("$") + ".escapeLikeRegex"
    if (right.foldable) {
      val rVal = right.eval()
      if (rVal != null) {
        // Foldable non-null pattern: escape it at codegen time and compile it once into a
        // mutable state field, so the generated code never recompiles the regex per row.
        val regexStr =
          StringEscapeUtils.escapeJava(escape(rVal.asInstanceOf[UTF8String].toString()))
        val pattern = ctx.addMutableState(patternClass, "patternLike",
          v => s"""$v = $patternClass.compile("$regexStr");""")
        // We don't use nullSafeCodeGen here because we don't want to re-evaluate right again.
        val eval = left.genCode(ctx)
        ev.copy(code = code"""
          ${eval.code}
          boolean ${ev.isNull} = ${eval.isNull};
          ${CodeGenerator.javaType(dataType)} ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
          if (!${ev.isNull}) {
            ${ev.value} = $pattern.matcher(${eval.value}.toString()).matches();
          }
        """)
      } else {
        // Foldable null pattern: the result is always null; no evaluation needed.
        ev.copy(code = code"""
          boolean ${ev.isNull} = true;
          ${CodeGenerator.javaType(dataType)} ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
        """)
      }
    } else {
      // Non-foldable pattern: escape and compile it per row in the generated code.
      val pattern = ctx.freshName("pattern")
      val rightStr = ctx.freshName("rightStr")
      nullSafeCodeGen(ctx, ev, (eval1, eval2) => {
        s"""
          String $rightStr = $eval2.toString();
          $patternClass $pattern = $patternClass.compile($escapeFunc($rightStr));
          ${ev.value} = $pattern.matcher($eval1.toString()).matches();
        """
      })
    }
  }
}
// scalastyle:off line.contains.tab
@ExpressionDescription(
usage = "str _FUNC_ regexp - Returns true if `str` matches `regexp`, or false otherwise.",
arguments = """
Arguments:
* str - a string expression
* regexp - a string expression. The regex string should be a Java regular expression.
Since Spark 2.0, string literals (including regex patterns) are unescaped in our SQL
parser. For example, to match "\\abc", a regular expression for `regexp` can be
"^\\\\abc$".
There is a SQL config 'spark.sql.parser.escapedStringLiterals' that can be used to
fallback to the Spark 1.6 behavior regarding string literal parsing. For example,
if the config is enabled, the `regexp` that can match "\\abc" is "^\\abc$".
""",
examples = """
Examples:
> SET spark.sql.parser.escapedStringLiterals=true;
spark.sql.parser.escapedStringLiterals true
> SELECT '%SystemDrive%\\Users\\John' _FUNC_ '%SystemDrive%\\\\Users.*';
true
> SET spark.sql.parser.escapedStringLiterals=false;
spark.sql.parser.escapedStringLiterals false
> SELECT '%SystemDrive%\\\\Users\\\\John' _FUNC_ '%SystemDrive%\\\\\\\\Users.*';
true
""",
note = """
Use LIKE to match with simple string pattern.
""",
since = "1.0.0")
// scalastyle:on line.contains.tab
case class RLike(left: Expression, right: Expression) extends StringRegexExpression {

  // RLIKE uses the pattern as-is (no escaping) and succeeds when the regex is found
  // anywhere in the input (Matcher.find) — unlike LIKE, which requires a full match.
  override def escape(v: String): String = v
  override def matches(regex: Pattern, str: String): Boolean = regex.matcher(str).find(0)
  override def toString: String = s"$left RLIKE $right"

  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val patternClass = classOf[Pattern].getName
    if (right.foldable) {
      val rVal = right.eval()
      if (rVal != null) {
        // Foldable non-null pattern: compile it once into a mutable state field so the
        // generated code does not recompile the regex per row.
        val regexStr =
          StringEscapeUtils.escapeJava(rVal.asInstanceOf[UTF8String].toString())
        val pattern = ctx.addMutableState(patternClass, "patternRLike",
          v => s"""$v = $patternClass.compile("$regexStr");""")
        // We don't use nullSafeCodeGen here because we don't want to re-evaluate right again.
        val eval = left.genCode(ctx)
        ev.copy(code = code"""
          ${eval.code}
          boolean ${ev.isNull} = ${eval.isNull};
          ${CodeGenerator.javaType(dataType)} ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
          if (!${ev.isNull}) {
            ${ev.value} = $pattern.matcher(${eval.value}.toString()).find(0);
          }
        """)
      } else {
        // Foldable null pattern: the result is always null; no evaluation needed.
        ev.copy(code = code"""
          boolean ${ev.isNull} = true;
          ${CodeGenerator.javaType(dataType)} ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
        """)
      }
    } else {
      // Non-foldable pattern: compile it per row in the generated code.
      val rightStr = ctx.freshName("rightStr")
      val pattern = ctx.freshName("pattern")
      nullSafeCodeGen(ctx, ev, (eval1, eval2) => {
        s"""
          String $rightStr = $eval2.toString();
          $patternClass $pattern = $patternClass.compile($rightStr);
          ${ev.value} = $pattern.matcher($eval1.toString()).find(0);
        """
      })
    }
  }
}
/**
* Splits str around matches of the given regex.
*/
@ExpressionDescription(
usage = "_FUNC_(str, regex, limit) - Splits `str` around occurrences that match `regex`" +
" and returns an array with a length of at most `limit`",
arguments = """
Arguments:
* str - a string expression to split.
* regex - a string representing a regular expression. The regex string should be a
Java regular expression.
* limit - an integer expression which controls the number of times the regex is applied.
* limit > 0: The resulting array's length will not be more than `limit`,
and the resulting array's last entry will contain all input
beyond the last matched regex.
* limit <= 0: `regex` will be applied as many times as possible, and
the resulting array can be of any size.
""",
examples = """
Examples:
> SELECT _FUNC_('oneAtwoBthreeC', '[ABC]');
["one","two","three",""]
> SELECT _FUNC_('oneAtwoBthreeC', '[ABC]', -1);
["one","two","three",""]
> SELECT _FUNC_('oneAtwoBthreeC', '[ABC]', 2);
["one","twoBthreeC"]
""",
since = "1.5.0")
/**
 * Splits `str` around matches of `regex`, returning an array of at most `limit` strings.
 * A non-positive limit applies the regex as many times as possible.
 */
case class StringSplit(str: Expression, regex: Expression, limit: Expression)
  extends TernaryExpression with ImplicitCastInputTypes {

  override def dataType: DataType = ArrayType(StringType)
  override def inputTypes: Seq[DataType] = Seq(StringType, StringType, IntegerType)
  override def children: Seq[Expression] = str :: regex :: limit :: Nil

  /** Two-argument form: limit defaults to -1, i.e. split as many times as possible. */
  def this(exp: Expression, regex: Expression) = this(exp, regex, Literal(-1))

  override def nullSafeEval(string: Any, regex: Any, limit: Any): Any = {
    // Delegates the split semantics (including the limit behavior) to UTF8String.split.
    val strings = string.asInstanceOf[UTF8String].split(
      regex.asInstanceOf[UTF8String], limit.asInstanceOf[Int])
    new GenericArrayData(strings.asInstanceOf[Array[Any]])
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val arrayClass = classOf[GenericArrayData].getName
    nullSafeCodeGen(ctx, ev, (str, regex, limit) => {
      // Array in java is covariant, so we don't need to cast UTF8String[] to Object[].
      // (Fixed: dropped a no-op .stripMargin — the string contains no margin characters.)
      s"""${ev.value} = new $arrayClass($str.split($regex,$limit));"""
    })
  }

  override def prettyName: String = "split"
}
/**
* Replace all substrings of str that match regexp with rep.
*
* NOTE: this expression is not THREAD-SAFE, as it has some internal mutable status.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(str, regexp, rep) - Replaces all substrings of `str` that match `regexp` with `rep`.",
examples = """
Examples:
> SELECT _FUNC_('100-200', '(\\\\d+)', 'num');
num-num
""",
since = "1.5.0")
// scalastyle:on line.size.limit
case class RegExpReplace(subject: Expression, regexp: Expression, rep: Expression)
  extends TernaryExpression with ImplicitCastInputTypes {

  // last regex in string; we only recompile the pattern when the regexp value changes.
  @transient private var lastRegex: UTF8String = _
  // last compiled regex pattern, cached for performance.
  @transient private var pattern: Pattern = _
  // last replacement string; avoids converting UTF8String => java.lang.String every row.
  @transient private var lastReplacement: String = _
  @transient private var lastReplacementInUTF8: UTF8String = _
  // result buffer written by the Matcher; reused across rows, which is one reason this
  // expression is NOT thread-safe (see class comment).
  @transient private lazy val result: StringBuffer = new StringBuffer

  override def nullSafeEval(s: Any, p: Any, r: Any): Any = {
    if (!p.equals(lastRegex)) {
      // regex value changed: clone (UTF8String may be backed by reused memory) and recompile.
      lastRegex = p.asInstanceOf[UTF8String].clone()
      pattern = Pattern.compile(lastRegex.toString)
    }
    if (!r.equals(lastReplacementInUTF8)) {
      // replacement string changed
      lastReplacementInUTF8 = r.asInstanceOf[UTF8String].clone()
      lastReplacement = lastReplacementInUTF8.toString
    }
    // Standard appendReplacement/appendTail loop over all matches.
    val m = pattern.matcher(s.toString())
    result.delete(0, result.length())
    while (m.find) {
      m.appendReplacement(result, lastReplacement)
    }
    m.appendTail(result)
    UTF8String.fromString(result.toString)
  }

  override def dataType: DataType = StringType
  override def inputTypes: Seq[AbstractDataType] = Seq(StringType, StringType, StringType)
  override def children: Seq[Expression] = subject :: regexp :: rep :: Nil
  override def prettyName: String = "regexp_replace"

  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val termResult = ctx.freshName("termResult")
    val classNamePattern = classOf[Pattern].getCanonicalName
    val classNameStringBuffer = classOf[java.lang.StringBuffer].getCanonicalName
    val matcher = ctx.freshName("matcher")

    // Mutable state fields mirror the interpreted path's caching of the compiled pattern
    // and the replacement string across rows.
    val termLastRegex = ctx.addMutableState("UTF8String", "lastRegex")
    val termPattern = ctx.addMutableState(classNamePattern, "pattern")
    val termLastReplacement = ctx.addMutableState("String", "lastReplacement")
    val termLastReplacementInUTF8 = ctx.addMutableState("UTF8String", "lastReplacementInUTF8")

    val setEvNotNull = if (nullable) {
      s"${ev.isNull} = false;"
    } else {
      ""
    }

    nullSafeCodeGen(ctx, ev, (subject, regexp, rep) => {
    s"""
      if (!$regexp.equals($termLastRegex)) {
        // regex value changed
        $termLastRegex = $regexp.clone();
        $termPattern = $classNamePattern.compile($termLastRegex.toString());
      }
      if (!$rep.equals($termLastReplacementInUTF8)) {
        // replacement string changed
        $termLastReplacementInUTF8 = $rep.clone();
        $termLastReplacement = $termLastReplacementInUTF8.toString();
      }
      $classNameStringBuffer $termResult = new $classNameStringBuffer();
      java.util.regex.Matcher $matcher = $termPattern.matcher($subject.toString());

      while ($matcher.find()) {
        $matcher.appendReplacement($termResult, $termLastReplacement);
      }
      $matcher.appendTail($termResult);
      ${ev.value} = UTF8String.fromString($termResult.toString());
      $termResult = null;
      $setEvNotNull
    """
    })
  }
}
/**
* Extract a specific(idx) group identified by a Java regex.
*
* NOTE: this expression is not THREAD-SAFE, as it has some internal mutable status.
*/
@ExpressionDescription(
usage = "_FUNC_(str, regexp[, idx]) - Extracts a group that matches `regexp`.",
examples = """
Examples:
> SELECT _FUNC_('100-200', '(\\\\d+)-(\\\\d+)', 1);
100
""",
since = "1.5.0")
case class RegExpExtract(subject: Expression, regexp: Expression, idx: Expression)
  extends TernaryExpression with ImplicitCastInputTypes {

  /** Two-argument form: extracts group 1 by default. */
  def this(s: Expression, r: Expression) = this(s, r, Literal(1))

  // last regex in string; we only recompile the pattern when the regexp value changes.
  @transient private var lastRegex: UTF8String = _
  // last compiled regex pattern, cached for performance. This mutable state is why the
  // expression is not thread-safe (see class comment).
  @transient private var pattern: Pattern = _

  override def nullSafeEval(s: Any, p: Any, r: Any): Any = {
    if (!p.equals(lastRegex)) {
      // regex value changed
      lastRegex = p.asInstanceOf[UTF8String].clone()
      pattern = Pattern.compile(lastRegex.toString)
    }
    val m = pattern.matcher(s.toString)
    if (m.find) {
      val mr: MatchResult = m.toMatchResult
      val group = mr.group(r.asInstanceOf[Int])
      if (group == null) { // Pattern matched, but not optional group
        UTF8String.EMPTY_UTF8
      } else {
        UTF8String.fromString(group)
      }
    } else {
      // No match at all: returns the empty string rather than null.
      UTF8String.EMPTY_UTF8
    }
  }

  override def dataType: DataType = StringType
  override def inputTypes: Seq[AbstractDataType] = Seq(StringType, StringType, IntegerType)
  override def children: Seq[Expression] = subject :: regexp :: idx :: Nil
  override def prettyName: String = "regexp_extract"

  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val classNamePattern = classOf[Pattern].getCanonicalName
    val matcher = ctx.freshName("matcher")
    val matchResult = ctx.freshName("matchResult")

    // Mutable state fields mirror the interpreted path's pattern caching across rows.
    val termLastRegex = ctx.addMutableState("UTF8String", "lastRegex")
    val termPattern = ctx.addMutableState(classNamePattern, "pattern")

    val setEvNotNull = if (nullable) {
      s"${ev.isNull} = false;"
    } else {
      ""
    }

    nullSafeCodeGen(ctx, ev, (subject, regexp, idx) => {
      s"""
      if (!$regexp.equals($termLastRegex)) {
        // regex value changed
        $termLastRegex = $regexp.clone();
        $termPattern = $classNamePattern.compile($termLastRegex.toString());
      }
      java.util.regex.Matcher $matcher =
        $termPattern.matcher($subject.toString());
      if ($matcher.find()) {
        java.util.regex.MatchResult $matchResult = $matcher.toMatchResult();
        if ($matchResult.group($idx) == null) {
          ${ev.value} = UTF8String.EMPTY_UTF8;
        } else {
          ${ev.value} = UTF8String.fromString($matchResult.group($idx));
        }
        $setEvNotNull
      } else {
        ${ev.value} = UTF8String.EMPTY_UTF8;
        $setEvNotNull
      }"""
    })
  }
}
| rezasafi/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala | Scala | apache-2.0 | 18,397 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.eventhubs.utils
import org.apache.spark.eventhubs.{ NameAndPartition, PartitionsStatusTracker, SequenceNumber }
import scala.collection.breakOut
/**
 * Test facade over the shared [[PartitionsStatusTracker]]: forwards performance updates to it
 * and exposes its state (rounded percentages, tracked batch ids) for assertions.
 */
private[spark] object SimulatedPartitionStatusTracker {
  // The shared tracker instance under test; every call below is forwarded to it.
  val sourceTracker = PartitionsStatusTracker.getPartitionStatusTracker

  /** Forwards one partition's batch performance sample to the underlying tracker. */
  def updatePartitionPerformance(nAndP: NameAndPartition,
                                 requestSeqNo: SequenceNumber,
                                 batchSize: Int,
                                 receiveTimeInMillis: Long): Unit = {
    sourceTracker.updatePartitionPerformance(nAndP, requestSeqNo, batchSize, receiveTimeInMillis)
  }

  /**
   * Per-partition performance percentages rounded to two decimal places, or an empty map
   * when the tracker has none available.
   */
  def getPerformancePercentages: Map[NameAndPartition, Double] = {
    sourceTracker.partitionsPerformancePercentage match {
      // breakOut builds the target Map directly, avoiding an intermediate collection.
      case Some(percentages) => (percentages.map(par => (par._1, roundDouble(par._2, 2))))(breakOut)
      case None => Map[NameAndPartition, Double]()
    }
  }

  // Rounds `num` to `precision` decimal digits (half-up, via Math.round).
  private def roundDouble(num: Double, precision: Int): Double = {
    val scale = Math.pow(10, precision)
    Math.round(num * scale) / scale
  }

  /** Batch ids currently held by the tracker, exposed for test assertions. */
  def currentBatchIdsInTracker: scala.collection.Set[Long] = sourceTracker.batchIdsInTracker
}
| hdinsight/spark-eventhubs | core/src/main/scala/org/apache/spark/eventhubs/utils/SimulatedPartitionStatusTracker.scala | Scala | apache-2.0 | 1,991 |
package es.weso.rdf
import es.weso.tgraph.{ TContext, TGraph }
import es.weso.rdf.nodes.{ IRI, BNodeId, RDFNode }
import es.weso.rdf.triples.RDFTriple
import scala.collection.immutable.Map
import scala.collection.Set
/*
case class that provides implementation for RDF graph, it is also used in RDFGraph object factory
*/
/**
 * Implementation of [[RDFGraph]] backed by a [[TGraph]] of RDF nodes; also used by the
 * RDFGraph object factory.
 */
case class Ground(graph: TGraph[RDFNode])(implicit seed: BNodeId) extends RDFGraph {

  override def isEmpty = graph.isEmpty

  /** Inserts a single triple, returning a new ground graph. */
  override def insertTriple(triple: RDFTriple): RDFGraph = {
    Ground(graph.addTriple(triple.subj, triple.pred, triple.obj))
  }

  // Renames a blank node according to `map`; non-bnodes pass through unchanged.
  // Note: `map(b)` throws NoSuchElementException if the bnode has no mapping.
  def getBNodeMap(node: RDFNode, map: Map[BNodeId, BNodeId]): RDFNode = {
    node match {
      case b @ BNodeId(_) => map(b)
      case other => other
    }
  }

  /** Inserts a triple with its blank nodes renamed through `map`. */
  override def insertTripleMap(
    triple: RDFTriple,
    map: Map[BNodeId, BNodeId]
  ): RDFGraph = {
    val s = getBNodeMap(triple.subj, map)
    val p = triple.pred
    val o = getBNodeMap(triple.obj, map)
    Ground(graph.addTriple(s, p, o))
  }

  // Recursively wraps the graph in Exists once per remaining bnode id, extending `map`
  // with a fresh bnode each time; when no bnodes remain, folds the triples in.
  override def addTriplesBNodes(
    bnodes: Set[BNodeId],
    triples: Set[RDFTriple],
    map: Map[BNodeId, BNodeId]
  ): RDFGraph = {
    if (bnodes.isEmpty) {
      val current: RDFGraph = this
      triples.foldLeft(current)((g, triple) =>
        g.insertTripleMap(triple, map))
    } else {
      Exists {
        case (bnode) =>
          addTriplesBNodes(
            bnodes.tail,
            triples,
            map + (bnodes.head -> bnode)
          )
      }
    }
  }

  /**
   * addTriples inserts a set of triples into a graph.
   *
   * It takes control of possible bnodes in the triples, renaming them.
   * @param triples set of triples
   */
  override def addTriples(triples: Set[RDFTriple]): RDFGraph = {
    val bnodes = RDFTriple.collectBNodes(triples)
    addTriplesBNodes(bnodes, triples, Map.empty)
  }

  /** All IRI nodes appearing in the graph. */
  override def IRIs: Set[IRI] = {
    graph.nodes.filter(_.isIRI).map(_.toIRI)
  }

  /**
   * get the triples of a graph
   *
   * @param seed represents the seed for blank node identifier generation
   * (default value = 0)
   *
   * Ground graphs ignore the seed parameter
   */
  override def triples(implicit seed: BNodeId): Set[RDFTriple] = {
    graph.triples.map((t) => RDFTriple(t._1, t._2.toIRI, t._3))
  }

  /** Union with another graph (adds the other graph's triples to this one). */
  override def merge(other: RDFGraph): RDFGraph = {
    val g = this.addTriples(other.triples)
    g
  }

  override def show(implicit seed: BNodeId): String = {
    this.toString
  }

  /**
   * Decompose a graph from a given IRI
   * @param node resource from which we are decomposing the graph
   *
   * @author labra
   */
  def decomp(node: IRI): Option[(TContext[RDFNode], RDFGraph)] = {
    graph.decomp(node) match {
      case None => None
      case Some((ctx, g)) => Some((ctx, Ground(g)))
    }
  }

  // Folds over the graph's contexts; the seed is unused for ground graphs.
  override def foldRDFGraphSeed[A](e: A, f: (A, TContext[RDFNode]) => A, seed: BNodeId): A = {
    graph.foldTGraph(e)(f)
  }

  // As foldRDFGraphSeed, but visits contexts in the order given by `ord`.
  def foldRDFGraphSeedOrd[A](
    e: A,
    f: (A, TContext[RDFNode]) => A,
    seed: BNodeId
  )(implicit ord: Ordering[RDFNode]): A = {
    graph.foldTGraphOrd(e)(f)(ord)
  }
}
| labra/wesin | src/main/scala/es/weso/rdfgraph/Ground.scala | Scala | lgpl-3.0 | 3,074 |
/*
* Copyright 2016 Nicolas Rinaudo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kantan.sbt.kantan
import com.jsuereth.sbtpgp.PgpKeys.publishSigned
import com.typesafe.sbt.SbtGit.git
import kantan.sbt.release.KantanRelease
import kantan.sbt.scalafmt.KantanScalafmtPlugin, KantanScalafmtPlugin.autoImport._
import kantan.sbt.scalastyle.KantanScalastylePlugin, KantanScalastylePlugin.autoImport._
import sbt._, Keys._
import sbtrelease.ReleasePlugin.autoImport._, ReleaseTransformations._
/** Plugin that sets kantan-specific values.
*
* This is really only meant for kantan projects. Don't use this unless you're me.
*
* In order for kantan builds to behave properly, the following two lines *must* be present in the `build.sbt` files:
* {{{
* ThisBuild / kantanProject := "foobar"
* ThisBuild / startYear := Some(1978)
* }}}
*/
object KantanKantanPlugin extends AutoPlugin {
  object autoImport {
    // Must be set in build.sbt (ThisBuild / kantanProject); all remote URLs derive from it.
    val kantanProject: SettingKey[String] = settingKey("Name of the kantan project")
  }
  import autoImport._

  // Auto-enabled on every project that has the required plugins below.
  override def trigger = allRequirements
  override def requires = KantanScalastylePlugin && KantanScalafmtPlugin

  override lazy val projectSettings = generalSettings ++ remoteSettings

  // Organization, cross-build, license, style-config and release-pipeline settings shared
  // by all kantan modules. The releaseProcess steps run in the order listed.
  @SuppressWarnings(Array("org.wartremover.warts.TraversableOps"))
  lazy val generalSettings: Seq[Setting[_]] = Seq(
    name := s"kantan.${kantanProject.value}",
    organization := "com.nrinaudo",
    organizationHomepage := Some(url("https://nrinaudo.github.io")),
    organizationName := "Nicolas Rinaudo",
    crossScalaVersions := Seq("2.12.15", "2.13.6"),
    scalaVersion := crossScalaVersions.value.last,
    licenses := Seq("Apache-2.0" -> url("https://www.apache.org/licenses/LICENSE-2.0.html")),
    scalastyleResource := Some("/kantan/sbt/scalastyle-config.xml"),
    scalafmtResource := Some("/kantan/sbt/scalafmt.conf"),
    // This must be enabled for all modules, to make sure that aggregation picks up on multi-release. Typically,
    // root projects are unpublished, but if they do not have releaseCrossBuilder set to true, no underlying project
    // will either.
    releaseCrossBuild := true,
    releasePublishArtifactsAction := publishSigned.value,
    releaseProcess := Seq[ReleaseStep](
      checkSnapshotDependencies,
      inquireVersions,
      runClean,
      KantanRelease.runCoverageOff,
      KantanRelease.runCheckStyle,
      runTest,
      setReleaseVersion,
      commitReleaseVersion,
      tagRelease,
      publishArtifacts,
      releaseStepCommand("sonatypeReleaseAll"),
      KantanRelease.runPushSite,
      setNextVersion,
      commitNextVersion,
      pushChanges
    ),
    developers := List(
      Developer("nrinaudo", "Nicolas Rinaudo", "nicolas@nrinaudo.com", url("https://twitter.com/nicolasrinaudo"))
    )
  )

  /** Remote identifiers (homepage, API docs, git remote, SCM info), computed from [[autoImport.kantanProject]]. */
  lazy val remoteSettings: Seq[Setting[_]] = Seq(
    homepage := Some(url(s"https://nrinaudo.github.io/kantan.${kantanProject.value}")),
    apiURL := Some(url(s"https://nrinaudo.github.io/kantan.${kantanProject.value}/api/")),
    git.remoteRepo := s"git@github.com:nrinaudo/kantan.${kantanProject.value}.git",
    scmInfo := Some(
      ScmInfo(
        url(s"https://github.com/nrinaudo/kantan.${kantanProject.value}"),
        s"scm:git:git@github.com:nrinaudo/kantan.${kantanProject.value}.git"
      )
    )
  )
}
| nrinaudo/kantan.sbt | kantan/src/main/scala/kantan/sbt/kantan/KantanKantanPlugin.scala | Scala | apache-2.0 | 3,996 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package whisk.core.entity.test
import org.scalatest.Matchers
import org.scalatest.Suite
import common.StreamLogging
import common.WskActorSystem
import whisk.core.WhiskConfig
import whisk.core.entity._
import whisk.core.entity.ArgNormalizer.trim
import whisk.core.entity.ExecManifest._
/**
 * Test mixin providing factory helpers for the various action `Exec` flavours
 * (node.js, swift, blackbox, sequence) used throughout the entity tests.
 */
trait ExecHelpers
    extends Matchers
    with WskActorSystem
    with StreamLogging {
    self: Suite =>

    private val config = new WhiskConfig(ExecManifest.requiredProperties)
    ExecManifest.initialize(config) should be a 'success

    // Runtime kind names as they appear in the runtimes manifest.
    protected val NODEJS = "nodejs"
    protected val NODEJS6 = "nodejs:6"
    protected val SWIFT = "swift"
    protected val SWIFT3 = "swift:3"

    // The image repository is the kind (with ':' removed) plus an "action" suffix,
    // published under the "openwhisk" prefix at tag "latest".
    protected def imagename(name: String) = {
        val repository = (name + "action").replace(":", "")
        ExecManifest.ImageName(repository, Some("openwhisk"), Some("latest"))
    }

    /** Exec for the deprecated plain "nodejs" kind. */
    protected def js(code: String, main: Option[String] = None) = {
        val manifest = RuntimeManifest(NODEJS, imagename(NODEJS), deprecated = Some(true))
        CodeExecAsString(manifest, trim(code), main.map(_.trim))
    }

    /** Exec for the default "nodejs:6" kind. */
    protected def js6(code: String, main: Option[String] = None) = {
        val manifest = RuntimeManifest(NODEJS6, imagename(NODEJS6), default = Some(true), deprecated = Some(false))
        CodeExecAsString(manifest, trim(code), main.map(_.trim))
    }

    /** Alias for the default javascript runtime (nodejs:6). */
    protected def jsDefault(code: String, main: Option[String] = None) = js6(code, main)

    /** Exec for the deprecated plain "swift" kind. */
    protected def swift(code: String, main: Option[String] = None) = {
        val manifest = RuntimeManifest(SWIFT, imagename(SWIFT), deprecated = Some(true))
        CodeExecAsString(manifest, trim(code), main.map(_.trim))
    }

    /** Exec for "swift:3"; the default flag is looked up from the runtimes manifest. */
    protected def swift3(code: String, main: Option[String] = None) = {
        val default = ExecManifest.runtimesManifest.resolveDefaultRuntime(SWIFT3).flatMap(_.default)
        val manifest = RuntimeManifest(SWIFT3, imagename(SWIFT3), default = default, deprecated = Some(false))
        CodeExecAsString(manifest, trim(code), main.map(_.trim))
    }

    /** Exec for an action sequence over the given components. */
    protected def sequence(components: Vector[FullyQualifiedEntityName]) = SequenceExec(components)

    /** Blackbox exec with no inlined code. */
    protected def bb(image: String) = BlackBoxExec(ExecManifest.ImageName(trim(image)), None, None, false)

    /** Blackbox exec with inlined code; blank code collapses to None. */
    protected def bb(image: String, code: String, main: Option[String] = None) = {
        val inlinedCode = Some(trim(code)).filter(_.nonEmpty)
        BlackBoxExec(ExecManifest.ImageName(trim(image)), inlinedCode, main, false)
    }
}
| prccaraujo/openwhisk | tests/src/test/scala/whisk/core/entity/test/ExecHelpers.scala | Scala | apache-2.0 | 3,079 |
/*
* Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s.internal.parboiled2
import org.specs2.specification.Scope
import org.specs2.specification.dsl.NoReferenceDsl
import org.specs2.mutable.Specification
import org.specs2.control.NoNumberOfTimes
import org.http4s.internal.parboiled2.support.Unpack
import org.http4s.internal.parboiled2.support._
/**
 * Base class for parser specifications: wires a parboiled2 parser into specs2 so that rules
 * can be checked with `beMatched` / `beMismatched` style expectations.
 */
abstract class TestParserSpec extends Specification with NoReferenceDsl with NoNumberOfTimes {
  // Aliases for parsers producing no value, a single value, or an HList of values.
  type TestParser0 = TestParser[HNil, Unit]
  type TestParser1[T] = TestParser[T :: HNil, T]
  type TestParserN[L <: HList] = TestParser[L, L]

  // work-around for https://github.com/etorreborre/specs2/issues/514
  override def mutableLinkFragment(alias: String): mutableLinkFragment = ???
  override def mutableSeeFragment(alias: String): mutableSeeFragment = ???

  abstract class TestParser[L <: HList, Out](implicit unpack: Unpack.Aux[L, Out]) extends Parser with Scope {
    // Assigned by `parse` immediately before the rule is run.
    var input: ParserInput = _
    def errorFormatter: ErrorFormatter = new ErrorFormatter(showTraces = true)

    /** The rule under test; supplied by each concrete test parser. */
    def targetRule: RuleN[L]

    def beMatched = beTrue ^^ (parse(_: String).isRight)
    def beMatchedWith(r: Out) = parse(_: String) === Right(r)
    def beMismatched = beTrue ^^ (parse(_: String).isLeft)
    // NOTE(review): the `.get` calls below assume the parse fails; on an unexpected success
    // the matcher throws instead of reporting a clean failure.
    def beMismatchedWithError(pe: ParseError) = parse(_: String).left.toOption.get === pe
    def beMismatchedWithErrorMsg(msg: String) =
      parse(_: String).left.toOption.map(formatError(_, errorFormatter)).get === msg.stripMargin

    /** Runs [[targetRule]] on `input`; the local import selects the Either delivery scheme. */
    def parse(input: String): Either[ParseError, Out] = {
      this.input = input
      import Parser.DeliveryScheme.Either
      targetRule.run()
    }
  }
}
| ZizhengTai/http4s | parboiled2/src/test/scala/org/http4s/internal/parboiled2/TestParserSpec.scala | Scala | apache-2.0 | 2,224 |
import scala.collection.mutable
import scala.util.hashing.Hashing
import mutable.{BagOfMultiplicitiesBagBucket, TreeBag}
// Small interactive demo of TreeBag configured to bucket strings by their length.
object QuickTest extends App {

  // Strings hash and compare by length only, so e.g. "Cat" and "Cam" land in one bucket.
  object StrSize extends Ordering[String] with Hashing[String] {
    def hash(x: String): Int = x.length
    def compare(x: String, y: String): Int = x.length.compare(y.length)
  }

  implicit val config = TreeBag.configuration.compactWithEquiv(StrSize)

  // Build up a bag step by step via immutable-style additions...
  val emptyBag = TreeBag.empty[String]
  val catInBag = emptyBag + "Cat"
  val twoCatsInBag = catInBag + "Cat"
  val twoCatsAndAnEnginePartInBag = twoCatsInBag + "Cam"
  val bag = twoCatsAndAnEnginePartInBag + "Mouse"
  // ...then mutate it in place and snapshot some derived bags along the way.
  bag += "Bat"
  val roundtripped = bag - "Bat"
  val shouldBeTheSameAsRoundtripped = roundtripped - "Quack"
  bag += "Quack"

  // Dump the per-bucket structure first, then multiplicities and the derived bags.
  val bucketContents =
    bag.bucketsIterator
      .map(_.asInstanceOf[BagOfMultiplicitiesBagBucket[String]].bag.bucketsIterator.toList)
      .toList
  println(bucketContents)
  println(bag.multiplicities.toList)
  println(roundtripped)
  println(shouldBeTheSameAsRoundtripped)
  println(bag.removedAll("Cam"))
}
| nicolasstucki/multisets | src/main/scala/QuickTest.scala | Scala | bsd-3-clause | 1,073 |
// There really should be a license here
import fr.umlv.unitex.jni.UnitexJni
/**
* Created by aborsu on 26/06/15.
*
* This program is a scala adaptation of the UnitexJniDemo distributed with Unitex' source code
* The original file can be found on the Unitex svn server https://svnigm.univ-mlv.fr/svn/unitex
* and contained the following copyright :
*** Unitex
*
* Copyright (C) 2001-2014 Université Paris-Est Marne-la-Vallée <unitex@univ-mlv.fr>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
*
*
***
* File created and contributed by Gilles Vollant (Ergonotics SAS)
* as part of an UNITEX optimization and reliability effort
*
* additional information: http://www.ergonotics.com/unitex-contribution/
* contact : unitex-contribution@ergonotics.com
*
*
*/
object UnitexJniDemo {

  private val pathSeparator = if (UnitexJni.isUnderWindows()) "\\\\" else "/"

  /** Returns the prefix of the virtual file space supported by this Unitex
    * build ("*" or "$:"), or None when no abstract path space is available.
    */
  def getVirtualFilePfx(): Option[String] = {
    if (UnitexJni.unitexAbstractPathExists("*")) {
      Some("*")
    } else if (UnitexJni.unitexAbstractPathExists("$:")) {
      Some("$:")
    } else {
      None
    }
  }

  /** Runs the full Unitex pipeline (Normalize, Tokenize, Dico, Locate, Concord)
    * on `corpusText` and returns the content of the resulting concord.xml.
    *
    * @param othersResDir         directory containing Alphabet.txt and Norm.txt
    * @param workingDicoFileName  dictionary (.bin) to apply, possibly persisted
    * @param workingGraphFileName grammar (.fst2) to locate, possibly persisted
    * @param corpusPath           working directory for intermediate files
    * @param corpusText           raw text to process
    */
  def processUnitexWork(othersResDir: String, workingDicoFileName: String,
      workingGraphFileName: String, corpusPath: String, corpusText: String): String = {
    UnitexJni.writeUnitexFile(UnitexJni.combineUnitexFileComponent(corpusPath, "corpus.txt"), corpusText)

    // we create offsets file offset1.txt and offset2.txt to get position against the original corpus in the xml file
    val cmdNorm = "Normalize " + UnitexJni.combineUnitexFileComponentWithQuote(corpusPath,"corpus.txt") + " -r "+UnitexJni.combineUnitexFileComponentWithQuote(othersResDir,"Norm.txt") + " --output_offsets="+ UnitexJni.combineUnitexFileComponentWithQuote(corpusPath,"offset1.txt")
    val cmdTok = "Tokenize " + UnitexJni.combineUnitexFileComponentWithQuote(corpusPath,"corpus.txt") + " -a "+ UnitexJni.combineUnitexFileComponentWithQuote(othersResDir,"Alphabet.txt") + " --input_offsets="+ UnitexJni.combineUnitexFileComponentWithQuote(corpusPath,"offset1.txt") + " --output_offsets="+ UnitexJni.combineUnitexFileComponentWithQuote(corpusPath,"offset2.txt")
    val cmdDico = "Dico -t "+ UnitexJni.combineUnitexFileComponentWithQuote(corpusPath,"corpus.snt")+ " -a " + UnitexJni.combineUnitexFileComponentWithQuote(othersResDir,"Alphabet.txt")+" "+UnitexJni.combineUnitexFileComponentWithQuote(workingDicoFileName)
    val cmdLocate = "Locate -t "+UnitexJni.combineUnitexFileComponentWithQuote(corpusPath,"corpus.snt")+ " " + UnitexJni.combineUnitexFileComponentWithQuote(workingGraphFileName)+ " -a " + UnitexJni.combineUnitexFileComponentWithQuote(othersResDir,"Alphabet.txt")+ " -L -R --all -b -Y"
    val cmdConcord = "Concord "+ UnitexJni.combineUnitexFileComponentWithQuote(corpusPath,"corpus_snt","concord.ind")+ " -m " + UnitexJni.combineUnitexFileComponentWithQuote(corpusPath,"corpus.txt")
    val cmdConcordXml = "Concord "+ UnitexJni.combineUnitexFileComponentWithQuote(corpusPath,"corpus_snt","concord.ind")+
      " --uima="+ UnitexJni.combineUnitexFileComponentWithQuote(corpusPath,"offset2.txt") +" --xml"

    UnitexJni.execUnitexTool("UnitexTool " + cmdNorm)
    UnitexJni.execUnitexTool("UnitexTool " + cmdTok)
    UnitexJni.execUnitexTool("UnitexTool " + cmdDico)
    UnitexJni.execUnitexTool("UnitexTool " + cmdLocate)
    UnitexJni.execUnitexTool("UnitexTool " + cmdConcord)
    UnitexJni.execUnitexTool("UnitexTool " + cmdConcordXml)
    // these 6 lines can be replaced by only one execution (with very small speed improvement)
    /*
    UnitexJni.execUnitexTool("UnitexTool { " + cmdNorm + " } { " + cmdTok + " } { " + cmdDico + " } { " + cmdLocate + " } { " + cmdConcord + " } { " + cmdConcord2+ " }");
    */

    // NOTE(review): the original also read corpus.txt back into an unused
    // local ("merged"); that dead read has been removed.
    UnitexJni.getUnitexFileString(UnitexJni.combineUnitexFileComponent(corpusPath, "corpus_snt", "concord.xml"))
  }

  def main(args: Array[String]): Unit = {
    println("is ms-windows:"+UnitexJni.isUnderWindows()+" : "+System.getProperty("os.name")+ " "+java.io.File.separator)
    println("Usage : UnitexJniDemo [ressource_dir] [base_work_dir] [nb_loop] [param]")
    println(" param=0 : no vfs and no persistance")
    println(" param=1 : vfs and no persistance")
    println(" param=2 : no vfs and persistance")
    println(" param=3 : vfs and persistance (fastest)")
    println("")

    var baseWorkDir = if (args.length >= 2) args(1) else "." + pathSeparator + "demojnires"
    val ressourceDir = if (args.length >= 1) args(0) else "." + pathSeparator + "demojnires"

    var nbLoop = 8
    var cfgParam = 3
    if (args.length >= 3)
      nbLoop = Integer.parseInt(args(2))
    if (nbLoop < 1)
      nbLoop = 1
    if (args.length >= 4)
      cfgParam = Integer.parseInt(args(3))

    println("resource path : '"+ressourceDir+"' and work path is '"+baseWorkDir+"' and "+nbLoop+" executions")

    var graphResDir = UnitexJni.combineUnitexFileComponent(ressourceDir, "Graphs")
    var dictionnaryResDir = UnitexJni.combineUnitexFileComponent(ressourceDir, "Dela")
    // val othersResDir = UnitexJni.combineUnitexFileComponent(ressourceDir, "others");
    // UnitexJni.setStdOutTrashMode(true)

    val fusevfs = (cfgParam == 1) || (cfgParam == 3)
    val fusepersist = (cfgParam == 2) || (cfgParam == 3)
    val prefixVFS = if (fusevfs) getVirtualFilePfx() else None

    print("use vfs: " + fusevfs)
    if (fusevfs) {
      println(s"on: ${prefixVFS.get}")
    } else {
      println()
    }
    println("use persist: "+fusepersist)

    if (fusevfs) {
      assert(prefixVFS.isDefined)
      val pfx = prefixVFS.get
      // Copy the resources from disk into the virtual file space *before*
      // re-pointing the directories at it. (The original code prefixed
      // baseWorkDir first, concatenated the Option itself into the target
      // paths -- yielding "Some(*)..." -- and applied the prefix to
      // baseWorkDir twice.)
      UnitexJni.copyUnitexFile(UnitexJni.combineUnitexFileComponent(baseWorkDir,"Alphabet.txt"),
        UnitexJni.combineUnitexFileComponent(pfx + baseWorkDir,"Alphabet.txt"))
      UnitexJni.copyUnitexFile(UnitexJni.combineUnitexFileComponent(baseWorkDir,"Norm.txt"),
        UnitexJni.combineUnitexFileComponent(pfx + baseWorkDir,"Norm.txt"))
      baseWorkDir = pfx + baseWorkDir
      if (!fusepersist) {
        // Without persistence the dictionary and the graph are read from the
        // (virtual) file system on every run, so copy them in as well.
        UnitexJni.copyUnitexFile(UnitexJni.combineUnitexFileComponent(dictionnaryResDir,"dela-en-public.bin"),
          UnitexJni.combineUnitexFileComponent(pfx + dictionnaryResDir,"dela-en-public.bin"))
        UnitexJni.copyUnitexFile(UnitexJni.combineUnitexFileComponent(dictionnaryResDir,"dela-en-public.inf"),
          UnitexJni.combineUnitexFileComponent(pfx + dictionnaryResDir,"dela-en-public.inf"))
        dictionnaryResDir = pfx + dictionnaryResDir
        UnitexJni.copyUnitexFile(UnitexJni.combineUnitexFileComponent(graphResDir,"AAA-hours-demo.fst2"),
          UnitexJni.combineUnitexFileComponent(pfx + graphResDir,"AAA-hours-demo.fst2"))
        graphResDir = pfx + graphResDir
      }
    }

    val dicoFileName = UnitexJni.combineUnitexFileComponent(dictionnaryResDir,"dela-en-public.bin")
    val graphFileName = UnitexJni.combineUnitexFileComponent(graphResDir,"AAA-hours-demo.fst2")

    // With persistence enabled the resources are loaded once into Unitex'
    // persistent space and referenced through the returned handle name.
    val workingDicoFileName = if (fusepersist) {
      UnitexJni.loadPersistentDictionary(dicoFileName)
    } else {
      dicoFileName
    }
    val workingGraphFileName = if (fusepersist) {
      UnitexJni.loadPersistentFst2(graphFileName)
    } else {
      graphFileName
    }

    val CorpusWorkPath = UnitexJni.combineUnitexFileComponent(baseWorkDir, "workUnitexThread" + Thread.currentThread().getId())
    println("will work on "+CorpusWorkPath)
    UnitexJni.createUnitexFolder(CorpusWorkPath)
    UnitexJni.createUnitexFolder(UnitexJni.combineUnitexFileComponent(CorpusWorkPath,"corpus_snt"))

    var res = ""
    val startT = System.currentTimeMillis()
    0.until(nbLoop).foreach { i =>
      res = processUnitexWork(ressourceDir, workingDicoFileName, workingGraphFileName, CorpusWorkPath,
        "I want watch at "+ ((i%10)+1)+":02 am see at 6:00 pm before leave at 15.47")
    }
    val endT = System.currentTimeMillis()

    // debug : you can remove this line to inspect files
    UnitexJni.removeUnitexFolder(CorpusWorkPath)

    if (fusepersist) {
      UnitexJni.freePersistentDictionary(workingDicoFileName)
      UnitexJni.freePersistentFst2(workingGraphFileName)
    }

    // NOTE(review): the original printed the result/time block twice
    // (copy-paste duplication); it is printed once now.
    println("")
    println("result:")
    println(res)
    println("time : "+(endT-startT)+" ms (average "+ ((endT-startT)/nbLoop)+" ms per iteration)")
  }
}
| aborsu/scala-unitex | src/main/scala/Main.scala | Scala | gpl-3.0 | 9,543 |
/*
* This file is part of Gwaihir
* Copyright (C) 2013, 2014 Alvaro Polo
*
* Gwaihir is free software: you can redistribute it and/or modify it under the terms of the GNU
* General Public License as published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* Gwaihir is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with Gwaihir. If not,
* see <http://www.gnu.org/licenses/>.
*/
package org.oac.gwaihir.core
import org.scalatest.{Matchers, FlatSpec}
class EventChannelTest extends FlatSpec with Matchers {

  /** Event payload used throughout these tests. */
  case class DummyEvent(msg: String)

  /** Records the most recent (sender, event) pair seen by the subscription. */
  trait CallbackUtils {
    var lastSender: Option[DeviceId] = None
    var lastEvent: Option[DummyEvent] = None

    val observe: PartialFunction[(DeviceId, Any), Unit] = {
      case (from: DeviceId, ev: DummyEvent) =>
        lastSender = Some(from)
        lastEvent = Some(ev)
    }
  }

  /** Device ids used as senders and subscription targets. */
  trait SampleDevices {
    val ac = DeviceId("/elec/ac")
    val acGen1 = DeviceId("/elec/ac/gen-1")
    val dcEssBus = DeviceId("/elec/dc/ess-bus")
  }

  trait SampleEvents {
    val devRunning = DummyEvent("running")
  }

  /** A brand-new channel plus the fixtures above, one per test case. */
  trait FreshChannel extends CallbackUtils with SampleDevices with SampleEvents {
    val chan = EventChannel()
  }

  "Event channel" must "subscribe and send events" in new FreshChannel {
    chan.subscribe(acGen1)(observe)
    chan.send(acGen1, devRunning)
    lastSender shouldBe Some(acGen1)
    lastEvent shouldBe Some(devRunning)
  }

  it must "send events to subscription on a sender ancestor" in new FreshChannel {
    chan.subscribe(ac)(observe)
    chan.send(acGen1, devRunning)
    lastSender shouldBe Some(acGen1)
    lastEvent shouldBe Some(devRunning)
  }

  it must "not send events to subscription on a non-sender ancestor" in new FreshChannel {
    chan.subscribe(ac)(observe)
    chan.send(dcEssBus, devRunning)
    lastSender shouldBe None
    lastEvent shouldBe None
  }

  it must "not send anything when no subscription is done" in new FreshChannel {
    chan.send(dcEssBus, devRunning)
    lastSender shouldBe None
    lastEvent shouldBe None
  }
}
| apoloval/gwaihir | src/test/scala/org/oac/gwaihir/core/EventChannelTest.scala | Scala | gpl-2.0 | 2,400 |
package lila.setup
import chess.format.FEN
import scala.concurrent.ExecutionContext
import lila.game.{ Game, IdGenerator, Player, Pov, Source }
import lila.lobby.Color
import lila.user.User
/** Configuration of a game against the computer opponent: variant, time
  * settings, AI strength `level`, the creator's color and an optional
  * starting position as FEN.
  */
case class AiConfig(
    variant: chess.variant.Variant,
    timeMode: TimeMode,
    time: Double,
    increment: Int,
    days: Int,
    level: Int, // AI strength, 1 to 8 (see AiConfig.levels)
    color: Color,
    fen: Option[FEN] = None
) extends Config
    with Positional {

  // Positions supplied via FEN are validated strictly.
  val strictFen = true

  // Tuple view of this config (mirrors the parameter order of AiConfig.from),
  // used to fill the corresponding form.
  def >> = (variant.id, timeMode.id, time, increment, days, level, color.name, fen).some

  /** Builds the casual Game: the creator's side gets the human player (if
    * any), the opposite side gets an AI player at `level`. The game receives
    * a unique id and is started immediately.
    */
  private def game(user: Option[User])(implicit idGenerator: IdGenerator): Fu[Game] =
    fenGame { chessGame =>
      val perfPicker = lila.game.PerfPicker.mainOrDefault(
        chess.Speed(chessGame.clock.map(_.config)),
        chessGame.situation.board.variant,
        makeDaysPerTurn
      )
      Game
        .make(
          chess = chessGame,
          // NOTE(review): assumes creatorColor.fold(ifWhite, ifBlack) -- the
          // creator's color determines which side is human vs AI; confirm
          // against lila.lobby.Color.
          whitePlayer = creatorColor.fold(
            Player.make(chess.White, user, perfPicker),
            Player.make(chess.White, level.some)
          ),
          blackPlayer = creatorColor.fold(
            Player.make(chess.Black, level.some),
            Player.make(chess.Black, user, perfPicker)
          ),
          mode = chess.Mode.Casual,
          source = if (chessGame.board.variant.fromPosition) Source.Position else Source.Ai,
          daysPerTurn = makeDaysPerTurn,
          pgnImport = None
        )
        .withUniqueId
    }.dmap(_.start)

  /** The created game seen from the creator's point of view. */
  def pov(user: Option[User])(implicit idGenerator: IdGenerator) = game(user) dmap { Pov(_, creatorColor) }

  // From-position games are only valid with time >= 1; other variants are
  // unrestricted here.
  def timeControlFromPosition = variant != chess.variant.FromPosition || time >= 1
}
object AiConfig extends BaseConfig {

  /** Rebuilds an [[AiConfig]] from raw form values, failing on unknown ids. */
  def from(v: Int, tm: Int, t: Double, i: Int, d: Int, level: Int, c: String, fen: Option[FEN]) = {
    val variant = chess.variant.Variant(v) err s"Invalid game variant $v"
    val mode    = TimeMode(tm) err s"Invalid time mode $tm"
    val color   = Color(c) err s"Invalid color $c"
    new AiConfig(variant, mode, t, i, d, level, color, fen)
  }

  val default = AiConfig(
    variantDefault,
    TimeMode.Unlimited,
    time = 5d,
    increment = 8,
    days = 2,
    level = 1,
    color = Color.default
  )

  // Available AI strength levels (1..8) and the corresponding form choices.
  val levels = List.tabulate(8)(_ + 1)

  val levelChoices = levels.map { level =>
    val label = level.toString
    (label, label, none)
  }

  import lila.db.BSON
  import lila.db.dsl._

  /** MongoDB persistence of AiConfig; field names/order form the wire format. */
  implicit private[setup] val aiConfigBSONHandler = new BSON[AiConfig] {

    def reads(r: BSON.Reader): AiConfig =
      AiConfig(
        variant = chess.variant.Variant orDefault (r int "v"),
        timeMode = TimeMode orDefault (r int "tm"),
        time = r double "t",
        increment = r int "i",
        days = r int "d",
        level = r int "l",
        color = Color.White, // color is not persisted; always read back as White
        fen = r.getO[FEN]("f").filter(_.value.nonEmpty)
      )

    def writes(w: BSON.Writer, o: AiConfig) =
      $doc(
        "v"  -> o.variant.id,
        "tm" -> o.timeMode.id,
        "t"  -> o.time,
        "i"  -> o.increment,
        "d"  -> o.days,
        "l"  -> o.level,
        "f"  -> o.fen
      )
  }
}
| luanlv/lila | modules/setup/src/main/AiConfig.scala | Scala | mit | 3,151 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.blob.api
import org.locationtech.geomesa.utils.geotools.{SftBuilder, SimpleFeatureTypes}
object GeoMesaBlobStoreSFT {

  // Name of the SimpleFeatureType that stores blob metadata entries.
  val BlobFeatureTypeName = "blob"

  // Attribute names of the blob feature type.
  val IdFieldName        = "storeId"
  val GeomFieldName      = "geom"
  val FilenameFieldName  = "filename"
  val DtgFieldName       = "dtg"
  val ThumbnailFieldName = "thumbnail"

  // TODO: Add metadata hashmap?
  // TODO GEOMESA-1186 allow for configurable geometry types
  // Blob SFT: a filename, an indexed storeId, the default geometry and date
  // attributes, and a thumbnail; mixed geometry types are explicitly allowed
  // via the MIXED_GEOMETRIES user-data flag.
  val sft = new SftBuilder()
    .stringType(FilenameFieldName)
    .stringType(IdFieldName, index = true)
    .geometry(GeomFieldName, default = true)
    .date(DtgFieldName, default = true)
    .stringType(ThumbnailFieldName)
    .userData(SimpleFeatureTypes.Configs.MIXED_GEOMETRIES, "true")
    .build(BlobFeatureTypeName)
}
| MutahirKazmi/geomesa | geomesa-blobstore/geomesa-blobstore-api/src/main/scala/org/locationtech/geomesa/blob/api/GeoMesaBlobStoreSFT.scala | Scala | apache-2.0 | 1,260 |
package artisanal.pickle.maker
import models._
import parser._
import org.specs2._
import mutable._
import specification._
import scala.reflect.internal.pickling.ByteCodecs
import scala.tools.scalap.scalax.rules.scalasig._
import com.novus.salat.annotations.util._
import scala.reflect.ScalaSignature
class ListAnySpec extends mutable.Specification {

  "a ScalaSig for case class MyRecord_ListAny(ab: List[Any])" should {
    "have the correct string" in {
      // Signature generated by hand for the record under test.
      val handMade = new artisanal.pickle.maker.ScalaSig(
        List("case class"),
        List("models", "MyRecord_ListAny"),
        List(("ab", "List[Any]")))

      // Reference: the compiler-emitted @ScalaSignature annotation, parsed.
      val reference = SigParserHelper.parseByteCodeFromAnnotation(classOf[MyRecord_ListAny])
        .map(ScalaSigAttributeParsers.parse(_)).get
      // The hand-made signature, run through the same parser.
      val generated = SigParserHelper.parseByteCodeFromMySig(handMade)
        .map(ScalaSigAttributeParsers.parse(_)).get

      reference.toString === generated.toString
    }
  }
}
| julianpeeters/artisanal-pickle-maker | src/test/scala/singleValueMember/ListSpecs/List[Any]Spec.scala | Scala | apache-2.0 | 926 |
package io.plasmap.geo.producing
import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.scaladsl.{Broadcast, Flow, GraphDSL, Sink, Source}
import akka.stream.{ActorMaterializer, SinkShape}
import com.typesafe.config.ConfigFactory
import com.typesafe.scalalogging.Logger
import io.plasmap.geo.util.KafkaTopics._
import io.plasmap.geo.util.KafkaUtil._
import io.plasmap.model.{OsmNode, OsmObject, OsmRelation, OsmWay}
import io.plasmap.parser.OsmParser
import io.plasmap.serializer.OsmSerializer._
import org.slf4j.LoggerFactory
import scala.concurrent.duration.Deadline
import scala.io.{Codec, StdIn}
/**
* Main class for producing osm elements to Kafka MQ.
*
* @author Jan Schulte <jan@plasmap.io>
*/
object OsmProducer {

  lazy val log = Logger(LoggerFactory.getLogger(OsmProducer.getClass.getName))

  lazy implicit val actorSystem = ActorSystem("producer")
  lazy implicit val materializer = ActorMaterializer()

  lazy val config = ConfigFactory.load()
  lazy val kafkaHost = config.getString("plasmap.producing.kafka")
  lazy val zkHost = config.getString("plasmap.producing.zk")

  // Progress counters. NOTE(review): these are plain vars mutated from stream
  // stages without synchronization -- counts are best-effort only.
  var producedMessagesCount = 0L
  var parsedMessagesCount = 0L

  /** Pass-through counter for parsed elements; logs every 100k elements. */
  def countParsed[T](parsed: T): T = {
    if (parsedMessagesCount % 100000 == 0) {
      log.info(s"Parsed $parsedMessagesCount elements in total.")
    }
    parsedMessagesCount += 1
    parsed
  }

  var lastTimestamp = Deadline.now
  var lastProducedMessagesCount = 0L

  /** Pass-through counter for produced elements; logs the production rate at
    * most once per second (whenever at least one second has elapsed).
    */
  def countProduced[T](encoded: T): T = {
    val timestamp = Deadline.now
    // postfix call; relies on duration arithmetic of scala.concurrent.duration
    val timeframe: Long = (timestamp - lastTimestamp) toSeconds

    if (timeframe >= 1) {
      lastTimestamp = timestamp
      val messageDifference = producedMessagesCount - lastProducedMessagesCount
      val messageRate = messageDifference / timeframe
      log.info(s"Produced $producedMessagesCount elements in total at a rate of: $messageDifference / $timeframe s (=$messageRate/s)")
      lastProducedMessagesCount = producedMessagesCount
    }
    producedMessagesCount += 1
    encoded
  }

  var messageSize = 0L

  /** Pass-through accumulator for serialized message sizes; logs the average
    * bytes/element every 100k produced messages.
    * NOTE(review): not referenced anywhere in this file -- confirm whether it
    * is still needed.
    */
  def countSize(encoded: Array[Byte]): Array[Byte] = {
    val currentSize = encoded.length
    messageSize += currentSize
    if (producedMessagesCount % 100000 == 0) {
      val messageSizeDifference = if (producedMessagesCount == 0) 0L else messageSize / producedMessagesCount
      log.info(s"Message size at $messageSizeDifference bytes / element")
      lastProducedMessagesCount = producedMessagesCount
    }
    encoded
  }

  /** Parses the OSM file, skips `offset` elements, and streams the rest into
    * Kafka, one topic per element kind (nodes/ways/relations).
    * NOTE(review): the `typ` parameter is accepted but never used here.
    *
    * @param fileName path of the OSM file to parse
    * @param offset   number of parsed elements to skip before producing
    * @param typ      unused; kept for the command-line interface
    */
  def produce(fileName: String, offset: Int = 0, typ: Option[String] = None) = {

    val kafka = kafkaSink(kafkaHost)

    // Parsed elements, with parse failures (None) dropped and progress counted.
    val source: Source[OsmObject, NotUsed] =
      parser(fileName)
        .collect { case Some(element) => element }
        .drop(offset)
        .map(countParsed)

    // One sink per element kind: count, serialize, wrap as producer record.
    val nodeSink = Flow[OsmNode]
      .map(countProduced)
      .map(toBinary)
      .map(bytesToProducerRecord(nodesTopic))
      .to(kafka)

    val waySink = Flow[OsmWay]
      .map(countProduced)
      .map(toBinary)
      .map(bytesToProducerRecord(waysTopic))
      .to(kafka)

    val relationSink = Flow[OsmRelation]
      .map(countProduced)
      .map(toBinary)
      .map(bytesToProducerRecord(relationsTopic))
      .to(kafka)

    val branch = branchElements(nodeSink, waySink, relationSink)

    log.info(s"${Console.GREEN}Producing at kafka[$kafkaHost], zk[$zkHost]${Console.RESET}")

    source.to(branch).run()
    log.info(s"Running producer...")
  }

  /** Fans every OsmObject out to all three sinks; each sink keeps only its
    * own element kind via collect, so each element ends up in exactly one
    * Kafka topic.
    */
  def branchElements(nodeSink: Sink[OsmNode, NotUsed], waySink: Sink[OsmWay, NotUsed], relationSink: Sink[OsmRelation, NotUsed]): Sink[OsmObject, NotUsed] =
    Sink.fromGraph(GraphDSL.create() { implicit builder =>
      import GraphDSL.Implicits._
      val broadcast = builder.add(Broadcast[OsmObject](3))

      broadcast.out(0) ~> Flow[OsmObject]
        .collect { case node: OsmNode => node } ~> nodeSink
      broadcast.out(1) ~> Flow[OsmObject]
        .collect { case way: OsmWay => way } ~> waySink
      broadcast.out(2) ~> Flow[OsmObject]
        .collect { case relation: OsmRelation => relation } ~> relationSink

      SinkShape(broadcast.in)
    })

  def displayUsage() = {
    println("Usage OsmProducer <fileName> [offset] [typ]")
  }

  /** CLI entry point: dispatch on argument count, then block for user input. */
  def main(args: Array[String]): Unit = {
    args match {
      case Array(fileName, offset, typ) =>
        produce(fileName, offset.toInt, Some(typ))
      case Array(fileName, offset) =>
        produce(fileName, offset.toInt)
      case Array(fileName) =>
        produce(fileName, 0)
      case _ => displayUsage()
    }
    waitForUserInput()
  }

  /** Blocks until the user presses enter, then shuts the actor system down
    * and exits the JVM.
    */
  def waitForUserInput(): Unit = {
    log.info(s"${Console.RED}Press key to exit.${Console.RESET}")
    StdIn.readLine()
    import scala.concurrent.ExecutionContext.Implicits.global
    val f = actorSystem.terminate()
    f.onComplete {
      case _ ⇒ System.exit(0)
    }
  }

  /** A Source of parse results; a fresh OsmParser is created per materialization. */
  def parser(fileName: String) = {
    val parser = () => OsmParser(fileName)(Codec.UTF8)
    Source.fromIterator(parser)
  }
}
| plasmap/plasmap | processing/src/main/scala/io/plasmap/geo/producing/OsmProducer.scala | Scala | apache-2.0 | 4,982 |
package com.taxis99.amazon.serializers
import play.api.libs.json.Json
import test.BaseSpec
class SerializersSpec extends BaseSpec {

  // Round-trip fixture covering strings, numbers, booleans, doubles,
  // nested objects and arrays.
  private val fixture = Json.obj(
    "str" -> "foo bar",
    "num" -> 100,
    "bool" -> true,
    "double" -> 1.23,
    "map" -> Map("a" -> "b"),
    "list" -> Seq(1, 2, 4))

  "PlayJson" should "encode and decode a JsValue back and forth" in {
    val encoded = PlayJson.encode(fixture)
    PlayJson.decode(encoded) shouldBe fixture
  }

  "MsgPack" should "encode and decode a JsValue back and forth" in {
    val encoded = MsgPack.encode(fixture)
    MsgPack.decode(encoded) shouldBe fixture
  }
}
| 99Taxis/common-sqs | src/test/scala/com/taxis99/amazon/serializers/SerializersSpec.scala | Scala | apache-2.0 | 537 |
/**
* Licensed to the Minutemen Group under one or more contributor license
* agreements. See the COPYRIGHT file distributed with this work for
* additional information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package silhouette.provider.social
import io.circe.Json
import scala.concurrent.Future
/**
* Builds the social profile.
*/
trait SocialProfileBuilder {
  self: SocialProvider[_] => // may only be mixed into a SocialProvider

  /**
   * The content type to parse a profile from.
   *
   * The content type is mostly JSON. A concrete provider is able to override it if needed.
   */
  type Content = Json

  /**
   * The type of the profile a profile builder is responsible for.
   */
  type Profile <: SocialProfile

  /**
   * Subclasses need to implement this method to populate the profile information from the service provider.
   *
   * @param authInfo The auth info received from the provider.
   * @return On success the build social profile, otherwise a failure.
   */
  protected def buildProfile(authInfo: A): Future[Profile]

  /**
   * Returns the profile parser implementation.
   *
   * @return The profile parser implementation.
   */
  protected def profileParser: SocialProfileParser[Content, Profile, A]
}
/**
* The profile builder for the common social profile.
*/
trait CommonProfileBuilder {
  self: SocialProfileBuilder => // requires the profile builder infrastructure

  /**
   * The type of the profile a profile builder is responsible for.
   *
   * Fixes the abstract `Profile` member to the common social profile.
   */
  type Profile = CommonSocialProfile
}
| mohiva/silhouette | modules/provider-social/src/main/scala/silhouette/provider/social/SocialProfileBuilder.scala | Scala | apache-2.0 | 1,993 |
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.2
* @date Sun Mar 10 20:54:50 EDT 2013
* @see LICENSE (MIT style license file).
*-----------------------------------------------------------------------------
* @see Linear Programming and Network Flows, Bazaraa and Jarvis
* www.wiley.com/WileyCDA/WileyTitle/productCd-0470462728,subjectCd-BA04.html
* @see Algorithms, 4th Edition, Robert Sedgewick and Kevin Wayne
* www.cs.princeton.edu/algs4/63or/Simplex.java.html
* @see en.wikibooks.org/wiki/Operations_Research/The_Simplex_Method
*/
package scalation.minima
import math.abs
import util.control.Breaks.{breakable, break}
import scalation.linalgebra.{MatrixD, VectorD}
import scalation.random.Randi
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** This class solves Linear Programming (LP) problems using a tableau based
* Simplex Algorithm. Given a constraint matrix 'a', limit/RHS vector 'b' and
* cost vector 'c', find values for the solution/decision vector 'x' that minimize
* the objective function f(x), while satisfying all of the constraints, i.e.,
*
* minimize f(x) = c x
* subject to a x <= b, x >= 0
*
* In case of "a_i x >= b_i", use -b_i as an indicator of a ">=" constraint.
* The program will flip such negative b_i back to positive as well as use
* a surplus variable instead of the usual slack variable, i.e.,
* a_i x <= b_i => a_i x + s_i = b_i // use slack variable s_i with coeff 1
* a_i x >= b_i => a_i x + s_i = b_i // use surplus variable s_i with coeff -1
*
* Creates an MM-by-NN simplex tableau with
* -- [0..M-1, 0..N-1] = a (constraint matrix)
* -- [0..M-1, N..M+N-1] = s (slack/surplus variable matrix)
* -- [0..M-1, NN-1] = b (limit/RHS vector)
* -- [M, 0..NN-2] = c (cost vector)
*
* @param a the M-by-N constraint matrix
* @param b the M-length limit/RHS vector (input b_i negative for surplus)
* @param c the N-length cost vector
* @param x_B the indices of the initial basis (if not available use Simple2P)
* @param n_eq the number of equality contraints (must come last)
*/
class Simplex (a: MatrixD, b: VectorD, c: VectorD, x_B: Array [Int], n_eq: Int = 0)
extends MinimizerLP
{
private val DANTIZ = true // use Dantiz's pivot rule, else Bland's
private val DEBUG = false // DEBUG mode => show all pivot steps
private val CHECK = true // CHECK mode => check feasibility for each pivot
private val _0 = 0.0 // zero, for Floating Point Error (FPE) try setting to EPSILON
private val M = a.dim1 // the number of constraints
private val N = a.dim2 // the number of decision variables
private val MpN = M + N - n_eq // the number of variables (decision/slack/surplus)
private val MM = M + 1 // # rows in tableau
private val NN = MpN + 1 // # columns in tableau
private val JJ = NN - 1 // the last column (b)
private val MAX_ITER = 200 * N // maximum number of iterations
private var flip = 1.0 // 1(slack) or -1(surplus) depending on b_i
if (b.dim != M) flaw ("constructor", "b.dim = " + b.dim + " != " + M)
if (c.dim != N) flaw ("constructor", "c.dim = " + c.dim + " != " + N)
if (x_B.length != M) flaw ("constructor", "x_B.length = " + x_B.length + " != " + M)
private val t = new MatrixD (MM, NN) // the MM-by-NN simplex tableau
for (i <- 0 until M) {
flip = if (b(i) < _0) -1.0 else 1.0
t.set (i, a(i)) // col x: constraint matrix a
t(i, N+i) = flip // col y: slack/surplus variable matrix s
t(i, JJ) = b(i) * flip // col b: limit/RHS vector b
} // for
t(M)(0 until N) = -c // set cost row (M) in the tableau to given cost
val checker = new CheckLP (a, b, c) // checker determines if the LP solution is correct
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** In case there are no surplus variables (only slacks), the slack variables
* can form an inittial basis.
* @param a the M-by-N constraint matrix
* @param b the M-length limit/RHS vector (input b_i negative for surplus)
* @param c the N-length cost vector
*/
def this (a: MatrixD, b: VectorD, c: VectorD)
{
this (a, b, c, Array.range (a.dim2, a.dim1 + a.dim2))
} // constructor
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Find the best variable x_l to enter the basis. Determine the index of
* entering variable corresponding to COLUMN l (e.g., using Dantiz's Rule
* or Bland's Rule). Return -1 to indicate no such column.
* t(M).argmaxPos (JJ) // use Dantiz's rule (index of max +ve, cycling possible)
* t(M).firstPos (JJ) // use Bland's rule (index of first +ve, FPE possible)
*/
def entering (): Int =
{
if (DANTIZ) t(M).argmaxPos (JJ) else t(M).firstPos (JJ)
} // entering
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Find the best variable x_k to leave the basis given that x_l is entering.
* Determine the index of the leaving variable corresponding to ROW k using
* the Min-Ratio Rule. Return -1 to indicate no such row.
* @param l the entering variable (column)
*/
def leaving (l: Int): Int =
{
val b_ = t.col (JJ) // updated b column (RHS)
var k = -1
for (i <- 0 until M if t(i, l) > _0) { // find the pivot row
if (k == -1) k = i
else if (b_(i) / t(i, l) <= b_(k) / t(k, l)) k = i // lower ratio => reset k
} // for
if (k == -1) flaw ("leaving", "the solution is UNBOUNDED")
k
} // leaving
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Pivot on entry (k, l) using Gauss-Jordan elimination to replace variable
* x_k with x_l in the basis.
* @param k the leaving variable (row)
* @param l the entering variable (column)
*/
def pivot (k: Int, l: Int)
{
println ("pivot: entering = " + l + " leaving = " + k)
t(k) /= t(k, l) // make pivot 1
for (i <- 0 to M if i != k) t(i) -= t(k) * t(i, l) // zero rest of column l
x_B(k) = l // update basis (l replaces k)
} // pivot
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Run the Simplex Algorithm starting from an initial BFS and iteratively
* find a non-basic variable to replace a variable in the current basis
* so long as the objective function improves. Return the optimal vector x.
*/
def solve (): VectorD =
{
if (DEBUG) showTableau (0) // for iter = 0
var k = -1 // the leaving variable (row)
var l = -1 // the entering variable (column)
breakable { for (it <- 1 to MAX_ITER) {
l = entering (); if (l == -1) break // -1 => optimal solution found
k = leaving (l); if (k == -1) break // -1 => solution is unbounded
pivot (k, l) // pivot: k leaves and l enters
if (CHECK && infeasible) break // quit if infeasible
if (DEBUG) showTableau (it)
}} // for
primal // return the optimal vector x
} // solve
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Determine whether the current solution (x = primal) is still primal feasible.
*/
def infeasible: Boolean =
{
if ( ! checker.isPrimalFeasible (primal)) {
flaw ("infeasible", "solution x is no longer PRIMAL FEASIBLE")
true
} else {
false
} // if
} // infeasible
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the primal solution vector x (only the basic variables are non-zero).
*/
def primal: VectorD =
{
val x = new VectorD (N)
for (i <- 0 until M if x_B(i) < N) x(x_B(i)) = t(i, JJ) // RHS value
x
} // primal
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the dual solution vector y (cost row (M) under the slack columns).
*/
def dual: VectorD = t(M)(N until MpN)
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the value of the objective function f(x) = c x.
*/
def objF (x: VectorD): Double = t(M, JJ) // bottom, right cell in tableau
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Show the current tableau.
* @param iter the number of iterations do far
*/
def showTableau (iter: Int)
{
    // separator, current tableau, then the iteration count
    println ("showTableau: --------------------------------------------------------")
    println (this)
    println (s"showTableau: after $iter iterations, with limit of $MAX_ITER")
} // showTableau
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Convert the current tableau and basis to a string suitable for display.
*/
override def toString: String =
{
    var s = new StringBuilder ()                  // NOTE(review): never reassigned; could be a val
    for (i <- 0 to M) {                           // rows 0 until M are constraints, row M is the cost row
        s ++= (if (i == 0) "tableau = | " else " | ")
        for (j <- 0 until JJ-1) s++= "%8.3f, ".format (t(i, j))
        s ++= "%8.3f | %8.3f |\\n".format (t(i, JJ-1), t(i, JJ))
    } // for
    s ++= "basis = " + x_B.deep                   // indices of the current basic variables
    s.toString
} // toString
} // Simplex class
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** This object is used to test the Simplex class.
*/
object SimplexTest extends App
{
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Test the Simplex Algorithm for solving Linear Programming problems.
     *  Builds a Simplex instance, solves, and prints the primal/dual solutions,
     *  the objective value and the result of the optimality check.
     *  @param a the constraint matrix
     *  @param b the limit/RHS vector
     *  @param c the cost vector
     *  @param x_B the indices of the intial basis (currently unused, see below)
     */
    def test (a: MatrixD, b: VectorD, c: VectorD, x_B: Array [Int] = null)
    {
        // val lp = new Simplex (a, b, c, x_B) // test with user specified basis
        val lp = new Simplex (a, b, c) // test with default basis
        val x = lp.solve () // the primal solution vector x
        val y = lp.dual // the dual solution vector y
        val f = lp.objF (x) // the minimum value of the objective function
        println ("primal x = " + x)
        println ("dual y = " + y)
        println ("objF f = " + f)
        println ("optimal? = " + lp.check (x, y, f))
    } // test

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Test case 1: Initialize matrix 'a', vectors 'b' and 'c', and optionally
     *  the basis 'x_B'. For Simplex, matrix 'a' and vector 'c' are not augmented.
     *-------------------------------------------------------------------------
     *  Minimize z = -1x_0 - 2x_1 + 1x_2 - 1x_3 - 4x_4 + 2x_5
     *  Subject to 1x_0 + 1x_1 + 1x_2 + 1x_3 + 1x_4 + 1x_5 <= 6
     *             2x_0 - 1x_1 - 2x_2 + 1x_3 + 0x_4 + 0x_5 <= 4
     *             0x_0 + 0x_1 + 1x_2 + 1x_3 + 2x_4 + 1x_5 <= 4
     *  where z is the objective variable and x is the decision vector.
     *-------------------------------------------------------------------------
     *  Solution: primal x_1 = 4, x_7 = 8, x_4 = 2
     *            dual y_1 = -2, y_2 = 0, y_3 = -1
     *            objF f = -16
     *  i.e., x = (4, 8, 2), x_B = (1, 7, 4), y = (-2, 0, -1), f = -16
     *  @see Linear Programming and Network Flows, Example 5.1
     */
    def test1 ()
    {
        val a = new MatrixD ((3, 6), 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, // constraint matrix
                                     2.0, -1.0, -2.0, 1.0, 0.0, 0.0,
                                     0.0, 0.0, 1.0, 1.0, 2.0, 1.0)
        val c = VectorD (-1.0, -2.0, 1.0, -1.0, -4.0, 2.0) // cost vector
        val b = VectorD (6.0, 4.0, 4.0) // constant vector
        val x_B = Array (6, 7, 8) // starting basis (defined but not passed)
        test (a, b, c) // x_B is optional
    } // test1

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Test case 2:
     *  Solution: x = (2/3, 10/3, 0), x_B = (0, 1, 5), f = -22/3
     *  @see Linear Programming and Network Flows, Example 5.2
     */
    def test2 ()
    {
        val a = new MatrixD ((3, 3), 1.0, 1.0, 1.0, // constraint matrix
                                     -1.0, 2.0, -2.0,
                                     2.0, 1.0, 0.0)
        val c = VectorD (-1.0, -2.0, 1.0) // cost vector
        val b = VectorD (4.0, 6.0, 5.0) // constant vector
        val x_B = Array (3, 4, 5) // starting basis
        test (a, b, c, x_B)
    } // test2

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Test case 3:
     *  Solution: x = (1/3, 0, 13/3), x_B = (0, 2, 4), f = -17
     *  @see Linear Programming and Network Flows, Example 3.9
     */
    def test3 ()
    {
        val a = new MatrixD ((3, 3), 1.0, 1.0, 2.0, // constraint matrix
                                     1.0, 1.0, -1.0,
                                     -1.0, 1.0, 1.0)
        val c = VectorD (1.0, 1.0, -4.0) // cost vector
        val b = VectorD (9.0, 2.0, 4.0) // constant vector
        val x_B = Array (3, 4, 5) // starting basis
        test (a, b, c, x_B)
    } // test3

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Test case 4: randomly generated LP problem (100 x 100, integer data).
     */
    def test4 ()
    {
        val rn = Randi (0, 8)
        val (m, n) = (100, 100)
        val a = new MatrixD (m, n)
        val b = new VectorD (m)
        val c = new VectorD (n)
        for (i <- 0 until m; j <- 0 until n) a(i, j) = rn.igen
        for (i <- 0 until m) b(i) = 100.0 * (rn.igen + 1) // positive RHS
        for (j <- 0 until n) c(j) = -10.0 * (rn.igen + 1) // negative costs
        test (a, b, c, null)
    } // test4

    println ("\\ntest1 ========================================================")
    test1 ()
    println ("\\ntest2 ========================================================")
    test2 ()
    println ("\\ntest3 ========================================================")
    test3 ()
    println ("\\ntest4 ========================================================")
    test4 ()
} // SimplexTest object
| mvnural/scalation | src/main/scala/scalation/minima/Simplex.scala | Scala | mit | 15,548 |
package ml.sparkling.graph.loaders.csv.providers
import ml.sparkling.graph.loaders.csv.types.Types
import ml.sparkling.graph.loaders.csv.types.Types.ToVertexId
import org.apache.spark.graphx.{Edge, Graph, VertexId}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.sql.SparkSession;
import scala.reflect.ClassTag
/**
* Created by Roman Bartusiak (roman.bartusiak@pwr.edu.pl http://riomus.github.io).
*/
object GraphProviders {
  // Default persistence level for both vertex and edge RDDs.
  val defaultStorageLevel=StorageLevel.MEMORY_ONLY

  /** Build a GraphX graph from a DataFrame using per-row vertex and edge extractors.
   *  @param defaultVertex value used for vertices referenced by edges but not produced
   *                       by vertexProvider (None => no default passed to GraphX)
   *  @param vertexProvider maps one row to zero or more (id, vertex attribute) pairs
   *  @param edgeProvider maps one row to zero or more edges
   */
  def simpleGraphBuilder[VD: ClassTag, ED: ClassTag](defaultVertex: Option[VD]=None,
                                                     vertexProvider: Row => Seq[(VertexId, VD)],
                                                     edgeProvider: Row => Seq[Edge[ED]],
                                                     edgeStorageLevel: StorageLevel = defaultStorageLevel,
                                                     vertexStorageLevel: StorageLevel =defaultStorageLevel)
                                                    (dataFrame: DataFrame): Graph[VD, ED] = {

    // Apply the extractor to every row of every partition (partition id is unused).
    def mapRows[MT: ClassTag](mappingFunction: (Row) => Seq[MT]): RDD[MT] = {
      dataFrame.rdd.mapPartitionsWithIndex((id, rowIterator) => {
        rowIterator.flatMap { case row => mappingFunction(row) }
      })
    }

    val vertices: RDD[(VertexId, VD)] = mapRows(vertexProvider)
    val edges: RDD[Edge[ED]] = mapRows(edgeProvider)
    defaultVertex match{
      case None => Graph(vertices,edges,edgeStorageLevel=edgeStorageLevel,vertexStorageLevel=vertexStorageLevel)
      case Some(defaultVertexValue)=> Graph(vertices,edges,defaultVertexValue,edgeStorageLevel,vertexStorageLevel)
    }
  }

  /** Build a graph whose vertex ids are assigned by indexing the distinct values
   *  found in the given columns.
   *  @param columnsToIndex positions of the columns whose values identify vertices
   */
  def indexedGraphBuilder[VD:ClassTag, ED: ClassTag](defaultVertex: Option[VD]=None,
                                                     vertexProvider: (Row, ToVertexId[VD]) => Seq[(VertexId, VD)],
                                                     edgeProvider: (Row, ToVertexId[VD]) => Seq[Edge[ED]],
                                                     columnsToIndex: Seq[Int],
                                                     edgeStorageLevel: StorageLevel = defaultStorageLevel,
                                                     vertexStorageLevel: StorageLevel = defaultStorageLevel)
                                                    (dataFrame: DataFrame): Graph[VD, ED] = {
    // Map each distinct indexed value to a unique id.  NOTE(review): this is
    // collected to the driver, so it assumes the set of distinct vertex values
    // fits in driver memory -- confirm for large inputs.
    val index = dataFrame.rdd.flatMap(row => columnsToIndex.map(row(_))).distinct().zipWithUniqueId().collect().toMap

    def extractIdFromIndex(vertex: VD) = index(vertex)

    simpleGraphBuilder(defaultVertex,
      vertexProvider(_: Row, extractIdFromIndex _),
      edgeProvider(_: Row, extractIdFromIndex _),
      edgeStorageLevel,
      vertexStorageLevel)(dataFrame)
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler.local
import java.io.File
import java.net.URL
import java.nio.ByteBuffer
import org.apache.spark.{SparkConf, SparkContext, SparkEnv, TaskState}
import org.apache.spark.TaskState.TaskState
import org.apache.spark.executor.{Executor, ExecutorBackend}
import org.apache.spark.internal.{config, Logging}
import org.apache.spark.launcher.{LauncherBackend, SparkAppHandle}
import org.apache.spark.rpc.{RpcCallContext, RpcEndpointRef, RpcEnv, ThreadSafeRpcEndpoint}
import org.apache.spark.scheduler._
import org.apache.spark.scheduler.cluster.ExecutorInfo
import org.apache.spark.util.Utils
// Internal messages exchanged between LocalSchedulerBackend and LocalEndpoint.
// Parameterless messages are case objects: the previous `case class ReviveOffers()`
// form only matched `case ReviveOffers =>` because senders happened to pass the
// companion object -- an actual instance `ReviveOffers()` would not have matched.
private case object ReviveOffers

private case class StatusUpdate(taskId: Long, state: TaskState, serializedData: ByteBuffer)

private case class KillTask(taskId: Long, interruptThread: Boolean, reason: String)

private case object StopExecutor
/**
* Calls to [[LocalSchedulerBackend]] are all serialized through LocalEndpoint. Using an
* RpcEndpoint makes the calls on [[LocalSchedulerBackend]] asynchronous, which is necessary
* to prevent deadlock between [[LocalSchedulerBackend]] and the [[TaskSchedulerImpl]].
*/
private[spark] class LocalEndpoint(
    override val rpcEnv: RpcEnv,
    userClassPath: Seq[URL],
    scheduler: TaskSchedulerImpl,
    executorBackend: LocalSchedulerBackend,
    private val totalCores: Int)
  extends ThreadSafeRpcEndpoint with Logging {

  // Number of cores not currently assigned to a running task.
  private var freeCores = totalCores

  val localExecutorId = SparkContext.DRIVER_IDENTIFIER
  val localExecutorHostname = Utils.localCanonicalHostName()

  // The single in-process executor that actually runs the tasks.
  private val executor = new Executor(
    localExecutorId, localExecutorHostname, SparkEnv.get, userClassPath, isLocal = true)

  override def receive: PartialFunction[Any, Unit] = {
    case ReviveOffers =>
      reviveOffers()

    case StatusUpdate(taskId, state, serializedData) =>
      scheduler.statusUpdate(taskId, state, serializedData)
      if (TaskState.isFinished(state)) {
        freeCores += scheduler.CPUS_PER_TASK   // task finished: return its cores
        reviveOffers()                         // and try to schedule more work
      }

    case KillTask(taskId, interruptThread, reason) =>
      executor.killTask(taskId, interruptThread, reason)
  }

  override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = {
    case StopExecutor =>
      executor.stop()
      context.reply(true)
  }

  /** Offer all currently free cores to the scheduler and launch any tasks it returns. */
  def reviveOffers() {
    // local mode doesn't support extra resources like GPUs right now
    val offers = IndexedSeq(new WorkerOffer(localExecutorId, localExecutorHostname, freeCores,
      Some(rpcEnv.address.hostPort)))
    for (task <- scheduler.resourceOffers(offers).flatten) {
      freeCores -= scheduler.CPUS_PER_TASK
      executor.launchTask(executorBackend, task)
    }
  }
}
/**
* Used when running a local version of Spark where the executor, backend, and master all run in
* the same JVM. It sits behind a [[TaskSchedulerImpl]] and handles launching tasks on a single
* Executor (created by the [[LocalSchedulerBackend]]) running locally.
*/
private[spark] class LocalSchedulerBackend(
    conf: SparkConf,
    scheduler: TaskSchedulerImpl,
    val totalCores: Int)
  extends SchedulerBackend with ExecutorBackend with Logging {

  private val appId = "local-" + System.currentTimeMillis
  // RPC handle to the LocalEndpoint; set in start().
  private var localEndpoint: RpcEndpointRef = null
  private val userClassPath = getUserClasspath(conf)
  private val listenerBus = scheduler.sc.listenerBus
  private val launcherBackend = new LauncherBackend() {
    override def conf: SparkConf = LocalSchedulerBackend.this.conf
    override def onStopRequest(): Unit = stop(SparkAppHandle.State.KILLED)
  }

  /**
   * Returns a list of URLs representing the user classpath.
   *
   * @param conf Spark configuration.
   */
  def getUserClasspath(conf: SparkConf): Seq[URL] = {
    val userClassPathStr = conf.get(config.EXECUTOR_CLASS_PATH)
    userClassPathStr.map(_.split(File.pathSeparator)).toSeq.flatten.map(new File(_).toURI.toURL)
  }

  launcherBackend.connect()

  override def start() {
    val rpcEnv = SparkEnv.get.rpcEnv
    val executorEndpoint = new LocalEndpoint(rpcEnv, userClassPath, scheduler, this, totalCores)
    localEndpoint = rpcEnv.setupEndpoint("LocalSchedulerBackendEndpoint", executorEndpoint)
    // Announce the single local "executor" (the driver itself) on the listener bus.
    listenerBus.post(SparkListenerExecutorAdded(
      System.currentTimeMillis,
      executorEndpoint.localExecutorId,
      new ExecutorInfo(executorEndpoint.localExecutorHostname, totalCores, Map.empty,
        Map.empty)))
    launcherBackend.setAppId(appId)
    launcherBackend.setState(SparkAppHandle.State.RUNNING)
  }

  override def stop() {
    stop(SparkAppHandle.State.FINISHED)
  }

  override def reviveOffers() {
    localEndpoint.send(ReviveOffers)
  }

  override def defaultParallelism(): Int =
    scheduler.conf.getInt("spark.default.parallelism", totalCores)

  override def killTask(
      taskId: Long, executorId: String, interruptThread: Boolean, reason: String) {
    localEndpoint.send(KillTask(taskId, interruptThread, reason))
  }

  override def statusUpdate(taskId: Long, state: TaskState, serializedData: ByteBuffer) {
    localEndpoint.send(StatusUpdate(taskId, state, serializedData))
  }

  override def applicationId(): String = appId

  override def maxNumConcurrentTasks(): Int = totalCores / scheduler.CPUS_PER_TASK

  private def stop(finalState: SparkAppHandle.State): Unit = {
    // NOTE(review): the Future returned by ask is not awaited, so shutdown does
    // not wait for the executor to finish stopping -- confirm this is intended.
    localEndpoint.ask(StopExecutor)
    try {
      launcherBackend.setState(finalState)
    } finally {
      launcherBackend.close()
    }
  }
}
| pgandhi999/spark | core/src/main/scala/org/apache/spark/scheduler/local/LocalSchedulerBackend.scala | Scala | apache-2.0 | 6,265 |
package com.sksamuel.elastic4s.requests.indexes
case class IndexTemplateExistsRequest()
| sksamuel/elastic4s | elastic4s-domain/src/main/scala/com/sksamuel/elastic4s/requests/indexes/IndexTemplateExistsRequest.scala | Scala | apache-2.0 | 89 |
/** **\\
** Copyright (c) 2012 Center for Organic and Medicinal Chemistry **
** Zurich University of Applied Sciences **
** Wädenswil, Switzerland **
\\** **/
package chemf.parser
import chemf._, Stereo.Undefined
import scalaz._, Scalaz._
/**
* @author Stefan Höck
*/
trait SmilesBuilder[A] {
  // A state transition: transforms the accumulated value, or fails with a ValRes error.
  type STrans = A ⇒ ValRes[A]

  // The initial (empty) accumulator value.
  val empty: A

  /** Add an atom with explicit isotope, charge, hydrogen count, aromaticity,
   *  stereo descriptor and atom class. */
  def addAtom (
    isotope: Isotope,
    charge: Int,
    hydrogens: Option[Int],
    aromatic: Boolean,
    stereo: Stereo,
    atomClass: Int
  ): STrans

  /** Add a non-aromatic atom of element e with all-default attributes. */
  def addElem (e: Element) = addAtom(Isotope(e), 0, None, false, Undefined, 0)

  /** Add an aromatic atom of element e with all-default attributes. */
  def addAromElem (e: Element) =
    addAtom (Isotope(e), 0, None, true, Undefined, 0)

  def clear: STrans

  def closeBranch: STrans

  def openBranch: STrans

  def ring (i: Int): STrans

  // Two-digit ring label: combines digit chars, e.g. ('1','2') -> ring(12).
  def ring (a: Char, b: Char): STrans = ring(a.asDigit * 10 + b.asDigit)

  def setBond (b: Bond): STrans

  def setDbStereo (c: Char): STrans
}
// vim: set ts=2 sw=2 et:
| stefan-hoeck/chemf | src/main/scala/chemf/parser/SmilesBuilder.scala | Scala | gpl-3.0 | 1,141 |
package org.geow.serializer.test
import org.specs2.mutable.Specification
import org.scalacheck.{ Arbitrary, Gen }
import org.scalacheck._
import org.scalacheck.Test._
import Gen._
import Arbitrary.arbitrary
import org.specs2.runner._
import org.junit.runner._
import scala.util.Try
import scala.concurrent.Await
import scala.concurrent.duration._
import org.specs2.ScalaCheck
import Prop.forAll
import org.scalacheck.Arbitrary.arbitrary
import org.geow.model._
import org.geow.model.geometry._
import org.geow.serializer.OsmSerializer._
import org.geow.generator.OsmObjectGenerator
@RunWith(classOf[JUnitRunner])
class OsmSerializerTest extends Specification with ScalaCheck{
sequential
val generator = OsmObjectGenerator()
def nodeGenerator = Gen.resultOf[Int,OsmNode](t => generator.generateNode)
implicit def osmNodesArb = Arbitrary { nodeGenerator }
def wayGenerator = Gen.resultOf[Int,OsmWay](t => generator.generateWay)
implicit def osmWaysArb = Arbitrary { wayGenerator }
def relationGenerator = Gen.resultOf[Int,OsmRelation](t => generator.generateRelation)
implicit def osmRelationsArb = Arbitrary { relationGenerator }
"The OsmSerializer" should {
"serialize and deserialize an OsmNode object" ! check(prop{ osmNode: OsmNode =>
{
val serialized = toBinary(osmNode)
val deserialized = fromBinary(serialized)
deserialized must be_==(osmNode)
}
})
"serialize and deserialize an OsmWay object" ! check(prop{ osmWay: OsmWay =>
{
val serialized = toBinary(osmWay)
val deserialized = fromBinary(serialized)
deserialized must be_==(osmWay)
}
})
"serialize and deserialize an OsmRelation object" ! check(prop{ osmRelation: OsmRelation =>
{
val serialized = toBinary(osmRelation)
val deserialized = fromBinary(serialized)
deserialized must be_==(osmRelation)
}
})
}
} | geow-org/api | src/test/scala/org/geow/serializer/test/OsmSerializerTest.scala | Scala | apache-2.0 | 1,952 |
/*
* Copyright 2014 Treode, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.treode.disk
import com.treode.async.{Async, Callback}, Async.async
import com.treode.async.implicits._
import Detacher._
/** Assist the LogWriter and PageWWriter in beginning a drain.
*
* When the user wants to drain a disk drive, its writers may be either enqueued in a dispatcher
* awaiting a batch of items, or serializing and flushing a batch. In the first case, the writer
* can detach immediately but must return the batch when it finally receives one from the
* dispatcher. In the second case, the writer cannot detach until after it completes its flush.
* The detacher tracks that writer's status and coordinates starting a drain.
*
* @param draining Should the detacher start already detached?
*/
private class Detacher (draining: Boolean) {

  // Current writer status; all reads and writes are guarded by this object's monitor.
  private var state: State =
    if (draining) Detached else Waiting

  /** The user wants to start draining the disk drive.
    * @return When the writer will not write any more items.
    */
  def startDraining(): Async [Unit] =
    async { cb =>
      synchronized {
        state match {
          case Waiting =>
            // Not mid-flush: detach immediately and complete the callback now.
            state = Detached
            cb.pass (())
          case Flushing =>
            // Mid-flush: remember the callback; finishFlush() will complete it.
            state = Detaching (cb)
          case _ =>
            // We do not expect to start a drain twice.
            throw new IllegalStateException
        }}}

  /** The writer has received items and it might begin serializing and flushing them.
    * @return true if the writer can write the items, false if it should return them.
    */
  def startFlush(): Boolean =
    synchronized {
      state match {
        case Waiting =>
          state = Flushing
          true
        case Detached =>
          false
        case _ =>
          // We do not expect to start a flush when already flushing.
          throw new IllegalStateException
      }}

  /** The writer has finished serializing and flushing items, and it might enqueue itself to for
    * another batch.
    * @return true if the writer can await another batch, false if it should not.
    */
  def finishFlush(): Boolean =
    synchronized {
      state match {
        case Flushing =>
          state = Waiting
          true
        case Detaching (cb) =>
          // A drain was requested during the flush: complete its callback now.
          state = Detached
          cb.pass (())
          false
        case _ =>
          // We do not expect to finish a flush when not flushing.
          throw new IllegalStateException
      }}}
private object Detacher {

  // States of the writer, as tracked by Detacher.
  sealed abstract class State

  /** The writer is enqueued with a dispatcher and waiting to receive a batch of records or pages.
    */
  case object Waiting extends State

  /** The writer is serializing and flushing a batch; we have _not_ received a request to drain.
    */
  case object Flushing extends State

  /** The writer is serializing and flushing a batch; we _have_ received a request to start
    * draining.  The callback is completed when the flush finishes.
    */
  case class Detaching (cb: Callback [Unit]) extends State

  /** The writer is not enqueued with a dispatcher, nor is it serializing and flushing a batch.
    */
  case object Detached extends State
}
package org.scaladebugger.api.lowlevel.threads
import com.sun.jdi.request.{EventRequestManager, ThreadStartRequest}
import org.scaladebugger.api.lowlevel.requests.Implicits._
import org.scaladebugger.api.lowlevel.requests.JDIRequestArgument
import org.scaladebugger.api.lowlevel.requests.properties.{EnabledProperty, SuspendPolicyProperty}
import org.scaladebugger.api.utils.{Logging, MultiMap}
import scala.util.Try
/**
* Represents the manager for thread start requests.
*
* @param eventRequestManager The manager used to create thread start requests
*/
class StandardThreadStartManager(
  private val eventRequestManager: EventRequestManager
) extends ThreadStartManager with Logging {
  // Maps request info (key) and request id to the underlying JDI request.
  private val threadStartRequests =
    new MultiMap[ThreadStartRequestInfo, ThreadStartRequest]

  /**
   * Retrieves the list of thread start requests contained by this manager.
   *
   * @return The collection of thread start requests in the form of ids
   */
  override def threadStartRequestList: Seq[String] = threadStartRequests.ids

  /**
   * Creates a new thread start request for the specified class and method.
   *
   * @param requestId The id of the request used to retrieve and delete it
   * @param extraArguments Any additional arguments to provide to the request
   *
   * @return Success(id) if successful, otherwise Failure
   */
  override def createThreadStartRequestWithId(
    requestId: String,
    extraArguments: JDIRequestArgument*
  ): Try[String] = {
    // Requests are created enabled and suspend only the event's thread.
    val request = Try(eventRequestManager.createThreadStartRequest(
      Seq(
        EnabledProperty(value = true),
        SuspendPolicyProperty.EventThread
      ) ++ extraArguments: _*
    ))

    // Only record the request if JDI creation succeeded.
    if (request.isSuccess) {
      logger.trace(s"Created thread start request with id '$requestId'")
      threadStartRequests.putWithId(
        requestId,
        ThreadStartRequestInfo(requestId, isPending = false, extraArguments),
        request.get
      )
    }

    // If no exception was thrown, assume that we succeeded
    request.map(_ => requestId)
  }

  /**
   * Determines if a thread start request with the specified id.
   *
   * @param id The id of the Thread Start Request
   *
   * @return True if a thread start request with the id exists, otherwise false
   */
  override def hasThreadStartRequest(id: String): Boolean = {
    threadStartRequests.hasWithId(id)
  }

  /**
   * Retrieves the thread start request using the specified id.
   *
   * @param id The id of the Thread Start Request
   *
   * @return Some thread start request if it exists, otherwise None
   */
  override def getThreadStartRequest(id: String): Option[ThreadStartRequest] = {
    threadStartRequests.getWithId(id)
  }

  /**
   * Retrieves the information for a thread start request with the
   * specified id.
   *
   * @param id The id of the Thread Start Request
   *
   * @return Some information about the request if it exists, otherwise None
   */
  override def getThreadStartRequestInfo(id: String): Option[ThreadStartRequestInfo] = {
    threadStartRequests.getKeyWithId(id)
  }

  /**
   * Removes the specified thread start request.
   *
   * @param id The id of the Thread Start Request
   *
   * @return True if the thread start request was removed (if it existed),
   *         otherwise false
   */
  override def removeThreadStartRequest(id: String): Boolean = {
    val request = threadStartRequests.removeWithId(id)

    // Also delete the underlying JDI request so the VM stops reporting events.
    request.foreach(eventRequestManager.deleteEventRequest)

    request.nonEmpty
  }
}
| ensime/scala-debugger | scala-debugger-api/src/main/scala/org/scaladebugger/api/lowlevel/threads/StandardThreadStartManager.scala | Scala | apache-2.0 | 3,497 |
package appcache
import java.io.{ File, FileNotFoundException }
import play.api.test._
import play.api.test.Helpers._
import org.specs2.mutable._
class ResourceSpec extends Specification {

  // Resource.apply dispatches on the path form: "@..." => assets,
  // relative/wildcard => RelativeResource, http(s) URL => URLResource.
  "Resouce.apply" should {
    "return AssetResource for asset directory path string" in {
      running(FakeApplication()) {
        val res = Resource("@/test/assets", """/test/assets/""".r, "/assets/")
        res must have size 3
        res(0).isInstanceOf[AssetResource] must beTrue
      }
    }
    "return AssetResource for asset file path string" in {
      running(FakeApplication()) {
        val res = Resource("@/test/assets/test.coffee", """/test/assets/""".r, "/assets/")
        res must have size 1
        res(0).isInstanceOf[AssetResource] must beTrue
      }
    }
    "throw NoSouchElementException for not found asset path string" in {
      running(FakeApplication()) {
        {
          Resource("@/test/assets/aaa", """/test/assets/""".r, "/assets/")
        } must throwA[FileNotFoundException]
      }
    }
    "return RelativeResource for HTTP path string" in {
      running(FakeApplication()) {
        val res = Resource("/", """/test/assets/""".r, "/assets/")
        res must have size 1
        res(0).isInstanceOf[RelativeResource] must beTrue
      }
    }
    "return URLResource for HTTP URL string" in {
      running(FakeApplication()) {
        val res = Resource("http://example.com", """/test/assets/""".r, "/assets/")
        res must have size 1
        res(0).isInstanceOf[URLResource] must beTrue
      }
    }
    "return URLResource for HTTPS URL string" in {
      running(FakeApplication()) {
        val res = Resource("https://example.com", """/test/assets/""".r, "/assets/")
        res must have size 1
        res(0).isInstanceOf[URLResource] must beTrue
      }
    }
  }

  "AssetResouce#lastModified" should {
    "return not 0" in {
      running(FakeApplication()) {
        val res = Resource("@/test/assets/test.coffee", """/test/assets/""".r, "/assets/")
        res(0).lastModified must_!= 0
      }
    }
  }

  // In Dev mode compiled assets map to plain names; in Prod mode to .min names.
  "AssetResouce#toString" should {
    "return /assets/test.js" in {
      running(FakeApplication()) {
        val res = Resource("@/test/assets/test.coffee", """/test/assets/""".r, "/assets/")
        res(0).toString must_== "/assets/test.js"
      }
    }
    "return /assets/test.css" in {
      running(FakeApplication()) {
        val res = Resource("@/test/assets/test.less", """/test/assets/""".r, "/assets/")
        res(0).toString must_== "/assets/test.css"
      }
    }
    "return /assets/test_sass.css" in {
      running(FakeApplication()) {
        val res = Resource("@/test/assets/test_sass.scss", """/test/assets/""".r, "/assets/")
        res(0).toString must_== "/assets/test_sass.css"
      }
    }
    "return /assets/test.min.js" in {
      running(new FakeApplication() {
        override val mode = play.api.Mode.Prod
      }) {
        val res = Resource("@/test/assets/test.coffee", """/test/assets/""".r, "/assets/")
        res(0).toString must_== "/assets/test.min.js"
      }
    }
    "return /assets/test.min.css" in {
      running(new FakeApplication() {
        override val mode = play.api.Mode.Prod
      }) {
        val res = Resource("@/test/assets/test.less", """/test/assets/""".r, "/assets/")
        res(0).toString must_== "/assets/test.min.css"
      }
    }
    "return /assets/test_sass.min.css" in {
      running(new FakeApplication() {
        override val mode = play.api.Mode.Prod
      }) {
        val res = Resource("@/test/assets/test_sass.scss", """/test/assets/""".r, "/assets/")
        res(0).toString must_== "/assets/test_sass.min.css"
      }
    }
  }

  // Non-asset resources carry no modification timestamp.
  "RelativeResouce#lastModified" should {
    "return 0" in {
      running(FakeApplication()) {
        val res = Resource("/", """/test/assets/""".r, "/assets/")
        res(0).lastModified must_== 0
      }
    }
  }

  "RelativeResouce#toString" should {
    "return *" in {
      running(FakeApplication()) {
        val res = Resource("*", """/test/assets/""".r, "/assets/")
        res(0).toString must_== "*"
      }
    }
  }

  "URLResouce#lastModified" should {
    "return 0" in {
      running(FakeApplication()) {
        val res = Resource("http://example.com", """/test/assets/""".r, "/assets/")
        res(0).lastModified must_== 0
      }
    }
  }

  "URLResouce#toString" should {
    "return http://example.com" in {
      running(FakeApplication()) {
        val res = Resource("http://example.com", """/test/assets/""".r, "/assets/")
        res(0).toString must_== "http://example.com"
      }
    }
  }
}
| hnakagawa/play2-appcache | project-code/test/appcache/ResourceSpec.scala | Scala | apache-2.0 | 4,675 |
class C1 { // resolve-test fixture: line positions are semantic, do not add lines
  val v: Int = 1
}
class C2 extends C1 {
  println(/* line: 2 */ v) // marker: `v` must resolve to the member declared on file line 2
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.v2.csv
import org.apache.hadoop.mapreduce.{Job, TaskAttemptContext}
import org.apache.spark.sql.catalyst.csv.CSVOptions
import org.apache.spark.sql.catalyst.util.CompressionCodecs
import org.apache.spark.sql.execution.datasources.{CodecStreams, OutputWriter, OutputWriterFactory}
import org.apache.spark.sql.execution.datasources.csv.CsvOutputWriter
import org.apache.spark.sql.execution.datasources.v2.FileWriteBuilder
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{DataType, StructType}
import org.apache.spark.sql.util.CaseInsensitiveStringMap
class CSVWriteBuilder(
    options: CaseInsensitiveStringMap,
    paths: Seq[String],
    formatName: String,
    supportsDataType: DataType => Boolean)
  extends FileWriteBuilder(options, paths, formatName, supportsDataType) {

  /**
   * Configures the Hadoop job for CSV output and returns a factory that
   * produces one [[CsvOutputWriter]] per write task.
   */
  override def prepareWrite(
      sqlConf: SQLConf,
      job: Job,
      options: Map[String, String],
      dataSchema: StructType): OutputWriterFactory = {
    val conf = job.getConfiguration
    val csvOptions = new CSVOptions(
      options,
      columnPruning = sqlConf.csvColumnPruning,
      sqlConf.sessionLocalTimeZone)
    // Propagate the requested compression codec (if any) to the Hadoop config.
    csvOptions.compressionCodec.foreach { codec =>
      CompressionCodecs.setCodecConfiguration(conf, codec)
    }

    new OutputWriterFactory {
      override def newInstance(
          path: String,
          dataSchema: StructType,
          context: TaskAttemptContext): OutputWriter = {
        new CsvOutputWriter(path, dataSchema, context, csvOptions)
      }

      // e.g. ".csv.gz" when a compression codec is configured
      override def getFileExtension(context: TaskAttemptContext): String = {
        ".csv" + CodecStreams.getCompressionExtension(context)
      }
    }
  }
}
| pgandhi999/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/csv/CSVWriteBuilder.scala | Scala | apache-2.0 | 2,525 |
package com.github.ellchow.scaramouch
import scalaz.{ Ordering => _, _ }, Scalaz._
import scalaz.stream._, Process._
import scalaz.concurrent._
import argonaut._, Argonaut._
import com.github.ellchow.scaramouch.util._
import com.github.ellchow.scaramouch.util.Path._
package object collection {

  /** Binary search over a sorted indexed sequence.
   *
   *  Returns the matching index on the right when `a` is present; otherwise
   *  the left holds the index at which `a` would be inserted to keep `xs`
   *  sorted (which is always the final lower bound of the search).
   */
  def binarySearch[A : math.Ordering](xs: scala.collection.IndexedSeq[A], a: A):
    Int \/ Int = {
    val ord = implicitly[math.Ordering[A]]

    @annotation.tailrec
    def go(lo: Int, hi: Int): Int \/ Int =
      if (lo > hi) {
        lo.left                              // not found: lo is the insertion point
      } else {
        val mid = lo + (hi - lo) / 2         // midpoint without overflow
        if (ord.equiv(xs(mid), a)) mid.right
        else if (ord.lt(a, xs(mid))) go(lo, mid - 1)
        else go(mid + 1, hi)
      }

    go(0, xs.size - 1)
  }
}
| ellchow/scaramouch | scaramouch-collection/src/main/scala/com/github/ellchow/scaramouch/collection/package.scala | Scala | apache-2.0 | 1,204 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.{lang => jl}
import java.util.Locale
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.util.NumberConverter
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
/**
 * A leaf expression specifically for math constants. Math constants expect no input.
 *
 * There is no code generation because they should get constant folded by the optimizer.
 *
 * @param c The math constant.
 * @param name The short name of the function
 */
abstract class LeafMathExpression(c: Double, name: String)
  extends LeafExpression with CodegenFallback with Serializable {
  override def dataType: DataType = DoubleType
  // Always foldable so the optimizer can replace the call with the literal constant.
  override def foldable: Boolean = true
  override def nullable: Boolean = false
  override def toString: String = s"$name()"
  override def prettyName: String = name
  // Evaluation ignores the input row entirely and yields the constant.
  override def eval(input: InternalRow): Any = c
}
/**
 * A unary expression specifically for math functions. Math Functions expect a specific type of
 * input format, therefore these functions extend `ExpectsInputTypes`.
 *
 * @param f The math function.
 * @param name The short name of the function
 */
abstract class UnaryMathExpression(val f: Double => Double, name: String)
  extends UnaryExpression with Serializable with ImplicitCastInputTypes {
  override def inputTypes: Seq[AbstractDataType] = Seq(DoubleType)
  override def dataType: DataType = DoubleType
  override def nullable: Boolean = true
  override def toString: String = s"$name($child)"
  override def prettyName: String = name
  protected override def nullSafeEval(input: Any): Any = {
    f(input.asInstanceOf[Double])
  }
  // name of function in java.lang.Math; subclasses whose SQL name differs from
  // the java.lang.Math method name override this (e.g. Rint, ToDegrees).
  def funcName: String = name.toLowerCase(Locale.ROOT)
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, c => s"java.lang.Math.${funcName}($c)")
  }
}
/**
 * Base class for logarithm-style functions: inputs at or below `yAsymptote`
 * evaluate to null (Hive-compatible) instead of NaN or -Infinity.
 */
abstract class UnaryLogExpression(f: Double => Double, name: String)
  extends UnaryMathExpression(f, name) {
  override def nullable: Boolean = true
  // values less than or equal to yAsymptote eval to null in Hive, instead of NaN or -Infinity
  protected val yAsymptote: Double = 0.0
  protected override def nullSafeEval(input: Any): Any = {
    val d = input.asInstanceOf[Double]
    if (d <= yAsymptote) null else f(d)
  }
  // Generated code mirrors nullSafeEval: null out-of-domain inputs, otherwise
  // call straight into java.lang.Math.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    nullSafeCodeGen(ctx, ev, c =>
      s"""
        if ($c <= $yAsymptote) {
          ${ev.isNull} = true;
        } else {
          ${ev.value} = java.lang.Math.${funcName}($c);
        }
      """
    )
  }
}
/**
 * A binary expression specifically for math functions that take two `Double`s as input and returns
 * a `Double`.
 *
 * @param f The math function.
 * @param name The short name of the function
 */
abstract class BinaryMathExpression(f: (Double, Double) => Double, name: String)
  extends BinaryExpression with Serializable with ImplicitCastInputTypes {
  override def inputTypes: Seq[DataType] = Seq(DoubleType, DoubleType)
  override def toString: String = s"$name($left, $right)"
  override def prettyName: String = name
  override def dataType: DataType = DoubleType
  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    f(input1.asInstanceOf[Double], input2.asInstanceOf[Double])
  }
  // Assumes the lower-cased SQL name matches a two-argument java.lang.Math
  // method; subclasses with different names (e.g. Pow) override doGenCode.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, (c1, c2) =>
      s"java.lang.Math.${name.toLowerCase(Locale.ROOT)}($c1, $c2)")
  }
}
////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
// Leaf math functions
////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
/**
 * Euler's number. Note that there is no code generation because this is only
 * evaluated by the optimizer during constant folding.
 */
@ExpressionDescription(
  usage = "_FUNC_() - Returns Euler's number, e.",
  extended = """
    Examples:
      > SELECT _FUNC_();
       2.718281828459045
  """)
// Folds to the literal math.E via LeafMathExpression.eval.
case class EulerNumber() extends LeafMathExpression(math.E, "E")
/**
 * Pi. Note that there is no code generation because this is only
 * evaluated by the optimizer during constant folding.
 */
@ExpressionDescription(
  usage = "_FUNC_() - Returns pi.",
  extended = """
    Examples:
      > SELECT _FUNC_();
       3.141592653589793
  """)
// Folds to the literal math.Pi via LeafMathExpression.eval.
case class Pi() extends LeafMathExpression(math.Pi, "PI")
////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
// Unary math functions
////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
// scalastyle:off line.size.limit
// Each of the following delegates to the identically named java.lang.Math
// method through UnaryMathExpression (both eval and codegen paths).
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the inverse cosine (a.k.a. arccosine) of `expr` if -1<=`expr`<=1 or NaN otherwise.",
  extended = """
    Examples:
      > SELECT _FUNC_(1);
       0.0
      > SELECT _FUNC_(2);
       NaN
  """)
// scalastyle:on line.size.limit
case class Acos(child: Expression) extends UnaryMathExpression(math.acos, "ACOS")
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the inverse sine (a.k.a. arcsine) the arc sin of `expr` if -1<=`expr`<=1 or NaN otherwise.",
  extended = """
    Examples:
      > SELECT _FUNC_(0);
       0.0
      > SELECT _FUNC_(2);
       NaN
  """)
// scalastyle:on line.size.limit
case class Asin(child: Expression) extends UnaryMathExpression(math.asin, "ASIN")
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the inverse tangent (a.k.a. arctangent).",
  extended = """
    Examples:
      > SELECT _FUNC_(0);
       0.0
  """)
// scalastyle:on line.size.limit
case class Atan(child: Expression) extends UnaryMathExpression(math.atan, "ATAN")
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the cube root of `expr`.",
  extended = """
    Examples:
      > SELECT _FUNC_(27.0);
       3.0
  """)
case class Cbrt(child: Expression) extends UnaryMathExpression(math.cbrt, "CBRT")
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the smallest integer not smaller than `expr`.",
  extended = """
    Examples:
      > SELECT _FUNC_(-0.1);
       0
      > SELECT _FUNC_(5);
       5
  """)
case class Ceil(child: Expression) extends UnaryMathExpression(math.ceil, "CEIL") {
  override def dataType: DataType = child.dataType match {
    // A scale-0 decimal is already integral, so the type is unchanged.
    case dt @ DecimalType.Fixed(_, 0) => dt
    // Dropping the fractional digits may carry one extra integral digit
    // (e.g. ceil(9.1) = 10), hence precision - scale + 1.
    case DecimalType.Fixed(precision, scale) =>
      DecimalType.bounded(precision - scale + 1, 0)
    case _ => LongType
  }
  override def inputTypes: Seq[AbstractDataType] =
    Seq(TypeCollection(DoubleType, DecimalType))
  protected override def nullSafeEval(input: Any): Any = child.dataType match {
    case DoubleType => f(input.asInstanceOf[Double]).toLong
    case DecimalType.Fixed(precision, scale) => input.asInstanceOf[Decimal].ceil
  }
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    child.dataType match {
      // Scale-0 decimal: identity, no Math call emitted.
      case DecimalType.Fixed(_, 0) => defineCodeGen(ctx, ev, c => s"$c")
      case DecimalType.Fixed(precision, scale) =>
        defineCodeGen(ctx, ev, c => s"$c.ceil()")
      case _ => defineCodeGen(ctx, ev, c => s"(long)(java.lang.Math.${funcName}($c))")
    }
  }
}
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the cosine of `expr`.",
  extended = """
    Examples:
      > SELECT _FUNC_(0);
       1.0
  """)
// `expr` is an angle in radians, per java.lang.Math.cos.
case class Cos(child: Expression) extends UnaryMathExpression(math.cos, "COS")
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the hyperbolic cosine of `expr`.",
  extended = """
    Examples:
      > SELECT _FUNC_(0);
       1.0
  """)
case class Cosh(child: Expression) extends UnaryMathExpression(math.cosh, "COSH")
/**
 * Convert a num from one base to another
 *
 * @param numExpr the number to be converted
 * @param fromBaseExpr from which base
 * @param toBaseExpr to which base
 */
@ExpressionDescription(
  usage = "_FUNC_(num, from_base, to_base) - Convert `num` from `from_base` to `to_base`.",
  extended = """
    Examples:
      > SELECT _FUNC_('100', 2, 10);
       4
      > SELECT _FUNC_(-10, 16, -10);
       16
  """)
case class Conv(numExpr: Expression, fromBaseExpr: Expression, toBaseExpr: Expression)
  extends TernaryExpression with ImplicitCastInputTypes {
  override def children: Seq[Expression] = Seq(numExpr, fromBaseExpr, toBaseExpr)
  override def inputTypes: Seq[AbstractDataType] = Seq(StringType, IntegerType, IntegerType)
  override def dataType: DataType = StringType
  // Nullable because NumberConverter.convert can return null (see codegen below).
  override def nullable: Boolean = true
  override def nullSafeEval(num: Any, fromBase: Any, toBase: Any): Any = {
    NumberConverter.convert(
      num.asInstanceOf[UTF8String].getBytes,
      fromBase.asInstanceOf[Int],
      toBase.asInstanceOf[Int])
  }
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Fully qualified name of the NumberConverter Scala object, callable from
    // generated Java once the trailing `$` is stripped.
    val numconv = NumberConverter.getClass.getName.stripSuffix("$")
    nullSafeCodeGen(ctx, ev, (num, from, to) =>
      s"""
       ${ev.value} = $numconv.convert($num.getBytes(), $from, $to);
       if (${ev.value} == null) {
         ${ev.isNull} = true;
       }
       """
    )
  }
}
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns e to the power of `expr`.",
  extended = """
    Examples:
      > SELECT _FUNC_(0);
       1.0
  """)
case class Exp(child: Expression) extends UnaryMathExpression(math.exp, "EXP")
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns exp(`expr`) - 1.",
  extended = """
    Examples:
      > SELECT _FUNC_(0);
       0.0
  """)
// Delegates to java.lang.Math.expm1 via UnaryMathExpression.
case class Expm1(child: Expression) extends UnaryMathExpression(math.expm1, "EXPM1")
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the largest integer not greater than `expr`.",
  extended = """
    Examples:
      > SELECT _FUNC_(-0.1);
       -1
      > SELECT _FUNC_(5);
       5
  """)
case class Floor(child: Expression) extends UnaryMathExpression(math.floor, "FLOOR") {
  override def dataType: DataType = child.dataType match {
    // A scale-0 decimal is already integral, so the type is unchanged.
    case dt @ DecimalType.Fixed(_, 0) => dt
    // Dropping the fractional digits may carry one extra integral digit
    // (e.g. floor(-9.1) = -10), hence precision - scale + 1.
    case DecimalType.Fixed(precision, scale) =>
      DecimalType.bounded(precision - scale + 1, 0)
    case _ => LongType
  }
  override def inputTypes: Seq[AbstractDataType] =
    Seq(TypeCollection(DoubleType, DecimalType))
  protected override def nullSafeEval(input: Any): Any = child.dataType match {
    case DoubleType => f(input.asInstanceOf[Double]).toLong
    case DecimalType.Fixed(precision, scale) => input.asInstanceOf[Decimal].floor
  }
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    child.dataType match {
      // Scale-0 decimal: identity, no Math call emitted.
      case DecimalType.Fixed(_, 0) => defineCodeGen(ctx, ev, c => s"$c")
      case DecimalType.Fixed(precision, scale) =>
        defineCodeGen(ctx, ev, c => s"$c.floor()")
      case _ => defineCodeGen(ctx, ev, c => s"(long)(java.lang.Math.${funcName}($c))")
    }
  }
}
object Factorial {
  /**
   * Returns n! for 0 <= n <= 20; any n > 20 saturates to Long.MaxValue
   * because 21! overflows a signed 64-bit long.
   *
   * @throws IllegalArgumentException if `n` is negative. (Previously a negative
   *         input surfaced as an opaque ArrayIndexOutOfBoundsException from the
   *         lookup table; callers in this file guard n to [0, 20] already.)
   */
  def factorial(n: Int): Long = {
    require(n >= 0, s"factorial is undefined for negative input: $n")
    if (n < factorials.length) factorials(n) else Long.MaxValue
  }
  // Precomputed n! for n in [0, 20]; 20! is the largest factorial that fits in a Long.
  private val factorials: Array[Long] = Array[Long](
    1,
    1,
    2,
    6,
    24,
    120,
    720,
    5040,
    40320,
    362880,
    3628800,
    39916800,
    479001600,
    6227020800L,
    87178291200L,
    1307674368000L,
    20922789888000L,
    355687428096000L,
    6402373705728000L,
    121645100408832000L,
    2432902008176640000L
  )
}
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the factorial of `expr`. `expr` is [0..20]. Otherwise, null.",
  extended = """
    Examples:
      > SELECT _FUNC_(5);
       120
  """)
case class Factorial(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
  override def inputTypes: Seq[DataType] = Seq(IntegerType)
  override def dataType: DataType = LongType
  // If the value not in the range of [0, 20], it still will be null, so set it to be true here.
  override def nullable: Boolean = true
  protected override def nullSafeEval(input: Any): Any = {
    val value = input.asInstanceOf[jl.Integer]
    // Out of range: null rather than overflow (21! does not fit in a Long).
    if (value > 20 || value < 0) {
      null
    } else {
      Factorial.factorial(value)
    }
  }
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Generated Java applies the same [0, 20] range check before calling the
    // companion object's lookup.
    nullSafeCodeGen(ctx, ev, eval => {
      s"""
        if ($eval > 20 || $eval < 0) {
          ${ev.isNull} = true;
        } else {
          ${ev.value} =
            org.apache.spark.sql.catalyst.expressions.Factorial.factorial($eval);
        }
      """
    })
  }
}
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the natural logarithm (base e) of `expr`.",
  extended = """
    Examples:
      > SELECT _FUNC_(1);
       0.0
  """)
case class Log(child: Expression) extends UnaryLogExpression(math.log, "LOG")
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the logarithm of `expr` with base 2.",
  extended = """
    Examples:
      > SELECT _FUNC_(2);
       1.0
  """)
case class Log2(child: Expression)
  extends UnaryLogExpression((x: Double) => math.log(x) / math.log(2), "LOG2") {
  // Custom codegen: java.lang.Math has no log2, so emit log(x)/log(2).
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    nullSafeCodeGen(ctx, ev, c =>
      s"""
        if ($c <= $yAsymptote) {
          ${ev.isNull} = true;
        } else {
          ${ev.value} = java.lang.Math.log($c) / java.lang.Math.log(2);
        }
      """
    )
  }
}
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the logarithm of `expr` with base 10.",
  extended = """
    Examples:
      > SELECT _FUNC_(10);
       1.0
  """)
case class Log10(child: Expression) extends UnaryLogExpression(math.log10, "LOG10")
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns log(1 + `expr`).",
  extended = """
    Examples:
      > SELECT _FUNC_(0);
       0.0
  """)
case class Log1p(child: Expression) extends UnaryLogExpression(math.log1p, "LOG1P") {
  // log1p is defined for expr > -1, so the null asymptote shifts from 0 to -1.
  protected override val yAsymptote: Double = -1.0
}
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the double value that is closest in value to the argument and is equal to a mathematical integer.",
  extended = """
    Examples:
      > SELECT _FUNC_(12.3456);
       12.0
  """)
// scalastyle:on line.size.limit
case class Rint(child: Expression) extends UnaryMathExpression(math.rint, "ROUND") {
  // SQL-facing name is ROUND, but the java.lang.Math method is `rint`.
  override def funcName: String = "rint"
}
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns -1.0, 0.0 or 1.0 as `expr` is negative, 0 or positive.",
  extended = """
    Examples:
      > SELECT _FUNC_(40);
       1.0
  """)
case class Signum(child: Expression) extends UnaryMathExpression(math.signum, "SIGNUM")
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the sine of `expr`.",
  extended = """
    Examples:
      > SELECT _FUNC_(0);
       0.0
  """)
case class Sin(child: Expression) extends UnaryMathExpression(math.sin, "SIN")
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the hyperbolic sine of `expr`.",
  extended = """
    Examples:
      > SELECT _FUNC_(0);
       0.0
  """)
case class Sinh(child: Expression) extends UnaryMathExpression(math.sinh, "SINH")
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the square root of `expr`.",
  extended = """
    Examples:
      > SELECT _FUNC_(4);
       2.0
  """)
case class Sqrt(child: Expression) extends UnaryMathExpression(math.sqrt, "SQRT")
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the tangent of `expr`.",
  extended = """
    Examples:
      > SELECT _FUNC_(0);
       0.0
  """)
case class Tan(child: Expression) extends UnaryMathExpression(math.tan, "TAN")
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the hyperbolic tangent of `expr`.",
  extended = """
    Examples:
      > SELECT _FUNC_(0);
       0.0
  """)
case class Tanh(child: Expression) extends UnaryMathExpression(math.tanh, "TANH")
@ExpressionDescription(
  usage = "_FUNC_(expr) - Converts radians to degrees.",
  extended = """
    Examples:
      > SELECT _FUNC_(3.141592653589793);
       180.0
  """)
case class ToDegrees(child: Expression) extends UnaryMathExpression(math.toDegrees, "DEGREES") {
  // java.lang.Math method is `toDegrees`, not `degrees`.
  override def funcName: String = "toDegrees"
}
@ExpressionDescription(
  usage = "_FUNC_(expr) - Converts degrees to radians.",
  extended = """
    Examples:
      > SELECT _FUNC_(180);
       3.141592653589793
  """)
case class ToRadians(child: Expression) extends UnaryMathExpression(math.toRadians, "RADIANS") {
  // java.lang.Math method is `toRadians`, not `radians`.
  override def funcName: String = "toRadians"
}
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the string representation of the long value `expr` represented in binary.",
  extended = """
    Examples:
      > SELECT _FUNC_(13);
       1101
      > SELECT _FUNC_(-13);
       1111111111111111111111111111111111111111111111111111111111110011
      > SELECT _FUNC_(13.3);
       1101
  """)
// scalastyle:on line.size.limit
case class Bin(child: Expression)
  extends UnaryExpression with Serializable with ImplicitCastInputTypes {
  override def inputTypes: Seq[DataType] = Seq(LongType)
  override def dataType: DataType = StringType
  // Negative values render in 64-bit two's-complement form, as
  // java.lang.Long.toBinaryString does.
  protected override def nullSafeEval(input: Any): Any =
    UTF8String.fromString(jl.Long.toBinaryString(input.asInstanceOf[Long]))
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, (c) =>
      s"UTF8String.fromString(java.lang.Long.toBinaryString($c))")
  }
}
object Hex {
  // ASCII bytes '0'-'9', 'A'-'F' used when rendering a nibble.
  val hexDigits = Array[Char](
    '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'
  ).map(_.toByte)
  // lookup table to translate '0' -> 0 ... 'F'/'f' -> 15
  val unhexDigits = {
    val array = Array.fill[Byte](128)(-1)
    (0 to 9).foreach(i => array('0' + i) = i.toByte)
    (0 to 5).foreach(i => array('A' + i) = (i + 10).toByte)
    (0 to 5).foreach(i => array('a' + i) = (i + 10).toByte)
    array
  }
  // Renders each input byte as two hex characters, high nibble first.
  def hex(bytes: Array[Byte]): UTF8String = {
    val length = bytes.length
    val value = new Array[Byte](length * 2)
    var i = 0
    while (i < length) {
      value(i * 2) = Hex.hexDigits((bytes(i) & 0xF0) >> 4)
      value(i * 2 + 1) = Hex.hexDigits(bytes(i) & 0x0F)
      i += 1
    }
    UTF8String.fromBytes(value)
  }
  def hex(num: Long): UTF8String = {
    // Extract the hex digits of num into value[] from right to left
    val value = new Array[Byte](16)
    var numBuf = num
    var len = 0
    do {
      len += 1
      value(value.length - len) = Hex.hexDigits((numBuf & 0xF).toInt)
      // Unsigned shift: negative numbers render in two's-complement, and the
      // loop terminates because the sign bit is not propagated.
      numBuf >>>= 4
    } while (numBuf != 0)
    UTF8String.fromBytes(java.util.Arrays.copyOfRange(value, value.length - len, value.length))
  }
  // Inverse of hex(bytes); returns null on any byte that is not a hex digit.
  def unhex(bytes: Array[Byte]): Array[Byte] = {
    val out = new Array[Byte]((bytes.length + 1) >> 1)
    var i = 0
    if ((bytes.length & 0x01) != 0) {
      // padding with '0'
      // A negative byte is non-ASCII and therefore cannot be a hex digit.
      if (bytes(0) < 0) {
        return null
      }
      val v = Hex.unhexDigits(bytes(0))
      if (v == -1) {
        return null
      }
      out(0) = v
      i += 1
    }
    // two characters form the hex value.
    while (i < bytes.length) {
      if (bytes(i) < 0 || bytes(i + 1) < 0) {
        return null
      }
      val first = Hex.unhexDigits(bytes(i))
      val second = Hex.unhexDigits(bytes(i + 1))
      if (first == -1 || second == -1) {
        return null
      }
      out(i / 2) = (((first << 4) | second) & 0xFF).toByte
      i += 2
    }
    out
  }
}
/**
 * If the argument is an INT or binary, hex returns the number as a STRING in hexadecimal format.
 * Otherwise if the number is a STRING, it converts each character into its hex representation
 * and returns the resulting STRING. Negative numbers would be treated as two's complement.
 */
@ExpressionDescription(
  usage = "_FUNC_(expr) - Converts `expr` to hexadecimal.",
  extended = """
    Examples:
      > SELECT _FUNC_(17);
       11
      > SELECT _FUNC_('Spark SQL');
       537061726B2053514C
  """)
case class Hex(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
  override def inputTypes: Seq[AbstractDataType] =
    Seq(TypeCollection(LongType, BinaryType, StringType))
  override def dataType: DataType = StringType
  // Dispatch on the resolved input type; implicit casts restrict it to these three.
  protected override def nullSafeEval(num: Any): Any = child.dataType match {
    case LongType => Hex.hex(num.asInstanceOf[Long])
    case BinaryType => Hex.hex(num.asInstanceOf[Array[Byte]])
    case StringType => Hex.hex(num.asInstanceOf[UTF8String].getBytes)
  }
  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    nullSafeCodeGen(ctx, ev, (c) => {
      // Companion object name callable from generated Java (trailing `$` stripped).
      val hex = Hex.getClass.getName.stripSuffix("$")
      s"${ev.value} = " + (child.dataType match {
        case StringType => s"""$hex.hex($c.getBytes());"""
        case _ => s"""$hex.hex($c);"""
      })
    })
  }
}
/**
 * Performs the inverse operation of HEX.
 * Resulting characters are returned as a byte array.
 */
@ExpressionDescription(
  usage = "_FUNC_(expr) - Converts hexadecimal `expr` to binary.",
  extended = """
    Examples:
      > SELECT decode(_FUNC_('537061726B2053514C'), 'UTF-8');
       Spark SQL
  """)
case class Unhex(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
  override def inputTypes: Seq[AbstractDataType] = Seq(StringType)
  // Nullable: Hex.unhex returns null for any input containing non-hex bytes.
  override def nullable: Boolean = true
  override def dataType: DataType = BinaryType
  protected override def nullSafeEval(num: Any): Any =
    Hex.unhex(num.asInstanceOf[UTF8String].getBytes)
  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    nullSafeCodeGen(ctx, ev, (c) => {
      val hex = Hex.getClass.getName.stripSuffix("$")
      s"""
        ${ev.value} = $hex.unhex($c.getBytes());
        ${ev.isNull} = ${ev.value} == null;
       """
    })
  }
}
////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
// Binary math functions
////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(expr1, expr2) - Returns the angle in radians between the positive x-axis of a plane and the point given by the coordinates (`expr1`, `expr2`).",
  extended = """
    Examples:
      > SELECT _FUNC_(0, 0);
       0.0
  """)
// scalastyle:on line.size.limit
case class Atan2(left: Expression, right: Expression)
  extends BinaryMathExpression(math.atan2, "ATAN2") {
  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    // With codegen, the values returned by -0.0 and 0.0 are different. Handled with +0.0
    math.atan2(input1.asInstanceOf[Double] + 0.0, input2.asInstanceOf[Double] + 0.0)
  }
  // Mirrors the interpreted path: adds +0.0 to normalize negative zero.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, (c1, c2) => s"java.lang.Math.atan2($c1 + 0.0, $c2 + 0.0)")
  }
}
@ExpressionDescription(
  usage = "_FUNC_(expr1, expr2) - Raises `expr1` to the power of `expr2`.",
  extended = """
    Examples:
      > SELECT _FUNC_(2, 3);
       8.0
  """)
case class Pow(left: Expression, right: Expression)
  extends BinaryMathExpression(math.pow, "POWER") {
  // Explicit codegen: the SQL name POWER does not match java.lang.Math.pow.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, (c1, c2) => s"java.lang.Math.pow($c1, $c2)")
  }
}
/**
 * Bitwise left shift of an int or long base by an int number of bits.
 *
 * @param left the base number to shift.
 * @param right number of bits to left shift.
 */
@ExpressionDescription(
  usage = "_FUNC_(base, expr) - Bitwise left shift.",
  extended = """
    Examples:
      > SELECT _FUNC_(2, 1);
       4
  """)
case class ShiftLeft(left: Expression, right: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {
  override def inputTypes: Seq[AbstractDataType] =
    Seq(TypeCollection(IntegerType, LongType), IntegerType)
  // Result type follows the base operand (int or long).
  override def dataType: DataType = left.dataType
  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    val bits = input2.asInstanceOf[jl.Integer]
    input1 match {
      case l: jl.Long => l << bits
      case i: jl.Integer => i << bits
    }
  }
  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Lambda parameters renamed so they do not shadow the `left`/`right` members.
    defineCodeGen(ctx, ev, (base, bits) => s"$base << $bits")
  }
}
/**
 * Bitwise (signed) right shift.
 *
 * @param left the base number to shift.
 * @param right number of bits to right shift.
 */
@ExpressionDescription(
  usage = "_FUNC_(base, expr) - Bitwise (signed) right shift.",
  extended = """
    Examples:
      > SELECT _FUNC_(4, 1);
       2
  """)
case class ShiftRight(left: Expression, right: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {
  override def inputTypes: Seq[AbstractDataType] =
    Seq(TypeCollection(IntegerType, LongType), IntegerType)
  // Result type follows the base operand (int or long).
  override def dataType: DataType = left.dataType
  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    input1 match {
      case l: jl.Long => l >> input2.asInstanceOf[jl.Integer]
      case i: jl.Integer => i >> input2.asInstanceOf[jl.Integer]
    }
  }
  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, (left, right) => s"$left >> $right")
  }
}
/**
 * Bitwise unsigned right shift, for integer and long data type.
 *
 * @param left the base number.
 * @param right the number of bits to right shift.
 */
@ExpressionDescription(
  usage = "_FUNC_(base, expr) - Bitwise unsigned right shift.",
  extended = """
    Examples:
      > SELECT _FUNC_(4, 1);
       2
  """)
case class ShiftRightUnsigned(left: Expression, right: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {
  override def inputTypes: Seq[AbstractDataType] =
    Seq(TypeCollection(IntegerType, LongType), IntegerType)
  // Result type follows the base operand (int or long).
  override def dataType: DataType = left.dataType
  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    // >>> zero-fills the high bits regardless of sign.
    input1 match {
      case l: jl.Long => l >>> input2.asInstanceOf[jl.Integer]
      case i: jl.Integer => i >>> input2.asInstanceOf[jl.Integer]
    }
  }
  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, (left, right) => s"$left >>> $right")
  }
}
@ExpressionDescription(
  usage = "_FUNC_(expr1, expr2) - Returns sqrt(`expr1`**2 + `expr2`**2).",
  extended = """
    Examples:
      > SELECT _FUNC_(3, 4);
       5.0
  """)
case class Hypot(left: Expression, right: Expression)
  extends BinaryMathExpression(math.hypot, "HYPOT")
/**
 * Computes the logarithm of a number.
 *
 * @param left the logarithm base, default to e.
 * @param right the number to compute the logarithm of.
 */
@ExpressionDescription(
  usage = "_FUNC_(base, expr) - Returns the logarithm of `expr` with `base`.",
  extended = """
    Examples:
      > SELECT _FUNC_(10, 100);
       2.0
  """)
case class Logarithm(left: Expression, right: Expression)
  extends BinaryMathExpression((c1, c2) => math.log(c2) / math.log(c1), "LOG") {
  /**
   * Natural log, i.e. using e as the base.
   */
  def this(child: Expression) = {
    this(EulerNumber(), child)
  }
  // Nullable: non-positive base or argument evaluates to null (see below).
  override def nullable: Boolean = true
  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    val dLeft = input1.asInstanceOf[Double]
    val dRight = input2.asInstanceOf[Double]
    // Unlike Hive, we support Log base in (0.0, 1.0]
    if (dLeft <= 0.0 || dRight <= 0.0) null else math.log(dRight) / math.log(dLeft)
  }
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Natural-log special case avoids emitting a needless division by log(e).
    if (left.isInstanceOf[EulerNumber]) {
      nullSafeCodeGen(ctx, ev, (c1, c2) =>
        s"""
          if ($c2 <= 0.0) {
            ${ev.isNull} = true;
          } else {
            ${ev.value} = java.lang.Math.log($c2);
          }
        """)
    } else {
      nullSafeCodeGen(ctx, ev, (c1, c2) =>
        s"""
          if ($c1 <= 0.0 || $c2 <= 0.0) {
            ${ev.isNull} = true;
          } else {
            ${ev.value} = java.lang.Math.log($c2) / java.lang.Math.log($c1);
          }
        """)
    }
  }
}
/**
 * Round the `child`'s result to `scale` decimal place when `scale` >= 0
 * or round at integral part when `scale` < 0.
 *
 * Child of IntegralType would round to itself when `scale` >= 0.
 * Child of FractionalType whose value is NaN or Infinite would always round to itself.
 *
 * Round's dataType would always equal to `child`'s dataType except for DecimalType,
 * which would lead scale decrease from the origin DecimalType.
 *
 * @param child expr to be round, all [[NumericType]] is allowed as Input
 * @param scale new scale to be round to, this should be a constant int at runtime
 * @param mode rounding mode (e.g. HALF_UP, HALF_EVEN)
 * @param modeStr rounding mode string name (e.g. "ROUND_HALF_UP", "ROUND_HALF_EVEN")
 */
abstract class RoundBase(child: Expression, scale: Expression,
    mode: BigDecimal.RoundingMode.Value, modeStr: String)
  extends BinaryExpression with Serializable with ImplicitCastInputTypes {
  override def left: Expression = child
  override def right: Expression = scale
  // round of Decimal would eval to null if it fails to `changePrecision`
  override def nullable: Boolean = true
  override def foldable: Boolean = child.foldable
  override lazy val dataType: DataType = child.dataType match {
    // if the new scale is bigger which means we are scaling up,
    // keep the original scale as `Decimal` does
    case DecimalType.Fixed(p, s) => DecimalType(p, if (_scale > s) s else _scale)
    case t => t
  }
  override def inputTypes: Seq[AbstractDataType] = Seq(NumericType, IntegerType)
  override def checkInputDataTypes(): TypeCheckResult = {
    super.checkInputDataTypes() match {
      case TypeCheckSuccess =>
        // `scale` must be foldable so _scale can be fixed once at planning time.
        if (scale.foldable) {
          TypeCheckSuccess
        } else {
          TypeCheckFailure("Only foldable Expression is allowed for scale arguments")
        }
      case f => f
    }
  }
  // Avoid repeated evaluation since `scale` is a constant int,
  // avoid unnecessary `child` evaluation in both codegen and non-codegen eval
  // by checking if scaleV == null as well.
  private lazy val scaleV: Any = scale.eval(EmptyRow)
  private lazy val _scale: Int = scaleV.asInstanceOf[Int]
  override def eval(input: InternalRow): Any = {
    if (scaleV == null) { // if scale is null, no need to eval its child at all
      null
    } else {
      val evalE = child.eval(input)
      if (evalE == null) {
        null
      } else {
        nullSafeEval(evalE)
      }
    }
  }
  // not overriding since _scale is a constant int at runtime
  def nullSafeEval(input1: Any): Any = {
    child.dataType match {
      case _: DecimalType =>
        val decimal = input1.asInstanceOf[Decimal]
        // toPrecision yields None on overflow, which surfaces as null.
        decimal.toPrecision(decimal.precision, _scale, mode).orNull
      case ByteType =>
        BigDecimal(input1.asInstanceOf[Byte]).setScale(_scale, mode).toByte
      case ShortType =>
        BigDecimal(input1.asInstanceOf[Short]).setScale(_scale, mode).toShort
      case IntegerType =>
        BigDecimal(input1.asInstanceOf[Int]).setScale(_scale, mode).toInt
      case LongType =>
        BigDecimal(input1.asInstanceOf[Long]).setScale(_scale, mode).toLong
      case FloatType =>
        val f = input1.asInstanceOf[Float]
        if (f.isNaN || f.isInfinite) {
          f
        } else {
          BigDecimal(f.toDouble).setScale(_scale, mode).toFloat
        }
      case DoubleType =>
        val d = input1.asInstanceOf[Double]
        if (d.isNaN || d.isInfinite) {
          d
        } else {
          BigDecimal(d).setScale(_scale, mode).toDouble
        }
    }
  }
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val ce = child.genCode(ctx)
    // For integral types with _scale >= 0 rounding is the identity, so the
    // generated code is a plain assignment.
    val evaluationCode = child.dataType match {
      case _: DecimalType =>
        s"""
        if (${ce.value}.changePrecision(${ce.value}.precision(), ${_scale},
          java.math.BigDecimal.${modeStr})) {
          ${ev.value} = ${ce.value};
        } else {
          ${ev.isNull} = true;
        }"""
      case ByteType =>
        if (_scale < 0) {
          s"""
          ${ev.value} = new java.math.BigDecimal(${ce.value}).
            setScale(${_scale}, java.math.BigDecimal.${modeStr}).byteValue();"""
        } else {
          s"${ev.value} = ${ce.value};"
        }
      case ShortType =>
        if (_scale < 0) {
          s"""
          ${ev.value} = new java.math.BigDecimal(${ce.value}).
            setScale(${_scale}, java.math.BigDecimal.${modeStr}).shortValue();"""
        } else {
          s"${ev.value} = ${ce.value};"
        }
      case IntegerType =>
        if (_scale < 0) {
          s"""
          ${ev.value} = new java.math.BigDecimal(${ce.value}).
            setScale(${_scale}, java.math.BigDecimal.${modeStr}).intValue();"""
        } else {
          s"${ev.value} = ${ce.value};"
        }
      case LongType =>
        if (_scale < 0) {
          s"""
          ${ev.value} = new java.math.BigDecimal(${ce.value}).
            setScale(${_scale}, java.math.BigDecimal.${modeStr}).longValue();"""
        } else {
          s"${ev.value} = ${ce.value};"
        }
      case FloatType => // if child eval to NaN or Infinity, just return it.
        s"""
          if (Float.isNaN(${ce.value}) || Float.isInfinite(${ce.value})) {
            ${ev.value} = ${ce.value};
          } else {
            ${ev.value} = java.math.BigDecimal.valueOf(${ce.value}).
              setScale(${_scale}, java.math.BigDecimal.${modeStr}).floatValue();
          }"""
      case DoubleType => // if child eval to NaN or Infinity, just return it.
        s"""
          if (Double.isNaN(${ce.value}) || Double.isInfinite(${ce.value})) {
            ${ev.value} = ${ce.value};
          } else {
            ${ev.value} = java.math.BigDecimal.valueOf(${ce.value}).
              setScale(${_scale}, java.math.BigDecimal.${modeStr}).doubleValue();
          }"""
    }
    if (scaleV == null) { // if scale is null, no need to eval its child at all
      ev.copy(code = s"""
        boolean ${ev.isNull} = true;
        ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};""")
    } else {
      ev.copy(code = s"""
        ${ce.code}
        boolean ${ev.isNull} = ${ce.isNull};
        ${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
        if (!${ev.isNull}) {
          $evaluationCode
        }""")
    }
  }
}
/**
 * Round an expression to d decimal places using HALF_UP rounding mode.
 * round(2.5) == 3.0, round(3.5) == 4.0.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(expr, d) - Returns `expr` rounded to `d` decimal places using HALF_UP rounding mode.",
  extended = """
    Examples:
      > SELECT _FUNC_(2.5, 0);
       3.0
  """)
// scalastyle:on line.size.limit
case class Round(child: Expression, scale: Expression)
  extends RoundBase(child, scale, BigDecimal.RoundingMode.HALF_UP, "ROUND_HALF_UP")
    with Serializable with ImplicitCastInputTypes {
  // Single-argument form rounds to 0 decimal places.
  def this(child: Expression) = this(child, Literal(0))
}
/**
 * Round an expression to d decimal places using HALF_EVEN rounding mode,
 * also known as Gaussian rounding or bankers' rounding.
 * round(2.5) = 2.0, round(3.5) = 4.0.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(expr, d) - Returns `expr` rounded to `d` decimal places using HALF_EVEN rounding mode.",
  extended = """
    Examples:
      > SELECT _FUNC_(2.5, 0);
       2.0
  """)
// scalastyle:on line.size.limit
case class BRound(child: Expression, scale: Expression)
  extends RoundBase(child, scale, BigDecimal.RoundingMode.HALF_EVEN, "ROUND_HALF_EVEN")
    with Serializable with ImplicitCastInputTypes {
  // Single-argument form rounds to 0 decimal places.
  def this(child: Expression) = this(child, Literal(0))
}
| MLnick/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala | Scala | apache-2.0 | 37,417 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.