| code (string, 5-1M chars) | repo_name (string, 5-109 chars) | path (string, 6-208 chars) | language (1 class: Scala) | license (15 classes) | size (int64, 5-1M) |
|---|---|---|---|---|---|
/*
* Copyright (C) 2015 Language Technology Group
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package model.queryable.impl
import model.KeyTerm
import model.queryable.KeyTermQueryable
import utils.DBSettings
// scalastyle:off
import scalikejdbc._
// scalastyle:on
class KeyTermQueryableImpl extends KeyTermQueryable with DBSettings {
protected def connector: NamedDB = NamedDB(ConnectionPool.DEFAULT_NAME)
protected val document = new DocumentQueryableImpl
override def getDocumentKeyTerms(docId: Long, limit: Option[Int] = None): List[KeyTerm] = connector.readOnly { implicit session =>
SQL("""SELECT term, frequency
FROM terms
WHERE docid = {docId}
%s
""".format(if (limit.isDefined) "LIMIT " + limit.get else "")).bindByName('docId -> docId).map(KeyTerm(_)).list.apply()
}
override def getRelationshipKeyTerms(relId: Long, limit: Option[Int] = None): List[KeyTerm] = connector.readOnly { implicit session =>
val docIds = document.getIdsByRelationshipId(relId)
val terms = sql"""
SELECT term
FROM terms
WHERE docid IN ($docIds)
""".map(_.string("term")).list.apply() // ScalikeJDBC expands the Seq into bound parameters
val res = aggregateKeyTerms(terms)
if (limit.isDefined) res.take(limit.get) else res
}
private def aggregateKeyTerms(terms: List[String]): List[KeyTerm] = {
val termsToCount = terms.groupBy(identity).map {
case (term, counts) =>
KeyTerm(term, counts.length)
}.toList
termsToCount.sortWith { case (KeyTerm(_, c1), KeyTerm(_, c2)) => c1 > c2 } // strict comparison: sortWith requires an irreflexive ordering
}
}
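// A minimal, runnable sketch of the aggregation performed by aggregateKeyTerms above.
// KeyTerm here is a hypothetical stand-in for model.KeyTerm, assumed to be a simple
// (term, score) case class:
object AggregateKeyTermsExample extends App {
  case class KeyTerm(term: String, score: Int) // stand-in for model.KeyTerm
  val terms = List("tax", "fraud", "tax", "offshore", "tax")
  val aggregated = terms
    .groupBy(identity) // Map("tax" -> List("tax", "tax", "tax"), ...)
    .map { case (t, occurrences) => KeyTerm(t, occurrences.length) }
    .toList
    .sortBy(-_.score) // descending frequency
  println(aggregated) // KeyTerm(tax,3) first; equal-count terms in arbitrary order
}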
| tudarmstadt-lt/newsleak | common/src/main/scala/model/queryable/impl/KeyTermQueryableImpl.scala | Scala | agpl-3.0 | 2,208 |
package is.hail.expr.ir
import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import is.hail.HailSuite
import is.hail.annotations.Region
import is.hail.asm4s._
import is.hail.backend.ExecuteContext
import is.hail.check.{Gen, Prop}
import is.hail.expr.ir.agg._
import is.hail.expr.ir.orderings.CodeOrdering
import is.hail.types.physical._
import is.hail.io.{InputBuffer, OutputBuffer, StreamBufferSpec}
import is.hail.types.physical.stypes.Int64SingleCodeType
import is.hail.types.physical.stypes.interfaces.primitive
import is.hail.types.physical.stypes.primitives.SInt64
import is.hail.utils._
import org.testng.annotations.Test
import scala.collection.mutable
class TestBTreeKey(mb: EmitMethodBuilder[_]) extends BTreeKey {
private val comp = mb.ecb.getOrderingFunction(SInt64, SInt64, CodeOrdering.Compare())
override def storageType: PTuple = PCanonicalTuple(required = true, PInt64(), PCanonicalTuple(false))
override def compType: PType = PInt64()
override def isEmpty(cb: EmitCodeBuilder, off: Code[Long]): Value[Boolean] =
storageType.isFieldMissing(cb, off, 1)
override def initializeEmpty(cb: EmitCodeBuilder, off: Code[Long]): Unit =
storageType.setFieldMissing(cb, off, 1)
def storeKey(cb: EmitCodeBuilder, _off: Code[Long], m: Code[Boolean], v: Code[Long]): Unit = {
val off = cb.memoize[Long](_off)
storageType.stagedInitialize(cb, off)
cb.ifx(m,
storageType.setFieldMissing(cb, off, 0),
cb += Region.storeLong(storageType.fieldOffset(off, 0), v)
)
}
override def copy(cb: EmitCodeBuilder, src: Code[Long], dest: Code[Long]): Unit =
cb += Region.copyFrom(src, dest, storageType.byteSize)
override def deepCopy(cb: EmitCodeBuilder, er: EmitRegion, src: Code[Long], dest: Code[Long]): Unit =
copy(cb, src, dest)
override def compKeys(cb: EmitCodeBuilder, k1: EmitValue, k2: EmitValue): Value[Int] =
comp(cb, k1, k2)
override def loadCompKey(cb: EmitCodeBuilder, off: Value[Long]): EmitValue =
EmitValue(
Some(storageType.isFieldMissing(cb, off, 0)),
primitive(cb.memoize(Region.loadLong(storageType.fieldOffset(off, 0)))))
}
object BTreeBackedSet {
def bulkLoad(ctx: ExecuteContext, region: Region, serialized: Array[Byte], n: Int): BTreeBackedSet = {
val fb = EmitFunctionBuilder[Region, InputBuffer, Long](ctx, "btree_bulk_load")
val cb = fb.ecb
val root = fb.genFieldThisRef[Long]()
val r = fb.genFieldThisRef[Region]()
val ib = fb.getCodeParam[InputBuffer](2)
val ib2 = fb.genFieldThisRef[InputBuffer]()
val km = fb.genFieldThisRef[Boolean]()
val kv = fb.genFieldThisRef[Long]()
val key = new TestBTreeKey(fb.apply_method)
val btree = new AppendOnlyBTree(cb, key, r, root, maxElements = n)
fb.emitWithBuilder { cb =>
cb += (r := fb.getCodeParam[Region](1))
btree.init(cb)
btree.bulkLoad(cb, ib) { (cb, ib, off) =>
cb.assign(km, ib.readBoolean())
cb.assign(kv, km.mux(0L, ib.readLong()))
key.storeKey(cb, off, km, kv)
}
root
}
val inputBuffer = new StreamBufferSpec().buildInputBuffer(new ByteArrayInputStream(serialized))
val set = new BTreeBackedSet(ctx, region, n)
set.root = fb.resultWithIndex()(HailSuite.theHailClassLoader, ctx.fs, 0, region)(region, inputBuffer)
set
}
}
class BTreeBackedSet(ctx: ExecuteContext, region: Region, n: Int) {
var root: Long = 0
private val newTreeF = {
val fb = EmitFunctionBuilder[Region, Long](ctx, "new_tree")
val cb = fb.ecb
val root = fb.genFieldThisRef[Long]()
val r = fb.genFieldThisRef[Region]()
val key = new TestBTreeKey(fb.apply_method)
val btree = new AppendOnlyBTree(cb, key, r, root, maxElements = n)
fb.emitWithBuilder { cb =>
cb.assign(r, fb.getCodeParam[Region](1))
btree.init(cb)
root
}
fb.resultWithIndex()(HailSuite.theHailClassLoader, ctx.fs, 0, region)
}
private val getF = {
val fb = EmitFunctionBuilder[Region, Long, Boolean, Long, Long](ctx, "get")
val cb = fb.ecb
val root = fb.genFieldThisRef[Long]()
val r = fb.genFieldThisRef[Region]()
val m = fb.getCodeParam[Boolean](3)
val v = fb.getCodeParam[Long](4)
val elt = fb.newLocal[Long]()
val key = new TestBTreeKey(fb.apply_method)
val btree = new AppendOnlyBTree(cb, key, r, root, maxElements = n)
fb.emitWithBuilder { cb =>
val ec = EmitCode(Code._empty, m, primitive(v))
cb.assign(r, fb.getCodeParam[Region](1))
cb.assign(root, fb.getCodeParam[Long](2))
cb.assign(elt, btree.getOrElseInitialize(cb, ec))
cb.ifx(key.isEmpty(cb, elt), {
key.storeKey(cb, elt, m, v)
})
root
}
fb.resultWithIndex()(HailSuite.theHailClassLoader, ctx.fs, 0, region)
}
private val getResultsF = {
val fb = EmitFunctionBuilder[Region, Long, Array[java.lang.Long]](ctx, "get_results")
val cb = fb.ecb
val root = fb.genFieldThisRef[Long]()
val r = fb.genFieldThisRef[Region]()
val key = new TestBTreeKey(fb.apply_method)
val btree = new AppendOnlyBTree(cb, key, r, root, maxElements = n)
val sab = new StagedArrayBuilder(Int64SingleCodeType, true, fb.apply_method, 16)
val idx = fb.newLocal[Int]()
val returnArray = fb.newLocal[Array[java.lang.Long]]()
fb.emitWithBuilder { cb =>
cb += (r := fb.getCodeParam[Region](1))
cb += (root := fb.getCodeParam[Long](2))
cb += sab.clear
btree.foreach(cb) { (cb, _koff) =>
val koff = cb.memoize(_koff)
val ec = key.loadCompKey(cb, koff)
cb.ifx(ec.m,
cb += sab.addMissing(),
cb += sab.add(ec.pv.asInt64.value))
}
cb += (returnArray := Code.newArray[java.lang.Long](sab.size))
cb += (idx := 0)
cb += Code.whileLoop(idx < sab.size,
returnArray.update(idx, sab.isMissing(idx).mux(
Code._null[java.lang.Long],
Code.boxLong(coerce[Long](sab(idx))))),
idx := idx + 1
)
returnArray
}
fb.resultWithIndex()(HailSuite.theHailClassLoader, ctx.fs, 0, region)
}
private val bulkStoreF = {
val fb = EmitFunctionBuilder[Long, OutputBuffer, Unit](ctx, "bulk_store")
val cb = fb.ecb
val root = fb.genFieldThisRef[Long]()
val r = fb.genFieldThisRef[Region]()
val ob = fb.getCodeParam[OutputBuffer](2)
val ob2 = fb.genFieldThisRef[OutputBuffer]()
val key = new TestBTreeKey(fb.apply_method)
val btree = new AppendOnlyBTree(cb, key, r, root, maxElements = n)
fb.emitWithBuilder { cb =>
cb += (root := fb.getCodeParam[Long](1))
cb += (ob2 := ob)
btree.bulkStore(cb, ob2) { (cb, obc, offc) =>
val ob = cb.newLocal("ob", obc)
val off = cb.newLocal("off", offc)
val ev = cb.memoize(key.loadCompKey(cb, off), "ev")
cb += ob.writeBoolean(ev.m)
cb.ifx(!ev.m, {
cb += ob.writeLong(ev.pv.asInt64.value)
})
}
ob2.flush()
}
fb.resultWithIndex()(HailSuite.theHailClassLoader, ctx.fs, 0, region)
}
def clear(): Unit = {
if (root != 0) { region.clear() }
root = newTreeF(region)
}
def getOrElseInsert(v: java.lang.Long): Unit =
root = getF(region, root, v == null, if (v == null) 0L else v.longValue())
def getElements: Array[java.lang.Long] =
getResultsF(region, root)
def bulkStore: Array[Byte] = {
val baos = new ByteArrayOutputStream()
val outputBuffer = new StreamBufferSpec().buildOutputBuffer(baos)
bulkStoreF(root, outputBuffer)
baos.toByteArray
}
}
class TestSet {
val map = mutable.Set[java.lang.Long]()
def clear(): Unit = map.clear()
def getOrElseInsert(v: java.lang.Long): Unit = map += v
def getElements: Array[java.lang.Long] = map.toArray
}
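// A minimal sketch of the reference-model testing pattern used by the suite below,
// written against only the Scala standard library: every operation is applied both
// to the structure under test and to a trivially correct model, and their observable
// states are compared after each step. (mutable.TreeSet is a hypothetical stand-in
// for BTreeBackedSet here.)
object ReferenceModelSketch extends App {
  import scala.collection.mutable
  val model = mutable.Set.empty[Long] // trivially correct reference
  val underTest = mutable.TreeSet.empty[Long] // stand-in for the staged BTree
  for (v <- Seq(3L, 1L, 3L, 7L)) {
    model += v
    underTest += v
    assert(model.toSeq.sorted == underTest.toSeq.sorted) // same elements observed
  }
  println("model and implementation agree")
}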
class StagedBTreeSuite extends HailSuite {
@Test def testBTree(): Unit = {
pool.scopedRegion { region =>
val refSet = new TestSet()
val nodeSizeParams = Array(
2 -> Gen.choose(-10, 10),
3 -> Gen.choose(-10, 10),
5 -> Gen.choose(-30, 30),
6 -> Gen.choose(-30, 30),
22 -> Gen.choose(-3, 3))
for ((n, values) <- nodeSizeParams) {
val testSet = new BTreeBackedSet(ctx, region, n)
val sets = Gen.buildableOf[Array](Gen.zip(Gen.coin(.1), values)
.map { case (m, v) => if (m) null else new java.lang.Long(v) })
val lt = { (l1: java.lang.Long, l2: java.lang.Long) =>
!(l1 == null) && ((l2 == null) || (l1 < l2))
}
Prop.forAll(sets) { set =>
refSet.clear()
testSet.clear()
assert(refSet.getElements sameElements testSet.getElements)
set.forall { v =>
refSet.getOrElseInsert(v)
testSet.getOrElseInsert(v)
refSet.getElements.sortWith(lt) sameElements testSet.getElements.sortWith(lt)
} && {
val serialized = testSet.bulkStore
val testSet2 = BTreeBackedSet.bulkLoad(ctx, region, serialized, n)
refSet.getElements.sortWith(lt) sameElements testSet2.getElements.sortWith(lt)
}
}.check()
}
}
}
}
| hail-is/hail | hail/src/test/scala/is/hail/expr/ir/StagedBTreeSuite.scala | Scala | mit | 9,119 |
package com.inocybe.pfm.template.model
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import spray.json.DefaultJsonProtocol
object JsonProtocol extends SprayJsonSupport with DefaultJsonProtocol {
implicit val workFormat = jsonFormat2(Work)
implicit val workResultFormat = jsonFormat2(WorkResult)
}
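// A minimal usage sketch (hypothetical field names; Work's real shape is defined
// elsewhere in this project): jsonFormat2 derives a RootJsonFormat for a two-field
// case class, so importing the implicit format enables JSON round-trips.
object JsonProtocolSketch extends App {
  import spray.json._
  import DefaultJsonProtocol._
  case class Work(id: String, job: Int) // hypothetical stand-in
  implicit val workFormat: RootJsonFormat[Work] = jsonFormat2(Work)
  val json = Work("42", 1).toJson.compactPrint // {"id":"42","job":1}
  assert(json.parseJson.convertTo[Work] == Work("42", 1))
  println(json)
}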
| botekchristophe/akka-http-cluster-remote | src/main/scala/com/inocybe/pfm/template/model/JsonProtocol.scala | Scala | mit | 324 |
package com.arcusys.learn.scorm.manifest.storage.impl.liferay
import com.arcusys.learn.persistence.liferay.model.LFActivityDataMap
import com.arcusys.learn.persistence.liferay.service.LFActivityDataMapLocalServiceUtil
import com.arcusys.valamis.lesson.scorm.model.manifest.ActivityDataMap
import com.arcusys.valamis.lesson.scorm.storage.ActivityDataStorage
import scala.collection.JavaConverters._
/**
* Created by mminin on 16.10.14.
*/
class ActivityDataRepositoryImpl extends ActivityDataStorage {
def renew() = {
LFActivityDataMapLocalServiceUtil.removeAll()
}
def create(packageID: Int, activityID: String, entity: ActivityDataMap) {
val newEntity: LFActivityDataMap = LFActivityDataMapLocalServiceUtil.createLFActivityDataMap()
newEntity.setTargetId(entity.targetId)
newEntity.setReadSharedData(entity.readSharedData)
newEntity.setWriteSharedData(entity.writeSharedData)
newEntity.setPackageID(packageID)
newEntity.setActivityID(activityID)
LFActivityDataMapLocalServiceUtil.addLFActivityDataMap(newEntity)
}
def getForActivity(packageID: Int, activityID: String) = {
LFActivityDataMapLocalServiceUtil.findByPackageIDAndActivityID(packageID, activityID).asScala map extract
}
def delete(packageID: Int, activityID: String) {
LFActivityDataMapLocalServiceUtil.removeByPackageIDAndActivityID(packageID, activityID)
}
private def extract(entity: LFActivityDataMap) =
new ActivityDataMap(entity.getTargetId, entity.getReadSharedData, entity.getWriteSharedData)
}
| ViLPy/Valamis | learn-persistence-liferay-wrapper/src/main/scala/com/arcusys/learn/scorm/manifest/storage/impl/liferay/ActivityDataRepositoryImpl.scala | Scala | lgpl-3.0 | 1,533 |
package redis.commands
import redis._
import scala.concurrent.{Await, Future}
import akka.util.ByteString
import redis.protocol.{Bulk, MultiBulk}
import redis.actors.ReplyErrorException
class StringsSpec extends RedisStandaloneServer {
sequential
"Strings commands" should {
"APPEND" in {
val r = redis.set("appendKey", "Hello").flatMap(_ => {
redis.append("appendKey", " World").flatMap(length => {
length mustEqual "Hello World".length
redis.get("appendKey")
})
})
Await.result(r, timeOut) mustEqual Some(ByteString("Hello World"))
}
"BITCOUNT" in {
val r = redis.set("bitcountKey", "foobar").flatMap(_ => {
val a = redis.bitcount("bitcountKey")
val b = redis.bitcount("bitcountKey", 0, 0)
val c = redis.bitcount("bitcountKey", 1, 1)
Future.sequence(Seq(a, b, c))
})
Await.result(r, timeOut) mustEqual Seq(26, 4, 6)
}
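// For reference: "foobar" contains 26 set bits in total; byte 0 ('f' = 0x66) has 4
// set bits and byte 1 ('o' = 0x6f) has 6, matching Redis's documented BITCOUNT example.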
"BITOP" in {
val s1 = redis.set("bitopKey1", "afoobar a")
val s2 = redis.set("bitopKey2", "aabcdef a")
val r = for {
_ <- s1
_ <- s2
and <- redis.bitopAND("ANDbitopKey", "bitopKey1", "bitopKey2")
or <- redis.bitopOR("ORbitopKey", "bitopKey1", "bitopKey2")
xor <- redis.bitopXOR("XORbitopKey", "bitopKey1", "bitopKey2")
not <- redis.bitopNOT("NOTbitopKey", "bitopKey1")
} yield {
"AND" in {
Await.result(redis.get("ANDbitopKey"), timeOut) mustEqual Some(ByteString("a`bc`ab a"))
}
"OR" in {
Await.result(redis.get("ORbitopKey"), timeOut) mustEqual Some(ByteString("agoofev a"))
}
"XOR" in {
Await.result(redis.get("XORbitopKey"), timeOut) mustEqual Some(ByteString(0, 7, 13, 12, 6, 4, 20, 0, 0))
}
"NOT" in {
Await.result(redis.get("NOTbitopKey"), timeOut) mustEqual Some(ByteString(-98, -103, -112, -112, -99, -98, -115, -33, -98))
}
}
Await.result(r, timeOut)
}
"BITPOS" in {
val r = for {
s1 <- redis.set("bitposKey", "a+b") // 01100001 00101011 01100010
v1 <- redis.bitpos("bitposKey", 0)
v2 <- redis.bitpos("bitposKey", 1)
v3 <- redis.bitpos("bitposKey", 1, 1)
v4 <- redis.bitpos("bitposKey", 0, 3)
v5 <- redis.bitpos("bitposKey", 0, 1, 2)
} yield {
s1 mustEqual true
v1 mustEqual 0
v2 mustEqual 1
v3 mustEqual 10
v4 mustEqual -1
v5 mustEqual 8
}
Await.result(r, timeOut)
}
"DECR" in {
val r = redis.set("decrKey", "10").flatMap(_ => {
redis.decr("decrKey")
})
val r2 = redis.set("decrKeyError", "234293482390480948029348230948").flatMap(_ => {
redis.decr("decrKeyError")
})
Await.result(r, timeOut) mustEqual 9
Await.result(r2, timeOut) must throwA[ReplyErrorException]("ERR value is not an integer or out of range")
}
"DECRBY" in {
val r = redis.set("decrbyKey", "10").flatMap(_ => {
redis.decrby("decrbyKey", 5)
})
Await.result(r, timeOut) mustEqual 5
}
"GET" in {
val r = redis.get("getKeyNonexisting")
val r2 = redis.set("getKey", "Hello").flatMap(_ => {
redis.get("getKey")
})
Await.result(r, timeOut) mustEqual None
Await.result(r2, timeOut) mustEqual Some(ByteString("Hello"))
val rrr = for {
r3 <- redis.get[String]("getKey")
} yield {
r3 must beSome("Hello")
}
Await.result(rrr, timeOut)
}
"GET with conversion" in {
val dumbObject = new DumbClass("foo", "bar")
val r = redis.set("getDumbKey", dumbObject).flatMap(_ => {
redis.get[DumbClass]("getDumbKey")
})
Await.result(r, timeOut) mustEqual Some(dumbObject)
}
"GETBIT" in {
val r = redis.getbit("getbitKeyNonexisting", 0)
val r2 = redis.set("getbitKey", "Hello").flatMap(_ => {
redis.getbit("getbitKey", 1)
})
Await.result(r, timeOut) mustEqual false
Await.result(r2, timeOut) mustEqual true
}
"GETRANGE" in {
val r = redis.set("getrangeKey", "This is a string").flatMap(_ => {
Future.sequence(Seq(
redis.getrange("getrangeKey", 0, 3),
redis.getrange("getrangeKey", -3, -1),
redis.getrange("getrangeKey", 0, -1),
redis.getrange("getrangeKey", 10, 100)
).map(_.map(_.map(_.utf8String).get)))
})
Await.result(r, timeOut) mustEqual Seq("This", "ing", "This is a string", "string")
}
"GETSET" in {
val r = redis.set("getsetKey", "Hello").flatMap(_ => {
redis.getset("getsetKey", "World").flatMap(hello => {
hello mustEqual Some(ByteString("Hello"))
redis.get("getsetKey")
})
})
Await.result(r, timeOut) mustEqual Some(ByteString("World"))
}
"INCR" in {
val r = redis.set("incrKey", "10").flatMap(_ => {
redis.incr("incrKey")
})
Await.result(r, timeOut) mustEqual 11
}
"INCRBY" in {
val r = redis.set("incrbyKey", "10").flatMap(_ => {
redis.incrby("incrbyKey", 5)
})
Await.result(r, timeOut) mustEqual 15
}
"INCRBYFLOAT" in {
val r = redis.set("incrbyfloatKey", "10.50").flatMap(_ => {
redis.incrbyfloat("incrbyfloatKey", 0.15)
})
Await.result(r, timeOut) mustEqual Some(10.65)
}
"MGET" in {
val s1 = redis.set("mgetKey", "Hello")
val s2 = redis.set("mgetKey2", "World")
val r = for {
_ <- s1
_ <- s2
mget <- redis.mget("mgetKey", "mgetKey2", "mgetKeyNonexisting")
} yield {
mget mustEqual Seq(Some(ByteString("Hello")), Some(ByteString("World")), None)
}
Await.result(r, timeOut)
}
"MSET" in {
val r = redis.mset(Map("msetKey" -> "Hello", "msetKey2" -> "World")).flatMap(ok => {
ok mustEqual true
Future.sequence(Seq(
redis.get("msetKey"),
redis.get("msetKey2")
))
})
Await.result(r, timeOut) mustEqual Seq(Some(ByteString("Hello")), Some(ByteString("World")))
}
"MSETNX" in {
val r = for {
_ <- redis.del("msetnxKey", "msetnxKey2")
msetnx <- redis.msetnx(Map("msetnxKey" -> "Hello", "msetnxKey2" -> "World"))
msetnxFalse <- redis.msetnx(Map("msetnxKey3" -> "Hello", "msetnxKey2" -> "already set !!"))
} yield {
msetnx mustEqual true
msetnxFalse mustEqual false
}
Await.result(r, timeOut)
}
"PSETEX" in {
val r = redis.psetex("psetexKey", 2000, "temp value").flatMap(x => {
x mustEqual true
redis.get("psetexKey").flatMap(v => {
v mustEqual Some(ByteString("temp value"))
Thread.sleep(2000)
redis.get("psetexKey")
})
})
Await.result(r, timeOut) mustEqual None
}
"SET" in {
val rr = for {
r <- redis.set("setKey", "value")
ex <- redis.set("setKey", "value", exSeconds = Some(2))
nxex <- redis.set("setKey2", "value", NX = true, exSeconds = Some(60))
ttlnxex <- redis.ttl("setKey2")
xxex <- redis.set("setKey2", "value", XX = true, exSeconds = Some(180))
ttlxxex <- redis.ttl("setKey2")
_ <- redis.del("setKey2")
px <- redis.set("setKey", "value", pxMilliseconds = Some(1))
nxTrue <- {
Thread.sleep(20)
redis.set("setKey", "value", NX = true)
}
xx <- redis.set("setKey", "value", XX = true)
nxFalse <- redis.set("setKey", "value", NX = true)
} yield {
r mustEqual true
ex mustEqual true
nxex must beTrue
ttlnxex must beBetween[Long](0, 60)
xxex must beTrue
ttlxxex must beBetween[Long](60, 180)
px mustEqual true
nxTrue mustEqual true // because pxMilliseconds = 1 millisecond
xx mustEqual true
nxFalse mustEqual false
}
Await.result(rr, timeOut)
}
"SETBIT" in {
val r = for {
_ <- redis.del("setbitKey")
setTrue <- redis.setbit("setbitKey", 1, value = true)
getTrue <- redis.getbit("setbitKey", 1)
setFalse <- redis.setbit("setbitKey", 1, value = false)
getFalse <- redis.getbit("setbitKey", 1)
} yield {
setTrue mustEqual false
getTrue mustEqual true
setFalse mustEqual true
getFalse mustEqual false
}
Await.result(r, timeOut)
}
"SETEX" in {
val r = redis.setex("setexKey", 1, "temp value").flatMap(x => {
x mustEqual true
redis.get("setexKey").flatMap(v => {
v mustEqual Some(ByteString("temp value"))
Thread.sleep(2000)
redis.get("setexKey")
})
})
Await.result(r, timeOut) mustEqual None
}
"SETNX" in {
val r = for {
_ <- redis.del("setnxKey")
s1 <- redis.setnx("setnxKey", "Hello")
s2 <- redis.setnx("setnxKey", "World")
} yield {
s1 mustEqual true
s2 mustEqual false
}
Await.result(r, timeOut)
}
"SETRANGE" in {
val r = redis.set("setrangeKey", "Hello World").flatMap(d => {
redis.setrange("setrangeKey", 6, "Redis").flatMap(length => {
length mustEqual "Hello Redis".length
redis.get("setrangeKey")
})
})
Await.result(r, timeOut) mustEqual Some(ByteString("Hello Redis"))
}
"STRLEN" in {
val r = redis.set("strlenKey", "Hello World").flatMap(d => {
redis.strlen("strlenKey").flatMap(length => {
length mustEqual "Hello World".length
redis.strlen("strlenKeyNonexisting")
})
})
Await.result(r, timeOut) mustEqual 0
}
}
}
| npeters/rediscala | src/test/scala/redis/commands/StringsSpec.scala | Scala | apache-2.0 | 9,802 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.mv.testutil
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.test.util.QueryTest
import org.apache.carbondata.mv.plans.modular
import org.apache.carbondata.mv.plans.modular.Flags._
import org.apache.carbondata.mv.plans.modular.{ModularPlan, OneRowTable, Select}
/**
* Provides helper methods for comparing plans.
*/
abstract class ModularPlanTest extends QueryTest with PredicateHelper {
/**
* Since attribute references are given globally unique ids during analysis,
* we must normalize them to check if two different queries are identical.
*/
protected def normalizeExprIds(plan: ModularPlan): plan.type = {
plan transformAllExpressions {
case s: ScalarSubquery =>
s.copy(exprId = ExprId(0))
case e: Exists =>
e.copy(exprId = ExprId(0))
case l: ListQuery =>
l.copy(exprId = ExprId(0))
case a: AttributeReference =>
AttributeReference(a.name, a.dataType, a.nullable)(exprId = ExprId(0))
case a: Alias =>
Alias(a.child, a.name)(exprId = ExprId(0))
case ae: AggregateExpression =>
ae.copy(resultId = ExprId(0))
}
}
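// For example (illustrative only): analyzing "SELECT a FROM t" twice produces
// attributes with distinct ids such as a#12 and a#34; after normalization both
// become a#0, so otherwise-identical plans compare equal.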
/**
* Rewrite [[EqualTo]] and [[EqualNullSafe]] operator to keep order. The following cases will be
* equivalent:
* 1. (a = b), (b = a);
* 2. (a <=> b), (b <=> a).
*/
private def rewriteEqual(condition: Expression): Expression = {
condition match {
case eq@EqualTo(l: Expression, r: Expression) =>
Seq(l, r).sortBy(_.hashCode()).reduce(EqualTo)
case eq@EqualNullSafe(l: Expression, r: Expression) =>
Seq(l, r).sortBy(_.hashCode()).reduce(EqualNullSafe)
case _ => condition // Don't reorder.
}
}
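// For example (illustrative only): (a = b) and (b = a) both rebuild to the same
// canonical EqualTo, because the operands are ordered by hashCode before the
// comparison is reconstructed.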
//
// /** Fails the test if the two plans do not match */
// protected def comparePlans(plan1: LogicalPlan, plan2: LogicalPlan) {
// val normalized1 = normalizePlan(normalizeExprIds(plan1))
// val normalized2 = normalizePlan(normalizeExprIds(plan2))
// if (normalized1 != normalized2) {
// fail(
// s"""
// |== FAIL: Plans do not match ===
// |${sideBySide(normalized1.treeString, normalized2.treeString).mkString("\\n")}
// """.stripMargin)
// }
// }
//
// /** Fails the test if the two expressions do not match */
// protected def compareExpressions(e1: Expression, e2: Expression): Unit = {
// comparePlans(Filter(e1, OneRowRelation), Filter(e2, OneRowRelation))
// }
//
// /** Fails the test if the join order in the two plans do not match */
// protected def compareJoinOrder(plan1: LogicalPlan, plan2: LogicalPlan) {
// val normalized1 = normalizePlan(normalizeExprIds(plan1))
// val normalized2 = normalizePlan(normalizeExprIds(plan2))
// if (!sameJoinPlan(normalized1, normalized2)) {
// fail(
// s"""
// |== FAIL: Plans do not match ===
// |${sideBySide(normalized1.treeString, normalized2.treeString).mkString("\\n")}
// """.stripMargin)
// }
// }
//
// /** Consider symmetry for joins when comparing plans. */
// private def sameJoinPlan(plan1: LogicalPlan, plan2: LogicalPlan): Boolean = {
// (plan1, plan2) match {
// case (j1: Join, j2: Join) =>
// (sameJoinPlan(j1.left, j2.left) && sameJoinPlan(j1.right, j2.right)) ||
// (sameJoinPlan(j1.left, j2.right) && sameJoinPlan(j1.right, j2.left))
// case (p1: Project, p2: Project) =>
// p1.projectList == p2.projectList && sameJoinPlan(p1.child, p2.child)
// case _ =>
// plan1 == plan2
// }
// }
/** Fails the test if the corresponding pairs of plans do not match */
protected def comparePlanCollections(planSet1: Seq[String], planSet2: Seq[String]) {
for ((plan1, plan2) <- planSet1 zip planSet2) {
compareMessages(plan1, plan2)
}
}
/** Fails the test if the two plans do not match */
/** Only expressionIds are normalized. This is enough for our test cases */
/** For more general normalization, see Spark PlanTest.scala for Logical Plan */
protected def comparePlans(plan1: ModularPlan, plan2: ModularPlan) {
val normalized1 = normalizeExprIds(plan1)
val normalized2 = normalizeExprIds(plan2)
if (normalized1 != normalized2) {
fail(
s"""
|== FAIL: Plans do not match ===
|${ sideBySide(normalized1.treeString, normalized2.treeString).mkString("\\n") }
""".stripMargin)
}
}
/** Fails the test if the two expressions do not match */
protected def compareExpressions(e1: Seq[Expression], e2: Seq[Expression]): Unit = {
comparePlans(
Select(Nil, Nil, e1, Map.empty, Nil, Seq(OneRowTable), NoFlags, Seq.empty, Seq.empty),
modular.Select(Nil, Nil, e2, Map.empty, Nil, Seq(OneRowTable), NoFlags, Seq.empty, Seq.empty))
}
protected def compareMessages(msg1: String, msg2: String) {
if (msg1 != msg2) {
fail(
s"""
|== FAIL: Messages do not match ==
|${ sideBySide(msg1, msg2).mkString("\\n") }
""".stripMargin)
}
}
object MatchLocalRelation {
def unapply(localRelation: LocalRelation): Option[(Seq[Attribute], Any)] = localRelation match {
case l: LocalRelation => Some((l.output, l.data))
case _ => None
}
}
}
| jackylk/incubator-carbondata | mv/core/src/test/scala/org/apache/carbondata/mv/testutil/ModularPlanTest.scala | Scala | apache-2.0 | 6,347 |
package io.iohk.ethereum.vm
import akka.util.ByteString
import io.iohk.ethereum.crypto.kec256
import io.iohk.ethereum.domain.{Account, Address, TxLogEntry, UInt256}
import io.iohk.ethereum.domain.UInt256._
import io.iohk.ethereum.vm.BlockchainConfigForEvm.EtcForks.EtcFork
import io.iohk.ethereum.vm.BlockchainConfigForEvm.{EtcForks, EthForks}
import io.iohk.ethereum.vm.BlockchainConfigForEvm.EthForks.EthFork
// scalastyle:off magic.number
// scalastyle:off number.of.types
// scalastyle:off method.length
// scalastyle:off file.size.limit
object OpCodes {
val LogOpCodes: List[OpCode] = List(LOG0, LOG1, LOG2, LOG3, LOG4)
val SwapOpCodes: List[OpCode] = List(
SWAP1,
SWAP2,
SWAP3,
SWAP4,
SWAP5,
SWAP6,
SWAP7,
SWAP8,
SWAP9,
SWAP10,
SWAP11,
SWAP12,
SWAP13,
SWAP14,
SWAP15,
SWAP16
)
val DupOpCodes: List[OpCode] =
List(DUP1, DUP2, DUP3, DUP4, DUP5, DUP6, DUP7, DUP8, DUP9, DUP10, DUP11, DUP12, DUP13, DUP14, DUP15, DUP16)
val PushOpCodes: List[OpCode] = List(
PUSH1,
PUSH2,
PUSH3,
PUSH4,
PUSH5,
PUSH6,
PUSH7,
PUSH8,
PUSH9,
PUSH10,
PUSH11,
PUSH12,
PUSH13,
PUSH14,
PUSH15,
PUSH16,
PUSH17,
PUSH18,
PUSH19,
PUSH20,
PUSH21,
PUSH22,
PUSH23,
PUSH24,
PUSH25,
PUSH26,
PUSH27,
PUSH28,
PUSH29,
PUSH30,
PUSH31,
PUSH32
)
val FrontierOpCodes: List[OpCode] =
LogOpCodes ++ SwapOpCodes ++ PushOpCodes ++ DupOpCodes ++ List(
STOP,
ADD,
MUL,
SUB,
DIV,
SDIV,
MOD,
SMOD,
ADDMOD,
MULMOD,
EXP,
SIGNEXTEND,
LT,
GT,
SLT,
SGT,
EQ,
ISZERO,
AND,
OR,
XOR,
NOT,
BYTE,
SHA3,
ADDRESS,
BALANCE,
ORIGIN,
CALLER,
CALLVALUE,
CALLDATALOAD,
CALLDATASIZE,
CALLDATACOPY,
CODESIZE,
CODECOPY,
GASPRICE,
EXTCODESIZE,
EXTCODECOPY,
BLOCKHASH,
COINBASE,
TIMESTAMP,
NUMBER,
DIFFICULTY,
GASLIMIT,
POP,
MLOAD,
MSTORE,
MSTORE8,
SLOAD,
SSTORE,
JUMP,
JUMPI,
PC,
MSIZE,
GAS,
JUMPDEST,
CREATE,
CALL,
CALLCODE,
RETURN,
INVALID,
SELFDESTRUCT
)
val HomesteadOpCodes: List[OpCode] =
DELEGATECALL +: FrontierOpCodes
val ByzantiumOpCodes: List[OpCode] =
List(REVERT, STATICCALL, RETURNDATACOPY, RETURNDATASIZE) ++ HomesteadOpCodes
val ConstantinopleOpCodes: List[OpCode] =
List(EXTCODEHASH, CREATE2, SHL, SHR, SAR) ++ ByzantiumOpCodes
val PhoenixOpCodes: List[OpCode] =
List(CHAINID, SELFBALANCE) ++ ConstantinopleOpCodes
}
object OpCode {
def sliceBytes(bytes: ByteString, offset: UInt256, size: UInt256): ByteString = {
val start = offset.min(bytes.size).toInt
val end = (offset + size).min(bytes.size).toInt
bytes.slice(start, end).padTo(size.toInt, 0.toByte)
}
}
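// Illustrative note on sliceBytes' zero-padding semantics (plain Int indices shown
// instead of UInt256): reading past the end of the input zero-pads the result, e.g.
// sliceBytes(ByteString(1, 2, 3), offset = 2, size = 4) yields ByteString(3, 0, 0, 0).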
/**
* Base class for all the opcodes of the EVM
*
* @param code Opcode byte representation
* @param delta number of words to be popped from stack
* @param alpha number of words to be pushed to stack
*/
abstract class OpCode(val code: Byte, val delta: Int, val alpha: Int, val constGasFn: FeeSchedule => BigInt)
extends Product
with Serializable {
def this(code: Int, pop: Int, push: Int, constGasFn: FeeSchedule => BigInt) = this(code.toByte, pop, push, constGasFn)
def execute[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
if (!availableInContext(state))
state.withError(OpCodeNotAvailableInStaticContext(code))
else if (state.stack.size < delta)
state.withError(StackUnderflow)
else if (state.stack.size - delta + alpha > state.stack.maxSize)
state.withError(StackOverflow)
else {
val constGas: BigInt = constGasFn(state.config.feeSchedule)
val gas: BigInt = constGas + varGas(state)
if (gas > state.gas)
state.copy(gas = 0).withError(OutOfGas)
else
exec(state).spendGas(gas)
}
}
protected def varGas[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): BigInt
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S]
protected def availableInContext[W <: WorldStateProxy[W, S], S <: Storage[S]]: ProgramState[W, S] => Boolean = _ =>
true
}
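// For example (illustrative only): ADD below has delta = 2 and alpha = 1 (it pops
// two words and pushes one result), so execute() first verifies that the stack holds
// at least two words and that the net change cannot overflow the stack, then charges
// constGas plus varGas before running exec().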
sealed trait ConstGas { self: OpCode =>
protected def varGas[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): BigInt = 0
}
case object STOP extends OpCode(0x00, 0, 0, _.G_zero) with ConstGas {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] =
state.withReturnData(ByteString.empty).halt
}
sealed abstract class UnaryOp(code: Int, constGasFn: FeeSchedule => BigInt)(val f: UInt256 => UInt256)
extends OpCode(code, 1, 1, constGasFn)
with ConstGas {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (a, stack1) = state.stack.pop
val res = f(a)
val stack2 = stack1.push(res)
state.withStack(stack2).step()
}
}
sealed abstract class BinaryOp(code: Int, constGasFn: FeeSchedule => BigInt)(val f: (UInt256, UInt256) => UInt256)
extends OpCode(code.toByte, 2, 1, constGasFn) {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (Seq(a, b), stack1) = state.stack.pop(2)
val res = f(a, b)
val stack2 = stack1.push(res)
state.withStack(stack2).step()
}
}
sealed abstract class TernaryOp(code: Int, constGasFn: FeeSchedule => BigInt)(
val f: (UInt256, UInt256, UInt256) => UInt256
) extends OpCode(code.toByte, 3, 1, constGasFn) {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (Seq(a, b, c), stack1) = state.stack.pop(3)
val res = f(a, b, c)
val stack2 = stack1.push(res)
state.withStack(stack2).step()
}
}
sealed abstract class ConstOp(code: Int)(
val f: ProgramState[_ <: WorldStateProxy[_, _ <: Storage[_]], _ <: Storage[_]] => UInt256
) extends OpCode(code, 0, 1, _.G_base)
with ConstGas {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val stack1 = state.stack.push(f(state))
state.withStack(stack1).step()
}
}
sealed abstract class ShiftingOp(code: Int, f: (UInt256, UInt256) => UInt256)
extends OpCode(code, 2, 1, _.G_verylow)
with ConstGas {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (Seq(shift: UInt256, value: UInt256), remainingStack) = state.stack.pop(2)
val result = if (shift >= UInt256(256)) Zero else f(value, shift)
val resultStack = remainingStack.push(result)
state.withStack(resultStack).step()
}
}
case object ADD extends BinaryOp(0x01, _.G_verylow)(_ + _) with ConstGas
case object MUL extends BinaryOp(0x02, _.G_low)(_ * _) with ConstGas
case object SUB extends BinaryOp(0x03, _.G_verylow)(_ - _) with ConstGas
case object DIV extends BinaryOp(0x04, _.G_low)(_ div _) with ConstGas
case object SDIV extends BinaryOp(0x05, _.G_low)(_ sdiv _) with ConstGas
case object MOD extends BinaryOp(0x06, _.G_low)(_ mod _) with ConstGas
case object SMOD extends BinaryOp(0x07, _.G_low)(_ smod _) with ConstGas
case object ADDMOD extends TernaryOp(0x08, _.G_mid)(_.addmod(_, _)) with ConstGas
case object MULMOD extends TernaryOp(0x09, _.G_mid)(_.mulmod(_, _)) with ConstGas
case object EXP extends BinaryOp(0x0a, _.G_exp)(_ ** _) {
protected def varGas[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): BigInt = {
val (Seq(_, m: UInt256), _) = state.stack.pop(2)
state.config.feeSchedule.G_expbyte * m.byteSize
}
}
case object SIGNEXTEND extends BinaryOp(0x0b, _.G_low)((a, b) => b signExtend a) with ConstGas
case object LT extends BinaryOp(0x10, _.G_verylow)(_ < _) with ConstGas
case object GT extends BinaryOp(0x11, _.G_verylow)(_ > _) with ConstGas
case object SLT extends BinaryOp(0x12, _.G_verylow)(_ slt _) with ConstGas
case object SGT extends BinaryOp(0x13, _.G_verylow)(_ sgt _) with ConstGas
case object EQ extends BinaryOp(0x14, _.G_verylow)(_ == _) with ConstGas
case object ISZERO extends UnaryOp(0x15, _.G_verylow)(_.isZero) with ConstGas
case object AND extends BinaryOp(0x16, _.G_verylow)(_ & _) with ConstGas
case object OR extends BinaryOp(0x17, _.G_verylow)(_ | _) with ConstGas
case object XOR extends BinaryOp(0x18, _.G_verylow)(_ ^ _) with ConstGas
case object NOT extends UnaryOp(0x19, _.G_verylow)(~_) with ConstGas
case object BYTE extends BinaryOp(0x1a, _.G_verylow)((a, b) => b getByte a) with ConstGas
// logical shift left
case object SHL extends ShiftingOp(0x1b, _ << _)
// logical shift right
case object SHR extends ShiftingOp(0x1c, _ >> _)
// arithmetic shift right
case object SAR extends OpCode(0x1d, 2, 1, _.G_verylow) with ConstGas {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (Seq(shift, value), remainingStack) = state.stack.pop(2)
val result = if (shift >= UInt256(256)) {
if (value.toSign >= 0) Zero else UInt256(-1)
} else value sshift shift
val resultStack = remainingStack.push(result)
state.withStack(resultStack).step()
}
}
case object SHA3 extends OpCode(0x20, 2, 1, _.G_sha3) {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (Seq(offset, size), stack1) = state.stack.pop(2)
val (input, mem1) = state.memory.load(offset, size)
val hash = kec256(input.toArray)
val ret = UInt256(hash)
val stack2 = stack1.push(ret)
state.withStack(stack2).withMemory(mem1).step()
}
protected def varGas[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): BigInt = {
val (Seq(offset, size), _) = state.stack.pop(2)
val memCost = state.config.calcMemCost(state.memory.size, offset, size)
val shaCost = state.config.feeSchedule.G_sha3word * wordsForBytes(size)
memCost + shaCost
}
}
case object ADDRESS extends ConstOp(0x30)(_.env.ownerAddr.toUInt256)
case object BALANCE extends OpCode(0x31, 1, 1, _.G_balance) with ConstGas {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (accountAddress, stack1) = state.stack.pop
val accountBalance = state.world.getBalance(Address(accountAddress))
val stack2 = stack1.push(accountBalance)
state.withStack(stack2).step()
}
}
case object EXTCODEHASH extends OpCode(0x3f, 1, 1, _.G_balance) with ConstGas {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (accountAddress, stack1) = state.stack.pop
val address = Address(accountAddress)
/**
 * The specification of EIP-1052 (https://eips.ethereum.org/EIPS/eip-1052) says that
 * if the account does not exist, 0 is pushed to the stack.
 *
 * The interpretation is that an account "does not exist" if:
 * - it genuinely does not exist, or
 * - it is empty according to the EIP-161 rules (an account is considered empty when
 *   it has no code, a zero nonce, and a zero balance)
 *
 * Example of the existence check in geth:
 * https://github.com/ethereum/go-ethereum/blob/aad3c67a92cd4f3cc3a885fdc514ba2a7fb3e0a3/core/state/statedb.go#L203
 */
val accountExists = !state.world.isAccountDead(address)
val codeHash =
if (accountExists) {
val code = state.world.getCode(address)
if (code.isEmpty)
UInt256(Account.EmptyCodeHash)
else
UInt256(kec256(code))
} else {
UInt256.Zero
}
val stack2 = stack1.push(codeHash)
state.withStack(stack2).step()
}
}
case object ORIGIN extends ConstOp(0x32)(_.env.originAddr.toUInt256)
case object CALLER extends ConstOp(0x33)(_.env.callerAddr.toUInt256)
case object CALLVALUE extends ConstOp(0x34)(_.env.value)
case object CALLDATALOAD extends OpCode(0x35, 1, 1, _.G_verylow) with ConstGas {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (offset, stack1) = state.stack.pop
val data = OpCode.sliceBytes(state.inputData, offset, 32)
val stack2 = stack1.push(UInt256(data))
state.withStack(stack2).step()
}
}
case object CALLDATASIZE extends ConstOp(0x36)(_.inputData.size)
case object CALLDATACOPY extends OpCode(0x37, 3, 0, _.G_verylow) {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (Seq(memOffset, dataOffset, size), stack1) = state.stack.pop(3)
val data = OpCode.sliceBytes(state.inputData, dataOffset, size)
val mem1 = state.memory.store(memOffset, data)
state.withStack(stack1).withMemory(mem1).step()
}
protected def varGas[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): BigInt = {
val (Seq(offset, _, size), _) = state.stack.pop(3)
val memCost = state.config.calcMemCost(state.memory.size, offset, size)
val copyCost = state.config.feeSchedule.G_copy * wordsForBytes(size)
memCost + copyCost
}
}
case object CODESIZE extends ConstOp(0x38)(_.env.program.length)
case object CODECOPY extends OpCode(0x39, 3, 0, _.G_verylow) {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (Seq(memOffset, codeOffset, size), stack1) = state.stack.pop(3)
val bytes = OpCode.sliceBytes(state.program.code, codeOffset, size)
val mem1 = state.memory.store(memOffset, bytes)
state.withStack(stack1).withMemory(mem1).step()
}
protected def varGas[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): BigInt = {
val (Seq(offset, _, size), _) = state.stack.pop(3)
val memCost = state.config.calcMemCost(state.memory.size, offset, size)
val copyCost = state.config.feeSchedule.G_copy * wordsForBytes(size)
memCost + copyCost
}
}
case object GASPRICE extends ConstOp(0x3a)(_.env.gasPrice)
case object EXTCODESIZE extends OpCode(0x3b, 1, 1, _.G_extcode) with ConstGas {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (addr, stack1) = state.stack.pop
val codeSize = state.world.getCode(Address(addr)).size
val stack2 = stack1.push(UInt256(codeSize))
state.withStack(stack2).step()
}
}
case object EXTCODECOPY extends OpCode(0x3c, 4, 0, _.G_extcode) {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (Seq(address, memOffset, codeOffset, size), stack1) = state.stack.pop(4)
val codeCopy = OpCode.sliceBytes(state.world.getCode(Address(address)), codeOffset, size)
val mem1 = state.memory.store(memOffset, codeCopy)
state.withStack(stack1).withMemory(mem1).step()
}
protected def varGas[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): BigInt = {
val (Seq(_, memOffset, _, size), _) = state.stack.pop(4)
val memCost = state.config.calcMemCost(state.memory.size, memOffset, size)
val copyCost = state.config.feeSchedule.G_copy * wordsForBytes(size)
memCost + copyCost
}
}
case object RETURNDATASIZE extends ConstOp(0x3d)(_.returnData.size)
case object RETURNDATACOPY extends OpCode(0x3e, 3, 0, _.G_verylow) {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (Seq(memOffset, dataOffset, size), stack1) = state.stack.pop(3)
if (dataOffset.fillingAdd(size) > state.returnData.size) {
state.withStack(stack1).withError(ReturnDataOverflow)
} else {
val data = OpCode.sliceBytes(state.returnData, dataOffset, size)
val mem1 = state.memory.store(memOffset, data)
state.withStack(stack1).withMemory(mem1).step()
}
}
protected def varGas[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): BigInt = {
val (Seq(offset, _, size), _) = state.stack.pop(3)
val memCost = state.config.calcMemCost(state.memory.size, offset, size)
val copyCost = state.config.feeSchedule.G_copy * wordsForBytes(size)
memCost + copyCost
}
}
case object BLOCKHASH extends OpCode(0x40, 1, 1, _.G_blockhash) with ConstGas {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (blockNumber, stack1) = state.stack.pop
val outOfLimits = state.env.blockHeader.number - blockNumber > 256 || blockNumber >= state.env.blockHeader.number
val hash = if (outOfLimits) UInt256.Zero else state.world.getBlockHash(blockNumber).getOrElse(UInt256.Zero)
val stack2 = stack1.push(hash)
state.withStack(stack2).step()
}
}
case object COINBASE extends ConstOp(0x41)(s => UInt256(s.env.blockHeader.beneficiary))
case object TIMESTAMP extends ConstOp(0x42)(s => UInt256(s.env.blockHeader.unixTimestamp))
case object NUMBER extends ConstOp(0x43)(s => UInt256(s.env.blockHeader.number))
case object DIFFICULTY extends ConstOp(0x44)(s => UInt256(s.env.blockHeader.difficulty))
case object GASLIMIT extends ConstOp(0x45)(s => UInt256(s.env.blockHeader.gasLimit))
case object POP extends OpCode(0x50, 1, 0, _.G_base) with ConstGas {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (_, stack1) = state.stack.pop
state.withStack(stack1).step()
}
}
case object MLOAD extends OpCode(0x51, 1, 1, _.G_verylow) {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (offset, stack1) = state.stack.pop
val (word, mem1) = state.memory.load(offset)
val stack2 = stack1.push(word)
state.withStack(stack2).withMemory(mem1).step()
}
protected def varGas[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): BigInt = {
val (offset, _) = state.stack.pop
state.config.calcMemCost(state.memory.size, offset, UInt256.Size)
}
}
case object MSTORE extends OpCode(0x52, 2, 0, _.G_verylow) {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (Seq(offset, value), stack1) = state.stack.pop(2)
val updatedMem = state.memory.store(offset, value)
state.withStack(stack1).withMemory(updatedMem).step()
}
protected def varGas[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): BigInt = {
val (offset, _) = state.stack.pop
state.config.calcMemCost(state.memory.size, offset, UInt256.Size)
}
}
case object SLOAD extends OpCode(0x54, 1, 1, _.G_sload) with ConstGas {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (offset, stack1) = state.stack.pop
val value = state.storage.load(offset)
val stack2 = stack1.push(UInt256(value))
state.withStack(stack2).step()
}
}
case object MSTORE8 extends OpCode(0x53, 2, 0, _.G_verylow) {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (Seq(offset, value), stack1) = state.stack.pop(2)
val valueToByte = (value mod 256).toByte
val updatedMem = state.memory.store(offset, valueToByte)
state.withStack(stack1).withMemory(updatedMem).step()
}
protected def varGas[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): BigInt = {
val (offset, _) = state.stack.pop
state.config.calcMemCost(state.memory.size, offset, 1)
}
}
case object SSTORE extends OpCode(0x55, 2, 0, _.G_zero) {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val currentBlockNumber = state.env.blockHeader.number
val etcFork = state.config.blockchainConfig.etcForkForBlockNumber(currentBlockNumber)
val ethFork = state.config.blockchainConfig.ethForkForBlockNumber(currentBlockNumber)
val eip2200Enabled = isEip2200Enabled(etcFork, ethFork)
val eip1283Enabled = isEip1283Enabled(ethFork)
val (Seq(offset, newValue), stack1) = state.stack.pop(2)
val currentValue = state.storage.load(offset)
val refund: BigInt = if (eip2200Enabled || eip1283Enabled) {
val originalValue = state.originalWorld.getStorage(state.ownAddress).load(offset)
if (currentValue != newValue.toBigInt) {
if (originalValue == currentValue) { // fresh slot
if (originalValue != 0 && newValue.isZero)
state.config.feeSchedule.R_sclear
else 0
} else { // dirty slot
val clear = if (originalValue != 0) {
if (currentValue == 0)
-state.config.feeSchedule.R_sclear
else if (newValue.isZero)
state.config.feeSchedule.R_sclear
else
BigInt(0)
} else {
BigInt(0)
}
val reset = if (originalValue == newValue.toBigInt) {
if (UInt256(originalValue).isZero)
state.config.feeSchedule.R_sclear + state.config.feeSchedule.G_sreset - state.config.feeSchedule.G_sload
else
state.config.feeSchedule.G_sreset - state.config.feeSchedule.G_sload
} else BigInt(0)
clear + reset
}
} else BigInt(0)
} else {
if (newValue.isZero && !UInt256(currentValue).isZero)
state.config.feeSchedule.R_sclear
else
0
}
val updatedStorage = state.storage.store(offset, newValue)
state.withStack(stack1).withStorage(updatedStorage).refundGas(refund).step()
}
protected def varGas[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): BigInt = {
val (Seq(offset, newValue), _) = state.stack.pop(2)
val currentValue = state.storage.load(offset)
val currentBlockNumber = state.env.blockHeader.number
val etcFork = state.config.blockchainConfig.etcForkForBlockNumber(currentBlockNumber)
val ethFork = state.config.blockchainConfig.ethForkForBlockNumber(currentBlockNumber)
val eip2200Enabled = isEip2200Enabled(etcFork, ethFork)
val eip1283Enabled = isEip1283Enabled(ethFork)
if (eip2200Enabled && state.gas <= state.config.feeSchedule.G_callstipend) {
state.config.feeSchedule.G_callstipend + 1 // Out of gas error
} else if (eip2200Enabled || eip1283Enabled) {
if (currentValue == newValue.toBigInt) { // no-op
state.config.feeSchedule.G_sload
} else {
val originalValue = state.originalWorld.getStorage(state.ownAddress).load(offset)
if (originalValue == currentValue) { //fresh slot
if (originalValue == 0)
state.config.feeSchedule.G_sset
else
state.config.feeSchedule.G_sreset
} else {
//dirty slot
state.config.feeSchedule.G_sload
}
}
} else {
if (UInt256(currentValue).isZero && !newValue.isZero)
state.config.feeSchedule.G_sset
else
state.config.feeSchedule.G_sreset
}
}
override protected def availableInContext[W <: WorldStateProxy[W, S], S <: Storage[S]]
: ProgramState[W, S] => Boolean = !_.staticCtx
// https://eips.ethereum.org/EIPS/eip-1283
private def isEip1283Enabled(ethFork: EthFork): Boolean = ethFork == EthForks.Constantinople
// https://eips.ethereum.org/EIPS/eip-2200
private def isEip2200Enabled(etcFork: EtcFork, ethFork: EthFork): Boolean =
(ethFork >= EthForks.Istanbul || etcFork >= EtcForks.Phoenix)
}
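// For example (illustrative only): under EIP-2200, writing a non-zero value into a
// slot whose original and current value are both zero (a "fresh" slot) costs G_sset;
// rewriting the current value is a no-op charged at G_sload; and clearing a fresh
// non-zero slot earns an R_sclear refund.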
case object JUMP extends OpCode(0x56, 1, 0, _.G_mid) with ConstGas {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (pos, stack1) = state.stack.pop
val dest = pos.toInt // fail with InvalidJump if conversion to Int is lossy
if (pos == dest && state.program.validJumpDestinations.contains(dest))
state.withStack(stack1).goto(dest)
else
state.withError(InvalidJump(pos))
}
}
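// For example (illustrative only): the pos == dest guard above detects lossy
// UInt256-to-Int conversions. A destination above Int.MaxValue does not round-trip
// to the same value and is rejected as InvalidJump, just like a destination that is
// not a recorded JUMPDEST.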
case object JUMPI extends OpCode(0x57, 2, 0, _.G_high) with ConstGas {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (Seq(pos, cond), stack1) = state.stack.pop(2)
val dest = pos.toInt // fail with InvalidJump if conversion to Int is lossy
if (cond.isZero)
state.withStack(stack1).step()
else if (pos == dest && state.program.validJumpDestinations.contains(dest))
state.withStack(stack1).goto(dest)
else
state.withError(InvalidJump(pos))
}
}
case object PC extends ConstOp(0x58)(_.pc)
case object MSIZE extends ConstOp(0x59)(s => (UInt256.Size * wordsForBytes(s.memory.size)).toUInt256)
case object GAS extends ConstOp(0x5a)(state => (state.gas - state.config.feeSchedule.G_base).toUInt256)
case object JUMPDEST extends OpCode(0x5b, 0, 0, _.G_jumpdest) with ConstGas {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
state.step()
}
}
sealed abstract class PushOp(code: Int) extends OpCode(code, 0, 1, _.G_verylow) with ConstGas {
val i: Int = code - 0x60
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val n = i + 1
val bytes = state.program.getBytes(state.pc + 1, n)
val word = UInt256(bytes)
val stack1 = state.stack.push(word)
state.withStack(stack1).step(n + 1)
}
}
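// For example (illustrative only): PUSH2 (0x61) has i = 1, so it reads the n = 2
// bytes immediately following the opcode, pushes them as one word, and advances the
// program counter by n + 1 = 3 (opcode plus immediate data).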
case object PUSH1 extends PushOp(0x60)
case object PUSH2 extends PushOp(0x61)
case object PUSH3 extends PushOp(0x62)
case object PUSH4 extends PushOp(0x63)
case object PUSH5 extends PushOp(0x64)
case object PUSH6 extends PushOp(0x65)
case object PUSH7 extends PushOp(0x66)
case object PUSH8 extends PushOp(0x67)
case object PUSH9 extends PushOp(0x68)
case object PUSH10 extends PushOp(0x69)
case object PUSH11 extends PushOp(0x6a)
case object PUSH12 extends PushOp(0x6b)
case object PUSH13 extends PushOp(0x6c)
case object PUSH14 extends PushOp(0x6d)
case object PUSH15 extends PushOp(0x6e)
case object PUSH16 extends PushOp(0x6f)
case object PUSH17 extends PushOp(0x70)
case object PUSH18 extends PushOp(0x71)
case object PUSH19 extends PushOp(0x72)
case object PUSH20 extends PushOp(0x73)
case object PUSH21 extends PushOp(0x74)
case object PUSH22 extends PushOp(0x75)
case object PUSH23 extends PushOp(0x76)
case object PUSH24 extends PushOp(0x77)
case object PUSH25 extends PushOp(0x78)
case object PUSH26 extends PushOp(0x79)
case object PUSH27 extends PushOp(0x7a)
case object PUSH28 extends PushOp(0x7b)
case object PUSH29 extends PushOp(0x7c)
case object PUSH30 extends PushOp(0x7d)
case object PUSH31 extends PushOp(0x7e)
case object PUSH32 extends PushOp(0x7f)
sealed abstract class DupOp private (code: Int, val i: Int)
extends OpCode(code, i + 1, i + 2, _.G_verylow)
with ConstGas {
def this(code: Int) = this(code, code - 0x80)
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val stack1 = state.stack.dup(i)
state.withStack(stack1).step()
}
}
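// For example (illustrative only): DUP1 (0x80) has i = 0 and duplicates the top
// stack word, hence delta = 1 and alpha = 2: one word is read, two are present after.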
case object DUP1 extends DupOp(0x80)
case object DUP2 extends DupOp(0x81)
case object DUP3 extends DupOp(0x82)
case object DUP4 extends DupOp(0x83)
case object DUP5 extends DupOp(0x84)
case object DUP6 extends DupOp(0x85)
case object DUP7 extends DupOp(0x86)
case object DUP8 extends DupOp(0x87)
case object DUP9 extends DupOp(0x88)
case object DUP10 extends DupOp(0x89)
case object DUP11 extends DupOp(0x8a)
case object DUP12 extends DupOp(0x8b)
case object DUP13 extends DupOp(0x8c)
case object DUP14 extends DupOp(0x8d)
case object DUP15 extends DupOp(0x8e)
case object DUP16 extends DupOp(0x8f)
sealed abstract class SwapOp(code: Int, val i: Int) extends OpCode(code, i + 2, i + 2, _.G_verylow) with ConstGas {
def this(code: Int) = this(code, code - 0x90)
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val stack1 = state.stack.swap(i + 1)
state.withStack(stack1).step()
}
}
case object SWAP1 extends SwapOp(0x90)
case object SWAP2 extends SwapOp(0x91)
case object SWAP3 extends SwapOp(0x92)
case object SWAP4 extends SwapOp(0x93)
case object SWAP5 extends SwapOp(0x94)
case object SWAP6 extends SwapOp(0x95)
case object SWAP7 extends SwapOp(0x96)
case object SWAP8 extends SwapOp(0x97)
case object SWAP9 extends SwapOp(0x98)
case object SWAP10 extends SwapOp(0x99)
case object SWAP11 extends SwapOp(0x9a)
case object SWAP12 extends SwapOp(0x9b)
case object SWAP13 extends SwapOp(0x9c)
case object SWAP14 extends SwapOp(0x9d)
case object SWAP15 extends SwapOp(0x9e)
case object SWAP16 extends SwapOp(0x9f)
sealed abstract class LogOp(code: Int, val i: Int) extends OpCode(code, i + 2, 0, _.G_log) {
def this(code: Int) = this(code, code - 0xa0)
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (Seq(offset, size, topics @ _*), stack1) = state.stack.pop(delta)
val (data, memory) = state.memory.load(offset, size)
val logEntry = TxLogEntry(state.env.ownerAddr, topics.map(_.bytes), data)
state.withStack(stack1).withMemory(memory).withLog(logEntry).step()
}
protected def varGas[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): BigInt = {
val (Seq(offset, size, _*), _) = state.stack.pop(delta)
val memCost = state.config.calcMemCost(state.memory.size, offset, size)
val logCost = state.config.feeSchedule.G_logdata * size + i * state.config.feeSchedule.G_logtopic
memCost + logCost
}
override protected def availableInContext[W <: WorldStateProxy[W, S], S <: Storage[S]]
: ProgramState[W, S] => Boolean = !_.staticCtx
}
case object LOG0 extends LogOp(0xa0)
case object LOG1 extends LogOp(0xa1)
case object LOG2 extends LogOp(0xa2)
case object LOG3 extends LogOp(0xa3)
case object LOG4 extends LogOp(0xa4)
abstract class CreateOp(code: Int, delta: Int) extends OpCode(code, delta, 1, _.G_create) {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (Seq(endowment, inOffset, inSize), stack1) = state.stack.pop(3)
//FIXME: to avoid calculating this twice, we could adjust state.gas prior to execution in OpCode#execute
//not sure how this would affect other opcodes [EC-243]
val availableGas = state.gas - (constGasFn(state.config.feeSchedule) + varGas(state))
val startGas = state.config.gasCap(availableGas)
val (initCode, memory1) = state.memory.load(inOffset, inSize)
val world1 = state.world.increaseNonce(state.ownAddress)
val context: ProgramContext[W, S] = ProgramContext(
callerAddr = state.env.ownerAddr,
originAddr = state.env.originAddr,
recipientAddr = None,
gasPrice = state.env.gasPrice,
startGas = startGas,
inputData = initCode,
value = endowment,
endowment = endowment,
doTransfer = true,
blockHeader = state.env.blockHeader,
callDepth = state.env.callDepth + 1,
world = world1,
initialAddressesToDelete = state.addressesToDelete,
evmConfig = state.config,
originalWorld = state.originalWorld
)
val ((result, newAddress), stack2) = this match {
case CREATE => (state.vm.create(context), stack1)
case CREATE2 =>
val (Seq(salt), stack2) = stack1.pop(1)
(state.vm.create(context, Some(salt)), stack2)
}
result.error match {
case Some(err) =>
val world2 = if (err == InvalidCall) state.world else world1
val resultStack = stack2.push(UInt256.Zero)
val returnData = if (err == RevertOccurs) result.returnData else ByteString.empty
state
.spendGas(startGas - result.gasRemaining)
.withWorld(world2)
.withStack(resultStack)
.withReturnData(returnData)
.step()
case None =>
val resultStack = stack2.push(newAddress.toUInt256)
val internalTx =
InternalTransaction(CREATE, context.callerAddr, None, context.startGas, context.inputData, context.endowment)
state
.spendGas(startGas - result.gasRemaining)
.withWorld(result.world)
.refundGas(result.gasRefund)
.withStack(resultStack)
.withAddressesToDelete(result.addressesToDelete)
.withLogs(result.logs)
.withMemory(memory1)
.withInternalTxs(internalTx +: result.internalTxs)
.withReturnData(ByteString.empty)
.step()
}
}
override protected def availableInContext[W <: WorldStateProxy[W, S], S <: Storage[S]]
: ProgramState[W, S] => Boolean = !_.staticCtx
}
case object CREATE extends CreateOp(0xf0, 3) {
protected def varGas[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): BigInt = {
val (Seq(_, inOffset, inSize), _) = state.stack.pop(3)
state.config.calcMemCost(state.memory.size, inOffset, inSize)
}
}
case object CREATE2 extends CreateOp(0xf5, 4) {
protected def varGas[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): BigInt = {
val (Seq(_, inOffset, inSize), _) = state.stack.pop(3)
val memCost = state.config.calcMemCost(state.memory.size, inOffset, inSize)
val hashCost = state.config.feeSchedule.G_sha3word * wordsForBytes(inSize)
memCost + hashCost
}
}
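// The CREATE2 hash cost above charges G_sha3word per 32-byte word of init code. A standalone
// sketch of that rounding rule; `wordsForBytes` mirrors the helper used above, and the fee
// value is an illustrative assumption.
object Create2HashCostSketch {
  def wordsForBytes(n: BigInt): BigInt = (n + 31) / 32 // ceil(n / 32)
  val G_sha3word = BigInt(6)                           // assumed per-word Keccak fee
  def hashCost(initCodeSize: BigInt): BigInt = G_sha3word * wordsForBytes(initCodeSize)
  // e.g. hashCost(33) == 6 * 2 == 12, since 33 bytes occupy two 32-byte words
}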
abstract class CallOp(code: Int, delta: Int, alpha: Int) extends OpCode(code, delta, alpha, _.G_zero) {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (params @ Seq(_, to, callValue, inOffset, inSize, outOffset, outSize), stack1) = getParams(state)
val toAddr = Address(to)
val (inputData, mem1) = state.memory.load(inOffset, inSize)
val (owner, caller, value, endowment, doTransfer, static) = this match {
case CALL =>
(toAddr, state.ownAddress, callValue, callValue, true, state.staticCtx)
case STATICCALL =>
          /**
            * We return `doTransfer = true` for STATICCALL because the spec says it "functions equivalently to a CALL".
            * Note that no funds are actually transferred later, since `value` and `endowment` are both zero.
            * What does change is that both the recipient and the sender addresses are added to the touched-accounts
            * set, and if they are empty they will be deleted at the end of the transaction.
            * Clarification of this behaviour in the YP: https://github.com/ethereum/EIPs/pull/214#issuecomment-288697580
            */
(toAddr, state.ownAddress, UInt256.Zero, UInt256.Zero, true, true)
case CALLCODE =>
(state.ownAddress, state.ownAddress, callValue, callValue, false, state.staticCtx)
case DELEGATECALL =>
(state.ownAddress, state.env.callerAddr, callValue, UInt256.Zero, false, state.staticCtx)
}
val startGas = calcStartGas(state, params, endowment)
val context: ProgramContext[W, S] = ProgramContext(
callerAddr = caller,
originAddr = state.env.originAddr,
recipientAddr = Some(toAddr),
gasPrice = state.env.gasPrice,
startGas = startGas,
inputData = inputData,
value = value,
endowment = endowment,
doTransfer = doTransfer,
blockHeader = state.env.blockHeader,
callDepth = state.env.callDepth + 1,
world = state.world,
initialAddressesToDelete = state.addressesToDelete,
evmConfig = state.config,
staticCtx = static,
originalWorld = state.originalWorld
)
val result = state.vm.call(context, owner)
lazy val sizeCap = outSize.min(result.returnData.size).toInt
lazy val output = result.returnData.take(sizeCap)
lazy val mem2 = mem1.store(outOffset, output).expand(outOffset, outSize)
result.error match {
case Some(error) =>
val stack2 = stack1.push(UInt256.Zero)
val world1 = state.world.keepPrecompileTouched(result.world)
val gasAdjustment =
if (error == InvalidCall) -startGas else if (error == RevertOccurs) -result.gasRemaining else BigInt(0)
val memoryAdjustment = if (error == RevertOccurs) mem2 else mem1.expand(outOffset, outSize)
state
.withStack(stack2)
.withMemory(memoryAdjustment)
.withWorld(world1)
.spendGas(gasAdjustment)
.withReturnData(result.returnData)
.step()
case None =>
val stack2 = stack1.push(UInt256.One)
val internalTx = internalTransaction(state.env, to, startGas, inputData, endowment)
state
.spendGas(-result.gasRemaining)
.refundGas(result.gasRefund)
.withStack(stack2)
.withMemory(mem2)
.withWorld(result.world)
.withAddressesToDelete(result.addressesToDelete)
.withInternalTxs(internalTx +: result.internalTxs)
.withLogs(result.logs)
.withReturnData(result.returnData)
.step()
}
}
protected def internalTransaction(
env: ExecEnv,
callee: UInt256,
startGas: BigInt,
inputData: ByteString,
endowment: UInt256
): InternalTransaction = {
val from = env.ownerAddr
val to = if (this == CALL) Address(callee) else env.ownerAddr
InternalTransaction(this, from, Some(to), startGas, inputData, endowment)
}
protected def varGas[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): BigInt = {
val (Seq(gas, to, callValue, inOffset, inSize, outOffset, outSize), _) = getParams(state)
val endowment = if (this == DELEGATECALL || this == STATICCALL) UInt256.Zero else callValue
val memCost = calcMemCost(state, inOffset, inSize, outOffset, outSize)
// FIXME: these are calculated twice (for gas and exec), especially account existence. Can we do better? [EC-243]
val gExtra: BigInt = gasExtra(state, endowment, Address(to))
val gCap: BigInt = gasCap(state, gas, gExtra + memCost)
memCost + gCap + gExtra
}
protected def calcMemCost[W <: WorldStateProxy[W, S], S <: Storage[S]](
state: ProgramState[W, S],
inOffset: UInt256,
inSize: UInt256,
outOffset: UInt256,
outSize: UInt256
): BigInt = {
val memCostIn = state.config.calcMemCost(state.memory.size, inOffset, inSize)
val memCostOut = state.config.calcMemCost(state.memory.size, outOffset, outSize)
memCostIn max memCostOut
}
protected def getParams[W <: WorldStateProxy[W, S], S <: Storage[S]](
state: ProgramState[W, S]
): (Seq[UInt256], Stack) = {
val (Seq(gas, to), stack1) = state.stack.pop(2)
val (value, stack2) = if (this == DELEGATECALL || this == STATICCALL) (state.env.value, stack1) else stack1.pop
val (Seq(inOffset, inSize, outOffset, outSize), stack3) = stack2.pop(4)
Seq(gas, to, value, inOffset, inSize, outOffset, outSize) -> stack3
}
protected def calcStartGas[W <: WorldStateProxy[W, S], S <: Storage[S]](
state: ProgramState[W, S],
params: Seq[UInt256],
endowment: UInt256
): BigInt = {
val Seq(gas, to, _, inOffset, inSize, outOffset, outSize) = params
val memCost = calcMemCost(state, inOffset, inSize, outOffset, outSize)
val gExtra = gasExtra(state, endowment, Address(to))
val gCap = gasCap(state, gas, gExtra + memCost)
if (endowment.isZero) gCap else gCap + state.config.feeSchedule.G_callstipend
}
private def gasCap[W <: WorldStateProxy[W, S], S <: Storage[S]](
state: ProgramState[W, S],
g: BigInt,
consumedGas: BigInt
): BigInt = {
if (state.config.subGasCapDivisor.isDefined && state.gas >= consumedGas)
g min state.config.gasCap(state.gas - consumedGas)
else
g
}
private def gasExtra[W <: WorldStateProxy[W, S], S <: Storage[S]](
state: ProgramState[W, S],
endowment: UInt256,
to: Address
): BigInt = {
val isValueTransfer = endowment > 0
def postEip161CostCondition: Boolean =
state.world.isAccountDead(to) && this == CALL && isValueTransfer
def preEip161CostCondition: Boolean =
!state.world.accountExists(to) && this == CALL
val c_new: BigInt =
if (
state.config.noEmptyAccounts && postEip161CostCondition || !state.config.noEmptyAccounts && preEip161CostCondition
)
state.config.feeSchedule.G_newaccount
else 0
val c_xfer: BigInt = if (endowment.isZero) 0 else state.config.feeSchedule.G_callvalue
state.config.feeSchedule.G_call + c_xfer + c_new
}
}
case object CALL extends CallOp(0xf1, 7, 1) {
override protected def availableInContext[W <: WorldStateProxy[W, S], S <: Storage[S]]
: ProgramState[W, S] => Boolean = state =>
!state.staticCtx || {
val (Seq(_, _, callValue), _) = state.stack.pop(3)
callValue.isZero
}
}
case object STATICCALL extends CallOp(0xfa, 6, 1)
case object CALLCODE extends CallOp(0xf2, 7, 1)
case object DELEGATECALL extends CallOp(0xf4, 6, 1)
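// `gasCap` above limits the gas forwarded to a sub-call. A self-contained sketch of the
// post-EIP-150 behaviour, assuming subGasCapDivisor = Some(64) so that the config's gasCap
// is the "all but one 64th" rule; the names and divisor are assumptions for illustration.
object CallGasCapSketch {
  def allButOne64th(gas: BigInt): BigInt = gas - gas / 64
  def forwardedGas(requested: BigInt, available: BigInt, alreadyConsumed: BigInt): BigInt =
    if (available >= alreadyConsumed) requested min allButOne64th(available - alreadyConsumed)
    else requested
  // e.g. forwardedGas(BigInt(1000000), BigInt(64000), BigInt(0)) == 63000
}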
case object RETURN extends OpCode(0xf3, 2, 0, _.G_zero) {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (Seq(offset, size), stack1) = state.stack.pop(2)
val (ret, mem1) = state.memory.load(offset, size)
state.withStack(stack1).withReturnData(ret).withMemory(mem1).halt
}
protected def varGas[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): BigInt = {
val (Seq(offset, size), _) = state.stack.pop(2)
state.config.calcMemCost(state.memory.size, offset, size)
}
}
case object REVERT extends OpCode(0xfd, 2, 0, _.G_zero) {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (Seq(memory_offset, memory_length), stack1) = state.stack.pop(2)
val (ret, mem1) = state.memory.load(memory_offset, memory_length)
state.withStack(stack1).withMemory(mem1).revert(ret)
}
protected def varGas[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): BigInt = {
val (Seq(memory_offset, memory_length), _) = state.stack.pop(2)
state.config.calcMemCost(state.memory.size, memory_offset, memory_length)
}
}
case object INVALID extends OpCode(0xfe, 0, 0, _.G_zero) with ConstGas {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] =
state.withError(InvalidOpCode(code))
}
case object SELFDESTRUCT extends OpCode(0xff, 1, 0, _.G_selfdestruct) {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val (refund, stack1) = state.stack.pop
val refundAddr: Address = Address(refund)
val gasRefund: BigInt =
if (state.addressesToDelete contains state.ownAddress) 0 else state.config.feeSchedule.R_selfdestruct
val world =
if (state.ownAddress == refundAddr)
state.world.removeAllEther(state.ownAddress)
else
state.world.transfer(state.ownAddress, refundAddr, state.ownBalance)
state
.withWorld(world)
.refundGas(gasRefund)
.withAddressToDelete(state.ownAddress)
.withStack(stack1)
.withReturnData(ByteString.empty)
.halt
}
protected def varGas[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): BigInt = {
val isValueTransfer = state.ownBalance > 0
val (refundAddr, _) = state.stack.pop
val refundAddress = Address(refundAddr)
def postEip161CostCondition: Boolean =
state.config.chargeSelfDestructForNewAccount &&
isValueTransfer &&
state.world.isAccountDead(refundAddress)
def preEip161CostCondition: Boolean =
state.config.chargeSelfDestructForNewAccount && !state.world.accountExists(refundAddress)
if (
state.config.noEmptyAccounts && postEip161CostCondition || !state.config.noEmptyAccounts && preEip161CostCondition
)
state.config.feeSchedule.G_newaccount
else 0
}
override protected def availableInContext[W <: WorldStateProxy[W, S], S <: Storage[S]]
: ProgramState[W, S] => Boolean = !_.staticCtx
}
case object CHAINID extends ConstOp(0x46)(state => UInt256(state.env.evmConfig.blockchainConfig.chainId))
case object SELFBALANCE extends OpCode(0x47, 0, 1, _.G_low) with ConstGas {
protected def exec[W <: WorldStateProxy[W, S], S <: Storage[S]](state: ProgramState[W, S]): ProgramState[W, S] = {
val stack2 = state.stack.push(state.ownBalance)
state.withStack(stack2).step()
}
}
|
input-output-hk/etc-client
|
src/main/scala/io/iohk/ethereum/vm/OpCode.scala
|
Scala
|
mit
| 45,186
|
package demo
/*
* Copyright (C) 24/08/16 // mathieu.leclaire@openmole.org
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import scaladget.bootstrapnative.bsn._
import com.raquo.laminar.api.L._
object PopoverDemo extends Demo {
val sc = sourcecode.Text {
import scaladget.bootstrapnative.Popup._
val buttonStyle: HESetters = Seq(
btn_secondary,
marginRight := "5"
)
val open = Var(false)
//SIMPLE POPOVERS
lazy val simplePopovers = div(
h2("Simple popovers"),
div(paddingTop := "20", "Simple popovers containing text, or simple content with no events to be fired and with basic trigger modes (click, hover)."),
// button("Left", buttonStyle).popover(vForm(width := 100)(label("Nice content", label_danger).render, span("A important message").render), Left, title = Some("Check this !")).render,
button("Title", buttonStyle).popover(div(button(btn_primary, "Hey"), div("Popover on hover with Title")), Top, ClickPopup, title = Some("Pop title")).render,
button("Title 2", buttonStyle).popover(div("Popover on hover with Title", color := "red"), Top, ClickPopup, title = Some("Pop title")).render,
button("Dismissable", buttonStyle).popover(div("An other popover"), Top, HoverPopup, Some("Pop title"), true).render,
inputTag("").amend(width := "320", marginTop := "10", placeholder := "Bottom (click)").popover(div("Tooltip on click on bottom"), Bottom, ClickPopup).render,
div(cls := "flex-row", justifyContent.right,
div(
cls <-- open.signal.map { o =>
if (o) "button-open" else "button-close"
},
inputTag("blablaba").amend(onSubmit --> { _ => open.update(!_) })
),
button("Open", buttonStyle, onClick --> { _ => open.update(!_) })
)
)
simplePopovers
}
val elementDemo = new ElementDemo {
def title: String = "Popover"
def code: String = sc.source
def element: HtmlElement = sc.value
override def codeWidth: Int = 9
}
}
|
openmole/scaladget
|
bootstrapDemo/src/main/scala/demo/PopoverDemo.scala
|
Scala
|
agpl-3.0
| 2,641
|
// From: https://github.com/akka/akka/blob/master/project/Unidoc.scala
import sbt._
import sbt.Keys._
import sbt.Project.Initialize
object Unidoc {
val unidocDirectory = SettingKey[File]("unidoc-directory")
val unidocExclude = SettingKey[Seq[String]]("unidoc-exclude")
val unidocAllSources = TaskKey[Seq[Seq[File]]]("unidoc-all-sources")
val unidocSources = TaskKey[Seq[File]]("unidoc-sources")
val unidocAllClasspaths = TaskKey[Seq[Classpath]]("unidoc-all-classpaths")
val unidocClasspath = TaskKey[Seq[File]]("unidoc-classpath")
val unidoc = TaskKey[File]("unidoc", "Create unified scaladoc for all aggregates")
lazy val settings = Seq(
unidocDirectory <<= crossTarget / "unidoc",
unidocExclude := Seq.empty,
unidocAllSources <<= (thisProjectRef, buildStructure, unidocExclude) flatMap allSources,
unidocSources <<= unidocAllSources map { _.flatten },
unidocAllClasspaths <<= (thisProjectRef, buildStructure, unidocExclude) flatMap allClasspaths,
unidocClasspath <<= unidocAllClasspaths map { _.flatten.map(_.data).distinct },
unidoc <<= unidocTask
)
def allSources(projectRef: ProjectRef, structure: Load.BuildStructure, exclude: Seq[String]): Task[Seq[Seq[File]]] = {
val projects = aggregated(projectRef, structure, exclude)
projects flatMap { sources in Compile in LocalProject(_) get structure.data } join
}
def allClasspaths(projectRef: ProjectRef, structure: Load.BuildStructure, exclude: Seq[String]): Task[Seq[Classpath]] = {
val projects = aggregated(projectRef, structure, exclude)
projects flatMap { dependencyClasspath in Compile in LocalProject(_) get structure.data } join
}
def aggregated(projectRef: ProjectRef, structure: Load.BuildStructure, exclude: Seq[String]): Seq[String] = {
val aggregate = Project.getProject(projectRef, structure).toSeq.flatMap(_.aggregate)
aggregate flatMap { ref =>
if (exclude contains ref.project) Seq.empty
else ref.project +: aggregated(ref, structure, exclude)
}
}
def unidocTask: Initialize[Task[File]] = {
(compilers, cacheDirectory, unidocSources, unidocClasspath, unidocDirectory, scalacOptions in doc, streams) map {
(compilers, cache, sources, classpath, target, options, s) => {
val scaladoc = new Scaladoc(100, compilers.scalac)
scaladoc.cached(cache / "unidoc", "main", sources, classpath, target, options, s.log)
target
}
}
}
}
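// Usage sketch (sbt 0.12/0.13-era syntax, matching the <<= operators above): add these settings
// to an aggregating root project, optionally excluding sub-projects, then run `sbt unidoc`.
// The project and excluded names below are hypothetical.
//
// lazy val root = Project("root", file("."))
//   .settings(Unidoc.settings: _*)
//   .settings(Unidoc.unidocExclude := Seq("examples"))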
|
travisbrown/zipkin
|
project/Unidoc.scala
|
Scala
|
apache-2.0
| 2,444
|
package com.reactific.jfxtend.scene
import javafx.scene.shape._
import javafx.scene.shape.Path
/** Unit Tests For package */
package object shape {
implicit class LineExt(val extendee: Line) extends LineExtensions
implicit class RectangleExt(val extendee: Rectangle) extends RectangleExtensions
implicit class SphereExt(val extendee: Sphere) extends SphereExtensions
implicit class PathExt(val extendee: Path) extends PathExtensions
implicit class ShapeExt(val extendee: Shape) extends ShapeExtensions[Shape]
implicit class Shape3DExt(val extendee: Shape3D) extends Shape3DExtensions[Shape3D]
}
|
reactific/jfxtensions
|
src/main/scala/com/reactific/jfxtend/scene/shape/package.scala
|
Scala
|
apache-2.0
| 611
|
package coursier.util
import org.jsoup.Jsoup
import scala.jdk.CollectionConverters._
private[coursier] abstract class WebPageCompatibility {
def listWebPageRawElements(page: String): Iterator[String] =
Jsoup.parse(page)
.select("a")
.asScala
.iterator
.map(_.attr("href"))
}
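// Usage sketch for the helper above: parse a raw HTML listing and collect the href attributes.
// The input string is purely illustrative; Jsoup happily parses fragments.
private[coursier] object WebPageExample extends WebPageCompatibility {
  val page = """<html><body><a href="foo.jar">foo.jar</a><a href="bar/">bar/</a></body></html>"""
  def hrefs: List[String] = listWebPageRawElements(page).toList // List("foo.jar", "bar/")
}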
|
alexarchambault/coursier
|
modules/util/jvm/src/main/scala/coursier/util/WebPageCompatibility.scala
|
Scala
|
apache-2.0
| 310
|
package controllers.buildinfo
trait BuildInfoBase {
val name: String
val version: String
val commit: String
val author: String
val builtAtString: String
}
|
dpalmisano/plato
|
app/controllers/buildinfo/BuildInfoBase.scala
|
Scala
|
mit
| 166
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import uk.gov.hmrc.ct.accounts.frs102.retriever.{AbridgedAccountsBoxRetriever, Frs102AccountsBoxRetriever, FullAccountsBoxRetriever}
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.box.retriever.BoxRetriever._
case class AC5052A(value: Option[Int]) extends CtBoxIdentifier(name = "Debtors due after more than one year") with CtOptionalInteger
with Input
with ValidatableBox[Frs102AccountsBoxRetriever]
with SelfValidatableBox[Frs102AccountsBoxRetriever, Option[Int]]
with Validators {
private def noteHasValue(boxRetriever: Frs102AccountsBoxRetriever): Boolean = {
boxRetriever match {
case x: AbridgedAccountsBoxRetriever => anyHaveValue(x.ac5052A(), x.ac5052B(), x.ac5052C())
case x: FullAccountsBoxRetriever => anyHaveValue(x.ac134(), x.ac135(), x.ac138(), x.ac139(), x.ac136(), x.ac137(), x.ac140(), x.ac141(), x.ac5052A(), x.ac5052B(), x.ac5052C())
}
}
override def validate(boxRetriever: Frs102AccountsBoxRetriever): Set[CtValidation] = {
import boxRetriever._
val isMandatory = anyHaveValue(ac52(), ac53())
collectErrors (
failIf(!isMandatory)(
validateCannotExist(boxRetriever)
),
failIf(isMandatory)(
validateNotEmpty(boxRetriever)
),
validateMoney(value, min = 0),
validateOptionalIntegerLessOrEqualBox(boxRetriever.ac52())
)
}
private def validateCannotExist(boxRetriever: Frs102AccountsBoxRetriever)(): Set[CtValidation] = {
if (noteHasValue(boxRetriever))
Set(CtValidation(None, "error.balanceSheet.debtors.cannotExist"))
else
Set.empty
}
private def validateNotEmpty(boxRetriever: Frs102AccountsBoxRetriever)(): Set[CtValidation] = {
if (!noteHasValue(boxRetriever))
Set(CtValidation(None, "error.balanceSheet.debtors.mustNotBeEmpty"))
else
Set.empty
}
}
|
hmrc/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC5052A.scala
|
Scala
|
apache-2.0
| 2,806
|
package com.socrata.datacoordinator.secondary.sql
import java.sql.{Connection, SQLException}
import java.util.UUID
import com.rojoma.simplearm.v2.using
class PostgresCollocationManifest(conn: Connection) extends SqlCollocationManifest(conn) {
private val log = org.slf4j.LoggerFactory.getLogger(classOf[PostgresCollocationManifest])
private val uniqueViolation = "23505"
override def addCollocations(jobId: UUID, collocations: Set[(String, String)]): Unit = {
val defaultAutoCommit = conn.getAutoCommit
    // Auto-commit is most likely already disabled on this connection, but we switch it off
    // explicitly to be safe before running the batch insert.
conn.setAutoCommit(false)
// server is running at least Postgres 9.5 and ON CONFLICT is supported
using(conn.prepareStatement(
"""INSERT INTO collocation_manifest (job_id, dataset_internal_name_left, dataset_internal_name_right)
| VALUES (?, ? , ?)
|ON CONFLICT DO NOTHING""".stripMargin)) { insert =>
collocations.foreach { case (left, right) =>
insert.setObject(1, jobId)
insert.setString(2, left)
insert.setString(3, right)
insert.addBatch()
}
insert.executeBatch()
}
}
}
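// Usage sketch: thanks to ON CONFLICT DO NOTHING, re-running the same batch is idempotent.
// Assumes an open java.sql.Connection `conn` to Postgres >= 9.5 with the collocation_manifest
// table already created; the dataset names are hypothetical.
object CollocationManifestExample {
  def run(conn: Connection): Unit = {
    val manifest = new PostgresCollocationManifest(conn)
    val pairs = Set(("alpha.1", "beta.2"), ("alpha.1", "gamma.3"))
    val jobId = UUID.randomUUID()
    manifest.addCollocations(jobId, pairs) // first run inserts two rows
    manifest.addCollocations(jobId, pairs) // identical re-run is a no-op, no unique violation
    conn.commit()                          // auto-commit was disabled above, so commit explicitly
  }
}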
|
socrata-platform/data-coordinator
|
coordinatorlib/src/main/scala/com/socrata/datacoordinator/secondary/sql/PostgresCollocationManifest.scala
|
Scala
|
apache-2.0
| 1,230
|
package com.rbh.controllers
import play.api._
import play.api.mvc._
import play.api.libs.json._
import java.time._
import scala.concurrent.{Promise, Future}
import org.apache.kafka.clients.producer.RecordMetadata
//import scala.concurrent.ExecutionContext
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.util.{Success, Failure}
import scala.concurrent.Await
import com.rbh.actors.acme.StepEventActor
import com.rbh.model.acme.StepEvent
import com.rbh.service.AcmePublisher._
import akka.actor._
import javax.inject._
import akka.pattern.ask
import akka.util.Timeout
import scala.concurrent.duration._
import java.util.concurrent.TimeoutException
class FilthPig extends Controller {
def index = Action {
Ok("Filthpig")
}
}
object AcmeReceiver {
val acmeTimeout = Play.current.configuration.getInt("acme.publish.timeout").getOrElse(1000)
}
// TODO: Authentication
class AcmeReceiver extends Controller {
def acmeSteps = Action(parse.json) { request =>
val receivedInstant: Instant = Instant.now
// implicit val ec: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
val body: JsValue = request.body
// Validate
val transactionId = (body \ "transactionid").asOpt[Long]
val partnerId = (body \ "partnerid").asOpt[Long]
val nbrOfSteps = (body \ "nbrofsteps").asOpt[Int]
val eventTimestamp = (body \ "eventtimestamp").asOpt[Long]
// val attributes = (body \ "attributes").asOpt[Map[String, Any]]
val bodyElements = transactionId :: partnerId :: nbrOfSteps :: eventTimestamp :: Nil
// TODO: Improve validation.
      val valid = bodyElements.forall(_.isDefined)
if (valid) {
// TODO: Log good things...
val stepEvent = StepEvent(transactionId.get,
partnerId.get,
nbrOfSteps.get,
receivedInstant.toEpochMilli,
eventTimestamp.get)
val result: Future[RecordMetadata] = publish(stepEvent)
result onComplete {
case Success(recordMetadata) => {
println("Success much?")
// We presume here that these values could be of value for logging, and otherwise...
val offset: Long = recordMetadata.offset()
val partition: Int = recordMetadata.partition()
// Quick dirty timer thing
val doneDone: Long = Instant.now.toEpochMilli - stepEvent.receivedTimestamp
println(s"Done done --> $doneDone millis <---")
}
case Failure(t) => {
println("Failure much?")
// TODO: Log entire payload to error/retry log
val msg = t.getMessage
}
}
// Return 202
Accepted
} else {
// Log bad things...
BadRequest("Validation failed.")
}
}
}
@Singleton
class AcmeReceiverActor @Inject() (system: ActorSystem) extends Controller {
val stepEventActor = system.actorOf(StepEventActor.props, "se-actor")
implicit val timeout = Timeout(250.milliseconds)
import com.rbh.model.acme.StepEvent
def acmeSteps = Action(parse.json) { request =>
val receivedInstant: Instant = Instant.now
// implicit val ec: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
val body: JsValue = request.body
// Validate
val transactionId = (body \ "transactionid").asOpt[Long]
val partnerId = (body \ "partnerid").asOpt[Long]
val nbrOfSteps = (body \ "nbrofsteps").asOpt[Int]
val eventTimestamp = (body \ "eventtimestamp").asOpt[Long]
// val attributes = (body \ "attributes").asOpt[Map[String, Any]]
val bodyElements = transactionId :: partnerId :: nbrOfSteps :: eventTimestamp :: Nil
// TODO: Improve validation.
      val valid = bodyElements.forall(_.isDefined)
if (valid) {
// TODO: Log good things...
val stepEvent = StepEvent(transactionId.get,
partnerId.get,
nbrOfSteps.get,
receivedInstant.toEpochMilli,
eventTimestamp.get)
      val response: Future[Any] = stepEventActor ? stepEvent
      // The actor replies with a Future[RecordMetadata]: the ask is awaited here, the Kafka write itself is not.
      val result: Future[RecordMetadata] = Await.result(response, timeout.duration).asInstanceOf[Future[RecordMetadata]]
result onComplete {
case Success(recordMetadata) => {
println("Success much?")
// We presume here that these values could be of value for logging, and otherwise...
val offset: Long = recordMetadata.offset()
val partition: Int = recordMetadata.partition()
}
case Failure(t) => {
println("Failure much?")
// TODO: Log entire payload to error/retry log
val msg = t.getMessage
println(msg)
}
}
Accepted
} else {
// Log bad things...
BadRequest("Validation failed.")
}
// val result = Await.result(future, timeout.duration).asInstanceOf[Int]
// val json: JsValue = Json.obj("reverse gazintas" -> result)
// Ok(json)
}
}
|
bradkarels/acme-step-consumer
|
app/com/rbh/controllers/Application.scala
|
Scala
|
mit
| 5,222
|
package io.getquill.context
import scala.concurrent.Await
import scala.concurrent.Future
import scala.concurrent.duration.Duration
import io.getquill._
import io.getquill.Literal
import io.getquill.CassandraStreamContext
package object cassandra {
lazy val mirrorContext = new CassandraMirrorContext with TestEntities
lazy val testSyncDB = new CassandraSyncContext[Literal]("testSyncDB") with TestEntities
lazy val testAsyncDB = new CassandraAsyncContext[Literal]("testAsyncDB") with TestEntities
lazy val testStreamDB = new CassandraStreamContext[Literal]("testStreamDB") with TestEntities
def await[T](f: Future[T]): T = Await.result(f, Duration.Inf)
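  // A minimal sketch of the await helper in isolation: it blocks the calling thread until the
  // Future completes (with an unbounded timeout), which is fine in tests but not in production.
  object AwaitExample {
    def main(args: Array[String]): Unit = {
      val n = await(Future.successful(42))
      assert(n == 42)
    }
  }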
}
|
jcranky/quill
|
quill-cassandra/src/test/scala/io/getquill/context/cassandra/package.scala
|
Scala
|
apache-2.0
| 673
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.rules.logical
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.scala._
import org.apache.flink.table.api.Types
import org.apache.flink.table.api.scala._
import org.apache.flink.table.planner.plan.optimize.program.FlinkBatchProgram
import org.apache.flink.table.planner.utils.TableTestBase
import org.junit.Test
/**
* Test for [[FlinkAggregateExpandDistinctAggregatesRule]].
*/
class FlinkAggregateExpandDistinctAggregatesRuleTest extends TableTestBase {
private val util = batchTestUtil()
util.addTableSource[(Int, Long, Int)]("MyTable", 'a, 'b, 'c)
util.addTableSource[(Int, Long, String, String, String)]("MyTable2", 'a, 'b, 'c, 'd, 'e)
util.buildBatchProgram(FlinkBatchProgram.PHYSICAL)
@Test
def testSingleDistinctAgg(): Unit = {
util.verifyPlan("SELECT COUNT(DISTINCT a) FROM MyTable")
}
@Test
def testSingleDistinctAggOnMultiColumns(): Unit = {
util.verifyPlan("SELECT COUNT(DISTINCT a, b) FROM MyTable")
}
@Test
def testMultiDistinctAggOnSameColumn(): Unit = {
util.verifyPlan("SELECT COUNT(DISTINCT a), SUM(DISTINCT a), MAX(DISTINCT a) FROM MyTable")
}
@Test
def testSingleDistinctAggAndOneOrMultiNonDistinctAgg1(): Unit = {
// case 0x00: DISTINCT on COUNT and Non-DISTINCT on others
util.verifyPlan("SELECT COUNT(DISTINCT a), SUM(b) FROM MyTable")
}
@Test
def testSingleDistinctAggAndOneOrMultiNonDistinctAgg2(): Unit = {
// case 0x01: Non-DISTINCT on COUNT and DISTINCT on others
// when field `a` is non-nullable, count(a) = count(*)
util.verifyPlan("SELECT COUNT(a), SUM(DISTINCT b) FROM MyTable")
}
@Test
def testMultiDistinctAggOnDifferentColumn(): Unit = {
util.verifyPlan("SELECT COUNT(DISTINCT a), SUM(DISTINCT b) FROM MyTable")
}
@Test
def testMultiDistinctAndNonDistinctAggOnDifferentColumn(): Unit = {
util.verifyPlan("SELECT COUNT(DISTINCT a), SUM(DISTINCT b), COUNT(c) FROM MyTable")
}
@Test
def testSingleDistinctAggWithGroupBy(): Unit = {
// when field `a` is non-nullable, count(a) = count(*)
util.verifyPlan("SELECT a, COUNT(a), SUM(DISTINCT b) FROM MyTable GROUP BY a")
}
@Test
def testSingleDistinctAggWithGroupByAndCountStar(): Unit = {
util.verifyPlan("SELECT a, COUNT(*), SUM(DISTINCT b) FROM MyTable GROUP BY a")
}
@Test
def testTwoDistinctAggWithGroupByAndCountStar(): Unit = {
val sqlQuery = "SELECT a, COUNT(*), SUM(DISTINCT b), COUNT(DISTINCT b) FROM MyTable GROUP BY a"
util.verifyPlan(sqlQuery)
}
@Test
def testTwoDifferentDistinctAggWithGroupByAndCountStar(): Unit = {
val sqlQuery = "SELECT a, COUNT(*), SUM(DISTINCT b), COUNT(DISTINCT c) FROM MyTable GROUP BY a"
util.verifyPlan(sqlQuery)
}
@Test
def testMultiDifferentDistinctAggWithNonDistinctAggOnSameColumn(): Unit = {
util.verifyPlan("SELECT COUNT(DISTINCT a), SUM(DISTINCT b), MAX(a), MIN(a) FROM MyTable")
}
@Test
def testMultiDifferentDistinctAggWithNonDistinctAggOnSameColumnAndGroupBy(): Unit = {
val sqlQuery =
"SELECT COUNT(DISTINCT a), SUM(DISTINCT b), MAX(a), MIN(a) FROM MyTable GROUP BY c"
util.verifyPlan(sqlQuery)
}
@Test
def testMultiDifferentDistinctAggWithNonDistinctAggOnDifferentColumnAndGroupBy(): Unit = {
util.verifyPlan("SELECT SUM(DISTINCT a), COUNT(DISTINCT c) FROM MyTable GROUP BY b")
}
@Test
def testDistinctAggWithDuplicateField(): Unit = {
// when field `a` is non-nullable, count(a) = count(*)
util.verifyPlan("SELECT a, COUNT(a), SUM(b), SUM(DISTINCT b) FROM MyTable GROUP BY a")
}
@Test
def testSingleDistinctAggOnMultiColumnsWithGroupingSets(): Unit = {
util.verifyPlan("SELECT COUNT(DISTINCT a, b) FROM MyTable2 GROUP BY GROUPING SETS (c, d)")
}
@Test
def testMultiDistinctAggOnSameColumnWithGroupingSets(): Unit = {
val sqlQuery = "SELECT COUNT(DISTINCT a), SUM(DISTINCT a), MAX(DISTINCT a) " +
"FROM MyTable2 GROUP BY GROUPING SETS (b, c)"
util.verifyPlan(sqlQuery)
}
@Test
def testSingleDistinctAggAndOneOrMultiNonDistinctAggWithGroupingSets1(): Unit = {
// case 0x00: DISTINCT on COUNT and Non-DISTINCT on others
util.verifyPlan("SELECT COUNT(DISTINCT a), SUM(b) FROM MyTable2 GROUP BY GROUPING SETS (b, c)")
}
@Test
def testSingleDistinctAggAndOneOrMultiNonDistinctAggWithGroupingSets2(): Unit = {
// case 0x01: Non-DISTINCT on COUNT and DISTINCT on others
util.verifyPlan("SELECT COUNT(a), SUM(DISTINCT b) FROM MyTable2 GROUP BY GROUPING SETS (c, d)")
}
@Test
def testMultiDistinctAggOnDifferentColumnWithGroupingSets(): Unit = {
val sqlQuery = "SELECT COUNT(DISTINCT a), SUM(DISTINCT b) FROM MyTable2 " +
"GROUP BY GROUPING SETS (c, d)"
util.verifyPlan(sqlQuery)
}
@Test
def testMultiDistinctAndNonDistinctAggOnDifferentColumnWithGroupingSets(): Unit = {
val sqlQuery = "SELECT COUNT(DISTINCT a), SUM(DISTINCT b), COUNT(c) FROM MyTable2 " +
"GROUP BY GROUPING SETS (d, e)"
util.verifyPlan(sqlQuery)
}
@Test(expected = classOf[RuntimeException])
def testTooManyDistinctAggOnDifferentColumn(): Unit = {
// max group count must be less than 64
val fieldNames = (0 until 64).map(i => s"f$i").toArray
val fieldTypes: Array[TypeInformation[_]] = Array.fill(fieldNames.length)(Types.INT)
util.addTableSource("MyTable64", fieldTypes, fieldNames)
val distinctList = fieldNames.map(f => s"COUNT(DISTINCT $f)").mkString(", ")
val maxList = fieldNames.map(f => s"MAX($f)").mkString(", ")
val sqlQuery = s"SELECT $distinctList, $maxList FROM MyTable64"
util.verifyPlan(sqlQuery)
}
}
|
fhueske/flink
|
flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/rules/logical/FlinkAggregateExpandDistinctAggregatesRuleTest.scala
|
Scala
|
apache-2.0
| 6,460
|
package views.html
import play.templates._
import play.templates.TemplateMagic._
import play.api.templates._
import play.api.templates.PlayMagic._
import models._
import controllers._
import java.lang._
import java.util._
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import play.api.i18n._
import play.core.j.PlayMagicForJava._
import play.mvc._
import play.data._
import play.api.data.Field
import play.mvc.Http.Context.Implicit._
import views.html._
/**/
object Main extends BaseScalaTemplate[play.api.templates.HtmlFormat.Appendable,Format[play.api.templates.HtmlFormat.Appendable]](play.api.templates.HtmlFormat) with play.api.templates.Template2[String,Html,play.api.templates.HtmlFormat.Appendable] {
/**/
def apply/*1.2*/(page: String)(content: Html):play.api.templates.HtmlFormat.Appendable = {
_display_ {
Seq[Any](format.raw/*1.31*/("""
<!DOCTYPE html>
<html>
<head>
<title>Surferpedia</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link rel="stylesheet" href="http://netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap.min.css">
<!-- Load site-specific customizations after bootstrap. -->
<link rel="stylesheet" media="screen" href=""""),_display_(Seq[Any](/*12.54*/routes/*12.60*/.Assets.at("stylesheets/main.css"))),format.raw/*12.94*/("""">
<link rel="shortcut icon" type="image/png" href=""""),_display_(Seq[Any](/*13.59*/routes/*13.65*/.Assets.at("images/favicon.png"))),format.raw/*13.97*/("""">
<!-- HTML5 shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!--[if lt IE 9]>
<script src="http://cdnjs.cloudflare.com/ajax/libs/html5shiv/3.6.2/html5shiv.js"></script>
<script src="http://cdnjs.cloudflare.com/ajax/libs/respond.js/1.2.0/respond.js"></script>
<![endif]-->
</head>
<body>
<div class="header"></div>
<!-- Responsive navbar -->
<div class="navbar navbar-inverse" role="navigation">
<div class="container">
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-collapse">
<!-- Display three horizontal lines when navbar collapsed. -->
<span class="icon-bar"></span> <span class="icon-bar"></span> <span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="/">Surferpedia</a>
</div>
<div class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li class="dropdown">
<a class="dropdown-toggle" data-toggle="dropdown" href="#">Males<b class="caret"></b></a>
<ul class="dropdown-menu" role="menu">
<li><a href=""""),_display_(Seq[Any](/*38.35*/routes/*38.41*/.Application.jeff())),format.raw/*38.60*/("""">Jeff Hakman</a></li>
<li><a href=""""),_display_(Seq[Any](/*39.35*/routes/*39.41*/.Application.laird())),format.raw/*39.61*/("""">Laird Hamilton</a></li>
</ul>
</li>
<li class="dropdown">
<a class="dropdown-toggle" data-toggle="dropdown" href="#">Females<b class="caret"></b></a>
<ul class="dropdown-menu" role="menu">
<li><a href=""""),_display_(Seq[Any](/*47.35*/routes/*47.41*/.Application.stephanie())),format.raw/*47.65*/("""">Stephanie Gilmore</a></li>
<li><a href=""""),_display_(Seq[Any](/*48.35*/routes/*48.41*/.Application.carissa())),format.raw/*48.63*/("""">Carissa Moore</a></li>
</ul>
</li>
<li class="dropdown">
<a class="dropdown-toggle" data-toggle="dropdown" href="#">Groms<b class="caret"></b></a>
<ul class="dropdown-menu" role="menu">
<li><a href=""""),_display_(Seq[Any](/*55.35*/routes/*55.41*/.Application.kanoa())),format.raw/*55.61*/("""">Kanoa Igarashi</a><a href=""""),_display_(Seq[Any](/*55.91*/routes/*55.97*/.Application.jake())),format.raw/*55.116*/("""">Jake Marshall</a></li>
</ul>
</li>
</ul>
</div>
</div>
</div>
"""),_display_(Seq[Any](/*64.8*/content)),format.raw/*64.15*/("""
<!-- Load Bootstrap JavaScript components. HTMLUnit (used in testing) requires JQuery 1.8.3 or below). -->
<script src="http://code.jquery.com/jquery-1.8.3.min.js"></script>
<script src="http://netdna.bootstrapcdn.com/bootstrap/3.0.0/js/bootstrap.min.js"></script>
</body>
</html>
"""))}
}
def render(page:String,content:Html): play.api.templates.HtmlFormat.Appendable = apply(page)(content)
def f:((String) => (Html) => play.api.templates.HtmlFormat.Appendable) = (page) => (content) => apply(page)(content)
def ref: this.type = this
}
/*
-- GENERATED --
DATE: Wed Oct 09 11:01:40 HST 2013
SOURCE: C:/Users/Diana/Desktop/surferpedia/app/views/Main.scala.html
HASH: 3db698b78fc31ef0094f797d2e8547be07a5d0cf
MATRIX: 778->1|901->30|1338->431|1353->437|1409->471|1507->533|1522->539|1576->571|2874->1833|2889->1839|2930->1858|3024->1916|3039->1922|3081->1942|3444->2269|3459->2275|3505->2299|3605->2363|3620->2369|3664->2391|4002->2693|4017->2699|4059->2719|4125->2749|4140->2755|4182->2774|4371->2928|4400->2935
LINES: 26->1|29->1|40->12|40->12|40->12|41->13|41->13|41->13|66->38|66->38|66->38|67->39|67->39|67->39|75->47|75->47|75->47|76->48|76->48|76->48|83->55|83->55|83->55|83->55|83->55|83->55|92->64|92->64
-- GENERATED --
*/
|
MattCCieslak/surferpedia
|
target/scala-2.10/src_managed/main/views/html/Main.template.scala
|
Scala
|
mit
| 5,810
|
// Equites, a Scala chess playground
// Copyright © 2014 Frank S. Thomas <frank@timepit.eu>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package eu.timepit.equites
package format
object Pgn {
case class Comment(text: String)
case class Tag(name: String, value: String)
///
case class MaybeSquare(file: Option[File] = None, rank: Option[Rank] = None) {
def matches(square: Square): Boolean = {
val fileMatches = file.fold(true)(_ == square.file)
val rankMatches = rank.fold(true)(_ == square.rank)
fileMatches && rankMatches
}
def toSquare: Option[Square] =
file.flatMap(f => rank.flatMap(r => Square.from(f, r)))
}
object MaybeSquare {
def apply(square: Square): MaybeSquare =
MaybeSquare(Some(square.file), Some(square.rank))
}
case class MaybeDraw(src: MaybeSquare, dest: Square)
object MaybeDraw {
def apply(dest: Square): MaybeDraw =
MaybeDraw(MaybeSquare(), dest)
def apply(draw: Draw): MaybeDraw =
MaybeDraw(MaybeSquare(draw.src), draw.dest)
}
///
sealed trait CheckIndicator
case object Check extends CheckIndicator
case object CheckMate extends CheckIndicator
///
sealed trait SanAction
sealed trait SanMoveLike extends SanAction {
def pieceType: PieceType
def draw: MaybeDraw
}
sealed trait SanPromotionLike extends SanMoveLike {
def promotedTo: PromotedPieceType
}
case class SanMove(
pieceType: PieceType,
draw: MaybeDraw)
extends SanMoveLike
case class SanCapture(
pieceType: PieceType,
draw: MaybeDraw)
extends SanMoveLike
case class SanPromotion(
pieceType: Pawn.type,
draw: MaybeDraw,
promotedTo: PromotedPieceType)
extends SanPromotionLike {
def toSanMove: SanMove =
SanMove(pieceType, draw)
}
case class SanCaptureAndPromotion(
pieceType: Pawn.type,
draw: MaybeDraw,
promotedTo: PromotedPieceType)
extends SanPromotionLike {
def toSanCapture: SanCapture =
SanCapture(pieceType, draw)
}
case class SanCastling(side: Side) extends SanAction
case class CheckingSanAction(
action: SanAction,
indicator: CheckIndicator)
extends SanAction
///
sealed trait MoveElement
case class MoveNumber(moveNumber: Int, color: Color) extends MoveElement
case class MoveSymbol(action: SanAction) extends MoveElement
case class AnnotationGlyph(glyph: Int) extends MoveElement
///
sealed trait SeqElem
case class SeqMoveElement(move: MoveElement) extends SeqElem
case class SeqComment(comment: Comment) extends SeqElem
case class RecursiveVariation(variation: List[SeqElem]) extends SeqElem
///
case class MoveTextSection(moveText: List[SeqElem], result: GameResult)
case class GameRecord(header: List[Tag], moveText: MoveTextSection)
}
|
equites-chess/equites-core
|
src/main/scala/eu/timepit/equites/format/Pgn.scala
|
Scala
|
gpl-3.0
| 3,432
|
package com.rocketfuel.sdbc.postgresql
import java.time.{Duration => JavaDuration}
import java.util.concurrent.TimeUnit
import org.postgresql.util.PGInterval
import scala.concurrent.duration.Duration
trait IntervalImplicits {
implicit def JavaDurationToPGInterval(value: JavaDuration): PGInterval = {
val nano = value.getNano.toDouble / IntervalConstants.nanosecondsPerSecond.toDouble
val totalSeconds = value.getSeconds
val years = totalSeconds / IntervalConstants.secondsPerYear
val yearRemainder = totalSeconds % IntervalConstants.secondsPerYear
val months = yearRemainder / IntervalConstants.secondsPerMonth
val monthRemainder = yearRemainder % IntervalConstants.secondsPerMonth
val days = monthRemainder / IntervalConstants.secondsPerDay
val dayRemainder = monthRemainder % IntervalConstants.secondsPerDay
val hours = dayRemainder / IntervalConstants.secondsPerHour
val hoursRemainder = dayRemainder % IntervalConstants.secondsPerHour
val minutes = hoursRemainder / IntervalConstants.secondsPerMinute
val seconds = (hoursRemainder % IntervalConstants.secondsPerMinute).toDouble + nano
new PGInterval(
years.toInt,
months.toInt,
days.toInt,
hours.toInt,
minutes.toInt,
seconds
)
}
implicit def PGIntervalToJavaDuration(value: PGInterval): JavaDuration = {
val nanos = (value.getSeconds - value.getSeconds.floor) * IntervalConstants.nanosecondsPerSecond
var seconds = 0L
seconds += value.getSeconds.toLong
seconds += value.getMinutes * IntervalConstants.secondsPerMinute
seconds += value.getHours * IntervalConstants.secondsPerHour
seconds += value.getDays * IntervalConstants.secondsPerDay
seconds += value.getMonths * IntervalConstants.secondsPerMonth
seconds += value.getYears * IntervalConstants.secondsPerYear
JavaDuration.ofSeconds(seconds, nanos.toLong)
}
  implicit def DurationToPGInterval(duration: Duration): PGInterval = {
    // Returning the JavaDuration here relies on the implicit JavaDurationToPGInterval
    // conversion above to adapt it to the declared PGInterval result type.
    val javaDuration = JavaDuration.ofNanos(duration.toNanos)
    javaDuration
  }
implicit def PGIntervalToScalaDuration(value: PGInterval): Duration = {
val javaDuration: JavaDuration = value
Duration(javaDuration.getSeconds, TimeUnit.SECONDS) + Duration(javaDuration.getNano, TimeUnit.NANOSECONDS)
}
}
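// A round-trip sketch for the conversions above. The trip is exact here because the duration
// has whole seconds and both directions share the same IntervalConstants approximations
// (e.g. seconds per month); sub-second values may lose precision in the double arithmetic.
object IntervalRoundTripExample extends IntervalImplicits {
  def main(args: Array[String]): Unit = {
    val original = JavaDuration.ofDays(400).plusHours(5).plusMinutes(7).plusSeconds(9)
    val interval: PGInterval = original // implicit JavaDurationToPGInterval
    val back: JavaDuration = interval   // implicit PGIntervalToJavaDuration
    assert(back == original)
  }
}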
|
rocketfuel/sdbc
|
postgresql/src/main/scala/com/rocketfuel/sdbc/postgresql/IntervalImplicits.scala
|
Scala
|
bsd-3-clause
| 2,298
|
package cz.kamenitxan.jakon.core.dynamic
import java.lang.reflect.Method
import java.sql.Connection
import javax.servlet.MultipartConfigElement
import com.google.gson.Gson
import cz.kamenitxan.jakon.core.database.DBHelper
import cz.kamenitxan.jakon.logging.Logger
import cz.kamenitxan.jakon.utils.PageContext
import cz.kamenitxan.jakon.utils.TypeReferences._
import cz.kamenitxan.jakon.validation.EntityValidator
import cz.kamenitxan.jakon.webui.conform.FieldConformer._
import cz.kamenitxan.jakon.webui.controller.pagelets.AbstractAdminPagelet
import cz.kamenitxan.jakon.webui.entity.CustomControllerInfo
import cz.kamenitxan.jakon.webui.{AdminSettings, Context}
import spark.{Request, Response, Spark}
import scala.annotation.tailrec
import scala.collection.mutable
import scala.jdk.CollectionConverters._
object PageletInitializer {
  private val METHOD_VALIDATE = "validate"
private val gson = new Gson
val protectedPrefixes: mutable.Buffer[String] = mutable.Buffer[String]()
def initControllers(controllers: Seq[Class[_]]): Unit = {
Logger.info("Initializing pagelets")
controllers.foreach(c => {
Logger.debug("Initializing pagelet: " + c.getSimpleName)
val controllerAnn = c.getAnnotation(classOf[Pagelet])
if (controllerAnn.authRequired()) {
protectedPrefixes += controllerAnn.path()
}
c.getDeclaredMethods
.filter(m => m.getAnnotation(classOf[Get]) != null || m.getAnnotation(classOf[Post]) != null)
.foreach(m => {
val get = m.getAnnotation(classOf[Get])
val post = m.getAnnotation(classOf[Post])
if (get != null) {
initGetAnnotation(get, controllerAnn, m, c)
}
if (post != null) {
initPostAnnotation(post, controllerAnn, m, c)
}
})
})
controllers.filter(c => classOf[AbstractAdminPagelet].isAssignableFrom(c) && c.getAnnotation(classOf[Pagelet]).showInAdmin()).foreach(c => {
val apa = c.getDeclaredMethods.find(m => m.getAnnotation(classOf[Get]) != null)
if (apa.nonEmpty) {
val inst = c.getDeclaredConstructor().newInstance().asInstanceOf[AbstractAdminPagelet]
val controllerAnn = c.getAnnotation(classOf[Pagelet])
val get = apa.get.getAnnotation(classOf[Get])
AdminSettings.customControllersInfo += new CustomControllerInfo(inst.name, inst.icon, controllerAnn.path() + get.path(), c)
}
})
Logger.info("Pagelet initialization complete")
}
private def initGetAnnotation(get: Get, controllerAnn: Pagelet, m: Method, c: Class[_]): Unit = {
//TODO m.getReturnType.is
Spark.get(controllerAnn.path() + get.path(), (req: Request, res: Response) => {
val pagelet: IPagelet = c.getDeclaredConstructor().newInstance().asInstanceOf[IPagelet]
      // TODO: create the connection only when it is actually needed
DBHelper.withDbConnection(conn => {
val methodArgs = createMethodArgs(m, req, res, conn, pagelet)
var context = m.invoke(pagelet, methodArgs.array: _*).asInstanceOf[mutable.Map[String, Any]]
if (notRedirected(res)) {
if (pagelet.isInstanceOf[AbstractAdminPagelet]) {
if (context == null) {
context = mutable.Map[String, Any]()
}
context = context ++ Context.getAdminContext
}
try {
pagelet.render(context, get.template(), req)
} catch {
case ex: Exception =>
Logger.error(s"${pagelet.getClass.getCanonicalName}.${m.getName}() threw exception", ex)
throw ex
}
} else {
""
}
})
})
}
private def initPostAnnotation(post: Post, controllerAnn: Pagelet, m: Method, c: Class[_]): Unit = {
Spark.post(controllerAnn.path() + post.path(), (req: Request, res: Response) => {
val pagelet = c.getDeclaredConstructor().newInstance().asInstanceOf[IPagelet]
      // TODO: create the connection only when it is actually needed
DBHelper.withDbConnection(conn => {
val dataClass = getDataClass(m)
if (post.validate() && dataClass.isDefined) {
if (req.raw().getContentType.startsWith("multipart/form-data")) {
req.attribute("org.eclipse.jetty.multipartConfig", new MultipartConfigElement("/temp"))
}
val formData = EntityValidator.createFormData(req, dataClass.get)
EntityValidator.validate(dataClass.get.getSimpleName, formData) match {
case Left(result) =>
if ("true".equals(req.queryParams(METHOD_VALDIATE))) {
gson.toJson(result)
} else {
result.foreach(r => PageContext.getInstance().messages += r)
val rp = formData.map(kv => (kv._1.getName, kv._2))
val path = replacePathParams(controllerAnn.path() + post.path(), req.params.asScala)
pagelet.redirect(req, res, path, rp)
}
case Right(_) =>
if ("true".equals(req.queryParams(METHOD_VALDIATE))) {
gson.toJson(true)
} else {
val methodArgs = createMethodArgs(m, req, res, conn, pagelet)
invokePost(req, res, pagelet, m, post, methodArgs)
}
}
} else {
val methodArgs = createMethodArgs(m, req, res, conn, pagelet)
invokePost(req, res, pagelet, m, post, methodArgs)
}
})
})
}
@tailrec
private def replacePathParams(path: String, params: mutable.Map[String, String]): String = {
if (params.isEmpty) {
path
} else {
val head = params.head
val replaced = path.replace(head._1, head._2)
replacePathParams(replaced, params.tail)
}
}
private def invokePost(req: Request, res: Response, controller: IPagelet, m: Method, post: Post, methodArgs: MethodArgs) = {
if (notRedirected(res)) {
m.getReturnType match {
case STRING =>
m.invoke(controller, methodArgs.array: _*)
case _ =>
try {
val context = m.invoke(controller, methodArgs.array: _*).asInstanceOf[mutable.Map[String, Any]]
if (notRedirected(res)) {
controller.render(context, post.template(), req)
} else {
""
}
} catch {
case ex: Exception =>
Logger.error(s"${controller.getClass.getCanonicalName}.${m.getName}() threw exception", ex)
throw ex
}
}
} else {
""
}
}
  private def notRedirected(res: Response) =
    res.raw().getStatus != 302 && res.raw().getStatus != 301
def getDataClass(m: Method): Option[Class[_]] = {
m.getParameterTypes.find(c => c != REQUEST_CLS && c != RESPONSE_CLS && c != CONNECTION_CLS)
}
private[dynamic] def createMethodArgs(m: Method, req: Request, res: Response, conn: Connection, pagelet: AnyRef): MethodArgs = {
var dataRef: Any = null
val arr = m.getParameterTypes.map {
case REQUEST_CLS => req
case RESPONSE_CLS => res
case CONNECTION_CLS => conn
case t =>
val enclosingCls = t.getEnclosingClass
val constructor = if (enclosingCls != null) t.getDeclaredConstructor(enclosingCls) else t.getDeclaredConstructor()
val data = (if (enclosingCls != null) constructor.newInstance(pagelet) else constructor.newInstance()).asInstanceOf[AnyRef]
Logger.debug(s"Creating pagelet data: {${t.getSimpleName}}")
t.getDeclaredFields.foreach(f => {
try {
if (req.queryMap(f.getName).hasValue) {
val value = req.queryMap(f.getName).values().mkString("\\r\\n")
f.setAccessible(true)
f.set(data, value.conform(f))
}
} catch {
case ex: Exception => Logger.error("Exception when setting pagelet data value", ex)
}
})
dataRef = data
data
}.asInstanceOf[Array[Any]]
new MethodArgs(arr, dataRef)
}
class MethodArgs(val array: Array[Any], val data: Any)
}
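// A hypothetical pagelet wired by the initializer above, to illustrate the routing flow:
// @Pagelet supplies the path prefix, @Get/@Post mark handler methods, and extra parameters
// (Request, Response, Connection, or a data class) are injected by createMethodArgs.
// Annotation parameter names follow their accessor use above (path(), template()); treat this
// as a sketch of the wiring, not the canonical API.
//
// @Pagelet(path = "/hello")
// class HelloPagelet extends IPagelet {
//   @Get(path = "", template = "hello")
//   def hello(req: Request): mutable.Map[String, Any] =
//     mutable.Map("name" -> "world")
// }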
|
kamenitxan/Jakon
|
modules/backend/src/main/scala/cz/kamenitxan/jakon/core/dynamic/PageletInitializer.scala
|
Scala
|
bsd-3-clause
| 7,437
|
package giter8
import org.scalatest.{FlatSpec, Matchers}
class GitRepositoryTest extends FlatSpec with Matchers {
"JGit" should "resolve repo name correctly" in {
val testCases: Map[String, GitRepository] = Map(
"git@some.path.com/repo" -> GitRepository.Remote("git@some.path.com/repo"),
"git://some.path.com/repo" -> GitRepository.Remote("git://some.path.com/repo"),
"https://some.path.com/repo" -> GitRepository.Remote("https://some.path.com/repo"),
"http://some.path.com/repo" -> GitRepository.Remote("http://some.path.com/repo"),
"ssh://some.path.com/repo" -> GitRepository.Remote("ssh://some.path.com/repo"),
"file://relative/path" -> GitRepository.Local("relative/path"),
"file:///home/foo/bar" -> GitRepository.Local("/home/foo/bar"),
"foo/bar" -> GitRepository.GitHub("foo", "bar")
)
testCases foreach { testCase =>
val string = testCase._1
val expected = testCase._2
GitRepository.fromString(string) shouldBe Right(expected)
}
}
}
|
wolfendale/giter8
|
library/src/test/scala/giter8/GitRepositoryTest.scala
|
Scala
|
apache-2.0
| 1,033
|
/**
* Copyright (C) 2016 Pau Carré Cardona - All Rights Reserved
* You may use, distribute and modify this code under the
* terms of the Apache License v2.0 (http://www.apache.org/licenses/LICENSE-2.0.txt).
*/
package db
import java.util.concurrent.TimeUnit
import play.api.db.slick.{DatabaseConfigProvider, HasDatabaseConfig}
import slick.driver.H2Driver.api._
import slick.driver.JdbcProfile
import slick.jdbc.JdbcBackend
import slick.jdbc.meta.MTable
import play.api.{Play, Logger}
import scala.concurrent.{Future, Await}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
object BoundingBoxQueryActions extends App with HasDatabaseConfig[JdbcProfile] {
lazy val dbConfig = DatabaseConfigProvider.get[JdbcProfile]("bounding_box")(Play.current)
lazy val logger: Logger = Logger(this.getClass())
lazy val boundingBoxTableQuery = TableQuery[BoundingBoxTable]
def getBoundingBoxByFileName(name: String) = {
val selectByName = boundingBoxTableQuery.filter{ boundingBoxTable =>
boundingBoxTable.name === name
}
db.run(selectByName.result.headOption)
}
def getAllBoundingBoxes() = db.run(boundingBoxTableQuery.result)
def insertOrUpdate(boundingBox: BoundingBox) = {
val insertOrUpdateAction = boundingBoxTableQuery.insertOrUpdate(boundingBox)
val insertOrUpdateResult = db.run(insertOrUpdateAction)
    insertOrUpdateResult.onFailure { case err =>
      logger.error("Unable to insert bounding box.", err)
    }
}
}
|
paucarre/tiefvision
|
src/scala/tiefvision-web/app/db/BoundingBoxQueryActions.scala
|
Scala
|
apache-2.0
| 1,549
|
import scala.reflect.macros.whitebox.Context
import scala.language.experimental.macros
import scala.annotation.StaticAnnotation
object helloMacro {
def impl(c: Context)(annottees: c.Expr[Any]*): c.Expr[Any] = {
import c.universe._
val result = {
annottees.map(_.tree).toList match {
case ModuleDef(mods, name, Template(parents, self, body)) :: Nil =>
val helloMethod = DefDef(NoMods,
TermName("hello"),
List(),
List(List()),
TypeTree(),
Literal(Constant("hello")))
ModuleDef(mods, name, Template(parents, self, body :+ helloMethod))
}
}
c.Expr[Any](result)
}
}
class hello extends StaticAnnotation {
def macroTransform(annottees: Any*): Any = macro helloMacro.impl
}
package pkg {
class hello extends StaticAnnotation {
def macroTransform(annottees: Any*): Any = macro helloMacro.impl
}
}
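// Usage sketch (requires the macro paradise compiler plugin and a separate compilation unit):
// annotating an object with @hello makes the macro append a `hello` method returning "hello".
//
// @hello object Greeter
// Greeter.hello() // == "hello"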
|
xeno-by/paradise
|
tests/reflect/src/main/scala/hello.scala
|
Scala
|
bsd-3-clause
| 1,037
|
case class C[T](x: T)
case class CS(xs: C[_]*)
// t3856
object Test {
val x = CS(C(5), C("abc")) match { case CS(C(5), xs : _*) => xs }
println(x)
}
|
yusuke2255/dotty
|
tests/pending/pos/t3856.scala
|
Scala
|
bsd-3-clause
| 155
|
package monocle.syntax
import monocle._
import monocle.function.At
trait AppliedGetter[S, A] extends AppliedFold[S, A] {
override def optic: Getter[S, A]
def get: A = optic.get(value)
override def some[A1](implicit ev1: A =:= Option[A1]): AppliedFold[S, A1] =
adapt[Option[A1]].andThen(std.option.some[A1])
override private[monocle] def adapt[A1](implicit evA: A =:= A1): AppliedGetter[S, A1] =
evA.substituteCo[AppliedGetter[S, *]](this)
def andThen[B](other: Getter[A, B]): AppliedGetter[S, B] =
AppliedGetter(value, optic.andThen(other))
}
object AppliedGetter {
def apply[S, A](_value: S, _optic: Getter[S, A]): AppliedGetter[S, A] =
new AppliedGetter[S, A] {
val value: S = _value
val optic: Getter[S, A] = _optic
}
implicit def appliedGetterSyntax[S, A](self: AppliedGetter[S, A]): AppliedGetterSyntax[S, A] =
new AppliedGetterSyntax(self)
}
final case class AppliedGetterSyntax[S, A](private val self: AppliedGetter[S, A]) extends AnyVal {
def withDefault[A1](defaultValue: A1)(implicit evOpt: A =:= Option[A1]): AppliedGetter[S, A1] =
self.adapt[Option[A1]].andThen(std.option.withDefault(defaultValue))
def at[I, A1](i: I)(implicit evAt: At[A, i.type, A1]): AppliedGetter[S, A1] =
self.andThen(evAt.at(i))
/** compose a [[Fold]] with a [[Fold]] */
@deprecated("use andThen", since = "3.0.0-M1")
def composeFold[C](other: Fold[A, C]): AppliedFold[S, C] =
self.andThen(other)
/** compose a [[Fold]] with a [[Getter]] */
@deprecated("use andThen", since = "3.0.0-M1")
def composeGetter[C](other: Getter[A, C]): AppliedGetter[S, C] =
self.andThen(other)
/** compose a [[Fold]] with a [[PTraversal]] */
@deprecated("use andThen", since = "3.0.0-M1")
def composeTraversal[B, C, D](other: PTraversal[A, B, C, D]): AppliedFold[S, C] =
self.andThen(other)
/** compose a [[Fold]] with a [[POptional]] */
@deprecated("use andThen", since = "3.0.0-M1")
def composeOptional[B, C, D](other: POptional[A, B, C, D]): AppliedFold[S, C] =
self.andThen(other)
/** compose a [[Fold]] with a [[PPrism]] */
@deprecated("use andThen", since = "3.0.0-M1")
def composePrism[B, C, D](other: PPrism[A, B, C, D]): AppliedFold[S, C] =
self.andThen(other)
/** compose a [[Fold]] with a [[PLens]] */
@deprecated("use andThen", since = "3.0.0-M1")
def composeLens[B, C, D](other: PLens[A, B, C, D]): AppliedGetter[S, C] =
self.andThen(other)
/** compose a [[Fold]] with a [[PIso]] */
@deprecated("use andThen", since = "3.0.0-M1")
def composeIso[B, C, D](other: PIso[A, B, C, D]): AppliedGetter[S, C] =
self.andThen(other)
/** alias to composeTraversal */
@deprecated("use andThen", since = "3.0.0-M1")
def ^|->>[B, C, D](other: PTraversal[A, B, C, D]): AppliedFold[S, C] =
self.andThen(other)
/** alias to composeOptional */
@deprecated("use andThen", since = "3.0.0-M1")
def ^|-?[B, C, D](other: POptional[A, B, C, D]): AppliedFold[S, C] =
self.andThen(other)
/** alias to composePrism */
@deprecated("use andThen", since = "3.0.0-M1")
def ^<-?[B, C, D](other: PPrism[A, B, C, D]): AppliedFold[S, C] =
self.andThen(other)
/** alias to composeLens */
@deprecated("use andThen", since = "3.0.0-M1")
def ^|->[B, C, D](other: PLens[A, B, C, D]): AppliedGetter[S, C] =
self.andThen(other)
/** alias to composeIso */
@deprecated("use andThen", since = "3.0.0-M1")
def ^<->[B, C, D](other: PIso[A, B, C, D]): AppliedGetter[S, C] =
self.andThen(other)
}
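// Usage sketch for AppliedGetter: bind a concrete value to a Getter and read through it.
// Getter.apply(get: S => A) from monocle core is assumed in scope via the monocle._ import.
object AppliedGetterExample {
  def main(args: Array[String]): Unit = {
    val length: Getter[String, Int] = Getter[String, Int](_.length)
    val applied = AppliedGetter("monocle", length)
    assert(applied.get == 7)
  }
}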
|
julien-truffaut/Monocle
|
core/shared/src/main/scala/monocle/syntax/AppliedGetter.scala
|
Scala
|
mit
| 3,542
|
import collection.GenTraversable
import scala.annotation.tailrec
import scala.util.matching.Regex
trait Template {
val children: List[Template] = List.empty
protected def childrenContent =
children.map(_.toString).map(_.split("\n").map(" " + _).mkString("\n")).mkString("\n") + "\n"
}
trait ClassTemplate extends Template {
val name: String
val extendName: Option[String] = None
val withList: List[String] = List.empty
private def getExtend =
extendName match {
case Some(extendName) => "extends " + extendName
case None => ""
}
private def getWith =
withList.map("with " + _).mkString(" ")
override def toString =
"class " + name + " " + getExtend + " " + getWith + " {\\n" +
childrenContent +
"}"
}
class ScalaFileTemplate(packageName: Option[String] = None, importList: List[String] = List.empty, definitionList: List[Template]) extends Template {
override val children = definitionList
private def getPackage =
packageName match {
case Some(packageName) => "package " + packageName + "\\n\\n"
case None => ""
}
private def getImports =
importList.map("import " + _).mkString("\\n")
override protected def childrenContent =
children.map(_.toString).map(_.split("\n").mkString("\n")).mkString("\n") + "\n"
override def toString =
getPackage +
getImports + "\\n" +
childrenContent
}
class SingleClassFile(packageName: Option[String] = None, importList: List[String] = List.empty, classTemplate: ClassTemplate)
extends ScalaFileTemplate(packageName, importList, List(classTemplate))
class CompositeTemplate(templates: List[Template], combinator: String = "") extends Template {
override def toString = templates.map(_.toString).mkString(combinator)
}
class MessageTemplate(autoQuoteString: Boolean) extends Template {
def wrapStringIfNecessary(value: Any): String =
value match {
case strValue: String if autoQuoteString => "\\\"" + strValue + "\\\""
case other => other.toString
}
}
class SimpleMessageTemplate(message: String, autoQuoteString: Boolean = true) extends MessageTemplate(autoQuoteString) {
override def toString = message
}
abstract class LeftMessageTemplate(left: Any, autoQuoteString: Boolean = true) extends MessageTemplate(autoQuoteString) {
val message: String
override def toString =
wrapStringIfNecessary(left) + message
}
abstract class LeftRightMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftMessageTemplate(left, autoQuoteString) {
val message: String
override def toString =
wrapStringIfNecessary(left) + message + wrapStringIfNecessary(right)
}
class EqualedMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " equaled "
}
class DidNotEqualMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " did not equal "
}
class WasEqualToMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " was equal to "
}
class WasNotEqualToMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " was not equal to "
}
class WasLessThanMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " was less than "
}
class WasNotLessThanMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " was not less than "
}
class WasLessThanOrEqualToMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " was less than or equal to "
}
class WasNotLessThanOrEqualToMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " was not less than or equal to "
}
class WasGreaterThanMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " was greater than "
}
class WasNotGreaterThanMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " was not greater than "
}
class WasGreaterThanOrEqualToMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " was greater than or equal to "
}
class WasNotGreaterThanOrEqualToMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " was not greater than or equal to "
}
class WasNullMessageTemplate extends SimpleMessageTemplate("The reference was null")
class WasNotNullMessageTemplate(left: Any, autoQuoteString: Boolean = true) extends LeftMessageTemplate(left, autoQuoteString) {
val message = " was not null"
}
class WasMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " was "
}
class WasNotMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " was not "
}
class WasAMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " was a "
}
class WasNotAMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " was not a "
}
class WasAnMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " was an "
}
class WasNotAnMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " was not an "
}
class WasTheSameInstanceAsMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " was the same instance as "
}
class WasNotTheSameInstanceAsMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " was not the same instance as "
}
class PropertyHadUnexpectedValueMessageTemplate(propertyName: String, expectedValue: Any, value: Any, target: Any, autoQuoteString: Boolean = true) extends MessageTemplate(autoQuoteString) {
override def toString =
"The " + propertyName + " property had value " + wrapStringIfNecessary(value) + ", instead of its expected value " + wrapStringIfNecessary(expectedValue) + ", on object " + wrapStringIfNecessary(target)
}
class PropertyHadExpectedValueMessageTemplate(propertyName: String, expectedValue: Any, target: Any, autoQuoteString: Boolean = true) extends MessageTemplate(autoQuoteString) {
override def toString =
"The " + propertyName + " property had its expected value " + wrapStringIfNecessary(expectedValue) + ", on object " + wrapStringIfNecessary(target)
}
class HadLengthMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " had length "
}
class DidNotHaveLengthMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " did not have length "
}
class HadSizeMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " had size "
}
class DidNotHaveSizeMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " did not have size "
}
class DidNotStartWithSubstringMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " did not start with substring "
}
class StartedWithSubstringMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " started with substring "
}
class DidNotEndWithSubstringMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " did not end with substring "
}
class EndedWithSubstringMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " ended with substring "
}
class DidNotIncludeSubstringMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " did not include substring "
}
class IncludedSubstringMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " included substring "
}
class DidNotStartWithRegexMessageTemplate(left: Any, right: Regex, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " did not start with a substring that matched the regular expression "
}
class StartedWithRegexMessageTemplate(left: Any, right: Regex, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " started with a substring that matched the regular expression "
}
class DidNotEndWithRegexMessageTemplate(left: Any, right: Regex, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " did not end with a substring that matched the regular expression "
}
class EndedWithRegexMessageTemplate(left: Any, right: Regex, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " ended with a substring that matched the regular expression "
}
class DidNotIncludeRegexMessageTemplate(left: Any, right: Regex, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " did not include substring that matched regex "
}
class IncludedRegexMessageTemplate(left: Any, right: Regex, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " included substring that matched regex "
}
class DidNotFullyMatchRegexMessageTemplate(left: Any, right: Regex, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " did not fully match the regular expression "
}
class FullyMatchRegexMessageTemplate(left: Any, right: Regex, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " fully matched the regular expression "
}
class DidNotContainElementMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " did not contain element "
}
class ContainedElementMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " contained element "
}
class DidNotContainKeyMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " did not contain key "
}
class ContainedKeyMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " contained key "
}
class DidNotContainValueMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " did not contain value "
}
class ContainedValueMessageTemplate(left: Any, right: Any, autoQuoteString: Boolean = true) extends LeftRightMessageTemplate(left, right, autoQuoteString) {
val message = " contained value "
}
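// Usage sketch (illustrative, not part of the original source): a template
// renders by concatenating its operands; string operands are wrapped in
// escaped quotes (suitable for embedding in generated Scala source) when
// autoQuoteString is enabled.
object MessageTemplateExample {
  val m = new HadSizeMessageTemplate(List(1, 2), 2, autoQuoteString = false)
  // m.toString == "List(1, 2) had size 2"
}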
object Generator {
import java.io.{File, FileWriter, BufferedWriter}
def getIndex[T](xs: GenTraversable[T], value: T): Int = {
@tailrec
def getIndexAcc[T](itr: Iterator[T], count: Int): Int = {
if (itr.hasNext) {
val next = itr.next
if (next == value)
count
else
getIndexAcc(itr, count + 1)
}
else
-1
}
getIndexAcc(xs.toIterator, 0)
}
@tailrec
final def getNext[T](itr: Iterator[T], predicate: T => Boolean): T = {
val next = itr.next
if (predicate(next))
next
else
getNext(itr, predicate)
}
def getFirst[T](col: GenTraversable[T], predicate: T => Boolean): T =
getNext(col.toIterator, predicate)
@tailrec
final def getNextNot[T](itr: Iterator[T], predicate: T => Boolean): T = {
val next = itr.next
if (!predicate(next))
next
else
getNextNot(itr, predicate)
}
def getFirstNot[T](col: GenTraversable[T], predicate: T => Boolean): T =
getNextNot(col.toIterator, predicate)
def genFile(targetFile: File, template: ScalaFileTemplate) = {
val content = template.toString
val writer = new BufferedWriter(new FileWriter(targetFile))
try {
writer.write(content)
}
finally {
writer.flush()
writer.close()
}
}
}
class GenFramework
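// Usage sketch (package, import and file path are illustrative): compose the
// templates above and emit a Scala source file via Generator.genFile.
object GenFrameworkExample {
  def main(args: Array[String]): Unit = {
    val template = new ScalaFileTemplate(
      packageName = Some("demo"),
      importList = List("scala.util.Try"),
      definitionList = List(new ClassTemplate { val name = "Generated" }))
    Generator.genFile(new java.io.File("target/Generated.scala"), template)
  }
}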
|
svn2github/scalatest
|
project/GenFramework.scala
|
Scala
|
apache-2.0
| 14,457
|
package jigg.pipeline
/*
Copyright 2013-2017 Hiroshi Noji
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import java.util.Properties
import java.io.{ByteArrayOutputStream, PrintStream}
import scala.xml.{XML, Node}
import scala.xml.dtd.DocType
import scala.collection.JavaConverters._
import scala.concurrent._
import scala.concurrent.duration._
import scala.io.StdIn
import jigg.util.LogUtil.{ track, multipleTrack }
import jigg.util.{PropertiesUtil => PU, IOUtil, XMLUtil, JSONUtil}
import akka.Done
import akka.actor.{Actor, ActorRef, ActorSystem, Props}
import akka.event.Logging
import akka.http.scaladsl.coding.Deflate
import akka.http.scaladsl.Http
import akka.http.scaladsl.marshalling.ToResponseMarshaller
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.StatusCodes.MovedPermanently
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.unmarshalling.FromRequestUnmarshaller
import akka.pattern.ask
import akka.stream.ActorMaterializer
import akka.util.Timeout
class PipelineActor extends Actor {
import PipelineServer.Params
val log = Logging(context.system, this)
var lastParams: Params = null
var lastPipeline: Pipeline = null
def receive = {
case params: Params => {
val coreParams = removeNonCore(params)
// If params are empty, use the same params as before.
if (coreParams != lastParams && !coreParams.isEmpty) reset(coreParams)
sender ! lastPipeline
}
}
def removeNonCore(params: Params) = {
// The server is agnostic to the changes of these properties
// (not creating a new pipeline).
val noncore = Seq("props", "file", "output", "help", "outputFormat",
"checkRequirement", "inputFormat")
Params(params.kvs filter { case (k, v) => !(noncore contains k) })
}
def reset(params: Params) = {
lastParams = params
val props = new Properties
for ((k, v) <- params.kvs) props.setProperty(k, v)
lastPipeline = new Pipeline(props)
log.info("Pipeline is updated. New property: " + props)
log.info("Number of threads: " + lastPipeline.nThreads)
lastPipeline
}
}
class PipelineServer(val properties: Properties = new Properties) extends PropsHolder {
def prop(key: String) = PU.findProperty(key, properties)
@Prop(gloss="Port to serve on (default: 8080)") var port = 8080
@Prop(gloss="Host to serve on (default: localhost. Use 0.0.0.0 to make public)") var host = "localhost"
readProps()
def printHelp(os: PrintStream) = os.println(this.description)
override def description: String = s"""Usage:
${super.description}
JiggServer can be used as an interface of Jigg to other languages such as Python.
See README in "python/pyjigg" for this usage.
Another usage via curl is that:
> curl --data-urlencode 'annotators=corenlp[tokenize,ssplit]' \\\\
--data-urlencode 'q=Please annotate me!' \\\\
'http://localhost:8080/annotate?outputFormat=json'
The data with the key "q" is treated as an input text. Multiple "q"s in a query
are allowed and are concatenated.
Currently this server supports only the POST method, and the input text should be raw text
(not XML or JSON; those formats will be supported in the future). For each call, users must
specify the properties as parameters.
Annotating the first input may be very slow because all annotator models are loaded, which
may take 30 ~ 60 secs if you use heavy components of Stanford CoreNLP (e.g., coref).
Annotating subsequent inputs should be reasonably fast, but note that if you call the server
with parameters different from the last call, the internal pipeline will be reconstructed
and the loading time will be incurred again.
To see the valid options, call "jigg.pipeline.Pipeline -help", or after starting the
server, access to e.g.,
http://localhost:8080/help
which displays the help message of the internal pipeline. One can also see the specific
help for each annotator with:
http://localhost:8080/help/<annotator name>
where <annotator name> may be specific name such as corenlp or kuromoji."""
def run() = {
case class OutputType(format: String)
implicit val timeout = Timeout(5.seconds)
implicit val system = ActorSystem("jigg-server")
implicit val materializer = ActorMaterializer()
// needed for the future flatMap/onComplete in the end
implicit val executionContext = system.dispatcher
val actor = system.actorOf(Props[PipelineActor])
val route =
path("annotate") {
post {
parameterSeq { _params =>
formFieldSeq { _forms =>
val (textSeq, formParamSeq) = _forms.partition(a => a._2 == "" || a._1 == "q")
val text = textSeq map {
case (a, "") => a
case ("q", a) => a
} mkString "\n"
val params = _params.toMap ++ formParamSeq.toMap
val maybePipeline = (actor ? PipelineServer.Params(params)).mapTo[Pipeline]
val maybeResult = maybePipeline map { pipeline =>
try {
val annotation = pipeline.annotate(text)
def outputBy(format: String): String = format match {
case "json" => JSONUtil.toJSON(annotation).toString
case _ =>
val w = new java.io.StringWriter
pipeline.writeTo(w, annotation)
w.toString
}
params get "outputFormat" match {
case Some(a) if a == "json" || a == "xml" => outputBy(a)
case _ => outputBy("xml")
}
} catch { case e: Throwable =>
val sw = new java.io.StringWriter
e.printStackTrace(new java.io.PrintWriter(sw))
sys.error(sw.toString)
}
}
complete(maybeResult)
}
}
}
} ~ pathPrefix("help") {
pathEnd {
complete(mkHelp("true"))
} ~
pathPrefix(".+".r) { annotator =>
pathEndOrSingleSlash {
def normalize(c: Char) = c match {
case '<' => '['
case '>' => ']'
case _ => c
}
complete(mkHelp(annotator.map(normalize)))
}
}
}
val bindingFuture = Http().bindAndHandle(route, host, port)
println(s"Server online at $host:$port/\\nPress Ctrl-C to stop...")
Await.ready(
bindingFuture.flatMap(_ ⇒ waitForShutdownSignal(system)), // chaining both futures to fail fast
Duration.Inf) // It's waiting forever because maybe there is never a shutdown signal
bindingFuture
.flatMap(_.unbind()) // trigger unbinding from the port
.onComplete(_ => system.terminate()) // and shutdown when done
}
protected def waitForShutdownSignal(system: ActorSystem)(implicit ec: ExecutionContext): Future[Done] = {
val promise = Promise[Done]()
sys.addShutdownHook {
promise.trySuccess(Done)
}
promise.future
}
def mkHelp(annotator: String): String = {
val props = new Properties
props.setProperty("help", annotator)
val pipeline = new Pipeline(props)
val s = new ByteArrayOutputStream
pipeline.printHelp(new PrintStream(s))
s.toString("UTF8")
}
}
object PipelineServer {
case class Params(kvs: Map[String, String]) {
def isEmpty() = kvs.isEmpty
}
def main(args: Array[String]): Unit = {
val props = jigg.util.ArgumentsParser.parse(args.toList)
val server = new PipelineServer(props)
PU.findProperty("help", props) match {
case Some(help) =>
server.printHelp(System.out)
case None => server.run()
}
}
}
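// Launch sketch (jar name and flags are illustrative): the server is started
// via the main method above, then queried over HTTP, e.g.
//   java -cp jigg.jar jigg.pipeline.PipelineServer -host 0.0.0.0 -port 8080
//   curl --data-urlencode 'annotators=corenlp[tokenize,ssplit]' \
//        --data-urlencode 'q=Please annotate me!' 'http://localhost:8080/annotate'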
|
mynlp/jigg
|
src/main/scala/jigg/pipeline/PipelineServer.scala
|
Scala
|
apache-2.0
| 8,252
|
/* Copyright 2009-2016 EPFL, Lausanne */
package leon
package synthesis
package rules
import purescala.Expressions._
import purescala.ExprOps._
import purescala.Constructors._
import solvers._
import scala.concurrent.duration._
case object OptimisticGround extends Rule("Optimistic Ground") {
def instantiateOn(implicit hctx: SearchContext, p: Problem): Traversable[RuleInstantiation] = {
if (p.as.nonEmpty && p.xs.nonEmpty && !p.isTestBased) {
val res = new RuleInstantiation(this.name) {
def apply(hctx: SearchContext) = {
val solver = SimpleSolverAPI(hctx.solverFactory.withTimeout(50.millis))
val xss = p.xs.toSet
val ass = p.as.toSet
var i = 0
val maxTries = 3
var result: Option[RuleApplication] = None
var continue = true
var predicates: Seq[Expr] = Seq()
while (result.isEmpty && i < maxTries && continue) {
val phi = p.pc and andJoin(p.phi +: predicates)
val notPhi = p.pc and andJoin(not(p.phi) +: predicates)
//println("SOLVING " + phi + " ...")
solver.solveSAT(phi) match {
case (Some(true), satModel) =>
val newNotPhi = valuateWithModelIn(notPhi, xss, satModel)
//println("REFUTING " + Not(newNotPhi) + "...")
solver.solveSAT(newNotPhi) match {
case (Some(true), invalidModel) =>
// Found values for the as under which these xs violate the spec; refine the predicates
predicates = valuateWithModelIn(phi, ass, invalidModel) +: predicates
case (Some(false), _) =>
// Model appears valid, but it might be a fake expression (generic values)
val outExpr = tupleWrap(p.xs.map(valuateWithModel(satModel)))
if (!isRealExpr(outExpr)) {
// The expression contains a generic value, so skip it and refine
predicates = valuateWithModelIn(phi, xss, satModel) +: predicates
} else {
result = Some(RuleClosed(Solution(BooleanLiteral(true), Set(), outExpr)))
}
case _ =>
continue = false
result = None
}
case (Some(false), _) =>
if (predicates.isEmpty) {
result = Some(RuleClosed(Solution.UNSAT(p)))
} else {
continue = false
result = None
}
case _ =>
continue = false
result = None
}
i += 1
}
result.getOrElse(RuleFailed())
}
}
List(res)
} else {
Nil
}
}
}
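// Worked trace (schematic): for an output-only spec phi = "x == 42",
// solveSAT(phi) yields the model {x -> 42}; substituting x = 42 into notPhi
// gives "42 != 42", which is UNSAT, so the candidate is ground-valid and the
// rule closes with the solution x = 42. When the refutation is SAT instead,
// the invalid model is valuated into phi and prepended to `predicates`
// before the loop retries.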
|
epfl-lara/leon
|
src/main/scala/leon/synthesis/rules/OptimisticGround.scala
|
Scala
|
gpl-3.0
| 2,824
|
/**
* Copyright 2014 Gianluca Amato <gamato@unich.it>
*
* This file is part of JANDOM: JVM-based Analyzer for Numerical DOMains
* JANDOM is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JANDOM is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with JANDOM. If not, see <http://www.gnu.org/licenses/>.
*/
package it.unich.jandom.objectmodels
/**
* This is a trivial object model with a single non-primitive type, no fields and no arrays.
* @author Gianluca Amato <gamato@unich.it>
*/
object TrivialObjectModel extends TreeObjectModel with NoArrays with ObjectModelHelper {
self: ObjectModel =>
type Type = Unit
type Field = Unit
def declaredFields(t: Type) = Set()
def typeOf(f: Field) = {}
def lteq(t1: Type, t2: Type) = true
def parents(t: Type) = Set()
def children(t: Type) = Set()
def isPrimitive(t: Type) = false
def isConcrete(t: Type) = true
override def concreteApprox(t1: Type, t2: Type) = Some(())
override def concreteApprox(ts: Iterable[Type]) = if (ts.isEmpty) None else Some(())
override def mayShare(t1: Type, t2: Type) = true
override def mayBeAliases(t1: Type, t2: Type) = true
}
|
rubino22/JDBeta
|
core/src/test/scala/it/unich/jandom/objectmodels/TrivialObjectModel.scala
|
Scala
|
lgpl-3.0
| 1,591
|
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package controllers
import play.api.mvc._
class Application extends Controller {
def index = Action(Ok)
}
|
aradchykov/playframework
|
framework/src/sbt-plugin/src/sbt-test/routes-compiler-plugin/source-mapping/Application.scala
|
Scala
|
apache-2.0
| 187
|
package com.twitter.finagle.loadbalancer.aperture
import com.twitter.app.GlobalFlag
import com.twitter.finagle._
import com.twitter.finagle.Address.Inet
import com.twitter.finagle.loadbalancer.p2c.P2CPick
import com.twitter.finagle.loadbalancer.{Balancer, DistributorT, NodeT}
import com.twitter.finagle.util.Rng
import com.twitter.logging.{Level, Logger}
import com.twitter.util.{Future, Time}
import scala.collection.immutable.VectorBuilder
import scala.collection.mutable.ListBuffer
import scala.util.hashing.MurmurHash3
// Temporary flag used to disable dynamic expansion of the deterministic aperture width
// due to load. This flag is only for testing and to provide a coarse grained escape hatch
// and should not be considered a standard configuration option. After it has been demonstrated
// that not dynamically resizing the aperture is preferable for deterministic aperture, the default
// will be changed to `true` and a short time after that the flag will be removed altogether.
private object staticDetermisticApertureWidth
extends GlobalFlag[Boolean](
default = true,
help = "Deterministic Aperture doesn't increase its aperture"
)
private object Aperture {
private[this] val log = Logger.get()
// When picking a min aperture, we want to ensure that p2c can actually converge
// when there are weights present. Based on empirical measurements, weights are well
// respected when we have 4 or more servers.
// The root of the problem is that you can't send a fractional request to the (potentially)
// fractionally weighted edges of the aperture. The following thought experiment illustrates
// this.
// First, we consider the limiting case of only one weighted node. If we only have one node
// to choose from, it's impossible to respect the weight since we will always return the
// single node – we need at least 2 nodes in this case.
// Next, we extend the thought experiment to the case of pick2. How does the probability of
// picking the second node change? Consider the case of 3 nodes of weights [1, 1, 0.5]. The
// probability of node 2 being picked on the first try is 0.5/2.5, but it changes for the
// second pick to 0.5/1.5. This shifting of probability causes a drift in the probability
// of a node being either of the two picked and in the case of the three nodes above, the
// probability of being picked either first or second is ~0.61 relative to nodes 0 or 1,
// meaningfully different than the desired value of 0.50.
// Next, we extrapolate this to the case of a large number of nodes. As the number of nodes
// in the aperture increases the numerator (a node's weight) of the probability stays the same
// but denominator (the sum of weights) increases. As N reaches infinity, the difference in
// probability between being picked first or second converges to 0, restoring the probabilities
// to what we expect. Running the same simulation with N nodes where the last node has 0.5
// weight results in the following simulated probabilities (P) relative to nodes with weight 1
// of picking the last node (weight 0.5) for either the first or second pick:
// N 2 3 4 6 10 10000
// P 1.0 0.61 0.56 0.53 0.52 0.50
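// Worked check of the N = 3 entry above (weights [1, 1, 0.5]): the last node
// is picked first or second with probability 0.5/2.5 + (2/2.5) * (0.5/1.5)
// ~= 0.467, versus ~0.767 for a weight-1 node, a ratio of ~0.61 as tabulated.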
// While 4 healthy nodes have been determined to be sufficient for the p2c picking algorithm,
// it is susceptible to finding its aperture without any healthy nodes. While this is rare
// in isolation, it becomes more likely when many such small apertures are present.
// Therefore, we've set the min to 12 to further decrease the probability of having an
// aperture without any healthy nodes.
// Note: the flag will be removed and replaced with a constant after tuning.
private val MinDeterministicAperture: Int = {
val min = minDeterminsticAperture()
if (1 < min) min
else {
log.warning(
s"Unexpectedly low minimum d-aperture encountered: $min. " +
s"Check your configuration. Defaulting to 12."
)
12
}
}
// Cache the boolean value so that we don't need to pay the cost of the flag every invocation.
private val staticDAperture: Boolean = staticDetermisticApertureWidth()
}
/**
* The aperture distributor balances load onto a window, the aperture, of
* underlying capacity. The distributor exposes a control mechanism so that a
* controller can adjust the aperture according to load conditions.
*
* The window contains a number of discrete serving units, one for each
* node. No load metric is prescribed: this can be mixed in separately.
*
* The underlying nodes are arranged in a consistent fashion: an
* aperture of a given size always refers to the same set of nodes; a
* smaller aperture to a subset of those nodes so long as the nodes are of
* equal `status` (i.e. unhealthy nodes are de-prioritized). Thus, it is
* relatively harmless to adjust apertures frequently, since underlying nodes
* are typically backed by pools, and will be warm on average.
*/
private[loadbalancer] trait Aperture[Req, Rep] extends Balancer[Req, Rep] { self =>
import ProcessCoordinate._
import Aperture._
protected type Node <: ApertureNode
protected trait ApertureNode extends NodeT[Req, Rep] {
/**
* A token is a random integer associated with an Aperture node.
* It persists through node updates, but is not necessarily
* unique. Aperture uses this token to order the nodes when
* deterministic ordering is not enabled or available. Since
* the token is assigned at Node creation, this guarantees
* a stable order across distributor rebuilds.
*/
val token: Int = rng.nextInt()
}
/**
* The random number generator used to pick two nodes for
* comparison – since aperture uses p2c for selection.
*/
protected def rng: Rng
/**
* The minimum aperture as specified by the user config. Note this value is advisory
* and the distributor may actually derive a new min based on this. See `minUnits`
* for more details.
*/
protected def minAperture: Int
/**
* Enables [[Aperture]] to read coordinate data from [[ProcessCoordinate]]
* to derive an ordering for the endpoints used by this [[Balancer]] instance.
*/
protected def useDeterministicOrdering: Option[Boolean]
/**
* Adjust the aperture by `n` serving units.
*
* Calls to this method are intrinsically racy with respect to updates and rebuilds
* and no special consideration is taken to avoid these races as feedback mechanisms
* should simply fire again should an adjustment be made to an old [[Balancer]].
*/
protected def adjust(n: Int): Unit = dist.adjust(n)
/**
* Widen the aperture by one serving unit.
*/
protected def widen(): Unit = adjust(1)
/**
* Narrow the aperture by one serving unit.
*/
protected def narrow(): Unit = adjust(-1)
/**
* The current logical aperture. This is never less than 1, or more
* than `maxUnits`.
*/
protected def logicalAperture: Int = dist.logicalAperture
/**
* The maximum aperture serving units.
*/
protected def maxUnits: Int = dist.max
/**
* The minimum aperture serving units.
*/
protected def minUnits: Int = dist.min
/**
* Label used to identify this instance when logging internal state.
*/
protected def label: String
protected def dapertureActive: Boolean = {
if (ProcessCoordinate().isEmpty) {
false
} else {
useDeterministicOrdering match {
case Some(bool) => bool
case None => true
}
}
}
@volatile private[this] var _vectorHash: Int = -1
// Make a hash of the passed in `vec` and set `vectorHash`.
// Only an Inet address of the factory is considered and all
// other address types are ignored.
private[this] def updateVectorHash(vec: Seq[Node]): Unit = {
// A specialized reimplementation of MurmurHash3.listHash
val it = vec.iterator
var n = 0
var h = MurmurHash3.arraySeed
while (it.hasNext) it.next().factory.address match {
case Inet(addr, _) if !addr.isUnresolved =>
val d = MurmurHash3.bytesHash(addr.getAddress.getAddress)
h = MurmurHash3.mix(h, d)
n += 1
case _ => // no-op
}
_vectorHash = MurmurHash3.finalizeHash(h, n)
}
protected[this] def vectorHash: Int = _vectorHash
private[this] val gauges = Seq(
statsReceiver.addGauge("logical_aperture") { logicalAperture },
statsReceiver.addGauge("physical_aperture") { dist.physicalAperture },
statsReceiver.addGauge("use_deterministic_ordering") {
if (dapertureActive) 1F else 0F
},
statsReceiver.addGauge("vector_hash") { _vectorHash }
)
private[this] val coordinateUpdates = statsReceiver.counter("coordinate_updates")
private[this] val coordObservation = ProcessCoordinate.changes.respond { _ =>
// One nice side-effect of deferring to the balancers `updater` is
// that we serialize and collapse concurrent updates. So if we have a volatile
// source that is updating the coord, we are resilient to that. We could
// go even further by rate limiting the changes if we need to.
coordinateUpdates.incr()
self.rebuild()
}
private[this] def lbl = if (label.isEmpty) "<unlabelled>" else label
// `pickLog` will log on the hot path so should be enabled judiciously.
private val pickLog =
Logger.get(s"com.twitter.finagle.loadbalancer.aperture.Aperture.pick-log.$lbl")
// `rebuildLog` is used for rebuild level events which happen at a relatively low frequency.
private val rebuildLog =
Logger.get(s"com.twitter.finagle.loadbalancer.aperture.Aperture.rebuild-log.$lbl")
protected type Distributor = BaseDist
def additionalMetadata: Map[String, Any] = {
Map(
"distributor_class" -> dist.getClass.getSimpleName,
"logical_aperture_size" -> dist.logicalAperture,
"physical_aperture_size" -> dist.physicalAperture,
"min_aperture_size" -> dist.min,
"max_aperture_size" -> dist.max,
"vector_hash" -> vectorHash
) ++ dist.additionalMetadata
}
/**
* A distributor which implements the logic for controlling the size of an aperture
* but defers the implementation of pick to concrete implementations.
*
* @param vector The source vector received from a call to `rebuild`.
*
* @param initAperture The initial aperture to use.
*/
protected abstract class BaseDist(vector: Vector[Node], initAperture: Int)
extends DistributorT[Node](vector) {
type This = BaseDist
/**
* Returns the maximum size of the aperture window.
*/
final def max: Int = vector.size
/**
* Returns the minimum size of the aperture window.
*/
def min: Int = math.min(minAperture, vector.size)
// We are guaranteed that writes to aperture are serialized since
// we only expose them via the `narrow`, `widen`, etc. methods above. Those
// defer to the balancers `updater` which is serial. Therefore, we only
// need to guarantee visibility across threads and don't need to
// provide other synchronization between threads.
@volatile private[this] var _logicalAperture: Int = initAperture
// Make sure the aperture is within bounds [min, max].
adjust(0)
/**
* Returns the current logical aperture.
*/
def logicalAperture: Int = _logicalAperture
/**
* Represents how many servers `pick` will select over – which may
* differ from `logicalAperture` when using [[DeterministicAperture]].
*/
def physicalAperture: Int = logicalAperture
/**
* Adjusts the logical aperture by `n` while ensuring that it stays
* within the bounds [min, max].
*/
final def adjust(n: Int): Unit = {
_logicalAperture = math.max(min, math.min(max, _logicalAperture + n))
}
final def rebuild(): This = rebuild(vector)
final def rebuild(vec: Vector[Node]): This = {
updateVectorHash(vec)
if (vec.isEmpty) {
new EmptyVector(initAperture)
} else if (dapertureActive) {
ProcessCoordinate() match {
case Some(coord) =>
new DeterministicAperture(vec, initAperture, coord)
case None =>
// this should not happen as `dapertureActive` should prevent this case
// but hypothetically, the coordinate could get unset between calls
// to `dapertureActive` and `ProcessCoordinate()`
new RandomAperture(vec, initAperture)
}
} else {
new RandomAperture(vec, initAperture)
}
}
/**
* Returns the indices which are currently part of the aperture. That is,
* the indices over which `pick` selects.
*/
def indices: Set[Int]
def additionalMetadata: Map[String, Any]
}
/**
* A distributor which has an aperture size but an empty vector to select
* from, so it always returns the `failingNode`.
*/
protected class EmptyVector(initAperture: Int) extends BaseDist(Vector.empty, initAperture) {
require(vector.isEmpty, s"vector must be empty: $vector")
def indices: Set[Int] = Set.empty
def pick(): Node = failingNode(emptyException)
def needsRebuild: Boolean = false
def additionalMetadata: Map[String, Any] = Map.empty
}
// these are lifted out of `RandomAperture` to avoid unnecessary allocations.
private[this] val nodeToken: ApertureNode => Int = _.token
private[this] val nodeOpen: ApertureNode => Boolean = _.status == Status.Open
private[this] val nodeBusy: ApertureNode => Boolean = _.status == Status.Busy
/**
* A distributor which uses P2C to select nodes from within a window ("aperture").
* The `vector` is shuffled randomly to ensure that clients talking to the same
* set of nodes don't concentrate load on the same set of servers. However, there is
* a known limitation with the random shuffle since servers still have a well
* understood probability of being selected as part of an aperture (i.e. they
* follow a binomial distribution).
*
* @param vector The source vector received from a call to `rebuild`.
*
* @param initAperture The initial aperture to use.
*/
protected final class RandomAperture(vector: Vector[Node], initAperture: Int)
extends BaseDist(vector, initAperture)
with P2CPick[Node] {
require(vector.nonEmpty, "vector must be non empty")
/**
* Returns a new vector which is ordered by a node's status. Note, it is
* important that this is a stable sort since we care about the source order
* of `vec` to eliminate any unnecessary resource churn.
*/
private[this] def statusOrder(vec: Vector[Node]): Vector[Node] = {
val resultNodes = new VectorBuilder[Node]
val busyNodes = new ListBuffer[Node]
val closedNodes = new ListBuffer[Node]
val iter = vec.iterator
while (iter.hasNext) {
val node = iter.next()
node.status match {
case Status.Open => resultNodes += node
case Status.Busy => busyNodes += node
case Status.Closed => closedNodes += node
}
}
resultNodes ++= busyNodes ++= closedNodes
resultNodes.result
}
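// Example (schematic node names): statusOrder(Vector(busy1, open1, closed1, open2))
// yields Vector(open1, open2, busy1, closed1); the relative source order within
// each status group is preserved, which is what makes the ordering stable.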
// Since we don't have any process coordinate, we sort the node
// by `token` which is deterministic across rebuilds but random
// globally, since `token` is assigned randomly per process
// when the node is created.
protected val vec = statusOrder(vector.sortBy(nodeToken))
protected def bound: Int = logicalAperture
protected def emptyNode: Node = failingNode(emptyException)
protected def rng: Rng = self.rng
private[this] def vecAsString: String =
vec
.take(logicalAperture)
.map(_.factory.address)
.mkString("[", ", ", "]")
if (rebuildLog.isLoggable(Level.DEBUG)) {
rebuildLog.debug(s"[RandomAperture.rebuild $lbl] nodes=$vecAsString")
}
def indices: Set[Int] = (0 until logicalAperture).toSet
// To reduce the amount of rebuilds needed, we rely on the probabilistic
// nature of p2c pick. That is, we know that only when a significant
// portion of the underlying vector is unavailable will we return an
// unavailable node to the layer above and trigger a rebuild. We do however
// want to return to our "stable" ordering as soon as we notice that a
// previously busy node is now available.
private[this] val busy = vector.filter(nodeBusy)
def needsRebuild: Boolean = busy.exists(nodeOpen)
def additionalMetadata: Map[String, Any] = Map("nodes" -> vecAsString)
}
/**
* [[DeterministicAperture]] addresses the shortcomings of [[RandomAperture]] by picking
* nodes within this process' [[ProcessCoordinate]]. Thus, when the group of peers
* converges on an aperture size, the servers are equally represented across the
* peers.
*
* @param vector The source vector received from a call to `rebuild`.
*
* @param initAperture The initial aperture to use.
*
* @param coord The [[ProcessCoordinate]] for this process which is used to narrow
* the range of `pick2`.
*/
protected final class DeterministicAperture(vector: Vector[Node], initAperture: Int, coord: Coord)
extends BaseDist(vector, initAperture) {
require(vector.nonEmpty, "vector must be non empty")
private[this] val ring = new Ring(vector.size, rng)
// Note that this definition ignores the user defined `minAperture`,
// but that isn't likely to hold much value given our definition of `min`
// and how we calculate the `apertureWidth`.
override def min: Int = math.min(Aperture.MinDeterministicAperture, vector.size)
// If we don't allow the aperture to be dynamic based on the load, we just use the min value.
override def logicalAperture: Int = if (staticDAperture) min else super.logicalAperture
// Translates the logical `aperture` into a physical one that
// maps to the ring. Note, we do this in terms of the peer
// unit width in order to ensure full ring coverage. As such,
// this width will be >= the aperture. Put differently, we may
// cover more servers than the `aperture` requested in service
// of global uniform load.
private[this] def apertureWidth: Double = {
// A recasting of the formula
// clients*aperture <= N*servers
// - N is the smallest integer satisfying the inequality and represents
// the number of times we have to circle the ring.
// -> ceil(clients*aperture/servers) = N
// - unitWidth = 1/clients; ring.unitWidth = 1/servers
// -> ceil(aperture*ring.unitWidth/unitWidth) = N
val unitWidth: Double = coord.unitWidth // (0, 1.0]
val unitAperture: Double = logicalAperture * ring.unitWidth // (0, 1.0]
val N: Int = math.ceil(unitAperture / unitWidth).toInt
val width: Double = N * unitWidth
// We know that `width` is bounded between (0, 1.0] since `N`
// at most will be the inverse of `unitWidth` (i.e. if `unitAperture`
// is 1, then units = 1/(1/x) = x, width = x*(1/x) = 1). However,
// practically, we take the min of 1.0 to account for any floating
// point stability issues.
math.min(1.0, width)
}
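// Worked example (numbers are illustrative): with 5 clients
// (coord.unitWidth = 0.2), 10 servers (ring.unitWidth = 0.1) and a logical
// aperture of 3: unitAperture = 0.3, N = ceil(0.3 / 0.2) = 2, and the
// physical width is 2 * 0.2 = 0.4, i.e. each client covers 4 of the 10
// servers even though only 3 were requested.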
override def physicalAperture: Int = {
val width = apertureWidth
if (rebuildLog.isLoggable(Level.DEBUG)) {
rebuildLog.debug(
f"[DeterministicAperture.physicalAperture $lbl] ringUnit=${ring.unitWidth}%1.6f coordUnit=${coord.unitWidth}%1.6f coordOffset=${coord.offset}%1.6f apertureWidth=$width%1.6f"
)
}
ring.range(coord.offset, width)
}
def indices: Set[Int] = ring.indices(coord.offset, apertureWidth).toSet
private[this] def nodes: Seq[(Int, Double, Address)] = {
val offset = coord.offset
val width = apertureWidth
val indices = ring.indices(offset, width)
indices.map { i =>
val addr = vector(i).factory.address
val weight = ring.weight(i, offset, width)
(i, weight, addr)
}
}
// We log the contents of the aperture on each distributor rebuild when using
// deterministic aperture. Rebuilds are not frequent and concentrated around
// events where this information would be valuable (i.e. coordinate changes or
// host add/removes).
if (rebuildLog.isLoggable(Level.DEBUG)) {
val apertureSlice: String = {
val offset = coord.offset
val width = apertureWidth
val indices = ring.indices(offset, width)
nodes
.map {
case (i, weight, addr) =>
f"(index=$i, weight=$weight%1.6f, addr=$addr)"
}.mkString("[", ", ", "]")
}
rebuildLog.debug(s"[DeterministicAperture.rebuild $lbl] nodes=$apertureSlice")
// It may be useful to see the raw server vector for d-aperture since we expect
// uniformity across processes.
if (rebuildLog.isLoggable(Level.TRACE)) {
val vectorString = vector.map(_.factory.address).mkString("[", ", ", "]")
rebuildLog.trace(s"[DeterministicAperture.rebuild $lbl] nodes=$vectorString")
}
}
/**
* Pick the least loaded (and healthiest) of the two nodes `a` and `b`
* taking into account their respective weights.
*/
private[this] def pick(a: Node, aw: Double, b: Node, bw: Double): Node = {
val aStatus = a.status
val bStatus = b.status
if (aStatus == bStatus) {
// Note, `aw` or `bw` can't be zero since `pick2` would not
// have returned the indices in the first place. However,
// we check anyways to safeguard against any numerical
// stability issues.
val _aw = if (aw == 0) 1.0 else aw
val _bw = if (bw == 0) 1.0 else bw
if (a.load / _aw <= b.load / _bw) a else b
} else {
if (Status.best(aStatus, bStatus) == aStatus) a else b
}
}
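// Example (loads and weights are illustrative): with equal statuses, a node
// of weight 0.5 at load 10 ties with a weight-1.0 node at load 20, since
// 10 / 0.5 == 20 / 1.0; the half-weighted node is therefore expected to
// carry roughly half the traffic.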
def pick(): Node = {
val offset = coord.offset
val width = apertureWidth
val a = ring.pick(offset, width)
val b = ring.tryPickSecond(a, offset, width)
val aw = ring.weight(a, offset, width)
val bw = ring.weight(b, offset, width)
val nodeA = vector(a)
val nodeB = vector(b)
val picked = pick(nodeA, aw, nodeB, bw)
if (pickLog.isLoggable(Level.TRACE)) {
pickLog.trace(
f"[DeterministicAperture.pick] a=(index=$a, weight=$aw%1.6f, node=$nodeA) b=(index=$b, weight=$bw%1.6f, node=$nodeB) picked=$picked"
)
}
picked
}
// rebuilds only need to happen when we receive ring updates (from
// the servers or our coordinate changing).
def needsRebuild: Boolean = false
def additionalMetadata: Map[String, Any] = Map(
"ring_unit_width" -> ring.unitWidth,
"peer_offset" -> coord.offset,
"peer_unit_width" -> coord.unitWidth,
"aperture_width" -> apertureWidth,
"nodes" -> nodes.map {
case (i, weight, addr) =>
Map[String, Any](
"index" -> i,
"weight" -> weight,
"address" -> addr.toString
)
}
)
}
protected def initDistributor(): Distributor = new EmptyVector(1)
override def close(deadline: Time): Future[Unit] = {
gauges.foreach(_.remove())
coordObservation.close(deadline).before { super.close(deadline) }
}
}
|
luciferous/finagle
|
finagle-core/src/main/scala/com/twitter/finagle/loadbalancer/aperture/Aperture.scala
|
Scala
|
apache-2.0
| 23,410
|
package com.themillhousegroup.gatsby.actors
import akka.actor.ActorRef
import io.gatling.core.action.Chainable
import io.gatling.core.session.{ Expression, Session }
import com.themillhousegroup.gatsby.stubby.RuntimeStubbing
import com.dividezero.stubby.core.model.StubExchange
import com.typesafe.scalalogging.slf4j.{ StrictLogging, Logging, Logger }
class TearDown(val simulation: RuntimeStubbing,
val requestNameExp: Expression[String],
val next: ActorRef) extends Chainable with CanTearDown {
}
trait CanTearDown {
this: StrictLogging =>
val simulation: RuntimeStubbing
val requestNameExp: Expression[String]
val next: ActorRef
def execute(session: Session): Unit = {
requestNameExp(session).foreach { requestName =>
logger.debug(s"Tearing down $requestName after scenario: ${session.scenarioName}")
simulation.removeExchange(requestName)
simulation.releaseLock(requestName)
next ! session
}
}
}
|
themillhousegroup/gatsby
|
src/main/scala/com/themillhousegroup/gatsby/actors/TearDown.scala
|
Scala
|
mit
| 958
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hbase
import java.io.{IOException, ObjectInputStream, ObjectOutputStream}
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.serializer.JavaSerializer
import org.apache.spark.util.{CollectionsUtils, Utils}
import org.apache.spark.{Partitioner, SparkEnv}
object HBasePartitioner {
implicit object HBaseRawOrdering extends Ordering[HBaseRawType] {
def compare(a: HBaseRawType, b: HBaseRawType) = Bytes.compareTo(a, b)
}
}
class HBasePartitioner (var splitKeys: Array[HBaseRawType]) extends Partitioner {
import HBasePartitioner.HBaseRawOrdering
type t = HBaseRawType
lazy private val len = splitKeys.length
// For a pre-split table, splitKeys(0) is the empty byte array; remove it,
// otherwise partition 0 would always be empty and
// we would miss the last region's data during bulk load
lazy private val realSplitKeys = if (splitKeys.isEmpty) splitKeys else splitKeys.tail
def numPartitions = if (len == 0) 1 else len
@transient private val binarySearch: ((Array[t], t) => Int) = CollectionsUtils.makeBinarySearch[t]
def getPartition(key: Any): Int = {
val k = key.asInstanceOf[t]
var partition = 0
if (len <= 128 && len > 0) {
// If we have fewer than 128 partitions, use a naive linear search
val ordering = implicitly[Ordering[t]]
while (partition < realSplitKeys.length && ordering.gt(k, realSplitKeys(partition))) {
partition += 1
}
} else {
// Determine which binary search method to use only once.
partition = binarySearch(realSplitKeys, k)
// binarySearch either returns the match location or -[insertion point]-1
if (partition < 0) {
partition = -partition - 1
}
if (partition > realSplitKeys.length) {
partition = realSplitKeys.length
}
}
partition
}
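// Example (keys shown as strings for readability): with
// realSplitKeys = ["b", "d"], key "a" maps to partition 0, "c" to 1 and
// "e" to 2; a key equal to a split key stays in the lower partition.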
override def equals(other: Any): Boolean = other match {
case r: HBasePartitioner =>
r.splitKeys.sameElements(splitKeys)
case _ =>
false
}
override def hashCode(): Int = {
val prime = 31
var result = 1
var i = 0
while (i < splitKeys.length) {
result = prime * result + splitKeys(i).hashCode
i += 1
}
result = prime * result
result
}
}
|
XiaoqingWang/Spark-SQL-on-HBase
|
src/main/scala/org/apache/spark/sql/hbase/HBasePartitioner.scala
|
Scala
|
apache-2.0
| 3,030
|
/**
* Copyright 2017 Interel
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package core3.config
import com.typesafe.config._
/**
* Object for accessing global static config.
* <br><br>
* Loaded from file 'static.conf'; static config is loaded only once, at application start.
*/
object StaticConfig {
private val config: Config = ConfigFactory.load().getConfig("server.static")
/**
* Retrieves the current config.
*
* @return the current static config
*/
def get: Config = {
config
}
}
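// Usage sketch (the 'instanceId' key is illustrative): given a static.conf
// entry `server.static { instanceId = "node-1" }`, the value is read as
//   StaticConfig.get.getString("instanceId") // "node-1"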
|
Interel-Group/core3
|
src/main/scala/core3/config/StaticConfig.scala
|
Scala
|
apache-2.0
| 1,051
|
package org.javachannel.implicits.model
import java.util.Date
class Audit extends BaseEntity {
var notation:Option[String]=None
var modifiedBy:Option[String]=None
var timestamp:Option[Date]=None
}
|
jottinger/conceptmodel
|
src/main/scala/org/javachannel/implicits/model/Audit.scala
|
Scala
|
apache-2.0
| 205
|
package jsons
import models.InventoryOrder
import play.api.libs.json.Json
object InventoryOrderJson extends KiwiERPJson[InventoryOrder] {
def base(inventoryOrder: InventoryOrder) = Json.obj(
"createdAt" -> inventoryOrder.createdAt,
"deliveredDate" -> optDateTimeToString(inventoryOrder.deliveredDate),
"id" -> inventoryOrder.id,
"quantity" -> inventoryOrder.quantity,
"orderedDate" -> dateTimeToString(inventoryOrder.orderedDate),
"partsId" -> inventoryOrder.partsId,
"shippedDate" -> optDateTimeToString(inventoryOrder.shippedDate),
"status" -> inventoryOrder.status,
"updatedAt" -> inventoryOrder.updatedAt
)
}
|
KIWIKIGMBH/kiwierp
|
kiwierp-backend/app/jsons/InventoryOrderJson.scala
|
Scala
|
mpl-2.0
| 656
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package scaps.searchEngine.queries
import scaps.api.Covariant
import scaps.api.FingerprintTerm
import scaps.api.TypeDef
import scaps.api.TypeRef
import scaps.api.ViewDef
import scaps.searchEngine.ApiTypeQuery
import scaps.searchEngine.MaximumClauseCountExceeded
import scaps.settings.QuerySettings
import scaps.utils.TraversableOps
import scala.util.Try
import scalaz.\/
import scaps.searchEngine.SemanticError
private[queries] sealed trait ExpandedQuery {
import ExpandedQuery._
def children: List[ExpandedQuery]
}
private[queries] object ExpandedQuery {
sealed trait Part extends ExpandedQuery
sealed trait Alternative extends ExpandedQuery
case class Sum(parts: List[Part]) extends Alternative {
val children = parts
override def toString =
parts.mkString("sum(", ", ", ")")
}
object Sum {
def apply(parts: Part*): Sum =
Sum(parts.toList)
}
case class Max(alternatives: List[Alternative]) extends Part {
val children = alternatives
override def toString =
alternatives.mkString("max(", ", ", ")")
}
object Max {
def apply(alts: Alternative*): Max =
Max(alts.toList)
}
case class Leaf(tpe: TypeRef, fraction: Double, distance: Int) extends Part with Alternative {
val children = Nil
override def toString =
s"$tpe^($fraction,$distance)"
}
def minimize(p: Part): Part = p match {
case Max((alt: Leaf) :: Nil) => alt
case Max(alts) =>
val minAlts = alts.map(minimize)
maxRepeatedPart(minAlts).fold[Part] {
Max(minAlts)
} { part =>
minimize(Max(factorOut(part, minAlts)))
}
case _ => p
}
private def maxRepeatedPart(alts: List[Alternative]): Option[Part] =
alts
.flatMap {
case Sum(parts) => parts.distinct
case _ => Nil
}
.groupBy(identity)
.mapValues(_.length)
.filter(_._2 > 1)
.maxByOpt(_._2)
.map(_._1)
private def factorOut(part: Part, alts: List[Alternative]): List[Alternative] = {
val (altsWithPart, altsWithoutPart) = alts.partition {
case Sum(ps) => ps.contains(part)
case _ => false
}
val altsMinusPart = altsWithPart.map {
case Sum(ps) => Sum(ps diff List(part))
case _ => ???
}
Sum(Max(altsMinusPart) :: part :: Nil) :: altsWithoutPart
}
def minimize(a: Alternative): Alternative = a match {
case Sum((part: Leaf) :: Nil) => part
case Sum(parts) => Sum(parts.map(minimize))
case _ => a
}
}
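// Example (schematic leaves a, b, c): minimize(Max(Sum(a, b), Sum(a, c)))
// factors out the repeated part a and yields Max(Sum(Max(b, c), a)),
// i.e. max(a + b, a + c) is rewritten as a + max(b, c).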
/**
* Creates APIQueries from (normalized) type refs.
*/
class QueryExpander(
settings: QuerySettings,
getTypeFrequency: FingerprintTerm => Double,
findViews: (TypeRef) => Seq[(TypeRef, Double)]) {
private case object MaximumClauseCountExceededException extends Exception
def apply(tpe: TypeRef): SemanticError \/ ApiTypeQuery = {
try {
val expanded = expandQuery(tpe)
val compacted = ExpandedQuery.minimize(expanded)
\/.right(toApiTypeQuery(compacted))
} catch {
case MaximumClauseCountExceededException =>
\/.left(MaximumClauseCountExceeded)
}
}
private[queries] def expandQuery(tpe: TypeRef): ExpandedQuery.Alternative = {
import ExpandedQuery._
var clauseCount = 0
def increaseCount() = {
clauseCount += 1
if (clauseCount > settings.maxClauseCount)
throw MaximumClauseCountExceededException
}
def parts(tpe: TypeRef, outerTpes: Set[TypeRef], fraction: Double, distance: Int): Alternative = {
increaseCount()
tpe match {
case TypeRef.Ignored(args, v) =>
Sum(args.map { arg =>
val partF = fraction / args.length
alternatives(arg, outerTpes, partF)
})
case tpe =>
val partArgs = tpe.args
.filterNot(_.isTypeParam)
val partF = fraction / (1 + partArgs.length)
val parts = partArgs.map { arg =>
if (outerTpes.contains(arg)) Leaf(arg.withArgsAsParams, partF, 0)
else alternatives(arg, outerTpes, partF)
}
Sum(Leaf(tpe.withArgsAsParams, partF, distance) :: parts)
}
}
def alternatives(tpe: TypeRef, outerTpes: Set[TypeRef], fraction: Double): Part = {
increaseCount()
val alternativesWithRetainedInfo =
(if (settings.views) findViews(tpe).toList else Nil)
.distinct
val outerTpesAndAlts = outerTpes + tpe ++ alternativesWithRetainedInfo.map(_._1)
val originalTypeParts = parts(tpe, outerTpesAndAlts, fraction, 0)
val alternativesParts =
alternativesWithRetainedInfo.map {
case (alt, retainedInfo) =>
parts(alt, outerTpesAndAlts, fraction * retainedInfo, 1)
}
Max(originalTypeParts :: alternativesParts)
}
tpe match {
case TypeRef.Ignored(_, _) =>
parts(tpe, Set(), 1, 0)
case _ =>
val itpe = TypeRef.Ignored(tpe :: Nil, Covariant)
parts(itpe, Set(), 1, 0)
}
}
private val boost: (ExpandedQuery.Leaf => Double) = { l =>
(if (settings.fractions) l.fraction else 1d) * (-settings.distanceWeight * l.distance + 1) * itf(l.tpe.term)
}
  /**
   * The inverse type frequency is defined as log_b(b / (b*f + (1 - f))),
   * where b is the configured typeFrequencyWeight and f is the type frequency
   * normed by the maximum possible type frequency (see TypeFrequencies).
   * For b = 10 this reduces to log10(10 / (10f + (1 - f))).
   */
private def itf(t: FingerprintTerm): Double = {
val base = settings.typeFrequencyWeight
if (base == 0) {
1
} else {
val freq = getTypeFrequency(t)
math.max(math.log(base / (freq * base + (1 - freq))) / math.log(base), 0.001)
}
}
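  // Worked example (assuming typeFrequencyWeight b = 10):
  //   f = 0.0 -> log10(10 / 1.0)  = 1.0   (rare types keep full weight)
  //   f = 0.5 -> log10(10 / 5.5) ~= 0.26
  //   f = 1.0 -> log10(10 / 10)   = 0.0, floored to 0.001 by the max(...) guard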
private def toApiTypeQuery(q: ExpandedQuery): ApiTypeQuery = q match {
case ExpandedQuery.Sum(parts) =>
ApiTypeQuery.Sum(parts.map(toApiTypeQuery))
case ExpandedQuery.Max(alts) =>
ApiTypeQuery.Max(alts.map(toApiTypeQuery))
case l: ExpandedQuery.Leaf =>
ApiTypeQuery.Type(
l.tpe.variance,
l.tpe.name,
boost(l),
getTypeFrequency(l.tpe.term))
}
}
|
scala-search/scaps
|
core/src/main/scala/scaps/searchEngine/queries/QueryExpander.scala
|
Scala
|
mpl-2.0
| 6,305
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler.cluster.mesos
import java.util.Collections
import scala.collection.JavaConverters._
import org.apache.mesos.Protos._
import org.apache.mesos.Protos.Value.{Range => MesosRange, Ranges, Scalar}
import org.apache.mesos.SchedulerDriver
import org.mockito.{ArgumentCaptor, Matchers}
import org.mockito.Mockito._
object Utils {
val TEST_FRAMEWORK_ID = FrameworkID.newBuilder()
.setValue("test-framework-id")
.build()
val TEST_MASTER_INFO = MasterInfo.newBuilder()
.setId("test-master")
.setIp(0)
.setPort(0)
.build()
def createOffer(
offerId: String,
slaveId: String,
mem: Int,
cpus: Int,
ports: Option[(Long, Long)] = None,
gpus: Int = 0): Offer = {
val builder = Offer.newBuilder()
builder.addResourcesBuilder()
.setName("mem")
.setType(Value.Type.SCALAR)
.setScalar(Scalar.newBuilder().setValue(mem))
builder.addResourcesBuilder()
.setName("cpus")
.setType(Value.Type.SCALAR)
.setScalar(Scalar.newBuilder().setValue(cpus))
ports.foreach { resourcePorts =>
builder.addResourcesBuilder()
.setName("ports")
.setType(Value.Type.RANGES)
.setRanges(Ranges.newBuilder().addRange(MesosRange.newBuilder()
.setBegin(resourcePorts._1).setEnd(resourcePorts._2).build()))
}
if (gpus > 0) {
builder.addResourcesBuilder()
.setName("gpus")
.setType(Value.Type.SCALAR)
.setScalar(Scalar.newBuilder().setValue(gpus))
}
builder.setId(createOfferId(offerId))
.setFrameworkId(FrameworkID.newBuilder()
.setValue("f1"))
.setSlaveId(SlaveID.newBuilder().setValue(slaveId))
.setHostname(s"host${slaveId}")
.build()
}
def verifyTaskLaunched(driver: SchedulerDriver, offerId: String): List[TaskInfo] = {
val captor = ArgumentCaptor.forClass(classOf[java.util.Collection[TaskInfo]])
verify(driver, times(1)).launchTasks(
Matchers.eq(Collections.singleton(createOfferId(offerId))),
captor.capture())
captor.getValue.asScala.toList
}
def verifyTaskNotLaunched(driver: SchedulerDriver, offerId: String): Unit = {
verify(driver, times(0)).launchTasks(
Matchers.eq(Collections.singleton(createOfferId(offerId))),
Matchers.any(classOf[java.util.Collection[TaskInfo]]))
}
def createOfferId(offerId: String): OfferID = {
OfferID.newBuilder().setValue(offerId).build()
}
def createSlaveId(slaveId: String): SlaveID = {
SlaveID.newBuilder().setValue(slaveId).build()
}
def createExecutorId(executorId: String): ExecutorID = {
ExecutorID.newBuilder().setValue(executorId).build()
}
def createTaskId(taskId: String): TaskID = {
TaskID.newBuilder().setValue(taskId).build()
}
}
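// Hedged usage sketch (UtilsExample and all offer values are illustrative,
// not part of the original test utilities):
object UtilsExample {
  def main(args: Array[String]): Unit = {
    // build a resource offer with a port range and one GPU
    val offer = Utils.createOffer("o-1", "slave-1", mem = 1024, cpus = 4,
      ports = Some((31000L, 32000L)), gpus = 1)
    println(offer.getHostname) // prints "hostslave-1"
  }
}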
|
minixalpha/spark
|
resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/Utils.scala
|
Scala
|
apache-2.0
| 3,602
|
package io.mem0r1es.trank.ranking
import java.net.URI
class ANCESTORS extends RankingAlgo {
/**
* Rank types by inverse-sort on the # of ANCESTORS contained in the type set.
*/
override def rank(entityTypes: Map[URI, HierInfo]): Seq[(URI, Double)] = {
def score(path: Seq[URI]): Double = {
path.filter (entityTypes.contains(_))
.length
}
entityTypes.map {
case (k, v) => (k, score(v.path))
}
.toSeq
.sortBy(_._2)
.reverse
}
}
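// Worked example (URIs abbreviated; HierInfo construction not shown): with
// entityTypes = {A, B} where B's path is Seq(A, B) and A's path is Seq(A),
// score(B.path) = 2 and score(A.path) = 1, so the inverse sort ranks B first.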
|
ahmadassaf/TRank
|
src/main/scala/io/mem0r1es/trank/ranking/ANCESTORS.scala
|
Scala
|
apache-2.0
| 489
|
package paddy
trait UncaughtErrorHandler {
def handle(throwable: Throwable)
}
|
despegar/paddy
|
src/main/scala/paddy/UncaughtErrorHandler.scala
|
Scala
|
bsd-2-clause
| 80
|
package akka.contrib.persistence.mongodb
import akka.actor.ActorSystem
import akka.persistence.PersistentRepr
import akka.serialization.{SerializationExtension, Serialization}
import com.mongodb.util.JSON
import com.mongodb._
import com.typesafe.config.ConfigFactory
import org.scalatest.BeforeAndAfterAll
import collection.JavaConverters._
import scala.util.Try
abstract class JournalUpgradeSpec[D <: MongoPersistenceDriver, X <: MongoPersistenceExtension](extensionClass: Class[X], toDriver: ActorSystem => D) extends BaseUnitTest with EmbeddedMongo with BeforeAndAfterAll {
import ConfigLoanFixture._
override def embedDB = "upgrade-test"
override def beforeAll(): Unit = {
doBefore()
}
override def afterAll(): Unit = {
doAfter()
}
def config(extensionClass: Class[_]) = ConfigFactory.parseString(s"""
|akka.contrib.persistence.mongodb.mongo.driver = "${extensionClass.getName}"
|akka.contrib.persistence.mongodb.mongo.mongouri = "mongodb://localhost:$embedConnectionPort/$embedDB"
    |akka.contrib.persistence.mongodb.mongo.journal-automatic-upgrade = true
|akka.persistence.journal.plugin = "akka-contrib-mongodb-persistence-journal"
|akka-contrib-mongodb-persistence-journal {
| # Class name of the plugin.
| class = "akka.contrib.persistence.mongodb.MongoJournal"
|}
|akka.persistence.snapshot-store.plugin = "akka-contrib-mongodb-persistence-snapshot"
|akka-contrib-mongodb-persistence-snapshot {
| # Class name of the plugin.
| class = "akka.contrib.persistence.mongodb.MongoSnapshots"
|}
|""".stripMargin)
def configured[A](testCode: D => A) = withConfig(config(extensionClass), "upgrade-test")(toDriver andThen testCode)
"A mongo persistence driver" should "do nothing on a new installation" in configured { as =>
mongoClient.getDB(embedDB).getCollectionNames shouldNot contain ("akka_persistence_journal")
}
import JournallingFieldNames._
def buildLegacyObject[A](pid: String, sn: Long, payload: A)(implicit serEv: Serialization): DBObject = {
val builder = new BasicDBObjectBuilder()
builder
.add(PROCESSOR_ID, pid)
.add(SEQUENCE_NUMBER, sn)
.add(SERIALIZED,
serEv.serialize(PersistentRepr(payload, sn, pid)).get
).get()
}
def buildLegacyDocument[A](pid: String, sn: Long)(implicit serEv: Serialization): DBObject = {
val builder = new BasicDBObjectBuilder()
val serBuilder = new BasicDBObjectBuilder()
val plBuilder = new BasicDBObjectBuilder()
val subdoc = serBuilder.add(PayloadKey, plBuilder.add("abc",1).add("def",2.0).add("ghi",true).get()).get()
builder.add(PROCESSOR_ID, pid).add(SEQUENCE_NUMBER, sn).add(SERIALIZED, subdoc).get()
}
def queryByProcessorId(pid: String): DBObject = {
new BasicDBObjectBuilder().add(PROCESSOR_ID,pid).get()
}
def createLegacyIndex(coll: DBCollection): Unit = {
val idxSpec =
new BasicDBObjectBuilder()
.add(PROCESSOR_ID, 1)
.add(SEQUENCE_NUMBER, 1)
.add(DELETED, 1)
.get()
Try(coll.createIndex(idxSpec)).getOrElse(())
}
it should "upgrade an existing journal" in configured { as =>
implicit val serialization = SerializationExtension.get(as.actorSystem)
val coll = mongoClient.getDB(embedDB).getCollection("akka_persistence_journal")
createLegacyIndex(coll)
coll.insert(buildLegacyObject("foo",1,"bar"))
coll.insert(buildLegacyObject("foo",2,"bar"))
coll.insert(buildLegacyDocument("foo",3))
println(s"before = ${coll.find().toArray.asScala.toList}")
as.upgradeJournalIfNeeded()
val records = coll.find(queryByProcessorId("foo")).toArray.asScala.toList
println(records)
records should have size 3
records.zipWithIndex.foreach { case (dbo,idx) =>
dbo.get(PROCESSOR_ID) should be ("foo")
dbo.get(TO) should be (idx + 1)
dbo.get(FROM) should be (dbo.get(TO))
val event = dbo.get(EVENTS).asInstanceOf[BasicDBList].get(0).asInstanceOf[DBObject]
event.get(SEQUENCE_NUMBER) should be (idx + 1)
if (idx < 2) {
event.get(TYPE) should be ("s")
event.get(PayloadKey) should be ("bar")
} else {
event.get(TYPE) should be ("bson")
val bson = event.get(PayloadKey).asInstanceOf[DBObject]
bson.get("abc") should be (1)
bson.get("def") should be (2.0)
bson.get("ghi") shouldBe true
}
}
}
it should "upgrade a more complicated journal" in configured { as =>
implicit val serialization = SerializationExtension.get(as.actorSystem)
val coll = mongoClient.getDB(embedDB).getCollection("akka_persistence_journal")
coll.remove(new BasicDBObject())
createLegacyIndex(coll)
val doc =
"""
|{
| "_id" : { "$oid" : "55deeae33de20e69f33b748b" },
| "pid" : "foo",
| "sn" : { "$numberLong" : "1" },
| "dl" : false,
| "cs" : [ ],
| "pr" : {
| "p" : {
| "order-created" : {
| "id" : "alsonotarealguid",
| "seqNr" : 232,
| "userId" : "notarealguid",
| "cartId" : "notarealcartid",
| "phoneNumber" : "+15555005555",
| "from" : {
| "country" : "US"
| },
| "to" : {
| "country" : "RU",
| "region" : "MOW",
| "city" : "Moscow"
| },
| "dateCreated" : { "$date": "2015-08-27T10:48:03.101Z" },
| "timestamp" : { "$date": "2015-08-27T10:48:03.101Z" },
| "addressId" : "not-a-real-addressid"
| },
| "_timestamp" : { "$date": "2015-08-27T10:48:03.102Z" }
| }
| }
}""".stripMargin
coll.insert(JSON.parse(doc).asInstanceOf[DBObject])
as.upgradeJournalIfNeeded()
val records = coll.find(queryByProcessorId("foo")).toArray.asScala.toList
println(records)
records should have size 1
records.zipWithIndex.foreach { case (dbo,idx) =>
dbo.get(PROCESSOR_ID) should be ("foo")
dbo.get(TO) should be (idx + 1)
dbo.get(FROM) should be (dbo.get(TO))
val event = dbo.get(EVENTS).asInstanceOf[BasicDBList].get(0).asInstanceOf[DBObject]
event.get(SEQUENCE_NUMBER) should be (idx + 1)
event.get(TYPE) should be ("bson")
val bson = event.get(PayloadKey).asInstanceOf[DBObject]
val payload = bson.get("order-created").asInstanceOf[DBObject]
payload.get("cartId") should be ("notarealcartid")
payload.get("seqNr") should be (232)
}
}
}
|
tomzhang/akka-persistence-mongo
|
common/src/test/scala/akka/contrib/persistence/mongodb/JournalUpgradeSpec.scala
|
Scala
|
apache-2.0
| 6,656
|
package streaming.perf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.StreamingContext._
import org.apache.spark.SparkContext._
import org.apache.spark.streaming.{Milliseconds, Time}
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import streaming.perf.util.FileGenerator
class HdfsRecoveryTest(sc: SparkContext) extends PerfTest(sc) {
val RECORDS_PER_FILE = ("records-per-file", "Number records per file")
val FILE_CLEANER_DELAY = ("file-cleaner-delay", "Delay (secs) in cleaning up generated files")
override def longOptions = super.longOptions ++ Seq(RECORDS_PER_FILE, FILE_CLEANER_DELAY)
override def stringOptions = super.stringOptions ++ Seq(HDFS_URL)
/** Runs the test and returns a series of results, along with values of any parameters */
override def doRunPerf(): Seq[(String, Double)] = {
// Define variables
val maxRecordsPerFile = longOptionValue(RECORDS_PER_FILE)
val cleanerDelay = longOptionValue(FILE_CLEANER_DELAY)
val dataDirectory = hdfsUrl + "/data/"
val tempDataDirectory = hdfsUrl + "/temp/"
// Create the file generator
val fileGenerator = new FileGenerator(dataDirectory, tempDataDirectory, maxRecordsPerFile, cleanerDelay)
fileGenerator.initialize()
// Setup computation
val fileStream = ssc.textFileStream(dataDirectory)
val updateFunc = (values: Seq[Long], state: Option[Long]) => {
Some(values.foldLeft(0L)(_ + _) + state.getOrElse(0L))
}
val wordStream = fileStream.flatMap(_.split(" ")).map(x => (x, 1L))
val runningCountStream = wordStream.updateStateByKey[Long](updateFunc).persist(StorageLevel.MEMORY_AND_DISK_SER)
runningCountStream.checkpoint(Milliseconds(batchDurationMs * 5))
// Verify the running counts. For any key the running count should be in the sequence
// 1, 3, 6, 10, 15, 21, ... (i.e., nth number is sum of 1..n)
val expectedCounts = (1L to maxRecordsPerFile).map(x => (1L to x).reduce(_ + _)).toSet
wordStream.foreach((rdd: RDD[(String, Long)], time: Time) => {
val partitionCounts = rdd.sparkContext.runJob(rdd.mapPartitions(iter =>
iter.toSeq.groupBy(_._1).toSeq.map(x => (x._1, x._2.map(_._2).sum)).toIterator
), (iter: Iterator[(String, Long)]) => iter.toArray)
println(s"New partition counts ${partitionCounts.size}) at $time = " +
partitionCounts.map(_.mkString("[", ", ", "]")).mkString("[", ", ", "]"))
val counts = rdd.reduceByKey(_ + _, 1).collect()
println(s"New total count at $time = " + counts.mkString("[", ", ", "]"))
})
runningCountStream.foreach((rdd: RDD[(String, Long)], time: Time) => {
val counts = rdd.collect()
val possibleCounts = expectedCounts
val expected = counts.forall { case (word, count) => possibleCounts.contains(count) }
println("Running counts at " + time + " = " + counts.mkString("[", ", ", "]") + (if (!expected) ", no match" else ""))
println("-" * 40)
})
// Run the computation
ssc.start()
fileGenerator.start()
Thread.sleep(totalDurationSec * 1000)
fileGenerator.stop()
Thread.sleep(batchDurationMs * 2)
ssc.stop()
Thread.sleep(100)
fileGenerator.cleanup()
Nil
}
  override def run(): String = {
    doRunPerf() // run the recovery workload; calling run() here would recurse forever
    "PASSED"
  }
}
|
zsxwing/spark-perf
|
streaming-tests/src/main/scala/streaming/perf/HdfsRecoveryTest.scala
|
Scala
|
apache-2.0
| 3,312
|
package hydrograph.engine.spark.components
import java.util.Set
import hydrograph.engine.core.component.entity.UnionAllEntity
import hydrograph.engine.core.component.entity.elements.SchemaField
import hydrograph.engine.spark.components.base.StraightPullComponentBase
import hydrograph.engine.spark.components.platform.BaseComponentParams
import org.apache.spark.sql.DataFrame
import org.slf4j.LoggerFactory
import scala.collection.mutable.ListBuffer
/**
* The Class UnionAllComponent.
*
* @author Bitwise
*
*/
class UnionAllComponent(unionAllEntity: UnionAllEntity, componentsParams: BaseComponentParams)
extends StraightPullComponentBase {
val LOG = LoggerFactory.getLogger(classOf[UnionAllComponent])
override def createComponent(): Map[String, DataFrame] = {
LOG.trace(unionAllEntity.toString)
try {
val dataFrameList = componentsParams.getDataFrameList()
val schemaFieldList = componentsParams.getSchemaFieldList()
validateInputFields(schemaFieldList)
val df = merge(dataFrameList)
val outSocketId = unionAllEntity.getOutSocketList.get(0).getSocketId
LOG.info("Created UnionAll component "+ unionAllEntity.getComponentId
+ " in batch "+ unionAllEntity.getBatch )
Map(outSocketId -> df)
} catch {
case ex: Exception => LOG.error("Error in UnionAll component " + unionAllEntity.getComponentId, ex)
throw ex
}
}
def merge(dataFrameList: ListBuffer[DataFrame]): DataFrame = {
def combine(dataFrame: DataFrame, acc: Int): DataFrame = {
if (dataFrameList.size <= acc) dataFrame
else combine(dataFrame.union(dataFrameList(acc).select(dataFrame.columns.head,dataFrame.columns.tail:_*)), acc + 1)
}
combine(dataFrameList(0), 1)
}
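  // Sketch: merge(ListBuffer(df1, df2, df3)) recursively unions df2 and df3
  // onto df1, first reordering each frame's columns to df1's column order so
  // that the positional union lines up.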
@throws(classOf[SchemaMismatchException])
def validateInputFields(schemaFieldList: ListBuffer[Set[SchemaField]]) = {
val refSchema = schemaFieldList(0)
schemaFieldList.tail.foreach { sf =>
{
if (refSchema.size != sf.size) {
LOG.error("Component:" + unionAllEntity.getComponentId()
+ " - Different schema is defined for input sockets. For UnionAll component schema of all input sockets should be same.")
throw new SchemaMismatchException("Component:" + unionAllEntity.getComponentId()
+ " - Different schema is defined for input sockets. For UnionAll component schema of all input sockets should be same.");
}
if (!refSchema.containsAll(sf)) {
LOG.error("Component:" + unionAllEntity.getComponentId()
+ " - Different schema is defined for input sockets. For UnionAll component schema of all input sockets should be same.")
throw new SchemaMismatchException("Component:" + unionAllEntity.getComponentId()
+ " - Different schema is defined for input sockets. For UnionAll component schema of all input sockets should be same.");
}
}
}
}
  class SchemaMismatchException(msg: String) extends RuntimeException(msg)
}
|
capitalone/Hydrograph
|
hydrograph.engine/hydrograph.engine.spark/src/main/scala/hydrograph/engine/spark/components/UnionAllComponent.scala
|
Scala
|
apache-2.0
| 3,005
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.api.r
import java.io.File
import java.util.Arrays
import org.apache.spark.{SparkEnv, SparkException}
private[spark] object RUtils {
// Local path where R binary packages built from R source code contained in the spark
// packages specified with "--packages" or "--jars" command line option reside.
var rPackages: Option[String] = None
/**
* Get the SparkR package path in the local spark distribution.
*/
def localSparkRPackagePath: Option[String] = {
val sparkHome = sys.env.get("SPARK_HOME").orElse(sys.props.get("spark.test.home"))
sparkHome.map(
Seq(_, "R", "lib").mkString(File.separator)
)
}
/**
* Check if SparkR is installed before running tests that use SparkR.
*/
def isSparkRInstalled: Boolean = {
localSparkRPackagePath.filter { pkgDir =>
new File(Seq(pkgDir, "SparkR").mkString(File.separator)).exists
}.isDefined
}
/**
* Get the list of paths for R packages in various deployment modes, of which the first
* path is for the SparkR package itself. The second path is for R packages built as
* part of Spark Packages, if any exist. Spark Packages can be provided through the
* "--packages" or "--jars" command line options.
*
* This assumes that Spark properties `spark.master` and `spark.submit.deployMode`
* and environment variable `SPARK_HOME` are set.
*/
def sparkRPackagePath(isDriver: Boolean): Seq[String] = {
val (master, deployMode) =
if (isDriver) {
(sys.props("spark.master"), sys.props("spark.submit.deployMode"))
} else {
val sparkConf = SparkEnv.get.conf
(sparkConf.get("spark.master"), sparkConf.get("spark.submit.deployMode", "client"))
}
val isYarnCluster = master != null && master.contains("yarn") && deployMode == "cluster"
val isYarnClient = master != null && master.contains("yarn") && deployMode == "client"
// In YARN mode, the SparkR package is distributed as an archive symbolically
// linked to the "sparkr" file in the current directory and additional R packages
// are distributed as an archive symbolically linked to the "rpkg" file in the
// current directory.
//
// Note that this does not apply to the driver in client mode because it is run
// outside of the cluster.
if (isYarnCluster || (isYarnClient && !isDriver)) {
val sparkRPkgPath = new File("sparkr").getAbsolutePath
val rPkgPath = new File("rpkg")
if (rPkgPath.exists()) {
Seq(sparkRPkgPath, rPkgPath.getAbsolutePath)
} else {
Seq(sparkRPkgPath)
}
} else {
// Otherwise, assume the package is local
val sparkRPkgPath = localSparkRPackagePath.getOrElse {
throw new SparkException("SPARK_HOME not set. Can't locate SparkR package.")
}
if (!rPackages.isEmpty) {
Seq(sparkRPkgPath, rPackages.get)
} else {
Seq(sparkRPkgPath)
}
}
}
/** Check if R is installed before running tests that use R commands. */
def isRInstalled: Boolean = {
try {
val builder = new ProcessBuilder(Arrays.asList("R", "--version"))
builder.start().waitFor() == 0
} catch {
case e: Exception => false
}
}
}
|
bravo-zhang/spark
|
core/src/main/scala/org/apache/spark/api/r/RUtils.scala
|
Scala
|
apache-2.0
| 4,049
|
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.sparta.sdk.pipeline.schema
import java.sql.{Date, Timestamp}
import org.joda.time.DateTime
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{Matchers, WordSpec}
@RunWith(classOf[JUnitRunner])
class TypeOpTest extends WordSpec with Matchers {
"TypeOp" should {
"typeOperation String must be " in {
val expected = "String"
val result = TypeOp.transformValueByTypeOp(TypeOp.String, "String")
result should be(expected)
}
"typeOperation ArrayDouble from any must be " in {
val expected = Seq(1)
val result = TypeOp.transformValueByTypeOp(TypeOp.ArrayDouble, Seq("1"))
result should be(expected)
}
"typeOperation ArrayDouble must be " in {
val expected = Seq(1d)
val result = TypeOp.transformValueByTypeOp(TypeOp.ArrayDouble, Seq(1d))
result should be(expected)
}
"typeOperation ArrayString must be " in {
val expected = Seq("String")
val result = TypeOp.transformValueByTypeOp(TypeOp.ArrayString, Seq("String"))
result should be(expected)
}
"typeOperation ArrayString from any must be " in {
val expected = Seq("1.0")
val result = TypeOp.transformValueByTypeOp(TypeOp.ArrayString, Seq(1d))
result should be(expected)
}
"typeOperation Timestamp must be " in {
val expected = new Timestamp(1L)
val result = TypeOp.transformValueByTypeOp(TypeOp.Timestamp, new Timestamp(1L))
result should be(expected)
}
"typeOperation Date must be " in {
val expected = new Date(1L)
val result = TypeOp.transformValueByTypeOp(TypeOp.Date, new Date(1L))
result should be(expected)
}
"typeOperation DateTime must be " in {
val expected = new DateTime(1L)
val result = TypeOp.transformValueByTypeOp(TypeOp.DateTime, new DateTime(1L))
result should be(expected)
}
"typeOperation MapStringLong must be " in {
val expected = Map("a" -> 1L)
val result = TypeOp.transformValueByTypeOp(TypeOp.MapStringLong, Map("a" -> "1"))
result should be(expected)
}
"typeOperation MapStringLong from number must be " in {
val expected = Map("a" -> 1L)
val result = TypeOp.transformValueByTypeOp(TypeOp.MapStringLong, Map("a" -> 1L))
result should be(expected)
}
"typeOperation Long must be " in {
val expected = 1L
val result = TypeOp.transformValueByTypeOp(TypeOp.Long, 1L)
result should be(expected)
}
"typeOperation Binary must be " in {
val expected = "Binary"
val result = TypeOp.transformValueByTypeOp(TypeOp.Binary, "Binary")
result should be(expected)
}
"operation by name Binary must be " in {
val expected = TypeOp.Binary
val result = TypeOp.getTypeOperationByName("Binary", TypeOp.Binary)
result should be(expected)
}
"operation by name BigDecimal must be " in {
val expected = TypeOp.BigDecimal
val result = TypeOp.getTypeOperationByName("BigDecimal", TypeOp.BigDecimal)
result should be(expected)
}
"operation by name Long must be " in {
val expected = TypeOp.Long
val result = TypeOp.getTypeOperationByName("Long", TypeOp.Long)
result should be(expected)
}
"operation by name Int must be " in {
val expected = TypeOp.Int
val result = TypeOp.getTypeOperationByName("Int", TypeOp.Int)
result should be(expected)
}
"operation by name String must be " in {
val expected = TypeOp.String
val result = TypeOp.getTypeOperationByName("String", TypeOp.String)
result should be(expected)
}
"operation by name Double must be " in {
val expected = TypeOp.Double
val result = TypeOp.getTypeOperationByName("Double", TypeOp.String)
result should be(expected)
}
"operation by name Boolean must be " in {
val expected = TypeOp.Boolean
val result = TypeOp.getTypeOperationByName("Boolean", TypeOp.String)
result should be(expected)
}
"operation by name Date must be " in {
val expected = TypeOp.Date
val result = TypeOp.getTypeOperationByName("Date", TypeOp.String)
result should be(expected)
}
"operation by name DateTime must be " in {
val expected = TypeOp.DateTime
val result = TypeOp.getTypeOperationByName("DateTime", TypeOp.String)
result should be(expected)
}
"operation by name Timestamp must be " in {
val expected = TypeOp.Timestamp
val result = TypeOp.getTypeOperationByName("Timestamp", TypeOp.String)
result should be(expected)
}
"operation by name ArrayDouble must be " in {
val expected = TypeOp.ArrayDouble
val result = TypeOp.getTypeOperationByName("ArrayDouble", TypeOp.String)
result should be(expected)
}
"operation by name ArrayString must be " in {
val expected = TypeOp.ArrayString
val result = TypeOp.getTypeOperationByName("ArrayString", TypeOp.String)
result should be(expected)
}
"operation by name MapStringLong must be " in {
val expected = TypeOp.MapStringLong
val result = TypeOp.getTypeOperationByName("MapStringLong", TypeOp.String)
result should be(expected)
}
"operation by name not founded must be " in {
val expected = TypeOp.MapStringLong
val result = TypeOp.getTypeOperationByName("hello", TypeOp.MapStringLong)
result should be(expected)
}
}
}
|
Frannie-Ludmilla/sparta
|
sdk/src/test/scala/com/stratio/sparta/sdk/pipeline/schema/TypeOpTest.scala
|
Scala
|
apache-2.0
| 6,130
|
// - Project: scalajs-svgjs (https://github.com/jokade/scalajs-svgjs)
// Description: Bindings for svg.js RBox
//
// Copyright (c) 2015 Johannes Kastner <jokade@karchedon.de>
// Distributed under the MIT License (see included file LICENSE)
package biz.enef.svgjs
import scala.scalajs.js
trait RBox extends js.Object {
}
|
jokade/scalajs-svgjs
|
src/main/scala/biz/enef/svgjs/RBox.scala
|
Scala
|
mit
| 339
|
package com.dzegel.DynamockServer.service
import com.dzegel.DynamockServer.types.{DidOverwriteResponse, ExpectationId, Response}
import com.google.inject.{ImplementedBy, Singleton}
import scala.collection.concurrent.TrieMap
@ImplementedBy(classOf[DefaultResponseStore])
trait ResponseStore {
def registerResponse(expectationId: ExpectationId, response: Response): DidOverwriteResponse
def getResponses(expectationIds: Set[ExpectationId]): Map[ExpectationId, Response]
def deleteResponses(expectationIds: Set[ExpectationId]): Unit
def clearAllResponses(): Unit
}
@Singleton
class DefaultResponseStore extends ResponseStore {
private val expectationIdToResponse = TrieMap.empty[ExpectationId, Response]
override def registerResponse(expectationId: ExpectationId, response: Response): DidOverwriteResponse = this.synchronized {
expectationIdToResponse.put(expectationId, response).exists(_ != response)
}
override def getResponses(expectationIds: Set[ExpectationId]): Map[ExpectationId, Response] = this.synchronized {
expectationIds
.map(id => (id, expectationIdToResponse.get(id)))
.collect { case (id, Some(response)) => (id, response) }
.toMap
}
override def deleteResponses(expectationIds: Set[ExpectationId]): Unit = this.synchronized {
expectationIds.foreach(expectationIdToResponse.remove)
}
override def clearAllResponses(): Unit = this.synchronized {
expectationIdToResponse.clear()
}
}
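// Hedged usage sketch (assumes ExpectationId aliases String and
// DidOverwriteResponse aliases Boolean; resp1 and resp2 are distinct Responses):
//   val store = new DefaultResponseStore
//   store.registerResponse("exp-1", resp1)       // false: nothing overwritten
//   store.registerResponse("exp-1", resp2)       // true: resp1 was replaced
//   store.getResponses(Set("exp-1", "missing"))  // Map("exp-1" -> resp2)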
|
dzegel/DynamockServer
|
src/main/scala/com/dzegel/DynamockServer/service/ResponseStore.scala
|
Scala
|
apache-2.0
| 1,465
|
package hackerRank.algorithms
/* group anagram strings
dog, eat, god, ate, tea
output should be [ [dog,god], [eat,ate,tea]]
*/
object GroupAnagram {
def findGroupAnagrams(words: Seq[String]): Seq[Seq[String]] = {
    if (words.isEmpty) Seq.empty
    else if (words.lengthCompare(1) == 0) Seq(Seq(words.head))
    else words.map(word => findAnagrams(word, words)).distinct
}
  def findAnagrams(word: String, compareWords: Seq[String]): Seq[String] = {
    val sortedWord = word.sorted
    // keep every word with the same sorted character sequence, including `word` itself
    compareWords.filter(_.sorted.equalsIgnoreCase(sortedWord))
  }
}
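// Usage sketch:
//   GroupAnagram.findGroupAnagrams(Seq("dog", "eat", "god", "ate", "tea"))
//   // => Seq(Seq("dog", "god"), Seq("eat", "ate", "tea"))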
|
cricanr/AlgorithmsHackerRank
|
src/main/scala/hackerRank/algorithms/GroupAnagram.scala
|
Scala
|
mit
| 640
|
package scala.slick.session
import scala.language.reflectiveCalls
import java.sql._
import scala.slick.SlickException
import scala.slick.util.Logging
import scala.Array
/**
* A database session which opens a connection and transaction on demand.
*/
trait Session extends java.io.Closeable with Logging { self =>
def conn: Connection
def metaData: DatabaseMetaData
def capabilities: DatabaseCapabilities
def resultSetType: ResultSetType = ResultSetType.Auto
def resultSetConcurrency: ResultSetConcurrency = ResultSetConcurrency.Auto
def resultSetHoldability: ResultSetHoldability = ResultSetHoldability.Auto
final def prepareStatement(sql: String,
defaultType: ResultSetType = ResultSetType.ForwardOnly,
defaultConcurrency: ResultSetConcurrency = ResultSetConcurrency.ReadOnly,
defaultHoldability: ResultSetHoldability = ResultSetHoldability.Default): PreparedStatement = {
logger.debug("Preparing statement: "+sql)
resultSetHoldability.withDefault(defaultHoldability) match {
case ResultSetHoldability.Default =>
conn.prepareStatement(sql, resultSetType.withDefault(defaultType).intValue,
resultSetConcurrency.withDefault(defaultConcurrency).intValue)
case h =>
conn.prepareStatement(sql, resultSetType.withDefault(defaultType).intValue,
resultSetConcurrency.withDefault(defaultConcurrency).intValue,
h.intValue)
}
}
final def prepareInsertStatement(sql: String,
columnNames: Array[String] = new Array[String](0)): PreparedStatement = {
logger.debug("Preparing insert statement: "+sql+", returning: "+columnNames.mkString(","))
conn.prepareStatement(sql, columnNames)
}
final def prepareInsertStatement(sql: String, columnIndexes: Array[Int]): PreparedStatement = {
logger.debug("Preparing insert statement: "+sql+", returning indexes: "+columnIndexes.mkString(","))
conn.prepareStatement(sql, columnIndexes)
}
final def createStatement(defaultType: ResultSetType = ResultSetType.ForwardOnly,
defaultConcurrency: ResultSetConcurrency = ResultSetConcurrency.ReadOnly,
defaultHoldability: ResultSetHoldability = ResultSetHoldability.Default): Statement = {
loggingStatement(resultSetHoldability.withDefault(defaultHoldability) match {
case ResultSetHoldability.Default =>
conn.createStatement(resultSetType.withDefault(defaultType).intValue,
resultSetConcurrency.withDefault(defaultConcurrency).intValue)
case h =>
conn.createStatement(resultSetType.withDefault(defaultType).intValue,
resultSetConcurrency.withDefault(defaultConcurrency).intValue,
h.intValue)
})
}
final def withPreparedStatement[T](sql: String,
defaultType: ResultSetType = ResultSetType.ForwardOnly,
defaultConcurrency: ResultSetConcurrency = ResultSetConcurrency.ReadOnly,
defaultHoldability: ResultSetHoldability = ResultSetHoldability.Default)(f: (PreparedStatement => T)): T = {
val st = prepareStatement(sql, defaultType, defaultConcurrency, defaultHoldability)
try f(st) finally st.close()
}
final def withPreparedInsertStatement[T](sql: String,
columnNames: Array[String] = new Array[String](0))(f: (PreparedStatement => T)): T = {
val st = prepareInsertStatement(sql, columnNames)
try f(st) finally st.close()
}
final def withPreparedInsertStatement[T](sql: String,
columnIndexes: Array[Int])(f: (PreparedStatement => T)): T = {
val st = prepareInsertStatement(sql, columnIndexes)
try f(st) finally st.close()
}
final def withStatement[T](defaultType: ResultSetType = ResultSetType.ForwardOnly,
defaultConcurrency: ResultSetConcurrency = ResultSetConcurrency.ReadOnly,
defaultHoldability: ResultSetHoldability = ResultSetHoldability.Default)(f: (Statement => T)): T = {
val st = createStatement(defaultType, defaultConcurrency, defaultHoldability)
try f(st) finally st.close()
}
def close(): Unit
/**
* Call this method within a <em>withTransaction</em> call to roll back the current
* transaction after <em>withTransaction</em> returns.
*/
def rollback(): Unit
/**
* Run the supplied function within a transaction. If the function throws an Exception
* or the session's rollback() method is called, the transaction is rolled back,
   * otherwise it is committed when the function returns.
*/
def withTransaction[T](f: => T): T
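  // Hedged usage sketch:
  //   session.withTransaction {
  //     // statements here run in one transaction; calling session.rollback()
  //     // marks it to be rolled back once the block returns
  //   }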
def forParameters(rsType: ResultSetType = resultSetType, rsConcurrency: ResultSetConcurrency = resultSetConcurrency,
rsHoldability: ResultSetHoldability = resultSetHoldability): Session = new Session {
override def resultSetType = rsType
override def resultSetConcurrency = rsConcurrency
override def resultSetHoldability = rsHoldability
def conn = self.conn
def metaData = self.metaData
def capabilities = self.capabilities
def close() = self.close()
def rollback() = self.rollback()
def withTransaction[T](f: => T) = self.withTransaction(f)
}
protected def loggingStatement(st: Statement): Statement = if(logger.isDebugEnabled) new Statement {
def setMaxFieldSize(max: Int) = st.setMaxFieldSize(max)
def clearWarnings() = st.clearWarnings()
def getMoreResults(current: Int) = st.getMoreResults(current)
def getMoreResults: Boolean = st.getMoreResults
def getGeneratedKeys: ResultSet = st.getGeneratedKeys
def cancel() = st.cancel()
def getResultSet: ResultSet = st.getResultSet
def setPoolable(poolable: Boolean) = st.setPoolable(poolable)
def isPoolable: Boolean = st.isPoolable
def setCursorName(name: String) = st.setCursorName(name)
def getUpdateCount: Int = st.getUpdateCount
def addBatch(sql: String) = {
logger.debug("Adding to batch: "+sql)
st.addBatch(sql)
}
def getMaxRows: Int = st.getMaxRows
def execute(sql: String, columnNames: Array[String]): Boolean = {
logger.debug("Executing statement: "+sql)
st.execute(sql, columnNames)
}
def execute(sql: String, columnIndexes: Array[Int]): Boolean = {
logger.debug("Executing statement: "+sql)
st.execute(sql, columnIndexes)
}
def execute(sql: String, autoGeneratedKeys: Int): Boolean = {
logger.debug("Executing statement: "+sql)
st.execute(sql, autoGeneratedKeys)
}
def execute(sql: String): Boolean = {
logger.debug("Executing statement: "+sql)
st.execute(sql)
}
def executeQuery(sql: String): ResultSet = {
logger.debug("Executing query: "+sql)
st.executeQuery(sql)
}
def getResultSetType: Int = st.getResultSetType
def unwrap[T](iface: Class[T]): T = st.unwrap(iface)
def setMaxRows(max: Int) = st.setMaxRows(max)
def getFetchSize: Int = st.getFetchSize
def getResultSetHoldability: Int = st.getResultSetHoldability
def setFetchDirection(direction: Int) = st.setFetchDirection(direction)
def getFetchDirection: Int = st.getFetchDirection
def getResultSetConcurrency: Int = st.getResultSetConcurrency
def isWrapperFor(iface: Class[_]): Boolean = st.isWrapperFor(iface)
def clearBatch() = st.clearBatch()
def close() = st.close()
def isClosed: Boolean = st.isClosed
def executeUpdate(sql: String, columnNames: Array[String]): Int = {
logger.debug("Executing update: "+sql)
st.executeUpdate(sql, columnNames)
}
def executeUpdate(sql: String, columnIndexes: Array[Int]): Int = {
logger.debug("Executing update: "+sql)
st.executeUpdate(sql, columnIndexes)
}
def executeUpdate(sql: String, autoGeneratedKeys: Int): Int = {
logger.debug("Executing update: "+sql)
st.executeUpdate(sql, autoGeneratedKeys)
}
def executeUpdate(sql: String): Int = {
logger.debug("Executing update: "+sql)
st.executeUpdate(sql)
}
def getWarnings: SQLWarning = st.getWarnings
def getQueryTimeout: Int = st.getQueryTimeout
def setQueryTimeout(seconds: Int) = st.setQueryTimeout(seconds)
def setFetchSize(rows: Int) = st.setFetchSize(rows)
def setEscapeProcessing(enable: Boolean) = st.setEscapeProcessing(enable)
def executeBatch(): Array[Int] = {
logger.debug("Executing batch")
st.executeBatch()
}
def getConnection: Connection = st.getConnection
def getMaxFieldSize: Int = st.getMaxFieldSize
def closeOnCompletion(): Unit =
st.asInstanceOf[{ def closeOnCompletion(): Unit }].closeOnCompletion()
def isCloseOnCompletion(): Boolean =
st.asInstanceOf[{ def isCloseOnCompletion(): Boolean }].isCloseOnCompletion()
} else st
}
class BaseSession private[session] (db: Database) extends Session {
var open = false
var doRollback = false
var inTransaction = false
lazy val conn = { open = true; db.createConnection() }
lazy val metaData = conn.getMetaData()
def capabilities = {
val dc = db.capabilities
if(dc ne null) dc
else {
val newDC = new DatabaseCapabilities(this)
db.capabilities = newDC
newDC
}
}
def close() {
if(open) conn.close()
}
def rollback() {
if(conn.getAutoCommit) throw new SlickException("Cannot roll back session in auto-commit mode")
doRollback = true
}
def withTransaction[T](f: => T): T = if(inTransaction) f else {
conn.setAutoCommit(false)
inTransaction = true
try {
var done = false
try {
doRollback = false
val res = f
if(doRollback) conn.rollback()
else conn.commit()
done = true
res
} finally if(!done) conn.rollback()
} finally {
conn.setAutoCommit(true)
inTransaction = false
}
}
}
|
zefonseca/slick-1.0.0-scala.2.11.1
|
src/main/scala/scala/slick/session/Session.scala
|
Scala
|
bsd-2-clause
| 9,802
|
package de.tu_berlin.impro3.scala
import java.lang.{ Long => JLong }
import java.io._
import net.sourceforge.argparse4j.ArgumentParsers
import net.sourceforge.argparse4j.inf._
import de.tu_berlin.impro3.scala.spatio_temporal_dynamics._
import metrics._
import parsers._
import scala.io.Source
import language.reflectiveCalls
/**
* This algorithm implements most of the metrics on Twitter hashtags described
* in the paper "Spatio-Temporal Dynamics of Online Memes: A Study of Geo-Tagged
* Tweets" by Krishna Y. Kamath, James Caverlee, Kyumin Lee, and Zhiyuan Cheng.
* These metrics "examine the impact of location, time, and distance on the
* adoption of hashtags, which is important for understanding meme diffusion and
* information propagation."
*/
object SpatioTemporalDynamics extends scala.App {
val description = """
| This algorithm implements most of the metrics on Twitter hashtags
| described in the paper "Spatio-Temporal Dynamics of Online Memes: A
| Study of Geo-Tagged Tweets" by Krishna Y. Kamath, James Caverlee, Kyumin
| Lee, and Zhiyuan Cheng. These metrics "examine the impact of location,
| time, and distance on the adoption of hashtags, which is important for
| understanding meme diffusion and information propagation."
""".stripMargin
// create argument parser
val ap = ArgumentParsers
.newArgumentParser("")
.defaultHelp(true)
.description(description)
// input file
ap.addArgument("file")
.`type`(classOf[String])
.dest("input")
.help("Tweets input file")
// output directory
ap.addArgument("dir")
.`type`(classOf[String])
.dest("output")
.help("Output directory")
// input file format
ap.addArgument("-f", "--format")
.choices("json", "jax", "csv", "tab")
.default("json")
.dest("format")
.help("Input file format")
// minimal number of occurrences filter
ap.addArgument("-m", "--min-size")
.`type`(classOf[Integer])
.default(1)
.dest("min-size")
.help("Minimal number of occurrences filter")
// time interval for temporal metrics
ap.addArgument("-t", "--time-int")
.`type`(classOf[JLong])
    .default(1000L * 60 * 60 * 24)
.dest("time-int")
.help("Time interval for temporal metrics in ms")
// parse arguments
val ns = try {
ap.parseArgs(args)
} catch { case ape: ArgumentParserException =>
ap.handleError(ape)
sys.exit(1)
}
// parameters
val inputFile = ns.getString("input")
val outputDir = ns.getString("output")
val timeInt = ns.getLong("time-int")
val minSize = ns.getInt ("min-size")
val parser = ns.getString("format") match {
case "tab" => new TabularParser
case "csv" => new CsvParser
case "jax" => new JaxParser
case _ => new JsonParser // default
}
// clustering
lazy val lines = Source.fromFile(inputFile).getLines().toSeq.par
lazy val tweets = parser.parse(lines).par
lazy val tags = tweets.flatMap { _.hashTagsWithLocation }
lazy val texts = clusterByText(tags)
lazy val zones = clusterByZone(tags)
lazy val byText = texts.filterTotalSize(minSize)
lazy val byZone = zones.filterTotalSize(minSize)
byText.metric { set =>
(Occurrences(set), Focus(set), Entropy(set), Spread(set), Lifespan(set))
}.toStream.sortBy { _._1 }.writeLines(outputDir, "hashtags.csv") {
case (tag, (occurs, (zone, focus), entropy, spread, (first, last))) =>
Seq(tag, occurs, zone, focus, entropy, spread, first, last)
.mkString(",")
} // unary metrics
byText.temporal(timeInt) { set =>
(Occurrences(set), Focus(set), Entropy(set), Spread(set))
}.toStream.sortBy { _._1 }.writeLines(outputDir, "temporal.csv") {
case ((tag, time), (occurs, (zone, focus), entropy, spread)) =>
Seq(tag, time, occurs, zone, focus, entropy, spread).mkString(",")
} // temporal metrics
byZone.metric { (set1, set2) =>
( HashTagSimilarity(set1, set2),
AdoptionLag (set1, set2),
SpatialImpact (set1, set2))
}.toStream.sortBy { _._1 }.writeLines(outputDir, "locations.csv") {
case ((zoneA, zoneB), (similarity, adoptionLag, spatialImpact)) =>
Seq(zoneA, zoneB, similarity, adoptionLag, spatialImpact).mkString(",")
} // binary metrics
println(s"Time: ${System.currentTimeMillis - executionStart} ms")
/** Write to file as separate lines. */
implicit class WriteLines[T](lines: Seq[T]) {
def writeLines(dir: String, file: String)(tos: T => String) = {
val path = new File(dir).toPath.resolve(file)
val writer = new PrintWriter(path.toFile)
try lines.foreach { l => writer.println(tos(l)) }
finally writer.close()
}
}
  /** Disambiguate argparse4j interop problem through reflection. */
implicit class Default(arg: {
def setDefault(value: Any): Argument
}) { def default(value: Any) = arg.setDefault(value) }
}
|
joroKr21/spatio-temporal-dynamics
|
impro3-ws14-scala/src/main/scala/de/tu_berlin/impro3/scala/SpatioTemporalDynamics.scala
|
Scala
|
apache-2.0
| 4,886
|
package mimir.lenses;
import java.sql._;
import java.io.StringReader;
import collection.JavaConversions._;
import mimir.parser._;
import mimir.ctables._;
import mimir.algebra._;
import mimir.util._;
import mimir.sql._;
import mimir.{Database,Mimir};
class LensManager(db: Database) {
var lensCache = scala.collection.mutable.Map[String,Lens]();
def init(): Unit =
{
db.update("""
CREATE TABLE MIMIR_LENSES(
name varchar(30),
query text,
lens_type varchar(30),
parameters text,
PRIMARY KEY(name)
);""");
}
def mkLens(lensType: String, lensName: String, args: List[Expression], source: Operator): Lens =
{
lensType.toUpperCase() match {
case "MISSING_VALUE" =>
new MissingValueLens(lensName, args, source)
}
}
def lensTypeString(lens: Lens): String =
{
lens match {
case _:MissingValueLens => "MISSING_VALUE"
}
}
def create(lensDefn: CreateLens): Unit = {
val (baseQuery, bindings) = db.convert(lensDefn.getSelectBody)
val originalSource =
Project(
bindings.map( _ match { case (external, internal) =>
ProjectArg(external, Var(internal))
}).toList,
baseQuery
)
val source: Operator = originalSource;
val lensName = lensDefn.getName.toUpperCase;
val lens = mkLens(
lensDefn.getType(),
lensName,
lensDefn.getArgs.map( (arg:net.sf.jsqlparser.expression.Expression) =>
db.convert(arg)
).toList,
source
);
lens.build(db);
lensCache.put(lensName, lens);
save(lens);
}
def save(lens: Lens): Unit = {
db.update("""
INSERT INTO MIMIR_LENSES(name, query, lens_type, parameters)
VALUES (?,?,?,?)
""", List(
lens.name,
lens.source.toString,
lensTypeString(lens),
lens.args.map(_.toString).mkString(",")
))
lens.save(db)
}
def load(lensName: String): Option[Lens] = {
lensCache.get(lensName) match {
case Some(s) => Some(s)
case None => {
val lensMetaResult =
db.query("""
SELECT lens_type, parameters, query
FROM MIMIR_LENSES
WHERE name = ?
""", List(lensName)).allRows
if(lensMetaResult.length == 0) { return None; }
else if(lensMetaResult.length > 1){
throw new SQLException("Multiple definitions for Lens `"+lensName+"`")
} else {
val lensMeta = lensMetaResult(0)
val lens =
mkLens(
lensMeta(0).asString,
lensName,
db.parseExpressionList(lensMeta(1).asString),
db.parseOperator(lensMeta(2).asString)
)
lens.load(db)
lensCache.put(lensName, lens)
return Some(lens)
}
}
}
}
def modelForLens(lensName: String): Model =
load(lensName).get.model
}
|
sophieyoung717/mimir
|
src/main/scala/mimir/lenses/LensManager.scala
|
Scala
|
apache-2.0
| 2,969
|
package de.leanovate.swaggercheck.schema.model
import org.mockito.Mockito._
import org.scalatest.{MustMatchers, WordSpec}
import org.scalatestplus.mockito.MockitoSugar
class ObjectDefinitionSpec extends WordSpec with MockitoSugar with MustMatchers {
"ObjectDefinition" should {
"accept any object if no property definition is set" in {
val path = JsonPath("jsonpath")
val node = TestNode(obj = Some(Map("field1" -> TestNode(), "field2" -> TestNode())))
val schema = mock[Schema]
val definition = ObjectDefinition(None, None, Left(true))
definition.validate(schema, path, node) mustBe ValidationSuccess
}
"succeed if non-required fields are missing" in {
val path = JsonPath("jsonpath")
val node = TestNode(obj = Some(Map("field1" -> TestNode(), "field2" -> TestNode())))
val schema = mock[Schema]
val field3Definition = mock[Definition]
val field4Definition = mock[Definition]
val definition = ObjectDefinition(None, Some(Map("field3" -> field3Definition, "field4" -> field4Definition)), Left(true))
definition.validate(schema, path, node) mustBe ValidationSuccess
verifyZeroInteractions(field3Definition, field4Definition)
}
"fail if a required fields are missing" in {
val path = JsonPath("jsonpath")
val node = TestNode(obj = Some(Map("field1" -> TestNode(), "field2" -> TestNode())))
val schema = mock[Schema]
val field3Definition = mock[Definition]
val field4Definition = mock[Definition]
when(field3Definition.validate(schema, path.field("field3"), TestNode(isNull = true))).thenReturn(ValidationResult.error("error1"))
when(field4Definition.validate(schema, path.field("field4"), TestNode(isNull = true))).thenReturn(ValidationResult.error("error2"))
val definition = ObjectDefinition(Some(Set("field3", "field4")), Some(Map("field3" -> field3Definition, "field4" -> field4Definition)), Left(true))
val ValidationFailure(result) = definition.validate(schema, path, node)
result mustBe Seq("error1", "error2")
}
"succeed if all fields succeed" in {
val path = JsonPath("jsonpath")
val field2 = TestNode()
val field3 = TestNode()
val field4 = TestNode()
val node = TestNode(obj = Some(Map("field1" -> TestNode(), "field2" -> field2, "field3" -> field3, "field4" -> field4)))
val schema = mock[Schema]
val field2Definition = mock[Definition]
val field3Definition = mock[Definition]
val field4Definition = mock[Definition]
when(field2Definition.validate(schema, path.field("field2"), field2)).thenReturn(ValidationResult.success)
when(field3Definition.validate(schema, path.field("field3"), field3)).thenReturn(ValidationResult.success)
when(field4Definition.validate(schema, path.field("field4"), field4)).thenReturn(ValidationResult.success)
val definition = ObjectDefinition(Some(Set("field3", "field4")),
Some(Map("field2" -> field2Definition, "field3" -> field3Definition, "field4" -> field4Definition)), Left(true))
definition.validate(schema, path, node) mustBe ValidationSuccess
verify(field2Definition).validate(schema, path.field("field2"), field2)
verify(field3Definition).validate(schema, path.field("field3"), field3)
verify(field4Definition).validate(schema, path.field("field4"), field4)
}
"fail if additional fields do not match definition" in {
val path = JsonPath("jsonpath")
val field1 = TestNode()
val field2 = TestNode()
val field3 = TestNode()
val field4 = TestNode()
val node = TestNode(obj = Some(Map("field1" -> field1, "field2" -> field2, "field3" -> field3, "field4" -> field4)))
val schema = mock[Schema]
val additionalDefinition = mock[Definition]
val field2Definition = mock[Definition]
val field3Definition = mock[Definition]
val field4Definition = mock[Definition]
when(additionalDefinition.validate(schema, path.field("field1"), field1)).thenReturn(ValidationResult.error("error"))
when(field2Definition.validate(schema, path.field("field2"), field2)).thenReturn(ValidationResult.success)
when(field3Definition.validate(schema, path.field("field3"), field3)).thenReturn(ValidationResult.success)
when(field4Definition.validate(schema, path.field("field4"), field4)).thenReturn(ValidationResult.success)
val definition = ObjectDefinition(Some(Set("field3", "field4")),
Some(Map("field2" -> field2Definition, "field3" -> field3Definition, "field4" -> field4Definition)),
Right(additionalDefinition))
val ValidationFailure(result) = definition.validate(schema, path, node)
result mustBe Seq("error")
}
"succeed if additional fields match definition" in {
val path = JsonPath("jsonpath")
val field1 = TestNode()
val field2 = TestNode()
val field3 = TestNode()
val field4 = TestNode()
val node = TestNode(obj = Some(Map("field1" -> field1, "field2" -> field2, "field3" -> field3, "field4" -> field4)))
val schema = mock[Schema]
val additionalDefinition = mock[Definition]
val field2Definition = mock[Definition]
val field3Definition = mock[Definition]
val field4Definition = mock[Definition]
when(additionalDefinition.validate(schema, path.field("field1"), field1)).thenReturn(ValidationResult.success)
when(field2Definition.validate(schema, path.field("field2"), field2)).thenReturn(ValidationResult.success)
when(field3Definition.validate(schema, path.field("field3"), field3)).thenReturn(ValidationResult.success)
when(field4Definition.validate(schema, path.field("field4"), field4)).thenReturn(ValidationResult.success)
val definition = ObjectDefinition(Some(Set("field3", "field4")),
Some(Map("field2" -> field2Definition, "field3" -> field3Definition, "field4" -> field4Definition)),
Right(additionalDefinition))
definition.validate(schema, path, node) mustBe ValidationSuccess
verify(additionalDefinition).validate(schema, path.field("field1"), field1)
verify(field2Definition).validate(schema, path.field("field2"), field2)
verify(field3Definition).validate(schema, path.field("field3"), field3)
verify(field4Definition).validate(schema, path.field("field4"), field4)
}
"fail validation on everything that is not an integer" in {
val path = JsonPath("jsonpath")
val node = TestNode()
val schema = mock[Schema]
val definition = ObjectDefinition(None, None, Left(true))
val ValidationFailure(result) = definition.validate(schema, path, node)
result must have size 1
result.head must endWith("should be an object in path jsonpath")
}
"fail if additional properties are not allowed" in {
val schema = mock[Schema]
val objectDefinition = ObjectDefinition(None, Some(Map(
"field1" -> BooleanDefinition
)), Left(false))
objectDefinition.validate(schema, JsonPath(), TestNode(obj = Some(Map(
"field1" -> TestNode(boolean = Some(true))
)))).isSuccess mustBe true
objectDefinition.validate(schema, JsonPath(), TestNode(obj = Some(Map(
"field1" -> TestNode(boolean = Some(true)),
"field2" -> TestNode(boolean = Some(false))
)))).isSuccess mustBe false
}
}
}
|
leanovate/swagger-check
|
json-schema-model/src/test/scala/de/leanovate/swaggercheck/schema/model/ObjectDefinitionSpec.scala
|
Scala
|
mit
| 7,418
|
package org.qi4j.sample.scala
import org.qi4j.api.concern.ConcernOf
/**
* TODO
*/
class HelloThereConcern
extends ConcernOf[ HelloWorldMixin2 ] with HelloWorldMixin2
{
override def sayHello(name: String ) = next.sayHello("there " + name)
}
|
joobn72/qi4j-sdk
|
libraries/lang-scala/src/test/scala/org/qi4j/sample/scala/HelloThereConcern.scala
|
Scala
|
apache-2.0
| 247
|
package com.cave.metrics.data.influxdb
import com.cave.metrics.data.Metric
object InfluxMetric {
final val MissingAccountTagMessage = "All metrics should have the account tag"
/**
* Convert a sequence of metrics into InfluxDB requests
*
* @param metrics the metrics to send to InfluxDB
* @return requests, grouped by account
*/
def prepareRequests(metrics: Seq[Metric]): Map[String, String] = {
metrics.groupBy(getAccount).map { case (account, metricSeq) =>
account ->
metricSeq.groupBy(getTagNames)
.values.map(convertToJson)
.mkString("[", ",", "]")
}
}
private def getTagNames(metric: Metric): String =
metric.tags.keys.foldLeft(metric.name)(_ + _)
private def getAccount(metric: Metric): String =
metric.tags.getOrElse(Metric.Organization, sys.error(MissingAccountTagMessage))
/**
* Convert a sequence of metrics to InfluxDB JSON format
*
* Assumes all metrics have the same partition key
*
* @param metrics the metrics to convert
* @return the JSON string for these metrics
*/
private[data] def convertToJson(metrics: Seq[Metric]): String = {
def wrapSeq(words: Seq[String]): String = words.map(word => s""","$word"""").mkString("")
def nonCaveTags(metric: Metric): Map[String, String] = metric.tags.filterKeys { key =>
key != Metric.Organization && key != Metric.Cluster
}
val first = metrics.headOption.getOrElse(sys.error("There should be at least one metric to convert."))
val points = metrics.map { metric =>
s"[${metric.timestamp},${metric.value}${wrapSeq(nonCaveTags(metric).values.toSeq)}]"
}.mkString("[", ",", "]")
val columns = s""""time","value"${wrapSeq(nonCaveTags(first).keys.toSeq)}"""
s"""{"name":"${first.name}","columns":[$columns],"points":$points}"""
}
}
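// Hedged sketch of the output shape (Metric construction is assumed): a metric
// "requests" at time 1400000000 with value 42.0, tagged organization -> "acme"
// and host -> "web-1", yields the account-keyed entry
//   "acme" -> """[{"name":"requests","columns":["time","value","host"],"points":[[1400000000,42.0,"web-1"]]}]"""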
|
gilt/cave
|
core/src/main/scala/com/cave/metrics/data/influxdb/InfluxMetric.scala
|
Scala
|
mit
| 1,841
|
package chat.tox.antox.wrapper
import chat.tox.antox.utils.Hex
object ToxAddress {
val MAX_ADDRESS_LENGTH = 76
def isAddressValid(address: String): Boolean =
address.length == MAX_ADDRESS_LENGTH &&
address.matches("^[0-9A-F]+$") &&
address.grouped(4).map(Integer.parseInt(_, 16)).fold(0)(_ ^ _) == 0
def removePrefix(address: String): String = {
val prefix = "tox:"
if (address.toLowerCase.contains(prefix)) {
address.substring(prefix.length)
} else {
address
}
}
}
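// Worked example: "ABCD" * 18 + "0000" is 76 hex chars whose nineteen 4-char
// groups XOR to zero (an even count of 0xABCD cancels out), so isAddressValid
// accepts it; flipping any single hex digit breaks the checksum.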
case class ToxAddress(address: String) {
def fixedAddress : String = ToxAddress.removePrefix(address.toUpperCase())
  if (!ToxAddress.isAddressValid(fixedAddress)) {
    throw new IllegalArgumentException(s"address must be ${ToxAddress.MAX_ADDRESS_LENGTH} hex chars long")
  }
def this(bytes: Array[Byte]) =
this(Hex.bytesToHexString(bytes))
def bytes: Array[Byte] = Hex.hexStringToBytes(fixedAddress)
def key: FriendKey = FriendKey(fixedAddress.substring(0, ToxKey.MAX_KEY_LENGTH))
override def toString: String = fixedAddress
}
|
subliun/Antox
|
app/src/main/scala/chat/tox/antox/wrapper/ToxAddress.scala
|
Scala
|
gpl-3.0
| 1,072
|
package utils
import com.github.nscala_time.time.Imports._
import org.joda.time.{LocalDate, IllegalFieldValueException}
import java.util.Calendar
/**
* The Class DateTimeUtils.
*
* @author Nguyen Duc Dung
* @since 4/15/14 3:57 PM
*
*/
object DateTimeUtils {
val quarters = Map(
1 -> List(1, 2, 3),
2 -> List(4, 5, 6),
3 -> List(7, 8, 9),
4 -> List(10, 11, 12)
)
def currentYear = LocalDate.now.getYear
def currentMonth = LocalDate.now.getMonthOfYear
def getQuarter(month: Int) = {
val quarter = (month - 1) / 3 + 1
quarter
}
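  // e.g. getQuarter(1) == 1, getQuarter(6) == 2, getQuarter(12) == 4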
  def createLocalDate(month: Int, day: Int) = try {
    LocalDate.now.withMonthOfYear(month).withDayOfMonth(day)
  } catch {
    case _: IllegalFieldValueException =>
      // fall back to the last valid day of the requested month (e.g. day 31 in April -> 30)
      LocalDate.now.withMonthOfYear(month)
        .withDayOfMonth(LocalDate.now.withMonthOfYear(month).dayOfMonth().withMaximumValue().getDayOfMonth)
  }
// This takes a 1-based month, e.g. January=1. If you want to use a 0-based
// month, remove the "- 1" later on.
// author: http://stackoverflow.com/questions/9909361/how-can-i-find-saturdays-and-sundays-in-a-given-month
def countWeekendDays(year: Int, month: Int) = {
val calendar = Calendar.getInstance()
// Note that month is 0-based in calendar, bizarrely.
calendar.set(year, month - 1, 1)
val daysInMonth = calendar.getActualMaximum(Calendar.DAY_OF_MONTH)
var count = 0
for (day <- 1 to daysInMonth) {
calendar.set(year, month - 1, day)
val dayOfWeek = calendar.get(Calendar.DAY_OF_WEEK)
if (dayOfWeek == Calendar.SUNDAY || dayOfWeek == Calendar.SATURDAY) {
count += 1
}
}
count
}
  def monthWorkingDay(year: Int, month: Int) = {
    val now = LocalDate.now().withMonthOfYear(month).withYear(year)
    val daysInMonth = now.dayOfMonth().withMaximumValue().getDayOfMonth
    val weekendDays = DateTimeUtils.countWeekendDays(now.getYear, now.getMonthOfYear)
    daysInMonth - weekendDays // working days = days in month minus weekend days
  }
}
|
SunriseSoftVN/sunerp
|
app/utils/DateTimeUtils.scala
|
Scala
|
apache-2.0
| 2,018
|
import leon.lang._
object PropositionalLogic {
sealed abstract class Formula
case class And(lhs: Formula, rhs: Formula) extends Formula
case class Or(lhs: Formula, rhs: Formula) extends Formula
case class Implies(lhs: Formula, rhs: Formula) extends Formula
case class Not(f: Formula) extends Formula
case class Literal(id: BigInt) extends Formula
def nnf(formula: Formula): Formula = (formula match {
case And(lhs, rhs) => And(nnf(lhs), nnf(rhs))
case Or(lhs, rhs) => Or(nnf(lhs), nnf(rhs))
case Implies(lhs, rhs) => nnf(Or(Not(lhs), rhs))
case Not(And(lhs, rhs)) => Or(nnf(Not(lhs)), nnf(Not(rhs)))
case Not(Or(lhs, rhs)) => And(nnf(Not(lhs)), nnf(Not(rhs)))
case Not(Implies(lhs, rhs)) => And(nnf(lhs), nnf(Not(rhs)))
case Not(Not(f)) => nnf(f)
case Not(Literal(_)) => formula
case Literal(_) => formula
}) ensuring(isNNF(_))
  def isNNF(f: Formula): Boolean = f match {
    // NNF: negation appears only directly on literals, and no implications remain.
    // (The original stub returned false for every formula, which would make the
    // ensuring clause on nnf fail.)
    case And(lhs, rhs)   => isNNF(lhs) && isNNF(rhs)
    case Or(lhs, rhs)    => isNNF(lhs) && isNNF(rhs)
    case Implies(_, _)   => false
    case Not(Literal(_)) => true
    case Not(_)          => false
    case Literal(_)      => true
  }
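  // Example: nnf(Not(Implies(Literal(1), Literal(2)))) rewrites to
  // And(Literal(1), Not(Literal(2))), which isNNF accepts.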
// Note that matching should be exhaustive due to precondition.
def vars(f: Formula): Set[BigInt] = {
require(isNNF(f))
f match {
case And(lhs, rhs) => vars(lhs) ++ vars(rhs)
case Or(lhs, rhs) => vars(lhs) ++ vars(rhs)
case Not(Literal(i)) => Set[BigInt](i)
case Literal(i) => Set[BigInt](i)
}
}
}
|
ericpony/scala-examples
|
testcases/web/sav15/02_Exercise2.scala
|
Scala
|
mit
| 1,329
|
package com.coiney.akka.rabbit.protocol
import akka.actor.ActorRef
import com.rabbitmq.client.AMQP
import com.coiney.akka.rabbit.{ExchangeConfig, QueueConfig}
import scala.concurrent.duration.FiniteDuration
sealed trait RabbitRequest
case class AddConfirmListener(listener: ActorRef) extends RabbitRequest
case class AddReturnListener(listener: ActorRef) extends RabbitRequest
case class AddShutdownListener(listener: ActorRef) extends RabbitRequest
case class DeclareQueue(queueConfig: QueueConfig) extends RabbitRequest
case class DeclareQueuePassive(name: String) extends RabbitRequest
case class DeleteQueue(name: String, ifUnused: Boolean = false, ifEmpty: Boolean = false) extends RabbitRequest
case class PurgeQueue(name: String) extends RabbitRequest
case class BindQueue(name: String, exchange: String, routingKey: String, arguments: Map[String, AnyRef] = Map.empty) extends RabbitRequest
case class UnbindQueue(name: String, exchange: String, routingKey: String) extends RabbitRequest
case class DeclareExchange(exchangeConfig: ExchangeConfig) extends RabbitRequest
case class DeclareExchangePassive(name: String) extends RabbitRequest
case class DeleteExchange(name: String) extends RabbitRequest
case class BindExchange(destination: String, source: String, routingKey: String, arguments: Map[String, AnyRef] = Map.empty) extends RabbitRequest
case class UnbindExchange(destination: String, source: String, routingKey: String) extends RabbitRequest
case class Publish(exchange: String, routingKey: String, body: Array[Byte], mandatory: Boolean = true, immediate: Boolean = false, properties: Option[AMQP.BasicProperties] = None) extends RabbitRequest
case class Transaction(pubs: Seq[Publish]) extends RabbitRequest
case class Ack(deliveryTag: Long) extends RabbitRequest
case class Reject(deliveryTag: Long, requeue: Boolean = true) extends RabbitRequest
case class Get(queue: String, autoAck: Boolean = false) extends RabbitRequest
case object ConfirmSelect extends RabbitRequest
case class WaitForConfirms(timeout: Option[FiniteDuration]) extends RabbitRequest
case class WaitForConfirmsOrDie(timeout: Option[FiniteDuration]) extends RabbitRequest
case class AddConsumer(listener: ActorRef, consumer: ActorRef) extends RabbitRequest
case class ConsumeQueue(queueConfig: QueueConfig) extends RabbitRequest
case class CancelConsume(consumerTag: String) extends RabbitRequest
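// A typical request sequence built from the messages above (a sketch; the
// `channelActor` ActorRef and its config arguments are hypothetical, only the
// message shapes come from this protocol):
//   channelActor ! DeclareExchange(exchangeConfig)
//   channelActor ! DeclareQueue(queueConfig)
//   channelActor ! BindQueue("q", "ex", "key")
//   channelActor ! Publish("ex", "key", "hello".getBytes("UTF-8"))
//   channelActor ! Ack(deliveryTag = 1L)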
|
Coiney/akka-rabbit
|
akka-rabbit-core/src/main/scala/com/coiney/akka/rabbit/protocol/RabbitRequestProtocol.scala
|
Scala
|
bsd-3-clause
| 2,398
|
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.indexer
import scala.collection.immutable.Queue
import org.objectweb.asm.Opcodes._
sealed trait Access
case object Public extends Access
case object Default extends Access
case object Protected extends Access
case object Private extends Access
object Access {
def apply(code: Int): Access =
if ((ACC_PUBLIC & code) > 0) Public
else if ((ACC_PROTECTED & code) > 0) Protected
else if ((ACC_PRIVATE & code) > 0) Private
else Default
}
sealed trait FullyQualifiedName {
def contains(o: FullyQualifiedName): Boolean
def fqnString: String
}
final case class PackageName(path: List[String]) extends FullyQualifiedName {
def contains(o: FullyQualifiedName) = o match {
case PackageName(pn) => pn.startsWith(path)
case ClassName(p, _) => contains(p)
case FieldName(c, _) => contains(c)
case MethodName(c, _, _) => contains(c)
}
def fqnString = path.mkString(".")
def parent = PackageName(path.init)
}
final case class ClassName(pack: PackageName, name: String)
extends FullyQualifiedName with DescriptorType {
def contains(o: FullyQualifiedName) = o match {
case ClassName(op, on) if pack == op && on.startsWith(name) =>
(on == name) || on.startsWith(name + "$")
case FieldName(cn, _) => contains(cn)
case MethodName(cn, _, _) => contains(cn)
case _ => false
}
def fqnString =
if (pack.path.isEmpty) name
else pack.fqnString + "." + name
private def nonPrimitiveInternalString: String =
"L" + (if (pack.path.isEmpty) name else pack.path.mkString("/") + "/" + name) + ";"
lazy val internalString: String = {
if (pack.path.isEmpty)
name match {
case "boolean" => "Z"
case "byte" => "B"
case "char" => "C"
case "short" => "S"
case "int" => "I"
case "long" => "J"
case "float" => "F"
case "double" => "D"
case "void" => "V"
case _ => nonPrimitiveInternalString
}
else nonPrimitiveInternalString
}
}
object ClassName {
private val Root = PackageName(Nil)
// we consider Primitives to be ClassNames
private def Primitive(name: String): ClassName = ClassName(Root, name)
val PrimitiveBoolean = Primitive("boolean")
val PrimitiveByte = Primitive("byte")
val PrimitiveChar = Primitive("char")
val PrimitiveShort = Primitive("short")
val PrimitiveInt = Primitive("int")
val PrimitiveLong = Primitive("long")
val PrimitiveFloat = Primitive("float")
val PrimitiveDouble = Primitive("double")
val PrimitiveVoid = Primitive("void")
// must be a single type descriptor
// strips array reification
def fromDescriptor(desc: String): ClassName =
DescriptorParser.parseType(desc) match {
case c: ClassName => c
case a: ArrayDescriptor => a.reifier
}
// internal name is effectively the FQN with / instead of dots
def fromInternal(internal: String): ClassName = fromFqn(internal, '/')
def fromFqn(internal: String, splitter: Char = '.'): ClassName = {
val parts = internal.split(splitter)
val (before, after) = parts.splitAt(parts.length - 1)
ClassName(PackageName(before.toList), after(0))
}
}
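// Illustrative values for the encodings above (standard JVM internal names):
//   ClassName.PrimitiveInt.internalString == "I"
//   ClassName.fromFqn("java.lang.String").internalString == "Ljava/lang/String;"
//   ClassName.fromInternal("java/util/List").fqnString == "java.util.List"
//   ArrayDescriptor(ClassName.PrimitiveInt).internalString == "[I"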
sealed trait MemberName extends FullyQualifiedName {
def contains(o: FullyQualifiedName) = this == o
}
case class FieldName(
owner: ClassName,
name: String
// not always available in the ASM parser
//ret: DescriptorType
) extends MemberName {
def fqnString = owner.fqnString + "." + name
}
// FQNs are not really unique, because method overloading, so fudge
// the descriptor into the FQN
final case class MethodName(
owner: ClassName,
name: String,
descriptor: Descriptor
) extends MemberName {
def fqnString = owner.fqnString + "." + name + descriptor.descriptorString
}
// Generics signature
sealed trait GenericArg
sealed trait SignatureType
sealed trait BoundType
sealed trait RealTypeSignature
object UpperBound extends BoundType
object LowerBound extends BoundType
final case class GenericClass(
genericParam: Seq[GenericParam],
superClasses: Seq[GenericClassName]
) extends SignatureType
final case class GenericParam(
name: String,
classNames: Seq[RealTypeSignature]
) extends SignatureType
final case class GenericClassName(
className: ClassName,
genericArg: Seq[GenericArg] = Seq.empty,
innerClass: Seq[InnerClassName] = Seq.empty
) extends SignatureType with RealTypeSignature
final case class InnerClassName(
name: String,
genericArg: Seq[GenericArg] = Seq.empty
)
object ExtendsObjectGenericArg
extends GenericArg
final case class SpecifiedGenericArg(
boundType: Option[BoundType],
genericSignature: RealTypeSignature
) extends GenericArg
final case class GenericArray(className: RealTypeSignature)
extends SignatureType with RealTypeSignature
final case class GenericVar(name: String)
extends SignatureType with RealTypeSignature
// Descriptors
sealed trait DescriptorType {
def internalString: String
}
final case class ArrayDescriptor(fqn: DescriptorType) extends DescriptorType {
def reifier: ClassName = fqn match {
case c: ClassName => c
case a: ArrayDescriptor => a.reifier
}
def internalString = "[" + fqn.internalString
}
final case class Descriptor(params: List[DescriptorType], ret: DescriptorType) {
def descriptorString =
"(" + params.map(_.internalString).mkString("") + ")" + ret.internalString
}
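// Example: a method descriptor taking an int and a String and returning void:
//   Descriptor(List(ClassName.PrimitiveInt, ClassName.fromFqn("java.lang.String")),
//     ClassName.PrimitiveVoid).descriptorString == "(ILjava/lang/String;)V"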
final case class RawClassfile(
name: ClassName,
generics: Option[GenericClass],
superClass: Option[ClassName],
interfaces: List[ClassName],
access: Access,
deprecated: Boolean,
fields: Queue[RawField],
methods: Queue[RawMethod],
source: RawSource
)
final case class RawSource(
filename: Option[String],
line: Option[Int]
)
final case class RawType(
fqn: String,
access: Access
)
final case class RawField(
name: FieldName,
clazz: DescriptorType,
generics: Option[String],
access: Access
)
final case class RawMethod(
name: MethodName,
access: Access,
generics: Option[String],
line: Option[Int]
)
|
sugakandrey/ensime-server
|
core/src/main/scala/org/ensime/indexer/domain.scala
|
Scala
|
gpl-3.0
| 6,162
|
object Test extends App {
override def toString = "Test"
assert(s"$this" == "Test")
assert(s"$this$this" == "TestTest")
assert(s"$this$$" == "Test$")
assert(s"$this.##" == "Test.##")
assert(s"$this.toString" == "Test.toString")
assert(s"$this=THIS" == "Test=THIS")
assert(raw"$"" == "\\"")
}
|
som-snytt/dotty
|
tests/run/t5856.scala
|
Scala
|
apache-2.0
| 308
|
package db
import db.TestTables.{directoryTable, itemTable, taggTable}
import slick.jdbc.H2Profile.api._
class DaoCommonDeleteOrderedTest extends DaoCommonTestHelper {
def createItem(parentId: Long, order: Int) = db.run(
itemTable returning itemTable.map(_.id) += Item(directoryId = parentId, order = order, value = "i")
).futureValue
def createItems = {
val rootDir = createDir(None, 0).get
(createItem(rootDir, 0), createItem(rootDir, 1), createItem(rootDir, 2), createItem(rootDir, 3))
}
def orderOfItem(id: Long) = db.run(itemTable.filter(_.id === id).map(_.order).result.head).futureValue
def numberOfItems = db.run(itemTable.size.result).futureValue
def createDir(parentId: Option[Long], order: Int): Option[Long] = Some(db.run(
directoryTable returning directoryTable.map(_.id) += Directory(parentId = parentId, order = order, name = "d")
).futureValue)
def createDirsWithParent = {
val rootDir = createDir(None, 0)
(createDir(rootDir, 0), createDir(rootDir, 1), createDir(rootDir, 2), createDir(rootDir, 3))
}
def createDirsWithoutParent = {
(createDir(None, 0), createDir(None, 1), createDir(None, 2), createDir(None, 3))
}
def orderOfDir(id: Option[Long]) = db.run(directoryTable.filter(_.id === id).map(_.order).result.head).futureValue
def numberOfDirs = db.run(directoryTable.size.result).futureValue
def createTagg(order: Int) = db.run(
taggTable returning taggTable.map(_.id) += Tagg(order = order, value = "t")
).futureValue
def createTaggs = {
(createTagg(0), createTagg(1), createTagg(2), createTagg(3))
}
def orderOfTagg(id: Long) = db.run(taggTable.filter(_.id === id).map(_.order).result.head).futureValue
def numberOfTaggs = db.run(taggTable.size.result).futureValue
"deleteOrdered should" - {
"M: HasParent, parentId: Some, position: lowest" in {
//given
val (i0, i1, i2, i3) = createItems
//when
db.run(
dao.deleteOrdered(itemTable, i0)
).futureValue
//then
orderOfItem(i1) should be(0)
orderOfItem(i2) should be(1)
orderOfItem(i3) should be(2)
numberOfItems should be(3)
}
"M: HasParent, parentId: Some, position: highest" in {
//given
val (i0, i1, i2, i3) = createItems
//when
db.run(
dao.deleteOrdered(itemTable, i3)
).futureValue
//then
orderOfItem(i0) should be(0)
orderOfItem(i1) should be(1)
orderOfItem(i2) should be(2)
numberOfItems should be(3)
}
"M: HasParent, parentId: Some, position: middle" in {
//given
val (i0, i1, i2, i3) = createItems
//when
db.run(
dao.deleteOrdered(itemTable, i1)
).futureValue
//then
orderOfItem(i0) should be(0)
orderOfItem(i2) should be(1)
orderOfItem(i3) should be(2)
numberOfItems should be(3)
}
"M: HasOptionalParent, parentId: None, position: lowest" in {
//given
val (d0, d1, d2, d3) = createDirsWithoutParent
//when
db.run(
dao.deleteOrdered(directoryTable, d0.get)
).futureValue
//then
orderOfDir(d1) should be(0)
orderOfDir(d2) should be(1)
orderOfDir(d3) should be(2)
numberOfDirs should be(3)
}
"M: HasOptionalParent, parentId: None, position: highest" in {
//given
val (d0, d1, d2, d3) = createDirsWithoutParent
//when
db.run(
dao.deleteOrdered(directoryTable, d3.get)
).futureValue
//then
orderOfDir(d0) should be(0)
orderOfDir(d1) should be(1)
orderOfDir(d2) should be(2)
numberOfDirs should be(3)
}
"M: HasOptionalParent, parentId: None, position: middle" in {
//given
val (d0, d1, d2, d3) = createDirsWithoutParent
//when
db.run(
dao.deleteOrdered(directoryTable, d2.get)
).futureValue
//then
orderOfDir(d0) should be(0)
orderOfDir(d1) should be(1)
orderOfDir(d3) should be(2)
numberOfDirs should be(3)
}
"M: HasOptionalParent, parentId: Some, position: lowest" in {
//given
val (d0, d1, d2, d3) = createDirsWithParent
//when
db.run(
dao.deleteOrdered(directoryTable, d0.get)
).futureValue
//then
orderOfDir(d1) should be(0)
orderOfDir(d2) should be(1)
orderOfDir(d3) should be(2)
numberOfDirs should be(4)
}
"M: HasOptionalParent, parentId: Some, position: highest" in {
//given
val (d0, d1, d2, d3) = createDirsWithParent
//when
db.run(
dao.deleteOrdered(directoryTable, d3.get)
).futureValue
//then
orderOfDir(d0) should be(0)
orderOfDir(d1) should be(1)
orderOfDir(d2) should be(2)
numberOfDirs should be(4)
}
"M: HasOptionalParent, parentId: Some, position: middle" in {
//given
val (d0, d1, d2, d3) = createDirsWithParent
//when
db.run(
dao.deleteOrdered(directoryTable, d1.get)
).futureValue
//then
orderOfDir(d0) should be(0)
orderOfDir(d2) should be(1)
orderOfDir(d3) should be(2)
numberOfDirs should be(4)
}
"M: no-parent, parentId: None, position: lowest" in {
//given
val (t0, t1, t2, t3) = createTaggs
//when
db.run(
dao.deleteOrdered(taggTable, t0)
).futureValue
//then
orderOfTagg(t1) should be(0)
orderOfTagg(t2) should be(1)
orderOfTagg(t3) should be(2)
numberOfTaggs should be(3)
}
"M: no-parent, parentId: None, position: highest" in {
//given
val (t0, t1, t2, t3) = createTaggs
//when
db.run(
dao.deleteOrdered(taggTable, t3)
).futureValue
//then
orderOfTagg(t0) should be(0)
orderOfTagg(t1) should be(1)
orderOfTagg(t2) should be(2)
numberOfTaggs should be(3)
}
"M: no-parent, parentId: None, position: middle" in {
//given
val (t0, t1, t2, t3) = createTaggs
//when
db.run(
dao.deleteOrdered(taggTable, t2)
).futureValue
//then
orderOfTagg(t0) should be(0)
orderOfTagg(t1) should be(1)
orderOfTagg(t3) should be(2)
numberOfTaggs should be(3)
}
}
}
|
Igorocky/readtext
|
server/test/db/DaoCommonDeleteOrderedTest.scala
|
Scala
|
mit
| 6,285
|
package scalajs.antdesign
import japgolly.scalajs.react.{React, ReactComponentU_, ReactNode}
import scala.scalajs.js
import scala.scalajs.js.{Dynamic, Object}
/**
* @see https://ant.design/components/badge/#API
* @param count Number to show in badge
* @param overflowCount Max count to show
* @param dot Whether to show red dot without number
* @param status Set Badge as a status dot
* @param text If [[status]] is set, [[text]] is to set the text of status dot
*/
case class Badge(count: js.UndefOr[Int] = js.undefined,
overflowCount: js.UndefOr[Int] = js.undefined,
dot: js.UndefOr[Boolean] = js.undefined,
status: js.UndefOr[Badge.Status] = js.undefined,
text: js.UndefOr[String] = js.undefined) {
def toJS: Object with Dynamic = {
val p = js.Dynamic.literal()
count.foreach { x =>
p.updateDynamic("count")(x)
}
overflowCount.foreach { x =>
p.updateDynamic("overflowCount")(x)
}
dot.foreach { x =>
p.updateDynamic("dot")(x)
}
status.foreach { x =>
p.updateDynamic("status")(x.id)
}
text.foreach { x =>
p.updateDynamic("text")(x)
}
p
}
def apply(children: ReactNode*): ReactComponentU_ = {
val f =
React.asInstanceOf[js.Dynamic].createFactory(js.Dynamic.global.antd.Badge)
f(toJS, children.toJsArray).asInstanceOf[ReactComponentU_]
}
}
object Badge {
sealed abstract class Status(val id: String)
object Status {
case object Success extends Status("success")
case object Processing extends Status("processing")
case object Default extends Status("default")
case object Error extends Status("error")
case object Warning extends Status("warning")
}
}
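// A minimal usage sketch for the bindings above; `child` is a hypothetical
// ReactNode supplied by the caller.
object BadgeExample {
  def render(child: ReactNode): ReactComponentU_ =
    Badge(count = 5, overflowCount = 99, status = Badge.Status.Processing)(child)
}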
|
mdedetrich/scalajs-antdesign
|
src/main/scala/scalajs/antdesign/Badge.scala
|
Scala
|
bsd-3-clause
| 1,779
|
package com.twitter.scalding.parquet.scrooge
import _root_.parquet.scrooge.ParquetScroogeScheme
import cascading.scheme.Scheme
import com.twitter.scalding._
import com.twitter.scalding.parquet.thrift.ParquetThriftBase
import com.twitter.scalding.source.{ DailySuffixSource, HourlySuffixSource }
import com.twitter.scrooge.ThriftStruct
trait ParquetScrooge[T <: ThriftStruct] extends ParquetThriftBase[T] {
override def hdfsScheme = {
val scheme = new ParquetScroogeScheme[T](this.config)
HadoopSchemeInstance(scheme.asInstanceOf[Scheme[_, _, _, _, _]])
}
}
class DailySuffixParquetScrooge[T <: ThriftStruct](
path: String,
dateRange: DateRange)(implicit override val mf: Manifest[T])
extends DailySuffixSource(path, dateRange) with ParquetScrooge[T]
class HourlySuffixParquetScrooge[T <: ThriftStruct](
path: String,
dateRange: DateRange)(implicit override val mf: Manifest[T])
extends HourlySuffixSource(path, dateRange) with ParquetScrooge[T]
class FixedPathParquetScrooge[T <: ThriftStruct](paths: String*)(implicit override val mf: Manifest[T])
extends FixedPathSource(paths: _*) with ParquetScrooge[T]
|
nvoron23/scalding
|
scalding-parquet-scrooge/src/main/scala/com/twitter/scalding/parquet/scrooge/ParquetScrooge.scala
|
Scala
|
apache-2.0
| 1,140
|
/*
* Copyright 2020 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play.frontend.auth.connectors
import org.mockito.Matchers.{any, eq => meq}
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import play.api.libs.json.Json
import uk.gov.hmrc.domain.{Nino, SaUtr}
import uk.gov.hmrc.http._
import uk.gov.hmrc.play.frontend.auth._
import uk.gov.hmrc.play.frontend.auth.connectors.domain.{Accounts, PayeAccount, SaAccount}
import uk.gov.hmrc.play.test.{UnitSpec, WithFakeApplication}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
class DelegationConnectorSpec extends UnitSpec with WithFakeApplication {
private implicit val hc = HeaderCarrier()
"The getDelegationData response handler" should {
val delegationDataObject = DelegationData(
principalName = "Dave Client",
attorneyName = "Bob Agent",
accounts = Accounts(
paye = Some(PayeAccount(link = "http://paye/some/path", nino = Nino("AB123456D"))),
sa = Some(SaAccount(link = "http://sa/some/utr", utr = SaUtr("1234567890")))
),
link = Link(url = "http://taxplatform/some/dashboard", text = "Back to dashboard")
)
val delegationDataJson = Json.obj(
"attorneyName" -> "Bob Agent",
"principalName" -> "Dave Client",
"link" -> Json.obj(
"url" -> "http://taxplatform/some/dashboard",
"text" -> "Back to dashboard"
),
"accounts" -> Json.obj(
"paye" -> Json.obj(
"link" -> "http://paye/some/path",
"nino" -> "AB123456D"
),
"sa" -> Json.obj(
"link" -> "http://sa/some/utr",
"utr" -> "1234567890"
)
)
)
"return the delegation data returned from the service, if the response code is 200" in new TestCase {
val response = HttpResponse(200, Some(delegationDataJson))
connector.responseHandler.read("GET", s"/oid/$oid", response) shouldBe Some(delegationDataObject)
}
"return None when the response code is 404" in new TestCase {
val response = HttpResponse(404)
connector.responseHandler.read("GET", s"/oid/$oid", response) shouldBe None
}
"throw an exception if the response code is anything other than 200 or 404" in new TestCase {
val oid204 = "204oid"
val oid400 = "400oid"
val oid500 = "500oid"
a[DelegationServiceException] should be thrownBy connector.responseHandler.read("GET", s"/oid/$oid204", HttpResponse(204))
a[DelegationServiceException] should be thrownBy connector.responseHandler.read("GET", s"/oid/$oid400", HttpResponse(400))
a[DelegationServiceException] should be thrownBy connector.responseHandler.read("GET", s"/oid/$oid500", HttpResponse(500))
}
"throw an exception if the response is not valid JSON" in new TestCase {
val response = HttpResponse(200, None, Map.empty, Some("{ not _ json :"))
a[DelegationServiceException] should be thrownBy connector.responseHandler.read("GET", s"/oid/$oid", response)
}
"throw an exception if the response is valid JSON, but not representing Delegation Data" in new TestCase {
val response = HttpResponse(200, None, Map.empty, Some("""{"valid":"json"}"""))
a[DelegationServiceException] should be thrownBy connector.responseHandler.read("GET", s"/oid/$oid", response)
}
}
"The startDelegation method" should {
val delegationContextObject = DelegationContext(
principalName = "Dave Client",
attorneyName = "Bob Agent",
principalTaxIdentifiers = TaxIdentifiers(
paye = Some(Nino("AB123456D")),
sa = Some(SaUtr("1234567890"))
),
link = Link(url = "http://taxplatform/some/dashboard", text = "Back to dashboard")
)
val delegationContextJson = Json.obj(
"attorneyName" -> "Bob Agent",
"principalName" -> "Dave Client",
"link" -> Json.obj(
"url" -> "http://taxplatform/some/dashboard",
"text" -> "Back to dashboard"
),
"principalTaxIdentifiers" -> Json.obj(
"paye" -> "AB123456D",
"sa" -> "1234567890"
)
).toString()
"send the delegation data to the DelegationService, and succeed if the response code is 201" in new TestCase {
when(mockHttp.PUT[DelegationContext, HttpResponse](meq(s"$baseUrl/oid/$oid"), meq(delegationContextObject))(any(), any(), any(), any()))
.thenReturn(Future(HttpResponse(201)))
await(connector.startDelegation(oid, delegationContextObject))
}
"send the delegation data to the DelegationService, and fail if the response code is anything other than 201" in new TestCase {
val oid200 = "200oid"
val oid204 = "204oid"
when(mockHttp.PUT[DelegationContext, HttpResponse](meq(s"$baseUrl/oid/$oid200"), meq(delegationContextObject))(any(), any(), any(), any()))
.thenReturn(Future(HttpResponse(200)))
when(mockHttp.PUT[DelegationContext, HttpResponse](meq(s"$baseUrl/oid/$oid204"), meq(delegationContextObject))(any(), any(), any(), any()))
.thenReturn(Future(HttpResponse(204)))
a[DelegationServiceException] should be thrownBy await(connector.startDelegation(oid200, delegationContextObject))
a[DelegationServiceException] should be thrownBy await(connector.startDelegation(oid204, delegationContextObject))
}
"send the delegation data to the DelegationService, and bubble up any exceptions thrown by http-verbs" in new TestCase {
when(mockHttp.PUT[DelegationContext, HttpResponse](any(), any())
(any(), any(), any(), any()))
.thenThrow(new RuntimeException("Boom"))
a[RuntimeException] should be thrownBy await(connector.startDelegation("url", delegationContextObject))
}
}
"The endDelegation method" should {
"request deletion from the Delegation Service and succeed if the result is 204" in new TestCase {
when(mockHttp.DELETE[HttpResponse](meq(s"$baseUrl/oid/$oid"))(any(), any(), any())).thenReturn(Future(HttpResponse(204)))
await(connector.endDelegation(oid))
}
"request deletion from the Delegation Service and succeed if the result is 404" in new TestCase {
when(mockHttp.DELETE[HttpResponse](meq(s"$baseUrl/oid/$oid"))(any(), any(), any())).thenReturn(Future(HttpResponse(404)))
await(connector.endDelegation(oid))
}
"request deletion from the Delegation Service and fail if the result anything other than 204 or 404" in new TestCase {
val oid200 = "200oid"
val oid201 = "201oid"
val oid400 = "400oid"
val oid500 = "500oid"
when(mockHttp.DELETE[HttpResponse](meq(s"$baseUrl/oid/$oid200"))(any(), any(), any()))
.thenReturn(Future(HttpResponse(200)))
when(mockHttp.DELETE[HttpResponse](meq(s"$baseUrl/oid/$oid201"))(any(), any(), any()))
.thenReturn(Future(HttpResponse(201)))
when(mockHttp.DELETE[HttpResponse](meq(s"$baseUrl/oid/$oid400"))(any(), any(), any()))
.thenReturn(Future(HttpResponse(400)))
when(mockHttp.DELETE[HttpResponse](meq(s"$baseUrl/oid/$oid500"))(any(), any(), any()))
.thenReturn(Future(HttpResponse(500)))
a[DelegationServiceException] should be thrownBy await(connector.endDelegation(oid200))
a[DelegationServiceException] should be thrownBy await(connector.endDelegation(oid201))
a[DelegationServiceException] should be thrownBy await(connector.endDelegation(oid400))
a[DelegationServiceException] should be thrownBy await(connector.endDelegation(oid500))
}
}
trait TestHttp extends CoreGet with CorePut with CoreDelete
trait TestCase extends MockitoSugar {
val baseUrl = s"http://localhost"
val mockHttp = mock[TestHttp]
val connector = new DelegationConnector {
override protected val serviceUrl = baseUrl
override protected lazy val http = mockHttp
}
val oid = "1234"
}
}
|
hmrc/play-authorised-frontend
|
src/test/scala/uk/gov/hmrc/play/frontend/auth/connectors/DelegationConnectorSpec.scala
|
Scala
|
apache-2.0
| 8,451
|
/*
* Bang.scala
* (Cord)
*
* Copyright (c) 2015-2020 Hanns Holger Rutz.
*
* This software is published under the GNU Lesser General Public License v2.1+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss.cord
case object Bang {
override def toString = "bang"
}
|
Sciss/Cord
|
src/main/scala/de/sciss/cord/Bang.scala
|
Scala
|
lgpl-2.1
| 334
|
package org.scalajs.core.tools.optimizer
import scala.annotation.switch
import org.scalajs.core.ir
import ir.Position
import ir.Position.NoPosition
import org.scalajs.core.tools.javascript.Trees._
import com.google.javascript.rhino._
import com.google.javascript.jscomp._
import scala.collection.mutable
import scala.annotation.tailrec
import java.net.URI
class ClosureAstTransformer(val relativizeBaseURI: Option[URI] = None) {
private val inputId = new InputId("Scala.js IR")
private val dummySourceName = new java.net.URI("virtualfile:scala.js-ir")
def transformStat(tree: Tree)(implicit parentPos: Position): Node =
innerTransformStat(tree, tree.pos orElse parentPos)
private def innerTransformStat(tree: Tree, pos_in: Position): Node = {
implicit val pos = pos_in
wrapTransform(tree) {
case VarDef(ident, EmptyTree) =>
new Node(Token.VAR, transformName(ident))
case VarDef(ident, rhs) =>
val node = transformName(ident)
node.addChildToFront(transformExpr(rhs))
new Node(Token.VAR, node)
case Skip() =>
new Node(Token.EMPTY)
case Block(stats) =>
transformBlock(stats, pos)
case Labeled(label, body) =>
new Node(Token.LABEL, transformLabel(label), transformBlock(body))
case Return(EmptyTree) =>
new Node(Token.RETURN)
case Return(expr) =>
new Node(Token.RETURN, transformExpr(expr))
case If(cond, thenp, Skip()) =>
new Node(Token.IF, transformExpr(cond), transformBlock(thenp))
case If(cond, thenp, elsep) =>
new Node(Token.IF, transformExpr(cond),
transformBlock(thenp), transformBlock(elsep))
case While(cond, body, None) =>
new Node(Token.WHILE, transformExpr(cond), transformBlock(body))
case While(cond, body, Some(label)) =>
val whileNode =
new Node(Token.WHILE, transformExpr(cond), transformBlock(body))
new Node(Token.LABEL, transformLabel(label),
setNodePosition(whileNode, pos))
case DoWhile(body, cond, None) =>
new Node(Token.DO, transformBlock(body), transformExpr(cond))
case DoWhile(body, cond, Some(label)) =>
val doNode =
new Node(Token.DO, transformBlock(body), transformExpr(cond))
new Node(Token.LABEL, transformLabel(label),
setNodePosition(doNode, pos))
case Try(block, errVar, handler, EmptyTree) =>
val catchPos = handler.pos orElse pos
val catchNode =
new Node(Token.CATCH, transformName(errVar), transformBlock(handler))
val blockNode =
new Node(Token.BLOCK, setNodePosition(catchNode, catchPos))
new Node(Token.TRY, transformBlock(block),
setNodePosition(blockNode, catchPos))
case Try(block, _, EmptyTree, finalizer) =>
val blockNode = setNodePosition(new Node(Token.BLOCK), pos)
new Node(Token.TRY, transformBlock(block), blockNode,
transformBlock(finalizer))
case Try(block, errVar, handler, finalizer) =>
val catchPos = handler.pos orElse pos
val catchNode =
new Node(Token.CATCH, transformName(errVar), transformBlock(handler))
val blockNode =
new Node(Token.BLOCK, setNodePosition(catchNode, catchPos))
new Node(Token.TRY, transformBlock(block),
setNodePosition(blockNode, catchPos), transformBlock(finalizer))
case Throw(expr) =>
new Node(Token.THROW, transformExpr(expr))
case Break(None) =>
new Node(Token.BREAK)
case Break(Some(label)) =>
new Node(Token.BREAK, transformLabel(label))
case Continue(None) =>
new Node(Token.CONTINUE)
case Continue(Some(label)) =>
new Node(Token.CONTINUE, transformLabel(label))
case Switch(selector, cases, default) =>
val switchNode = new Node(Token.SWITCH, transformExpr(selector))
for ((expr, body) <- cases) {
val bodyNode = transformBlock(body)
bodyNode.putBooleanProp(Node.SYNTHETIC_BLOCK_PROP, true)
val caseNode = new Node(Token.CASE, transformExpr(expr), bodyNode)
switchNode.addChildToBack(
setNodePosition(caseNode, expr.pos orElse pos))
}
if (default != EmptyTree) {
val bodyNode = transformBlock(default)
bodyNode.putBooleanProp(Node.SYNTHETIC_BLOCK_PROP, true)
val caseNode = new Node(Token.DEFAULT_CASE, bodyNode)
switchNode.addChildToBack(
setNodePosition(caseNode, default.pos orElse pos))
}
switchNode
case Debugger() =>
new Node(Token.DEBUGGER)
case FunctionDef(name, args, body) =>
val node = transformName(name)
val rhs = genFunction(name.name, args, body)
node.addChildToFront(rhs)
new Node(Token.VAR, node)
case _ =>
// We just assume it is an expression
new Node(Token.EXPR_RESULT, transformExpr(tree))
}
}
def transformExpr(tree: Tree)(implicit parentPos: Position): Node =
innerTransformExpr(tree, tree.pos orElse parentPos)
private def innerTransformExpr(tree: Tree, pos_in: Position): Node = {
implicit val pos = pos_in
wrapTransform(tree) {
case Block(exprs) =>
exprs.map(transformExpr).reduceRight { (expr1, expr2) =>
setNodePosition(new Node(Token.COMMA, expr1, expr2), pos)
}
case If(cond, thenp, elsep) =>
new Node(Token.HOOK, transformExpr(cond),
transformExpr(thenp), transformExpr(elsep))
case Assign(lhs, rhs) =>
new Node(Token.ASSIGN, transformExpr(lhs), transformExpr(rhs))
case New(ctor, args) =>
val node = new Node(Token.NEW, transformExpr(ctor))
args.foreach(arg => node.addChildToBack(transformExpr(arg)))
node
case DotSelect(qualifier, item) =>
new Node(Token.GETPROP, transformExpr(qualifier), transformString(item))
case BracketSelect(qualifier, item) =>
new Node(Token.GETELEM, transformExpr(qualifier), transformExpr(item))
case Apply(fun, args) =>
val node = new Node(Token.CALL, transformExpr(fun))
args.foreach(arg => node.addChildToBack(transformExpr(arg)))
// Closure needs to know (from the parser), if the call has a bound
// `this` or not. Since JSDesugar inserts protects calls if necessary,
// it is sufficient to check if we have a select as target
if (!fun.isInstanceOf[DotSelect] &&
!fun.isInstanceOf[BracketSelect])
node.putBooleanProp(Node.FREE_CALL, true)
node
case Delete(prop) =>
new Node(Token.DELPROP, transformExpr(prop))
case UnaryOp(op, lhs) =>
mkUnaryOp(op, transformExpr(lhs))
case BinaryOp(op, lhs, rhs) =>
mkBinaryOp(op, transformExpr(lhs), transformExpr(rhs))
case ArrayConstr(items) =>
val node = new Node(Token.ARRAYLIT)
items.foreach(i => node.addChildToBack(transformExpr(i)))
node
case ObjectConstr(fields) =>
val node = new Node(Token.OBJECTLIT)
for ((name, expr) <- fields) {
val fldNode = transformStringKey(name)
fldNode.addChildToBack(transformExpr(expr))
node.addChildToBack(fldNode)
}
node
case Undefined() =>
new Node(Token.VOID, setNodePosition(Node.newNumber(0.0), pos))
case Null() =>
new Node(Token.NULL)
case BooleanLiteral(value) =>
if (value) new Node(Token.TRUE) else new Node(Token.FALSE)
case IntLiteral(value) =>
Node.newNumber(value)
case DoubleLiteral(value) =>
Node.newNumber(value)
case StringLiteral(value) =>
Node.newString(value)
case VarRef(ident) =>
transformName(ident)
case This() =>
new Node(Token.THIS)
case Function(args, body) =>
genFunction("", args, body)
case _ =>
throw new TransformException(s"Unknown tree of class ${tree.getClass()}")
}
}
private def genFunction(name: String, args: List[ParamDef], body: Tree)(
implicit pos: Position): Node = {
val paramList = new Node(Token.PARAM_LIST)
args.foreach(arg => paramList.addChildToBack(transformParam(arg)))
val nameNode = setNodePosition(Node.newString(Token.NAME, name), pos)
new Node(Token.FUNCTION, nameNode, paramList, transformBlock(body))
}
def transformParam(param: ParamDef)(implicit parentPos: Position): Node =
transformName(param.name)
def transformName(ident: Ident)(implicit parentPos: Position): Node =
setNodePosition(Node.newString(Token.NAME, ident.name),
ident.pos orElse parentPos)
def transformLabel(ident: Ident)(implicit parentPos: Position): Node =
setNodePosition(Node.newString(Token.LABEL_NAME, ident.name),
ident.pos orElse parentPos)
def transformString(pName: PropertyName)(implicit parentPos: Position): Node =
setNodePosition(Node.newString(pName.name), pName.pos orElse parentPos)
def transformStringKey(pName: PropertyName)(
implicit parentPos: Position): Node = {
val node = Node.newString(Token.STRING_KEY, pName.name)
if (pName.isInstanceOf[StringLiteral])
node.setQuotedString()
setNodePosition(node, pName.pos orElse parentPos)
}
def transformBlock(tree: Tree)(implicit parentPos: Position): Node = {
val pos = if (tree.pos.isDefined) tree.pos else parentPos
wrapTransform(tree) {
case Block(stats) =>
transformBlock(stats, pos)
case tree =>
transformBlock(List(tree), pos)
} (pos)
}
def transformBlock(stats: List[Tree], blockPos: Position): Node = {
@inline def ctorDoc(node: Node) = {
val b = new JSDocInfoBuilder(false)
b.recordConstructor()
b.build(node)
}
val block = new Node(Token.BLOCK)
// The Rhino IR attaches DocComments to the following nodes (rather than
// having individual nodes). We preprocess these here.
@tailrec
def loop(ts: List[Tree], nextIsCtor: Boolean = false): Unit = ts match {
      case DocComment(text) :: tss if text.startsWith("@constructor") =>
        loop(tss, nextIsCtor = true)
      case DocComment(_) :: tss =>
        loop(tss)
case t :: tss =>
val node = transformStat(t)(blockPos)
if (nextIsCtor) {
// The @constructor must be propagated through an ExprResult node
val trg =
if (node.isExprResult()) node.getChildAtIndex(0)
else node
trg.setJSDocInfo(ctorDoc(trg))
}
block.addChildToBack(node)
loop(tss)
case Nil =>
}
loop(stats)
block
}
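  // Example: the statement list `DocComment("@constructor") :: FunctionDef(...) :: Nil`
  // produces a single VAR node whose JSDocInfo records @constructor.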
@inline
private def wrapTransform(tree: Tree)(body: Tree => Node)(
implicit pos: Position): Node = {
try {
setNodePosition(body(tree), pos)
} catch {
case e: TransformException =>
throw e // pass through
case e: RuntimeException =>
throw new TransformException(tree, e)
}
}
def setNodePosition(node: Node, pos: ir.Position): node.type = {
if (pos != ir.Position.NoPosition) {
attachSourceFile(node, pos.source)
node.setLineno(pos.line+1)
node.setCharno(pos.column)
} else {
attachSourceFile(node, dummySourceName)
}
node
}
private def attachSourceFile(node: Node, source: URI): node.type = {
val str = sourceUriToString(source)
node.setInputId(inputId)
node.setStaticSourceFile(new SourceFile(str))
node
}
private def sourceUriToString(uri: URI): String = {
val relURI = relativizeBaseURI.fold(uri)(ir.Utils.relativize(_, uri))
ir.Utils.fixFileURI(relURI).toASCIIString
}
// Helpers for IR
@inline
private def mkUnaryOp(op: UnaryOp.Code, lhs: Node): Node = {
import ir.Trees.JSUnaryOp._
val tok = (op: @switch) match {
case ! => Token.NOT
case ~ => Token.BITNOT
case + => Token.POS
case - => Token.NEG
case `typeof` => Token.TYPEOF
}
new Node(tok, lhs)
}
@inline
private def mkBinaryOp(op: BinaryOp.Code, lhs: Node, rhs: Node): Node = {
import ir.Trees.JSBinaryOp._
val tok = (op: @switch) match {
case === => Token.SHEQ
case !== => Token.SHNE
case + => Token.ADD
case - => Token.SUB
case * => Token.MUL
case / => Token.DIV
case % => Token.MOD
case | => Token.BITOR
case & => Token.BITAND
case ^ => Token.BITXOR
case << => Token.LSH
case >> => Token.RSH
case >>> => Token.URSH
case < => Token.LT
case <= => Token.LE
case > => Token.GT
case >= => Token.GE
case || => Token.OR
case && => Token.AND
case `in` => Token.IN
case `instanceof` => Token.INSTANCEOF
}
new Node(tok, lhs, rhs)
}
// Exception wrapper in transforms
class TransformException private (msg: String, e: Throwable)
extends RuntimeException(msg, e) {
def this(tree: Tree, e: Throwable) =
this(TransformException.mkMsg(tree), e)
def this(msg: String) = this(msg, null)
}
object TransformException {
import ir.Printers._
import java.io._
private def mkMsg(tree: Tree): String = {
"Exception while translating Scala.js JS tree to GCC IR at tree:\\n" +
tree.show
}
}
}
|
CapeSepias/scala-js
|
tools/jvm/src/main/scala/org/scalajs/core/tools/optimizer/ClosureAstTransformer.scala
|
Scala
|
bsd-3-clause
| 13,408
|
package info.simsimy.MsgPackParser
import java.nio.ByteBuffer
/**
* Created by sim on 14/12/2016.
*/
class ByteReader(data: Array[Byte]) {
  var Position: Int = 0 // current read offset into `data`
  def getByte(): Byte = {
    val res = data(Position)
    Position += 1
    res
  }
  // int8: one byte, sign-extended into a Short container.
  def getShort(): Short = getByte().toShort
  // uint8: one byte, zero-extended into a Short container.
  def getUShort(): Short = (getByte() & 0xFF).toShort
  // int16: two big-endian bytes. Later bytes must be masked with 0xFF before
  // or-ing; a raw (negative) Byte would sign-extend and corrupt the value.
  def getInt(): Int =
    getBytes(2).foldLeft(0)((num, nextByte) => (num << 8) | (nextByte & 0xFF)).toShort.toInt
  // uint16: two big-endian bytes, zero-extended into an Int container.
  def getUInt(): Int =
    getBytes(2).foldLeft(0)((num, nextByte) => (num << 8) | (nextByte & 0xFF))
  def getBytes(count: Long): Array[Byte] = {
    val startPosition = Position
    Position += count.toInt
    data.slice(startPosition, Position)
  }
  // int32: four big-endian bytes; accumulate unsigned, then sign-extend.
  def getLong(): Long =
    getBytes(4).foldLeft(0L)((num, nextByte) => (num << 8) | (nextByte & 0xFF)).toInt.toLong
  // uint32: four big-endian bytes, zero-extended into a Long container.
  def getULong(): Long =
    getBytes(4).foldLeft(0L)((num, nextByte) => (num << 8) | (nextByte & 0xFF))
  def getBigInt(): BigInt = BigInt(getBytes(8))
  def getUBigInt(): BigInt = {
    // Prepend a zero byte so BigInt treats the 8 bytes as unsigned.
    BigInt(Array(0x00.toByte) ++ getBytes(8))
  }
  def getString(wordLen: Long): String = new String(getBytes(wordLen))
  def getFloat(): Float = ByteBuffer.wrap(getBytes(4)).getFloat
  def getDouble(): Double = ByteBuffer.wrap(getBytes(8)).getDouble
}
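// A small worked example of the big-endian reads above (hypothetical input):
object ByteReaderExample extends App {
  assert(new ByteReader(Array[Byte](0x01, 0xFF.toByte)).getInt() == 511)             // int16 0x01FF
  assert(new ByteReader(Array[Byte](0xFF.toByte, 0xFF.toByte)).getInt() == -1)       // sign-extended
  assert(new ByteReader(Array[Byte](0xFF.toByte, 0xFF.toByte)).getUInt() == 0xFFFF)  // zero-extended
}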
|
SimSimY/scala-msgpack-rawparser
|
src/main/scala/info/simsimy/MsgPackParser/ByteReader.scala
|
Scala
|
apache-2.0
| 1,620
|
package com.github.wakfudecrypt.types.data
import com.github.wakfudecrypt._
@BinaryDecoder
case class UserGroupTemplate(
_0_int32: Int,
_1_bool: Boolean,
_2_hasmap_int32_composite: Map[Int, UserGroupTemplate_2_hasmap_int32_composite]
)
object UserGroupTemplate extends BinaryDataCompanion[UserGroupTemplate] {
override val dataId = 74
}
@BinaryDecoder
case class UserGroupTemplate_2_hasmap_int32_composite(
_0_int32: Int,
_1_int16: Short,
_2_str: String
)
|
jac3km4/wakfudecrypt
|
types/src/main/scala/com/github/wakfudecrypt/types/data/UserGroupTemplate.scala
|
Scala
|
mit
| 474
|
package net.categoricaldata.category
case class Path[O, G](source: O, target: O, morphisms: List[G]) {
if (morphisms.isEmpty) require(source == target)
def length = morphisms.size
def andThen(path: Path[O, G]) = {
require(target == path.source)
Path(source, path.target, morphisms ::: path.morphisms)
}
// This is purely a micro-optimization.
override lazy val hashCode = morphisms.hashCode
override def toString = {
def generatorToString(g: G): String = {
g match {
        case g: net.categoricaldata.ontology.Arrow => " --- \"" + g.name + "\" --> " + g.target.toString
case _ => "(" + g.toString() + ")"
}
}
source.toString + morphisms.map(generatorToString(_)).mkString
}
}
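// Example: paths compose end-to-end, e.g. over Int objects with String generators:
//   Path(1, 2, List("f")) andThen Path(2, 3, List("g")) == Path(1, 3, List("f", "g"))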
|
JasonGross/categoricaldata
|
src/main/scala/net/categoricaldata/category/Path.scala
|
Scala
|
mit
| 739
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.util
import scala.language.implicitConversions
import scala.util.matching.Regex
import scala.util.parsing.combinator.syntactical.StandardTokenParsers
import org.apache.spark.sql.catalyst.SqlLexical
import org.apache.spark.sql.types._
/**
* This is a data type parser that can be used to parse string representations of data types
* provided in SQL queries. This parser is mixed in with DDLParser and SqlParser.
*/
private[sql] trait DataTypeParser extends StandardTokenParsers {
// This is used to create a parser from a regex. We are using regexes for data type strings
// since these strings can be also used as column names or field names.
import lexical.Identifier
implicit def regexToParser(regex: Regex): Parser[String] = acceptMatch(
s"identifier matching regex ${regex}",
{ case Identifier(str) if regex.unapplySeq(str).isDefined => str }
)
protected lazy val primitiveType: Parser[DataType] =
"(?i)string".r ^^^ StringType |
"(?i)float".r ^^^ FloatType |
"(?i)(?:int|integer)".r ^^^ IntegerType |
"(?i)tinyint".r ^^^ ByteType |
"(?i)smallint".r ^^^ ShortType |
"(?i)double".r ^^^ DoubleType |
"(?i)(?:bigint|long)".r ^^^ LongType |
"(?i)binary".r ^^^ BinaryType |
"(?i)boolean".r ^^^ BooleanType |
fixedDecimalType |
"(?i)decimal".r ^^^ DecimalType.USER_DEFAULT |
"(?i)date".r ^^^ DateType |
"(?i)timestamp".r ^^^ TimestampType |
varchar |
char
protected lazy val fixedDecimalType: Parser[DataType] =
("(?i)decimal".r ~> "(" ~> numericLit) ~ ("," ~> numericLit <~ ")") ^^ {
case precision ~ scale =>
DecimalType(precision.toInt, scale.toInt)
}
protected lazy val char: Parser[DataType] =
"(?i)char".r ~> "(" ~> (numericLit <~ ")") ^^^ StringType
protected lazy val varchar: Parser[DataType] =
"(?i)varchar".r ~> "(" ~> (numericLit <~ ")") ^^^ StringType
protected lazy val arrayType: Parser[DataType] =
"(?i)array".r ~> "<" ~> dataType <~ ">" ^^ {
case tpe => ArrayType(tpe)
}
protected lazy val mapType: Parser[DataType] =
"(?i)map".r ~> "<" ~> dataType ~ "," ~ dataType <~ ">" ^^ {
case t1 ~ _ ~ t2 => MapType(t1, t2)
}
protected lazy val structField: Parser[StructField] =
ident ~ ":" ~ dataType ^^ {
case name ~ _ ~ tpe => StructField(name, tpe, nullable = true)
}
protected lazy val structType: Parser[DataType] =
("(?i)struct".r ~> "<" ~> repsep(structField, ",") <~ ">" ^^ {
case fields => new StructType(fields.toArray)
}) |
("(?i)struct".r ~ "<>" ^^^ StructType(Nil))
protected lazy val dataType: Parser[DataType] =
arrayType |
mapType |
structType |
primitiveType
def toDataType(dataTypeString: String): DataType = synchronized {
phrase(dataType)(new lexical.Scanner(dataTypeString)) match {
case Success(result, _) => result
case failure: NoSuccess => throw new DataTypeException(failMessage(dataTypeString))
}
}
private def failMessage(dataTypeString: String): String = {
s"Unsupported dataType: $dataTypeString. If you have a struct and a field name of it has " +
"any special characters, please use backticks (`) to quote that field name, e.g. `x+y`. " +
"Please note that backtick itself is not supported in a field name."
}
}
private[sql] object DataTypeParser {
lazy val dataTypeParser = new DataTypeParser {
override val lexical = new SqlLexical
}
def parse(dataTypeString: String): DataType = dataTypeParser.toDataType(dataTypeString)
}
/** The exception thrown from the [[DataTypeParser]]. */
private[sql] class DataTypeException(message: String) extends Exception(message)
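// A few round-trips through the parser above (a sketch):
//   DataTypeParser.parse("int") == IntegerType
//   DataTypeParser.parse("decimal(10,2)") == DecimalType(10, 2)
//   DataTypeParser.parse("array<string>") == ArrayType(StringType)
//   DataTypeParser.parse("struct<x:double,y:double>") yields a StructType with
//   two nullable DoubleType fields named x and y.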
|
chenc10/Spark-PAF
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DataTypeParser.scala
|
Scala
|
apache-2.0
| 4,521
|
/*
* Copyright (c) 2013 Aviat Networks.
* This file is part of DocReg+Web. Please refer to the NOTICE.txt file for license details.
*/
package vvv.docreg.backend
import akka.actor.Actor
import vvv.docreg.util.Environment
import net.liftweb.common.{Empty, Full}
import vvv.docreg.model.{ProjectAuthorization, Project, User, UserLookup}
import org.squeryl.PrimitiveTypeMode._
import scala.concurrent.duration._
class UserStorage extends Actor {
override def preStart() {
context.system.scheduler.schedule(15.minutes, 24.hours, self, 'Check)(context.dispatcher)
super.preStart()
}
def receive = {
case 'Check => {
val directory = Environment.env.directory
directory.groupMembers(DirectoryConfig.userGroup()) match {
case Full(dns) => {
for (dn <- dns; attributes <- directory.findAttributes(dn)) {
// Cheap and nasty lookup that should set things right for most users.
transaction( UserLookup.fromAttributes(attributes, true) )
}
}
case _ => {
// Ignore
}
}
// for (p <- Project.findAll()) {
// directory.groupMembers(DirectoryConfig.projectAuthorizationGroup(p.name)) match {
// case Full(dns) => {
// val authorized = ProjectAuthorization.authorizedUsersFor(p.id)
// val users = for {
// dn <- dns
// attributes <- directory.findAttributes(dn)
// user <- transaction( UserLookup.fromAttributes(attributes, true) )
// } yield user
// println("Authorized for " + p.name + " = " + users.map(_.displayName))
// }
// case Empty => {
// // Remove all authorizations
// }
// case _ => {
// // Ignore
// }
// }
// }
}
case other => {
unhandled(other)
}
}
}
|
scott-abernethy/docreg-web
|
src/main/scala/vvv/docreg/backend/UserStorage.scala
|
Scala
|
gpl-3.0
| 1,883
|
package com.twitter.finagle.mux
import com.twitter.app.GlobalFlag
import com.twitter.conversions.time._
import com.twitter.finagle.{Status, Stack}
import com.twitter.finagle.mux._
import com.twitter.finagle.stats.{NullStatsReceiver, StatsReceiver}
import com.twitter.finagle.util.parsers.{double, duration, int, list}
import com.twitter.util.{Duration, Future}
import java.util.logging.Logger
/**
* Failure detectors attempt to gauge the liveness of a peer,
* usually by sending ping messages and evaluating response
* times.
*/
private[mux] trait FailureDetector {
def status: Status
}
/**
* The null failure detector is the most conservative: it uses
* no information, and always gauges the session to be
* [[Status.Open]].
*/
private object NullFailureDetector extends FailureDetector {
def status: Status = Status.Open
}
/**
* GlobalFlag to configure FailureDetection used only in the
* absence of any app-specified config. This is the default (legacy)
* behavior.
*/
object sessionFailureDetector extends GlobalFlag(
// by default, use `DarkModeConfig` to just send pings.
// This is an intermediate step to use `ThresholdConfig()`.
"dark",
"The failure detector used to determine session liveness " +
"[none|dark|threshold:minPeriod:threshold:win:closeThreshold]")
/**
* Companion object capable of creating a FailureDetector based on parameterized config.
*/
object FailureDetector {
/**
* Base type used to identify and configure the [[FailureDetector]].
*/
sealed trait Config
/**
* Default config type which tells the [[FailureDetector]] to extract
* config values from the sessionFailureDetector GlobalFlag.
*/
case object GlobalFlagConfig extends Config
/**
* Indicated to use the [[com.twitter.finagle.mux.NullFailureDetector]]
* when creating a new detector
*/
case object NullConfig extends Config
/**
* Indicated to use the default ping frequency and mark busy threshold;
* but it just exports stats instead of actually marking an endpoint as busy.
*/
case class DarkModeConfig(
minPeriod: Duration = 5.seconds,
threshold: Double = 2,
windowSize: Int = 100,
closeThreshold: Int = -1)
extends Config
/**
* Indicated to use the [[com.twitter.finagle.mux.ThresholdFailureDetector]]
* configured with these values when creating a new detector.
*
* The default `windowSize` and `threshold` are chosen from examining a
* representative ping distribution in a Twitter data center. With long tail
* distribution, we want a reasonably large window size to capture long RTTs
* in the history. A small threshold makes the detection sensitive to potential
* failures. There can be a low rate of false positive, which is fine in most
* production cases with cluster redundancy.
*/
case class ThresholdConfig(
minPeriod: Duration = 5.seconds,
threshold: Double = 2,
windowSize: Int = 100,
closeThreshold: Int = -1)
extends Config
/**
* Helper class for configuring a [[FailureDetector]] within a
* [[com.twitter.finagle.Stackable]] client
*/
case class Param(param: Config) {
def mk(): (Param, Stack.Param[Param]) =
(this, Param.param)
}
case object Param {
implicit val param = Stack.Param(Param(GlobalFlagConfig))
}
private[this] val log = Logger.getLogger(getClass.getName)
/**
* Instantiate a new FailureDetector based on the config type
*/
def apply(
config: Config,
ping: () => Future[Unit],
close: () => Future[Unit],
statsReceiver: StatsReceiver
): FailureDetector = {
config match {
case NullConfig => NullFailureDetector
case cfg: DarkModeConfig =>
new ThresholdFailureDetector(ping, close, cfg.minPeriod, cfg.threshold,
cfg.windowSize, cfg.closeThreshold, darkMode = true, statsReceiver = statsReceiver)
case cfg: ThresholdConfig =>
new ThresholdFailureDetector(ping, close, cfg.minPeriod, cfg.threshold,
cfg.windowSize, cfg.closeThreshold, darkMode = false, statsReceiver = statsReceiver)
case GlobalFlagConfig =>
parseConfigFromFlags(ping, close, statsReceiver = statsReceiver)
}
}
/**
* Fallback behavior: parse the sessionFailureDetector global flag and
* instantiate the proper config.
*/
private def parseConfigFromFlags(
ping: () => Future[Unit],
close: () => Future[Unit],
nanoTime: () => Long = System.nanoTime,
statsReceiver: StatsReceiver = NullStatsReceiver
): FailureDetector = {
sessionFailureDetector() match {
case list("threshold", duration(min), double(threshold), int(win), int(closeThreshold)) =>
new ThresholdFailureDetector(
ping, close, min, threshold, win, closeThreshold, nanoTime, false, statsReceiver)
case list("threshold", duration(min), double(threshold), int(win)) =>
new ThresholdFailureDetector(
ping, close, min, threshold, win, nanoTime = nanoTime, statsReceiver = statsReceiver)
case list("threshold", duration(min), double(threshold)) =>
new ThresholdFailureDetector(
ping, close, min, threshold, nanoTime = nanoTime, statsReceiver = statsReceiver)
case list("threshold", duration(min)) =>
new ThresholdFailureDetector(
ping, close, min, nanoTime = nanoTime, statsReceiver = statsReceiver)
case list("threshold") =>
new ThresholdFailureDetector(
ping, close, nanoTime = nanoTime, statsReceiver = statsReceiver)
case list("dark") =>
new ThresholdFailureDetector(
ping, close, nanoTime = nanoTime, statsReceiver = statsReceiver, darkMode = true)
case list("none") =>
NullFailureDetector
case list(_*) =>
log.warning(s"unknown failure detector ${sessionFailureDetector()} specified")
NullFailureDetector
}
}
}
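// Example values of the sessionFailureDetector flag handled above (a sketch;
// durations follow com.twitter.util's parser, e.g. "5.seconds"):
//   "none"                        -> NullFailureDetector
//   "dark"                        -> ThresholdFailureDetector in dark mode
//   "threshold:5.seconds:2.0:100" -> fully configured ThresholdFailureDetector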
|
rojanu/finagle
|
finagle-mux/src/main/scala/com/twitter/finagle/mux/FailureDetector.scala
|
Scala
|
apache-2.0
| 5,914
|
package java.lang
/* This is a hijacked class. Its instances are primitive numbers.
* Constructors are not emitted.
*/
final class Float private () extends Number with Comparable[Float] {
def this(value: scala.Float) = this()
def this(s: String) = this()
@inline def floatValue(): scala.Float =
this.asInstanceOf[scala.Float]
@inline override def byteValue(): scala.Byte = floatValue.toByte
@inline override def shortValue(): scala.Short = floatValue.toShort
@inline def intValue(): scala.Int = floatValue.toInt
@inline def longValue(): scala.Long = floatValue.toLong
@inline def doubleValue(): scala.Double = floatValue.toDouble
override def equals(that: Any): scala.Boolean = that match {
case that: Double => // yes, Double
val a = doubleValue
val b = that.doubleValue
(a == b) || (Double.isNaN(a) && Double.isNaN(b))
case _ =>
false
}
@inline override def hashCode(): Int =
Float.hashCode(floatValue)
@inline override def compareTo(that: Float): Int =
Float.compare(floatValue, that.floatValue)
@inline override def toString(): String =
Float.toString(floatValue)
@inline def isNaN(): scala.Boolean =
Float.isNaN(floatValue)
@inline def isInfinite(): scala.Boolean =
Float.isInfinite(floatValue)
}
object Float {
final val TYPE = classOf[scala.Float]
final val POSITIVE_INFINITY = 1.0f / 0.0f
final val NEGATIVE_INFINITY = 1.0f / -0.0f
final val NaN = 0.0f / 0.0f
final val MAX_VALUE = scala.Float.MaxValue
final val MIN_NORMAL = 1.17549435e-38f
final val MIN_VALUE = scala.Float.MinPositiveValue
final val MAX_EXPONENT = 127
final val MIN_EXPONENT = -126
final val SIZE = 32
final val BYTES = 4
@inline def valueOf(floatValue: scala.Float): Float = new Float(floatValue)
@inline def valueOf(s: String): Float = valueOf(parseFloat(s))
@inline def parseFloat(s: String): scala.Float =
Double.parseDouble(s).toFloat
@inline def toString(f: scala.Float): String =
"" + f
def toHexString(f: scala.Float): String = {
val ebits = 8 // exponent size
val mbits = 23 // mantissa size
val bias = (1 << (ebits - 1)) - 1
val bits = floatToIntBits(f)
val s = bits < 0
val m = bits & ((1 << mbits) - 1)
val e = (bits >>> mbits).toInt & ((1 << ebits) - 1) // biased
val posResult = if (e > 0) {
if (e == (1 << ebits) - 1) {
// Special
if (m != 0) "NaN"
else "Infinity"
} else {
// Normalized
"0x1." + mantissaToHexString(m) + "p" + (e - bias)
}
} else {
if (m != 0) {
// Subnormal
"0x0." + mantissaToHexString(m) + "p-126"
} else {
// Zero
"0x0.0p0"
}
}
if (bits < 0) "-" + posResult else posResult
}
@inline
private def mantissaToHexString(m: Int): String = {
@inline def padHex6(i: Int): String = {
val s = Integer.toHexString(i)
"000000".substring(s.length) + s // 6 zeros
}
// The << 1 turns `m` from a 23-bit int into a 24-bit int (multiple of 4)
val padded = padHex6(m << 1)
var len = padded.length
while (len > 1 && padded.charAt(len - 1) == '0')
len -= 1
padded.substring(0, len)
}
@inline def compare(a: scala.Float, b: scala.Float): scala.Int =
Double.compare(a, b)
@inline def isNaN(v: scala.Float): scala.Boolean =
v != v
@inline def isInfinite(v: scala.Float): scala.Boolean =
v == POSITIVE_INFINITY || v == NEGATIVE_INFINITY
@inline def isFinite(f: scala.Float): scala.Boolean =
!isNaN(f) && !isInfinite(f)
// Uses the hashCode of Doubles. See Bits.numberHashCode for the rationale.
@inline def hashCode(value: scala.Float): Int =
scala.scalajs.runtime.Bits.numberHashCode(value)
@inline def intBitsToFloat(bits: scala.Int): scala.Float =
scala.scalajs.runtime.Bits.intBitsToFloat(bits)
@inline def floatToIntBits(value: scala.Float): scala.Int =
scala.scalajs.runtime.Bits.floatToIntBits(value)
@inline def sum(a: scala.Float, b: scala.Float): scala.Float =
a + b
@inline def max(a: scala.Float, b: scala.Float): scala.Float =
Math.max(a, b)
@inline def min(a: scala.Float, b: scala.Float): scala.Float =
Math.min(a, b)
}
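// Worked examples for toHexString above:
//   Float.toHexString(1.0f)      == "0x1.0p0"
//   Float.toHexString(-2.5f)     == "-0x1.4p1"
//   Float.toHexString(MIN_VALUE) == "0x0.000002p-126"   // subnormal branch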
|
xuwei-k/scala-js
|
javalanglib/src/main/scala/java/lang/Float.scala
|
Scala
|
bsd-3-clause
| 4,254
|
package com.github.zenpie.macrowave.internal.datalift
import java.io.DataOutput
import java.util
import scala.reflect.macros.whitebox
import scala.language.implicitConversions
abstract class LiftCodec[@specialized T](val c: whitebox.Context) {
import c.universe._
def encode(stream: DataOutput, value: T): Unit
def decode(stream: TermName): Tree
}
object LiftCodec {
implicit def stringCodecFactory: whitebox.Context => LiftCodec[String] =
new LiftCodec[String](_) {
import c.universe._
      // DataOutput.writeUTF limits the *encoded* length to 65535 bytes; modified
      // UTF-8 uses up to 3 bytes per char, so chunk conservatively by char count.
      val maxLenOfUTFString = 65535 / 3
override def encode(stream: DataOutput, value: String): Unit = {
val chunks = value.grouped(maxLenOfUTFString).toArray
stream.writeInt(chunks.length)
for (chunk <- chunks) {
stream.writeUTF(chunk)
}
}
override def decode(stream: TermName): Tree = {
val chunkCount = TermName(c.freshName("chunkCount"))
val sBuilder = TermName(c.freshName("builder"))
val i = TermName(c.freshName("i"))
q"""
{
val $chunkCount = $stream.readInt()
val $sBuilder = new _root_.java.lang.StringBuilder()
var $i = 0
while ($i < $chunkCount) {
$sBuilder.append($stream.readUTF())
$i += 1
}
$sBuilder.toString()
}
"""
}
}
implicit val intArrayCodecFactory: whitebox.Context => LiftCodec[Array[Int]] =
{ c =>
import c.universe._
primitiveArrayCodec(c)(_.writeInt(_), stream => q"$stream.readInt()")
}
implicit val longArrayCodecFactory: whitebox.Context => LiftCodec[Array[Long]] =
{ c =>
import c.universe._
primitiveArrayCodec(c)(_.writeLong(_), stream => q"$stream.readLong()")
}
implicit val bitsetCodecFactory: whitebox.Context => LiftCodec[util.BitSet] =
{ c =>
val longArrayCodec = LiftCodec.longArrayCodecFactory(c)
type ForeignTermName = longArrayCodec.c.universe.TermName
new LiftCodec[util.BitSet](c) {
import c.universe._
override def encode(stream: DataOutput, value: util.BitSet): Unit = {
longArrayCodec.encode(stream, value.toLongArray)
}
override def decode(stream: TermName): Tree = {
val longs = TermName(c.freshName("longs"))
val BitSet = q"_root_.java.util.BitSet"
q"""
{
val $longs = ${longArrayCodec.decode(stream.asInstanceOf[ForeignTermName]).asInstanceOf[Tree]}
$BitSet.valueOf($longs)
}
"""
}
}
}
private def primitiveArrayCodec[@specialized T <: AnyVal]
(c0: whitebox.Context)
(writeElement: (DataOutput, T) => Unit,
readElement : c0.universe.TermName => c0.universe.Tree)
(implicit tpeTag: c0.universe.TypeTag[T]): LiftCodec[Array[T]] = {
type ForeignTermName = c0.universe.TermName
new LiftCodec[Array[T]](c0) {
import c.universe._
override def encode(stream: DataOutput, value: Array[T]): Unit = {
val arrayLength = value.length
stream.writeInt(arrayLength)
var i = 0
while (i < arrayLength) {
writeElement(stream, value(i))
i += 1
}
}
override def decode(stream: TermName): Tree = {
val arrayLength = TermName(c.freshName("arrayLength"))
val value = TermName(c.freshName("value"))
val i = TermName(c.freshName("i"))
val tTpe = typeOf[T]
q"""
{
val $arrayLength = $stream.readInt()
val $value = new Array[$tTpe]($arrayLength)
var $i = 0
while ($i < $arrayLength) {
$value($i) = ${readElement(stream.asInstanceOf[ForeignTermName]).asInstanceOf[Tree]}
$i += 1
}
$value
}
"""
}
}
}
}
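// Usage sketch (illustrative; `c` is a hypothetical whitebox.Context and `out`
// a DataOutput): a codec writes the value at macro-expansion time and returns
// a Tree that reads it back at runtime from a stream named by a TermName.
//
//   val codec = LiftCodec.intArrayCodecFactory(c)
//   codec.encode(out, Array(1, 2, 3))              // length-prefixed ints
//   val readerTree = codec.decode(c.universe.TermName("in"))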
|
zen-pie/macrowave
|
src/main/scala/com/github/zenpie/macrowave/internal/datalift/LiftCodec.scala
|
Scala
|
mit
| 3,961
|
package com.cloudray.scalapress.widgets.controller
import org.scalatest.{OneInstancePerTest, FunSuite}
import org.scalatest.mock.MockitoSugar
import org.mockito.Mockito
import com.cloudray.scalapress.widgets.{HtmlWidget, WidgetDao}
import org.springframework.ui.ModelMap
/** @author Stephen Samuel */
class HtmlWidgetEditControllerTest extends FunSuite with MockitoSugar with OneInstancePerTest {
val dao = mock[WidgetDao]
val widgetDao = dao
val controller = new HtmlWidgetEditController(widgetDao)
test("controller loads instance of HtmlWidget") {
val w = new HtmlWidget
Mockito.when(dao.find(3)).thenReturn(w)
val actual: HtmlWidget = controller.widget(3)
assert(w === actual)
}
test("save persists the widget") {
val w = new HtmlWidget
controller.save(w, new ModelMap)
Mockito.verify(dao).save(w)
}
}
|
vidyacraghav/scalapress
|
src/test/scala/com/cloudray/scalapress/widgets/controller/HtmlWidgetEditControllerTest.scala
|
Scala
|
apache-2.0
| 852
|
package api
import api.Api._
import models.Page
import play.api.libs.json._
/*
* Successful response for an ApiRequest.
*/
case class ApiResponse(status: Int, json: JsValue, headers: Seq[(String, String)]) extends ApiResult
object ApiResponse {
//////////////////////////////////////////////////////////////////////
// Status Codes
final val STATUS_OK = 200
final val STATUS_CREATED = 201
final val STATUS_ACCEPTED = 202
final val STATUS_NOCONTENT = 204
//////////////////////////////////////////////////////////////////////
// Predefined responses
def ok(json: JsValue, headers: (String, String)*) = apply(STATUS_OK, json, headers)
def ok[A](json: JsValue, page: Page[A], headers: (String, String)*) = apply(STATUS_OK, json, headers ++ Seq(
HEADER_PAGE -> page.page.toString,
HEADER_PAGE_FROM -> page.offset.toString,
// HEADER_PAGE_SIZE -> page.pageSize.toString,
HEADER_PAGE_TOTAL -> page.total.toString
))
def created(json: JsValue, headers: (String, String)*) = apply(STATUS_CREATED, json, headers)
def created(headers: (String, String)*) = apply(STATUS_CREATED, JsNull, headers)
def accepted(json: JsValue, headers: (String, String)*) = apply(STATUS_ACCEPTED, json, headers)
def accepted(headers: (String, String)*) = apply(STATUS_ACCEPTED, JsNull, headers)
def noContent(headers: (String, String)*) = apply(STATUS_NOCONTENT, JsNull, headers)
}
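// Examples (illustrative; `items` and `page` are hypothetical values): a 201
// with a Location header, and a paged 200 whose page headers are filled above:
//   ApiResponse.created(Json.obj("id" -> 42), "Location" -> "/items/42")
//   ApiResponse.ok(Json.toJson(items), page)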
|
ygpark2/play-ain-board
|
app/api/ApiResponse.scala
|
Scala
|
bsd-3-clause
| 1,406
|
import sbt._
import Keys._
class PlayJdbc(
scalacPlugin: Project,
jdbcScala: Project) {
import Dependencies._
import Format._
val playVersion = settingKey[String]("Playframework version")
lazy val project =
Project(id = "play-jdbc", base = file("play-jdbc")).
settings(formatSettings ++ Seq(
name := "play-jdbc",
scalacOptions in Test ++= ScalacPlugin.
compilerOptions(scalacPlugin).value,
playVersion := {
val scalaVer = scalaBinaryVersion.value
if (scalaVer == "2.11") "2.5.19"
else if (scalaVer == "2.12") "2.6.7"
else if (scalaVer == "2.13") "2.7.3"
else "2.4.8"
},
unmanagedSourceDirectories in Compile += {
val base = (sourceDirectory in Compile).value
CrossVersion.partialVersion(playVersion.value) match {
case Some((maj, min)) => base / s"play-${maj}.${min}"
case _ => base / "play"
}
},
compile in Test := (compile in Test).
dependsOn(compile in (scalacPlugin, Test)).value,
// make sure plugin is there
libraryDependencies ++= {
val anorm = "org.playframework.anorm" %% "anorm" % "2.6.10"
Seq(
"org.eu.acolyte" % "jdbc-driver" % (version in ThisBuild).value,
"com.typesafe.play" %% "play-jdbc-api" % playVersion.value % "provided",
anorm % Test,
"org.specs2" %% "specs2-core" % specsVer.value % Test)
}
)).dependsOn(scalacPlugin, jdbcScala)
}
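// Worked example (illustrative): with scalaBinaryVersion "2.12", playVersion
// resolves to "2.6.7", CrossVersion.partialVersion yields Some((2, 6)), and
// the extra sources are taken from src/main/play-2.6.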
|
cchantep/acolyte
|
project/PlayJdbc.scala
|
Scala
|
lgpl-2.1
| 1,578
|
package scalan.util
import scalan.BaseTests
import scalan.util.CollectionUtil._
class CollectionUtilTests extends BaseTests {
def join(l: Map[Int,Int], r: Map[Int,Int]) =
outerJoin(l, r)((_,l) => l, (_,r) => r, (k,l,r) => l + r)
def joinSeqs(l: Seq[Int], r: Seq[Int]) =
outerJoinSeqs(l, r)(l => l, r => r)((_,l) => l, (_,r) => r, (k,l,r) => l + r).map(_._2)
def joinPairs(l: Seq[(String,Int)], r: Seq[(String,Int)]) =
outerJoinSeqs(l, r)(l => l._1, r => r._1)((_,l) => l._2, (_,r) => r._2, (k,l,r) => l._2 + r._2)
test("outerJoin maps") {
val left = Map(1 -> 1, 2 -> 2, 3 -> 3)
val right = Map(2 -> 2, 3 -> 3, 4 -> 4)
assertResult(Map(1 -> 1, 2 -> 4, 3 -> 6, 4 -> 4))(join(left,right))
assertResult(Map(1 -> 1, 2 -> 2, 3 -> 3))(join(left,Map()))
assertResult(Map(2 -> 2, 3 -> 3, 4 -> 4))(join(Map(), right))
assertResult(Map(2 -> 4, 3 -> 6, 4 -> 8))(join(right, right))
}
test("outerJoinSeqs") {
val left = Seq(1, 2, 3)
val right = Seq(2, 3, 4)
assertResult(Seq(1, 4, 6, 4))(joinSeqs(left, right))
assertResult(Seq(1, 2, 3))(joinSeqs(left,Seq()))
assertResult(Seq(2, 3, 4))(joinSeqs(Seq(), right))
assertResult(Seq(4, 6, 8))(joinSeqs(right, right))
val inner = Seq("a" -> 1, "b" -> 2, "c" -> 3)
val outer = Seq("b" -> 2, "c" -> 3, "d" -> 4)
assertResult(Seq("a" -> 1, "b" -> 4, "c" -> 6, "d" -> 4))(joinPairs(inner, outer))
assertResult(Seq("a" -> 1, "b" -> 2, "c" -> 3))(joinPairs(inner,Seq()))
assertResult(Seq("b" -> 2, "c" -> 3, "d" -> 4))(joinPairs(Seq(), outer))
assertResult(Seq("b" -> 4, "c" -> 6, "d" -> 8))(joinPairs(outer, outer))
}
}
|
scalan/scalan
|
common/src/test/scala/scalan/util/CollectionUtilTests.scala
|
Scala
|
apache-2.0
| 1,652
|
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.ems.core.config.service
trait DataSourceManager {
def activate(resourceId: Iterable[Int], active: Boolean): Unit
}
|
beangle/ems
|
core/src/main/scala/org/beangle/ems/core/config/service/DataSourceManager.scala
|
Scala
|
lgpl-3.0
| 862
|
package com.coiney.akka.rabbit.protocol
import com.rabbitmq.client.AMQP
case class RabbitRPCResult(data: Option[Array[Byte]], properties: Option[AMQP.BasicProperties] = None)
trait RabbitRPCProcessor {
def process(hd: HandleDelivery): RabbitRPCResult
def recover(hd: HandleDelivery, cause: Throwable): RabbitRPCResult
}
case class RabbitRPCRequest(publishes: List[Publish], numberOfResponses: Int = 1)
case class RabbitRPCResponse(handleDeliveries: List[HandleDelivery])
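// Minimal processor sketch (illustrative; assumes HandleDelivery exposes the
// raw message body as `body: Array[Byte]`, which is not shown in this file):
//   object EchoProcessor extends RabbitRPCProcessor {
//     def process(hd: HandleDelivery): RabbitRPCResult =
//       RabbitRPCResult(Some(hd.body))
//     def recover(hd: HandleDelivery, cause: Throwable): RabbitRPCResult =
//       RabbitRPCResult(None)
//   }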
|
Coiney/akka-rabbit
|
akka-rabbit-core/src/main/scala/com/coiney/akka/rabbit/protocol/RabbitRPCProtocol.scala
|
Scala
|
bsd-3-clause
| 481
|
package ch8
import org.learningconcurrency._
import ch8._
import akka.actor._
import akka.event.Logging
import akka.util.Timeout
import akka.pattern.{ask, pipe, gracefulStop}
import akka.util.Timeout
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util._
class Pongy extends Actor {
val log = Logging(context.system, this)
def receive = {
case "ping" =>
log.info("Got a ping -- ponging back!")
sender ! "pong"
context.stop(self)
}
override def postStop() = log.info("pongy going down")
}
class Pingy extends Actor {
def receive = {
case pongyRef: ActorRef =>
implicit val timeout = Timeout(2 seconds)
val future = pongyRef ? "ping"
pipe(future) to sender
}
}
class Master extends Actor {
val log = Logging(context.system, this)
val pingy = ourSystem.actorOf(Props[Pingy], "pingy")
val pongy = ourSystem.actorOf(Props[Pongy], "pongy")
def receive = {
case "start" =>
pingy ! pongy
case "pong" =>
log.info("got a pong back!")
context.stop(self)
}
override def postStop() = log.info("master going down")
}
object CommunicatingAsk extends App {
val masta = ourSystem.actorOf(Props[Master], "masta")
masta ! "start"
Thread.sleep(1000)
ourSystem.shutdown()
}
class Router extends Actor {
var i = 0
val children = for (_ <- 0 until 4) yield context.actorOf(Props[StringPrinter])
def receive = {
case "stop" => context.stop(self)
case msg =>
children(i) forward msg
i = (i + 1) % 4
}
}
object CommunicatingRouter extends App {
val router = ourSystem.actorOf(Props[Router], "router")
router ! "Hi."
router ! "I'm talking to you!"
Thread.sleep(1000)
router ! "stop"
Thread.sleep(1000)
ourSystem.shutdown()
}
object CommunicatingPoisonPill extends App {
val masta = ourSystem.actorOf(Props[Master], "masta")
masta ! akka.actor.PoisonPill
Thread.sleep(1000)
ourSystem.shutdown()
}
class GracefulPingy extends Actor {
val pongy = context.actorOf(Props[Pongy], "pongy")
context.watch(pongy)
def receive = {
case GracefulPingy.CustomShutdown =>
context.stop(pongy)
case Terminated(`pongy`) =>
context.stop(self)
}
}
object GracefulPingy {
object CustomShutdown
}
object CommunicatingGracefulStop extends App {
val grace = ourSystem.actorOf(Props[GracefulPingy], "grace")
val stopped = gracefulStop(grace, 3.seconds, GracefulPingy.CustomShutdown)
stopped onComplete {
case Success(x) =>
log("graceful shutdown successful")
ourSystem.shutdown()
case Failure(t) =>
log("grace not stopped!")
ourSystem.shutdown()
}
}
|
tophua/spark1.52
|
examples/src/main/scala/scalaDemo/threadConcurrency/ch8/Communicating.scala
|
Scala
|
apache-2.0
| 2,710
|
package spoker.betting
import spoker.{ Player, ManageablePlayer }
case class Better(manageablePlayer: ManageablePlayer) {
lazy val myAction = BetterAction(this)
def myActionIs(a: Action) = myAction(a)
def bet(value: Int) = myActionIs(Bet(value))
def call = myActionIs(Call)
def check = myActionIs(Check)
def allIn = myActionIs(AllIn)
def raise(value: Int) = myActionIs(Raise(value))
def fold = myActionIs(Fold)
  override def equals(that: Any) = that match {
    case b: Better => playerFromBetter(this) == playerFromBetter(b)
    case mp: ManageablePlayer => playerFromBetter(this) == mp.positionedPlayer.player
    case _ => false // avoid a MatchError when compared against other types
  }
override def hashCode = playerFromBetter(this).##
}
|
vladmm/scala-poker
|
src/main/scala/spoker/betting/Better.scala
|
Scala
|
mit
| 699
|
package chat.tox.antox.utils
import java.util
import android.content.{BroadcastReceiver, Context, Intent}
import android.net.ConnectivityManager
import scala.collection.JavaConversions._
trait ConnectionTypeChangeListener {
  // Only called when the network is connected.
def connectionTypeChange(connectionType: Int): Unit
}
object ConnectionManager {
private val listenerList = new util.ArrayList[ConnectionTypeChangeListener]()
private var lastConnectionType: Option[Int] = None
def addConnectionTypeChangeListener(listener: ConnectionTypeChangeListener): Unit = {
listenerList.add(listener)
}
def getConnectionType(context: Context): Int = {
val connectivityManager = context.getSystemService(Context.CONNECTIVITY_SERVICE)
.asInstanceOf[ConnectivityManager]
connectivityManager.getActiveNetworkInfo.getType
}
def isNetworkAvailable(context: Context): Boolean = {
val connMgr = context.getSystemService(Context.CONNECTIVITY_SERVICE).asInstanceOf[ConnectivityManager]
val networkInfo = connMgr.getActiveNetworkInfo
networkInfo != null && networkInfo.isConnected
}
}
class ConnectionManager extends BroadcastReceiver {
override def onReceive(context: Context, intent: Intent) {
if (ConnectionManager.isNetworkAvailable(context)) {
val connectionType = ConnectionManager.getConnectionType(context)
if (ConnectionManager.lastConnectionType.isEmpty || connectionType != ConnectionManager.lastConnectionType.get) {
for (listener <- ConnectionManager.listenerList) {
listener.connectionTypeChange(connectionType)
}
ConnectionManager.lastConnectionType = Some(connectionType)
}
}
}
}
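// Usage sketch (illustrative): once this receiver is registered for
// connectivity broadcasts, listeners observe de-duplicated type changes:
//   ConnectionManager.addConnectionTypeChangeListener(
//     new ConnectionTypeChangeListener {
//       def connectionTypeChange(connectionType: Int): Unit =
//         if (connectionType == ConnectivityManager.TYPE_WIFI) {
//           // e.g. resume bandwidth-heavy transfers
//         }
//     })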
|
subliun/Antox
|
app/src/main/scala/chat/tox/antox/utils/ConnectionManager.scala
|
Scala
|
gpl-3.0
| 1,697
|
package org.jetbrains.plugins.scala.codeInsight.template.macros
import com.intellij.codeInsight.CodeInsightBundle
import com.intellij.codeInsight.lookup.LookupElement
import com.intellij.codeInsight.template._
import org.jetbrains.plugins.scala.codeInsight.template.impl.ScalaCodeContextType
import org.jetbrains.plugins.scala.codeInsight.template.util.MacroUtil
import org.jetbrains.plugins.scala.lang.completion.lookups.ScalaLookupItem
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTypeDefinition
import org.jetbrains.plugins.scala.lang.psi.types.result.TypingContext
/**
* @author Roman.Shein
* @since 23.09.2015.
*/
class ScalaComponentTypeOfMacro extends ScalaMacro {
override def innerCalculateResult(params: Array[Expression], context: ExpressionContext): Result = {
if (params.length != 1) return null
params.head.calculateResult(context) match {
case scTypeRes: ScalaTypeResult =>
MacroUtil.getComponentFromArrayType(scTypeRes.myType).map(new ScalaTypeResult(_)).orNull
case otherRes: Result =>
MacroUtil.resultToScExpr(otherRes, context).flatMap(_.getType().toOption).
flatMap(MacroUtil.getComponentFromArrayType).map(new ScalaTypeResult(_)).orNull
}
}
override def innerCalculateLookupItems(params: Array[Expression], context: ExpressionContext): Array[LookupElement] = {
if (params.length != 1) return null
val outerItems = params(0).calculateLookupItems(context)
if (outerItems == null) return null
outerItems.flatMap {
case lookupItem: ScalaLookupItem => lookupItem.element match {
case typeDef: ScTypeDefinition =>
typeDef.getType(TypingContext.empty).toOption.flatMap(MacroUtil.getComponentFromArrayType).
map(MacroUtil.getTypeLookupItem(_, context.getProject))
case _ => None
}
case _ => None
}.filter(_.isDefined).map(_.get)
}
def getName: String = MacroUtil.scalaIdPrefix + "componentTypeOf"
def getPresentableName: String = MacroUtil.scalaPresentablePrefix + CodeInsightBundle.message("macro.component.type.of.array")
override def isAcceptableInContext(context: TemplateContextType): Boolean = context.isInstanceOf[ScalaCodeContextType]
}
|
loskutov/intellij-scala
|
src/org/jetbrains/plugins/scala/codeInsight/template/macros/ScalaComponentTypeOfMacro.scala
|
Scala
|
apache-2.0
| 2,241
|
/*-
* #%L
* FWAPP Framework
* %%
* Copyright (C) 2016 - 2017 Open Design Flow
* %%
* This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.odfi.wsb.fwapp.framework
import com.idyria.osi.ooxoo.core.buffers.datatypes.BooleanBuffer
import com.idyria.osi.ooxoo.core.buffers.datatypes.XSDStringBuffer
import com.idyria.osi.ooxoo.core.buffers.datatypes.IntegerBuffer
import com.idyria.osi.ooxoo.core.buffers.datatypes.DoubleBuffer
import org.w3c.dom.html.HTMLElement
import com.idyria.osi.vui.html.Input
trait FWAppValueBufferView extends FWAppValueBindingView {
/**
* BindValue with Buffers
*/
def bindBufferValue(vb: IntegerBuffer): Unit = {
+@("value" -> vb.toString())
this.bindValue {
v: Int =>
vb.set(v)
}
}
/**
* BindValue with Buffers
*/
def bindBufferValue(vb: DoubleBuffer): Unit = {
+@("value" -> vb.toString())
this.bindValue {
v: Double =>
// println(s"Updating bound value")
vb.set(v)
}
}
/**
* BindValue with Buffers
*/
def bindBufferValue(vb: XSDStringBuffer): Unit = {
+@("value" -> vb.toString())
this.bindValue {
v: String =>
vb.set(v)
}
}
def bindBufferValue(vb: BooleanBuffer): Unit = {
vb.toBool match {
case true =>
+@("checked" -> "true")
case false =>
}
this.bindValue {
v: Boolean =>
println("Updating boolean to: " + v)
vb.set(v)
}
}
def inputToBuffer(vb: DoubleBuffer)(cl: => Any): Input[HTMLElement, _] = {
input {
bindBufferValue(vb)
cl
}
}
def inputToBuffer(vb: IntegerBuffer)(cl: => Any): Input[HTMLElement, _] = {
input {
bindBufferValue(vb)
cl
}
}
def inputToBuffer(vb: BooleanBuffer)(cl: => Any): Input[HTMLElement, _] = {
input {
bindBufferValue(vb)
cl
}
}
def inputToBufferWithlabel(name: String, vb: XSDStringBuffer)(cl: => Any): Input[HTMLElement, _] = {
input {
label(name) {
}
bindBufferValue(vb)
cl
}
}
def inputToBufferWithlabel(name: String, vb: DoubleBuffer)(cl: => Any): Input[HTMLElement, _] = {
input {
label(name) {
}
bindBufferValue(vb)
cl
}
}
def inputToBufferWithlabel(name: String, vb: IntegerBuffer)(cl: => Any): Input[HTMLElement, _] = {
input {
label(name) {
}
bindBufferValue(vb)
cl
}
}
def inputToBufferWithlabel(name: String, vb: BooleanBuffer)(cl: => Any): Input[HTMLElement, _] = {
input {
label(name) {
}
bindBufferValue(vb)
cl
}
}
def selectToBuffer(values: List[(String, String)], vb: XSDStringBuffer)(cl: => Any) = {
// If actual value not in range, set to first
values.find {
case (name, v) => v == vb.toString()
} match {
case None =>
        vb.set(values(0)._2) // use the option value, matching the comparison above
case other =>
}
// Create Select
select {
//-- Set options
values.foreach {
case (name, v) =>
option(v) {
textContent(name)
//-- Selected
if (v == vb.toString()) {
+@("selected" -> true)
}
}
}
//-- Bind
bindValue { sv: String => vb.set(sv) }
//-- Config closure
cl
}
}
}
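// Usage sketch (illustrative; `unitBuffer` is a hypothetical XSDStringBuffer
// held by the enclosing view): renders a select whose chosen option value is
// written back into the buffer, defaulting to the first entry when the buffer
// holds an out-of-range value.
//   selectToBuffer(List("Celsius" -> "C", "Fahrenheit" -> "F"), unitBuffer) {}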
|
opendesignflow/fwapp
|
src/main/scala/org/odfi/wsb/fwapp/framework/FWAppValueBufferView.scala
|
Scala
|
agpl-3.0
| 4,505
|
//: ----------------------------------------------------------------------------
//: Copyright (C) 2014 Verizon. All Rights Reserved.
//:
//: Licensed under the Apache License, Version 2.0 (the "License");
//: you may not use this file except in compliance with the License.
//: You may obtain a copy of the License at
//:
//: http://www.apache.org/licenses/LICENSE-2.0
//:
//: Unless required by applicable law or agreed to in writing, software
//: distributed under the License is distributed on an "AS IS" BASIS,
//: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//: See the License for the specific language governing permissions and
//: limitations under the License.
//:
//: ----------------------------------------------------------------------------
package remotely
import scala.collection.immutable.SortedSet
import scala.reflect.runtime.universe._
import scalaz.concurrent.Task
import scalaz.{\/, Applicative, Monad, Nondeterminism}
import scala.reflect.runtime.universe.TypeTag
import scodec.{Codec,Decoder,Encoder}
import scodec.bits.BitVector
import shapeless._
/**
* Represents a remote computation which yields a
* value of type `A`. Remote expressions can be serialized
* and sent to a server for evaluation.
*/
sealed trait Remote[+A] {
override def toString = pretty
  def pretty: String = "Remote {\n  " +
    Remote.refs(this).mkString("\n  ") + "\n  " +
    toString + "\n}"
}
object Remote {
/** Reference a remote value on the server, assuming it has the given type. */
def ref[A:TypeTag](s: String): Remote[A] = {
val tag = Remote.nameToTag[A](s)
Remote.Ref[A](tag)
}
/** Promote a local value to a remote value. */
def local[A:Encoder:TypeTag](a: A): Remote[A] =
Remote.Local(a, Some(Encoder[A]), Remote.toTag[A])
/** Provides the syntax `expr.run(endpoint)`, where `endpoint: Endpoint`. */
implicit class RunSyntax[A](self: Remote[A]) {
/**
* Run this `Remote[A]` at the given `Endpoint`. We require a `TypeTag[A]` and
* `Codec[A]` in order to deserialize the response and check that it has the expected type.
*/
def run(at: Endpoint, M: Monitoring = Monitoring.empty)(implicit A: TypeTag[A], C: Codec[A]): Response[A] =
evaluate(at, M)(self)
/** Call `self.run(at, M).apply(ctx)` to get back a `Task[A]`. */
def runWithContext(at: Endpoint, ctx: Response.Context, M: Monitoring = Monitoring.empty)(implicit A: TypeTag[A], C: Codec[A]): Task[A] =
run(at, M).apply(ctx)
/** Run this with an empty context */
def runWithoutContext(at: Endpoint)(implicit A: TypeTag[A], C: Codec[A]): Task[A] =
runWithContext(at, Response.Context.empty)
}
implicit class Ap1Syntax[A,B](self: Remote[A => B]) {
def apply(a: Remote[A]): Remote[B] =
Remote.Ap1(self, a)
}
implicit class Ap2Syntax[A,B,C](self: Remote[(A,B) => C]) {
def apply(a: Remote[A], b: Remote[B]): Remote[C] =
Remote.Ap2(self, a, b)
}
implicit class Ap3Syntax[A,B,C,D](self: Remote[(A,B,C) => D]) {
def apply(a: Remote[A], b: Remote[B], c: Remote[C]): Remote[D] =
Remote.Ap3(self, a, b, c)
}
implicit class Ap4Syntax[A,B,C,D,E](self: Remote[(A,B,C,D) => E]) {
def apply(a: Remote[A], b: Remote[B], c: Remote[C], d: Remote[D]): Remote[E] =
Remote.Ap4(self, a, b, c, d)
}
implicit class Ap5Syntax[A,B,C,D,E,F](self: Remote[(A,B,C,D,E) => F]) {
def apply(a: Remote[A], b: Remote[B], c: Remote[C], d: Remote[D], e: Remote[E]): Remote[F] =
Remote.Ap5(self, a, b, c, d, e)
}
/** Promote a local value to a remote value. */
private[remotely] case class Local[A](
a: A, // the value
format: Option[Encoder[A]], // serializer for `A`
tag: String // identifies the deserializer to be used by server
) extends Remote[A] {
override def toString = a.toString
}
/**
* Reference to a remote value on the server.
*/
private[remotely] case class Ref[A](name: String) extends Remote[A] {
override def toString = name.takeWhile(_ != ':')
}
// we require a separate constructor for each function
// arity, since remote invocations must be fully saturated
private[remotely] case class Ap1[A,B](
f: Remote[A => B],
a: Remote[A]) extends Remote[B] {
override def toString = s"$f($a)"
}
private[remotely] case class Ap2[A,B,C](
f: Remote[(A,B) => C],
a: Remote[A],
b: Remote[B]) extends Remote[C] {
override def toString = s"$f($a, $b)"
}
private[remotely] case class Ap3[A,B,C,D](
f: Remote[(A,B,C) => D],
a: Remote[A],
b: Remote[B],
c: Remote[C]) extends Remote[D] {
override def toString = s"$f($a, $b, $c)"
}
private[remotely] case class Ap4[A,B,C,D,E](
f: Remote[(A,B,C,D) => E],
a: Remote[A],
b: Remote[B],
c: Remote[C],
d: Remote[D]) extends Remote[E] {
override def toString = s"$f($a, $b, $c, $d)"
}
private[remotely] case class Ap5[A,B,C,D,E,F](
f: Remote[(A,B,C,D,E) => F],
a: Remote[A],
b: Remote[B],
c: Remote[C],
d: Remote[D],
e: Remote[E]) extends Remote[F] {
override def toString = s"$f($a, $b, $c, $d, $e)"
}
/** Collect up all the `Ref` names referenced by `r`. */
def refs[A](r: Remote[A]): SortedSet[String] = r match {
case Local(a,e,t) => SortedSet.empty
case Ref(t) => SortedSet(t)
case Ap1(f,a) => refs(f).union(refs(a))
case Ap2(f,a,b) => refs(f).union(refs(a)).union(refs(b))
case Ap3(f,a,b,c) => refs(f).union(refs(a)).union(refs(b)).union(refs(c))
case Ap4(f,a,b,c,d) => refs(f).union(refs(a)).union(refs(b)).union(refs(c)).union(refs(d))
case Ap5(f,a,b,c,d,e) => refs(f).union(refs(a)).union(refs(b)).union(refs(c)).union(refs(d)).union(refs(e))
}
/** Collect up all the formats referenced by `r`. */
def formats[A](r: Remote[A]): SortedSet[String] = r match {
case Local(a,e,t) => SortedSet(t)
case Ref(t) => SortedSet.empty
case Ap1(f,a) => formats(f).union(formats(a))
case Ap2(f,a,b) => formats(f).union(formats(a)).union(formats(b))
case Ap3(f,a,b,c) => formats(f).union(formats(a)).union(formats(b)).union(formats(c))
case Ap4(f,a,b,c,d) => formats(f).union(formats(a)).union(formats(b)).union(formats(c)).union(formats(d))
case Ap5(f,a,b,c,d,e) => formats(f).union(formats(a)).union(formats(b)).union(formats(c)).union(formats(d)).union(formats(e))
}
def toTag[A:TypeTag]: String = {
val tt = typeTag[A]
val result = tt.tpe.toString
if(result.startsWith("java.lang.")) result.drop(10)
else if (result.startsWith("scala.")) result.drop(6)
else result
}
def nameToTag[A:TypeTag](s: String): String =
s"$s: ${toTag[A]}"
/** Lower priority implicits. */
private[remotely] trait lowpriority {
implicit def codecIsRemote[A:Codec:TypeTag](a: A): Remote[A] = local(a)
}
/** Provides implicits for promoting values to `Remote[A]`. */
object implicits extends lowpriority {
/** Implicitly promote a local value to a `Remote[A]`. */
implicit def localToRemote[A:Encoder:TypeTag](a: A): Remote[A] = local(a)
}
}
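// Usage sketch (illustrative; assumes an in-scope `endpoint: Endpoint` plus the
// implicit Encoder/Codec/TypeTag instances that `local` and `run` require):
//   import Remote.implicits._
//   val sum = Remote.ref[(Int, Int) => Int]("sum")
//   val task: Task[Int] = sum(1, 2).runWithoutContext(endpoint)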
|
oncue/remotely
|
core/src/main/scala/Remote.scala
|
Scala
|
apache-2.0
| 7,113
|
package io.swagger.client.api
import com.wordnik.swagger.client._
import scala.concurrent.{ Future, Await }
import scala.concurrent.duration._
import collection.mutable
class UserApi(client: TransportClient, config: SwaggerConfig) extends ApiClient(client, config) {
def userMeGet()(implicit reader: ClientResponseReader[Unit]): Future[Unit] = {
// create path and map variables
val path = (addFmt("/user/me"))
// query params
val queryParams = new mutable.HashMap[String, String]
val headerParams = new mutable.HashMap[String, String]
val resFuture = client.submit("GET", path, queryParams.toMap, headerParams.toMap, "")
resFuture flatMap { resp =>
process(reader.read(resp))
}
}
}
|
QuantiModo/QuantiModo-SDK-Async-Scala
|
src/main/scala/io/swagger/client/api/UserApi.scala
|
Scala
|
apache-2.0
| 756
|
/*
* Copyright (c) 2013 Curry Order System authors.
* This file is part of Curry Order System. Please refer to the NOTICE.txt file for license details.
*/
package code.util
import org.specs._
import net.liftweb._
import http._
import net.liftweb.util._
import net.liftweb.common._
import org.specs.matcher._
import org.specs.specification._
import Helpers._
import mapper._
import scala.actors.Actor
import scala.actors.Actor._
import java.util.{Calendar, Date}
import code.model.{TaskType, EmailTask}
object SchedulerTest extends Specification
{
"Scheduler" should
{
"find next wednesday 6am for order reminder" >>
{
val task = new EmailTask(TaskType.REMINDER,null)
val cal=Calendar.getInstance()
cal.set(2011,1,8,4,23,7)
cal.getTime
task.getRunTime(cal) must be_==(new java.util.Date(111,1,9,6,0))
cal.set(2011,1,9,6,1,7)
cal.getTime
task.getRunTime(cal) must be_==(new java.util.Date(111,1,16,6,0))
cal.set(2011,1,9,6,0,0)
cal.getTime
task.getRunTime(cal) must be_==(new java.util.Date(111,1,16,6,0))
val task2 = new EmailTask(TaskType.ORDER, null)
task2.getRunTime(cal) must be_==(new java.util.Date(111,1,9,11,30))
}
"cope with the next wednesday being next year" >>
{
val task = new EmailTask(TaskType.REMINDER, null)
val cal=Calendar.getInstance()
cal.set(2010,11,29,8,23,7)
cal.getTime
task.getRunTime(cal) must be_==(new java.util.Date(111,0,5,6,0))
}
}
}
|
scott-abernethy/curry-order-system
|
src/test/scala/code/util/SchedulerTest.scala
|
Scala
|
gpl-3.0
| 1,511
|
package com.twitter.finagle.buoyant.h2
package netty4
import com.twitter.concurrent.AsyncQueue
import com.twitter.finagle.Failure
import com.twitter.finagle.stats.{NullStatsReceiver => FNullStatsReceiver, StatsReceiver => FStatsReceiver}
import com.twitter.logging.Logger
import com.twitter.util.{Future, Promise, Return, Throw}
import io.netty.handler.codec.http2._
import java.util.concurrent.atomic.AtomicReference
import scala.annotation.tailrec
/**
* Reads and writes a bi-directional HTTP/2 stream.
*
* Each stream transport has two "sides":
*
* - Dispatchers provide a stream with remote frames _from_ a socket
 *   into a `RecvMsg`-typed message. The `onRecvMessage` future
* is satisfied when an initial HEADERS frame is received from the
* dispatcher.
*
 * - Dispatchers write a `SendMsg`-typed message _to_ a socket. The
 *   stream transport reads from the message's stream until it ends or
 *   _fails_, so that errors may be propagated if the local side of
* the stream is reset.
*
* When both sides of the stream are closed, the `onReset` future is
* satisfied.
*
* Either side may reset the stream prematurely, causing the `onReset`
* future to fail, typically with a [[StreamError]] indicating whether
* the reset was initiated from the remote or local side of the
* stream. This information is used by i.e. dispatchers to determine
* whether a reset frame must be written.
*/
private[h2] trait Netty4StreamTransport[SendMsg <: Message, RecvMsg <: Message] {
import Netty4StreamTransport._
/** The HTTP/2 STREAM_ID of this stream. */
def streamId: Int
/** for logging */
protected[this] def prefix: String
protected[this] def transport: H2Transport.Writer
protected[this] def statsReceiver: StatsReceiver
protected[this] def mkRecvMsg(headers: Http2Headers, stream: Stream): RecvMsg
/*
* A stream's state is represented by the `StreamState` ADT,
* reflecting the state diagram detailed in RFC7540 §5.1:
*
   *                       +--------+
   *               recv ES |        | send ES
   *               ,-------|  open  |-------.
   *              /        |        |        \
   *             v         +--------+         v
   *      +----------+          |           +----------+
   *      |   half   |          |           |   half   |
   *      |  closed  |          | send R /  |  closed  |
   *      | (remote) |          | recv R    | (local)  |
   *      +----------+          |           +----------+
   *           |                |                 |
   *           | send ES /      |       recv ES / |
   *           | send R /       v        send R / |
   *           | recv R     +--------+   recv R   |
   *           `----------->|        |<-----------'
   *                        | closed |
   *                        |        |
   *                        +--------+
*
* (Note that SERVER_PUSH is not supported or represented in this
* version of the state diagram).
*/
private[this] sealed trait StreamState
/**
* The stream is open in both directions.
*
* When the StreamTransport is initialized (because a dispatcher has
* a stream frame it needs to dispatch), it starts in the `Open`
* state, because the stream exists and neither the remote nor local
* halves of the stream have been closed (i.e. by sending a frame
* with END_STREAM set).
*
* Since the local half of the stream is written from the dispatcher
* to the transport, we simply track whether this has completed.
*
* The remote half of the connection is represented with a
* [[RemoteState]] so that received frames may be passed inbound to
   * the application: first, by satisfying the `onRecvMessage`
* Future with [[RemotePending]], and then by offering data and
* trailer frames to [[RemoteStreaming]].
*/
private[this] case class Open(remote: RemoteState) extends StreamState with ResettableState {
/**
* Act on a stream reset by failing a pending or streaming remote.
*/
override def reset(rst: Reset): Unit = remote.reset(rst)
}
/**
* The `SendMsg` has been entirely sent, and the `RecvMsg` is still
* being received.
*/
private[this] case class LocalClosed(remote: RemoteState)
extends StreamState with ResettableState {
override def reset(rst: Reset): Unit = remote.reset(rst)
}
/**
* The `RecvMsg` has been entirely received, and the `SendMsg` is still
* being sent.
*
* Though the remote half is closed, it may reset the local half of
* the stream. This is achieved by failing the stream's underlying
* queue so that the consumer of a stream fails `read()` with a
* reset.
*/
private[this] class RemoteClosed(q: AsyncQueue[Frame])
extends StreamState with ResettableState {
def close(): Unit = q.fail(Reset.NoError, discard = false)
override def reset(rst: Reset): Unit = q.fail(rst, discard = true)
}
private[this] object RemoteClosed {
def unapply(rc: RemoteClosed): Boolean = true
}
  /** The `RecvMsg` has been entirely received and the `SendMsg` entirely sent. */
private[this] case class Closed(error: Reset) extends StreamState
/** The state of the remote side of a stream. */
private[this] sealed trait RemoteState extends ResettableState
/** A remote stream before the initial HEADERS frames have been received. */
private[this] class RemotePending(p: Promise[RecvMsg]) extends RemoteState {
def future: Future[RecvMsg] = p
def setMessage(rm: RecvMsg): Unit = p.setValue(rm)
override def reset(rst: Reset): Unit = p.setException(rst)
}
private[this] object RemotePending {
def unapply(rs: RemotePending): Boolean = true
}
/** A remote stream that has been initiated but not yet closed or reset. */
private[this] class RemoteStreaming(q: AsyncQueue[Frame]) extends RemoteState {
def toRemoteClosed: RemoteClosed = new RemoteClosed(q)
def offer(f: Frame): Boolean = q.offer(f)
def close(): Unit = q.fail(Reset.NoError, discard = false)
override def reset(rst: Reset): Unit = q.fail(rst, discard = true)
}
private[this] object RemoteStreaming {
def apply(q: AsyncQueue[Frame]): RemoteStreaming = new RemoteStreaming(q)
def unapply(rs: RemoteStreaming): Boolean = true
}
/** Helper to extract a RemoteState from a StreamState. */
private[this] object RemoteOpen {
def unapply(s: StreamState): Option[RemoteState] = s match {
case Open(r) => Some(r)
case LocalClosed(r) => Some(r)
case Closed(_) | RemoteClosed() => None
}
}
/** Helper to match writable states. */
private[this] object LocalOpen {
def unapply(s: StreamState): Boolean = s match {
case Open(_) | RemoteClosed() => true
case Closed(_) | LocalClosed(_) => false
}
}
/**
* Because remote reads and local writes may occur concurrently,
* this state is stored in the `stateRef` atomic reference. Writes
* and reads are performed without locking (at the expense of
* retrying on collision).
*/
private[this] val stateRef: AtomicReference[StreamState] = {
val remoteMsgP = new Promise[RecvMsg]
// When the remote message--especially a client's response--is
// canceled, close the transport, sending a RST_STREAM as
// appropriate.
remoteMsgP.setInterruptHandler {
case err: Reset =>
log.debug("[%s] remote message interrupted: %s", prefix, err)
localReset(err)
case Failure(Some(err: Reset)) =>
log.debug("[%s] remote message interrupted: %s", prefix, err)
localReset(err)
case f@Failure(_) if f.isFlagged(Failure.Interrupted) =>
log.debug("[%s] remote message interrupted: %s", prefix, f)
localReset(Reset.Cancel)
case f@Failure(_) if f.isFlagged(Failure.Rejected) =>
log.debug("[%s] remote message interrupted: %s", prefix, f)
localReset(Reset.Refused)
case e =>
log.debug("[%s] remote message interrupted: %s", prefix, e)
localReset(Reset.InternalError)
}
new AtomicReference(Open(new RemotePending(remoteMsgP)))
}
val onRecvMessage: Future[RecvMsg] = stateRef.get match {
case Open(rp@RemotePending()) => rp.future
case s => sys.error(s"unexpected initialization state: $s")
}
/**
* Satisfied successfully when the stream is fully closed with no
* error. An exception is raised with a Reset if the stream is
* closed prematurely.
*/
def onReset: Future[Unit] = resetP
private[this] val resetP = new Promise[Unit]
def isClosed = stateRef.get match {
case Closed(_) => true
case _ => false
}
def remoteReset(err: Reset): Unit =
if (tryReset(err)) err match {
case Reset.NoError =>
resetP.setDone(); ()
case err => resetP.setException(StreamError.Remote(err))
}
def localReset(err: Reset): Unit =
if (tryReset(err)) err match {
case Reset.NoError =>
resetP.setDone(); ()
case err => resetP.setException(StreamError.Local(err))
}
@tailrec private[this] def tryReset(err: Reset): Boolean =
stateRef.get match {
case state: StreamState with ResettableState =>
if (stateRef.compareAndSet(state, Closed(err))) {
log.debug("[%s] resetting %s in %s", prefix, err, state)
state.reset(err)
true
} else tryReset(err)
case _ => false
}
/**
* Updates the stateRef to reflect that the local stream has been closed.
*
* If the ref is already local closed, then the remote stream is reset and
* the reset promise results in an exception. If the ref is remote closed,
* then the ref becomes fully closed and the reset promise is completed.
*/
@tailrec private[this] def closeLocal(): Unit =
stateRef.get match {
case Closed(_) =>
case state@LocalClosed(remote) =>
if (stateRef.compareAndSet(state, Closed(Reset.InternalError))) {
remote.reset(Reset.InternalError)
resetP.setException(new IllegalStateException("closing local from LocalClosed"))
} else closeLocal()
case state@Open(remote) =>
if (!stateRef.compareAndSet(state, LocalClosed(remote))) closeLocal()
case state@RemoteClosed() =>
if (stateRef.compareAndSet(state, Closed(Reset.NoError))) {
resetP.setDone(); ()
} else closeLocal()
}
/**
* Offer a Netty Http2StreamFrame from the remote.
*
* `recv` returns false to indicate that a frame could not be
* accepted. This may occur, for example, when a message is
* received on a closed stream.
*/
@tailrec final def recv(in: Http2StreamFrame): Boolean = {
val state = stateRef.get
log.trace("[%s] admitting %s in %s", prefix, in.name, state)
def resetFromRemote(remote: ResettableState, rst: Reset): Boolean =
if (stateRef.compareAndSet(state, Closed(rst))) {
remote.reset(rst)
resetP.setException(StreamError.Remote(rst))
true
} else false
def resetFromLocal(remote: ResettableState, rst: Reset): Boolean =
if (stateRef.compareAndSet(state, Closed(rst))) {
remote.reset(rst)
resetP.setException(StreamError.Local(rst))
true
} else false
def recvFrame(f: Frame, remote: RemoteStreaming): Boolean =
if (remote.offer(f)) {
statsReceiver.recordRemoteFrame(f)
true
} else {
log.debug("[%s] remote offer failed", prefix)
false
}
in match {
case rst: Http2ResetFrame =>
val err = Netty4Message.Reset.fromFrame(rst)
state match {
case Closed(_) => false
case RemoteOpen(remote) =>
if (resetFromRemote(remote, err)) {
statsReceiver.remoteResetCount.incr()
true
} else recv(rst)
case state@RemoteClosed() =>
if (resetFromRemote(state, err)) {
statsReceiver.remoteResetCount.incr()
true
} else recv(rst)
}
case hdrs: Http2HeadersFrame if hdrs.isEndStream =>
state match {
case Closed(_) => false
case state@RemoteClosed() =>
if (resetFromLocal(state, Reset.InternalError)) true
else recv(hdrs)
case Open(remote@RemotePending()) =>
val q = new AsyncQueue[Frame](1)
val msg = mkRecvMsg(hdrs.headers, Stream.empty(q))
if (ConnectionHeaders.detect(msg.headers)) {
if (resetFromLocal(remote, Reset.ProtocolError)) true
else recv(hdrs)
} else {
if (stateRef.compareAndSet(state, new RemoteClosed(q))) {
remote.setMessage(msg)
true
} else recv(hdrs)
}
case Open(remote@RemoteStreaming()) =>
if (stateRef.compareAndSet(state, remote.toRemoteClosed)) {
if (recvFrame(toFrame(hdrs), remote)) {
remote.close()
true
} else false
} else recv(hdrs)
case state@LocalClosed(remote@RemotePending()) =>
val msg = mkRecvMsg(hdrs.headers, NilStream)
if (ConnectionHeaders.detect(msg.headers)) {
if (resetFromLocal(state, Reset.ProtocolError)) true
else recv(hdrs)
} else {
if (stateRef.compareAndSet(state, Closed(Reset.NoError))) {
remote.setMessage(msg)
resetP.setDone()
true
} else recv(hdrs)
}
case LocalClosed(remote@RemoteStreaming()) =>
if (stateRef.compareAndSet(state, Closed(Reset.NoError))) {
if (recvFrame(toFrame(hdrs), remote)) {
remote.close()
resetP.setDone()
true
} else false
} else recv(hdrs)
}
case hdrs: Http2HeadersFrame =>
// A HEADERS frame without END_STREAM may only be received to
// initiate a message (i.e. when the remote is still pending).
state match {
case Closed(_) => false
case state@RemoteClosed() =>
if (resetFromLocal(state, Reset.Closed)) false
else recv(hdrs)
case RemoteOpen(remote@RemoteStreaming()) =>
if (resetFromLocal(remote, Reset.InternalError)) false
else recv(hdrs)
case Open(remote@RemotePending()) =>
val q = new AsyncQueue[Frame]
val msg = mkRecvMsg(hdrs.headers, Stream(q))
if (ConnectionHeaders.detect(msg.headers)) {
if (resetFromLocal(remote, Reset.ProtocolError)) true
else recv(hdrs)
} else {
if (stateRef.compareAndSet(state, Open(RemoteStreaming(q)))) {
remote.setMessage(msg)
true
} else recv(hdrs)
}
case LocalClosed(remote@RemotePending()) =>
val q = new AsyncQueue[Frame]
val msg = mkRecvMsg(hdrs.headers, Stream(q))
if (ConnectionHeaders.detect(msg.headers)) {
if (resetFromLocal(remote, Reset.ProtocolError)) true
else recv(hdrs)
} else {
if (stateRef.compareAndSet(state, LocalClosed(RemoteStreaming(q)))) {
remote.setMessage(msg)
true
} else recv(hdrs)
}
}
case data: Http2DataFrame if data.isEndStream =>
state match {
case Closed(_) => false
case state@RemoteClosed() =>
if (resetFromLocal(state, Reset.Closed)) false
else recv(data)
case RemoteOpen(remote@RemotePending()) =>
if (resetFromLocal(remote, Reset.InternalError)) false
else recv(data)
case Open(remote@RemoteStreaming()) =>
if (stateRef.compareAndSet(state, remote.toRemoteClosed)) {
if (recvFrame(toFrame(data), remote)) {
remote.close()
true
} else throw new IllegalStateException("stream queue closed prematurely")
} else recv(data)
case LocalClosed(remote@RemoteStreaming()) =>
if (stateRef.compareAndSet(state, Closed(Reset.NoError))) {
if (recvFrame(toFrame(data), remote)) {
remote.close()
resetP.setDone()
true
} else throw new IllegalStateException("stream queue closed prematurely")
} else recv(data)
}
case data: Http2DataFrame =>
state match {
case Closed(_) => false
case state@RemoteClosed() =>
if (resetFromLocal(state, Reset.Closed)) false
else recv(data)
case RemoteOpen(remote@RemotePending()) =>
if (resetFromLocal(remote, Reset.InternalError)) false
else recv(data)
case Open(remote@RemoteStreaming()) =>
if (recvFrame(toFrame(data), remote)) true
else {
if (resetFromLocal(remote, Reset.Closed)) false
else recv(data)
}
case LocalClosed(remote@RemoteStreaming()) =>
if (recvFrame(toFrame(data), remote)) true
else {
if (resetFromLocal(remote, Reset.Closed)) false
else recv(data)
}
}
}
}
private[this] def toFrame(f: Http2StreamFrame): Frame = f match {
case f: Http2DataFrame => Netty4Message.Data(f, updateWindow)
case f: Http2HeadersFrame if f.isEndStream => Netty4Message.Trailers(f.headers)
case f => throw new IllegalArgumentException(s"invalid stream frame: ${f}")
}
private[this] val updateWindow: Int => Future[Unit] = transport.updateWindow(streamId, _)
/**
* Write a `SendMsg`-typed [[Message]] to the remote.
*
* The outer future is satisfied initially to indicate that the
* local message has been initiated (i.e. its HEADERS have been
* sent). This first future is satisfied with a second future. The
* second future is satisfied when the full local stream has been
* written to the remote.
*
* If any write fails or is canceled, the entire stream is reset.
*
* If the stream is reset, writes are canceled.
*/
def send(msg: SendMsg): Future[Future[Unit]] = {
val headersF = writeHeaders(msg.headers, msg.stream.isEmpty)
val streamFF = headersF.map { _ =>
if (msg.stream.isEmpty) Future.Unit
else writeStream(msg.stream)
}
val writeF = streamFF.flatten
onReset.onFailure(writeF.raise(_))
writeF.respond {
case Return(_) =>
closeLocal()
case Throw(StreamError.Remote(e)) =>
val rst = e match {
case rst: Reset => rst
case _ => Reset.Cancel
}
log.debug("[%s] remote write failed: %s", prefix, rst)
remoteReset(rst)
case Throw(StreamError.Local(e)) =>
val rst = e match {
case rst: Reset => rst
case _ => Reset.Cancel
}
log.debug("[%s] stream read failed: %s", prefix, rst)
localReset(rst)
case Throw(e) =>
log.error(e, "[%s] unexpected error", prefix)
localReset(Reset.InternalError)
}
localResetOnCancel(streamFF)
}
private[this] val writeHeaders: (Headers, Boolean) => Future[Unit] = { (hdrs, eos) =>
stateRef.get match {
case Closed(rst) => Future.exception(StreamError.Remote(rst))
case LocalClosed(_) => Future.exception(new IllegalStateException("writing on closed stream"))
case LocalOpen() =>
if (ConnectionHeaders.detect(hdrs)) {
Future.exception(StreamError.Local(Reset.ProtocolError))
} else localResetOnCancel(transport.write(streamId, hdrs, eos))
}
}
/** Write a request stream to the underlying transport */
private[this] val writeStream: Stream => Future[Unit] = { stream =>
def loop(): Future[Unit] =
stream.read().rescue(wrapLocalEx)
.flatMap { f =>
writeFrame(f).flatMap { _ =>
if (!f.isEnd) loop() else Future.Unit
}
}
localResetOnCancel(loop())
}
private[this] def localResetOnCancel[T](f: Future[T]): Future[T] = {
val p = new Promise[T]
p.setInterruptHandler {
case e =>
localReset(Reset.Cancel)
f.raise(e)
}
f.proxyTo(p)
p
}
private[this] val writeFrame: Frame => Future[Unit] = { frame =>
stateRef.get match {
case Closed(rst) => Future.exception(StreamError.Remote(rst))
case LocalClosed(_) => Future.exception(new IllegalStateException("writing on closed stream"))
case LocalOpen() =>
statsReceiver.recordLocalFrame(frame)
transport.write(streamId, frame).rescue(wrapRemoteEx)
.before(frame.release().rescue(wrapLocalEx))
}
}
}
object Netty4StreamTransport {
private lazy val log = Logger.get("h2")
/** Helper: a state that supports Reset. (All but Closed) */
private trait ResettableState {
def reset(rst: Reset): Unit
}
private val wrapLocalEx: PartialFunction[Throwable, Future[Nothing]] = {
case e@StreamError.Local(_) => Future.exception(e)
case e@StreamError.Remote(_) => Future.exception(e)
case e => Future.exception(StreamError.Local(e))
}
private def wrapRemoteEx: PartialFunction[Throwable, Future[Nothing]] = {
case e@StreamError.Local(_) => Future.exception(e)
case e@StreamError.Remote(_) => Future.exception(e)
case e => Future.exception(StreamError.Remote(e))
}
private object NilStream extends Stream {
override def isEmpty = true
override def onEnd = Future.Unit
override def read(): Future[Frame] = Future.exception(Reset.NoError)
}
class StatsReceiver(val underlying: FStatsReceiver) {
private[this] val local = underlying.scope("local")
private[this] val localDataBytes = local.stat("data", "bytes")
private[this] val localDataFrames = local.counter("data", "frames")
private[this] val localTrailersCount = local.counter("trailers")
val localResetCount = local.counter("reset")
val recordLocalFrame: Frame => Unit = {
case d: Frame.Data =>
localDataFrames.incr()
localDataBytes.add(d.buf.length)
case t: Frame.Trailers => localTrailersCount.incr()
}
private[this] val remote = underlying.scope("remote")
private[this] val remoteDataBytes = remote.stat("data", "bytes")
private[this] val remoteDataFrames = remote.counter("data", "frames")
private[this] val remoteTrailersCount = remote.counter("trailers")
val remoteResetCount = remote.counter("reset")
val recordRemoteFrame: Frame => Unit = {
case d: Frame.Data =>
remoteDataFrames.incr()
remoteDataBytes.add(d.buf.length)
case _: Frame.Trailers => remoteTrailersCount.incr()
}
}
object NullStatsReceiver extends StatsReceiver(FNullStatsReceiver)
private class Client(
override val streamId: Int,
override protected[this] val transport: H2Transport.Writer,
override protected[this] val statsReceiver: StatsReceiver
) extends Netty4StreamTransport[Request, Response] {
override protected[this] val prefix =
s"C L:${transport.localAddress} R:${transport.remoteAddress} S:${streamId}"
override protected[this] def mkRecvMsg(headers: Http2Headers, stream: Stream): Response =
Response(Netty4Message.Headers(headers), stream)
}
private class Server(
override val streamId: Int,
override protected[this] val transport: H2Transport.Writer,
override protected[this] val statsReceiver: StatsReceiver
) extends Netty4StreamTransport[Response, Request] {
override protected[this] val prefix =
s"S L:${transport.localAddress} R:${transport.remoteAddress} S:${streamId}"
override protected[this] def mkRecvMsg(headers: Http2Headers, stream: Stream): Request =
Request(Netty4Message.Headers(headers), stream)
}
def client(
id: Int,
writer: H2Transport.Writer,
stats: StatsReceiver = NullStatsReceiver
): Netty4StreamTransport[Request, Response] =
new Client(id, writer, stats)
def server(
id: Int,
writer: H2Transport.Writer,
stats: StatsReceiver = NullStatsReceiver
): Netty4StreamTransport[Response, Request] =
new Server(id, writer, stats)
}
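// Construction sketch (illustrative): a dispatcher typically wires a stream as
//   val st = Netty4StreamTransport.client(streamId, writer, stats)
//   val sent: Future[Future[Unit]] = st.send(request) // headers, then body
//   val rsp: Future[Response] = st.onRecvMessage      // set once HEADERS arrive
// and feeds inbound frames through st.recv(frame), calling st.remoteReset(err)
// for resets that arrive outside the stream's own frame flow.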
|
denverwilliams/linkerd
|
finagle/h2/src/main/scala/com/twitter/finagle/buoyant/h2/netty4/Netty4StreamTransport.scala
|
Scala
|
apache-2.0
| 24,384
|
package org.bitcoins.crypto
import scodec.bits.ByteVector
import java.math.BigInteger
import scala.math.BigInt
trait CryptoNumberUtil {
/** Converts a sequence of bytes to a **big endian** unsigned integer */
def toUnsignedInt(bytes: ByteVector): BigInt = {
toUnsignedInt(bytes.toArray)
}
/** Converts a sequence of bytes to a **big endian** unsigned integer */
def toUnsignedInt(bytes: Array[Byte]): BigInt = {
BigInt(new BigInteger(1, bytes))
}
def uintToFieldElement(bytes: ByteVector): FieldElement = {
FieldElement(toUnsignedInt(bytes))
}
/** Takes a hex string and parses it to a [[scala.math.BigInt BigInt]]. */
def toBigInt(hex: String): BigInt = toBigInt(CryptoBytesUtil.decodeHex(hex))
/** Converts a sequence of bytes to twos complement signed number. */
def toBigInt(bytes: ByteVector): BigInt = {
    // BigInt interprets the byte array as an unsigned magnitude and applies the
    // given sign separately. Twos complement stores negative numbers in inverted
    // form, so for a negative number we invert the bytes first and then adjust by one.
    // See [[https://en.wikipedia.org/wiki/Two%27s_complement]]
if (bytes.isEmpty) BigInt(0)
//check if sign bit is set
else if ((0x80.toByte & bytes.head) != 0) {
val invertedBytes = bytes.tail.map(b => (b ^ 0xff.toByte).toByte)
val firstByteInverted = (bytes.head ^ 0xff.toByte).toByte
val num = firstByteInverted +: invertedBytes
BigInt(-1, num.toArray) - 1
} else {
val firstBitOff = (0x7f & bytes.head).toByte
val num = firstBitOff +: bytes.tail
BigInt(num.toArray)
}
}
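  // Worked examples (illustrative): toBigInt(ByteVector(0x7f)) == BigInt(127),
  // toBigInt(ByteVector(0xff)) == BigInt(-1), and
  // toBigInt(ByteVector(0x80)) == BigInt(-128).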
}
object CryptoNumberUtil extends CryptoNumberUtil
|
bitcoin-s/bitcoin-s
|
crypto/src/main/scala/org/bitcoins/crypto/CryptoNumberUtil.scala
|
Scala
|
mit
| 1,707
|
package me.laiseca.restcale.internal.function
import org.mockito.Mockito.verify
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import org.scalatest.mock.MockitoSugar
import me.laiseca.restcale.http.HttpMethod
class RestFunction0Spec extends FlatSpec with Matchers with MockitoSugar {
val method = HttpMethod.GET
val path = "/some/path"
"Function0 wrapper" should "call the wrapped function" in {
val function = mock[Function0[AnyRef]]
val testObj = new RestFunction0(function, method, path, List())
testObj.apply()
verify(function).apply()
}
}
|
xabierlaiseca/restcale
|
shared/src/test/scala/me/laiseca/restcale/internal/function/RestFunction0Spec.scala
|
Scala
|
apache-2.0
| 593
|
/*
* Copyright 2001-2008 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
package fixture
import collection.immutable.TreeSet
import events.TestFailed
import events.TestSucceeded
import mock.MockitoSugar
import org.scalatest.events.InfoProvided
class FixtureSuiteSpec extends org.scalatest.FunSpec with PrivateMethodTester with SharedHelpers {
describe("The private testMethodTakesInformer method") {
val testMethodTakesAFixtureAndInformer = PrivateMethod[Boolean]('testMethodTakesAFixtureAndInformer)
val suiteObject = FixtureSuite
it("should return true if passed a string that ends in (FixtureParam, Informer)") {
assert(suiteObject invokePrivate testMethodTakesAFixtureAndInformer("thisDoes(FixtureParam, Informer)"))
assert(suiteObject invokePrivate testMethodTakesAFixtureAndInformer("(FixtureParam, Informer)"))
assert(suiteObject invokePrivate testMethodTakesAFixtureAndInformer("test(FixtureParam, Informer)"))
}
it("should return false if passed a string that doesn't end in (FixtureParam, Informer)") {
assert(!(suiteObject invokePrivate testMethodTakesAFixtureAndInformer("thisDoesNot(FixtureParam)")))
assert(!(suiteObject invokePrivate testMethodTakesAFixtureAndInformer("test(FixtureParam)")))
}
}
/*
describe("A fixture.Suite without SimpleWithFixture") {
it("should return the test names in alphabetical order from testNames") {
val a = new FixtureSuite {
type FixtureParam = String
def withFixture(fun: String => Unit, config: Map[String, Any]) {}
def testThis(fixture: String) {}
def testThat(fixture: String) {}
}
expectResult(List("testThat(FixtureParam)", "testThis(FixtureParam)")) {
a.testNames.iterator.toList
}
val b = new FixtureSuite {
type FixtureParam = String
def withFixture(fun: String => Unit, config: Map[String, Any]) {}
}
expectResult(List[String]()) {
b.testNames.iterator.toList
}
val c = new FixtureSuite {
type FixtureParam = String
def withFixture(fun: String => Unit, config: Map[String, Any]) {}
def testThat(fixture: String) {}
def testThis(fixture: String) {}
}
expectResult(List("testThat(FixtureParam)", "testThis(FixtureParam)")) {
c.testNames.iterator.toList
}
}
it("should discover tests with and without Informer parameters") {
val a = new FixtureSuite {
type FixtureParam = String
def withFixture(fun: String => Unit, config: Map[String, Any]) {}
def testThis(fixture: String) = ()
def testThat(fixture: String, info: Informer) = ()
}
assert(a.testNames === TreeSet("testThat(FixtureParam, Informer)", "testThis(FixtureParam)"))
}
it("should pass in the fixture to every test method") {
val a = new FixtureSuite {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(fun: String => Unit, config: Map[String, Any]) {
test(hello)
}
def testThis(fixture: String) {
assert(fixture === hello)
}
def testThat(fixture: String, info: Informer) {
assert(fixture === hello)
}
}
a.run(None, SilentReporter, new Stopper {}, Filter(), Map(), None, new Tracker())
}
it("can pass in the config map to every test method via the fixture") {
val key = "greeting"
val hello = "Hello, world!"
val a = new FixtureSuite {
type FixtureParam = Map[String, Any]
def withFixture(fun: FixtureParam => Unit, config: Map[String, Any]) {
test(config)
}
def testThis(fixture: FixtureParam) {
assert(fixture(key) === hello)
}
def testThat(fixture: FixtureParam, info: Informer) {
assert(fixture(key) === hello)
}
}
val rep = new EventRecordingReporter
a.run(None, rep, new Stopper {}, Filter(), Map(key -> hello), None, new Tracker())
assert(!rep.eventsReceived.exists(_.isInstanceOf[TestFailed]))
}
}
*/
describe("A FixtureSuite") {
it("should return the test names in alphabetical order from testNames") {
val a = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) {}
def testThis(fixture: String) {}
def testThat(fixture: String) {}
}
expectResult(List("testThat(FixtureParam)", "testThis(FixtureParam)")) {
a.testNames.iterator.toList
}
val b = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) {}
}
expectResult(List[String]()) {
b.testNames.iterator.toList
}
val c = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) {}
def testThat(fixture: String) {}
def testThis(fixture: String) {}
}
expectResult(List("testThat(FixtureParam)", "testThis(FixtureParam)")) {
c.testNames.iterator.toList
}
}
it("should discover tests with and without Informer parameters") {
val a = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) {}
def testThis(fixture: String) = ()
def testThat(fixture: String, info: Informer) = ()
}
assert(a.testNames === TreeSet("testThat(FixtureParam, Informer)", "testThis(FixtureParam)"))
}
it("should pass in the fixture to every test method") {
val a = new FixtureSuite {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest) {
test(hello)
}
def testThis(fixture: String) {
assert(fixture === hello)
}
def testThat(fixture: String, info: Informer) {
assert(fixture === hello)
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
assert(!rep.eventsReceived.exists(_.isInstanceOf[TestFailed]))
}
it("should return a correct tags map from the tags method") {
val a = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) {}
@Ignore
def testThis(fixture: FixtureParam) = ()
def testThat(fixture: FixtureParam, info: Informer) = ()
}
assert(a.tags === Map("testThis(FixtureParam)" -> Set("org.scalatest.Ignore")))
val b = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) {}
def testThis(fixture: FixtureParam) = ()
@Ignore
def testThat(fixture: FixtureParam, info: Informer) = ()
}
assert(b.tags === Map("testThat(FixtureParam, Informer)" -> Set("org.scalatest.Ignore")))
val c = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) {}
@Ignore
def testThis(fixture: FixtureParam) = ()
@Ignore
def testThat(fixture: FixtureParam, info: Informer) = ()
}
assert(c.tags === Map("testThis(FixtureParam)" -> Set("org.scalatest.Ignore"), "testThat(FixtureParam, Informer)" -> Set("org.scalatest.Ignore")))
val d = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) {}
@SlowAsMolasses
def testThis(fixture: FixtureParam) = ()
@SlowAsMolasses
@Ignore
def testThat(fixture: FixtureParam, info: Informer) = ()
}
assert(d.tags === Map("testThis(FixtureParam)" -> Set("org.scalatest.SlowAsMolasses"), "testThat(FixtureParam, Informer)" -> Set("org.scalatest.Ignore", "org.scalatest.SlowAsMolasses")))
val e = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) {}
}
assert(e.tags === Map())
}
class TestWasCalledSuite extends FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
def testThis(s: String) { theTestThisCalled = true }
def testThat(s: String) { theTestThatCalled = true }
}
it("should execute all tests when run is called with testName None") {
val b = new TestWasCalledSuite
b.run(None, Args(SilentReporter))
assert(b.theTestThisCalled)
assert(b.theTestThatCalled)
}
it("should execute one test when run is called with a defined testName") {
val a = new TestWasCalledSuite
a.run(Some("testThis(FixtureParam)"), Args(SilentReporter))
assert(a.theTestThisCalled)
assert(!a.theTestThatCalled)
}
it("should report as ignored, ant not run, tests marked ignored") {
val a = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
def testThis(fixture: FixtureParam) { theTestThisCalled = true }
def testThat(fixture: FixtureParam, info: Informer) { theTestThatCalled = true }
}
val repA = new TestIgnoredTrackingReporter
a.run(None, Args(repA))
assert(!repA.testIgnoredReceived)
assert(a.theTestThisCalled)
assert(a.theTestThatCalled)
val b = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
@Ignore
def testThis(fixture: FixtureParam) { theTestThisCalled = true }
def testThat(fixture: FixtureParam, info: Informer) { theTestThatCalled = true }
}
val repB = new TestIgnoredTrackingReporter
b.run(None, Args(repB))
assert(repB.testIgnoredReceived)
assert(repB.lastEvent.isDefined)
assert(repB.lastEvent.get.testName endsWith "testThis(FixtureParam)")
assert(!b.theTestThisCalled)
assert(b.theTestThatCalled)
val c = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
def testThis(fixture: FixtureParam) { theTestThisCalled = true }
@Ignore
def testThat(fixture: FixtureParam, info: Informer) { theTestThatCalled = true }
}
val repC = new TestIgnoredTrackingReporter
c.run(None, Args(repC))
assert(repC.testIgnoredReceived)
assert(repC.lastEvent.isDefined)
assert(repC.lastEvent.get.testName endsWith "testThat(FixtureParam, Informer)", repC.lastEvent.get.testName)
assert(c.theTestThisCalled)
assert(!c.theTestThatCalled)
val d = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
@Ignore
def testThis(fixture: FixtureParam) { theTestThisCalled = true }
@Ignore
def testThat(fixture: FixtureParam, info: Informer) { theTestThatCalled = true }
}
val repD = new TestIgnoredTrackingReporter
d.run(None, Args(repD))
assert(repD.testIgnoredReceived)
assert(repD.lastEvent.isDefined)
assert(repD.lastEvent.get.testName endsWith "testThis(FixtureParam)") // last because run alphabetically
assert(!d.theTestThisCalled)
assert(!d.theTestThatCalled)
}
it("should ignore a test marked as ignored if run is invoked with that testName") {
val e = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
@Ignore
def testThis(fixture: FixtureParam) { theTestThisCalled = true }
def testThat(fixture: FixtureParam, info: Informer) { theTestThatCalled = true }
}
val repE = new TestIgnoredTrackingReporter
e.run(Some("testThis(FixtureParam)"), Args(repE))
assert(repE.testIgnoredReceived)
assert(!e.theTestThisCalled)
assert(!e.theTestThatCalled)
}
it("should throw IllegalArgumentException if run is passed a testName that does not exist") {
val suite = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
def testThis(fixture: FixtureParam) { theTestThisCalled = true }
def testThat(fixture: FixtureParam, info: Informer) { theTestThatCalled = true }
}
intercept[IllegalArgumentException] {
// Here, they forgot that the name is actually testThis(FixtureParam)
suite.run(Some("testThis"), Args(SilentReporter))
}
intercept[IllegalArgumentException] {
// Here, they gave a non-existent test name
suite.run(Some("doesNotExist(FixtureParam)"), Args(SilentReporter))
}
}
it("should run only those tests selected by the tags to include and exclude sets") {
// Nothing is excluded
val a = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
@SlowAsMolasses
def testThis(fixture: FixtureParam) { theTestThisCalled = true }
def testThat(fixture: FixtureParam, info: Informer) { theTestThatCalled = true }
}
val repA = new TestIgnoredTrackingReporter
a.run(None, Args(repA))
assert(!repA.testIgnoredReceived)
assert(a.theTestThisCalled)
assert(a.theTestThatCalled)
// SlowAsMolasses is included, one test should be excluded
val b = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
@SlowAsMolasses
def testThis(fixture: FixtureParam) { theTestThisCalled = true }
def testThat(fixture: FixtureParam, info: Informer) { theTestThatCalled = true }
}
val repB = new TestIgnoredTrackingReporter
b.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), Map(), None, new Tracker, Set.empty))
assert(!repB.testIgnoredReceived)
assert(b.theTestThisCalled)
assert(!b.theTestThatCalled)
// SlowAsMolasses is included, and both tests should be included
val c = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
@SlowAsMolasses
def testThis(fixture: FixtureParam) { theTestThisCalled = true }
@SlowAsMolasses
def testThat(fixture: FixtureParam, info: Informer) { theTestThatCalled = true }
}
val repC = new TestIgnoredTrackingReporter
      c.run(None, Args(repC, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), Map(), None, new Tracker, Set.empty))
assert(!repC.testIgnoredReceived)
assert(c.theTestThisCalled)
assert(c.theTestThatCalled)
// SlowAsMolasses is included. both tests should be included but one ignored
val d = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
@Ignore
@SlowAsMolasses
def testThis(fixture: FixtureParam) { theTestThisCalled = true }
@SlowAsMolasses
def testThat(fixture: FixtureParam, info: Informer) { theTestThatCalled = true }
}
val repD = new TestIgnoredTrackingReporter
d.run(None, Args(repD, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.Ignore")), Map(), None, new Tracker, Set.empty))
assert(repD.testIgnoredReceived)
assert(!d.theTestThisCalled)
assert(d.theTestThatCalled)
// SlowAsMolasses included, FastAsLight excluded
val e = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
@FastAsLight
@SlowAsMolasses
def testThis(fixture: FixtureParam) { theTestThisCalled = true }
@SlowAsMolasses
def testThat(fixture: FixtureParam, info: Informer) { theTestThatCalled = true }
def testTheOther(fixture: FixtureParam, info: Informer) { theTestTheOtherCalled = true }
}
val repE = new TestIgnoredTrackingReporter
e.run(None, Args(repE, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
Map(), None, new Tracker, Set.empty))
assert(!repE.testIgnoredReceived)
assert(!e.theTestThisCalled)
assert(e.theTestThatCalled)
assert(!e.theTestTheOtherCalled)
// An Ignored test that was both included and excluded should not generate a TestIgnored event
val f = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
@Ignore
@FastAsLight
@SlowAsMolasses
def testThis(fixture: FixtureParam) { theTestThisCalled = true }
@SlowAsMolasses
def testThat(fixture: FixtureParam, info: Informer) { theTestThatCalled = true }
def testTheOther(fixture: FixtureParam, info: Informer) { theTestTheOtherCalled = true }
}
val repF = new TestIgnoredTrackingReporter
f.run(None, Args(repF, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
Map(), None, new Tracker, Set.empty))
assert(!repF.testIgnoredReceived)
assert(!f.theTestThisCalled)
assert(f.theTestThatCalled)
assert(!f.theTestTheOtherCalled)
// An Ignored test that was not included should not generate a TestIgnored event
val g = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
@FastAsLight
@SlowAsMolasses
def testThis(fixture: FixtureParam) { theTestThisCalled = true }
@SlowAsMolasses
def testThat(fixture: FixtureParam, info: Informer) { theTestThatCalled = true }
@Ignore
def testTheOther(fixture: FixtureParam, info: Informer) { theTestTheOtherCalled = true }
}
val repG = new TestIgnoredTrackingReporter
g.run(None, Args(repG, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
Map(), None, new Tracker, Set.empty))
assert(!repG.testIgnoredReceived)
assert(!g.theTestThisCalled)
assert(g.theTestThatCalled)
assert(!g.theTestTheOtherCalled)
// No tagsToInclude set, FastAsLight excluded
val h = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
@FastAsLight
@SlowAsMolasses
def testThis(fixture: FixtureParam) { theTestThisCalled = true }
@SlowAsMolasses
def testThat(fixture: FixtureParam, info: Informer) { theTestThatCalled = true }
def testTheOther(fixture: FixtureParam, info: Informer) { theTestTheOtherCalled = true }
}
val repH = new TestIgnoredTrackingReporter
h.run(None, Args(repH, Stopper.default, Filter(None, Set("org.scalatest.FastAsLight")), Map(), None, new Tracker, Set.empty))
assert(!repH.testIgnoredReceived)
assert(!h.theTestThisCalled)
assert(h.theTestThatCalled)
assert(h.theTestTheOtherCalled)
// No tagsToInclude set, SlowAsMolasses excluded
val i = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
@FastAsLight
@SlowAsMolasses
def testThis(fixture: FixtureParam) { theTestThisCalled = true }
@SlowAsMolasses
def testThat(fixture: FixtureParam, info: Informer) { theTestThatCalled = true }
def testTheOther(fixture: FixtureParam, info: Informer) { theTestTheOtherCalled = true }
}
val repI = new TestIgnoredTrackingReporter
i.run(None, Args(repI, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), Map(), None, new Tracker, Set.empty))
assert(!repI.testIgnoredReceived)
assert(!i.theTestThisCalled)
assert(!i.theTestThatCalled)
assert(i.theTestTheOtherCalled)
// No tagsToInclude set, SlowAsMolasses excluded, TestIgnored should not be received on excluded ones
val j = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
@Ignore
@FastAsLight
@SlowAsMolasses
def testThis(fixture: FixtureParam) { theTestThisCalled = true }
@Ignore
@SlowAsMolasses
def testThat(fixture: FixtureParam, info: Informer) { theTestThatCalled = true }
def testTheOther(fixture: FixtureParam, info: Informer) { theTestTheOtherCalled = true }
}
val repJ = new TestIgnoredTrackingReporter
j.run(None, Args(repJ, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), Map(), None, new Tracker, Set.empty))
      assert(!repJ.testIgnoredReceived)
assert(!j.theTestThisCalled)
assert(!j.theTestThatCalled)
assert(j.theTestTheOtherCalled)
// Same as previous, except Ignore specifically mentioned in excludes set
val k = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
@Ignore
@FastAsLight
@SlowAsMolasses
def testThis(fixture: FixtureParam) { theTestThisCalled = true }
@Ignore
@SlowAsMolasses
def testThat(fixture: FixtureParam, info: Informer) { theTestThatCalled = true }
@Ignore
def testTheOther(fixture: FixtureParam, info: Informer) { theTestTheOtherCalled = true }
}
val repK = new TestIgnoredTrackingReporter
k.run(None, Args(repK, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses", "org.scalatest.Ignore")), Map(), None, new Tracker, Set.empty))
assert(repK.testIgnoredReceived)
assert(!k.theTestThisCalled)
assert(!k.theTestThatCalled)
assert(!k.theTestTheOtherCalled)
}
it("should return the correct test count from its expectedTestCount method") {
val a = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
def testThis(fixture: FixtureParam) = ()
def testThat(fixture: FixtureParam, info: Informer) = ()
}
assert(a.expectedTestCount(Filter()) === 2)
val b = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
@Ignore
def testThis(fixture: FixtureParam) = ()
def testThat(fixture: FixtureParam, info: Informer) = ()
}
assert(b.expectedTestCount(Filter()) === 1)
val c = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
@FastAsLight
def testThis(fixture: FixtureParam) = ()
def testThat(fixture: FixtureParam, info: Informer) = ()
}
assert(c.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(c.expectedTestCount(Filter(None, Set("org.scalatest.FastAsLight"))) === 1)
val d = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
@FastAsLight
@SlowAsMolasses
def testThis(fixture: FixtureParam) = ()
@SlowAsMolasses
def testThat(fixture: FixtureParam, info: Informer) = ()
def testTheOtherThing(info: Informer) = ()
}
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) === 1)
assert(d.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) === 1)
assert(d.expectedTestCount(Filter()) === 3)
val e = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) { test("hi") }
@FastAsLight
@SlowAsMolasses
def testThis(fixture: FixtureParam) = ()
@SlowAsMolasses
def testThat(fixture: FixtureParam, info: Informer) = ()
@Ignore
def testTheOtherThing(info: Informer) = ()
}
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) === 1)
assert(e.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) === 0)
assert(e.expectedTestCount(Filter()) === 2)
val f = Suites(a, b, c, d, e)
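      // With the default Filter(), the counts above sum to 2 (a) + 1 (b) + 2 (c) + 3 (d) + 2 (e) = 10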
assert(f.expectedTestCount(Filter()) === 10)
}
it("should generate a TestPending message when the test body is (pending)") {
val a = new FixtureSuite {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest) {
test(hello)
}
def testDoThis(fixture: FixtureParam) { pending }
def testDoThat(fixture: FixtureParam) {
assert(fixture === hello)
}
def testDoSomethingElse(fixture: FixtureParam) {
assert(fixture === hello)
pending
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val tp = rep.testPendingEventsReceived
assert(tp.size === 2)
}
it("should generate a TestCanceled message when the test body has a cancel invocation") {
val a = new FixtureSuite {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest) {
test(hello)
}
def testDoThis(fixture: FixtureParam) { cancel() }
def testDoThat(fixture: FixtureParam) {
assert(fixture === hello)
}
def testDoSomethingElse(fixture: FixtureParam) {
assert(fixture === hello)
cancel("meant to do that")
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val tp = rep.testCanceledEventsReceived
assert(tp.size === 2)
}
it("should generate a TestCanceled message when the test body has an assume invocation") {
val a = new FixtureSuite {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest) {
test(hello)
}
def testDoThis(fixture: FixtureParam) { assume(1 === 2) }
def testDoThat(fixture: FixtureParam) {
assert(fixture === hello)
}
def testDoSomethingElse(fixture: FixtureParam) {
assert(fixture === hello)
assume(1 === 2, "meant to do that")
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val tp = rep.testCanceledEventsReceived
assert(tp.size === 2)
}
it("should generate a test failure if a Throwable, or an Error other than direct Error subtypes " +
"known in JDK 1.5, excluding AssertionError") {
val a = new FixtureSuite {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest) {
test(hello)
}
def testThrowsAssertionError(s: String) { throw new AssertionError }
def testThrowsPlainOldError(s: String) { throw new Error }
def testThrowsThrowable(s: String) { throw new Throwable }
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val tf = rep.testFailedEventsReceived
assert(tf.size === 3)
}
it("should propagate out Errors that are direct subtypes of Error in JDK 1.5, other than " +
"AssertionError, causing Suites and Runs to abort.") {
val a = new FixtureSuite {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest) {
test(hello)
}
def testThrowsAssertionError(s: String) { throw new OutOfMemoryError }
}
intercept[OutOfMemoryError] {
a.run(None, Args(SilentReporter))
}
}
it("should allow both tests that take fixtures and tests that don't") {
val a = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) {
test("Hello, world!")
}
var takesNoArgsInvoked = false
def testTakesNoArgs() { takesNoArgsInvoked = true }
var takesAnInformerInvoked = false
def testTakesAnInformer(info: Informer) { takesAnInformerInvoked = true }
var takesAFixtureInvoked = false
def testTakesAFixture(s: String) { takesAFixtureInvoked = true }
var takesAFixtureAndInformerInvoked = false
def testTakesAFixtureAndInformer(s: String, info: Informer) { takesAFixtureAndInformerInvoked = true }
}
a.run(None, Args(SilentReporter))
assert(a.testNames.size === 4, a.testNames)
assert(a.takesNoArgsInvoked)
assert(a.takesAnInformerInvoked)
assert(a.takesAFixtureInvoked)
assert(a.takesAFixtureAndInformerInvoked)
}
it("should allow primitive type fixtures") {
val a = new FixtureSuite {
type FixtureParam = Int
def withFixture(test: OneArgTest) {
test(99)
}
var takesNoArgsInvoked = false
def testTakesNoArgs() { takesNoArgsInvoked = true }
var takesAnInformerInvoked = false
def testTakesAnInformer(info: Informer) { takesAnInformerInvoked = true }
var takesAFixtureInvoked = false
def testTakesAFixture(i: Int) { takesAFixtureInvoked = true }
var takesAFixtureAndInformerInvoked = false
def testTakesAFixtureAndInformer(i: Int, info: Informer) { takesAFixtureAndInformerInvoked = true }
}
a.run(None, Args(SilentReporter))
assert(a.testNames.size === 4, a.testNames)
assert(a.takesNoArgsInvoked)
assert(a.takesAnInformerInvoked)
assert(a.takesAFixtureInvoked)
assert(a.takesAFixtureAndInformerInvoked)
}
it("should pass a NoArgTest to withFixture for test methods that take no arguments") {
class MySuite extends FixtureSuite {
type FixtureParam = String
var aNoArgTestWasPassed = false
var aOneArgTestWasPassed = false
override def withFixture(test: NoArgTest) {
aNoArgTestWasPassed = true
}
def withFixture(test: OneArgTest) {
aOneArgTestWasPassed = true
}
def testSomething() {
assert(1 + 1 === 2)
}
}
val s = new MySuite
s.run(None, Args(SilentReporter))
assert(s.aNoArgTestWasPassed)
assert(!s.aOneArgTestWasPassed)
}
it("should pass a NoArgTest to withFixture for test methods that take only an Informer") {
class MySuite extends FixtureSuite {
type FixtureParam = String
var aNoArgTestWasPassed = false
var aOneArgTestWasPassed = false
override def withFixture(test: NoArgTest) {
aNoArgTestWasPassed = true
}
def withFixture(test: OneArgTest) {
aOneArgTestWasPassed = true
}
def testSomething(info: Informer) {
assert(1 + 1 === 2)
}
}
val s = new MySuite
s.run(None, Args(SilentReporter))
assert(s.aNoArgTestWasPassed)
assert(!s.aOneArgTestWasPassed)
}
it("should not pass a NoArgTest to withFixture for test methods that take a Fixture and an Informer") {
class MySuite extends FixtureSuite {
type FixtureParam = String
var aNoArgTestWasPassed = false
var aOneArgTestWasPassed = false
override def withFixture(test: NoArgTest) {
aNoArgTestWasPassed = true
}
def withFixture(test: OneArgTest) {
aOneArgTestWasPassed = true
}
def testSomething(fixture: FixtureParam, info: Informer) {
assert(1 + 1 === 2)
}
}
val s = new MySuite
s.run(None, Args(SilentReporter))
assert(!s.aNoArgTestWasPassed)
assert(s.aOneArgTestWasPassed)
}
it("should not pass a NoArgTest to withFixture for test methods that take a Fixture") {
class MySuite extends FixtureSuite {
type FixtureParam = String
var aNoArgTestWasPassed = false
def withFixture(test: OneArgTest) {
// Shouldn't be called
}
override def withFixture(test: NoArgTest) {
aNoArgTestWasPassed = true
}
def testSomething(fixture: FixtureParam) {
assert(1 + 1 === 2)
}
}
val s = new MySuite
s.run(None, Args(SilentReporter))
assert(!s.aNoArgTestWasPassed)
}
it("should pass a NoArgTest that invokes the no-arg test when the " +
"NoArgTest's no-arg apply method is invoked") {
class MySuite extends FixtureSuite {
type FixtureParam = String
var theNoArgTestWasInvoked = false
def withFixture(test: OneArgTest) {
// Shouldn't be called, but just in case don't invoke a OneArgTest
}
def testSomething() {
theNoArgTestWasInvoked = true
}
}
val s = new MySuite
s.run(None, Args(SilentReporter))
assert(s.theNoArgTestWasInvoked)
}
it("should pass a NoArgTest that invokes a test that takse an Informer when the " +
"NoArgTest's no-arg apply method is invoked") {
class MySuite extends FixtureSuite {
type FixtureParam = String
var theNoArgTestWasInvoked = false
def withFixture(test: OneArgTest) {
// Shouldn't be called, but just in case don't invoke a OneArgTest
}
def testSomething(info: Informer) {
theNoArgTestWasInvoked = true
}
}
val s = new MySuite
s.run(None, Args(SilentReporter))
assert(s.theNoArgTestWasInvoked)
}
it("should pass the correct test name in the OneArgTest passed to withFixture") {
val a = new FixtureSuite {
type FixtureParam = String
var correctTestNameWasPassed = false
def withFixture(test: OneArgTest) {
correctTestNameWasPassed = test.name == "testSomething(FixtureParam, Informer)"
test("hi")
}
def testSomething(fixture: FixtureParam, info: Informer) {}
}
a.run(None, Args(SilentReporter))
assert(a.correctTestNameWasPassed)
}
it("should pass the correct config map in the OneArgTest passed to withFixture") {
val a = new FixtureSuite {
type FixtureParam = String
var correctConfigMapWasPassed = false
def withFixture(test: OneArgTest) {
correctConfigMapWasPassed = (test.configMap == Map("hi" -> 7))
test("hi")
}
def testSomething(fixture: FixtureParam, info: Informer) {}
}
a.run(None, Args(SilentReporter, Stopper.default, Filter(), Map("hi" -> 7), None, new Tracker(), Set.empty))
assert(a.correctConfigMapWasPassed)
}
/*
it("should send InfoProvided events with aboutAPendingTest set to true and aboutACanceledTest set to false for info " +
"calls made from a test that is pending") {
val a = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) {
test("hi")
}
def testSomething(s: String, info: Informer) {
info("two integers")
info("one is subracted from the other")
info("the result is the difference between the two numbers")
pending
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val testPending = rep.testPendingEventsReceived
assert(testPending.size === 1)
val recordedEvents = testPending(0).recordedEvents
assert(recordedEvents.size === 3)
for (event <- recordedEvents) {
val ip = event.asInstanceOf[InfoProvided]
assert(ip.aboutAPendingTest.isDefined && ip.aboutAPendingTest.get)
assert(ip.aboutACanceledTest.isDefined && !ip.aboutACanceledTest.get)
}
}
it("should send InfoProvided events with aboutAPendingTest and aboutACanceledTest set to false for info " +
"calls made from a test that is not pending or canceled") {
val a = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) {
test("hi")
}
def testSomething(s: String, info: Informer) {
info("two integers")
info("one is subracted from the other")
info("the result is the difference between the two numbers")
assert(1 + 1 === 2)
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val testSucceeded = rep.testSucceededEventsReceived
assert(testSucceeded.size === 1)
val recordedEvents = testSucceeded(0).recordedEvents
assert(recordedEvents.size === 3)
for (event <- recordedEvents) {
val ip = event.asInstanceOf[InfoProvided]
assert(ip.aboutAPendingTest.isDefined && !ip.aboutAPendingTest.get)
assert(ip.aboutACanceledTest.isDefined && !ip.aboutACanceledTest.get)
}
}
it("should send InfoProvided events with aboutAPendingTest set to false and aboutACanceledTest set to true for info " +
"calls made from a test that is canceled") {
val a = new FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) {
test("hi")
}
def testSomething(s: String, info: Informer) {
info("two integers")
info("one is subracted from the other")
info("the result is the difference between the two numbers")
cancel()
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val testCanceled = rep.testCanceledEventsReceived
assert(testCanceled.size === 1)
val recordedEvents = testCanceled(0).recordedEvents
assert(recordedEvents.size === 3)
for (event <- recordedEvents) {
val ip = event.asInstanceOf[InfoProvided]
assert(ip.aboutAPendingTest.isDefined && !ip.aboutAPendingTest.get)
assert(ip.aboutACanceledTest.isDefined && ip.aboutACanceledTest.get)
}
}
*/
it("should, when a test method takes an Informer and writes to it, report the info in the test completion event") {
val msg = "hi there dude"
class MySuite extends FixtureSuite {
type FixtureParam = String
def withFixture(test: OneArgTest) {
test("hi")
}
def testWithInformer(s: String, info: Informer) {
info(msg)
}
}
val (testStartingIndex, testSucceededIndex) =
getIndexesForTestInformerEventOrderTests(new MySuite, "testWithInformer(FixtureParam, Informer)", msg)
assert(testStartingIndex < testSucceededIndex)
}
}
describe("A OneArgTest") {
it("should provide an easy way to invoke a NoArgTest") {
var noArgWithFixtureWasCalled = false
val a = new FixtureSuite {
type FixtureParam = String
override def withFixture(test: NoArgTest) {
noArgWithFixtureWasCalled = true
test()
}
def withFixture(test: OneArgTest) {
withFixture(test.toNoArgTest("hi"))
}
def testSomething(fixture: String) { assert(fixture === "hi") }
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
assert(noArgWithFixtureWasCalled)
assert(rep.eventsReceived.exists(_.isInstanceOf[TestSucceeded]))
}
}
}
|
hubertp/scalatest
|
src/test/scala/org/scalatest/fixture/FixtureSuiteSpec.scala
|
Scala
|
apache-2.0
| 41,624
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution.command
import org.apache.spark.sql.{CarbonEnv, Row, SparkSession}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.command.{ResetCommand, RunnableCommand}
case class CarbonResetCommand()
extends RunnableCommand {
override val output = ResetCommand.output
override def run(sparkSession: SparkSession): Seq[Row] = {
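    // Clear CarbonData-specific session parameters before delegating to Spark's built-in RESET.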
CarbonEnv.getInstance(sparkSession).carbonSessionInfo.getSessionParams.clear()
ResetCommand.run(sparkSession)
}
}
/**
 * Extractor that matches Spark's RESET command in a logical plan; it is kept as an
 * extractor because the shape of ResetCommand differs across Spark versions.
 */
object MatchResetCommand {
def unapply(plan: LogicalPlan): Option[LogicalPlan] = {
plan match {
      case ResetCommand =>
Some(plan)
case _ =>
None
}
}
}
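// A minimal usage sketch (hypothetical caller; names are illustrative): the extractor
// lets plan-rewriting code intercept RESET regardless of the Spark version, e.g.
//   plan match {
//     case MatchResetCommand(_) => CarbonResetCommand()
//     case other                => other
//   }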
|
jackylk/incubator-carbondata
|
integration/spark/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonResetCommand.scala
|
Scala
|
apache-2.0
| 1,614
|
package fi.pelam.javafxutil
import org.junit.Assert._
import org.junit.Test
case class TestImmutable(a: Int, b: Int) {
}
class ScalaToJavafxPropsTest {
val immutable = TestImmutable(1, 2)
val scalaToJavafxProps = ScalaToJavafxProps.forClass[TestImmutable]
val props: JavafxProps[TestImmutable] = scalaToJavafxProps(immutable)
@Test
def testPropertyNames(): Unit = {
assertEquals("a, b", props.propertyNames.reduce(_ + ", " + _))
}
@Test
def testIntegerPropertyValue(): Unit = {
assertEquals(1, props.getNumberProperty("a").getValue.intValue())
}
@Test
def testToString(): Unit = {
assertEquals("JavafxProps for TestImmutable(1,2) (generated by JavaFxPropsMaker)", props.toString)
}
}
|
pelamfi/pelam-scala-incubator
|
src/test/scala/fi/pelam/javafxutil/ScalaToJavafxPropsTest.scala
|
Scala
|
apache-2.0
| 726
|
package com.arcusys.valamis.content.service
import com.arcusys.valamis.content.exceptions.NoCategoryException
import com.arcusys.valamis.content.model._
import com.arcusys.valamis.content.storage.{CategoryStorage, PlainTextStorage, QuestionStorage}
import com.arcusys.valamis.persistence.common.DatabaseLayer
import slick.dbio.DBIO
import scala.concurrent.ExecutionContext.Implicits.global
trait CategoryService {
def create(category: Category): Category
def copyWithContent(id: Long, newTitle: String, newDescription: String): Category
def update(id: Long, newTitle: String, newDescription: String): Unit
def moveToCategory(id: Long, newCategoryId: Option[Long], courseId: Long): Unit
def moveToCourse(id: Long, courseId: Long, moveToRoot: Boolean): Unit
def getByID(id: Long): Option[Category]
def getByTitle(name: String): Option[Category]
def getByTitleAndCourseId(name: String, courseId: Long): Option[Category]
def getByCategory(categoryId: Option[Long], courseId: Long): Seq[Category]
def deleteWithContent(id: Long): Unit
}
abstract class CategoryServiceImpl extends CategoryService {
def categories: CategoryStorage
def questionStorage: QuestionStorage
def plainTextStorage: PlainTextStorage
def plainTextService: PlainTextService
def questionService: QuestionService
def dbLayer: DatabaseLayer
import DatabaseLayer._
override def create(category: Category): Category = dbLayer.execSync(categories.create(category))
override def getByID(id: Long): Option[Category] = dbLayer.execSync(categories.getById(id))
override def getByTitle(name: String): Option[Category] = dbLayer.execSync(categories.getByTitle(name))
override def getByTitleAndCourseId(name: String, courseId: Long): Option[Category] = dbLayer.execSync {
categories.getByTitleAndCourseId(name, courseId)
}
override def getByCategory(categoryId: Option[Long], courseId: Long): Seq[Category] = dbLayer.execSync {
categories.getByCategory(categoryId, courseId)
}
override def copyWithContent(oldCategoryId: Long, newTitle: String, newDescription: String): Category =
dbLayer.execSyncInTransaction {
categories.getById(oldCategoryId) ifSomeThen { category =>
copyWithContent(category.copy(title = newTitle, description = newDescription), category.categoryId)
} map (_.getOrElse(throw new NoCategoryException(oldCategoryId)))
}
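  // Recursively copies a category subtree: creates the new category, copies its plain
  // texts and questions into it, then descends into each child category.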
private def copyWithContent(oldCategory: Category, newParentId: Option[Long]): DBIO[Category] = {
for {
newCategory <- categories.create(oldCategory.copy(id = None, categoryId = newParentId))
_ <- plainTextService.copyByCategoryAction(oldCategory.id, newCategory.id, oldCategory.courseId)
_ <- questionService.copyByCategoryAction(oldCategory.id, newCategory.id, oldCategory.courseId)
otherCats <- categories.getByCategory(oldCategory.id, oldCategory.courseId)
_ <- sequence(otherCats.map { otherCat => copyWithContent(otherCat, newCategory.id) })
} yield newCategory
}
override def update(id: Long, newTitle: String, newDescription: String): Unit = dbLayer.execSync {
categories.getById(id).ifSomeThen { cat =>
categories.update(cat.copy(title = newTitle, description = newDescription))
}
}
private def moveToCourseAction(id: Long, courseId: Long, moveToRoot: Boolean): DBIO[Option[Unit]] = {
categories.getById(id).ifSomeThen { cat =>
categories.moveToCourse(id, courseId, moveToRoot) andThen
moveRelatedContentToCourseAction(id, cat.courseId, courseId)
}
}
override def moveToCourse(id: Long, courseId: Long, moveToRoot: Boolean): Unit = dbLayer.execSyncInTransaction {
moveToCourseAction(id, courseId, moveToRoot)
}
override def moveToCategory(id: Long, newCategoryId: Option[Long], courseId: Long): Unit =
dbLayer.execSyncInTransaction {
if (newCategoryId.isDefined) {
for {
newCourseId <- categories.getById(newCategoryId.get).map(_.map(_.courseId).getOrElse(courseId))
_ <- categories.moveToCategory(id, newCategoryId, newCourseId)
_ <- if (newCourseId != courseId) {
moveRelatedContentToCourseAction(id, courseId, newCourseId)
} else {
            DBIO.successful(())
}
} yield ()
} else {
categories.moveToCategory(id, newCategoryId, courseId)
}
}
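  // Moves every question, plain text and child category under the given category to the
  // new course; child categories recurse via moveToCourseAction.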
private def moveRelatedContentToCourseAction(categoryId: Long, oldCourseId: Long, newCourseId: Long) =
for {
questions <- questionStorage.getByCategory(categoryId)
_ <- sequence(questions.map {q => questionService.moveToCourseAction(q.id.get, newCourseId, moveToRoot = false) })
plainTexts <- plainTextStorage.getByCategory(categoryId)
_ <- sequence(plainTexts.map { pt => plainTextService.moveToCourseAction(pt.id.get, newCourseId, moveToRoot = false) })
cats <- categories.getByCategory(categoryId)
_ <- sequence(cats.map { cat => moveToCourseAction(cat.id.get, newCourseId, moveToRoot = false) })
} yield ()
override def deleteWithContent(id: Long): Unit = {
    //all related content will be deleted automatically thanks to onDelete=ForeignKeyAction.Cascade option for FK
//in ContentTableComponent classes
//TODO delete content manually (in case of another storage impl)
dbLayer.execSyncInTransaction(categories.delete(id))
}
}
|
arcusys/Valamis
|
valamis-questionbank/src/main/scala/com/arcusys/valamis/content/service/CategoryService.scala
|
Scala
|
gpl-3.0
| 5,372
|
package jp.co.dwango.s99
import org.scalacheck.Prop.forAll
import org.scalacheck.Properties
class P14Check extends Properties("P14") {
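  // P14 (from the S-99 problems): duplicate every element of a list, e.g.
  // duplicate(List(1, 2)) == List(1, 1, 2, 2). The property checks it against
  // the obvious flatMap-based specification.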
property("duplicate()") = forAll { (s: List[Int]) =>
P14.duplicate(s) == s.map { x =>
List(x, x)
}.flatten
}
}
|
dwango/S99
|
src/test/scala/jp/co/dwango/s99/P14Check.scala
|
Scala
|
mit
| 266
|
/*
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.extensions.iterativebatch.compiler
package graph
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner
import java.io.{ DataInput, DataOutput }
import java.util.{ List => JList }
import scala.collection.JavaConversions._
import scala.concurrent.{ Await, Future }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.Writable
import org.apache.spark.{ HashPartitioner, Partitioner }
import org.apache.spark.rdd.RDD
import com.asakusafw.bridge.stage.StageInfo
import com.asakusafw.lang.compiler.model.description.{ ClassDescription, ImmediateDescription }
import com.asakusafw.lang.compiler.model.graph.{ Groups, MarkerOperator }
import com.asakusafw.lang.compiler.model.testing.OperatorExtractor
import com.asakusafw.lang.compiler.planning.{ PlanBuilder, PlanMarker }
import com.asakusafw.runtime.core.Result
import com.asakusafw.runtime.model.DataModel
import com.asakusafw.runtime.value.{ BooleanOption, IntOption }
import com.asakusafw.spark.compiler.{ ClassServerForAll, FlowIdForEach }
import com.asakusafw.spark.compiler.graph._
import com.asakusafw.spark.compiler.planning.{ IterativeInfo, SubPlanInfo, SubPlanOutputInfo }
import com.asakusafw.spark.runtime._
import com.asakusafw.spark.runtime.graph.{
Broadcast,
BroadcastId,
CoGroup,
GroupOrdering,
SortOrdering,
Source
}
import com.asakusafw.spark.runtime.io.WritableSerDe
import com.asakusafw.spark.runtime.rdd.{ BranchKey, ShuffleKey }
import com.asakusafw.spark.tools.asm._
import com.asakusafw.vocabulary.flow.processor.InputBuffer
import com.asakusafw.vocabulary.operator.{ CoGroup => CoGroupOp }
import com.asakusafw.spark.extensions.iterativebatch.compiler.spi.RoundAwareNodeCompiler
import com.asakusafw.spark.extensions.iterativebatch.runtime.graph.RoundAwareParallelCollectionSource
@RunWith(classOf[JUnitRunner])
class CoGroupClassBuilderSpecTest extends CoGroupClassBuilderSpec
class CoGroupClassBuilderSpec
extends FlatSpec
with ClassServerForAll
with SparkForAll
with FlowIdForEach
with UsingCompilerContext
with JobContextSugar
with RoundContextSugar {
import CoGroupClassBuilderSpec._
behavior of classOf[CoGroupClassBuilder].getSimpleName
for {
method <- Seq("cogroup", "cogroupEscape")
(outputType, partitioners) <- Seq(
(SubPlanOutputInfo.OutputType.DONT_CARE, 7),
(SubPlanOutputInfo.OutputType.PREPARE_EXTERNAL_OUTPUT, 0))
iterativeInfo <- Seq(
IterativeInfo.always(),
IterativeInfo.never(),
IterativeInfo.parameter("round"))
} {
val conf = s"OutputType: ${outputType}, IterativeInfo: ${iterativeInfo}"
it should s"build cogroup class ${method}: [${conf}]" in {
val foosMarker = MarkerOperator.builder(ClassDescription.of(classOf[Foo]))
.attribute(classOf[PlanMarker], PlanMarker.GATHER).build()
val barsMarker = MarkerOperator.builder(ClassDescription.of(classOf[Bar]))
.attribute(classOf[PlanMarker], PlanMarker.GATHER).build()
val operator = OperatorExtractor
.extract(classOf[CoGroupOp], classOf[CoGroupOperator], method)
.input("foos", ClassDescription.of(classOf[Foo]),
Groups.parse(Seq("id")),
foosMarker.getOutput)
.input("bars", ClassDescription.of(classOf[Bar]),
Groups.parse(Seq("fooId"), Seq("+id")),
barsMarker.getOutput)
.output("fooResult", ClassDescription.of(classOf[Foo]))
.output("barResult", ClassDescription.of(classOf[Bar]))
.output("fooError", ClassDescription.of(classOf[Foo]))
.output("barError", ClassDescription.of(classOf[Bar]))
.output("nResult", ClassDescription.of(classOf[N]))
.argument("n", ImmediateDescription.of(10))
.build()
val fooResultMarker = MarkerOperator.builder(ClassDescription.of(classOf[Foo]))
.attribute(classOf[PlanMarker], PlanMarker.CHECKPOINT).build()
operator.findOutput("fooResult").connect(fooResultMarker.getInput)
val barResultMarker = MarkerOperator.builder(ClassDescription.of(classOf[Bar]))
.attribute(classOf[PlanMarker], PlanMarker.CHECKPOINT).build()
operator.findOutput("barResult").connect(barResultMarker.getInput)
val fooErrorMarker = MarkerOperator.builder(ClassDescription.of(classOf[Foo]))
.attribute(classOf[PlanMarker], PlanMarker.CHECKPOINT).build()
operator.findOutput("fooError").connect(fooErrorMarker.getInput)
val barErrorMarker = MarkerOperator.builder(ClassDescription.of(classOf[Bar]))
.attribute(classOf[PlanMarker], PlanMarker.CHECKPOINT).build()
operator.findOutput("barError").connect(barErrorMarker.getInput)
val fooAllMarker = MarkerOperator.builder(ClassDescription.of(classOf[Foo]))
.attribute(classOf[PlanMarker], PlanMarker.CHECKPOINT).build()
operator.findOutput("fooResult").connect(fooAllMarker.getInput)
operator.findOutput("fooError").connect(fooAllMarker.getInput)
val barAllMarker = MarkerOperator.builder(ClassDescription.of(classOf[Bar]))
.attribute(classOf[PlanMarker], PlanMarker.CHECKPOINT).build()
operator.findOutput("barResult").connect(barAllMarker.getInput)
operator.findOutput("barError").connect(barAllMarker.getInput)
val nResultMarker = MarkerOperator.builder(ClassDescription.of(classOf[N]))
.attribute(classOf[PlanMarker], PlanMarker.CHECKPOINT).build()
operator.findOutput("nResult").connect(nResultMarker.getInput)
val plan = PlanBuilder.from(Seq(operator))
.add(
Seq(foosMarker, barsMarker),
Seq(fooResultMarker, barResultMarker,
fooErrorMarker, barErrorMarker,
fooAllMarker, barAllMarker,
nResultMarker)).build().getPlan()
assert(plan.getElements.size === 1)
val subplan = plan.getElements.head
subplan.putAttr(
new SubPlanInfo(_,
SubPlanInfo.DriverType.COGROUP,
Seq.empty[SubPlanInfo.DriverOption],
operator))
subplan.putAttr(_ => iterativeInfo)
val foosInput = subplan.findIn(foosMarker)
val barsInput = subplan.findIn(barsMarker)
for {
marker <- Seq(
fooResultMarker, barResultMarker,
fooErrorMarker, barErrorMarker,
fooAllMarker, barAllMarker,
nResultMarker)
} {
subplan.findOut(marker)
.putAttr(
new SubPlanOutputInfo(_,
outputType,
Seq.empty[SubPlanOutputInfo.OutputOption], null, null))
}
implicit val context = newNodeCompilerContext(flowId, classServer.root.toFile)
context.branchKeys.getField(foosInput.getOperator.getSerialNumber)
context.branchKeys.getField(barsInput.getOperator.getSerialNumber)
val compiler = RoundAwareNodeCompiler.get(subplan)
val thisType = compiler.compile(subplan)
context.addClass(context.branchKeys)
context.addClass(context.broadcastIds)
val cls = classServer.loadClass(thisType).asSubclass(classOf[CoGroup])
val branchKeyCls = classServer.loadClass(context.branchKeys.thisType.getClassName)
def getBranchKey(marker: MarkerOperator): BranchKey = {
val sn = subplan.getOperators.toSet
.find(_.getOriginalSerialNumber == marker.getOriginalSerialNumber).get.getSerialNumber
branchKeyCls.getField(context.branchKeys.getField(sn)).get(null).asInstanceOf[BranchKey]
}
implicit val jobContext = newJobContext(sc)
val foos =
new RoundAwareParallelCollectionSource(getBranchKey(foosMarker), (0 until 100))("foos")
.mapWithRoundContext(getBranchKey(foosMarker))(Foo.intToFoo)
val fooOrd = new Foo.SortOrdering()
val bars =
new RoundAwareParallelCollectionSource(getBranchKey(barsMarker), (0 until 100))("bars")
.flatMapWithRoundContext(getBranchKey(barsMarker))(Bar.intToBars)
val barOrd = new Bar.SortOrdering()
val grouping = new GroupingOrdering()
val partitioner = new HashPartitioner(2)
val cogroup = cls.getConstructor(
classOf[Seq[(Seq[(Source, BranchKey)], Option[SortOrdering])]],
classOf[GroupOrdering],
classOf[Partitioner],
classOf[Map[BroadcastId, Broadcast[_]]],
classOf[JobContext])
.newInstance(
Seq(
(Seq((foos, getBranchKey(foosMarker))), Option(fooOrd)),
(Seq((bars, getBranchKey(barsMarker))), Option(barOrd))),
grouping,
partitioner,
Map.empty,
jobContext)
assert(cogroup.partitioners.size === partitioners)
assert(cogroup.branchKeys ===
Set(fooResultMarker, barResultMarker,
fooErrorMarker, barErrorMarker,
fooAllMarker, barAllMarker,
nResultMarker).map(getBranchKey))
for {
round <- 0 to 1
} {
val rc = newRoundContext(
stageId = s"round_${round}",
batchArguments = Map("round" -> round.toString))
val bias = if (iterativeInfo.isIterative) 100 * round else 0
val results = cogroup.compute(rc)
val (((fooResult, barResult), (fooError, barError)), ((fooAll, barAll), nResult)) =
Await.result(
results(getBranchKey(fooResultMarker)).map {
_().map {
case (_, foo: Foo) => foo.id.get
}.collect.toSeq
}.zip {
results(getBranchKey(barResultMarker)).map {
_().map {
case (_, bar: Bar) => (bar.id.get, bar.fooId.get)
}.collect.toSeq
}
}.zip {
results(getBranchKey(fooErrorMarker)).map {
_().map {
case (_, foo: Foo) => foo.id.get
}.collect.toSeq.sorted
}.zip {
results(getBranchKey(barErrorMarker)).map {
_().map {
case (_, bar: Bar) => (bar.id.get, bar.fooId.get)
}.collect.toSeq.sortBy(_._2)
}
}
}.zip {
results(getBranchKey(fooAllMarker)).map {
_().map {
case (_, foo: Foo) => foo.id.get
}.collect.toSeq.sorted
}.zip {
results(getBranchKey(barAllMarker)).map {
_().map {
case (_, bar: Bar) => (bar.id.get, bar.fooId.get)
}.collect.toSeq.sortBy(_._2)
}
}.zip {
results(getBranchKey(nResultMarker)).map {
_().map {
case (_, n: N) => n.n.get
}.collect.toSeq
}
}
}, Duration.Inf)
assert(fooResult.size === 1)
assert(fooResult.head === bias + 1)
assert(barResult.size === 1)
assert(barResult.head._1 === bias + 0)
assert(barResult.head._2 === bias + 1)
assert(fooError.size === 99)
assert(fooError.head === bias + 0)
for (i <- 2 until 10) {
assert(fooError(i - 1) === bias + i)
}
assert(barError.size === 4949)
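        // Bars are grouped by fooId i; group i holds i records, so the offset of group i in
        // the sorted sequence is the triangular number i * (i - 1) / 2, minus 1 for the
        // single bar of group i == 1 that went to barResult instead.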
for {
i <- 2 until 100
j <- 0 until i
} {
assert(barError((i * (i - 1)) / 2 + j - 1)._1 == bias + j)
assert(barError((i * (i - 1)) / 2 + j - 1)._2 == bias + i)
}
assert(fooAll.size === 100)
for (i <- 0 until 100) {
assert(fooAll(i) === bias + i)
}
assert(barAll.size === 4950)
for {
i <- 0 until 100
j <- 0 until i
} {
        assert(barAll((i * (i - 1)) / 2 + j)._1 == bias + j)
assert(barAll((i * (i - 1)) / 2 + j)._2 == bias + i)
}
assert(nResult.size === 100)
nResult.foreach(n => assert(n === 10))
}
}
}
}
object CoGroupClassBuilderSpec {
class GroupingOrdering extends Ordering[ShuffleKey] {
override def compare(x: ShuffleKey, y: ShuffleKey): Int = {
IntOption.compareBytes(x.grouping, 0, x.grouping.length, y.grouping, 0, y.grouping.length)
}
}
class Foo extends DataModel[Foo] with Writable {
val id = new IntOption()
override def reset(): Unit = {
id.setNull()
}
override def copyFrom(other: Foo): Unit = {
id.copyFrom(other.id)
}
override def readFields(in: DataInput): Unit = {
id.readFields(in)
}
override def write(out: DataOutput): Unit = {
id.write(out)
}
def getIdOption: IntOption = id
}
object Foo {
def intToFoo(rc: RoundContext): Int => (_, Foo) = {
val stageInfo = StageInfo.deserialize(rc.hadoopConf.value.get(StageInfo.KEY_NAME))
val round = stageInfo.getBatchArguments()("round").toInt
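      // A single Foo instance is reused across records to avoid per-record allocation;
      // the shuffle key bytes are serialized eagerly from the current field values.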
lazy val foo = new Foo()
{ i =>
foo.id.modify(100 * round + i)
val shuffleKey = new ShuffleKey(
WritableSerDe.serialize(foo.id),
WritableSerDe.serialize(new BooleanOption().modify(i % 3 == 0)))
(shuffleKey, foo)
}
}
class SortOrdering extends GroupingOrdering {
override def compare(x: ShuffleKey, y: ShuffleKey): Int = {
val cmp = super.compare(x, y)
if (cmp == 0) {
BooleanOption.compareBytes(x.ordering, 0, x.ordering.length, y.ordering, 0, y.ordering.length)
} else {
cmp
}
}
}
}
class Bar extends DataModel[Bar] with Writable {
val id = new IntOption()
val fooId = new IntOption()
override def reset(): Unit = {
id.setNull()
fooId.setNull()
}
override def copyFrom(other: Bar): Unit = {
id.copyFrom(other.id)
fooId.copyFrom(other.fooId)
}
override def readFields(in: DataInput): Unit = {
id.readFields(in)
fooId.readFields(in)
}
override def write(out: DataOutput): Unit = {
id.write(out)
fooId.write(out)
}
def getIdOption: IntOption = id
def getFooIdOption: IntOption = fooId
}
object Bar {
def intToBars(rc: RoundContext): Int => Iterator[(_, Bar)] = {
val stageInfo = StageInfo.deserialize(rc.hadoopConf.value.get(StageInfo.KEY_NAME))
val round = stageInfo.getBatchArguments()("round").toInt
lazy val bar = new Bar()
{ i: Int =>
(0 until i).iterator.map { j =>
bar.id.modify(100 * round + j)
bar.fooId.modify(100 * round + i)
val shuffleKey = new ShuffleKey(
WritableSerDe.serialize(bar.fooId),
WritableSerDe.serialize(new IntOption().modify(bar.id.toString.hashCode)))
(shuffleKey, bar)
}
}
}
class SortOrdering extends GroupingOrdering {
override def compare(x: ShuffleKey, y: ShuffleKey): Int = {
val cmp = super.compare(x, y)
if (cmp == 0) {
IntOption.compareBytes(x.ordering, 0, x.ordering.length, y.ordering, 0, y.ordering.length)
} else {
cmp
}
}
}
}
class N extends DataModel[N] with Writable {
val n = new IntOption()
override def reset(): Unit = {
n.setNull()
}
override def copyFrom(other: N): Unit = {
n.copyFrom(other.n)
}
override def readFields(in: DataInput): Unit = {
n.readFields(in)
}
override def write(out: DataOutput): Unit = {
n.write(out)
}
def getNOption: IntOption = n
}
class CoGroupOperator {
private[this] val n = new N
@CoGroupOp
def cogroup(
foos: JList[Foo], bars: JList[Bar],
fooResult: Result[Foo], barResult: Result[Bar],
fooError: Result[Foo], barError: Result[Bar],
nResult: Result[N], n: Int): Unit = {
if (foos.size == 1 && bars.size == 1) {
fooResult.add(foos(0))
barResult.add(bars(0))
} else {
foos.foreach(fooError.add)
bars.foreach(barError.add)
}
this.n.n.modify(n)
nResult.add(this.n)
}
@CoGroupOp(inputBuffer = InputBuffer.ESCAPE)
def cogroupEscape(
foos: JList[Foo], bars: JList[Bar],
fooResult: Result[Foo], barResult: Result[Bar],
fooError: Result[Foo], barError: Result[Bar],
nResult: Result[N], n: Int): Unit = {
if (foos.size == 1 && bars.size == 1) {
fooResult.add(foos(0))
barResult.add(bars(0))
} else {
foos.foreach(fooError.add)
bars.foreach(barError.add)
}
this.n.n.modify(n)
nResult.add(this.n)
}
}
}
|
ashigeru/asakusafw-spark
|
extensions/iterativebatch/compiler/core/src/test/scala/com/asakusafw/spark/extensions/iterativebatch/compiler/graph/CoGroupClassBuilderSpec.scala
|
Scala
|
apache-2.0
| 17,237
|
package com.kodekutters.gpsd4scala.core
import akka.actor._
import java.net.InetSocketAddress
import com.kodekutters.gpsd4scala.messages.ConnectionFailed
/**
* Author: Ringo Wathelet
* Date: 18/04/13
* Version: 1
*/
/**
* main entry point to link to the gpsd server
*
* @param address the IP address of the gpsd server, e.g. localhost:2947
*/
class GPSdLinker(address: java.net.InetSocketAddress) extends Actor with ActorLogging with CollectorManager {
def this(server: String, port: Int = 2947) = this(new InetSocketAddress(server, port))
// the client that connects to the gpsd server
val gpsdClient = context.actorOf(GpsdClient.props(address, collectorList), "client")
// manage the collectors, then the linker receive
def receive = manageCollectors orElse linkerReceive
def linkerReceive: Receive = {
// from the client, typically when no connection could be established
case ConnectionFailed =>
log.info("\n......connection failed, probably because the gpsd daemon is not running")
// report to the parent that the connection failed
context.parent ! ConnectionFailed
context stop self
// forward all other commands to the client
case cmd => gpsdClient forward cmd
}
}
object GPSdLinker {
def props(address: java.net.InetSocketAddress): Props = Props(new GPSdLinker(address))
def props(server: String, port: Int = 2947): Props = Props(new GPSdLinker(server, port))
}
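// A minimal wiring sketch (system and actor names are illustrative):
//   val system = ActorSystem("gpsd4scala")
//   val linker = system.actorOf(GPSdLinker.props("localhost"), "linker")
//   // any gpsd command message sent to `linker` is forwarded to the underlying client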
|
workingDog/Gpsd4Scala
|
src/main/scala/com/kodekutters/gpsd4scala/core/GPSdLinker.scala
|
Scala
|
bsd-3-clause
| 1,449
|
import sbt._
import sbt.Keys._
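// sbt 0.13-style project definition; the Build trait was deprecated in later sbt
// releases in favour of a plain build.sbt.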
object SlickTestingBuild extends Build {
lazy val slickTesting = Project(
id = "slick-testing",
base = file("."),
settings = Seq(
name := "Slick Testing",
organization := "com.daverstevens",
version := "0.1-SNAPSHOT",
scalaVersion := "2.11.2"
// add other settings here
)
)
}
|
drstevens/slick-testing
|
project/Build.scala
|
Scala
|
apache-2.0
| 359