| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
package config.paramconf
import config.Conf
import scala.collection.mutable
/**
  * Parameters used by the preprocessing stage.
  *
  * Created by yhao on 2017/3/8.
  */
class PreprocessParams extends Serializable {
val kvMap: mutable.LinkedHashMap[String, String] = Conf.loadConf("../conf/preprocess.properties")
  val fanjian: String = kvMap.getOrElse("fanjian", "f2j") // traditional/simplified Chinese conversion
  val quanban: String = kvMap.getOrElse("quanban", "q2b") // full-width/half-width conversion
  val minLineLen: Int = kvMap.getOrElse("minLineLen", "1").toInt // minimum line length
  val handleInvalid: String = kvMap.getOrElse("handleInvalid", "error") // how to handle invalid lines
  val segmentType: String = kvMap.getOrElse("segType", "StandardSegment") // word segmentation method
  val delNum: Boolean = kvMap.getOrElse("delNum", "false").toBoolean // whether to remove digits
  val delEn: Boolean = kvMap.getOrElse("delEn", "false").toBoolean // whether to remove English words
  val addNature: Boolean = kvMap.getOrElse("addNature", "false").toBoolean // whether to append part-of-speech tags
  val natureFilter: Boolean = kvMap.getOrElse("natureFilter", "false").toBoolean // whether to filter by part of speech
  val minTermLen: Int = kvMap.getOrElse("minTermLen", "1").toInt // minimum term length
  val minTermNum: Int = kvMap.getOrElse("minTermNum", "3").toInt // minimum number of terms per line
  val minTF: Double = kvMap.getOrElse("minTF", "1").toDouble // minimum term frequency
  val vocabSize: Int = kvMap.getOrElse("vocabSize", "10000").toInt // feature vocabulary size
  val minDocFreq: Int = kvMap.getOrElse("minDocFreq", "1").toInt // minimum document frequency
  val stopwordFilePath: String = kvMap.getOrElse("stopwordFilePath", "ckooc-ml/dictionaries/hanlp/data/dictionary/stopwords.txt") // stopwords file path
}
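
// A minimal, hypothetical usage sketch (not part of the original file; it assumes
// ../conf/preprocess.properties is resolvable, as Conf.loadConf above requires):
object PreprocessParamsDemo extends App {
  val params = new PreprocessParams()
  println(s"segType=${params.segmentType}, vocabSize=${params.vocabSize}, minTF=${params.minTF}")
  println(s"stopwords: ${params.stopwordFilePath}")
}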
|
yhao2014/CkoocNLP
|
ckooc-ml/src/main/scala/config/paramconf/PreprocessParams.scala
|
Scala
|
apache-2.0
| 1,727
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.api.python
import org.apache.spark.Partitioner
import org.apache.spark.util.Utils
/**
* A [[org.apache.spark.Partitioner]] that performs handling of long-valued keys, for use by the Python API.
*
* Stores the unique id() of the Python-side partitioning function so that it is incorporated into
* equality comparisons. Correctness requires that the id is a unique identifier for the
* lifetime of the program (i.e. that it is not re-used as the id of a different partitioning
* function). This can be ensured by using the Python id() function and maintaining a reference
* to the Python partitioning function so that its id() is not reused.
*/
private[spark] class PythonPartitioner(
override val numPartitions: Int,
val pyPartitionFunctionId: Long)
extends Partitioner {
override def getPartition(key: Any): Int = key match {
case null => 0
// we don't trust the Python partition function to return valid partition ID's so
// let's do a modulo numPartitions in any case
case key: Long => Utils.nonNegativeMod(key.toInt, numPartitions)
case _ => Utils.nonNegativeMod(key.hashCode(), numPartitions)
}
  override def equals(other: Any): Boolean = other match {
    case h: PythonPartitioner =>
      h.numPartitions == numPartitions && h.pyPartitionFunctionId == pyPartitionFunctionId
    case _ =>
      false
  }
  // equals depends on the Python function id, so keep hashCode consistent with it
  override def hashCode: Int = 31 * numPartitions + pyPartitionFunctionId.hashCode
}
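
// A minimal, hypothetical sketch of the dispatch above (not part of the original
// file; it would have to live inside the org.apache.spark package because the
// partitioner is private[spark]):
object PythonPartitionerDemo {
  def main(args: Array[String]): Unit = {
    val p = new PythonPartitioner(numPartitions = 4, pyPartitionFunctionId = 1L)
    println(p.getPartition(10L))  // Long key: nonNegativeMod(10, 4) == 2
    println(p.getPartition(-3L))  // negative keys stay in range: nonNegativeMod(-3, 4) == 1
    println(p.getPartition(null)) // null keys always map to partition 0
  }
}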
|
cloudera/spark
|
core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala
|
Scala
|
apache-2.0
| 2,211
|
package se.callista.akka.camel
import akka.camel.{Consumer, CamelMessage, Producer}
import akka.actor.{Props, ActorSystem}
import scala.concurrent.duration._
import util.parsing.json.{JSONObject, JSONArray}
import akka.util
import akka.actor.Status.Failure
class HttpProducer(host: String) extends Producer {
import System._
def endpointUri = "jetty://http://" + host + "/?bridgeEndpoint=true"
def threadName = Thread.currentThread().getName
var start : Long = 0
override def transformOutgoingMessage(msg: Any) = {
println("Request> " + threadName + " " + host)
start = currentTimeMillis()
msg
}
override def transformResponse(msg: Any): Any = {
val t = currentTimeMillis() - start
msg match {
case msg: CamelMessage => {
println("Response> " + threadName + " " + host)
msg.copy(headers = msg.headers ++ Map("Url" -> host, "RequestTime" -> t) )
}
case Failure(ex) => CamelMessage("", Map("Url" -> host, "RequestTime" -> t, "Server" -> ex.getMessage))
}
}
}
class HttpConsumer(hosts: List[String]) extends Consumer {
def endpointUri = "jetty://http://localhost:9090/"
import scala.concurrent.Future
import akka.pattern.ask
val actors = hosts.map(host => context.actorOf(Props(new HttpProducer(host))))
implicit val dispatcher = context.system.dispatcher
implicit val timeout = util.Timeout(5 seconds)
def receive = {
case msg: CamelMessage => {
val props = Map("CamelHttpMethod" -> "HEAD")
val futures = actors.map(a => a.ask(CamelMessage("", props)).mapTo[CamelMessage])
val r = Future.sequence(futures)
val originalSender = sender
r onSuccess { case msg =>
val headers = msg.map(_.headers(Set("Server", "Url", "RequestTime")))
originalSender ! CamelMessage(new JSONArray( headers.map(new JSONObject(_))), Map("ContentType" -> "application/json"))
}
}
}
}
object Main extends App {
val system = ActorSystem("some-system")
val hosts = List(
"www.aftonbladet.se",
"www.expressen.se",
"www.svd.se",
"www.gp.se",
"www.dn.se",
"www.sydsvenskan.se",
"www.corren.se",
"www.barometern.se",
"www.bt.se",
"eposten.se",
"www.helagotland.se",
"www.gotland.net",
"hallandsposten.se",
"www.jonkopingsposten.se"
)
val consumer = system.actorOf(Props(new HttpConsumer(hosts)))
println("Finished")
}
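
// A hypothetical smoke test once the app is running (the consumer binds
// jetty://http://localhost:9090/ above; the output shape follows transformResponse):
//   curl -s http://localhost:9090/
//   => JSON array with one {"Server", "Url", "RequestTime"} object per probed host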
|
parwen68/camel-akka-example
|
src/main/scala/se/callista/akka/camel/Main.scala
|
Scala
|
apache-2.0
| 2,445
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.network.yarn
import scala.collection.JavaConverters._
import scala.collection.mutable
import org.apache.hadoop.metrics2.{MetricsInfo, MetricsRecordBuilder}
import org.mockito.ArgumentMatchers.{any, anyDouble, anyInt, anyLong}
import org.mockito.Mockito.{mock, verify, when}
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkFunSuite
import org.apache.spark.network.server.OneForOneStreamManager
import org.apache.spark.network.shuffle.{ExternalBlockHandler, ExternalShuffleBlockResolver}
class YarnShuffleServiceMetricsSuite extends SparkFunSuite with Matchers {
val streamManager = mock(classOf[OneForOneStreamManager])
val blockResolver = mock(classOf[ExternalShuffleBlockResolver])
when(blockResolver.getRegisteredExecutorsSize).thenReturn(42)
val metrics = new ExternalBlockHandler(streamManager, blockResolver).getAllMetrics
test("metrics named as expected") {
val allMetrics = Seq(
"openBlockRequestLatencyMillis", "registerExecutorRequestLatencyMillis",
"blockTransferRate", "blockTransferMessageRate", "blockTransferAvgSize_1min",
"blockTransferRateBytes", "registeredExecutorsSize", "numActiveConnections",
"numCaughtExceptions", "finalizeShuffleMergeLatencyMillis",
"fetchMergedBlocksMetaLatencyMillis")
// Use sorted Seq instead of Set for easier comparison when there is a mismatch
metrics.getMetrics.keySet().asScala.toSeq.sorted should be (allMetrics.sorted)
}
// these metrics will generate more metrics on the collector
for (testname <- Seq("openBlockRequestLatencyMillis",
"registerExecutorRequestLatencyMillis",
"blockTransferRateBytes", "blockTransferRate", "blockTransferMessageRate")) {
test(s"$testname - collector receives correct types") {
val builder = mock(classOf[MetricsRecordBuilder])
val counterNames = mutable.Buffer[String]()
when(builder.addCounter(any(), anyLong())).thenAnswer(iom => {
counterNames += iom.getArgument[MetricsInfo](0).name()
builder
})
val gaugeLongNames = mutable.Buffer[String]()
when(builder.addGauge(any(), anyLong())).thenAnswer(iom => {
gaugeLongNames += iom.getArgument[MetricsInfo](0).name()
builder
})
val gaugeDoubleNames = mutable.Buffer[String]()
when(builder.addGauge(any(), anyDouble())).thenAnswer(iom => {
gaugeDoubleNames += iom.getArgument[MetricsInfo](0).name()
builder
})
YarnShuffleServiceMetrics.collectMetric(builder, testname,
metrics.getMetrics.get(testname))
assert(counterNames === Seq(s"${testname}_count"))
val (expectLong, expectDouble) =
if (testname.matches("blockTransfer(Message)?Rate(Bytes)?$")) {
// blockTransfer(Message)?Rate(Bytes)? metrics are Meter so just have rate information
(Seq(), Seq("1", "5", "15", "Mean").map(suffix => s"${testname}_rate$suffix"))
} else {
// other metrics are Timer so have rate and timing information
(
Seq(s"${testname}_nanos_max", s"${testname}_nanos_min"),
Seq("rate1", "rate5", "rate15", "rateMean", "nanos_mean", "nanos_stdDev",
"nanos_1stPercentile", "nanos_5thPercentile", "nanos_25thPercentile",
"nanos_50thPercentile", "nanos_75thPercentile", "nanos_95thPercentile",
"nanos_98thPercentile", "nanos_99thPercentile", "nanos_999thPercentile")
.map(suffix => s"${testname}_$suffix")
)
}
assert(gaugeLongNames.sorted === expectLong.sorted)
assert(gaugeDoubleNames.sorted === expectDouble.sorted)
}
}
// this metric writes only one gauge to the collector
test("registeredExecutorsSize - collector receives correct types") {
val builder = mock(classOf[MetricsRecordBuilder])
YarnShuffleServiceMetrics.collectMetric(builder, "registeredExecutorsSize",
metrics.getMetrics.get("registeredExecutorsSize"))
// only one
verify(builder).addGauge(any(), anyInt())
}
}
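
// Summary of the mapping exercised above (derived from the assertions): Timer
// metrics emit a `<name>_count` counter, `<name>_nanos_max`/`_nanos_min` long
// gauges, and rate/percentile double gauges, while Meter metrics emit a
// `<name>_count` counter plus `<name>_rate{1,5,15,Mean}` double gauges.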
|
wangmiao1981/spark
|
resource-managers/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceMetricsSuite.scala
|
Scala
|
apache-2.0
| 4,916
|
/*
* Copyright 2007-2010 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package webapptest {
package snippet {
import _root_.scala.xml.{NodeSeq, Text}
import _root_.net.liftweb.http._
import _root_.net.liftweb.common._
import _root_.net.liftweb.util._
import _root_.net.liftweb.widgets.autocomplete._
object posted extends RequestVar[Box[String]](Empty)
class AutoCompleteDemo {
def render(xhtml: NodeSeq): NodeSeq = {
AutoComplete("", (current, limit) => {
println("current = " + current)
(1 to limit).map(v => "Value_" + v)
}, s => posted(Full(s))) ++ (posted.map(t => <p>{"Submitted " + t}</p>) openOr Text(""))
}
}
}
}
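
// Hypothetical template binding for this snippet (standard Lift convention of the
// era; the tag name mirrors the class and method above):
//   <lift:AutoCompleteDemo.render/>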
|
jeppenejsum/liftweb
|
framework/lift-modules/lift-widgets/src/test/scala/webapptest/snippet/AutoCompleteDemo.scala
|
Scala
|
apache-2.0
| 1,199
|
/*
* Shadowsocks - A shadowsocks client for Android
* Copyright (C) 2014 <max.c.lv@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*
 *                            ___====-_  _-====___
 *                      _--^^^#####//      \\#####^^^--_
 *                   _-^##########// (    ) \\##########^-_
 *                  -############//  |\^^/|  \\############-
 *                _/############//   (@::@)   \\############\_
 *               /#############((     \\//     ))#############\
 *              -###############\\    (oo)    //###############-
 *             -#################\\  / VV \  //#################-
 *            -###################\\/      \//###################-
 *           _#/|##########/\######(   /\   )######/\##########|\#_
 *           |/ |#/\#/\#/\/  \#/\##\  |  |  /##/\#/  \/\#/\#/\#| \|
 *           `  |/  V  V  `   V  \#\| |  | |/#/  V   '  V  V  \|  '
 *              `   `  `      `   / | |  | | \   '      '  '   '
 *                               (  | |  | |  )
 *                              __\ | |  | | /__
 *                             (vvv(VVV)(VVV)vvv)
*
* HERE BE DRAGONS
*
*/
package com.github.shadowsocks
import java.util.{Timer, TimerTask}
import android.app.Service
import android.content.Context
import android.os.{Handler, RemoteCallbackList}
import com.github.shadowsocks.aidl.{Config, IShadowsocksService, IShadowsocksServiceCallback}
import com.github.shadowsocks.utils.{State, TrafficMonitor, TrafficMonitorThread}
trait BaseService extends Service {
@volatile private var state = State.INIT
@volatile private var callbackCount = 0
var timer: Timer = null
var trafficMonitorThread: TrafficMonitorThread = null
var config: Config = null
final val callbacks = new RemoteCallbackList[IShadowsocksServiceCallback]
val binder = new IShadowsocksService.Stub {
override def getMode: Int = {
getServiceMode
}
override def getState: Int = {
state
}
override def unregisterCallback(cb: IShadowsocksServiceCallback) {
if (cb != null ) {
callbacks.unregister(cb)
callbackCount -= 1
}
if (callbackCount == 0 && timer != null) {
timer.cancel()
timer = null
}
if (callbackCount == 0 && state != State.CONNECTING && state != State.CONNECTED) {
stopBackgroundService()
}
}
override def registerCallback(cb: IShadowsocksServiceCallback) {
if (cb != null) {
if (callbackCount == 0 && timer == null) {
val task = new TimerTask {
def run {
TrafficMonitor.updateRate()
updateTrafficTotal(TrafficMonitor.getDeltaTx, TrafficMonitor.getDeltaRx)
updateTrafficRate(TrafficMonitor.getTxRate, TrafficMonitor.getRxRate,
TrafficMonitor.getTxTotal, TrafficMonitor.getRxTotal)
}
}
timer = new Timer(true)
timer.schedule(task, 1000, 1000)
}
callbacks.register(cb)
callbackCount += 1
}
}
override def stop() {
if (state != State.CONNECTING && state != State.STOPPING) {
stopRunner()
}
}
override def start(config: Config) {
if (state != State.CONNECTING && state != State.STOPPING) {
startRunner(config)
}
}
}
def startRunner(config: Config) {
this.config = config
TrafficMonitor.reset()
trafficMonitorThread = new TrafficMonitorThread()
trafficMonitorThread.start()
}
def stopRunner() {
// Make sure to update the total traffic when stopping the runner
updateTrafficTotal(TrafficMonitor.getDeltaTx, TrafficMonitor.getDeltaRx)
TrafficMonitor.reset()
if (trafficMonitorThread != null) {
trafficMonitorThread.stopThread()
trafficMonitorThread = null
}
}
def updateTrafficTotal(tx: Long, rx: Long) {
val handler = new Handler(getContext.getMainLooper)
handler.post(() => {
if (config != null) {
ShadowsocksApplication.profileManager.getProfile(config.profileId) match {
case Some(profile) =>
profile.tx += tx
profile.rx += rx
ShadowsocksApplication.profileManager.updateProfile(profile)
case None => // Ignore
}
}
})
}
def stopBackgroundService()
def getServiceMode: Int
def getTag: String
def getContext: Context
def getCallbackCount: Int = {
callbackCount
}
def getState: Int = {
state
}
def changeState(s: Int) {
changeState(s, null)
}
def updateTrafficRate(txRate: String, rxRate: String, txTotal: String, rxTotal: String) {
val handler = new Handler(getContext.getMainLooper)
handler.post(() => {
if (callbackCount > 0) {
val n = callbacks.beginBroadcast()
for (i <- 0 until n) {
try {
callbacks.getBroadcastItem(i).trafficUpdated(txRate, rxRate, txTotal, rxTotal)
} catch {
case _: Exception => // Ignore
}
}
callbacks.finishBroadcast()
}
})
}
protected def changeState(s: Int, msg: String) {
val handler = new Handler(getContext.getMainLooper)
handler.post(() => if (state != s) {
if (callbackCount > 0) {
val n = callbacks.beginBroadcast()
for (i <- 0 until n) {
try {
callbacks.getBroadcastItem(i).stateChanged(s, msg)
} catch {
case _: Exception => // Ignore
}
}
callbacks.finishBroadcast()
}
state = s
})
}
}
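
// A minimal, hypothetical concrete subclass (not part of the original file; real
// implementations are the project's VPN/NAT services, and the mode id is illustrative):
class DemoService extends BaseService {
  def stopBackgroundService() { stopSelf() }
  def getServiceMode: Int = 0 // illustrative mode id
  def getTag: String = "DemoService"
  def getContext: Context = getBaseContext
  override def onBind(intent: android.content.Intent) = binder
}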
|
a642500/shadowsocks-android
|
src/main/scala/com/github/shadowsocks/BaseService.scala
|
Scala
|
gpl-3.0
| 6,190
|
package collins.models
import org.squeryl.PrimitiveTypeMode._
import org.squeryl.Schema
import play.api.libs.json.Format
import play.api.libs.json.JsObject
import play.api.libs.json.JsSuccess
import play.api.libs.json.JsValue
import play.api.libs.json.Json
import collins.models.cache.Cache
import collins.models.shared.AnormAdapter
import collins.models.shared.ValidatedEntity
case class AssetType(name: String, label: String, id: Int = 0) extends ValidatedEntity[Int] {
override def validate() {
require(name != null && name.length > 0, "Name must not be empty")
}
override def asJson: String =
Json.stringify(AssetType.AssetTypeFormat.writes(this))
// We do this to mock the former Enum stuff
override def toString(): String = name
}
object AssetType extends Schema with AnormAdapter[AssetType] with AssetTypeKeys {
override val tableDef = table[AssetType]("asset_type")
val reservedNames = List("SERVER_NODE","SERVER_CHASSIS","RACK","SWITCH","ROUTER","POWER_CIRCUIT","POWER_STRIP","DATA_CENTER","CONFIGURATION")
on(tableDef)(a => declare(
a.id is(autoIncremented,primaryKey),
a.name is(unique)
))
implicit object AssetTypeFormat extends Format[AssetType] {
override def reads(json: JsValue) = JsSuccess(AssetType(
      (json \ "NAME").as[String],
      (json \ "LABEL").as[String],
      (json \ "ID").asOpt[Int].getOrElse(0)
))
override def writes(at: AssetType) = JsObject(Seq(
"ID" -> Json.toJson(at.id),
"NAME" -> Json.toJson(at.name),
"LABEL" -> Json.toJson(at.label)
))
}
def findById(id: Int): Option[AssetType] = Cache.get(findByIdKey(id), inTransaction {
tableDef.lookup(id)
})
override def get(a: AssetType) = findById(a.id).get
def find(): List[AssetType] = Cache.get(findKey, inTransaction {
from(tableDef)(at => select(at)).toList
})
def findByName(name: String): Option[AssetType] = Cache.get(findByNameKey(name), inTransaction {
tableDef.where(a =>
a.name.toLowerCase === name.toLowerCase
).headOption
})
override def delete(a: AssetType): Int = inTransaction {
afterDeleteCallback(a) {
tableDef.deleteWhere(p => p.id === a.id)
}
}
def typeNames: Set[String] = find().map(_.name).toSet
def isServerNode(at: AssetType): Boolean = ServerNode.map(_.id).filter(_.equals(at.id)).isDefined
def ServerNode = findByName("SERVER_NODE")
def Configuration = findByName("CONFIGURATION")
def isSystemType(atype: AssetType) = reservedNames.contains(atype.name.toUpperCase)
}
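
// Hypothetical usage sketch (assumes a configured Squeryl session and seeded data;
// the calls are the methods defined above):
//   AssetType.findByName("SERVER_NODE")               // Some(assetType), served via Cache
//   AssetType.typeNames                               // Set of all known type names
//   AssetType.isSystemType(AssetType("RACK", "Rack")) // true: "RACK" is a reserved name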
|
box/collins
|
app/collins/models/AssetType.scala
|
Scala
|
apache-2.0
| 2,533
|
package com.mblinn.oo.tinyweb.stepfour
import com.mblinn.oo.tinyweb.RenderingException
trait View {
def render(model: Map[String, List[String]]): String
}
class FunctionView(viewRenderer: (Map[String, List[String]]) => String) extends View {
def render(model: Map[String, List[String]]) =
try
viewRenderer(model)
catch {
case e: Exception => throw new RenderingException(e)
}
}
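
// A minimal, hypothetical usage sketch of the function-backed view above:
object FunctionViewDemo extends App {
  val greetingView = new FunctionView(model =>
    "Hello, " + model.getOrElse("names", Nil).mkString(" and "))
  println(greetingView.render(Map("names" -> List("Alice", "Bob")))) // Hello, Alice and Bob
}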
|
pharmpress/codingdojo
|
dojo-tinyweb/src/main/scala/com/mblinn/oo/tinyweb/stepfour/View.scala
|
Scala
|
apache-2.0
| 406
|
/*
* Copyright 2015 Heiko Seeberger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.heikoseeberger.akkasse
import akka.stream.scaladsl.Source
import akka.util.ByteString
import scala.concurrent.Await
import scala.concurrent.duration.DurationInt
class ServerSentEventParserSpec extends BaseSpec {
"A ServerSentEventParser" should {
"parse ServerSentEvents correctly" in {
val input = """|data: message 1 line 1
|data:message 1 line 2
|
|data: message 2
|:This is a comment and must be ignored
|event: Only the last event should be considered
|event: message 2 event
|id: Only the last id should be considered
|id: 42
|retry: 512
|
|data:
|
|event: message 4 event
|id:
|
|data
|id
|
|data: incomplete message
|""".stripMargin
val chunkSize = input.length / 5
val events = Source(input.sliding(chunkSize, chunkSize).map(ByteString(_)).toList)
.transform(() => new LineParser(1048576))
.transform(() => new ServerSentEventParser(1048576))
.runFold(Vector.empty[ServerSentEvent])(_ :+ _)
Await.result(events, 1 second) shouldBe Vector(
ServerSentEvent("message 1 line 1\\nmessage 1 line 2"),
ServerSentEvent("message 2", "message 2 event", "42", 512),
ServerSentEvent.heartbeat,
ServerSentEvent("", Some("message 4 event"), ServerSentEvent.emptyId),
ServerSentEvent("", None, ServerSentEvent.emptyId)
)
}
"handle all sorts of EOL delimiters" in {
val input = "data: line1\\ndata: line2\\rdata: line3\\r\\n\\n"
val events = Source.single(ByteString(input))
.transform(() => new LineParser(1048576))
.transform(() => new ServerSentEventParser(1048576))
.runFold(Vector.empty[ServerSentEvent])(_ :+ _)
Await.result(events, 1 second) shouldBe Vector(ServerSentEvent("line1\nline2\nline3"))
}
"ignore unparsable retry fields" in {
val input = "data: stuff\\nretry: ten\\n\\n"
val events = Source.single(ByteString(input))
.transform(() => new LineParser(1048576))
.transform(() => new ServerSentEventParser(1048576))
.runFold(Vector.empty[ServerSentEvent])(_ :+ _)
Await.result(events, 1 second) shouldBe Vector(ServerSentEvent("stuff", retry = None))
}
"work for issue 36" in {
val input = "data: stuff\\r\\ndata: more\\r\\ndata: extra\\n\\n"
val events = Source.single(ByteString(input))
.transform(() => new LineParser(1048576))
.transform(() => new ServerSentEventParser(1048576))
.runFold(Vector.empty[ServerSentEvent])(_ :+ _)
Await.result(events, 1 second) shouldBe Vector(ServerSentEvent("stuff\nmore\nextra"))
}
}
}
|
jasonchaffee/akka-sse
|
akka-sse/src/test/scala/de/heikoseeberger/akkasse/ServerSentEventParserSpec.scala
|
Scala
|
apache-2.0
| 3,610
|
package org.zouzias.spray.models.responses
import spray.json.DefaultJsonProtocol
/**
* Update Response model
*/
case class UpdateResponse(name : String, status : String, message: String)
object UpdateResponseProtocol extends DefaultJsonProtocol {
implicit val updateFormat = jsonFormat3(UpdateResponse)
}
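
// A minimal, hypothetical serialization round-trip (standard spray-json usage;
// field order in the printed object may vary):
object UpdateResponseDemo extends App {
  import spray.json._
  import UpdateResponseProtocol._
  val json = UpdateResponse("users", "ok", "index rebuilt").toJson
  println(json.compactPrint)              // {"name":"users","status":"ok","message":"index rebuilt"}
  println(json.convertTo[UpdateResponse]) // UpdateResponse(users,ok,index rebuilt)
}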
|
zouzias/spray-swagger-template
|
src/main/scala/org/zouzias/spray/models/responses/UpdateResponse.scala
|
Scala
|
apache-2.0
| 312
|
package org.wquery.query.exprs
import org.wquery.lang._
import org.wquery.lang.exprs._
import org.wquery.lang.operations._
import org.wquery.model._
import org.wquery.query._
import org.wquery.query.operations._
case class EmissionExpr(expr: EvaluableExpr) extends EvaluableExpr {
def evaluationPlan(wordNet: WordNet#Schema, bindings: BindingsSchema, context: Context) = EmitOp(expr.evaluationPlan(wordNet, bindings, context))
}
case class IteratorExpr(bindingExpr: EvaluableExpr, iteratedExpr: EvaluableExpr) extends EvaluableExpr {
def evaluationPlan(wordNet: WordNet#Schema, bindings: BindingsSchema, context: Context) = {
val bindingsOp = bindingExpr.evaluationPlan(wordNet, bindings, context)
IterateOp(bindingsOp, iteratedExpr.evaluationPlan(wordNet, bindings union bindingsOp.bindingsPattern, context))
}
}
case class IfElseExpr(conditionExpr: EvaluableExpr, ifExpr: EvaluableExpr, elseExpr: Option[EvaluableExpr]) extends EvaluableExpr {
def evaluationPlan(wordNet: WordNet#Schema, bindings: BindingsSchema, context: Context) = IfElseOp(conditionExpr.evaluationPlan(wordNet, bindings, context),
ifExpr.evaluationPlan(wordNet, bindings, context), elseExpr.map(_.evaluationPlan(wordNet, bindings, context)))
}
case class BlockExpr(exprs: List[EvaluableExpr]) extends EvaluableExpr {
def evaluationPlan(wordNet: WordNet#Schema, bindings: BindingsSchema, context: Context) = {
val blockBindings = BindingsSchema(bindings, true)
BlockOp(exprs.map(expr => expr.evaluationPlan(wordNet, blockBindings, context)))
}
}
case class WhileDoExpr(conditionExpr: EvaluableExpr, iteratedExpr: EvaluableExpr) extends EvaluableExpr {
def evaluationPlan(wordNet: WordNet#Schema, bindings: BindingsSchema, context: Context)
= WhileDoOp(conditionExpr.evaluationPlan(wordNet, bindings, context), iteratedExpr.evaluationPlan(wordNet, bindings, context))
}
case class FunctionDefinitionExpr(name: String, expr: EvaluableExpr) extends EvaluableExpr {
def evaluationPlan(wordNet: WordNet#Schema, bindings: BindingsSchema, context: Context) = {
bindings.bindSetVariableType(SetVariable.FunctionArgumentsVariable, FunctionDefinitionArgumentsRefOp(), 0, true)
FunctionDefinitionOp(name, expr.evaluationPlan(wordNet, bindings, context))
}
}
case class VariableAssignmentExpr(variables: List[SetVariable], expr: EvaluableExpr) extends EvaluableExpr {
def evaluationPlan(wordNet: WordNet#Schema, bindings: BindingsSchema, context: Context) = {
val op = expr.evaluationPlan(wordNet, bindings, context)
bindings.bindSetVariableType(variables.last.name, op, variables.length - 1, true)
for (pos <- 0 until variables.size - 1) {
bindings.bindSetVariableType(variables(pos).name, op, pos, false)
}
AssignmentOp(variables, op)
}
}
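
// Hypothetical composition sketch (the schema, bindings, and context values, and
// the cond/thenE/elseE expressions, are assumed to exist; expressions only build
// operation plans here, they do not execute them):
//   val plan = IfElseExpr(cond, thenE, Some(elseE))
//     .evaluationPlan(schema, bindings, context)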
|
marekkubis/wquery
|
src/main/scala/org/wquery/query/queryExprs.scala
|
Scala
|
bsd-3-clause
| 2,793
|
/*
* Copyright (C) 2016 Christopher Batey and Dogan Narinc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scassandra.server.priming.routes
import akka.actor.ActorRef
import akka.testkit.TestActor.{AutoPilot, KeepRunning}
import akka.testkit.TestProbe
import akka.util.Timeout
import com.typesafe.scalalogging.LazyLogging
import org.scalatest.{FunSpec, Matchers}
import org.scassandra.server.actors._
import org.scassandra.server.priming.json.PrimingJsonImplicits
import spray.testkit.ScalatestRouteTest
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
import scala.language.postfixOps
class CurrentRouteTest extends FunSpec with ScalatestRouteTest with CurrentRoute with Matchers with LazyLogging {
override implicit def actorRefFactory = system
import PrimingJsonImplicits._
val _1connections = ClientConnection("127.0.0.1", 9042) :: ClientConnection("127.0.0.1", 9043) :: Nil
val _2connections = ClientConnection("127.0.0.2", 9044) :: Nil
val allConnections = _1connections ++ _2connections
val all = (c: ClientConnection) => true
val filterHost = (hOpt: Option[String]) => hOpt.map((h: String) => (c: ClientConnection) => c.host == h).getOrElse(all)
val filterPort = (pOpt: Option[Int]) => pOpt.map((p: Int) => (c: ClientConnection) => c.port == p).getOrElse(all)
val filter = (hOpt: Option[String], pOpt: Option[Int]) => allConnections.filter(filterHost(hOpt)).filter(filterPort(pOpt))
lazy val serverActor = TestProbe()
serverActor.setAutoPilot(new AutoPilot {
override def run(sender: ActorRef, msg: Any): AutoPilot = msg match {
case GetClientConnections(host, port) =>
sender ! ClientConnections(filter(host, port))
KeepRunning
case c: SendCommandToClient =>
sender ! ClosedConnections(filter(c.host, c.port), c.description)
KeepRunning
case AcceptNewConnections =>
sender ! AcceptNewConnectionsEnabled(false)
KeepRunning
case RejectNewConnections(x) =>
sender ! RejectNewConnectionsEnabled(true)
KeepRunning
}
})
override implicit val tcpServer: ActorRef = serverActor.ref
override implicit val timeout = Timeout(5 seconds)
override implicit val dispatcher = ExecutionContext.Implicits.global
it("Should get all client connections") {
Get("/current/connections") ~> currentRoute ~> check {
val response = responseAs[ClientConnections]
response.connections should equal (allConnections)
}
}
it("Should get all client connections by ip") {
Get("/current/connections/127.0.0.1") ~> currentRoute ~> check {
val response = responseAs[ClientConnections]
response.connections should equal (_1connections)
}
}
it("Should get connection by ip and port") {
Get("/current/connections/127.0.0.1/9042") ~> currentRoute ~> check {
val response = responseAs[ClientConnections]
response.connections should equal (_1connections.filter(_.port == 9042))
}
}
it("Should close all client connections") {
Delete("/current/connections") ~> currentRoute ~> check {
val response = responseAs[ClosedConnections]
response.connections should equal (allConnections)
response.operation should equal ("close")
}
}
it("Should halfclose all client connections") {
Delete("/current/connections?type=halfclose") ~> currentRoute ~> check {
val response = responseAs[ClosedConnections]
response.connections should equal (allConnections)
response.operation should equal ("halfclose")
}
}
it("Should reset all client connections") {
Delete("/current/connections?type=reset") ~> currentRoute ~> check {
val response = responseAs[ClosedConnections]
response.connections should equal (allConnections)
response.operation should equal ("reset")
}
}
it("Should close client connections by ip") {
Delete("/current/connections/127.0.0.1") ~> currentRoute ~> check {
val response = responseAs[ClosedConnections]
response.connections should equal (_1connections)
response.operation should equal ("close")
}
}
it("Should close client connection by ip and port") {
Delete("/current/connections/127.0.0.1/9042") ~> currentRoute ~> check {
val response = responseAs[ClosedConnections]
response.connections should equal (_1connections.filter(_.port == 9042))
response.operation should equal ("close")
}
}
it("Should enable listening.") {
Put("/current/listener") ~> currentRoute ~> check {
val response = responseAs[AcceptNewConnectionsEnabled]
response.changed should equal(false)
}
}
it("Should disable listening.") {
Delete("/current/listener") ~> currentRoute ~> check {
val response = responseAs[RejectNewConnectionsEnabled]
response.changed should equal(true)
}
}
}
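
// Summary of the admin endpoints exercised above (paths from the requests):
//   GET    /current/connections[/host[/port]]  -> list matching client connections
//   DELETE /current/connections?type=...       -> close/halfclose/reset connections
//   PUT    /current/listener                   -> accept new connections again
//   DELETE /current/listener                   -> reject new connections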
|
mikefero/cpp-driver
|
gtests/src/integration/scassandra/server/server/src/test/scala/org/scassandra/server/priming/routes/CurrentRouteTest.scala
|
Scala
|
apache-2.0
| 5,392
|
package dotty.tools.dotc
package core
import Periods._, Contexts._, Symbols._, Denotations._, Names._, NameOps._, Annotations._
import Types._, Flags._, Decorators._, Transformers._, StdNames._, Scopes._
import NameOps._
import Scopes.Scope
import collection.mutable
import collection.immutable.BitSet
import scala.reflect.io.AbstractFile
import Decorators.SymbolIteratorDecorator
import annotation.tailrec
import util.SimpleMap
import util.Stats
import config.Config
import config.Printers._
trait SymDenotations { this: Context =>
import SymDenotations._
/** Factory method for SymDenotation creation. All creations
 *  should be done via this method.
 */
def SymDenotation(
symbol: Symbol,
owner: Symbol,
name: Name,
initFlags: FlagSet,
initInfo: Type,
initPrivateWithin: Symbol = NoSymbol)(implicit ctx: Context): SymDenotation = {
val result =
if (symbol.isClass)
if (initFlags is Package) new PackageClassDenotation(symbol, owner, name, initFlags, initInfo, initPrivateWithin, ctx.runId)
else new ClassDenotation(symbol, owner, name, initFlags, initInfo, initPrivateWithin, ctx.runId)
else new SymDenotation(symbol, owner, name, initFlags, initInfo, initPrivateWithin)
result.validFor = stablePeriod
result
}
def stillValid(denot: SymDenotation): Boolean =
if (denot is ValidForever) true
else try {
val owner = denot.owner.denot
def isSelfSym = owner.infoOrCompleter match {
case ClassInfo(_, _, _, _, selfInfo) => selfInfo == denot.symbol
case _ => false
}
stillValid(owner) && owner.isClass && (
(owner.decls.lookupAll(denot.name) contains denot.symbol)
|| isSelfSym
)
} catch {
case ex: StaleSymbol => false
}
}
object SymDenotations {
/** A sym-denotation represents the contents of a definition
* during a period.
*/
class SymDenotation private[SymDenotations] (
final val symbol: Symbol,
ownerIfExists: Symbol,
final val name: Name,
initFlags: FlagSet,
initInfo: Type,
initPrivateWithin: Symbol = NoSymbol) extends SingleDenotation {
//assert(symbol.id != 4940, name)
override def hasUniqueSym: Boolean = exists
// ------ Getting and setting fields -----------------------------
private[this] var myFlags: FlagSet = adaptFlags(initFlags)
private[this] var myInfo: Type = initInfo
private[this] var myPrivateWithin: Symbol = initPrivateWithin
private[this] var myAnnotations: List[Annotation] = Nil
/** The owner of the symbol; overridden in NoDenotation */
def owner: Symbol = ownerIfExists
/** The flag set */
final def flags(implicit ctx: Context): FlagSet = { ensureCompleted(); myFlags }
/** The flag set without forcing symbol completion.
* Should be used only for printing.
*/
private[dotc] final def flagsUNSAFE = myFlags
/** Adapt flag set to this denotation's term or type nature */
private def adaptFlags(flags: FlagSet) = if (isType) flags.toTypeFlags else flags.toTermFlags
/** Update the flag set */
private final def flags_=(flags: FlagSet): Unit =
myFlags = adaptFlags(flags)
/** Set given flag(s) of this denotation */
final def setFlag(flags: FlagSet): Unit = { myFlags |= flags }
/** Unset given flag(s) of this denotation */
final def resetFlag(flags: FlagSet): Unit = { myFlags &~= flags }
/** Has this denotation one of the flags in `fs` set? */
final def is(fs: FlagSet)(implicit ctx: Context) = {
(if (fs <= FromStartFlags) myFlags else flags) is fs
}
/** Has this denotation one of the flags in `fs` set, whereas none of the flags
* in `butNot` are set?
*/
final def is(fs: FlagSet, butNot: FlagSet)(implicit ctx: Context) =
(if (fs <= FromStartFlags && butNot <= FromStartFlags) myFlags else flags) is (fs, butNot)
/** Has this denotation all of the flags in `fs` set? */
final def is(fs: FlagConjunction)(implicit ctx: Context) =
(if (fs <= FromStartFlags) myFlags else flags) is fs
/** Has this denotation all of the flags in `fs` set, whereas none of the flags
* in `butNot` are set?
*/
final def is(fs: FlagConjunction, butNot: FlagSet)(implicit ctx: Context) =
(if (fs <= FromStartFlags && butNot <= FromStartFlags) myFlags else flags) is (fs, butNot)
/** The type info.
* The info is an instance of TypeType iff this is a type denotation
* Uncompleted denotations set myInfo to a LazyType.
*/
final def info(implicit ctx: Context): Type = myInfo match {
case myInfo: LazyType => completeFrom(myInfo); info
case _ => myInfo
}
/** The type info, or, if symbol is not yet completed, the completer */
final def infoOrCompleter = myInfo
/** Optionally, the info if it is completed */
final def unforcedInfo: Option[Type] = myInfo match {
case myInfo: LazyType => None
case _ => Some(myInfo)
}
private def completeFrom(completer: LazyType)(implicit ctx: Context): Unit = {
if (myFlags is Touched) throw new CyclicReference(this)
myFlags |= Touched
completions.println(s"completing ${this.debugString}")
completer.complete(this)
completions.println(s"completed ${this.debugString}")
}
protected[dotc] final def info_=(tp: Type) = {
/*
def illegal: String = s"illegal type for $this: $tp"
if (this is Module) // make sure module invariants that allow moduleClass and sourceModule to work are kept.
tp match {
case tp: ClassInfo => assert(tp.selfInfo.isInstanceOf[TermRefBySym], illegal)
case tp: NamedType => assert(tp.isInstanceOf[TypeRefBySym], illegal)
case tp: ExprType => assert(tp.resultType.isInstanceOf[TypeRefBySym], illegal)
case _ =>
}
*/
myInfo = tp
}
/** The name, except if this is a module class, strip the module class suffix */
def effectiveName(implicit ctx: Context) =
if (this is ModuleClass) name.stripModuleClassSuffix else name
/** The privateWithin boundary, NoSymbol if no boundary is given.
*/
final def privateWithin(implicit ctx: Context): Symbol = { ensureCompleted(); myPrivateWithin }
/** Set privateWithin. */
protected[core] final def privateWithin_=(sym: Symbol): Unit =
myPrivateWithin = sym
/** The annotations of this denotation */
final def annotations(implicit ctx: Context): List[Annotation] = {
ensureCompleted(); myAnnotations
}
/** Update the annotations of this denotation */
private[core] final def annotations_=(annots: List[Annotation]): Unit =
myAnnotations = annots
/** Does this denotation have an annotation matching the given class symbol? */
final def hasAnnotation(cls: Symbol)(implicit ctx: Context) =
dropOtherAnnotations(annotations, cls).nonEmpty
/** Add given annotation to the annotations of this denotation */
final def addAnnotation(annot: Annotation): Unit =
annotations = annot :: myAnnotations
@tailrec
private def dropOtherAnnotations(anns: List[Annotation], cls: Symbol)(implicit ctx: Context): List[Annotation] = anns match {
case ann :: rest => if (ann matches cls) anns else dropOtherAnnotations(rest, cls)
case Nil => Nil
}
/** The denotation is completed: info is not a lazy type and attributes have defined values */
final def isCompleted: Boolean = !myInfo.isInstanceOf[LazyType]
/** The denotation is in train of being completed */
final def isCompleting: Boolean = (myFlags is Touched) && !isCompleted
/** The completer of this denotation. @pre: Denotation is not yet completed */
final def completer: LazyType = myInfo.asInstanceOf[LazyType]
/** Make sure this denotation is completed */
final def ensureCompleted()(implicit ctx: Context): Unit = info
/** The symbols defined in this class or object.
*/
final def decls(implicit ctx: Context): Scope = myInfo match {
case cinfo: LazyType =>
val knownDecls = cinfo.decls
if (knownDecls ne EmptyScope) knownDecls
else { completeFrom(cinfo); decls } // complete-once
case _ => info.decls
}
/** If this is a package class, the symbols entered in it
 *  before it is completed. (This is needed to eagerly enter synthetic
 *  aliases such as AnyRef into a package class without forcing it.)
 *  Right now, I believe the only usage is for the AnyRef alias
 *  in Definitions.
 */
final def preDecls(implicit ctx: Context): MutableScope = myInfo match {
case pinfo: SymbolLoaders#PackageLoader => pinfo.preDecls
case _ => decls.asInstanceOf[MutableScope]
}
// ------ Names ----------------------------------------------
/** The name with which the denoting symbol was created */
final def originalName(implicit ctx: Context) = {
val d = initial.asSymDenotation
if (d is ExpandedName) d.name.unexpandedName() else d.name // !!!DEBUG, was: effectiveName
}
/** The encoded full path name of this denotation, where outer names and inner names
* are separated by `separator` characters.
* Never translates expansions of operators back to operator symbol.
* Drops package objects. Represents terms in the owner chain by a simple `separator`.
*/
def fullNameSeparated(separator: Char)(implicit ctx: Context): Name =
if (symbol == NoSymbol || owner == NoSymbol || owner.isEffectiveRoot) name
else {
var owner = this
var sep = ""
do {
owner = owner.owner
sep += separator
} while (!owner.isClass)
val fn = owner.skipPackageObject.fullNameSeparated(separator) ++ sep ++ name
if (isType) fn.toTypeName else fn.toTermName
}
/** `fullName` where `.` is the separator character */
def fullName(implicit ctx: Context): Name = fullNameSeparated('.')
// ----- Tests -------------------------------------------------
/** Is this denotation a type? */
override def isType: Boolean = name.isTypeName
/** Is this denotation a class? */
final def isClass: Boolean = isInstanceOf[ClassDenotation]
/** Cast to class denotation */
final def asClass: ClassDenotation = asInstanceOf[ClassDenotation]
/** is this symbol the result of an erroneous definition? */
def isError: Boolean = false
/** Make denotation not exist */
final def markAbsent(): Unit =
myInfo = NoType
/** Is symbol known to not exist? */
final def isAbsent(implicit ctx: Context): Boolean =
myInfo == NoType ||
(this is (ModuleVal, butNot = Package)) && moduleClass.isAbsent
/** Is this symbol the root class or its companion object? */
final def isRoot: Boolean =
(name.toTermName == nme.ROOT) && (owner eq NoSymbol)
/** Is this symbol the empty package class or its companion object? */
final def isEmptyPackage(implicit ctx: Context): Boolean =
name.toTermName == nme.EMPTY_PACKAGE && owner.isRoot
/** Is this symbol the empty package class or its companion object? */
final def isEffectiveRoot(implicit ctx: Context) = isRoot || isEmptyPackage
/** Is this symbol an anonymous class? */
final def isAnonymousClass(implicit ctx: Context): Boolean =
initial.asSymDenotation.name startsWith tpnme.ANON_CLASS
/** Is this symbol a package object or its module class? */
def isPackageObject(implicit ctx: Context): Boolean = {
val poName = if (isType) nme.PACKAGE_CLS else nme.PACKAGE
(name.toTermName == poName) && (owner is Package) && (this is Module)
}
/** Is this symbol an abstract type? */
final def isAbstractType(implicit ctx: Context) = isType && (this is Deferred)
/** Is this symbol an alias type? */
final def isAliasType(implicit ctx: Context) = isAbstractOrAliasType && !(this is Deferred)
/** Is this symbol an abstract or alias type? */
final def isAbstractOrAliasType = isType && !isClass
/** Is this definition contained in `boundary`?
* Same as `ownersIterator contains boundary` but more efficient.
*/
final def isContainedIn(boundary: Symbol)(implicit ctx: Context): Boolean = {
def recur(sym: Symbol): Boolean =
if (sym eq boundary) true
else if (sym eq NoSymbol) false
else if ((sym is PackageClass) && !(boundary is PackageClass)) false
else recur(sym.owner)
recur(symbol)
}
/** Is this denotation static (i.e. with no outer instance)? */
final def isStatic(implicit ctx: Context) =
(this is Static) || this.exists && owner.isStaticOwner
/** Is this a package class or module class that defines static symbols? */
final def isStaticOwner(implicit ctx: Context): Boolean =
(this is PackageClass) || (this is ModuleClass) && isStatic
/** Is this denotation defined in the same scope and compilation unit as that symbol? */
final def isCoDefinedWith(that: Symbol)(implicit ctx: Context) =
(this.effectiveOwner == that.effectiveOwner) &&
( !(this.effectiveOwner is PackageClass)
|| { val thisFile = this.symbol.associatedFile
val thatFile = that.symbol.associatedFile
( thisFile == null
|| thatFile == null
|| thisFile.path == thatFile.path // Cheap possibly wrong check, then expensive normalization
|| thisFile.canonicalPath == thatFile.canonicalPath
)
}
)
/** Is this a denotation of a stable term (or an arbitrary type)? */
final def isStable(implicit ctx: Context) = {
val isUnstable =
(this is UnstableValue) ||
info.isVolatile && !hasAnnotation(defn.uncheckedStableClass)
(this is Stable) || isType || {
if (isUnstable) false
else { setFlag(Stable); true }
}
}
/** Is this a user defined "def" method? Excluded are accessors. */
final def isSourceMethod(implicit ctx: Context) = this is (Method, butNot = Accessor)
/** Is this a getter? */
final def isGetter(implicit ctx: Context) = (this is Accessor) && !originalName.isSetterName
/** Is this a setter? */
final def isSetter(implicit ctx: Context) = (this is Accessor) && originalName.isSetterName
/** is this the constructor of a class? */
final def isClassConstructor = name == nme.CONSTRUCTOR
/** Is this the constructor of a trait? */
final def isImplClassConstructor = name == nme.IMPLCLASS_CONSTRUCTOR
/** Is this the constructor of a trait or a class */
final def isConstructor = name.isConstructorName
/** Is this a local template dummy? */
final def isLocalDummy: Boolean = name.isLocalDummyName
/** Does this symbol denote the primary constructor of its enclosing class? */
final def isPrimaryConstructor(implicit ctx: Context) =
isConstructor && owner.primaryConstructor == this
/** Is this a subclass of the given class `base`? */
def isSubClass(base: Symbol)(implicit ctx: Context) = false
/** Is this a subclass of `base`,
* and is the denoting symbol also different from `Null` or `Nothing`?
* @note erroneous classes are assumed to derive from all other classes
* and all classes derive from them.
*/
def derivesFrom(base: Symbol)(implicit ctx: Context) = false
/** Is this symbol a class that does not extend `AnyVal`? */
final def isNonValueClass(implicit ctx: Context): Boolean =
isClass && !derivesFrom(defn.AnyValClass)
/** Is this symbol a class, references to which are supertypes of null? */
final def isNullableClass(implicit ctx: Context): Boolean =
isNonValueClass && !(this is ModuleClass) // todo: check that class does not derive from NotNull?
/** Is this definition accessible as a member of tree with type `pre`?
* @param pre The type of the tree from which the selection is made
* @param superAccess Access is via super
* Everything is accessible if `pre` is `NoPrefix`.
* A symbol with type `NoType` is not accessible for any other prefix.
*/
final def isAccessibleFrom(pre: Type, superAccess: Boolean = false, whyNot: StringBuffer = null)(implicit ctx: Context): Boolean = {
/** Are we inside definition of `boundary`? */
def accessWithin(boundary: Symbol) =
ctx.owner.isContainedIn(boundary) &&
(!(this is JavaDefined) || // disregard package nesting for Java
ctx.owner.enclosingPackage == boundary.enclosingPackage)
/** Are we within definition of linked class of `boundary`? */
def accessWithinLinked(boundary: Symbol) = {
val linked = boundary.linkedClass
(linked ne NoSymbol) && accessWithin(linked)
}
/** Is `pre` of the form C.this, where C is exactly the owner of this symbol,
* or, if this symbol is protected, a subclass of the owner?
*/
def isCorrectThisType(pre: Type): Boolean = pre match {
case ThisType(pclazz) =>
(pclazz eq owner) ||
(this is Protected) && pclazz.derivesFrom(owner)
case _ => false
}
/** Is protected access to target symbol permitted? */
def isProtectedAccessOK = {
def fail(str: => String): Boolean = {
if (whyNot != null) whyNot append str
false
}
val cls = owner.enclosingSubClass
if (!cls.exists)
fail(
s""" Access to protected $this not permitted because
| enclosing ${ctx.owner.enclosingClass.showLocated} is not a subclass of
| ${owner.showLocated} where target is defined""".stripMargin)
else if (
!( isType // allow accesses to types from arbitrary subclasses fixes #4737
|| pre.baseType(cls).exists
|| isConstructor
|| (owner is ModuleClass) // don't perform this check for static members
))
fail(
s""" Access to protected ${symbol.show} not permitted because
| prefix type ${pre.widen.show} does not conform to
| ${cls.showLocated} where the access takes place""".stripMargin)
else true
}
if (pre eq NoPrefix) true
else if (info eq NoType) false
else {
val boundary = accessBoundary(owner)
( boundary.isTerm
|| boundary.isRoot
|| (accessWithin(boundary) || accessWithinLinked(boundary)) &&
( !(this is Local)
|| (owner is ImplClass) // allow private local accesses to impl class members
|| isCorrectThisType(pre)
)
|| (this is Protected) &&
( superAccess
|| pre.isInstanceOf[ThisType]
|| ctx.phase.erasedTypes
|| isProtectedAccessOK
)
)
}
}
/** Do members of this symbol need translation via asSeenFrom when
* accessed via prefix `pre`?
*/
def membersNeedAsSeenFrom(pre: Type)(implicit ctx: Context) =
!( this.isTerm
|| this.isStaticOwner
|| ctx.erasedTypes && symbol != defn.ArrayClass
|| (pre eq NoPrefix) || (pre eq thisType)
)
/** Is this symbol concrete, or that symbol deferred? */
def isAsConcrete(that: Symbol)(implicit ctx: Context): Boolean =
!(this is Deferred) || (that is Deferred)
/** Does this symbol have defined or inherited default parameters? */
def hasDefaultParams(implicit ctx: Context): Boolean =
if (this is HasDefaultParams) true
else if (this is NoDefaultParams) false
else {
val result = allOverriddenSymbols exists (_.hasDefaultParams)
setFlag(if (result) InheritedDefaultParams else NoDefaultParams)
result
}
// def isOverridable: Boolean = !!! need to enforce that classes cannot be redefined
// def isSkolem: Boolean = ???
// ------ access to related symbols ---------------------------------
/* Modules and module classes are represented as follows:
*
* object X extends Y { def f() }
*
* <module> lazy val X: X$ = new X$
* <module> class X$ extends Y { this: X.type => def f() }
*
* During completion, references to moduleClass and sourceModules are stored in
* the completers.
*/
/** The class implementing this module, NoSymbol if not applicable. */
final def moduleClass(implicit ctx: Context): Symbol =
if (this is ModuleVal)
myInfo match {
case info: TypeRef => info.symbol
case ExprType(info: TypeRef) => info.symbol // needed after uncurry, when module terms might be accessor defs
case info: LazyType => info.moduleClass
case _ => println(s"missing module class for $name: $myInfo"); NoSymbol
}
else NoSymbol
/** The module implemented by this module class, NoSymbol if not applicable. */
final def sourceModule(implicit ctx: Context): Symbol = myInfo match {
case ClassInfo(_, _, _, _, selfType: TermRef) if this is ModuleClass =>
selfType.symbol
case info: LazyType =>
info.sourceModule
case _ =>
NoSymbol
}
/** The chain of owners of this denotation, starting with the denoting symbol itself */
final def ownersIterator(implicit ctx: Context) = new Iterator[Symbol] {
private[this] var current = symbol
def hasNext = current.exists
def next: Symbol = {
val result = current
current = current.owner
result
}
}
/** If this is a package object or its implementing class, its owner,
* otherwise the denoting symbol.
*/
final def skipPackageObject(implicit ctx: Context): Symbol =
if (isPackageObject) owner else symbol
/** The owner, skipping package objects. */
final def effectiveOwner(implicit ctx: Context) = owner.skipPackageObject
/** The class containing this denotation.
* If this denotation is already a class, return itself
*/
final def enclosingClass(implicit ctx: Context): Symbol =
if (isClass || !exists) symbol else owner.enclosingClass
/** The class containing this denotation which has the given effective name.
*/
final def enclosingClassNamed(name: Name)(implicit ctx: Context): Symbol = {
val cls = enclosingClass
if (cls.effectiveName == name || !cls.exists) cls else cls.owner.enclosingClassNamed(name)
}
/** The top-level class containing this denotation,
* except for a toplevel module, where its module class is returned.
*/
final def topLevelClass(implicit ctx: Context): Symbol = {
def topLevel(d: SymDenotation): Symbol = {
if ((d is PackageClass) || (d.owner is PackageClass)) d.symbol
else topLevel(d.owner)
}
val sym = topLevel(this)
if (sym.isClass) sym else sym.moduleClass
}
/** The package class containing this denotation */
final def enclosingPackage(implicit ctx: Context): Symbol =
if (this is PackageClass) symbol else owner.enclosingPackage
/** The module object with the same (term-) name as this class or module class,
* and which is also defined in the same scope and compilation unit.
* NoSymbol if this module does not exist.
*/
final def companionModule(implicit ctx: Context): Symbol =
if (owner.exists && name != tpnme.ANON_CLASS) // name test to avoid forcing, thereby causing cyclic reference errors
owner.info.decl(effectiveName.toTermName)
.suchThat(sym => (sym is Module) && sym.isCoDefinedWith(symbol))
.symbol
else NoSymbol
/** The class with the same (type-) name as this module or module class,
* and which is also defined in the same scope and compilation unit.
* NoSymbol if this class does not exist.
*/
final def companionClass(implicit ctx: Context): Symbol =
if (owner.exists)
owner.info.decl(effectiveName.toTypeName)
.suchThat(sym => sym.isClass && sym.isCoDefinedWith(symbol))
.symbol
else NoSymbol
/** If this is a class, the module class of its companion object.
* If this is a module class, its companion class.
* NoSymbol otherwise.
*/
final def linkedClass(implicit ctx: Context): Symbol =
if (this is ModuleClass) companionClass
else if (this.isClass) companionModule.moduleClass
else NoSymbol
/** The class that encloses the owner of the current context
* and that is a subclass of this class. NoSymbol if no such class exists.
*/
final def enclosingSubClass(implicit ctx: Context) =
ctx.owner.ownersIterator.findSymbol(_.isSubClass(symbol))
/** The non-private symbol whose name and type matches the type of this symbol
* in the given class.
* @param inClass The class containing the symbol's definition
* @param site The base type from which member types are computed
*
* inClass <-- find denot.symbol class C { <-- symbol is here
*
* site: Subtype of both inClass and C
*/
final def matchingSymbol(inClass: Symbol, site: Type)(implicit ctx: Context): Symbol = {
var denot = inClass.info.nonPrivateDecl(name)
if (denot.isTerm) // types of the same name always match
denot = denot.matchingDenotation(site, site.memberInfo(symbol))
denot.symbol
}
/** The symbol, in class `inClass`, that is overridden by this denotation. */
final def overriddenSymbol(inClass: ClassSymbol)(implicit ctx: Context): Symbol =
matchingSymbol(inClass, owner.thisType)
/** All symbols overriden by this denotation. */
final def allOverriddenSymbols(implicit ctx: Context): Iterator[Symbol] =
if (exists)
owner.info.baseClasses.tail.iterator map overriddenSymbol filter (_.exists)
else
Iterator.empty
/** The class or term symbol up to which this symbol is accessible,
* or RootClass if it is public. As java protected statics are
* otherwise completely inaccessible in scala, they are treated
* as public.
* @param base The access boundary to assume if this symbol is protected
*/
final def accessBoundary(base: Symbol)(implicit ctx: Context): Symbol = {
val fs = flags
if (fs is PrivateOrLocal) owner
else if (fs is StaticProtected) defn.RootClass
else if (privateWithin.exists && !ctx.phase.erasedTypes) privateWithin
else if (fs is Protected) base
else defn.RootClass
}
/** The primary constructor of a class or trait, NoSymbol if not applicable. */
def primaryConstructor(implicit ctx: Context): Symbol = NoSymbol
// ----- type-related ------------------------------------------------
/** The type parameters of a class symbol, Nil for all other symbols */
def typeParams(implicit ctx: Context): List[TypeSymbol] = Nil
/** The type This(cls), where cls is this class, NoPrefix for all other symbols */
def thisType(implicit ctx: Context): Type = NoPrefix
override def typeRef(implicit ctx: Context): TypeRef =
TypeRef(owner.thisType, name.asTypeName, this)
override def termRef(implicit ctx: Context): TermRef =
TermRef(owner.thisType, name.asTermName, this)
override def valRef(implicit ctx: Context): TermRef =
TermRef.withSig(owner.thisType, name.asTermName, Signature.NotAMethod, this)
override def termRefWithSig(implicit ctx: Context): TermRef =
TermRef.withSig(owner.thisType, name.asTermName, signature, this)
/** The variance of this type parameter or type member as an Int, with
* +1 = Covariant, -1 = Contravariant, 0 = Nonvariant, or not a type parameter
*/
final def variance(implicit ctx: Context): Int =
if (this is Covariant) 1
else if (this is Contravariant) -1
else 0
override def toString = {
val kindString =
if (myFlags is ModuleClass) "module class"
else if (isClass) "class"
else if (isType) "type"
else if (myFlags is Module) "module"
else if (myFlags is Method) "method"
else "val"
s"$kindString $name"
}
val debugString = toString+"#"+symbol.id // !!! DEBUG
// ----- copies ------------------------------------------------------
protected def newLikeThis(s: Symbol, i: Type): SingleDenotation = new UniqueRefDenotation(s, i, validFor)
/** Copy this denotation, overriding selective fields */
final def copySymDenotation(
symbol: Symbol = this.symbol,
owner: Symbol = this.owner,
name: Name = this.name,
initFlags: FlagSet = UndefinedFlags,
info: Type = null,
privateWithin: Symbol = null,
annotations: List[Annotation] = null)(implicit ctx: Context) =
{ // simulate default parameters, while also passing implicit context ctx to the default values
val initFlags1 = if (initFlags != UndefinedFlags) initFlags else this.flags
val info1 = if (info != null) info else this.info
val privateWithin1 = if (privateWithin != null) privateWithin else this.privateWithin
val annotations1 = if (annotations != null) annotations else this.annotations
val d = ctx.SymDenotation(symbol, owner, name, initFlags1, info1, privateWithin1)
d.annotations = annotations1
d
}
}
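/** A usage sketch (not part of the original source): `copySymDenotation`
 * emulates default parameters via the null/Undefined sentinels above, so a
 * caller overrides only the fields it names and inherits all the others.
 */
private def copyWithExtraFlags(denot: SymDenotation, extra: FlagSet)(implicit ctx: Context): SymDenotation =
  denot.copySymDenotation(initFlags = denot.flags | extra) // symbol, owner, name, info, ... carried over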
/** The contents of a class definition during a period
*/
class ClassDenotation private[SymDenotations] (
symbol: Symbol,
ownerIfExists: Symbol,
name: Name,
initFlags: FlagSet,
initInfo: Type,
initPrivateWithin: Symbol,
initRunId: RunId)
extends SymDenotation(symbol, ownerIfExists, name, initFlags, initInfo, initPrivateWithin) {
import util.LRUCache
// ----- denotation fields and accessors ------------------------------
if (initFlags is (Module, butNot = Package)) assert(name.isModuleClassName)
/** The symbol asserted to have type ClassSymbol */
def classSymbol: ClassSymbol = symbol.asInstanceOf[ClassSymbol]
/** The info asserted to have type ClassInfo */
def classInfo(implicit ctx: Context): ClassInfo = info.asInstanceOf[ClassInfo]
/** TODO: Document why caches are supposedly safe to use */
private[this] var myTypeParams: List[TypeSymbol] = _
/** The type parameters of this class */
override final def typeParams(implicit ctx: Context): List[TypeSymbol] = {
def computeTypeParams = decls.filter(sym =>
(sym is TypeParam) && sym.owner == symbol).asInstanceOf[List[TypeSymbol]]
if (myTypeParams == null) myTypeParams = computeTypeParams
myTypeParams
}
/** The type references of all parents of this class. */
def classParents(implicit ctx: Context): List[TypeRef] = info match {
case classInfo: ClassInfo => classInfo.classParents
case _ => Nil
}
/** The denotation is fully completed: all attributes are fully defined.
* ClassDenotations compiled from source are first completed, then fully completed.
* @see Namer#ClassCompleter
*/
private def isFullyCompleted(implicit ctx: Context): Boolean =
isCompleted && classParents.nonEmpty
// ------ syncing inheritance-related info -----------------------------
private var firstRunId: RunId = initRunId
/** If caches influenced by parent classes are still valid, the denotation
* itself, otherwise a freshly initialized copy.
*/
override def syncWithParents(implicit ctx: Context): SingleDenotation = {
def isYounger(tref: TypeRef) = tref.symbol.denot match {
case denot: ClassDenotation =>
if (denot.validFor.runId < ctx.runId) denot.current // syncs with its parents in turn
val result = denot.firstRunId > this.firstRunId
if (result) incremental.println(s"$denot is younger than $this")
result
case _ => false
}
val parentIsYounger = (firstRunId < ctx.runId) && {
infoOrCompleter match {
case cinfo: ClassInfo => cinfo.classParents exists isYounger
case _ => false
}
}
if (parentIsYounger) {
incremental.println(s"parents of $this are invalid; symbol id = ${symbol.id}, copying ...\\n")
invalidateInheritedInfo()
firstRunId = ctx.runId
}
this
}
/** Invalidate all caches and fields that depend on base classes and their contents */
private def invalidateInheritedInfo(): Unit = {
myBaseClasses = null
mySuperClassBits = null
myMemberFingerPrint = FingerPrint.unknown
myMemberCache = null
memberNamesCache = SimpleMap.Empty
}
// ------ class-specific operations -----------------------------------
private[this] var myThisType: Type = null
override def thisType(implicit ctx: Context): Type = {
if (myThisType == null) myThisType = computeThisType
myThisType
}
private def computeThisType(implicit ctx: Context): Type = ThisType(classSymbol) /*
if ((this is PackageClass) && !isRoot)
TermRef(owner.thisType, name.toTermName)
else
ThisType(classSymbol) */
private[this] var myTypeRef: TypeRef = null
override def typeRef(implicit ctx: Context): TypeRef = {
if (myTypeRef == null) myTypeRef = super.typeRef
myTypeRef
}
private[this] var myBaseClasses: List[ClassSymbol] = null
private[this] var mySuperClassBits: BitSet = null
private def computeBases(implicit ctx: Context): Unit = {
if (myBaseClasses == Nil) throw new CyclicReference(this)
myBaseClasses = Nil
val seen = new mutable.BitSet
val locked = new mutable.BitSet
def addBaseClasses(bcs: List[ClassSymbol], to: List[ClassSymbol])
: List[ClassSymbol] = bcs match {
case bc :: bcs1 =>
val bcs1added = addBaseClasses(bcs1, to)
val id = bc.superId
if (seen contains id) bcs1added
else {
seen += id
bc :: bcs1added
}
case nil =>
to
}
def addParentBaseClasses(ps: List[Type], to: List[ClassSymbol]): List[ClassSymbol] = ps match {
case p :: ps1 =>
addParentBaseClasses(ps1, addBaseClasses(p.baseClasses, to))
case nil =>
to
}
myBaseClasses = classSymbol :: addParentBaseClasses(classParents, Nil)
mySuperClassBits = ctx.uniqueBits.findEntryOrUpdate(seen.toImmutable)
}
/** A bitset that contains the superId's of all base classes */
private def superClassBits(implicit ctx: Context): BitSet =
if (classParents.isEmpty) BitSet() // can happen when called too early in Namers
else {
if (mySuperClassBits == null) computeBases
mySuperClassBits
}
/** The base classes of this class in linearization order,
* with the class itself as first element.
*/
def baseClasses(implicit ctx: Context): List[ClassSymbol] =
if (classParents.isEmpty) classSymbol :: Nil // can happen when called too early in Namers
else {
if (myBaseClasses == null) computeBases
myBaseClasses
}
final override def derivesFrom(base: Symbol)(implicit ctx: Context): Boolean =
!isAbsent &&
base.isClass &&
( (symbol eq base)
|| (superClassBits contains base.superId)
|| (this is Erroneous)
|| (base is Erroneous)
)
final override def isSubClass(base: Symbol)(implicit ctx: Context) =
derivesFrom(base) ||
base.isClass && (
(symbol eq defn.NothingClass) ||
(symbol eq defn.NullClass) && (base ne defn.NothingClass))
private[this] var myMemberFingerPrint: FingerPrint = FingerPrint.unknown
private def computeMemberFingerPrint(implicit ctx: Context): FingerPrint = {
var fp = FingerPrint()
var e = info.decls.lastEntry
while (e != null) {
fp.include(e.sym.name)
e = e.prev
}
var ps = classParents
while (ps.nonEmpty) {
val parent = ps.head.typeSymbol
parent.denot match {
case classd: ClassDenotation =>
fp.include(classd.memberFingerPrint)
parent.denot.setFlag(Frozen)
case _ =>
}
ps = ps.tail
}
fp
}
/** A Bloom filter for the names of all members in this class.
* Makes sense only for parent classes, and should definitely
* not be used for package classes, because the cache never
* gets invalidated.
*/
def memberFingerPrint(implicit ctx: Context): FingerPrint = {
if (myMemberFingerPrint == FingerPrint.unknown) myMemberFingerPrint = computeMemberFingerPrint
myMemberFingerPrint
}
private[this] var myMemberCache: LRUCache[Name, PreDenotation] = null
private def memberCache: LRUCache[Name, PreDenotation] = {
if (myMemberCache == null) myMemberCache = new LRUCache
myMemberCache
}
/** Enter a symbol in current scope.
* Note: We require that this does not happen after the first time
* someone does a findMember on a subclass.
*/
def enter(sym: Symbol, scope: Scope = EmptyScope)(implicit ctx: Context): Unit = {
val mscope = scope match {
case scope: MutableScope => scope
case _ => decls.asInstanceOf[MutableScope]
}
if (this is PackageClass) { // replace existing symbols
val entry = mscope.lookupEntry(sym.name)
if (entry != null) {
mscope.unlink(entry)
entry.sym.denot = sym.denot // to avoid stale symbols
}
}
enterNoReplace(sym, mscope)
}
/** Enter a symbol in current scope without potentially replacing the old copy. */
def enterNoReplace(sym: Symbol, scope: MutableScope)(implicit ctx: Context): Unit = {
require(!(this is Frozen))
scope.enter(sym)
if (myMemberFingerPrint != FingerPrint.unknown)
myMemberFingerPrint.include(sym.name)
if (myMemberCache != null)
myMemberCache invalidate sym.name
}
/** Delete symbol from current scope.
* Note: We require that this does not happen after the first time
* someone does a findMember on a subclass.
*/
def delete(sym: Symbol)(implicit ctx: Context) = {
require(!(this is Frozen))
info.decls.asInstanceOf[MutableScope].unlink(sym)
if (myMemberFingerPrint != FingerPrint.unknown)
computeMemberFingerPrint
if (myMemberCache != null)
myMemberCache invalidate sym.name
}
/** All members of this class that have the given name.
* The elements of the returned pre-denotation all
* have existing symbols.
*/
final def membersNamed(name: Name)(implicit ctx: Context): PreDenotation = {
val privates = decls.denotsNamed(name, selectPrivate)
privates union nonPrivateMembersNamed(name).filterDisjoint(privates)
}
/** All non-private members of this class that have the given name.
* The elements of the returned pre-denotation all
* have existing symbols.
*/
final def nonPrivateMembersNamed(name: Name)(implicit ctx: Context): PreDenotation = {
Stats.record("nonPrivateMembersNamed")
if (Config.cacheMembersNamed) {
var denots: PreDenotation = memberCache lookup name
if (denots == null) {
denots = computeNPMembersNamed(name)
if (isFullyCompleted) memberCache.enter(name, denots)
} else if (Config.checkCacheMembersNamed) {
val denots1 = computeNPMembersNamed(name)
assert(denots.exists == denots1.exists, s"cache inconsistency: cached: $denots, computed $denots1, name = $name, owner = $this")
}
denots
} else computeNPMembersNamed(name)
}
private[core] def computeNPMembersNamed(name: Name)(implicit ctx: Context): PreDenotation = /*>|>*/ Stats.track("computeNPMembersNamed") /*<|<*/ {
if (!classSymbol.hasChildren ||
!Config.useFingerPrints ||
(memberFingerPrint contains name)) {
Stats.record("computeNPMembersNamed after fingerprint")
ensureCompleted()
val ownDenots = decls.denotsNamed(name, selectNonPrivate)
if (debugTrace) // DEBUG
println(s"$this.member($name), ownDenots = $ownDenots")
def collect(denots: PreDenotation, parents: List[TypeRef]): PreDenotation = parents match {
case p :: ps =>
val denots1 = collect(denots, ps)
p.symbol.denot match {
case parentd: ClassDenotation =>
denots1 union
parentd.nonPrivateMembersNamed(name)
.mapInherited(ownDenots, denots1, thisType)
case _ =>
denots1
}
case nil =>
denots
}
if (name.isConstructorName) ownDenots
else collect(ownDenots, classParents)
} else NoDenotation
}
override final def findMember(name: Name, pre: Type, excluded: FlagSet)(implicit ctx: Context): Denotation = {
val raw = if (excluded is Private) nonPrivateMembersNamed(name) else membersNamed(name)
raw.filterExcluded(excluded).asSeenFrom(pre).toDenot(pre)
}
private[this] var baseTypeCache: java.util.HashMap[CachedType, Type] = null
private[this] var baseTypeValid: RunId = NoRunId
/** Compute tp.baseType(this) */
final def baseTypeOf(tp: Type)(implicit ctx: Context): Type = {
def foldGlb(bt: Type, ps: List[Type]): Type = ps match {
case p :: ps1 => foldGlb(bt & baseTypeOf(p), ps1)
case _ => bt
}
def computeBaseTypeOf(tp: Type): Type = {
Stats.record("computeBaseTypeOf")
tp match {
case tp: TypeRef =>
val subcls = tp.symbol
if (subcls eq symbol)
tp
else subcls.denot match {
case cdenot: ClassDenotation =>
if (cdenot.superClassBits contains symbol.superId) foldGlb(NoType, tp.parents)
else NoType
case _ =>
baseTypeOf(tp.underlying)
}
case tp: TypeProxy =>
baseTypeOf(tp.underlying)
case AndType(tp1, tp2) =>
baseTypeOf(tp1) & baseTypeOf(tp2)
case OrType(tp1, tp2) =>
baseTypeOf(tp1) | baseTypeOf(tp2)
case _ =>
NoType
}
}
/*>|>*/ ctx.debugTraceIndented(s"$tp.baseType($this)") /*<|<*/ {
if (symbol.isStatic && tp.derivesFrom(symbol))
symbol.typeRef
else tp match {
case tp: CachedType =>
if (baseTypeValid != ctx.runId) {
baseTypeCache = new java.util.HashMap[CachedType, Type]
baseTypeValid = ctx.runId
}
var basetp = baseTypeCache get tp
if (basetp == null) {
baseTypeCache.put(tp, NoPrefix)
basetp = computeBaseTypeOf(tp)
baseTypeCache.put(tp, basetp)
} else if (basetp == NoPrefix) {
throw new CyclicReference(this)
}
basetp
case _ =>
computeBaseTypeOf(tp)
}
}
}
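/** A diagnostic sketch (not in the original source): `baseTypeOf` writes
 * NoPrefix into the cache as an in-progress marker, so a re-entrant lookup
 * of the same type is detected as a CyclicReference instead of looping.
 */
private def baseTypeCacheState(tp: CachedType): String =
  if (baseTypeCache == null) "no cache allocated for this run"
  else baseTypeCache.get(tp) match {
    case null => "not computed yet"
    case NoPrefix => "in progress; a re-entrant lookup would be cyclic"
    case bt => "cached: " + bt
  }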
private[this] var memberNamesCache: SimpleMap[NameFilter, Set[Name]] = SimpleMap.Empty
def memberNames(keepOnly: NameFilter)(implicit ctx: Context): Set[Name] = {
def computeMemberNames: Set[Name] = {
val inheritedNames = (classParents flatMap (_.memberNames(keepOnly, thisType))).toSet
val ownSyms =
if (keepOnly == implicitFilter)
if (this is Package) Iterator.empty
else info.decls.iterator filter (_ is Implicit)
else info.decls.iterator
val ownNames = ownSyms map (_.name)
val candidates = inheritedNames ++ ownNames
candidates filter (keepOnly(thisType, _))
}
if ((this is PackageClass) || (keepOnly == implicitFilter) || !Config.cacheMemberNames)
computeMemberNames // don't cache package member names; they might change
else {
val cached = memberNamesCache(keepOnly)
if (cached != null) cached
else {
setFlag(Frozen)
val names = computeMemberNames
memberNamesCache = memberNamesCache.updated(keepOnly, names)
names
}
}
}
private[this] var fullNameCache: SimpleMap[Character, Name] = SimpleMap.Empty
override final def fullNameSeparated(separator: Char)(implicit ctx: Context): Name = {
val cached = fullNameCache(separator)
if (cached != null) cached
else {
val fn = super.fullNameSeparated(separator)
fullNameCache = fullNameCache.updated(separator, fn)
fn
}
}
// to avoid overloading ambiguities
override def fullName(implicit ctx: Context): Name = super.fullName
override def primaryConstructor(implicit ctx: Context): Symbol = {
val cname = if (this is ImplClass) nme.IMPLCLASS_CONSTRUCTOR else nme.CONSTRUCTOR
decls.denotsNamed(cname).first.symbol
}
}
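/** A minimal sketch (not in the original source) of the invariant connecting
 * `baseClasses` and `derivesFrom`: both are driven by the same traversal
 * that fills `superClassBits`, so a class derives from every member of its
 * own linearization.
 */
private def linearizationInvariant(cls: ClassDenotation)(implicit ctx: Context): Boolean =
  cls.baseClasses.forall(bc => cls.derivesFrom(bc))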
/** The denotation of a package class.
* It overrides ClassDenotation to take account of package objects when looking for members
*/
class PackageClassDenotation private[SymDenotations] (
symbol: Symbol,
ownerIfExists: Symbol,
name: Name,
initFlags: FlagSet,
initInfo: Type,
initPrivateWithin: Symbol,
initRunId: RunId)
extends ClassDenotation(symbol, ownerIfExists, name, initFlags, initInfo, initPrivateWithin, initRunId) {
private[this] var packageObjCache: SymDenotation = _
private[this] var packageObjRunId: RunId = NoRunId
/** The package object in this class, if one exists */
def packageObj(implicit ctx: Context): SymDenotation = {
if (packageObjRunId != ctx.runId) {
packageObjRunId = ctx.runId
packageObjCache = NoDenotation // break cycle in case we are looking for package object itself
packageObjCache = findMember(nme.PACKAGE, thisType, EmptyFlags).asSymDenotation
}
packageObjCache
}
/** Look first for members in package; if none are found look in package object */
override def computeNPMembersNamed(name: Name)(implicit ctx: Context): PreDenotation = {
val denots = super.computeNPMembersNamed(name)
if (denots.exists) denots
else packageObj.moduleClass.denot match {
case pcls: ClassDenotation => pcls.computeNPMembersNamed(name)
case _ => denots
}
}
/** The union of the member names of the package and the package object */
override def memberNames(keepOnly: NameFilter)(implicit ctx: Context): Set[Name] = {
val ownNames = super.memberNames(keepOnly)
packageObj.moduleClass.denot match {
case pcls: ClassDenotation => ownNames union pcls.memberNames(keepOnly)
case _ => ownNames
}
}
}
object NoDenotation extends SymDenotation(
NoSymbol, NoSymbol, "<none>".toTermName, Permanent, NoType) {
override def exists = false
override def isTerm = false
override def isType = false
override def owner: Symbol = throw new AssertionError("NoDenotation.owner")
override def computeAsSeenFrom(pre: Type)(implicit ctx: Context): SingleDenotation = this
validFor = Period.allInRun(NoRunId) // will be brought forward automatically
}
// ---- Completion --------------------------------------------------------
/** Instances of LazyType are carried by uncompleted symbols.
* Note: LazyTypes double up as (constant) functions from Symbol and
* from (TermSymbol, ClassSymbol) to LazyType. That way lazy types can be
* directly passed to symbol creation methods in Symbols that demand instances
* of these function types.
*/
abstract class LazyType extends UncachedGroundType
with (Symbol => LazyType)
with ((TermSymbol, ClassSymbol) => LazyType) { self =>
/** Sets all missing fields of given denotation */
def complete(denot: SymDenotation)(implicit ctx: Context): Unit
def apply(sym: Symbol) = this
def apply(module: TermSymbol, modcls: ClassSymbol) = this
private var myDecls: Scope = EmptyScope
private var mySourceModuleFn: () => Symbol = NoSymbolFn
private var myModuleClassFn: () => Symbol = NoSymbolFn
/** A proxy to this lazy type that keeps the complete operation
* but provides fresh slots for scope/sourceModule/moduleClass
*/
def proxy: LazyType = new LazyType {
override def complete(denot: SymDenotation)(implicit ctx: Context) = self.complete(denot)
}
def decls: Scope = myDecls
def sourceModule: Symbol = mySourceModuleFn()
def moduleClass: Symbol = myModuleClassFn()
def withDecls(decls: Scope): this.type = { myDecls = decls; this }
def withSourceModule(sourceModule: => Symbol): this.type = { mySourceModuleFn = () => sourceModule; this }
def withModuleClass(moduleClass: => Symbol): this.type = { myModuleClassFn = () => moduleClass; this }
}
val NoSymbolFn = () => NoSymbol
/** A missing completer */
class NoCompleter extends LazyType {
def complete(denot: SymDenotation)(implicit ctx: Context): Unit = unsupported("complete")
}
/** A lazy type for modules that points to the module class.
* Needed so that `moduleClass` works before completion.
* Completion of modules is always completion of the underlying
* module class, followed by copying the relevant fields to the module.
*/
class ModuleCompleter(override val moduleClass: ClassSymbol) extends LazyType {
def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
val from = moduleClass.denot.asClass
denot.setFlag(from.flags.toTermFlags & RetainedModuleValFlags)
denot.annotations = from.annotations filter (_.appliesToModule)
// !!! ^^^ needs to be revised later. The problem is that annotations might
// only apply to the module but not to the module class. The right solution
// is to have the module class completer set the annotations of both the
// class and the module.
denot.info = moduleClass.typeRef
denot.privateWithin = from.privateWithin
}
}
/** A completer for missing references */
class StubInfo() extends LazyType {
def initializeToDefaults(denot: SymDenotation)(implicit ctx: Context) = {
denot.info = denot match {
case denot: ClassDenotation =>
ClassInfo(denot.owner.thisType, denot.classSymbol, Nil, EmptyScope)
case _ =>
ErrorType
}
denot.privateWithin = NoSymbol
}
def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
val sym = denot.symbol
val file = sym.associatedFile
val (location, src) =
if (file != null) (s" in $file", file.toString)
else ("", "the signature")
val name = ctx.fresh.withSetting(ctx.settings.debugNames, true).nameString(denot.name)
ctx.error(
s"""|bad symbolic reference. A signature$location
|refers to $name in ${denot.owner.showKind} ${denot.owner.showFullName} which is not available.
|It may be completely missing from the current classpath, or the version on
|the classpath might be incompatible with the version used when compiling $src.""".stripMargin)
if (ctx.debug) throw new Error()
initializeToDefaults(denot)
}
}
// ---- Fingerprints -----------------------------------------------------
/** A fingerprint is a bitset that acts as a Bloom filter for sets
* of names.
*/
class FingerPrint(val bits: Array[Long]) extends AnyVal {
import FingerPrint._
/** Include some bits of name's hashcode in set */
def include(name: Name): Unit = {
val hash = name.hashCode & Mask
bits(hash >> WordSizeLog) |= (1L << hash)
}
/** Include all bits of `that` fingerprint in set */
def include(that: FingerPrint): Unit =
for (i <- 0 until NumWords) bits(i) |= that.bits(i)
/** Does set contain hash bits of given name? */
def contains(name: Name): Boolean = {
val hash = name.hashCode & Mask
(bits(hash >> WordSizeLog) & (1L << hash)) != 0
}
}
object FingerPrint {
def apply() = new FingerPrint(new Array[Long](NumWords))
val unknown = new FingerPrint(null)
private final val WordSizeLog = 6
private final val NumWords = 32
private final val NumBits = NumWords << WordSizeLog
private final val Mask = NumBits - 1
}
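/** A behavioural sketch (not in the original source): a FingerPrint is a
 * 2048-bit Bloom filter over name hash codes, so `contains` can return
 * false positives but never false negatives for names already included.
 */
private def fingerPrintExample(included: List[Name]): Unit = {
  val fp = FingerPrint()
  included.foreach(n => fp.include(n)) // set the hash bits of every name
  assert(included.forall(n => fp.contains(n))) // an included name is always found
  // an unrelated name is usually reported absent, but a hash collision may
  // yield a spurious `true`, so callers treat `contains` as a pre-filter only
}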
}
|
magarciaEPFL/dotty
|
src/dotty/tools/dotc/core/SymDenotations.scala
|
Scala
|
bsd-3-clause
| 52,740
|
package parsing.multi
import parsing.multi.Lexer._
import parsing.multi.LookaheadLexer._
/**
* Created by hongdi.ren.
*/
abstract class Lexer(input: String) {
var p: Int = 0
var c: Char = input.charAt(p)
def consume(): Unit = {
p = p + 1
if (p >= input.length) c = EOF
else c = input.charAt(p)
}
def `match`(x: Char): Unit = {
if (c == x) consume()
else throw new IllegalArgumentException(s"expecting $x; found $c")
}
def nextToken(): Token
def getTokenName(tokenType: Int): String
}
object Lexer {
val EOF: Char = (-1).toChar
val EOF_TYPE: Int = 1
}
class LookaheadLexer(input: String) extends Lexer(input) {
override def getTokenName(x: Int): String = tokenNames(x)
override def nextToken(): Token = {
while (c != EOF) {
c match {
case _ if blank(c) => WS()
case ',' =>
consume()
return Token(COMMA, ",")
case '[' =>
consume()
return Token(LBRACK, "[")
case ']' =>
consume()
return Token(RBRACK, "]")
case '=' =>
consume()
return Token(EQUALS, "=")
case _ if isLETTER() => return NAME()
case _ => throw new IllegalArgumentException("invalid character: " + c)
}
}
Token(EOF_TYPE, "<EOF>")
}
def isLETTER(): Boolean = c.isLetter
def NAME(): Token = {
val sb = new StringBuilder
do {
sb.append(c)
consume()
} while (isLETTER())
Token(LookaheadLexer.NAME, sb.toString())
}
def WS(): Unit = while (blank(c)) consume()
}
object LookaheadLexer {
val NAME: Int = 2
val COMMA: Int = 3
val LBRACK: Int = 4
val RBRACK: Int = 5
val EQUALS: Int = 6
val tokenNames: IndexedSeq[String] = Vector("n/a", "<EOF>", "NAME", "COMMA", "LBRACK", "RBRACK", "EQUALS")
val blank: Set[Char] = Set(' ', '\\t', '\\n', '\\r')
}
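/* A small usage sketch (hypothetical, not part of the original file):
 * tokenizing a bracketed list with the lookahead lexer above. Token is
 * assumed to be a case class over a token type and its text, matching the
 * Token(LBRACK, "[") style constructions in nextToken().
 */
object LookaheadLexerDemo extends App {
  val lexer = new LookaheadLexer("[a, b = c]")
  Iterator.continually(lexer.nextToken())
    .takeWhile(_ != Token(EOF_TYPE, "<EOF>")) // relies on case-class equality
    .foreach(println) // LBRACK, NAME, COMMA, NAME, EQUALS, NAME, RBRACK
}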
|
Ryan-Git/LangImplPatterns
|
src/main/scala/parsing/multi/LookaheadLexer.scala
|
Scala
|
apache-2.0
| 1,883
|
package pl.msitko.xml.dsl
import pl.msitko.xml.BasicSpec
import pl.msitko.xml.entities.{Attribute, Element, LabeledElement}
import pl.msitko.xml.matchers.{Namespace, PrefixedNamespace}
import pl.msitko.xml.optics.{LabeledElementOptics, NodeOptics}
import pl.msitko.xml.printing.{Indent, PrinterConfig}
import pl.msitko.xml.test.utils.ExampleInputs
trait OpticsBuilderSpec extends BasicSpec with ExampleInputs {
implicit val printerConfig = PrinterConfig.Default
"OpticsBuilder" should {
"set text for chosen path" in {
val parsed = parseExample(noNamespaceXmlStringWithWsExample)
val traversal = (root \\ "c1" \\ "f").hasTextOnly
val res = traversal.set("new")(parsed)
print(res) should === (expectedRes)
}
"modify text for chosen path" in {
val parsed = parseExample(noNamespaceXmlStringWithWsExample)
val traversal = (root \\ "c1" \\ "f").hasTextOnly
val res = traversal.modify(_.toUpperCase)(parsed)
print(res) should === (expectedRes2)
}
"modify existing attribute value" in {
val parsed = parse(input3)
val traversal = (root \\ "c1" \\ "f").attr("someKey")
val res = traversal.set("newValue")(parsed)
print(res) should === (expectedRes3)
}
"made two modifications with functional `andThen` composition" in {
val parsed = parse(input3)
val modifyAttr = (root \\ "c1" \\ "f").attr("someKey").set("newValue")
val modifyText = (root \\ "c1" \\ "f").hasTextOnly.modify(_.toUpperCase)
val modify = modifyAttr andThen modifyText
val res = modify(parsed)
print(res) should === (expectedRes3Alternative)
}
"add attribute" in {
val parsed = parseExample(noNamespaceXmlStringWithWsExample)
val traversal = (root \\ "c1" \\ "f").attrs
val res = traversal.modify(attrs => attrs :+ Attribute.unprefixed("someKey", "newValue"))(parsed)
print(res) should === (expectedRes4)
}
"replaceOrAddAttr" in {
val parsed = parse(input5)
val traversal = (root \\ "c1" \\ "f")
val res = traversal.replaceOrAddAttr("someKey", "newValue")(parsed)
print(res) should === (expectedRes4)
}
"replaceOrAddAttr for ResolvedNameMatcher" in {
val parsed = parse(input6)
val ns = PrefixedNamespace("a", "http://a.com")
val traversal = (root \\ "c1" \\ "f")
val res = traversal.replaceOrAddAttr(ns.name("someKey"), "newValue")(parsed)
print(res) should === (expectedRes6)
}
"modify attribute for ResolvedNameMatcher" in {
val parsed = parse(input7)
val ns = Namespace("http://a.com")
val traversal = (root \\ "c1" \\ "f").attr(ns.name("someKey"))
val res = traversal.modify(_.toUpperCase)(parsed)
print(res) should === (expectedRes7)
}
"modify attribute for IgnoreNamespaceMatcher" in {
val parsed = parse(input7)
val traversal = (root \\ "c1" \\ "f").attr("someKey")
val res = traversal.modify(_.toUpperCase)(parsed)
print(res) should === (expectedRes8)
}
"modify attribute for ResolvedNameMatcher2" in {
val parsed = parse(input7)
val ns = Namespace("")
val traversal = (root \\ "c1" \\ "f").attr(ns.name("someKey"))
val res = traversal.modify(_.toUpperCase)(parsed)
print(res) should === (expectedRes9)
}
"modify attribute in root element" in {
val parsed = parse(input10)
val traversal = root.attr("someKey")
val res = traversal.set("newValue")(parsed)
print(res) should === (expectedRes10)
}
"modify text in root element" in {
val parsed = parse(input10)
val res = root.hasTextOnly.set("hello")(parsed)
print(res) should === (expectedRes11)
}
"add attribute in root element" in {
val parsed = parse(input10)
val res = root.attrs.modify(attrs => attrs :+ Attribute.unprefixed("anotherKey", "newValue"))(parsed)
print(res) should === (expectedRes12)
}
"replaceOrAddAttr in root element" in {
{
val parsed = parse(input13)
val res = root.replaceOrAddAttr("anotherKey", "newValue")(parsed)
print(res) should === (expectedRes12)
}
{
val parsed = parse(input14)
val res = root.replaceOrAddAttr("anotherKey", "newValue")(parsed)
print(res) should === (expectedRes12)
}
}
// TODO: add to cookbook
"renameLabel" in {
val parsed = parse(example15("f"))
val res = (root \\ "c1").renameLabel("f", "xyz")(parsed)
print(res) should === (example15("xyz"))
}
// TODO: add something like this to cookbook
"having" in {
import LabeledElementOptics._
import NodeOptics._
val parsed = parse(example15("f"))
// TODO: does not look nice
val res = (((root \\ "c1").having { node =>
isLabeledElement.composeOptional(isLabeled("g")).getOption(node).isDefined
}) \\ "f").hasTextOnly.modify(_.toUpperCase)(parsed)
print(res) should === (output16)
}
"having 2" in {
import LabeledElementOptics._
import NodeOptics._
import pl.msitko.xml.optics.ElementOptics._
val parsed = parse(example17("item"))
val res = (((root \\ "c1").having { node =>
isLabeledElement.composeOptional(isLabeled("g")).composeOptional(attribute("someKey")).getOption(node).isDefined
}) \\ "f").hasTextOnly.modify(_.toUpperCase)(parsed)
print(res) should === (example17("ITEM"))
}
"index" in {
val parsed = parse(example17("item"))
val modification = (root \\ "c1" \\ "f").index(1).hasTextOnly.modify(_.toUpperCase)
// because of the hacky implementation, we check that calling the modification a few times beforehand does not affect the result
modification(parsed)
modification(parsed)
val res = modification(parsed)
print(res) should === (example17("ITEM"))
}
"index and then index" in {
val parsed = parse(example19("item"))
val res = ((root \\ "c1" \\ "f").index(1) \\ "h" \\ "i").index(1).hasTextOnly.modify(_.toUpperCase)(parsed)
print(res) should === (example19("ITEM"))
}
"elementAt" in {
// contrary to `childAt`, we don't need to minimize, as Text elements will be ignored
val parsed = parse(example18("item"))
val res = (root \\ "c1" \\ "f").elementAt(1).hasTextOnly.modify(_.toUpperCase)(parsed)
print(res) should === (example18("ITEM"))
}
// TODO: may be a nice addition to cookbook
"insert new node as the first node" in {
val parsed = parse(minimalInput)
// TODO: better factory methods needed instead of this
val newElement = LabeledElement.unprefixed("new", Element())
val res = (root \\ "f").children.modify( ch => newElement +: ch)(parsed)
print(res)(PrinterConfig(Indent.IndentWith(" "), true)) should === (example20)
}
"insert new node as the last node" in {
val parsed = parse(minimalInput)
// TODO: better factory methods needed instead of this
val newElement = LabeledElement.unprefixed("new", Element())
val res = (root \\ "f").children.modify( ch => ch :+ newElement)(parsed)
print(res)(PrinterConfig(Indent.IndentWith(" "), true)) should === (example21)
}
}
// TODO: get rid of code duplication
val expectedRes =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a>
| <c1>
| <f>new</f>
| <g>item2</g>
| </c1>
| <c1>
| <f>new</f>
| <h>item2</h>
| </c1>
|</a>""".stripMargin
val expectedRes2 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a>
| <c1>
| <f>ITEM1</f>
| <g>item2</g>
| </c1>
| <c1>
| <f>ITEM1</f>
| <h>item2</h>
| </c1>
|</a>""".stripMargin
val input3 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a>
| <c1>
| <f someKey="oldValue">item1</f>
| <g>item2</g>
| </c1>
| <c1>
| <f someKey="oldValue" anotherKey="someValue">item1</f>
| <h>item2</h>
| </c1>
|</a>""".stripMargin
val expectedRes3 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a>
| <c1>
| <f someKey="newValue">item1</f>
| <g>item2</g>
| </c1>
| <c1>
| <f someKey="newValue" anotherKey="someValue">item1</f>
| <h>item2</h>
| </c1>
|</a>""".stripMargin
val expectedRes3Alternative =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a>
| <c1>
| <f someKey="newValue">ITEM1</f>
| <g>item2</g>
| </c1>
| <c1>
| <f someKey="newValue" anotherKey="someValue">ITEM1</f>
| <h>item2</h>
| </c1>
|</a>""".stripMargin
val expectedRes4 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a>
| <c1>
| <f someKey="newValue">item1</f>
| <g>item2</g>
| </c1>
| <c1>
| <f someKey="newValue">item1</f>
| <h>item2</h>
| </c1>
|</a>""".stripMargin
val input5 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a>
| <c1>
| <f someKey="oldValue">item1</f>
| <g>item2</g>
| </c1>
| <c1>
| <f>item1</f>
| <h>item2</h>
| </c1>
|</a>""".stripMargin
val expectedRes5 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a>
| <c1>
| <f someKey="newValue">item1</f>
| <g>item2</g>
| </c1>
| <c1>
| <f someKey="newValue">item1</f>
| <h>item2</h>
| </c1>
|</a>""".stripMargin
val input6 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a xmlns:a="http://a.com" xmlns:b="http://b.com">
| <c1>
| <f a:someKey="oldValue">item1</f>
| <f someKey="oldValue">item2</f>
| </c1>
| <c1>
| <a:f someKey="oldValue">item1</a:f>
| <f b:someKey="oldValue">item2</f>
| <b:f someKey="oldValue">item2</b:f>
| </c1>
|</a>""".stripMargin
// why a new attribute is added for <a:f> instead of the existing one being modified is not obvious; for an explanation see:
// https://stackoverflow.com/questions/41561/xml-namespaces-and-attributes
val expectedRes6 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a xmlns:a="http://a.com" xmlns:b="http://b.com">
| <c1>
| <f a:someKey="newValue">item1</f>
| <f xmlns:a="http://a.com" someKey="oldValue" a:someKey="newValue">item2</f>
| </c1>
| <c1>
| <a:f xmlns:a="http://a.com" someKey="oldValue" a:someKey="newValue">item1</a:f>
| <f xmlns:a="http://a.com" b:someKey="oldValue" a:someKey="newValue">item2</f>
| <b:f xmlns:a="http://a.com" someKey="oldValue" a:someKey="newValue">item2</b:f>
| </c1>
|</a>""".stripMargin
val input7 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a xmlns:a="http://a.com" xmlns:b="http://b.com">
| <c1>
| <f a:someKey="oldValue">item1</f>
| <f someKey="oldValue">item2</f>
| <f b:someKey="oldValue">item3</f>
| <f anotherKey="oldValue">item4</f>
| <f>item5</f>
| </c1>
|</a>""".stripMargin
val expectedRes7 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a xmlns:a="http://a.com" xmlns:b="http://b.com">
| <c1>
| <f a:someKey="OLDVALUE">item1</f>
| <f someKey="oldValue">item2</f>
| <f b:someKey="oldValue">item3</f>
| <f anotherKey="oldValue">item4</f>
| <f>item5</f>
| </c1>
|</a>""".stripMargin
val expectedRes8 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a xmlns:a="http://a.com" xmlns:b="http://b.com">
| <c1>
| <f a:someKey="OLDVALUE">item1</f>
| <f someKey="OLDVALUE">item2</f>
| <f b:someKey="OLDVALUE">item3</f>
| <f anotherKey="oldValue">item4</f>
| <f>item5</f>
| </c1>
|</a>""".stripMargin
val expectedRes9 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a xmlns:a="http://a.com" xmlns:b="http://b.com">
| <c1>
| <f a:someKey="oldValue">item1</f>
| <f someKey="OLDVALUE">item2</f>
| <f b:someKey="oldValue">item3</f>
| <f anotherKey="oldValue">item4</f>
| <f>item5</f>
| </c1>
|</a>""".stripMargin
val input10 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a someKey="oldValue">
|</a>""".stripMargin
val expectedRes10 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a someKey="newValue">
|</a>""".stripMargin
val expectedRes11 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a someKey="oldValue">hello</a>""".stripMargin
val expectedRes12 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a someKey="oldValue" anotherKey="newValue">
|</a>""".stripMargin
val input13 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a someKey="oldValue">
|</a>""".stripMargin
val input14 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a someKey="oldValue" anotherKey="oldValue">
|</a>""".stripMargin
def example15(toReplace: String) =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<a>
| <c1>
| <$toReplace>item</$toReplace>
| <g>item</g>
| </c1>
| <c2>
| <f>item</f>
| </c2>
| <c1>
| <$toReplace>item</$toReplace>
| </c1>
|</a>""".stripMargin
val output16 =
"""<?xml version="1.0" encoding="UTF-8"?>
|<a>
| <c1>
| <f>ITEM</f>
| <g>item</g>
| </c1>
| <c2>
| <f>item</f>
| </c2>
| <c1>
| <f>item</f>
| </c1>
|</a>""".stripMargin
def example17(toReplace: String) =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<a>
| <c1>
| <f>item</f>
| <g>item</g>
| </c1>
| <c1>
| <f>$toReplace</f>
| <g someKey="someValue">item</g>
| </c1>
|</a>""".stripMargin
def example18(toReplace: String) =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<a>
| <c1>
| <f></f>
| <f>
| <h>abc</h>
| </f>
| <f>
| <h>abc</h>
| <i>$toReplace</i>
| </f>
| <f>
| <h>abc</h>
| <i>$toReplace</i>
| <i>abc</i>
| </f>
| </c1>
|</a>""".stripMargin
val input =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<a>
| <f></f>
| <f>
| <h>abc</h>
| <i>toReplace</i>
| </f>
| <f>
| <h>abc</h>
| <i>toReplace</i>
| </f>
|</a>""".stripMargin
def example19(toReplace: String) =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<a>
| <c1>
| <f>
| <h>
| <i>item</i>
| <i>to be selected</i>
| </h>
| </f>
| <f>
| <h>
| <i>item</i>
| <i>$toReplace</i>
| </h>
| </f>
| </c1>
|</a>""".stripMargin
val minimalInput =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<a>
| <f>
| <g>some text</g>
| </f>
|</a>""".stripMargin
val example20 =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<a>
| <f>
| <new></new>
| <g>some text</g>
| </f>
|</a>""".stripMargin
val example21 =
s"""<?xml version="1.0" encoding="UTF-8"?>
|<a>
| <f>
| <g>some text</g>
| <new></new>
| </f>
|</a>""".stripMargin
}
|
note/xml-lens
|
optics/shared/src/test/scala/pl/msitko/xml/dsl/OpticsBuilderSpec.scala
|
Scala
|
mit
| 15,983
|
package org.greenrd.seraphim
import java.io.{File, PrintWriter}
import org.apache.commons.io.FileUtils
import org.scalatest.FunSpec
class BaseCommitTest extends FunSpec {
private val gitRepoName = "temp-git"
private def prepareGitRepo(): GitRepo = {
val gitRepoFolder = new File("target", gitRepoName)
if (gitRepoFolder.exists()) {
FileUtils.deleteDirectory(gitRepoFolder)
}
new GitRepo(gitRepoFolder)
}
private val gitRepo = prepareGitRepo()
private val fileName = "test.txt"
private val file = new File(gitRepo.path, fileName)
private def createFile(): Unit = file.createNewFile()
private def modifyFile(): Unit = {
val pw = new PrintWriter(file)
pw.write("hello")
pw.close()
}
private def addAndCommitToGit(gitMessage: String): Unit = {
gitRepo.gitCommand("add", fileName)
gitRepo.gitCommand("commit", "-m", gitMessage)
}
describe("BaseCommit") {
it("Finds a base-commit for a branch") {
prepareGitRepo()
gitRepo.gitCommand("init")
createFile()
addAndCommitToGit("new file added")
val baseCommit = gitRepo.gitCommand("rev-parse", "HEAD")
gitRepo.gitCommand("checkout", "-b", "branch")
modifyFile()
addAndCommitToGit("file changed")
assert(BaseCommit(gitRepo).baseCommitForCurrentBranch() === baseCommit)
}
}
}
|
gokyo/project-seraphim
|
src/test/scala/org/greenrd/seraphim/BaseCommitTest.scala
|
Scala
|
mit
| 1,358
|
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.abstractnn.AbstractModule
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.Table
import scala.reflect._
/**
* Region of interest pooling
* RoIPooling uses max pooling to convert the features inside any valid region of interest
* into a small feature map with a fixed spatial extent of pooledH × pooledW (e.g., 7 × 7).
* An RoI is a rectangular window into a conv feature map.
* Each RoI is defined by a four-tuple (x1, y1, x2, y2) that specifies its
* top-left corner (x1, y1) and its bottom-right corner (x2, y2).
* RoI max pooling works by dividing the h × w RoI window into a pooledH × pooledW grid of
* sub-windows of approximate size h/pooledH × w/pooledW, and then max-pooling the values
* in each sub-window into the corresponding output grid cell.
* Pooling is applied independently to each feature map channel.
* @param pooledW spatial extent in width
* @param pooledH spatial extent in height
* @param spatialScale spatial scale
* @tparam T Numeric type. Only support float/double now
*/
class RoiPooling[T: ClassTag](val pooledW: Int, val pooledH: Int, val spatialScale: T)
(implicit ev: TensorNumeric[T]) extends AbstractModule[Table, Tensor[T], T] {
private val argmax: Tensor[T] = Tensor[T]
private val gradInputTensor: Tensor[T] = Tensor[T]
gradInput.insert(gradInputTensor)
override def updateOutput(input: Table): Tensor[T] = {
require(input.length() == 2, "there must be two tensors in the table")
val data = input[Tensor[T]](1) // Input data to ROIPooling
val rois = input[Tensor[T]](2) // Input label to ROIPooling
require(rois.size().length > 1 && rois.size(2) == 5, "roi input shape should be (R, 5)")
output.resize(rois.size(1), data.size(2), pooledH, pooledW)
.fill(ev.fromType[Double](Double.MinValue))
argmax.resizeAs(output).fill(ev.fromType(-1))
val dataOffset = offset(0, 1, sizes = data.size())
val argmaxOffset = offset(0, 1, sizes = argmax.size())
val outputOffset = offset(0, 1, sizes = output.size())
val roisOffset = offset(1, sizes = rois.size())
var n = 0
val dataSize = data.size()
if (classTag[T] == classTag[Double]) {
val inputData = data.storage().array().asInstanceOf[Array[Double]]
val argmaxData = argmax.storage().array().asInstanceOf[Array[Double]]
val outputData = output.storage().array().asInstanceOf[Array[Double]]
val roisDouble = rois.asInstanceOf[Tensor[Double]]
while (n < rois.size(1)) {
poolOneRoiDouble(n, roisDouble(n + 1),
inputData, dataSize, dataOffset,
argmaxData, argmaxOffset,
outputData, outputOffset,
roisOffset, ev.toType[Double](spatialScale))
n += 1
}
} else if (classTag[T] == classTag[Float]) {
val inputData = data.storage().array().asInstanceOf[Array[Float]]
val argmaxData = argmax.storage().array().asInstanceOf[Array[Float]]
val outputData = output.storage().array().asInstanceOf[Array[Float]]
val roisFloat = rois.asInstanceOf[Tensor[Float]]
while (n < rois.size(1)) {
poolOneRoiFloat(n, roisFloat(n + 1),
inputData, dataSize, dataOffset,
argmaxData, argmaxOffset,
outputData, outputOffset,
roisOffset, ev.toType[Float](spatialScale))
n += 1
}
} else {
throw new IllegalArgumentException("currently only Double and Float types are supported")
}
output
}
private def scaleRoiFloat(roi: Tensor[Float], ind: Int, spatialScale: Float): Int = {
Math.round(roi.valueAt(ind) * spatialScale)
}
private def poolOneRoiFloat(n: Int, roi: Tensor[Float],
inputData: Array[Float], dataSize: Array[Int], dataOffset: Int,
argmaxData: Array[Float], argmaxOffset: Int,
outputData: Array[Float], outputOffset: Int, roisOffset: Int,
spatialScale: Float): Unit = {
val roiBatchInd = roi.valueAt(1).toInt
val roiStartW = scaleRoiFloat(roi, 2, spatialScale)
val roiStartH = scaleRoiFloat(roi, 3, spatialScale)
require(roiBatchInd >= 0 && dataSize(0) > roiBatchInd)
val binSizeH = Math.max(scaleRoiFloat(roi, 5, spatialScale) - roiStartH + 1, 1f) / pooledH
val binSizeW = Math.max(scaleRoiFloat(roi, 4, spatialScale) - roiStartW + 1, 1f) / pooledW
var batchDataIndex = offset(roiBatchInd, sizes = dataSize)
var c = 0
while (c < dataSize(1)) {
var ph = 0
val outputDataIndex = outputOffset * (n * dataSize(1) + c)
val argmaxIndex = argmaxOffset * (n * dataSize(1) + c)
while (ph < pooledH) {
var pw = 0
while (pw < pooledW) {
// Compute pooling region for this output unit:
// start (included) = floor(ph * roi_height / pooled_height_)
// end (excluded) = ceil((ph + 1) * roi_height / pooled_height_)
val hstart = Math.min(Math.max(Math.floor(ph * binSizeH).toInt + roiStartH, 0),
dataSize(2))
val hend = Math.min(Math.max(Math.ceil((ph + 1) * binSizeH).toInt + roiStartH, 0),
dataSize(2))
val wstart = Math.min(Math.max(Math.floor(pw * binSizeW).toInt + roiStartW, 0),
dataSize(3))
val wend = Math.min(Math.max(Math.ceil((pw + 1) * binSizeW).toInt + roiStartW, 0),
dataSize(3))
val poolIndex = ph * pooledW + pw
if ((hend <= hstart) || (wend <= wstart)) {
outputData(outputDataIndex + poolIndex) = 0
argmaxData(argmaxIndex + poolIndex) = -1
} else {
var h = hstart
while (h < hend) {
var w = wstart
val hi = h * dataSize(3)
while (w < wend) {
val index = hi + w
if (inputData(batchDataIndex + index) >
outputData(outputDataIndex + poolIndex)) {
outputData(outputDataIndex + poolIndex) = inputData(batchDataIndex + index)
argmaxData(argmaxIndex + poolIndex) = index
}
w += 1
}
h += 1
}
}
pw += 1
}
ph += 1
}
// Increment all data pointers by one channel
c += 1
batchDataIndex += dataOffset
}
}
private def scaleRoiDouble(roi: Tensor[Double], ind: Int, spatialScale: Double): Int = {
Math.round(roi.valueAt(ind) * spatialScale).toInt
}
private def poolOneRoiDouble(n: Int, roi: Tensor[Double],
inputData: Array[Double], dataSize: Array[Int], dataOffset: Int,
argmaxData: Array[Double], argmaxOffset: Int,
outputData: Array[Double], outputOffset: Int, roisOffset: Int,
spatialScale: Double): Unit = {
val roiBatchInd = roi.valueAt(1).toInt
val roiStartW = scaleRoiDouble(roi, 2, spatialScale)
val roiStartH = scaleRoiDouble(roi, 3, spatialScale)
require(roiBatchInd >= 0 && dataSize(0) > roiBatchInd)
val binSizeH = Math.max(scaleRoiDouble(roi, 5, spatialScale) - roiStartH + 1, 1f) / pooledH
val binSizeW = Math.max(scaleRoiDouble(roi, 4, spatialScale) - roiStartW + 1, 1f) / pooledW
var batchDataIndex = offset(roiBatchInd, sizes = dataSize)
var c = 0
while (c < dataSize(1)) {
var ph = 0
val outputDataIndex = outputOffset * (n * dataSize(1) + c)
val argmaxIndex = argmaxOffset * (n * dataSize(1) + c)
while (ph < pooledH) {
var pw = 0
while (pw < pooledW) {
// Compute pooling region for this output unit:
// start (included) = floor(ph * roi_height / pooled_height_)
// end (excluded) = ceil((ph + 1) * roi_height / pooled_height_)
val hstart = Math.min(Math.max(Math.floor(ph * binSizeH).toInt + roiStartH, 0),
dataSize(2))
val hend = Math.min(Math.max(Math.ceil((ph + 1) * binSizeH).toInt + roiStartH, 0),
dataSize(2))
val wstart = Math.min(Math.max(Math.floor(pw * binSizeW).toInt + roiStartW, 0),
dataSize(3))
val wend = Math.min(Math.max(Math.ceil((pw + 1) * binSizeW).toInt + roiStartW, 0),
dataSize(3))
val poolIndex = ph * pooledW + pw
if ((hend <= hstart) || (wend <= wstart)) {
outputData(outputDataIndex + poolIndex) = 0
argmaxData(argmaxIndex + poolIndex) = -1
} else {
var h = hstart
while (h < hend) {
var w = wstart
val hi = h * dataSize(3)
while (w < wend) {
val index = hi + w
if (inputData(batchDataIndex + index) >
outputData(outputDataIndex + poolIndex)) {
outputData(outputDataIndex + poolIndex) = inputData(batchDataIndex + index)
argmaxData(argmaxIndex + poolIndex) = index
}
w += 1
}
h += 1
}
}
pw += 1
}
ph += 1
}
// Increment all data pointers by one channel
c += 1
batchDataIndex += dataOffset
}
}
/**
* get the data offset given n, c, h, w
* @param n batch index
* @param c channel index
* @param h height index
* @param w width index
* @param sizes tensor size
* @return array offset
*/
private def offset(n: Int, c: Int = 0, h: Int = 0, w: Int = 0, sizes: Array[Int]): Int = {
require(sizes.length == 2 || sizes.length >= 4)
if (sizes.length == 2) ((n * sizes(1) + c) + h) + w
else ((n * sizes(1) + c) * sizes(2) + h) * sizes(3) + w
}
override def updateGradInput(input: Table, gradOutput: Tensor[T]): Table = {
val numRois = output.size(1)
if (classTag[T] == classTag[Double]) {
val data = input[Tensor[Double]](1)
val roisData = input[Tensor[Double]](2).storage().array()
val argmaxData = argmax.storage().array().asInstanceOf[Array[Double]]
val gradInputData = gradInputTensor.resizeAs(data).zero()
.storage().array().asInstanceOf[Array[Double]]
val gradOutputData = gradOutput.storage().array().asInstanceOf[Array[Double]]
roiPoolingBackwardDouble(roisData, numRois, data,
argmaxData, gradInputData, gradOutputData)
} else if (classTag[T] == classTag[Float]) {
val data = input[Tensor[Float]](1)
val roisData = input[Tensor[Float]](2).storage().array()
val argmaxData = argmax.storage().array().asInstanceOf[Array[Float]]
val gradInputData = gradInputTensor.resizeAs(data).zero()
.storage().array().asInstanceOf[Array[Float]]
val gradOutputData = gradOutput.storage().array().asInstanceOf[Array[Float]]
roiPoolingBackwardFloat(roisData, numRois, data,
argmaxData, gradInputData, gradOutputData)
} else {
throw new IllegalArgumentException("currently only Double and Float types are supported")
}
gradInput
}
private def roiPoolingBackwardFloat(roisData: Array[Float], numRois: Int, data: Tensor[Float],
argmaxData: Array[Float], gradInputData: Array[Float], gradOutputData: Array[Float]): Unit = {
var roiN = 0
var c = 0
var ph = 0
var pw = 0
// Accumulate gradient over all ROIs
while (roiN < numRois) {
val roiBatchInd = roisData(roiN * 5).toInt
// Accumulate gradients over each bin in this ROI
c = 0
while (c < data.size(2)) {
ph = 0
while (ph < pooledH) {
pw = 0
while (pw < pooledW) {
val outputOffset = ((roiN * data.size(2) + c) * pooledH + ph) * pooledW + pw
val argmaxIndex = argmaxData(outputOffset)
if (argmaxIndex >= 0) {
val inputOffset = (roiBatchInd * data.size(2)
+ c) * data.size(3) * data.size(4) + argmaxIndex.toInt
gradInputData(inputOffset) = gradInputData(inputOffset) + gradOutputData(outputOffset)
}
pw += 1
}
ph += 1
}
c += 1
}
roiN += 1
}
}
private def roiPoolingBackwardDouble(roisData: Array[Double], numRois: Int, data: Tensor[Double],
argmaxData: Array[Double], gradInputData: Array[Double],
gradOutputData: Array[Double]): Unit = {
var roiN = 0
var c = 0
var ph = 0
var pw = 0
// Accumulate gradient over all ROIs
while (roiN < numRois) {
val roiBatchInd = roisData(roiN * 5).toInt
// Accumulate gradients over each bin in this ROI
c = 0
while (c < data.size(2)) {
ph = 0
while (ph < pooledH) {
pw = 0
while (pw < pooledW) {
val outputOffset = ((roiN * data.size(2) + c) * pooledH + ph) * pooledW + pw
val argmaxIndex = argmaxData(outputOffset)
if (argmaxIndex >= 0) {
val inputOffset = (roiBatchInd * data.size(2)
+ c) * data.size(3) * data.size(4) + argmaxIndex.toInt
gradInputData(inputOffset) = gradInputData(inputOffset) + gradOutputData(outputOffset)
}
pw += 1
}
ph += 1
}
c += 1
}
roiN += 1
}
}
override def toString: String = "nn.RoiPooling"
override def clearState(): this.type = {
super.clearState()
argmax.set()
gradInputTensor.set()
this
}
}
object RoiPooling {
def apply[@specialized(Float, Double) T: ClassTag](
pooled_w: Int, pooled_h: Int, spatial_scale: T)(implicit ev: TensorNumeric[T]): RoiPooling[T] =
new RoiPooling[T](pooled_w, pooled_h, spatial_scale)
}
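/* A minimal usage sketch (hypothetical shapes and values, not part of the
 * original file): pool one RoI covering an 8x8 single-channel feature map
 * into a fixed 2x2 grid. The RoI row layout is (batchIndex, x1, y1, x2, y2),
 * as consumed by poolOneRoiFloat above.
 */
object RoiPoolingExample {
  import com.intel.analytics.bigdl.utils.T

  def main(args: Array[String]): Unit = {
    val pooling = RoiPooling[Float](pooled_w = 2, pooled_h = 2, spatial_scale = 1.0f)
    val features = Tensor[Float](1, 1, 8, 8).rand() // (batch, channel, height, width)
    val rois = Tensor[Float](1, 5) // one RoI; x1/y1 stay 0
    rois.setValue(1, 1, 0f) // batch index of the RoI
    rois.setValue(1, 4, 7f) // x2
    rois.setValue(1, 5, 7f) // y2, so the RoI spans the whole 8x8 map
    val out = pooling.forward(T(features, rois)) // output shape: (1, 1, 2, 2)
    println(out)
  }
}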
|
psyyz10/BigDL
|
spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/RoiPooling.scala
|
Scala
|
apache-2.0
| 14,275
|
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import scala.concurrent.duration._
import scala.util.Random
class RealGatling extends Simulation {
val feeder = Iterator.continually(
Map(
"userId" -> Random.nextInt(100).toString,
"blogId" -> Random.nextInt(100).toString,
"uuid" -> common.CommonUtil.uuid
)
)
val scn = scenario("RealGatling")
.feed(feeder)
.exec(
http("login")
.post("/login")
.basicAuth(s"john$${userId}", s"john$${userId}")
.check(header("X-Token").exists.saveAs("token"))
)
.exec(Command.queryBlogs)
.doIfOrElse(session => session("blogCount").as[Int] >= 200) {
repeat(50, "commentId") {
exec { session =>
val ids = session("discussionIds").as[Seq[String]]
session.set("discussionUuid",
ids(scala.util.Random.nextInt(ids.length)))
}
.exec(Command.queryComments)
.repeat(5, "replyId") {
doIfOrElse(session => session.contains("commentIds")) {
exec(Command.replyComment)
} {
exec(Command.addComment)
}
}
.pause(1)
}
} {
exec(Command.createBlog)
}
val httpConf = http
.baseURL("http://localhost:8080")
.acceptHeader("text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
.doNotTrackHeader("1")
.acceptLanguageHeader("en-US,en;q=0.5")
.acceptEncodingHeader("gzip, deflate")
.userAgentHeader("Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0")
setUp(
scn.inject(rampUsersPerSec(1) to(15) during(50 seconds) randomized)
).protocols(httpConf)
object Command {
val queryBlogs = http("queryBlogs")
.get("/blog")
.header("Authorization", s"Bearer $${token}")
.header(HttpHeaderNames.Accept, HttpHeaderValues.ApplicationJson)
.check(status.is(session => 200))
.check(jsonPath("$.blogs[*]._id").count.optional.saveAs("blogCount"))
.check(jsonPath("$.blogs[*].discussions[*].id").findAll.optional
.saveAs("discussionIds"))
val queryComments = http("queryComments")
.get(s"/discussion/$${discussionIds.random()}")
.header("Authorization", s"Bearer $${token}")
.header(HttpHeaderNames.Accept, HttpHeaderValues.ApplicationJson)
.check(status.is(session => 200))
.check(jsonPath("$._id").exists.saveAs("discussionUuid"))
.check(jsonPath("$..commentId").findAll.optional
.saveAs("commentIds"))
val createBlog = http("createBlog")
.post(s"/commands/blog/$${uuid}")
.header("Authorization", s"Bearer $${token}")
.body(StringBody(s"""{"_t":"CreateBlog", "title": "Blog $${userId} $${blogId}", "content": "My $${userId} $${blogId}. blog", "loggedIn": ""}"""))
.check(status.is(session => 200))
val addComment = http("addComment")
.post(s"/commands/discussion/$${discussionUuid}")
.header("Authorization", s"Bearer $${token}")
.body(StringBody(s"""{"_t":"AddComment", "id": "$${uuid}", "content": "$${commentId}. megjegyzés", "loggedIn": ""}"""))
.check(status.is(session => 200))
val replyComment = http("replyComment")
.post(s"/commands/discussion/$${discussionUuid}")
.header("Authorization", s"Bearer $${token}")
.body(StringBody(s"""{"_t":"ReplyComment", "id": "$${uuid}", "parentId": "$${commentIds.random()}", "content": "$${replyId}. válasz", "loggedIn": ""}"""))
.check(status.is(session => 200))
}
}
|
enpassant/rapids
|
src/it/scala/RealGatling.scala
|
Scala
|
apache-2.0
| 3,522
|
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author Usman Nisar, Aravind Kalimurthy, John Miller
* @version 1.2
* @date Tue Dec 20 15:10:55 EST 2016
* @see LICENSE (MIT style license file).
*
* @see www2012.wwwconference.org/proceedings/proceedings/p949.pdf
*
* Dual Simulation CAR Using Mutable Sets
*/
package scalation.graphalytics.mutable
import scala.collection.mutable.{Set => SET}
import scala.reflect.ClassTag
import scalation.graphalytics.mutable.{ExampleGraphS => EX_GRAPH}
import scalation.util.MultiSet
import LabelFunctions._
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The 'DualSimCAR' class provides an implementation for Dual Graph Simulation.
* @param g the data graph G(V, E, l)
* @param q the query graph Q(U, D, k)
*/
class DualSimCAR [TLabel: ClassTag] (g: Graph [TLabel], q: Graph [TLabel])
extends GraphMatcher (g, q)
{
/** The DEBUG flag
*/
private val DEBUG = false
/** The Child labels for the query graph
*/
private val cLabel = Array.ofDim [MultiSet [TLabel]] (q.size)
for (u <- cLabel.indices) cLabel(u) = qChildLabels (q, u)
/** The Parent labels for the query graph
*/
private val pLabel = Array.ofDim [MultiSet [TLabel]] (q.size)
for (u <- pLabel.indices) pLabel(u) = qParentLabels (q, u)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Apply the Dual Graph Simulation pattern matching algorithm to find the mappings
* from the query graph 'q' to the data graph 'g'. These are represented by a
* multi-valued function 'phi' that maps each query graph vertex 'u' to a
* set of data graph vertices '{v}'.
*/
def mappings (): Array [SET [Int]] = nisarDualSimCAR (feasibleMates ())
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Given the mappings 'phi' produced by the 'feasibleMates' method,
* eliminate mappings 'u -> v' when (1) v's children fail to match u's
* or (2) v's parents fail to match u's.
* @param phi array of mappings from a query vertex u to { graph vertices v }
*/
def nisarDualSimCAR (phi: Array [SET [Int]]): Array [SET [Int]] =
{
var alter = true
while (alter) { // check for matching children and parents
alter = false
// loop over query vertices u, data vertices v in phi(u), and u's children u_c
for (u <- qRange; v <- phi(u)) {
val chu = cLabel(u)
val chv = gChildLabels(g, v, u, q.ch(u), phi)
val res = ! (chu ⊆ chv)
if (DEBUG) println("u : " + u + " v : " + v + " chu : " + chu + " chv : " + chv + " res : " + res)
if (res) {
phi(u) -= v // remove v due to lack of child match
alter = true
} // if
} //for
// loop over query vertices u, data vertices v in phi(u), and u's parents u_p
for (u <- qRange; v <- phi(u)) {
val pau = pLabel(u)
val pav = gParentLabels(g, v, u, q.pa(u), phi)
val res = ! (pau ⊆ pav)
if (DEBUG) println("u : " + u + " v : " + v + " pau : " + pau + " pav : " + pav + " res : " + res)
if (res) {
phi(u) -= v // remove v due to lack of parent match
alter = true
} // if
} //for
} // while
phi
} // nisarDualSimCAR
} // DualSimCAR class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `DualSimCARTest` object is used to test the `DualSimCAR` class.
* > run-main scalation.graphalytics.mutable.DualSimCARTest
*/
object DualSimCARTest extends App
{
val g = EX_GRAPH.g1
val q = EX_GRAPH.q1
println (s"g.checkEdges = ${g.checkEdges}")
g.printG ()
println (s"q.checkEdges = ${q.checkEdges}")
q.printG ()
(new DualSimCAR (g, q)).test ("DualSimCAR") // Dual Graph Simulation Pattern Matcher
} // DualSimCARTest object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `DualSimCARTest2` object is used to test the `DualSimCAR` class.
* > run-main scalation.graphalytics.mutable.DualSimCARTest2
*/
object DualSimCARTest2 extends App
{
val g = EX_GRAPH.g2
val q = EX_GRAPH.q2
println (s"g.checkEdges = ${g.checkEdges}")
g.printG ()
println (s"q.checkEdges = ${q.checkEdges}")
q.printG ()
(new DualSimCAR (g, q)).test ("DualSimCAR") // Dual Graph Simulation Pattern Matcher
} // DualSimCARTest2 object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `DualSimCARTest3` object is used to test the `DualSimCAR` class.
* > run-main scalation.graphalytics.mutable.DualSimCARTest3
*
object DualSimCARTest3 extends App
{
val g = EX_GRAPH.g3
val q = EX_GRAPH.q3
println (s"g.checkEdges = ${g.checkEdges}")
g.printG ()
println (s"q.checkEdges = ${q.checkEdges}")
q.printG ()
(new DualSimCAR (g, q)).test ("DualSimCAR") // Dual Graph Simulation Pattern Matcher
} // DualSimCARTest3 object
*/
|
NBKlepp/fda
|
scalation_1.2/src/main/scala/scalation/graphalytics/mutable/DualSimCAR.scala
|
Scala
|
mit
| 5,409
|
/***
* Copyright 2014 Rackspace US, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rackspace.com.papi.components.checker
import javax.servlet.FilterChain
import com.rackspace.com.papi.components.checker.servlet._
import com.rackspace.com.papi.components.checker.step.results.Result
import scala.language.implicitConversions
/**
* Exception thrown by the assert result handler,
* if a request fails to validate
*/
class ResultFailedException(val msg : String, val req : CheckerServletRequest,
val resp : CheckerServletResponse, val chain : FilterChain, val result : Result)
extends Exception(msg){}
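// Illustrative sketch (not part of the original file): a result handler might raise
// this exception when a request fails to validate; `result.valid` is an assumed
// accessor, not verified against the checker API.
//
// if (!result.valid) {
//   throw new ResultFailedException(s"validation failed: $result", req, resp, chain, result)
// }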
|
tylerroyal/api-checker
|
core/src/test/scala/com/rackspace/com/papi/components/checker/ResultFailedException.scala
|
Scala
|
apache-2.0
| 1,189
|
/* - Coeus web framework -------------------------
*
* Licensed under the Apache License, Version 2.0.
*
* Author: Spiros Tzavellas
*/
package com.tzavellas.coeus.i18n.locale
import java.util.Locale
import javax.servlet.http.{HttpServletRequest, HttpServletResponse}
/**
* Provides a way to resolve the user's locale for a given request.
*/
trait LocaleResolver {
/**
* Resolve the user's locale using the specified request.
*
* @param request the request to use for resolving the Locale
* @return a valid <code>Locale</code> object (never <code>null</code>)
*/
def resolve(request: HttpServletRequest): Locale
/**
* Set the user's locale to the specified <code>Locale</code> object.
*
* @param request the current Servlet request
* @param response the current Servlet response
* @param locale the user's locale
*/
def setLocale(request: HttpServletRequest, response: HttpServletResponse, locale: Locale)
}
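// A minimal sketch (not part of the original file; the class name is hypothetical):
// a resolver that delegates to the Accept-Language header via the standard Servlet
// API and rejects setLocale, since a request header cannot be changed server-side.
class AcceptHeaderLocaleResolver extends LocaleResolver {

  def resolve(request: HttpServletRequest): Locale = request.getLocale

  def setLocale(request: HttpServletRequest, response: HttpServletResponse, locale: Locale): Unit =
    throw new UnsupportedOperationException("Cannot change the Accept-Language header")
}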
|
sptz45/coeus
|
src/main/scala/com/tzavellas/coeus/i18n/locale/LocaleResolver.scala
|
Scala
|
apache-2.0
| 964
|
def fibonacci(first: Int, second: Int): Stream[Int] = first #:: fibonacci(second, first + second)
println(fibonacci(1, 1).takeWhile(_ < 4000000).filter(_%2 == 0).sum)
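// Note: Stream is deprecated since Scala 2.13; an equivalent sketch using LazyList
// (same #:: operator, same laziness) would be:
//
// def fibonacci(first: Int, second: Int): LazyList[Int] = first #:: fibonacci(second, first + second)
// println(fibonacci(1, 1).takeWhile(_ < 4000000).filter(_ % 2 == 0).sum)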
|
Furisuke/ProjectEuler
|
scala/problem0002.scala
|
Scala
|
mit
| 168
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy
private[spark] class ApplicationDescription(
val name: String,
val maxCores: Option[Int],
val memoryPerSlave: Int,
val command: Command,
var appUiUrl: String,
val eventLogDir: Option[String] = None,
// short name of compression codec used when writing event logs, if any (e.g. lzf)
val eventLogCodec: Option[String] = None)
extends Serializable {
val user = System.getProperty("user.name", "<unknown>")
def copy(
name: String = name,
maxCores: Option[Int] = maxCores,
memoryPerSlave: Int = memoryPerSlave,
command: Command = command,
appUiUrl: String = appUiUrl,
eventLogDir: Option[String] = eventLogDir,
eventLogCodec: Option[String] = eventLogCodec): ApplicationDescription =
new ApplicationDescription(
name, maxCores, memoryPerSlave, command, appUiUrl, eventLogDir, eventLogCodec)
override def toString: String = "ApplicationDescription(" + name + ")"
}
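// Usage sketch (illustrative, not from the original file): copy() tweaks one field
// while keeping the rest, e.g. desc.copy(maxCores = Some(4)) yields an otherwise
// identical description; toString prints just "ApplicationDescription(<name>)".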
|
hengyicai/OnlineAggregationUCAS
|
core/src/main/scala/org/apache/spark/deploy/ApplicationDescription.scala
|
Scala
|
apache-2.0
| 1,789
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.knockdata.spark.highcharts.demo
import org.junit.Rule
import java.io.PrintWriter
import com.knockdata.spark.highcharts.model._
import com.knockdata.spark.highcharts._
import org.apache.spark.sql.functions._
import org.junit.Test
import org.junit.rules.TestName
// # Line Chart Demo
//
// Based on [Line Chart Demo](http://www.highcharts.com/demo/line-basic)
//
class DemoLineChart {
val sqlContext = SparkEnv.sqlContext
import sqlContext.implicits._
val bank = DataSet.dfBank
@Test
def demoBasicLine(): Unit = {
val Tokyo = Seq(7.0, 6.9, 9.5, 14.5, 18.2, 21.5, 25.2, 26.5, 23.3, 18.3, 13.9, 9.6)
.map(("Tokyo", _))
val NewYork = Seq(-0.2, 0.8, 5.7, 11.3, 17.0, 22.0, 24.8, 24.1, 20.1, 14.1, 8.6, 2.5)
.map(("New York", _))
val Berlin = Seq(-0.9, 0.6, 3.5, 8.4, 13.5, 17.0, 18.6, 17.9, 14.3, 9.0, 3.9, 1.0)
.map(("Berlin", _))
val London = Seq(3.9, 4.2, 5.7, 8.5, 11.9, 15.2, 17.0, 16.6, 14.2, 10.3, 6.6, 4.8)
.map(("London", _))
val dataFrame = (Tokyo ++ NewYork ++ Berlin ++ London).toDF("city", "temperature")
dataFrame.show()
val chart = highcharts(dataFrame
.seriesCol("city")
.series("y" -> col("temperature")))
chart.plot()
new PrintWriter(s"target/demoBasicLine.json") { write(chart.replaced); close }
}
// ## Line Chart Basic
//
// Based on [Highcharts Demo Line Basic](http://www.highcharts.com/demo/line-basic)
//
// a line chart with
//
// * x axis data from column $"age"
// * y axis aggregated the average balance
// * data point order by age
//
@Test
def demoLineBasic(): Unit = {
val chart = highcharts(bank
.series("x" -> "age", "y" -> avg(col("balance")))
.orderBy(col("age")))
chart.plot()
new PrintWriter(s"target/demoLineBasic.json") { write(chart.replaced); close }
}
// ## Line Chart Basic, Explicitly Ascending Order
//
// Based on [Highcharts Demo Line Basic](http://www.highcharts.com/demo/line-basic)
//
// a line chart with
//
// * x axis data from column $"age"
// * y axis aggregated the average balance
// * data points ordered by age, with ascending order specified EXPLICITLY
//
@Test
def demoLineBasicAsc(): Unit = {
val chart = highcharts(bank
.series("x" -> "age", "y" -> avg(col("balance")))
.orderBy(col("age").asc))
chart.plot()
new PrintWriter("target/demoLineBasicAsc.json") { write(chart.replaced); close }
}
// ## Line Chart Basic, Descending Order
//
// Based on [Highcharts Demo Line Basic](http://www.highcharts.com/demo/line-basic)
//
// a line chart with
//
// * x axis data from column $"age"
// * y axis aggregated the average balance
// * data point order by age, descending order
//
@Test
def demoLineBasicDesc(): Unit = {
val chart = highcharts(bank
.series("name" -> "age", "y" -> avg(col("balance")))
.orderBy(col("age").desc))
.xAxis(new XAxis("age").typ("category"))
chart.plot()
new PrintWriter("target/demoLineBasicDesc.json") { write(chart.replaced); close }
}
// ## Line Chart Multiple Series
//
// Based on [Highcharts Demo Line Basic](http://www.highcharts.com/demo/line-basic)
//
// a line chart with
//
// * create multiple series according to $"marital" column
// * x axis data from column $"age"
// * y axis aggregated the average balance
// * data point order by age, descending order
//
@Test
def demoLineBasicMultipleSeries(): Unit = {
val chart = highcharts(bank.seriesCol("marital")
.series("name" -> "age", "y" -> avg(col("balance")))
.orderBy(col("age")))
chart.plot()
new PrintWriter("target/demoLineBasicMultipleSeries.json") { write(chart.replaced); close }
}
// ## Line Chart Multiple Series, With Options
//
// Based on [Highcharts Demo Line Basic](http://www.highcharts.com/demo/line-basic)
//
// a line chart with
//
// * create multiple series according to $"marital" column
// * x axis data from column $"age"
// * y axis aggregated the average balance
// * data point order by age
//
@Test
def demoLineBasicMultipleSeriesWithOption(): Unit = {
val chart = highcharts(bank.seriesCol("marital")
.series("name" -> "age",
"y" -> avg(col("balance")))
.orderBy(col("age")))
.title(new Title("Marital Job Average Balance").x(-20))
.subtitle(new Subtitle("Source: Zeppelin Tutorial").x(-20))
.xAxis(new XAxis("Age").typ("category"))
.yAxis(new YAxis("Balance(¥)").plotLines(
Map("value" -> 0, "width" -> 1, "color" -> "#808080")))
.tooltip(new Tooltip().valueSuffix("¥"))
.legend(new Legend().layout("vertical").align("right")
.verticalAlign("middle").borderWidth(0))
.plotOptions(PlotOptions.series.threshold(3000))
chart.plot()
new PrintWriter("target/demoLineBasicMultipleSeriesWithOption.json") { write(chart.replaced); close }
}
// ## Line Chart, With Data Labels
//
// Based on [Highchart Line Charts Demo With data labels](http://www.highcharts.com/demo/line-labels)
//
// a line chart with
//
// * name data(xAxis) from column $"name"
// * y axis aggregated the average balance
// * data point order by $"job"
//
@Test
def demoLineWithDataLabels(): Unit = {
val chart = highcharts(bank.series("name" -> "job", "y" -> avg(col("balance")))
.orderBy(col("job")))
.plotOptions(PlotOptions.line
.dataLabels("enabled" -> true, "format" -> "{point.y:.2f}"))
.tooltip(new Tooltip().valueDecimals(2))
chart.plot()
new PrintWriter("target/demoLineWithDataLabels.json") { write(chart.replaced); close }
}
// ## Line Chart Zoomable
//
// Based on [Highchart Line Charts Zoomable](http://www.highcharts.com/demo/line-time-series)
//
// a line chart with
//
// * name data(xAxis) from column $"age"
// * y axis aggregated the average balance
// * data point order by $"job"
//
//
// NOTE:
// linearGradient is not described in [Highcharts API](http://api.highcharts.com/highcharts#plotOptions.area.fillColor)
//
@Test
def demoLineZoomable(): Unit = {
val options = PlotOptions.area
.fillColorLinearGradient("x1" -> 0, "y1" -> 0, "x2" -> 0, "y2" -> 1)
.fillColorStops((0, "Highcharts.getOptions().colors[0]"),
(1, "Highcharts.Color(Highcharts.getOptions().colors[0]).setOpacity(0).get('rgba')"))
val chart = highcharts(bank.series("name" -> "age", "y" -> avg(col("balance")))
.orderBy(col("age")))
.chart(Chart.area.zoomType("x"))
.plotOptions(options)
chart.plot()
new PrintWriter("target/demoLineZoomable.json") { write(chart.replaced); close }
}
// ## Spline Inverted
//
// Based on [Highchart Spline Line Inverted](http://www.highcharts.com/demo/spline-inverted)
//
// a line chart with
//
// * name data(xAxis) from column $"age"
// * y axis aggregated the average balance
// * data point order by $"job"
//
@Test
def demoSplineInverted(): Unit = {
val chart = highcharts(bank.series("x" -> "age", "y" -> avg(col("balance")))
.orderBy(col("age")))
.chart(Chart.spline.inverted(true))
chart.plot()
new PrintWriter("target/demoSplineInverted.json") { write(chart.replaced); close }
}
// @Test
// def demoSplineWithSymbols(): Unit = {
// // TODO
// }
// ## Spline With Plot Bands
//
// Based on [Spline Plot Bands](http://www.highcharts.com/demo/spline-plot-bands)
//
// a line chart with
//
// * name data(xAxis) from column $"age"
// * y axis aggregated the average balance
// * data point order by $"job"
//
@Test
def demoSplineWithPlotBands(): Unit = {
val yAxis = new YAxis("Average Balance").plotBands(
Map("from" -> 0, "to" -> 1000, "color" -> "rgba(68, 170, 213, 0.1)",
"label" -> Map(
"text" -> "Low",
"style" -> Map(
"color" -> "#606060"
)
)
),
Map("from" -> 5000, "to" -> 10000, "color" -> "rgba(68, 170, 213, 0.1)",
"label" -> Map(
"text" -> "High",
"style" -> Map(
"color" -> "#606060"
)
)
)
)
val chart = highcharts(bank.series("x" -> "age", "y" -> avg(col("balance")))
.orderBy(col("age")))
.yAxis(yAxis)
chart.plot()
new PrintWriter("target/demoSplineWithPlotBands.json") { write(chart.replaced); close }
}
// ## Time Data With Irregular Intervals
//
// Based on [Time Data With Irregular Intervals](http://www.highcharts.com/demo/spline-irregular-time)
//
// a line chart with
//
// * multiple series using column $"year"
// * x axis from $"time"
// * y axis using $"depth"
//
@Test
def demoTimeDataWithIrregularIntervals(): Unit = {
val chart = highcharts(DataSet.dfSnowDepth.seriesCol("year")
.series("x" -> "time", "y" -> "depth"))
.chart(Chart.spline)
.title(new Title("Snow depth at Vikjafjellet, Norway"))
.subtitle(new Subtitle("Irregular time data in Highcharts JS"))
.xAxis(new XAxis("Date").typ("datetime").dateTimeLabelFormats(
"month" -> "%e. %b", "year" -> "%b"))
.yAxis(new YAxis("Snow depth (m)").min(0))
.tooltip(new Tooltip().headerFormat("<b>{series.name}</b><br>").pointFormat(
"{point.x:%e. %b}: {point.y:.2f} m"))
.plotOptions(PlotOptions.spline.marker("enabled" -> true))
chart.plot()
new PrintWriter("target/demoTimeDataWithIrregularIntervals.json") { write(chart.replaced); close }
}
}
|
knockdata/spark-highcharts
|
src/test/scala/com/knockdata/spark/highcharts/demo/DemoLineChart.scala
|
Scala
|
apache-2.0
| 10,372
|
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn
import java.util
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.dllib.nn.Graph.ModuleNode
import com.intel.analytics.bigdl.dllib.nn.abstractnn.{AbstractModule, Activity}
import com.intel.analytics.bigdl.dllib.nn.tf._
import com.intel.analytics.bigdl.serialization.Bigdl.{AttrValue, BigDLModule}
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils._
import com.intel.analytics.bigdl.dllib.utils.serializer._
import com.intel.analytics.bigdl.dllib.utils.serializer.converters.DataConverter
import com.intel.analytics.bigdl.dllib.utils.tf.Tensorflow
import com.intel.analytics.bigdl.dllib.visualization.tensorboard.{FileWriter => TFFileWriter}
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
import scala.reflect.runtime.universe
import scala.language.existentials
import scala.collection.JavaConverters._
import org.tensorflow.framework.GraphDef
/**
* A graph container. The modules in the container are connected as a directed graph. Each module
* can output one tensor or multiple tensors (as a table). The edges between modules in the graph
* define how these tensors are passed. For example, if a module outputs two tensors, you can
* pass both of them together to its following module, or pass only one of them. If a tensor in
* the module output is connected to multiple modules, the gradients from these connections are
* accumulated in back propagation. If multiple edges point to one module, the tensors from these
* edges are stacked as a table and then passed to that module. In back propagation, the
* gradients are split according to how the input tensors were stacked.
*
* The graph container has multiple inputs and multiple outputs. The order of the input tensors
* must match the order of the input nodes used to construct the graph container. In back
* propagation, the order of the gradient tensors must match the order of the output nodes.
*
* If there is one output, the module output is a tensor. If there are multiple outputs, the
* module output is a table, which is effectively a sequence of tensors whose order matches the
* order of the output modules.
*
* All inputs should be able to reach the outputs through some path in the graph. It is
* allowed that some successors of the input nodes are not connected to the outputs; such nodes
* are excluded from the computation.
*
* @param inputs input nodes
* @param outputs output nodes
* @param variables an Array of tensor containing all the weights and biases of this graph,
* used when different nodes of this graph may share the same weight or bias.
* @tparam T Numeric type. Only support float/double now
*/
@SerialVersionUID(- 2896121321564992779L)
abstract class Graph[T: ClassTag](
val inputs : Seq[ModuleNode[T]],
private[bigdl] val outputs : Seq[ModuleNode[T]],
private[bigdl] val variables: Option[(Array[Tensor[T]], Array[Tensor[T]])] = None
)(implicit ev: TensorNumeric[T])
extends Container[Activity, Activity, T] with MklInt8Convertible {
/**
* For a multi-tensor output module, some output tensors may not contribute to the final forward
* result, so in back propagation the gradients at those positions are missing. We populate
* them with zero tensors.
*
* @param output
* @param gradOutput
*/
protected def addZeroTensorToMissingGradOutput(output: Table, gradOutput: Table): Unit = {
var i = 0
while (i < output.length()) {
if (!gradOutput.contains(i + 1)) {
val tensor = output[Tensor[T]](i + 1)
val zero = Tensor(tensor.size())
gradOutput(i + 1) = zero
}
i = i + 1
}
}
private def calcSumTimesOfAllNodes(
timesOfAllNodes: Array[(AbstractModule[_ <: Activity, _ <: Activity, T], Long, Long)])
: (Long, Long) = {
var sumForward = 0L
var sumBackward = 0L
timesOfAllNodes.foreach(x => {
sumForward += x._2
sumBackward += x._3
})
(sumForward, sumBackward)
}
override def getTimes():
Array[(AbstractModule[_ <: Activity, _ <: Activity, T], Long, Long)] = {
val timesOfAllNodes = this.modules.flatMap(_.getTimes()).toArray
val (sumForward, sumBackward) = calcSumTimesOfAllNodes(timesOfAllNodes)
timesOfAllNodes ++ Array((this, this.forwardTime - sumForward, this.backwardTime - sumBackward))
}
override def parameters(): (Array[Tensor[T]], Array[Tensor[T]]) = {
variables match {
case None => super.parameters()
case Some((weights, gradients)) => (weights, gradients)
}
}
// todo: expand the graph
override def toGraph(startNodes: ModuleNode[T]*): Graph[T] = this
/**
* Return the node with the given name. If the given name doesn't match any node,
* a NoSuchElementException is thrown.
* @param name
* @return
*/
def node(name: String): ModuleNode[T] = {
val matchNodes = forwardNodes.filter(_.element.getName() == name).toArray
if (matchNodes.length == 0) {
throw new NoSuchElementException(s"Can not find node with name $name")
} else {
return matchNodes.head
}
}
// Add a dummy output node to obtain a single-ended forward graph, so that nodes
// the outputs do not depend on are excluded
protected val dummyOutput = new ModuleNode[T](new Identity[T]())
outputs.foreach(_ -> dummyOutput)
protected val forwardGraph = dummyOutput.graph(reverse = true)
protected val forwardNodes = forwardGraph.DFS.toArray
populateModules()
// Check all inputs of the graph should be passed in
checkRoots
protected def populateModules(): Unit
// Check if the graph is correct
private def checkRoots: Unit = {
def duplicatedNames(names: Seq[String]): mutable.Set[String] = {
  // sortWith returns a new sequence; it must be kept, otherwise the adjacency
  // check below would run on the original, unsorted input
  val sorted = names.sortWith(_ < _)
  val buffer = new mutable.HashSet[String]()
  var i = 1
  while (i < sorted.length) {
    if (sorted(i) == sorted(i - 1)) buffer.add(sorted(i))
    i += 1
  }
  buffer
}
require(forwardNodes.map(_.element.getName()).distinct.length == forwardNodes.length,
s"the name of node in the graph should be unique, but find duplicated name " +
s"${duplicatedNames(forwardNodes.map(_.element.getName())).mkString(", ")}")
val roots = forwardNodes.filter(_.prevNodes.size == 0)
.filterNot(_.element.isInstanceOf[WithoutInput])
.filterNot(_.element.isInstanceOf[ControlDependency[_]])
val realInputs = inputs.filterNot(_.element.isInstanceOf[WithoutInput])
require(roots.size == realInputs.length, s"There're ${realInputs.length} inputs, " +
s"but graph has ${roots.size} roots")
realInputs.foreach(n =>
require(roots.contains(n), "inputs and graph roots are not match")
)
}
protected var dummyOutputGrad: ModuleNode[T] = _
protected var backwardGraph: DirectedGraph[AbstractModule[Activity, Activity, T]] = _
protected var backwardNodes: Array[Node[AbstractModule[Activity, Activity, T]]] = _
// Whether the graph generates gradInput for each input
private var isGradInputAvailable: Array[Boolean] = _
/**
* Generate backward graph and apply the stopGrad
*/
private[bigdl] def buildBackwardGraph(): this.type = {
// Clone the forward graph and reverse the edge
val gradGraph = forwardGraph.cloneGraph(reverseEdge = true)
dummyOutputGrad = gradGraph.source
gradGraph.DFS.filter(x => isStopGradient(x.element)).foreach(removeStopNodes(_))
backwardNodes = gradGraph.DFS
.filterNot(_.eq(dummyOutputGrad))
.filterNot(_.element.isInstanceOf[ControlDependency[_]]).toArray
val inputNames = inputs.map(_.element.getName()).toSet
val dummyBackwardEnd = Identity().inputs()
val backwardTargets = backwardNodes
.filter(n => (n.element.parameters() != null && n.element.parameters()._1.length != 0)
|| inputNames.contains(n.element.getName()))
backwardTargets.foreach(_ -> dummyBackwardEnd)
backwardGraph = dummyBackwardEnd.graph(true)
// Check if gradInput is empty for each input
isGradInputAvailable = inputs.map(_ => false).toArray
backwardGraph.DFS.foreach(curNode => {
inputs.zipWithIndex.map { case (n, i) =>
if (curNode.element.getName() == n.element.getName() && !isStopGradient(n.element)) {
isGradInputAvailable(i) = true
}
}
})
clearState()
this
}
private var stopGradientLayers: util.HashSet[String] = _
def getStopGradientLayers(): util.HashSet[String] = stopGradientLayers
/**
* Whether to stop propagating gradInput back
* @return
*/
protected def isStopGradient(module: AbstractModule[_ <: Activity, _ <: Activity, T]): Boolean = {
null != stopGradientLayers && stopGradientLayers.contains(module.getName())
}
/**
* Stop computing the input gradients of the layers that match the given ```names```.
* Their input gradients are not computed, and they will not contribute to the input
* gradient computation of layers that depend on them.
* @param names an array of layer names
* @return current graph model
*/
def stopGradient(names: Array[String]): this.type = {
if (stopGradientLayers == null) stopGradientLayers = new util.HashSet[String]()
names.foreach(name => {
val layer = this (name)
require(layer.isDefined, s"cannot find layer match ${name}")
stopGradientLayers.add(layer.get.getName())
})
buildBackwardGraph()
this
}
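// Usage sketch (illustrative; the layer name is hypothetical):
// model.stopGradient(Array("conv1")) rebuilds the backward graph so that no
// gradient propagates back through the layer named "conv1".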
/**
* Set the layers that match the given ```names``` to be "frozen",
* i.e. their parameters (weight/bias, if any) are not updated during training
* @param names an array of layer names
* @return current graph model
*/
def freeze(names: Array[String]): this.type = {
names.foreach(name => {
val layer = this (name)
require(layer.isDefined, s"cannot find layer match ${name}")
layer.get.setScaleW(0)
layer.get.setScaleB(0)
})
this
}
private[bigdl] def removeStopNodes(n: Node[_]): Unit = {
val nodes = n.nextNodes
n.removeNextEdges()
nodes.filter(_.prevNodes.length == 0).foreach(removeStopNodes(_))
}
protected def getInput(
node: Node[AbstractModule[Activity, Activity, T]],
input: Activity
): Activity = {
if (inputs.length == 1) {
require(inputs(0).eq(node), "input node is not in the input list")
input
} else {
val i = inputs.indexOf(node)
require(i != -1, "input node is not in the input list")
input.toTable[Tensor[T]](i + 1)
}
}
def findInput(node: ModuleNode[T], input: Activity): Activity = {
if (node.element.isInstanceOf[WithoutInput]) return null
val nodeInput = if (node.prevNodes.isEmpty) {
getInput(node, input)
} else {
val prevActivities = node.prevNodesAndEdges
.filterNot(n => n._1.element.isInstanceOf[ControlDependency[T]])
.map(n => {
n._2.fromIndex match {
case Some(i) =>
if (n._1.element.output == null || (i == 1 && n._1.element.output.isTensor)) {
n._1.element.output
} else {
n._1.element.output.toTable.apply[Activity](i)
}
case None => n._1.element.output
}
})
if (prevActivities.length == 1) {
prevActivities.head
} else {
T.seq(prevActivities)
}
}
nodeInput
}
protected def findGradOutput(curNode: ModuleNode[T], gradOutput: Activity): Activity = {
var curGradOutput : Activity = if (curNode.eq(dummyOutputGrad)) gradOutput else null
curNode.prevNodesAndEdges.filterNot(n => n._1.element.isInstanceOf[ControlDependency[T]])
.foreach(n => {
val otherActivity = if (n._1.element.gradInput.isTensor || n._1.nextEdges.length == 1) {
n._1.element.gradInput
} else {
val index = n._1.nextEdges.indexOf(n._2) + 1
n._1.element.gradInput.toTable.apply[Activity](index)
}
n._2.fromIndex match {
case Some(i) =>
if (i == 1 && curNode.element.output.isTensor) {
curGradOutput = accActivity(curGradOutput, otherActivity)
} else {
if (curNode.element.output.isTable && curGradOutput == null) {
curGradOutput = T()
}
val curActivity = curGradOutput.toTable.getOrElse[Activity](i, null)
curGradOutput.toTable(i) = accActivity(curActivity, otherActivity)
}
case None =>
curGradOutput = accActivity(curGradOutput, otherActivity)
}
})
if (curNode.element.output.isTable) {
addZeroTensorToMissingGradOutput(curNode.element.output.toTable, curGradOutput.toTable)
}
curGradOutput
}
protected def fetchModelGradInput(): Activity = {
if (inputs.length == 1) {
if (isGradInputAvailable.head) {
inputs.head.element.gradInput
} else {
Activity.emptyGradInput(this.getName())
}
} else {
T.seq(inputs.zipWithIndex.map{ case(n, i) =>
if (isGradInputAvailable(i)) {
n.element.gradInput
} else {
Activity.emptyGradInput(this.getName())
}
})
}
}
override def reset(): Unit = {
if (null != stopGradientLayers) stopGradientLayers.clear()
unFreeze()
buildBackwardGraph()
}
/**
* Get forward executions, the dummy node will be filtered.
*
* This method returns the executions in no particular order.
* @return
*/
def getForwardExecutions(): Array[Node[AbstractModule[Activity, Activity, T]]] = {
forwardNodes.filterNot(_.eq(dummyOutput))
}
/**
* Get forward executions, the dummy nodes and control dependency nodes will be filtered.
*
* This method returns a sorted sequence of executions. If the graph contains a loop, an
* exception is thrown.
* @return
*/
def getSortedForwardExecutions(): Array[ModuleNode[T]] = {
forwardGraph.topologySort
// todo: convert control dep node to edge
.filterNot(_.element.isInstanceOf[ControlDependency[T]]).reverse
.filter(n => !n.eq(dummyOutput))
}
@inline
protected def accActivity(activity: Activity, other: Activity): Activity = {
if (activity == null) {
other
} else {
if (other.isTensor) {
require(activity.isTensor, "Cannot add a table to a tensor")
activity.toTensor[T].add(other.toTensor[T])
} else {
// if 'activity' and 'other' are both table, we need to merge 'other' to 'activity'
// if 'other' and 'activity' both contains the index, update 'activity' by sum
// if 'other' contains the index while 'activity' does not,
// just insert the corresponding tensor of 'other' to 'activity'
val actTable = activity.toTable
val otherTable = other.toTable
otherTable.keySet.foreach(index => {
if (actTable.contains(index)) {
accActivity(actTable[Activity](index), otherTable[Activity](index))
} else {
actTable.insert(index.asInstanceOf[Int], otherTable(index))
}
})
actTable
}
}
}
/**
* Save the current model graph to a folder, which can be displayed in TensorBoard by running
* tensorboard --logdir logPath
* @param logPath
* @param backward Draw backward graph instead of forward
* @return
*/
def saveGraphTopology(logPath: String, backward: Boolean = false): this.type = {
val writer = new TFFileWriter(logPath)
val graphBuilder = GraphDef.newBuilder()
val nodes = if (backward) {
backwardNodes.filter(n => !n.eq(dummyOutputGrad))
} else {
forwardNodes.filter(n => !n.eq(dummyOutput))
}
nodes.map(m => {
val nodeDef = Tensorflow.bigdlModule(m.element, m.prevNodes.map(_.element.getName()).asJava)
graphBuilder.addNode(nodeDef)
})
writer.addGraphDef(graphBuilder.build())
writer.close()
this
}
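// Usage sketch (illustrative; the path is hypothetical):
// model.saveGraphTopology("/tmp/bigdl_logs") writes the forward graph, then
// `tensorboard --logdir /tmp/bigdl_logs` renders it.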
/**
* Clear the original module and reset with module in the graph
*/
def resetModules(): Unit = {
modules.clear()
modules.appendAll(forwardGraph.DFS.toArray
.filterNot(_.element.isInstanceOf[ControlDependency[T]])
.filter(n => !n.eq(dummyOutput)).map(_.element)
// Some tests compare the parameters between sequential and graph; adding a reverse
// makes them easier to compare
.reverse
)
}
}
object Graph extends GraphSerializable {
/**
* Node for a graph container. The module should have a tensor/table input and a tensor output
* @tparam T
*/
type ModuleNode[T] = Node[AbstractModule[Activity, Activity, T]]
/**
* Build multiple inputs, multiple outputs graph container.
* @param input input nodes
* @param output output nodes
* @return a graph container
*/
def apply[T: ClassTag](
input: Array[ModuleNode[T]],
output: Array[ModuleNode[T]],
variables: Option[(Array[Tensor[T]], Array[Tensor[T]])] = None
)(implicit ev: TensorNumeric[T]): Graph[T] = {
new StaticGraph[T](input, output, variables)
}
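/**
 * Build a graph with a frozen preprocessing stage: an Identity node is inserted
 * between `preprocessor` and `trainable` and registered via stopGradient, so back
 * propagation stops at that boundary and only `trainable` is updated.
 */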
def apply[T: ClassTag](preprocessor: Module[T], trainable: Module[T])
(implicit ev: TensorNumeric[T]): Graph[T] = {
val preprocessorNode = preprocessor.inputs()
val stopGradients = Identity[T]().inputs(preprocessorNode)
val trainableNode = trainable.inputs(stopGradients)
val graph = apply[T](preprocessorNode, trainableNode)
graph.stopGradient(Array(stopGradients.element.getName()))
graph
}
private[bigdl] def dynamic[T: ClassTag](
input : Array[ModuleNode[T]],
output : Array[ModuleNode[T]],
variables: Option[(Array[Tensor[T]], Array[Tensor[T]])] = None,
generateBackward: Boolean = true)(implicit ev: TensorNumeric[T]): Graph[T] = {
new DynamicGraph[T](input, output, variables, generateBackward)
}
/**
* Build a single input, multiple outputs graph container
* @param input input node
* @param output output nodes
* @return a graph container
*/
def apply[T: ClassTag](input: ModuleNode[T], output: Array[ModuleNode[T]])
(implicit ev: TensorNumeric[T]): Graph[T] = {
new StaticGraph[T](Seq(input), output)
}
private[bigdl] def dynamic[T: ClassTag](input : ModuleNode[T], output : Array[ModuleNode[T]])
(implicit ev: TensorNumeric[T]) : Graph[T] = {
new DynamicGraph[T](Array(input), output, None, true)
}
/**
* Build a multiple inputs, single output graph container
* @param input input nodes
* @param output output node
* @return a graph container
*/
def apply[T: ClassTag](input: Array[ModuleNode[T]], output: ModuleNode[T])
(implicit ev: TensorNumeric[T]): Graph[T] = {
new StaticGraph[T](input, Seq(output))
}
private[bigdl] def dynamic[T: ClassTag](input : Array[ModuleNode[T]], output : ModuleNode[T])
(implicit ev: TensorNumeric[T]) : Graph[T] = {
new DynamicGraph[T](input, Array(output), None, true)
}
/**
* Build a single input, single output graph container
* @param input input node
* @param output output node
* @return a graph container
*/
def apply[T: ClassTag](input: ModuleNode[T], output: ModuleNode[T])
(implicit ev: TensorNumeric[T]): Graph[T] = {
new StaticGraph[T](Seq(input), Seq(output))
}
private[bigdl] def dynamic[T: ClassTag](input : ModuleNode[T], output : ModuleNode[T])
(implicit ev: TensorNumeric[T]) : Graph[T] = {
new DynamicGraph[T](Array(input), Array(output), None, true)
}
}
trait GraphSerializable extends ContainerSerializable {
private[bigdl] def prepareLoadModule[T: ClassTag](context: DeserializeContext)
(implicit ev: TensorNumeric[T]) = {
val module = context.bigdlModule
val subModules = module.getSubModulesList.asScala
val attributes = module.getAttrMap
val inputNames = new ArrayBuffer[String]
val outputNames = new ArrayBuffer[String]
DataConverter.getAttributeValue(context, attributes.get("inputNames"))
.asInstanceOf[Array[String]].map(name => inputNames.append(name))
DataConverter.getAttributeValue(context, attributes.get("outputNames"))
.asInstanceOf[Array[String]].map(name => outputNames.append(name))
val inputs = new ArrayBuffer[ModuleNode[T]]
val outputs = new ArrayBuffer[ModuleNode[T]]
// layer name to layer node mapping
val layerMap = new mutable.HashMap[String, (ModuleNode[T], Seq[String])]()
subModules.foreach(subModule => {
val bigDLModule = ModuleSerializer.load(DeserializeContext(subModule,
context.storages, context.storageType))
val moduleNode = bigDLModule.module match {
case controlOps: ControlOps[T] => createControlNode(controlOps)
case _ => new ModuleNode[T](bigDLModule.module)
}
val preNodes = bigDLModule.pre
layerMap(bigDLModule.module.getName) = (moduleNode, preNodes)
})
layerMap.values.foreach(moduleNode => {
val edges = DataConverter.getAttributeValue(context,
attributes.get(s"${moduleNode._1.element.getName}_edges")).
asInstanceOf[mutable.HashMap[String, mutable.HashMap[String, Int]]]
val edgeMap = edges.get(moduleNode._1.element.getName).get
moduleNode._2.foreach(pre => {
if (layerMap.contains(pre)) {
val edge: Edge = edgeMap.get(pre).get match {
case -1 => Edge()
case index: Int => Edge(index)
}
layerMap(pre)._1.add(moduleNode._1, edge)
}
})
})
inputNames.foreach(inputName => inputs.append(layerMap(inputName)._1))
outputNames.foreach(outputName => outputs.append(layerMap(outputName)._1))
var sharedVariables: Option[(Array[Tensor[T]], Array[Tensor[T]])] = None
if (attributes.containsKey("sharedWeight") && attributes.containsKey("sharedBias")) {
val weights = attributes.get("sharedWeight")
val biases = attributes.get("sharedBias")
val weightArray = DataConverter.getAttributeValue(context, weights)
.asInstanceOf[Array[Tensor[T]]]
val biasArray = DataConverter.getAttributeValue(context, biases)
.asInstanceOf[Array[Tensor[T]]]
sharedVariables = Some(weightArray, biasArray)
}
val generateBackwardValue = attributes.get("generateBackward")
(module, inputs, outputs, generateBackwardValue, sharedVariables)
}
override def doLoadModule[T: ClassTag](context: DeserializeContext)
(implicit ev: TensorNumeric[T]): AbstractModule[Activity, Activity, T] = {
val (module, inputs, outputs, generateBackwardValue, sharedVariables) =
prepareLoadModule(context)
val attributes = module.getAttrMap
val graph = if (generateBackwardValue != null) {
val generateBackward = DataConverter.getAttributeValue(context, generateBackwardValue)
.asInstanceOf[Boolean]
Graph.dynamic[T](inputs.toArray, outputs.toArray, sharedVariables, generateBackward)
} else {
new StaticGraph[T](inputs, outputs, sharedVariables, false)
}
var serializedStopGradientLayers : Array[String] = null
// this is kept for backward compatibility
if (attributes.containsKey("stopGradientLayers")) {
val stopGradientLayers = attributes.get("stopGradientLayers")
serializedStopGradientLayers = DataConverter.
getAttributeValue(context, stopGradientLayers).asInstanceOf[Array[String]]
}
if (serializedStopGradientLayers != null) {
graph.stopGradient(serializedStopGradientLayers)
}
graph
}
private def createControlNode[T: ClassTag](controlOps: ControlOps[T]): ModuleNode[T] = {
controlOps match {
case switchOps: SwitchOps[T] => new SwitchControlNode[Module[T]](switchOps)
case mergeOps: MergeOps[T] => new MergeControlNode[Module[T]](mergeOps)
case _ => new Node[Module[T]](controlOps)
}
}
override def doSerializeModule[T: ClassTag](context: SerializeContext[T],
graphBuilder: BigDLModule.Builder)
(implicit ev: TensorNumeric[T]): Unit = {
val module = context.moduleData
// record the predecessor/successor module names on the serialized module
graphBuilder.addAllPreModules(module.next.asJava)
graphBuilder.addAllNextModules(module.pre.asJava)
val graph = module.module.asInstanceOf[Graph[T]]
val inputsNames = graph.inputs.map(_.element.getName).toArray
val outputsNames = graph.outputs.map(_.element.getName).toArray
graph.getForwardExecutions.foreach(execution => {
val edgeMap = new mutable.HashMap[String, mutable.Map[String, Int]]
val preNodesAndEdges = execution.prevNodesAndEdges
val preNodes = preNodesAndEdges.map(_._1.element.getName)
val nextNodes = execution.nextNodes.map(_.element.getName)
val currNode = execution.element
.asInstanceOf[AbstractModule[Activity, Activity, T]]
val subModel = ModuleSerializer.serialize(SerializeContext(
ModuleData(currNode, preNodes, nextNodes), context.storages, context.storageType))
// add edges
val preNodeEdges = new mutable.HashMap[String, Int]()
preNodesAndEdges.foreach(pre => {
val preNodeName = pre._1.element.getName
val preEdgeIndex = pre._2.fromIndex match {
case Some(i) => i
case None => -1
}
preNodeEdges(preNodeName) = preEdgeIndex
})
edgeMap(execution.element.getName) = preNodeEdges
val attrBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, attrBuilder, edgeMap)
graphBuilder.putAttr(s"${execution.element.getName}_edges", attrBuilder.build)
graphBuilder.addSubModules(subModel.bigDLModule)
})
if (graph.variables.isDefined) {
val (weights, bias) = graph.variables.get
val weightAttrBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, weightAttrBuilder, weights,
universe.typeOf[Array[Tensor[_ <: scala.Any]]])
graphBuilder.putAttr("sharedWeight", weightAttrBuilder.build)
val biasAttrBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, biasAttrBuilder, bias,
universe.typeOf[Array[Tensor[_ <: scala.Any]]])
graphBuilder.putAttr("sharedBias", biasAttrBuilder.build)
}
val inputNamesAttrBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, inputNamesAttrBuilder,
inputsNames, universe.typeOf[Array[String]])
graphBuilder.putAttr("inputNames", inputNamesAttrBuilder.build)
val outputNamesBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, outputNamesBuilder,
outputsNames, universe.typeOf[Array[String]])
graphBuilder.putAttr("outputNames", outputNamesBuilder.build)
if (graph.isInstanceOf[DynamicGraph[_]]) {
val generateBackwardBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, generateBackwardBuilder,
graph.asInstanceOf[DynamicGraph[_]].generateBackward, universe.typeOf[Boolean])
graphBuilder.putAttr("generateBackward", generateBackwardBuilder.build)
}
val stopGradientLayers = graph.getStopGradientLayers
if (stopGradientLayers != null && stopGradientLayers.size > 0) {
val stopGradientLayersBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, stopGradientLayersBuilder,
stopGradientLayers.toArray(new Array[String](stopGradientLayers.size)),
universe.typeOf[Array[String]])
graphBuilder.putAttr("stopGradientLayers", stopGradientLayersBuilder.build)
}
}
}
|
intel-analytics/BigDL
|
scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/nn/Graph.scala
|
Scala
|
apache-2.0
| 28,337
|
import scala.compiletime.*
object Test {
inline def dummy1: Int => Int =
(i: Int) => i + 1
inline def dummy2: (i: Int) => i.type =
(i: Int) => i
inline def dummy3: Int => Int =
(i: Int) => ???
inline def dummy4: Int => Int =
???
object I extends (Int => Int) {
def apply(i: Int): i.type = i
}
abstract class II extends (Int => Int) {
val apply = 123
}
inline def dummy5: II =
(i: Int) => i + 1
abstract class III extends (Int => Int) {
def impl(i: Int): Int
def apply(i: Int): Int = -1
}
inline def dummy6: III =
(i: Int) => i + 1
abstract class IV extends (Int => Int) {
def apply(s: String): String
}
abstract class V extends IV {
def apply(s: String): String = "gotcha"
}
inline def dummy7: IV =
{ (i: Int) => i + 1 } : V
def main(argv : Array[String]) : Unit = {
println(s"compile-time: ${codeOf(Macros.betaReduce(dummy1)(3))}")
println(s"run-time: ${Macros.betaReduce(dummy1)(3)}")
println(s"compile-time: ${codeOf(Macros.betaReduce(dummy2)(1))}")
// paramrefs have to be properly substituted in this case
println(s"run-time: ${Macros.betaReduce(dummy2)(1)}")
// ensure the inlined ??? is ascribed type Int so this compiles
def throwsNotImplemented1 = Macros.betaReduceAdd1(dummy3)(4)
// ensure we handle cases where the (non-inlineable) function itself needs ascribing
def throwsNotImplemented2 = Macros.betaReduce(dummy4)(6)
// make sure paramref types work when inlining is not possible
println(s"run-time: ${Macros.betaReduce(I)(5)}")
// -- cases below are non-function types, which are currently not inlined for simplicity but may be in the future
// (also, this tests that we return something valid when we see a closure that we can't inline)
// A non-function type with an apply value that can be confused with the apply method.
println(s"run-time: ${Macros.betaReduce(dummy5)(6)}")
// should print -1 (without inlining), because the apparent apply method actually
// has nothing to do with the function literal
println(s"run-time: ${Macros.betaReduce(dummy6)(7)}")
// the literal does contain the implementation of the apply method, but there are two abstract apply methods
// in the outermost abstract type
println(s"run-time: ${Macros.betaReduce(dummy7)(8)}")
}
}
|
dotty-staging/dotty
|
tests/run-macros/beta-reduce-inline-result/Test_2.scala
|
Scala
|
apache-2.0
| 2,373
|
package aia.cluster
package words
import com.typesafe.config.ConfigFactory
import akka.actor.{Props, ActorSystem}
import akka.cluster.Cluster
import JobReceptionist.JobRequest
object Main extends App {
val config = ConfigFactory.load()
val system = ActorSystem("words", config)
println(s"Starting node with roles: ${Cluster(system).selfRoles}")
if(system.settings.config.getStringList("akka.cluster.roles").contains("master")) {
Cluster(system).registerOnMemberUp {
val receptionist = system.actorOf(Props[JobReceptionist], "receptionist")
println("Master node is ready.")
val text = List("this is a test", "of some very naive word counting", "but what can you say", "it is what it is")
receptionist ! JobRequest("the first job", (1 to 100000).flatMap(i => text ++ text).toList)
system.actorOf(Props(new ClusterDomainEventListener), "cluster-listener")
}
}
}
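// Note: the "master" check above reads akka.cluster.roles from the loaded Typesafe
// config; a role can be assigned per node e.g. with -Dakka.cluster.roles.0=master.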
|
RayRoestenburg/akka-in-action
|
chapter-cluster/src/main/scala/aia/cluster/words/Main.scala
|
Scala
|
mit
| 912
|
package xnioredis.codegen
import java.net.{URL, URLConnection}
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import resource._
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.io.Source
object Updater extends App {
val replyPattern = "@([a-zA-Z\\-]+)".r
val objectMapper = new ObjectMapper
objectMapper.registerModule(DefaultScalaModule)
val classLoader: ClassLoader = Generator.getClass.getClassLoader
val commands = managed(classLoader.getResourceAsStream("commands.json")) acquireAndGet { stream =>
objectMapper.readValue(stream, classOf[mutable.LinkedHashMap[String, Map[String, Any]]])
}
val targetDir = Paths.get("src", "main", "resources", "replies")
Files.createDirectories(targetDir)
commands.foreach { case (commandName, definition) =>
val url = new URL("https://raw.githubusercontent.com/antirez/redis-doc/master/commands/" +
commandName.toLowerCase.replace(' ', '-') + ".md")
println("Reading " + url)
val urlConnection: URLConnection = url.openConnection
val content = managed(urlConnection.getInputStream) acquireAndGet { inputStream =>
Source.fromInputStream(inputStream).mkString
}
val tags = (for (m <- replyPattern findAllMatchIn content) yield m group 1).dropWhile {
_ != "return"
}.takeWhile {
tag => tag != "examples" && tag != "history"
}.filter {
_ != "return"
}.toIterable
val file = targetDir.resolve(commandName)
if (tags.size == 1) {
Files.write(file, tags.asJava, StandardCharsets.UTF_8)
}
}
}
|
vkorenev/xnio-redis-client
|
xnio-redis-codegen/src/main/scala/xnioredis/codegen/Updater.scala
|
Scala
|
apache-2.0
| 1,717
|
package spire
package math
import spire.algebra._
import org.scalacheck.Arbitrary._
import org.scalacheck._
import Arbitrary.arbitrary
object ArbitrarySupport {
object Ordinal {
trait _0
trait _1
trait _2
trait _3
trait _4
trait _5
trait _6
trait _7
trait _8
trait _9
trait _10
trait _20
trait _50
trait _100
}
abstract class Size[A](val value: Int)
object Size {
import Ordinal._
implicit object Size0 extends Size[_0](0)
implicit object Size1 extends Size[_1](1)
implicit object Size2 extends Size[_2](2)
implicit object Size3 extends Size[_3](3)
implicit object Size4 extends Size[_4](4)
implicit object Size5 extends Size[_5](5)
implicit object Size6 extends Size[_6](6)
implicit object Size7 extends Size[_7](7)
implicit object Size8 extends Size[_8](8)
implicit object Size9 extends Size[_9](9)
implicit object Size10 extends Size[_10](10)
implicit object Size20 extends Size[_20](20)
implicit object Size50 extends Size[_50](50)
implicit object Size100 extends Size[_100](100)
def apply[A](implicit sz: Size[A]): Int = sz.value
}
case class Sized[A, L, U](num: A)
case class Positive[A](num: A)
case class Negative[A](num: A)
case class NonZero[A](num: A)
case class NonPositive[A](num: A)
case class NonNegative[A](num: A)
import spire.syntax.all._
implicit def sized[A: EuclideanRing: Signed: Arbitrary, L: Size, U: Size]: Arbitrary[Sized[A, L, U]] =
Arbitrary(arbitrary[A].map(a => Sized((a emod (Size[U] - Size[L])).abs + Size[L])))
implicit def positive[A: Signed: Arbitrary]: Arbitrary[Positive[A]] =
Arbitrary(arbitrary[A].map(_.abs).filter(_.signum > 0).map(Positive(_)))
implicit def negative[A: Signed: AdditiveGroup: Arbitrary]: Arbitrary[Negative[A]] =
Arbitrary(arbitrary[A].map(-_.abs).filter(_.signum < 0).map(Negative(_)))
implicit def nonZero[A: Signed: AdditiveGroup: Arbitrary]: Arbitrary[NonZero[A]] =
Arbitrary(arbitrary[A].filter(_.signum != 0).map(NonZero(_)))
implicit def nonPositive[A: Signed: AdditiveGroup: Arbitrary]: Arbitrary[NonPositive[A]] =
Arbitrary(arbitrary[A].map(-_.abs).filter(_.signum < 1).map(NonPositive(_)))
implicit def nonNegative[A: Signed: AdditiveGroup: Arbitrary]: Arbitrary[NonNegative[A]] =
Arbitrary(arbitrary[A].map(_.abs).filter(_.signum > -1).map(NonNegative(_)))
}
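// Usage sketch (illustrative, not from the original file): with these implicits in
// scope, ScalaCheck properties can request constrained values directly, e.g.
//
// import org.scalacheck.Prop.forAll
// forAll { (x: Positive[Int]) => x.num > 0 }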
|
adampingel/spire
|
tests/src/test/scala/spire/math/ArbitrarySupport.scala
|
Scala
|
mit
| 2,422
|
package org.joda.time.base
import org.joda.time.Chronology
import org.joda.time.DateTime
import org.joda.time.DateTimeField
import org.joda.time.DateTimeFieldType
import org.joda.time.DateTimeUtils
import org.joda.time.DurationFieldType
import org.joda.time.ReadableInstant
import org.joda.time.ReadablePartial
import org.joda.time.field.FieldUtils
import org.joda.time.format.DateTimeFormatter
abstract class AbstractPartial protected ()
extends ReadablePartial()
with Comparable[ReadablePartial] {
protected def getField(index: Int, chrono: Chronology): DateTimeField
def getFieldType(index: Int): DateTimeFieldType =
getField(index, getChronology).getType
def getFieldTypes(): Array[DateTimeFieldType] = {
val result = Array.ofDim[DateTimeFieldType](size)
for (i <- 0 until result.length) {
result(i) = getFieldType(i)
}
result
}
def getField(index: Int): DateTimeField = getField(index, getChronology)
def getFields(): Array[DateTimeField] = {
val result = Array.ofDim[DateTimeField](size)
for (i <- 0 until result.length) {
result(i) = getField(i)
}
result
}
def getValues(): Array[Int] = {
val result = Array.ofDim[Int](size)
for (i <- 0 until result.length) {
result(i) = getValue(i)
}
result
}
def get(`type`: DateTimeFieldType): Int = getValue(indexOfSupported(`type`))
def isSupported(`type`: DateTimeFieldType): Boolean = (indexOf(`type`) != -1)
def indexOf(`type`: DateTimeFieldType): Int = {
(0 until this.size()).find(getFieldType(_) == `type`).getOrElse(-1)
}
protected def indexOfSupported(`type`: DateTimeFieldType): Int = {
val index = indexOf(`type`)
if (index == -1) {
throw new IllegalArgumentException(
"Field '" + `type` + "' is not supported")
}
index
}
protected def indexOf(`type`: DurationFieldType): Int = {
(0 until this.size())
.find(getFieldType(_).getDurationType == `type`)
.getOrElse(-1)
}
protected def indexOfSupported(`type`: DurationFieldType): Int = {
val index = indexOf(`type`)
if (index == -1) {
throw new IllegalArgumentException(
"Field '" + `type` + "' is not supported")
}
index
}
def toDateTime(baseInstant: ReadableInstant): DateTime = {
val chrono = DateTimeUtils.getInstantChronology(baseInstant)
val instantMillis = DateTimeUtils.getInstantMillis(baseInstant)
val resolved = chrono.set(this, instantMillis)
new DateTime(resolved, chrono)
}
override def equals(partial: Any): Boolean = {
if (super.equals(partial)) {
return true
}
if (!partial.isInstanceOf[ReadablePartial]) {
return false
}
val other = partial.asInstanceOf[ReadablePartial]
if (size != other.size) {
return false
}
for (i <- 0 until this.size()
if getValue(i) != other.getValue(i) || getFieldType(i) != other
.getFieldType(i)) {
return false
}
FieldUtils.equals(getChronology, other.getChronology)
}
override def hashCode(): Int = {
var total = 157
for (i <- 0 until this.size()) {
total = 23 * total + getValue(i)
total = 23 * total + getFieldType(i).hashCode
}
total += getChronology.hashCode
total
}
def compareTo(other: ReadablePartial): Int = {
if (this == other) {
return 0
}
if (size != other.size) {
throw new ClassCastException(
"ReadablePartial objects must have matching field types")
}
for (i <- 0 until this.size()
if getFieldType(i) != other.getFieldType(i)) {
throw new ClassCastException(
"ReadablePartial objects must have matching field types")
}
for (i <- 0 until this.size()) {
if (getValue(i) > other.getValue(i)) {
return 1
}
if (getValue(i) < other.getValue(i)) {
return -1
}
}
0
}
def isAfter(partial: ReadablePartial): Boolean = {
if (partial == null) {
throw new IllegalArgumentException("Partial cannot be null")
}
compareTo(partial) > 0
}
def isBefore(partial: ReadablePartial): Boolean = {
if (partial == null) {
throw new IllegalArgumentException("Partial cannot be null")
}
compareTo(partial) < 0
}
def isEqual(partial: ReadablePartial): Boolean = {
if (partial == null) {
throw new IllegalArgumentException("Partial cannot be null")
}
compareTo(partial) == 0
}
def toString(formatter: DateTimeFormatter): String = {
if (formatter == null) {
return toString
}
formatter.print(this)
}
}
|
mdedetrich/soda-time
|
shared/src/main/scala/org/joda/time/base/AbstractPartial.scala
|
Scala
|
bsd-2-clause
| 4,616
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import SharedHelpers.thisLineNumber
import enablers.Emptiness
import Matchers._
class ShouldBeEmptyLogicalAndImplicitSpec extends Spec {
val fileName: String = "ShouldBeEmptyLogicalAndImplicitSpec.scala"
def wasEqualTo(left: Any, right: Any): String =
FailureMessages("wasEqualTo", left, right)
def wasNotEqualTo(left: Any, right: Any): String =
FailureMessages("wasNotEqualTo", left, right)
def equaled(left: Any, right: Any): String =
FailureMessages("equaled", left, right)
def didNotEqual(left: Any, right: Any): String =
FailureMessages("didNotEqual", left, right)
def wasNotEmpty(left: Any): String =
FailureMessages("wasNotEmpty", left)
def wasEmpty(left: Any): String =
FailureMessages("wasEmpty", left)
def allError(message: String, lineNumber: Int, left: Any): String = {
val messageWithIndex = UnquotedString(" " + FailureMessages("forAssertionsGenTraversableMessageWithStackDepth", 0, UnquotedString(message), UnquotedString(fileName + ":" + lineNumber)))
FailureMessages("allShorthandFailed", messageWithIndex, left)
}
trait Thing {
def isEmpty: Boolean
}
val nonEmptyThing = new Thing {
val isEmpty = false
}
val emptyThing = new Thing {
val isEmpty = true
}
implicit def emptinessOfThing: Emptiness[Thing] =
new Emptiness[Thing] {
def isEmpty(thing: Thing): Boolean = thing.isEmpty
}
object `Emptiness matcher` {
object `when work with 'list should be (empty)'` {
def `should do nothing when list is empty` {
emptyThing should (equal (emptyThing) and be (empty))
emptyThing should (be (empty) and equal (emptyThing))
emptyThing should (be_== (emptyThing) and be (empty))
emptyThing should (be (empty) and be_== (emptyThing))
}
def `should throw TestFailedException with correct stack depth when list is not empty` {
val caught1 = intercept[TestFailedException] {
nonEmptyThing should (equal (nonEmptyThing) and be (empty))
}
assert(caught1.message === Some(equaled(nonEmptyThing, nonEmptyThing) + ", but " + wasNotEmpty(nonEmptyThing)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught2 = intercept[TestFailedException] {
nonEmptyThing should (be (empty) and equal (nonEmptyThing))
}
assert(caught2.message === Some(wasNotEmpty(nonEmptyThing)))
assert(caught2.failedCodeFileName === Some(fileName))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught3 = intercept[TestFailedException] {
nonEmptyThing should (be_== (nonEmptyThing) and be (empty))
}
assert(caught3.message === Some(wasEqualTo(nonEmptyThing, nonEmptyThing) + ", but " + wasNotEmpty(nonEmptyThing)))
assert(caught3.failedCodeFileName === Some(fileName))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught4 = intercept[TestFailedException] {
nonEmptyThing should (be (empty) and be_== (nonEmptyThing))
}
assert(caught4.message === Some(wasNotEmpty(nonEmptyThing)))
assert(caught4.failedCodeFileName === Some(fileName))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
object `when work with 'list should not be empty'` {
def `should do nothing when file is not empty` {
nonEmptyThing should (not equal emptyThing and not be empty)
nonEmptyThing should (not be empty and not equal emptyThing)
nonEmptyThing should (not be_== emptyThing and not be empty)
nonEmptyThing should (not be empty and not be_== emptyThing)
}
def `should throw TestFailedException with correct stack depth when list is empty` {
val caught1 = intercept[TestFailedException] {
emptyThing should (not equal nonEmptyThing and not be empty)
}
assert(caught1.message === Some(didNotEqual(emptyThing, nonEmptyThing) + ", but " + wasEmpty(emptyThing)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught2 = intercept[TestFailedException] {
emptyThing should (not be empty and not equal nonEmptyThing)
}
assert(caught2.message === Some(wasEmpty(emptyThing)))
assert(caught2.failedCodeFileName === Some(fileName))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught3 = intercept[TestFailedException] {
emptyThing should (not be_== nonEmptyThing and not be empty)
}
assert(caught3.message === Some(wasNotEqualTo(emptyThing, nonEmptyThing) + ", but " + wasEmpty(emptyThing)))
assert(caught3.failedCodeFileName === Some(fileName))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught4 = intercept[TestFailedException] {
emptyThing should (not be empty and not be_== nonEmptyThing)
}
assert(caught4.message === Some(wasEmpty(emptyThing)))
assert(caught4.failedCodeFileName === Some(fileName))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
object `when work with 'all(xs) should be (empty)'` {
def `should do nothing when all(xs) is empty` {
all(List(emptyThing)) should (be_== (emptyThing) and be (empty))
all(List(emptyThing)) should (be (empty) and be_== (emptyThing))
all(List(emptyThing)) should (equal (emptyThing) and be (empty))
all(List(emptyThing)) should (be (empty) and equal (emptyThing))
}
def `should throw TestFailedException with correct stack depth when all(xs) is not empty` {
val left1 = List(nonEmptyThing)
val caught1 = intercept[TestFailedException] {
all(left1) should (be_== (nonEmptyThing) and be (empty))
}
assert(caught1.message === Some(allError(wasEqualTo(nonEmptyThing, nonEmptyThing) + ", but " + wasNotEmpty(nonEmptyThing), thisLineNumber - 2, left1)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val left2 = List(nonEmptyThing)
val caught2 = intercept[TestFailedException] {
all(left2) should (be (empty) and be_== (nonEmptyThing))
}
assert(caught2.message === Some(allError(wasNotEmpty(nonEmptyThing), thisLineNumber - 2, left2)))
assert(caught2.failedCodeFileName === Some(fileName))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val left3 = List(nonEmptyThing)
val caught3 = intercept[TestFailedException] {
all(left3) should (equal (nonEmptyThing) and be (empty))
}
assert(caught3.message === Some(allError(equaled(nonEmptyThing, nonEmptyThing) + ", but " + wasNotEmpty(nonEmptyThing), thisLineNumber - 2, left3)))
assert(caught3.failedCodeFileName === Some(fileName))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val left4 = List(nonEmptyThing)
val caught4 = intercept[TestFailedException] {
all(left4) should (be (empty) and equal (nonEmptyThing))
}
assert(caught4.message === Some(allError(wasNotEmpty(nonEmptyThing), thisLineNumber - 2, left4)))
assert(caught4.failedCodeFileName === Some(fileName))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
object `when work with 'all(xs) should not be empty'` {
def `should do nothing when all(xs) is not empty` {
all(List(nonEmptyThing)) should (not be empty and not be_== emptyThing)
all(List(nonEmptyThing)) should (not be_== emptyThing and not be empty)
all(List(nonEmptyThing)) should (not be empty and not equal emptyThing)
all(List(nonEmptyThing)) should (not equal emptyThing and not be empty)
}
def `should throw TestFailedException with correct stack depth when all(xs) is empty` {
val left1 = List(emptyThing)
val caught1 = intercept[TestFailedException] {
all(left1) should (not be_== nonEmptyThing and not be empty)
}
assert(caught1.message === Some(allError(wasNotEqualTo(emptyThing, nonEmptyThing) + ", but " + wasEmpty(emptyThing), thisLineNumber - 2, left1)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val left2 = List(emptyThing)
val caught2 = intercept[TestFailedException] {
all(left2) should (not be empty and not be_== nonEmptyThing)
}
assert(caught2.message === Some(allError(wasEmpty(emptyThing), thisLineNumber - 2, left2)))
assert(caught2.failedCodeFileName === Some(fileName))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val left3 = List(emptyThing)
val caught3 = intercept[TestFailedException] {
all(left3) should (not equal nonEmptyThing and not be empty)
}
assert(caught3.message === Some(allError(didNotEqual(emptyThing, nonEmptyThing) + ", but " + wasEmpty(emptyThing), thisLineNumber - 2, left3)))
assert(caught3.failedCodeFileName === Some(fileName))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val left4 = List(emptyThing)
val caught4 = intercept[TestFailedException] {
all(left4) should (not be empty and not equal nonEmptyThing)
}
assert(caught4.message === Some(allError(wasEmpty(emptyThing), thisLineNumber - 2, left4)))
assert(caught4.failedCodeFileName === Some(fileName))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
}
}
|
travisbrown/scalatest
|
src/test/scala/org/scalatest/ShouldBeEmptyLogicalAndImplicitSpec.scala
|
Scala
|
apache-2.0
| 10,786
|
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tutorial.cogio
import cogdebugger._
import cogio.webcam.ColorWebcam
import libcog._
import toolkit.computervision.colorfunctions.Luma
import toolkit.computervision.motion.OpticFlowPolynomial
import toolkit.filtering.spatial.LaplacianFilter
object CameraExample extends CogDebuggerApp(
new ComputeGraph {
val sensor = ColorWebcam(width = 640, height = 480, framerate = 30)
val flow = new OpticFlowPolynomial(Luma(toVectorField(sensor))).flow
val laplacian = convolve(toVectorField(sensor), LaplacianFilter(), BorderZero)
probe(flow)
probe(laplacian)
}
)
|
hpe-cct/cct-tutorial
|
src/main/scala/tutorial/cogio/CameraExample.scala
|
Scala
|
apache-2.0
| 1,213
|
package models
import com.typesafe.config.Config
/**
* Created by rbrowning on 4/25/17.
*/
case class Item(item: String, category: String, params: Seq[String]) {
def getFullItemPath(implicit conf: Config): String = {
conf.getString("entertainment.storage.path.root") +
(category match {
case "anime" => conf.getString("entertainment.storage.path.anime")
case "show" => conf.getString("entertainment.storage.path.shows")
case "movie" => conf.getString("entertainment.storage.path.movies")
}) + item
}
}
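// A minimal usage sketch (not part of the original model; the config values below
// are hypothetical): getFullItemPath concatenates root + category directory + item.
// Note that a category other than "anime", "show" or "movie" throws a MatchError.
object ItemPathExample extends App {
  import com.typesafe.config.ConfigFactory
  implicit val conf: Config = ConfigFactory.parseString(
    """
      |entertainment.storage.path.root = "/mnt/media/"
      |entertainment.storage.path.anime = "anime/"
      |entertainment.storage.path.shows = "shows/"
      |entertainment.storage.path.movies = "movies/"
    """.stripMargin)
  println(Item("Ping Pong", "anime", Seq.empty).getFullItemPath) // /mnt/media/anime/Ping Pong
}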
|
rebrowning/entertainment-cluster-member
|
src/main/scala/models/Item.scala
|
Scala
|
apache-2.0
| 545
|
package org.numenta.nupic.flink.streaming.examples.hotgym
import de.javakaffee.kryoserializers.jodatime.JodaDateTimeSerializer
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.core.fs.FileSystem.WriteMode
import org.apache.flink.streaming.api.scala._
import org.joda.time.DateTime
import org.numenta.nupic.Parameters.KEY
import org.numenta.nupic.algorithms.{Anomaly, CLAClassifier, SpatialPooler, TemporalMemory}
import org.numenta.nupic.encoders.DateEncoder._
import org.numenta.nupic.encoders.scala._
import org.numenta.nupic.flink.streaming.examples.common.NetworkDemoParameters
import org.numenta.nupic.network.Network
import org.numenta.nupic.flink.streaming.api.scala._
import scala.collection.JavaConverters._
trait HotGymModel {
case class Consumption(timestamp: DateTime, consumption: Double)
case class Prediction(timestamp: String, actual: Double, predicted: Double, anomalyScore: Double)
val network = (key: AnyRef) => {
val encoder = MultiEncoder(
DateEncoder().name("timestamp").timeOfDay(21, 9.5),
ScalarEncoder().name("consumption").n(50).w(21).maxVal(100.0).resolution(0.1).clipInput(true)
)
val params = NetworkDemoParameters.params
val network = Network.create("HotGym Network", params)
.add(Network.createRegion("Region 1")
.add(Network.createLayer("Layer 2/3", params)
.alterParameter(KEY.AUTO_CLASSIFY, true)
.alterParameter(KEY.INFERRED_FIELDS, Map("consumption" -> classOf[CLAClassifier]).asJava)
.add(encoder)
.add(Anomaly.create())
.add(new TemporalMemory())
.add(new SpatialPooler())))
network.setEncoder(encoder)
network
}
}
/**
* Demonstrate the hotgym "basic network" as a Flink job.
*/
object Demo extends HotGymModel {
def main(args: Array[String]) {
val appArgs = ParameterTool.fromArgs(args)
val env = {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.getConfig.setGlobalJobParameters(appArgs)
env.setParallelism(1)
env.addDefaultKryoSerializer(classOf[DateTime], classOf[JodaDateTimeSerializer])
env
}
/**
* Parse the hotgym csv file as a datastream of consumption records.
*/
def readCsvFile(path: String): DataStream[Consumption] = {
env.readTextFile(path)
.map {
_.split(",") match {
case Array(timestamp, consumption) => Consumption(LOOSE_DATE_TIME.parseDateTime(timestamp), consumption.toDouble)
}
}
}
val hotGymConsumption: DataStream[Consumption] = readCsvFile(appArgs.getRequired("input"))
val inferences: DataStream[Prediction] = hotGymConsumption
.learn(network)
.select { inference => inference }
.keyBy { _ => None }
.mapWithState { (inference, state: Option[Double]) =>
val prediction = Prediction(
inference._1.timestamp.toString(LOOSE_DATE_TIME),
inference._1.consumption,
state match {
case Some(prediction) => prediction
case None => 0.0
},
inference._2.getAnomalyScore)
// store the prediction about the next value as state for the next iteration,
// so that actual vs predicted is a meaningful comparison
val predictedConsumption = inference._2.getClassification("consumption").getMostProbableValue(1).asInstanceOf[Any] match {
case value: Double if value != 0.0 => value
case _ => state.getOrElse(0.0)
}
(prediction, Some(predictedConsumption))
}
if (appArgs.has("output")) {
inferences.writeAsCsv(appArgs.getRequired("output"), writeMode = WriteMode.OVERWRITE)
}
else {
inferences.print()
}
env.execute("hotgym")
}
}
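// Usage sketch (illustrative, not from the original repo): the job is driven by
// ParameterTool, so a local run needs at least `--input`:
//
//   run --input src/main/resources/hotgym.csv --output /tmp/predictions.csv
//
// Each input line must be "<timestamp>,<consumption>" in a format LOOSE_DATE_TIME
// accepts; e.g. a hypothetical record "7/2/10 0:00,21.2" parses as:
//
//   Consumption(LOOSE_DATE_TIME.parseDateTime("7/2/10 0:00"), 21.2)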
|
nupic-community/flink-htm
|
flink-htm-examples/src/main/scala/org/numenta/nupic/flink/streaming/examples/hotgym/HotGym.scala
|
Scala
|
agpl-3.0
| 3,820
|
/*
* Copyright 2019 ABSA Group Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package za.co.absa.spline.persistence.tx
import java.util.UUID
import com.arangodb.async.ArangoDatabaseAsync
import com.arangodb.model.TransactionOptions
import org.slf4s.Logging
import za.co.absa.spline.persistence.model.{ArangoDocument, CollectionDef}
import za.co.absa.spline.persistence.tx.TxBuilder.{ArangoTxImpl, condLine}
import scala.compat.java8.FutureConverters._
import scala.concurrent.Future
sealed trait Query {
def collectionDefs: Seq[CollectionDef]
}
case class NativeQuery(
query: String,
params: Map[String, Any] = Map.empty,
override val collectionDefs: Seq[CollectionDef] = Nil
) extends Query
case class UpdateQuery(
collectionDef: CollectionDef,
filter: String,
data: Map[String, Any],
chainInput: Boolean = false
) extends Query {
override def collectionDefs: Seq[CollectionDef] = Seq(collectionDef)
}
object UpdateQuery {
val DocWildcard = s"_${UUID.randomUUID}_"
}
case class InsertQuery(
collectionDef: CollectionDef,
documents: Seq[ArangoDocument],
ignoreExisting: Boolean = false,
chainInput: Boolean = false
) extends Query {
override def collectionDefs: Seq[CollectionDef] = Seq(collectionDef)
}
object InsertQuery {
def apply(colDef: CollectionDef, docs: ArangoDocument*): InsertQuery = InsertQuery(colDef, docs)
}
class TxBuilder {
private var queries: Seq[Query] = Vector.empty
def addQuery(q: Query): this.type = {
queries :+= q
this
}
def buildTx: ArangoTx = new ArangoTxImpl(this)
private[tx] def generateJs(): String = {
val statements = queries.zipWithIndex.map {
case (nq: NativeQuery, i) =>
s"""
|lastRes = (function(db, params){
| return ${nq.query}
|})(_db, _params[$i]);
|""".stripMargin.trim
case (iq: InsertQuery, i) =>
val colName = iq.collectionDef.name
val objects = if (iq.chainInput) "lastRes" else s"_params[$i]"
val iterMethod = if (iq.chainInput) "map" else "forEach"
Seq(
s"$objects.$iterMethod(o =>",
condLine(iq.ignoreExisting,
s"""
| o._key && _db._collection("$colName").exists(o._key) ||
| o._from && o._to && _db._query(`
| WITH $colName
| FOR e IN $colName
| FILTER e._from == @o._from && e._to == @o._to
| LIMIT 1
| COLLECT WITH COUNT INTO cnt
| RETURN !!cnt
| `, {o}).next() ||
| """.stripMargin),
s"""_db._collection("$colName").insert(o, {silent:true}));"""
).mkString
case (uq: UpdateQuery, i) =>
val colName = uq.collectionDef.name
val doc = "a"
val filter = uq.filter.replace(UpdateQuery.DocWildcard, doc)
val bDoc = if (uq.chainInput) "lastRes" else s"_params[$i]"
s"""
|_db._query(`
| WITH $colName
| FOR $doc IN $colName
| FILTER $filter
| UPDATE $doc._key WITH @b IN $colName
|`, {"b": $bDoc});
|""".stripMargin.trim
}
s"""
|function (_params) {
| const _db = require('internal').db;
|  ${statements.mkString("\n").replace("\n", "\n  ")}
|}
|""".stripMargin
}
private def options: TransactionOptions = {
val params = queries
.map({
case nq: NativeQuery => nq.params
case iq: InsertQuery => iq.documents.toVector
case uq: UpdateQuery => uq.data
})
val writeCollections = queries
.flatMap(_.collectionDefs)
.map(_.name)
.distinct
new TransactionOptions()
.params(params)
.writeCollections(writeCollections: _*)
.allowImplicit(false)
}
}
object TxBuilder {
private class ArangoTxImpl(txBuilder: TxBuilder) extends ArangoTx with Logging {
override def execute(db: ArangoDatabaseAsync): Future[Unit] = {
val jsCode = txBuilder.generateJs()
log.debug(jsCode)
db.transaction(jsCode, classOf[Unit], txBuilder.options).toScala
}
}
private def condLine(cond: => Boolean, stmt: => String): String = if (cond) stmt else ""
}
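// Usage sketch (illustrative; `db`, `progressCollection` and `doc` are assumed to
// exist in scope): queries run in insertion order inside one ArangoDB JS
// transaction, and every referenced collection is registered for writing (see
// `options` above).
//
//   val tx: ArangoTx = new TxBuilder()
//     .addQuery(InsertQuery(progressCollection, doc))
//     .buildTx
//   tx.execute(db) // db: ArangoDatabaseAsync => Future[Unit]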
|
AbsaOSS/spline
|
persistence/src/main/scala/za/co/absa/spline/persistence/tx/TxBuilder.scala
|
Scala
|
apache-2.0
| 4,800
|
package xsbtClasspath
import sbt._
final case class Asset(
file:File,
name:String,
main:Boolean
) {
def flatPathMapping:(File,String) = (file, name)
}
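// Usage sketch (illustrative): flatPathMapping yields the (File, name) pair shape
// sbt expects for flat packaging mappings.
//
//   val asset = Asset(file("lib/foo.jar"), "foo.jar", main = false)
//   val mapping: (File, String) = asset.flatPathMapping // (lib/foo.jar, "foo.jar")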
|
ritschwumm/xsbt-classpath
|
src/main/scala/Asset.scala
|
Scala
|
bsd-2-clause
| 157
|
/**
* Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
* See accompanying LICENSE file.
*/
package kafka.manager.base
import kafka.manager.model.ActorModel
import ActorModel.{ActorRequest, CommandRequest}
/**
* @author hiral
*/
abstract class BaseCommandActor extends BaseActor {
final def processActorRequest(request: ActorRequest): Unit = {
request match {
      case commandRequest: CommandRequest =>
        processCommandRequest(commandRequest)
case any: Any => log.warning("bca : processActorRequest : Received unknown message: {}", any)
}
}
def processCommandRequest(request: CommandRequest): Unit
@scala.throws[Exception](classOf[Exception])
override def preStart(): Unit = super.preStart()
@scala.throws[Exception](classOf[Exception])
override def postStop(): Unit = super.postStop()
@scala.throws[Exception](classOf[Exception])
override def preRestart(reason: Throwable, message: Option[Any]): Unit = super.preRestart(reason, message)
}
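// A hypothetical concrete subclass (sketch; assumes BaseActor supplies the Akka
// logging adapter used above): only processCommandRequest needs an implementation.
//
//   class NoopCommandActor extends BaseCommandActor {
//     override def processCommandRequest(request: CommandRequest): Unit =
//       log.info("bca : ignoring command request: {}", request)
//   }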
|
krux/kafka-manager
|
app/kafka/manager/base/BaseCommandActor.scala
|
Scala
|
apache-2.0
| 1,013
|
package unicredit
import scala.tools.nsc.{ Global, Phase }
import scala.tools.nsc.plugins.{ Plugin, PluginComponent }
import scala.tools.nsc.transform.{ Transform, TypingTransformers }
import scala.tools.nsc.symtab.Flags
import scala.tools.nsc.plugins.Plugin
import scala.tools.nsc.ast.TreeDSL
import java.nio.file.Files.readAllBytes
import java.nio.file.Paths.get
import scala.collection.mutable
import scala.util.{ Try => STry, Success, Failure }
import scala.reflect.internal.MissingRequirementError
class AnnotationAdderPlugin(val global: Global) extends Plugin {
import global._
val name = "annotation-adder-plugin"
  val description = "Adds annotations to classes, fields, and methods"
val components = List[PluginComponent](AnnotationAdderComponent)
lazy val config: mutable.Set[(String, String, List[String])] =
(try new String(readAllBytes(get("./annotation_adder.config"))).split("\\n").toSeq.map(e => {
val splitted = e.split(" ")
(splitted(0), splitted(1), splitted.drop(2).toList)
})
catch {
      case err: Throwable =>
        println("Annotation adder configuration file is missing or unreadable")
        Seq()
}).to[mutable.Set]
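  // Expected shape of ./annotation_adder.config (one rule per line; the example
  // values are hypothetical): target symbol, annotation class, optional parameters.
  //
  //   <fully.qualified.ClassOrMember> <fully.qualified.Annotation> [param ...]
  //   com.example.Service scala.deprecated "use NewService instead" "1.0"
  //
  // Parameters pass through as string literals; only "true"/"false" become Booleans.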
private object AnnotationAdderComponent extends PluginComponent with Transform {
val global = AnnotationAdderPlugin.this.global
import global._
import global.definitions._
override val runsAfter = List("typer")
val phaseName = "annotation-adder"
def newTransformer(unit: CompilationUnit) =
new Transformer {
val annotators =
config.flatMap { c =>
val originalSym = STry {
try
rootMirror.getClassByName((c._1: TypeName))
catch {
case _: MissingRequirementError => //is not a class get member
val i = c._1.lastIndexOf('.')
val className = c._1.substring(0, i)
val memberName = c._1.substring(i + 1)
val cl = rootMirror.getClassByName((className: TypeName))
try {
getMember(cl, (memberName: TermName))
} catch {
case err: Throwable =>
err.printStackTrace
throw err
}
case err: Throwable =>
err.printStackTrace
throw err
}
}
val annotation = STry {
rootMirror.getClassByName((c._2: TypeName))
}
val params =
c._3.map(x => { //maybe a little better could be done
x match {
case "true" => reify(true).tree
case "false" => reify(false).tree
case str => reify(str.toString()).tree
}
})
(originalSym, annotation) match {
case (Success(orSym), Success(annotationSym)) =>
//orSym.addAnnotation(annotationSym)
Some(orSym, annotationSym, params)
//unit.warning(null, s"adding annotation ${c._2} to ${c._1}")
case _ =>
None
//unit.warning(null, s"ANNOTATION ADDER ERROR: ${c._1} or ${c._2} not found")
}
}
//probably we could avoid to use a transformer and use a traver only
override def transform(tree: Tree): Tree = {
/* // try {
val toAnnotate = annotators.find { case (symb,_,_) => symb == tree.symbol && symb.owner == tree.symbol.owner}
val toValAnnotate =
if (toAnnotate.isDefined) None
else annotators.find { case (symb,_,_) =>
symb.owner == tree.symbol.owner && (symb.nameString+" " : TermName) == tree.symbol.name}
*/
tree match {
case cd : ClassDef =>
val toAnnotate = annotators.find { case (symb,_,_) =>
symb == cd.symbol && symb.owner == cd.symbol.owner}
if (toAnnotate.isDefined) {
cd.symbol.addAnnotation(toAnnotate.get._2, toAnnotate.get._3: _*)
unit.warning(tree.pos, s"Class Annotation ${toAnnotate.get._2.nameString} added.")
}
case dd: DefDef =>
val toAnnotate = annotators.find { case (symb,_,_) =>
symb == dd.symbol && symb.owner == dd.symbol.owner}
if (toAnnotate.isDefined) {
dd.symbol.addAnnotation(toAnnotate.get._2, toAnnotate.get._3: _*)
unit.warning(tree.pos, s"Def Annotation ${toAnnotate.get._2.nameString} added.")
}
case vd: ValDef =>
val toAnnotate = annotators.find { case (symb,_,_) =>
symb.owner == vd.symbol.owner && (symb.nameString+" " : TermName) == vd.symbol.name}
if (toAnnotate.isDefined) {
vd.symbol.addAnnotation(toAnnotate.get._2, toAnnotate.get._3: _*)
unit.warning(tree.pos, s"Val Annotation ${toAnnotate.get._2.nameString} added.")
}
case _ =>
}
/*
if (toAnnotate.isDefined) {
tree.symbol.addAnnotation(toAnnotate.get._2, toAnnotate.get._3: _*)
unit.warning(tree.pos, s"Annotation ${toAnnotate.get._2.nameString} added.")
} else if (toValAnnotate.isDefined) {
tree.symbol.addAnnotation(toValAnnotate.get._2, toAnnotate.get._3: _*)
unit.warning(tree.pos, s"Annotation ${toValAnnotate.get._2.nameString} added")
}
} catch {
case _ : Throwable =>
}*/
super.transform(tree)
}
}
}
}
|
jmnarloch/akka.js
|
annotation-adder-plugin/src/main/scala/unicredit/AnnotationAdderPlugin.scala
|
Scala
|
bsd-3-clause
| 5,794
|
/*
Copyright (c) 2017-2021, Robby, Kansas State University
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.sireum.$internal
import scala.language.experimental.macros
object Macro {
val templateString = "st\"...\""
def parMap[T, U](poolRef: _root_.java.util.concurrent.atomic.AtomicReference[AnyRef],
cores: Int, arg: scala.collection.Seq[T], f: T => U): scala.collection.IndexedSeq[U] = macro Macro.parMapImpl
def sync[T](o: AnyRef, arg: T): T = macro Macro.syncImpl
def isJs: Boolean = macro Macro.isJsImpl
def version: String = macro Macro.versionImpl
def commitHash: String = macro Macro.commitHashImpl
def eval[T](c: scala.reflect.macros.blackbox.Context)(
t: Any, n: Int = 6): T = { // HACK: eval may non-deterministically fail, so try n times!
val tree = t.asInstanceOf[c.Tree]
val expr = c.Expr(c.untypecheck(tree))
for (_ <- 0 until n) {
scala.util.Try(c.eval[T](expr)) match {
case scala.util.Success(x) => return x
case _ =>
}
synchronized { wait(100) }
}
c.eval[T](expr)
}
}
import Macro._
class Macro(val c: scala.reflect.macros.blackbox.Context) {
val isJsCheck: Boolean = "true" == System.getenv("PROYEK_JS") || scala.util.Try(Class.forName("scala.scalajs.js.Any", false, getClass.getClassLoader)).isSuccess
import c.universe._
def l[T](args: c.Expr[Any]*): c.Expr[T] =
c.Expr[T]( q"""halt("Slang l\"\"\"...\"\"\" should have been erased by the Sireum Scala plugin.")""")
def lUnit(args: c.Expr[Any]*): c.Expr[Unit] = c.Expr[Unit](q"{}")
def lUnit1(arg0: c.Tree): c.Expr[Unit] = c.Expr[Unit](q"{}")
def lUnit2(arg0: c.Tree, arg1: c.Tree): c.Expr[Unit] = c.Expr[Unit](q"{}")
def lUnit3(arg0: c.Tree, arg1: c.Tree, arg2: c.Tree): c.Expr[Unit] = c.Expr[Unit](q"{}")
def lUnit4(arg0: c.Tree, arg1: c.Tree, arg2: c.Tree, arg3: c.Tree): c.Expr[Unit] = c.Expr[Unit](q"{}")
def lUnit0S(arg0: c.Tree*): c.Expr[Unit] = c.Expr[Unit](q"{}")
def lUnit1S(arg0: c.Tree, arg1: c.Tree*): c.Expr[Unit] = c.Expr[Unit](q"{}")
def lUnit2S(arg0: c.Tree, arg1: c.Tree, arg2: c.Tree*): c.Expr[Unit] = c.Expr[Unit](q"{}")
def lUnit3S(arg0: c.Tree, arg1: c.Tree, arg2: c.Tree, arg3: c.Tree*): c.Expr[Unit] = c.Expr[Unit](q"{}")
def lDef[T](args: c.Expr[Any]*): c.Expr[T] =
c.Expr[T]( q"""halt("Slang l\"\"\"...\"\"\" should have been erased by the Sireum Scala plugin.")""")
def $[T]: c.Expr[T] = c.Expr[T]( q"""halt("Cannot invoke this method")""")
def extractParts: Seq[c.Tree] = (c.prefix.tree match {
case q"org.sireum.`package`.$$Slang(scala.StringContext.apply(..$ps)).$_" => ps
case q"sireum.this.`package`.$$Slang(scala.StringContext.apply(..$ps)).$_" => ps
case q"org.sireum.`package`.$$Slang(scala.StringContext.apply(..$ps))" => ps
case q"sireum.this.`package`.$$Slang(scala.StringContext.apply(..$ps))" => ps
}).asInstanceOf[Seq[c.Tree]]
def zApply(args: c.Tree*): c.Tree = {
val parts = extractParts
if (parts.size != 1) c.abort(c.prefix.tree.pos, "Slang z\"...\" should not contain $$ arguments.")
q"_root_.org.sireum.Z.$$String(${parts.head})"
}
def cApply(args: c.Tree*): c.Tree = {
val parts = extractParts
if (parts.size != 1) c.abort(c.prefix.tree.pos, "Slang c\"...\" should not contain $$ arguments.")
val s = Macro.eval[String](c)(parts.head)
if (s.codePointCount(0, s.length) != 1) c.abort(c.prefix.tree.pos, "Slang c\"...\" can only have a single character.")
q"_root_.org.sireum.C(${parts.head}.codePointAt(0))"
}
def f32Apply(args: c.Tree*): c.Tree = {
val parts = extractParts
if (parts.size != 1) c.abort(c.prefix.tree.pos, "Slang f32\"...\" should not contain $$ arguments.")
q"_root_.org.sireum.F32.$$String(${parts.head})"
}
def f64Apply(args: c.Tree*): c.Tree = {
val parts = extractParts
if (parts.size != 1) c.abort(c.prefix.tree.pos, "Slang f64\"...\" should not contain $$ arguments.")
q"_root_.org.sireum.F64.$$String(${parts.head})"
}
def rApply(args: c.Tree*): c.Tree = {
val parts = extractParts
if (parts.size != 1) c.abort(c.prefix.tree.pos, "Slang r\"...\" should not contain $$ arguments.")
q"_root_.org.sireum.R.$$String(${parts.head})"
}
def stringApply(args: c.Tree*): c.Tree = {
val parts = extractParts
if (parts.size != 1) c.abort(c.prefix.tree.pos, "Slang string\"...\" should not contain $$ arguments.")
q"_root_.org.sireum.String(${parts.head})"
}
def $assign(arg: c.Tree): c.Tree = {
def args(n: Int): c.Tree = {
val l = (for (i <- 1 to n) yield
Apply(q"_root_.org.sireum.helper.assign", List(Select(Ident(TermName("x")), TermName(s"_$i"))))).toList
Block(List(q"val x = $arg"),
Apply(Select(Ident(TermName("scala")), TermName(s"Tuple$n")), l))
}
//println(showRaw(arg))
val mm = c.typeOf[MutableMarker]
val r = arg match {
case q"(..$args)" if args.size > 1 => arg
case _ =>
if (arg.tpe <:< mm) q"_root_.org.sireum.helper.assignMut($arg)"
else if (arg.tpe.typeSymbol.fullName.startsWith("scala.Tuple")) {
val n = arg.tpe.typeSymbol.fullName.substring("scala.Tuple".length).toInt
args(n)
}
else arg
}
//println(showRaw(r))
//println(showCode(r))
r
}
def $ret(arg: c.Tree): c.Tree = {
def args(n: Int): c.Tree = {
val l = (for (i <- 1 to n) yield
Apply(q"_root_.org.sireum.helper.ret", List(Select(Ident(TermName("x")), TermName(s"_$i"))))).toList
Block(List(q"val x = $arg"),
Apply(Select(Ident(TermName("scala")), TermName(s"Tuple$n")), l))
}
//println(showRaw(arg))
val mm = c.typeOf[MutableMarker]
val r = arg match {
case q"(..$args)" if args.size > 1 => arg
case _ =>
if (arg.tpe <:< mm) q"_root_.org.sireum.helper.retMut($arg)"
else if (arg.tpe.typeSymbol.fullName.startsWith("scala.Tuple")) {
val n = arg.tpe.typeSymbol.fullName.substring("scala.Tuple".length).toInt
args(n)
}
else arg
}
//println(showRaw(r))
//println(showCode(r))
r
}
def $tmatch(arg: c.Tree): c.Tree = {
def args(n: Int): c.Tree = {
val l = (for (i <- 1 to n) yield
Apply(q"_root_.org.sireum.helper.assign", List(Select(Ident(TermName("x")), TermName(s"_$i"))))).toList
Block(List(q"val x = $arg"),
Apply(Select(Ident(TermName("scala")), TermName(s"Tuple$n")), l))
}
//println(showRaw(arg))
val r = arg match {
case q"(..$args)" if args.size > 1 => arg
case _ =>
if (arg.tpe.typeSymbol.fullName.startsWith("scala.Tuple")) {
val n = arg.tpe.typeSymbol.fullName.substring("scala.Tuple".length).toInt
args(n)
}
else arg
}
//println(showRaw(r))
//println(showCode(r))
r
}
def parMapImpl(poolRef: c.Tree, cores: c.Tree, arg: c.Tree, f: c.Tree): c.Tree =
if (isJsCheck) q"$arg.map($f).toIndexedSeq"
else
q"""{
val newPool = new _root_.java.util.concurrent.ForkJoinPool($cores)
val success = $poolRef.compareAndSet(null, newPool)
if (!success) newPool.shutdown()
val pc = _root_.scala.collection.parallel.mutable.ParArray($arg: _*)
pc.tasksupport = new _root_.scala.collection.parallel.ForkJoinTaskSupport($poolRef.get.asInstanceOf[_root_.java.util.concurrent.ForkJoinPool])
val r = pc.map($f).toIndexedSeq
if (success) $poolRef.getAndSet(null).asInstanceOf[_root_.java.util.concurrent.ForkJoinPool].shutdown()
r
}"""
def syncImpl(o: c.Tree, arg: c.Tree): c.Tree = if (isJsCheck) arg else q"$o.synchronized { $arg }"
def sn(args: c.Tree*): c.Tree = {
val parts = extractParts
if (parts.size != 1) c.abort(c.prefix.tree.pos, "Slang sn\"...\" should not contain $$ arguments.")
q"???"
}
def st(args: c.Tree*): c.Tree = {
def processArg(e: c.Tree, sep: c.Tree): c.Tree = {
val t = e.tpe.dealias
val templ = c.typeOf[org.sireum.$internal.STMarker]
val r =
if (t <:< templ) q"ST.Templ(scala.Seq($e), $sep)"
else if (t <:< c.typeOf[ISMarker] || t <:< c.typeOf[MSMarker]) {
t.typeArgs.length match {
case 1 if t.typeArgs.head <:< templ => q"ST.Templ($e.elements, $sep)"
case 2 if t.typeArgs(1) <:< templ => q"ST.Templ($e.elements, $sep)"
case _ => q"ST.Any($e.elements.map($$internal.Option.apply), $sep)"
}
} else if (t.erasure <:< c.typeOf[CollectionCompat.IterableOnce[Any]].erasure) {
if (t.typeArgs.head <:< templ) q"ST.Templ($e.toSeq, $sep)"
else q"ST.Any($e.toSeq.map($$internal.Option.apply), $sep)"
} else q"ST.Any(scala.Seq($$internal.Option($e)), $sep)"
//println(showCode(r))
r
}
//println(showRaw(c.prefix.tree))
//println(showCode(c.prefix.tree))
val pos = c.prefix.tree.pos
val isSingle =
if (pos.source.content.length >= pos.start + 5)
new String(pos.source.content.slice(pos.start, pos.start + 5)) != "st\"\"\""
else true
val parts = {
val ps = extractParts
if (isSingle) ps.map(p => q"StringContext.processEscapes($p)") else ps
}
val stArgs = for (arg <- args) yield arg match {
case q"(..$exprs)" if exprs.size > 1 =>
if (exprs.size != 2) c.abort(arg.pos, s"Expecting a pair instead of a ${exprs.size}-tuple.")
val e = exprs(1).asInstanceOf[c.Tree]
val first = exprs.head.asInstanceOf[c.Tree]
val t = e.tpe
if (t <:< c.typeOf[Predef.String]) processArg(first, e)
else if (t.typeSymbol.fullName == "org.sireum.String") processArg(first, q"$e.value")
else c.abort(e.pos, s"Expecting a separator string instead of '${showCode(e)}'.")
case _ =>
processArg(arg, Literal(Constant("")))
}
val source = if (pos.isRange) {
val text = pos.source.content
val sb = new java.lang.StringBuilder
for (_ <- 0 until pos.column - 1) sb.append(' ')
for (i <- pos.start until pos.end) {
sb.append(text(i))
}
sb.toString
} else templateString
q"ST(scala.Seq(..$parts), scala.Seq[ST.Arg](..$stArgs), ${Literal(Constant(source))})"
}
def proc(args: c.Tree*): c.Tree = {
val pos = c.prefix.tree.pos
val isSingle =
if (pos.source.content.length >= pos.start + 7)
new String(pos.source.content.slice(pos.start, pos.start + 7)) != "proc\"\"\""
else true
val parts = {
val ps = extractParts
if (isSingle) ps.map(p => q"StringContext.processEscapes($p)") else ps
}
val stArgs = for (arg <- args) yield q"""ST.Any(scala.Seq($$internal.Option(Os_Ext.pathString($arg))), "")"""
val source = if (pos.isRange) {
val text = pos.source.content
val sb = new java.lang.StringBuilder
for (_ <- 0 until pos.column - 1) sb.append(' ')
for (i <- pos.start until pos.end) {
sb.append(text(i))
}
sb.toString
} else templateString
val r = q"Os.procs(ST(scala.Seq(..$parts), scala.Seq[ST.Arg](..$stArgs), ${Literal(Constant(source))}).render)"
//println(showCode(r))
r
}
def isJsImpl: c.Tree = if (isJsCheck) q"true" else q"false"
def exec(command: Array[String], dir: java.io.File): String = {
val pb = new ProcessBuilder(command: _*)
//pb.redirectErrorStream(true)
pb.directory(dir)
val exec = pb.start()
val br = new java.io.BufferedReader(new java.io.InputStreamReader(exec.getInputStream))
val sb = new StringBuilder
var c = br.read()
while (c != -1) {
sb.append(c.toChar)
c = br.read()
}
exec.waitFor()
return sb.toString
}
def commitHashImpl: c.Tree = {
val f = c.enclosingPosition.pos.source.file.file
//print(s"Retrieving commit hash for ${f.getName} from ${f.getParent}: ")
val star = if (exec(Array("git", "status", "--porcelain"), f.getParentFile).trim == "") "" else "*"
val hash = exec(Array("git", "log", "-n", "1", "--pretty=format:%H"), f.getParentFile).trim
val r = s"$hash$star"
//println(r)
c.universe.Literal(c.universe.Constant(r))
}
def versionImpl: c.Tree = {
val f = c.enclosingPosition.pos.source.file.file
//print(s"Retrieving version for ${f.getName} from ${f.getParent}: ")
val star = if (exec(Array("git", "status", "--porcelain"), f.getParentFile).trim == "") "" else "*"
val v = exec(Array("git", "log", "-n", "1", "--date=format:%Y%m%d", "--pretty=format:4.%cd.%h"), f.getParentFile).trim
val r = s"$v$star"
//println(r)
c.universe.Literal(c.universe.Constant(r))
}
}
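// Usage sketch for parMap (illustrative): the macro expands to a parallel map over
// a shared ForkJoinPool held in poolRef, or to a plain sequential map when
// compiling for Scala.js.
//
//   val pool = new java.util.concurrent.atomic.AtomicReference[AnyRef]()
//   val squares = Macro.parMap(pool, 4, Seq(1, 2, 3), (n: Int) => n * n)
//   // squares: IndexedSeq[Int] = Vector(1, 4, 9)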
|
sireum/v3-logika-runtime
|
macros/shared/src/main/scala/org/sireum/$internal/Macro.scala
|
Scala
|
bsd-2-clause
| 13,859
|
/*
* Copyright 2002-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package skinny.test
import javax.servlet.http.{ Cookie, HttpServletResponse }
import javax.servlet.ServletOutputStream
import java.io.{ ByteArrayOutputStream, PrintWriter }
import java.util._
class MockHttpServletResponse extends HttpServletResponse {
var CHARSET_PREFIX = "charset="
var CONTENT_TYPE_HEADER = "Content-Type"
var CONTENT_LENGTH_HEADER = "Content-Length"
var LOCATION_HEADER = "Location"
//---------------------------------------------------------------------
// ServletResponse properties
//---------------------------------------------------------------------
var outputStreamAccessAllowed = true
var writerAccessAllowed = true
var characterEncoding = "ISO-8859-1"
var charset = false
var content = new ByteArrayOutputStream()
var writer: PrintWriter = _
var contentLength: Long = 0
var contentType: String = _
var bufferSize = 4096
var committed: Boolean = false
var locale: Locale = Locale.getDefault()
//---------------------------------------------------------------------
// HttpServletResponse properties
//---------------------------------------------------------------------
var cookies = new ArrayList[Cookie]()
var headers = new LinkedHashMap[String, HeaderValueHolder]
var status = 200
var errorMessage: String = _
var forwardedUrl: String = _
var includedUrls = new ArrayList[String]
override def getLocale: Locale = locale
  override def setLocale(loc: Locale): Unit = {
    this.locale = loc
  }
override def reset(): Unit = {
resetBuffer()
characterEncoding = null
contentLength = 0
contentType = null
locale = null
cookies.clear()
headers.clear()
status = HttpServletResponse.SC_OK
errorMessage = null
}
override def isCommitted: Boolean = committed
override def resetBuffer(): Unit = content.reset()
override def flushBuffer(): Unit = committed = true
override def getBufferSize: Int = bufferSize
  override def setBufferSize(size: Int): Unit = {
    this.bufferSize = size
  }
override def setContentType(contentType: String): Unit = {
this.contentType = contentType
}
override def setContentLength(len: Int): Unit = {
this.contentLength = len
}
override def setCharacterEncoding(charset: String): Unit = {
this.characterEncoding = charset
}
override def getWriter: PrintWriter = writer
val stubOutputStream = new MockServletOutputStream
override def getOutputStream: ServletOutputStream = stubOutputStream
override def getContentType: String = contentType
override def getCharacterEncoding: String = characterEncoding
override def getStatus: Int = status
override def setStatus(sc: Int, sm: String): Unit = {
// TODO: sm is ignored for now
this.status = sc
}
override def setStatus(sc: Int): Unit = {
this.status = sc
}
private def _addHeader(name: String, value: Any): Unit = {
Option(headers.get(name)).map(_.getValues()).map(_.add(value)).getOrElse(_setHeader(name, value))
}
private def _setHeader(name: String, value: Any): Unit = {
headers.put(name, HeaderValueHolder(value))
}
override def getHeaderNames: Collection[String] = headers.keySet
override def getHeaders(name: String): Collection[String] = {
Option(headers.get(name)).map(_.getStringValues).getOrElse(new java.util.ArrayList[String])
}
override def getHeader(name: String): String = {
Option(headers.get(name)).map(_.getStringValue).orNull[String]
}
override def addHeader(name: String, value: String): Unit = _addHeader(name, value)
override def setHeader(name: String, value: String): Unit = _setHeader(name, value)
override def addIntHeader(name: String, value: Int): Unit = _addHeader(name, value)
override def setIntHeader(name: String, value: Int): Unit = _setHeader(name, value)
override def addDateHeader(name: String, date: Long): Unit = _addHeader(name, date)
override def setDateHeader(name: String, date: Long): Unit = _setHeader(name, date)
override def containsHeader(name: String): Boolean = headers.keySet.contains(name)
override def sendRedirect(location: String): Unit = {
setHeader(LOCATION_HEADER, location)
setStatus(HttpServletResponse.SC_MOVED_TEMPORARILY)
committed = true
}
override def sendError(sc: Int): Unit = {
status = sc
committed = true
}
override def sendError(sc: Int, msg: String): Unit = {
status = sc
errorMessage = msg
committed = true
}
override def encodeRedirectUrl(url: String): String = encodeRedirectURL(url)
override def encodeUrl(url: String): String = encodeURL(url)
override def encodeRedirectURL(url: String): String = encodeURL(url)
override def encodeURL(url: String): String = url
override def addCookie(cookie: Cookie): Unit = cookies.add(cookie)
override def setContentLengthLong(len: Long): Unit = {
this.contentLength = len
}
}
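// Usage sketch (illustrative) for asserting on the mock in a test:
//
//   val res = new MockHttpServletResponse
//   res.sendRedirect("/login")
//   assert(res.getStatus == HttpServletResponse.SC_MOVED_TEMPORARILY)
//   assert(res.getHeader("Location") == "/login" && res.isCommitted)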
|
seratch/skinny-framework
|
test/src/main/scala/skinny/test/MockHttpServletResponse.scala
|
Scala
|
mit
| 5,746
|
/*
* Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com>
*/
package play.core.j
import play.api._
import play.api.mvc._
import java.io.File
import scala.concurrent.Future
import play.api.libs.iteratee._
import scala.util.control.NonFatal
/** Adapter that holds the Java `GlobalSettings` and acts as a Scala `GlobalSettings` for the framework. */
class JavaGlobalSettingsAdapter(val underlying: play.GlobalSettings) extends GlobalSettings {
require(underlying != null, "underlying cannot be null")
override def beforeStart(app: Application) {
underlying.beforeStart(app.injector.instanceOf[play.Application])
}
override def onStart(app: Application) {
underlying.onStart(app.injector.instanceOf[play.Application])
}
override def onStop(app: Application) {
underlying.onStop(app.injector.instanceOf[play.Application])
}
override def onRouteRequest(request: RequestHeader): Option[Handler] = {
val r = JavaHelpers.createJavaRequest(request)
Option(underlying.onRouteRequest(r)).map(Some(_)).getOrElse(super.onRouteRequest(request))
}
override def onError(request: RequestHeader, ex: Throwable): Future[Result] = {
JavaHelpers.invokeWithContextOpt(request, req => underlying.onError(req, ex))
.getOrElse(super.onError(request, ex))
}
override def onHandlerNotFound(request: RequestHeader): Future[Result] = {
JavaHelpers.invokeWithContextOpt(request, req => underlying.onHandlerNotFound(req))
.getOrElse(super.onHandlerNotFound(request))
}
override def onBadRequest(request: RequestHeader, error: String): Future[Result] = {
JavaHelpers.invokeWithContextOpt(request, req => underlying.onBadRequest(req, error))
.getOrElse(super.onBadRequest(request, error))
}
override def onLoadConfig(config: Configuration, path: File, classloader: ClassLoader, mode: Mode.Mode) = {
import JavaModeConverter.asJavaMode
Option(underlying.onLoadConfig(new play.Configuration(config), path, classloader, mode))
.map(_.getWrappedConfiguration).getOrElse(super.onLoadConfig(config, path, classloader, mode))
}
override def doFilter(a: EssentialAction): EssentialAction = {
try {
Filters(super.doFilter(a), underlying.filters.map(_.newInstance: play.api.mvc.EssentialFilter): _*)
} catch {
case NonFatal(e) => {
import play.api.libs.iteratee.Execution.Implicits.trampoline
EssentialAction(req => Iteratee.flatten(onError(req, e).map(result => Done(result, Input.Empty))))
}
}
}
}
|
jyotikamboj/container
|
pf-framework/src/play/src/main/scala/play/core/j/JavaGlobalSettingsAdapter.scala
|
Scala
|
mit
| 2,529
|
/*
Copyright (c) 2016, Rice University
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of Rice University
nor the names of its contributors may be used to endorse or
promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.apache.spark.rdd.cl.tests
import java.util.LinkedList
import com.amd.aparapi.internal.writer.ScalaArrayParameter
import com.amd.aparapi.internal.model.Tuple2ClassModel
import com.amd.aparapi.internal.model.ClassModel
import com.amd.aparapi.internal.model.HardCodedClassModels
import com.amd.aparapi.internal.model.DenseVectorClassModel
import com.amd.aparapi.internal.model.ScalaArrayClassModel
import org.apache.spark.rdd.cl.SyncCodeGenTest
import org.apache.spark.rdd.cl.CodeGenTest
import org.apache.spark.rdd.cl.CodeGenTests
import org.apache.spark.rdd.cl.CodeGenUtil
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.cl.PrimitiveArrayInputBufferWrapperConfig
object ASPLOSAES extends SyncCodeGenTest[Array[Int], Array[Int]] {
def getExpectedException() : String = { return null }
def getExpectedKernel() : String = { getExpectedKernelHelper(getClass) }
def getExpectedNumInputs : Int = {
1
}
def init() : HardCodedClassModels = {
val models = new HardCodedClassModels()
val arrayModel = ScalaArrayClassModel.create("I")
models.addClassModelFor(classOf[Array[_]], arrayModel)
models
}
def complete(params : LinkedList[ScalaArrayParameter]) {
}
def getFunction() : Function1[Array[Int], Array[Int]] = {
val bKey : Broadcast[Array[Int]] = null
new Function[Array[Int], Array[Int]] {
val sbox: Array[Int] = Array(
0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5,
0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76,
0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0,
0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0,
0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc,
0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15,
0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a,
0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75,
0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0,
0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84,
0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b,
0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf,
0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85,
0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8,
0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5,
0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2,
0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17,
0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73,
0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88,
0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb,
0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c,
0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79,
0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9,
0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08,
0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6,
0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a,
0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e,
0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e,
0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94,
0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf,
0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68,
0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16
)
def rj_xtime(x: Int): Int = {
val mask: Int = (x.toInt & 0x80).toInt
        if (mask != 0)
((x << 1) ^ 0x1b).toInt
else
(x << 1).toInt
}
override def apply(data : Array[Int]) : Array[Int] = {
val key = new Array[Int](32)
val enckey = new Array[Int](32)
val deckey = new Array[Int](32)
val aes_data = new Array[Int](16)
var rcon = 1
var i: Int = 0
var j: Int = 0
var p: Int = 0
var q: Int = 0
while (i < 16) {
aes_data(i) = data(i)
i = (i + 1).toInt
}
i = 0
while (i < 32) {
enckey(i) = bKey.value(i)
deckey(i) = bKey.value(i)
i = (i + 1).toInt
}
i = 7
while (i > 0) {
// add_expandEncKey(deckey, rcon)
deckey(0) = (deckey(0) ^ sbox(29) ^ rcon).toInt
deckey(1) = (deckey(1) ^ sbox(30)).toInt
deckey(2) = (deckey(2) ^ sbox(31)).toInt
deckey(3) = (deckey(3) ^ sbox(28)).toInt
rcon = ((rcon << 1) ^ (((rcon >> 7) & 1) * 0x1b)).toInt
deckey(4) = (deckey(4) ^ deckey(0)).toInt
deckey(5) = (deckey(5) ^ deckey(1)).toInt
deckey(6) = (deckey(6) ^ deckey(2)).toInt
deckey(7) = (deckey(7) ^ deckey(3)).toInt
deckey(8) = (deckey(8) ^ deckey(4)).toInt
deckey(9) = (deckey(9) ^ deckey(5)).toInt
deckey(10) = (deckey(10) ^ deckey(6)).toInt
deckey(11) = (deckey(11) ^ deckey(7)).toInt
deckey(12) = (deckey(12) ^ deckey(8)).toInt
deckey(13) = (deckey(13) ^ deckey(9)).toInt
deckey(14) = (deckey(14) ^ deckey(10)).toInt
deckey(15) = (deckey(15) ^ deckey(11)).toInt
deckey(16) = (deckey(16) ^ sbox(12)).toInt
deckey(17) = (deckey(17) ^ sbox(13)).toInt
deckey(18) = (deckey(18) ^ sbox(14)).toInt
deckey(19) = (deckey(19) ^ sbox(15)).toInt
deckey(20) = (deckey(20) ^ deckey(16)).toInt
deckey(21) = (deckey(21) ^ deckey(17)).toInt
deckey(22) = (deckey(22) ^ deckey(18)).toInt
deckey(23) = (deckey(23) ^ deckey(19)).toInt
deckey(24) = (deckey(24) ^ deckey(20)).toInt
deckey(25) = (deckey(25) ^ deckey(21)).toInt
deckey(26) = (deckey(26) ^ deckey(22)).toInt
deckey(27) = (deckey(27) ^ deckey(23)).toInt
deckey(28) = (deckey(28) ^ deckey(24)).toInt
deckey(29) = (deckey(29) ^ deckey(25)).toInt
deckey(30) = (deckey(30) ^ deckey(26)).toInt
deckey(31) = (deckey(31) ^ deckey(27)).toInt
i = (i - 1).toInt
}
//aes_addRoundKey_cpy(aes_data, enckey, key)
i = 15
while (i > 0) {
            key(i) = enckey(i)
            aes_data(i) = (aes_data(i) ^ enckey(i)).toInt
            key(i + 16) = enckey(i + 16)
i = (i - 1).toInt
}
rcon = 1
i = 0
while (i < 14) {
// sub byte
j = 15
while (j > 0) {
aes_data(j) = sbox(aes_data(j) & 0xff)
j = (j - 1).toInt
}
// shift rows
            p = aes_data(1)
aes_data(1) = aes_data(5)
aes_data(5) = aes_data(9)
aes_data(9) = aes_data(13)
aes_data(13) = p
p = aes_data(10)
aes_data(10) = aes_data(2)
aes_data(2) = p
q = aes_data(3)
aes_data(3) = aes_data(15)
aes_data(15) = aes_data(11)
aes_data(11) = aes_data(7)
aes_data(7) = q
q = aes_data(14)
aes_data(14) = aes_data(6)
aes_data(6) = q
// mix columns
j = 0
while (j < 16) {
var a = aes_data(j)
var b = aes_data(j + 1)
var c = aes_data(j + 2)
var d = aes_data(j + 3)
var e = (a ^ b ^ c ^ d).toInt
aes_data(j) = (aes_data(j) ^ e ^ rj_xtime(a ^ b)).toInt
              aes_data(j + 1) = (aes_data(j + 1) ^ e ^ rj_xtime(b ^ c)).toInt
              aes_data(j + 2) = (aes_data(j + 2) ^ e ^ rj_xtime(c ^ d)).toInt
              aes_data(j + 3) = (aes_data(j + 3) ^ e ^ rj_xtime(d ^ a)).toInt
j = (j + 4).toInt
}
            if (i % 2 == 1) {
// aes_addRoundKey(aes_data, key(16))
j = 15
while (j > 0) {
aes_data(j) = (aes_data(j) ^ key(16 + j)).toInt
j = (j - 1).toInt
}
}
else {
// aes_expandEncKey(key, rcon)
j = 0
key(0) = (key(0) ^ sbox(29) ^ rcon).toInt
key(1) = (key(1) ^ sbox(30)).toInt
key(2) = (key(2) ^ sbox(31)).toInt
key(3) = (key(3) ^ sbox(28)).toInt
rcon = ((rcon << 1) ^ (((rcon >> 7) & 1) * 0x1b)).toInt
j = 4
while (j < 16) {
key(j) = (key(j) ^ key(j - 4)).toInt
key(j + 1) = (key(j + 1) ^ key(j - 3)).toInt
key(j + 2) = (key(j + 2) ^ key(j - 2)).toInt
key(j + 3) = (key(j + 3) ^ key(j - 1)).toInt
j = (j + 4).toInt
}
key(16) = (key(16) ^ sbox(12)).toInt
key(17) = (key(17) ^ sbox(13)).toInt
key(18) = (key(18) ^ sbox(14)).toInt
key(19) = (key(19) ^ sbox(15)).toInt
j = 20
while (j < 32) {
key(j) = (key(j) ^ key(j - 4)).toInt
key(j + 1) = (key(j + 1) ^ key(j - 3)).toInt
key(j + 2) = (key(j + 2) ^ key(j - 2)).toInt
key(j + 3) = (key(j + 3) ^ key(j - 1)).toInt
j = (j + 4).toInt
}
// aes_addRoundKey(aes_data, key)
j = 15
while (j > 0) {
aes_data(j) = (aes_data(j) ^ key(j)).toInt
j = (j - 1).toInt
}
}
i = (i + 1).toInt
}
// sub bytes (aes_data)
i = 15
while (i > 0) {
          aes_data(i) = sbox(aes_data(i) & 0xff)
i = (i - 1).toInt
}
// shift rows (aes_data)
p = aes_data(1)
aes_data(1) = aes_data(5)
aes_data(5) = aes_data(9)
aes_data(9) = aes_data(13)
aes_data(13) = p
p = aes_data(10)
aes_data(10) = aes_data(2)
aes_data(2) = p
q = aes_data(3)
aes_data(3) = aes_data(15)
aes_data(15) = aes_data(11)
aes_data(11) = aes_data(7)
aes_data(7) = q
q = aes_data(14)
aes_data(14) = aes_data(6)
aes_data(6) = q
// add expand enc key(key, rcon)
j = 0
key(0) = (key(0) ^ sbox(29) ^ rcon).toInt
key(1) = (key(1) ^ sbox(30)).toInt
key(2) = (key(2) ^ sbox(31)).toInt
key(3) = (key(3) ^ sbox(28)).toInt
rcon = ((rcon << 1) ^ (((rcon >> 7) & 1) * 0x1b)).toInt
j = 4
while (j < 16) {
key(j) = (key(j) ^ key(j - 4)).toInt
key(j + 1) = (key(j + 1) ^ key(j - 3)).toInt
key(j + 2) = (key(j + 2) ^ key(j - 2)).toInt
key(j + 3) = (key(j + 3) ^ key(j - 1)).toInt
j = (j + 4).toInt
}
key(16) = (key(16) ^ sbox(12)).toInt
key(17) = (key(17) ^ sbox(13)).toInt
key(18) = (key(18) ^ sbox(14)).toInt
key(19) = (key(19) ^ sbox(15)).toInt
j = 20
while (j < 32) {
key(j) = (key(j) ^ key(j - 4)).toInt
          key(j + 1) = (key(j + 1) ^ key(j - 3)).toInt
          key(j + 2) = (key(j + 2) ^ key(j - 2)).toInt
          key(j + 3) = (key(j + 3) ^ key(j - 1)).toInt
j = (j + 4).toInt
}
// add round key(aes_data, key)
j = 15
while (j > 0) {
aes_data(j) = (aes_data(j) ^ key(j)).toInt
j = (j - 1).toInt
}
aes_data
}
}
}
}
|
agrippa/spark-swat
|
swat/src/test/scala/org/apache/spark/rdd/cl/tests/ASPLOSAES.scala
|
Scala
|
bsd-3-clause
| 13,241
|
package ml.combust.mleap.core.feature
import ml.combust.mleap.core.types.{ScalarType, StructField}
import org.scalatest.FunSpec
class ImputerModelSpec extends FunSpec {
describe("input/output schema"){
val model = ImputerModel(12, 23.4, "mean")
it("Has the right input schema") {
assert(model.inputSchema.fields ==
Seq(StructField("input", ScalarType.Double)))
}
it("Has the right output schema") {
assert(model.outputSchema.fields ==
Seq(StructField("output", ScalarType.Double.nonNullable)))
}
}
}
|
combust/mleap
|
mleap-core/src/test/scala/ml/combust/mleap/core/feature/ImputerModelSpec.scala
|
Scala
|
apache-2.0
| 557
|
package io.hnfmr.chapter4
import cats.data.State
import cats.syntax.applicative._
object StateTut extends App {
type CalcState[A] = State[List[Int], A]
def evalOne(sym: String): CalcState[Int] =
sym match {
case "+" => operator(_ + _)
case "-" => operator(_ - _)
case "*" => operator(_ * _)
case "/" => operator(_ / _)
case a => operand(a.toInt)
}
def operand(num: Int): CalcState[Int] =
State[List[Int], Int] { stack => (num :: stack, num)}
def operator(func: (Int, Int) => Int): CalcState[Int] =
State[List[Int], Int] {
      case b :: a :: tail =>
val ans = func(a, b)
(ans :: tail, ans)
case _ => sys.error("Impossible!")
}
val program = for {
_ <- evalOne("1")
_ <- evalOne("3")
ans <- evalOne("/")
} yield ans
println(program.runA(List()).value)
def evalAll(input: List[String]): CalcState[Int] = {
val i = 0.pure[CalcState]
input.foldLeft(i) { (a, b) => {
a flatMap (_ => evalOne(b) )
}}
}
println(evalAll(List("1", "2", "+")).runA(Nil).value)
}
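// Natural extension (sketch, mirroring the "Scala with Cats" exercise): evaluate a
// whole postfix expression by splitting on whitespace.
//
//   def evalInput(input: String): Int =
//     evalAll(input.split(" ").toList).runA(Nil).value
//
//   evalInput("1 2 + 3 4 + *") // == 21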
|
hnfmr/advanced-scala
|
src/main/scala/io/hnfmr/chapter4/StateTut.scala
|
Scala
|
mit
| 1,087
|
/*
The stat assignments and related setup will be done in the character creator; too much was being handled by the core classes, so I have split them up.
A separate combat mechanics system will be implemented, since combat is a function of the character's stats after all.
*/
package chara
import scala.collection.mutable.ArrayBuffer
//import skill.spells.Spell
abstract class Characterg{
//Values and Stats/////////////////////////////////
var name: String
// Hit Points
var hp: Int
var maxHP: Int
// Stats 10 is average, one should be able to rearrange these
var str: Int
var dex: Int
var cons: Int
var intl: Int
var wis: Int
var cha: Int
// Mana
var mana: Int //established as a function of int and wis
// Level and experience points
var lvl: Int
var exp: Int
//Class and race checker
var race:String
var clas:String
//Spellbook - contains spells for the characters
//var spellBook = ArrayBuffer[Spell]()
}
class BaseCharacter extends Characterg{
//Values and Stats/////////////////////////////////
override var name = ""
// Hit Points
override var hp = 10
override var maxHP = 10
// Stats 10 is average, one should be able to rearrange these
override var str = 10
override var dex = 10
override var cons = 10
override var intl = 10
override var wis = 10
override var cha = 10
// Mana
override var mana = 0 //established as a function of int and wis
// Level and experience points
override var lvl = 1
override var exp = 0
//Class and race checker
override var race = ""
override var clas = ""
}
abstract class CharacterDecorator(char: Characterg) extends Characterg{
//Values and Stats/////////////////////////////////
name = char.name
// Hit Points
hp = char.hp
maxHP = char.maxHP
// Stats 10 is average, one should be able to rearrange these
str = char.str
dex = char.dex
cons = char.cons
intl = char.intl
wis = char.wis
cha = char.cha
// Mana
mana = char.mana //established as a function of int and wis
// Level and experience points
lvl = char.lvl
exp = char.exp
//Class and race checker
race = char.race
clas = char.clas
}
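// A hypothetical concrete decorator (illustrative sketch of the intended pattern).
// Because Characterg's members are abstract vars, a concrete subclass must still
// define every var; CharacterDecorator's constructor only assigns through the
// abstract setters:
//
//   class ElfCharacter(char: Characterg) extends CharacterDecorator(char) {
//     override var name = char.name
//     // ...define the remaining vars the same way, then layer racial tweaks:
//     override var dex = char.dex + 2
//   }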
|
SystemicCypher/Games
|
src/main/scala/chara/character.scala
|
Scala
|
mit
| 2,092
|
package controllers
import java.util.concurrent.TimeUnit
import java.util.{Date, UUID}
import javax.inject.Inject
import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.Materializer
import akka.stream.scaladsl.{Flow, Sink, Source}
import controllers.TaskSolver._
import data.TaskDao
import models.{Language, Task}
import monix.execution.FutureUtils.extensions._
import monix.execution.Scheduler.Implicits.global
import monix.execution.cancelables.AssignableCancelable
import monix.reactive.{Observable, OverflowStrategy}
import org.scalatest.Suite
import play.api.Logger
import play.api.cache.CacheApi
import play.api.data.Form
import play.api.data.Forms._
import play.api.i18n.{I18nSupport, MessagesApi}
import play.api.libs.json.{JsValue, Json}
import play.api.mvc.{Action, Controller, WebSocket}
import service.reflection.{DynamicSuiteExecutor, RuntimeSuiteExecutor}
import shared.model.{Event, Line}
import scala.concurrent.duration.Duration._
import scala.concurrent.duration._
import scala.concurrent.{Future, Promise}
import scala.language.postfixOps
import scala.util.control.NonFatal
import scala.util.{Failure, Success}
class TaskSolver @Inject()(dynamicExecutor: DynamicSuiteExecutor, runtimeExecutor: RuntimeSuiteExecutor,
dao: TaskDao, val messagesApi: MessagesApi, cache: CacheApi)
(implicit system: ActorSystem, mat: Materializer)
extends Controller with I18nSupport with JSONFormats {
val solutionForm = Form {
mapping(
solution -> nonEmptyText,
year -> longNumber,
lang -> nonEmptyText,
timeuuid -> nonEmptyText
)(SolutionForm.apply)(SolutionForm.unapply)
}
def getTask(year: Long, lang: String, timeuuid: UUID) = Action.async { implicit request =>
def notFound = Redirect(routes.DevGymApp.index).flashing(flashToUser -> messagesApi("taskNotFound"))
val task = getCachedTask(year, lang, timeuuid)
task.map {
case Some(t) => Ok(views.html.task(t.name, t.description,
solutionForm.fill(SolutionForm(t.solutionTemplate, year, lang, timeuuid.toString))))
case None => notFound
}.recover { case NonFatal(e) => notFound }
}
def taskStream = Action { req =>
Ok.chunked(req.body.asJson.fold(Source.empty[JsValue]) { clientInput =>
Source.fromPublisher(
Observable.create[Event](OverflowStrategy.DropOld(20)) { downstream =>
val cancelable = AssignableCancelable.single()
val prevTimestamp = (clientInput \\ "prevTimestamp").as[Long]
val currentTimestamp = (clientInput \\ "currentTimestamp").as[Long]
if (Duration(currentTimestamp - prevTimestamp, TimeUnit.MILLISECONDS) < 1.seconds) {
downstream.onNext(Line("Too many requests per second from the same client. Slow down"))
downstream.onComplete()
} else {
val solution = (clientInput \\ "solution").as[String]
val year = (clientInput \\ "year").as[Long]
val lang = (clientInput \\ "lang").as[String]
val timeuuid = (clientInput \\ "timeuuid").as[String]
getCachedTask(year, lang, UUID.fromString(timeuuid)).onComplete {
case Success(Some(task)) =>
val (checkNext, onBlockComplete) = service.testAsync { testResult =>
downstream.onNext(testResult)
downstream.onComplete()
}
val block: (String => Unit) => Unit = dynamicExecutor(solution, task.suite, task.solutionTrait)
cancelable := monix.eval.Task(block { next =>
downstream.onNext(Line(next))
checkNext(next)
}).runAsync(onBlockComplete)
case Success(None) =>
downstream.onError(new RuntimeException(s"Task is not available for a given solution: $solution"))
case Failure(ex) =>
downstream.onError(ex)
}
}
cancelable
}.map(Json.toJson(_)).toReactivePublisher)
})
}
private def getCachedTask(year: Long, lang: String, timeuuid: UUID): Future[Option[Task]] = {
def getFromDb: Future[Option[Task]] = {
Logger.debug(s"getting task from db: $year, $lang, $timeuuid")
dao.getTask(new Date(year), Language.withName(lang), timeuuid)
}
val suiteKey = (year, lang, timeuuid).toString()
val maybeTask = cache.get[Task](suiteKey)
maybeTask match {
case Some(_) => Future.successful(maybeTask)
case None =>
val f = getFromDb
f.foreach {
case Some(t) => cache.set(suiteKey, t, expiration)
case None =>
}
f
}
}
/**
* An example of executing test classes already present on the classpath; only the solution
* text coming from a user is parsed. This approach is, of course, faster than
* controllers.TaskSolver#taskStream().
*
* It can be used for the predefined tests of the DevGym platform to get better performance
* on demo tests. Currently this method is unused and should be moved out of here into some
* snippet storage. (A commented sketch of the expected client payload follows this method.)
*
* @return WebSocket
*/
def runtimeTaskStream = WebSocket.accept { _ =>
val clientInputPromise = Promise[JsValue]()
val channel: Observable[Event] =
Observable.create[Event](OverflowStrategy.DropOld(20)) { downstream =>
val cancelable = AssignableCancelable.single()
clientInputPromise.future.timeout(1.second).onComplete {
case Success(fromClient) =>
val (checkNext, onBlockComplete) = service.testAsync { testResult =>
downstream.onNext(testResult)
downstream.onComplete()
}
cancelable := monix.eval.Task {
val suiteClass = "tasktest.SubArrayWithMaxSumTest"
val solutionTrait = "tasktest.SubArrayWithMaxSumSolution"
val solution = (fromClient \\ "solution").as[String]
val block: (String => Unit) => Unit = runtimeExecutor(
Class.forName(suiteClass).asInstanceOf[Class[Suite]],
Class.forName(solutionTrait).asInstanceOf[Class[AnyRef]],
solution)
block { next =>
downstream.onNext(Line(next))
checkNext(next)
}
}.runAsync(onBlockComplete)
case Failure(ex) => downstream.onError(ex)
}
cancelable
}
val sink = Sink.foreach[JsValue](js => clientInputPromise.trySuccess(js))
Flow.fromSinkAndSource(sink, Source.fromPublisher(channel.map(Json.toJson(_)).toReactivePublisher))
}
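// A minimal sketch (the solution body is hypothetical) of the JSON payload this WebSocket
// expects from a client, matching the "solution" field read above:
//   ws.send("""{"solution": "class SubArrayWithMaxSum extends SubArrayWithMaxSumSolution { ... }"}""")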
}
case class SolutionForm(solution: String, year: Long, taskType: String, timeuuid: String)
object TaskSolver {
val cannotCheckNow = "cannotCheckNow"
val solution = "solution"
val year = "year"
val lang = "lang"
val timeuuid = "timeuuid"
val expiration = 60 seconds
}
|
DmytroOrlov/devgym
|
server/app/controllers/TaskSolver.scala
|
Scala
|
apache-2.0
| 7,005
|
package kornell.server.api
import javax.ws.rs._
import javax.ws.rs.core.Response
import kornell.core.entity.{CourseDetailsEntityType, CourseDetailsLibrary}
import kornell.core.to.CourseDetailsLibrariesTO
import kornell.server.jdbc.repository.CourseDetailsLibrariesRepo
import kornell.server.util.AccessDeniedErr
import kornell.server.util.Conditional.toConditional
@Path("courseDetailsLibraries")
class CourseDetailsLibrariesResource {
@Path("{uuid}")
def get(@PathParam("uuid") uuid: String) = CourseDetailsLibraryResource(uuid)
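// each guarded route below executes only for a platform admin, an institution admin,
// or a publisher; any other caller is rejected with AccessDeniedErr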
@POST
@Consumes(Array(CourseDetailsLibrary.TYPE))
@Produces(Array(CourseDetailsLibrary.TYPE))
def create(courseDetailsLibrary: CourseDetailsLibrary): CourseDetailsLibrary = {
CourseDetailsLibrariesRepo.create(courseDetailsLibrary)
}.requiring(isPlatformAdmin, AccessDeniedErr())
.or(isInstitutionAdmin, AccessDeniedErr())
.or(isPublisher, AccessDeniedErr())
.get
@GET
@Path("/{entityType}/{entityUUID}")
@Produces(Array(CourseDetailsLibrariesTO.TYPE))
def getByEntityTypeAndUUID(@PathParam("entityType") entityType: String,
@PathParam("entityUUID") entityUUID: String): CourseDetailsLibrariesTO = {
CourseDetailsLibrariesRepo.getForEntity(entityUUID, CourseDetailsEntityType.valueOf(entityType))
}.requiring(isPlatformAdmin, AccessDeniedErr())
.or(isInstitutionAdmin, AccessDeniedErr())
.or(isPublisher, AccessDeniedErr())
.get
@POST
@Path("/{entityType}/{entityUUID}/moveUp/{index}")
def moveUp(@PathParam("entityType") entityType: String,
@PathParam("entityUUID") entityUUID: String,
@PathParam("index") index: String): Response = {
CourseDetailsLibrariesRepo.moveUp(entityUUID, CourseDetailsEntityType.valueOf(entityType), index.toInt)
Response.noContent.build
}.requiring(isPlatformAdmin, AccessDeniedErr())
.or(isInstitutionAdmin, AccessDeniedErr())
.or(isPublisher, AccessDeniedErr())
.get
@POST
@Path("/{entityType}/{entityUUID}/moveDown/{index}")
def moveDown(@PathParam("entityType") entityType: String,
@PathParam("entityUUID") entityUUID: String,
@PathParam("index") index: String): Response = {
CourseDetailsLibrariesRepo.moveDown(entityUUID, CourseDetailsEntityType.valueOf(entityType), index.toInt)
Response.noContent.build
}.requiring(isPlatformAdmin, AccessDeniedErr())
.or(isInstitutionAdmin, AccessDeniedErr())
.or(isPublisher, AccessDeniedErr())
.get
}
object CourseDetailsLibrariesResource {
def apply(uuid: String) = new CourseDetailsLibraryResource(uuid)
}
|
Craftware/Kornell
|
kornell-api/src/main/scala/kornell/server/api/CourseDetailsLibrariesResource.scala
|
Scala
|
apache-2.0
| 2,552
|
package com.sksamuel.elastic4s
import com.sksamuel.elastic4s.DefinitionAttributes.{DefinitionAttributePreference, DefinitionAttributeRefresh}
import org.elasticsearch.action.get.{GetResponse, MultiGetAction, MultiGetItemResponse, MultiGetRequest, MultiGetRequestBuilder, MultiGetResponse}
import org.elasticsearch.client.Client
import scala.concurrent.Future
/** @author Stephen Samuel */
trait MultiGetDsl extends GetDsl {
implicit object MultiGetDefinitionExecutable
extends Executable[MultiGetDefinition, MultiGetResponse, MultiGetResponse] {
override def apply(c: Client, t: MultiGetDefinition): Future[MultiGetResponse] = {
injectFuture(c.multiGet(t.build, _))
}
}
}
case class MultiGetResult(original: MultiGetResponse) {
import scala.collection.JavaConverters._
@deprecated("use .responses for a scala friendly Seq, or use .original to access the java result", "2.0")
def getResponses() = original.getResponses
def responses: Seq[MultiGetItemResult] = original.iterator.asScala.map(MultiGetItemResult.apply).toList
}
case class MultiGetItemResult(original: MultiGetItemResponse) {
@deprecated("use failure for a scala friendly Option, or use .original to access the java result", "2.0")
def getFailure = original.getFailure
@deprecated("use response for a scala friendly Option, or use .original to access the java result", "2.0")
def getResponse = original.getResponse
def getId = original.getId
def getIndex = original.getIndex
def getType = original.getType
def isFailed = original.isFailed
def failure: Option[MultiGetResponse.Failure] = Option(original.getFailure)
def id = original.getId
def index = original.getIndex
def response: Option[GetResponse] = Option(original.getResponse)
def `type`: String = original.getType
def failed: Boolean = original.isFailed
}
case class MultiGetDefinition(gets: Iterable[GetDefinition])
extends DefinitionAttributePreference
with DefinitionAttributeRefresh {
val _builder = new MultiGetRequestBuilder(ProxyClients.client, MultiGetAction.INSTANCE)
gets foreach { get =>
val item = new MultiGetRequest.Item(get.indexTypes.index, get.indexTypes.types.headOption.orNull, get.id)
item.routing(get.build.routing)
item.fields(get.build.fields: _*)
item.version(get.build.version)
_builder.add(item)
}
def build: MultiGetRequest = _builder.request()
def realtime(realtime: Boolean): this.type = {
_builder.setRealtime(realtime)
this
}
}
|
k4200/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/MultiGetDsl.scala
|
Scala
|
apache-2.0
| 2,503
|
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.fs.storage.converter
import com.typesafe.scalalogging.StrictLogging
import org.apache.hadoop.fs.{FileContext, Path}
import org.locationtech.geomesa.convert2.SimpleFeatureConverter
import org.locationtech.geomesa.features.{ScalaSimpleFeature, TransformSimpleFeature}
import org.locationtech.geomesa.fs.storage.common.AbstractFileSystemStorage.FileSystemPathReader
import org.locationtech.geomesa.utils.collection.CloseableIterator
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.filter.Filter
import scala.util.control.NonFatal
class ConverterFileSystemReader(
fc: FileContext,
converter: SimpleFeatureConverter,
filter: Option[Filter],
transform: Option[(String, SimpleFeatureType)]
) extends FileSystemPathReader with StrictLogging {
override def read(path: Path): CloseableIterator[SimpleFeature] = {
logger.debug(s"Opening file $path")
val iter = try { converter.process(fc.open(path)) } catch {
case NonFatal(e) => logger.error(s"Error processing uri '$path'", e); CloseableIterator.empty
}
transformed(filtered(iter))
}
private def filtered(in: CloseableIterator[SimpleFeature]): CloseableIterator[SimpleFeature] = {
filter match {
case None => in
case Some(f) => in.filter(f.evaluate)
}
}
private def transformed(in: CloseableIterator[SimpleFeature]): CloseableIterator[SimpleFeature] = {
transform match {
case None => in
case Some((tdefs, tsft)) =>
val feature = TransformSimpleFeature(converter.targetSft, tsft, tdefs)
in.map(f => ScalaSimpleFeature.copy(feature.setFeature(f)))
}
}
}
|
elahrvivaz/geomesa
|
geomesa-fs/geomesa-fs-storage/geomesa-fs-storage-convert/src/main/scala/org/locationtech/geomesa/fs/storage/converter/ConverterFileSystemReader.scala
|
Scala
|
apache-2.0
| 2,146
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.json
import java.io.Writer
import java.nio.charset.StandardCharsets
import com.fasterxml.jackson.core._
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.SpecializedGetters
import org.apache.spark.sql.catalyst.util.{ArrayData, DateTimeUtils, MapData}
import org.apache.spark.sql.types._
/**
* `JacksonGenerator` can only be initialized with a `StructType` or a `MapType`.
* Once initialized with a `StructType`, it can be used to write out a struct or an array of
* structs. Once initialized with a `MapType`, it can be used to write out a map or an array
* of maps. An exception is thrown when trying to write out a struct if it was initialized
* with a `MapType`, and vice versa. (A commented usage sketch follows this class.)
*/
private[sql] class JacksonGenerator(
dataType: DataType,
writer: Writer,
options: JSONOptions) {
// A `ValueWriter` is responsible for writing a field of an `InternalRow` to appropriate
// JSON data. Here we are using `SpecializedGetters` rather than `InternalRow` so that
// we can directly access data in `ArrayData` without the help of `SpecificMutableRow`.
private type ValueWriter = (SpecializedGetters, Int) => Unit
// `JacksonGenerator` can only be initialized with a `StructType` or a `MapType`.
require(dataType.isInstanceOf[StructType] || dataType.isInstanceOf[MapType],
s"JacksonGenerator only supports to be initialized with a ${StructType.simpleString} " +
s"or ${MapType.simpleString} but got ${dataType.catalogString}")
// `ValueWriter`s for all fields of the schema
private lazy val rootFieldWriters: Array[ValueWriter] = dataType match {
case st: StructType => st.map(_.dataType).map(makeWriter).toArray
case _ => throw new UnsupportedOperationException(
s"Initial type ${dataType.catalogString} must be a struct")
}
// `ValueWriter` for array data storing rows of the schema.
private lazy val arrElementWriter: ValueWriter = dataType match {
case st: StructType =>
(arr: SpecializedGetters, i: Int) => {
writeObject(writeFields(arr.getStruct(i, st.length), st, rootFieldWriters))
}
case mt: MapType =>
(arr: SpecializedGetters, i: Int) => {
writeObject(writeMapData(arr.getMap(i), mt, mapElementWriter))
}
}
private lazy val mapElementWriter: ValueWriter = dataType match {
case mt: MapType => makeWriter(mt.valueType)
case _ => throw new UnsupportedOperationException(
s"Initial type ${dataType.catalogString} must be a map")
}
private val gen = new JsonFactory().createGenerator(writer).setRootValueSeparator(null)
private val lineSeparator: String = options.lineSeparatorInWrite
private def makeWriter(dataType: DataType): ValueWriter = dataType match {
case NullType =>
(row: SpecializedGetters, ordinal: Int) =>
gen.writeNull()
case BooleanType =>
(row: SpecializedGetters, ordinal: Int) =>
gen.writeBoolean(row.getBoolean(ordinal))
case ByteType =>
(row: SpecializedGetters, ordinal: Int) =>
gen.writeNumber(row.getByte(ordinal))
case ShortType =>
(row: SpecializedGetters, ordinal: Int) =>
gen.writeNumber(row.getShort(ordinal))
case IntegerType =>
(row: SpecializedGetters, ordinal: Int) =>
gen.writeNumber(row.getInt(ordinal))
case LongType =>
(row: SpecializedGetters, ordinal: Int) =>
gen.writeNumber(row.getLong(ordinal))
case FloatType =>
(row: SpecializedGetters, ordinal: Int) =>
gen.writeNumber(row.getFloat(ordinal))
case DoubleType =>
(row: SpecializedGetters, ordinal: Int) =>
gen.writeNumber(row.getDouble(ordinal))
case StringType =>
(row: SpecializedGetters, ordinal: Int) =>
gen.writeString(row.getUTF8String(ordinal).toString)
case TimestampType =>
(row: SpecializedGetters, ordinal: Int) =>
val timestampString =
options.timestampFormat.format(DateTimeUtils.toJavaTimestamp(row.getLong(ordinal)))
gen.writeString(timestampString)
case DateType =>
(row: SpecializedGetters, ordinal: Int) =>
val dateString =
options.dateFormat.format(DateTimeUtils.toJavaDate(row.getInt(ordinal)))
gen.writeString(dateString)
case BinaryType =>
(row: SpecializedGetters, ordinal: Int) =>
gen.writeBinary(row.getBinary(ordinal))
case dt: DecimalType =>
(row: SpecializedGetters, ordinal: Int) =>
gen.writeNumber(row.getDecimal(ordinal, dt.precision, dt.scale).toJavaBigDecimal)
case st: StructType =>
val fieldWriters = st.map(_.dataType).map(makeWriter)
(row: SpecializedGetters, ordinal: Int) =>
writeObject(writeFields(row.getStruct(ordinal, st.length), st, fieldWriters))
case at: ArrayType =>
val elementWriter = makeWriter(at.elementType)
(row: SpecializedGetters, ordinal: Int) =>
writeArray(writeArrayData(row.getArray(ordinal), elementWriter))
case mt: MapType =>
val valueWriter = makeWriter(mt.valueType)
(row: SpecializedGetters, ordinal: Int) =>
writeObject(writeMapData(row.getMap(ordinal), mt, valueWriter))
// For UDT values, they should be in the SQL type's corresponding value type.
// We should not see values in the user-defined class at here.
// For example, VectorUDT's SQL type is an array of double. So, we should expect that v is
// an ArrayData at here, instead of a Vector.
case t: UserDefinedType[_] =>
makeWriter(t.sqlType)
case _ =>
(row: SpecializedGetters, ordinal: Int) =>
val v = row.get(ordinal, dataType)
sys.error(s"Failed to convert value $v (class of ${v.getClass}}) " +
s"with the type of $dataType to JSON.")
}
private def writeObject(f: => Unit): Unit = {
gen.writeStartObject()
f
gen.writeEndObject()
}
private def writeFields(
row: InternalRow, schema: StructType, fieldWriters: Seq[ValueWriter]): Unit = {
var i = 0
while (i < row.numFields) {
val field = schema(i)
if (!row.isNullAt(i)) {
gen.writeFieldName(field.name)
fieldWriters(i).apply(row, i)
}
i += 1
}
}
private def writeArray(f: => Unit): Unit = {
gen.writeStartArray()
f
gen.writeEndArray()
}
private def writeArrayData(
array: ArrayData, fieldWriter: ValueWriter): Unit = {
var i = 0
while (i < array.numElements()) {
if (!array.isNullAt(i)) {
fieldWriter.apply(array, i)
} else {
gen.writeNull()
}
i += 1
}
}
private def writeMapData(
map: MapData, mapType: MapType, fieldWriter: ValueWriter): Unit = {
val keyArray = map.keyArray()
val valueArray = map.valueArray()
var i = 0
while (i < map.numElements()) {
gen.writeFieldName(keyArray.get(i, mapType.keyType).toString)
if (!valueArray.isNullAt(i)) {
fieldWriter.apply(valueArray, i)
} else {
gen.writeNull()
}
i += 1
}
}
def close(): Unit = gen.close()
def flush(): Unit = gen.flush()
/**
* Transforms a single `InternalRow` to a JSON object using Jackson.
* The call is validated by accessing `rootFieldWriters`.
*
* @param row The row to convert
*/
def write(row: InternalRow): Unit = {
writeObject(writeFields(
fieldWriters = rootFieldWriters,
row = row,
schema = dataType.asInstanceOf[StructType]))
}
/**
* Transforms multiple `InternalRow`s or `MapData`s to a JSON array using Jackson.
*
* @param array The array of rows or maps to convert
*/
def write(array: ArrayData): Unit = writeArray(writeArrayData(array, arrElementWriter))
/**
* Transforms a single `MapData` to a JSON object using Jackson.
* The call is validated by accessing `mapElementWriter`.
*
* @param map a map to convert
*/
def write(map: MapData): Unit = {
writeObject(writeMapData(
fieldWriter = mapElementWriter,
map = map,
mapType = dataType.asInstanceOf[MapType]))
}
def writeLineEnding(): Unit = {
// Note that the writer uses the UTF-8 charset, so this string is written out as UTF-8.
gen.writeRaw(lineSeparator)
}
}
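// A minimal usage sketch (schema, options and values are hypothetical, not from this file):
//   val schema = StructType(Seq(StructField("a", IntegerType)))
//   val out = new java.io.CharArrayWriter()
//   val gen = new JacksonGenerator(schema, out, new JSONOptions(Map.empty[String, String], "GMT"))
//   gen.write(InternalRow(1)); gen.flush()   // out now holds {"a":1}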
|
tengpeng/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonGenerator.scala
|
Scala
|
apache-2.0
| 9,126
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import org.apache.spark.ml.attribute.{Attribute, NominalAttribute}
import org.apache.spark.ml.util.DefaultReadWriteTest
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.{SparkContext, SparkFunSuite}
class QuantileDiscretizerSuite
extends SparkFunSuite with MLlibTestSparkContext with DefaultReadWriteTest {
import org.apache.spark.ml.feature.QuantileDiscretizerSuite._
test("Test quantile discretizer") {
checkDiscretizedData(sc,
Array[Double](1, 2, 3, 3, 3, 3, 3, 3, 3),
10,
Array[Double](1, 2, 3, 3, 3, 3, 3, 3, 3),
Array("-Infinity, 1.0", "1.0, 2.0", "2.0, 3.0", "3.0, Infinity"))
checkDiscretizedData(sc,
Array[Double](1, 2, 3, 3, 3, 3, 3, 3, 3),
4,
Array[Double](1, 2, 3, 3, 3, 3, 3, 3, 3),
Array("-Infinity, 1.0", "1.0, 2.0", "2.0, 3.0", "3.0, Infinity"))
checkDiscretizedData(sc,
Array[Double](1, 2, 3, 3, 3, 3, 3, 3, 3),
3,
Array[Double](0, 1, 2, 2, 2, 2, 2, 2, 2),
Array("-Infinity, 2.0", "2.0, 3.0", "3.0, Infinity"))
checkDiscretizedData(sc,
Array[Double](1, 2, 3, 3, 3, 3, 3, 3, 3),
2,
Array[Double](0, 1, 1, 1, 1, 1, 1, 1, 1),
Array("-Infinity, 2.0", "2.0, Infinity"))
}
test("Test getting splits") {
val splitTestPoints = Array(
Array[Double]() -> Array(Double.NegativeInfinity, 0, Double.PositiveInfinity),
Array(Double.NegativeInfinity) -> Array(Double.NegativeInfinity, 0, Double.PositiveInfinity),
Array(Double.PositiveInfinity) -> Array(Double.NegativeInfinity, 0, Double.PositiveInfinity),
Array(Double.NegativeInfinity, Double.PositiveInfinity)
-> Array(Double.NegativeInfinity, 0, Double.PositiveInfinity),
Array(0.0) -> Array(Double.NegativeInfinity, 0, Double.PositiveInfinity),
Array(1.0) -> Array(Double.NegativeInfinity, 1, Double.PositiveInfinity),
Array(0.0, 1.0) -> Array(Double.NegativeInfinity, 0, 1, Double.PositiveInfinity)
)
for ((ori, res) <- splitTestPoints) {
assert(QuantileDiscretizer.getSplits(ori) === res, "Returned splits are invalid.")
}
}
test("Test splits on dataset larger than minSamplesRequired") {
val sqlCtx = SQLContext.getOrCreate(sc)
import sqlCtx.implicits._
val datasetSize = QuantileDiscretizer.minSamplesRequired + 1
val numBuckets = 5
val df = sc.parallelize((1.0 to datasetSize by 1.0).map(Tuple1.apply)).toDF("input")
val discretizer = new QuantileDiscretizer()
.setInputCol("input")
.setOutputCol("result")
.setNumBuckets(numBuckets)
.setSeed(1)
val result = discretizer.fit(df).transform(df)
val observedNumBuckets = result.select("result").distinct.count
assert(observedNumBuckets === numBuckets,
"Observed number of buckets does not equal expected number of buckets.")
}
test("read/write") {
val t = new QuantileDiscretizer()
.setInputCol("myInputCol")
.setOutputCol("myOutputCol")
.setNumBuckets(6)
testDefaultReadWrite(t)
}
}
private object QuantileDiscretizerSuite extends SparkFunSuite {
def checkDiscretizedData(
sc: SparkContext,
data: Array[Double],
numBucket: Int,
expectedResult: Array[Double],
expectedAttrs: Array[String]): Unit = {
val sqlCtx = SQLContext.getOrCreate(sc)
import sqlCtx.implicits._
val df = sc.parallelize(data.map(Tuple1.apply)).toDF("input")
val discretizer = new QuantileDiscretizer().setInputCol("input").setOutputCol("result")
.setNumBuckets(numBucket).setSeed(1)
val result = discretizer.fit(df).transform(df)
val transformedFeatures = result.select("result").collect()
.map { case Row(transformedFeature: Double) => transformedFeature }
val transformedAttrs = Attribute.fromStructField(result.schema("result"))
.asInstanceOf[NominalAttribute].values.get
assert(transformedFeatures === expectedResult,
"Transformed features do not equal expected features.")
assert(transformedAttrs === expectedAttrs,
"Transformed attributes do not equal expected attributes.")
}
}
|
haowu80s/spark
|
mllib/src/test/scala/org/apache/spark/ml/feature/QuantileDiscretizerSuite.scala
|
Scala
|
apache-2.0
| 4,996
|
package hello
import scala.scalajs.js
trait MyTrait {
val x = 5
def foo(y: Int) = x
}
object HelloWorld extends MyTrait {
def main(args: Array[String]): Unit = {
println("hello dotty.js!")
println(foo(4))
}
}
|
som-snytt/dotty
|
sandbox/scalajs/src/hello.scala
|
Scala
|
apache-2.0
| 228
|
package it.dtk.cluster
import akka.actor._
import akka.cluster.ClusterEvent.{CurrentClusterState, MemberUp}
import akka.cluster.{Cluster, Member, MemberStatus}
import akka.event.Logging
import it.dtk.cluster.BackendWorkerProtocol._
import it.dtk.feed.Model.FeedInfo
import it.dtk.kafka.FeedProducerKafka
import net.ceedubs.ficus.Ficus._
object BackendWorkerProtocol {
case class FeedJob(source: FeedInfo)
case class FeedJobResult(source: FeedInfo)
case object BackendRegistration
}
/**
* Created by fabiofumarola on 16/08/15.
*/
class Worker extends Actor {
val config = context.system.settings.config
val masterRole = config.as[String]("app.master-role")
val log = Logging(context.system.eventStream, this.getClass.getCanonicalName)
val cluster = Cluster(context.system)
// subscribe to cluster changes (MemberUp)
// re-subscribe on restart
override def preStart(): Unit = cluster.subscribe(self, classOf[MemberUp])
override def postStop(): Unit = {
producer.close()
cluster.unsubscribe(self)
}
val producer = new FeedProducerKafka(
topic = config.as[String]("kafka.topic"),
clientId = config.as[String]("kafka.consumer-group"),
brokersList = config.as[String]("kafka.brokers"))
override def receive: Receive = {
case msg: FeedJob =>
val executor = context.actorOf(Props(new WorkExecutor(producer)))
executor forward msg
case state: CurrentClusterState =>
state.members.filter(_.status == MemberStatus.Up) foreach register
case MemberUp(m) => register(m)
}
def register(member: Member): Unit = {
if (member.hasRole(masterRole))
context.actorSelection(RootActorPath(member.address) / "user" / masterRole) ! BackendRegistration
}
}
class WorkExecutor(val producer: FeedProducerKafka) extends Actor {
val log = Logging(context.system.eventStream, this.getClass.getCanonicalName)
import com.github.nscala_time.time.Imports._
import it.dtk.feed.logic._
override def receive: Receive = {
case FeedJob(source) =>
log.info("start worker for feed {}", source.url)
val timedSource = if (source.dateLastFeed.isEmpty)
source.copy(dateLastFeed = Some(DateTime.yesterday))
else source
val lastUrls = source.last100Urls.toSet
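// parse the feed and keep only the entries published after the last seen date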
try {
val filtered = FeedUtil.parseFeed(timedSource.url)
.filter(f => f.date > timedSource.dateLastFeed.get)
filtered.foreach { f =>
log.debug(f.toString)
producer.sendSync(f)
}
val lastTime = filtered.map(_.date).max
val filteredUrl = filtered.map(_.uri).toSet
val nextScheduler = FeedSchedulerUtil.when(timedSource.fScheduler, filteredUrl.size)
log.info("extracted {} urls for feed {}", filteredUrl.size, timedSource.url)
val nextIterationUrls = (filteredUrl.toList ++ timedSource.last100Urls).take(100)
sender() ! FeedJobResult(
timedSource.copy(
last100Urls = nextIterationUrls,
countUrl = source.countUrl + filteredUrl.size,
dateLastFeed = Option(lastTime),
fScheduler = nextScheduler))
}
catch {
case ex: Throwable =>
log.error(ex, "error processing feed {}", timedSource.url)
val nextScheduler = FeedSchedulerUtil.gotException(timedSource.fScheduler)
sender() ! FeedJobResult(timedSource.copy(fScheduler = nextScheduler))
}
self ! PoisonPill
}
}
//object WorkerMain extends App {
// val port = if (args.isEmpty) "0" else args(0)
// val config = ConfigFactory.parseString(s"akka.remote.netty.tcp.port=$port").
// withFallback(ConfigFactory.load("worker.conf"))
//
// val actorName = config.as[String]("app.master-role")
// val system = ActorSystem("ClusterSystem", config)
// val worker = system.actorOf(Props(classOf[Worker]), actorName)
// println(s"started actor ${worker.path}")
//}
|
DataToKnowledge/wheretolive-feed
|
feed-cluster/src/main/scala/it/dtk/cluster/Worker.scala
|
Scala
|
apache-2.0
| 3,911
|
package presentation._5
import com.twitter.finagle.http.Method.Get
import com.twitter.finagle.http.filter.Cors
import com.twitter.finagle.http.filter.Cors.HttpFilter
import com.twitter.finagle.http.path.Root
import com.twitter.finagle.http.{Request, Response, Status}
import com.twitter.finagle.{Http, Service}
import io.fintrospect.formats.Argo.JsonFormat.array
import io.fintrospect.formats.Argo.ResponseBuilder._
import io.fintrospect.parameters.Query
import io.fintrospect.renderers.swagger2dot0.{ApiInfo, Swagger2dot0Json}
import io.fintrospect.{ResponseSpec, RouteModule, RouteSpec}
import presentation.Book
class SearchRoute(books: RemoteBooks) {
private val titlePartParam = Query.required.string("titlePart")
def search() = Service.mk[Request, Response] {
request => {
val titlePart = titlePartParam <-- request
books.search(titlePart)
.map(results => results.split(",").map(Book(_)).toSeq)
.map(books => Ok(array(books.map(_.toJson))))
}
}
val route = RouteSpec("search books")
.taking(titlePartParam)
.returning(ResponseSpec.json(Status.Ok -> "search results", array(Book("1984").toJson)))
.at(Get) / "search" bindTo search
}
class SearchApp {
private val apiInfo = ApiInfo("search some books", "1.0", "an api for searching our book collection")
val service = RouteModule(Root, Swagger2dot0Json(apiInfo))
.withRoute(new SearchRoute(new RemoteBooks).route)
.toService
val searchService = new HttpFilter(Cors.UnsafePermissivePolicy).andThen(service)
Http.serve(":9000", searchService)
}
|
daviddenton/fintrospect
|
src/test/scala/presentation/_5/SearchApp.scala
|
Scala
|
apache-2.0
| 1,578
|
object Problem {
class SudokuBoard(val data:Array[Array[Int]]) {
def row(row: Int) = data(row)
def col(col: Int) = data.map(_(col))
def square(sq: Int) = {
// 3x3 block number sq (0..8), read row-major: cc selects the column band, rr the row band
val cc = sq % 3
val rr = sq / 3
data.slice(rr * 3, rr * 3 + 3).map(_.slice(cc * 3, cc * 3 + 3)).reduce(_ ++ _)
}
def isValid() = {
def isValid(select: Int => Seq[Int]): Boolean = {
(0 until 9).forall(n => {
val withoutZeros = select(n).filter(_ != 0)
withoutZeros.size == withoutZeros.distinct.size
})
}
isValid(row) && isValid(col) && isValid(square)
}
def updated(row: Int, col: Int, value: Int) = {
val newData = data.updated(row, data(row).updated(col, value))
new SudokuBoard(newData)
}
def findUpperLeftMostZero(): (Int, Int) = {
val pos = data.flatten.indexOf(0)
if (pos != -1) (pos / 9, pos % 9) else (-1, -1)
}
override def toString = {
data.map("\\n" + _.zipWithIndex.map(x => (if (x._2 % 3 == 0) "| " else " ") + x._1).mkString).zipWithIndex.map(x => if (x._2 % 3 == 0) "\\n" + "-" * 21 + x._1 else x._1).mkString + "\\n"
}
}
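// Backtracking search: take the first empty cell, try digits 1 to 9 and recurse;
// an invalid board prunes the branch, a board with no zeros left is a solution.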
def solve(sudoku: SudokuBoard): Option[SudokuBoard] = {
val (row, col) = sudoku.findUpperLeftMostZero
val noMoreZeros = row == -1 && col == -1
if (!sudoku.isValid) None
else if (noMoreZeros) Some(sudoku) // we are done
else (1 to 9).map(n => {
solve(sudoku.updated(row, col, n))
}).find(_.nonEmpty) getOrElse None
}
def parse(path: String): List[SudokuBoard] = {
scala.io.Source.fromFile(path).mkString.split("\\n")
.zipWithIndex.filter(_._2 % 10 != 0) // drop every 10th line (the "Grid NN" header lines)
.map(x => x._1.toArray.map(_.toString.toInt))
.grouped(9).toList
.map(new SudokuBoard(_))
}
def main(args: Array[String]) {
val start = System.currentTimeMillis
val sudokus = parse("sudoku.txt")
val solvedSudokus = sudokus.map(solve)
// just ensure every puzzle was actually solved
assert(solvedSudokus.forall(_.nonEmpty))
println(solvedSudokus.map(_.get.row(0).take(3).mkString.toInt).sum)
val stop = System.currentTimeMillis
println("Time taken: " + (stop - start) + "ms")
}
}
|
Jiri-Kremser/euler
|
096/Problem.scala
|
Scala
|
gpl-2.0
| 2,155
|
import language.`future-migration`
class Test
object Test {
def foo[A <% Test](x: A) = x // error
}
|
dotty-staging/dotty
|
tests/neg/i11567.scala
|
Scala
|
apache-2.0
| 103
|
package metal
package generic
import scala.annotation.tailrec
import util.Dummy
abstract class HashMap[K, V] extends generic.Map[K, V] {
import HashMap.{UNUSED, USED}
type Immutable = metal.immutable.HashMap[K, V]
type Mutable = metal.mutable.HashMap[K, V]
type Scala = metal.immutable.WrappedHashMap[K, V]
/** Number of defined slots. */
def size: Int
@inline final def longSize = size
/** Number of used slots. */
def used: Int
/** Mask = nSlots - 1, used for hashing. */
def mask: Int
/** Point at which we should grow. */
def limit: Int
def nSlots: Int = buckets.length
def key(i: Int): K = keys(i)
def value(i: Int): V = values(i)
def bucket(i: Int): Byte = buckets(i)
/** Status of the slots in the hash table. */
private[metal] def buckets: Array[Byte]
/** Slots for keys. */
private[metal] def keys: Array[K]
/** Slots for values. */
private[metal] def values: Array[V]
def mutableCopy = new mutable.HashMap[K, V](keys.clone, buckets.clone, values.clone, size, used, mask, limit)
@inline final def isEmpty = size == 0
@inline final def nonEmpty = size > 0
def keyArray(ptr: VPtr[this.type]): Array[K] = keys
def keyIndex(ptr: VPtr[this.type]): Int = ptr.raw.toInt
def valueArray(ptr: VPtr[this.type]): Array[V] = values
def valueIndex(ptr: VPtr[this.type]): Int = ptr.raw.toInt
final def ptrFind[@specialized L](key: L): Ptr[this.type] = {
val keysL = keys.asInstanceOf[Array[L]]
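// open-addressing probe: next index is 5*i + 1 + perturbation, with the perturbation
// shifted right by 5 bits each step (the same probing scheme as CPython dicts)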
@inline @tailrec def loop(i: Int, perturbation: Int): Ptr[this.type] = {
val j = i & mask
val status = buckets(j)
if (status == UNUSED) Ptr.Null(this)
else if (status == USED && keysL(j) == key) VPtr(this, j)
else loop((i << 2) + i + perturbation + 1, perturbation >> 5)
}
val i = K.asInstanceOf[MetalTag[L]].hash(key) & 0x7fffffff
loop(i, i)
}
final def ptr: Ptr[this.type] = {
var i = 0
while (i < buckets.length && buckets(i) != USED) i += 1
if (i < buckets.length) VPtr(this, i) else Ptr.Null(this)
}
final def ptrNext(ptr: VPtr[this.type]): Ptr[this.type] = {
var i = ptr.raw.toInt + 1
while (i < buckets.length && buckets(i) != USED) i += 1
if (i < buckets.length) VPtr(this, i) else Ptr.Null(this)
}
final def ptrKey[@specialized L](ptr: VPtr[this.type]): L = keys.asInstanceOf[Array[L]](ptr.raw.toInt)
final def ptrValue[@specialized W](ptr: VPtr[this.type]): W = values.asInstanceOf[Array[W]](ptr.raw.toInt)
final def ptrElement1[@specialized E1](ptr: VPtr[this.type]): E1 = keys.asInstanceOf[Array[E1]](ptr.raw.toInt)
final def ptrElement2[@specialized E2](ptr: VPtr[this.type]): E2 = values.asInstanceOf[Array[E2]](ptr.raw.toInt)
}
object HashMap {
/** Unused bucket. */
@inline final def UNUSED: Byte = 0
/** Once used, has been deleted but not yet overwritten. */
@inline final def DELETED: Byte = 2
/** Used. */
@inline final def USED: Byte = 3
}
trait HashMapFactory extends MapFactory {
type KExtra[K] = Dummy[K]
type VExtra[V] = Dummy[V]
type M[K, V] <: generic.HashMap[K, V]
}
|
denisrosset/ptrcoll
|
library/src/main/scala/metal/generic/HashMap.scala
|
Scala
|
mit
| 3,083
|
package org.openmole.site
/*
* Copyright (C) 19/04/17 // mathieu.leclaire@openmole.org
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import org.scalajs.dom.KeyboardEvent
import scaladget.bootstrapnative.bsn._
import scaladget.tools._
import scaladget.lunr.IIndexSearchResult
import scalatags.JsDom.all._
import org.scalajs.dom.raw.MouseEvent
import rx._
import scaladget.bootstrapnative.Selector.Dropdown
object Search {
implicit val ctx: Ctx.Owner = Ctx.Owner.safe()
def build(getIndex: () ⇒ Unit) {
val centerSearch = Seq(
width := 150,
margin := "0 auto",
display := "block"
)
val searchInput = inputTag("")(placeholder := "Search", centerSearch).render
val result: Var[Seq[IIndexSearchResult]] = Var(Seq())
case class Item(index: Int = 0, ref: String = "", maxSize: Int = 0)
val item = Var(Item())
val resultStyle: ModifierSeq = Seq(
color := "black",
left := -160,
width := 200
)
def search = () ⇒ {
result() = SiteJS.search(s"*${searchInput.value}*")
item() = Item()
false
}
val resultDiv = {
lazy val results = div(
onkeydown := { (k: KeyboardEvent) ⇒
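// keyCode 40 = arrow down, 38 = arrow up: move the highlighted result within bounds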
val curItem = item.now
if (k.keyCode == 40 && curItem.index < curItem.maxSize - 1) {
item() = curItem.copy(index = curItem.index + 1)
false
}
else if (k.keyCode == 38 && curItem.index > 0) {
item() = curItem.copy(index = curItem.index - 1)
false
}
},
searchInput,
Rx {
val rr = result().take(10).zipWithIndex
div(scalatags.JsDom.all.paddingTop := 20)(
for {
r ← rr
} yield {
div(
a(pointer, href := r._1.ref)(SiteJS.entries.get(r._1.ref)), {
if (r._2 == item().index) {
item() = item.now.copy(ref = r._1.ref, maxSize = rr.size)
backgroundColor := "#ddd"
}
else color := "black"
}
)
}
)
}
)
div(
form(
results,
onkeyup := {
(k: KeyboardEvent) ⇒
if (k.keyCode != 38 && k.keyCode != 40)
search()
},
onsubmit := { () ⇒
if (item.now.ref != "")
org.scalajs.dom.window.location.href = item.now.ref
false
}
)
)
}
val dd = new Dropdown(resultDiv, div, emptyMod, resultStyle, () ⇒ {})
val ddd = org.scalajs.dom.window.document.getElementById(shared.searchImg)
ddd.addEventListener("mouseover", {
(e: MouseEvent) ⇒
getIndex()
})
ddd.addEventListener("click", {
(e: MouseEvent) ⇒
dd.toggle
searchInput.focus()
})
org.scalajs.dom.window.document.getElementById(shared.searchDiv).appendChild(dd.render)
}
}
|
openmole/openmole
|
openmole/bin/org.openmole.site/js/src/main/scala/org/openmole/site/Search.scala
|
Scala
|
agpl-3.0
| 3,622
|
package cbt
import java.io._
import java.net._
import ammonite.ops.{cwd => _,_}
trait Scaffold{
def logger: Logger
def generateBasicBuildFile(
projectDirectory: File,
scalaVersion: String,
groupId: String,
artifactId: String,
version: String
): Unit = {
/**
TODO:
- make behavior more user friendly:
- don't generate half the files and then throw an exception because one of them already exists
- maybe don't generate all of this, e.g. offer different variants
*/
val generatedFiles = Seq(
"build/build.scala" -> s"""import cbt._
import java.net.URL
import java.io.File
import scala.collection.immutable.Seq
class Build(context: Context) extends BasicBuild(context) with BuildShared{
override def artifactId: String = "$artifactId"
override def groupId = "$groupId"
override def dependencies = super.dependencies ++ Seq( // don't forget super.dependencies here
// "org.cvogt" %% "scala-extensions" % "0.4.1"
)
// required for .pom file
override def name = artifactId
override def description : String = lib.requiredForPom("description")
}
""",
"build/build/build.scala" -> s"""import cbt._
import java.net.URL
import java.io.File
import scala.collection.immutable.Seq
class Build(context: Context) extends BuildBuild(context){
override def scalaVersion: String = "2.11.7"
override def dependencies = super.dependencies ++ Seq(
BuildDependency( projectDirectory.parent ++ "/build-shared")
// , "com.lihaoyi" %% "ammonite-ops" % "0.5.5"
)
}
""",
"test/Main.scala" -> s"""object Main{
def main( args: Array[String] ) = {
assert( false, "Go. Write some tests :)!" )
}
}
""",
"test/build/build.scala" -> s"""import cbt._
import java.net.URL
import java.io.File
import scala.collection.immutable.Seq
class Build(context: cbt.Context) extends BasicBuild(context) with BuildShared/* with cbt.mixins.ScalaTest*/{
// def scalaTestVersion = "2.2.6"
override def dependencies = super.dependencies ++ Seq(
// , "org.scalacheck" %% "scalacheck" % "1.13.0"
)
}
""",
"test/build/build/build.scala" -> s"""import cbt._
import java.net.URL
import java.io.File
import scala.collection.immutable.Seq
class Build(context: Context) extends BuildBuild(context){
override def scalaVersion: String = "2.11.7"
override def dependencies = super.dependencies ++ Seq(
BuildDependency( projectDirectory.parent.parent ++ "/build-shared")
// , "com.lihaoyi" %% "ammonite-ops" % "0.5.5"
)
}
""",
"build-shared/build/build.scala" -> s"""import cbt._
import java.net.URL
import java.io.File
import scala.collection.immutable.Seq
class Build(context: Context) extends BasicBuild(context){
override def scalaVersion: String = "$scalaVersion"
override def dependencies = super.dependencies ++ Seq( // don't forget super.dependencies here
CbtDependency
// , "org.cvogt" %% "scala-extensions" % "0.4.1"
)
}
""",
"build-shared/BuildShared.scala" -> s"""import cbt._
import java.net.URL
import java.io.File
import scala.collection.immutable.Seq
trait BuildShared extends BasicBuild{
override def scalaVersion: String = "$scalaVersion"
override def enableConcurrency = false // enable for speed, disable for debugging
override def groupId = "$groupId"
override def version = "$version"
// required for .pom file
override def url : URL = lib.requiredForPom("url")
override def developers: Seq[Developer] = lib.requiredForPom("developers")
override def licenses : Seq[License] = lib.requiredForPom("licenses")
override def scmUrl : String = lib.requiredForPom("scmUrl")
override def scmConnection: String = lib.requiredForPom("scmConnection")
override def pomExtra: Seq[scala.xml.Node] = Seq()
}
"""
)
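// write each generated file, printing a confirmation; a file that already exists
// surfaces as a printed FileAlreadyExistsException (see the TODO above)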
generatedFiles.map{
case ( fileName, code ) =>
scala.util.Try{
write( Path( projectDirectory.string ++ "/" ++ fileName ), code )
import scala.Console._
println( GREEN ++ "Created " ++ fileName ++ RESET )
}
}.foreach(
_.recover{
case e: java.nio.file.FileAlreadyExistsException =>
e.printStackTrace
}.get
)
}
}
|
tobias-johansson/cbt
|
stage2/Scaffold.scala
|
Scala
|
bsd-2-clause
| 4,224
|
/*
* Copyright (c) 2013 University of Massachusetts Amherst
* Licensed under the Apache License, Version 2.0
* http://www.apache.org/licenses/LICENSE-2.0
*/
package edu.umass.cs.iesl.bibmogrify.reader
import org.scalatest.{BeforeAndAfter, FunSuite}
import com.typesafe.scalalogging.slf4j.Logging
import edu.umass.cs.iesl.bibmogrify.UrlNamedInputStream
import edu.umass.cs.iesl.bibmogrify.model.{RichStructuredCitation, Published}
import edu.umass.cs.iesl.scalacommons.StringUtils._
import edu.umass.cs.iesl.bibmogrify.compare.AminoAcidTitleHash
class ElsevierReaderTestSuite extends FunSuite with BeforeAndAfter with Logging {
import RichStructuredCitation.enrichStructuredCitation
val file = getClass.getResource("/examples/elsevier/example.xml")
val citationList = ElsevierReader(new UrlNamedInputStream(file.toExternalForm.n, file))
val c = citationList.toIterator.next()
// todo: detailed tests of all fields
/*
<article mdate="2002-01-03" key="persons/Codd71a">
<author>E. F. Codd</author>
<title>Further Normalization of the Data Base Relational Model.</title>
<journal>IBM Research Report, San Jose, California</journal>
<volume>RJ909</volume>
<month>August</month>
<year>1971</year>
<cdrom>ibmTR/rj909.pdf</cdrom>
<ee>db/labs/ibm/RJ909.html</ee>
*/
test("Title is parsed") {
assert(c.title === "Effects of excessive cookie consumption in garbage-can-dwelling shagbeasts".opt)
}
test("Authors are parsed") {
assert(c.authors.size === 2)
assert(c.authors.head.roles.isEmpty)
//assert(c.authors.head.agent.name.flatMap(_.inferFully.bestFullName) === emptyStringToNone("Kermit T. Frog"))
assert(c.authors.head.agent.toString === "Kermit T. Frog")
}
test("Partial date is parsed") {
val ce = c.dates.head
assert(ce.eventType === Published)
assert(ce.date.get.year === Some(2002))
assert(ce.date.get.month === None)
assert(ce.date.get.day === None)
}
test("Journal is parsed") {
val cont = c.containedIn.get
assert(cont.container.title === "Acta Sesamae".opt)
assert(cont.volume === "50".opt)
}
test("Volume is parsed") {
val cont = c.containedIn.get
val vol = cont.volume
assert(vol.unwrap === "50")
val num = cont.number
assert(num.unwrap === "10")
}
test("Page range is parsed") {
val cont = c.containedIn.get
val pages = cont.pages.get
assert(pages.toString === "641-651")
assert(pages.numPages.get === 11)
}
test("Abstract is parsed") {
val ce = c.englishAbstract
assert(ce.unwrap.split(" ").head === "Domestication")
assert(ce.unwrap.split(" ").last === "cans.")
}
test("Every record has a title") {
for (c <- citationList) {
assert(!c.title.isEmpty)
logger.info("Title : " + c.title)
}
}
test("amino-acid version includes all fields") {
assert(AminoAcidTitleHash.apply(c).head.s ===
// "FFCTSFXCSSVCKCNSMPTNNGRBGCNDWLLNGSHGBSTSFRGSMCTSSMVSVTVZ")
"FFCTSFGCSSVCKCNSMPTNNGRFGCNDWLLNGSHGFSTSFRGSMCTSSMVSVTVW")
}
}
|
iesl/bibmogrify
|
src/test/scala/edu/umass/cs/iesl/bibmogrify/reader/ElsevierReaderTestSuite.scala
|
Scala
|
apache-2.0
| 4,152
|
package com.getjenny.starchat.entities.io
/**
* Created by Angelo Leto <angelo@getjenny.com> on 8/04/19.
*/
case class UpdateQAByQueryReq(
documentSearch: QADocumentSearch,
document: QADocumentUpdate
)
|
GetJenny/starchat
|
src/main/scala/com/getjenny/starchat/entities/io/UpdateQAByQueryReq.scala
|
Scala
|
gpl-2.0
| 297
|
/*
* Copyright 2017 Alexey Kuzin <amkuzink@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package choiceroulette.gui.utils
import language.reflectiveCalls
/** Loan-pattern utilities for safely using resources that expose a `close()` method.
* (A commented usage sketch follows the object below.)
*
* @author Alexey Kuzin <amkuzink@gmail.com>
*/
object Control {
def using[A <: { def close(): Unit }, B](res: A)(func: A => B): B = {
try {
func(res)
} finally {
res.close()
}
}
}
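// A commented usage sketch (the file name is hypothetical); `using` closes the
// resource even when `func` throws:
//   val firstLine = Control.using(scala.io.Source.fromFile("data.txt")) { src =>
//     src.getLines().next()
//   }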
|
leviathan941/choiceroulette
|
guiapp/src/main/scala/choiceroulette/gui/utils/Control.scala
|
Scala
|
apache-2.0
| 948
|
package mosaico.common
import java.io.{File, FileOutputStream}
import java.net.URL
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.HttpRequest
import akka.http.scaladsl.model.headers.RawHeader
import akka.util.ByteString
import sbt.URL
import scala.collection.immutable
/**
* Download urls with Akka
*/
trait Download extends FileUtils {
import mosaico.common.AkkaCommon._
private var total: Double = 0
private var counter: Long = 0
private var nextCheck: Long = 0
def initDownload(len: Long): Unit = {
counter = 0
total = len
nextCheck = System.currentTimeMillis()
}
def progressDownload(bs: ByteString): ByteString = {
counter += bs.size
val now = System.currentTimeMillis()
if (now > nextCheck) {
nextCheck += CHECK_INTERVAL
print(if (total >= 0) {
"%02.0f%%".format(counter / total * 100)
} else "")
println(s" ${counter.toLong / MEGA}/${total.toLong / MEGA}")
}
bs
}
def downloadUrlHttps(url: URL, file: File) = {
println(s"<<< ${url}")
sbt.IO.download(url, file)
println(s">>> ${file}")
Some(file)
}
/**
* Downloads the url to the file, optionally adding the specified header
*
* @param url    the url to download
* @param file   the destination file
* @param header optional header in "Name: value" form
* @return the downloaded file, wrapped in Some
*/
def downloadUrl(url: URL, file: File, header: Option[String] = None): Option[File] = {
//println(s"downloadUrl: ${url} ${file} ${header}")
val incomplete = new File(file.getAbsolutePath + ".dld")
val headerSeq = header.map {
s =>
val a: Array[String] = s.split(":")
immutable.Seq(new RawHeader(a.head, a.tail.mkString(":")))
}.getOrElse(Nil)
val toDownload = if (file.exists()) {
if (!incomplete.exists()) {
println("file already downloaded")
false
} else {
file.delete()
incomplete.delete()
println(s"removed ${file}")
true
}
} else true
if (toDownload && url.getProtocol == "https") {
// workaround for https until sbt upgrade to 2.11/2.12
println(s"<<< ${url}")
sbt.IO.download(url, file)
println(s">>> ${file}")
} else if (toDownload && url.getProtocol == "http") {
val req = HttpRequest(uri = url.toString, headers = headerSeq)
println(s"<<< ${url}")
var res = waitFor(Http().singleRequest(req))
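// follow redirects by hand: Akka HTTP's singleRequest does not follow Location headers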
while (res.getHeader("Location").nonEmpty) {
val redir = res.getHeader("Location").get.value
println(redir)
res = waitFor(Http().singleRequest(
HttpRequest(uri = redir, headers = headerSeq)))
}
println(res)
val ent = res.entity
val len = ent.contentLengthOption.getOrElse(-1L)
initDownload(len)
incomplete.createNewFile()
val out = new FileOutputStream(file).getChannel
waitFor(ent.withSizeLimit(SIZE_LIMIT).
dataBytes.
map(progressDownload).
runForeach(bs => out.write(bs.asByteBuffer))
)
if (res.status.intValue == 200)
incomplete.delete()
println(s">>> ${file}")
}
Some(file)
}
}
|
sciabarra/Mosaico
|
plugin/src/main/scala/mosaico/common/Download.scala
|
Scala
|
apache-2.0
| 3,105
|
package com.sundogsoftware.spark
import org.apache.spark._
import org.apache.spark.SparkContext._
import org.apache.spark.sql._
import org.apache.log4j._
object SparkSQL {
case class Person(ID:Int, name:String, age:Int, numFriends:Int)
def mapper(line: String): Person = {
val fields = line.split(',')
Person(fields(0).toInt, fields(1), fields(2).toInt, fields(3).toInt)
}
/** Our main function where the action happens */
def main(args: Array[String]) {
// Set the log level to only print errors
Logger.getLogger("org").setLevel(Level.ERROR)
// Use new SparkSession interface in Spark 2.0
val spark = SparkSession
.builder
.appName("SparkSQL")
.master("local[*]")
.config("spark.sql.warehouse.dir", "file:///C:/temp") // Necessary to work around a Windows bug in Spark 2.0.0; omit if you're not on Windows.
.getOrCreate()
val lines = spark.sparkContext.textFile("../fakefriends.csv")
val people = lines.map(mapper)
// Infer the schema, and register the DataSet as a table.
import spark.implicits._
val schemaPeople = people.toDS
schemaPeople.printSchema()
schemaPeople.createOrReplaceTempView("people")
// SQL can be run over DataFrames that have been registered as a table
val teenagers = spark.sql("SELECT * FROM people WHERE age >= 13 AND age <= 19")
val results = teenagers.collect()
results.foreach(println)
spark.stop()
}
}
|
BobLovesData/Apache-Spark-In-24-Hours
|
src/net/massstreet/examples/SparkSQL.scala
|
Scala
|
gpl-3.0
| 1,549
|
package lowerthirds
import java.io._
import java.awt.{GraphicsEnvironment, AlphaComposite, Font => JFont}
import java.awt.Font._
import java.awt.image.BufferedImage
import java.awt.RenderingHints.{KEY_ANTIALIASING, VALUE_ANTIALIAS_ON}
import java.awt.Color._
import javax.imageio.{ImageIO}
import javax.swing.undo.{UndoManager, UndoableEdit}
import javax.swing.event.{UndoableEditEvent, UndoableEditListener}
import javax.swing.KeyStroke._
import io.Source
import swing._
import Dialog._
import event._
import Swing._
import BorderPanel.Position._
import collection.mutable.ListBuffer
import scala.util.matching.Regex
import markup.YAML
import typesetter.{Typesetter, Font, Box, Glue}
import Util._
class TS extends YAML
{
val resource = getClass
val fs = "/"//System.getProperties.getProperty( "file.separator" )
// val home = System.getProperties.getProperty( "user.home" ) + fs
// val fonts = s"${home}Dropbox${fs}Typography${fs}Fonts${fs}"
val fonts = s"resources/"
// val cmttf = s"${fonts}bakoma${fs}ttf${fs}"
// val cmpfb = s"${fonts}bakoma${fs}pfb${fs}"
// val cmotf = s"${fonts}bakoma${fs}otf${fs}"
val amspfb = s"${fonts}amsfonts${fs}pfb${fs}"
val cmttf = s"${fonts}cm-unicode-0.7.0${fs}"
val gentiumttf = s"${fonts}GentiumPlus-1.510${fs}"
def out( o: Any ) = LowerThirdsEditor.messages.text = LowerThirdsEditor.messages.text + o.toString + '\\n'
Font.load( resource, s"${cmttf}cmunrm.ttf" )
// loadFont( "Computer Modern Smallcaps", s"${cmttf}cmcsc10.ttf" )
Font.load( resource, s"${cmttf}cmunrb.ttf" )
// loadFont( "Computer Modern Slant", s"${cmttf}cmsl12.ttf" )
// loadFont( "Computer Modern Italic", s"${cmttf}cmti12.ttf" )
//// loadFont( "Computer Modern Typewriter", s"${cmttf}cmuntt.ttf" )
Font.preload( resource, s"${cmttf}cmunssdc.ttf" )
Font.load( resource, s"${cmttf}cmunorm.ttf" )
// loadFont( "Concrete Bold", s"${cmttf}cmunobx.ttf" )
Font.load( resource, s"${cmttf}cmunoti.ttf" )
//// loadFont( "Gentium Regular", s"${fonts}GentiumPlus-1.510${fs}GentiumPlus-R.ttf" )
// loadFont( "Gentium Italic", s"${fonts}GentiumPlus-1.510${fs}GentiumPlus-I.ttf" )
// loadFont( "Gentium Compact Regular", s"${fonts}GentiumPlusCompact-1.508${fs}GentiumPlusCompact-R.ttf" )
// loadFont( "Gentium Compact Italic", s"${fonts}GentiumPlusCompact-1.508${fs}GentiumPlusCompact-I.ttf" )
// loadFont( "Free Helvetian Roman Bold Condensed", s"${fonts}free-helvetian-roman-bold-condensed.pfb" )
// val CM_PLAIN = Font( "Computer Modern Regular", PLAIN, 30 )
// val CM_BOLD = Font( "Computer Modern Bold", PLAIN, 30 )
// val CM_ITALIC = Font( "Computer Modern Italic", PLAIN, 30 )
// val CM_SLANT = Font( "Computer Modern Slant", PLAIN, 30 )
// val CM_TYPEWRITER = Font( "Computer Modern Typewriter", PLAIN, 30 )
// val CM_SMALLCAPS = Font.smallcaps( "Computer Modern Regular", PLAIN, 30 )
// val CM_SANS_CONDENSED_BOLD = Font( "Computer Modern Sans Condensed Bold", PLAIN, 18 )
//
// val CONCRETE_PLAIN = Font( "Concrete Regular", PLAIN, 30 )
// val CONCRETE_BOLD = Font( "Concrete Regular", BOLD, 30 )
// val CONCRETE_ITALIC = Font( "Concrete Italic", PLAIN, 30 )
// val CONCRETE_SLANT = Font( "Concrete Regular", ITALIC, 30 )
// val CONCRETE_SMALLCAPS = Font.smallcaps( "Concrete Regular", PLAIN, 30 )
// val GENTIUM_PLAIN = Font( "Gentium Regular", PLAIN, 30 )
// val GENTIUM_BOLD = Font( "Gentium Regular", BOLD, 30 )
// val GENTIUM_SMALLCAPS = Font.smallcaps( "Gentium Regular", PLAIN, 30 )
// val GENTIUM_ITALIC = Font( "Gentium Regular", ITALIC, 30 )
// val GREEK_FONT = Font( "Gentium Regular", PLAIN, 30 )
// val GENTIUM_SLANT = Font( "Gentium Regular", ITALIC, 30 )
// val VERSE_FONT = Font( "Free Helvetian Roman Bold Condensed", PLAIN, 18 )
// val MONO_FONT = Font( "Droid Sans Mono", PLAIN, 30 )
// def rmfont = CONCRETE_PLAIN
//
// def bfont = CONCRETE_BOLD
//
// def scfont = CONCRETE_SMALLCAPS
//
// def ifont = CONCRETE_ITALIC
//
// def sfont = CONCRETE_SLANT
//
// def ttfont = CM_TYPEWRITER
//
// def vfont = VERSE_FONT
}
object LowerThirdsEditor extends SimpleSwingApplication
{
val VERSION = "0.7"
val DATE = "Nov 1, 2014"
val SHOW_TUTORIAL = false
val IMAGE_WIDTH = 1280
val IMAGE_HEIGHT = 720
val TRANSPARENT = AlphaComposite.getInstance( AlphaComposite.SRC, 0 )
val fs = "/"//System.getProperties.getProperty( "file.separator" )
val fonts = s"resources${fs}"
val FONT =
JFont.createFont( TRUETYPE_FONT, getClass.getResourceAsStream(s"${fonts}Droid${fs}DroidSansMono.ttf") ).
deriveFont( PLAIN, 16 )
lazy val textChooser = new FileChooser
lazy val messages =
new TextArea
{
editable = false
font = FONT
}
var boxes: List[(String, Box)] = Nil
lazy val editor = new EditorTextArea
lazy val overlayFrame =
new Frame
{
peer.setDefaultCloseOperation( javax.swing.WindowConstants.DO_NOTHING_ON_CLOSE )
title = "Overlays"
resizable = false
contents =
new TabbedPane
{
focusable = false
pages += new TabbedPane.Page( "", new CheckeredTypesetterPanel(null, IMAGE_WIDTH, IMAGE_HEIGHT) )
}
pack
}
var changesMade = false
lazy val top: Frame =
new Frame
{
peer.setDefaultCloseOperation( javax.swing.WindowConstants.DO_NOTHING_ON_CLOSE )
def makeTitle( modified: Boolean ) =
{
changesMade = modified
title = (if (modified) "*" else "") + documentTitle + " - Lower Thirds Editor v" + VERSION + " (" + DATE + ")"
}
override def closeOperation = quit
makeTitle( false )
location = new Point( 200, 90 )
editor.status = makeTitle
var info = true
reactions +=
{
case e: WindowActivated =>
if (info)
{
if (SHOW_TUTORIAL)
onEDT {tutorialPage.visible = true}
info = false
}
}
menuBar =
new MenuBar
{
contents +=
new Menu( "File" )
{
contents +=
new MenuItem(
Action( "Open" )
{
textChooser.showOpenDialog( null ) match
{
case FileChooser.Result.Approve =>
editor.text = readTextFile( new FileInputStream(textChooser.selectedFile) )
editor.unmodified
editor.caret.position = 0
case FileChooser.Result.Cancel =>
}
} )
contents +=
new MenuItem(
new Action( "Save" )
{
accelerator = Some( getKeyStroke("ctrl S") )
def apply = save
} )
contents +=
new MenuItem(
Action( "Save As..." )
{
saveAs
} )
contents +=
new MenuItem(
Action( "Export All" )
{
val file = textChooser.selectedFile
if (file eq null)
messages.text = "can't export: no file has been chosen; do a save"
else
{
delete( file )
export( file, boxes, IMAGE_WIDTH, IMAGE_HEIGHT )
}
} )
contents +=
new MenuItem(
Action( "Export Current" )
{
val file = textChooser.selectedFile
if (file eq null)
messages.text = "can't export: no file has been chosen; do a save"
else
{
val title = overlayFrame.contents.head.asInstanceOf[TabbedPane].selection.page.title
boxes.find( _._1 == title ) match
{
case None => messages.text = "can't export: '" + title + "' not found"
case Some( box ) =>
export( file, List(box), IMAGE_WIDTH, IMAGE_HEIGHT )
}
}
} )
contents +=
new MenuItem(
Action( "Quit" )
{
quit
} )
}
contents +=
new Menu( "Edit" )
{
contents +=
new MenuItem(
new Action( "Undo" )
{
accelerator = Some( getKeyStroke("ctrl Z") )
def apply = editor.undo
} )
contents +=
new MenuItem(
new Action( "Redo" )
{
accelerator = Some( getKeyStroke("ctrl Y") )
def apply = editor.redo
} )
contents += new Separator
contents +=
new MenuItem(
new Action( "Copy" )
{
accelerator = Some( getKeyStroke("ctrl C") )
def apply = editor.copy
} )
contents +=
new MenuItem(
new Action( "Cut" )
{
accelerator = Some( getKeyStroke("ctrl X") )
def apply = editor.cut
} )
contents +=
new MenuItem(
new Action( "Paste" )
{
accelerator = Some( getKeyStroke("ctrl V") )
def apply = editor.paste
} )
}
contents +=
new Menu( "Help" )
{
contents +=
new MenuItem(
Action( "About Lower Thirds Editor" )
{
val el = top.location
val es = top.size
val as = aboutDialog.size
val cx = el.getX + es.getWidth/2
val cy = el.getY + es.getHeight/2
aboutDialog.location = new Point( (cx - as.getWidth/2).toInt, (cy - as.getHeight/2).toInt )
aboutDialog.visible = true
} )
// contents +=
// new MenuItem(
// Action( "Manual" )
// {
// manualPage.visible = true
// } )
// contents +=
// new MenuItem(
// Action( "Markup Language" )
// {
// markupPage.visible = true
// } )
// contents +=
// new MenuItem(
// Action( "Command Reference (by Category)" )
// {
// commandCategoryPage.visible = true
// } )
// contents +=
// new MenuItem(
// Action( "Tutorial" )
// {
// tutorialPage.visible = true
// } )
contents +=
new MenuItem(
Action( "Command Reference (Alphabetical)" )
{
commandAlphabeticalPage.visible = true
} )
}
}
contents =
new SplitPane( Orientation.Horizontal,
new ScrollPane( editor )
{
preferredSize = (700, 300)
},
new ScrollPane( messages )
{
preferredSize = (700, 80)
} )
}
lazy val aboutDialog =
new Dialog( top )
{
val ABOUT_WIDTH = 800
val MARGIN = 10
val t = new TS
t('hsize) = ABOUT_WIDTH - 2*MARGIN
t('vsize) = -1
val map =
Map (
"fs" -> "/",
"fonts" -> "resources${fs}",
"cmttf" -> "${fonts}cm-unicode-0.7.0${fs}",
"VERSION" -> VERSION,
"DATE" -> DATE
)
val b = t processDocument stringReplace(readTextFile(getClass.getResourceAsStream(s"resources${fs}about")), map )
contents = new TypesetterPanel( b, MARGIN )
title = "About Lower Thirds Editor"
modal = true
resizable = false
pack
}
lazy val tutorialPage = helpPage( "tutorial", "Tutorial", 10, 800, 500 )
lazy val manualPage = helpPage( "manual", "Manual", 10, 800, 500 )
lazy val markupPage = helpPage( "language", "Markup Language", 10, 800, 500 )
lazy val commandCategoryPage = helpPage( "commands", "Categorical Command Reference", 10, 800, 500 )
lazy val commandAlphabeticalPage = helpPage( "index", "Alphabetical Command Reference", 10, 800, 500 )
onEDT {overlayFrame.visible = true}
// onEDT {editor.requestFocusInWindow}
override def quit
{
if (changesMade)
showConfirmation( message = "You have unsaved material. Save?", messageType = Message.Warning ) match
{
case Result.Yes =>
if (!save)
return
case Result.Closed => return
case _ =>
}
sys.exit( 0 )
}
def saveAs =
textChooser.showSaveDialog( null ) match
{
case FileChooser.Result.Approve =>
write
true
case FileChooser.Result.Cancel => false
}
def titled = textChooser.selectedFile ne null
def documentTitle =
if (titled)
textChooser.selectedFile.getName + " (" + textChooser.selectedFile.getParent + ")"
else
"Untitled"
def save =
if (titled)
{
if (editor.modified)
write
true
}
else
saveAs
def write
{
val w = new BufferedWriter( new OutputStreamWriter(new FileOutputStream(textChooser.selectedFile), "UTF-8") )
w.write( editor.text, 0, editor.text.length )
w.close
editor.unmodified
messages.text = "wrote " + (if (editor.text.isEmpty) 0 else editor.text.count( _ == '\\n' ) + 1) + " lines to " + textChooser.selectedFile
}
def stringReplace( s: String, replacement: Map[String, String] ): String =
"""\\$\\{([a-zA-Z]+)\\}""".r replaceAllIn (s, m => Regex.quoteReplacement(stringReplace(replacement(m.group(1)), replacement)))
def helpPage( page: String, name: String, margin: Int, width: Int, height: Int/*, modalWindow: Boolean = true*/ ) =
new Dialog( top )
{
val fsize = 24
val t =
new TS
{
val rm = Font( resource, "cmunrm", "plain", fsize )
// val rm = Font( resource, "Serif", "plain", fsize )
// val rm = Font( resource, s"${gentiumttf}GentiumPlus-R.ttf", "plain", fsize )
variable( 'font, rm )
variable( 'rmfont, rm )
variable( 'bfont, Font(resource, s"${cmttf}cmunrb.ttf", "plain", fsize) )
variable( 'scfont, Font(resource, s"${amspfb}cmcsc10.pfb", "plain", fsize) )
variable( 'ifont, Font(resource, s"${cmttf}cmunti.ttf", "plain", fsize) )
variable( 'sfont, Font(resource, s"${cmttf}cmunsl.ttf", "plain", fsize) )
variable( 'tfont, Font(resource, s"${cmttf}cmuntt.ttf", "plain", fsize) )
variable( 'tbfont, Font(resource, s"${cmttf}cmuntb.ttf", "plain", fsize) )
// variable( 'bfont, Font(resource, s"Serif", "bold", fsize) )
// variable( 'scfont, Font(resource, s"${amspfb}cmcsc10.pfb", "plain", fsize) )
// variable( 'ifont, Font(resource, s"Serif", "slant", fsize) )
// variable( 'sfont, Font(resource, s"Serif", "slant", fsize) )
// variable( 'tfont, Font(resource, s"Monospaced", "plain", (fsize*.9).toInt) )
// variable( 'tbfont, Font(resource, s"Monospaced", "bold", (fsize*.9).toInt) )
// variable( 'bfont, Font(resource, s"Serif", "bold", fsize) )
// variable( 'scfont, Font(resource, s"${amspfb}cmcsc10.pfb", "plain", fsize) )
// variable( 'ifont, Font(resource, s"Serif", "slant", fsize) )
// variable( 'sfont, Font(resource, s"Serif", "slant", fsize) )
// variable( 'tfont, Font(resource, s"Monospaced", "plain", (fsize*.9).toInt) )
// variable( 'tbfont, Font(resource, s"Monospaced", "bold", (fsize*.9).toInt) )
}
t('hsize) = width - 2*margin
t('vsize) = -1
val b = t processDocumentFromFile( getClass, "resources" + fs + page )
contents = new ScrollableTypesetterPanel( b, margin, height )
title = name
modal = false//modalWindow
resizable = false
pack
centerOnScreen
}
object EditorUndoManager extends UndoManager
{
def nextUndo = editToBeUndone
}
class EditorTextArea extends TextArea
{
var status: Boolean => Unit = _ // true means modified
lineWrap = true
wordWrap = true
font = FONT
private var unmodifiedEdit: UndoableEdit = null
private val um = EditorUndoManager
def redo =
if (um.canRedo)
{
um.redo
status( modified )
}
def undo =
if (um.canUndo)
{
um.undo
status( modified )
}
def unmodified
{
unmodifiedEdit = um.nextUndo
status( false )
}
def modified = unmodifiedEdit ne um.nextUndo
peer.getDocument.addUndoableEditListener( um )
peer.getDocument.addUndoableEditListener(
new UndoableEditListener
{
def undoableEditHappened( e: UndoableEditEvent )
{
status( true )
}
} )
reactions +=
{
case _ =>
try
{
boxes = typeset( Source.fromString(text).getLines, IMAGE_WIDTH, IMAGE_HEIGHT )
messages.text = ""
val tabs = overlayFrame.contents.head.asInstanceOf[TabbedPane]
val pages = tabs.pages
for ((name, box) <- boxes)
{
pages.find (_.title == name) match
{
case None =>
pages += new TabbedPane.Page( name, new CheckeredTypesetterPanel(box, IMAGE_WIDTH, IMAGE_HEIGHT) )
tabs.selection.index = pages.last.index
case Some( page ) =>
page.content.asInstanceOf[TypesetterPanel].box = box
overlayFrame.repaint
}
}
var index = 0
while (index < pages.length)
if (boxes.exists( _._1 == pages(index).title ))
index += 1
else
pages.remove( index )
if (pages.isEmpty)
pages += new TabbedPane.Page( "", new CheckeredTypesetterPanel(null, IMAGE_WIDTH, IMAGE_HEIGHT) )
}
catch
{
case e: Exception => messages.text = if (e.getMessage eq null) e.toString else e.getMessage
}
}
}
class ScrollableTypesetterPanel( box: Box, margin: Int, h: Int ) extends
ScrollPane( new TypesetterPanel(box, margin) )
{
preferredSize = (box.width.toInt + 2*margin + new ScrollBar().minimumSize.width + 1, h)
}
class TypesetterPanel( var box: Box, margin: Int, w: Int = 0, h: Int = 0 ) extends Panel
{
protected val dim =
if (w == 0)
(box.width.toInt + 2*margin, box.height.toInt + 2*margin)
else
(w + 2*margin, h + 2*margin)
preferredSize = dim
focusable = false
override def paint( g: Graphics2D )
{
super.paint( g )
g.setRenderingHint( KEY_ANTIALIASING, VALUE_ANTIALIAS_ON )
// val normalizing = GraphicsEnvironment.getLocalGraphicsEnvironment.getDefaultScreenDevice.getDefaultConfiguration.getNormalizingTransform
//
// g.transform(normalizing)
paintBackground( g )
if (box ne null)
box.draw( g, margin, margin )
}
def paintBackground( g: Graphics2D ) {}
}
class CheckeredTypesetterPanel( box: Box, w: Int, h: Int ) extends TypesetterPanel( box, 0, w, h )
{
val DARK_SQUARE_COLOR = new Color( 0x808080 )
val LIGHT_SQUARE_COLOR = new Color( 0xB0B0B0 )
val SQUARE_SIZE = 16
override def paintBackground( g: Graphics2D )
{
g setColor LIGHT_SQUARE_COLOR
g.fillRect( 0, 0, dim._1, dim._2 )
g setColor DARK_SQUARE_COLOR
for (x <- 0 until dim._1 by SQUARE_SIZE)
for (y <- 0 until dim._2 by SQUARE_SIZE)
if ((x/SQUARE_SIZE + y/SQUARE_SIZE)%2 == 1)
g.fillRect( x, y, SQUARE_SIZE, SQUARE_SIZE )
}
}
def delete( input: File )
{
val parent = input.getParentFile
val pngfilter =
new FilenameFilter
{
def accept( dir: File, name: String) = name endsWith ".png"
}
parent.listFiles( pngfilter ) foreach (_.delete)
}
def export( input: File, images: List[(String, Box)], imageWidth: Int, imageHeight: Int )
{
val parent = input.getParentFile
for ((file, box) <- images)
{
val img = new BufferedImage( imageWidth, imageHeight, BufferedImage.TYPE_INT_ARGB_PRE )
val g = img.getGraphics.asInstanceOf[Graphics2D]
g setColor BLACK
g setComposite TRANSPARENT
g.fillRect( 0, 0, imageWidth, imageHeight )
g.setRenderingHint( KEY_ANTIALIASING, VALUE_ANTIALIAS_ON )
box.draw( g, 0, 0 )
ImageIO.write( img, "PNG", new File(parent, file + ".png") )
}
messages.text = "Exported " + (if (images.length > 1) (images.length + " images") else ("'" + images.head._1 + "'"))
}
def typeset( lines: Iterator[String], boxWidth: Int, boxHeight: Int ) =
{
def pathValid( name: String ) =
name != "" && name != "." && name != ".." && name != " "*name.length &&
!name.exists( "/\\\\<>:\\"|?*\\t\\r\\n" contains _ )
val boxes = new ListBuffer[(String, Box)]
while (lines hasNext)
{
val file = lines next
if (pathValid( file ))
{
val buf = new StringBuilder
def copy: Unit =
if (lines hasNext)
{
val line = lines next
if (!line.startsWith( "----" ))
{
buf append line
          buf append '\n'
copy
}
}
copy
val t =
new TS
{
val fsize = 30
val rm = Font( resource, s"${cmttf}cmunorm.ttf", "plain", fsize )
// val rm = Font( resource, s"Serif", "plain", fsize )
variable( 'font, rm )
variable( 'rmfont, rm )
variable( 'bfont, Font(resource, s"${cmttf}cmunorm.ttf", "bold", fsize) )
variable( 'scfont, Font(resource, s"${cmttf}cmunorm.ttf", "smallcaps", fsize) )
variable( 'ifont, Font(resource, s"${cmttf}cmunoti.ttf", "plain", fsize) )
variable( 'sfont, Font(resource, s"${cmttf}cmunorm.ttf", "slant", fsize) )
variable( 'tfont, Font(resource, s"${cmttf}cmuntt.ttf", "plain", fsize) )
variable( 'tbfont, Font(resource, s"${cmttf}cmuntb.ttf", "plain", fsize) )
// variable( 'vfont, Font(resource, s"${fonts}free-helvetian-roman-bold-condensed.pfb", "plain", (.6*fsize).toInt) )
variable( 'vfont, Font(resource, "cmunssdc", "plain", (.6*fsize).toInt) )
// variable( 'bfont, Font(resource, s"Serif", "bold", fsize) )
// variable( 'scfont, Font(resource, s"${amspfb}cmcsc10.pfb", "plain", fsize) )
// variable( 'ifont, Font(resource, s"Serif", "slant", fsize) )
// variable( 'sfont, Font(resource, s"Serif", "slant", fsize) )
// variable( 'tfont, Font(resource, s"Monospaced", "plain", fsize) )
// variable( 'tbfont, Font(resource, s"Monospaced", "bold", fsize) )
// variable( 'vfont, Font(resource, s"Monospaced", "bold", (.6*fsize).toInt) )
}
t('color) = WHITE
t('vcolor) = WHITE
t('hsize) = boxWidth
boxes += file -> t.processDocument( buf.toString )
}
else
sys.error( "invalid filename: \\"" + file + '"' )
}
boxes.toList
}
}
|
edadma/lteditor
|
src/main/scala/LowerThirdsEditor.scala
|
Scala
|
gpl-3.0
| 21,199
|
/*
* Copyright (C) 2016 University of Basel, Graphics and Vision Research Group
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package scalismo.ui.control.interactor.landmark.simple
import java.awt.event.MouseEvent
import java.awt.{Color, Cursor}
import javax.swing.SwingUtilities
import scalismo.ui.control.interactor.Interactor.Verdict
import scalismo.ui.control.interactor.Interactor.Verdict.Pass
import scalismo.ui.control.interactor.{DefaultInteractor, Interactor, Recipe}
import scalismo.ui.model.properties.Uncertainty
import scalismo.ui.model.{LandmarkNode, SceneNode}
import scalismo.ui.resources.icons.BundledIcon
import scalismo.ui.view.ScalismoFrame
import scala.swing.ToggleButton
import scala.swing.event.ButtonClicked
trait SimpleLandmarkingInteractorTrait extends Interactor {
def defaultUncertainty: Uncertainty
val landmarkingButton: ToggleButton = new ToggleButton {
private val myIcon = BundledIcon.Landmark
def updateUi(): Unit = {
val onOff = if (selected) "ON" else "OFF"
tooltip = s"Toggle landmarking (currently $onOff)"
val iconColor = if (selected) Color.GREEN.darker else Color.DARK_GRAY
icon = myIcon.colored(iconColor).standardSized()
}
reactions += {
case ButtonClicked(_) => updateUi()
}
updateUi()
}
override def onActivated(frame: ScalismoFrame): Unit = {
frame.toolbar.add(landmarkingButton)
}
override def onDeactivated(frame: ScalismoFrame): Unit = {
frame.toolbar.remove(landmarkingButton)
}
override def mouseClicked(e: MouseEvent): Verdict = {
if (landmarkingButton.selected && SwingUtilities.isLeftMouseButton(e)) {
Recipe.AddLandmarkOnClick.mouseClicked(e, defaultUncertainty)
} else {
Pass
}
}
// set the cursor to a crosshair if we're in landmarking mode
override def mouseEntered(e: MouseEvent): Verdict = {
val cursor =
if (landmarkingButton.selected) Cursor.getPredefinedCursor(Cursor.CROSSHAIR_CURSOR) else Cursor.getDefaultCursor
e.canvas.setCursor(cursor)
super.mouseEntered(e)
}
override def mouseMoved(e: MouseEvent): Verdict = {
if (landmarkingButton.selected) {
def exceptLandmarks(node: SceneNode) = node match {
case _: LandmarkNode => false
case _ => true
}
Recipe.HighlightOutlineOfPickableObject.mouseMoved(e, exceptLandmarks)
}
super.mouseMoved(e)
}
}
object SimpleLandmarkingInteractor extends SimpleLandmarkingInteractorTrait with DefaultInteractor {
override val defaultUncertainty: Uncertainty = Uncertainty.DefaultUncertainty
override def mousePressed(e: MouseEvent): Verdict = Recipe.Block2DRotation.mousePressed(e)
}
|
unibas-gravis/scalismo-ui
|
src/main/scala/scalismo/ui/control/interactor/landmark/simple/SimpleLandmarkingInteractor.scala
|
Scala
|
gpl-3.0
| 3,309
|
/*
* Copyright (C) 2020 MapRoulette contributors (see CONTRIBUTORS.md).
* Licensed under the Apache License, Version 2.0 (see LICENSE).
*/
package org.maproulette.framework.controller
import javax.inject.Inject
import org.maproulette.data.ActionManager
import org.maproulette.framework.service.VirtualProjectService
import org.maproulette.session.SessionManager
import play.api.mvc._
/**
* The virtual project controller handles all operations specific to Virtual
 * Project objects. It extends ProjectController.
*
* See ProjectController for more details on all project object operations
*
* @author krotstan
*/
class VirtualProjectController @Inject() (
override val sessionManager: SessionManager,
override val actionManager: ActionManager,
override val bodyParsers: PlayBodyParsers,
virtualProjectService: VirtualProjectService,
components: ControllerComponents
) extends AbstractController(components)
with MapRouletteController {
/**
* Adds a challenge to a virtual project. This requires Write access on the project
*
* @param projectId The virtual project to add the challenge to
* @param challengeId The challenge that you are adding
* @return Ok with no message
*/
def addChallenge(projectId: Long, challengeId: Long): Action[AnyContent] = Action.async {
implicit request =>
sessionManager.authenticatedRequest { implicit user =>
this.virtualProjectService.addChallenge(projectId, challengeId, user)
Ok
}
}
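  // A hedged sketch of how this action might be wired in Play's routes file;
  // the URL pattern below is an assumption for illustration, not taken from this
  // project's conf/routes:
  //   POST /project/:projectId/challenge/:challengeId  @org.maproulette.framework.controller.VirtualProjectController.addChallenge(projectId: Long, challengeId: Long)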
/**
* Removes a challenge from a virtual project. This requires Write access on the project
*
* @param projectId The virtual project to remove the challenge from
* @param challengeId The challenge that you are removing
* @return Ok with no message
*/
def removeChallenge(projectId: Long, challengeId: Long): Action[AnyContent] = Action.async {
implicit request =>
sessionManager.authenticatedRequest { implicit user =>
this.virtualProjectService.removeChallenge(projectId, challengeId, user)
Ok
}
}
}
|
mgcuthbert/maproulette2
|
app/org/maproulette/framework/controller/VirtualProjectController.scala
|
Scala
|
apache-2.0
| 2,090
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.linalg
import scala.util.Random
import breeze.linalg.{DenseMatrix => BDM, squaredDistance => breezeSquaredDistance}
import org.apache.spark.{Logging, SparkException, SparkFunSuite}
import org.apache.spark.mllib.util.TestingUtils._
/**
 * Dense and sparse vectors.
 * A vector (1.0, 0.0, 3.0) can be represented in two ways:
 * dense: [1.0, 0.0, 3.0], which is just an ordinary array
 * sparse: (3, [0, 2], [1.0, 3.0]), read as (vector size, indices, values), with indices starting at 0
 */
class VectorsSuite extends SparkFunSuite with Logging {
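  // A quick illustration using the factory methods exercised throughout this suite:
  //   Vectors.dense(1.0, 0.0, 3.0)                     // dense form
  //   Vectors.sparse(3, Array(0, 2), Array(1.0, 3.0))  // the same vector, sparse form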
val arr = Array(0.1, 0.0, 0.3, 0.4)
  // length
  val n = 4
  // indices
  val indices = Array(0, 2, 3)
  // values
  val values = Array(0.1, 0.3, 0.4)
  // a dense vector stores its element values in a plain double array
test("dense vector construction with varargs") {//可变参数构建密集向量
//密集向量
val vec = Vectors.dense(arr).asInstanceOf[DenseVector]
assert(vec.size === arr.length)
assert(vec.values.eq(arr))
}
test("dense vector construction from a double array") {//双数组构造密集矢量
//密集向量
val vec = Vectors.dense(arr).asInstanceOf[DenseVector]
assert(vec.size === arr.length)
assert(vec.values.eq(arr))
}
test("sparse vector construction") {
    // sparse vector: the first argument (4) is the vector length, the second, Array(0, 2, 3),
    // gives the indices, and the third gives the values Array(0.1, 0.3, 0.4); values pair with
    // indices, so position 0 holds 0.1, position 2 holds 0.3, position 3 holds 0.4, and every
    // unlisted position is implicitly 0
    val vec = Vectors.sparse(n, indices, values).asInstanceOf[SparseVector]
    assert(vec.size === n) // sparse vector length
    assert(vec.indices.eq(indices)) // sparse vector indices
    assert(vec.values.eq(values)) // sparse vector values
}
test("sparse vector construction with unordered elements") {//无序元素构建稀疏向量
//构建一个没有索引的元素,reverse: Array[(Int, Double)] = Array((3,0.4), (2,0.3), (0,0.1))
val vec = Vectors.sparse(n, indices.zip(values).reverse).asInstanceOf[SparseVector]
assert(vec.size === n)
assert(vec.indices === indices)
assert(vec.values === values)
}
  // constructing a sparse vector with mismatched indices/values arrays
test("sparse vector construction with mismatched indices/values array") {
intercept[IllegalArgumentException] {
Vectors.sparse(4, Array(1, 2, 3), Array(3.0, 5.0, 7.0, 9.0))
}
intercept[IllegalArgumentException] {
Vectors.sparse(4, Array(1, 2, 3), Array(3.0, 5.0))
}
}
  // constructing a sparse vector with more indices than its size allows
test("sparse vector construction with too many indices vs size") {
intercept[IllegalArgumentException] {
Vectors.sparse(3, Array(1, 2, 3, 4), Array(3.0, 5.0, 7.0, 9.0))
}
}
test("dense to array") {//密集矩阵转换数组
val vec = Vectors.dense(arr).asInstanceOf[DenseVector]
assert(vec.toArray.eq(arr))
}
test("dense argmax") {//密集矩阵最大元素位置
val vec = Vectors.dense(Array.empty[Double]).asInstanceOf[DenseVector]
assert(vec.argmax === -1)//找出最大元素位置
val vec2 = Vectors.dense(arr).asInstanceOf[DenseVector]
assert(vec2.argmax === 3)//找出最大元素位置
val vec3 = Vectors.dense(Array(-1.0, 0.0, -2.0, 1.0)).asInstanceOf[DenseVector]
assert(vec3.argmax === 3)//找出最大元素位置
}
test("sparse to array") {//稀疏矩阵转换数组
val vec = Vectors.sparse(n, indices, values).asInstanceOf[SparseVector]
assert(vec.toArray === arr)
}
test("sparse argmax") {//稀疏矩阵最大元素位置
val vec = Vectors.sparse(0, Array.empty[Int], Array.empty[Double]).asInstanceOf[SparseVector]
//空的向量最大位置为-1
assert(vec.argmax === -1)//找出最大元素位置
val vec2 = Vectors.sparse(n, indices, values).asInstanceOf[SparseVector]
assert(vec2.argmax === 3)//找出最大元素位置
val vec3 = Vectors.sparse(5, Array(2, 3, 4), Array(1.0, 0.0, -.7))
assert(vec3.argmax === 2)//找出最大元素位置,即第二个索引值为1
// check for case that sparse vector is created with
//检查稀疏向量的情况下,只有负值创建
// only negative values {0.0, 0.0,-1.0, -0.7, 0.0}
val vec4 = Vectors.sparse(5, Array(2, 3), Array(-1.0, -.7))
assert(vec4.argmax === 0)//找出最大元素位置
val vec5 = Vectors.sparse(11, Array(0, 3, 10), Array(-1.0, -.7, 0.0))
assert(vec5.argmax === 1)//找出最大元素位置
val vec6 = Vectors.sparse(11, Array(0, 1, 2), Array(-1.0, -.7, 0.0))
assert(vec6.argmax === 2)//找出最大元素位置
val vec7 = Vectors.sparse(5, Array(0, 1, 3), Array(-1.0, 0.0, -.7))
assert(vec7.argmax === 1)//找出最大元素位置
val vec8 = Vectors.sparse(5, Array(1, 2), Array(0.0, -1.0))
assert(vec8.argmax === 0)//找出最大元素位置,索引的位置从0开始
}
test("vector equals") {//向量等于
val dv1 = Vectors.dense(arr.clone())
val dv2 = Vectors.dense(arr.clone())
val sv1 = Vectors.sparse(n, indices.clone(), values.clone())
val sv2 = Vectors.sparse(n, indices.clone(), values.clone())
val vectors = Seq(dv1, dv2, sv1, sv2)
for (v <- vectors; u <- vectors) {
assert(v === u)
assert(v.## === u.##)
}
val another = Vectors.dense(0.1, 0.2, 0.3, 0.4)
for (v <- vectors) {
assert(v != another)
assert(v.## != another.##)
}
}
test("vectors equals with explicit 0") {//向量等于显式0
val dv1 = Vectors.dense(Array(0, 0.9, 0, 0.8, 0))
val sv1 = Vectors.sparse(5, Array(1, 3), Array(0.9, 0.8))
val sv2 = Vectors.sparse(5, Array(0, 1, 2, 3, 4), Array(0, 0.9, 0, 0.8, 0))
val vectors = Seq(dv1, sv1, sv2)
for (v <- vectors; u <- vectors) {
assert(v === u)
assert(v.## === u.##)
}
val another = Vectors.sparse(5, Array(0, 1, 3), Array(0, 0.9, 0.2))
for (v <- vectors) {
assert(v != another)
assert(v.## != another.##)
}
}
test("indexing dense vectors") {//密集向量索引
val vec = Vectors.dense(1.0, 2.0, 3.0, 4.0)
assert(vec(0) === 1.0)
assert(vec(3) === 4.0)
}
test("indexing sparse vectors") {//索引稀疏向量
//稀疏向量 第一参数7表示此向量的长度,第二个参数Array(0, 2, 4, 6)表示的索引,第三个参数Array(1.0, 2.0, 3.0, 4.0)
//与前面的Array(0, 2, 4, 6)是相互对应的,表示第0个位置的值为1.0,第2个位置的值为2,第4个位置的值为3,第6个位置的值为4
val vec = Vectors.sparse(7, Array(0, 2, 4, 6), Array(1.0, 2.0, 3.0, 4.0))
assert(vec(0) === 1.0)
assert(vec(1) === 0.0)//第一个Array没有1值,数据为0
assert(vec(2) === 2.0)
assert(vec(3) === 0.0)//第一个Array没有3值,数据为0
assert(vec(6) === 4.0)
val vec2 = Vectors.sparse(8, Array(0, 2, 4, 6), Array(1.0, 2.0, 3.0, 4.0))
assert(vec2(6) === 4.0)
assert(vec2(7) === 0.0)
}
test("parse vectors") {//解析向量
val vectors = Seq(
Vectors.dense(Array.empty[Double]),
Vectors.dense(1.0),
Vectors.dense(1.0E6, 0.0, -2.0e-7),
Vectors.sparse(0, Array.empty[Int], Array.empty[Double]),
Vectors.sparse(1, Array(0), Array(1.0)),
Vectors.sparse(3, Array(0, 2), Array(1.0, -2.0)))
vectors.foreach { v =>
val v1 = Vectors.parse(v.toString)
assert(v.getClass === v1.getClass)
assert(v === v1)
}
val malformatted = Seq("1", "[1,,]", "[1,2b]", "(1,[1,2])", "([1],[2.0,1.0])")
malformatted.foreach { s =>
intercept[SparkException] {
Vectors.parse(s)
logInfo(s"Didn't detect malformatted string $s.")
}
}
}
test("zeros") {//零点
assert(Vectors.zeros(3) === Vectors.dense(0.0, 0.0, 0.0))
}
test("Vector.copy") {//矢量复制
val sv = Vectors.sparse(4, Array(0, 2), Array(1.0, 2.0))
val svCopy = sv.copy
(sv, svCopy) match {
case (sv: SparseVector, svCopy: SparseVector) =>
assert(sv.size === svCopy.size)
assert(sv.indices === svCopy.indices)
assert(sv.values === svCopy.values)
assert(!sv.indices.eq(svCopy.indices))
assert(!sv.values.eq(svCopy.values))
case _ =>
throw new RuntimeException(s"copy returned ${svCopy.getClass} on ${sv.getClass}.")
}
val dv = Vectors.dense(1.0, 0.0, 2.0)
val dvCopy = dv.copy
(dv, dvCopy) match {
case (dv: DenseVector, dvCopy: DenseVector) =>
assert(dv.size === dvCopy.size)
assert(dv.values === dvCopy.values)
assert(!dv.values.eq(dvCopy.values))
case _ =>
throw new RuntimeException(s"copy returned ${dvCopy.getClass} on ${dv.getClass}.")
}
}
test("VectorUDT") {
val dv0 = Vectors.dense(Array.empty[Double])
val dv1 = Vectors.dense(1.0, 2.0)
val sv0 = Vectors.sparse(2, Array.empty, Array.empty)
val sv1 = Vectors.sparse(2, Array(1), Array(2.0))
val udt = new VectorUDT()
for (v <- Seq(dv0, dv1, sv0, sv1)) {
assert(v === udt.deserialize(udt.serialize(v)))
}
assert(udt.typeName == "vector")
assert(udt.simpleString == "vector")
}
test("fromBreeze") {//来自Breeze
val x = BDM.zeros[Double](10, 10)
val v = Vectors.fromBreeze(x(::, 0))
assert(v.size === x.rows)
}
test("sqdist") {//平方距离
val random = new Random()
for (m <- 1 until 1000 by 100) {
val nnz = random.nextInt(m)
val indices1 = random.shuffle(0 to m - 1).slice(0, nnz).sorted.toArray
val values1 = Array.fill(nnz)(random.nextDouble)
val sparseVector1 = Vectors.sparse(m, indices1, values1)
val indices2 = random.shuffle(0 to m - 1).slice(0, nnz).sorted.toArray
val values2 = Array.fill(nnz)(random.nextDouble)
val sparseVector2 = Vectors.sparse(m, indices2, values2)
val denseVector1 = Vectors.dense(sparseVector1.toArray)
val denseVector2 = Vectors.dense(sparseVector2.toArray)
val squaredDist = breezeSquaredDistance(sparseVector1.toBreeze, sparseVector2.toBreeze)
// SparseVector vs. SparseVector
assert(Vectors.sqdist(sparseVector1, sparseVector2) ~== squaredDist relTol 1E-8)
// DenseVector vs. SparseVector
assert(Vectors.sqdist(denseVector1, sparseVector2) ~== squaredDist relTol 1E-8)
// DenseVector vs. DenseVector
assert(Vectors.sqdist(denseVector1, denseVector2) ~== squaredDist relTol 1E-8)
}
}
test("foreachActive") {//迭代活动
val dv = Vectors.dense(0.0, 1.2, 3.1, 0.0)
//4表示此向量的长度,后面的比较直观,Seq里面每一对都是(索引,值)的形式。
val sv = Vectors.sparse(4, Seq((1, 1.2), (2, 3.1), (3, 0.0)))
val dvMap = scala.collection.mutable.Map[Int, Double]()
dv.foreachActive { (index, value) =>
dvMap.put(index, value)
}
assert(dvMap.size === 4)
assert(dvMap.get(0) === Some(0.0))
assert(dvMap.get(1) === Some(1.2))
assert(dvMap.get(2) === Some(3.1))
assert(dvMap.get(3) === Some(0.0))
val svMap = scala.collection.mutable.Map[Int, Double]()
sv.foreachActive { (index, value) =>
svMap.put(index, value)
}
assert(svMap.size === 3)
assert(svMap.get(1) === Some(1.2))
assert(svMap.get(2) === Some(3.1))
assert(svMap.get(3) === Some(0.0))
}
test("vector p-norm") {//向量范数
val dv = Vectors.dense(0.0, -1.2, 3.1, 0.0, -4.5, 1.9)
val sv = Vectors.sparse(6, Seq((1, -1.2), (2, 3.1), (3, 0.0), (4, -4.5), (5, 1.9)))
//矩阵范数 norm 10.700000000000001,即所有元素相加
assert(Vectors.norm(dv, 1.0) ~== dv.toArray.foldLeft(0.0)((a, v) =>
{
        /**
         * accumulator  abs(v)
         * 0.0          0.0
         * 0.0          1.2
         * 1.2          3.1
         * 4.3          0.0
         * 4.3          4.5
         * 8.8          1.9
         */
        // math.abs returns the absolute value
        println(a + "\t" + math.abs(v))
a + math.abs(v)
}) relTol 1E-8)
assert(Vectors.norm(sv, 1.0) ~== sv.toArray.foldLeft(0.0)((a, v) =>
a + math.abs(v)) relTol 1E-8)
    // positive square root
assert(Vectors.norm(dv, 2.0) ~== math.sqrt(dv.toArray.foldLeft(0.0)((a, v) =>
a + v * v)) relTol 1E-8)
    // math.sqrt returns the square root
assert(Vectors.norm(sv, 2.0) ~== math.sqrt(sv.toArray.foldLeft(0.0)((a, v) =>
a + v * v)) relTol 1E-8)
    // math.abs returns the absolute value
assert(Vectors.norm(dv, Double.PositiveInfinity) ~== dv.toArray.map(math.abs).max relTol 1E-8)
assert(Vectors.norm(sv, Double.PositiveInfinity) ~== sv.toArray.map(math.abs).max relTol 1E-8)
assert(Vectors.norm(dv, 3.7) ~== math.pow(dv.toArray.foldLeft(0.0)((a, v) =>
      // math.abs returns the absolute value
a + math.pow(math.abs(v), 3.7)), 1.0 / 3.7) relTol 1E-8)
assert(Vectors.norm(sv, 3.7) ~== math.pow(sv.toArray.foldLeft(0.0)((a, v) =>
a + math.pow(math.abs(v), 3.7)), 1.0 / 3.7) relTol 1E-8)
}
test("Vector numActive and numNonzeros") {//
val dv = Vectors.dense(0.0, 2.0, 3.0, 0.0)
assert(dv.numActives === 4)//最大数据
assert(dv.numNonzeros === 2)//非零值
//res4: org.apache.spark.mllib.linalg.DenseVector = [0.0,2.0,3.0,0.0]
val sv = Vectors.sparse(4, Array(0, 1, 2), Array(0.0, 2.0, 3.0))
sv.toDense
assert(sv.numActives === 3)
assert(sv.numNonzeros === 2)
}
test("Vector toSparse and toDense") {//向量转换稀疏矩阵和密集矩阵
val dv0 = Vectors.dense(0.0, 2.0, 3.0, 0.0)
assert(dv0.toDense === dv0)
val dv0s = dv0.toSparse
assert(dv0s.numActives === 2)
assert(dv0s === dv0)
val sv0 = Vectors.sparse(4, Array(0, 1, 2), Array(0.0, 2.0, 3.0))
assert(sv0.toDense === sv0)
val sv0s = sv0.toSparse
assert(sv0s.numActives === 2)
assert(sv0s === sv0)
}
test("Vector.compressed") {//向量压缩
val dv0 = Vectors.dense(1.0, 2.0, 3.0, 0.0)
val dv0c = dv0.compressed.asInstanceOf[DenseVector]
assert(dv0c === dv0)
val dv1 = Vectors.dense(0.0, 2.0, 0.0, 0.0)
val dv1c = dv1.compressed.asInstanceOf[SparseVector]
assert(dv1 === dv1c)
assert(dv1c.numActives === 1)
val sv0 = Vectors.sparse(4, Array(1, 2), Array(2.0, 0.0))
val sv0c = sv0.compressed.asInstanceOf[SparseVector]
assert(sv0 === sv0c)
assert(sv0c.numActives === 1)
val sv1 = Vectors.sparse(4, Array(0, 1, 2), Array(1.0, 2.0, 3.0))
val sv1c = sv1.compressed.asInstanceOf[DenseVector]
assert(sv1 === sv1c)
}
test("SparseVector.slice") {//稀疏向量切片
val v = new SparseVector(5, Array(1, 2, 4), Array(1.1, 2.2, 4.4))
assert(v.slice(Array(0, 2)) === new SparseVector(2, Array(1), Array(2.2)))
assert(v.slice(Array(2, 0)) === new SparseVector(2, Array(0), Array(2.2)))
assert(v.slice(Array(2, 0, 3, 4)) === new SparseVector(4, Array(0, 3), Array(2.2, 4.4)))
}
}
|
tophua/spark1.52
|
mllib/src/test/scala/org/apache/spark/mllib/linalg/VectorsSuite.scala
|
Scala
|
apache-2.0
| 15,898
|
package scalaj.http
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, IOException}
import java.net.{HttpCookie, InetSocketAddress, Proxy}
import java.util.zip.GZIPOutputStream
import javax.servlet.{ServletRequest, ServletResponse}
import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}
import org.eclipse.jetty.security.{ConstraintMapping, ConstraintSecurityHandler, HashLoginService}
import org.eclipse.jetty.security.authentication.{BasicAuthenticator, DigestAuthenticator, LoginAuthenticator}
import org.eclipse.jetty.server.{Request, Server}
import org.eclipse.jetty.server.handler.AbstractHandler
import org.eclipse.jetty.servlet.{ServletContextHandler, ServletHandler, ServletHolder}
import org.eclipse.jetty.servlets.ProxyServlet
import org.eclipse.jetty.util.security.{Constraint, Credential}
import org.junit.Assert._
import org.junit.Test
import scalaj.http.HttpConstants._
class HttpTest {
def makeRequest(
reqHandler: (HttpServletRequest, HttpServletResponse) => Unit
)(requestF: String => Unit): Unit = {
val server = new Server(0)
server.setHandler(new AbstractHandler(){
def handle(
target: String,
baseRequest: Request,
request: HttpServletRequest,
response: HttpServletResponse
): Unit = {
reqHandler(request, response)
baseRequest.setHandled(true)
}
})
try {
server.start()
val port = server.getConnectors.head.getLocalPort
requestF("http://localhost:" + port + "/")
} finally {
server.stop()
}
}
def makeAuthenticatedRequest
(
authenticator: LoginAuthenticator,
username: String,
password: String,
response:String
)(requestF: String => Unit): Unit = {
val server = new Server(0)
val context = new ServletContextHandler(ServletContextHandler.SESSIONS)
val loginService = new HashLoginService()
val roles = Array("user")
loginService.putUser(username, Credential.getCredential(password), roles)
loginService.setName("Test Realm")
val constraint = new Constraint()
constraint.setName(loginService.getName)
constraint.setRoles(roles)
constraint.setAuthenticate(true)
val cm = new ConstraintMapping()
cm.setConstraint(constraint)
cm.setPathSpec("/*")
val csh = new ConstraintSecurityHandler()
csh.setAuthenticator(authenticator)
csh.setRealmName(loginService.getName)
csh.addConstraintMapping(cm)
csh.setLoginService(loginService)
context.setSecurityHandler(csh)
context.setContextPath("/")
server.setHandler(context)
context.addServlet(new ServletHolder(new HttpServlet(){
override def doGet(reg: HttpServletRequest, resp: HttpServletResponse): Unit = {
resp.getWriter.print(response)
}
}), "/*")
try {
server.start()
val port = server.getConnectors.head.getLocalPort
requestF("http://localhost:" + port + "/")
} finally {
server.stop()
}
}
def makeProxiedRequest(proxyF: (String, Int) => Unit): Unit = {
val server = new Server(0)
val servletHandler = new ServletHandler()
servletHandler.addServletWithMapping(classOf[AuthProxyServlet], "/*")
server.setHandler(servletHandler)
try {
server.start()
val port = server.getConnectors.head.getLocalPort
proxyF("localhost", port)
} finally {
server.stop()
}
}
@Test
def basicAuthRequest: Unit = {
val expectedBody = "Hello from authed servlet"
makeAuthenticatedRequest(new BasicAuthenticator(), "test", "test", expectedBody){url =>
val result = Http(url).auth("test", "test").asString
assertEquals(200, result.code)
assertEquals(expectedBody, result.body)
}
}
@Test
def digestAuthRequest: Unit = {
val expectedBody = "Hello from authed servlet"
makeAuthenticatedRequest(new DigestAuthenticator(), "test", "test", expectedBody){url =>
val result = Http(url).digestAuth("test", "test").asString
assertEquals("expecting success, but got " + result, 200, result.code)
assertEquals(expectedBody, result.body)
}
}
@Test
def digestAuthRequestBadCreds: Unit = {
// verify that we don't loop infinitely on bad creds
makeAuthenticatedRequest(new DigestAuthenticator(), "test", "test", "hi"){url =>
val result = Http(url).digestAuth("test", "wrong").asString
assertEquals("expecting failure, but got " + result, 401, result.code)
}
}
@Test
def basicRequest: Unit = {
val expectedCode = HttpServletResponse.SC_OK
val expectedBody = "ok"
val expectedContentType = "text/text;charset=utf-8"
object MyHttp extends BaseHttp(options = Seq(HttpOptions.readTimeout(1234)))
makeRequest((req, resp) => {
resp.setContentType(expectedContentType)
resp.setStatus(expectedCode)
resp.getWriter.print(expectedBody)
})(url => {
val request: HttpRequest = MyHttp(url)
val response: HttpResponse[String] = request.execute()
assertEquals(Some(expectedContentType), response.header("Content-Type"))
assertEquals(expectedCode, response.code)
assertEquals(expectedBody, response.body)
assertEquals("HTTP/1.1 200 OK", response.statusLine)
assertTrue(response.is2xx)
assertTrue(response.isSuccess)
assertTrue(response.isNotError)
})
}
// TODO: .oauth currently must be the last method to be called because it captures the state of the request.
// see https://github.com/scalaj/scalaj-http/pull/156
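  // In other words, prefer Http(url).param("a", "b").oauth(consumerToken, userToken)
  // over Http(url).oauth(consumerToken, userToken).param("a", "b"): a param added
  // after .oauth would not be covered by the computed signature.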
@Test
def oauthRequestShouldHaveCorrectAuthHeader: Unit = {
val consumerToken = Token("dpf43f3p2l4k3l03","kd94hf93k423kf44")
val userToken = Token("nnch734d00sl2jdk","pfkkdhi9sl3r4s00")
object MyHttp extends BaseHttp(options = Seq(HttpOptions.readTimeout(1234)))
makeRequest((req, resp) => {
resp.setContentType("text/text;charset=utf-8")
resp.setStatus(HttpServletResponse.SC_OK)
resp.getWriter.print(req.getHeader("Authorization"))
})(url => {
val request: HttpRequest = MyHttp(url).param("file", "vacation.jpg").oauth(
consumerToken,
userToken
)
val response: HttpResponse[String] = request.execute()
      // Authorization header is prefixed with "OAuth ", comma separated, quoted values
val oauthHeaderParams: Map[String, String] = response.body.substring(6).split(",").flatMap(_.split("=") match {
case Array(k,v) => Some(
HttpConstants.urlDecode(k, "utf-8") ->
HttpConstants.urlDecode(v.substring(1, v.length -1 ), "utf-8")
)
case _ => None
}).toMap
val (_, expectedSignature) = OAuth.getSig(
oauthHeaderParams.filterKeys(Set("oauth_timestamp", "oauth_nonce")).toSeq,
request,
consumerToken,
Some(userToken),
None
)
assertEquals(Some(expectedSignature), oauthHeaderParams.get("oauth_signature"))
})
}
@Test
def serverError: Unit = {
makeRequest((req, resp) => {
resp.setStatus(500)
resp.getWriter.print("error")
})(url => {
val response: HttpResponse[String] = Http(url).execute()
assertEquals(500, response.code)
assertEquals("error", response.body)
assertTrue(response.is5xx)
assertTrue(response.isServerError)
assertTrue(response.isError)
})
}
@Test
def redirectShouldNotFollowByDefault: Unit = {
makeRequest((req, resp) => {
resp.setStatus(301)
resp.setHeader("Location", "http://www.google.com/")
resp.getWriter.print("moved")
})(url => {
val response: HttpResponse[String] = Http(url).execute()
assertEquals(301, response.code)
assertEquals("moved", response.body)
})
}
@Test
def shouldFollowRedirectOnProtocolSwitch: Unit = {
List(301, 302, 307).foreach(status => {
makeRequest((_, resp) => {
resp.setStatus(status)
resp.setHeader("Location", "https://foobar.foobar")
})(url => {
try {
val response = Http(url).option(HttpOptions.followRedirects(true)).execute()
fail(s"Expecting redirect to throw IOException for $status status, but got $response")
} catch {
case _: IOException =>
}
})
})
}
@Test
def headersShouldBeCaseInsensitive: Unit = {
makeRequest((req, resp) => {
// special check for content-encoding header, though it seems like jetty normalizes it.
resp.setHeader("content-ENCODING", "gzip")
val byteStream = new ByteArrayOutputStream()
val gzipStream = new GZIPOutputStream(byteStream)
resp.setHeader("X-FOO", "foobar")
gzipStream.write("hello".getBytes("UTF-8"))
gzipStream.close()
resp.getOutputStream.write(byteStream.toByteArray)
})(url => {
val response: HttpResponse[String] = Http(url).execute()
assertEquals(200, response.code)
assertEquals("hello", response.body)
assertEquals(Some("foobar"), response.header("x-foo"))
assertEquals(Some("foobar"), response.header("x-FOO"))
})
}
@Test
def asParams: Unit = {
makeRequest((req, resp) => {
resp.setStatus(200)
resp.getWriter.print("foo=bar")
})(url => {
val response = Http(url).asParams
assertEquals(Seq("foo" -> "bar"), response.body)
})
}
@Test
def asParamMap: Unit = {
makeRequest((req, resp) => {
resp.setStatus(200)
resp.getWriter.print("foo=bar")
})(url => {
val response = Http(url).asParamMap
assertEquals(Map("foo" -> "bar"), response.body)
})
}
@Test
def asBytes: Unit = {
makeRequest((req, resp) => {
resp.setStatus(200)
resp.getWriter.print("hi")
})(url => {
val response = Http(url).asBytes
assertEquals("hi", new String(response.body, HttpConstants.utf8))
})
}
@Test
def shouldPrependOptions: Unit = {
val http = Http("http://foo.com/")
val origOptions = http.options
val origOptionsLength = origOptions.length
val newOptions: List[HttpOptions.HttpOption] = List(c => { }, c=> { }, c => {})
val http2 = http.options(newOptions)
assertEquals(http2.options.length, origOptionsLength + 3)
assertEquals(http2.options.take(3), newOptions)
assertEquals(origOptions.length, origOptionsLength)
}
@Test
def lastTimeoutValueShouldWin: Unit = {
makeRequest((req, resp) => {
resp.setStatus(200)
resp.getWriter.print("hi")
})(url => {
val getFunc: HttpExec = (req, c) => {
assertEquals(c.getReadTimeout, 1234)
assertEquals(c.getConnectTimeout, 1234)
}
val r = Http(url).option(HttpOptions.connTimeout(1234)).option(HttpOptions.readTimeout(1234))
.copy(connectFunc = getFunc)
r.execute()
})
}
@Test
def readString: Unit = {
val bais = new ByteArrayInputStream("hello there".getBytes(HttpConstants.utf8))
assertEquals("hello there", HttpConstants.readString(bais))
}
@Test
def overrideTheMethod: Unit = {
makeRequest((req, resp) => {
assertEquals("DELETE", req.getMethod)
resp.setStatus(200)
resp.getWriter.print("")
})(url => {
Http(url).method("DELETE").asString
})
}
@Test
def unofficialOverrideTheMethod: Unit = {
makeRequest((req, resp) => {
resp.setStatus(200)
resp.getWriter.print("")
})(url => {
val fooFunc: HttpExec = (req, c) => {
throw new RuntimeException(c.getRequestMethod)
}
try {
Http(url).method("FOO").copy(connectFunc = fooFunc).execute()
fail("expected throw")
} catch {
case e: RuntimeException if e.getMessage == "FOO" => // ok
}
})
}
@Test
def shouldUseCharsetFromServerContentType: Unit = {
val diverseString = "£ÇÜfÿ"
Seq("UTF-8", "ISO-8859-1").foreach { charset =>
makeRequest((req, resp) => {
resp.setStatus(200)
resp.setContentType("text/plain; charset=" + charset)
resp.getOutputStream.write(diverseString.getBytes(charset))
})(url => {
assertEquals("Should properly decode " + charset + " responses", diverseString, Http(url).asString.body)
})
}
}
@Test
def proxyNoAuthTest: Unit = {
val theExpectedBody = "hello hello"
makeProxiedRequest((proxyHost, proxyPort) => {
makeRequest((req, resp) => {
resp.setStatus(200)
resp.getWriter.print(theExpectedBody)
})(url => {
val result = Http(url).proxy(proxyHost, proxyPort).asString
assertEquals(theExpectedBody, result.body)
assertEquals(200, result.code)
})
})
}
@Test
def proxyBadAuthTest: Unit = {
makeProxiedRequest((proxyHost, proxyPort) => {
makeRequest((req, resp) => {
resp.setStatus(200)
})(url => {
val result = Http(url).proxy(proxyHost, proxyPort).proxyAuth("test", "bad").asString
assertEquals(407, result.code)
})
})
}
@Test
def proxyCorrectAuthTest: Unit = {
val theExpectedBody = "hello hello"
makeProxiedRequest((proxyHost, proxyPort) => {
makeRequest((req, resp) => {
resp.setStatus(200)
resp.getWriter.print(theExpectedBody)
})(url => {
val result = Http(url).proxy(proxyHost, proxyPort).proxyAuth("test", "test").asString
assertEquals(theExpectedBody, result.body)
assertEquals(200, result.code)
})
})
}
@Test
def allModificationsAreAdditive: Unit = {
val params = List("a" -> "b")
val proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress("host", 80))
val headers = List("foo" -> "bar")
val options = List(HttpOptions.readTimeout(1234))
var req = Http("http://foo.com/").params(params)
req = req.proxy("host", 80)
assertEquals("params", params, req.params)
val expectedNewOptions = options ++ req.options
req = req.options(options)
assertEquals("params", params, req.params)
assertEquals("proxy", proxy, req.proxyConfig.get)
assertEquals("options", expectedNewOptions, req.options)
req = req.headers(headers)
assertEquals("params", params, req.params)
assertEquals("proxy", proxy, req.proxyConfig.get)
assertEquals("options", expectedNewOptions, req.options)
}
@Test(expected = classOf[java.net.ConnectException])
def serverDown: Unit = {
val response = Http("http://localhost:9999/").execute()
assertEquals("", response.body)
}
@Test
def varargs: Unit = {
val req = Http("http://foo.com/").params("a" -> "b", "b" -> "a")
.headers("a" -> "b", "b" -> "a")
.options(HttpOptions.connTimeout(100), HttpOptions.readTimeout(100))
assertEquals(2, req.params.size)
}
@Test
def parseCookies: Unit = {
val httpResponse = HttpResponse("hi", 200, Map("Set-Cookie" -> IndexedSeq("foo=bar", "baz=biz")))
assertEquals(IndexedSeq(new HttpCookie("foo", "bar"), new HttpCookie("baz", "biz")), httpResponse.cookies)
}
@Test(expected = classOf[scalaj.http.HttpStatusException])
def throwErrorThrowsWith401: Unit = {
HttpResponse("hi", 401, Map.empty).throwError
}
@Test(expected = classOf[scalaj.http.HttpStatusException])
def throwServerErrorThrowsWith400: Unit = {
HttpResponse("hi", 400, Map.empty).throwError
}
@Test
def throwErrorOkWith200: Unit = {
assertEquals(200, HttpResponse("hi", 200, Map.empty).throwError.code)
}
@Test
def throwServerErrorOkWith400: Unit = {
assertEquals(400, HttpResponse("hi", 400, Map.empty).throwServerError.code)
}
@Test
def testGetEquals: Unit = {
assertEquals(Http("http://foo.com/"), Http("http://foo.com/"))
}
@Test
def testPostEquals: Unit = {
assertEquals(Http("http://foo.com/").postData("hi"), Http("http://foo.com/").postData("hi"))
}
}
class AuthProxyServlet extends ProxyServlet {
override def service(req: ServletRequest, res: ServletResponse): Unit = {
val httpReq = req.asInstanceOf[HttpServletRequest]
val httpRes = res.asInstanceOf[HttpServletResponse]
val proxyAuth = httpReq.getHeader("proxy-authorization")
if(proxyAuth == null || proxyAuth == HttpConstants.basicAuthValue("test", "test")){
super.service(req, res)
}
else {
httpRes.sendError(HttpServletResponse.SC_PROXY_AUTHENTICATION_REQUIRED, "invalid proxy auth")
}
}
}
|
scalaj/scalaj-http
|
src/test/scala/scalaj/http/HttpTest.scala
|
Scala
|
apache-2.0
| 16,374
|
/* Scores: A sorted lazy list monad
*
* Scored[A] represents a sorted, lazy list of (Prob,A) pairs. The order is decreasing:
* Prob = 1 is best and Prob = 0 worst. Scored is a monad with additional structure,
* such as a fast(er) Cartesian product operation corresponding to Haskell's Applicative
* type class.
*
* When Scored instances combine monadically, their probabilities multiply. For flatMap,
* you should think of conditional probabilities: an expression like
*
* def f(x) = y
* x flatMap f
*
* combines probabilities via
*
* Pr(y) = Pr(x) * Pr(y | x)
*
* This formula is only a heuristic; eddy is not very principled about how it
* combines "probabilities".
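 *
 * As a hedged illustration (using only the constructors defined below in this
 * file), flatMap multiplies the probabilities of the two stages:
 *
 *   val x = single("a", Prob("x", 0.5))                        // Pr(x) = 0.5
 *   val y = x flatMap (s => single(s + "b", Prob("y|x", 0.4))) // Pr(y|x) = 0.4
 *   // y.best == Right(Alt(0.2, "ab")), since 0.5 * 0.4 = 0.2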
*
* For speed, Scored[A] is highly lazy: an instance can be either empty (Empty), a known
* best value following by something else (Best), or a lazy thunk with a known probability
* bound (LazyScored). When forced, LazyScored may return more laziness (another LazyScored).
* As an important optimization, LazyScored.force takes a bound and may force until the
 * probability falls below the bound (or a concrete result is reached); this lets the
* imperative logic in JavaScores run longer without allocating heap objects or returning
* to slow, purely functional code. This optimization is transparent to nearly all uses of
* Scored, but makes the internals significantly more complicated (and about an order of
* magnitude faster).
*
* Finally, if Flags.trackErrors is on, Scored[A] can also be Bad(error), where error is a
* lazy string representing what went wrong. This feature is quite flaky, and only suitable
* for development use at this time. The principled way of tracking errors is via
* Scored[Either[Error,A]], and this mechanism will be necessary if errors are to be exposed
* to users in the future.
*
* By pushing computations inside Scored, eddy allows nearly every aspect of its code to
* generate (and squash) ambiguity as it occurs, ranking different options based on local
 * information and relying on laziness to not explore too much of an exponentially large
* search space.
*/
package tarski
import utility.Utility._
import scala.annotation.tailrec
import tarski.JavaScores._
import tarski.Flags.trackErrors
object Scores {
// High probabilities compare first
abstract class HasProb extends Comparable[HasProb] {
def p: Double
def compareTo(o: HasProb): Int = {
val x = p
val y = o.p
if (x > y) -1 else if (x < y) 1 else 0
}
}
// Probabilities are either simple doubles (normally), or structures declared in Java (for debugging with names)
type Prob = Double
@inline def Prob(name: => String, p: Prob): Prob = p
//type Prob = DebugProb
//def Prob(name: String, p: Double): Prob = new NameProb(name,p)
// Probabilities
case class Alt[+A](dp: Prob, x: A) extends HasProb { // Explicit class to avoid boxing the probability
def p = pp(dp)
def map[B](f: A => B): Alt[B] = Alt(dp,f(x))
}
sealed abstract class Scored[+A] extends HasProb {
// Invariant: p >= probability of any option
def p: Double
// These force some evaluation
final def strict: StrictScored[A] = {
@tailrec def loop(x: Scored[A]): StrictScored[A] = x match {
case x:StrictScored[A] => x
case x:LazyScored[A] => loop(x force 0)
}
loop(this)
}
final def best: Either[Error,Alt[A]] = strict match {
case Best(p,x,_) => Right(Alt(p,x))
case x:EmptyOrBad => Left(x.error)
}
final def all: Either[Error,Stream[Alt[A]]] = strict match {
case x:Best[A] => Right(x.stream)
case x:EmptyOrBad => Left(x.error)
}
final def stream: Stream[Alt[A]] = strict match {
case Best(p,x,r) => Alt(p,x) #:: r.stream
case x:EmptyOrBad => Stream.Empty
}
final def isEmpty: Boolean = strict match {
case x:Best[_] => false
case _:EmptyOrBad => true
}
final def isSingle: Boolean = strict match {
case Best(_,_,r) => r.isEmpty
case _:EmptyOrBad => false
}
@tailrec final def below(q: Double): Boolean = p <= q || (this match {
case s:LazyScored[A] => s.force(q).below(q)
case _ => false
})
final def slowSize: Int = {
@tailrec def loop(s: Scored[A], n: Int): Int = s.strict match {
case Best(_,_,s) => loop(s,n+1)
case _:EmptyOrBad => n
}
loop(this,0)
}
// Multiply all probabilities by p. For internal use only: users should call biased(p,s).
def _bias(p: Prob): Scored[A]
// Apply f to every alternative
def map[B](f: A => B): Scored[B] = new LazyMap(this,f)
// Either this or s
def ++[B >: A](s: Scored[B]): Scored[B]
// f is assumed to generate conditional probabilities
def flatMap[B](f: A => Scored[B]): Scored[B] = new Extractor[B](new FlatMapState(this,f))
    // We are assumed independent of s
def productWith[B,C](s: Scored[B])(f: (A,B) => C): Scored[C] = s match {
case s:EmptyOrBad => s
case _ => new LazyProductWith(this,s,f)
}
// Filter, turning Empty into given error
final def filter(f: A => Boolean, error: => String): Scored[A] = _filter(f, if (trackErrors) () => error else null)
def _filter(f: A => Boolean, error: () => String): Scored[A] = new LazyFilter(this,f,error)
// Collect, turning Empty into given error
final def collect[B](error: => String, f: PartialFunction[A,B]): Scored[B] =
_collect(f,if (trackErrors) () => error else null)
def _collect[B](f: PartialFunction[A,B], error: () => String): Scored[B] = new LazyCollect(this,f,error)
}
// Scored with at least one option evaluated (if any exists)
sealed abstract class StrictScored[+A] extends Scored[A]
// A lazy version of Scored
abstract class LazyScored[+A] extends Scored[A] {
// May return another LazyScored, usually with lower probability. Optionally continue until prob <= p.
def force(hi: Double): Scored[A]
override def _bias(p: Prob): Scored[A] = new LazyBias(this,p)
def ++[B >: A](s: Scored[B]): Scored[B] =
if (p >= s.p) new LazyPlus(this,s)
else s match {
case s:LazyScored[B] => new LazyPlus(s,this)
case s:Best[B] => Best(s.dp,s.x,s.r ++ this)
case _:EmptyOrBad => impossible
}
}
// No options
sealed abstract class EmptyOrBad extends StrictScored[Nothing] {
def p = 0
def error: Error
override def map[B](f: Nothing => B) = this
override def flatMap[B](f: Nothing => Scored[B]) = this
override def _bias(p: Prob) = this
override def productWith[B,C](s: Scored[B])(f: (Nothing,B) => C) = this
override def _filter(f: Nothing => Boolean, error: () => String) = this
override def _collect[B](f: PartialFunction[Nothing,B], error: () => String) = this
}
// Failure
final class Bad(_error: => Error) extends EmptyOrBad {
lazy val error = _error
def ++[B](s: Scored[B]) = s match {
case s:LazyScored[B] => new LazyPlus(s,this)
case s:Bad => new Bad(NestError("++ failed",List(error,s.error)))
case Empty|_:Best[_] => s
}
}
// No options, but not Bad
object Empty extends EmptyOrBad {
def error = OneError("unknown error")
override def ++[B](s: Scored[B]) = s
override def toString = "Empty"
}
// One best possibility, then lazily more
final case class Best[+A](dp: Prob, x: A, r: Scored[A]) extends StrictScored[A] {
override def p = pp(dp)
override def _bias(q: Prob) = Best(pmul(dp,q),x,r _bias q)
override def ++[B >: A](s: Scored[B]): Scored[B] =
if (p >= s.p) Best(dp,x,r ++ s)
else s match {
case x:LazyScored[B] => new LazyPlus(x,this)
case Best(q,y,s) => Best(q,y,s++this)
case _:EmptyOrBad => impossible
}
}
// Lazy version of filter
private final class LazyFilter[A](private[this] var x: Scored[A], private[this] var f: A => Boolean,
private[this] var error: () => String) extends LazyScored[A] {
val p = x.p
private[this] var s: Scored[A] = null
def force(p: Double) = {
if (s eq null) {
@tailrec def loop(x: Scored[A], first: Boolean): Scored[A] = x match {
case x:LazyScored[A] => if (first || x.p > p) loop(x force p,first=false)
else new LazyFilter(x,f,error)
case Best(p,x,r) => if (f(x)) Best(p,x,r._filter(f,null))
else loop(r,first=false)
case x:Bad if trackErrors => x
case _:EmptyOrBad => if (!trackErrors || error == null) Empty
else { val e = error; new Bad(OneError(e())) } // Be careful to not reference error in a closure
}
s = loop(x,first=true)
x = null; f = null; error = null
}
s
}
}
// Lazy version of collect. Very similar to LazyFilter.
private final class LazyCollect[A,B](private[this] var x: Scored[A], private[this] var f: PartialFunction[A,B],
private[this] var error: () => String) extends LazyScored[B] {
val p = x.p
private[this] var s: Scored[B] = null
def force(p: Double) = {
if (s eq null) {
@tailrec def loop(x: Scored[A], first: Boolean): Scored[B] = x match {
case x:LazyScored[A] => if (first || x.p > p) loop(x force p,first=false)
else new LazyCollect(x,f,error)
case Best(p,x,r) => if (f.isDefinedAt(x)) Best(p,f(x),r._collect(f,null))
else loop(r,first=false)
case x:Bad if trackErrors => x
case _:EmptyOrBad => if (!trackErrors || error == null) Empty
else { val e = error; new Bad(OneError(e())) } // Be careful to not reference error in a closure
}
s = loop(x,first=true)
x = null; f = null; error = null
}
s
}
}
// Lazy version of good. Used only if trackErrors is on.
private final class LazyGood[A](private[this] var x: LazyScored[A]) extends LazyScored[A] {
val p = x.p
private[this] var s: Scored[A] = null
def force(p: Double) = {
if (s eq null) {
s = x force p match {
case x:Best[A] => x
case x:LazyGood[A] => x
case _:EmptyOrBad => Empty
case x:LazyScored[A] => new LazyGood(x)
}
x = null
}
s
}
}
// Drop errors. For internal use only.
@inline def good[A](s: Scored[A]): Scored[A] = if (!trackErrors) s else s match {
case _:Best[A]|_:LazyGood[A] => s
case _:EmptyOrBad => Empty
case s:LazyScored[A] => new LazyGood(s)
}
// Score constructors
@inline def fail(error: => String): EmptyOrBad =
if (trackErrors) new Bad(OneError(error))
else Empty
private val knownProb = Prob("known",1)
@inline def known[A](x: A): Scored[A] = Best(knownProb,x,Empty)
@inline def knownThen[A](x: A, s: Scored[A]) = Best(knownProb,x,good(s))
@inline def single[A](x: A, p: Prob): Scored[A] = Best(p,x,Empty)
@inline def orError[A](x: Scored[A], error: => String): Scored[A] =
if (trackErrors) new Bad(OneError(error)) ++ x
else x
// Bias and delay
@inline def biased[A](p: Prob, s: => Scored[A]): Scored[A] = {
val f = () => s
if (pp(p) == 1) f() // Don't bother delaying if the bound is useless
else new LazyBiased(p,f)
}
@inline def uniform[A <: AnyRef](p: Prob, xs: Array[A], error: => String): Scored[A] =
uniformThen(p,xs,fail(error))
@inline def uniform[A <: AnyRef](p: Prob, xs: List[A], error: => String): Scored[A] =
uniformThen(p,xs,fail(error))
@inline def uniformGood[A <: AnyRef](p: Prob, xs: Array[A]): Scored[A] =
uniformThen(p,xs,Empty)
@inline def list[A](xs: List[Alt[A]], error: => String): Scored[A] =
if (trackErrors) new Bad(OneError(error)) ++ listGood(xs)
else listGood(xs)
// Assuming s.p <= q, divide all probabilities in s by q
def unbiased[A](q: Prob, s: Scored[A]): Scored[A] = s match {
case s:LazyScored[A] => new LazyUnbiased[A](q,s)
case Best(p,x,r) => Best(pdiv(p,q),x,unbiased(q,r))
case s:EmptyOrBad => s
}
// Evaluate s enough to make sure it's nonempty, then call f(best) with best.p == 1.
// For use when alternatives are indistinguishable, but we still need to know if one exists.
def whatever[A,B](s: Scored[A])(f: Best[A] => B): Scored[B] = s match {
case s:LazyScored[A] => new LazyWhatever[A,B](s,f)
case Best(p,x,r) => single(f(Best(knownProb,x,unbiased(p,r))),p)
case s:EmptyOrBad => s
}
// Structured errors
sealed abstract class Error {
def prefixed(p: String): String
def short: String
}
case class OneError(e: String) extends Error {
def prefixed(p: String) = p+e
def short = e
}
case class NestError(e: String, es: List[Error]) extends Error {
    def prefixed(p: String) = (p+e :: es.map(_.prefixed(p+" "))).mkString("\n")
def short = e
}
// Product sugar
// Fixed size products
def product[A,B](a: Scored[A], b: => Scored[B]): Scored[(A,B)] =
a.productWith(b)((_,_))
def productWith[A,B,T](a: Scored[A], b: => Scored[B])(f: (A,B) => T): Scored[T] =
a.productWith(b)(f)
def product[A,B,C](a: Scored[A], b: => Scored[B], c: => Scored[C]): Scored[(A,B,C)] =
product(a,b).productWith(c)((ab,c) => (ab._1,ab._2,c))
def productWith[A,B,C,T](a: Scored[A], b: => Scored[B], c: => Scored[C])(f: (A,B,C) => T): Scored[T] =
product(a,b).productWith(c)((ab,c) => f(ab._1,ab._2,c))
def product[A,B,C,D](a: Scored[A], b: => Scored[B], c: => Scored[C], d: => Scored[D]): Scored[(A,B,C,D)] =
product(a,b).productWith(product(c,d))((ab,cd) => (ab._1,ab._2,cd._1,cd._2))
def productWith[A,B,C,D,T](a: Scored[A], b: => Scored[B], c: => Scored[C], d: => Scored[D])(f: (A,B,C,D) => T): Scored[T] =
product(a,b).productWith(product(c,d))((ab,cd) => f(ab._1,ab._2,cd._1,cd._2))
// Sequence products
private val knownNone = known(None)
private val knownNil = known(Nil)
def product[A](xs: Option[Scored[A]]): Scored[Option[A]] = xs match {
case None => knownNone
case Some(x) => x map (Some(_))
}
def product[A](xs: List[Scored[A]]): Scored[List[A]] = xs match {
case Nil => knownNil
case List(sx) => sx map (List(_))
case sx :: sxs => sx.productWith(product(sxs))(_::_)
}
def productFoldLeft[A,E](e: E)(fs: List[E => Scored[(E,A)]]): Scored[(E,List[A])] =
fs match {
case Nil => known((e,Nil))
case List(f) => f(e) map {case (e,x) => (e,List(x))}
case f :: fs => f(e) flatMap {case (ex,x) => productFoldLeft(ex)(fs) map {case (exs,xs) => (exs,x::xs)}}
}
// thread is map followed by product
def thread[A,B](xs: Option[A])(f: A => Scored[B]): Scored[Option[B]] = product(xs map f)
def thread[A,B](xs: List[A]) (f: A => Scored[B]): Scored[List[B]] = product(xs map f)
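  // Hedged example (the probability value is chosen purely for illustration):
  //   thread(List(1, 2, 3))(x => single(x * 2, Prob("double", 0.9)))
  // yields a Scored[List[Int]] whose best alternative is List(2, 4, 6) with
  // probability 0.9 * 0.9 * 0.9, since probabilities multiply across the product.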
// All pairs (x,y) from xs,ys s.t. f(x) contains g(y)
def link[A,B,C](xs: Scored[A], ys: Scored[B])(f: A => Traversable[C], g: B => C, fe: A => EmptyOrBad): Scored[(A,B)] =
new Extractor(new LinkState[A,B,C](xs,ys,f,g,if (trackErrors) fe else null))
// Scala helpers for JavaUtils
def nestError[A](s: String, bads: List[Bad]): Scored[A] =
if (trackErrors) bads match {
case List(b) => b
case bs => new Bad(NestError(s,bads map (_.error)))
} else Empty
def oneError[A](error: => String): Scored[A] =
if (trackErrors) new Bad(OneError(error))
else Empty
// Performance warnings for debugging code
if (trackErrors)
println("PERFORMANCE WARNING: Error tracking is on, Scored will be slower than otherwise")
if (trackProbabilities)
println("PERFORMANCE WARNING: Probability tracking is on, Scored will be slower than otherwise")
}
|
eddysystems/eddy
|
tarski/src/tarski/Scores.scala
|
Scala
|
bsd-2-clause
| 15,806
|
/** Copyright 2015, Metreta Information Technology s.r.l. */
package com.metreta.spark.orientdb.connector
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import com.metreta.spark.orientdb.connector.api.OrientDBConnector
import com.orientechnologies.orient.core.sql.OCommandSQL
import org.apache.spark.Logging
import java.text.SimpleDateFormat
import com.orientechnologies.orient.core.metadata.schema.OClass
import com.orientechnologies.orient.core.metadata.schema.OType
import java.lang.reflect.Field
import com.orientechnologies.orient.core.metadata.schema.OProperty
import org.apache.commons.codec.binary.Base64
import java.math.BigDecimal
import com.orientechnologies.orient.core.record.impl.ODocument
import org.apache.spark.TaskContext
import com.metreta.spark.orientdb.connector.rdd.OrientDocumentException
class ClassRDDFunctions[T](rdd: RDD[T]) extends Serializable with Logging {
  /**
   * Saves an instance of [[org.apache.spark.rdd.RDD RDD]] into an OrientDB class.
   * The target class must already exist in OrientDB; the RDD must consist of instances
   * of a case class (with zero or more attributes) or of primitive objects.
   *
   * @param orientClass name of the destination OrientDB class
   */
def saveToOrient(orientClass: String)(implicit connector: OrientDBConnector = OrientDBConnector(rdd.sparkContext.getConf)): Unit = {
rdd.foreachPartition { partition =>
val db = connector.databaseDocumentTx()
while (partition.hasNext) {
val obj = partition.next()
        val doc = new ODocument(orientClass)
setProperties("value", doc, obj)
db.save(doc)
}
db.commit()
db.close()
}
}
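  // Usage sketch (illustrative; assumes the package's implicit RDD-to-ClassRDDFunctions
  // conversion is in scope and a "Person" class already exists in OrientDB):
  //   case class Person(name: String, surname: String)
  //   sc.parallelize(Seq(Person("Larry", "Page"))).saveToOrient("Person")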
/**
* Upserts an instance of [[org.apache.spark.rdd.RDD RDD]] into an OrientDB class.
* @param orientClass
* @params keyColumns
* @params where
*/
def upsertToOrient(orientClass: String, keyColumns: List[String], where: String = "")(implicit connector: OrientDBConnector = OrientDBConnector(rdd.sparkContext.getConf)): Unit = {
if (keyColumns.size < 1)
      throw new OrientDocumentException(s"Please specify at least one key column for the ${orientClass} class.")
rdd.foreachPartition { partition =>
val db = connector.databaseDocumentTx()
while (partition.hasNext) {
val obj = partition.next()
var fromQuery = s"UPDATE ${orientClass} ${getInsertString(obj)} upsert return after @rid where ${getUpsertWhereString(obj, keyColumns)}"
if (!where.isEmpty()) fromQuery = fromQuery + s" AND ${where}"
try {
db.command(new OCommandSQL(fromQuery)).execute().asInstanceOf[java.util.ArrayList[Any]]
} catch {
case e: Exception => {
db.rollback()
e.printStackTrace()
}
}
}
db.commit()
db.close()
}
}
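  // Usage sketch (illustrative): upsert Person rows, matching existing documents on "name":
  //   rdd.upsertToOrient("Person", List("name"))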
  /**
   * Converts an instance of a case class (or a primitive value) to the SET clause
   * used when composing the SQL INSERT/UPDATE command.
   *
   * Example:
   * given a case class Person(name: String, surname: String)
   *
   * getInsertString(Person("Larry", "Page")) will return the String: "SET name = 'Larry', surname = 'Page'"
   *
   * @param obj the object to convert
   * @return the SET clause as a string
   */
private def getInsertString[T](obj: T): String = {
var insStr = "SET"
if (obj != null) {
obj match {
case o: Int => insStr = insStr + " value = " + o + ","
case o: Boolean => insStr = insStr + " value = " + o + ","
case o: BigDecimal => insStr = insStr + " value = " + o + ","
case o: Float => insStr = insStr + " value = " + o + ","
case o: Double => insStr = insStr + " value = " + o + ","
case o: java.util.Date => insStr = insStr + " value = date('" + orientDateFormat.format(o) + "'),"
case o: Short => insStr = insStr + " value = " + o + ","
case o: Long => insStr = insStr + " value = " + o + ","
case o: String => insStr = insStr + " value = '" + escapeString(o) + "',"
case o: Array[Byte] => insStr = insStr + " value = '" + Base64.encodeBase64String(o.asInstanceOf[Array[Byte]]) + "',"
case o: Byte => insStr = insStr + " value = " + o + ","
case o => {
obj.getClass().getDeclaredFields.foreach {
case field =>
field.setAccessible(true)
insStr = insStr + " " + field.getName + " = " + buildValueByType(field.get(obj)) + ","
}
}
}
}
insStr.dropRight(1)
}
private def getUpsertWhereString[T](obj: T, keyColumns: List[String]): String = {
var upsStr = ""
for (key <- keyColumns) {
val field = obj.getClass().getDeclaredField(key)
field.setAccessible(true)
val value = field.get(obj)
upsStr = upsStr + " " + key + " = " + buildValueByType(value) + " AND"
}
upsStr.dropRight(3)
}
private val orientDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
private def buildValueByType(fieldValue: AnyRef): String = fieldValue match {
    case _: Array[Byte] => "'" + Base64.encodeBase64String(fieldValue.asInstanceOf[Array[Byte]]) + "'"
case _: java.lang.String => "'" + escapeString(fieldValue.toString) + "'"
case _: java.util.Date => "date('" + orientDateFormat.format(fieldValue) + "')"
case _ => fieldValue.toString()
}
private def setProperties[T](fieldName: String, doc: ODocument, obj: T): Unit = {
if (obj != null) {
obj match {
case o: Int => doc.field(fieldName, o, OType.INTEGER)
case o: Boolean => doc.field(fieldName, o, OType.BOOLEAN)
case o: BigDecimal => doc.field(fieldName, o, OType.DECIMAL)
case o: Float => doc.field(fieldName, o, OType.FLOAT)
case o: Double => doc.field(fieldName, o, OType.DOUBLE)
case o: java.util.Date => doc.field(fieldName, orientDateFormat.format(o), OType.DATE)
case o: Short => doc.field(fieldName, o, OType.SHORT)
case o: Long => doc.field(fieldName, o, OType.LONG)
case o: String => doc.field(fieldName, o, OType.STRING)
        case o: Array[Byte] => doc.field(fieldName, o, OType.BINARY)
case o: Byte => doc.field(fieldName, o, OType.BYTE)
case _ => {
obj.getClass().getDeclaredFields.foreach {
case field =>
field.setAccessible(true)
setProperties(field.getName, doc, field.get(obj))
}
}
}
}
}
private def escapeString(in: String): String = {
return in.replace("'", "\\'").replace("\"", "\\\"")
}
}
|
metreta/spark-orientdb-connector
|
spark-orientdb-connector/src/main/scala/com/metreta/spark/orientdb/connector/ClassRDDFunctions.scala
|
Scala
|
apache-2.0
| 6,923
|
package db
import org.scalatestplus.play._
class UtilSpec extends PlaySpec {
"generateVersionSortKey" in {
Util.generateVersionSortKey("0.0.1") must be("6:10000.10000.10001")
Util.generateVersionSortKey("0") must be("3:0")
Util.generateVersionSortKey("other") must be("3:other")
Seq("0.0.10", "0.0.5", "1.0.0", "other").sortBy { Util.generateVersionSortKey(_) } must be(
Seq("other", "0.0.5", "0.0.10", "1.0.0")
)
}
}
|
flowcommerce/delta
|
api/test/db/UtilSpec.scala
|
Scala
|
mit
| 453
|
package frdomain.ch6
package domain
package model
import java.util.{ Date, Calendar }
import util.{ Try, Success, Failure }
import scalaz._
import Scalaz._
object common {
type Amount = BigDecimal
def today = Calendar.getInstance.getTime
}
import common._
case class Balance(amount: Amount = 0)
sealed trait Account {
def no: String
def name: String
def dateOfOpen: Option[Date]
def dateOfClose: Option[Date]
def balance: Balance
}
final case class CheckingAccount (no: String, name: String,
dateOfOpen: Option[Date], dateOfClose: Option[Date] = None, balance: Balance = Balance()) extends Account
final case class SavingsAccount (no: String, name: String, rateOfInterest: Amount,
dateOfOpen: Option[Date], dateOfClose: Option[Date] = None, balance: Balance = Balance()) extends Account
object Account {
private def validateAccountNo(no: String) =
if (no.isEmpty || no.size < 5) s"Account No has to be at least 5 characters long: found $no".failureNel[String]
else no.successNel[String]
private def validateOpenCloseDate(od: Date, cd: Option[Date]) = cd.map { c =>
if (c before od) s"Close date [$c] cannot be earlier than open date [$od]".failureNel[(Option[Date], Option[Date])]
else (od.some, cd).successNel[String]
}.getOrElse { (od.some, cd).successNel[String] }
private def validateRate(rate: BigDecimal) =
if (rate <= BigDecimal(0)) s"Interest rate $rate must be > 0".failureNel[BigDecimal]
else rate.successNel[String]
  def checkingAccount(no: String, name: String, openDate: Option[Date], closeDate: Option[Date],
    balance: Balance): \/[NonEmptyList[String], Account] = {
    val od = openDate.getOrElse(today)
    (
      validateAccountNo(no) |@|
      validateOpenCloseDate(od, closeDate)
    ) { (n, d) =>
      CheckingAccount(n, name, d._1, d._2, balance)
    }.disjunction
  }
  def savingsAccount(no: String, name: String, rate: BigDecimal, openDate: Option[Date],
    closeDate: Option[Date], balance: Balance): \/[NonEmptyList[String], Account] = {
    val od = openDate.getOrElse(today)
    (
      validateAccountNo(no) |@|
      validateOpenCloseDate(od, closeDate) |@|
      validateRate(rate)
    ) { (n, d, r) =>
      SavingsAccount(n, name, r, d._1, d._2, balance)
    }.disjunction
  }
private def validateAccountAlreadyClosed(a: Account) = {
if (a.dateOfClose isDefined) s"Account ${a.no} is already closed".failureNel[Account]
else a.successNel[String]
}
private def validateCloseDate(a: Account, cd: Date) = {
if (cd before a.dateOfOpen.get) s"Close date [$cd] cannot be earlier than open date [${a.dateOfOpen.get}]".failureNel[Date]
else cd.successNel[String]
}
  def close(a: Account, closeDate: Date): \/[NonEmptyList[String], Account] = {
(validateAccountAlreadyClosed(a) |@| validateCloseDate(a, closeDate)) { (acc, d) =>
acc match {
case c: CheckingAccount => c.copy(dateOfClose = Some(closeDate))
case s: SavingsAccount => s.copy(dateOfClose = Some(closeDate))
}
}.disjunction
}
private def checkBalance(a: Account, amount: Amount) = {
if (amount < 0 && a.balance.amount < -amount) s"Insufficient amount in ${a.no} to debit".failureNel[Account]
else a.successNel[String]
}
  def updateBalance(a: Account, amount: Amount): \/[NonEmptyList[String], Account] = {
(validateAccountAlreadyClosed(a) |@| checkBalance(a, amount)) { (_, _) =>
a match {
case c: CheckingAccount => c.copy(balance = Balance(c.balance.amount + amount))
case s: SavingsAccount => s.copy(balance = Balance(s.balance.amount + amount))
}
}.disjunction
}
def rate(a: Account) = a match {
case SavingsAccount(_, _, r, _, _, _) => r.some
case _ => None
}
}
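// Usage sketch (illustrative): the smart constructors return a scalaz disjunction,
// so validation failures accumulate in a NonEmptyList instead of throwing:
//   Account.checkingAccount("a-1234", "john", None, None, Balance()) // \/-(CheckingAccount(...))
//   Account.checkingAccount("a1", "john", None, None, Balance())     // -\/(NonEmptyList("Account No has to be ..."))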
|
debasishg/frdomain
|
src/main/scala/frdomain/ch6/domain/model/Account.scala
|
Scala
|
apache-2.0
| 3,820
|
package com.nekopiano.scala.processing.sandbox.poc.pdf
import com.nekopiano.scala.processing.{ScalaPAppCompanion, ScalaPApplet, ScalaPVector, TwoDimensionalPApp}
import processing.core.PGraphics
import processing.event.MouseEvent
/**
* Created on 24/07/2016.
*/
class PdfExportApp extends TwoDimensionalPApp {
var record = false
override def settings(): Unit = {
//size(800, 600, P3D)
//size(800, 600, P2D)
size(800, 600)
//smooth()
// for Retina
pixelDensity(2)
//noLoop()
//size(800, 600, PDF, "processing-sample.pdf")
//size(800, 600, SVG, "processing-sample.svg")
//beginRecord(PDF, "processing-sample.pdf")
}
override def setup(): Unit = {
background(250)
}
var startX = 0
override def draw(): Unit = {
if (record) {
//beginRaw(PDF, "output.pdf");
beginRecord(PDF, "output.pdf");
}
scale(scale)
background(250)
fill(0)
//rect(60, 80, 102, 76)
// rect(30, 20, 50, 50)
// scale(0.5f)
// rect(30, 20, 50, 50)
// scale(1f)
// stroke(200)
// beginShape(TRIANGLE_STRIP)
// vertex(30, 75)
// vertex(40, 20)
// vertex(50, 75)
// vertex(60, 20)
// vertex(70, 75)
// vertex(80, 20)
// vertex(90, 75)
// endShape()
//stroke(10,10,10,100)
stroke(50)
//strokeWeight(10)
// When line() w/ the 3rd dimension in 2D, you'll get the following warning:
// vertex() with x, y, and z coordinates can only be used with a renderer that supports 3D, such as P3D. Use a version without a z-coordinate instead.
// and JAVA2D or PDF doesn't display that line.
line(ScalaPVector.origin, ScalaPVector(300, 450))
line(ScalaPVector(width, height), ScalaPVector(230, 400))
stroke(random(50), random(255), random(255), 100)
line(startX, 0, random(0, width), height)
if (startX < width) {
startX += 1
} else {
startX = 0
}
text("SAMPLE Text", ScalaPVector(500, 500))
text("scale=" + scale, ScalaPVector(width/2, height/2))
// translate(width/2, height/2, -200)
// rotateZ(0.2F)
// rotateY(mouseX/500.0F)
// box(100)
//exit()
//endRecord()
if (record) {
//endRaw()
endRecord()
record = false;
}
}
// Hit 'r' to record a single frame
override def keyPressed() {
if (key == 'r') {
record = true;
}
}
var scale = 1f
override def mouseWheel(event:MouseEvent) {
val amount = event.getCount() / 100f
scale = scale + amount
}
}
object PdfExportApp extends ScalaPAppCompanion {
}
|
lamusique/ScalaProcessing
|
samples/src/test/scala/com/nekopiano/scala/processing/sandbox/poc/pdf/PdfExport.scala
|
Scala
|
apache-2.0
| 2,570
|
package com.ajlopez.scala.ajlisp
class Atom(name: String) extends SymbolicExpression {
def evaluate(context: Context): Any = context.getValue(name)
override def toString(): String = name
}
|
ajlopez/AjLispScala
|
src/main/scala/com/ajlopez/scala/ajlisp/Atom.scala
|
Scala
|
mit
| 214
|
package edu.iitd.nlp.ListExtraction
import scala.collection.mutable
case class FeatureVector(vec: mutable.ArrayBuffer[Double] = FeatureVector.Default().vec) {
def +(other: FeatureVector): FeatureVector = {
require(this.vec.length == other.vec.length)
FeatureVector(vec.zip(other.vec).map { case (a, b) => a + b })
}
def -(other: FeatureVector): FeatureVector = {
require(this.vec.length == other.vec.length)
FeatureVector(vec.zip(other.vec).map { case (a, b) => a - b })
}
def *(other: FeatureVector): Double = {
require(this.vec.length == other.vec.length)
vec.zip(other.vec).map { case (a, b) => a * b }.sum
}
def *(mult: Double): FeatureVector = {
FeatureVector(vec.map(_ * mult))
}
def /(div: Double): FeatureVector = {
FeatureVector(vec.map(_ / div))
}
def ==(other: FeatureVector): Boolean = {
require(this.vec.length == other.vec.length)
this.vec == other.vec || (this - other).vec.map(Math.abs).max < FeatureVector.eps
}
def normalised: FeatureVector = {
if (this.vec.sum == 0) this
else this / this.vec.sum
}
}
object FeatureVector {
val eps = 1e-6
val defaultNumFeatures = 6
def Default(n: Int = defaultNumFeatures) = {
val res = Zeros(n)
res.vec(1) = 1.0
res
}
def Zeros(n: Int = defaultNumFeatures) = FeatureVector(mutable.ArrayBuffer.fill(n)(0.0))
def NegativeInfinities(n: Int = defaultNumFeatures) = FeatureVector(mutable.ArrayBuffer.fill(n)(Double.NegativeInfinity))
def baseLine(n: Int = defaultNumFeatures) = {
require(n == defaultNumFeatures)
val res = Zeros(n)
res.vec(4) = -1.0
res.vec(5) = -1.0
res
}
def syntacticSimilarity(n: Int = defaultNumFeatures) = {
val res = Zeros(n)
// res.vec(2) = 1.0
res.vec(3) = 1.0
res
}
def bagOfWordsSimilarity(n: Int = defaultNumFeatures) = {
val res = Zeros(n)
res.vec(0) = 1.0
res
}
}
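// Usage sketch (illustrative):
//   val v = FeatureVector.bagOfWordsSimilarity() + FeatureVector.syntacticSimilarity()
//   v.normalised                 // entries 0 and 3 become 0.5 each
//   v * FeatureVector.Default()  // dot product against the default weight vector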
case class Params(leftDis: Int = 0, rightDis: Int = 0)
|
satwantrana/list-extractor
|
src/main/scala/edu/iitd/nlp/ListExtraction/FeatureVector.scala
|
Scala
|
mit
| 1,971
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util
import java.lang.management.ManagementFactory
import java.lang.reflect.{Field, Modifier}
import java.util.{IdentityHashMap, Random}
import scala.collection.mutable.ArrayBuffer
import scala.runtime.ScalaRunTime
import com.google.common.collect.MapMaker
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.Tests.TEST_USE_COMPRESSED_OOPS_KEY
import org.apache.spark.util.collection.OpenHashSet
/**
 * A trait that allows a class to give [[SizeEstimator]] more accurate size estimation.
 * When a class extends it, [[SizeEstimator]] will query `estimatedSize` and use the
 * returned value as the size of the object, instead of doing the estimation work itself.
* The difference between a [[KnownSizeEstimation]] and
* [[org.apache.spark.util.collection.SizeTracker]] is that, a
* [[org.apache.spark.util.collection.SizeTracker]] still uses [[SizeEstimator]] to
* estimate the size. However, a [[KnownSizeEstimation]] can provide a better estimation without
* using [[SizeEstimator]].
*/
private[spark] trait KnownSizeEstimation {
def estimatedSize: Long
}
/**
* :: DeveloperApi ::
* Estimates the sizes of Java objects (number of bytes of memory they occupy), for use in
* memory-aware caches.
*
* Based on the following JavaWorld article:
* http://www.javaworld.com/javaworld/javaqa/2003-12/02-qa-1226-sizeof.html
*/
@DeveloperApi
object SizeEstimator extends Logging {
/**
* Estimate the number of bytes that the given object takes up on the JVM heap. The estimate
* includes space taken up by objects referenced by the given object, their references, and so on
* and so forth.
*
* This is useful for determining the amount of heap space a broadcast variable will occupy on
* each executor or the amount of space each object will take when caching objects in
* deserialized form. This is not the same as the serialized size of the object, which will
* typically be much smaller.
*/
def estimate(obj: AnyRef): Long = estimate(obj, new IdentityHashMap[AnyRef, AnyRef])
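  // Illustrative: on a 64-bit JVM with compressed oops, estimate(new Array[Long](128))
  // returns roughly 16 (header + length field, aligned) + 128 * 8 = 1040 bytes.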
// Sizes of primitive types
private val BYTE_SIZE = 1
private val BOOLEAN_SIZE = 1
private val CHAR_SIZE = 2
private val SHORT_SIZE = 2
private val INT_SIZE = 4
private val LONG_SIZE = 8
private val FLOAT_SIZE = 4
private val DOUBLE_SIZE = 8
// Fields can be primitive types, sizes are: 1, 2, 4, 8. Or fields can be pointers. The size of
// a pointer is 4 or 8 depending on the JVM (32-bit or 64-bit) and UseCompressedOops flag.
// The sizes should be in descending order, as we will use that information for fields placement.
private val fieldSizes = List(8, 4, 2, 1)
// Alignment boundary for objects
// TODO: Is this arch dependent ?
private val ALIGN_SIZE = 8
// A cache of ClassInfo objects for each class
// We use weakKeys to allow GC of dynamically created classes
private val classInfos = new MapMaker().weakKeys().makeMap[Class[_], ClassInfo]()
// Object and pointer sizes are arch dependent
private var is64bit = false
// Size of an object reference
// Based on https://wikis.oracle.com/display/HotSpotInternals/CompressedOops
private var isCompressedOops = false
private var pointerSize = 4
// Minimum size of a java.lang.Object
private var objectSize = 8
initialize()
// Sets object size, pointer size based on architecture and CompressedOops settings
// from the JVM.
private def initialize() {
val arch = System.getProperty("os.arch")
is64bit = arch.contains("64") || arch.contains("s390x")
isCompressedOops = getIsCompressedOops
objectSize = if (!is64bit) 8 else {
if (!isCompressedOops) {
16
} else {
12
}
}
pointerSize = if (is64bit && !isCompressedOops) 8 else 4
classInfos.clear()
classInfos.put(classOf[Object], new ClassInfo(objectSize, Nil))
}
private def getIsCompressedOops: Boolean = {
// This is only used by tests to override the detection of compressed oops. The test
// actually uses a system property instead of a SparkConf, so we'll stick with that.
if (System.getProperty(TEST_USE_COMPRESSED_OOPS_KEY) != null) {
return System.getProperty(TEST_USE_COMPRESSED_OOPS_KEY).toBoolean
}
// java.vm.info provides compressed ref info for IBM JDKs
if (System.getProperty("java.vendor").contains("IBM")) {
return System.getProperty("java.vm.info").contains("Compressed Ref")
}
try {
val hotSpotMBeanName = "com.sun.management:type=HotSpotDiagnostic"
val server = ManagementFactory.getPlatformMBeanServer()
// NOTE: This should throw an exception in non-Sun JVMs
// scalastyle:off classforname
val hotSpotMBeanClass = Class.forName("com.sun.management.HotSpotDiagnosticMXBean")
val getVMMethod = hotSpotMBeanClass.getDeclaredMethod("getVMOption",
Class.forName("java.lang.String"))
// scalastyle:on classforname
val bean = ManagementFactory.newPlatformMXBeanProxy(server,
hotSpotMBeanName, hotSpotMBeanClass)
// TODO: We could use reflection on the VMOption returned ?
getVMMethod.invoke(bean, "UseCompressedOops").toString.contains("true")
} catch {
case e: Exception =>
// Guess whether they've enabled UseCompressedOops based on whether maxMemory < 32 GB
val guess = Runtime.getRuntime.maxMemory < (32L*1024*1024*1024)
val guessInWords = if (guess) "yes" else "not"
logWarning("Failed to check whether UseCompressedOops is set; assuming " + guessInWords)
return guess
}
}
/**
* The state of an ongoing size estimation. Contains a stack of objects to visit as well as an
* IdentityHashMap of visited objects, and provides utility methods for enqueueing new objects
* to visit.
*/
private class SearchState(val visited: IdentityHashMap[AnyRef, AnyRef]) {
val stack = new ArrayBuffer[AnyRef]
var size = 0L
def enqueue(obj: AnyRef) {
if (obj != null && !visited.containsKey(obj)) {
visited.put(obj, null)
stack += obj
}
}
def isFinished(): Boolean = stack.isEmpty
def dequeue(): AnyRef = {
val elem = stack.last
stack.trimEnd(1)
elem
}
}
/**
* Cached information about each class. We remember two things: the "shell size" of the class
* (size of all non-static fields plus the java.lang.Object size), and any fields that are
* pointers to objects.
*/
private class ClassInfo(
val shellSize: Long,
val pointerFields: List[Field]) {}
private def estimate(obj: AnyRef, visited: IdentityHashMap[AnyRef, AnyRef]): Long = {
val state = new SearchState(visited)
state.enqueue(obj)
while (!state.isFinished) {
visitSingleObject(state.dequeue(), state)
}
state.size
}
private def visitSingleObject(obj: AnyRef, state: SearchState) {
val cls = obj.getClass
if (cls.isArray) {
visitArray(obj, cls, state)
} else if (cls.getName.startsWith("scala.reflect")) {
// Many objects in the scala.reflect package reference global reflection objects which, in
// turn, reference many other large global objects. Do nothing in this case.
} else if (obj.isInstanceOf[ClassLoader] || obj.isInstanceOf[Class[_]]) {
// Hadoop JobConfs created in the interpreter have a ClassLoader, which greatly confuses
// the size estimator since it references the whole REPL. Do nothing in this case. In
// general all ClassLoaders and Classes will be shared between objects anyway.
} else {
obj match {
case s: KnownSizeEstimation =>
state.size += s.estimatedSize
case _ =>
val classInfo = getClassInfo(cls)
state.size += alignSize(classInfo.shellSize)
for (field <- classInfo.pointerFields) {
state.enqueue(field.get(obj))
}
}
}
}
// Estimate the size of arrays larger than ARRAY_SIZE_FOR_SAMPLING by sampling.
private val ARRAY_SIZE_FOR_SAMPLING = 400
private val ARRAY_SAMPLE_SIZE = 100 // should be lower than ARRAY_SIZE_FOR_SAMPLING
private def visitArray(array: AnyRef, arrayClass: Class[_], state: SearchState) {
val length = ScalaRunTime.array_length(array)
val elementClass = arrayClass.getComponentType()
// Arrays have object header and length field which is an integer
var arrSize: Long = alignSize(objectSize + INT_SIZE)
if (elementClass.isPrimitive) {
arrSize += alignSize(length.toLong * primitiveSize(elementClass))
state.size += arrSize
} else {
arrSize += alignSize(length.toLong * pointerSize)
state.size += arrSize
if (length <= ARRAY_SIZE_FOR_SAMPLING) {
var arrayIndex = 0
while (arrayIndex < length) {
state.enqueue(ScalaRunTime.array_apply(array, arrayIndex).asInstanceOf[AnyRef])
arrayIndex += 1
}
} else {
// Estimate the size of a large array by sampling elements without replacement.
// To exclude the shared objects that the array elements may link, sample twice
// and use the min one to calculate array size.
val rand = new Random(42)
val drawn = new OpenHashSet[Int](2 * ARRAY_SAMPLE_SIZE)
val s1 = sampleArray(array, state, rand, drawn, length)
val s2 = sampleArray(array, state, rand, drawn, length)
val size = math.min(s1, s2)
state.size += math.max(s1, s2) +
(size * ((length - ARRAY_SAMPLE_SIZE) / (ARRAY_SAMPLE_SIZE))).toLong
}
}
}
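  // Illustrative numbers: for length = 1000 with ARRAY_SAMPLE_SIZE = 100, the larger
  // sample is counted once and the smaller one is scaled by (1000 - 100) / 100 = 9 to
  // extrapolate the remaining elements, biasing low to discount shared references.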
private def sampleArray(
array: AnyRef,
state: SearchState,
rand: Random,
drawn: OpenHashSet[Int],
length: Int): Long = {
var size = 0L
for (i <- 0 until ARRAY_SAMPLE_SIZE) {
var index = 0
do {
index = rand.nextInt(length)
} while (drawn.contains(index))
drawn.add(index)
val obj = ScalaRunTime.array_apply(array, index).asInstanceOf[AnyRef]
if (obj != null) {
size += SizeEstimator.estimate(obj, state.visited).toLong
}
}
size
}
private def primitiveSize(cls: Class[_]): Int = {
if (cls == classOf[Byte]) {
BYTE_SIZE
} else if (cls == classOf[Boolean]) {
BOOLEAN_SIZE
} else if (cls == classOf[Char]) {
CHAR_SIZE
} else if (cls == classOf[Short]) {
SHORT_SIZE
} else if (cls == classOf[Int]) {
INT_SIZE
} else if (cls == classOf[Long]) {
LONG_SIZE
} else if (cls == classOf[Float]) {
FLOAT_SIZE
} else if (cls == classOf[Double]) {
DOUBLE_SIZE
} else {
throw new IllegalArgumentException(
"Non-primitive class " + cls + " passed to primitiveSize()")
}
}
/**
* Get or compute the ClassInfo for a given class.
*/
private def getClassInfo(cls: Class[_]): ClassInfo = {
// Check whether we've already cached a ClassInfo for this class
val info = classInfos.get(cls)
if (info != null) {
return info
}
val parent = getClassInfo(cls.getSuperclass)
var shellSize = parent.shellSize
var pointerFields = parent.pointerFields
val sizeCount = Array.fill(fieldSizes.max + 1)(0)
// iterate through the fields of this class and gather information.
for (field <- cls.getDeclaredFields) {
if (!Modifier.isStatic(field.getModifiers)) {
val fieldClass = field.getType
if (fieldClass.isPrimitive) {
sizeCount(primitiveSize(fieldClass)) += 1
} else {
// Note: in Java 9+ this would be better with trySetAccessible and canAccess
try {
field.setAccessible(true) // Enable future get()'s on this field
pointerFields = field :: pointerFields
} catch {
// If the field isn't accessible, we can still record the pointer size
// but can't know more about the field, so ignore it
case _: SecurityException =>
// do nothing
// Java 9+ can throw InaccessibleObjectException but the class is Java 9+-only
case re: RuntimeException
if re.getClass.getSimpleName == "InaccessibleObjectException" =>
// do nothing
}
sizeCount(pointerSize) += 1
}
}
}
// Based on the simulated field layout code in Aleksey Shipilev's report:
// http://cr.openjdk.java.net/~shade/papers/2013-shipilev-fieldlayout-latest.pdf
// The code is in Figure 9.
// The simplified idea of field layout consists of 4 parts (see more details in the report):
//
// 1. field alignment: HotSpot lays out the fields aligned by their size.
// 2. object alignment: HotSpot rounds instance size up to 8 bytes
// 3. consistent fields layouts throughout the hierarchy: This means we should layout
// superclass first. And we can use superclass's shellSize as a starting point to layout the
// other fields in this class.
// 4. class alignment: HotSpot rounds field blocks up to HeapOopSize not 4 bytes, confirmed
// with Aleksey. see https://bugs.openjdk.java.net/browse/CODETOOLS-7901322
//
// The real world field layout is much more complicated. There are three kinds of fields
// order in Java 8. And we don't consider the @contended annotation introduced by Java 8.
// see the HotSpot classloader code, layout_fields method for more details.
// hg.openjdk.java.net/jdk8/jdk8/hotspot/file/tip/src/share/vm/classfile/classFileParser.cpp
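    // Worked example (illustrative, 64-bit JVM with compressed oops, so the Object
    // shellSize starts at 12): for a class with one Long field and one Int field,
    //   size 8: alignedSize = max(12, alignSizeUp(12, 8) + 8) = 24, shellSize = 20
    //   size 4: alignedSize = max(24, alignSizeUp(20, 4) + 4) = 24, shellSize = 24
    // and the final shellSize = alignSizeUp(24, pointerSize) = 24 bytes.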
var alignedSize = shellSize
for (size <- fieldSizes if sizeCount(size) > 0) {
val count = sizeCount(size).toLong
// If there are internal gaps, smaller field can fit in.
alignedSize = math.max(alignedSize, alignSizeUp(shellSize, size) + size * count)
shellSize += size * count
}
// Should choose a larger size to be new shellSize and clearly alignedSize >= shellSize, and
// round up the instance filed blocks
shellSize = alignSizeUp(alignedSize, pointerSize)
// Create and cache a new ClassInfo
val newInfo = new ClassInfo(shellSize, pointerFields)
classInfos.put(cls, newInfo)
newInfo
}
private def alignSize(size: Long): Long = alignSizeUp(size, ALIGN_SIZE)
  /**
   * Compute aligned size. The alignSize must be 2^n, otherwise the result will be wrong.
   * When alignSize = 2^n, alignSize - 1 = 2^n - 1, whose binary representation has exactly
   * n trailing 1s (0b0...01...1), so ~(alignSize - 1) is 0b1...10...0. Hence,
   * (size + alignSize - 1) & ~(alignSize - 1) zeroes the last n bits, which yields a
   * multiple of alignSize.
   */
private def alignSizeUp(size: Long, alignSize: Int): Long =
(size + alignSize - 1) & ~(alignSize - 1)
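  // e.g. alignSizeUp(13, 8) == (13 + 7) & ~7 == 16, while alignSizeUp(16, 8) == 16.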
}
|
yanboliang/spark
|
core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
|
Scala
|
apache-2.0
| 15,719
|
package org.bitcoins.testkit.async
import org.bitcoins.asyncutil.AsyncUtil
import org.scalatest.exceptions.{StackDepthException, TestFailedException}
import scala.concurrent.{ExecutionContext, Future}
import scala.concurrent.duration.FiniteDuration
abstract class TestAsyncUtil extends AsyncUtil with Serializable {
override protected def retryUntilSatisfiedWithCounter(
conditionF: () => Future[Boolean],
duration: FiniteDuration,
counter: Int,
maxTries: Int,
stackTrace: Array[StackTraceElement])(implicit
ec: ExecutionContext): Future[Unit] = {
val retryF = super
.retryUntilSatisfiedWithCounter(conditionF,
duration,
counter,
maxTries,
stackTrace)
TestAsyncUtil.transformRetryToTestFailure(retryF)
}
}
object TestAsyncUtil extends TestAsyncUtil {
/** As opposed to the AsyncUtil in the rpc project, in the testkit, we can assume that
* TestAsyncUtil methods are being called from tests and as such, we want to trim the stack
* trace to exclude stack elements that occur before the beginning of a test.
* Additionally, we want to transform RpcRetryExceptions to TestFailedExceptions which
* conveniently mention the line that called the TestAsyncUtil method.
*/
def transformRetryToTestFailure[T](fut: Future[T])(implicit
ec: ExecutionContext): Future[T] = {
    def transformRetry(err: Throwable): Throwable = err match {
      case retryErr: RpcRetryException =>
        val relevantStackTrace = retryErr.caller.tail
          .dropWhile(elem => retryErr.internalFiles.contains(elem.getFileName))
          .takeWhile(!_.getFileName.contains("TestSuite"))
        val stackElement = relevantStackTrace.head
        val file = stackElement.getFileName
        val path = stackElement.getClassName
        val line = stackElement.getLineNumber
        val pos = org.scalactic.source.Position(file, path, line)
        val newErr = new TestFailedException({ _: StackDepthException =>
                                               Some(retryErr.message)
                                             },
                                             None,
                                             pos)
        newErr.setStackTrace(relevantStackTrace)
        newErr
      case _ => err
    }
fut.transform({ elem: T =>
elem
},
transformRetry)
}
}
|
bitcoin-s/bitcoin-s
|
testkit/src/main/scala/org/bitcoins/testkit/async/TestAsyncUtil.scala
|
Scala
|
mit
| 2,627
|
package org.specs2.exter.matcher
import org.specs2.matcher.{Expectable, Matcher}
import org.specs2.specification.SpecificationStructure
trait ExterSeq { self: SpecificationStructure =>
  /** Renders a sequence in prose, e.g. Seq(1, 2, 3) becomes "'1', '2' and '3'";
    * the reverse trick swaps only the last comma for " and ". */
  private def seqToEnglish[A](seq: Seq[A]) = {
seq.map(x => s"'$x'")
.mkString(", ")
.reverse
.replaceFirst(" ,", " and ".reverse)
.reverse
}
/** Assert that a given sequence contains all of the listed elements, regardless of order. */
def containAll[A](values: A*) = new Matcher[Seq[A]] {
def apply[S <: Seq[A]](ex: Expectable[S]) = {
val subject = ex.value
val notFound = values.filter(!subject.contains(_))
result(
notFound.length == 0,
s"$subject contained ${seqToEnglish(values)}",
s"$subject does not contain ${seqToEnglish(notFound)}",
ex
)
}
}
/** Assert that a given sequence contains all and no more of the listed elements, regardless of order. */
def containExactly[A](values: A*) = new Matcher[Seq[A]] {
def apply[S <: Seq[A]](ex: Expectable[S]) = {
val subject = ex.value
val notFound = values.filter(!subject.contains(_))
lazy val diff = subject.toSet.diff(values.toSet).toSeq
lazy val failMessage =
if (notFound.length == 0) s"$subject had extra values ${seqToEnglish(diff)}"
else s"$subject does not contain ${seqToEnglish(notFound)}"
result(
subject.length == values.length && notFound.length == 0,
s"$subject contained exactly ${seqToEnglish(values)}",
failMessage,
ex
)
}
}
/** Assert that at least one of the listed elements is within the given sequence */
def containSome[A](values: A*) = new Matcher[Seq[A]] {
def apply[S <: Seq[A]](ex: Expectable[S]) = {
val subject = ex.value
val found = values.filter(subject.contains)
result(
found.length > 0,
s"${seqToEnglish(found)} found in $subject",
s"None of the expected values found (${seqToEnglish(values)}}) in $subject",
ex
)
}
}
}
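// Usage sketch (illustrative, inside a specification that mixes in ExterSeq):
//   Seq(1, 2, 3) must containAll(1, 2)
//   Seq(1, 2) must containExactly(2, 1)
//   Seq(1, 2) must containSome(2, 9)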
|
tstone/specs2-exter
|
src/main/scala/org/specs2/exter/matcher/ExterSeq.scala
|
Scala
|
mit
| 2,070
|
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box.{Calculated, CtBoxIdentifier, CtBoolean}
import uk.gov.hmrc.ct.computations.calculations.LowEmissionCarsCalculator
import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever
case class LEC10(value: Boolean) extends CtBoxIdentifier("Disposals Exceed Special Rate Pool") with CtBoolean
object LEC10 extends Calculated[LEC10, ComputationsBoxRetriever] with LowEmissionCarsCalculator {
override def calculate(fieldValueRetriever: ComputationsBoxRetriever): LEC10 =
LEC10(disposalsExceedsSpecialRatePool(fieldValueRetriever.retrieveLEC01(),
fieldValueRetriever.retrieveCP666(),
fieldValueRetriever.retrieveCP667()
))
}
|
scottcutts/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/computations/LEC10.scala
|
Scala
|
apache-2.0
| 1,319
|
package com.wallace.demo.app.common
/**
* Created by Wallace on 2016/11/6.
*/
trait UserDefineFunc {
def toBigInt(x: Int): BigInt = {
math.BigInt.int2bigInt(x)
}
}
|
BiyuHuang/CodePrototypesDemo
|
demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/UserDefineFunc.scala
|
Scala
|
apache-2.0
| 177
|
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import java.io.{OutputStream, PrintStream}
import monix.reactive.Observable
import monix.execution.exceptions.DummyException
import monix.execution.atomic.AtomicInt
import scala.concurrent.duration.Duration.Zero
import scala.concurrent.duration._
object DumpSuite extends BaseOperatorSuite {
def dummyOut(count: AtomicInt = null) = {
val out = new OutputStream { def write(b: Int) = () }
new PrintStream(out) {
override def println(x: String) = {
super.println(x)
if (count != null) {
val c = count.decrementAndGet()
if (c == 0) throw new DummyException("dummy")
}
}
}
}
def createObservable(sourceCount: Int) = Some {
val o = Observable.range(0, sourceCount)
.dump("o", dummyOut())
Sample(o, count(sourceCount), sum(sourceCount), Zero, Zero)
}
def observableInError(sourceCount: Int, ex: Throwable) = Some {
val o = createObservableEndingInError(Observable.range(0, sourceCount), ex)
.dump("o", dummyOut())
Sample(o, count(sourceCount), sum(sourceCount), Zero, Zero)
}
def brokenUserCodeObservable(sourceCount: Int, ex: Throwable) = None
def count(sourceCount: Int) = sourceCount
def sum(sourceCount: Int) =
sourceCount * (sourceCount - 1) / 2
override def cancelableObservables(): Seq[DumpSuite.Sample] = {
val sample = Observable.range(0, 10).delayOnNext(1.second)
.dump("o", dummyOut())
    Seq(Sample(sample, 0, 0, 0.seconds, 0.seconds))
}
}
|
Wogan/monix
|
monix-reactive/shared/src/test/scala/monix/reactive/internal/operators/DumpSuite.scala
|
Scala
|
apache-2.0
| 2,202
|
package ch.ltouroumov.coe.common
import ch.ltouroumov.coe.utils.bootstraper._
object Items extends ItemsRegister {
protected val factory = ItemRegistrationHandlerFactory
@LazyRegister
lazy val amethystCrystal = register("amethystCrystal", 5000, new AmethystCrystal(_))(
LocalizedName("Amethyst Crystal"),
OreDictEntry("crystalAmethyst")
)
@LazyRegister
lazy val amethystDust = register("amethystDust", 5001, new AmethystDust(_))(
LocalizedName("Amethyst Dust"),
OreDictEntry("dustAmethyst")
)
@LazyRegister
lazy val gem = register("gem", 5002, new GenericGem(_))(
LocalizedNames(GenericGem.localizedNamesTable)
)
}
|
ltouroumov/craft-of-exile
|
src/main/scala/ch/ltouroumov/coe/common/Items.scala
|
Scala
|
gpl-3.0
| 661
|
import scala.util.parsing.json.JSON
import java.io.{File, PrintWriter}
import java.math.BigDecimal
object JsonToJava {
type JsonObject = Map[String, Any]
type JsonArray = List[Any]
JSON.globalNumberParser = (input: String) => {
if (input.contains("."))
new BigDecimal(input)
else
Integer.parseInt(input)
}
def evalObject(className: String, obj: JsonObject): Boolean = {
val writer = new PrintWriter(new File(String.format("../%s.java", upperFirstChar(className))))
for ((k, v) <- obj) {
v match {
case value: JsonObject => {
writeToFile(writer, classNameify(k));
evalObject(k, value)
}
case value: JsonArray => evalArray(writer, k, value)
case _ => evalPrimitive(writer, k, v)
}
}
writer.close()
true
}
def classNameify(str: String): String = {
String.format("public %s %s;\n",
upperFirstChar(str), str)
}
def upperFirstChar(str: String): String = {
str.substring(0, 1).toUpperCase() + str.substring(1, str.length())
}
def evalPrimitive(writer: PrintWriter, fieldName: String, primitive: Any): Boolean = {
if (primitive == null)
writeToFile(writer, String.format("public %s %s;\n", "String",
fieldName));
else
writeToFile(writer, String.format("public %s %s;\n", primitive.getClass.getSimpleName(),
fieldName));
true
}
def writeToFile(writer: PrintWriter, str: String) = {
// println(str);
writer.write(str);
}
def evalArray(writer: PrintWriter, className: String, arr: List[Any]): Boolean = {
if (arr.isEmpty) {
writeToFile(writer, String.format("public %s %s;\n", "String[]",
className));
true
} else {
writeToFile(writer, String.format("public ArrayList<%s> %s;\n", upperFirstChar(className),
className));
      arr.head match {
        case item: JsonObject => evalObject(className, item)
        case item: JsonArray => evalArray(writer, className, item)
        case _ => true // primitive element types: the field declaration above is all we emit
      }
}
}
def main(args: Array[String]) {
if (args.isEmpty) {
println("USAGE: java -jar jsontojava.jar <JSON_FILE_PATH>")
return
}
val json = io.Source.fromFile(args.head).mkString
val result = JSON.parseFull(json)
result match {
case Some(e) => e match {
case x: JsonObject => evalObject("RootObject", x)
// case x: JsonArray => evalArray(x)
case _ => println("Error!")
}
case None => println("Failed.")
}
}
}
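// Usage sketch (illustrative): given a file data.json containing {"name": "ada", "age": 36},
// running `java -jar jsontojava.jar data.json` writes ../RootObject.java with fields
//   public String name;
//   public Integer age;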
|
amigold/JsonToJava
|
src/main/scala/JsonToJava.scala
|
Scala
|
mit
| 2,670
|
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.spark
import java.util.{Collections, Locale}
import com.typesafe.scalalogging.LazyLogging
import org.apache.hadoop.conf.Configuration
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.sources._
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.storage.StorageLevel
import org.geotools.data.{DataStoreFinder, Query, Transaction}
import org.geotools.filter.text.ecql.ECQL
import org.locationtech.geomesa.filter.FilterHelper
import org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore
import org.locationtech.geomesa.spark.GeoMesaRelation.{CachedRDD, IndexedRDD, PartitionedIndexedRDD, PartitionedRDD}
import org.locationtech.geomesa.spark.GeoMesaSparkSQL.GEOMESA_SQL_FEATURE
import org.locationtech.geomesa.spark.jts.util.WKTUtils
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.jts.geom.Envelope
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import scala.collection.Iterator
import scala.util.control.NonFatal
/**
* The Spark Relation that builds the scan over the GeoMesa table
*
* @param sqlContext spark sql context
* @param sft simple feature type associated with the rows in the relation
* @param schema spark sql schema (must correspond to the sft)
 * @param params user parameters, generally for configuring the underlying data store and/or caching/partitioning
* @param filter a push-down geotools filter applied to the relation
* @param cached an optional cached RDD, used to speed up queries when enabled
* @param partitioned an optional spatially partitioned RDD, used to speed up spatial joins when enabled
*/
case class GeoMesaRelation(
sqlContext: SQLContext,
sft: SimpleFeatureType,
schema: StructType,
params: Map[String, String],
filter: Option[org.opengis.filter.Filter],
cached: Option[CachedRDD],
partitioned: Option[PartitionedRDD]
) extends BaseRelation with PrunedFilteredScan with LazyLogging {
import scala.collection.JavaConverters._
/**
* Attempts to do an optimized join between two relations.
*
* Currently this method uses grid partitioning on both relations so that the join comparisons
* only need to be applied on each pair of partitions, instead of globally. This only works
* if both relations have already been grid partitioned.
*
* @param other relation to join
* @param condition join condition
* @return an optimized join, if possible to do so
*/
def join(other: GeoMesaRelation, condition: Expression): Option[GeoMesaJoinRelation] = {
val opt = for { p <- partitioned; o <- other.partitioned } yield {
val toJoin = if (p.envelopes == o.envelopes) {
Some(other)
} else if (p.cover) {
val repartitioned: SpatialRDD = p.partitions match {
case None => p.raw
case Some(partitions) => SpatialRDD(p.raw.repartition(partitions), p.raw.schema)
}
val parallelism = p.partitions.getOrElse(sqlContext.sparkContext.defaultParallelism)
val rdd = RelationUtils.grid(repartitioned, p.envelopes, parallelism)
val partitioned = Some(PartitionedRDD(rdd, p.raw, p.envelopes, p.partitions, p.cover))
Some(other.copy(partitioned = partitioned))
} else {
logger.warn("Joining across two relations that are not partitioned by the same scheme - unable to optimize")
None
}
toJoin.map { rel =>
GeoMesaJoinRelation(sqlContext, this, rel, StructType(schema.fields ++ rel.schema.fields), condition)
}
}
opt.flatten
}
override def buildScan(
requiredColumns: Array[String],
filters: Array[org.apache.spark.sql.sources.Filter]): RDD[Row] = {
lazy val debug =
s"filt = $filter, filters = ${filters.mkString(",")}, requiredColumns = ${requiredColumns.mkString(",")}"
val filt = {
val sum = Seq.newBuilder[org.opengis.filter.Filter]
filter.foreach(sum += _)
filters.foreach(f => SparkUtils.sparkFilterToCQLFilter(f).foreach(sum += _))
FilterHelper.filterListAsAnd(sum.result).getOrElse(org.opengis.filter.Filter.INCLUDE)
}
val requiredAttributes = requiredColumns.filterNot(_ == "__fid__")
// avoid closures on complex objects
val schema = this.schema // note: referencing case class members evidently serializes the whole class??
val typeName = sft.getTypeName
val result: RDD[SimpleFeature] = cached match {
case None =>
logger.debug(s"Building scan, $debug")
val conf = new Configuration(sqlContext.sparkContext.hadoopConfiguration)
val query = new Query(typeName, filt, requiredAttributes)
GeoMesaSpark(params.asJava).rdd(conf, sqlContext.sparkContext, params, query)
case Some(IndexedRDD(rdd)) =>
logger.debug(s"Building in-memory scan, $debug")
val cql = ECQL.toCQL(filt)
rdd.flatMap { engine =>
val query = new Query(typeName, ECQL.toFilter(cql), requiredAttributes)
SelfClosingIterator(engine.getFeatureReader(query, Transaction.AUTO_COMMIT))
}
case Some(PartitionedIndexedRDD(rdd, _)) =>
logger.debug(s"Building partitioned in-memory scan, $debug")
val cql = ECQL.toCQL(filt)
rdd.flatMap { case (_, engine) =>
val query = new Query(typeName, ECQL.toFilter(cql), requiredAttributes)
SelfClosingIterator(engine.getFeatureReader(query, Transaction.AUTO_COMMIT))
}
}
val extractors = SparkUtils.getExtractors(requiredColumns, schema)
result.map(SparkUtils.sf2row(schema, _, extractors))
}
override def unhandledFilters(filters: Array[Filter]): Array[Filter] = {
filters.filter {
case _ @ (_:IsNotNull | _:IsNull) => true
case _ => false
}
}
}
object GeoMesaRelation extends LazyLogging {
import scala.collection.JavaConverters._
/**
* Create a new relation based on the input parameters
*
* @param sqlContext sql context
* @param params parameters
* @return
*/
def apply(sqlContext: SQLContext, params: Map[String, String]): GeoMesaRelation = {
val name = params.getOrElse(GEOMESA_SQL_FEATURE,
throw new IllegalArgumentException(s"Feature type must be specified with '$GEOMESA_SQL_FEATURE'"))
val sft = GeoMesaSpark(params.asJava).sft(params, name).getOrElse {
throw new IllegalArgumentException(s"Could not load feature type with name '$name'")
}
apply(sqlContext, params, SparkUtils.createStructType(sft), sft)
}
/**
* Create a new relation based on the input parameters, with the given schema
*
* @param sqlContext sql context
* @param params parameters
* @param schema schema
* @return
*/
def apply(sqlContext: SQLContext, params: Map[String, String], schema: StructType): GeoMesaRelation = {
val name = params.getOrElse(GEOMESA_SQL_FEATURE,
throw new IllegalArgumentException(s"Feature type must be specified with '$GEOMESA_SQL_FEATURE'"))
val sft = GeoMesaSpark(params.asJava).sft(params, name).getOrElse {
throw new IllegalArgumentException(s"Could not load feature type with name '$name'")
}
apply(sqlContext, params, schema, sft)
}
/**
* Create a new relation based on the input parameters, with the given schema and underlying feature type
*
* @param sqlContext sql context
* @param params parameters
* @param schema schema
* @param sft simple feature type
* @return
*/
def apply(
sqlContext: SQLContext,
params: Map[String, String],
schema: StructType,
sft: SimpleFeatureType): GeoMesaRelation = {
logger.trace(s"Creating GeoMesaRelation with sft: $sft")
def get[T](key: String, transform: String => T, default: => T): T = {
params.get(key) match {
case None => default
case Some(v) =>
try { transform(v) } catch {
case NonFatal(e) => logger.error(s"Error evaluating param '$key' with value '$v':", e); default
}
}
}
def rawRDD: SpatialRDD = {
val query = new Query(sft.getTypeName, ECQL.toFilter(params.getOrElse("query", "INCLUDE")))
GeoMesaSpark(params.asJava).rdd(new Configuration(), sqlContext.sparkContext, params, query)
}
val partitioned = if (!get[Boolean]("spatial", _.toBoolean, false)) { None } else {
val raw = rawRDD
val bounds: Envelope = params.get("bounds") match {
case None => RelationUtils.getBound(raw)
case Some(b) =>
try { WKTUtils.read(b).getEnvelopeInternal } catch {
case NonFatal(e) => throw new IllegalArgumentException(s"Error reading provided bounds '$b':", e)
}
}
val partitions = Option(get[Int]("partitions", _.toInt, -1)).filter(_ > 0)
val parallelism = partitions.getOrElse(sqlContext.sparkContext.defaultParallelism)
// control partitioning strategies that require a sample of the data
lazy val sampleSize = get[Int]("sampleSize", _.toInt, 100)
lazy val threshold = get[Double]("threshold", _.toDouble, 0.3)
val envelopes = params.getOrElse("strategy", "equal").toLowerCase(Locale.US) match {
case "equal" => RelationUtils.equalPartitioning(bounds, parallelism)
case "earth" => RelationUtils.wholeEarthPartitioning(parallelism)
case "weighted" => RelationUtils.weightedPartitioning(raw, bounds, parallelism, sampleSize)
case "rtree" => RelationUtils.rtreePartitioning(raw, parallelism, sampleSize, threshold)
case s => throw new IllegalArgumentException(s"Invalid partitioning strategy: $s")
}
val rdd = RelationUtils.grid(raw, envelopes, parallelism)
rdd.persist(StorageLevel.MEMORY_ONLY)
Some(PartitionedRDD(rdd, raw, envelopes, partitions, get[Boolean]("cover", _.toBoolean, false)))
}
val cached = if (!get[Boolean]("cache", _.toBoolean, false)) { None } else {
val check = Collections.singletonMap[String, java.io.Serializable]("cqengine", "true")
if (!DataStoreFinder.getAvailableDataStores.asScala.exists(_.canProcess(check))) {
throw new IllegalArgumentException("Caching requires the GeoCQEngineDataStore to be available on the classpath")
}
// avoid closure on full sft
val typeName = sft.getTypeName
val encodedSft = SimpleFeatureTypes.encodeType(sft, includeUserData = true)
val indexGeom = get[Boolean]("indexGeom", _.toBoolean, false)
partitioned match {
case Some(p) =>
val rdd = p.rdd.mapValues { iter =>
val engine = new GeoCQEngineDataStore(indexGeom)
engine.createSchema(SimpleFeatureTypes.createType(typeName, encodedSft))
engine.namesToEngine.get(typeName).insert(iter)
engine
}
p.rdd.unpersist() // make this call blocking?
rdd.persist(StorageLevel.MEMORY_ONLY)
Some(PartitionedIndexedRDD(rdd, p.envelopes))
case None =>
val rdd = rawRDD.mapPartitions { iter =>
val engine = new GeoCQEngineDataStore(indexGeom)
engine.createSchema(SimpleFeatureTypes.createType(typeName, encodedSft))
engine.namesToEngine.get(typeName).insert(iter.toList)
Iterator.single(engine)
}
rdd.persist(StorageLevel.MEMORY_ONLY)
Some(IndexedRDD(rdd))
}
}
GeoMesaRelation(sqlContext, sft, schema, params, None, cached, partitioned)
}
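  // Usage sketch (illustrative Spark SQL options; the format name and data store
  // parameters depend on the deployment):
  //   spark.read.format("geomesa")
  //     .option("geomesa.feature", "gdelt")   // required: GEOMESA_SQL_FEATURE
  //     .option("spatial", "true").option("strategy", "rtree")
  //     .load()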
/**
* Holder for a partitioning scheme
*
* @param rdd partitioned rdd
* @param raw underlying unpartitioned rdd
* @param envelopes envelopes used in partitioning
* @param partitions hint for number of partitions
* @param cover cover partitions or not when joining
*/
case class PartitionedRDD(
rdd: RDD[(Int, Iterable[SimpleFeature])],
raw: SpatialRDD,
envelopes: List[Envelope],
partitions: Option[Int],
cover: Boolean
)
/**
* Trait for cached RDDs used to accelerate scans
*/
sealed trait CachedRDD
/**
* An RDD where each element is a spatial index containing multiple features
*
* @param rdd indexed features
*/
case class IndexedRDD(rdd: RDD[GeoCQEngineDataStore]) extends CachedRDD
/**
* An RDD where each element is a spatial index containing multiple features, partitioned by
* a spatial grid
*
* @param rdd grid cell -> indexed features
* @param envelopes envelopes corresponding the each grid cell
*/
case class PartitionedIndexedRDD(rdd: RDD[(Int, GeoCQEngineDataStore)], envelopes: List[Envelope])
extends CachedRDD
}
|
aheyne/geomesa
|
geomesa-spark/geomesa-spark-sql/src/main/scala/org/locationtech/geomesa/spark/GeoMesaRelation.scala
|
Scala
|
apache-2.0
| 13,324
|
package mesosphere.marathon
package tasks
import mesosphere.UnitTest
import mesosphere.marathon.core.condition.Condition
import mesosphere.marathon.core.instance.update.InstanceUpdateOperation
import mesosphere.marathon.core.instance.{Instance, LocalVolumeId, TestInstanceBuilder}
import mesosphere.marathon.core.launcher.impl.InstanceOpFactoryImpl
import mesosphere.marathon.core.launcher.{InstanceOp, InstanceOpFactory, OfferMatchResult}
import mesosphere.marathon.core.pod.{MesosContainer, PodDefinition}
import mesosphere.marathon.core.task.Task
import mesosphere.marathon.core.task.state.{AgentTestDefaults, NetworkInfo}
import mesosphere.marathon.core.task.tracker.InstanceTracker
import mesosphere.marathon.metrics.Metrics
import mesosphere.marathon.metrics.dummy.DummyMetrics
import mesosphere.marathon.raml.Resources
import mesosphere.marathon.state._
import mesosphere.marathon.stream.Implicits._
import mesosphere.marathon.test.{MarathonTestHelper, SettableClock}
import mesosphere.mesos.protos.Implicits.slaveIDToProto
import mesosphere.mesos.protos.SlaveID
import org.scalatest.Inside
import scala.collection.immutable.Seq
class InstanceOpFactoryImplTest extends UnitTest with Inside {
"InstanceOpFactoryImpl" should {
"Copy SlaveID from Offer to Task" in {
val f = new Fixture
val appId = PathId("/test")
val offer = MarathonTestHelper.makeBasicOffer()
.setHostname(f.defaultHostName)
.setSlaveId(SlaveID("some slave ID"))
.build()
val instance = TestInstanceBuilder.newBuilderWithLaunchedTask(appId, f.clock.now()).getInstance()
val app: AppDefinition = AppDefinition(id = appId, portDefinitions = List())
val scheduledInstance = Instance.scheduled(app, Instance.Id.forRunSpec(appId))
val runningInstances = Map(instance.instanceId -> instance)
val request = InstanceOpFactory.Request(offer, runningInstances, scheduledInstances = NonEmptyIterable(scheduledInstance))
val matchResult = f.instanceOpFactory.matchOfferRequest(request)
val matched = inside(matchResult) {
case matched: OfferMatchResult.Match => matched
}
val expectedTaskId = Task.Id(scheduledInstance.instanceId)
val expectedTask = Task(
taskId = expectedTaskId,
runSpecVersion = app.version,
status = Task.Status(
stagedAt = f.clock.now(),
condition = Condition.Provisioned,
networkInfo = NetworkInfo(
f.defaultHostName,
hostPorts = Nil,
ipAddresses = Nil
)
)
)
val expectedAgentInfo = Instance.AgentInfo(
host = f.defaultHostName,
agentId = Some(offer.getSlaveId.getValue),
region = None,
zone = None,
attributes = Vector.empty
)
val expectedState = instance.state.copy(condition = Condition.Provisioned)
val provisionOp = InstanceUpdateOperation.Provision(expectedTaskId.instanceId, expectedAgentInfo, app, Map(expectedTaskId -> expectedTask), expectedState.since)
matched.instanceOp.stateOp should be(provisionOp)
}
"Normal app -> Launch" in {
Given("A normal app, a normal offer and no tasks")
val f = new Fixture
val app = f.normalApp
val offer = f.offer
When("We infer the taskOp")
val request = InstanceOpFactory.Request(offer, Map.empty, scheduledInstances = NonEmptyIterable(Instance.scheduled(app)))
val matchResult = f.instanceOpFactory.matchOfferRequest(request)
Then("A Match with Launch is inferred")
inside(matchResult) {
case mr: OfferMatchResult.Match =>
mr.instanceOp shouldBe an[InstanceOp.LaunchTask]
}
}
"Resident app -> ReserveAndCreateVolumes succeeds" in {
Given("A resident app, a normal offer and no tasks")
val f = new Fixture
val app = f.residentApp
val offer = f.offerWithSpaceForLocalVolume
When("We infer the taskOp")
val request = InstanceOpFactory.Request(offer, Map.empty, scheduledInstances = NonEmptyIterable(Instance.scheduled(app)))
val matchResult = f.instanceOpFactory.matchOfferRequest(request)
Then("A Match with ReserveAndCreateVolumes is returned")
inside(matchResult) {
case mr: OfferMatchResult.Match =>
mr.instanceOp shouldBe an[InstanceOp.ReserveAndCreateVolumes]
}
}
"Resident pod -> ReserveAndCreateVolumes succeeds" in {
Given("A resident pod, a normal offer and no tasks")
val f = new Fixture
val pod = f.residentPod
val offer = f.offerWithSpaceForLocalVolume
When("We infer the taskOp")
val request = InstanceOpFactory.Request(offer, Map.empty, scheduledInstances = NonEmptyIterable(Instance.scheduled(pod)))
val matchResult = f.instanceOpFactory.matchOfferRequest(request)
Then("A Match with ReserveAndCreateVolumes is returned")
inside(matchResult) {
case mr: OfferMatchResult.Match =>
mr.instanceOp shouldBe an[InstanceOp.ReserveAndCreateVolumes]
}
}
"Resident app -> Launch succeeds" in {
Given("A resident app, an offer with persistent volumes and a matching task")
val f = new Fixture
val app = f.residentApp.copy(instances = 2)
val localVolumeIdLaunched = LocalVolumeId(app.id, "persistent-volume-launched", "uuidLaunched")
val localVolumeIdUnwanted = LocalVolumeId(app.id, "persistent-volume-unwanted", "uuidUnwanted")
val localVolumeIdMatch = LocalVolumeId(app.id, "persistent-volume", "uuidMatch")
val reservedInstance = f.scheduledReservedInstance(app.id, localVolumeIdMatch)
val reservedTaskId = Task.Id(reservedInstance.instanceId)
val offer = f.offerWithVolumes(
reservedTaskId, localVolumeIdLaunched, localVolumeIdUnwanted, localVolumeIdMatch
)
val runningInstances = Instance.instancesById(Seq(
f.residentLaunchedInstance(app.id, localVolumeIdLaunched)))
When("We infer the taskOp")
val request = InstanceOpFactory.Request(offer, runningInstances, scheduledInstances = NonEmptyIterable(reservedInstance))
val matchResult = f.instanceOpFactory.matchOfferRequest(request)
Then("A Match with a Launch is returned")
val matched = inside(matchResult) {
case matched: OfferMatchResult.Match =>
matched.instanceOp shouldBe an[InstanceOp.LaunchTask]
matched
}
And("the taskInfo contains the correct persistent volume")
val taskInfoResources = matched.instanceOp.offerOperations.head.getLaunch.getTaskInfos(0).getResourcesList
val found = taskInfoResources.find { resource =>
resource.hasDisk && resource.getDisk.hasPersistence &&
resource.getDisk.getPersistence.getId == localVolumeIdMatch.idString
}
found should not be empty
}
// There are times when an agent gets a new agentId after a reboot. There might have been a task using
// reservations and a persistent volume on agent-1 in the past. When agent-1 is rebooted and loses
// the task, Marathon might see the resources offered from agent-2 in the future - if the agent has
// been re-registered with that new ID. In order to report correct AgentInfo, the AgentInfo needs to be recreated
// each time we launch on an existing reservation.
"update the agentInfo based on the used offer" in {
val f = new Fixture
val app = f.residentApp
val volumeId = LocalVolumeId(app.id, "/path", "uuid1")
val existingReservedInstance = f.scheduledReservedInstance(app.id, volumeId)
val taskId = Task.Id(existingReservedInstance.instanceId)
val updatedHostName = "updatedHostName"
val updatedAgentId = "updatedAgentId"
val offer = f.offerWithVolumes(taskId, updatedHostName, updatedAgentId, volumeId)
val request = InstanceOpFactory.Request(offer, Map.empty, scheduledInstances = NonEmptyIterable(existingReservedInstance))
val result = f.instanceOpFactory.matchOfferRequest(request)
inside(result) {
case m: OfferMatchResult.Match =>
inside(m.instanceOp) {
case launchTask: InstanceOp.LaunchTask =>
inside(launchTask.stateOp) {
case provision: InstanceUpdateOperation.Provision =>
provision.agentInfo.host shouldBe updatedHostName
provision.agentInfo.agentId shouldBe Some(updatedAgentId)
}
}
}
}
}
class Fixture {
import mesosphere.marathon.test.{MarathonTestHelper => MTH}
val instanceTracker = mock[InstanceTracker]
val config: MarathonConf = MTH.defaultConfig(mesosRole = Some("test"))
implicit val clock = new SettableClock()
val metrics: Metrics = DummyMetrics
val instanceOpFactory: InstanceOpFactory = new InstanceOpFactoryImpl(metrics, config)
val defaultHostName = AgentTestDefaults.defaultHostName
val defaultAgentId = AgentTestDefaults.defaultAgentId
def normalApp = MTH.makeBasicApp()
def residentApp = MTH.appWithPersistentVolume()
def residentPod = PodDefinition(
PathId("/test-pod"),
containers = Seq(MesosContainer(
name = "first",
resources = Resources(cpus = 1.0, mem = 64.0, disk = 1.0),
volumeMounts = Seq(VolumeMount(volumeName = Some("pst"), mountPath = "persistent-volume")))
),
volumes = Seq(PersistentVolume(name = Some("pst"), persistent = PersistentVolumeInfo(10)))
)
def scheduledReservedInstance(appId: PathId, volumeIds: LocalVolumeId*) =
TestInstanceBuilder.scheduledWithReservation(residentApp, Seq(volumeIds: _*))
def residentLaunchedInstance(appId: PathId, volumeIds: LocalVolumeId*) =
TestInstanceBuilder.newBuilder(appId).addTaskResidentLaunched(Seq(volumeIds: _*)).getInstance()
def offer = MTH.makeBasicOffer().build()
def offerWithSpaceForLocalVolume = MTH.makeBasicOffer(disk = 1025).build()
def insufficientOffer = MTH.makeBasicOffer(cpus = 0.01, mem = 1, disk = 0.01, beginPort = 31000, endPort = 31001).build()
def offerWithVolumes(taskId: Task.Id, localVolumeIds: LocalVolumeId*) =
MTH.offerWithVolumes(taskId, defaultHostName, defaultAgentId, localVolumeIds: _*)
def offerWithVolumes(taskId: Task.Id, hostname: String, agentId: String, localVolumeIds: LocalVolumeId*) =
MTH.offerWithVolumes(taskId, hostname, agentId, localVolumeIds: _*)
}
}
|
gsantovena/marathon
|
src/test/scala/mesosphere/marathon/tasks/InstanceOpFactoryImplTest.scala
|
Scala
|
apache-2.0
| 10,439
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import java.nio.charset.StandardCharsets
import java.sql.{Date, Timestamp}
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.util.{DateFormatter, DateTimeUtils, TimestampFormatter}
import org.apache.spark.sql.catalyst.util.IntervalUtils._
import org.apache.spark.sql.execution.command.{DescribeCommandBase, ExecutedCommandExec, ShowTablesCommand}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.IntervalStyle._
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.CalendarInterval
/**
* Runs a query returning the result in Hive compatible form.
*/
object HiveResult {
/**
* Returns the result as a hive compatible sequence of strings. This is used in tests and
* `SparkSQLDriver` for CLI applications.
*/
def hiveResultString(executedPlan: SparkPlan): Seq[String] = executedPlan match {
case ExecutedCommandExec(_: DescribeCommandBase) =>
// If it is a describe command for a Hive table, we want the output format
// to be similar to Hive's.
executedPlan.executeCollectPublic().map {
case Row(name: String, dataType: String, comment) =>
Seq(name, dataType,
Option(comment.asInstanceOf[String]).getOrElse(""))
.map(s => String.format(s"%-20s", s))
.mkString("\t")
}
// SHOW TABLES in Hive only outputs table names, while ours outputs database, table name and isTemp.
case command @ ExecutedCommandExec(s: ShowTablesCommand) if !s.isExtended =>
command.executeCollect().map(_.getString(1))
case other =>
val result: Seq[Seq[Any]] = other.executeCollectPublic().map(_.toSeq).toSeq
// We need the types so we can output struct field names
val types = executedPlan.output.map(_.dataType)
// Reformat to match hive tab delimited output.
result.map(_.zip(types).map(e => toHiveString(e)))
.map(_.mkString("\t"))
}
private lazy val zoneId = DateTimeUtils.getZoneId(SQLConf.get.sessionLocalTimeZone)
private lazy val dateFormatter = DateFormatter(zoneId)
private lazy val timestampFormatter = TimestampFormatter.getFractionFormatter(zoneId)
/** Formats a datum (based on the given data type) and returns the string representation. */
def toHiveString(a: (Any, DataType), nested: Boolean = false): String = a match {
case (null, _) => if (nested) "null" else "NULL"
case (b, BooleanType) => b.toString
case (d: Date, DateType) => dateFormatter.format(DateTimeUtils.fromJavaDate(d))
case (t: Timestamp, TimestampType) =>
timestampFormatter.format(DateTimeUtils.fromJavaTimestamp(t))
case (bin: Array[Byte], BinaryType) => new String(bin, StandardCharsets.UTF_8)
case (decimal: java.math.BigDecimal, DecimalType()) => decimal.toPlainString
case (n, _: NumericType) => n.toString
case (s: String, StringType) => if (nested) "\"" + s + "\"" else s
case (interval: CalendarInterval, CalendarIntervalType) =>
SQLConf.get.intervalOutputStyle match {
case SQL_STANDARD => toSqlStandardString(interval)
case ISO_8601 => toIso8601String(interval)
case MULTI_UNITS => toMultiUnitsString(interval)
}
case (seq: Seq[_], ArrayType(typ, _)) =>
seq.map(v => (v, typ)).map(e => toHiveString(e, true)).mkString("[", ",", "]")
case (m: Map[_, _], MapType(kType, vType, _)) =>
m.map { case (key, value) =>
toHiveString((key, kType), true) + ":" + toHiveString((value, vType), true)
}.toSeq.sorted.mkString("{", ",", "}")
case (struct: Row, StructType(fields)) =>
struct.toSeq.zip(fields).map { case (v, t) =>
s""""${t.name}":${toHiveString((v, t.dataType), true)}"""
}.mkString("{", ",", "}")
case (other, _: UserDefinedType[_]) => other.toString
}
}
|
jkbradley/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/HiveResult.scala
|
Scala
|
apache-2.0
| 4,656
|
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.3
* @date Mon Oct 10 16:42:21 EDT 2016
* @see LICENSE (MIT style license file).
*
* @see romisatriawahono.net/lecture/dm/paper/clustering/
* Garcia%20-%20K-means%20algorithms%20for%20functional%20data%20-%202015.pdf
*/
package scalation.analytics.fda
import scalation.analytics.clusterer.{Clusterer, KMeansClusterer}
import scalation.linalgebra.{MatrixD, VectorD}
import scalation.util.banner
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `KMeansClustering_F` class provides a simple form of k-means clustering
 *  that smooths the data and then applies `KMeansClusterer`.
* @param x the vectors/points to be clustered stored as rows of a matrix
* @param t the time points
* @param τ the time points for knots
* @param k the number of clusters to make
* @param s the random number stream (to vary the clusters made)
*/
class KMeansClustering_F (x: MatrixD, t: VectorD, τ: VectorD, k: Int, s: Int = 0)
extends Clusterer
{
private val DEBUG = true // debug flag
private val xs = new MatrixD (x.dim1, x.dim2) // smoothed version of data matrix
private var cl: KMeansClusterer = null // holds the clustering algorithm
/** Stub implementations for the abstract members of `Clusterer` that
 *  `KMeansClustering_F` does not yet implement.
 */
def centroids(): scalation.linalgebra.MatrixD = ???
def csize(): scalation.linalgebra.VectorI = ???
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Create 'k' clusters consisting of points/rows that are closest to each other.
*/
def cluster (): Array [Int] =
{
smooth () // smooth the data
if (DEBUG) println ("xs = " + xs)
cl = new KMeansClusterer (xs, k, s) // use classical k-means
cl.cluster () // create the clusters
} // cluster
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Given a new point/vector 'y', determine which cluster it belongs to,
* i.e., the cluster whose centroid it is closest to.
* @param y the vector to classify
*/
def classify (y: VectorD): Int =
{
cl.classify (y)
} // classify
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Smooth the data: For each row of matrix 'x', create a smoother version
* and store it in matrix 'xs'.
*/
private def smooth ()
{
for (i <- x.range1) { // for each vector/row in matrix x
// val moo = new Smoothing_F (x(i), t, t.dim-3)
val moo = new Smoothing_F (x(i), t, τ)
val c = moo.train ()
if (DEBUG) println ("c = " + c)
for (j <- x.range2) xs(i, j) = moo.predict (t(j))
} // for
} // smooth
} // KMeansClustering_F class
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `KMeansClustering_FTest` object is used to test the `KMeansClustering_F`
* class.
* > run-main scalation.analytics.fda.KMeansClustering_FTest
*/
object KMeansClustering_FTest extends App
{
val x = new MatrixD ((6, 6), 1.0, 2.0, 2.0, 1.0, 2.0, 1.0, // FIX: need larger example
2.0, 1.0, 1.0, 2.0, 1.0, 2.0,
5.0, 4.0, 4.0, 5.0, 4.0, 5.0,
4.0, 5.0, 5.0, 4.0, 5.0, 4.0,
9.0, 8.0, 8.0, 9.0, 8.0, 9.0,
8.0, 9.0, 9.0, 8.0, 9.0, 8.0)
val t = VectorD.range (0, 6) / 2.0
val y = new VectorD (6); y.set (10.0)
println ("x = " + x)
println ("t = " + t)
println ("y = " + y)
println ("----------------------------------------------------")
for (s <- 0 to 4) { // test with different random streams
banner ("KMeansClustering_F for stream s = " + s)
val cl = new KMeansClustering_F (x, t, t, 3, s)
println ("--- final cluster = " + cl.cluster ().deep + "\\n")
println ("--- classify " + y + " = " + cl.classify (y) + "\\n")
} // for
} // KMeansClustering_FTest object
|
scalation/fda
|
scalation_1.3/scalation_modeling/src/main/scala/scalation/analytics/fda/KMeansClustering_F.scala
|
Scala
|
mit
| 4,479
|
/*
** Copyright [2013-2016] [Megam Systems]
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/
package models.base
import scalaz._
import Scalaz._
import scalaz.effect.IO
import scalaz.EitherT._
import scalaz.Validation
import scalaz.Validation.FlatMap._
import scalaz.NonEmptyList._
import cache._
import db._
import io.megam.auth.funnel.FunnelErrors._
import controllers.Constants._
import com.stackmob.scaliak._
import io.megam.auth.stack.AccountResult
import io.megam.common.riak.GunnySack
import io.megam.common.uid.UID
import io.megam.util.Time
import net.liftweb.json._
import net.liftweb.json.scalaz.JsonScalaz._
import java.nio.charset.Charset
import com.basho.riak.client.core.query.indexes.{ RiakIndexes, StringBinIndex, LongIntIndex }
import com.basho.riak.client.core.util.{ Constants => RiakConstants }
/**
* @author rajthilak
* authority
*
*/
case class AccountInput(first_name: String, last_name: String, phone: String, email: String, api_key: String, password: String, authority: String, password_reset_key: String, password_reset_sent_at: String) {
val json = "{\"first_name\":\"" + first_name + "\",\"last_name\":\"" + last_name + "\",\"phone\":\"" + phone + "\",\"email\":\"" + email + "\",\"api_key\":\"" + api_key + "\",\"password\":\"" + password + "\",\"authority\":\"" + authority + "\",\"password_reset_key\":\"" + password_reset_key + "\",\"password_reset_sent_at\":\"" + password_reset_sent_at + "\"}"
}
case class updateAccountInput(id: String, first_name: String, last_name: String, phone: String, email: String, api_key: String, password: String, authority: String, password_reset_key: String, password_reset_sent_at: String, created_at: String) {
val json = "{\"id\":\"" + id + "\",\"first_name\":\"" + first_name + "\",\"last_name\":\"" + last_name + "\",\"phone\":\"" + phone + "\",\"email\":\"" + email + "\",\"api_key\":\"" + api_key + "\",\"password\":\"" + password + "\",\"authority\":\"" + authority + "\",\"password_reset_key\":\"" + password_reset_key + "\",\"password_reset_sent_at\":\"" + password_reset_sent_at + "\",\"created_at\":\"" + created_at + "\"}"
}
object Accounts {
val metadataKey = "ACC"
val metadataVal = "1002"
val bindex = "accountId"
implicit val formats = DefaultFormats
private val riak = GWRiak("accounts")
/**
* A private method which chains computations to build a GunnySack from an input JSON string.
* The JSON is parsed and converted to an account input; if there is an error during parsing,
* a MalformedBodyError is sent back. On success, a new account id is generated,
* and if that succeeds the GunnySack object is yielded.
*/
private def mkGunnySack(input: String): ValidationNel[Throwable, Option[GunnySack]] = {
val accountInput: ValidationNel[Throwable, AccountInput] = (Validation.fromTryCatchThrowable[AccountInput,Throwable] {
parse(input).extract[AccountInput]
} leftMap { t: Throwable => new MalformedBodyError(input, t.getMessage) }).toValidationNel //capture failure
for {
m <- accountInput
uid <- (UID("act").get leftMap { ut: NonEmptyList[Throwable] => ut })
// org <- models.team.Organizations.create(m.email, OrganizationsInput(DEFAULT_ORG_NAME).json)
} yield {
val bvalue = Set(uid.get._1 + uid.get._2)
val json = AccountResult(uid.get._1 + uid.get._2, m.first_name, m.last_name, m.phone, m.email, m.api_key, m.password, m.authority, m.password_reset_key, m.password_reset_sent_at, Time.now.toString).toJson(false)
new GunnySack(m.email, json, RiakConstants.CTYPE_TEXT_UTF8, None,
Map(metadataKey -> metadataVal), Map((bindex, bvalue))).some
}
}
/*
* Create a new account item with the 'name' of the item provided as input.
* An index named accountId will point to the "accounts" bucket.
*/
def create(input: String): ValidationNel[Throwable, Option[AccountResult]] = {
(mkGunnySack(input) leftMap { err: NonEmptyList[Throwable] =>
new ServiceUnavailableError(input, (err.list.map(m => m.getMessage)).mkString("\n"))
}).toValidationNel.flatMap { gs: Option[GunnySack] =>
(riak.store(gs.get) leftMap { t: NonEmptyList[Throwable] => t }).
flatMap { maybeGS: Option[GunnySack] =>
maybeGS match {
case Some(thatGS) => (parse(thatGS.value).extract[AccountResult].some).successNel[Throwable]
case None => {
play.api.Logger.warn(("%s%s%-20s%s").format(Console.GREEN, Console.BOLD,"Account.created success",Console.RESET))
(parse(gs.get.value).extract[AccountResult].some).successNel[Throwable];
}
}
}
}
}
/**
* Parse the input body first; if it is valid, process it. Otherwise send back a bad
* return code saying the body contains invalid characters, along with the message received.
* If there is an error in the snowflake connection, an error is sent back as well.
*/
private def updateGunnySack(email: String, input: String): ValidationNel[Throwable, Option[GunnySack]] = {
val ripNel: ValidationNel[Throwable, updateAccountInput] = (Validation.fromTryCatchThrowable[updateAccountInput,Throwable] {
parse(input).extract[updateAccountInput]
} leftMap { t: Throwable => new MalformedBodyError(input, t.getMessage) }).toValidationNel //capture failure
for {
rip <- ripNel
aor <- (Accounts.findByEmail(email) leftMap { t: NonEmptyList[Throwable] => t })
} yield {
val bvalue = Set(aor.get.id)
val json = AccountResult(NilorNot(rip.id, aor.get.id), NilorNot(rip.first_name, aor.get.first_name), NilorNot(rip.last_name, aor.get.last_name), NilorNot(rip.phone, aor.get.phone), NilorNot(rip.email, aor.get.email), NilorNot(rip.api_key, aor.get.api_key), NilorNot(rip.password, aor.get.password), NilorNot(rip.authority, aor.get.authority), NilorNot(rip.password_reset_key, aor.get.password_reset_key), NilorNot(rip.password_reset_sent_at, aor.get.password_reset_sent_at), NilorNot(rip.created_at, aor.get.created_at)).toJson(false)
new GunnySack((email), json, RiakConstants.CTYPE_TEXT_UTF8, None,
Map(metadataKey -> metadataVal), Map((bindex, bvalue))).some
}
}
  def NilorNot(rip: String, aor: String): String =
    if (rip == null) aor else rip
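  // Illustration (not in the original source) of the fallback behaviour:
  //   NilorNot(null, "old@example.com")              == "old@example.com"  (keeps the stored value)
  //   NilorNot("new@example.com", "old@example.com") == "new@example.com"  (takes the update)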
def updateAccount(email: String, input: String): ValidationNel[Throwable, Option[AccountResult]] = {
(updateGunnySack(email, input) leftMap { err: NonEmptyList[Throwable] =>
new ServiceUnavailableError(input, (err.list.map(m => m.getMessage)).mkString("\n"))
}).toValidationNel.flatMap { gs: Option[GunnySack] =>
(riak.store(gs.get) leftMap { t: NonEmptyList[Throwable] => t }).
flatMap { maybeGS: Option[GunnySack] =>
maybeGS match {
case Some(thatGS) => (parse(thatGS.value).extract[AccountResult].some).successNel[Throwable]
case None => {
play.api.Logger.warn(("%s%s%-20s%s").format(Console.GREEN, Console.BOLD,"Account.updated success",Console.RESET))
(parse(gs.get.value).extract[AccountResult].some).successNel[Throwable];
}
}
}
}
}
/**
* Performs a fetch from the Riak bucket. If there is an error, a ServiceUnavailableError is sent back.
* Otherwise, if there is a GunnySack value, it is parsed; on a parsing error, a ResourceItemNotFound
* error is sent back. When there is no GunnySack value (None), a ResourceItemNotFound failure is returned.
*/
def findByEmail(email: String): ValidationNel[Throwable, Option[AccountResult]] = {
InMemory[ValidationNel[Throwable, Option[AccountResult]]]({
name: String =>
{
play.api.Logger.debug(("%-20s -->[%s]").format("InMemory", email))
(riak.fetch(email) leftMap { t: NonEmptyList[Throwable] =>
new ServiceUnavailableError(email, (t.list.map(m => m.getMessage)).mkString("\n"))
}).toValidationNel.flatMap { xso: Option[GunnySack] =>
xso match {
case Some(xs) => {
(Validation.fromTryCatchThrowable[io.megam.auth.stack.AccountResult,Throwable] {
// initiate_default_cloud(email)
parse(xs.value).extract[AccountResult]
} leftMap { t: Throwable =>
new ResourceItemNotFound(email, t.getMessage)
}).toValidationNel.flatMap { j: AccountResult =>
Validation.success[Throwable, Option[AccountResult]](j.some).toValidationNel
}
}
case None => Validation.failure[Throwable, Option[AccountResult]](new ResourceItemNotFound(email, "")).toValidationNel
}
}
}
}).get(email).eval(InMemoryCache[ValidationNel[Throwable, Option[AccountResult]]]())
}
/**
* Find by the accounts id.
*/
def findByAccountsId(id: String): ValidationNel[Throwable, Option[AccountResult]] = {
val metadataKey = "Field"
val metadataVal = "1002"
val bindex = ""
val bvalue = Set("")
val fetchValue = riak.fetchIndexByValue(new GunnySack("accountId", id,
RiakConstants.CTYPE_TEXT_UTF8, None, Map(metadataKey -> metadataVal), Map((bindex, bvalue))))
fetchValue match {
case Success(msg) => {
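        // Note: this assumes the secondary-index lookup returns exactly one key;
        // an empty or multi-element list would throw a MatchError in the match below.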
val key = msg match {
case List(x) => x
}
findByEmail(key)
}
case Failure(err) => Validation.failure[Throwable, Option[AccountResult]](
new ServiceUnavailableError(id, (err.list.map(m => m.getMessage)).mkString("\n"))).toValidationNel
}
}
implicit val sedimentAccountEmail = new Sedimenter[ValidationNel[Throwable, Option[AccountResult]]] {
def sediment(maybeASediment: ValidationNel[Throwable, Option[AccountResult]]): Boolean = {
val notSed = maybeASediment.isSuccess
notSed
}
}
}
|
meglytics/bidi
|
app/models/base/Accounts.scala
|
Scala
|
mit
| 10,452
|
package models
import org.specs2.mutable._
import java.sql.Connection
import java.time.Instant
import anorm._
import anorm.SqlParser
import play.api.test._
import play.api.test.Helpers._
import java.util.Locale
import com.ruimo.scoins.Scoping._
import helpers.QueryString
import helpers.{CategoryIdSearchCondition, CategoryCodeSearchCondition}
import play.api.Application
import play.api.inject.guice.GuiceApplicationBuilder
import helpers.InjectorSupport
import play.api.db.Database
class ItemSpec extends Specification with InjectorSupport {
def date(s: String): Instant = Instant.ofEpochMilli(java.sql.Date.valueOf(s).getTime)
"Item" should {
"List item when empty." in {
implicit val app: Application = GuiceApplicationBuilder().configure(inMemoryDatabase()).build()
val localeInfo = inject[LocaleInfoRepo]
inject[Database].withConnection { implicit conn =>
inject[ItemRepo].listBySiteId(siteId = 1, locale = localeInfo.Ja, queryString = "foo") === List()
}
}
"Item name." in {
implicit val app: Application = GuiceApplicationBuilder().configure(inMemoryDatabase()).build()
val localeInfo = inject[LocaleInfoRepo]
inject[Database].withConnection { implicit conn =>
val cat1 = inject[CategoryRepo].createNew(
Map(localeInfo.Ja -> "植木", localeInfo.En -> "Plant")
)
val item1 = inject[ItemRepo].createNew(cat1)
val names = inject[ItemNameRepo].createNew(item1, Map(localeInfo.Ja -> "杉", localeInfo.En -> "Cedar"))
names.size === 2
names(localeInfo.Ja) === ItemName(localeInfo.Ja.id, item1.id.get, "杉")
names(localeInfo.En) === ItemName(localeInfo.En.id, item1.id.get, "Cedar")
val map = inject[ItemNameRepo].list(item1)
map.size === 2
map(localeInfo.Ja) === ItemName(localeInfo.Ja.id, item1.id.get, "杉")
map(localeInfo.En) === ItemName(localeInfo.En.id, item1.id.get, "Cedar")
}
}
"item price." in {
implicit val app: Application = GuiceApplicationBuilder().configure(inMemoryDatabase()).build()
val localeInfo = inject[LocaleInfoRepo]
inject[Database].withConnection { implicit conn =>
val cat1 = inject[CategoryRepo].createNew(
Map(localeInfo.Ja -> "植木", localeInfo.En -> "Plant")
)
val site1 = inject[SiteRepo].createNew(localeInfo.Ja, "商店1")
val item1 = inject[ItemRepo].createNew(cat1)
inject[ItemPriceRepo].get(site1, item1) === None
val price1 = inject[ItemPriceRepo].createNew(item1, site1)
val saved1 = inject[ItemPriceRepo].get(site1, item1).get
saved1 === price1
}
}
"Can get item price history." in {
implicit val app: Application = GuiceApplicationBuilder().configure(inMemoryDatabase()).build()
val localeInfo = inject[LocaleInfoRepo]
inject[Database].withConnection { implicit conn =>
val cat1 = inject[CategoryRepo].createNew(
Map(localeInfo.Ja -> "植木", localeInfo.En -> "Plant")
)
val site1 = inject[SiteRepo].createNew(localeInfo.Ja, "商店1")
val item1 = inject[ItemRepo].createNew(cat1)
val price1 = inject[ItemPriceRepo].createNew(item1, site1)
val tax = inject[TaxRepo].createNew
val currencyInfo = inject[CurrencyRegistry]
inject[ItemPriceHistoryRepo].createNew(
price1, tax, currencyInfo.Jpy, BigDecimal(100), None, BigDecimal(90), date("2013-01-02")
)
inject[ItemPriceHistoryRepo].createNew(
price1, tax, currencyInfo.Jpy, BigDecimal(200), None, BigDecimal(190), date("9999-12-31")
)
inject[ItemPriceHistoryRepo].at(price1.id.get, date("2013-01-01")).unitPrice === BigDecimal(100)
inject[ItemPriceHistoryRepo].at(price1.id.get, date("2013-01-02")).unitPrice === BigDecimal(200)
inject[ItemPriceHistoryRepo].at(price1.id.get, date("2013-01-03")).unitPrice === BigDecimal(200)
}
}
"Can get metadata" in {
implicit val app: Application = GuiceApplicationBuilder().configure(inMemoryDatabase()).build()
val localeInfo = inject[LocaleInfoRepo]
inject[Database].withConnection { implicit conn =>
val cat1 = inject[CategoryRepo].createNew(
Map(localeInfo.Ja -> "植木", localeInfo.En -> "Plant")
)
val item1 = inject[ItemRepo].createNew(cat1)
val item2 = inject[ItemRepo].createNew(cat1)
ItemNumericMetadata.createNew(item1, ItemNumericMetadataType.HEIGHT, 100)
ItemNumericMetadata.createNew(item2, ItemNumericMetadataType.HEIGHT, 1000)
ItemNumericMetadata(item1, ItemNumericMetadataType.HEIGHT).metadata === 100
ItemNumericMetadata(item2, ItemNumericMetadataType.HEIGHT).metadata === 1000
}
}
"Can create site item text metadata" in {
implicit val app: Application = GuiceApplicationBuilder().configure(inMemoryDatabase()).build()
val localeInfo = inject[LocaleInfoRepo]
inject[Database].withConnection { implicit conn =>
val cat1 = inject[CategoryRepo].createNew(
Map(localeInfo.Ja -> "植木", localeInfo.En -> "Plant")
)
val item1 = inject[ItemRepo].createNew(cat1)
val site1 = inject[SiteRepo].createNew(localeInfo.Ja, "商店1")
val site2 = inject[SiteRepo].createNew(localeInfo.Ja, "商店2")
SiteItemTextMetadata.createNew(site1.id.get, item1.id.get, SiteItemTextMetadataType.PRICE_MEMO, "MEMO01")
SiteItemTextMetadata.createNew(site2.id.get, item1.id.get, SiteItemTextMetadataType.PRICE_MEMO, "MEMO02")
SiteItemTextMetadata(site1.id.get, item1.id.get, SiteItemTextMetadataType.PRICE_MEMO).metadata === "MEMO01"
SiteItemTextMetadata(site2.id.get, item1.id.get, SiteItemTextMetadataType.PRICE_MEMO).metadata === "MEMO02"
}
}
"Can get all metadata at once" in {
implicit val app: Application = GuiceApplicationBuilder().configure(inMemoryDatabase()).build()
val localeInfo = inject[LocaleInfoRepo]
inject[Database].withConnection { implicit conn =>
val cat1 = inject[CategoryRepo].createNew(
Map(localeInfo.Ja -> "植木", localeInfo.En -> "Plant")
)
val item1 = inject[ItemRepo].createNew(cat1)
val item2 = inject[ItemRepo].createNew(cat1)
ItemNumericMetadata.createNew(item1, ItemNumericMetadataType.HEIGHT, 100)
ItemNumericMetadata.createNew(item2, ItemNumericMetadataType.HEIGHT, 1000)
val map1 = ItemNumericMetadata.all(item1)
map1.size === 1
map1(ItemNumericMetadataType.HEIGHT).metadata === 100
val map2 = ItemNumericMetadata.all(item2)
map2.size === 1
map2(ItemNumericMetadataType.HEIGHT).metadata === 1000
}
}
case class CreatedRecords(
category1: Category, category2: Category
)
def storeItems(tax: Tax, site1: Site, site2: Site)(implicit app: Application, conn: Connection): CreatedRecords = {
val localeInfo = inject[LocaleInfoRepo]
val currencyInfo = inject[CurrencyRegistry]
inject[Database].withConnection { implicit conn =>
val cat1 = inject[CategoryRepo].createNew(
Map(localeInfo.Ja -> "植木", localeInfo.En -> "Plant")
)
val cat2 = inject[CategoryRepo].createNew(
Map(localeInfo.Ja -> "果樹", localeInfo.En -> "Fruit")
)
val item1 = inject[ItemRepo].createNew(cat1)
val item2 = inject[ItemRepo].createNew(cat2)
val item3 = inject[ItemRepo].createNew(cat1)
val item4 = inject[ItemRepo].createNew(cat2)
val item5 = inject[ItemRepo].createNew(cat1)
inject[ItemNameRepo].createNew(item1, Map(localeInfo.Ja -> "杉", localeInfo.En -> "Cedar"))
inject[ItemNameRepo].createNew(item2, Map(localeInfo.Ja -> "梅", localeInfo.En -> "Ume"))
inject[ItemNameRepo].createNew(item3, Map(localeInfo.Ja -> "桜", localeInfo.En -> "Cherry"))
inject[ItemNameRepo].createNew(item4, Map(localeInfo.Ja -> "桃", localeInfo.En -> "Peach"))
inject[ItemNameRepo].createNew(item5, Map(localeInfo.Ja -> "もみじ", localeInfo.En -> "Maple"))
inject[SiteItemRepo].createNew(site1, item1)
inject[SiteItemRepo].createNew(site1, item3)
inject[SiteItemRepo].createNew(site1, item5)
inject[SiteItemRepo].createNew(site2, item2)
inject[SiteItemRepo].createNew(site2, item4)
inject[ItemDescriptionRepo].createNew(item1, site1, "杉説明")
inject[ItemDescriptionRepo].createNew(item2, site2, "梅説明")
inject[ItemDescriptionRepo].createNew(item3, site1, "桜説明")
inject[ItemDescriptionRepo].createNew(item4, site2, "桃説明")
inject[ItemDescriptionRepo].createNew(item5, site1, "もみじ説明")
val price1 = inject[ItemPriceRepo].createNew(item1, site1)
val price2 = inject[ItemPriceRepo].createNew(item2, site2)
val price3 = inject[ItemPriceRepo].createNew(item3, site1)
val price4 = inject[ItemPriceRepo].createNew(item4, site2)
val price5 = inject[ItemPriceRepo].createNew(item5, site1)
inject[ItemPriceHistoryRepo].createNew(
price1, tax, currencyInfo.Jpy, BigDecimal(100), None, BigDecimal(90), date("2013-01-02")
)
inject[ItemPriceHistoryRepo].createNew(
price1, tax, currencyInfo.Jpy, BigDecimal(101), None, BigDecimal(89), date("9999-12-31")
)
inject[ItemPriceHistoryRepo].createNew(
price2, tax, currencyInfo.Jpy, BigDecimal(300), None, BigDecimal(290), date("2013-01-03")
)
inject[ItemPriceHistoryRepo].createNew(
price2, tax, currencyInfo.Jpy, BigDecimal(301), None, BigDecimal(291), date("9999-12-31")
)
inject[ItemPriceHistoryRepo].createNew(
price3, tax, currencyInfo.Jpy, BigDecimal(500), None, BigDecimal(480), date("2013-01-04")
)
inject[ItemPriceHistoryRepo].createNew(
price3, tax, currencyInfo.Jpy, BigDecimal(501), None, BigDecimal(481), date("9999-12-31")
)
inject[ItemPriceHistoryRepo].createNew(
price4, tax, currencyInfo.Jpy, BigDecimal(1200), None, BigDecimal(1100), date("2013-01-05")
)
inject[ItemPriceHistoryRepo].createNew(
price4, tax, currencyInfo.Jpy, BigDecimal(1201), None, BigDecimal(1101), date("9999-12-31")
)
inject[ItemPriceHistoryRepo].createNew(
price5, tax, currencyInfo.Jpy, BigDecimal(2000), None, BigDecimal(1900), date("2013-01-06")
)
inject[ItemPriceHistoryRepo].createNew(
price5, tax, currencyInfo.Jpy, BigDecimal(2001), None, BigDecimal(1901), date("9999-12-31")
)
val height1 = ItemNumericMetadata.createNew(item1, ItemNumericMetadataType.HEIGHT, 100)
val height2 = ItemNumericMetadata.createNew(item2, ItemNumericMetadataType.HEIGHT, 200)
val height3 = ItemNumericMetadata.createNew(item3, ItemNumericMetadataType.HEIGHT, 300)
val height4 = ItemNumericMetadata.createNew(item4, ItemNumericMetadataType.HEIGHT, 400)
val height5 = ItemNumericMetadata.createNew(item5, ItemNumericMetadataType.HEIGHT, 500)
CreatedRecords(cat1, cat2)
}
}
"List item by site." in {
implicit val app: Application = GuiceApplicationBuilder().configure(inMemoryDatabase()).build()
val localeInfo = inject[LocaleInfoRepo]
val currencyInfo = inject[CurrencyRegistry]
inject[Database].withConnection { implicit conn =>
val tax = inject[TaxRepo].createNew
val site1 = inject[SiteRepo].createNew(localeInfo.Ja, "商店1")
val site2 = inject[SiteRepo].createNew(localeInfo.Ja, "商店2")
storeItems(tax, site1, site2)
val time = date("2013-01-04")
val list1 = inject[ItemRepo].listBySite(site1, localeInfo.Ja, "", now = time)
list1.size === 3
list1(0)._2.name === "もみじ"
list1(1)._2.name === "杉"
list1(2)._2.name === "桜"
list1(0)._3.description === "もみじ説明"
list1(1)._3.description === "杉説明"
list1(2)._3.description === "桜説明"
list1(0)._5.taxId === tax.id.get
list1(0)._5.currency === currencyInfo.Jpy
list1(0)._5.unitPrice === BigDecimal(2000)
list1(1)._5.taxId === tax.id.get
list1(1)._5.currency === currencyInfo.Jpy
list1(1)._5.unitPrice === BigDecimal(101)
list1(2)._5.taxId === tax.id.get
list1(2)._5.currency === currencyInfo.Jpy
list1(2)._5.unitPrice === BigDecimal(501)
list1(0)._6(ItemNumericMetadataType.HEIGHT).metadata === 500
list1(1)._6(ItemNumericMetadataType.HEIGHT).metadata === 100
list1(2)._6(ItemNumericMetadataType.HEIGHT).metadata === 300
}
}
"List item by category." in {
implicit val app: Application = GuiceApplicationBuilder().configure(inMemoryDatabase()).build()
val localeInfo = inject[LocaleInfoRepo]
val currencyInfo = inject[CurrencyRegistry]
inject[Database].withConnection { implicit conn =>
val tax = inject[TaxRepo].createNew
val site1 = inject[SiteRepo].createNew(localeInfo.Ja, "商店1")
val cat1: Category = inject[CategoryRepo].createNew(Map(localeInfo.Ja -> "植木", localeInfo.En -> "Plant"))
val cat2 = inject[CategoryRepo].createNew(parent = Some(cat1), names = Map(localeInfo.Ja -> "果樹", localeInfo.En -> "Fruit"))
val item1 = inject[ItemRepo].createNew(cat1)
val item2 = inject[ItemRepo].createNew(cat2)
inject[ItemNameRepo].createNew(item1, Map(localeInfo.Ja -> "杉", localeInfo.En -> "Cedar"))
inject[ItemNameRepo].createNew(item2, Map(localeInfo.Ja -> "梅", localeInfo.En -> "Ume"))
inject[SiteItemRepo].createNew(site1, item1)
inject[SiteItemRepo].createNew(site1, item2)
inject[ItemDescriptionRepo].createNew(item1, site1, "杉説明")
inject[ItemDescriptionRepo].createNew(item2, site1, "梅説明")
val price1 = inject[ItemPriceRepo].createNew(item1, site1)
val price2 = inject[ItemPriceRepo].createNew(item2, site1)
inject[ItemPriceHistoryRepo].createNew(
price1, tax, currencyInfo.Jpy, BigDecimal(101), None, BigDecimal(89), date("9999-12-31")
)
inject[ItemPriceHistoryRepo].createNew(
price2, tax, currencyInfo.Jpy, BigDecimal(301), None, BigDecimal(291), date("9999-12-31")
)
// Since cat2 is a child of cat1, both item1 and item2 will be shown.
val list1 = inject[ItemRepo].list(
locale = localeInfo.Ja, queryString = QueryString(), category = CategoryIdSearchCondition(cat1.id.get)
)
doWith(list1.records) { recs =>
recs.size === 2
recs(0)._2.name === "杉"
recs(1)._2.name === "梅"
}
}
}
"List item by category with supplemental category." in {
implicit val app: Application = GuiceApplicationBuilder().configure(inMemoryDatabase()).build()
val localeInfo = inject[LocaleInfoRepo]
val currencyInfo = inject[CurrencyRegistry]
inject[Database].withConnection { implicit conn =>
val tax = inject[TaxRepo].createNew
val site1 = inject[SiteRepo].createNew(localeInfo.Ja, "商店1")
val cat0 = inject[CategoryRepo].createNew(Map(localeInfo.Ja -> "樹木", localeInfo.En -> "Tree"))
val cat1 = inject[CategoryRepo].createNew(Some(cat0), Map(localeInfo.Ja -> "植木", localeInfo.En -> "Plant"))
val cat2 = inject[CategoryRepo].createNew(Some(cat0), Map(localeInfo.Ja -> "果樹", localeInfo.En -> "Fruit"))
val cat3 = inject[CategoryRepo].createNew(Some(cat0), Map(localeInfo.Ja -> "盆栽", localeInfo.En -> "Bonsai"))
// item1: cat1, cat2
// item2: cat2
// item3: cat1
// item4: cat3
val item1 = inject[ItemRepo].createNew(cat1)
val item2 = inject[ItemRepo].createNew(cat2)
val item3 = inject[ItemRepo].createNew(cat1)
val item4 = inject[ItemRepo].createNew(cat3)
inject[SupplementalCategoryRepo].createNew(item1.id.get, cat2.id.get)
inject[ItemNameRepo].createNew(item1, Map(localeInfo.Ja -> "杉", localeInfo.En -> "Cedar"))
inject[ItemNameRepo].createNew(item2, Map(localeInfo.Ja -> "梅", localeInfo.En -> "Ume"))
inject[ItemNameRepo].createNew(item3, Map(localeInfo.Ja -> "松", localeInfo.En -> "Pine"))
inject[ItemNameRepo].createNew(item4, Map(localeInfo.Ja -> "もみじ", localeInfo.En -> "Maple"))
inject[SiteItemRepo].createNew(site1, item1)
inject[SiteItemRepo].createNew(site1, item2)
inject[SiteItemRepo].createNew(site1, item3)
inject[SiteItemRepo].createNew(site1, item4)
inject[ItemDescriptionRepo].createNew(item1, site1, "杉説明")
inject[ItemDescriptionRepo].createNew(item2, site1, "梅説明")
inject[ItemDescriptionRepo].createNew(item3, site1, "松説明")
inject[ItemDescriptionRepo].createNew(item4, site1, "もみじ説明")
val price1 = inject[ItemPriceRepo].createNew(item1, site1)
val price2 = inject[ItemPriceRepo].createNew(item2, site1)
val price3 = inject[ItemPriceRepo].createNew(item3, site1)
val price4 = inject[ItemPriceRepo].createNew(item4, site1)
inject[ItemPriceHistoryRepo].createNew(
price1, tax, currencyInfo.Jpy, BigDecimal(101), None, BigDecimal(89), date("9999-12-31")
)
inject[ItemPriceHistoryRepo].createNew(
price2, tax, currencyInfo.Jpy, BigDecimal(301), None, BigDecimal(291), date("9999-12-31")
)
inject[ItemPriceHistoryRepo].createNew(
price3, tax, currencyInfo.Jpy, BigDecimal(401), None, BigDecimal(391), date("9999-12-31")
)
inject[ItemPriceHistoryRepo].createNew(
price4, tax, currencyInfo.Jpy, BigDecimal(501), None, BigDecimal(491), date("9999-12-31")
)
doWith(
inject[ItemRepo].list(
locale = localeInfo.Ja, queryString = QueryString(), category = CategoryIdSearchCondition(cat2.id.get)
).records
) { recs =>
recs.size === 2
recs(0)._2.name === "杉"
recs(1)._2.name === "梅"
}
doWith(
inject[ItemRepo].list(
locale = localeInfo.Ja, queryString = QueryString(), category = CategoryIdSearchCondition(cat1.id.get)
).records
) { recs =>
recs.size === 2
recs(0)._2.name === "杉"
recs(1)._2.name === "松"
}
doWith(
inject[ItemRepo].list(
locale = localeInfo.Ja, queryString = QueryString(), category = CategoryIdSearchCondition(cat3.id.get)
).records
) { recs =>
recs.size === 1
recs(0)._2.name === "もみじ"
}
// cat1 or cat3
doWith(
inject[ItemRepo].list(
locale = localeInfo.Ja, queryString = QueryString(),
category = CategoryIdSearchCondition(cat1.id.get + "," + cat3.id.get)
).records
) { recs =>
recs.size === 3
recs(0)._2.name === "もみじ"
recs(1)._2.name === "杉"
recs(2)._2.name === "松"
}
// cat1 or cat3 & cat2
doWith(
inject[ItemRepo].list(
locale = localeInfo.Ja, queryString = QueryString(),
category = CategoryIdSearchCondition(cat1.id.get + "," + cat3.id.get + "&" + cat2.id.get)
).records
) { recs =>
recs.size === 1
recs(0)._2.name === "杉"
}
}
}
"List item." in {
implicit val app: Application = GuiceApplicationBuilder().configure(inMemoryDatabase()).build()
val localeInfo = inject[LocaleInfoRepo]
val currencyInfo = inject[CurrencyRegistry]
inject[Database].withConnection { implicit conn =>
val tax = inject[TaxRepo].createNew
val site1 = inject[SiteRepo].createNew(localeInfo.Ja, "商店1")
val site2 = inject[SiteRepo].createNew(localeInfo.Ja, "商店2")
val createdRecords = storeItems(tax, site1, site2)
val time = date("2013-01-04")
doWith(inject[ItemRepo].list(None, localeInfo.Ja, QueryString(), now = time)) { pages =>
pages.pageCount === 1
pages.currentPage === 0
pages.pageSize === 10
val list1 = pages.records
list1.size === 5
list1(0)._2.name === "もみじ"
list1(1)._2.name === "杉"
list1(2)._2.name === "桃"
list1(3)._2.name === "桜"
list1(4)._2.name === "梅"
list1(0)._3.description === "もみじ説明"
list1(1)._3.description === "杉説明"
list1(2)._3.description === "桃説明"
list1(3)._3.description === "桜説明"
list1(4)._3.description === "梅説明"
list1(0)._5.taxId === tax.id.get
list1(0)._5.currency === currencyInfo.Jpy
list1(0)._5.unitPrice === BigDecimal(2000)
list1(1)._5.taxId === tax.id.get
list1(1)._5.currency === currencyInfo.Jpy
list1(1)._5.unitPrice === BigDecimal(101)
list1(2)._5.taxId === tax.id.get
list1(2)._5.currency === currencyInfo.Jpy
list1(2)._5.unitPrice === BigDecimal(1200)
list1(3)._5.taxId === tax.id.get
list1(3)._5.currency === currencyInfo.Jpy
list1(3)._5.unitPrice === BigDecimal(501)
list1(4)._5.taxId === tax.id.get
list1(4)._5.currency === currencyInfo.Jpy
list1(4)._5.unitPrice === BigDecimal(301)
list1(0)._6(ItemNumericMetadataType.HEIGHT).metadata === 500
list1(1)._6(ItemNumericMetadataType.HEIGHT).metadata === 100
list1(2)._6(ItemNumericMetadataType.HEIGHT).metadata === 400
list1(3)._6(ItemNumericMetadataType.HEIGHT).metadata === 300
list1(4)._6(ItemNumericMetadataType.HEIGHT).metadata === 200
}
// Specify category
doWith(
inject[ItemRepo].list(None, localeInfo.Ja, QueryString(), CategoryIdSearchCondition(createdRecords.category1.id.get), now = time)
) { pages =>
pages.pageCount === 1
pages.currentPage === 0
pages.pageSize === 10
val list1 = pages.records
list1.size === 3
list1(0)._2.name === "もみじ"
list1(1)._2.name === "杉"
list1(2)._2.name === "桜"
list1(0)._3.description === "もみじ説明"
list1(1)._3.description === "杉説明"
list1(2)._3.description === "桜説明"
list1(0)._5.taxId === tax.id.get
list1(0)._5.currency === currencyInfo.Jpy
list1(0)._5.unitPrice === BigDecimal(2000)
list1(1)._5.taxId === tax.id.get
list1(1)._5.currency === currencyInfo.Jpy
list1(1)._5.unitPrice === BigDecimal(101)
list1(2)._5.taxId === tax.id.get
list1(2)._5.currency === currencyInfo.Jpy
list1(2)._5.unitPrice === BigDecimal(501)
list1(0)._6(ItemNumericMetadataType.HEIGHT).metadata === 500
list1(1)._6(ItemNumericMetadataType.HEIGHT).metadata === 100
list1(2)._6(ItemNumericMetadataType.HEIGHT).metadata === 300
}
// Specify site
doWith(
inject[ItemRepo].list(
None, localeInfo.Ja, QueryString(),
CategoryIdSearchCondition.Null, CategoryCodeSearchCondition.Null ,Some(site1.id.get), now = time
)
) { pages =>
pages.pageCount === 1
pages.currentPage === 0
pages.pageSize === 10
val list1 = pages.records
list1.size === 3
list1(0)._2.name === "もみじ"
list1(1)._2.name === "杉"
list1(2)._2.name === "桜"
list1(0)._3.description === "もみじ説明"
list1(1)._3.description === "杉説明"
list1(2)._3.description === "桜説明"
list1(0)._5.taxId === tax.id.get
list1(0)._5.currency === currencyInfo.Jpy
list1(0)._5.unitPrice === BigDecimal(2000)
list1(1)._5.taxId === tax.id.get
list1(1)._5.currency === currencyInfo.Jpy
list1(1)._5.unitPrice === BigDecimal(101)
list1(2)._5.taxId === tax.id.get
list1(2)._5.currency === currencyInfo.Jpy
list1(2)._5.unitPrice === BigDecimal(501)
list1(0)._6(ItemNumericMetadataType.HEIGHT).metadata === 500
list1(1)._6(ItemNumericMetadataType.HEIGHT).metadata === 100
list1(2)._6(ItemNumericMetadataType.HEIGHT).metadata === 300
}
}
}
"List item for maintenance." in {
implicit val app: Application = GuiceApplicationBuilder().configure(inMemoryDatabase()).build()
val localeInfo = inject[LocaleInfoRepo]
val currencyInfo = inject[CurrencyRegistry]
inject[Database].withConnection { implicit conn =>
val tax = inject[TaxRepo].createNew
val site1 = inject[SiteRepo].createNew(localeInfo.Ja, "商店1")
val site2 = inject[SiteRepo].createNew(localeInfo.Ja, "商店2")
storeItems(tax, site1, site2)
val time = date("2013-01-04")
val pages = inject[ItemRepo].listForMaintenance(
siteUser = None,
locale = localeInfo.Ja,
queryString = QueryString(),
now = time
)
pages.pageCount === 1
pages.currentPage === 0
pages.pageSize === 10
val list1 = pages.records
list1.size === 5
list1(0)._2.get.name === "もみじ"
list1(1)._2.get.name === "杉"
list1(2)._2.get.name === "桃"
list1(3)._2.get.name === "桜"
list1(4)._2.get.name === "梅"
list1(0)._3.get.description === "もみじ説明"
list1(1)._3.get.description === "杉説明"
list1(2)._3.get.description === "桃説明"
list1(3)._3.get.description === "桜説明"
list1(4)._3.get.description === "梅説明"
doWith(list1(0)._5) { optPriceHistory =>
optPriceHistory.get.taxId === tax.id.get
optPriceHistory.get.currency === currencyInfo.Jpy
optPriceHistory.get.unitPrice === BigDecimal(2000)
}
doWith(list1(1)._5) { optPriceHistory =>
optPriceHistory.get.taxId === tax.id.get
optPriceHistory.get.currency === currencyInfo.Jpy
optPriceHistory.get.unitPrice === BigDecimal(101)
}
doWith(list1(2)._5) { optPriceHistory =>
optPriceHistory.get.taxId === tax.id.get
optPriceHistory.get.currency === currencyInfo.Jpy
optPriceHistory.get.unitPrice === BigDecimal(1200)
}
doWith(list1(3)._5) { optPriceHistory =>
optPriceHistory.get.taxId === tax.id.get
optPriceHistory.get.currency === currencyInfo.Jpy
optPriceHistory.get.unitPrice === BigDecimal(501)
}
doWith(list1(4)._5) { optPriceHistory =>
optPriceHistory.get.taxId === tax.id.get
optPriceHistory.get.currency === currencyInfo.Jpy
optPriceHistory.get.unitPrice === BigDecimal(301)
}
list1(0)._6(ItemNumericMetadataType.HEIGHT).metadata === 500
list1(1)._6(ItemNumericMetadataType.HEIGHT).metadata === 100
list1(2)._6(ItemNumericMetadataType.HEIGHT).metadata === 400
list1(3)._6(ItemNumericMetadataType.HEIGHT).metadata === 300
list1(4)._6(ItemNumericMetadataType.HEIGHT).metadata === 200
doWith(
inject[ItemRepo].listForMaintenance(
siteUser = None,
locale = localeInfo.En,
queryString = QueryString(),
now = time
)
) { pages =>
pages.pageCount === 1
pages.currentPage === 0
pages.pageSize === 10
doWith(pages.records) { list =>
list.size === 5
list(0)._2.get.name === "Cedar"
list(1)._2.get.name === "Cherry"
list(2)._2.get.name === "Maple"
list(3)._2.get.name === "Peach"
list(4)._2.get.name === "Ume"
list(0)._3 === None
list(1)._3 === None
list(2)._3 === None
list(3)._3 === None
list(4)._3 === None
doWith(list(0)._5) { optPriceHistory =>
optPriceHistory.get.taxId === tax.id.get
optPriceHistory.get.currency === currencyInfo.Jpy
optPriceHistory.get.unitPrice === BigDecimal(101)
}
doWith(list(1)._5) { optPriceHistory =>
optPriceHistory.get.taxId === tax.id.get
optPriceHistory.get.currency === currencyInfo.Jpy
optPriceHistory.get.unitPrice === BigDecimal(501)
}
doWith(list(2)._5) { optPriceHistory =>
optPriceHistory.get.taxId === tax.id.get
optPriceHistory.get.currency === currencyInfo.Jpy
optPriceHistory.get.unitPrice === BigDecimal(2000)
}
doWith(list(3)._5) { optPriceHistory =>
optPriceHistory.get.taxId === tax.id.get
optPriceHistory.get.currency === currencyInfo.Jpy
optPriceHistory.get.unitPrice === BigDecimal(1200)
}
doWith(list(4)._5) { optPriceHistory =>
optPriceHistory.get.taxId === tax.id.get
optPriceHistory.get.currency === currencyInfo.Jpy
optPriceHistory.get.unitPrice === BigDecimal(301)
}
list(0)._6(ItemNumericMetadataType.HEIGHT).metadata === 100
list(1)._6(ItemNumericMetadataType.HEIGHT).metadata === 300
list(2)._6(ItemNumericMetadataType.HEIGHT).metadata === 500
list(3)._6(ItemNumericMetadataType.HEIGHT).metadata === 400
list(4)._6(ItemNumericMetadataType.HEIGHT).metadata === 200
}
}
}
}
"Can create sql for item query." in {
implicit val app: Application = GuiceApplicationBuilder().configure(inMemoryDatabase()).build()
val localeInfo = inject[LocaleInfoRepo]
val currencyInfo = inject[CurrencyRegistry]
inject[ItemRepo].createQueryConditionSql(
QueryString(List("Hello", "World")), CategoryIdSearchCondition.Null, CategoryCodeSearchCondition.Null, None
) ===
"and (item_name.item_name like {query0} or item_description.description like {query0}) " +
"and (item_name.item_name like {query1} or item_description.description like {query1}) "
inject[ItemRepo].createQueryConditionSql(
QueryString(List("Hello", "World")), CategoryIdSearchCondition(123L), CategoryCodeSearchCondition.Null, None
) ===
"and (item_name.item_name like {query0} or item_description.description like {query0}) " +
"and (item_name.item_name like {query1} or item_description.description like {query1}) " +
"""
and (
item.category_id in (
select descendant from category_path where ancestor in (123)
)
or exists (
select descendant from category_path
where ancestor in (
select category_id from supplemental_category where item_id = item.item_id
)
and descendant in (123)
)
)
"""
inject[ItemRepo].createQueryConditionSql(
QueryString(List()), CategoryIdSearchCondition(123L), CategoryCodeSearchCondition.Null, None
) ===
"""
and (
item.category_id in (
select descendant from category_path where ancestor in (123)
)
or exists (
select descendant from category_path
where ancestor in (
select category_id from supplemental_category where item_id = item.item_id
)
and descendant in (123)
)
)
"""
inject[ItemRepo].createQueryConditionSql(
QueryString(List()), CategoryIdSearchCondition.Null, CategoryCodeSearchCondition.Null, Some(234L)
) ===
"and site.site_id = 234 "
}
"Can get ite information from site id and item id." in {
implicit val app: Application = GuiceApplicationBuilder().configure(inMemoryDatabase()).build()
val localeInfo = inject[LocaleInfoRepo]
val currencyInfo = inject[CurrencyRegistry]
inject[Database].withConnection { implicit conn =>
val startTime = System.currentTimeMillis
val site1 = inject[SiteRepo].createNew(localeInfo.Ja, "商店1")
val site2 = inject[SiteRepo].createNew(localeInfo.Ja, "商店2")
val cat1 = inject[CategoryRepo].createNew(Map(localeInfo.Ja -> "植木", localeInfo.En -> "Plant"))
val cat2 = inject[CategoryRepo].createNew(Map(localeInfo.Ja -> "果樹", localeInfo.En -> "Fruit"))
val item1 = inject[ItemRepo].createNew(cat1)
val item2 = inject[ItemRepo].createNew(cat2)
val name1 = inject[ItemNameRepo].createNew(item1, Map(localeInfo.Ja -> "杉", localeInfo.En -> "Cedar"))
val name2 = inject[ItemNameRepo].createNew(item2, Map(localeInfo.Ja -> "桃", localeInfo.En -> "Peach"))
val siteItem1 = inject[SiteItemRepo].createNew(site1, item1)
val siteItem2 = inject[SiteItemRepo].createNew(site1, item2)
inject[SiteItemRepo].createNew(site2, item1)
doWith(inject[SiteItemRepo].getWithSiteAndItem(site1.id.get, item1.id.get, localeInfo.Ja).get) { rec =>
rec._1 === site1
rec._2 === name1(localeInfo.Ja)
}
doWith(inject[SiteItemRepo].getWithSiteAndItem(site1.id.get, item1.id.get, localeInfo.En).get) { rec =>
rec._1 === site1
rec._2 === name1(localeInfo.En)
}
doWith(inject[SiteItemRepo].getWithSiteAndItem(site1.id.get, item2.id.get, localeInfo.Ja).get) { rec =>
rec._1 === site1
rec._2 === name2(localeInfo.Ja)
}
doWith(inject[SiteItemRepo].getWithSiteAndItem(site2.id.get, item1.id.get, localeInfo.Ja).get) { rec =>
rec._1 === site2
rec._2 === name1(localeInfo.Ja)
}
inject[SiteItemRepo].getWithSiteAndItem(site2.id.get, item2.id.get, localeInfo.Ja) === None
val currentTime = System.currentTimeMillis
doWith(inject[SiteItemRepo].list(item1.id.get)) { tbl =>
tbl.size === 2
tbl(0)._2.itemId.id === item1.id.get.id
tbl(0)._2.created.toEpochMilli must be_>=(startTime)
tbl(0)._2.created.toEpochMilli must be_<=(currentTime)
tbl(1)._2.itemId.id === item1.id.get.id
tbl(1)._2.created.toEpochMilli must be_>=(startTime)
tbl(1)._2.created.toEpochMilli must be_<=(currentTime)
}
}
}
}
}
|
ruimo/store2
|
test/models/ItemSpec.scala
|
Scala
|
apache-2.0
| 35,195
|
/* Copyright 2009-2016 EPFL, Lausanne */
package leon.unit.purescala
import leon.test._
import leon.purescala.Common._
import leon.purescala.Expressions._
import leon.purescala.Definitions._
import leon.purescala.Types._
import leon.purescala.TypeOps._
class TypeOpsSuite extends LeonTestSuite with helpers.WithLikelyEq with helpers.ExpressionsDSL {
test("type bounds") { ctx =>
val tp = TypeParameter.fresh("T")
val tpD = new TypeParameterDef(tp)
val tp2 = TypeParameter.fresh("A")
val tp3 = TypeParameter.fresh("B")
val listD = new AbstractClassDef(FreshIdentifier("List"), Seq(tpD), None)
val listT = listD.typed
val nilD = new CaseClassDef(FreshIdentifier("Nil"), Seq(tpD), Some(listT), false)
val nilT = nilD.typed
val consD = new CaseClassDef(FreshIdentifier("Cons"), Seq(tpD), Some(listT), false)
val consT = consD.typed
assert(isSubtypeOf(tp, tp), "T <: T")
assert(isSubtypeOf(listT, listT), "List[T] <: List[T]")
assert(isSubtypeOf(listD.typed, listD.typed), "List[T] <: List[T]")
assert(isSubtypeOf(nilT, listT), "Subtypes are subtypes")
assert(isSubtypeOf(consT, listT), "Subtypes are subtypes")
assert(!isSubtypeOf(listT, nilT ), "Supertypes are not subtypes")
assert(!isSubtypeOf(listT, consT), "Supertypes are not subtypes")
assert(!isSubtypeOf(nilD.typed(Seq(tp2)), listD.typed(Seq(tp3))), "Types are not subtypes with incompatible params")
assert(!isSubtypeOf(nilD.typed(Seq(tp2)), listD.typed(Seq(IntegerType))), "Types are not subtypes with incompatible params")
assert(!isSubtypeOf(SetType(tp2), SetType(tp3)), "Types are not subtypes with incompatible params")
assert(!isSubtypeOf(nilD.typed(Seq(nilT)), listD.typed(Seq(listT))), "Classes are invariant")
assert(!isSubtypeOf(SetType(nilT), SetType(listT)), "Sets are invariant")
assert(isSubtypeOf(FunctionType(Seq(listT), nilT), FunctionType(Seq(nilT), listT)), "Functions have contravariant params/ covariant result")
assert(!typesCompatible(tp2, tp3), "Different types should be incompatible")
assert(!typesCompatible(BooleanType, tp3), "Different types should be incompatible")
assert(!typesCompatible(tp2, BooleanType), "Different types should be incompatible")
assert(!typesCompatible(IntegerType, Int32Type), "Different types should be incompatible")
// Type parameters
assert(unify(tp, tp2, Seq(tp) ).isDefined, "T and A unify with T free")
assert(unify(tp, tp2, Seq(tp2)).isDefined, "T and A unify with A free")
assert(unify(listT, listD.typed(Seq(tp2)), Seq(tp) ).isDefined, "List[T] <: List[A] with T free")
assert(unify(listT, listD.typed(Seq(tp2)), Seq(tp2)).isDefined, "List[T] <: List[A] with A free")
assert(unify(listT, listD.typed(Seq(tp2)), Seq() ).isEmpty, "List[T] !<: List[A] with A,T not free")
assert(unify(listT, nilT, Seq(tp) ).isEmpty, "Subtypes not unifiable")
assert(
{
val s = unify(MapType(IntegerType, tp), MapType(tp2, IntegerType), Seq(tp, tp2)).getOrElse(Nil)
s.contains(tp -> IntegerType) && s.contains(tp2 -> IntegerType)
},
"MapType unifiable"
)
assert(
instantiation_>:(listD.typed(Seq(tp2)), consD.typed(Seq(tp))) contains Map(tp2 -> tp),
"List[A] >: Cons[T] under A -> T"
)
assert(
instantiation_>:(listD.typed(Seq(tp2)), consD.typed(Seq(IntegerType))) contains Map(tp2 -> IntegerType),
"List[A] >: Cons[BigInt] under A -> BigInt"
)
assert(
instantiation_<:(consD.typed(Seq(tp)), listD.typed(Seq(tp2))) contains Map(tp -> tp2),
"Cons[T] <: List[A] under T -> A"
)
assert(
instantiation_<:(consD.typed(Seq(IntegerType)), listD.typed(Seq(tp2))).isEmpty,
"Cons[BigInt] cannot be instantiated so that it is <: List[A]"
)
assert(
instantiation_<:(
TupleType(Seq(nilT, consT)),
TupleType(Seq(listD.typed(Seq(tp2)), listD.typed(Seq(tp2))))
).contains(Map(tp -> tp2)),
"Covariant tuples"
)
assert(
instantiation_<:(TupleType(Seq(IntegerType, Int32Type)), TupleType(Seq(IntegerType, Int32Type, IntegerType))).isEmpty,
"Incompatible tuples"
)
assert(
instantiation_<:(
MapType(consT, IntegerType),
MapType(listT, IntegerType)
).isEmpty,
"Invariant maps"
)
assert(
instantiation_<:(
MapType(tp, IntegerType),
MapType(tp2, IntegerType)
).contains(Map(tp -> tp2)),
"Instantiation within map type"
)
assert(
instantiation_<:(
FunctionType(Seq(listT, listT), nilT),
FunctionType(Seq(consD.typed(Seq(tp2)), nilD.typed(Seq(tp2))), nilD.typed(Seq(tp2)))
).contains(Map(tp -> tp2)),
"Covariant/ Contravariant function types"
)
// (List[A], A, List[A]) >: (List[List[BigInt]], Cons[BigInt], Nil[List[BigInt]])))
// for A -> List[BigInt]
assert(
instantiation_>:(
TupleType(Seq(listT, tp, listT)),
TupleType(Seq(
listD.typed(Seq(listD.typed(Seq(IntegerType)))),
consD.typed(Seq(IntegerType)),
nilD.typed(Seq(listD.typed(Seq(IntegerType))))
))
).contains(Map(tp -> listD.typed(Seq(IntegerType)))),
"Complex example"
)
}
test("instantiateType Hole") { ctx =>
val tp1 = TypeParameter.fresh("a")
val tp2 = TypeParameter.fresh("b")
val e1 = Hole(tp1, Nil)
val e2 = instantiateType(e1, Map(tp1 -> tp2), Map())
e2 match {
case Hole(tp, _) => assert(tp == tp2, "Type should have been substituted")
case _ => fail("Incorrect expr shape, should be a Hole")
}
}
}
|
epfl-lara/leon
|
src/test/scala/leon/unit/purescala/TypeOpsSuite.scala
|
Scala
|
gpl-3.0
| 5,809
|
package org.bitcoins.dlc.wallet.internal
import org.bitcoins.core.api.dlc.wallet.db.IncomingDLCOfferDb
import org.bitcoins.core.protocol.tlv.DLCOfferTLV
import org.bitcoins.crypto.Sha256Digest
import org.bitcoins.dlc.wallet.DLCWallet
import org.bitcoins.dlc.wallet.models.IncomingDLCOfferDbHelper
import scala.concurrent.Future
trait IncomingDLCOffersHandling { self: DLCWallet =>
def registerIncomingDLCOffer(
offerTLV: DLCOfferTLV,
peer: Option[String],
message: Option[String]): Future[Sha256Digest] = {
val dbo = IncomingDLCOfferDbHelper.fromTLV(offerTLV = offerTLV,
peer = peer,
message = message)
for {
added <- dlcWalletDAOs.incomingDLCOfferDAO.create(dbo)
_ <- dlcConfig.walletCallbacks.executeOnDLCOfferAdd(logger, added)
} yield dbo.hash
}
def rejectIncomingDLCOffer(offerHash: Sha256Digest): Future[Unit] = {
for {
_ <- dlcWalletDAOs.incomingDLCOfferDAO.delete(offerHash)
_ <- dlcConfig.walletCallbacks.executeOnDLCOfferRemove(logger, offerHash)
} yield ()
}
def listIncomingDLCOffers(): Future[Vector[IncomingDLCOfferDb]] = {
dlcWalletDAOs.incomingDLCOfferDAO.findAll()
}
def findIncomingDLCOffer(
offerHash: Sha256Digest): Future[Option[IncomingDLCOfferDb]] = {
dlcWalletDAOs.incomingDLCOfferDAO.find(offerHash)
}
}
|
bitcoin-s/bitcoin-s
|
dlc-wallet/src/main/scala/org/bitcoins/dlc/wallet/internal/IncomingDLCOffersHandling.scala
|
Scala
|
mit
| 1,422
|
package com.abdulradi.redikka.test
import org.scalatest._
import com.abdulradi.redikka.test.clients.Client
abstract class Spec(config: TestConfig) extends WordSpec with Matchers {
def r = config.r
def subject = config.subject
}
case class TestConfig(r: Client, subject: String)
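// 6379 is the standard Redis port; 9736 is presumably the port Redikka listens on in these tests.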
object Redis {
def via(client: Int => Client) =
TestConfig(client(6379), "Redis")
}
object Redikka {
def via(client: Int => Client) =
TestConfig(client(9736), "Redikka")
}
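// A minimal usage sketch (an assumption, not part of the original file): a concrete
// spec picks a backend by passing a client factory, e.g. with a hypothetical TcpClient:
//
//   class PingSpec extends Spec(Redis.via(port => new TcpClient(port))) {
//     s"$subject" should { "reply to PING" in { /* exercise r here */ } }
//   }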
|
tabdulradi/redikka
|
redikka-tests/src/test/scala/Spec.scala
|
Scala
|
mit
| 474
|
import com.github.nscala_time.time.Imports._
import org.joda.time.DateMidnight
object Prob19 {
  // daysOfMonth(1900)(0) is the number of days in January 1900
def daysOfMonth(year: Int) = Vector(31,
28 + (if(leapYear(year)) 1 else 0),
31, 30, 31, 30, 31, 31, 30, 31, 30, 31)
def leapYear(year: Int): Boolean = year match {
case y if y % 400 == 0 => true
case y if y % 100 == 0 => false
case y if y % 4 == 0 => true
case _ => false
}
  // yearOfDays(0) is the number of days in the year 1900
val yearOfDays: Stream[Int] = Stream.from(1900).map { year => daysOfMonth(year).sum }
  // headWeekdayOfYear(n) is the weekday of January 1st of year 1900 + n; 0 is Monday
val headWeekdayOfYear: Stream[Int] = 0 #:: headWeekdayOfYear.zip(yearOfDays).map {
case (before, day) =>
(before + day) % 7
}
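  // headWeekdayOfMonth(year)(m) is the weekday of the 1st of month m + 1 in the given year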
def headWeekdayOfMonth(year: Int): Stream[Int] = {
val headOfYear = headWeekdayOfYear(year - 1900)
lazy val result: Stream[Int] = headOfYear #:: result.zip(daysOfMonth(year)).map {
case (before, day) => (before + day) % 7
}
result
}
def test() {
for {
year <- (1901 to 2000).par
month <- 1 to 12
joda = new DateMidnight(year, month, 1)
my = headWeekdayOfMonth(year)(month - 1) + 1
} {
assert(joda.getDayOfWeek == my, "Error: year=%d, month=%d".format(year, month))
}
}
def main(args: Array[String]) {
assert(leapYear(1904))
assert(daysOfMonth(1900).sum == 365)
assert(daysOfMonth(1904).sum == 366)
assert(daysOfMonth(2000).sum == 366)
assert(headWeekdayOfMonth(1900).head == 0)
assert(headWeekdayOfYear(4) == 4)
assert(headWeekdayOfYear(1) == 1)
test()
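    // Count the months in 1901..2000 whose first day is a Sunday (6, since 0 is Monday),
    // then cross-check against Joda-Time, where getDayOfWeek == 7 means Sunday.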
val headWeekdayOfMonths = for {
year <- 1901 to 2000
headOfMonth <- headWeekdayOfMonth(year)
} yield headOfMonth == 6
println(headWeekdayOfMonths.count(identity))
val results = for {
year <- 1901 to 2000
month <- 1 to 12
joda = new DateMidnight(year, month, 1)
} yield joda.getDayOfWeek == 7
println(results.count(identity))
}
}
|
ponkotuy/ProjectEular
|
src/main/scala/Prob19.scala
|
Scala
|
mit
| 1,981
|
package im.actor.server.cli
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import com.typesafe.config.ConfigFactory
import im.actor.config.ActorConfig
import im.actor.server.db.DbExtension
import sql.migration.V20151108011300__FillUserSequence
import scala.concurrent._
final class MigrationHandlers {
def userSequence(): Future[Unit] = {
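    // Run in a throw-away actor system: no cluster seed nodes and a random local
    // port (0) keep this CLI migration isolated from any running server.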
val config = ActorConfig.load(ConfigFactory.parseString(
"""
|akka {
| cluster.seed-nodes = []
| remote {
| netty.tcp.hostname = "127.0.0.1"
| netty.tcp.port = 0
| }
|}
""".stripMargin
))
implicit val system = ActorSystem("migrator", config)
implicit val ec = system.dispatcher
implicit val db = DbExtension(system)
implicit val mat = ActorMaterializer()
val migration = new V20151108011300__FillUserSequence
Future {
blocking {
migration.migrate()
}
}
}
}
|
EaglesoftZJ/actor-platform
|
actor-server/actor-cli/src/main/scala/im/actor/server/cli/MigrationHandlers.scala
|
Scala
|
agpl-3.0
| 958
|
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
import java.util.regex.Pattern
import bintray.BintrayPlugin.autoImport._
import com.jsuereth.sbtpgp.PgpKeys
import com.typesafe.tools.mima.core.ProblemFilters
import com.typesafe.tools.mima.core._
import com.typesafe.tools.mima.plugin.MimaKeys._
import com.typesafe.tools.mima.plugin.MimaPlugin._
import de.heikoseeberger.sbtheader.AutomateHeaderPlugin
import de.heikoseeberger.sbtheader.FileType
import de.heikoseeberger.sbtheader.CommentStyle
import de.heikoseeberger.sbtheader.HeaderPlugin.autoImport._
import interplay._
import interplay.Omnidoc.autoImport._
import interplay.PlayBuildBase.autoImport._
import interplay.ScalaVersions._
import sbt._
import sbt.Keys._
import sbt.ScriptedPlugin.autoImport._
import sbtwhitesource.WhiteSourcePlugin.autoImport._
import scala.sys.process.stringToProcess
import scala.util.control.NonFatal
object BuildSettings {
val snapshotBranch: String = {
try {
val branch = "git rev-parse --abbrev-ref HEAD".!!.trim
if (branch == "HEAD") {
// not on a branch, get the hash
"git rev-parse HEAD".!!.trim
} else branch
} catch {
case NonFatal(_) => "unknown"
}
}
/** File header settings. */
private def fileUriRegexFilter(pattern: String): FileFilter = new FileFilter {
val compiledPattern = Pattern.compile(pattern)
override def accept(pathname: File): Boolean = {
val uriString = pathname.toURI.toString
compiledPattern.matcher(uriString).matches()
}
}
val fileHeaderSettings = Seq(
excludeFilter in (Compile, headerSources) := HiddenFileFilter ||
fileUriRegexFilter(".*/cookie/encoding/.*") || fileUriRegexFilter(".*/inject/SourceProvider.java$") ||
fileUriRegexFilter(".*/libs/reflect/.*"),
headerLicense := Some(HeaderLicense.Custom("Copyright (C) Lightbend Inc. <https://www.lightbend.com>")),
headerMappings ++= Map(
FileType.xml -> CommentStyle.xmlStyleBlockComment,
FileType.conf -> CommentStyle.hashLineComment
)
)
private val VersionPattern = """^(\d+).(\d+).(\d+)(-.*)?""".r
def evictionSettings: Seq[Setting[_]] = Seq(
// This avoids a lot of dependency resolution warnings to be showed.
evictionWarningOptions in update := EvictionWarningOptions.default
.withWarnTransitiveEvictions(false)
.withWarnDirectEvictions(false)
)
// We are not automatically promoting artifacts to Sonatype and
// Bintray so that we can have more control of the release process
// and do something if somethings fails (for example, if publishing
  // an artifact times out).
def playPublishingPromotionSettings: Seq[Setting[_]] = Seq(
playBuildPromoteBintray := false,
playBuildPromoteSonatype := false
)
val DocsApplication = config("docs").hide
val SourcesApplication = config("sources").hide
/** These settings are used by all projects. */
def playCommonSettings: Seq[Setting[_]] = Def.settings(
fileHeaderSettings,
homepage := Some(url("https://playframework.com")),
ivyLoggingLevel := UpdateLogging.DownloadOnly,
resolvers ++= Seq(
// using this variant due to sbt#5405
"sonatype-service-local-releases"
.at("https://oss.sonatype.org/service/local/repositories/releases/content/"), // sync ScriptedTools.scala
Resolver.typesafeRepo("releases"),
Resolver.typesafeIvyRepo("releases"),
Resolver.sbtPluginRepo("releases"), // weird sbt-pgp/play docs/vegemite issue
),
evictionSettings,
ivyConfigurations ++= Seq(DocsApplication, SourcesApplication),
javacOptions ++= Seq("-encoding", "UTF-8", "-Xlint:unchecked", "-Xlint:deprecation"),
scalacOptions in (Compile, doc) := {
// disable the new scaladoc feature for scala 2.12.0, might be removed in 2.12.0-1 (https://github.com/scala/scala-dev/issues/249)
CrossVersion.partialVersion(scalaVersion.value) match {
case Some((2, v)) if v >= 12 => Seq("-no-java-comments")
case _ => Seq()
}
},
fork in Test := true,
parallelExecution in Test := false,
testListeners in (Test, test) := Nil,
javaOptions in Test ++= Seq("-XX:MaxMetaspaceSize=384m", "-Xmx512m", "-Xms128m"),
testOptions ++= Seq(
Tests.Argument(TestFrameworks.Specs2, "showtimes"),
Tests.Argument(TestFrameworks.JUnit, "-v")
),
bintrayPackage := "play-sbt-plugin",
playPublishingPromotionSettings,
version ~= { v =>
v +
sys.props.get("akka.version").map("-akka-" + _).getOrElse("") +
sys.props.get("akka.http.version").map("-akka-http-" + _).getOrElse("")
},
apiURL := {
val v = version.value
if (isSnapshot.value) {
v match {
case VersionPattern(epoch, major, _, _) =>
Some(url(raw"https://www.playframework.com/documentation/$epoch.$major.x/api/scala/index.html"))
case _ => Some(url("https://www.playframework.com/documentation/latest/api/scala/index.html"))
}
} else {
Some(url(raw"https://www.playframework.com/documentation/$v/api/scala/index.html"))
}
},
autoAPIMappings := true,
apiMappings ++= {
val scalaInstance = Keys.scalaInstance.value
scalaInstance.libraryJars.map { libraryJar =>
libraryJar -> url(
raw"""http://scala-lang.org/files/archive/api/${scalaInstance.actualVersion}/index.html"""
)
}.toMap
},
apiMappings ++= {
// Maps JDK 1.8 jar into apidoc.
val rtJar = sys.props
.get("sun.boot.class.path")
.flatMap(cp =>
cp.split(java.io.File.pathSeparator).collectFirst {
case str if str.endsWith(java.io.File.separator + "rt.jar") => str
}
)
rtJar match {
case None => Map.empty
case Some(rtJar) => Map(file(rtJar) -> url(Docs.javaApiUrl))
}
},
apiMappings ++= {
// Finds appropriate scala apidoc from dependencies when autoAPIMappings are insufficient.
// See the following:
//
// http://stackoverflow.com/questions/19786841/can-i-use-sbts-apimappings-setting-for-managed-dependencies/20919304#20919304
// http://www.scala-sbt.org/release/docs/Howto-Scaladoc.html#Enable+manual+linking+to+the+external+Scaladoc+of+managed+dependencies
// https://github.com/ThoughtWorksInc/sbt-api-mappings/blob/master/src/main/scala/com/thoughtworks/sbtApiMappings/ApiMappings.scala#L34
val ScalaLibraryRegex = """^.*[/\\]scala-library-([\d\.]+)\.jar$""".r
val JavaxInjectRegex = """^.*[/\\]java.inject-([\d\.]+)\.jar$""".r
val IvyRegex = """^.*[/\\]([\.\-_\w]+)[/\\]([\.\-_\w]+)[/\\](?:jars|bundles)[/\\]([\.\-_\w]+)\.jar$""".r
(for {
jar <- (dependencyClasspath in Compile in doc).value.toSet ++ (dependencyClasspath in Test in doc).value
fullyFile = jar.data
urlOption = fullyFile.getCanonicalPath match {
case ScalaLibraryRegex(v) =>
Some(url(raw"""http://scala-lang.org/files/archive/api/$v/index.html"""))
case JavaxInjectRegex(v) =>
// the jar file doesn't match up with $apiName-
Some(url(Docs.javaxInjectUrl))
case re @ IvyRegex(apiOrganization, apiName, jarBaseFile) if jarBaseFile.startsWith(s"$apiName-") =>
val apiVersion = jarBaseFile.substring(apiName.length + 1, jarBaseFile.length)
apiOrganization match {
case "com.typesafe.akka" =>
Some(url(raw"https://doc.akka.io/api/akka/$apiVersion/"))
            case _ =>
val link = Docs.artifactToJavadoc(apiOrganization, apiName, apiVersion, jarBaseFile)
Some(url(link))
}
        case _ =>
          None
}
url <- urlOption
} yield (fullyFile -> url))(collection.breakOut(Map.canBuildFrom))
}
)
  // Version of the previous minor release checked for binary compatibility
val mimaPreviousVersion: Option[String] = Some("2.8.0")
/**
* These settings are used by all projects that are part of the runtime, as opposed to the development mode of Play.
*/
def playRuntimeSettings: Seq[Setting[_]] = Def.settings(
playCommonSettings,
mimaDefaultSettings,
mimaPreviousArtifacts := mimaPreviousVersion.map { version =>
val cross = if (crossPaths.value) CrossVersion.binary else CrossVersion.disabled
(organization.value %% moduleName.value % version).cross(cross)
}.toSet,
mimaBinaryIssueFilters ++= Seq(
// Remove deprecated methods from HttpRequestHandler
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.http.DefaultHttpRequestHandler.filterHandler"),
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.http.DefaultHttpRequestHandler.this"),
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.http.JavaCompatibleHttpRequestHandler.this"),
// Refactor params of runEvolutions (ApplicationEvolutions however is private anyway)
ProblemFilters.exclude[IncompatibleMethTypeProblem]("play.api.db.evolutions.ApplicationEvolutions.runEvolutions"),
// Removed @varargs (which removed the array forwarder method)
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.libs.typedmap.DefaultTypedMap.-"),
// Add .addAttrs(...) varargs and override methods to Request/RequestHeader and TypedMap's
ProblemFilters.exclude[ReversedMissingMethodProblem]("play.mvc.Http#Request.addAttrs"),
ProblemFilters.exclude[ReversedMissingMethodProblem]("play.mvc.Http#RequestHeader.addAttrs"),
ProblemFilters.exclude[ReversedMissingMethodProblem]("play.api.libs.typedmap.TypedMap.+"),
ProblemFilters.exclude[ReversedMissingMethodProblem]("play.api.libs.typedmap.TypedMap.-"),
ProblemFilters.exclude[IncompatibleMethTypeProblem]("play.api.libs.typedmap.DefaultTypedMap.-"),
// Remove outdated (internal) method
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.libs.streams.Execution.defaultExecutionContext"),
// Add allowEmptyFiles config to allow empty file uploads
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.http.ParserConfiguration.apply"),
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.http.ParserConfiguration.copy"),
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.http.ParserConfiguration.this"),
ProblemFilters.exclude[IncompatibleSignatureProblem]("play.api.http.ParserConfiguration.curried"),
ProblemFilters.exclude[IncompatibleSignatureProblem]("play.api.http.ParserConfiguration.tupled"),
ProblemFilters.exclude[IncompatibleSignatureProblem]("play.api.http.ParserConfiguration.unapply"),
ProblemFilters.exclude[MissingTypesProblem]("play.api.http.ParserConfiguration$"),
// Add withExtraServerConfiguration() to append server config to endpoints
ProblemFilters
.exclude[ReversedMissingMethodProblem]("play.api.test.ServerEndpointRecipe.withExtraServerConfiguration"),
// Support custom name of play_evolutions(_lock) table via metaTable config
ProblemFilters
.exclude[DirectMissingMethodProblem]("play.api.db.evolutions.DefaultEvolutionsDatasourceConfig.apply"),
ProblemFilters
.exclude[DirectMissingMethodProblem]("play.api.db.evolutions.DefaultEvolutionsDatasourceConfig.copy"),
ProblemFilters
.exclude[DirectMissingMethodProblem]("play.api.db.evolutions.DefaultEvolutionsDatasourceConfig.this"),
ProblemFilters.exclude[IncompatibleResultTypeProblem](
"play.api.db.evolutions.DefaultEvolutionsDatasourceConfig.copy$default$3"
),
ProblemFilters
.exclude[IncompatibleSignatureProblem]("play.api.db.evolutions.DefaultEvolutionsDatasourceConfig.curried"),
ProblemFilters
.exclude[IncompatibleSignatureProblem]("play.api.db.evolutions.DefaultEvolutionsDatasourceConfig.tupled"),
ProblemFilters
.exclude[IncompatibleSignatureProblem]("play.api.db.evolutions.DefaultEvolutionsDatasourceConfig.unapply"),
ProblemFilters.exclude[MissingTypesProblem]("play.api.db.evolutions.DefaultEvolutionsDatasourceConfig$"),
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.db.evolutions.DefaultEvolutionsApi.applyFor"),
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.db.evolutions.EvolutionsApi.applyFor"),
ProblemFilters.exclude[ReversedMissingMethodProblem]("play.api.db.evolutions.EvolutionsApi.evolve"),
ProblemFilters.exclude[ReversedMissingMethodProblem]("play.api.db.evolutions.EvolutionsApi.resetScripts"),
ProblemFilters.exclude[ReversedMissingMethodProblem]("play.api.db.evolutions.EvolutionsApi.resolve"),
ProblemFilters.exclude[ReversedMissingMethodProblem]("play.api.db.evolutions.EvolutionsApi.scripts"),
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.db.evolutions.Evolutions.applyEvolutions"),
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.db.evolutions.Evolutions.cleanupEvolutions"),
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.db.evolutions.Evolutions.withEvolutions"),
ProblemFilters
.exclude[ReversedMissingMethodProblem]("play.api.db.evolutions.EvolutionsDatasourceConfig.metaTable"),
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.db.evolutions.OfflineEvolutions.applyScript"),
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.db.evolutions.OfflineEvolutions.resolve"),
// Add Result attributes
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.mvc.Result.apply"),
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.mvc.Result.copy"),
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.mvc.Result.this"),
ProblemFilters.exclude[IncompatibleSignatureProblem]("play.api.mvc.Result.unapply"),
// Config which sets Caffeine's internal executor, also switched to trampoline where useful
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.cache.caffeine.CacheManagerProvider.this"),
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.cache.caffeine.CaffeineCacheApi.this"),
ProblemFilters.exclude[DirectMissingMethodProblem]("play.api.cache.caffeine.CaffeineCacheManager.this"),
ProblemFilters.exclude[DirectMissingMethodProblem]("play.cache.caffeine.CaffeineParser.from"),
),
unmanagedSourceDirectories in Compile += {
val suffix = CrossVersion.partialVersion(scalaVersion.value) match {
case Some((x, y)) => s"$x.$y"
case None => scalaBinaryVersion.value
}
(sourceDirectory in Compile).value / s"scala-$suffix"
},
// Argument for setting size of permgen space or meta space for all forked processes
Docs.apiDocsInclude := true
)
/** A project that is shared between the sbt runtime and the Play runtime. */
def PlayNonCrossBuiltProject(name: String, dir: String): Project = {
Project(name, file(dir))
.enablePlugins(PlaySbtLibrary, AutomateHeaderPlugin)
.settings(playRuntimeSettings: _*)
.settings(omnidocSettings: _*)
.settings(
autoScalaLibrary := false,
crossPaths := false,
crossScalaVersions := Seq(scala212)
)
}
/** A project that is only used when running in development. */
def PlayDevelopmentProject(name: String, dir: String): Project = {
Project(name, file(dir))
.enablePlugins(PlayLibrary, AutomateHeaderPlugin)
.settings(
playCommonSettings,
(javacOptions in compile) ~= (_.map {
case "1.8" => "1.6"
case other => other
}),
mimaPreviousArtifacts := Set.empty,
)
}
/** A project that is in the Play runtime. */
def PlayCrossBuiltProject(name: String, dir: String): Project = {
Project(name, file(dir))
.enablePlugins(PlayLibrary, AutomateHeaderPlugin, AkkaSnapshotRepositories)
.settings(playRuntimeSettings: _*)
.settings(omnidocSettings: _*)
.settings(
scalacOptions += "-target:jvm-1.8"
)
}
def omnidocSettings: Seq[Setting[_]] = Def.settings(
Omnidoc.projectSettings,
omnidocSnapshotBranch := snapshotBranch,
omnidocPathPrefix := ""
)
def playScriptedSettings: Seq[Setting[_]] = Seq(
// Don't automatically publish anything.
// The test-sbt-plugins-* scripts publish before running the scripted tests.
// When developing the sbt plugins:
// * run a publishLocal in the root project to get everything
// * run a publishLocal in the changes projects for fast feedback loops
scriptedDependencies := (()), // drop Test/compile & publishLocal
scriptedBufferLog := false,
scriptedLaunchOpts ++= Seq(
s"-Dsbt.boot.directory=${file(sys.props("user.home")) / ".sbt" / "boot"}",
"-Xmx512m",
"-XX:MaxMetaspaceSize=512m",
s"-Dscala.version=$scala212",
),
scripted := scripted.tag(Tags.Test).evaluated,
)
def disablePublishing = Def.settings(
disableNonLocalPublishing,
// This setting will work for sbt 1, but not 0.13. For 0.13 it only affects
// `compile` and `update` tasks.
skip in publish := true,
publishLocal := {},
)
def disableNonLocalPublishing = Def.settings(
// For sbt 0.13 this is what we need to avoid publishing. These settings can
// be removed when we move to sbt 1.
PgpKeys.publishSigned := {},
publish := {},
// We also don't need to track dependencies for unpublished projects
// so we need to disable WhiteSource plugin.
whitesourceIgnore := true
)
/** A project that runs in the sbt runtime. */
def PlaySbtProject(name: String, dir: String): Project = {
Project(name, file(dir))
.enablePlugins(PlaySbtLibrary, AutomateHeaderPlugin)
.settings(
playCommonSettings,
mimaPreviousArtifacts := Set.empty,
)
}
/** A project that *is* an sbt plugin. */
def PlaySbtPluginProject(name: String, dir: String): Project = {
Project(name, file(dir))
.enablePlugins(PlaySbtPlugin, AutomateHeaderPlugin)
.settings(
playCommonSettings,
playScriptedSettings,
fork in Test := false,
mimaPreviousArtifacts := Set.empty,
)
}
}
|
wegtam/playframework
|
project/BuildSettings.scala
|
Scala
|
apache-2.0
| 18,362
|
package scala.pickling.privatepublicctorstest
import org.scalatest.FunSuite
import scala.pickling._, scala.pickling.Defaults._, json._
class C private(private var x: Int, private var y: String) {
// another private ctor
private def this(x: Int) = this(x, "bye")
// a single public ctor
def this() = this(5, "hello")
override def equals(other: Any): Boolean =
other.isInstanceOf[C] && other.asInstanceOf[C].x == x && other.asInstanceOf[C].y == y
}
class PrivatePublicCtorsTest extends FunSuite {
test("main") {
val c = new C
val pickle: JSONPickle = c.pickle
val newC = pickle.unpickle[C]
assert(c == newC)
}
}
class RuntimePrivatePublicCtorsTest extends FunSuite {
test("main") {
val c = new C
val p: JSONPickle = (c: Any).pickle
val up = p.unpickle[Any]
assert(c == up)
}
}
|
scala/pickling
|
core/src/test/scala/scala/pickling/generation/PrivatePublicConstructorsTest.scala
|
Scala
|
bsd-3-clause
| 834
|
package ilc
package howTo
import org.scalatest.FunSuite
class GetConstraintsOutOfConstantsSuite
extends FunSuite
with GetConstraintsOutOfConstants
with feature.base.Pretty
|
inc-lc/ilc-scala
|
src/test/scala/ilc/howTo/GetConstraintsOutOfConstantsSuite.scala
|
Scala
|
mit
| 180
|
/* sbt -- Simple Build Tool
* Copyright 2009, 2010 Josh Cough, Mark Harrah
*/
package sbt.impl
import org.scalatools.testing._
class ScalaTestFramework extends Framework
{
val name = "ScalaTest"
val tests = Fingerprint.classOnly("org.scalatest.Suite")
def testRunner(loader: ClassLoader, loggers: Array[Logger]): Runner = new ScalaTestRunner(loader, loggers)
}
/** The test runner for ScalaTest tests. Based on Josh Cough's translation of sbt's original runner.*/
private class ScalaTestRunner(loader: ClassLoader, loggers: Array[Logger]) extends Runner
{
def run(testClassName: String, fingerprint: TestFingerprint, handler: EventHandler, args: Array[String])
{
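    // Load the suite class, run it with a reporter that records any failure,
    // then translate the outcome into a single sbt test event.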
def result(r: Result) { handler.handle(new TestEvent(testClassName, r, null)) }
import org.scalatest.{Filter, Stopper, Suite, Tracker}
val test = Load.`class`(testClassName, loader).asInstanceOf[Suite]
val reporter = new ScalaTestReporter
test.run(None, reporter, new Stopper {}, Filter(), Map.empty, None, new Tracker)
result( if(reporter.succeeded) Result.Success else Result.Failure )
}
private class ScalaTestReporter extends org.scalatest.Reporter with NotNull
{
import org.scalatest.events._
def apply(event: Event)
{
event match
{
case _: RunCompleted => info("Run completed.")
        case _: RunStarting => info("Run starting.")
case _: RunStopped => error("Run stopped")
case _: RunAborted => error("Run aborted")
case ts: TestStarting => info(ts.testName, "Test Starting", None)
case _: TestPending =>
case tf: TestFailed => error(tf.testName, "Test Failed", None, tf.throwable)
case ts: TestSucceeded => info(ts.testName, "Test Succeeded", None)
case ti: TestIgnored => info(ti.testName, "Test Ignored", None)
case sc: SuiteCompleted => info(sc.suiteName, "Suite Completed", None)
case sa: SuiteAborted => error(sa.suiteName, "Suite Aborted", Some(sa.message), sa.throwable)
case ss: SuiteStarting => info(ss.suiteName, "Suite Starting", None)
case ip: InfoProvided => info(ip.message)
}
}
def info(name: String, event: String, message: Option[String]): Unit = info(messageString(name, event, message))
def error(name: String, event: String, message: Option[String], t: Option[Throwable])
{
succeeded = false
t.foreach(trace)
error(messageString(name, event, message))
}
private def messageString(name: String, event: String, message: Option[String]) = event + " - " + name + withMessage(message)
private def withMessage(message: Option[String]) =
{
val trimmed = message.map(_.trim).getOrElse("")
if(trimmed.isEmpty) "" else ": " + trimmed
}
private def info(msg: String): Unit = loggers.foreach(_.info(msg))
private def trace(t: Throwable): Unit = loggers.foreach(_.trace(t))
private def error(msg: String): Unit =
{
succeeded = false
loggers.foreach(_.error(msg))
}
var succeeded = true
}
}
|
harrah/test-compat
|
ScalaTest.scala
|
Scala
|
bsd-3-clause
| 2,911
|
package me.yingrui.segment.dict
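/**
 * A dictionary indexed by each word's "head": its first `headLength` characters.
 * Every head maps to a HeadIndexer that stores all words sharing that prefix.
 */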
class HashDictionary extends IDictionary {
  def getCapacity(): Int = headIndexersHashMap.size
def clear() {
headIndexersHashMap = new java.util.HashMap[String, HeadIndexer]()
}
  def lookupHeadIndexer(head: String): HeadIndexer = headIndexersHashMap.get(head)
override def getWord(wordStr: String): IWord = {
val headIndexer = lookupHeadIndexer(getHead(wordStr))
if (headIndexer != null) headIndexer.findWord(wordStr) else null
}
def lookupWord(wordStr: String): IWord = {
val headIndexer = lookupHeadIndexer(getHead(wordStr))
if (headIndexer != null) headIndexer.get(wordStr) else null
}
override def getWords(sentenceStr: String): Array[IWord] = {
val headIndexer = lookupHeadIndexer(getHead(sentenceStr))
if (headIndexer != null)
headIndexer.findMultiWord(sentenceStr)
else
null
}
override def iterator(): List[IWord] = {
var wordList = List[IWord]()
for (i <- 0 until headIndexers.size) {
val headIndexer = headIndexers(i)
val wordArray = headIndexer.getWordArray()
wordList = wordList ++ wordArray.getWordItems().toList
}
    wordList
}
  def getHead(wordStr: String): String = wordStr.substring(0, headLength)
def addWord(word: IWord) {
var headIndexer = lookupHeadIndexer(getHead(word.getWordName()))
if (null == headIndexer) {
headIndexer = createHeadIndexer(word)
}
headIndexer.add(word)
}
private def createHeadIndexer(word: IWord): HeadIndexer = {
val headIndexer = HeadIndexer(word, headLength)
headIndexers = headIndexers ++ List(headIndexer)
headIndexersHashMap.put(headIndexer.getHeadStr(), headIndexer)
    headIndexer
}
def setHeadLength(headLength: Int) {
this.headLength = headLength
}
private var headLength: Int = 1
private var headIndexersHashMap = new java.util.HashMap[String, HeadIndexer]()
private var headIndexers = List[HeadIndexer]()
}
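// Usage sketch (an assumption; IWord instances come from elsewhere in the library):
//
//   val dict = new HashDictionary()
//   dict.setHeadLength(1)
//   dict.addWord(word)              // word: IWord
//   val hit = dict.getWord("例子")  // returns null when the word is absent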
|
yingrui/mahjong
|
lib-segment/src/main/scala/me/yingrui/segment/dict/HashDictionary.scala
|
Scala
|
gpl-3.0
| 2,097