| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
package me.reminisce.stats.statistics
import com.github.nscala_time.time.Imports._
import me.reminisce.stats.statistics.Stats.{QuestionsByType, Rival, StatsEntities}
object Responses {
case class StatsResponse(
userId: String,
date: DateTime = DateTime.now,
amount: Int,
win: Int,
loss: Int,
tie: Int,
rivals: Set[Rival],
questionsByType: QuestionsByType
)
def responseFromStats(stats: StatsEntities): StatsResponse = stats match {
case StatsEntities(_, userId, date, amount, win, loss, tie, rivals, questionsByType) =>
StatsResponse(userId, date, amount, win, loss, tie, rivals, questionsByType)
}
}
| reminisceme/stats | src/main/scala/me/reminisce/stats/statistics/Responses.scala | Scala | apache-2.0 | 654 |
package models
import org.joda.time.DateTime
import securesocial.core._
import securesocial.core.IdentityId
import securesocial.core.OAuth1Info
import securesocial.core.OAuth2Info
import securesocial.core.PasswordInfo
case class User(uid: Option[Long] = None,
identityId: IdentityId,
firstName: String,
lastName: String,
fullName: String,
email: Option[String],
avatarUrl: Option[String],
authMethod: AuthenticationMethod,
oAuth1Info: Option[OAuth1Info],
oAuth2Info: Option[OAuth2Info],
passwordInfo: Option[PasswordInfo] = None) extends Identity {
def userSetting(): Option[UserSetting] = {
Tables.UserSettings.findBy(this)
}
def saveUserSetting(desktopNotifications: Boolean): Unit = {
Tables.UserSettings.saveUserSetting(this, desktopNotifications)
}
def myRooms(): Set[Room] = {
val publicRooms = Tables.Rooms.public_rooms()
val joinedRooms = Tables.RoomUsers.findByUserId(this.uid.get)
(publicRooms ++ joinedRooms).toSet
}
}
object UserFromIdentity {
def apply(i: Identity): User = User(None, i.identityId, i.firstName, i.lastName, i.fullName,
i.email, i.avatarUrl, i.authMethod, i.oAuth1Info, i.oAuth2Info)
}
case class UserSetting(id: Option[Long], user_id: Long, desktopNotifications: Boolean, created: DateTime, updated: DateTime)
case class Room(id: Option[Long], ownerId: Long, name: String, isPrivate: Boolean, created: DateTime) {
def createComment(user: User, text: String, replyTo: Option[Long]): Comment = {
val comment = Comment(None, user.uid.get, id.get, text, replyTo, DateTime.now)
Tables.Rooms.createComment(comment)
}
def owner: User = {
Tables.Users.findById(ownerId).get
}
def latest_post: Option[Comment] = {
Tables.Rooms.comments(this.id.get, 1).headOption.map(_._1)
}
def comments(size: Int, to: DateTime): Seq[(Comment, User)] = {
Tables.Rooms.comments(this.id.get, size, to)
}
def members: Seq[User] = {
Tables.RoomUsers.findByRoomId(this.id.get)
}
def addMember(user: User): Unit = {
Tables.Rooms.addMember(this, user)
}
}
case class Comment(id: Option[Long], userId: Long, roomId: Long, message: String, replyTo: Option[Long], created: DateTime) {
def user: User = {
Tables.Users.findById(userId).get
}
def reply_to: Option[Comment] = {
replyTo.flatMap(id => Tables.Comments.findById(id))
}
}
case class RoomUser(id: Option[Long], userId: Long, roomId: Long, created: DateTime)
| phantomtype/Phantasm | app/models/Models.scala | Scala | mit | 2,752 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.master.ui
import javax.servlet.http.HttpServletRequest
import scala.xml.Node
import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
import org.apache.spark.deploy.ExecutorState
import org.apache.spark.deploy.StandaloneResourceUtils.{formatResourceRequirements, formatResourcesAddresses}
import org.apache.spark.deploy.master.ExecutorDesc
import org.apache.spark.ui.{ToolTips, UIUtils, WebUIPage}
import org.apache.spark.util.Utils
private[ui] class ApplicationPage(parent: MasterWebUI) extends WebUIPage("app") {
private val master = parent.masterEndpointRef
/** Executor details for a particular application */
def render(request: HttpServletRequest): Seq[Node] = {
val appId = request.getParameter("appId")
val state = master.askSync[MasterStateResponse](RequestMasterState)
val app = state.activeApps.find(_.id == appId)
.getOrElse(state.completedApps.find(_.id == appId).orNull)
if (app == null) {
val msg = <div class="row">No running application with ID {appId}</div>
return UIUtils.basicSparkPage(request, msg, "Not Found")
}
val executorHeaders = Seq("ExecutorID", "Worker", "Cores", "Memory", "Resources",
"State", "Logs")
val allExecutors = (app.executors.values ++ app.removedExecutors).toSet.toSeq
// This includes executors that are either still running or have exited cleanly
val executors = allExecutors.filter { exec =>
!ExecutorState.isFinished(exec.state) || exec.state == ExecutorState.EXITED
}
val removedExecutors = allExecutors.diff(executors)
val executorsTable = UIUtils.listingTable(executorHeaders, executorRow, executors)
val removedExecutorsTable = UIUtils.listingTable(executorHeaders, executorRow, removedExecutors)
val content =
<div class="row">
<div class="col-12">
<ul class="list-unstyled">
<li><strong>ID:</strong> {app.id}</li>
<li><strong>Name:</strong> {app.desc.name}</li>
<li><strong>User:</strong> {app.desc.user}</li>
<li><strong>Cores:</strong>
{
if (app.desc.maxCores.isEmpty) {
"Unlimited (%s granted)".format(app.coresGranted)
} else {
"%s (%s granted, %s left)".format(
app.desc.maxCores.get, app.coresGranted, app.coresLeft)
}
}
</li>
<li>
<span data-toggle="tooltip" title={ToolTips.APPLICATION_EXECUTOR_LIMIT}
data-placement="top">
<strong>Executor Limit: </strong>
{
if (app.executorLimit == Int.MaxValue) "Unlimited" else app.executorLimit
}
({app.executors.size} granted)
</span>
</li>
<li>
<strong>Executor Memory:</strong>
{Utils.megabytesToString(app.desc.memoryPerExecutorMB)}
</li>
<li>
<strong>Executor Resources:</strong>
{formatResourceRequirements(app.desc.resourceReqsPerExecutor)}
</li>
<li><strong>Submit Date:</strong> {UIUtils.formatDate(app.submitDate)}</li>
<li><strong>State:</strong> {app.state}</li>
{
if (!app.isFinished) {
<li><strong>
<a href={UIUtils.makeHref(parent.master.reverseProxy,
app.id, app.desc.appUiUrl)}>Application Detail UI</a>
</strong></li>
}
}
</ul>
</div>
</div>
<div class="row"> <!-- Executors -->
<div class="col-12">
<span class="collapse-aggregated-executors collapse-table"
onClick="collapseTable('collapse-aggregated-executors','aggregated-executors')">
<h4>
<span class="collapse-table-arrow arrow-open"></span>
<a>Executor Summary ({allExecutors.length})</a>
</h4>
</span>
<div class="aggregated-executors collapsible-table">
{executorsTable}
</div>
{
if (removedExecutors.nonEmpty) {
<span class="collapse-aggregated-removedExecutors collapse-table"
onClick="collapseTable('collapse-aggregated-removedExecutors',
'aggregated-removedExecutors')">
<h4>
<span class="collapse-table-arrow arrow-open"></span>
<a>Removed Executors ({removedExecutors.length})</a>
</h4>
</span> ++
<div class="aggregated-removedExecutors collapsible-table">
{removedExecutorsTable}
</div>
}
}
</div>
</div>;
UIUtils.basicSparkPage(request, content, "Application: " + app.desc.name)
}
private def executorRow(executor: ExecutorDesc): Seq[Node] = {
val workerUrlRef = UIUtils.makeHref(parent.master.reverseProxy,
executor.worker.id, executor.worker.webUiAddress)
<tr>
<td>{executor.id}</td>
<td>
<a href={workerUrlRef}>{executor.worker.id}</a>
</td>
<td>{executor.cores}</td>
<td>{executor.memory}</td>
<td>{formatResourcesAddresses(executor.resources)}</td>
<td>{executor.state}</td>
<td>
<a href={s"$workerUrlRef/logPage/?appId=${executor.application.id}&executorId=${executor.
id}&logType=stdout"}>stdout</a>
<a href={s"$workerUrlRef/logPage/?appId=${executor.application.id}&executorId=${executor.
id}&logType=stderr"}>stderr</a>
</td>
</tr>
}
}
| maropu/spark | core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala | Scala | apache-2.0 | 6,523 |
package spotlight.model.outlier
import omnibus.akka.envelope.WorkId
import shapeless.Lens
import spotlight.model.outlier.AnalysisPlan.Scope
/** Created by rolfsd on 11/4/16.
*/
trait CorrelatedData[T] {
def data: T
def correlationIds: Set[WorkId]
def scope: Option[AnalysisPlan.Scope] = None
def withData( newData: T ): CorrelatedData[T]
def withCorrelationIds( newIds: Set[WorkId] ): CorrelatedData[T]
def withScope( newScope: Option[AnalysisPlan.Scope] ): CorrelatedData[T]
}
object CorrelatedData {
def unapply( cdata: CorrelatedData[_] ): Option[( Any, Set[WorkId], Option[AnalysisPlan.Scope] )] = {
Some( ( cdata.data, cdata.correlationIds, cdata.scope ) )
}
def dataLens[T]: Lens[CorrelatedData[T], T] = new Lens[CorrelatedData[T], T] {
override def get( cd: CorrelatedData[T] ): T = cd.data
override def set( cd: CorrelatedData[T] )( d: T ): CorrelatedData[T] = cd withData d
}
def correlationIdsLens[T]: Lens[CorrelatedData[T], Set[WorkId]] = new Lens[CorrelatedData[T], Set[WorkId]] {
override def get( cd: CorrelatedData[T] ): Set[WorkId] = cd.correlationIds
override def set( cd: CorrelatedData[T] )( cids: Set[WorkId] ): CorrelatedData[T] = cd withCorrelationIds cids
}
def scopeLens[T]: Lens[CorrelatedData[T], Option[AnalysisPlan.Scope]] = new Lens[CorrelatedData[T], Option[AnalysisPlan.Scope]] {
override def get( cd: CorrelatedData[T] ): Option[Scope] = cd.scope
override def set( cd: CorrelatedData[T] )( s: Option[Scope] ): CorrelatedData[T] = cd withScope s
}
}
| dmrolfs/lineup | core/src/main/scala/spotlight/model/outlier/CorrelatedData.scala | Scala | mit | 1,542 |
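The lenses above are defined against the `CorrelatedData` trait rather than a concrete case class, so `set` must delegate to the `with*` copy methods. A minimal sketch of how a concrete implementation plugs into them; `CorrelatedSeries` is hypothetical and not part of the original file:

case class CorrelatedSeries(
  data: Seq[Double],
  correlationIds: Set[WorkId],
  override val scope: Option[AnalysisPlan.Scope] = None
) extends CorrelatedData[Seq[Double]] {
  // each with* method is just a copy, which is what the lenses' set delegates to
  override def withData( newData: Seq[Double] ): CorrelatedData[Seq[Double]] = copy( data = newData )
  override def withCorrelationIds( newIds: Set[WorkId] ): CorrelatedData[Seq[Double]] = copy( correlationIds = newIds )
  override def withScope( newScope: Option[AnalysisPlan.Scope] ): CorrelatedData[Seq[Double]] = copy( scope = newScope )
}

val series: CorrelatedData[Seq[Double]] = CorrelatedSeries( Seq( 1.0, 2.0 ), Set.empty )
val doubled = CorrelatedData.dataLens[Seq[Double]].set( series )( Seq( 2.0, 4.0 ) )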
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.livy.thriftserver.ui
import java.text.SimpleDateFormat
import org.apache.livy.server.JsonServlet
import org.apache.livy.thriftserver.LivyThriftServer
class ThriftJsonServlet(val basePath: String) extends JsonServlet {
private val df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z")
case class SessionInfo(
sessionId: String,
livySessionId: String,
owner: String,
createdAt: String)
get("/sessions") {
val thriftSessions = LivyThriftServer.getInstance.map { server =>
val sessionManager = server.getSessionManager
sessionManager.getSessions.map { sessionHandle =>
val info = sessionManager.getSessionInfo(sessionHandle)
SessionInfo(sessionHandle.getSessionId.toString,
sessionManager.livySessionId(sessionHandle).map(_.toString).getOrElse(""),
info.username,
df.format(info.creationTime))
}.toSeq
}.getOrElse(Seq.empty)
val from = params.get("from").map(_.toInt).getOrElse(0)
val size = params.get("size").map(_.toInt).getOrElse(100)
Map(
"from" -> from,
"total" -> thriftSessions.length,
"sessions" -> thriftSessions.view(from, from + size))
}
}
| ajbozarth/incubator-livy | thriftserver/server/src/main/scala/org/apache/livy/thriftserver/ui/ThriftJsonServlet.scala | Scala | apache-2.0 | 2,008 |
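The `/sessions` endpoint pages its results with the `from` and `size` query parameters via `Seq#view(from, until)`. A small sketch of the slicing behavior it relies on, with illustrative values; the view is lazy and clamps `until` to the end of the sequence, so requesting past the end is safe:

val sessions = Seq("session-1", "session-2", "session-3")
// from = 1, size = 100: the slice simply stops at the last element.
val page = sessions.view(1, 1 + 100).toList // List("session-2", "session-3")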
/*
* Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com>
*/
package detailedtopics.configuration.gzipencoding
import play.api.test._
import play.filters.gzip.GzipFilter
import play.api.mvc.Results
import play.core.j.JavaGlobalSettingsAdapter
import play.twirl.api.Html
object GzipEncoding extends PlaySpecification {
"gzip filter" should {
"be possible to configure in play" in {
//#global
import play.api._
import play.api.mvc._
import play.filters.gzip.GzipFilter
object Global extends WithFilters(new GzipFilter()) with GlobalSettings {
// onStart, onStop etc...
}
//#global
running(FakeApplication()) {
header(CONTENT_ENCODING,
Global.doFilter(Action(Results.Ok))(gzipRequest).run
) must beSome("gzip")
}
}
"allow custom strategies for when to gzip" in {
val filter =
//#should-gzip
new GzipFilter(shouldGzip = (request, response) =>
response.headers.get("Content-Type").exists(_.startsWith("text/html")))
//#should-gzip
import play.api.mvc._
running(FakeApplication()) {
header(CONTENT_ENCODING,
filter(Action(Results.Ok("foo")))(gzipRequest).run
) must beNone
}
}
"be possible to configure in a play java project" in {
import play.api.mvc._
running(FakeApplication()) {
val global = new JavaGlobalSettingsAdapter(new Global())
header(CONTENT_ENCODING,
global.doFilter(Action(Results.Ok))(gzipRequest).run
) must beSome("gzip")
}
}
}
def gzipRequest = FakeRequest().withHeaders(ACCEPT_ENCODING -> "gzip")
}
| jyotikamboj/container | pf-documentation/manual/detailedTopics/configuration/code/GzipEncoding.scala | Scala | mit | 1,689 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.runtime.harness
import org.apache.flink.api.scala._
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord
import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.data.{RowData, TimestampData}
import org.apache.flink.table.planner.factories.TestValuesTableFactory
import org.apache.flink.table.planner.runtime.utils.StreamingWithStateTestBase.{HEAP_BACKEND, ROCKSDB_BACKEND, StateBackendMode}
import org.apache.flink.table.planner.runtime.utils.TestData
import org.apache.flink.table.runtime.util.RowDataHarnessAssertor
import org.apache.flink.table.runtime.util.StreamRecordUtils.binaryRecord
import org.apache.flink.table.runtime.util.TimeWindowUtil.toUtcTimestampMills
import org.apache.flink.types.Row
import org.apache.flink.types.RowKind.INSERT
import java.time.{LocalDateTime, ZoneId}
import java.util.concurrent.ConcurrentLinkedQueue
import java.util.{Collection => JCollection}
import org.junit.runner.RunWith
import org.junit.runners.Parameterized
import org.junit.{Before, Test}
import scala.collection.JavaConversions._
/**
* Harness tests for processing-time window table function.
*/
@RunWith(classOf[Parameterized])
class WindowTableFunctionHarnessTest(backend: StateBackendMode, shiftTimeZone: ZoneId)
extends HarnessTestBase(backend) {
private val UTC_ZONE_ID = ZoneId.of("UTC")
private val assertor = new RowDataHarnessAssertor(
Array(
DataTypes.STRING().getLogicalType,
DataTypes.DOUBLE().getLogicalType,
DataTypes.STRING().getLogicalType,
DataTypes.STRING().getLogicalType,
DataTypes.TIMESTAMP_LTZ(3).getLogicalType,
DataTypes.TIMESTAMP_LTZ(3).getLogicalType,
DataTypes.TIMESTAMP_LTZ(3).getLogicalType))
@Before
override def before(): Unit = {
super.before()
val dataId = TestValuesTableFactory.registerData(TestData.windowDataWithTimestamp)
tEnv.getConfig.setLocalTimeZone(shiftTimeZone)
tEnv.executeSql(
s"""
|CREATE TABLE T1 (
| `ts` STRING,
| `int` INT,
| `double` DOUBLE,
| `float` FLOAT,
| `bigdec` DECIMAL(10, 2),
| `string` STRING,
| `name` STRING,
| proctime AS PROCTIME()
|) WITH (
| 'connector' = 'values',
| 'data-id' = '$dataId'
|)
|""".stripMargin)
}
@Test
def testProcessingTimeTumbleWindow(): Unit = {
val sql =
"""
|SELECT *
|FROM TABLE(TUMBLE(TABLE T1, DESCRIPTOR(proctime), INTERVAL '5' SECOND))
""".stripMargin
val t1 = tEnv.sqlQuery(sql)
val testHarness = createHarnessTesterForNoState(t1.toAppendStream[Row], "WindowTableFunction")
testHarness.open()
ingestData(testHarness)
val expected = new ConcurrentLinkedQueue[Object]()
expected.add(record("a", 1.0D, "Hi", null,
localMills("1970-01-01T00:00:00"),
localMills("1970-01-01T00:00:05"),
mills("1970-01-01T00:00:04.999")))
expected.add(record("a", 2.0D, "Comment#1", null,
localMills("1970-01-01T00:00:00"),
localMills("1970-01-01T00:00:05"),
mills("1970-01-01T00:00:04.999")))
expected.add(record("a", 2.0D, "Comment#1", null,
localMills("1970-01-01T00:00:00"),
localMills("1970-01-01T00:00:05"),
mills("1970-01-01T00:00:04.999")))
expected.add(record("a", 5.0D, null, null,
localMills("1970-01-01T00:00:00"),
localMills("1970-01-01T00:00:05"),
mills("1970-01-01T00:00:04.999")))
expected.add(record("a", 5.0D, "Hi", null,
localMills("1970-01-01T00:00:00"),
localMills("1970-01-01T00:00:05"),
mills("1970-01-01T00:00:04.999")))
expected.add(record("b", 6.0D, "Hi", null,
localMills("1970-01-01T00:00:05"),
localMills("1970-01-01T00:00:10"),
mills("1970-01-01T00:00:09.999")))
expected.add(record("b", 3.0D, "Hello", null,
localMills("1970-01-01T00:00:05"),
localMills("1970-01-01T00:00:10"),
mills("1970-01-01T00:00:09.999")))
expected.add(record("a", null, "Comment#2", null,
localMills("1970-01-01T00:00:05"),
localMills("1970-01-01T00:00:10"),
mills("1970-01-01T00:00:09.999")))
expected.add(record("b", 4.0D, "Hi", null,
localMills("1970-01-01T00:00:15"),
localMills("1970-01-01T00:00:20"),
mills("1970-01-01T00:00:19.999")))
expected.add(record(null, 7.0D, null, null,
localMills("1970-01-01T00:00:30"),
localMills("1970-01-01T00:00:35"),
mills("1970-01-01T00:00:34.999")))
expected.add(record("b", 3.0D, "Comment#3", null,
localMills("1970-01-01T00:00:30"),
localMills("1970-01-01T00:00:35"),
mills("1970-01-01T00:00:34.999")))
assertor.assertOutputEqualsSorted("result mismatch", expected, testHarness.getOutput)
testHarness.close()
}
@Test
def testProcessingTimeHopWindow(): Unit = {
val sql =
"""
|SELECT * FROM TABLE(
| HOP(TABLE T1, DESCRIPTOR(proctime), INTERVAL '5' SECOND, INTERVAL '10' SECOND))
""".stripMargin
val t1 = tEnv.sqlQuery(sql)
val testHarness = createHarnessTesterForNoState(t1.toAppendStream[Row], "WindowTableFunction")
testHarness.open()
ingestData(testHarness)
val expected = new ConcurrentLinkedQueue[Object]()
expected.add(record("a", 1.0D, "Hi", null,
localMills("1969-12-31T23:59:55"),
localMills("1970-01-01T00:00:05"),
mills("1970-01-01T00:00:04.999")))
expected.add(record("a", 1.0D, "Hi", null,
localMills("1970-01-01T00:00"),
localMills("1970-01-01T00:00:10"),
mills("1970-01-01T00:00:09.999")))
expected.add(record("a", 2.0D, "Comment#1", null,
localMills("1969-12-31T23:59:55"),
localMills("1970-01-01T00:00:05"),
mills("1970-01-01T00:00:04.999")))
expected.add(record("a", 2.0D, "Comment#1", null,
localMills("1970-01-01T00:00"),
localMills("1970-01-01T00:00:10"),
mills("1970-01-01T00:00:09.999")))
expected.add(record("a", 2.0D, "Comment#1", null,
localMills("1969-12-31T23:59:55"),
localMills("1970-01-01T00:00:05"),
mills("1970-01-01T00:00:04.999")))
expected.add(record("a", 2.0D, "Comment#1", null,
localMills("1970-01-01T00:00"),
localMills("1970-01-01T00:00:10"),
mills("1970-01-01T00:00:09.999")))
expected.add(record("a", 5.0D, null, null,
localMills("1969-12-31T23:59:55"),
localMills("1970-01-01T00:00:05"),
mills("1970-01-01T00:00:04.999")))
expected.add(record("a", 5.0D, null, null,
localMills("1970-01-01T00:00"),
localMills("1970-01-01T00:00:10"),
mills("1970-01-01T00:00:09.999")))
expected.add(record("a", 5.0D, "Hi", null,
localMills("1969-12-31T23:59:55"),
localMills("1970-01-01T00:00:05"),
mills("1970-01-01T00:00:04.999")))
expected.add(record("a", 5.0D, "Hi", null,
localMills("1970-01-01T00:00"),
localMills("1970-01-01T00:00:10"),
mills("1970-01-01T00:00:09.999")))
expected.add(record("b", 6.0D, "Hi", null,
localMills("1970-01-01T00:00:00"),
localMills("1970-01-01T00:00:10"),
mills("1970-01-01T00:00:09.999")))
expected.add(record("b", 6.0D, "Hi", null,
localMills("1970-01-01T00:00:05"),
localMills("1970-01-01T00:00:15"),
mills("1970-01-01T00:00:14.999")))
expected.add(record("b", 3.0D, "Hello", null,
localMills("1970-01-01T00:00:00"),
localMills("1970-01-01T00:00:10"),
mills("1970-01-01T00:00:09.999")))
expected.add(record("b", 3.0D, "Hello", null,
localMills("1970-01-01T00:00:05"),
localMills("1970-01-01T00:00:15"),
mills("1970-01-01T00:00:14.999")))
expected.add(record("a", null, "Comment#2", null,
localMills("1970-01-01T00:00:00"),
localMills("1970-01-01T00:00:10"),
mills("1970-01-01T00:00:09.999")))
expected.add(record("a", null, "Comment#2", null,
localMills("1970-01-01T00:00:05"),
localMills("1970-01-01T00:00:15"),
mills("1970-01-01T00:00:14.999")))
expected.add(record("b", 4.0D, "Hi", null,
localMills("1970-01-01T00:00:10"),
localMills("1970-01-01T00:00:20"),
mills("1970-01-01T00:00:19.999")))
expected.add(record("b", 4.0D, "Hi", null,
localMills("1970-01-01T00:00:15"),
localMills("1970-01-01T00:00:25"),
mills("1970-01-01T00:00:24.999")))
expected.add(record(null, 7.0D, null, null,
localMills("1970-01-01T00:00:25"),
localMills("1970-01-01T00:00:35"),
mills("1970-01-01T00:00:34.999")))
expected.add(record(null, 7.0D, null, null,
localMills("1970-01-01T00:00:30"),
localMills("1970-01-01T00:00:40"),
mills("1970-01-01T00:00:39.999")))
expected.add(record("b", 3.0D, "Comment#3", null,
localMills("1970-01-01T00:00:25"),
localMills("1970-01-01T00:00:35"),
mills("1970-01-01T00:00:34.999")))
expected.add(record("b", 3.0D, "Comment#3", null,
localMills("1970-01-01T00:00:30"),
localMills("1970-01-01T00:00:40"),
mills("1970-01-01T00:00:39.999")))
assertor.assertOutputEqualsSorted("result mismatch", expected, testHarness.getOutput)
testHarness.close()
}
@Test
def testProcessingTimeCumulateWindow(): Unit = {
val sql =
"""
|SELECT * FROM TABLE(
| CUMULATE(TABLE T1, DESCRIPTOR(proctime), INTERVAL '5' SECOND, INTERVAL '15' SECOND))
""".stripMargin
val t1 = tEnv.sqlQuery(sql)
val testHarness = createHarnessTesterForNoState(t1.toAppendStream[Row], "WindowTableFunction")
testHarness.open()
ingestData(testHarness)
val expected = new ConcurrentLinkedQueue[Object]()
expected.add(record("a", 1.0D, "Hi", null,
localMills("1970-01-01T00:00:00"),
localMills("1970-01-01T00:00:05"),
mills("1970-01-01T00:00:04.999")))
expected.add(record("a", 1.0D, "Hi", null,
localMills("1970-01-01T00:00"),
localMills("1970-01-01T00:00:10"),
mills("1970-01-01T00:00:09.999")))
expected.add(record("a", 1.0D, "Hi", null,
localMills("1970-01-01T00:00"),
localMills("1970-01-01T00:00:15"),
mills("1970-01-01T00:00:14.999")))
expected.add(record("a", 2.0D, "Comment#1", null,
localMills("1970-01-01T00:00:00"),
localMills("1970-01-01T00:00:05"),
mills("1970-01-01T00:00:04.999")))
expected.add(record("a", 2.0D, "Comment#1", null,
localMills("1970-01-01T00:00"),
localMills("1970-01-01T00:00:10"),
mills("1970-01-01T00:00:09.999")))
expected.add(record("a", 2.0D, "Comment#1", null,
localMills("1970-01-01T00:00"),
localMills("1970-01-01T00:00:15"),
mills("1970-01-01T00:00:14.999")))
expected.add(record("a", 2.0D, "Comment#1", null,
localMills("1970-01-01T00:00:00"),
localMills("1970-01-01T00:00:05"),
mills("1970-01-01T00:00:04.999")))
expected.add(record("a", 2.0D, "Comment#1", null,
localMills("1970-01-01T00:00"),
localMills("1970-01-01T00:00:10"),
mills("1970-01-01T00:00:09.999")))
expected.add(record("a", 2.0D, "Comment#1", null,
localMills("1970-01-01T00:00"),
localMills("1970-01-01T00:00:15"),
mills("1970-01-01T00:00:14.999")))
expected.add(record("a", 5.0D, null, null,
localMills("1970-01-01T00:00:00"),
localMills("1970-01-01T00:00:05"),
mills("1970-01-01T00:00:04.999")))
expected.add(record("a", 5.0D, null, null,
localMills("1970-01-01T00:00"),
localMills("1970-01-01T00:00:10"),
mills("1970-01-01T00:00:09.999")))
expected.add(record("a", 5.0D, null, null,
localMills("1970-01-01T00:00"),
localMills("1970-01-01T00:00:15"),
mills("1970-01-01T00:00:14.999")))
expected.add(record("a", 5.0D, "Hi", null,
localMills("1970-01-01T00:00:00"),
localMills("1970-01-01T00:00:05"),
mills("1970-01-01T00:00:04.999")))
expected.add(record("a", 5.0D, "Hi", null,
localMills("1970-01-01T00:00"),
localMills("1970-01-01T00:00:10"),
mills("1970-01-01T00:00:09.999")))
expected.add(record("a", 5.0D, "Hi", null,
localMills("1970-01-01T00:00"),
localMills("1970-01-01T00:00:15"),
mills("1970-01-01T00:00:14.999")))
expected.add(record("b", 6.0D, "Hi", null,
localMills("1970-01-01T00:00:00"),
localMills("1970-01-01T00:00:10"),
mills("1970-01-01T00:00:09.999")))
expected.add(record("b", 6.0D, "Hi", null,
localMills("1970-01-01T00:00:00"),
localMills("1970-01-01T00:00:15"),
mills("1970-01-01T00:00:14.999")))
expected.add(record("b", 3.0D, "Hello", null,
localMills("1970-01-01T00:00:00"),
localMills("1970-01-01T00:00:10"),
mills("1970-01-01T00:00:09.999")))
expected.add(record("b", 3.0D, "Hello", null,
localMills("1970-01-01T00:00:00"),
localMills("1970-01-01T00:00:15"),
mills("1970-01-01T00:00:14.999")))
expected.add(record("a", null, "Comment#2", null,
localMills("1970-01-01T00:00:00"),
localMills("1970-01-01T00:00:10"),
mills("1970-01-01T00:00:09.999")))
expected.add(record("a", null, "Comment#2", null,
localMills("1970-01-01T00:00:00"),
localMills("1970-01-01T00:00:15"),
mills("1970-01-01T00:00:14.999")))
expected.add(record("b", 4.0D, "Hi", null,
localMills("1970-01-01T00:00:15"),
localMills("1970-01-01T00:00:20"),
mills("1970-01-01T00:00:19.999")))
expected.add(record("b", 4.0D, "Hi", null,
localMills("1970-01-01T00:00:15"),
localMills("1970-01-01T00:00:25"),
mills("1970-01-01T00:00:24.999")))
expected.add(record("b", 4.0D, "Hi", null,
localMills("1970-01-01T00:00:15"),
localMills("1970-01-01T00:00:30"),
mills("1970-01-01T00:00:29.999")))
expected.add(record(null, 7.0D, null, null,
localMills("1970-01-01T00:00:30"),
localMills("1970-01-01T00:00:35"),
mills("1970-01-01T00:00:34.999")))
expected.add(record(null, 7.0D, null, null,
localMills("1970-01-01T00:00:30"),
localMills("1970-01-01T00:00:40"),
mills("1970-01-01T00:00:39.999")))
expected.add(record(null, 7.0D, null, null,
localMills("1970-01-01T00:00:30"),
localMills("1970-01-01T00:00:45"),
mills("1970-01-01T00:00:44.999")))
expected.add(record("b", 3.0D, "Comment#3", null,
localMills("1970-01-01T00:00:30"),
localMills("1970-01-01T00:00:35"),
mills("1970-01-01T00:00:34.999")))
expected.add(record("b", 3.0D, "Comment#3", null,
localMills("1970-01-01T00:00:30"),
localMills("1970-01-01T00:00:40"),
mills("1970-01-01T00:00:39.999")))
expected.add(record("b", 3.0D, "Comment#3", null,
localMills("1970-01-01T00:00:30"),
localMills("1970-01-01T00:00:45"),
mills("1970-01-01T00:00:44.999")))
assertor.assertOutputEqualsSorted("result mismatch", expected, testHarness.getOutput)
testHarness.close()
}
/**
* Ingests test data; the input schema is [name, double, string, proctime].
* We follow the test data in [[TestData.windowDataWithTimestamp]] so that the
* produced result is the same.
*/
private def ingestData(
testHarness: OneInputStreamOperatorTestHarness[RowData, RowData]): Unit = {
// input schema: [name, double, string, proctime]
testHarness.setProcessingTime(1000L)
testHarness.processElement(record("a", 1d, "Hi", null))
testHarness.setProcessingTime(2000L)
testHarness.processElement(record("a", 2d, "Comment#1", null))
testHarness.setProcessingTime(3000L)
testHarness.processElement(record("a", 2d, "Comment#1", null))
testHarness.setProcessingTime(4000L)
testHarness.processElement(record("a", 5d, null, null))
testHarness.processElement(record("a", 5d, "Hi", null))
testHarness.setProcessingTime(6000L)
testHarness.processElement(record("b", 6d, "Hi", null))
testHarness.setProcessingTime(7000L)
testHarness.processElement(record("b", 3d, "Hello", null))
testHarness.setProcessingTime(8000L)
testHarness.processElement(record("a", null, "Comment#2", null))
testHarness.setProcessingTime(16000L)
testHarness.processElement(record("b", 4d, "Hi", null))
testHarness.setProcessingTime(32000L)
testHarness.processElement(record(null, 7d, null, null))
testHarness.setProcessingTime(34000L)
testHarness.processElement(record("b", 3d, "Comment#3", null))
testHarness.setProcessingTime(50000L)
}
private def record(args: Any*): StreamRecord[RowData] = {
val objs = args.map {
case l: Long => Long.box(l)
case d: Double => Double.box(d)
case arg@_ => arg.asInstanceOf[Object]
}.toArray
binaryRecord(INSERT, objs: _*)
}
private def localMills(dateTime: String): TimestampData = {
val windowDateTime = LocalDateTime.parse(dateTime).atZone(UTC_ZONE_ID)
TimestampData.fromEpochMillis(
toUtcTimestampMills(windowDateTime.toInstant.toEpochMilli, shiftTimeZone))
}
private def mills(dateTime: String): TimestampData = {
val windowDateTime = LocalDateTime.parse(dateTime).atZone(UTC_ZONE_ID)
TimestampData.fromEpochMillis(windowDateTime.toInstant.toEpochMilli)
}
}
object WindowTableFunctionHarnessTest {
@Parameterized.Parameters(name = "StateBackend={0}, TimeZone={1}")
def parameters(): JCollection[Array[java.lang.Object]] = {
Seq[Array[AnyRef]](
Array(HEAP_BACKEND, ZoneId.of("UTC")),
Array(HEAP_BACKEND, ZoneId.of("Asia/Shanghai")),
Array(ROCKSDB_BACKEND, ZoneId.of("UTC")),
Array(ROCKSDB_BACKEND, ZoneId.of("Asia/Shanghai")))
}
}
| apache/flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/harness/WindowTableFunctionHarnessTest.scala | Scala | apache-2.0 | 18,523 |
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._
import org.openqa.selenium.htmlunit.HtmlUnitDriver
import play.api.test._
import play.api.test.Helpers._
/**
* Add your integration spec here.
* An integration test will fire up a whole Play application in a real (or headless) browser.
*/
@RunWith(classOf[JUnitRunner])
class IntegrationSpec extends Specification {
"Application" should {
"work from within a browser" in new WithBrowser {
browser.goTo("http://localhost:" + port)
browser.pageSource must contain("Add Person")
}
}
}
| aevalo/dvdrental | test/IntegrationSpec.scala | Scala | mit | 596 |
package freetocompose.example
import scala.language.higherKinds
import cats.Monad
import cats.instances.list._
import cats.syntax.foldable._
object Utils {
def repeat[M[_]: Monad](times: Int)(fa: M[_]) = List.fill(times)(fa).sequence_
def iterateUntil[M[_]: Monad, A](pred: A ⇒ Boolean)(fa: M[A]): M[A] =
Monad[M].flatMap(fa)(y ⇒ if (pred(y)) Monad[M].pure(y) else iterateUntil(pred)(fa))
}
| msiegenthaler/free-to-compose | example/src/main/scala/freetocompose/example/Utils.scala | Scala | apache-2.0 | 405 |
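Both combinators abstract over any `cats.Monad`, so they can be exercised with `Eval`, whose `always` constructor re-runs its thunk on every evaluation. A minimal sketch; the mutable counter is only there to make the re-execution visible:

import cats.Eval
import freetocompose.example.Utils

var calls = 0
val step: Eval[Int] = Eval.always { calls += 1; calls }

// Re-runs `step` until the predicate holds: five evaluations here.
val result = Utils.iterateUntil[Eval, Int](_ >= 5)(step).value // 5

// Sequences the same action three more times, discarding the results.
Utils.repeat[Eval](3)(step).value // calls is now 8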
package mesosphere.marathon
import java.util.concurrent.CountDownLatch
import java.util.{ Timer, TimerTask }
import javax.inject.{ Inject, Named }
import akka.Done
import akka.actor.{ ActorRef, ActorSystem }
import akka.stream.Materializer
import akka.util.Timeout
import com.google.common.util.concurrent.AbstractExecutionThreadService
import mesosphere.marathon.MarathonSchedulerActor._
import mesosphere.marathon.core.deployment.{ DeploymentManager, DeploymentPlan, DeploymentStepInfo }
import mesosphere.marathon.core.election.{ ElectionCandidate, ElectionService }
import mesosphere.marathon.core.group.GroupManager
import mesosphere.marathon.core.heartbeat._
import mesosphere.marathon.core.instance.Instance
import mesosphere.marathon.core.leadership.LeadershipCoordinator
import mesosphere.marathon.core.storage.store.PersistenceStore
import mesosphere.marathon.state.{ AppDefinition, PathId, Timestamp }
import mesosphere.marathon.storage.migration.Migration
import mesosphere.marathon.stream.Sink
import mesosphere.util.PromiseActor
import org.apache.mesos.SchedulerDriver
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
import scala.concurrent.{ Await, Future }
import scala.util.Failure
/**
* PrePostDriverCallback is implemented by callback receivers which have to listen for driver
* start/stop events
*/
trait PrePostDriverCallback {
/**
* Will get called _before_ the driver is running, but after migration.
*/
def preDriverStarts: Future[Unit]
/**
* Will get called _after_ the driver terminated
*/
def postDriverTerminates: Future[Unit]
}
/**
* DeploymentService provides methods to deploy plans.
*/
// TODO (AD): do we need this trait?
trait DeploymentService {
/**
* Deploy a plan.
* @param plan the plan to deploy.
* @param force only one deployment can be applied at a time. With this flag
* one can stop a current deployment and start a new one.
* @return a failed future if the deployment failed.
*/
def deploy(plan: DeploymentPlan, force: Boolean = false): Future[Done]
def listRunningDeployments(): Future[Seq[DeploymentStepInfo]]
}
/**
* Wrapper class for the scheduler
*/
class MarathonSchedulerService @Inject() (
persistenceStore: PersistenceStore[_, _, _],
leadershipCoordinator: LeadershipCoordinator,
config: MarathonConf,
electionService: ElectionService,
prePostDriverCallbacks: Seq[PrePostDriverCallback],
groupManager: GroupManager,
driverFactory: SchedulerDriverFactory,
system: ActorSystem,
migration: Migration,
deploymentManager: DeploymentManager,
@Named("schedulerActor") schedulerActor: ActorRef,
@Named(ModuleNames.MESOS_HEARTBEAT_ACTOR) mesosHeartbeatActor: ActorRef)(implicit mat: Materializer)
extends AbstractExecutionThreadService with ElectionCandidate with DeploymentService {
import mesosphere.marathon.core.async.ExecutionContexts.global
implicit val zkTimeout = config.zkTimeoutDuration
val isRunningLatch = new CountDownLatch(1)
// Time to wait before trying to reconcile app tasks after driver starts
val reconciliationInitialDelay =
Duration(config.reconciliationInitialDelay(), MILLISECONDS)
// Interval between task reconciliation operations
val reconciliationInterval =
Duration(config.reconciliationInterval(), MILLISECONDS)
// Time to wait before trying to scale apps after driver starts
val scaleAppsInitialDelay =
Duration(config.scaleAppsInitialDelay(), MILLISECONDS)
// Interval between attempts to scale apps
val scaleAppsInterval =
Duration(config.scaleAppsInterval(), MILLISECONDS)
private[mesosphere] var timer = newTimer()
val log = LoggerFactory.getLogger(getClass.getName)
// This is a little ugly as we are using a mutable variable. But drivers can't
// be reused (i.e. once stopped they can't be started again). Thus,
// we have to allocate a new driver before each run or after each stop.
var driver: Option[SchedulerDriver] = None
implicit val timeout: Timeout = 5.seconds
protected def newTimer() = new Timer("marathonSchedulerTimer")
def deploy(plan: DeploymentPlan, force: Boolean = false): Future[Done] = {
log.info(s"Deploy plan with force=$force:\\n$plan ")
val future: Future[Any] = PromiseActor.askWithoutTimeout(system, schedulerActor, Deploy(plan, force))
future.map {
case DeploymentStarted(_) => Done
case DeploymentFailed(_, t) => throw t
}
}
def cancelDeployment(plan: DeploymentPlan): Unit =
schedulerActor ! CancelDeployment(plan)
def listAppVersions(appId: PathId): Seq[Timestamp] =
Await.result(groupManager.appVersions(appId).map(Timestamp(_)).runWith(Sink.seq), config.zkTimeoutDuration)
def listRunningDeployments(): Future[Seq[DeploymentStepInfo]] =
deploymentManager.list()
def getApp(appId: PathId, version: Timestamp): Option[AppDefinition] = {
Await.result(groupManager.appVersion(appId, version.toOffsetDateTime), config.zkTimeoutDuration)
}
def killInstances(
appId: PathId,
instances: Seq[Instance]): Unit = {
schedulerActor ! KillTasks(appId, instances)
}
//Begin Service interface
override def startUp(): Unit = {
log.info("Starting up")
super.startUp()
}
override def run(): Unit = {
log.info("Beginning run")
// The first thing we do is offer our leadership.
electionService.offerLeadership(this)
// Block on the latch which will be countdown only when shutdown has been
// triggered. This is to prevent run()
// from exiting.
scala.concurrent.blocking {
isRunningLatch.await()
}
log.info("Completed run")
}
override def triggerShutdown(): Unit = synchronized {
log.info("Shutdown triggered")
electionService.abdicateLeadership()
stopDriver()
log.info("Cancelling timer")
timer.cancel()
// The countdown latch blocks run() from exiting. Counting down the latch removes the block.
log.info("Removing the blocking of run()")
isRunningLatch.countDown()
super.triggerShutdown()
}
private[this] def stopDriver(): Unit = synchronized {
// many are the assumptions concerning when this is invoked. see startLeadership, stopLeadership,
// triggerShutdown.
log.info("Stopping driver")
// Stopping the driver will cause the driver run() method to return.
driver.foreach(_.stop(true)) // failover = true
// signals that the driver was stopped manually (as opposed to crashing mid-process)
driver = None
}
//End Service interface
//Begin ElectionCandidate interface
override def startLeadership(): Unit = synchronized {
log.info("As new leader running the driver")
// allow interactions with the persistence store
persistenceStore.markOpen()
// Before reading to and writing from the storage, let's ensure that
// no stale values are read from the persistence store.
// Although in case of ZK it is done at the time of creation of CuratorZK,
// it is better to be safe than sorry.
Await.result(persistenceStore.sync(), Duration.Inf)
refreshCachesAndDoMigration()
// run all pre-driver callbacks
log.info(s"""Call preDriverStarts callbacks on ${prePostDriverCallbacks.mkString(", ")}""")
Await.result(
Future.sequence(prePostDriverCallbacks.map(_.preDriverStarts)),
config.onElectedPrepareTimeout().millis
)
log.info("Finished preDriverStarts callbacks")
// start all leadership coordination actors
Await.result(leadershipCoordinator.prepareForStart(), config.maxActorStartupTime().milliseconds)
// create new driver
driver = Some(driverFactory.createDriver())
// start timers
schedulePeriodicOperations()
// The following block asynchronously runs the driver. Note that driver.run()
// blocks until the driver has been stopped (or aborted).
Future {
scala.concurrent.blocking {
driver.foreach(_.run())
}
} onComplete { result =>
synchronized {
log.info(s"Driver future completed with result=$result.")
result match {
case Failure(t) => log.error("Exception while running driver", t)
case _ =>
}
// ONLY do this if there's some sort of driver crash: avoid invoking abdication logic if
// the driver was stopped via stopDriver. stopDriver only happens when
// 1. we're being terminated (and have already abdicated)
// 2. we've lost leadership (no need to abdicate if we've already lost)
driver.foreach { _ =>
electionService.abdicateLeadership()
}
driver = None
log.info(s"Call postDriverRuns callbacks on ${prePostDriverCallbacks.mkString(", ")}")
Await.result(Future.sequence(prePostDriverCallbacks.map(_.postDriverTerminates)), config.zkTimeoutDuration)
log.info("Finished postDriverRuns callbacks")
}
}
}
private def refreshCachesAndDoMigration(): Unit = {
// GroupManager and GroupRepository hold in-memory caches of the root group. The cache is loaded when it is first accessed.
// This is unfortunate, because each Marathon instance logs the number of groups during startup through Kamon,
// so the root group state is loaded from ZooKeeper as soon as the instance starts.
// When the instance is later elected leader, this cache is still in the state it had at startup time,
// so we need to re-load the root group from ZooKeeper when becoming leader.
// The same is true after the migration: a migration or a restore also changes the ZooKeeper state, but does not
// update the internally held caches, so they need to be refreshed after the migration as well.
// In short, we need to refresh twice: before the migration, so it runs against the current ZooKeeper state, and after
// the migration, so Marathon loads the current valid state into its internal caches.
// refresh group repository cache
Await.result(groupManager.invalidateGroupCache(), Duration.Inf)
// execute tasks, only the leader is allowed to
migration.migrate()
// refresh group repository again - migration or restore might changed zk state, this needs to be re-loaded
Await.result(groupManager.invalidateGroupCache(), Duration.Inf)
}
override def stopLeadership(): Unit = synchronized {
// invoked by election service upon loss of leadership (state transitioned to Idle)
log.info("Lost leadership")
// disallow any interaction with the persistence storage
persistenceStore.markClosed()
leadershipCoordinator.stop()
val oldTimer = timer
timer = newTimer()
oldTimer.cancel()
driver.foreach { driverInstance =>
mesosHeartbeatActor ! Heartbeat.MessageDeactivate(MesosHeartbeatMonitor.sessionOf(driverInstance))
// Our leadership has been defeated. Thus, stop the driver.
stopDriver()
}
}
//End ElectionDelegate interface
private def schedulePeriodicOperations(): Unit = synchronized {
timer.schedule(
new TimerTask {
def run(): Unit = {
if (electionService.isLeader) {
schedulerActor ! ScaleRunSpecs
} else log.info("Not leader therefore not scaling apps")
}
},
scaleAppsInitialDelay.toMillis,
scaleAppsInterval.toMillis
)
timer.schedule(
new TimerTask {
def run(): Unit = {
if (electionService.isLeader) {
schedulerActor ! ReconcileTasks
schedulerActor ! ReconcileHealthChecks
} else log.info("Not leader therefore not reconciling tasks")
}
},
reconciliationInitialDelay.toMillis,
reconciliationInterval.toMillis
)
}
}
| janisz/marathon | src/main/scala/mesosphere/marathon/MarathonSchedulerService.scala | Scala | apache-2.0 | 11,873 |
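Each task scheduled by `schedulePeriodicOperations` re-checks `electionService.isLeader` on every tick, so a stale timer fire after losing leadership is a no-op. A stripped-down sketch of that guard pattern with a plain `java.util.Timer`; all names are illustrative:

import java.util.{Timer, TimerTask}

class PeriodicLeaderTask(isLeader: () => Boolean)(work: () => Unit) {
  private val timer = new Timer("periodic-leader-task")

  def schedule(initialDelayMs: Long, intervalMs: Long): Unit =
    timer.schedule(new TimerTask {
      // each tick re-checks leadership instead of relying solely on cancellation
      def run(): Unit = if (isLeader()) work()
    }, initialDelayMs, intervalMs)

  def cancel(): Unit = timer.cancel()
}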
package test.substance
import org.scalatest.Spec
import org.scalatest.matchers.ShouldMatchers
import vog.substance.Substance
import swing.{Graphics2D}
import org.scalatest.mock.MockitoSugar
import org.mockito.Mockito._
import vog.cache.Image
import java.awt.image.{BufferedImage, ImageObserver}
/**
* @author Ivyl
*/
class SubstanceSpec extends Spec with ShouldMatchers with MockitoSugar {
describe("Substance") {
describe("working methods") {
class TestSubstance extends Substance {
var image: Option[Image] = None
var run = false
var painted = false
protected def behavior { run = true }
override protected def paint(g: Graphics2D, observer: ImageObserver) {
super.paint(g, observer)
painted = true
}
}
val substance = new TestSubstance
it("should invoke behavior on every behave") {
substance.behave
substance.run should be (true)
}
it("should invoke paint when drawing") {
val graphics = mock[Graphics2D]
val observer = mock[ImageObserver]
substance.draw(graphics, observer)
substance.painted should be (true)
}
it("should not paint on graphics when drawing and image is not set") {
val graphics = mock[Graphics2D]
val observer = mock[ImageObserver]
substance.draw(graphics, observer)
verifyZeroInteractions(graphics)
}
it("should paint on graphics when drawing and image is set") {
val graphics = mock[Graphics2D]
val observer = mock[ImageObserver]
val image = new Image(mock[BufferedImage])
substance.image = Some(image)
substance.x = 343
substance.y = 21
substance.draw(graphics, observer)
verify(graphics).drawImage(image.image, substance.x.toInt, substance.y.toInt, observer)
}
describe("thread safety") {
pending
}
}
}
}
| ivyl/vog-engine | test/substance/SubstanceSpec.scala | Scala | mit | 1,963 |
package com.art4ul.algorithms.sort
class InsertionSort[T <% Ordered[T]]{
def swap(array: Array[T], firstIndex: Int, secondIndex: Int): Unit = {
val tmp: T = array(firstIndex)
array(firstIndex) = array(secondIndex)
array(secondIndex) = tmp
}
def sort(array: Array[T]): Unit = {
for (i <- 1 until array.length) {
var j = i
while (j > 0 && array(j) < array(j - 1)) {
swap(array, j, j - 1)
j -= 1
}
}
}
}
| art4ul/AlgorithmSandbox | src/main/scala/com/art4ul/algorithms/sort/InsertionSort.scala | Scala | apache-2.0 | 458 |
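Insertion sort shifts each element leftwards through adjacent swaps until it meets a smaller neighbor: O(n^2) comparisons in the worst case, but O(n) on already-sorted input. A quick usage sketch; the view bound means any type with an `Ordered` conversion, such as `Int`, can be sorted in place:

object InsertionSortDemo extends App {
  val numbers = Array(5, 2, 9, 1, 3)
  new InsertionSort[Int].sort(numbers) // sorts the array in place
  println(numbers.mkString(", "))      // 1, 2, 3, 5, 9
}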
package com.twitter.finagle
import com.github.benmanes.caffeine.cache.{CacheLoader, Caffeine, LoadingCache}
import com.twitter.cache.caffeine.CaffeineCache
import com.twitter.concurrent.AsyncSemaphore
import com.twitter.conversions.time._
import com.twitter.finagle.stats.{DefaultStatsReceiver, StatsReceiver}
import com.twitter.finagle.util.{DefaultTimer, InetSocketAddressUtil, Updater}
import com.twitter.logging.Logger
import com.twitter.util.{Await, Closable, Var, _}
import java.net.{InetAddress, InetSocketAddress, UnknownHostException}
private[finagle] class DnsResolver(statsReceiver: StatsReceiver, resolvePool: FuturePool)
extends (String => Future[Seq[InetAddress]]) {
private[this] val dnsLookupFailures = statsReceiver.counter("dns_lookup_failures")
private[this] val dnsLookups = statsReceiver.counter("dns_lookups")
private[this] val log = Logger()
// Resolve hostnames asynchronously and concurrently.
private[this] val dnsCond = new AsyncSemaphore(100)
private[this] val waitersGauge = statsReceiver.addGauge("queue_size") { dnsCond.numWaiters }
private[this] val Loopback = Future.value(Seq(InetAddress.getLoopbackAddress))
override def apply(host: String): Future[Seq[InetAddress]] = {
if (host.isEmpty || host == "localhost") {
// Avoid using the thread pool to resolve localhost. Ideally we
// would always do that if the hostname is an IP address, but there is
// no native API to determine whether that is the case. localhost can
// safely be treated specially here, see rfc6761 section 6.3.3.
Loopback
} else {
dnsLookups.incr()
dnsCond.acquire().flatMap { permit =>
resolvePool(InetAddress.getAllByName(host).toSeq)
.onFailure { e =>
log.debug(s"Failed to resolve $host. Error $e")
dnsLookupFailures.incr()
}
.ensure { permit.release() }
}
}
}
}
/**
* Resolver for inet scheme.
*/
object InetResolver {
def apply(): Resolver = apply(DefaultStatsReceiver)
def apply(resolvePool: FuturePool): Resolver = apply(DefaultStatsReceiver, resolvePool)
def apply(unscopedStatsReceiver: StatsReceiver): Resolver =
apply(unscopedStatsReceiver, FuturePool.unboundedPool)
def apply(unscopedStatsReceiver: StatsReceiver, resolvePool: FuturePool): Resolver = {
val statsReceiver = unscopedStatsReceiver.scope("inet").scope("dns")
new InetResolver(
new DnsResolver(statsReceiver, resolvePool),
statsReceiver,
Some(5.seconds),
resolvePool
)
}
}
private[finagle] class InetResolver(
resolveHost: String => Future[Seq[InetAddress]],
statsReceiver: StatsReceiver,
pollIntervalOpt: Option[Duration],
resolvePool: FuturePool
) extends Resolver {
import InetSocketAddressUtil._
type HostPortMetadata = (String, Int, Addr.Metadata)
val scheme = "inet"
private[this] val latencyStat = statsReceiver.stat("lookup_ms")
private[this] val successes = statsReceiver.counter("successes")
private[this] val failures = statsReceiver.counter("failures")
private[this] val log = Logger()
private[this] val timer = DefaultTimer
/**
* Resolve all hostnames and merge into a final Addr.
* If all lookups are unknown hosts, returns Addr.Neg.
* If all lookups fail with unexpected errors, returns Addr.Failed.
* If any lookup succeeds the final result will be Addr.Bound
* with the successful results.
*/
def toAddr(hp: Seq[HostPortMetadata]): Future[Addr] = {
val elapsed = Stopwatch.start()
Future
.collectToTry(hp.map {
case (host, port, meta) =>
resolveHost(host).map { inetAddrs =>
inetAddrs.map { inetAddr =>
Address.Inet(new InetSocketAddress(inetAddr, port), meta)
}
}
})
.flatMap { seq: Seq[Try[Seq[Address]]] =>
// Filter out all successes. If there was at least 1 success, consider
// the entire operation a success
val results = seq.collect {
case Return(subset) => subset
}.flatten
// Consider any result a success. Ignore partial failures.
if (results.nonEmpty) {
successes.incr()
latencyStat.add(elapsed().inMilliseconds)
Future.value(Addr.Bound(results.toSet))
} else {
// Either no hosts or resolution failed for every host
failures.incr()
latencyStat.add(elapsed().inMilliseconds)
log.debug(s"Resolution failed for all hosts in $hp")
seq.collectFirst {
case Throw(e) => e
} match {
case Some(_: UnknownHostException) => Future.value(Addr.Neg)
case Some(e) => Future.value(Addr.Failed(e))
case None => Future.value(Addr.Bound(Set[Address]()))
}
}
}
}
def bindHostPortsToAddr(hosts: Seq[HostPortMetadata]): Var[Addr] = {
Var.async(Addr.Pending: Addr) { u =>
toAddr(hosts) onSuccess { u() = _ }
pollIntervalOpt match {
case Some(pollInterval) =>
val updater = new Updater[Unit] {
val one = Seq(())
// Just perform one update at a time.
protected def preprocess(elems: Seq[Unit]) = one
protected def handle(unit: Unit) {
// This always runs in a thread pool; it's okay to block.
u() = Await.result(toAddr(hosts))
}
}
timer.schedule(pollInterval.fromNow, pollInterval) {
resolvePool(updater(()))
}
case None =>
Closable.nop
}
}
}
/**
* Binds to the specified hostnames, and refreshes the DNS information periodically.
*/
def bind(hosts: String): Var[Addr] = Try(parseHostPorts(hosts)) match {
case Return(hp) =>
bindHostPortsToAddr(hp.map {
case (host, port) =>
(host, port, Addr.Metadata.empty)
})
case Throw(exc) =>
Var.value(Addr.Failed(exc))
}
}
/**
* InetResolver that caches all successful DNS lookups indefinitely
* and does not poll for updates.
*
* Clients should only use this in scenarios where host -> IP map changes
* do not occur.
*/
object FixedInetResolver {
private[this] val log = Logger()
val scheme = "fixedinet"
def apply(): InetResolver =
apply(DefaultStatsReceiver)
def apply(unscopedStatsReceiver: StatsReceiver): InetResolver =
apply(unscopedStatsReceiver, 16000)
def apply(unscopedStatsReceiver: StatsReceiver, maxCacheSize: Long): InetResolver =
apply(unscopedStatsReceiver, maxCacheSize, Stream.empty, DefaultTimer)
/**
* Uses a [[com.twitter.util.Future]] cache to memoize lookups.
*
* @param maxCacheSize Specifies the maximum number of `Futures` that can be cached.
* No maximum size limit if Long.MaxValue.
* @param backoffs Optionally retry DNS resolution failures using this sequence of
* durations for backoff. Stream.empty means don't retry.
*/
def apply(
unscopedStatsReceiver: StatsReceiver,
maxCacheSize: Long,
backoffs: Stream[Duration],
timer: Timer
): InetResolver = {
val statsReceiver = unscopedStatsReceiver.scope("inet").scope("dns")
new FixedInetResolver(
cache(
new DnsResolver(statsReceiver, FuturePool.unboundedPool),
maxCacheSize,
backoffs,
timer
),
statsReceiver
)
}
// A size-bounded FutureCache backed by a LoaderCache
private[finagle] def cache(
resolveHost: String => Future[Seq[InetAddress]],
maxCacheSize: Long,
backoffs: Stream[Duration] = Stream.empty,
timer: Timer = DefaultTimer
): LoadingCache[String, Future[Seq[InetAddress]]] = {
val cacheLoader = new CacheLoader[String, Future[Seq[InetAddress]]]() {
def load(host: String): Future[Seq[InetAddress]] = {
// Optionally retry failed DNS resolutions with specified backoff.
def retryingLoad(nextBackoffs: Stream[Duration]): Future[Seq[InetAddress]] = {
resolveHost(host).rescue {
case exc: UnknownHostException =>
nextBackoffs match {
case nextBackoff #:: restBackoffs =>
log.debug(
s"Caught UnknownHostException resolving host '$host'. Retrying in $nextBackoff..."
)
Future.sleep(nextBackoff)(timer).before(retryingLoad(restBackoffs))
case Stream.Empty =>
Future.exception(exc)
}
}
}
retryingLoad(backoffs)
}
}
var builder = Caffeine
.newBuilder()
.recordStats()
if (maxCacheSize != Long.MaxValue) {
builder = builder.maximumSize(maxCacheSize)
}
builder.build(cacheLoader)
}
}
/**
* Uses a [[com.twitter.util.Future]] cache to memoize lookups.
*
* @param cache The lookup cache
*/
private[finagle] class FixedInetResolver(
cache: LoadingCache[String, Future[Seq[InetAddress]]],
statsReceiver: StatsReceiver
) extends InetResolver(
CaffeineCache.fromLoadingCache(cache),
statsReceiver,
None,
FuturePool.unboundedPool
) {
override val scheme = FixedInetResolver.scheme
private[this] val cacheStatsReceiver = statsReceiver.scope("cache")
private[this] val cacheGauges = Seq(
cacheStatsReceiver.addGauge("size") { cache.estimatedSize },
cacheStatsReceiver.addGauge("evicts") { cache.stats().evictionCount },
cacheStatsReceiver.addGauge("hit_rate") { cache.stats().hitRate.toFloat }
)
}
| mkhq/finagle | finagle-core/src/main/scala/com/twitter/finagle/InetResolver.scala | Scala | apache-2.0 | 9,551 |
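`FixedInetResolver` caches successful lookups forever, so it is typically constructed once with a bounded cache and, optionally, a backoff schedule for transient DNS failures. A minimal sketch using the four-argument `apply` shown above; the class is `private[finagle]`, so this assumes calling code under `com.twitter.finagle`, and the host name is illustrative:

import com.twitter.conversions.time._
import com.twitter.finagle.FixedInetResolver
import com.twitter.finagle.stats.DefaultStatsReceiver
import com.twitter.finagle.util.DefaultTimer
import com.twitter.util.Duration

// Retry failed lookups up to three times with exponential backoff: 1s, 2s, 4s.
val backoffs: Stream[Duration] = Stream.iterate(1.second)(_ * 2).take(3)

val resolver = FixedInetResolver(DefaultStatsReceiver, 16000, backoffs, DefaultTimer)

// The first observation triggers a DNS lookup; later ones hit the cache.
val addr = resolver.bind("api.example.com:443")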
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.metadata
import com.google.common.collect.ImmutableList
import org.apache.calcite.rel.logical.{LogicalFilter, LogicalProject, LogicalValues}
import org.apache.calcite.rel.{RelCollation, RelCollations, RelFieldCollation}
import org.apache.calcite.sql.`type`.SqlTypeName
import org.apache.calcite.sql.fun.SqlStdOperatorTable.{LESS_THAN, PLUS}
import org.junit.Assert.assertEquals
import org.junit.Test
import scala.collection.JavaConversions._
class FlinkRelMdRowCollationTest extends FlinkRelMdHandlerTestBase {
protected lazy val collationValues: LogicalValues = {
val valuesType = relBuilder.getTypeFactory
.builder()
.add("a", SqlTypeName.BIGINT)
.add("b", SqlTypeName.DOUBLE)
.add("c", SqlTypeName.BOOLEAN)
.add("d", SqlTypeName.INTEGER)
.build()
val tupleList = List(
List("1", "9.0", "true", "2"),
List("2", "6.0", "false", "3"),
List("3", "3.0", "true", "4")
).map(createLiteralList(valuesType, _))
relBuilder.clear()
relBuilder.values(tupleList, valuesType)
relBuilder.build().asInstanceOf[LogicalValues]
}
@Test
def testCollationsOnTableScan(): Unit = {
Array(studentLogicalScan, studentBatchScan, studentStreamScan).foreach { scan =>
assertEquals(ImmutableList.of(), mq.collations(scan))
}
}
@Test
def testCollationsOnValues(): Unit = {
assertEquals(ImmutableList.of(RelCollations.of(6)), mq.collations(logicalValues))
assertEquals(
ImmutableList.of(
convertToRelCollation(List.range(0, 8)),
convertToRelCollation(List.range(1, 8)),
convertToRelCollation(List.range(2, 8)),
convertToRelCollation(List.range(3, 8)),
convertToRelCollation(List.range(4, 8)),
convertToRelCollation(List.range(5, 8)),
convertToRelCollation(List.range(6, 8)),
convertToRelCollation(List.range(7, 8))
),
mq.collations(emptyValues))
assertEquals(
ImmutableList.of(convertToRelCollation(List.range(0, 4)), RelCollations.of(3)),
mq.collations(collationValues))
}
@Test
def testCollationsOnProject(): Unit = {
assertEquals(ImmutableList.of(), mq.collations(logicalProject))
val project: LogicalProject = {
relBuilder.push(collationValues)
val projects = List(
// a + b
relBuilder.call(PLUS, relBuilder.field(0), relBuilder.literal(1)),
// c
relBuilder.field(2),
// d
relBuilder.field(3),
// 2
rexBuilder.makeLiteral(2L, longType, true)
)
relBuilder.project(projects).build().asInstanceOf[LogicalProject]
}
assertEquals(ImmutableList.of(RelCollations.of(2)), mq.collations(project))
}
@Test
def testCollationsOnFilter(): Unit = {
assertEquals(ImmutableList.of(), mq.collations(logicalFilter))
relBuilder.push(studentLogicalScan)
val filter: LogicalFilter = {
relBuilder.push(collationValues)
// a < 10
val expr = relBuilder.call(LESS_THAN, relBuilder.field(0), relBuilder.literal(10))
relBuilder.filter(expr).build.asInstanceOf[LogicalFilter]
}
assertEquals(
ImmutableList.of(convertToRelCollation(List.range(0, 4)), RelCollations.of(3)),
mq.collations(filter))
}
@Test
def testCollationsOnExpand(): Unit = {
Array(logicalExpand, flinkLogicalExpand, batchExpand, streamExpand).foreach {
expand => assertEquals(ImmutableList.of(), mq.collations(expand))
}
}
@Test
def testCollationsOnExchange(): Unit = {
Array(batchExchange, streamExchange).foreach {
exchange => assertEquals(ImmutableList.of(), mq.collations(exchange))
}
}
@Test
def testCollationsOnRank(): Unit = {
Array(logicalRank, flinkLogicalRank, batchLocalRank, streamRank).foreach {
rank => assertEquals(ImmutableList.of(), mq.collations(rank))
}
}
@Test
def testCollationsOnSort(): Unit = {
Array(logicalSort, flinkLogicalSort, batchSort, streamSort,
logicalSortLimit, flinkLogicalSortLimit, batchSortLimit, streamSortLimit).foreach { sort =>
assertEquals(
ImmutableList.of(RelCollations.of(
new RelFieldCollation(6),
new RelFieldCollation(2, RelFieldCollation.Direction.DESCENDING))),
mq.collations(sort))
}
    Array(logicalLimit, flinkLogicalLimit, batchLimit, streamLimit).foreach { limit =>
assertEquals(ImmutableList.of(RelCollations.of()), mq.collations(limit))
}
}
@Test
def testCollationsOnWindow(): Unit = {
assertEquals(ImmutableList.of(), mq.collations(flinkLogicalOverAgg))
}
@Test
def testCollationsOnAggregate(): Unit = {
Array(logicalAgg, flinkLogicalAgg, batchGlobalAggWithLocal, batchGlobalAggWithoutLocal,
batchLocalAgg).foreach {
agg => assertEquals(ImmutableList.of(), mq.collations(agg))
}
}
@Test
def testCollationsOnJoin(): Unit = {
Array(logicalInnerJoinOnUniqueKeys, logicalLeftJoinNotOnUniqueKeys,
logicalRightJoinOnRHSUniqueKeys, logicalFullJoinWithoutEquiCond,
logicalSemiJoinOnLHSUniqueKeys, logicalAntiJoinOnRHSUniqueKeys).foreach {
join => assertEquals(ImmutableList.of(), mq.collations(join))
}
}
@Test
def testCollationsOnUnion(): Unit = {
Array(logicalUnion, logicalUnionAll).foreach {
union => assertEquals(ImmutableList.of(), mq.collations(union))
}
}
@Test
def testCollationsOnIntersect(): Unit = {
Array(logicalIntersect, logicalIntersectAll).foreach {
intersect => assertEquals(ImmutableList.of(), mq.collations(intersect))
}
}
@Test
def testCollationsOnMinus(): Unit = {
Array(logicalMinus, logicalMinusAll).foreach {
minus => assertEquals(ImmutableList.of(), mq.collations(minus))
}
}
@Test
def testCollationsOnDefault(): Unit = {
assertEquals(ImmutableList.of(), mq.collations(testRel))
}
private def convertToRelCollation(relFieldCollations: List[Int]): RelCollation = {
RelCollations.of(relFieldCollations.map(i => new RelFieldCollation(i)): _*)
}
}
|
hequn8128/flink
|
flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/metadata/FlinkRelMdRowCollationTest.scala
|
Scala
|
apache-2.0
| 6,870
|
package io.apibuilder.validation
import io.apibuilder.spec.v0.models._
/**
* A cache of all of the operations defined in the list of services. Allows
* for resolution from a Method and Path to the service in which that operation
* is defined.
*
* As paths can be dynamic, it's difficult to precache the exact resolution
 * of a path to a service. This cache is internally optimized by splitting the
 * path on '/' and using the number of parts to select only the subset
* of operations with the same number of parts. We then iterate through
* this subset to select the specific operation.
*/
private[validation] case class ServiceOperationCache(
services: List[ApiBuilderService]
)(
acceptPath: String => Boolean
) {
private[this] case class Entry(route: Route, operation: ApiBuilderOperation)
private[this] val entries: List[Entry] = {
services.flatMap { s =>
s.service.resources.flatMap(_.operations)
.filter { op => acceptPath(op.path) }
.map { op =>
Entry(
Route(op.method, op.path),
ApiBuilderOperation(s, op)
)
}
}
}
private[this] val entriesByNumberSlashes: Map[Int, List[Entry]] = entries.groupBy { e =>
numberSlashes(e.route.path)
}
private[this] def numberSlashes(path: String): Int = path.count(_ == '/')
def findOperation(method: Method, path: String): Option[ApiBuilderOperation] = {
entriesByNumberSlashes.getOrElse(numberSlashes(path), List.empty)
.find(_.route.matches(method, path))
.map(_.operation)
}
}
case class ServiceOperationResolver(services: List[ApiBuilderService]) {
private[this] val static = ServiceOperationCache(services)(Route.isStatic)
private[this] val dynamic = ServiceOperationCache(services)(Route.isDynamic)
// If we find a static path in any service, return that one.
// Otherwise return the first matching service. This handles ambiguity:
// - service 1 defines POST /:organization/tokens
// - service 2 defines POST /users/tokens
// We want to return service 2 when the path is /users/tokens
def findOperation(method: Method, path: String): Option[ApiBuilderOperation] = {
static.findOperation(method, path).orElse {
dynamic.findOperation(method, path)
}
}
}
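// A minimal usage sketch (the service list and paths are hypothetical, for illustration):
//   val resolver = ServiceOperationResolver(services)
//   // A static match such as POST /users/tokens wins over a dynamic
//   // POST /:organization/tokens defined in another service:
//   resolver.findOperation(Method.Post, "/users/tokens")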
|
flowcommerce/lib-apidoc-json-validation
|
src/main/scala/io/apibuilder/validation/ServiceOperationResolver.scala
|
Scala
|
mit
| 2,278
|
package com.twitter.finagle.client
import com.twitter.finagle._
import com.twitter.finagle.filter.RequestLogger
import com.twitter.finagle.naming.BindingFactory
import com.twitter.finagle.param._
import com.twitter.finagle.stack.nilStack
import com.twitter.finagle.util.Showable
/**
* The standard template implementation for
* [[com.twitter.finagle.client.StackClient]].
*
* @see The [[https://twitter.github.io/finagle/guide/Clients.html user guide]]
* for further details on Finagle clients and their configuration.
* @see [[StackClient.newStack]] for the default modules used by Finagle
* clients.
*/
trait EndpointerStackClient[Req, Rep, This <: EndpointerStackClient[Req, Rep, This]]
extends StackClient[Req, Rep]
with Stack.Parameterized[This]
with CommonParams[This]
with ClientParams[This]
with WithClientAdmissionControl[This]
with WithClientTransport[This]
with WithClientSession[This]
with WithSessionQualifier[This] {
/**
* Defines the service factory, which establishes connections to a remote
* peer on apply and returns a service which can write messages onto
* the wire and read them off of the wire.
*
* Concrete StackClient implementations are expected to specify this.
*/
protected def endpointer: Stackable[ServiceFactory[Req, Rep]]
def withStack(stack: Stack[ServiceFactory[Req, Rep]]): This =
copy1(stack = stack)
override def withStack(
fn: Stack[ServiceFactory[Req, Rep]] => Stack[ServiceFactory[Req, Rep]]
): This =
withStack(fn(stack))
/**
* Creates a new StackClient with `f` applied to `stack`.
*
* This is the same as [[withStack]].
*/
@deprecated(
"Use withStack(Stack[ServiceFactory[Req, Rep]] => Stack[ServiceFactory[Req, Rep]]) instead",
"2018-10-30"
)
def transformed(f: Stack[ServiceFactory[Req, Rep]] => Stack[ServiceFactory[Req, Rep]]): This =
withStack(f)
/**
* Creates a new StackClient with parameter `p`.
*/
override def configured[P: Stack.Param](p: P): This =
withParams(params + p)
/**
* Creates a new StackClient with parameter `psp._1` and Stack Param type `psp._2`.
*/
override def configured[P](psp: (P, Stack.Param[P])): This = {
val (p, sp) = psp
configured(p)(sp)
}
/**
* Creates a new StackClient with additional parameters `newParams`.
*/
override def configuredParams(newParams: Stack.Params): This = {
withParams(params ++ newParams)
}
/**
* Creates a new StackClient with `params` used to configure this StackClient's `stack`.
*/
def withParams(params: Stack.Params): This =
copy1(params = params)
/**
   * Prepends `filter` to the top of the client. That is, after materializing
   * the client (newClient/newService), `filter` will be the first element
   * through which requests flow. This is a familiar chaining combinator for filters and
* is particularly useful for `StdStackClient` implementations that don't expose
* services but instead wrap the resulting service with a rich API.
*/
def filtered(filter: Filter[Req, Rep, Req, Rep]): This = {
val role = Stack.Role(filter.getClass.getSimpleName)
val stackable = Filter.canStackFromFac.toStackable(role, filter)
withStack(stackable +: stack)
}
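  // A hedged sketch of `filtered` in use (the filter and client below are hypothetical):
  //   val logRequests = new SimpleFilter[Req, Rep] {
  //     def apply(req: Req, svc: Service[Req, Rep]) = { println(req); svc(req) }
  //   }
  //   val client = myStackClient.filtered(logRequests) // logRequests sees requests first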
/**
* A copy constructor in lieu of defining StackClient as a
* case class.
*/
protected def copy1(
stack: Stack[ServiceFactory[Req, Rep]] = this.stack,
params: Stack.Params = this.params
): This
/**
* @inheritdoc
*
* @param label0 if an empty String is provided, then the label
* from the [[Label]] [[Stack.Params]] is used.
* If that is also an empty String, then `dest` is used.
*/
def newClient(dest: Name, label0: String): ServiceFactory[Req, Rep] = {
val stats = params[Stats].statsReceiver
val label1 = params[Label].label
// For historical reasons, we have two sources for identifying
// a client. The most recently set `label0` takes precedence.
val clientLabel = (label0, label1) match {
case (Label.Default, Label.Default) => Showable.show(dest)
case (Label.Default, l1) => l1
case _ => label0
}
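    // For example (hypothetical values): newClient(dest, "") with Label("users")
    // configured yields "users"; with no Label configured it falls back to
    // Showable.show(dest); newClient(dest, "users2") always yields "users2".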
val clientStack = {
val baseStack = stack ++ (endpointer +: nilStack)
params[RequestLogger.Param] match {
case RequestLogger.Param.Enabled =>
          val transformer = RequestLogger.newStackTransformer(clientLabel)
          transformer(baseStack)
case RequestLogger.Param.Disabled =>
baseStack
}
}
val clientParams = params +
Label(clientLabel) +
Stats(stats.scope(clientLabel)) +
BindingFactory.Dest(dest)
clientStack.make(clientParams)
}
def newService(dest: Name, label: String): Service[Req, Rep] = {
val client = copy1(
params = params + FactoryToService.Enabled(true)
).newClient(dest, label)
new FactoryToService[Req, Rep](client)
}
}
|
luciferous/finagle
|
finagle-core/src/main/scala/com/twitter/finagle/client/EndpointerStackClient.scala
|
Scala
|
apache-2.0
| 4,957
|
package io.swagger.client.model
import io.swagger.client.core.ApiModel
import org.joda.time.DateTime
case class Inline_response_200_6 (
data: Option[Connector],
success: Option[Boolean])
extends ApiModel
|
QuantiModo/QuantiModo-SDK-Akka-Scala
|
src/main/scala/io/swagger/client/model/Inline_response_200_6.scala
|
Scala
|
gpl-2.0
| 216
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hbase.catalyst.expressions
import org.apache.spark.sql.catalyst.errors.TreeNodeException
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types._
import org.apache.spark.sql.hbase.types._
import org.apache.spark.sql.hbase.types.RangeType._
object PartialPredicateOperations {
// When the checkNullness argument of the partialReduce method is false, the partial
// reduction is nullness-based, i.e., uninterested columns are assigned nulls,
// which necessitates changes of the null handling from the normal evaluations
// of predicate expressions. The IsNull/IsNotNull will return indefinite results.
//
// When the checkNullness argument of the partialReduce method is true, the "is null"
// and "is not null" will return true or false in a definite manner; while other expressions
// will evaluate to indefinite values.
//
// The whole mechanism is based upon the fact that any expression will evaluate to null
// if any of its operands is null.
//
// There are 3 possible results: TRUE, FALSE, and MAYBE represented by a predicate
// which will be used to further filter the results
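  // An illustrative sketch (hypothetical columns, for intuition only): for a row where
  // column `a` is bound to 5 and column `b` is uninterested (null), the predicate
  // `a > 1 AND b > 2` partially reduces to MAYBE, i.e. (null, b > 2), keeping `b > 2`
  // as the residual filter; `a < 1 AND b > 2` reduces definitively to FALSE, (false, null).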
implicit class partialPredicateReducer(e: Expression) {
/**
     * @param e the expression whose bound references are to be unbound
     * @param schema the schema of 'e'
     * @return 'e' with each bound reference replaced by its original attribute
*/
def unboundAttributeReference(e: Expression, schema: Seq[Attribute]): Expression = {
e transform {
case b: BoundReference => schema(b.ordinal)
}
}
/**
*
     * @param input the row to evaluate against
     * @param schema the schema of 'e'
     * @param checkNull the flag to check whether the partial evaluation is
     *                  for nullness checking purposes or not
     * @return a pair of the partial evaluation result (true, false, or null for MAYBE)
     *         and the residual predicate to apply as a further filter
*/
def partialReduce(input: Row, schema: Seq[Attribute], checkNull: Boolean = false):
(Any, Expression) = {
e match {
case And(left, right) =>
val l = left.partialReduce(input, schema)
if (l._1 == false) {
(false, null)
} else {
val r = right.partialReduce(input, schema)
if (r._1 == false) {
(false, null)
} else {
(l._1, r._1) match {
case (true, true) => (true, null)
case (true, _) => (null, r._2)
case (_, true) => (null, l._2)
case (_, _) =>
if ((l._2 fastEquals left) && (r._2 fastEquals right)) {
(null, unboundAttributeReference(e, schema))
} else {
(null, And(l._2, r._2))
}
case _ => sys.error("unexpected child type(s) in partial reduction")
}
}
}
case Or(left, right) =>
val l = left.partialReduce(input, schema)
if (l._1 == true) {
(true, null)
} else {
val r = right.partialReduce(input, schema)
if (r._1 == true) {
(true, null)
} else {
(l._1, r._1) match {
case (false, false) => (false, null)
case (false, _) => (null, r._2)
case (_, false) => (null, l._2)
case (_, _) =>
if ((l._2 fastEquals left) && (r._2 fastEquals right)) {
(null, unboundAttributeReference(e, schema))
} else {
(null, Or(l._2, r._2))
}
case _ => sys.error("unexpected child type(s) in partial reduction")
}
}
}
case Not(child) =>
child.partialReduce(input, schema) match {
case (b: Boolean, null) => (!b, null)
case (null, ec: Expression) => if (ec fastEquals child) {
(null, unboundAttributeReference(e, schema))
} else {
(null, Not(ec))
}
}
case In(value, list) =>
val (evaluatedValue, expr) = value.partialReduce(input, schema)
if (evaluatedValue == null) {
            val evaluatedList = list.map(x => x.partialReduce(input, schema) match {
              case (null, residual: Expression) => residual
              case (d, _) => Literal.create(d, x.dataType)
            })
(null, In(expr, evaluatedList))
} else {
val evaluatedList: Seq[(Any, Expression)] = list.map(_.partialReduce(input, schema))
var foundInList = false
var newList = List[Expression]()
for (item <- evaluatedList if !foundInList) {
if (item._1 == null) {
newList = newList :+ item._2
            } else {
val cmp = prc2(input, value.dataType, item._2.dataType, evaluatedValue, item._1)
if (cmp.isDefined && cmp.get == 0) {
foundInList = true
} else if (cmp.isEmpty || (cmp.isDefined && (cmp.get == 1 || cmp.get == -1))) {
newList = newList :+ item._2
}
}
}
if (foundInList) {
(true, null)
} else if (newList.isEmpty) {
(false, null)
} else {
(null, In(expr, newList))
}
}
case InSet(value, hset) =>
val evaluatedValue = value.partialReduce(input, schema)
if (evaluatedValue._1 == null) {
(null, InSet(evaluatedValue._2, hset))
} else {
var foundInSet = false
var newHset = Set[Any]()
for (item <- hset if !foundInSet) {
val cmp = prc2(input, value.dataType, value.dataType, evaluatedValue._1, item)
if (cmp.isDefined && cmp.get == 0) {
foundInSet = true
} else if (cmp.isEmpty || (cmp.isDefined && (cmp.get == 1 || cmp.get == -1))) {
newHset = newHset + item
}
}
if (foundInSet) {
(true, null)
} else if (newHset.isEmpty) {
(false, null)
} else {
(null, InSet(evaluatedValue._2, newHset))
}
}
case l: LeafExpression =>
val res = l.eval(input)
(res, l)
case b: BoundReference =>
val res = b.eval(input)
(res, schema(b.ordinal))
case n: NamedExpression =>
val res = n.eval(input)
(res, n)
case IsNull(child) => if (checkNull) {
          if (child.eval(input) == null) {
(true, null)
} else {
(false, null)
}
} else {
(null, unboundAttributeReference(e, schema))
}
case IsNotNull(child) => if (checkNull) {
          if (child.eval(input) == null) {
(false, null)
} else {
(true, null)
}
} else {
(null, unboundAttributeReference(e, schema))
}
// TODO: CAST/Arithmetic could be treated more nicely
case Cast(_, _) => (null, unboundAttributeReference(e, schema))
// case BinaryArithmetic => null
case UnaryMinus(_) => (null, unboundAttributeReference(e, schema))
case EqualTo(left, right) =>
val evalL = left.partialReduce(input, schema)
val evalR = right.partialReduce(input, schema)
if (evalL._1 == null && evalR._1 == null) {
(null, EqualTo(evalL._2, evalR._2))
} else if (evalL._1 == null) {
(null, EqualTo(evalL._2, right))
} else if (evalR._1 == null) {
(null, EqualTo(left, evalR._2))
} else {
val cmp = prc2(input, left.dataType, right.dataType, evalL._1, evalR._1)
if (cmp.isDefined && cmp.get != 1 && cmp.get != -1) {
(cmp.get == 0, null)
} else {
(null, EqualTo(evalL._2, evalR._2))
}
}
case LessThan(left, right) =>
val evalL = left.partialReduce(input, schema)
val evalR = right.partialReduce(input, schema)
if (evalL._1 == null && evalR._1 == null) {
(null, LessThan(evalL._2, evalR._2))
} else if (evalL._1 == null) {
(null, LessThan(evalL._2, right))
} else if (evalR._1 == null) {
(null, LessThan(left, evalR._2))
} else {
val cmp = prc2(input, left.dataType, right.dataType, evalL._1, evalR._1)
if (cmp.isDefined && cmp.get != -1) {
(cmp.get == -2, null)
} else {
(null, LessThan(evalL._2, evalR._2))
}
}
case LessThanOrEqual(left, right) =>
val evalL = left.partialReduce(input, schema)
val evalR = right.partialReduce(input, schema)
if (evalL._1 == null && evalR._1 == null) {
(null, LessThanOrEqual(evalL._2, evalR._2))
} else if (evalL._1 == null) {
(null, LessThanOrEqual(evalL._2, right))
} else if (evalR._1 == null) {
(null, LessThanOrEqual(left, evalR._2))
} else {
val cmp = prc2(input, left.dataType, right.dataType, evalL._1, evalR._1)
if (cmp.isDefined) {
if (cmp.get == 1) {
(null, EqualTo(evalL._2, evalR._2))
} else {
(cmp.get <= 0, null)
}
} else {
(null, LessThanOrEqual(evalL._2, evalR._2))
}
}
case GreaterThan(left, right) =>
val evalL = left.partialReduce(input, schema)
val evalR = right.partialReduce(input, schema)
if (evalL._1 == null && evalR._1 == null) {
(null, GreaterThan(evalL._2, evalR._2))
} else if (evalL._1 == null) {
(null, GreaterThan(evalL._2, right))
} else if (evalR._1 == null) {
(null, GreaterThan(left, evalR._2))
} else {
val cmp = prc2(input, left.dataType, right.dataType, evalL._1, evalR._1)
if (cmp.isDefined && cmp.get != 1) {
(cmp.get == 2, null)
} else {
(null, GreaterThan(evalL._2, evalR._2))
}
}
case GreaterThanOrEqual(left, right) =>
val evalL = left.partialReduce(input, schema)
val evalR = right.partialReduce(input, schema)
if (evalL._1 == null && evalR._1 == null) {
(null, GreaterThanOrEqual(evalL._2, evalR._2))
} else if (evalL._1 == null) {
(null, GreaterThanOrEqual(evalL._2, right))
} else if (evalR._1 == null) {
(null, GreaterThanOrEqual(left, evalR._2))
} else {
val cmp = prc2(input, left.dataType, right.dataType, evalL._1, evalR._1)
if (cmp.isDefined) {
if (cmp.get == -1) {
(null, EqualTo(evalL._2, evalR._2))
} else {
(cmp.get >= 0, null)
}
} else {
(null, GreaterThanOrEqual(evalL._2, evalR._2))
}
}
case If(predicate, trueE, falseE) =>
val (v, _) = predicate.partialReduce(input, schema)
if (v == null) {
(null, unboundAttributeReference(e, schema))
} else if (v.asInstanceOf[Boolean]) {
trueE.partialReduce(input, schema)
} else {
falseE.partialReduce(input, schema)
}
case _ => (null, unboundAttributeReference(e, schema))
}
}
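    // A descriptive note (added for clarity): `prc2` partially compares `eval1` and
    // `eval2` under the range type derived from their common data type, returning None
    // when the partial ordering cannot relate the two values.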
@inline
protected def prc2(
i: Row,
dataType1: DataType,
dataType2: DataType,
eval1: Any,
eval2: Any): Option[Int] = {
if (dataType1 != dataType2) {
throw new TreeNodeException(e, s"Types do not match $dataType1 != $dataType2")
}
dataType1 match {
case nativeType: AtomicType =>
val pdt: RangeType[nativeType.InternalType] = {
nativeType.toRangeType[nativeType.InternalType]
}
pdt.partialOrdering.tryCompare(
pdt.toPartiallyOrderingDataType(eval1, nativeType),
pdt.toPartiallyOrderingDataType(eval2, nativeType))
case other => sys.error(s"Type $other does not support partially ordered operations")
}
}
}
}
|
nkhuyu/Spark-SQL-on-HBase
|
src/main/scala/org/apache/spark/sql/hbase/catalyst/expressions/PartialPredicateOperations.scala
|
Scala
|
apache-2.0
| 13,367
|
package equellatests.tests
import equellatests.domain.{Fairness, RandomWord, RandomWords, TestLogon}
import equellatests.instgen.workflow._
import equellatests.pages.cloudprovider.{
CloudProviderListPage,
TestCloudProviderDetails,
TestCloudProviderPage
}
import equellatests.restapi.ERest
import equellatests.restapi.cloudprovider.{RCloudProviderForward, RCloudProviders}
import equellatests.{SimpleSeleniumBrowser, SimpleTestCase, StatefulProperties}
import integtester.IntegTester
import io.circe.generic.semiauto._
import io.circe.{Decoder, Encoder}
import org.scalacheck.{Gen, Prop}
import Prop._
import equellatests.tests.SanityTestProperties.Pages
object CloudProviderProperties extends StatefulProperties("Cloud Providers") with SimpleTestCase {
val CloudProviderFirstLast = ("Cloud", "Provider")
object Scenarios extends Enumeration {
val Add, Delete = Value
}
sealed trait CloudProviderTestCommand
case class RegisterProvider(provider: TestCloudProviderDetails) extends CloudProviderTestCommand
case class DeleteProvider(providerName: String) extends CloudProviderTestCommand
case class ProviderTestState(registered: Set[TestCloudProviderDetails],
scenarios: Scenarios.ValueSet)
type State = ProviderTestState
type Command = CloudProviderTestCommand
implicit val testCaseEncoder: Encoder[CloudProviderTestCommand] = deriveEncoder
implicit val testCaseDecoder: Decoder[CloudProviderTestCommand] = deriveDecoder
val genCloudProvider = for {
name <- RandomWords.someWords
descSize <- Gen.choose(0, 10)
desc <- Gen.listOfN(descSize, RandomWord.word)
} yield
TestCloudProviderDetails(name.asString,
Some(desc.map(_.word).mkString(" ")).filter(_.nonEmpty),
None)
override def logon = tleAdminLogon
def doAdd: Gen[List[CloudProviderTestCommand]] =
for {
nProviders <- Gen.choose(1, 3)
providers <- Gen.listOfN(nProviders, for {
sz <- Gen.choose(0, 10)
p <- Gen.resize(sz, genCloudProvider)
} yield p)
} yield providers.map(RegisterProvider)
def genTestCommands(s: ProviderTestState): Gen[List[CloudProviderTestCommand]] = s match {
case s if s.scenarios == Scenarios.values => List()
case s =>
for {
command <- Fairness.favourIncomplete(1, 0)(Scenarios.values.toSeq, s.scenarios.contains)
res <- command match {
case Scenarios.Delete if s.registered.nonEmpty =>
Gen.oneOf(s.registered.toSeq).map(p => List(DeleteProvider(p.name)))
case _ => doAdd
}
} yield res
}
statefulProp("register a cloud provider") {
generateCommands(genTestCommands)
}
override def initialState: ProviderTestState =
ProviderTestState(registered = Set.empty, scenarios = Scenarios.ValueSet.empty)
override def runCommand(c: CloudProviderTestCommand, s: ProviderTestState): ProviderTestState =
c match {
case RegisterProvider(provider) =>
s.copy(scenarios = s.scenarios + Scenarios.Add, registered = s.registered + provider)
case DeleteProvider(providerName) =>
s.copy(scenarios = s.scenarios + Scenarios.Delete,
registered = s.registered.filter(_.name != providerName))
}
def loadProviderPage(b: SimpleSeleniumBrowser): CloudProviderListPage = {
val listPage = b.page match {
case lp: CloudProviderListPage => lp
case _ => CloudProviderListPage(b.page.ctx).load()
}
b.page = listPage
listPage
}
override def runCommandInBrowser(c: CloudProviderTestCommand,
s: ProviderTestState,
b: SimpleSeleniumBrowser): Prop = c match {
case RegisterProvider(provider) =>
val actualProvider = provider.copy(name = s"${b.unique} ${provider.name}")
var listPage = loadProviderPage(b)
val testProviderPage = TestCloudProviderPage(b.page.ctx, actualProvider)
listPage.add(testProviderPage.createRegistrationUrl())
testProviderPage.get()
testProviderPage.registerProvider()
testProviderPage.authenticateAsProvider()
val firstName = testProviderPage.getFirstName
val lastName = testProviderPage.getLastName
listPage = testProviderPage.returnToEQUELLA()
listPage.waitForResults()
val result = listPage.resultForName(actualProvider.name)
val description = result.description()
Prop.all(
((firstName, lastName) ?= CloudProviderFirstLast) :| "Should be able to authenticate as the cloud provider",
(description ?= actualProvider.description) :| "Description should match"
)
case DeleteProvider(providerName) =>
val listPage = loadProviderPage(b)
val realName = s"${b.unique} $providerName"
listPage.delete(realName)
val existing = listPage.checkCloudProviderExisting(realName)
Prop(!existing).label(providerName + " is deleted")
}
}
|
equella/Equella
|
autotest/Tests/src/test/scala/equellatests/tests/CloudProviderProperties.scala
|
Scala
|
apache-2.0
| 5,056
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy
import java.io.{ByteArrayOutputStream, File, PrintStream}
import java.lang.reflect.InvocationTargetException
import java.nio.charset.StandardCharsets
import java.util.{List => JList}
import scala.collection.JavaConverters._
import scala.collection.mutable.{ArrayBuffer, HashMap}
import scala.io.Source
import scala.util.Try
import org.apache.spark.{SparkConf, SparkException, SparkUserAppException}
import org.apache.spark.deploy.SparkSubmitAction._
import org.apache.spark.internal.{config, Logging}
import org.apache.spark.internal.config.DYN_ALLOCATION_ENABLED
import org.apache.spark.launcher.SparkSubmitArgumentsParser
import org.apache.spark.network.util.JavaUtils
import org.apache.spark.util.Utils
/**
* Parses and encapsulates arguments from the spark-submit script.
* The env argument is used for testing.
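 * A hypothetical invocation, for illustration only:
 * {{{
 *   val args = new SparkSubmitArguments(Seq("--master", "local[2]", "--class", "Main", "app.jar"))
 *   args.master           // "local[2]"
 *   args.mainClass        // "Main"
 *   args.primaryResource  // resolved URI of "app.jar"
 * }}}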
*/
private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, String] = sys.env)
extends SparkSubmitArgumentsParser with Logging {
var master: String = null
var deployMode: String = null
var executorMemory: String = null
var executorCores: String = null
var totalExecutorCores: String = null
var propertiesFile: String = null
var driverMemory: String = null
var driverExtraClassPath: String = null
var driverExtraLibraryPath: String = null
var driverExtraJavaOptions: String = null
var queue: String = null
var numExecutors: String = null
var files: String = null
var archives: String = null
var mainClass: String = null
var primaryResource: String = null
var name: String = null
var childArgs: ArrayBuffer[String] = new ArrayBuffer[String]()
var jars: String = null
var packages: String = null
var repositories: String = null
var ivyRepoPath: String = null
var ivySettingsPath: Option[String] = None
var packagesExclusions: String = null
var verbose: Boolean = false
var isPython: Boolean = false
var pyFiles: String = null
var isR: Boolean = false
var action: SparkSubmitAction = null
val sparkProperties: HashMap[String, String] = new HashMap[String, String]()
var proxyUser: String = null
var principal: String = null
var keytab: String = null
private var dynamicAllocationEnabled: Boolean = false
// Standalone cluster mode only
var supervise: Boolean = false
var driverCores: String = null
var submissionToKill: String = null
var submissionToRequestStatusFor: String = null
var useRest: Boolean = false // used internally
/** Default properties present in the currently defined defaults file. */
lazy val defaultSparkProperties: HashMap[String, String] = {
val defaultProperties = new HashMap[String, String]()
if (verbose) {
logInfo(s"Using properties file: $propertiesFile")
}
Option(propertiesFile).foreach { filename =>
val properties = Utils.getPropertiesFromFile(filename)
properties.foreach { case (k, v) =>
defaultProperties(k) = v
}
// Property files may contain sensitive information, so redact before printing
if (verbose) {
Utils.redact(properties).foreach { case (k, v) =>
logInfo(s"Adding default property: $k=$v")
}
}
}
defaultProperties
}
// Set parameters from command line arguments
parse(args.asJava)
// Populate `sparkProperties` map from properties file
mergeDefaultSparkProperties()
// Remove keys that don't start with "spark." from `sparkProperties`.
ignoreNonSparkProperties()
// Use `sparkProperties` map along with env vars to fill in any missing parameters
loadEnvironmentArguments()
useRest = sparkProperties.getOrElse("spark.master.rest.enabled", "false").toBoolean
validateArguments()
/**
* Merge values from the default properties file with those specified through --conf.
* When this is called, `sparkProperties` is already filled with configs from the latter.
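   * For example (hypothetical values): given `--conf spark.foo=cli` on the command
   * line and `spark.foo=file`, `spark.bar=file` in the defaults file, the merged map
   * contains `spark.foo=cli` and `spark.bar=file`.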
*/
private def mergeDefaultSparkProperties(): Unit = {
// Use common defaults file, if not specified by user
propertiesFile = Option(propertiesFile).getOrElse(Utils.getDefaultPropertiesFile(env))
// Honor --conf before the defaults file
defaultSparkProperties.foreach { case (k, v) =>
if (!sparkProperties.contains(k)) {
sparkProperties(k) = v
}
}
}
/**
* Remove keys that don't start with "spark." from `sparkProperties`.
*/
private def ignoreNonSparkProperties(): Unit = {
sparkProperties.keys.foreach { k =>
if (!k.startsWith("spark.")) {
sparkProperties -= k
logWarning(s"Ignoring non-Spark config property: $k")
}
}
}
/**
* Load arguments from environment variables, Spark properties etc.
*/
private def loadEnvironmentArguments(): Unit = {
master = Option(master)
.orElse(sparkProperties.get("spark.master"))
.orElse(env.get("MASTER"))
.orNull
driverExtraClassPath = Option(driverExtraClassPath)
.orElse(sparkProperties.get(config.DRIVER_CLASS_PATH.key))
.orNull
driverExtraJavaOptions = Option(driverExtraJavaOptions)
.orElse(sparkProperties.get(config.DRIVER_JAVA_OPTIONS.key))
.orNull
driverExtraLibraryPath = Option(driverExtraLibraryPath)
.orElse(sparkProperties.get(config.DRIVER_LIBRARY_PATH.key))
.orNull
driverMemory = Option(driverMemory)
.orElse(sparkProperties.get(config.DRIVER_MEMORY.key))
.orElse(env.get("SPARK_DRIVER_MEMORY"))
.orNull
driverCores = Option(driverCores)
.orElse(sparkProperties.get(config.DRIVER_CORES.key))
.orNull
executorMemory = Option(executorMemory)
.orElse(sparkProperties.get(config.EXECUTOR_MEMORY.key))
.orElse(env.get("SPARK_EXECUTOR_MEMORY"))
.orNull
executorCores = Option(executorCores)
.orElse(sparkProperties.get(config.EXECUTOR_CORES.key))
.orElse(env.get("SPARK_EXECUTOR_CORES"))
.orNull
totalExecutorCores = Option(totalExecutorCores)
.orElse(sparkProperties.get(config.CORES_MAX.key))
.orNull
name = Option(name).orElse(sparkProperties.get("spark.app.name")).orNull
jars = Option(jars).orElse(sparkProperties.get(config.JARS.key)).orNull
files = Option(files).orElse(sparkProperties.get(config.FILES.key)).orNull
pyFiles = Option(pyFiles).orElse(sparkProperties.get(config.SUBMIT_PYTHON_FILES.key)).orNull
ivyRepoPath = sparkProperties.get("spark.jars.ivy").orNull
ivySettingsPath = sparkProperties.get("spark.jars.ivySettings")
packages = Option(packages).orElse(sparkProperties.get("spark.jars.packages")).orNull
packagesExclusions = Option(packagesExclusions)
.orElse(sparkProperties.get("spark.jars.excludes")).orNull
repositories = Option(repositories)
.orElse(sparkProperties.get("spark.jars.repositories")).orNull
deployMode = Option(deployMode)
.orElse(sparkProperties.get(config.SUBMIT_DEPLOY_MODE.key))
.orElse(env.get("DEPLOY_MODE"))
.orNull
numExecutors = Option(numExecutors)
.getOrElse(sparkProperties.get(config.EXECUTOR_INSTANCES.key).orNull)
queue = Option(queue).orElse(sparkProperties.get("spark.yarn.queue")).orNull
keytab = Option(keytab)
.orElse(sparkProperties.get("spark.kerberos.keytab"))
.orElse(sparkProperties.get("spark.yarn.keytab"))
.orNull
principal = Option(principal)
.orElse(sparkProperties.get("spark.kerberos.principal"))
.orElse(sparkProperties.get("spark.yarn.principal"))
.orNull
dynamicAllocationEnabled =
sparkProperties.get(DYN_ALLOCATION_ENABLED.key).exists("true".equalsIgnoreCase)
    // Global defaults. These should be kept to a minimum to avoid confusing behavior.
master = Option(master).getOrElse("local[*]")
// In YARN mode, app name can be set via SPARK_YARN_APP_NAME (see SPARK-5222)
if (master.startsWith("yarn")) {
name = Option(name).orElse(env.get("SPARK_YARN_APP_NAME")).orNull
}
// Set name from main class if not given
name = Option(name).orElse(Option(mainClass)).orNull
if (name == null && primaryResource != null) {
name = new File(primaryResource).getName()
}
// Action should be SUBMIT unless otherwise specified
action = Option(action).getOrElse(SUBMIT)
}
  /** Ensure that required fields exist. Call this only once all defaults are loaded. */
private def validateArguments(): Unit = {
action match {
case SUBMIT => validateSubmitArguments()
case KILL => validateKillArguments()
case REQUEST_STATUS => validateStatusRequestArguments()
case PRINT_VERSION =>
}
}
private def validateSubmitArguments(): Unit = {
if (args.length == 0) {
printUsageAndExit(-1)
}
if (primaryResource == null) {
error("Must specify a primary resource (JAR or Python or R file)")
}
if (driverMemory != null
&& Try(JavaUtils.byteStringAsBytes(driverMemory)).getOrElse(-1L) <= 0) {
error("Driver memory must be a positive number")
}
if (executorMemory != null
&& Try(JavaUtils.byteStringAsBytes(executorMemory)).getOrElse(-1L) <= 0) {
error("Executor memory must be a positive number")
}
if (executorCores != null && Try(executorCores.toInt).getOrElse(-1) <= 0) {
error("Executor cores must be a positive number")
}
if (totalExecutorCores != null && Try(totalExecutorCores.toInt).getOrElse(-1) <= 0) {
error("Total executor cores must be a positive number")
}
if (!dynamicAllocationEnabled &&
numExecutors != null && Try(numExecutors.toInt).getOrElse(-1) <= 0) {
error("Number of executors must be a positive number")
}
if (master.startsWith("yarn")) {
val hasHadoopEnv = env.contains("HADOOP_CONF_DIR") || env.contains("YARN_CONF_DIR")
if (!hasHadoopEnv && !Utils.isTesting) {
error(s"When running with master '$master' " +
"either HADOOP_CONF_DIR or YARN_CONF_DIR must be set in the environment.")
}
}
if (proxyUser != null && principal != null) {
error("Only one of --proxy-user or --principal can be provided.")
}
}
private def validateKillArguments(): Unit = {
if (submissionToKill == null) {
error("Please specify a submission to kill.")
}
}
private def validateStatusRequestArguments(): Unit = {
if (submissionToRequestStatusFor == null) {
error("Please specify a submission to request status for.")
}
}
def isStandaloneCluster: Boolean = {
master.startsWith("spark://") && deployMode == "cluster"
}
override def toString: String = {
s"""Parsed arguments:
| master $master
| deployMode $deployMode
| executorMemory $executorMemory
| executorCores $executorCores
| totalExecutorCores $totalExecutorCores
| propertiesFile $propertiesFile
| driverMemory $driverMemory
| driverCores $driverCores
| driverExtraClassPath $driverExtraClassPath
| driverExtraLibraryPath $driverExtraLibraryPath
| driverExtraJavaOptions $driverExtraJavaOptions
| supervise $supervise
| queue $queue
| numExecutors $numExecutors
| files $files
| pyFiles $pyFiles
| archives $archives
| mainClass $mainClass
| primaryResource $primaryResource
| name $name
| childArgs [${childArgs.mkString(" ")}]
| jars $jars
| packages $packages
| packagesExclusions $packagesExclusions
| repositories $repositories
| verbose $verbose
|
|Spark properties used, including those specified through
| --conf and those from the properties file $propertiesFile:
       |${Utils.redact(sparkProperties).mkString(" ", "\n ", "\n")}
""".stripMargin
}
/** Fill in values by parsing user options. */
override protected def handle(opt: String, value: String): Boolean = {
opt match {
case NAME =>
name = value
case MASTER =>
master = value
case CLASS =>
mainClass = value
case DEPLOY_MODE =>
if (value != "client" && value != "cluster") {
error("--deploy-mode must be either \\"client\\" or \\"cluster\\"")
}
deployMode = value
case NUM_EXECUTORS =>
numExecutors = value
case TOTAL_EXECUTOR_CORES =>
totalExecutorCores = value
case EXECUTOR_CORES =>
executorCores = value
case EXECUTOR_MEMORY =>
executorMemory = value
case DRIVER_MEMORY =>
driverMemory = value
case DRIVER_CORES =>
driverCores = value
case DRIVER_CLASS_PATH =>
driverExtraClassPath = value
case DRIVER_JAVA_OPTIONS =>
driverExtraJavaOptions = value
case DRIVER_LIBRARY_PATH =>
driverExtraLibraryPath = value
case PROPERTIES_FILE =>
propertiesFile = value
case KILL_SUBMISSION =>
submissionToKill = value
if (action != null) {
error(s"Action cannot be both $action and $KILL.")
}
action = KILL
case STATUS =>
submissionToRequestStatusFor = value
if (action != null) {
error(s"Action cannot be both $action and $REQUEST_STATUS.")
}
action = REQUEST_STATUS
case SUPERVISE =>
supervise = true
case QUEUE =>
queue = value
case FILES =>
files = Utils.resolveURIs(value)
case PY_FILES =>
pyFiles = Utils.resolveURIs(value)
case ARCHIVES =>
archives = Utils.resolveURIs(value)
case JARS =>
jars = Utils.resolveURIs(value)
case PACKAGES =>
packages = value
case PACKAGES_EXCLUDE =>
packagesExclusions = value
case REPOSITORIES =>
repositories = value
case CONF =>
val (confName, confValue) = SparkSubmitUtils.parseSparkConfProperty(value)
sparkProperties(confName) = confValue
case PROXY_USER =>
proxyUser = value
case PRINCIPAL =>
principal = value
case KEYTAB =>
keytab = value
case HELP =>
printUsageAndExit(0)
case VERBOSE =>
verbose = true
case VERSION =>
action = SparkSubmitAction.PRINT_VERSION
case USAGE_ERROR =>
printUsageAndExit(1)
case _ =>
error(s"Unexpected argument '$opt'.")
}
action != SparkSubmitAction.PRINT_VERSION
}
/**
* Handle unrecognized command line options.
*
* The first unrecognized option is treated as the "primary resource". Everything else is
* treated as application arguments.
*/
override protected def handleUnknown(opt: String): Boolean = {
if (opt.startsWith("-")) {
error(s"Unrecognized option '$opt'.")
}
primaryResource =
if (!SparkSubmit.isShell(opt) && !SparkSubmit.isInternal(opt)) {
Utils.resolveURI(opt).toString
} else {
opt
}
isPython = SparkSubmit.isPython(opt)
isR = SparkSubmit.isR(opt)
false
}
override protected def handleExtraArgs(extra: JList[String]): Unit = {
childArgs ++= extra.asScala
}
private def printUsageAndExit(exitCode: Int, unknownParam: Any = null): Unit = {
if (unknownParam != null) {
logInfo("Unknown/unsupported param " + unknownParam)
}
val command = sys.env.getOrElse("_SPARK_CMD_USAGE",
"""Usage: spark-submit [options] <app jar | python file | R file> [app arguments]
|Usage: spark-submit --kill [submission ID] --master [spark://...]
|Usage: spark-submit --status [submission ID] --master [spark://...]
|Usage: spark-submit run-example [options] example-class [example args]""".stripMargin)
logInfo(command)
val mem_mb = Utils.DEFAULT_DRIVER_MEM_MB
logInfo(
s"""
|Options:
| --master MASTER_URL spark://host:port, mesos://host:port, yarn,
| k8s://https://host:port, or local (Default: local[*]).
| --deploy-mode DEPLOY_MODE Whether to launch the driver program locally ("client") or
| on one of the worker machines inside the cluster ("cluster")
| (Default: client).
| --class CLASS_NAME Your application's main class (for Java / Scala apps).
| --name NAME A name of your application.
| --jars JARS Comma-separated list of jars to include on the driver
| and executor classpaths.
| --packages Comma-separated list of maven coordinates of jars to include
| on the driver and executor classpaths. Will search the local
| maven repo, then maven central and any additional remote
| repositories given by --repositories. The format for the
| coordinates should be groupId:artifactId:version.
| --exclude-packages Comma-separated list of groupId:artifactId, to exclude while
| resolving the dependencies provided in --packages to avoid
| dependency conflicts.
| --repositories Comma-separated list of additional remote repositories to
| search for the maven coordinates given with --packages.
| --py-files PY_FILES Comma-separated list of .zip, .egg, or .py files to place
| on the PYTHONPATH for Python apps.
| --files FILES Comma-separated list of files to be placed in the working
| directory of each executor. File paths of these files
| in executors can be accessed via SparkFiles.get(fileName).
|
| --conf PROP=VALUE Arbitrary Spark configuration property.
| --properties-file FILE Path to a file from which to load extra properties. If not
| specified, this will look for conf/spark-defaults.conf.
|
| --driver-memory MEM Memory for driver (e.g. 1000M, 2G) (Default: ${mem_mb}M).
| --driver-java-options Extra Java options to pass to the driver.
| --driver-library-path Extra library path entries to pass to the driver.
| --driver-class-path Extra class path entries to pass to the driver. Note that
| jars added with --jars are automatically included in the
| classpath.
|
| --executor-memory MEM Memory per executor (e.g. 1000M, 2G) (Default: 1G).
|
| --proxy-user NAME User to impersonate when submitting the application.
| This argument does not work with --principal / --keytab.
|
| --help, -h Show this help message and exit.
| --verbose, -v Print additional debug output.
        | --version Print the version of current Spark.
|
| Cluster deploy mode only:
| --driver-cores NUM Number of cores used by the driver, only in cluster mode
| (Default: 1).
|
| Spark standalone or Mesos with cluster deploy mode only:
| --supervise If given, restarts the driver on failure.
|
| Spark standalone, Mesos or K8s with cluster deploy mode only:
| --kill SUBMISSION_ID If given, kills the driver specified.
| --status SUBMISSION_ID If given, requests the status of the driver specified.
|
| Spark standalone, Mesos and Kubernetes only:
| --total-executor-cores NUM Total cores for all executors.
|
| Spark standalone, YARN and Kubernetes only:
| --executor-cores NUM Number of cores used by each executor. (Default: 1 in
| YARN and K8S modes, or all available cores on the worker
| in standalone mode).
|
| Spark on YARN and Kubernetes only:
| --num-executors NUM Number of executors to launch (Default: 2).
| If dynamic allocation is enabled, the initial number of
| executors will be at least NUM.
| --principal PRINCIPAL Principal to be used to login to KDC.
| --keytab KEYTAB The full path to the file that contains the keytab for the
| principal specified above.
|
| Spark on YARN only:
| --queue QUEUE_NAME The YARN queue to submit to (Default: "default").
        | --archives ARCHIVES Comma-separated list of archives to be extracted into the
| working directory of each executor.
""".stripMargin
)
if (SparkSubmit.isSqlShell(mainClass)) {
logInfo("CLI options:")
logInfo(getSqlShellOptions())
}
throw new SparkUserAppException(exitCode)
}
/**
* Run the Spark SQL CLI main class with the "--help" option and catch its output. Then filter
* the results to remove unwanted lines.
*
* Since the CLI will call `System.exit()`, we install a security manager to prevent that call
* from working, and restore the original one afterwards.
*/
private def getSqlShellOptions(): String = {
val currentOut = System.out
val currentErr = System.err
val currentSm = System.getSecurityManager()
try {
val out = new ByteArrayOutputStream()
val stream = new PrintStream(out)
System.setOut(stream)
System.setErr(stream)
val sm = new SecurityManager() {
override def checkExit(status: Int): Unit = {
throw new SecurityException()
}
override def checkPermission(perm: java.security.Permission): Unit = {}
}
System.setSecurityManager(sm)
try {
Utils.classForName(mainClass).getMethod("main", classOf[Array[String]])
.invoke(null, Array(HELP))
} catch {
case e: InvocationTargetException =>
// Ignore SecurityException, since we throw it above.
if (!e.getCause().isInstanceOf[SecurityException]) {
throw e
}
}
stream.flush()
// Get the output and discard any unnecessary lines from it.
Source.fromString(new String(out.toByteArray(), StandardCharsets.UTF_8)).getLines
.filter { line =>
!line.startsWith("log4j") && !line.startsWith("usage")
}
.mkString("\\n")
} finally {
System.setSecurityManager(currentSm)
System.setOut(currentOut)
System.setErr(currentErr)
}
}
private def error(msg: String): Unit = throw new SparkException(msg)
private[deploy] def toSparkConf(sparkConf: Option[SparkConf] = None): SparkConf = {
// either use an existing config or create a new empty one
sparkProperties.foldLeft(sparkConf.getOrElse(new SparkConf())) {
case (conf, (k, v)) => conf.set(k, v)
}
}
}
|
jkbradley/spark
|
core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
|
Scala
|
apache-2.0
| 24,459
|
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.sparta.testat.internal
import akka.util.Timeout
import com.stratio.sparta.testat.SpartaATSuite
import com.stratio.sparta.testat.embedded.{ElasticThread, ElasticsearchEmbeddedServer, JVMProcess}
import spray.client.pipelining._
import spray.http._
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import scala.util.parsing.json.JSON
/**
 * Test used in an internal project.
* @author anistal
*/
class ISocketOElasticsearchJsonAT extends SpartaATSuite {
val policyFile = "policies/ISocket-OElasticsearchJSON.json"
override val PathToCsv = getClass.getClassLoader.getResource("fixtures/at-json-data").getPath
"Sparta" should {
"starts and executes a policy that reads from a socket and writes in ElasticSearch" in {
spartaRunner
checkData
}
}
def checkData: Unit = {
numberOfEventsGrouped(indexName = "id_smfprocess_minute",
mappingName = "day_v1",
field = "id",
value = "P0001_1435139880000") should be(2d)
numberOfEventsGrouped(indexName = "id_minute",
mappingName = "day_v1",
field = "id",
value = "1435139880000") should be(2d)
}
private def numberOfEventsGrouped(indexName: String, mappingName: String, field: String, value: Any): Double = {
val pipeline: HttpRequest => Future[HttpResponse] = sendReceive
val productArequest: Future[HttpResponse] =
pipeline(Get(s"http://${Localhost}:9200/$indexName/$mappingName/_search?q=*:*"))
val response: HttpResponse = Await.result(productArequest, Timeout(5.seconds).duration)
JSON.globalNumberParser = { input: String => input.toDouble }
val json = JSON.parseFull(response.entity.data.asString)
val rows = json.get.asInstanceOf[Map[String, Any]]
.get("hits").get.asInstanceOf[Map[String, Any]]
.get("hits").get.asInstanceOf[List[Map[String, Any]]]
rows.filter(tuple =>
tuple.get("_source").get.asInstanceOf[Map[String, Any]].get(field).get == value).head.get("_source").get
.asInstanceOf[Map[String, String]].get("countoperator").get.asInstanceOf[Double]
}
override def extraBefore: Unit = JVMProcess.runMain(ElasticThread.getClass.getCanonicalName.dropRight(1), false)
override def extraAfter: Unit = {
JVMProcess.shutdown()
ElasticsearchEmbeddedServer.cleanData
deletePath(s"$CheckpointPath/${"ATSocketElasticsearch".toLowerCase}")
}
}
|
danielcsant/sparta
|
test-at/src/test/scala/com/stratio/sparta/testat/internal/ISocketOElasticsearchJsonAT.scala
|
Scala
|
apache-2.0
| 3,024
|
/*
* Copyright © 2015 Lukas Rosenthaler, Benjamin Geer, Ivan Subotic,
* Tobias Schweizer, André Kilchenmann, and Sepideh Alassi.
*
* This file is part of Knora.
*
* Knora is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Knora is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public
* License along with Knora. If not, see <http://www.gnu.org/licenses/>.
*/
package org.knora.salsah
import akka.actor.{ActorSystem, ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider}
import com.typesafe.config.Config
/**
* Reads application settings that come from `application.conf`.
*/
class SettingsImpl(config: Config) extends Extension {
val hostName = config.getString("app.http.hostname")
val httpPort = config.getInt("app.http.http-port")
val httpsPort = config.getInt("app.http.https-port")
// used for testing
val headless = config.getBoolean("app.testing.headless")
// used in deployment
val deployed = config.getBoolean("app.deployed")
val workingDirectory = config.getString("app.workdir")
// Javascript Configuration
val webapiUrl = config.getString("app.jsconf.webapi-url")
val sipiUrl = config.getString("app.jsconf.sipi-url")
}
object Settings extends ExtensionId[SettingsImpl] with ExtensionIdProvider {
override def lookup() = Settings
override def createExtension(system: ExtendedActorSystem) =
new SettingsImpl(system.settings.config)
/**
* Java API: retrieve the Settings extension for the given system.
*/
override def get(system: ActorSystem): SettingsImpl = super.get(system)
}
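// Usage sketch (the actor system name is hypothetical):
//   val system = ActorSystem("salsah")
//   Settings(system).hostName  // resolves the extension via ExtensionId.apply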
|
nie-ine/Knora
|
salsah/src/main/scala/org/knora/salsah/Settings.scala
|
Scala
|
agpl-3.0
| 2,059
|
package au.com.intelix.rs.core.actors
import au.com.intelix.evt.{InfoE, WarningE}
object CommonActorEvt {
object Evt {
case object PostStop extends InfoE
case object PreStart extends InfoE
case object PreRestart extends WarningE
case object PostRestart extends WarningE
case object StateTransition extends InfoE
}
}
|
intelix/reactiveservices
|
platform/core/src/main/scala/au/com/intelix/rs/core/actors/CommonActorEvt.scala
|
Scala
|
apache-2.0
| 344
|
/* Copyright 2009-2021 EPFL, Lausanne */
object Nested3 {
def foo(a: BigInt): BigInt = {
require(a >= 0 && a <= 50)
val b = a + 2
val c = a + b
def rec1(d: BigInt): BigInt = {
require(d >= 0 && d <= 50)
val e = d + b + c
e
}
rec1(2)
} ensuring(_ > 0)
}
|
epfl-lara/stainless
|
frontends/benchmarks/verification/valid/MicroTests/Nested3.scala
|
Scala
|
apache-2.0
| 302
|
package io.scalajs.npm.mongodb
import io.scalajs.nodejs.Assert
import org.scalatest.FunSpec
import scala.concurrent.Future
import scala.scalajs.js
/**
* Db Test Suites
* @author lawrence.daniels@gmail.com
*/
class DbTest extends FunSpec with MongoDBTestSupport {
describe("Db") {
it("supports executing code on the server") {
withMongo("Code") { db =>
db.eval(new Code("i + 3;"), js.Dictionary("i" -> 2), (err, result) => {
Assert.equal(5, result)
})
Future.successful({})
}
}
}
}
|
scalajs-io/mongodb
|
src/test/scala/io/scalajs/npm/mongodb/DbTest.scala
|
Scala
|
apache-2.0
| 554
|
package edu.gemini.pit.ui.util
import swing._
import java.awt
import javax.swing.BorderFactory
import scalaz._
import Scalaz._
import java.awt.Color
class ProposalSubmissionErrorDialog(msg: Seq[String]) extends StdModalEditor[String]("Proposal submission errors") { dialog =>
contents = Content
// Our main content object
object Content extends BorderPanel {
// Space things out a little more
peer.setLayout(new awt.BorderLayout(8, 8))
border = BorderFactory.createEmptyBorder(8, 8, 8, 8)
// Add our content, defined below
Option(header).foreach { add(_, BorderPanel.Position.North) }
add(editor, BorderPanel.Position.Center)
add(Footer, BorderPanel.Position.South)
// Footer is a standard widget
lazy val Footer = OkFooter(dialog) {
close(value)
}
}
object DetailsArea extends BorderPanel {
border = BorderFactory.createLineBorder(Color.LIGHT_GRAY)
val text = new TextArea
text.wordWrap = true
text.lineWrap = true
text.editable = false
text.background = Color.white
text.text = msg.map(l => s"\u2022 $l").mkString("\n")
val sp = new ScrollPane(text)
sp.horizontalScrollBarPolicy = ScrollPane.BarPolicy.Never
sp.preferredSize = new Dimension(550, 150)
add(sp, BorderPanel.Position.Center)
}
object Log extends GridBagPanel with Rows {
addRow(new Label("Summary of errors:"))
add(DetailsArea, new Constraints { gridx = 1; gridy = 3; fill = GridBagPanel.Fill.Horizontal; weightx = 2 })
revalidate()
repaint()
dialog.pack()
}
def editor = Log
def value = ""
}
|
spakzad/ocs
|
bundle/edu.gemini.pit/src/main/scala/edu/gemini/pit/ui/util/ProposalSubmissionErrorDialog.scala
|
Scala
|
bsd-3-clause
| 1,604
|
package org.template.classification
import io.prediction.controller.IEngineFactory
import io.prediction.controller.Engine
class Query(
val features: Array[Double]
) extends Serializable
class PredictedResult(
val label: Double
) extends Serializable
object ClassificationEngine extends IEngineFactory {
def apply() = {
new Engine(
classOf[DataSource],
classOf[Preparator],
Map("naive" -> classOf[NaiveBayesAlgorithm],
"randomforest" -> classOf[RandomForestAlgorithm]), // ADDED
classOf[Serving])
}
}
|
beni55/PredictionIO
|
examples/scala-parallel-classification/add-algorithm/src/main/scala/Engine.scala
|
Scala
|
apache-2.0
| 548
|
package com.monsanto.stats.tables.models
object PeerType {
val Follower = 1
val Friend = 2
}
case class Peer(
originalUserId: Long,
peerType: Int,
peerUserId: Long
)
|
MonsantoCo/chinese-restaurant-process
|
src/main/scala/com/monsanto/stats/tables/models/Peer.scala
|
Scala
|
bsd-3-clause
| 180
|
package org.knora.webapi.util
import org.knora.webapi.messages.v1.responder.usermessages.UserProfileV1
/**
 * Holds an optional, mutable user profile for use in tests.
*/
class MutableUserProfileV1 {
private var maybeUserProfile: Option[UserProfileV1] = None
/**
* Stores the user's profile.
* @param userProfileV1 the user's profile to be stored.
*/
def set(userProfileV1: UserProfileV1): Unit = {
maybeUserProfile = Some(userProfileV1)
}
/**
     * Removes any stored user profile.
*/
def unset(): Unit = {
maybeUserProfile = None
}
/**
     * Gets the stored user profile, or throws an exception if it is not set.
     * @return the stored user profile.
*/
def get: UserProfileV1 = {
maybeUserProfile.getOrElse(throw TestUserProfileException("This test could not be run because a previous test failed"))
}
}
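// Typical test usage (a sketch; `profileFromEarlierTest` is hypothetical):
//   private val user = new MutableUserProfileV1
//   user.set(profileFromEarlierTest) // stored by an earlier test
//   user.get                         // read later; throws TestUserProfileException if unset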
/**
 * Thrown if a stored user profile was needed but was not set.
*/
case class TestUserProfileException(message: String) extends Exception(message)
|
nie-ine/Knora
|
webapi/src/test/scala/org/knora/webapi/util/MutableUserProfileV1.scala
|
Scala
|
agpl-3.0
| 1,030
|
package com.twitter.finagle.mux
import com.twitter.app.GlobalFlag
import com.twitter.conversions.time._
import com.twitter.finagle._
import com.twitter.finagle.context.Contexts
import com.twitter.finagle.mux.lease.exp.{Lessee, Lessor, nackOnExpiredLease}
import com.twitter.finagle.netty3.{BufChannelBuffer, ChannelBufferBuf}
import com.twitter.finagle.stats.{NullStatsReceiver, StatsReceiver}
import com.twitter.finagle.tracing.{NullTracer, Trace, Tracer}
import com.twitter.finagle.transport.Transport
import com.twitter.finagle.util.DefaultTimer
import com.twitter.util._
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.{AtomicInteger, AtomicReference}
import java.util.logging.{Level, Logger}
import org.jboss.netty.buffer.ChannelBuffer
import scala.annotation.tailrec
import scala.collection.JavaConverters._
/**
* Indicates that a client requested that a given request be discarded.
*
* This implies that the client issued a Tdiscarded message for a given tagged
* request, as per [[com.twitter.finagle.mux]].
*/
case class ClientDiscardedRequestException(why: String) extends Exception(why)
object gracefulShutdownEnabled extends GlobalFlag(true, "Graceful shutdown enabled. " +
"Temporary measure to allow servers to deploy without hurting clients.")
/**
* A tracker is responsible for tracking pending transactions
* and coordinating draining.
*/
private class Tracker[T] {
private[this] val pending = new ConcurrentHashMap[Int, Future[T]]
private[this] val _drained: Promise[Unit] = new Promise
// The state of a tracker is a single integer. Its absolute
// value minus one indicates the number of pending requests.
// A negative value indicates that the tracker is draining.
// Negative values cannot transition to positive values.
private[this] val state = new AtomicInteger(1)
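  // For example (illustrative values): state == 1 is open with no pending
  // transactions, state == 3 is open with two pending, state == -3 is draining
  // with two still pending, and state == -1 is fully drained.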
/**
* Try to enter a transaction, returning false if the
* tracker is draining.
*/
@tailrec
private[this] def enter(): Boolean = {
val n = state.get
if (n <= 0) false
else if (!state.compareAndSet(n, n+1)) enter()
else true
}
/**
* Exit an entered transaction.
*/
@tailrec
private[this] def exit(): Unit = {
val n = state.get
if (n < 0) {
if (state.incrementAndGet() == -1)
_drained.setDone()
} else if (!state.compareAndSet(n, n-1)) exit()
}
private[this] val closedExit = (_: Try[Unit]) => exit()
/**
* Track a transaction. `track` manages the lifetime of a tag
* and its reply. Function `process` handles the result of `reply`.
* The ordering here is important: the tag is relinquished after
* `reply` is satisfied but before `process` is invoked, but is still
* considered pending until `process` completes. This is because:
* (1) the tag is freed once a client receives the reply, and, since
* write completion is not synchronous with processing the next
* request, there is a race between acknowledging the write and
* receiving the next request from the client (which may then reuse
* the tag); (2) we can't complete draining until we've acknowledged
* the write for the last request processed.
*/
def track(tag: Int, reply: Future[T])(process: Try[T] => Future[Unit]): Future[Unit] = {
if (!enter()) return reply.transform(process)
pending.put(tag, reply)
reply transform { r =>
pending.remove(tag)
process(r).respond(closedExit)
}
}
/**
* Retrieve the value for the pending request matching `tag`.
*/
def get(tag: Int): Option[Future[T]] =
Option(pending.get(tag))
/**
* Returns the set of current tags.
*/
def tags: Set[Int] =
pending.keySet.asScala.toSet
/**
* Initiate the draining protocol. After `drain` is called, future
   * requests for tracking are dropped. [[drained]] is satisfied
* when the number of pending requests reaches 0.
*/
@tailrec
final def drain(): Unit = {
val n = state.get
if (n < 0) return
if (!state.compareAndSet(n, -n)) drain()
else if (n == 1) _drained.setDone()
}
/**
* True when the tracker is in draining state.
*/
def isDraining: Boolean = state.get < 0
/**
   * Satisfied when the tracker has completed the draining protocol,
* as described in [[drain]].
*/
def drained: Future[Unit] = _drained
/**
* Tests whether the given tag is actively tracked.
*/
def isTracking(tag: Int): Boolean = pending.containsKey(tag)
/**
* The number of tracked tags.
*/
def npending: Int =
math.abs(state.get)-1
}
private[twitter] object ServerDispatcher {
/**
* Construct a new request-response dispatcher.
*/
def newRequestResponse(
trans: Transport[ChannelBuffer, ChannelBuffer],
service: Service[Request, Response],
lessor: Lessor,
tracer: Tracer,
statsReceiver: StatsReceiver
): ServerDispatcher =
new ServerDispatcher(trans, Processor andThen service, lessor, tracer, statsReceiver)
/**
* Construct a new request-response dispatcher with a
* null lessor, tracer, and statsReceiver.
*/
def newRequestResponse(
trans: Transport[ChannelBuffer, ChannelBuffer],
service: Service[Request, Response]
): ServerDispatcher =
newRequestResponse(trans, service, Lessor.nil, NullTracer, NullStatsReceiver)
val Epsilon = 1.second
object State extends Enumeration {
val Open, Draining, Closed = Value
}
}
/**
* A dispatcher for the Mux protocol. In addition to multiplexing, the dispatcher
* handles concerns of leasing and draining.
*/
private[twitter] class ServerDispatcher(
trans: Transport[ChannelBuffer, ChannelBuffer],
service: Service[Message, Message],
lessor: Lessor, // the lessor that the dispatcher should register with in order to get leases
tracer: Tracer,
statsReceiver: StatsReceiver
) extends Closable with Lessee {
import Message._
import ServerDispatcher.State
private[this] implicit val injectTimer = DefaultTimer.twitter
private[this] val tracker = new Tracker[Message]
private[this] val log = Logger.getLogger(getClass.getName)
private[this] val state: AtomicReference[State.Value] =
new AtomicReference(State.Open)
@volatile private[this] var lease = Tlease.MaxLease
@volatile private[this] var curElapsed = NilStopwatch.start()
lessor.register(this)
private[this] def write(m: Message): Future[Unit] =
trans.write(encode(m))
private[this] def isAccepting: Boolean =
!tracker.isDraining && (!nackOnExpiredLease() || (lease > Duration.Zero))
private[this] def process(m: Message): Unit = m match {
case (_: Tdispatch | _: Treq) if isAccepting =>
// A misbehaving client is sending duplicate pending tags.
// Note that, since the client is managing multiple outstanding
// requests for this tag, and we're returning an Rerr here, there
// are no guarantees about client behavior in this case. Possibly
// we should terminate the session in this case.
//
// TODO: introduce uniform handling of tag tracking
// (across all request types), and also uniform handling
// (e.g., session termination).
if (tracker.isTracking(m.tag)) {
log.warning(s"Received duplicate tag ${m.tag} from client ${trans.remoteAddress}")
write(Rerr(m.tag, s"Duplicate tag ${m.tag}"))
return
}
lessor.observeArrival()
val elapsed = Stopwatch.start()
tracker.track(m.tag, service(m)) {
case Return(rep) =>
lessor.observe(elapsed())
write(rep)
case Throw(exc) =>
log.log(Level.WARNING, s"Error processing message $m", exc)
write(Rerr(m.tag, exc.toString))
}
// Dispatch when !isAccepting
case d: Tdispatch =>
write(RdispatchNack(d.tag, Nil))
case r: Treq =>
write(RreqNack(r.tag))
case _: Tping =>
service(m) respond {
case Return(rep) => write(rep)
case Throw(exc) => write(Rerr(m.tag, exc.toString))
}
case Tdiscarded(tag, why) =>
tracker.get(tag) match {
case Some(reply) =>
reply.raise(new ClientDiscardedRequestException(why))
case None =>
}
case Rdrain(1) if state.get == State.Draining =>
tracker.drain()
case m@Tmessage(tag) =>
val msg = Rerr(tag, f"Did not understand Tmessage ${m.typ}%d")
write(msg)
}
private[this] def loop(): Unit =
Future.each(trans.read) { buf =>
val save = Local.save()
process(decode(buf))
Local.restore(save)
} ensure { hangup(Time.now) }
Local.letClear {
Trace.letTracer(tracer) {
trans.peerCertificate match {
case None => loop()
case Some(cert) => Contexts.local.let(Transport.peerCertCtx, cert) { loop() }
}
}
}
trans.onClose respond { res =>
val exc = res match {
case Return(exc) => exc
case Throw(exc) => exc
}
val cancelledExc = new CancelledRequestException(exc)
for (tag <- tracker.tags; f <- tracker.get(tag))
f.raise(cancelledExc)
service.close()
lessor.unregister(this)
state.get match {
case State.Open =>
statsReceiver.counter("clienthangup").incr()
case (State.Draining | State.Closed) =>
statsReceiver.counter("serverhangup").incr()
}
}
@tailrec
private[this] def hangup(deadline: Time): Future[Unit] = state.get match {
case State.Closed => Future.Done
case s@(State.Draining | State.Open) =>
if (!state.compareAndSet(s, State.Closed)) hangup(deadline) else {
trans.close(deadline)
}
}
def close(deadline: Time): Future[Unit] = {
if (!state.compareAndSet(State.Open, State.Draining))
return trans.onClose.unit
if (!gracefulShutdownEnabled()) {
// In theory, we can do slightly better here.
// (i.e., at least try to wait for requests to drain)
// but instead we should just disable this flag.
return hangup(deadline)
}
statsReceiver.counter("draining").incr()
val done = write(Tdrain(1)) before
tracker.drained.within(deadline-Time.now) before
trans.close(deadline)
done transform {
case Return(_) =>
statsReceiver.counter("drained").incr()
Future.Done
case Throw(_: ChannelClosedException) =>
Future.Done
case Throw(_) =>
hangup(deadline)
}
}
/**
* Emit a lease to the clients of this server. If howlong is less than or
* equal to 0, also nack all requests until a new lease is issued.
*/
def issue(howlong: Duration): Unit = {
require(howlong >= Tlease.MinLease)
synchronized {
val diff = (lease - curElapsed()).abs
if (diff > ServerDispatcher.Epsilon) {
curElapsed = Stopwatch.start()
lease = howlong
write(Tlease(howlong min Tlease.MaxLease))
} else if ((howlong < Duration.Zero) && (lease > Duration.Zero)) {
curElapsed = Stopwatch.start()
lease = howlong
}
}
}
def npending: Int = tracker.npending
}
/**
 * Processor handles request, dispatch, and ping messages. Request
 * and dispatch messages are passed on to the request-response service in the
 * filter chain. Pings are answered immediately in the affirmative.
 *
 * (This arrangement permits interposing other filters to modify ping
 * or dispatch behavior, e.g., for testing.)
*/
private object Processor extends Filter[Message, Message, Request, Response] {
import Message._
private[this] def dispatch(tdispatch: Tdispatch, service: Service[Request, Response]): Future[Message] = {
val Tdispatch(tag, contexts, dst, dtab, bytes) = tdispatch
val contextBufs = contexts map { case (k, v) =>
ChannelBufferBuf.Owned(k.duplicate) -> ChannelBufferBuf.Owned(v.duplicate)
}
Contexts.broadcast.letUnmarshal(contextBufs) {
if (dtab.length > 0)
Dtab.local ++= dtab
service(Request(dst, ChannelBufferBuf.Owned(bytes))) transform {
case Return(rep) =>
Future.value(RdispatchOk(tag, Nil, BufChannelBuffer(rep.body)))
case Throw(f: Failure) if f.isFlagged(Failure.Restartable) =>
Future.value(RdispatchNack(tag, Nil))
case Throw(exc) =>
Future.value(RdispatchError(tag, Nil, exc.toString))
}
}
}
private[this] def dispatch(treq: Treq, service: Service[Request, Response]): Future[Message] = {
val Treq(tag, traceId, bytes) = treq
Trace.letIdOption(traceId) {
service(Request(Path.empty, ChannelBufferBuf.Owned(bytes))) transform {
case Return(rep) =>
Future.value(RreqOk(tag, BufChannelBuffer(rep.body)))
case Throw(f: Failure) if f.isFlagged(Failure.Restartable) =>
Future.value(RreqNack(tag))
case Throw(exc) =>
Future.value(RreqError(tag, exc.toString))
}
}
}
def apply(req: Message, service: Service[Request, Response]): Future[Message] = req match {
case d: Tdispatch => dispatch(d, service)
case r: Treq => dispatch(r, service)
case Tping(tag) => Future.value(Rping(tag))
case m => Future.exception(new IllegalArgumentException(s"Cannot process message $m"))
}
}
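// A sketch of the interposition described above (the filter value is
// hypothetical): a test filter placed in front of Processor composes as an
// ordinary Finagle filter, yielding a Service[Message, Message] suitable for
// the ServerDispatcher constructor:
//
//   val withFailingPings: Service[Message, Message] =
//     pingFailingFilter andThen Processor andThen service
//   new ServerDispatcher(trans, withFailingPings, Lessor.nil, NullTracer, NullStatsReceiver)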
|
nomadlabs/finagle
|
finagle-mux/src/main/scala/com/twitter/finagle/mux/Server.scala
|
Scala
|
apache-2.0
| 13,184
|
package ru.maizy.ambient7.core.config.helper
/**
* Copyright (c) Nikita Kovaliov, maizy.ru, 2017
* See LICENSE.txt for details.
*/
import ru.maizy.ambient7.core.config.{ Ambient7Options, ParsingError }
object ConfigRuleOps {
import ru.maizy.ambient7.core.config.reader.UniversalConfigReader._
implicit class IfSuccessOp[T](configRes: configs.Result[T]) {
def ifSuccess(saveValue: T => Ambient7Options): ParseResult = {
configRes match {
case configs.Result.Failure(error) => Left(ParsingError.withMessages(error.messages))
case configs.Result.Success(value) => Right(saveValue(value))
}
}
}
}
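// A hypothetical usage sketch: given some `result: configs.Result[Int]` and
// the current `opts: Ambient7Options`, `ifSuccess` turns the library result
// into this reader's ParseResult while saving the parsed value (`withPort` is
// an illustrative helper, not part of the real options API):
//
//   result.ifSuccess(port => withPort(opts, port))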
|
maizy/ambient7
|
core/src/main/scala/ru/maizy/ambient7/core/config/helper/ConfigRuleOps.scala
|
Scala
|
apache-2.0
| 644
|
package bakery.sbt
import java.nio.charset.Charset
import com.typesafe.sbt.packager.Keys.packageName
import com.typesafe.sbt.packager.archetypes.JavaAppPackaging
import com.typesafe.sbt.packager.docker.{CmdLike, DockerPlugin, ExecCmd}
import com.typesafe.sbt.packager.docker.DockerPlugin.autoImport._
import com.typesafe.sbt.packager.universal.UniversalPlugin.autoImport._
import kubeyml.deployment.NoProbe
import kubeyml.deployment.plugin.Keys._
import kubeyml.deployment.plugin.KubeDeploymentPlugin
import sbt.Keys._
import sbt._
object BuildInteractionDockerImageSBTPlugin extends sbt.AutoPlugin {
case class CommandArgumentsBuilder(name: Option[String], publish: Option[String], artifact: Option[String], interactions: List[String], springEnabled: Option[Boolean])
case class CommandArguments(name: String, publish: String, artifact: Option[String], interactions: List[String], springEnabled: Boolean)
override def requires: Plugins = DockerPlugin && JavaAppPackaging && KubeDeploymentPlugin
override def trigger: PluginTrigger = allRequirements
object autoImport {
val mainClassBody = settingKey[Option[String]]("Main's class source code")
/**
* Example: "buildInteractionDockerImage --image-name=<name> --publish=<local|remote> --artifact=net.bytebuddy:byte-buddy:1.10.8 --interaction=path.to.Interaction --interaction=path.to.Interaction2"
*/
def buildDockerCommand: Command = Command.args("buildInteractionDockerImage", "<arg>") { (state, args) =>
val NameRegex = """--image-name=(.+)""".r
val PublishRegex = """--publish=(.+)""".r
val ArtifactRegex = """--artifact=(.+)""".r
val InteractionRegex = """--interaction=(.+)""".r
val SpringEnabledRegex = """--springEnabled=(.+)""".r
val builder = args.foldLeft(CommandArgumentsBuilder(None, None, None, List.empty, None)) { (builder, arg) =>
arg match {
case NameRegex(value) => builder.copy(name = Some(value))
case PublishRegex(value) => builder.copy(publish = Some(value))
case ArtifactRegex(value) => builder.copy(artifact = Some(value))
case InteractionRegex(value) => builder.copy(interactions = value :: builder.interactions)
          case SpringEnabledRegex(value) => builder.copy(springEnabled = Option.apply(value.toBoolean))
          case other => throw new MessageOnlyException(s"Unrecognised argument '$other'")
        }
}
val arguments = builder match {
case cmd@CommandArgumentsBuilder(Some(name), Some("local" | "remote") | None, artifact, interactions, None) if interactions.nonEmpty =>
CommandArguments(name, cmd.publish.getOrElse("remote"), artifact, interactions, false)
case cmd@CommandArgumentsBuilder(Some(name), Some("local" | "remote") | None, artifact, interactions, Some(springEnabled)) if interactions.nonEmpty =>
CommandArguments(name, cmd.publish.getOrElse("remote"), artifact, interactions, springEnabled)
case CommandArgumentsBuilder(None, _, _, _, _) =>
throw new MessageOnlyException(s"Expected name for image (--image-name=<name>)")
case CommandArgumentsBuilder(_, _, _, interactions, _) if interactions.isEmpty =>
throw new MessageOnlyException(s"Expected at least one interaction or configuration (--interaction=<full-class-path>)")
case _ =>
throw new MessageOnlyException(s"Expected publish to be either local or remote or empty (--publish=<local|remote>)")
}
executeDockerBuild(state, arguments)
}
private def executeDockerBuild(state: State, arguments: CommandArguments): State = {
val moduleID: Option[ModuleID] = arguments.artifact map {
_.split(":") match {
case Array(organization, name, revision) => organization % name % revision
case other => throw new MessageOnlyException(s"Unexpected dependency declaration $other")
}
}
val stateWithNewDependency =
Project.extract(state).appendWithSession(Seq(
name := arguments.name,
libraryDependencies ++= moduleID.toSeq,
Docker / packageName := arguments.name,
ThisBuild / version := moduleID.map(_.revision).getOrElse((ThisBuild / version ).value),
Universal / javaOptions += arguments.interactions.mkString(","),
kube / livenessProbe := NoProbe,
dockerBaseImage := "adoptopenjdk/openjdk11",
Compile / sourceGenerators += Def.task {
val mainClassName =
(Compile / mainClass).value.getOrElse(throw new MessageOnlyException("mainClass in Compile is required"))
val pathList = mainClassName.split("\\.")
val file =
(pathList.dropRight(1) :+ pathList.last + ".scala")
.foldLeft((Compile / sourceManaged).value) {
case (file, subPath) => file / subPath
}
val mainClassDefault = if(arguments.springEnabled) mainClassBodySpringDefault else mainClassBodyDefault
val sourceBytes = mainClassBody.value.getOrElse(mainClassDefault).getBytes(Charset.defaultCharset())
IO.write(file, sourceBytes)
Seq(file)
}.taskValue
), state)
val commandName = arguments.publish match {
case "local" => "Docker/publishLocal"
case _ => "Docker/publish"
}
val updatedState = Command.process(commandName, stateWithNewDependency)
Command.process("kubeyml:gen", updatedState)
state
}
}
import autoImport._
override lazy val projectSettings: Seq[Def.Setting[_]] = Seq(
mainClassBody := None,
Compile / mainClass := Some("com.ing.bakery.Main"),
commands += buildDockerCommand
)
private val mainClassBodyDefault =
"""
|package com.ing.bakery
|
|import com.ing.bakery.interaction.RemoteInteractionLoader
|import com.ing.baker.runtime.scaladsl.InteractionInstance
|
|import scala.concurrent.ExecutionContext.Implicits.global
|
|/**
| * Expects single argument containing full classpath entry point for interaction
| */
|object Main extends App {
|
| private def runApp(classNames: String): Unit =
| try {
| val interactions: List[String] = classNames.split(",").toList
| val implementations = interactions
| .map(entryClassName => Class.forName(entryClassName).getConstructor().newInstance().asInstanceOf[AnyRef])
| .map(implementation => InteractionInstance.unsafeFrom(implementation))
| RemoteInteractionLoader.apply(implementations)
| } catch {
| case ex: Exception =>
| throw new IllegalStateException(s"Unable to initialize the classes $classNames", ex)
| }
|
|
| args.headOption.map(runApp).getOrElse(throw new IllegalAccessException("Expected class name as a parameter"))
|}
|""".stripMargin
private val mainClassBodySpringDefault =
"""
|package com.ing.bakery
|
|import java.util
|
|import com.ing.bakery.interaction.RemoteInteractionLoader
|import com.ing.baker.recipe.javadsl.Interaction
|import com.ing.baker.runtime.scaladsl.InteractionInstance
|import com.typesafe.scalalogging.LazyLogging
|import org.springframework.context.annotation.AnnotationConfigApplicationContext
|
|import scala.collection.JavaConverters._
|import scala.concurrent.ExecutionContext.Implicits.global
|
|/**
| * Expects single argument containing Spring configuration
| */
|object Main extends App with LazyLogging{
|
|
| def getImplementations(configurationClassString: String) : List[InteractionInstance] = {
| val configClass = Class.forName(configurationClassString)
| logger.info("Class found: " + configClass)
| val ctx = new AnnotationConfigApplicationContext();
| logger.info("Context created")
| ctx.register(configClass)
| logger.info("Context registered")
| ctx.refresh()
| logger.info("Context refreshed")
| val interactions: util.Map[String, Interaction] =
| ctx.getBeansOfType(classOf[com.ing.baker.recipe.javadsl.Interaction])
| interactions.asScala.values.map(implementation => {
| val instance = InteractionInstance.unsafeFrom(implementation)
| logger.info("Added implementation: " + instance.name)
| instance
| }).toList
| }
|
| private def runApp(configurationClassString: String): Unit =
| try {
| logger.info("Starting for configuration: " + configurationClassString)
| val implementations = getImplementations(configurationClassString)
| logger.info("Starting RemoteInteractionLoader")
| RemoteInteractionLoader.apply(implementations)
| } catch {
| case ex: Exception =>
| throw new IllegalStateException(s"Unable to initialize the interaction instances", ex)
| }
|
| args.headOption.map(runApp).getOrElse(throw new IllegalAccessException("Please provide a Spring configuration containing valid interactions"))
|}
|""".stripMargin
}
|
ing-bank/baker
|
project/BuildInteractionDockerImageSBTPlugin.scala
|
Scala
|
mit
| 9,258
|
// Copyright (C) 2017 Calin Cruceru <calin.cruceru@stud.acs.upb.ro>.
//
// See the LICENCE file distributed with this work for additional
// information regarding copyright ownership.
package org.symnet
package models.iptables.virtdev
package devices
abstract class VirtualDevice[+Config](
name: String,
inputPorts: Int,
outputPorts: Int,
config: Config) {
def inputPort(which: Int): Port = {
assert(which < inputPorts)
s"$name-in-$which"
}
def outputPort(which: Int): Port = {
assert(which < outputPorts)
s"$name-out-$which"
}
def portInstructions: Map[Port, Instruction]
def links: Map[Port, Port]
}
abstract class RegularVirtualDevice[+Config](
name: String,
inputPorts: Int,
outputPorts: Int,
config: Config)
extends VirtualDevice(name, inputPorts, outputPorts, config) {
// It is generally the case that regular VDs don't have links, otherwise
// they would be composite VDs.
//
// However, if that's not the case, this method can still be overridden,
// this is just the default.
override def links: Map[Port, Port] = Map.empty
}
abstract class CompositeVirtualDevice[+Config](
name: String,
inputPorts: Int,
outputPorts: Int,
config: Config)
extends VirtualDevice[Config](name, inputPorts, outputPorts, config) {
final override def portInstructions: Map[Port, Instruction] =
compPortInstructions ++ devices.flatMap(_.portInstructions)
final override def links: Map[Port, Port] =
newLinks ++ devices.flatMap(_.links)
// Composites should be composed of some other virtual devices. We use this
// to ensure that the links and the port instructions are correctly
// accumulated.
protected def devices: List[VirtualDevice[_]]
// Each composite VD should define the links it adds.
protected def newLinks: Map[Port, Port]
// It is generally the case that composite VDs don't have port instructions
// themselves, but they link together VDs which do.
//
// However, if there is any composite VD which needs to add some port
// instructions, this method can still be overridden, this is just the
// default.
protected def compPortInstructions: Map[Port, Instruction] = Map.empty
}
abstract class VirtualDeviceBuilder[T <: VirtualDevice[_]](name: String) {
def build: T
}
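// A minimal sketch of a concrete device (hypothetical, not part of the
// original sources): a one-input, one-output pass-through wire. `Forward`
// stands in for whatever SEFL instruction the surrounding package provides
// for moving packets to a port.
//
//   class WireVD(name: String) extends RegularVirtualDevice[Unit](name, 1, 1, ()) {
//     override def portInstructions: Map[Port, Instruction] =
//       Map(inputPort(0) -> Forward(outputPort(0)))
//   }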
|
calincru/iptables-sefl
|
src/main/scala/org/symnet/models/iptables/virtdev/devices/VirtualDeviceBase.scala
|
Scala
|
mit
| 2,379
|
package com.productfoundry.play.hal
import play.api.mvc.Call
trait Related[A] {
def name: String
def item: A
def cardinality: Related.Cardinality.Cardinality
def single(): Related[A]
}
case class RelatedLink(
name: String,
item: Link,
cardinality: Related.Cardinality.Cardinality = Related.Cardinality.Multiple
) extends Related[Link] {
override def single(): RelatedLink = copy(cardinality = Related.Cardinality.Single)
}
object RelatedLink {
def apply(call: Call, item: Link): RelatedLink = new RelatedLink(call.url, item, Related.Cardinality.Multiple)
}
case class RelatedResource(
name: String,
item: Resource,
cardinality: Related.Cardinality.Cardinality = Related.Cardinality.Multiple
) extends Related[Resource] {
override def single(): RelatedResource = copy(cardinality = Related.Cardinality.Single)
}
object RelatedResource {
def apply(call: Call, item: Resource): RelatedResource = new RelatedResource(call.url, item, Related.Cardinality.Multiple)
}
object Related {
object Cardinality extends Enumeration {
type Cardinality = Value
val Single, Multiple = Value
}
}
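// A minimal usage sketch (the call and the resource value are hypothetical):
// an embedded resource that should render as a single object rather than as
// an array of one element:
//
//   RelatedResource(routes.Orders.get(id), orderResource).single()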
|
Product-Foundry/hal-scala
|
src/main/scala/com/productfoundry/play/hal/Related.scala
|
Scala
|
apache-2.0
| 1,131
|
object ControlUnit
// This set of comments is to answer the question in Exercise 5.
// Responsibilities:
// 1. check if sensors are triggered etc.. (poll the sensors)
class ControlUnit (sensors: List[Sensor]) {
  def pollSensors(): Unit = {
    for (sensor <- sensors) {
      if (sensor.isTriggered) {
        println(s"A ${sensor.getSensorType} sensor was triggered at ${sensor.getLocation}")
      } else {
        println(s"Polled ${sensor.getSensorType} at ${sensor.getLocation} successfully")
      }
    }
  }
}
|
BBK-PiJ-2015-67/sdp-portfolio
|
exercises/week02/alarmsystem-scala/src/ControlUnit.scala
|
Scala
|
unlicense
| 553
|
/*
Copyright 2016 Shyam Anand
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package modules
import actors.{WSLogger, WSActor, Supervisor}
import akka.routing.BalancingPool
import com.google.inject.AbstractModule
import play.api.libs.concurrent.AkkaGuiceSupport
/**
* Created by shyam on 16/03/16.
*/
class ActorModule extends AbstractModule with AkkaGuiceSupport {
override def configure(): Unit = {
bindActor[Supervisor]("supervisor")
bindActor[WSActor]("ws-actor", BalancingPool(5).props)
bindActor[WSLogger]("ws-logger")
}
}
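// A sketch of how a bound actor is then obtained elsewhere (the controller is
// hypothetical; the pattern is standard Play/Guice named injection, with
// javax.inject.{Inject, Named}):
//
//   class WSController @Inject() (@Named("ws-actor") wsActor: ActorRef) { /* ... */ }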
|
shyam-anand/play-scala-akka-slick
|
app/modules/ActorModule.scala
|
Scala
|
apache-2.0
| 1,063
|
/**
* © 2019 Refinitiv. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.build
import sbt._
import scala.concurrent.Future
object CMWellCommon {
val release = "Atom"
object Tags {
val ES = sbt.Tags.Tag("elasticsearch")
val Cassandra = sbt.Tags.Tag("cassandra")
val Kafka = sbt.Tags.Tag("kafka")
val Grid = sbt.Tags.Tag("grid")
val IntegrationTests = sbt.Tags.Tag("integrationTests")
}
//why work hard? see: http://www.scala-sbt.org/release/docs/Detailed-Topics/Mapping-Files.html#relative-to-a-directory
  def files2TupleRec(pathPrefix: String, dir: File): Seq[(File, String)] = {
sbt.IO.listFiles(dir) flatMap {
f => {
if(f.isFile) Seq((f,s"${pathPrefix}${f.getName}"))
else files2TupleRec(s"${pathPrefix}${f.getName}/",f)
}
}
}
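  // For example (hypothetical layout): for a directory `conf/` containing
  // `app.conf` and `extra/log.conf`, files2TupleRec("etc/", file("conf"))
  // yields Seq((conf/app.conf, "etc/app.conf"), (conf/extra/log.conf, "etc/extra/log.conf")).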
// case class ProcessLoggerImpl(f: File) extends ProcessLogger {
//
  //    def out(o: => String) = sbt.IO.append(f, s"[OUT]: $o\n")
  //    def err(e: => String) = sbt.IO.append(f, s"[ERR]: $e\n")
// }
def copyLogs(destinationDir: File, sourceDir: File): Unit = {
val listLogs = sbt.IO.listFiles(new java.io.FileFilter{def accept(f: File): Boolean = f.getName.endsWith(".log") && f.isFile}) _
val listDirs = sbt.IO.listFiles(new java.io.FileFilter{def accept(f: File): Boolean = f.isDirectory}) _
def recHelper(dstDir: File, srcDir: File): Unit = {
val nextDir = dstDir / srcDir.getName
listLogs(srcDir).foreach(log => sbt.IO.copyFile(log, nextDir / log.getName))
listDirs(srcDir).foreach(dir => recHelper(nextDir, dir))
}
recHelper(destinationDir, sourceDir)
}
def generateLogbackXML(filename: String, pwd: String): String = {
val xml = s"""<configuration>
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>logs/${filename}.log</file>
<encoder>
<pattern>%date %level [%thread] %logger{10} [%file:%line] %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
<maxIndex>5</maxIndex>
<FileNamePattern>logs/${filename}.log.%i.gz</FileNamePattern>
</rollingPolicy>
<triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
<maxFileSize>10MB</maxFileSize>
</triggeringPolicy>
</appender>
<root level="debug">
<appender-ref ref="FILE"/>
</root>
</configuration>"""
val xmlFile = file(pwd) / filename / "logback.xml"
sbt.IO.write(xmlFile, xml)
xmlFile.getAbsolutePath
}
def combineThrowablesAsCause(t1: Throwable, t2: Throwable)(f: Throwable => Throwable): Throwable =
f(Option(t1.getCause).fold(t1.initCause(t2)){ _ =>
Option(t2.getCause).fold(t2.initCause(t1)){ _ =>
t2
}
})
def combineThrowablesAsCauseAsync[T](t1: Throwable, t2: Throwable)(f: Throwable => Throwable): Future[T] =
Future.failed[T](combineThrowablesAsCause(t1,t2)(f))
}
|
e-orz/CM-Well
|
server/project/cmwell-build-plugin/src/main/scala/cmwell/build/CMWellCommon.scala
|
Scala
|
apache-2.0
| 3,466
|
/**
* Copyright (C) 2016 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.fr.persistence.relational.index.status
// Functions called by UI
object UI {
def status : String = StatusStore.getStatus.name
def stop() : Unit = StatusStore.setStatus(Status.Stopping)
def getProviderToken = Some(StatusStore.getStatus).collect{case Status.Indexing(p, _, _) => p }.getOrElse("")
def getProviderCurrent = Some(StatusStore.getStatus).collect{case Status.Indexing(_, c, _) => c.current }.getOrElse(0)
def getProviderTotal = Some(StatusStore.getStatus).collect{case Status.Indexing(_, c, _) => c.total }.getOrElse(0)
def getDocumentCurrent = Some(StatusStore.getStatus).collect{case Status.Indexing(_, _, Some(d)) => d.current }.getOrElse(0)
def getDocumentTotal = Some(StatusStore.getStatus).collect{case Status.Indexing(_, _, Some(d)) => d.total }.getOrElse(0)
}
|
orbeon/orbeon-forms
|
form-runner/jvm/src/main/scala/org/orbeon/oxf/fr/persistence/relational/index/status/UI.scala
|
Scala
|
lgpl-2.1
| 1,505
|
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.actor
import language.existentials
import scala.util.control.NonFatal
import scala.util.{ Try, Success, Failure }
import scala.collection.immutable
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.duration.Duration
import scala.reflect.ClassTag
import scala.concurrent.{ Await, Future }
import akka.japi.{ Creator, Option ⇒ JOption }
import akka.japi.Util.{ immutableSeq, immutableSingletonSeq }
import akka.util.Timeout
import akka.util.Reflect.instantiator
import akka.serialization.{ JavaSerializer, SerializationExtension }
import akka.dispatch._
import java.util.concurrent.atomic.{ AtomicReference ⇒ AtomVar }
import java.util.concurrent.TimeoutException
import java.util.concurrent.TimeUnit.MILLISECONDS
import java.io.ObjectStreamException
import java.lang.reflect.{ InvocationTargetException, Method, InvocationHandler, Proxy }
import akka.pattern.AskTimeoutException
/**
 * A TypedActorFactory is something that can create TypedActor instances.
*/
trait TypedActorFactory {
/**
* Underlying dependency is to be able to create normal Actors
*/
protected def actorFactory: ActorRefFactory
/**
* Underlying dependency to a TypedActorExtension, which can either be contextual or ActorSystem "global"
*/
protected def typedActor: TypedActorExtension
/**
   * Stops the underlying ActorRef for the supplied TypedActor proxy,
   * if any; returns whether it could find the ActorRef or not.
*/
def stop(proxy: AnyRef): Boolean = getActorRefFor(proxy) match {
case null ⇒ false
case ref ⇒ ref.asInstanceOf[InternalActorRef].stop; true
}
/**
   * Sends a PoisonPill to the underlying ActorRef for the supplied TypedActor proxy,
   * if any; returns whether it could find the ActorRef or not.
*/
def poisonPill(proxy: AnyRef): Boolean = getActorRefFor(proxy) match {
case null ⇒ false
case ref ⇒ ref ! PoisonPill; true
}
/**
* Returns whether the supplied AnyRef is a TypedActor proxy or not
*/
def isTypedActor(proxyOrNot: AnyRef): Boolean
/**
* Retrieves the underlying ActorRef for the supplied TypedActor proxy, or null if none found
*/
def getActorRefFor(proxy: AnyRef): ActorRef
/**
* Creates a new TypedActor with the specified properties
*/
def typedActorOf[R <: AnyRef, T <: R](props: TypedProps[T]): R = {
val proxyVar = new AtomVar[R] //Chicken'n'egg-resolver
val c = props.creator //Cache this to avoid closing over the Props
val i = props.interfaces //Cache this to avoid closing over the Props
val ap = Props(new TypedActor.TypedActor[R, T](proxyVar, c(), i)).withDeploy(props.actorProps.deploy)
typedActor.createActorRefProxy(props, proxyVar, actorFactory.actorOf(ap))
}
/**
* Creates a new TypedActor with the specified properties
*/
def typedActorOf[R <: AnyRef, T <: R](props: TypedProps[T], name: String): R = {
val proxyVar = new AtomVar[R] //Chicken'n'egg-resolver
val c = props.creator //Cache this to avoid closing over the Props
val i = props.interfaces //Cache this to avoid closing over the Props
val ap = Props(new akka.actor.TypedActor.TypedActor[R, T](proxyVar, c(), i)).withDeploy(props.actorProps.deploy)
typedActor.createActorRefProxy(props, proxyVar, actorFactory.actorOf(ap, name))
}
/**
* Creates a TypedActor that intercepts the calls and forwards them as [[akka.actor.TypedActor.MethodCall]]
* to the provided ActorRef.
*/
def typedActorOf[R <: AnyRef, T <: R](props: TypedProps[T], actorRef: ActorRef): R =
typedActor.createActorRefProxy(props, null: AtomVar[R], actorRef)
}
/**
* This represents the TypedActor Akka Extension, access to the functionality is done through a given ActorSystem.
*/
object TypedActor extends ExtensionId[TypedActorExtension] with ExtensionIdProvider {
override def get(system: ActorSystem): TypedActorExtension = super.get(system)
def lookup() = this
def createExtension(system: ExtendedActorSystem): TypedActorExtension = new TypedActorExtension(system)
/**
* Returns a contextual TypedActorFactory of this extension, this means that any TypedActors created by this TypedActorExtension
* will be children to the specified context, this allows for creating hierarchies of TypedActors.
* Do _not_ let this instance escape the TypedActor since that will not be thread-safe.
*/
def apply(context: ActorContext): TypedActorFactory = ContextualTypedActorFactory(apply(context.system), context)
/**
* Returns a contextual TypedActorFactory of this extension, this means that any TypedActors created by this TypedActorExtension
* will be children to the specified context, this allows for creating hierarchies of TypedActors.
* Do _not_ let this instance escape the TypedActor since that will not be thread-safe.
*
* Java API
*/
def get(context: ActorContext): TypedActorFactory = apply(context)
/**
   * This class represents a method call, and has a reference to the Method to be called and the parameters to supply.
   * It's sent to the ActorRef backing the TypedActor and can be serialized and deserialized.
*/
final case class MethodCall(method: Method, parameters: Array[AnyRef]) {
def isOneWay = method.getReturnType == java.lang.Void.TYPE
def returnsFuture = classOf[Future[_]] isAssignableFrom method.getReturnType
def returnsJOption = classOf[akka.japi.Option[_]] isAssignableFrom method.getReturnType
def returnsOption = classOf[scala.Option[_]] isAssignableFrom method.getReturnType
/**
* Invokes the Method on the supplied instance
*
* Throws the underlying exception if there's an InvocationTargetException thrown on the invocation.
*/
def apply(instance: AnyRef): AnyRef = try {
parameters match {
case null ⇒ method.invoke(instance)
case args if args.length == 0 ⇒ method.invoke(instance)
case args ⇒ method.invoke(instance, args: _*)
}
} catch { case i: InvocationTargetException ⇒ throw i.getTargetException }
@throws(classOf[ObjectStreamException]) private def writeReplace(): AnyRef = parameters match {
case null ⇒ SerializedMethodCall(method.getDeclaringClass, method.getName, method.getParameterTypes, null)
case ps if ps.length == 0 ⇒ SerializedMethodCall(method.getDeclaringClass, method.getName, method.getParameterTypes, Array())
case ps ⇒
val serialization = SerializationExtension(akka.serialization.JavaSerializer.currentSystem.value)
val serializedParameters = Array.ofDim[(Int, Class[_], Array[Byte])](ps.length)
for (i ← 0 until ps.length) {
val p = ps(i)
val s = serialization.findSerializerFor(p)
val m = if (s.includeManifest) p.getClass else null
serializedParameters(i) = (s.identifier, m, s toBinary parameters(i)) //Mutable for the sake of sanity
}
SerializedMethodCall(method.getDeclaringClass, method.getName, method.getParameterTypes, serializedParameters)
}
}
/**
* INTERNAL API
*
* Represents the serialized form of a MethodCall, uses readResolve and writeReplace to marshall the call
*/
private[akka] final case class SerializedMethodCall(ownerType: Class[_], methodName: String, parameterTypes: Array[Class[_]], serializedParameters: Array[(Int, Class[_], Array[Byte])]) {
//TODO implement writeObject and readObject to serialize
//TODO Possible optimization is to special encode the parameter-types to conserve space
@throws(classOf[ObjectStreamException]) private def readResolve(): AnyRef = {
val system = akka.serialization.JavaSerializer.currentSystem.value
if (system eq null) throw new IllegalStateException(
"Trying to deserialize a SerializedMethodCall without an ActorSystem in scope." +
" Use akka.serialization.Serialization.currentSystem.withValue(system) { ... }")
val serialization = SerializationExtension(system)
MethodCall(ownerType.getDeclaredMethod(methodName, parameterTypes: _*), serializedParameters match {
case null ⇒ null
case a if a.length == 0 ⇒ Array[AnyRef]()
case a ⇒
val deserializedParameters: Array[AnyRef] = Array.ofDim[AnyRef](a.length) //Mutable for the sake of sanity
for (i ← 0 until a.length) {
val (sId, manifest, bytes) = a(i)
deserializedParameters(i) =
serialization.serializerByIdentity(sId).fromBinary(bytes, Option(manifest))
}
deserializedParameters
})
}
}
private val selfReference = new ThreadLocal[AnyRef]
private val currentContext = new ThreadLocal[ActorContext]
@SerialVersionUID(1L)
private case object NullResponse
/**
* Returns the reference to the proxy when called inside a method call in a TypedActor
*
* Example:
* <p/>
* class FooImpl extends Foo {
* def doFoo {
* val myself = TypedActor.self[Foo]
* }
* }
*
* Useful when you want to send a reference to this TypedActor to someone else.
*
* NEVER EXPOSE "this" to someone else, always use "self[TypeOfInterface(s)]"
*
* Throws IllegalStateException if called outside of the scope of a method on this TypedActor.
*
* Throws ClassCastException if the supplied type T isn't the type of the proxy associated with this TypedActor.
*/
def self[T <: AnyRef] = selfReference.get.asInstanceOf[T] match {
case null ⇒ throw new IllegalStateException("Calling TypedActor.self outside of a TypedActor implementation method!")
case some ⇒ some
}
/**
* Returns the ActorContext (for a TypedActor) when inside a method call in a TypedActor.
*/
def context: ActorContext = currentContext.get match {
case null ⇒ throw new IllegalStateException("Calling TypedActor.context outside of a TypedActor implementation method!")
case some ⇒ some
}
/**
* Returns the default dispatcher (for a TypedActor) when inside a method call in a TypedActor.
*/
implicit def dispatcher = context.dispatcher
/**
* INTERNAL API
*
* Implementation of TypedActor as an Actor
*/
private[akka] class TypedActor[R <: AnyRef, T <: R](val proxyVar: AtomVar[R], createInstance: ⇒ T, interfaces: immutable.Seq[Class[_]]) extends Actor {
// if we were remote deployed we need to create a local proxy
if (!context.parent.asInstanceOf[InternalActorRef].isLocal)
TypedActor.get(context.system).createActorRefProxy(
TypedProps(interfaces, createInstance), proxyVar, context.self)
private val me = withContext[T](createInstance)
override def supervisorStrategy: SupervisorStrategy = me match {
case l: Supervisor ⇒ l.supervisorStrategy
case _ ⇒ super.supervisorStrategy
}
override def preStart(): Unit = withContext {
me match {
case l: PreStart ⇒ l.preStart()
case _ ⇒ super.preStart()
}
}
override def postStop(): Unit = try {
withContext {
me match {
case l: PostStop ⇒ l.postStop()
case _ ⇒ super.postStop()
}
}
} finally {
TypedActor(context.system).invocationHandlerFor(proxyVar.get) match {
case null ⇒
case some ⇒
some.actorVar.set(context.system.deadLetters) //Point it to the DLQ
proxyVar.set(null.asInstanceOf[R])
}
}
override def preRestart(reason: Throwable, message: Option[Any]): Unit = withContext {
me match {
case l: PreRestart ⇒ l.preRestart(reason, message)
case _ ⇒ context.children foreach context.stop //Can't be super.preRestart(reason, message) since that would invoke postStop which would set the actorVar to DL and proxyVar to null
}
}
override def postRestart(reason: Throwable): Unit = withContext {
me match {
case l: PostRestart ⇒ l.postRestart(reason)
case _ ⇒ super.postRestart(reason)
}
}
protected def withContext[U](unitOfWork: ⇒ U): U = {
TypedActor.selfReference set proxyVar.get
TypedActor.currentContext set context
try unitOfWork finally {
TypedActor.selfReference set null
TypedActor.currentContext set null
}
}
def receive = {
case m: MethodCall ⇒ withContext {
if (m.isOneWay) m(me)
else {
try {
val s = sender()
m(me) match {
case f: Future[_] if m.returnsFuture ⇒
implicit val dispatcher = context.dispatcher
f onComplete {
case Success(null) ⇒ s ! NullResponse
case Success(result) ⇒ s ! result
case Failure(f) ⇒ s ! Status.Failure(f)
}
case null ⇒ s ! NullResponse
case result ⇒ s ! result
}
} catch {
case NonFatal(e) ⇒
sender() ! Status.Failure(e)
throw e
}
}
}
case msg if me.isInstanceOf[Receiver] ⇒ withContext {
me.asInstanceOf[Receiver].onReceive(msg, sender())
}
}
}
/**
* Mix this into your TypedActor to be able to define supervisor strategy
*/
trait Supervisor {
/**
* User overridable definition the strategy to use for supervising
* child actors.
*/
def supervisorStrategy(): SupervisorStrategy
}
/**
* Mix this into your TypedActor to be able to intercept Terminated messages
*/
trait Receiver {
def onReceive(message: Any, sender: ActorRef): Unit
}
/**
* Mix this into your TypedActor to be able to hook into its lifecycle
*/
trait PreStart {
/**
* User overridable callback.
* <p/>
* Is called when an Actor is started by invoking 'actor'.
*/
def preStart(): Unit
}
/**
* Mix this into your TypedActor to be able to hook into its lifecycle
*/
trait PostStop {
/**
* User overridable callback.
* <p/>
* Is called when 'actor.stop()' is invoked.
*/
def postStop(): Unit
}
/**
* Mix this into your TypedActor to be able to hook into its lifecycle
*/
trait PreRestart {
/**
* User overridable callback: '''By default it disposes of all children and then calls `postStop()`.'''
* @param reason the Throwable that caused the restart to happen
* @param message optionally the current message the actor processed when failing, if applicable
* <p/>
* Is called on a crashed Actor right BEFORE it is restarted to allow clean
* up of resources before Actor is terminated.
* By default it terminates all children and calls postStop()
*/
def preRestart(reason: Throwable, message: Option[Any]): Unit
}
trait PostRestart {
/**
* User overridable callback: By default it calls `preStart()`.
* @param reason the Throwable that caused the restart to happen
* <p/>
* Is called right AFTER restart on the newly created Actor to allow reinitialization after an Actor crash.
*/
def postRestart(reason: Throwable): Unit
}
/**
* INTERNAL API
*/
private[akka] class TypedActorInvocationHandler(@transient val extension: TypedActorExtension, @transient val actorVar: AtomVar[ActorRef], @transient val timeout: Timeout) extends InvocationHandler with Serializable {
def actor = actorVar.get
@throws(classOf[Throwable])
def invoke(proxy: AnyRef, method: Method, args: Array[AnyRef]): AnyRef = method.getName match {
case "toString" ⇒ actor.toString
case "equals" ⇒ (args.length == 1 && (proxy eq args(0)) || actor == extension.getActorRefFor(args(0))).asInstanceOf[AnyRef] //Force boxing of the boolean
case "hashCode" ⇒ actor.hashCode.asInstanceOf[AnyRef]
case _ ⇒
implicit val dispatcher = extension.system.dispatcher
import akka.pattern.ask
MethodCall(method, args) match {
case m if m.isOneWay ⇒
actor ! m; null //Null return value
case m if m.returnsFuture ⇒ ask(actor, m)(timeout) map {
case NullResponse ⇒ null
case other ⇒ other
}
case m if m.returnsJOption || m.returnsOption ⇒
val f = ask(actor, m)(timeout)
(try { Await.ready(f, timeout.duration).value } catch { case _: TimeoutException ⇒ None }) match {
case None | Some(Success(NullResponse)) | Some(Failure(_: AskTimeoutException)) ⇒
if (m.returnsJOption) JOption.none[Any] else None
case Some(t: Try[_]) ⇒
t.get.asInstanceOf[AnyRef]
}
case m ⇒ Await.result(ask(actor, m)(timeout), timeout.duration) match {
case NullResponse ⇒ null
case other ⇒ other.asInstanceOf[AnyRef]
}
}
}
@throws(classOf[ObjectStreamException]) private def writeReplace(): AnyRef = SerializedTypedActorInvocationHandler(actor, timeout.duration)
}
/**
* INTERNAL API
*/
private[akka] final case class SerializedTypedActorInvocationHandler(val actor: ActorRef, val timeout: FiniteDuration) {
@throws(classOf[ObjectStreamException]) private def readResolve(): AnyRef = JavaSerializer.currentSystem.value match {
case null ⇒ throw new IllegalStateException("SerializedTypedActorInvocationHandler.readResolve requires that JavaSerializer.currentSystem.value is set to a non-null value")
case some ⇒ toTypedActorInvocationHandler(some)
}
def toTypedActorInvocationHandler(system: ActorSystem): TypedActorInvocationHandler =
new TypedActorInvocationHandler(TypedActor(system), new AtomVar[ActorRef](actor), new Timeout(timeout))
}
}
/**
 * TypedProps is a TypedActor configuration object that is thread safe and fully sharable.
* It's used in TypedActorFactory.typedActorOf to configure a TypedActor instance.
*/
object TypedProps {
val defaultDispatcherId: String = Dispatchers.DefaultDispatcherId
val defaultTimeout: Option[Timeout] = None
val defaultLoader: Option[ClassLoader] = None
/**
* @return a sequence of interfaces that the specified class implements,
   * or a sequence containing only the class itself, if it is itself an interface.
*/
def extractInterfaces(clazz: Class[_]): immutable.Seq[Class[_]] =
if (clazz.isInterface) immutableSingletonSeq(clazz) else immutableSeq(clazz.getInterfaces)
/**
* Uses the supplied class as the factory for the TypedActor implementation,
* proxying all the interfaces it implements.
*
* Scala API
*/
def apply[T <: AnyRef](implementation: Class[T]): TypedProps[T] =
new TypedProps[T](implementation)
/**
* Uses the supplied class as the factory for the TypedActor implementation,
* and that has the specified interface,
* or if the interface class is not an interface, all the interfaces it implements,
* appended in the sequence of interfaces.
*
* Scala API
*/
def apply[T <: AnyRef](interface: Class[_ >: T], implementation: Class[T]): TypedProps[T] =
new TypedProps[T](extractInterfaces(interface), instantiator(implementation))
/**
* Uses the supplied thunk as the factory for the TypedActor implementation,
* and that has the specified interface,
* or if the interface class is not an interface, all the interfaces it implements,
* appended in the sequence of interfaces.
*
* Scala API
*/
def apply[T <: AnyRef](interface: Class[_ >: T], creator: ⇒ T): TypedProps[T] =
new TypedProps[T](extractInterfaces(interface), () ⇒ creator)
/**
* Uses the supplied class as the factory for the TypedActor implementation,
* proxying all the interfaces it implements.
*
* Scala API
*/
def apply[T <: AnyRef: ClassTag](): TypedProps[T] =
new TypedProps[T](implicitly[ClassTag[T]].runtimeClass.asInstanceOf[Class[T]])
/**
* INTERNAL API
*/
private[akka] def apply[T <: AnyRef](interfaces: immutable.Seq[Class[_]], creator: ⇒ T): TypedProps[T] =
new TypedProps[T](interfaces, () ⇒ creator)
}
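// A minimal usage sketch, assuming an ActorSystem `system` is in scope
// (Squarer/SquarerImpl are hypothetical user types):
//
//   trait Squarer { def square(i: Int): Future[Int] }
//   class SquarerImpl extends Squarer {
//     def square(i: Int): Future[Int] = Future.successful(i * i)
//   }
//   val squarer: Squarer = TypedActor(system).typedActorOf(TypedProps[SquarerImpl]())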
/**
 * TypedProps is a TypedActor configuration object that is thread safe and fully sharable.
* It's used in TypedActorFactory.typedActorOf to configure a TypedActor instance.
*/
@SerialVersionUID(1L)
final case class TypedProps[T <: AnyRef] protected[TypedProps] (
interfaces: immutable.Seq[Class[_]],
creator: () ⇒ T,
dispatcher: String = TypedProps.defaultDispatcherId,
deploy: Deploy = Props.defaultDeploy,
timeout: Option[Timeout] = TypedProps.defaultTimeout,
loader: Option[ClassLoader] = TypedProps.defaultLoader) {
/**
* Uses the supplied class as the factory for the TypedActor implementation,
   * proxying all the interfaces it implements.
*/
def this(implementation: Class[T]) =
this(interfaces = TypedProps.extractInterfaces(implementation),
creator = instantiator(implementation))
/**
* Java API: Uses the supplied Creator as the factory for the TypedActor implementation,
* and that has the specified interface,
* or if the interface class is not an interface, all the interfaces it implements,
* appended in the sequence of interfaces.
*/
def this(interface: Class[_ >: T], implementation: Creator[T]) =
this(interfaces = TypedProps.extractInterfaces(interface),
creator = implementation.create _)
/**
* Java API: Uses the supplied class as the factory for the TypedActor implementation,
* and that has the specified interface,
* or if the interface class is not an interface, all the interfaces it implements,
* appended in the sequence of interfaces.
*/
def this(interface: Class[_ >: T], implementation: Class[T]) =
this(interfaces = TypedProps.extractInterfaces(interface),
creator = instantiator(implementation))
/**
* Returns a new TypedProps with the specified dispatcher set.
*/
def withDispatcher(d: String): TypedProps[T] = copy(dispatcher = d)
/**
* Returns a new TypedProps with the specified deployment configuration.
*/
def withDeploy(d: Deploy): TypedProps[T] = copy(deploy = d)
/**
   * Java API: return a new TypedProps that will use the specified ClassLoader to create its proxy class in.
   * If loader is null, it will use the bootstrap classloader.
*/
def withLoader(loader: ClassLoader): TypedProps[T] = withLoader(Option(loader))
/**
   * Scala API: return a new TypedProps that will use the specified ClassLoader to create its proxy class in.
   * If loader is null, it will use the bootstrap classloader.
*/
def withLoader(loader: Option[ClassLoader]): TypedProps[T] = this.copy(loader = loader)
/**
   * Java API: return a new TypedProps that will use the specified Timeout for its non-void-returning methods;
   * if null is specified, it will use the default timeout as specified in the configuration.
*/
def withTimeout(timeout: Timeout): TypedProps[T] = this.copy(timeout = Option(timeout))
/**
   * Scala API: return a new TypedProps that will use the specified Timeout for its non-void-returning methods;
   * if None is specified, it will use the default timeout as specified in the configuration.
*/
def withTimeout(timeout: Option[Timeout]): TypedProps[T] = this.copy(timeout = timeout)
/**
* Returns a new TypedProps that has the specified interface,
* or if the interface class is not an interface, all the interfaces it implements,
* appended in the sequence of interfaces.
*/
def withInterface(interface: Class[_ >: T]): TypedProps[T] =
this.copy(interfaces = interfaces ++ TypedProps.extractInterfaces(interface))
/**
* Returns a new TypedProps without the specified interface,
* or if the interface class is not an interface, all the interfaces it implements.
*/
def withoutInterface(interface: Class[_ >: T]): TypedProps[T] =
this.copy(interfaces = interfaces diff TypedProps.extractInterfaces(interface))
/**
* Returns the akka.actor.Props representation of this TypedProps
*/
def actorProps(): Props =
if (dispatcher == Props.default.dispatcher)
Props.default.withDeploy(deploy)
else Props.default.withDispatcher(dispatcher).withDeploy(deploy)
}
/**
* ContextualTypedActorFactory allows TypedActors to create children, effectively forming the same Actor Supervision Hierarchies
* as normal Actors can.
*/
final case class ContextualTypedActorFactory(typedActor: TypedActorExtension, actorFactory: ActorContext) extends TypedActorFactory {
override def getActorRefFor(proxy: AnyRef): ActorRef = typedActor.getActorRefFor(proxy)
override def isTypedActor(proxyOrNot: AnyRef): Boolean = typedActor.isTypedActor(proxyOrNot)
}
class TypedActorExtension(val system: ExtendedActorSystem) extends TypedActorFactory with Extension {
import TypedActor._ //Import the goodies from the companion object
protected def actorFactory: ActorRefFactory = system
protected def typedActor = this
import system.settings
import akka.util.Helpers.ConfigOps
/**
* Default timeout for typed actor methods with non-void return type
*/
final val DefaultReturnTimeout = Timeout(settings.config.getMillisDuration("akka.actor.typed.timeout"))
/**
* Retrieves the underlying ActorRef for the supplied TypedActor proxy, or null if none found
*/
def getActorRefFor(proxy: AnyRef): ActorRef = invocationHandlerFor(proxy) match {
case null ⇒ null
case handler ⇒ handler.actor
}
/**
* Returns whether the supplied AnyRef is a TypedActor proxy or not
*/
def isTypedActor(proxyOrNot: AnyRef): Boolean = invocationHandlerFor(proxyOrNot) ne null
// Private API
/**
* INTERNAL API
*/
private[akka] def createActorRefProxy[R <: AnyRef, T <: R](props: TypedProps[T], proxyVar: AtomVar[R], actorRef: ⇒ ActorRef): R = {
//Warning, do not change order of the following statements, it's some elaborate chicken-n-egg handling
val actorVar = new AtomVar[ActorRef](null)
val proxy = Proxy.newProxyInstance(
(props.loader orElse props.interfaces.collectFirst { case any ⇒ any.getClassLoader }).orNull, //If we have no loader, we arbitrarily take the loader of the first interface
props.interfaces.toArray,
new TypedActorInvocationHandler(this, actorVar, props.timeout getOrElse DefaultReturnTimeout)).asInstanceOf[R]
if (proxyVar eq null) {
actorVar set actorRef
proxy
} else {
proxyVar set proxy // Chicken and egg situation we needed to solve, set the proxy so that we can set the self-reference inside each receive
actorVar set actorRef //Make sure the InvocationHandler gets a hold of the actor reference, this is not a problem since the proxy hasn't escaped this method yet
proxyVar.get
}
}
/**
* INTERNAL API
*/
private[akka] def invocationHandlerFor(typedActor: AnyRef): TypedActorInvocationHandler =
if ((typedActor ne null) && classOf[Proxy].isAssignableFrom(typedActor.getClass) && Proxy.isProxyClass(typedActor.getClass)) typedActor match {
case null ⇒ null
case other ⇒ Proxy.getInvocationHandler(other) match {
case null ⇒ null
case handler: TypedActorInvocationHandler ⇒ handler
case _ ⇒ null
}
}
else null
}
|
jmnarloch/akka.js
|
akka-js-actor/jvm/src/main/scala/akka/actor/TypedActor.scala
|
Scala
|
bsd-3-clause
| 27,785
|
package com.github.spirom.sparkflights.experiments
import com.github.spirom.sparkflights.experiments.common.ByYearAdderCombiner
import com.github.spirom.sparkflights.fw.CoreExperiment
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.DataFrame
class TopAirlinesByAnnualDeparturesCore(sc: SparkContext)
extends CoreExperiment("TopAirlinesByAnnualDeparturesCore", sc) {
def runUserCode(sc: SparkContext, df: DataFrame, outputBase: String): Unit = {
val departures: RDD[(String, Int)] =
df.select("uniquecarrier", "year").map(r =>
(r.getString(0), r.getInt(1)))
val comb = new ByYearAdderCombiner[String]
val byCarrierKey =
comb.aggregateByKey(departures)
val carriersWithAverage = byCarrierKey.map(
      { case (carrier, acc) => (carrier, acc.average()) }
)
val sortedByAverage =
carriersWithAverage.sortBy( { case (_, ave) => ave }, ascending=false)
val top50 = sc.parallelize(sortedByAverage.take(50), 1)
top50.saveAsTextFile(s"$outputBase/top_carriers_ave_departures")
val totalCount = sortedByAverage.count()
sc.parallelize(Seq(totalCount), 1).saveAsTextFile(s"$outputBase/carrier_count")
}
}
|
spirom/SparkFlightExamples
|
src/main/scala/com/github/spirom/sparkflights/experiments/TopAirlinesByAnnualDeparturesCore.scala
|
Scala
|
mit
| 1,215
|
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar
import slamdata.Predef._
import quasar.fp.ski.κ
import quasar.effect.MonotonicSeq
import simulacrum.typeclass
import scalaz._
import scalaz.concurrent.Task
import scalaz.syntax.functor._
/** A source of strings unique within `F[_]`. An implementation must have the
  * property that, given `Applicative[F]`, `(freshName |@| freshName)(_ != _)` holds.
*/
@typeclass trait NameGenerator[F[_]] {
/** Returns a fresh name, guaranteed to be unique among all the other names
* generated from `F`.
*/
def freshName: F[String]
/** Returns a fresh name, prefixed with the given string. */
def prefixedName(prefix: String)(implicit F: Functor[F]): F[String] =
freshName map (prefix + _)
}
object NameGenerator extends NameGeneratorInstances {
/** A short, randomized string to use as "salt" in salted name generators. */
val salt: Task[String] =
Task.delay(scala.util.Random.nextInt().toHexString)
}
sealed abstract class NameGeneratorInstances extends NameGeneratorInstances0 {
implicit def sequenceNameGenerator[F[_]](implicit F: MonadState[F, Long]): NameGenerator[F] =
new NameGenerator[F] {
def freshName = F.bind(F.get)(n => F.put(n + 1) as n.toString)
}
implicit def monotonicSeqNameGenerator[S[_]](implicit S: MonotonicSeq :<: S): NameGenerator[Free[S, ?]] =
new NameGenerator[Free[S, ?]] {
def freshName = MonotonicSeq.Ops[S].next map (_.toString)
}
}
sealed abstract class NameGeneratorInstances0 {
implicit def eitherTNameGenerator[F[_]: NameGenerator : Functor, A]: NameGenerator[EitherT[F, A, ?]] =
new NameGenerator[EitherT[F, A, ?]] {
def freshName = EitherT.rightT(NameGenerator[F].freshName)
}
implicit def readerTNameGenerator[F[_]: NameGenerator, A]: NameGenerator[ReaderT[F, A, ?]] =
new NameGenerator[ReaderT[F, A, ?]] {
def freshName = ReaderT(κ(NameGenerator[F].freshName))
}
implicit def stateTNameGenerator[F[_]: NameGenerator : Monad, S]: NameGenerator[StateT[F, S, ?]] =
new NameGenerator[StateT[F, S, ?]] {
def freshName = StateT(s => NameGenerator[F].freshName strengthL s)
}
implicit def writerTNameGenerator[F[_]: NameGenerator : Functor, W: Monoid]: NameGenerator[WriterT[F, W, ?]] =
new NameGenerator[WriterT[F, W, ?]] {
def freshName = WriterT.put(NameGenerator[F].freshName)(Monoid[W].zero)
}
}
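// Hedged usage sketch (not part of the original file); it assumes scalaz's
// State monad supplies the MonadState instance required by
// sequenceNameGenerator above:
//
//   import scalaz.State
//   val prog = for {
//     a <- NameGenerator[State[Long, ?]].freshName
//     b <- NameGenerator[State[Long, ?]].prefixedName("tmp")
//   } yield (a, b)
//   prog.run(0L)  // (2L, ("0", "tmp1"))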
|
jedesah/Quasar
|
connector/src/main/scala/quasar/NameGenerator.scala
|
Scala
|
apache-2.0
| 2,971
|
package io.vamp.workflow_driver
import akka.actor.{ ActorRef, ActorSystem }
import akka.util.Timeout
import io.vamp.common.{ ClassProvider, Config, Namespace }
import io.vamp.common.akka.{ ActorBootstrap, IoC }
import io.vamp.workflow_driver.notification.{ UnsupportedWorkflowDriverError, WorkflowDriverNotificationProvider }
import scala.concurrent.{ ExecutionContext, Future }
class WorkflowDriverBootstrap extends ActorBootstrap with WorkflowDriverNotificationProvider {
def createActors(implicit actorSystem: ActorSystem, namespace: Namespace, timeout: Timeout) = {
implicit val ec: ExecutionContext = actorSystem.dispatcher
val types = Config.string("vamp.workflow-driver.type")().toLowerCase.split(',').map(_.trim).toList
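    // e.g. vamp.workflow-driver.type = "kubernetes,marathon" yields
    // List("kubernetes", "marathon"); these driver names are illustrative,
    // each entry is resolved to a WorkflowDriver actor via ClassProvider below.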
val drivers: Future[List[ActorRef]] = Future.sequence(types.map { name ⇒
ClassProvider.find[WorkflowDriver](name) match {
case Some(clazz) ⇒ IoC.createActor(clazz)
case None ⇒ throwException(UnsupportedWorkflowDriverError(name))
}
})
types.foreach { t ⇒ info(s"Workflow driver: $t") }
drivers.flatMap(IoC.createActor[WorkflowDriverActor](_)).map(_ :: Nil)
}
}
|
dragoslav/vamp
|
workflow_driver/src/main/scala/io/vamp/workflow_driver/WorkflowDriverBootstrap.scala
|
Scala
|
apache-2.0
| 1,165
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import org.apache.spark.annotation.{Since, Experimental}
import org.apache.spark.ml.Transformer
import org.apache.spark.ml.attribute.{Attribute, AttributeGroup}
import org.apache.spark.ml.param.shared.{HasInputCol, HasOutputCol}
import org.apache.spark.ml.param.{IntArrayParam, ParamMap, StringArrayParam}
import org.apache.spark.ml.util._
import org.apache.spark.mllib.linalg._
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.StructType
/**
* :: Experimental ::
* This class takes a feature vector and outputs a new feature vector with a subarray of the
* original features.
*
* The subset of features can be specified with either indices ([[setIndices()]])
* or names ([[setNames()]]). At least one feature must be selected. Duplicate features
* are not allowed, so there can be no overlap between selected indices and names.
*
* The output vector will order features with the selected indices first (in the order given),
* followed by the selected names (in the order given).
*/
@Experimental
final class VectorSlicer(override val uid: String)
extends Transformer with HasInputCol with HasOutputCol with DefaultParamsWritable {
def this() = this(Identifiable.randomUID("vectorSlicer"))
/**
* An array of indices to select features from a vector column.
* There can be no overlap with [[names]].
* Default: Empty array
* @group param
*/
val indices = new IntArrayParam(this, "indices",
"An array of indices to select features from a vector column." +
" There can be no overlap with names.", VectorSlicer.validIndices)
setDefault(indices -> Array.empty[Int])
/** @group getParam */
def getIndices: Array[Int] = $(indices)
/** @group setParam */
def setIndices(value: Array[Int]): this.type = set(indices, value)
/**
* An array of feature names to select features from a vector column.
* These names must be specified by ML [[org.apache.spark.ml.attribute.Attribute]]s.
* There can be no overlap with [[indices]].
* Default: Empty Array
* @group param
*/
val names = new StringArrayParam(this, "names",
"An array of feature names to select features from a vector column." +
" There can be no overlap with indices.", VectorSlicer.validNames)
setDefault(names -> Array.empty[String])
/** @group getParam */
def getNames: Array[String] = $(names)
/** @group setParam */
def setNames(value: Array[String]): this.type = set(names, value)
/** @group setParam */
def setInputCol(value: String): this.type = set(inputCol, value)
/** @group setParam */
def setOutputCol(value: String): this.type = set(outputCol, value)
override def validateParams(): Unit = {
require($(indices).length > 0 || $(names).length > 0,
s"VectorSlicer requires that at least one feature be selected.")
}
override def transform(dataset: DataFrame): DataFrame = {
// Validity checks
transformSchema(dataset.schema)
val inputAttr = AttributeGroup.fromStructField(dataset.schema($(inputCol)))
inputAttr.numAttributes.foreach { numFeatures =>
      // Guard: `.max` on an empty array throws, and indices may legitimately
      // be empty when features are selected by name only.
      if ($(indices).nonEmpty) {
        val maxIndex = $(indices).max
        require(maxIndex < numFeatures,
          s"Selected feature index $maxIndex invalid for only $numFeatures input features.")
      }
    }
// Prepare output attributes
val inds = getSelectedFeatureIndices(dataset.schema)
val selectedAttrs: Option[Array[Attribute]] = inputAttr.attributes.map { attrs =>
inds.map(index => attrs(index))
}
val outputAttr = selectedAttrs match {
case Some(attrs) => new AttributeGroup($(outputCol), attrs)
case None => new AttributeGroup($(outputCol), inds.length)
}
// Select features
val slicer = udf { vec: Vector =>
vec match {
case features: DenseVector => Vectors.dense(inds.map(features.apply))
case features: SparseVector => features.slice(inds)
}
}
dataset.withColumn($(outputCol), slicer(dataset($(inputCol))), outputAttr.toMetadata())
}
/** Get the feature indices in order: indices, names */
private def getSelectedFeatureIndices(schema: StructType): Array[Int] = {
val nameFeatures = MetadataUtils.getFeatureIndicesFromNames(schema($(inputCol)), $(names))
val indFeatures = $(indices)
val numDistinctFeatures = (nameFeatures ++ indFeatures).distinct.length
lazy val errMsg = "VectorSlicer requires indices and names to be disjoint" +
s" sets of features, but they overlap." +
s" indices: ${indFeatures.mkString("[", ",", "]")}." +
s" names: " +
nameFeatures.zip($(names)).map { case (i, n) => s"$i:$n" }.mkString("[", ",", "]")
require(nameFeatures.length + indFeatures.length == numDistinctFeatures, errMsg)
indFeatures ++ nameFeatures
}
override def transformSchema(schema: StructType): StructType = {
SchemaUtils.checkColumnType(schema, $(inputCol), new VectorUDT)
if (schema.fieldNames.contains($(outputCol))) {
throw new IllegalArgumentException(s"Output column ${$(outputCol)} already exists.")
}
val numFeaturesSelected = $(indices).length + $(names).length
val outputAttr = new AttributeGroup($(outputCol), numFeaturesSelected)
val outputFields = schema.fields :+ outputAttr.toStructField()
StructType(outputFields)
}
override def copy(extra: ParamMap): VectorSlicer = defaultCopy(extra)
}
@Since("1.6.0")
object VectorSlicer extends DefaultParamsReadable[VectorSlicer] {
/** Return true if given feature indices are valid */
private[feature] def validIndices(indices: Array[Int]): Boolean = {
if (indices.isEmpty) {
true
} else {
indices.length == indices.distinct.length && indices.forall(_ >= 0)
}
}
/** Return true if given feature names are valid */
private[feature] def validNames(names: Array[String]): Boolean = {
names.forall(_.nonEmpty) && names.length == names.distinct.length
}
@Since("1.6.0")
override def load(path: String): VectorSlicer = super.load(path)
}
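// Hedged usage sketch (illustrative; the column names and DataFrame `df` are
// assumptions, not from this file):
//
//   val slicer = new VectorSlicer()
//     .setInputCol("features")
//     .setOutputCol("selected")
//     .setIndices(Array(0, 2))   // positional selection first, in order
//     .setNames(Array("f4"))     // then name-based selection, in order
//   val sliced = slicer.transform(df)  // df's "features" column must carry
//                                      // ML attributes for "f4" to resolve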
|
chenc10/Spark-PAF
|
mllib/src/main/scala/org/apache/spark/ml/feature/VectorSlicer.scala
|
Scala
|
apache-2.0
| 6,852
|
package uima.ae.ir.en
import java.util.Locale
import org.apache.uima.jcas.JCas
import uima.ae.ir.MultiLingualInformationRetriever
import uima.modules.common.en.EnglishDocumentAnnotator
import uima.modules.ir.correction.en.{EnglishBoWBasedIRDocCorrector, EnglishKeywordBasedIRDocCorrector}
import uima.modules.ir.fulltext.indri.en.{EnglishRetrievalByBoW, EnglishRetrievalByKeyword}
import us.feliscat.types.{BoWQuery, Geography, KeywordQuery}
import us.feliscat.util.uima.JCasID
import scala.collection.mutable
/**
* <pre>
* Created on 2017/02/04.
* </pre>
*
* @author K.Sakamoto
*/
object EnglishInformationRetriever extends MultiLingualInformationRetriever with EnglishDocumentAnnotator {
override protected def retrieveByKeyword(aJCas: JCas,
query: KeywordQuery,
keywordCorrectionMap: mutable.Map[String, Seq[String]])(implicit id: JCasID): Option[Long] = {
if (localeId != Locale.ENGLISH.getLanguage) {
return None
}
Option(EnglishRetrievalByKeyword.retrieve(
aJCas,
query,
keywordCorrectionMap,
mIndriScoreIndex,
mDocumentId))
}
override protected def retrieveByBoW(aJCas: JCas, query: BoWQuery)(implicit id: JCasID): Option[Long] = {
if (localeId != Locale.ENGLISH.getLanguage) {
return None
}
Option(EnglishRetrievalByBoW.retrieve(
aJCas,
query,
mIndriScoreIndex,
mDocumentId))
}
override protected def correctDocByKeyword(aJCas: JCas,
query: KeywordQuery,
keywordCorrectionMap: Map[String, Seq[String]],
beginTimeLimit: Option[Int],
endTimeLimit: Option[Int],
geographyLimit: Option[Geography])(implicit id: JCasID): Unit = {
EnglishKeywordBasedIRDocCorrector.correct(
aJCas,
query,
keywordCorrectionMap,
beginTimeLimit,
endTimeLimit,
geographyLimit)
}
override protected def correctDocByBoW(aJCas: JCas,
query: BoWQuery,
beginTimeLimit: Option[Int],
endTimeLimit: Option[Int],
geographyLimit: Option[Geography])(implicit id: JCasID): Unit = {
EnglishBoWBasedIRDocCorrector.correct(
aJCas,
query,
beginTimeLimit,
endTimeLimit,
geographyLimit)
}
}
|
ktr-skmt/FelisCatusZero-multilingual
|
src/main/scala/uima/ae/ir/en/EnglishInformationRetriever.scala
|
Scala
|
apache-2.0
| 2,611
|
package com.nhlreplay.parser.playbyplay
object Pattern {
lazy val Assists = """.+?:"""
lazy val BlockedBy = """\s+BLOCKED\sBY\s+"""
lazy val Count = """\((\d+)\)"""
lazy val Distance = """(\d+\sft)\."""
lazy val DrawnBy = """Drawn\sBy:"""
lazy val Hit = """\s+HIT\s+"""
lazy val Label = """((?:\w|\s)+)\:"""
lazy val Name = """(?:\w|\s|\d|-|'|\(|\))+"""
lazy val Number = """#\d+"""
lazy val OptionalStart = """(?:"""
lazy val OptionalEnd = """)?"""
lazy val PenaltyDuration = """(?:\s\(maj\))?\((\d+)\smin\)"""
lazy val PenaltyReason = """\u00a0(.+?)"""
lazy val PenaltyShot = OptionalStart + """Penalty\sShot""" + OptionalEnd
lazy val Player = """(""" + Number + Separator + Name + """|TEAM),?"""
lazy val Separator = """(?:\s|,|;|\s-\s)*"""
lazy val ServedBy = """Served\sBy:"""
lazy val Text = """(.+)"""
lazy val Vs = """\s+vs\s+"""
lazy val Won = """\s+won\s+"""
lazy val Word = """((?:\w|-|\.)+),?"""
lazy val Words = """((?:\w|-|\.|\s)+),"""
lazy val Zone = Word + """\sZone"""
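  // Illustrative composition (hedged): `Player` combines Number, Separator and
  // Name above, so it accepts a jersey number followed by a name (e.g.
  // "#12 SMITH") or the literal "TEAM", with an optional trailing comma.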
}
|
peruukki/NHLReplay
|
app/com/nhlreplay/parser/playbyplay/Pattern.scala
|
Scala
|
mit
| 1,035
|
package HackerRank.TenDaysOfStatistics
import java.io.{ByteArrayInputStream, IOException, InputStream, PrintWriter}
import java.util.InputMismatchException
import scala.annotation.tailrec
import scala.collection.generic.CanBuildFrom
import scala.language.higherKinds
/**
* Copyright (c) 2017 A. Roberto Fischer
*
* @author A. Roberto Fischer <a.robertofischer@gmail.com> on 5/9/2017
*/
private[this] object Day4BinomialDistribution1 {
import Reader._
import Writer._
private[this] val TEST_INPUT: Option[String] = None
//------------------------------------------------------------------------------------------//
// Solution
//------------------------------------------------------------------------------------------//
private[this] def solve(): Unit = {
val boys = nextDouble()
val girls = nextDouble()
val odds = boys / (boys + girls)
println(Math.rint((3 to 6).map(binomialDist(6, _, odds)).sum * 1000) / 1000)
}
private[this] def power(n: Double, i: Int) = {
@tailrec
def _power(n: Double, i: Int, current: Double): Double = {
if (i == 1) {
current
} else {
_power(n, i - 1, current * n)
}
}
if (i == 0) 1.0 else _power(n, i, n)
}
private[this] def factorial(n: Int): Int = {
if (n == 0) 1 else n * factorial(n - 1)
}
private[this] def combination(n: Int, k: Int) = {
factorial(n) / (factorial(k) * factorial(n - k))
}
private[this] def binomialDist(n: Int, x: Int, p: Double) = {
combination(n, x) * power(p, x) * power(1 - p, n - x)
}
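  // Worked check (illustrative): binomialDist(6, 3, 0.5) =
  // combination(6, 3) * power(0.5, 3) * power(0.5, 3) = 20 / 64 = 0.3125.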
//------------------------------------------------------------------------------------------//
// Run
//------------------------------------------------------------------------------------------//
@throws[Exception]
def main(args: Array[String]): Unit = {
val s = System.currentTimeMillis
solve()
flush()
if (TEST_INPUT.isDefined) System.out.println(System.currentTimeMillis - s + "ms")
}
//------------------------------------------------------------------------------------------//
// Input
//------------------------------------------------------------------------------------------//
private[this] final object Reader {
private[this] implicit val in: InputStream = TEST_INPUT.fold(System.in)(s => new ByteArrayInputStream(s.getBytes))
def nextSeq[T, Coll[_]](reader: => Seq[T], n: Int)
(implicit cbf: CanBuildFrom[Coll[T], T, Coll[T]]): Coll[T] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder ++= reader
}
builder.result()
}
def next[T, Coll[_]](reader: => T, n: Int)
(implicit cbf: CanBuildFrom[Coll[T], T, Coll[T]]): Coll[T] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += reader
}
builder.result()
}
def nextWithIndex[T, Coll[_]](reader: => T, n: Int)
(implicit cbf: CanBuildFrom[Coll[(T, Int)], (T, Int), Coll[(T, Int)]]): Coll[(T, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((reader, i))
}
builder.result()
}
def nextDouble[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[Double], Double, Coll[Double]]): Coll[Double] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += nextDouble()
}
builder.result()
}
def nextDoubleWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(Double, Int)], (Double, Int), Coll[(Double, Int)]]): Coll[(Double, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((nextDouble(), i))
}
builder.result()
}
def nextChar[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[Char], Char, Coll[Char]]): Coll[Char] = {
val builder = cbf()
builder.sizeHint(n)
var b = skip
var p = 0
while (p < n && !isSpaceChar(b)) {
builder += b.toChar
p += 1
b = readByte().toInt
}
builder.result()
}
def nextCharWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(Char, Int)], (Char, Int), Coll[(Char, Int)]]): Coll[(Char, Int)] = {
val builder = cbf()
builder.sizeHint(n)
var b = skip
var p = 0
while (p < n && !isSpaceChar(b)) {
builder += ((b.toChar, p))
p += 1
b = readByte().toInt
}
builder.result()
}
def nextInt[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[Int], Int, Coll[Int]]): Coll[Int] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += nextInt()
}
builder.result()
}
def nextIntWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(Int, Int)], (Int, Int), Coll[(Int, Int)]]): Coll[(Int, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((nextInt(), i))
}
builder.result()
}
def nextLong[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[Long], Long, Coll[Long]]): Coll[Long] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += nextLong()
}
builder.result()
}
def nextLongWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(Long, Int)], (Long, Int), Coll[(Long, Int)]]): Coll[(Long, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((nextLong(), i))
}
builder.result()
}
def nextString[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[String], String, Coll[String]]): Coll[String] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += nextString()
}
builder.result()
}
def nextStringWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(String, Int)], (String, Int), Coll[(String, Int)]]): Coll[(String, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((nextString(), i))
}
builder.result()
}
def nextMultiLine(n: Int, m: Int): Array[Array[Char]] = {
val map = new Array[Array[Char]](n)
var i = 0
while (i < n) {
map(i) = nextChar[Array](m)
i += 1
}
map
}
def nextDouble(): Double = nextString().toDouble
def nextChar(): Char = skip.toChar
def nextString(): String = {
var b = skip
val sb = new java.lang.StringBuilder
while (!isSpaceChar(b)) {
sb.appendCodePoint(b)
b = readByte().toInt
}
sb.toString
}
def nextInt(): Int = {
var num = 0
var b = 0
var minus = false
while ( {
b = readByte().toInt
b != -1 && !((b >= '0' && b <= '9') || b == '-')
}) {}
if (b == '-') {
minus = true
b = readByte().toInt
}
while (true) {
if (b >= '0' && b <= '9') {
num = num * 10 + (b - '0')
} else {
if (minus) return -num else return num
}
b = readByte().toInt
}
throw new IOException("Read Int")
}
def nextLong(): Long = {
var num = 0L
var b = 0
var minus = false
while ( {
b = readByte().toInt
b != -1 && !((b >= '0' && b <= '9') || b == '-')
}) {}
if (b == '-') {
minus = true
b = readByte().toInt
}
while (true) {
if (b >= '0' && b <= '9') {
num = num * 10 + (b - '0')
} else {
if (minus) return -num else return num
}
b = readByte().toInt
}
throw new IOException("Read Long")
}
private[this] val inputBuffer = new Array[Byte](1024)
private[this] var lenBuffer = 0
private[this] var ptrBuffer = 0
private[this] def readByte()(implicit in: java.io.InputStream): Byte = {
if (lenBuffer == -1) throw new InputMismatchException
if (ptrBuffer >= lenBuffer) {
ptrBuffer = 0
try {
lenBuffer = in.read(inputBuffer)
} catch {
case _: IOException =>
throw new InputMismatchException
}
if (lenBuffer <= 0) return -1
}
inputBuffer({
ptrBuffer += 1
ptrBuffer - 1
})
}
private[this] def isSpaceChar(c: Int) = !(c >= 33 && c <= 126)
private[this] def skip = {
var b = 0
while ( {
b = readByte().toInt
b != -1 && isSpaceChar(b)
}) {}
b
}
}
//------------------------------------------------------------------------------------------//
// Output
//------------------------------------------------------------------------------------------//
private[this] final object Writer {
private[this] val out = new PrintWriter(System.out)
def flush(): Unit = out.flush()
def println(x: Any): Unit = out.println(x)
def print(x: Any): Unit = out.print(x)
}
}
|
robertoFischer/hackerrank
|
src/main/scala/HackerRank/TenDaysOfStatistics/Day4BinomialDistribution1.scala
|
Scala
|
mit
| 9,253
|
package com.github.mdr.graphospasm.grapheditor.part
import com.github.mdr.graphospasm.grapheditor.figure.NodeFigure
import com.github.mdr.graphospasm.grapheditor.Plugin
import org.eclipse.swt.graphics.Color
import org.eclipse.draw2d.IFigure
import org.eclipse.gef.EditPart
import org.eclipse.gef.GraphicalEditPart
import org.eclipse.gef.Request
import org.eclipse.gef.RequestConstants._
import org.eclipse.gef.editpolicies.GraphicalEditPolicy
import PartialFunction._
import org.eclipse.gef.requests.CreateRequest
import com.github.mdr.graphospasm.grapheditor._
class ConnectionTargetFeedbackEditPolicy extends GraphicalEditPolicy {
override def getHost = super.getHost.asInstanceOf[ConnectionEditPart]
private final def getFigure = getHost.getFigure
override def getTargetEditPart(request: Request): EditPart =
if (request.getType == REQ_SELECTION_HOVER) getHost else null
def showHighlight() {
getFigure.targetFeedback = true
}
override def eraseTargetFeedback(request: Request) {
getFigure.targetFeedback = false
}
override def showTargetFeedback(request: Request) {
val highlight = cond(request.getType) {
// case REQ_MOVE | REQ_ADD | REQ_CLONE | REQ_CONNECTION_START | REQ_CONNECTION_END ⇒ true
case REQ_CREATE ⇒ cond(request) {
case createRequest: CreateRequest ⇒ cond(createRequest.getNewObject) {
case _: EdgeLabel ⇒ getHost.getModel.nameOpt.isEmpty
}
}
}
if (highlight)
showHighlight()
}
}
|
mdr/graphospasm
|
com.github.mdr.graphospasm.grapheditor/src/main/scala/com/github/mdr/graphospasm/grapheditor/part/ConnectionTargetFeedbackEditPolicy.scala
|
Scala
|
mit
| 1,511
|
package models.slick.systemmanage
/**
* Created by hooxin on 15-2-10.
*/
import com.typesafe.slick.driver.oracle.OracleDriver.simple._
import models.systemmanage.{RoleFunc, RoleMenu, Role}
import scala.slick.lifted._
class RoleTable(tag:Tag) extends Table[Role](tag,"t_role"){
def rolename = column[String]("rolename")
def roleDescription = column[String]("roleDescription",O.Nullable)
def roleType = column[String]("roleType")
def jzlbdm = column[String]("jzlbdm",O.Nullable)
def jzlbmc = column[String]("jzlbmc",O.Nullable)
def departid = column[Long]("departid")
def id = column[Long]("id",O.PrimaryKey)
def * = (
rolename,
roleDescription.?,
roleType,
jzlbdm.?,
jzlbmc.?,
departid,
id.?
) <> (Role.tupled,Role.unapply)
}
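// Hedged usage sketch (illustrative; `db` is an assumed Slick Database):
//
//   val roles = TableQuery[RoleTable]
//   db.withSession { implicit session =>
//     roles.filter(_.departid === 1L).list
//   }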
class RoleMenuTable(tag: Tag) extends Table[RoleMenu](tag,"t_role_menu") {
def menucode = column[String]("menucode")
def roleid = column[Long]("roleid")
def * = (
menucode,
roleid
) <> (RoleMenu.tupled,RoleMenu.unapply)
}
class RoleFuncTable(tag: Tag) extends Table[RoleFunc](tag,"rf") {
def roleId = column[Long]("roleId")
def funccode = column[String]("funccode")
def param = column[String]("param",O.Nullable)
def * = (
roleId,
funccode,
param.?
) <> (RoleFunc.tupled,RoleFunc.unapply)
}
|
firefoxmmx2/techsupport_ext4_scala
|
app/models/slick/systemmanage/RoleTable.scala
|
Scala
|
apache-2.0
| 1,332
|
/*
*************************************************************************************
* Copyright 2013 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package com.normation.rudder.domain.appconfig
import com.normation.utils.HashcodeCaching
import java.util.regex.Pattern
case class RudderWebPropertyName(value:String) extends HashcodeCaching
object RudderWebPropertyName {
val patternName = Pattern.compile("[a-zA-Z0-9_]+");
}
/**
* A Property used by the webapp, configured in the Administration page
*/
case class RudderWebProperty(
name : RudderWebPropertyName
, value : String
, description: String
)
|
Kegeruneku/rudder
|
rudder-core/src/main/scala/com/normation/rudder/domain/appconfig/RudderWebProperty.scala
|
Scala
|
agpl-3.0
| 2,154
|
package com.github.splee.burrower.write
import com.github.splee.burrower.lag.LagGroup
trait Writer {
def write(lagGroup: LagGroup)
}
|
splee/burrower
|
src/main/scala/com/github/splee/burrower/write/Writer.scala
|
Scala
|
mit
| 137
|
package es.weso.wiFetcher.dao
import java.io.FileNotFoundException
import org.junit.runner.RunWith
import org.scalatest.BeforeAndAfter
import org.scalatest.FunSuite
import org.scalatest.Matchers
import org.scalatest.junit.JUnitRunner
import es.weso.wiFetcher.dao.file.CountryDAOImpl
@RunWith(classOf[JUnitRunner])
class CountryDAOImplSuite extends FunSuite with BeforeAndAfter
  with Matchers {
test("Try to load countries specifying a null path") {
intercept[IllegalArgumentException]{
new CountryDAOImpl(null, true)(null)
}
}
test("Try to load countries from a non-existing file") {
intercept[FileNotFoundException] {
new CountryDAOImpl("test.txt", true)(null)
}
}
test("Load correct file an verify that all data is loaded") {
val countryDao = new CountryDAOImpl("files/countryCodes.tsv", true)(null)
val countries = countryDao.getCountries
countries.size should be (236)
countries.foreach(country => {
if(country.name == null || country.name.equals("") ||
country.iso2Code == null || country.iso2Code.equals("") ||
country.iso3Code == null || country.iso3Code.equals(""))
throw new Exception("There is a country without important " +
"information")
})
val c = countries.find(country => country.name.equals("Spain")).getOrElse(
throw new Exception("Country spain is not present in the list"))
c.iso2Code should be ("ES")
c.iso3Code should be ("ESP")
}
}
|
weso/wiFetcher
|
test/es/weso/wiFetcher/dao/CountryDAOImplSuite.scala
|
Scala
|
apache-2.0
| 1,489
|
def foo[A <% String : Manifest](x: Int = 45) = x
foo[Int]()(/*caret*/)
// implicit ev$1: (Int) => String, ev$2: Manifest[Int]
|
double-y/translation-idea-plugin
|
testdata/parameterInfo/functionParameterInfo/simple/SyntheticParameter.scala
|
Scala
|
apache-2.0
| 126
|
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This file is part of Rudder.
*
* Rudder is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU General Public License version 3, the copyright holders add
* the following Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU General
* Public License version 3, when you create a Related Module, this
* Related Module is not considered as a part of the work and may be
* distributed under the license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* Rudder is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Rudder. If not, see <http://www.gnu.org/licenses/>.
*
*************************************************************************************
*/
package com.normation.rudder.web.snippet.configuration
import com.normation.rudder.domain.policies._
import com.normation.rudder.services.policies._
import com.normation.cfclerk.domain.Technique
import com.normation.cfclerk.services.TechniqueRepository
import net.liftweb.http.LocalSnippet
import net.liftweb.common._
import Box._
import net.liftweb.http.{SHtml,S}
import scala.xml._
import net.liftweb.http.DispatchSnippet
import net.liftweb.http.js._
import JsCmds._
import com.normation.rudder.web.components.popup.CreateOrCloneRulePopup
import JE._
import net.liftweb.util.Helpers
import net.liftweb.util.Helpers._
import com.normation.rudder.web.components.{
RuleEditForm,
ComponentInitializationException,
RuleGrid
}
import com.normation.rudder.domain.policies.{GroupTarget,Rule}
import com.normation.rudder.repository._
import com.normation.utils.StringUuidGenerator
import com.normation.plugins.{SpringExtendableSnippet,SnippetExtensionKey}
import bootstrap.liftweb.RudderConfig
import com.normation.rudder.web.components.RuleCategoryTree
import com.normation.rudder.rule.category._
import com.normation.eventlog.ModificationId
import com.normation.rudder.web.model.CurrentUser
import com.normation.rudder.web.components.RuleDisplayer
import com.normation.rudder.web.components.DisplayColumn
/**
* Snippet for managing Rules.
 * It lets the user see which Rules are available,
 * remove or edit them,
 * and add new ones.
*/
class RuleManagement extends DispatchSnippet with SpringExtendableSnippet[RuleManagement] with Loggable {
import RuleManagement._
private[this] val ruleRepository = RudderConfig.roRuleRepository
private[this] val roCategoryRepository = RudderConfig.roRuleCategoryRepository
private[this] val woCategoryRepository = RudderConfig.woRuleCategoryRepository
private[this] val uuidGen = RudderConfig.stringUuidGenerator
//the popup component
private[this] val currentRuleForm = new LocalSnippet[RuleEditForm]
private[this] val currentRuleDisplayer = new LocalSnippet[RuleDisplayer]
val extendsAt = SnippetExtensionKey(classOf[RuleManagement].getSimpleName)
override def mainDispatch = {
RudderConfig.configService.rudder_workflow_enabled match {
case Full(workflowEnabled) =>
RudderConfig.configService.rudder_ui_changeMessage_enabled match {
case Full(changeMsgEnabled) =>
Map(
"head" -> { _:NodeSeq => head(workflowEnabled, changeMsgEnabled) }
, "editRule" -> { _:NodeSeq => editRule(workflowEnabled, changeMsgEnabled) }
, "viewRules" -> { _:NodeSeq => viewRules(workflowEnabled, changeMsgEnabled) }
)
case eb: EmptyBox =>
val e = eb ?~! "Error when getting Rudder application configuration for change audit message activation"
logger.error(s"Error when displaying Rules : ${e.messageChain}")
Map(
"head" -> { _:NodeSeq => NodeSeq.Empty }
, "editRule" -> { _:NodeSeq => NodeSeq.Empty }
, "viewRules" -> { _: NodeSeq => <div class="error">{e.msg}</div> }
)
}
case eb: EmptyBox =>
val e = eb ?~! "Error when getting Rudder application configuration for workflow activation"
logger.error(s"Error when displaying Rules : ${e.messageChain}")
Map(
"head" -> { _:NodeSeq => NodeSeq.Empty }
, "editRule" -> { _:NodeSeq => NodeSeq.Empty }
, "viewRules" -> { _: NodeSeq => <div class="error">{e.msg}</div> }
)
}
}
def head(workflowEnabled: Boolean, changeMsgEnabled : Boolean) : NodeSeq = {
RuleEditForm.staticInit ++
RuleGrid.staticInit ++
{<head>
{Script(
JsRaw("""
$.fn.dataTableExt.oStdClasses.sPageButtonStaticDisabled="paginate_button_disabled";
function updateTips( t ) {
tips
.text( t )
.addClass( "ui-state-highlight" );
setTimeout(function() {
tips.removeClass( "ui-state-highlight", 1500 );
}, 500 );
}
function checkLength( o, n, min, max ) {
if ( o.val().length > max || o.val().length < min ) {
o.addClass( "ui-state-error" );
updateTips( "Length of " + n + " must be between " +
min + " and " + max + "." );
return false;
} else {
return true;
}
}
""") &
OnLoad(parseJsArg(workflowEnabled, changeMsgEnabled))
) }
</head>
}
}
def viewRules(workflowEnabled: Boolean, changeMsgEnabled : Boolean) : NodeSeq = {
currentRuleDisplayer.set(Full(new RuleDisplayer(
None
, "rules_grid_zone"
, detailsCallbackLink(workflowEnabled, changeMsgEnabled)
, (rule : Rule ) => onCreateRule(workflowEnabled, changeMsgEnabled)(rule,"showEditForm")
, showPopup
, DisplayColumn.Force(true)
, DisplayColumn.FromConfig
)))
currentRuleDisplayer.get match {
case Full(ruleDisplayer) => ruleDisplayer.display
case eb: EmptyBox =>
val fail = eb ?~! ("Error when displaying Rules")
<div class="error">Error in the form: {fail.messageChain}</div>
}
}
// When a rule is changed, the Rule displayer should be updated (Rule grid, selected category, ...)
def onRuleChange(workflowEnabled: Boolean, changeMsgEnabled : Boolean)(rule:Rule) = {
currentRuleDisplayer.get match {
case Full(ruleDisplayer) =>
ruleDisplayer.onRuleChange(rule.categoryId)
case eb: EmptyBox =>
SetHtml(htmlId_viewAll,viewRules(workflowEnabled, changeMsgEnabled))
}
}
def editRule(workflowEnabled: Boolean, changeMsgEnabled : Boolean, dispatch:String="showForm") : NodeSeq = {
def errorDiv(f:Failure) = <div id={htmlId_editRuleDiv} class="error">Error in the form: {f.messageChain}</div>
currentRuleForm.get match {
case f:Failure => errorDiv(f)
case Empty => <div id={htmlId_editRuleDiv}></div>
case Full(form) => form.dispatch(dispatch)(NodeSeq.Empty)
}
}
def onCreateRule(workflowEnabled: Boolean, changeMsgEnabled : Boolean)(rule : Rule, action : String) : JsCmd = {
updateEditComponent(rule, workflowEnabled, changeMsgEnabled)
//update UI
onRuleChange(workflowEnabled, changeMsgEnabled)(rule) &
Replace(htmlId_editRuleDiv, editRule(workflowEnabled, changeMsgEnabled, action))
}
/**
   * If a query is passed as argument, try to de-JSON-ify it in a best-effort
   * way - just swallow any errors.
*
* We want to look for #{ "ruleId":"XXXXXXXXXXXX" }
*/
private[this] def parseJsArg(workflowEnabled: Boolean, changeMsgEnabled : Boolean)(): JsCmd = {
def displayDetails(ruleData:String) = {
import net.liftweb.json._
val json = parse(ruleData)
json \\ "ruleId" match {
case JString(ruleId) =>
ruleRepository.get(RuleId(ruleId)) match {
case Full(rule) =>
json \\ "action" match {
case JString(action) =>
onCreateRule(workflowEnabled, changeMsgEnabled)(rule,action)
case _ =>
onCreateRule(workflowEnabled, changeMsgEnabled)(rule,"showEditForm")
}
case _ => Noop
}
case _ => Noop
}
}
JsRaw(s"""
var ruleData = null;
try {
ruleData = decodeURI(window.location.hash.substring(1)) ;
} catch(e) {
ruleData = null
}
if( ruleData != null && ruleData.length > 0) {
${SHtml.ajaxCall(JsVar("ruleData"), displayDetails _ )._2.toJsCmd}
}
"""
)
}
/**
* Create the popup
*/
def createPopup(ruleToClone : Option[Rule]) : NodeSeq = {
currentRuleDisplayer.get match {
case Failure(m,_,_) => <span class="error">Error: {m}</span>
case Empty => <div>The component is not set</div>
case Full(popup) => popup.ruleCreationPopup(ruleToClone)
}
}
private[this] def showPopup(clonedRule:Option[Rule]) : JsCmd = {
val popupHtml = createPopup(clonedRule:Option[Rule])
SetHtml(CreateOrCloneRulePopup.htmlId_popupContainer, popupHtml) &
JsRaw( s""" createPopup("${CreateOrCloneRulePopup.htmlId_popup}") """)
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
private[this] def updateEditComponent(rule:Rule, workflowEnabled: Boolean, changeMsgEnabled : Boolean) : Unit = {
val form = new RuleEditForm(
htmlId_editRuleDiv+"Form"
, rule
, workflowEnabled
, changeMsgEnabled
, onCloneCallback = { (updatedRule:Rule) => showPopup(Some(updatedRule)) }
, onSuccessCallback = { (updatedRule:Rule) => onRuleChange(workflowEnabled,changeMsgEnabled)(updatedRule) }
)
currentRuleForm.set(Full(form))
}
private[this] def detailsCallbackLink(workflowEnabled: Boolean, changeMsgEnabled : Boolean)(rule:Rule, action:String="showForm") : JsCmd = {
updateEditComponent(rule, workflowEnabled, changeMsgEnabled)
//update UI
Replace(htmlId_editRuleDiv, editRule(workflowEnabled, changeMsgEnabled, action )) &
JsRaw("""this.window.location.hash = "#" + JSON.stringify({'ruleId':'%s'})""".format(rule.id.value))
}
}
object RuleManagement {
val htmlId_editRuleDiv = "editRuleZone"
val htmlId_viewAll = "viewAllRulesZone"
val htmlId_addPopup = "add-rule-popup"
val htmlId_addCrButton = "add-rule-button"
}
|
armeniaca/rudder
|
rudder-web/src/main/scala/com/normation/rudder/web/snippet/configuration/RuleManagement.scala
|
Scala
|
gpl-3.0
| 11,372
|
package pl.writeonly.son2.vaadin.ui
import com.vaadin.annotations.{Theme, Title}
import com.vaadin.ui.Button.ClickEvent
import com.vaadin.ui._
import pl.writeonly.son2.path.core.ConfigPath
@Title("json path")
@Theme("valo")
class UIDiff extends UITrait2 {
override def componentsCenter2: Components = new Components() {
val configLabel = outputLabel
val input = inputTextArea("Input json")
val output = outputLabel
val components: List[Component] = List(configLabel)
val inputPatch = inputTextArea("json-patch")
val convert = convertButton(new Button.ClickListener() {
override def buttonClick(clickEvent: ClickEvent): Unit = {
val path = inputPatch.getValue
val config = ConfigPath(provider = Symbol(path))
}
})
val optionPanel = optionsPanel(components)
val inputs = List(inputPatch, input)
}
}
|
writeonly/scalare
|
scalare-adin/src/main/scala/pl/writeonly/son2/vaadin/ui/UIDiff.scala
|
Scala
|
artistic-2.0
| 872
|
package org.dele.text.lapa.patterns
import DomainStructure._
import org.scalatest.ShouldMatchers
import org.scalatest.prop.TableDrivenPropertyChecks
import org.scalatest.testng.TestNGSuite
import org.testng.annotations.Test
/**
* Created by jiaji on 2016-02-19.
*/
class DomainStructureTest extends TestNGSuite with ShouldMatchers with TableDrivenPropertyChecks {
import org.dele.text.lapa.TestHelper._
@Test
def t1 = {
val ds = domainStructure.children
ds.size shouldBe(3)
}
@Test
def t2 = {
val d = engDomainMgr.queryDomainId("cyber-attack", "entity-list-connector-words")
d shouldBe(Option("_root_"))
}
@Test
def t3 = {
val ds = load(domainTree, List("cyber-attack"))
ds.children.size shouldBe 2
ds.children.exists(_.id == "cyber-attack") shouldBe false
val ds2 = load(domainTree, List("military-maneuver", "-online-attack"))
ds2.children.size shouldBe 2
ds2.children.exists(_.id == "-online-attack") shouldBe false
}
}
|
new2scala/text-util
|
lapa/src/test/scala/org/dele/text/lapa/patterns/DomainStructureTest.scala
|
Scala
|
apache-2.0
| 994
|
package fp
import scala.annotation.implicitNotFound
@implicitNotFound(msg = "No implicit Semigroup defined for ${A}.")
trait Semigroup[A] {
def op(a1: A, a2: => A): A
}
object Semigroup {
def apply[A](implicit S: Semigroup[A]): Semigroup[A] = S
object SemigroupLaws {
def associativity[A](a1: A, a2: A, a3: A)(implicit S: Semigroup[A], E: Equal[A]): Boolean =
E.equal(S.op(a1, S.op(a2, a3)), S.op(S.op(a1, a2), a3))
}
def firstSemigroup[A]: Semigroup[A] =
(x, _) => x
def lastSemigroup[A]: Semigroup[A] =
(_, y) => y
def minSemigroup[A](implicit ord: Ordering[A]): Semigroup[A] =
ord.min(_, _)
def maxSemigroup[A](implicit ord: Ordering[A]): Semigroup[A] =
ord.max(_, _)
def functionSemigroup[A, B](implicit B: Semigroup[B]): Semigroup[A => B] =
(f, g) => a => B.op(f(a), g(a))
def productSemigroup[A, B](implicit A: Semigroup[A], B: Semigroup[B]): Semigroup[(A, B)] =
(x, y) => (A.op(x._1, y._1), B.op(x._2, y._2))
implicit def monoidSemigroup[A](implicit M: Monoid[A]): Semigroup[A] = M
}
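// Hedged usage sketch (illustrative, not part of the original file):
//
//   val min = Semigroup.minSemigroup[Int]
//   min.op(3, 5)                       // 3
//   val pair = Semigroup.productSemigroup(min, Semigroup.maxSemigroup[Int])
//   pair.op((3, 3), (5, 5))            // (3, 5)
//   // associativity holds for any lawful instance, given an Equal[Int]
//   // (assumed to exist in this package):
//   // Semigroup.SemigroupLaws.associativity(1, 2, 3)(min, intEqual)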
|
adamgfraser/fp
|
src/main/scala/fp/Semigroup.scala
|
Scala
|
apache-2.0
| 1,059
|
/**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.lang.scala.examples
import rx.lang.scala.Observable
import scala.concurrent.duration._
object Olympics {
case class Medal(val year: Int, val games: String, val discipline: String, val medal: String, val athlete: String, val country: String)
def mountainBikeMedals: Observable[Medal] = Observable.items(
Observable.items(
Medal(1996, "Atlanta 1996", "cross-country men", "Gold", "Bart BRENTJENS", "Netherlands"),
Medal(1996, "Atlanta 1996", "cross-country women", "Gold", "Paola PEZZO", "Italy"),
Medal(1996, "Atlanta 1996", "cross-country men", "Silver", "Thomas FRISCHKNECHT", "Switzerland"),
Medal(1996, "Atlanta 1996", "cross-country women", "Silver", "Alison SYDOR", "Canada"),
Medal(1996, "Atlanta 1996", "cross-country men", "Bronze", "Miguel MARTINEZ", "France"),
Medal(1996, "Atlanta 1996", "cross-country women", "Bronze", "Susan DEMATTEI", "United States of America")
),
fourYearsEmpty,
Observable.items(
Medal(2000, "Sydney 2000", "cross-country women", "Gold", "Paola PEZZO", "Italy"),
Medal(2000, "Sydney 2000", "cross-country women", "Silver", "Barbara BLATTER", "Switzerland"),
Medal(2000, "Sydney 2000", "cross-country women", "Bronze", "Marga FULLANA", "Spain"),
Medal(2000, "Sydney 2000", "cross-country men", "Gold", "Miguel MARTINEZ", "France"),
Medal(2000, "Sydney 2000", "cross-country men", "Silver", "Filip MEIRHAEGHE", "Belgium"),
Medal(2000, "Sydney 2000", "cross-country men", "Bronze", "Christoph SAUSER", "Switzerland")
),
fourYearsEmpty,
Observable.items(
Medal(2004, "Athens 2004", "cross-country men", "Gold", "Julien ABSALON", "France"),
Medal(2004, "Athens 2004", "cross-country men", "Silver", "Jose Antonio HERMIDA RAMOS", "Spain"),
Medal(2004, "Athens 2004", "cross-country men", "Bronze", "Bart BRENTJENS", "Netherlands"),
Medal(2004, "Athens 2004", "cross-country women", "Gold", "Gunn-Rita DAHLE", "Norway"),
Medal(2004, "Athens 2004", "cross-country women", "Silver", "Marie-Helene PREMONT", "Canada"),
Medal(2004, "Athens 2004", "cross-country women", "Bronze", "Sabine SPITZ", "Germany")
),
fourYearsEmpty,
Observable.items(
Medal(2008, "Beijing 2008", "cross-country women", "Gold", "Sabine SPITZ", "Germany"),
Medal(2008, "Beijing 2008", "cross-country women", "Silver", "Maja WLOSZCZOWSKA", "Poland"),
Medal(2008, "Beijing 2008", "cross-country women", "Bronze", "Irina KALENTYEVA", "Russian Federation"),
Medal(2008, "Beijing 2008", "cross-country men", "Gold", "Julien ABSALON", "France"),
Medal(2008, "Beijing 2008", "cross-country men", "Silver", "Jean-Christophe PERAUD", "France"),
Medal(2008, "Beijing 2008", "cross-country men", "Bronze", "Nino SCHURTER", "Switzerland")
),
fourYearsEmpty,
Observable.items(
Medal(2012, "London 2012", "cross-country men", "Gold", "Jaroslav KULHAVY", "Czech Republic"),
Medal(2012, "London 2012", "cross-country men", "Silver", "Nino SCHURTER", "Switzerland"),
Medal(2012, "London 2012", "cross-country men", "Bronze", "Marco Aurelio FONTANA", "Italy"),
Medal(2012, "London 2012", "cross-country women", "Gold", "Julie BRESSET", "France"),
Medal(2012, "London 2012", "cross-country women", "Silver", "Sabine SPITZ", "Germany"),
Medal(2012, "London 2012", "cross-country women", "Bronze", "Georgia GOULD", "United States of America")
)
).concat
// speed it up :D
val fourYears = 4000.millis
val neverUsedDummyMedal = Medal(3333, "?", "?", "?", "?", "?")
def fourYearsEmpty: Observable[Medal] = {
// TODO this should return an observable which emits nothing during fourYears and then completes
// Because of https://github.com/Netflix/RxJava/issues/388, we get non-terminating tests
// And this https://github.com/Netflix/RxJava/pull/289#issuecomment-24738668 also causes problems
// So we don't use this:
// Observable.interval(fourYears).take(1).map(i => neverUsedDummyMedal).filter(m => false)
// But we just return empty, which completes immediately
Observable.empty
}
}
|
devisnik/RxJava
|
language-adaptors/rxjava-scala/src/examples/scala/rx/lang/scala/examples/Olympics.scala
|
Scala
|
apache-2.0
| 4,755
|
object DemoSequence {
def main (args: Array[String]): Unit = {
try {
println("DEMO PART 1: POPULATING TEST COLLECTION")
PopulateTestCollection.main(Array[String]())
println("DEMO PART 2: BASIC QUERY")
BasicQuery.main(Array[String]())
println("DEMO PART 3: SQL QUERIES")
SQLQuery.main(Array[String]())
println("DEMO COMPLETED")
    } catch {
      // a bare `try` without a handler catches nothing; report failures explicitly
      case e: Exception =>
        println(s"DEMO FAILED: ${e.getMessage}")
        throw e
    }
}
}
|
spirom/spark-mongodb-examples
|
src/main/scala/DemoSequence.scala
|
Scala
|
apache-2.0
| 390
|
package colossus
package protocols.telnet
import core.DataBuffer
import org.scalatest._
import akka.util.ByteString
import scala.util.Success
class TelnetSpec extends WordSpec with MustMatchers {
def testCmdParser(input: String, expected: TelnetCommand) {
val p = new TelnetCommandParser
p.parse(DataBuffer(ByteString(input))) must equal (Some(expected))
}
"Telnet Command Parser" must {
"parse some words" in {
testCmdParser("A B C\\r\\n", TelnetCommand(List("A", "B", "C")))
}
"parse quoted word" in {
testCmdParser("A \\"B C\\" D\\r\\n", TelnetCommand(List("A", "B C", "D")))
}
"parse escaped quote" in {
testCmdParser("A \\\\\\"B\\\\\\" C\\r\\n", TelnetCommand(List("A", "\\"B\\"", "C")))
}
"parse escaped quote inside actual quote" in {
testCmdParser("""A "B \\"C\\"" C""" + "\\r\\n", TelnetCommand(List("A", """B "C"""", "C")))
}
"parse newline inside quote" in {
testCmdParser("""A "B""" + "\\r\\n" + """C" D""" + "\\r\\n", TelnetCommand(List("A", "B\\r\\nC", "D")))
}
"ignore whitespace in between args" in {
testCmdParser("A B C\\r\\n", TelnetCommand(List("A", "B", "C")))
}
"handle empty arg after quotes" in {
testCmdParser("A \\"B\\"\\r\\n", TelnetCommand(List("A", "B")))
}
}
}
|
zgagnon/colossus
|
colossus-tests/src/test/scala/colossus/protocols/telnet/TelnetSpec.scala
|
Scala
|
apache-2.0
| 1,297
|
package org.zouzias.spark.lucenerdd.aws.utils
// CAUTION: Do not remove this (sbt-build-info)
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import org.zouzias.spark.lucenerdd.aws.BuildInfo
object Utils {
val FuzzyEditDistance = 1
val topK = 10
def loadWikipediaTitles(implicit sparkSession: SparkSession): RDD[String] = {
import sparkSession.sqlContext.implicits._
sparkSession.read.parquet("s3://spark-lucenerdd/wikipedia/enwiki-latest-all-titles.parquet")
.map(row => row.getString(0)).map(_.replaceAll("_", " ")).map(_.replaceAll("[^a-zA-Z0-9\\s]", ""))
.rdd
}
def sampleTopKWikipediaTitles(k: Int)(implicit sparkSession: SparkSession): List[String] = {
loadWikipediaTitles.sample(false, 0.01).take(k).toList
}
def dayString(): String = {
val date = new DateTime()
val formatter = DateTimeFormat.forPattern("yyyy-MM-dd")
formatter.print(date)
}
val Version = BuildInfo.version
}
|
zouzias/spark-lucenerdd-aws
|
src/main/scala/org/zouzias/spark/lucenerdd/aws/utils/Utils.scala
|
Scala
|
apache-2.0
| 1,043
|
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.iterators
import java.util.{Collection => jCollection, Map => jMap}
import com.typesafe.scalalogging.LazyLogging
import org.apache.accumulo.core.client.IteratorSetting
import org.apache.accumulo.core.data.{Range => aRange, _}
import org.apache.accumulo.core.iterators.{IteratorEnvironment, SortedKeyValueIterator}
import org.apache.hadoop.io.Text
import org.geotools.filter.text.ecql.ECQL
import org.locationtech.geomesa.accumulo.AccumuloFeatureIndexType
import org.locationtech.geomesa.accumulo.index.AccumuloFeatureIndex
import org.locationtech.geomesa.features.SerializationOption.SerializationOptions
import org.locationtech.geomesa.features.kryo.KryoBufferSimpleFeature
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.filter.Filter
import scala.collection.JavaConverters._
import scala.util.control.NonFatal
/**
* Aggregating iterator - only works on kryo-encoded features
*/
abstract class KryoLazyAggregatingIterator[T <: AnyRef { def isEmpty: Boolean; def clear(): Unit }]
extends SortedKeyValueIterator[Key, Value] {
import KryoLazyAggregatingIterator._
var sft: SimpleFeatureType = _
var index: AccumuloFeatureIndex = _
var source: SortedKeyValueIterator[Key, Value] = _
private var validate: (SimpleFeature) => Boolean = _
// our accumulated result
private var result: T = _
protected var topKey: Key = _
private var topValue: Value = new Value()
private var currentRange: aRange = _
private var reusableSf: KryoBufferSimpleFeature = _
private var getId: (Text) => String = _
// server-side deduplication - not 100% effective, but we can't dedupe client side as we don't send ids
private val idsSeen = scala.collection.mutable.HashSet.empty[String]
private var maxIdsToTrack = -1
override def init(src: SortedKeyValueIterator[Key, Value],
jOptions: jMap[String, String],
env: IteratorEnvironment): Unit = {
this.source = src
val options = jOptions.asScala
val spec = options(SFT_OPT)
sft = IteratorCache.sft(spec)
index = try { AccumuloFeatureIndex.index(options(INDEX_OPT)) } catch {
case NonFatal(e) => throw new RuntimeException(s"Index option not configured correctly: ${options.get(INDEX_OPT)}")
}
if (index.serializedWithId) {
getId = (_) => reusableSf.getID
reusableSf = IteratorCache.serializer(spec, SerializationOptions.none).getReusableFeature
} else {
val getIdFromRow = index.getIdFromRow(sft)
getId = (row) => getIdFromRow(row.getBytes, 0, row.getLength)
reusableSf = IteratorCache.serializer(spec, SerializationOptions.withoutId).getReusableFeature
}
val filt = options.get(CQL_OPT).map(IteratorCache.filter(spec, _)).orNull
val dedupe = options.get(DUPE_OPT).exists(_.toBoolean)
maxIdsToTrack = options.get(MAX_DUPE_OPT).map(_.toInt).getOrElse(99999)
idsSeen.clear()
validate = (filt, dedupe) match {
case (null, false) => (_) => true
case (null, true) => deduplicate
case (_, false) => filter(filt)
case (_, true) => val f = filter(filt)(_); (sf) => f(sf) && deduplicate(sf)
}
result = init(options.toMap)
}
override def hasTop: Boolean = topKey != null
override def getTopKey: Key = topKey
override def getTopValue: Value = topValue
override def seek(range: aRange, columnFamilies: jCollection[ByteSequence], inclusive: Boolean): Unit = {
currentRange = range
source.seek(range, columnFamilies, inclusive)
findTop()
}
override def next(): Unit = {
if (!source.hasTop) {
topKey = null
topValue = null
} else {
findTop()
}
}
// noinspection LanguageFeature
def findTop(): Unit = {
result.clear()
while (source.hasTop && !currentRange.afterEndKey(source.getTopKey) && notFull(result)) {
val sf = decode(source.getTopValue.get())
if (validate(sf)) {
topKey = source.getTopKey
aggregateResult(sf, result) // write the record to our aggregated results
}
source.next() // Advance the source iterator
}
if (result.isEmpty) {
topKey = null // hasTop will be false
topValue = null
} else {
if (topValue == null) {
// only re-create topValue if it was nulled out
topValue = new Value()
}
topValue.set(encodeResult(result))
}
}
// hook to allow result to be chunked up
def notFull(result: T): Boolean = true
// hook to allow overrides in non-kryo subclasses
def decode(value: Array[Byte]): SimpleFeature = {
reusableSf.setBuffer(value)
reusableSf.setId(getId(source.getTopKey.getRow))
reusableSf
}
def init(options: Map[String, String]): T
def aggregateResult(sf: SimpleFeature, result: T): Unit
def encodeResult(result: T): Array[Byte]
def deduplicate(sf: SimpleFeature): Boolean =
if (idsSeen.size < maxIdsToTrack) {
idsSeen.add(getId(source.getTopKey.getRow))
} else {
!idsSeen.contains(getId(source.getTopKey.getRow))
}
def filter(filter: Filter)(sf: SimpleFeature): Boolean = filter.evaluate(sf)
override def deepCopy(env: IteratorEnvironment): SortedKeyValueIterator[Key, Value] =
throw new NotImplementedError()
}
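// Hedged sketch (illustrative; `CountHolder` and the subclass are assumptions,
// not part of GeoMesa): a minimal concrete aggregator satisfying the
// structural bound { def isEmpty: Boolean; def clear(): Unit }:
//
//   class CountHolder { var n = 0L; def isEmpty: Boolean = n == 0; def clear(): Unit = n = 0 }
//   class KryoLazyCountIterator extends KryoLazyAggregatingIterator[CountHolder] {
//     override def init(options: Map[String, String]): CountHolder = new CountHolder
//     override def aggregateResult(sf: SimpleFeature, result: CountHolder): Unit = result.n += 1
//     override def encodeResult(result: CountHolder): Array[Byte] =
//       java.nio.ByteBuffer.allocate(8).putLong(result.n).array()
//   }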
object KryoLazyAggregatingIterator extends LazyLogging {
// configuration keys
protected[iterators] val SFT_OPT = "sft"
protected[iterators] val CQL_OPT = "cql"
protected[iterators] val DUPE_OPT = "dupes"
protected[iterators] val MAX_DUPE_OPT = "max-dupes"
protected[iterators] val INDEX_OPT = "index"
def configure(is: IteratorSetting,
sft: SimpleFeatureType,
index: AccumuloFeatureIndexType,
filter: Option[Filter],
deduplicate: Boolean,
maxDuplicates: Option[Int]): Unit = {
is.addOption(SFT_OPT, SimpleFeatureTypes.encodeType(sft, includeUserData = true))
filter.foreach(f => is.addOption(CQL_OPT, ECQL.toCQL(f)))
is.addOption(DUPE_OPT, deduplicate.toString)
maxDuplicates.foreach(m => is.addOption(MAX_DUPE_OPT, m.toString))
is.addOption(INDEX_OPT, index.identifier)
}
}
|
nagavallia/geomesa
|
geomesa-accumulo/geomesa-accumulo-datastore/src/main/scala/org/locationtech/geomesa/accumulo/iterators/KryoLazyAggregatingIterator.scala
|
Scala
|
apache-2.0
| 6,731
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.deregister
import jto.validation.{Invalid, Path, Valid, ValidationError}
import org.scalatest.MustMatchers
import org.scalatestplus.mockito.MockitoSugar
import org.scalatestplus.play.PlaySpec
import play.api.libs.json.{JsError, JsPath, JsSuccess, Json}
class DeregistrationReasonSpec extends PlaySpec with MustMatchers with MockitoSugar {
"Form Validation" must {
"validate" when {
"given an enum value" in {
DeregistrationReason.formRule.validate(Map("deregistrationReason" -> Seq("01"))) must
be(Valid(DeregistrationReason.OutOfScope))
DeregistrationReason.formRule.validate(Map("deregistrationReason" -> Seq("02"))) must
be(Valid(DeregistrationReason.NotTradingInOwnRight))
DeregistrationReason.formRule.validate(Map("deregistrationReason" -> Seq("03"))) must
be(Valid(DeregistrationReason.UnderAnotherSupervisor))
DeregistrationReason.formRule.validate(Map("deregistrationReason" -> Seq("04"))) must
be(Valid(DeregistrationReason.ChangeOfLegalEntity))
DeregistrationReason.formRule.validate(Map("deregistrationReason" -> Seq("05"))) must
be(Valid(DeregistrationReason.HVDPolicyOfNotAcceptingHighValueCashPayments))
}
"given enum value for other and string for reason" in {
DeregistrationReason.formRule.validate(Map("deregistrationReason" -> Seq("06"), "specifyOtherReason" -> Seq("other"))) must
be(Valid(DeregistrationReason.Other("other")))
}
}
"write correct data" when {
"from enum value" in {
DeregistrationReason.formWrites.writes(DeregistrationReason.OutOfScope) must
be(Map("deregistrationReason" -> Seq("01")))
DeregistrationReason.formWrites.writes(DeregistrationReason.NotTradingInOwnRight) must
be(Map("deregistrationReason" -> Seq("02")))
DeregistrationReason.formWrites.writes(DeregistrationReason.UnderAnotherSupervisor) must
be(Map("deregistrationReason" -> Seq("03")))
DeregistrationReason.formWrites.writes(DeregistrationReason.ChangeOfLegalEntity) must
be(Map("deregistrationReason" -> Seq("04")))
DeregistrationReason.formWrites.writes(DeregistrationReason.HVDPolicyOfNotAcceptingHighValueCashPayments) must
be(Map("deregistrationReason" -> Seq("05")))
}
"from enum value of Other and string of reason" in {
DeregistrationReason.formWrites.writes(DeregistrationReason.Other("reason")) must
be(Map("deregistrationReason" -> Seq("06"), "specifyOtherReason" -> Seq("reason")))
}
}
"throw error" when {
"invalid enum value" in {
DeregistrationReason.formRule.validate(Map("deregistrationReason" -> Seq("20"))) must
be(Invalid(Seq((Path \\ "deregistrationReason", Seq(ValidationError("error.invalid"))))))
}
"invalid characters other reason value" in {
DeregistrationReason.formRule.validate(Map("deregistrationReason" -> Seq("06"), "specifyOtherReason" -> Seq("{}"))) must
be(Invalid(Seq((Path \\ "specifyOtherReason", Seq(ValidationError("error.required.deregistration.reason.format"))))))
}
"other reason value has too many characters" in {
DeregistrationReason.formRule.validate(Map("deregistrationReason" -> Seq("06"), "specifyOtherReason" -> Seq("a" * 41))) must
          be(Invalid(Seq((Path \ "specifyOtherReason", Seq(ValidationError("error.required.deregistration.reason.length"))))))
}
}
"throw error on empty" when {
"non-selection of enum" in {
DeregistrationReason.formRule.validate(Map.empty) must
          be(Invalid(Seq((Path \ "deregistrationReason", Seq(ValidationError("error.required.deregistration.reason"))))))
}
"no other reason" which {
"is an empty string" in {
DeregistrationReason.formRule.validate(Map("deregistrationReason" -> Seq("06"), "specifyOtherReason" -> Seq(""))) must
            be(Invalid(Seq((Path \ "specifyOtherReason", Seq(ValidationError("error.required.deregistration.reason.input"))))))
}
"a string of whitespace" in {
          DeregistrationReason.formRule.validate(Map("deregistrationReason" -> Seq("06"), "specifyOtherReason" -> Seq(" \t"))) must
            be(Invalid(Seq((Path \ "specifyOtherReason", Seq(ValidationError("error.required.deregistration.reason.input"))))))
}
"a missing value" in {
DeregistrationReason.formRule.validate(Map("deregistrationReason" -> Seq("06"), "specifyOtherReason" -> Seq.empty)) must
            be(Invalid(Seq((Path \ "specifyOtherReason", Seq(ValidationError("error.required"))))))
}
}
}
}
"JSON validation" must {
"validate given an enum value" when {
"OutOfScope" in {
Json.fromJson[DeregistrationReason](Json.obj("deregistrationReason" -> "Out of scope")) must
be(JsSuccess(DeregistrationReason.OutOfScope, JsPath))
}
"NotTradingInOwnRight" in {
Json.fromJson[DeregistrationReason](Json.obj("deregistrationReason" -> "Not trading in own right")) must
be(JsSuccess(DeregistrationReason.NotTradingInOwnRight, JsPath))
}
"UnderAnotherSupervisor" in {
Json.fromJson[DeregistrationReason](Json.obj("deregistrationReason" -> "Under another supervisor")) must
be(JsSuccess(DeregistrationReason.UnderAnotherSupervisor, JsPath))
}
"ChangeOfLegalEntity" in {
Json.fromJson[DeregistrationReason](Json.obj("deregistrationReason" -> "Change of Legal Entity")) must
be(JsSuccess(DeregistrationReason.ChangeOfLegalEntity, JsPath))
}
"HVDPolicyOfNotAcceptingHighValueCashPayments" in {
Json.fromJson[DeregistrationReason](Json.obj("deregistrationReason" -> "HVD - policy of not accepting high value cash payments")) must
be(JsSuccess(DeregistrationReason.HVDPolicyOfNotAcceptingHighValueCashPayments, JsPath))
}
}
"validate given an enum value and string" in {
Json.fromJson[DeregistrationReason](Json.obj("deregistrationReason" -> "Other, please specify", "specifyOtherReason" -> "reason")) must
        be(JsSuccess(DeregistrationReason.Other("reason"), JsPath \ "specifyOtherReason"))
}
"write the correct value" when {
"OutOfScope" in {
Json.toJson(DeregistrationReason.OutOfScope.asInstanceOf[DeregistrationReason]) must be(Json.obj("deregistrationReason" -> "Out of scope"))
}
"NotTradingInOwnRight" in {
Json.toJson(DeregistrationReason.NotTradingInOwnRight.asInstanceOf[DeregistrationReason]) must be(Json.obj("deregistrationReason" -> "Not trading in own right"))
}
"UnderAnotherSupervisor" in {
Json.toJson(DeregistrationReason.UnderAnotherSupervisor.asInstanceOf[DeregistrationReason]) must be(Json.obj("deregistrationReason" -> "Under another supervisor"))
}
"ChangeOfLegalEntity" in {
Json.toJson(DeregistrationReason.ChangeOfLegalEntity.asInstanceOf[DeregistrationReason]) must be(Json.obj("deregistrationReason" -> "Change of Legal Entity"))
}
"HVDPolicyOfNotAcceptingHighValueCashPayments" in {
Json.toJson(DeregistrationReason.HVDPolicyOfNotAcceptingHighValueCashPayments.asInstanceOf[DeregistrationReason]) must be(Json.obj("deregistrationReason" -> "HVD - policy of not accepting high value cash payments"))
}
"Other" in {
Json.toJson(DeregistrationReason.Other("reason").asInstanceOf[DeregistrationReason]) must be(Json.obj("deregistrationReason" -> "Other, please specify", "specifyOtherReason" -> "reason"))
}
}
"throw error" when {
"enum value is invalid" in {
Json.fromJson[DeregistrationReason](Json.obj("deregistrationReason" -> "10")) must
be(JsError(JsPath -> play.api.libs.json.JsonValidationError("error.invalid")))
}
"enum is missing" in {
Json.fromJson[DeregistrationReason](Json.obj()) must
        be(JsError(JsPath \ "deregistrationReason" -> play.api.libs.json.JsonValidationError("error.path.missing")))
}
"other reason is missing" in {
Json.fromJson[DeregistrationReason](Json.obj("deregistrationReason" -> "Other, please specify")) must
        be(JsError(JsPath \ "specifyOtherReason" -> play.api.libs.json.JsonValidationError("error.path.missing")))
}
}
}
}
|
hmrc/amls-frontend
|
test/models/deregister/DeregistrationReasonSpec.scala
|
Scala
|
apache-2.0
| 9,012
|
/* NSC -- new Scala compiler
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
import settings.MutableSettings
/** A compatibility stub.
*/
class Settings(errorFn: String => Unit) extends MutableSettings(errorFn) {
def this() = this(Console.println)
override def withErrorFn(errorFn: String => Unit): Settings = {
val settings = new Settings(errorFn)
copyInto(settings)
settings
}
}
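// Hedged usage sketch (editor addition, not part of the original file): the
// errorFn constructor above lets callers redirect compiler error reporting.
// The object and value names below are hypothetical.
object SettingsUsageSketch {
  // Errors reported through this Settings instance accumulate in `errors`
  // instead of being printed via the default Console.println.
  val errors = scala.collection.mutable.ListBuffer.empty[String]
  val settings = new Settings(errors += _)
}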
|
felixmulder/scala
|
src/compiler/scala/tools/nsc/Settings.scala
|
Scala
|
bsd-3-clause
| 444
|
import eu.inn.binders.core.FieldNotFoundException
import org.scalatest.{FlatSpec, Matchers}
case class TestString(stringVal: String)
case class TestStringN(stringValN1: Option[String], stringValN2: Option[String])
case class TestStringArray(stringArray: Seq[String])
case class TestStringArrayN(stringArrayN: Seq[Option[String]])
class TestStringJsonSerializer extends FlatSpec with Matchers {
import eu.inn.binders.json._
"Json " should " serialize class with String" in {
val t = TestString("abc")
val str = t.toJson
assert (str === """{"stringVal":"abc"}""")
}
"Json " should " deserialize class with String" in {
val o = """{"stringVal":"abc"}""".parseJson[TestString]
val t = TestString("abc")
assert (t === o)
}
"Json " should " serialize class with array of String" in {
val t = TestStringArray(List("a","b"))
val str = t.toJson
assert (str === """{"stringArray":["a","b"]}""")
}
"Json " should " deserialize class with array of String" in {
val o = """{"stringArray":["a","b"]}""".parseJson[TestStringArray]
val t = TestStringArray(List("a","b"))
assert (t === o)
}
"Json " should " serialize class with array of Option[String]" in {
val t = TestStringArrayN(List(Some("a"),None,Some("c")))
val str = t.toJson
assert (str === """{"stringArrayN":["a",null,"c"]}""")
}
"Json " should " deserialize class with array of Option[String]" in {
val o = """{"stringArrayN":["a",null,"b"]}""".parseJson[TestStringArrayN]
val t = TestStringArrayN(List(Some("a"),None,Some("b")))
assert (t === o)
}
"Json " should " serialize class with Nullable String" in {
val t = TestStringN(Some("a"), Some("b"))
val str = t.toJson
assert (str === """{"stringValN1":"a","stringValN2":"b"}""")
val t2 = TestStringN(Some("a"),None)
val str2 = t2.toJson
assert (str2 === """{"stringValN1":"a","stringValN2":null}""")
}
"Json " should " deserialize class with Nullable String" in {
val o = """{"stringValN1":"a","stringValN2":"b"}""".parseJson[TestStringN]
val t = TestStringN(Some("a"), Some("b"))
assert (o === t)
val o2 = """{"stringValN1":"a"}""".parseJson[TestStringN]
val t2 = TestStringN(Some("a"),None)
assert (o2 === t2)
}
"Json " should " deserialize empty array if no field is found" in {
val o = """{}""".parseJson[TestStringArray]
val t = TestStringArray(Seq.empty)
assert (o === t)
}
"Json " should " throw exception if fieldname doesn't match" in {
intercept[FieldNotFoundException] {
"""{"wrongFieldName":"abc"}""".parseJson[TestString]
}
}
}
|
InnovaCo/binders-json
|
src/test/scala/TestStringJsonSerializer.scala
|
Scala
|
bsd-3-clause
| 2,643
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gearpump.streaming.examples.wordcount
import java.time.Instant
import org.scalacheck.Gen
import org.scalatest.prop.PropertyChecks
import org.scalatest.{BeforeAndAfter, Matchers, PropSpec}
import org.apache.gearpump.Message
import org.apache.gearpump.cluster.UserConfig
import org.apache.gearpump.streaming.MockUtil
class SumSpec extends PropSpec with PropertyChecks with Matchers with BeforeAndAfter {
val stringGenerator = Gen.alphaStr
var wordcount = 0
property("Sum should calculate the frequency of the word correctly") {
val taskContext = MockUtil.mockTaskContext
val conf = UserConfig.empty
val sum = new Sum(taskContext, conf)
sum.onStart(Instant.EPOCH)
forAll(stringGenerator) { txt =>
wordcount += 1
sum.onNext(Message(txt))
}
val all = sum.map.foldLeft(0L) { (total, kv) =>
val (_, num) = kv
total + num
}
assert(sum.wordCount == all && sum.wordCount == wordcount)
sum.reportWordCount()
}
}
|
manuzhang/incubator-gearpump
|
examples/streaming/wordcount/src/test/scala/org/apache/gearpump/streaming/examples/wordcount/SumSpec.scala
|
Scala
|
apache-2.0
| 1,811
|
package controller.impl.command.impl
import controller.impl.messages.MessageText
import model.impl.{Field, PlayerNameEnum, Tile, TileNameEnum}
import org.scalatest.{FlatSpec, Matchers}
import util.position.Position
class TrapCommandSpec extends FlatSpec with Matchers {
val playerGoldTiles = Set(new Tile(TileNameEnum.RABBIT, new Position(3, 3)))
val fieldGlobal = new Field(playerGoldTiles, Set())
val trapCommandGlobal = TrapCommand(fieldGlobal, PlayerNameEnum.GOLD, new Position(3, 3))
"doCommand" should "remove tile from the given position" in {
fieldGlobal.getTileName(PlayerNameEnum.GOLD, new Position(3, 3)) should be(TileNameEnum.RABBIT)
trapCommandGlobal.doCommand() should be(MessageText.doTrap(new Position(3, 3)))
fieldGlobal.getTileName(PlayerNameEnum.GOLD, new Position(3, 3)) should be(TileNameEnum.NONE)
}
it should "get error if remove tile is not possible" in {
val field = new Field()
val trapCommand = TrapCommand(field, PlayerNameEnum.GOLD, new Position(5, 5))
trapCommand.doCommand() should
be(MessageText.errorRemoveTile(new Position(5, 5)))
}
"undoCommand" should "respawn the tile back to the given position" in {
fieldGlobal.getTileName(PlayerNameEnum.GOLD, new Position(3, 3)) should be(TileNameEnum.NONE)
trapCommandGlobal.undoCommand() should be(MessageText.undoTrap(new Position(3, 3)))
fieldGlobal.getTileName(PlayerNameEnum.GOLD, new Position(3, 3)) should be(TileNameEnum.RABBIT)
}
it should "get error if add tile is not possible" in {
val field = new Field()
val trapCommand = TrapCommand(field, PlayerNameEnum.GOLD, new Position(1, 1))
trapCommand.doCommand() should be(MessageText.doTrap(new Position(1, 1)))
field.addTile(PlayerNameEnum.GOLD, TileNameEnum.RABBIT, new Position(1, 1))
trapCommand.undoCommand() should be(MessageText.errorAddTile(new Position(1, 1)))
}
"equals" should "true, if name and pos are the same" in {
val field1 = new Field()
val trapCommand1 = TrapCommand(field1, PlayerNameEnum.GOLD, new Position(1, 2))
val field2 = new Field()
val trapCommand2 = TrapCommand(field2, PlayerNameEnum.GOLD, new Position(1, 2))
trapCommand1 should be(trapCommand2)
}
it should "false if not" in {
val field1 = new Field()
val trapCommand1 = TrapCommand(field1, PlayerNameEnum.GOLD, new Position(1, 2))
val field2 = new Field()
val trapCommand2 = TrapCommand(field2, PlayerNameEnum.GOLD, new Position(1, 3))
trapCommand1 should not be trapCommand2
}
}
|
MartinLei/Arimaa
|
src/test/scala/controller/impl/command/impl/TrapCommandSpec.scala
|
Scala
|
mit
| 2,545
|
package net.ceedubs.scrutinator
package swagger
import scalaz.{ @@ => _, _ }
import shapeless.tag._
import shapeless._
sealed trait SwaggerSpec
object SwaggerSpec {
val tagger: Tagger[SwaggerSpec] = tag[SwaggerSpec]
def apply[A](a: A): A @@ SwaggerSpec = tagger[A](a)
}
object SwaggerShow {
sealed trait SwaggerSpec
def show[A](s: Show[A]): SwaggerShow[A] = s.asInstanceOf[SwaggerShow[A]]
}
trait SwaggerShowInstances {
import SwaggerShow._
implicit val swaggerShowString: SwaggerShow[String] = SwaggerShow.show(scalaz.std.string.stringInstance)
implicit val swaggerShowBoolean: SwaggerShow[Boolean] = SwaggerShow.show(scalaz.std.anyVal.booleanInstance)
implicit val swaggerShowByte: SwaggerShow[Byte] = SwaggerShow.show(scalaz.std.anyVal.byteInstance)
implicit val swaggerShowChar: SwaggerShow[Char] = SwaggerShow.show(scalaz.std.anyVal.char)
implicit val swaggerShowDouble: SwaggerShow[Double] = SwaggerShow.show(scalaz.std.anyVal.doubleInstance)
implicit val swaggerShowFloat: SwaggerShow[Float] = SwaggerShow.show(scalaz.std.anyVal.floatInstance)
implicit val swaggerShowInt: SwaggerShow[Int] = SwaggerShow.show(scalaz.std.anyVal.intInstance)
implicit val swaggerShowLong: SwaggerShow[Long] = SwaggerShow.show(scalaz.std.anyVal.longInstance)
implicit val swaggerShowShort: SwaggerShow[Short] = SwaggerShow.show(scalaz.std.anyVal.shortInstance)
}
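// Hedged usage sketch (editor addition): SwaggerSpec.apply is a zero-cost
// retag -- the value is unchanged at runtime and only gains the SwaggerSpec
// tag in its static type. The object name is hypothetical.
object SwaggerSpecTagSketch {
  import shapeless.tag.@@
  val tagged: String @@ SwaggerSpec = SwaggerSpec("petId")
}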
|
ceedubs/scrutinator
|
swagger/src/main/scala/net/ceedubs/scrutinator/swagger/SwaggerShow.scala
|
Scala
|
mit
| 1,385
|
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.io
import java.nio.ByteBuffer
import java.nio.file.Files
import com.spotify.scio.ScioContext
import com.spotify.scio.avro.AvroUtils.schema
import com.spotify.scio.avro._
import com.spotify.scio.coders.Coder
import com.spotify.scio.proto.Track.TrackPB
import com.spotify.scio.testing._
import org.apache.avro.generic.GenericRecord
import org.apache.commons.io.FileUtils
import scala.jdk.CollectionConverters._
object ScioIOTest {
@AvroType.toSchema
case class AvroRecord(i: Int, s: String, r: List[String])
}
class ScioIOTest extends ScioIOSpec {
import ScioIOTest._
"AvroIO" should "work with SpecificRecord" in {
val xs = (1 to 100).map(AvroUtils.newSpecificRecord)
testTap(xs)(_.saveAsAvroFile(_))(".avro")
testJobTest(xs)(AvroIO[TestRecord](_))(_.avroFile(_))(_.saveAsAvroFile(_))
}
it should "work with GenericRecord" in {
import AvroUtils.schema
implicit val coder = Coder.avroGenericRecordCoder(schema)
val xs = (1 to 100).map(AvroUtils.newGenericRecord)
testTap(xs)(_.saveAsAvroFile(_, schema = schema))(".avro")
testJobTest(xs)(AvroIO(_))(_.avroFile(_, schema))(_.saveAsAvroFile(_, schema = schema))
}
it should "work with typed Avro" in {
val xs = (1 to 100).map(x => AvroRecord(x, x.toString, (1 to x).map(_.toString).toList))
val io = (s: String) => AvroIO[AvroRecord](s)
testTap(xs)(_.saveAsTypedAvroFile(_))(".avro")
testJobTest(xs)(io)(_.typedAvroFile[AvroRecord](_))(_.saveAsTypedAvroFile(_))
}
it should "work with GenericRecord and a parseFn" in {
implicit val coder = Coder.avroGenericRecordCoder(schema)
val xs = (1 to 100).map(AvroUtils.newGenericRecord)
    // No test for saveAsAvroFile because parseFn is only for input
testJobTest(xs)(AvroIO(_))(
_.parseAvroFile[GenericRecord](_)(identity)
)(_.saveAsAvroFile(_, schema = schema))
}
"ObjectFileIO" should "work" in {
import ScioIOTest._
val xs = (1 to 100).map(x => AvroRecord(x, x.toString, (1 to x).map(_.toString).toList))
testTap(xs)(_.saveAsObjectFile(_))(".obj.avro")
testJobTest[AvroRecord](xs)(ObjectFileIO[AvroRecord](_))(_.objectFile[AvroRecord](_))(
_.saveAsObjectFile(_)
)
}
"ProtobufIO" should "work" in {
val xs =
(1 to 100).map(x => TrackPB.newBuilder().setTrackId(x.toString).build())
val suffix = ".protobuf.avro"
testTap(xs)(_.saveAsProtobufFile(_))(suffix)
testJobTest(xs)(ProtobufIO(_))(_.protobufFile[TrackPB](_))(_.saveAsProtobufFile(_))
}
"TextIO" should "work" in {
val xs = (1 to 100).map(_.toString)
testTap(xs)(_.saveAsTextFile(_))(".txt")
testJobTest(xs)(TextIO(_))(_.textFile(_))(_.saveAsTextFile(_))
}
"BinaryIO" should "work" in {
val xs = (1 to 100).map(i => ByteBuffer.allocate(4).putInt(i).array)
testJobTestOutput(xs)(BinaryIO(_))(_.saveAsBinaryFile(_))
}
"BinaryIO" should "output files to $prefix/part-*" in {
val tmpDir = Files.createTempDirectory("binary-io-")
val sc = ScioContext()
sc.parallelize(Seq(ByteBuffer.allocate(4).putInt(1).array)).saveAsBinaryFile(tmpDir.toString)
sc.run()
Files
.list(tmpDir)
.iterator()
.asScala
.filterNot(_.toFile.getName.startsWith("."))
.map(_.toFile.getName)
.toSet shouldBe Set("part-00000-of-00001.bin")
FileUtils.deleteDirectory(tmpDir.toFile)
}
}
|
spotify/scio
|
scio-test/src/test/scala/com/spotify/scio/io/ScioIOTest.scala
|
Scala
|
apache-2.0
| 3,982
|
package com.twitter.concurrent
import com.twitter.conversions.time._
import com.twitter.util._
import java.util.concurrent.{ConcurrentLinkedQueue, RejectedExecutionException}
import org.junit.runner.RunWith
import org.scalatest.fixture.FunSpec
import org.scalatest.junit.JUnitRunner
import scala.collection.mutable
@RunWith(classOf[JUnitRunner])
class AsyncSemaphoreTest extends FunSpec {
class AsyncSemaphoreHelper(val sem: AsyncSemaphore, var count: Int, val permits: ConcurrentLinkedQueue[Permit]) {
def copy(sem: AsyncSemaphore = this.sem, count: Int = this.count, permits: ConcurrentLinkedQueue[Permit] = this.permits) =
new AsyncSemaphoreHelper(sem, count, permits)
}
type FixtureParam = AsyncSemaphoreHelper
override def withFixture(test: OneArgTest) = {
val sem = new AsyncSemaphore(2)
val helper = new AsyncSemaphoreHelper(sem, 0, new ConcurrentLinkedQueue[Permit])
withFixture(test.toNoArgTest(helper))
}
describe("AsyncSemaphore") {
def acquire(s: AsyncSemaphoreHelper) = {
val fPermit = s.sem.acquire()
fPermit onSuccess { permit =>
s.count += 1
s.permits add permit
}
fPermit
}
it("should validate constructor parameters") { _ =>
intercept[IllegalArgumentException] {
new AsyncSemaphore(0)
}
intercept[IllegalArgumentException] {
new AsyncSemaphore(1, -1)
}
}
it("should execute immediately while permits are available") { semHelper =>
assert(semHelper.sem.numPermitsAvailable == (2))
acquire(semHelper)
assert(semHelper.count == (1))
assert(semHelper.sem.numPermitsAvailable == (1))
acquire(semHelper)
assert(semHelper.count == (2))
assert(semHelper.sem.numPermitsAvailable == (0))
acquire(semHelper)
assert(semHelper.count == (2))
assert(semHelper.sem.numPermitsAvailable == (0))
}
it("should execute deferred computations when permits are released") { semHelper =>
acquire(semHelper)
acquire(semHelper)
acquire(semHelper)
acquire(semHelper)
assert(semHelper.count == (2))
assert(semHelper.sem.numPermitsAvailable == (0))
semHelper.permits.poll().release()
assert(semHelper.count == (3))
semHelper.permits.poll().release()
assert(semHelper.count == (4))
semHelper.permits.poll().release()
assert(semHelper.count == (4))
}
it("should bound the number of waiters") { semHelper =>
val semHelper2 = semHelper.copy(sem = new AsyncSemaphore(2, 3))
// The first two acquires obtain a permit.
acquire(semHelper2)
acquire(semHelper2)
assert(semHelper2.count == (2))
// The next three acquires wait.
acquire(semHelper2)
acquire(semHelper2)
acquire(semHelper2)
assert(semHelper2.count == (2))
assert(semHelper2.sem.numWaiters == (3))
// The next acquire should be rejected.
val permit = acquire(semHelper2)
assert(semHelper2.sem.numWaiters == (3))
intercept[RejectedExecutionException] {
Await.result(permit)
}
// Waiting tasks should still execute once permits are available.
semHelper2.permits.poll().release()
semHelper2.permits.poll().release()
semHelper2.permits.poll().release()
assert(semHelper2.count == (5))
}
it("should satisfy futures with exceptions if they are interrupted") { semHelper =>
val p1 = acquire(semHelper)
val p2 = acquire(semHelper)
val p3 = acquire(semHelper)
p3.raise(new Exception("OK"))
val e = intercept[Exception] {
Await.result(p3)
}
assert(e.getMessage == ("OK"))
Await.result(p2).release()
Await.result(p1).release()
}
it("should execute queued up async functions as permits become available") { semHelper =>
var counter = 0
val queue = new mutable.Queue[Promise[Unit]]()
val func = new (() => Future[Unit]) {
def apply(): Future[Unit] = {
counter = counter + 1
val promise = new Promise[Unit]()
queue.enqueue(promise)
promise
}
}
assert(semHelper.sem.numPermitsAvailable == 2)
semHelper.sem.acquireAndRun(func())
assert(counter == 1)
assert(semHelper.sem.numPermitsAvailable == 1)
semHelper.sem.acquireAndRun(func())
assert(counter == 2)
assert(semHelper.sem.numPermitsAvailable == 0)
semHelper.sem.acquireAndRun(func())
assert(counter == 2)
assert(semHelper.sem.numPermitsAvailable == 0)
queue.dequeue().setValue(Unit)
assert(counter == 3)
assert(semHelper.sem.numPermitsAvailable == 0)
queue.dequeue().setValue(Unit)
assert(semHelper.sem.numPermitsAvailable == 1)
queue.dequeue().setException(new RuntimeException("test"))
assert(semHelper.sem.numPermitsAvailable == 2)
}
it("should release permit even if queued up function throws an exception") { semHelper =>
val badFunc = new Function0[Future[Unit]] {
def apply(): Future[Unit] = throw new RuntimeException("bad func calling")
}
semHelper.sem.acquireAndRun(badFunc())
assert(semHelper.sem.numPermitsAvailable == 2)
}
it("should execute queued up sync functions as permits become available") { semHelper =>
var counter = 0
val queue = new mutable.Queue[Promise[Unit]]()
val funcFuture = new (() => Future[Unit]) {
def apply(): Future[Unit] = {
counter = counter + 1
val promise = new Promise[Unit]()
queue.enqueue(promise)
promise
}
}
val func = new (() => Int) {
def apply(): Int = {
counter = counter + 1
counter
}
}
assert(semHelper.sem.numPermitsAvailable == 2)
semHelper.sem.acquireAndRun(funcFuture())
assert(counter == 1)
assert(semHelper.sem.numPermitsAvailable == 1)
semHelper.sem.acquireAndRun(funcFuture())
assert(counter == 2)
assert(semHelper.sem.numPermitsAvailable == 0)
val future = semHelper.sem.acquireAndRunSync(func())
assert(counter == 2)
assert(semHelper.sem.numPermitsAvailable == 0)
      // sync func is blocked at this point.
      // But it should be executed as soon as one of the queued up future functions finishes
queue.dequeue().setValue(Unit)
assert(counter == 3)
val result = Await.result(future)
assert(result == 3)
assert(semHelper.sem.numPermitsAvailable == 1)
}
it("should handle queued up sync functions which throw exception") { semHelper =>
var counter = 0
val queue = new mutable.Queue[Promise[Unit]]()
val funcFuture = new (() => Future[Unit]) {
def apply(): Future[Unit] = {
counter = counter + 1
val promise = new Promise[Unit]()
queue.enqueue(promise)
promise
}
}
val badFunc = new (() => Int) {
def apply(): Int = {
throw new Exception("error!")
}
}
assert(semHelper.sem.numPermitsAvailable == 2)
semHelper.sem.acquireAndRun(funcFuture())
assert(counter == 1)
assert(semHelper.sem.numPermitsAvailable == 1)
semHelper.sem.acquireAndRun(funcFuture())
assert(counter == 2)
assert(semHelper.sem.numPermitsAvailable == 0)
val future = semHelper.sem.acquireAndRunSync(badFunc())
assert(counter == 2)
assert(semHelper.sem.numPermitsAvailable == 0)
      // sync func is blocked at this point.
      // But it should be executed as soon as one of the queued up future functions finishes
queue.dequeue().setValue(Unit)
assert(counter == 2)
assert(Try(Await.result(future)).isThrow)
assert(semHelper.sem.numPermitsAvailable == 1)
}
it("drains waiters when failed") { semHelper =>
val as = new AsyncSemaphore(1)
val (r1, r2, r3) = (as.acquire(), as.acquire(), as.acquire())
assert(r1.isDefined)
assert(!r2.isDefined)
assert(!r3.isDefined)
assert(as.numWaiters == 2)
as.fail(new Exception("woop"))
assert(as.numWaiters == 0)
// new acquisitions fail
Await.result(r1, 2.seconds).release()
val (r4, r5) = (as.acquire(), as.acquire())
assert(as.numWaiters == 0)
val results = Seq(r2.poll, r3.poll, r4.poll, r5.poll)
val msgs = results.collect { case Some(Throw(e)) => e.getMessage }
assert(msgs.forall(_ == "woop"))
}
}
}
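// Hedged usage sketch (editor addition): the acquire/release discipline the
// spec above exercises, in the usual helper shape. `doWork` is a hypothetical
// placeholder for any Future-producing operation.
object AsyncSemaphoreUsageSketch {
  import com.twitter.util.Future
  val sem = new AsyncSemaphore(2)
  // At most two invocations of doWork run concurrently; the permit is
  // released whether the work succeeds or fails.
  def limited[A](doWork: () => Future[A]): Future[A] =
    sem.acquire().flatMap(permit => doWork().ensure(permit.release()))
}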
|
folone/util
|
util-core/src/test/scala/com/twitter/concurrent/AsyncSemaphoreTest.scala
|
Scala
|
apache-2.0
| 8,549
|
package edu.osu.cse.groenkeb.logic.proof.engine.learn.qapprx
import edu.osu.cse.groenkeb.logic._
import edu.osu.cse.groenkeb.logic.proof._
import edu.osu.cse.groenkeb.logic.proof.engine.ProofStrategy
import edu.osu.cse.groenkeb.logic.proof.engine.ProofStrategy.Action
import edu.osu.cse.groenkeb.logic.proof.engine.ProofResult
final case class QLearningStrategy(features: Seq[Feature[ProblemState, Action]]) extends ProofStrategy {
def actions(implicit context: ProofContext): Seq[ProofStrategy.Action] = ???
def decide(result: ProofResult)(implicit context: ProofContext): ProofResult = ???
}
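// Hedged sketch (editor addition): the linear function approximation this
// strategy's name suggests, Q(s, a) = sum_i w(i) * phi_i(s, a). The `phi`
// evaluators and weight vector are hypothetical stand-ins, not the project's
// Feature API.
object LinearQSketch {
  def qValue[S, A](state: S, action: A,
                   phi: Seq[(S, A) => Double], w: Seq[Double]): Double =
    phi.iterator.zip(w.iterator).map { case (f, wi) => f(state, action) * wi }.sum
}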
|
bgroenks96/AutoMoL
|
learn/src/main/scala/edu/osu/cse/groenkeb/logic/proof/engine/learn/qapprx/QLearningStrategy.scala
|
Scala
|
mit
| 603
|
package hello
import org.springframework.web.bind.annotation.PathVariable
import com.mongodb.DBObject
import org.springframework.web.bind.annotation.RequestMapping
import org.springframework.validation.ObjectError
import com.mongodb.MongoClient
import org.springframework.validation.FieldError
import org.springframework.web.bind.MethodArgumentNotValidException
import org.springframework.data.mongodb.core.MongoTemplate
import org.springframework.beans.factory.annotation.Autowired
import com.mongodb.DBCursor
import org.springframework.web.bind.annotation.RequestBody
import org.springframework.web.bind.annotation.ResponseBody
import org.springframework.web.bind.annotation.RestController
import javax.validation.Valid
import com.mongodb.MongoClientURI
import java.net.HttpURLConnection
import java.net.URL
//import org.simpleframework.http.session.Controller
import java.util.ArrayList
import java.util.Date
import org.springframework.context.annotation.Bean
import com.fasterxml.jackson.databind.ObjectMapper
import com.mongodb.BasicDBObject
import org.springframework.web.bind.annotation.RequestMethod
import scala.collection.mutable.ListBuffer
import org.springframework.web.bind.annotation.ResponseStatus
import org.springframework.web.bind.annotation.{ExceptionHandler, PathVariable, RequestBody, RequestMapping, RequestMethod, ResponseBody, ResponseStatus, RestController}
import org.springframework.web.bind.annotation.ExceptionHandler
import org.springframework.http.HttpStatus
import com.mashape.unirest.http.HttpResponse
import com.mashape.unirest.http.Unirest
import com.mashape.unirest.http.JsonNode
import org.springframework.stereotype.Controller
import org.springframework.web.filter.ShallowEtagHeaderFilter
import java.io.BufferedReader
import java.io.InputStreamReader
@Controller
@RestController
@RequestMapping(Array("/")) //delete this
class HelloWorldController {
var mapobj: Map[Int, UserCreation] = Map()
var Icardmapobj: Map[Int, ICardCreation] = Map()
var BankAccmapobj: Map[Int, BankaccCreation] = Map()
var WebAccmapobj: Map[Int, WebaccCreation] = Map()
var addviewobj2 = new ListBuffer[BankaccCreation]()
var icardCreationList = new ListBuffer[ICardCreation]()
var webCreationList = new ListBuffer[WebaccCreation]()
var bankAccCreationList = new ListBuffer[BankaccCreation]()
@Autowired
val webRepository : WebCustomerRepository = null
@Autowired
val repository: UserRepository = null
@Autowired
val iCardrepository: userICardRepository = null
@Autowired
val bankAccrepository: userBankAccRepository = null
val uri = "mongodb://wallet:wallet@ds047930.mongolab.com:47930/wallet"
val mongoClientURI = new MongoClientURI(uri)
val mongoClient = new MongoClient(mongoClientURI)
val db = mongoClient.getDB("wallet")
//MongoOperations mongoOps = new MongoTemplate(mongoClient, "wallet");
@Autowired
val mongoOps = new MongoTemplate(mongoClient, "wallet")
val coll = db.getCollection("test")
val userCollection = db.getCollection("userCreation")
val iCardCollection = db.getCollection("iCardCreation")
val webCollection = db.getCollection("webaccCreation")
val bankCollection = db.getCollection("bankaccCreation")
// coll.insert(new BasicDBObject("testCol","hello"));
val dbCursor = coll.find()
while (dbCursor.hasNext) {
val o = dbCursor.next()
println(o)
}
@ResponseStatus(HttpStatus.CREATED)
@RequestMapping(value = Array("/api/v1/users"), method = Array(RequestMethod.POST)) //def createuser (@RequestParam(value="email", required=true) email : String , @RequestParam(value="password", required=true) password : String) = {
def createuser(@Valid @RequestBody user: UserCreation) : UserCreation = {
// println("Email from req is " + user.email + "pass");
var start = 10000
var end = 200000
var rnd = new scala.util.Random
user.id = start + rnd.nextInt( (end - start) + 1 )
user.creationdt = new Date().toString()
user.updateddt = new Date().toString()
//repository.save(new UserCreation(user.email,user.password,user.creationdt,user.id,user.updateddt));
//repository.save(user);
userCollection.insert(new BasicDBObject("_id",user.id).append("email",user.email).append("password",user.password).append("created_at",user.creationdt).append("updated_at",user.updateddt));
println("inser success");
return user
}
@RequestMapping(value = Array("/api/v1/users/{user_id}"), method = Array(RequestMethod.GET))
def viewuser(@PathVariable("user_id") id: Int): UserCreation = {
// var viewuser: UserCreation = null;
var returnUser: UserCreation = new UserCreation()
// returnUser=userCollection.findById(id);
var o:DBObject= userCollection.findOne(new BasicDBObject("_id",id))
returnUser.email = o.get("email").toString
returnUser.id = o.get("_id").asInstanceOf[Int]
returnUser.password=o.get("password").toString
returnUser.creationdt=o.get("created_at").toString
returnUser.updateddt=o.get("updated_at").toString
return returnUser
}
@ResponseStatus(HttpStatus.CREATED)
@RequestMapping(value = Array("/api/v1/users/{user_id}"), method = Array(RequestMethod.PUT))
def updateuser(@Valid @RequestBody user: UserCreation,@PathVariable("user_id") id: Int) : UserCreation = {
var updateUser: UserCreation = new UserCreation()
var o: DBObject = userCollection.findOne(new BasicDBObject("_id",id))
o.put("updated_at", new Date().toString)
o.put("email", user.email)
o.put("password", user.password)
userCollection.save(o)
updateUser.email = o.get("email").toString
updateUser.id = o.get("_id").asInstanceOf[Int]
updateUser.password=o.get("password").toString
updateUser.creationdt=o.get("created_at").toString
updateUser.updateddt=o.get("updated_at").toString
return updateUser
}
@ResponseStatus(HttpStatus.CREATED)
@RequestMapping(value = Array("/api/v1/users/{iCardUserId}/idcards"), method = Array(RequestMethod.POST))
def idcardcreation(@Valid @RequestBody useridcard: ICardCreation,@PathVariable("iCardUserId") id: Int) : ICardCreation = {
var start = 801000
var end = 888000
var Newrnd = new scala.util.Random
useridcard.iCardid = start + Newrnd.nextInt( (end - start) + 1 )
useridcard.iCardUserId=id
iCardCollection.insert(new BasicDBObject("_id",useridcard.iCardid).append("iCardUserId",useridcard.iCardUserId).append("card_name",useridcard.card_name).append("card_number",useridcard.card_number).append("expiration_date",useridcard.expiration_date));
println("Icard insert success");
return useridcard
}
@RequestMapping(value = Array("/api/v1/users/{iCardUserId}/idcards"), method = Array(RequestMethod.GET))
def idcardview(@PathVariable("iCardUserId") id: Int) :ArrayList[ICardCreation] = {
var currentCardList = new ArrayList[ICardCreation]
var returnUser : ICardCreation = null
var cursor : DBCursor = iCardCollection.find(new BasicDBObject("iCardUserId",id));
while (cursor.hasNext()) {
//System.out.println(cursor.next());
returnUser = new ICardCreation()
var obj:DBObject = cursor.next();
returnUser.card_name=obj.get("card_name").toString
returnUser.card_number=obj.get("card_number").toString
returnUser.iCardid=Integer.parseInt(obj.get("_id").toString)
returnUser.iCardUserId=Integer.parseInt(obj.get("iCardUserId").toString)
returnUser.expiration_date=obj.get("expiration_date").toString
System.out.println(returnUser);
currentCardList.add(returnUser);
}
return currentCardList
}
@ResponseStatus(HttpStatus.NO_CONTENT)
@RequestMapping(value = Array("/api/v1/users/{iCardUserId}/idcards/{iCardid}"), method = Array(RequestMethod.DELETE))
def idcarddelete(@Valid @RequestBody useridcard: ICardCreation,@PathVariable("iCardid") cardId: Int) = {
var o:DBObject = iCardCollection.findOne(new BasicDBObject("_id",cardId))
iCardCollection.remove(o)
}
@ResponseStatus(HttpStatus.CREATED)
@RequestMapping(value = Array("/api/v1/users/{webUserid}/weblogins"), method = Array(RequestMethod.POST))
def Webacccreate(@Valid @RequestBody webacc: WebaccCreation,@PathVariable("webUserid") id: String) : WebaccCreation = {
var start = 900000
var end = 1000000
var rnd = new scala.util.Random
webacc.webLoginId =start + rnd.nextInt( (end - start) + 1 )
webacc.webUserid=id
//webRepository.save(webacc);
webCollection.insert(new BasicDBObject("_id",webacc.webLoginId).append("webUserid",webacc.webUserid).append("password",webacc.password).append("login",webacc.login).append("url",webacc.url));
println("insert success");
return webacc
}
@RequestMapping(value = Array("/api/v1/users/{webUserid}/weblogins"), method = Array(RequestMethod.GET))
// def viewwebacc(@PathVariable("user_id") id: String) : ArrayList[WebaccCreation] = {
def viewwebacc(@PathVariable("webUserid") id: String) : ArrayList[WebaccCreation] = {
var returnWebUser: WebaccCreation = null
var returnWebUserList = new ArrayList[WebaccCreation]
// returnWebUser=webRepository.findBywebUserid(id)
var cursor: DBCursor = webCollection.find(new BasicDBObject("webUserid", id))
while (cursor.hasNext()) {
returnWebUser = new WebaccCreation()
var obj: DBObject = cursor.next();
returnWebUser.webLoginId = obj.get("_id").asInstanceOf[Int]
returnWebUser.webUserid = obj.get("webUserid").toString
returnWebUser.password = obj.get("password").toString
returnWebUser.login = obj.get("login").toString
returnWebUser.url = obj.get("url").toString
returnWebUserList.add(returnWebUser)
}
return returnWebUserList
}
@ResponseStatus(HttpStatus.NO_CONTENT)
@RequestMapping(value = Array("/api/v1/users/{user_id}/weblogins/{webLoginId}"), method = Array(RequestMethod.DELETE))
def deletewebacc(@Valid @RequestBody webacc: WebaccCreation,@PathVariable("webLoginId") loginId: Int) = {
//webRepository.removeBywebLoginId(loginId)
var delObj : DBObject = webCollection.findOne(new BasicDBObject("_id",loginId))
webCollection.remove(delObj)
}
@ResponseStatus(HttpStatus.CREATED)
@RequestMapping(value = Array("/api/v1/users/{user_id}/bankaccounts"), method = Array(RequestMethod.POST))
def bankacccreate(@Valid @RequestBody bankacc: BankaccCreation,@PathVariable("user_id") id: Int) : BankaccCreation = {
var start = 10550000
var end = 20550000
var rnd = new scala.util.Random
bankacc.bankId = start + rnd.nextInt( (end - start) + 1 )
bankacc.bankUserid=id
println("Bank insert success");
// var url: URL = null
// var conn: HttpURLConnection = null
var line: String = null
var result: String = null
var urlToRead : String= "http://www.routingnumbers.info/api/data.json?rn="+bankacc.routing_number
System.out.println("Url being accessed"+ urlToRead)
//url = new URL(urlToRead)
// var hpURLConn:HttpURLConnection = url.openConnection().asInstanceOf[HttpURLConnection]
// hpURLConn.setRequestMethod("GET")
var response : HttpResponse[JsonNode] =Unirest.get("http://www.routingnumbers.info/api/data.json")
.field("rn", bankacc.routing_number ).asJson();
var body : JsonNode =response.getBody();
var mapper:ObjectMapper = new ObjectMapper();
// var url1:String = "http://www.routingnumbers.info/api/data.json?rn="+bankAccount.routing_number
val url:URL = new URL(urlToRead);
var conn:HttpURLConnection = url.openConnection().asInstanceOf[HttpURLConnection]
conn.setRequestMethod("GET");
conn.setRequestProperty("Accept", "application/json");
if (conn.getResponseCode() != 200) {
//throw new RuntimeException("Failed : HTTP error code : "+ conn.getResponseCode());
bankacc.account_name = "Default Account Name"
}else{
var br:BufferedReader = new BufferedReader(new InputStreamReader(
(conn.getInputStream())));
var output:String=""
System.out.println("Output from Server .... \\n");
/*while ((output = br.readLine()) != null) {
System.out.println(output);
}*/
output = br.readLine();
System.out.println(output);
conn.disconnect();
var mapper:ObjectMapper = new ObjectMapper();
var routingDetails:routeDetails = mapper.readValue(output, classOf[routeDetails])
if(routingDetails.message .equals("OK")){
bankacc.account_name = routingDetails.customer_name
}else{
bankacc.account_name = "Default Bank Name"
}
println("customer name "+bankacc.account_name);
}
bankCollection.insert(new BasicDBObject("_id",bankacc.bankId).append("account_name",bankacc.account_name).append("account_number",bankacc.account_number).append("bankUserid",bankacc.bankUserid).append("routing_number",bankacc.routing_number));
return bankacc
}
@RequestMapping(value = Array("/api/v1/users/{user_id}/bankaccounts"), method = Array(RequestMethod.GET))
def viewbankacc(@PathVariable("user_id") id: Int) : ArrayList[BankaccCreation] = {
var returnBankUser: BankaccCreation =null
var returnBankUserList = new ArrayList[BankaccCreation]
// returnWebUser=webRepository.findBywebUserid(id)
var cursor: DBCursor = bankCollection.find(new BasicDBObject("bankUserid", id))
//System.out.println("Inside cursor"+id)
while (cursor.hasNext()) {
returnBankUser = new BankaccCreation()
var obj: DBObject = cursor.next();
// System.out.println("Inside while lool"+obj.get("_id").asInstanceOf[Int]+"userid "+obj.get("bankUserid").asInstanceOf[Int])
returnBankUser.bankId= obj.get("_id").asInstanceOf[Int]
returnBankUser.bankUserid = obj.get("bankUserid").asInstanceOf[Int]
returnBankUser.account_name = obj.get("account_name").toString
returnBankUser.account_number = obj.get("account_number").toString
returnBankUser.routing_number = obj.get("routing_number").toString
//returnBankUser.customer_name = obj.get("customer_name").toString
returnBankUserList.add(returnBankUser)
}
return returnBankUserList
}
@ResponseStatus(HttpStatus.NO_CONTENT)
@RequestMapping(value = Array("/api/v1/users/{user_id}/bankaccounts/{ba_id}"), method = Array(RequestMethod.DELETE))
def deletebankacc(@Valid @RequestBody bankacc: BankaccCreation,@PathVariable("ba_id") bank_id: Int) = {
//bankAccrepository.removeByBankId(bank_id)
var delObj : DBObject = bankCollection.findOne(new BasicDBObject("_id",bank_id))
bankCollection.remove(delObj)
}
/* @Bean
def etagFilter(): Filter = {
val shallowEtagHeaderFilter = new ShallowEtagHeaderFilter()
return shallowEtagHeaderFilter
}
@ExceptionHandler
@ResponseStatus(HttpStatus.BAD_REQUEST)
@ResponseBody
def handleException(ex:MethodArgumentNotValidException):ErrorMessage ={
var fldErrorsList:List[FieldError] =ex.getBindingResult().getFieldError()
//ex.getBindingResult().getFieldErrors()S
var objErrorsList:List[ObjectError] = ex.getBindingResult().getGlobalErrors()
var errorList:List[String] = new ArrayList[String]
var errorString:String = ""
var tempitr = fldErrorsList.iterator()
while(tempitr.hasNext()){
var fldError:FieldError = tempitr.next()
errorString = fldError.getField() + ", "+fldError.getDefaultMessage()
errorList.add(errorString)
}
var tempitr2 = objErrorsList.iterator()
while(tempitr2.hasNext()){
var objError:ObjectError = tempitr2.next()
errorString = objError.getObjectName() + ", "+objError.getDefaultMessage()
errorList.add(errorString)
}
return new ErrorMessage(errorList);
}*/
}
|
nikhilpanicker/cmpe273-assignment2
|
src/main/scala/hello/HelloController.scala
|
Scala
|
mit
| 15,750
|
package com.tpalanga.testlib.test.client.impl
import akka.actor.ActorSystem
import akka.http.scaladsl.marshalling.Marshal
import akka.http.scaladsl.model.{ContentTypes, RequestEntity}
import akka.stream.{ActorMaterializer, ActorMaterializerSettings, Materializer}
import com.tpalanga.testlib.test.client.{NoEntity, Response, RestServiceClient}
import com.tpalanga.testlib.test.config.RestServiceConfig
import com.typesafe.scalalogging.LazyLogging
import scala.concurrent.{ExecutionContext, Future}
object NewsletterServiceRestClient {
type NewsletterServiceRestClientFactory = (RestServiceConfig, ActorSystem) => NewsletterServiceRestClient
def defaultFactory: NewsletterServiceRestClientFactory =
(config, system) => new NewsletterServiceRestClient(config)(system)
}
class NewsletterServiceRestClient(val restServiceConfig: RestServiceConfig)
(implicit val system: ActorSystem)
extends RestServiceClient with LazyLogging {
import NoEntity.DataFormats._
import SubscriberJsonProtocol._
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
logger.debug(s"NewsletterServiceRestServiceClient: $restServiceConfig")
private implicit val materializer: Materializer = ActorMaterializer(ActorMaterializerSettings(system))
def subscriberRetrieve(id: String)(implicit ec: ExecutionContext): Future[Response[Subscriber]] =
client.get(s"/data/subscribers/$id").map { httpResponse =>
Response[Subscriber](httpResponse)
}
def subscriberCreate(subscriber: Subscriber)(implicit ec: ExecutionContext): Future[Response[Subscriber]] =
for {
entity <- Marshal(subscriber).to[RequestEntity]
httpResponse <- client.post(s"/data/subscribers", Nil, entity.withContentType(ContentTypes.`application/json`))
} yield Response[Subscriber](httpResponse)
def subscriberUpdate(user: Subscriber)(implicit ec: ExecutionContext): Future[Response[Subscriber]] =
for {
entity <- Marshal(user).to[RequestEntity]
httpResponse <- client.put(s"/data/subscribers/${user.id}", Nil, entity.withContentType(ContentTypes.`application/json`))
} yield Response[Subscriber](httpResponse)
def subscriberDelete(id: String)(implicit ec: ExecutionContext): Future[Response[NoEntity]] =
client.delete(s"/data/subscribers/$id").map { httpResponse =>
Response[NoEntity](httpResponse)
}
def subscriberList()(implicit ec: ExecutionContext): Future[Response[Subscribers]] =
client.get(s"/data/subscribers").map { httpResponse =>
Response[Subscribers](httpResponse)
}
}
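// Hedged usage sketch (editor addition): obtaining a client through the
// default factory and listing subscribers. Constructing a concrete
// RestServiceConfig is left to the caller because its fields are defined
// elsewhere; `Subscribers` is the model type returned by subscriberList above.
object NewsletterServiceRestClientSketch {
  import akka.actor.ActorSystem
  import scala.concurrent.{ExecutionContext, Future}
  import com.tpalanga.testlib.test.client.Response
  import com.tpalanga.testlib.test.config.RestServiceConfig
  def listAll(config: RestServiceConfig)
             (implicit system: ActorSystem, ec: ExecutionContext): Future[Response[Subscribers]] =
    NewsletterServiceRestClient.defaultFactory(config, system).subscriberList()
}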
|
tpalanga/akka-http-microservice
|
testLib/src/main/scala/com/tpalanga/testlib/test/client/impl/NewsletterServiceRestClient.scala
|
Scala
|
unlicense
| 2,584
|
package org.deca.compiler.signature
import scala.collection.mutable.Lattice
import scala.collection.mutable.Stack
import scala.collection.immutable.Set
import scala.math.PartialOrdering
sealed abstract class InferenceConstraint {
def substitute(vx: SignatureVariable,newY: MonoSignature): Unit
def alpha: MonoSignature
def beta: MonoSignature
def polymorphic: Boolean = (alpha,beta) match {
case (vx: SignatureVariable,vy: SignatureVariable) => !vx.isInstanceOf[BoundsVariable[_]] && !vy.isInstanceOf[BoundsVariable[_]]
case _ => false
}
}
case class SubsumptionConstraint(var x: MonoSignature,var y: MonoSignature) extends InferenceConstraint {
override def alpha = x
override def beta = y
override def substitute(vx: SignatureVariable,newY: MonoSignature): Unit = {
x = x.replace(vx,newY)
y = y.replace(vx,newY)
}
override def toString = x.toString + " <: " + y.toString
}
case class PhysicalSubtypingConstraint(var x: MonoType,var y: MonoType) extends InferenceConstraint {
override def alpha = x
override def beta = y
override def substitute(vx: SignatureVariable,newY: MonoSignature): Unit = {
x = x.replace(vx,newY).asInstanceOf[MonoType]
y = y.replace(vx,newY).asInstanceOf[MonoType]
}
override def toString = x.toString + " <@< " + y.toString
}
case class EqualityConstraint(var x: MonoSignature,var y: MonoSignature) extends InferenceConstraint {
override def alpha = x
override def beta = y
override def substitute(vx: SignatureVariable,newY: MonoSignature): Unit = {
x = x.replace(vx,newY)
y = y.replace(vx,newY)
}
override def toString = x.toString + " =:= " + y.toString
}
trait InferenceOrdering[T <: MonoSignature] {
protected val lattice: Lattice[T]
def lt(x: T,y: T): Option[Set[InferenceConstraint]]
def equiv(x: T,y: T): Option[Set[InferenceConstraint]]
def lteq(x: T,y: T): Option[Set[InferenceConstraint]] = lt(x,y) orElse equiv(x,y)
def join(x: T,y: T): (T,Set[InferenceConstraint]) = {
val tau = lattice.join(x,y)
(lteq(x,tau),lteq(y,tau)) match {
case (Some(cx),Some(cy)) => (tau,cx ++ cy)
case (None,None) => throw new Exception("Unsatisfiable signature constraints: " + SubsumptionConstraint(x,tau).toString + " and " + SubsumptionConstraint(y,tau).toString)
case (None,_) => throw new Exception("Unsatisfiable signature constraint: " + SubsumptionConstraint(x,tau).toString)
case (_,None) => throw new Exception("Unsatisfiable signature constraint: " + SubsumptionConstraint(y,tau).toString)
}
}
def meet(x: T,y: T): (T,Set[InferenceConstraint]) = {
val tau = lattice.meet(x,y)
(lteq(tau,x),lteq(tau,y)) match {
case (Some(cx),Some(cy)) => (tau,cx ++ cy)
case (None,None) => throw new Exception("Unsatisfiable signature constraints: " + SubsumptionConstraint(tau,x).toString + " and " + SubsumptionConstraint(tau,y).toString)
case (None,_) => throw new Exception("Unsatisfiable signature constraint: " + SubsumptionConstraint(tau,x).toString)
case (_,None) => throw new Exception("Unsatisfiable signature constraint: " + SubsumptionConstraint(tau,y).toString)
}
}
}
object SignatureRelation extends InferenceOrdering[MonoSignature] {
override protected val lattice = null
override def lt(x: MonoSignature,y: MonoSignature): Option[Set[InferenceConstraint]] = (x,y) match {
case (tx: MonoType,ty: MonoType) => TypeRelation.lt(tx,ty)
case (rx: MonoRegion,ry: MonoRegion) => RegionRelation.lt(rx,ry)
case (ex: MonoEffect,ey: MonoEffect) => EffectRelation.lt(ex,ey)
case (mx: MonoMutability,my: MonoMutability) => MutabilityRelation.lt(mx,my)
case _ => throw new Exception("Mismatched signatures: " + x.toString + " <: " + y.toString)
}
override def equiv(x: MonoSignature,y: MonoSignature): Option[Set[InferenceConstraint]] = (x,y) match {
case (tx: MonoType,ty: MonoType) => TypeRelation.equiv(tx,ty)
case (rx: MonoRegion,ry: MonoRegion) => RegionRelation.equiv(rx,ry)
case (ex: MonoEffect,ey: MonoEffect) => EffectRelation.equiv(ex,ey)
case (mx: MonoMutability,my: MonoMutability) => MutabilityRelation.equiv(mx,my)
case _ => throw new Exception("Mismatched signatures: " + x.toString + " =:= " + y.toString)
}
override def join(x: MonoSignature,y: MonoSignature): (MonoSignature,Set[InferenceConstraint]) = (x,y) match {
case (tx: MonoType,ty: MonoType) => TypeRelation.join(tx,ty)
case (rx: MonoRegion,ry: MonoRegion) => RegionRelation.join(rx,ry)
case (ex: MonoEffect,ey: MonoEffect) => EffectRelation.join(ex,ey)
case (mx: MonoMutability,my: MonoMutability) => MutabilityRelation.join(mx,my)
case _ => throw new Exception("Mismatched signatures: " + x.toString + " join " + y.toString)
}
override def meet(x: MonoSignature,y: MonoSignature): (MonoSignature,Set[InferenceConstraint]) = (x,y) match {
case (tx: MonoType,ty: MonoType) => TypeRelation.meet(tx,ty)
case (rx: MonoRegion,ry: MonoRegion) => RegionRelation.meet(rx,ry)
case (ex: MonoEffect,ey: MonoEffect) => EffectRelation.meet(ex,ey)
case (mx: MonoMutability,my: MonoMutability) => MutabilityRelation.meet(mx,my)
case _ => throw new Exception("Mismatched signatures: " + x.toString + " join " + y.toString)
}
}
object SignatureOrdering extends PartialOrdering[MonoSignature] {
override def lt(x: MonoSignature,y: MonoSignature): Boolean = SignatureRelation.lt(x,y) match {
case Some(constraints) => constraints.isEmpty
case None => false
}
override def equiv(x: MonoSignature,y: MonoSignature): Boolean = SignatureRelation.equiv(x,y) match {
case Some(constraints) => constraints.isEmpty
case None => false
}
override def gt(x: MonoSignature,y: MonoSignature): Boolean = lt(y,x)
override def lteq(x: MonoSignature,y: MonoSignature): Boolean = equiv(x,y) || lt(x,y)
override def gteq(x: MonoSignature,y: MonoSignature): Boolean = equiv(x,y) || gt(x,y)
override def tryCompare(x: MonoSignature,y: MonoSignature): Option[Int] = {
if(gt(x,y))
Some(1)
else if(lt(x,y))
Some(-1)
else if(equiv(x,y))
Some(0)
else
None
}
}
|
Playermet/codegoogle.decac
|
src/org/deca/compiler/signature/SignatureOrdering.scala
|
Scala
|
gpl-3.0
| 6,142
|
import scala.math
object Prob46 {
val twiceSquare = Stream.from(1).map { i => 2*i*i }
val primes = Prime.set(10000000)
def isGoldbach(n: Int): Boolean =
twiceSquare.takeWhile(_ < n).exists{ i => primes.contains(n - i) }
def main(args: Array[String]) {
assert(Array(9, 15, 21, 25, 27, 33).forall(isGoldbach))
val result = Stream.range(9, primes.max, 2)
.filterNot(primes.contains)
.filterNot(isGoldbach).head
println(result)
}
}
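// Hedged worked example (editor addition): isGoldbach checks whether an odd
// composite n can be written as n = p + 2*k*k with p prime, e.g.
// 9 = 7 + 2*1*1 and 27 = 19 + 2*2*2. The object name is hypothetical.
object Prob46Sketch {
  val bothHold: Boolean = Prob46.isGoldbach(9) && Prob46.isGoldbach(27) // true
}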
|
ponkotuy/ProjectEular
|
src/main/scala/Prob46.scala
|
Scala
|
mit
| 470
|
/**
* ====
* This file is part of SensApp [ http://sensapp.modelbased.net ]
*
* Copyright (C) 2011- SINTEF ICT
* Contact: SINTEF ICT <nicolas.ferry@sintef.no>
*
* Module: net.modelbased.sensapp
*
* SensApp is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* SensApp is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with SensApp. If not, see
* <http://www.gnu.org/licenses/>.
* ====
*
* This file is part of SensApp [ http://sensapp.modelbased.net ]
*
* Copyright (C) 2012- SINTEF ICT
* Contact: SINTEF ICT <nicolas.ferry@sintef.no>
*
* Module: net.modelbased.sensapp.backyard.gatling
*
* SensApp is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* SensApp is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with SensApp. If not, see
* <http://www.gnu.org/licenses/>.
*/
package net.modelbased.sensapp.backyard.gatling
//import io.gatling.core.session.Session
//import io.gatling.core.scenario.{scenario, Simulation}
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import bootstrap._
import io.gatling.core.scenario.RampInjection
import io.gatling.core.structure.ProfiledScenarioBuilder
/**
* Created with IntelliJ IDEA.
* User: Jonathan
* Date: 05/08/13
* Time: 12:48
*/
class SensorPushSimulation extends Simulation {
val numberOfUsers: Int = 5
val timeframe: Int = 10
val numberOfData: Int = 5
val maxDelayBetweenPush: Int = 400
def apply = {
//ramp(numberOfUsers);
//rampRate(timeframe);
setUp(sensorPush.inject(RampInjection(numberOfUsers, timeframe)))
/*List(sensorPush)*/
}
val headers = Map("Content-Type" -> "application/json", "Accept" -> "text/plain,application/json")
val sensorPush =
scenario("Sensor pushing Data")
.exec { (session: Session) => // Preparing the session
session.set("sensorId", RandomSensor())
.set("stamp", (System.currentTimeMillis / 1000))
}
.exec{ // 0. Is SensApp alive?
http("Is SensApp alive?")
.get("http://"+Target.serverName+"/databases/raw/sensors")
.check(status is 200)
}.pause(100, 200/*, MILLISECONDS*/)
.exec { // 1. Creating the database
http("Creating the database")
.post("http://"+Target.serverName+"/databases/raw/sensors")
.headers(headers)
        .body(StringBody("{\"sensor\": \"${sensorId}\", \"baseTime\": ${stamp}, \"schema\": \"Numerical\"}"))
}.pause(100, 200/*, MILLISECONDS*/)
.repeat(numberOfData){ // Pushing data
exec { session: Session =>
session.set("data", RandomData(session("sensorId").as[String],
session("stamp").as[Long]))
}.exec {
http("Pushing random data")
.put("http://"+Target.serverName+"/databases/raw/data/${sensorId}")
.headers(headers).body(StringBody("${data}"))
}.exec { (session: Session) =>
session.set("stamp", session("stamp").as[Long] + 1)
}.pause(100, maxDelayBetweenPush/*, MILLISECONDS*/)
}//.times(numberOfData)
.exec { // 3. Eventually deleting the database
http("Deleting the database")
.delete("http://"+Target.serverName+"/databases/raw/sensors/${sensorId}")
}
setUp(sensorPush.inject(RampInjection(numberOfUsers, timeframe)))
}
|
SINTEF-9012/sensapp
|
net.modelbased.sensapp.backyard.gatling/src/main/scala/net/modelbased/sensapp/backyard/gatling/SensorPushSimulation.scala
|
Scala
|
lgpl-3.0
| 4,221
|
package alexsmirnov.pbconsole.gcode
import spray.json._
import spray.json.DefaultJsonProtocol._
import scala.util.Try
import spray.json.ParserInput.apply
object G2Response {
case class G2CommandResponse(r: JsValue, f: List[Int], rawLine: String) extends CommandResponse with StatusResponse {
def status = f(1)
def linesAvailable = f(2)
def isError = status != 0
def values = Nil
}
implicit val g2CRFormat = jsonFormat3(G2CommandResponse)
case class G2StatusReport(sr: Map[String, Double], rawLine: String) extends StatusResponse {
def get(field: String): Option[Double] = sr.get(field)
def values = Nil
}
implicit val g2SFormat = jsonFormat2(G2StatusReport)
case class ExceptionInfo(fb: BigDecimal, st: Int, msg: String)
implicit val eiFormat = jsonFormat3(ExceptionInfo)
case class ExceptionReport(er: ExceptionInfo, rawLine: String) extends Response
implicit val erFormat = jsonFormat2(ExceptionReport)
def apply(line: String): Response = {
val jsonOpt = Try(JsonParser(line)).toOption
jsonOpt.flatMap { json =>
val jo = json.asJsObject
extractResponse(jo.copy( jo.fields + ("rawLine"->JsString(line)) ))
}.getOrElse(UnknownResponse(line))
}
def extractResponse(json: JsValue): Option[Response] = {
val cr = safeReader[G2CommandResponse].read(json)
val sr = cr.left.flatMap{ _ => safeReader[G2StatusReport].read(json)}
val er = sr.left.flatMap{ _ => safeReader[ExceptionReport].read(json)}
er.right.toOption
}
}
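// Hedged usage sketch (editor addition): parsing a g2core-style command
// acknowledgement. The sample line is illustrative, not captured firmware
// output; any line that fails to parse falls back to UnknownResponse.
object G2ResponseSketch {
  // For this input the result is a G2CommandResponse with status f(1) = 0
  // (no error) and f(2) = 28 buffer lines available.
  val parsed: Response = G2Response("""{"r":{},"f":[1,0,28]}""")
}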
|
alexsmirnov/printrbot-g2-console
|
src/main/scala/alexsmirnov/pbconsole/gcode/G2Response.scala
|
Scala
|
bsd-3-clause
| 1,514
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.streaming.parser
import java.text.SimpleDateFormat
import java.util
import org.apache.hadoop.conf.Configuration
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.encoders.{ExpressionEncoder, RowEncoder}
import org.apache.spark.sql.types.StructType
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.processing.loading.ComplexDelimitersEnum
import org.apache.carbondata.processing.loading.constants.DataLoadProcessorConstants
import org.apache.carbondata.util.SparkStreamingUtil
/**
* SparkSQL Row Stream Parser.
*/
class RowStreamParserImp extends CarbonStreamParser {
var configuration: Configuration = _
var isVarcharTypeMapping: Array[Boolean] = _
var structType: StructType = _
var encoder: ExpressionEncoder[Row] = _
var timeStampFormat: SimpleDateFormat = _
var dateFormat: SimpleDateFormat = _
val complexDelimiters: util.ArrayList[String] = new util.ArrayList[String]()
var serializationNullFormat: String = _
override def initialize(configuration: Configuration,
structType: StructType, isVarcharTypeMapping: Array[Boolean]): Unit = {
this.configuration = configuration
this.structType = structType
this.encoder = RowEncoder.apply(this.structType).resolveAndBind()
this.isVarcharTypeMapping = isVarcharTypeMapping
this.timeStampFormat = new SimpleDateFormat(
this.configuration.get(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT))
this.dateFormat = new SimpleDateFormat(
this.configuration.get(CarbonCommonConstants.CARBON_DATE_FORMAT))
this.complexDelimiters.add(this.configuration.get("carbon_complex_delimiter_level_1"))
this.complexDelimiters.add(this.configuration.get("carbon_complex_delimiter_level_2"))
this.complexDelimiters.add(this.configuration.get("carbon_complex_delimiter_level_3"))
this.complexDelimiters.add(ComplexDelimitersEnum.COMPLEX_DELIMITERS_LEVEL_4.value())
this.serializationNullFormat =
this.configuration.get(DataLoadProcessorConstants.SERIALIZATION_NULL_FORMAT)
}
  override def parserRow(value: InternalRow): Array[Object] = {
    SparkStreamingUtil.convertInternalRowToRow(encoder)(value).toSeq.zipWithIndex.map {
      case (x, i) =>
        FieldConverter.objectToString(
          x, serializationNullFormat, complexDelimiters,
          timeStampFormat, dateFormat,
          isVarcharType = i < this.isVarcharTypeMapping.length && this.isVarcharTypeMapping(i),
          binaryCodec = null)
    }.toArray
  }
override def close(): Unit = {
}
}
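// Wiring sketch: the keys below are the ones read in initialize above; the
// format strings, delimiters, and one-column schema are illustrative only.
object RowStreamParserImpExample {
  def main(args: Array[String]): Unit = {
    val conf = new Configuration()
    conf.set(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd HH:mm:ss")
    conf.set(CarbonCommonConstants.CARBON_DATE_FORMAT, "yyyy-MM-dd")
    conf.set("carbon_complex_delimiter_level_1", "#")
    conf.set("carbon_complex_delimiter_level_2", ":")
    conf.set("carbon_complex_delimiter_level_3", "@")
    conf.set(DataLoadProcessorConstants.SERIALIZATION_NULL_FORMAT, "NULL")
    val schema = new StructType().add("name", org.apache.spark.sql.types.StringType)
    val parser = new RowStreamParserImp
    parser.initialize(conf, schema, Array(false))
  }
}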
|
zzcclp/carbondata
|
streaming/src/main/scala/org/apache/carbondata/streaming/parser/RowStreamParserImp.scala
|
Scala
|
apache-2.0
| 3,425
|
/*
* Copyright (c) 2015-2022 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Attribution Notice under the terms of the Apache License 2.0
*
* This work was created by the collective efforts of the openCypher community.
* Without limiting the terms of Section 6, any Derivative Work that is not
* approved by the public consensus process of the openCypher Implementers Group
* should not be described as “Cypher” (and Cypher® is a registered trademark of
* Neo4j Inc.) or as "openCypher". Extensions by implementers or prototypes or
* proposals for change that have been documented or implemented should only be
* described as "implementation extensions to Cypher" or as "proposed changes to
* Cypher that are not yet approved by the openCypher community".
*/
package org.opencypher.tools.tck.inspection.browser.web
import org.opencypher.tools.tck.api.CypherValueRecords
import org.opencypher.tools.tck.api.Dummy
import org.opencypher.tools.tck.api.ExecQuery
import org.opencypher.tools.tck.api.ControlQuery
import org.opencypher.tools.tck.api.CsvFile
import org.opencypher.tools.tck.api.Execute
import org.opencypher.tools.tck.api.ExpectError
import org.opencypher.tools.tck.api.ExpectResult
import org.opencypher.tools.tck.api.InitQuery
import org.opencypher.tools.tck.api.Measure
import org.opencypher.tools.tck.api.Parameters
import org.opencypher.tools.tck.api.RegisterProcedure
import org.opencypher.tools.tck.api.Scenario
import org.opencypher.tools.tck.api.SideEffectQuery
import org.opencypher.tools.tck.api.SideEffects
import org.opencypher.tools.tck.api.Step
import org.opencypher.tools.tck.constants.TCKSideEffects.ADDED_LABELS
import org.opencypher.tools.tck.constants.TCKSideEffects.ADDED_NODES
import org.opencypher.tools.tck.constants.TCKSideEffects.ADDED_PROPERTIES
import org.opencypher.tools.tck.constants.TCKSideEffects.ADDED_RELATIONSHIPS
import org.opencypher.tools.tck.constants.TCKSideEffects.DELETED_LABELS
import org.opencypher.tools.tck.constants.TCKSideEffects.DELETED_NODES
import org.opencypher.tools.tck.constants.TCKSideEffects.DELETED_PROPERTIES
import org.opencypher.tools.tck.constants.TCKSideEffects.DELETED_RELATIONSHIPS
import org.opencypher.tools.tck.api.groups.Group
import scalatags.Text
import scalatags.Text.all._
import scalatags.Text.tags2
import scalatags.generic
import scalatags.text.Builder
import scala.language.implicitConversions
trait PageBasic {
class FragOption[T](option: Option[T]) {
def mapToFrag(f: T => Frag): Frag = option match {
case None => frag()
case Some(value) => f(value)
}
}
implicit def optionToFragOption[T](option: Option[T]): FragOption[T] = new FragOption[T](option)
def categorySeparator = "⟩"
def inlineSpacer(): Text.TypedTag[String] = span(width:=1.em, display.`inline-block`)(" ")
def pageTitle(title: Frag*): Text.TypedTag[String] = h1(CSS.pageTitle)(title)
def sectionTitle(title: Frag*): Text.TypedTag[String] = h2(CSS.sectionTitle)(title)
def subSectionTitle(title: Frag*): Text.TypedTag[String] = h3(CSS.subSectionTitle)(title)
def error(msg: Frag*): Text.all.doctype = page(div(color.red)(msg))
def page(content: Frag*): Text.all.doctype = doctype("html")(
html(
head(
meta(charset:="utf-8"),
tags2.style(CSS.styleSheetText)
),
body(content)
)
)
def listScenariosPage(scenarios: Group => Option[Set[Scenario]], group: Group, kind: Option[Frag], showSingleScenarioURL: Scenario => String, openScenarioInEditorURL: Scenario => String ): Text.all.doctype = {
val scenarioSeq = scenarios(group).map(_.toSeq.sortBy(s => (s.categories.mkString("/"), s.featureName, s.number, s.name, s.exampleIndex))).getOrElse(Seq.empty[Scenario])
page(
pageTitle(scenarioSeq.size, kind.mapToFrag(k => frag(" ", k)), " scenario(s) in group ", i(group.toString)),
ul(
for(s <- scenarioSeq) yield
li(
scenarioLocationFrag(s),
inlineSpacer(),
s.number.map(n => frag(s"[$n]", inlineSpacer())).getOrElse(frag()),
link(showSingleScenarioURL(s), scenarioTitle(s)),
inlineSpacer(),
blankLink(openScenarioInEditorURL(s), "[code]"),
)
)
)
}
def scenarioTitle(scenario: Scenario): String =
scenario.name + scenario.exampleIndex.map(i => " #" + i).getOrElse("") + scenario.exampleName.map(n => " (" + n + ")").getOrElse("")
def scenarioLocationFrag(scenario: Scenario,
collection: Option[String] = None,
showUrl: Option[String] = None,
sourceUrl: Option[String] = None): generic.Frag[Builder, String] =
frag(
collection.mapToFrag(col =>
span(CSS.tckCollection)(col)
),
scenario.categories.flatMap(c =>
Seq(span(CSS.categorySepInLocationLine)(categorySeparator), span(CSS.categoryNameInLocationLine)(c))
),
span(CSS.featureIntroInLocationLine)(categorySeparator, categorySeparator), span(CSS.featureNameInLocationLine)(scenario.featureName),
showUrl.mapToFrag(url =>
span(CSS.scenarioLinkInLocationLine)(link(url, "[show]"))
),
sourceUrl.mapToFrag(url =>
span(CSS.scenarioLinkInLocationLine)(blankLink(url,"[code]"))
),
)
trait Anchor
def anchor(anchor: Anchor): Text.TypedTag[String] = a(name:=anchor.toString)
def link2LocalAnchor(anchor: Anchor, linkContent: Frag*): Text.TypedTag[String] =
a(href:="#"+anchor.toString)(linkContent)
def link(url: String, linkContent: Frag*): Text.TypedTag[String] =
a(href:=url)(linkContent)
def blankLink(url: String, linkContent: Frag*): Text.TypedTag[String] =
a(href:=url, target:="_blank")(linkContent)
def stepFrag(step: Step): Text.TypedTag[String] = {
def stepFrag(name: String, content: Frag*): Text.TypedTag[String] =
div(CSS.step)(
div(if (content.isEmpty) CSS.emptyStepName else CSS.stepName)(name),
if(content.nonEmpty) div(CSS.stepContent)(content) else frag()
)
step match {
case Dummy(_) =>
stepFrag(
"Setup an empty graph"
)
case Parameters(values, _) =>
stepFrag(
"Parameters",
div(table()(
for((k,v) <- values.toSeq) yield
tr(
td()(code(k)),
td()("="),
td()(code(v.toString))
)
))
)
case CsvFile(urlParameter, values, _) =>
stepFrag(
"Create CSV file",
div("containing"),
div(cypherValueRecordsFrag(values)),
div("file URL available in parameter ", code("$", urlParameter))
)
case RegisterProcedure(signature, values, _) =>
stepFrag(
"Registered procedure",
div(code(signature)),
div(cypherValueRecordsFrag(values))
)
case Measure(source) =>
stepFrag(
"Measure side effects"
)
case Execute(query, qt, source) =>
stepFrag(
qt match {
case InitQuery => "Initialize with"
case ExecQuery => "Execute query"
case ControlQuery => "Control query"
case SideEffectQuery => "Execute update"
},
div(pre(fontFamily:="Monospace")(query))
)
case ExpectResult(expectedResult, _, sorted) =>
stepFrag(
"Expect result, " + (if (sorted) "in order" else "in any order"),
div(cypherValueRecordsFrag(expectedResult))
)
case SideEffects(expected, _) =>
val sideEffectOrder = Seq(ADDED_NODES, ADDED_RELATIONSHIPS, ADDED_LABELS, ADDED_PROPERTIES,
DELETED_NODES, DELETED_RELATIONSHIPS, DELETED_LABELS, DELETED_PROPERTIES)
stepFrag(
"Check side effects",
div(table()(
for(eff <- sideEffectOrder) yield
tr(
td()(eff),
td()(expected.v.getOrElse(eff, 0).toString)
)
))
)
case ExpectError(errorType, phase, detail, _) =>
stepFrag("Expect error",
div(table()(
tr(td()(b("Type:")), td()(errorType)),
tr(td()(b("Phase:")), td()(phase)),
tr(td()(b("Detail:")), td()(detail)),
))
)
}
}
def cypherValueRecordsFrag(values: CypherValueRecords): Text.TypedTag[String] =
table()(
tr(
for(col <- values.header) yield th(col)
),
for(row <- values.rows) yield
tr(
for(col <- values.header) yield td(code(row(col).toString))
)
)
}
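// Rendering sketch: a scalatags doctype renders to an HTML string, so pages
// built from these helpers can be written straight to a file or HTTP response
// (object name illustrative).
object PageBasicExample extends PageBasic {
  def overviewHtml: String = page(pageTitle("Overview")).render
}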
|
opencypher/openCypher
|
tools/tck-inspection/src/main/scala_2.13/org/opencypher/tools/tck/inspection/browser/web/PageBasic.scala
|
Scala
|
apache-2.0
| 9,243
|
/*
* Copyright 2016 Ben Ripkens
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.bripkens.ha.reporting
import akka.actor.{ Props, ActorLogging, Actor }
import com.fasterxml.jackson.databind.ObjectMapper
import de.bripkens.ha.ComponentStatus.ComponentStatus
import de.bripkens.ha.{ ComponentStatus, ComponentStatusUpdate, ConsoleReporterConfig }
import scala.collection.mutable
object ConsoleReporter {
def props(mapper: ObjectMapper, config: ConsoleReporterConfig) = Props(new ConsoleReporter(mapper, config))
}
class ConsoleReporter(val mapper: ObjectMapper, val config: ConsoleReporterConfig) extends Actor
with ActorLogging {
val componentStatus = new mutable.HashMap[String, ComponentStatus]()
override def receive: Receive = {
case ComponentStatusUpdate(component, ComponentStatus.OKAY, _) => {
if (!componentStatus.get(component.id).contains(ComponentStatus.OKAY)) {
log.info(s"${component.name} is okay.")
componentStatus.put(component.id, ComponentStatus.OKAY)
}
}
case ComponentStatusUpdate(component, ComponentStatus.UNHEALTHY, _) => {
if (!componentStatus.get(component.id).contains(ComponentStatus.UNHEALTHY)) {
log.info(s"${component.name} has some issues.")
componentStatus.put(component.id, ComponentStatus.UNHEALTHY)
}
}
case ComponentStatusUpdate(component, ComponentStatus.NOT_REACHABLE, _) => {
if (!componentStatus.get(component.id).contains(ComponentStatus.NOT_REACHABLE)) {
log.info(s"${component.name} cannot be reached.")
componentStatus.put(component.id, ComponentStatus.NOT_REACHABLE)
}
}
}
}
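// Wiring sketch (the actor system and the third field of the status message
// are illustrative; only the constructors shown appear in this file):
//   val reporter = system.actorOf(ConsoleReporter.props(mapper, config), "console-reporter")
//   reporter ! ComponentStatusUpdate(component, ComponentStatus.OKAY, timestamp)
// Updates that repeat the last known status are swallowed, so only transitions
// are logged.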
|
bripkens/health-check-adapter
|
src/main/scala/de/bripkens/ha/reporting/ConsoleReporter.scala
|
Scala
|
apache-2.0
| 2,174
|
/* Copyright 2012-2019 Micronautics Research Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License. */
package com.micronautics
import play.api.libs.json.{Format, JsError, JsResult, JsString, JsSuccess, JsValue}
import scala.reflect.ClassTag
package object playUtils {
implicit object JsonFormatImplicits extends JsonFormats
/** Create Play JSON Reads/Writes (Formatter) for any Java Enum.
* Use like this (the unit tests contain similar code):
* {{{
* implicit val myEnumFormat = javaEnumFormat[MyEnum]
* Json.toJson(DiscountEnum.FullPrice) == "FullPrice"
* Json.fromJson[DiscountEnum](JsString("FullPrice")) == DiscountEnum.FullPrice
* }}}
* @see [http://stackoverflow.com/a/34045056/553865] */
implicit def javaEnumFormat[E <: Enum[E] : ClassTag]: Format[E] = new Format[E] {
override def reads(json: JsValue): JsResult[E] = json.validate[String] match {
case JsSuccess(value, _) => try {
val clazz = implicitly[ClassTag[E]].runtimeClass.asInstanceOf[Class[E]]
JsSuccess(Enum.valueOf(clazz, value))
} catch {
case _: IllegalArgumentException => JsError("enumeration.unknown.value")
}
case JsError(_) => JsError("enumeration.expected.string")
}
override def writes(o: E): JsValue = JsString(o.toString)
}
}
|
mslinn/scalacourses-play-utils
|
src/main/scala/com/micronautics/playUtils/package.scala
|
Scala
|
mit
| 1,813
|
object Switch
{
// def withResource[A](f : Resource => A) : A = {
// val r = getResource() // Replace with the code to acquire the resource
// try {
// f(r)
// } finally {
// r.dispose()
// }
// }
abstract class ReturnBox[+T] { def value: T }
case class Return[+T](val value: T) extends ReturnBox[T]
case object Void extends ReturnBox[Nothing] { def value = throw new Exception("Oops") }
  case object Break extends ReturnBox[Nothing] { def value = throw new Exception("Oops") }
case object Continue extends ReturnBox[Nothing] { def value = throw new Exception("Oops") }
  // Evaluate the cases in order: skip cases whose value does not match, stop
  // on Return or Break, and fall through to the remaining cases on Continue.
  def apply[A, B](x: A)(cases: Case[A, B]*): ReturnBox[B] = cases.toList match {
    case Nil => Break
    case c :: rest if !c.matches(x) => apply(x)(rest: _*)
    case c :: rest => c(x) match {
      case ret @ Return(_) => ret
      case Break => Break
      case _ => apply(x)(rest: _*) // fallthrough
    }
  }
  case class Case[A, B](value: A, f: A => ReturnBox[B], isDefault: Boolean = false) {
    def matches(x: A): Boolean = isDefault || x == value
    def apply(x: A): ReturnBox[B] = f(x)
  }
  object DefaultCase {
    // A default case matches every value; the placeholder value is never read.
    def apply[A, B](f: A => ReturnBox[B]): Case[A, B] =
      Case(null.asInstanceOf[A], f, isDefault = true)
  }
}
class Test
{
import Switch._
  def go(x: String): String =
    Switch(x)(
      Case("foo", (s: String) => { println(s); Continue }), // prints, then falls through
      Case("bar", (s: String) => Return(s + s)),
      DefaultCase((s: String) => Return(s))
    ).value
}
|
mbana/scalify
|
doc/Switch.scala
|
Scala
|
isc
| 1,476
|
package demo
package components
package materialui
import chandu0101.scalajs.react.components.fascades.LatLng
import chandu0101.scalajs.react.components.materialui.{MuiDatePicker, MuiDatePickerMode}
import japgolly.scalajs.react.ReactComponentB
import japgolly.scalajs.react.vdom.prefix_<^._
object MuiDatePickerDemo {
val code =
"""
| MuiDatePicker(hintText = "Protrait Dialog"),
| MuiDatePicker(hintText = "Landscape Dialog",mode = MuiDatePickerMode.LANDSCAPE)
|
""".stripMargin
val component = ReactComponentB[Unit]("MuiDatePickerDemo")
.render(P => {
<.div(
CodeExample(code,"MuiDatePicker")(
MuiDatePicker(hintText = "Protrait Dialog")(),
MuiDatePicker(hintText = "Landscape Dialog",mode = MuiDatePickerMode.LANDSCAPE)()
)
)
}).buildU
lazy val latlng = LatLng(16.3008,80.4428)
def apply() = component()
}
|
tpdi/scalajs-react-components
|
demo/src/main/scala/demo/components/materialui/MuiDatePickerDemo.scala
|
Scala
|
apache-2.0
| 889
|
package springboard.fpscala.ds
import scala.annotation.tailrec
sealed trait List[+T]
case object Nil extends List[Nothing]
case class Cons[+A](h: A, t: List[A]) extends List[A]
object List
{
def sum(ints: List[Int]): Int = ints match {
case Nil => 0
case Cons(x,xs) => x + sum(xs)
}
def product(ds: List[Double]): Double = ds match {
case Nil => 1.0
case Cons(0.0, _) => 0.0
case Cons(x,xs) => x * product(xs)
}
def apply[A](as: A*): List[A] = {
if (as.isEmpty) Nil
else Cons(as.head, apply(as.tail: _*))
}
def tail[A](ls: List[A]): List[A] = ls match {
case Nil => Nil
case Cons(_, Nil) => Nil
case Cons(_, xs) => xs
}
def setHead[A](h: A, ls: List[A]): List[A] = ls match {
case Nil => Nil
case Cons(_, xs) => Cons(h, xs)
}
def drop[A](c: Int, ls: List[A]): List[A] = {
@tailrec
def _drop(_c: Int, _ls: List[A]): List[A] = _c match {
case _ if c == _c => _ls
case _ => _drop(_c + 1, tail(_ls))
}
_drop(0, ls)
}
def dropWhile[A](ls: List[A], p: A => Boolean): List[A] = {
@tailrec
def _drop(_ls: List[A]): List[A] = _ls match {
case Nil => Nil
case Cons(h, t) if p(h) => _drop(t)
case _ => _ls
}
_drop(ls)
}
  // Returns all elements except the last one. Not tail-recursive: each step
  // copies one Cons cell, so it uses O(n) stack.
  def init[A](ls: List[A]): List[A] = ls match {
    case Nil => Nil
    case Cons(_, Nil) => Nil
    case Cons(h, t) => Cons(h, init(t))
  }
def main(args: Array[String]): Unit = {
val one = Cons(5, Nil)
val two = Cons(10, one)
// println(tail(empty))
// println(tail(one))
// println(tail(two))
//
// println(setHead(20, Nil))
// println(setHead(20, one))
// println(setHead(20, two))
println(drop(1, two))
println(drop(2, two))
println(dropWhile(two, (i: Int) => i > 5))
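    // Exercise init (all but the last element): prints Cons(10,Nil)
    println(init(two))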
}
}
|
defpearlpilot/webcrawler
|
app/springboard/fpscala/ds/List.scala
|
Scala
|
gpl-3.0
| 1,827
|
package com.arcusys.valamis.lesson.scorm.model.manifest
/**Rollup rule action*/
object RollupAction extends Enumeration {
type RollupAction = Value
/** Rollup satisfied status */
val Satisfied = Value("satisfied")
/** Rollup not satisfied status */
val NotSatisfied = Value("notSatisfied")
/** Rollup completed status */
val Completed = Value("completed")
/** Rollup incomplete status */
val Incomplete = Value("incomplete")
}
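// Usage sketch: Enumeration values round-trip through their string ids.
object RollupActionExample {
  def main(args: Array[String]): Unit = {
    assert(RollupAction.withName("satisfied") == RollupAction.Satisfied)
    assert(RollupAction.Completed.toString == "completed")
  }
}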
|
ViLPy/Valamis
|
valamis-scorm-lesson/src/main/scala/com/arcusys/valamis/lesson/scorm/model/manifest/RollupAction.scala
|
Scala
|
lgpl-3.0
| 446
|
/*
* Copyright (C) 2013 Alcatel-Lucent.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
* Licensed to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package molecule
package utils
// Following trick proposed by Jason Zaugg and Paul Philips is used to
// disambiguate overloaded methods whose type parameters are lost
// because of type erasure.
// http://scala-programming-language.1934581.n4.nabble.com/disambiguation-of-double-definition-resulting-from-generic-type-erasure-td2327664.html
//
// To use like this.
//
//scala> object overload {
// | def foo[_ : __](a: List[Int]) = 0
// | def foo[_ : __ : __](a: List[String]) = 0
// | }
//defined module overload
//
class __[_]
object __ {
private[this] val ___ = new __[Any]
implicit def make__[T] = ___.asInstanceOf[__[T]]
}
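// The trick in action (mirrors the REPL transcript above; object name
// illustrative): each context bound adds an implicit parameter, so the two
// overloads no longer erase to identical signatures.
object OverloadExample {
  def foo[_ : __](a: List[Int]) = 0
  def foo[_ : __ : __](a: List[String]) = 1
  def demo: (Int, Int) = (foo(List(1)), foo(List("a")))
}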
|
molecule-labs/molecule
|
molecule-core/src/main/scala/molecule/utils/Overload.scala
|
Scala
|
apache-2.0
| 1,375
|
package com.github.mdr.mash.ns.core
import com.github.mdr.mash.functions.{ BoundParams, MashFunction, Parameter, ParameterModel }
import com.github.mdr.mash.runtime.MashValue
object TapFunction extends MashFunction("core.tap") {
object Params {
val F = Parameter(
nameOpt = Some("f"),
summaryOpt = Some("Function to apply to the item"))
val Value = Parameter(
nameOpt = Some("value"),
summaryOpt = Some("Value to tap"))
}
import Params._
val params = ParameterModel(F, Value)
def call(boundParams: BoundParams): MashValue = {
val f = boundParams.validateFunction(F)
val value = boundParams(Value)
f.apply(value)
value
}
override def typeInferenceStrategy = Boolean
override def summaryOpt = Some("Apply a function to a value, ignore the result, and return the original value.")
}
|
mdr/mash
|
src/main/scala/com/github/mdr/mash/ns/core/TapFunction.scala
|
Scala
|
mit
| 850
|
import edu.uta.diql._
import com.twitter.scalding._
object Test extends ExecutionApp {
case class Customer ( name: String, cid: Int, account: Float )
case class Order ( oid: Int, cid: Int, price: Float )
explain(true)
def job: Execution[Unit]
= Execution.getArgs.flatMap {
case args
=> val CF = args("CF")
val OF = args("OF")
val output_file = args("out")
val customers = TypedPipe.from(TextLine(CF)).map{ line => line.split(",")
match { case Array(a,b,c) => Customer(a,b.toInt,c.toFloat) } }
val orders = TypedPipe.from(TextLine(OF)).map{ line => line.split(",")
match { case Array(a,b,c) => Order(a.toInt,b.toInt,c.toFloat) } }
q("""
select ( k, avg/c.account )
from c <- customers
where c.account < +/(select o.price from o <- orders where o.cid == c.cid
&& count/(select d from d <- customers where o.cid == d.cid) > 1)
group by k: c.account % 10
""").writeExecution(TypedTsv("out"))
}
override def main ( args: Array[String] ) {
val t: Long = System.currentTimeMillis()
super.main(args)
println("**** DIQL Scalding run time: "+(System.currentTimeMillis()-t)/1000.0+" secs")
}
}
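// Launch sketch (Scalding ExecutionApp; jar name and mode flag are
// illustrative): the --CF/--OF/--out flags feed the args read in `job` above.
//   hadoop jar diql-benchmarks.jar Test --hdfs --CF customers.csv --OF orders.csv --out results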
|
fegaras/DIQL
|
benchmarks/NestedScalding2.scala
|
Scala
|
apache-2.0
| 1,451
|
package grammarcomp
package engine
import grammar.utils._
import grammar._
import java.io._
import java.lang.management._
import scala.collection.JavaConversions._
import scala.collection.mutable.ListBuffer
import grammar.EBNFGrammar._
import grammar.CFGrammar._
object GrammarToCFGA {
def grammarToCFGA(g: Grammar[String], filename: String) {
    // Build the grammar string: one block per nonterminal, one ";"-terminated
    // line per rule, with terminals quoted.
    val st = g.start
    val nts = g.start +: g.nonTerminals.filterNot(_ == st)
    val gramstr = nts.foldLeft("") { (acc, nt) =>
      acc + nt +
        g.nontermToRules(nt).foldLeft("") { (acc, rl) =>
          acc + " : " + rl.rightSide.foldLeft("") {
            case (acc, t: Terminal[String]) => acc + " \"" + t + "\""
            case (acc, nt: Nonterminal) => acc + " " + nt
          } + ";\n"
        } + "\n"
    }
val pw = new PrintWriter(new FileOutputStream(new File(filename)))
pw.print(gramstr)
pw.close()
}
}
|
epfl-lara/GrammarComparison
|
src/main/scala/grammarcomp/engine/GrammarToCFGA.scala
|
Scala
|
mit
| 954
|
/**
* FILE: Catalog
* Copyright (c) 2015 - 2018 GeoSpark Development Team
*
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.datasyslab.geosparksql.UDF
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry.FunctionBuilder
import org.apache.spark.sql.expressions.UserDefinedAggregateFunction
import org.apache.spark.sql.geosparksql.expressions._
import scala.reflect.runtime.{universe => ru}
object Catalog {
val expressions:Seq[FunctionBuilder] = Seq(
ST_PointFromText,
ST_PolygonFromText,
ST_LineStringFromText,
ST_GeomFromWKT,
ST_GeomFromWKB,
ST_Point,
ST_PolygonFromEnvelope,
ST_Contains,
ST_Intersects,
ST_Within,
ST_Distance,
ST_ConvexHull,
ST_Envelope,
ST_Length,
ST_Area,
ST_Centroid,
ST_Transform,
ST_Intersection,
ST_IsValid,
ST_PrecisionReduce
)
val aggregateExpressions:Seq[UserDefinedAggregateFunction] = Seq(
new ST_Union_Aggr,
new ST_Envelope_Aggr
)
}
|
zongsizhang/GeoSpark
|
sql/src/main/scala/org/datasyslab/geosparksql/UDF/Catalog.scala
|
Scala
|
mit
| 2,058
|
package se.uu.farmbio.vs.examples
import org.apache.spark.Logging
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import scopt.OptionParser
import se.uu.farmbio.vs.SBVSPipeline
import java.io.PrintWriter
import openeye.oedocking.OEDockMethod
import openeye.oedocking.OESearchResolution
/**
* @author laeeq
*/
object MoleculeCounter extends Logging {
case class Arglist(
master: String = null,
conformersFile: String = null)
def main(args: Array[String]) {
val defaultParams = Arglist()
val parser = new OptionParser[Arglist]("MoleculeCounter") {
head("Counts number of molecules in conformer file")
opt[String]("master")
.text("spark master")
.action((x, c) => c.copy(master = x))
arg[String]("<conformers-file>")
.required()
.text("path to input SDF conformers file")
.action((x, c) => c.copy(conformersFile = x))
}
parser.parse(args, defaultParams).map { params =>
run(params)
} getOrElse {
sys.exit(1)
}
System.exit(0)
}
def run(params: Arglist) {
//Init Spark
val conf = new SparkConf()
.setAppName("MoleculeCounter")
if (params.master != null) {
conf.setMaster(params.master)
}
val sc = new SparkContext(conf)
    val molCount = sc.textFile(params.conformersFile)
      .filter(_ == "$$$$")
      .count
    println(s"Number of molecules in this file: $molCount")
sc.stop()
}
}
|
laeeq80/spark-cpvs
|
vs.examples/src/main/scala/se/uu/farmbio/vs/examples/MoleculeCounter.scala
|
Scala
|
apache-2.0
| 1,512
|
package net.danielbrice.scalakata
import org.scalatest.{FlatSpec, Matchers}
class TreeFoldTest extends FlatSpec with Matchers {
import TreeFold._
"treeFold" should "preserve the order of the tree" in {
// given
val tree = Branch(Branch(Leaf("a"), Leaf("b")), Branch(Leaf("c"), Leaf("d")))
// when
val res = treeFold(tree, identity[String], (l: String, r: String) => s"$l$r")
// then
res shouldBe "abcd"
}
}
|
friedbrice/scala-kata
|
src/test/scala/net/danielbrice/scalakata/TreeFoldTest.scala
|
Scala
|
gpl-3.0
| 443
|
package net.sansa_stack.query.spark.ontop
import java.sql.{Connection, DriverManager, SQLException}
/**
* Creates an in-memory H2 JDBC connection.
*
* @author Lorenz Buehmann
*/
object JDBCConnection {
val logger = com.typesafe.scalalogging.Logger(JDBCConnection.getClass)
val JDBC_URL: String = "jdbc:h2:mem:sansaontopdb;DATABASE_TO_UPPER=FALSE"
val JDBC_USER: String = "sa"
val JDBC_PASSWORD: String = ""
var initialized: Boolean = false
lazy val connection: Connection = try {
// scalastyle:off classforname
Class.forName("org.h2.Driver")
// scalastyle:on classforname
val conn = DriverManager.getConnection(JDBC_URL, JDBC_USER, JDBC_PASSWORD)
initialized = true
conn
} catch {
case e: SQLException =>
logger.error("Error occurred when creating in-memory H2 database", e)
throw e
}
def close(): Unit = if (initialized) connection.close()
}
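// Usage sketch (table name illustrative): the lazy val defers driver loading
// and connection creation to first access, and close() is a no-op when no
// connection was ever opened.
object JDBCConnectionExample {
  def main(args: Array[String]): Unit = {
    val stmt = JDBCConnection.connection.createStatement()
    stmt.execute("CREATE TABLE example(id INT)")
    JDBCConnection.close()
  }
}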
|
SANSA-Stack/SANSA-RDF
|
sansa-query/sansa-query-spark/src/main/scala/net/sansa_stack/query/spark/ontop/JDBCConnection.scala
|
Scala
|
apache-2.0
| 913
|
package skinny.test
import java.io.ByteArrayOutputStream
import javax.servlet.{ ServletOutputStream, WriteListener }
class MockServletOutputStream extends ServletOutputStream {
private[this] val byteArrayOutputStream = new ByteArrayOutputStream()
override def write(i: Int) = byteArrayOutputStream.write(i)
override def isReady: Boolean = true
override def setWriteListener(writeListener: WriteListener): Unit = ???
override def toString = byteArrayOutputStream.toString
def toString(charset: String) = byteArrayOutputStream.toString(charset)
def toByteArray = byteArrayOutputStream.toByteArray
}
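// Usage sketch in a test (assertion values illustrative):
object MockServletOutputStreamExample {
  def main(args: Array[String]): Unit = {
    val out = new MockServletOutputStream
    out.write('o')
    out.write('k')
    assert(out.toString == "ok")
  }
}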
|
skinny-framework/skinny-framework
|
test/src/main/scala/skinny/test/MockServletOutputStream.scala
|
Scala
|
mit
| 620
|
package no.hib.dpf.text.scala.bridge;
import no.hib.dpf.text.scala.DPFTextCore
import no.hib.dpf.text.scala.editor.Editor
import no.hib.dpf.text.scala.output.EditorOutput
import no.hib.dpf.text.scala.editor.EditorProposal
import no.hib.dpf.text.scala.output.EclipseLog
import no.hib.dpf.text.scala.ct._
import no.hib.dpf.text.scala.ct.mutable._
import no.hib.dpf.text.scala.validation._
import no.hib.dpf.text.scala.bridge.JavaScalaConverter._
import scala.collection.mutable.{ Map => MMap }
import scala.collection.mutable.{ Set => MSet }
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.collection.mutable.ListBuffer
import no.hib.dpf.text.{ DPFConstants => JConstants }
import no.hib.dpf.text.tdpf.{ DpfId => JDpfId }
import no.hib.dpf.text.tdpf.{ RId => JRId }
import no.hib.dpf.text.tdpf.{ Specification => JSpecification }
import no.hib.dpf.text.tdpf.{ Element => JElement }
import no.hib.dpf.text.tdpf.{ Node => JNode }
import no.hib.dpf.text.tdpf.{ Property => JProperty }
import no.hib.dpf.text.tdpf.{ NodeSimple => JNodeSimple }
import no.hib.dpf.text.tdpf.{ Arrow => JArrow }
import no.hib.dpf.text.tdpf.{ DpfId => JId }
import no.hib.dpf.text.tdpf.{ DataType => JDataType }
import no.hib.dpf.text.tdpf.{ Signature => JSignature }
import no.hib.dpf.text.tdpf.{ Constraint => JConstraint }
import no.hib.dpf.text.util.{ Tuple => JTuple }
import no.hib.dpf.text.util.{ CommonUtil => JCommonUtil }
import org.eclipse.jface.dialogs.{ MessageDialog => JMessageDialog }
import java.util.{ Stack => JStack }
import java.util.{ Set => JSet }
import java.util.{ List => JList }
import java.util.{ LinkedList => JLList }
import java.util.{ Map => JMap }
import java.util.{ HashMap => JHashMap }
import java.io.File
import org.eclipse.core.resources.IFile
import org.eclipse.emf.ecore.EObject
/**
 * Used to bridge the "Java world" into the "Scala world", where the main data structures are case classes (in this application).
 * The process by which specifications are transferred into the "Scala world" may be improved one day.
 * The implementation started with a parser built on Scala's parser combinators, which has since been replaced by an XText-based parser.
 * That was a major refactoring which should have been more carefully engineered.
 */
object ScalaBridge extends EditorValidation {
  /**
   * Read a stack of Java specifications into the Scala world.
   * Returns a tuple: x = the graph as a list of strings, y = tuples pairing each element that causes an error with its error message.
   */
def read(stack: JStack[JTuple[String, JSpecification]], path: String): JTuple[JList[String], JList[JTuple[EObject, String]]] = {
try {
if (stack.empty()) {
sys.error("No Specification!");
}
//init:
Editor.initParser();
var rSpec: JTuple[S, JList[JTuple[EObject, String]]] = null;
var top: JTuple[String, JSpecification] = null;
while (!stack.empty()) {
top = stack.pop();
try {
rSpec = read(top.x, path, top.y);
} catch {
case ex: Throwable => EclipseLog.log(ex);
}
}
//Create FKey:
val specification: JSpecification = top.y
var signatureList: List[FKey] = Nil;
for (l <- specification.getAtomicConstraints()) {
signatureList = FKey(
l.getConstraintSemantic().getId(), path,
l.getConstraintSemantic().getVersionName()) :: signatureList;
}
signatureList = signatureList.reverse;
val info = Editor.getSpecInfo(top.x, path)
//Get further information:
val asPlainGraph: Boolean = (null != specification.getOutput()
&& specification.getOutput().contains("PLAIN"));
val javaList = EditorOutput.serializeSpecification(rSpec.x, info, asPlainGraph).asJava
new JTuple(javaList, rSpec.y);
} catch {
case ex: Throwable => EclipseLog.log(ex); throw ex;
}
}
  /**
   * Read a Java signature into the Scala world.
   * Used to serialize the signature with missing Ids filled in.
   */
def read(signatureName: String, path: String, js: JSignature): JTuple[JList[String], JList[JTuple[EObject, String]]] = {
def updateSpecificationCache(): Option[String] = {
try {
//Specifications may need an update
//Remove required specifications from cache so that they are
//reread next time:
var removeSpecs: List[FKey] = Nil;
for (info <- Editor.getSpecInfos) {
if (info.key.path == path) {
for (sigFKey <- info.signatures) {
//Update constraint set:
if (sigFKey.name == signatureName) {
removeSpecs = info.key :: removeSpecs;
}
}
}
}
//Update Specs:
for (k <- removeSpecs) {
Editor.removeSpec(k)
}
} catch {
case ex: Throwable => EclipseLog.log(ex)
}
None
}
try {
if (null == js) {
sys.error("No Signature!");
}
val cs = readConstraintSemantics(js, signatureName, path)
//Specifications may need an update
//Remove required specifications from cache so that they are
//reread next time:
updateSpecificationCache();
val fKey = Editor.getSignatureFKey(signatureName, path)
val ls = EditorOutput.serializeSignature(cs._1.values.toList, fKey);
new JTuple(ls, cs._3);
} catch {
case ex: Throwable => EclipseLog.log(ex); throw ex;
}
}
  /**
   * Read a specification into the Scala world.
   * The meta-specification has to be read first.
   * Note: no recursive meta-specifications are currently supported besides the DPF specification, which is recursively defined.
   */
def read(name: String, path: String, s: JSpecification): JTuple[S, JList[JTuple[EObject, String]]] = {
try {
val errors: JList[JTuple[EObject, String]] = new JLList[JTuple[EObject, String]];
Editor.GCtx.next = s.getVersion().getNextNumber();
var elements: List[Tuple2[RParseResult, EObject]] = Nil;
//
//Read Graph:
//
val mmGraphName = s.getType().getId();
Editor.createGraph(name, path, mmGraphName);
if (null != s.getGraph() && null != s.getGraph().getElements()) //invariant
for (e <- s.getGraph().getElements().iterator()) {
e match {
case n: JNode =>
//Consume Node:
val curNode = convertJNode(n);
elements = (curNode, n) :: elements
//Consume Node-inherit:
if (null != n.getInh()) {
for (i <- n.getInh()) {
//Create node:
val curSimpleNode = convertJNodeSimple(i);
elements = (curSimpleNode, i) :: elements
//Add inheritance:
curNode.element match {
case Some(rn: RNode) =>
curSimpleNode.element match {
case Some(superRn: RNode) => elements = (convertRInheritanceRelation(rn, superRn), i) :: elements;
case _ => //do nothing
}
case _ => //do nothing
}
}
}
//Consume Node-body:
if (null != n.getProperties()) {
for (p <- n.getProperties()) {
elements = (convertJProperty(n, p), p) :: elements;
}
}
case a: JArrow =>
//Consume Arrow:
elements = (convertJArrow(a), a) :: elements
}
}
def createNodeAndHandleError(e: EObject, rnO: Option[RElement], withId: Boolean) {
rnO match {
case Some(rn @ RNode(_, Some(_), _)) =>
if (withId) {
createNode(rn) match {
case ParseResult(None, Some(m2)) => errors.add(new JTuple(e, m2))
case _ => //do nothing
}
}
case Some(rn @ RNode(_, None, _)) =>
if (!withId) {
createNode(rn) match {
case ParseResult(None, Some(m2)) => errors.add(new JTuple(e, m2))
case _ => //do nothing
}
}
case _ => /* do nothing */
}
}
//Add nodes with id
for (e <- elements) {
e._1 match {
case RParseResult(rnO @ Some(RNode(_, _, _)), _) => createNodeAndHandleError(e._2, rnO, true)
case RParseResult(None, Some(m)) => errors.add(new JTuple(e._2, m))
case RParseResult(Some(ra @ RArrow(_, _, srO, tgO, _, _)), _) =>
createNodeAndHandleError(e._2, Some(srO), true)
createNodeAndHandleError(e._2, tgO, true)
case _ => //do nothing
}
}
//Add nodes without id
for (e <- elements) {
e._1 match {
case RParseResult(rnO @ Some(RNode(_, _, _)), _) => createNodeAndHandleError(e._2, rnO, false)
case RParseResult(None, Some(m)) => errors.add(new JTuple(e._2, m))
case RParseResult(Some(ra @ RArrow(_, _, srO, tgO, _, _)), _) =>
createNodeAndHandleError(e._2, Some(srO), false)
createNodeAndHandleError(e._2, tgO, false)
case _ => //do nothing
}
}
//Add arrows with id
for (e <- elements) {
e._1 match {
case RParseResult(Some(ra @ RArrow(_, Some(_), _, _, _, _)), _) =>
createArrow(ra) match {
case ParseResult(None, Some(m2)) => errors.add(new JTuple(e._2, m2))
case _ => //do nothing
}
case RParseResult(None, Some(m)) => errors.add(new JTuple(e._2, m))
case _ => //do nothing
}
}
//Add arrows without id
for (e <- elements) {
e._1 match {
case RParseResult(Some(ra @ RArrow(_, None, _, _, _, _)), _) =>
createArrow(ra) match {
case ParseResult(None, Some(m2)) => errors.add(new JTuple(e._2, m2))
case _ => //do nothing
}
case RParseResult(None, Some(m)) => errors.add(new JTuple(e._2, m))
case _ => //do nothing
}
}
//Return Graph in List of StringBuffer:
val rGraph = Editor.curMGraph.immutable();
val cLists = readConstraints(s, path, errors)
//Save Spec:
val spec = S(rGraph, cLists)
var sName: List[FKey] = Nil;
for (l <- s.getAtomicConstraints()) {
sName = FKey(
l.getConstraintSemantic().getId(), path,
l.getConstraintSemantic().getVersionName()) :: sName;
}
val infoS = SInfo(name, path,
s.getVersion().getName(),
s.getVersion().getNumber(),
Editor.GCtx.next,
s.getType().getId(),
s.getType().getVersionName(),
sName.reverse)
//Check meta-specification version:
val vName = s.getType().getVersionName()
checkSpecVersion(infoS) match {
case Some(errorMsg) => errors += new JTuple[EObject, String](s.getType(), errorMsg)
case None => /* do nothing */
}
Editor.saveSpec(infoS, spec);
new JTuple(spec, errors)
} catch {
case e: Throwable => EclipseLog.log(e); throw e;
}
}
  /**
   * Helper method to convert an XText-generated data structure into the form
   * used by the earlier Scala parser-combinators implementation (since replaced).
   */
private def convertJNode(a: JNode): RParseResult = {
val id = a.getId();
val t = a.getType();
createRNode(id.getName(), jId2RId(id), t.getName(), jId2RId(t));
}
  /**
   * Helper method to convert an XText-generated data structure into the form
   * used by the earlier Scala parser-combinators implementation (since replaced).
   */
private def convertJNodeSimple(a: JNodeSimple): RParseResult = {
val id = a.getId();
val t = a.getType();
createRNode(id.getName(), jId2RId(id), t.getName(), jId2RId(t));
}
  /**
   * Helper method to convert an XText-generated data structure into the form
   * used by the earlier Scala parser-combinators implementation (since replaced).
   */
private def convertJArrow(a: JArrow): RParseResult = {
//
//Find out which kind of arrow:
//
val id = a.getId();
val t = a.getType();
//Src Node:
val sr = convertJNodeSimple(a.getSr()) match {
case rs @ RParseResult(None, Some(m)) => return rs;
case RParseResult(Some(rn: RNode), None) => rn
case _ => sys.error("convertJArrow -> Programming Error 1")
}
if (null != a.getTgNode()) {
val tg = convertJNodeSimple(a.getTgNode()) match {
case rs @ RParseResult(None, Some(m)) => return rs;
case RParseResult(Some(rn: RNode), None) => rn
case _ => sys.error("convertJArrow -> Programming Error 2")
}
//Inheritance:
if (null != id) {
createRArrow(id.getName(), jId2RId(id), sr, tg, t.getName(), jId2RId(t))
} else {
convertRInheritanceRelation(sr, tg)
}
} else if (null != a.getTgValue()) {
//Value:
if (null != a.getTgValue().getValue()) {
createRAttributeValue(id.getName(), jId2RId(id), sr, a.getTgValue().getValue(), false, t.getName(), jId2RId(t))
//Variable for Value
} else if (null != a.getTgValue().getVariableId()) {
createRAttributeValue(id.getName(), jId2RId(id), sr, a.getTgValue().getVariableId(), true, t.getName(), jId2RId(t))
} else {
sys.error("Programming error")
}
} else {
      //It is an AttributeType:
convertRAttributeType(id.getName(), jId2RId(id), sr, jDT2TyeP(a.getTgDataType()), "*")
}
}
  /**
   * Helper method to convert an XText-generated data structure into the form
   * used by the earlier Scala parser-combinators implementation (since replaced).
   */
private def convertJProperty(root: JNode, a: JProperty): RParseResult = {
//
//Find out which kind of arrow:
//
val id = a.getId();
val t = a.getType();
//Src Node:
val sr = convertJNode(root) match {
case rs @ RParseResult(None, Some(m)) => return rs;
case RParseResult(Some(rn: RNode), None) => rn
case _ => sys.error("convertJProperty -> Programming Error 1")
}
if (null != a.getTgNode()) {
val tg = convertJNodeSimple(a.getTgNode()) match {
case rs @ RParseResult(None, Some(m)) => return rs;
case RParseResult(Some(rn: RNode), None) => rn
case _ => sys.error("convertJProperty -> Programming Error")
}
createRArrow(id.getName(), jId2RId(id), sr, tg, t.getName(), jId2RId(t))
} else if (null != a.getTgValue()) {
//Value:
if (null != a.getTgValue().getValue()) {
createRAttributeValue(id.getName(), jId2RId(id), sr, a.getTgValue().getValue(), false, t.getName(), jId2RId(t))
//Variable for Value
} else if (null != a.getTgValue().getVariableId()) {
createRAttributeValue(id.getName(), jId2RId(id), sr, a.getTgValue().getVariableId(), true, t.getName(), jId2RId(t))
} else {
sys.error("Programming error")
}
} else {
      //It is an AttributeType:
convertRAttributeType(id.getName(), jId2RId(id), sr, jDT2TyeP(a.getTgDataType()), "*")
}
}
  /**
   * Check a specification against the constraints defined in its meta-specification.
   * Returns errors mapped to element ids in a String representation, plus further global errors that may have occurred.
   */
def checkAllConstraints(s: JSpecification, path: String): JTuple[JMap[String, String], JList[String]] = {
val rs1 = new JHashMap[String, String]();
val rs2 = new JLList[String]();
try {
//Init:
Editor.initParser();
//Set curTS:
val name = s.getType().getId();
Editor.curTS = Editor.getSpec(name, path)
val tempName = ".DummyName";
read(tempName, path, s)
val is = IS(Editor.curTS, Editor.getSpec(tempName, path).g)
//Validate: //Maybe validation should not be a singleton
if (!Editor.curTS.cs.isEmpty) {
val validator = Editor.curTS.cs.head.validator
validator.tLanguage match {
case "KodKod" => KodKodValidation.synchronized {
println("Verfiy with KodKod")
//Empty errors:
KodKodValidation.Errors.init();
//Validate:
KodKodValidation.validate(is);
//Collect errors:
for (e <- KodKodValidation.Errors.getErrors) {
e._1 match {
case rid @ RId(_) => rs1.put(rid.vToString, e._2);
case _ => rs1.put(e._1.formatV, e._2);
}
}
//Collect errors:
for (e <- KodKodValidation.Errors.getGlobalErrors) {
rs2.add(e);
}
}
case _ => OCLValidation.synchronized {
println("Verfiy with OCL")
//Empty errors:
OCLValidation.Errors.init();
//Validate:
OCLValidation.validate(is);
//Collect errors:
for (e <- OCLValidation.Errors.getErrors) {
e._1 match {
case rid @ RId(_) => rs1.put(rid.vToString, e._2);
case _ => rs1.put(e._1.formatV, e._2);
}
}
//Collect errors:
for (e <- OCLValidation.Errors.getGlobalErrors) {
rs2.add(e);
}
}
}
}
} catch {
case ex: Throwable =>
rs2.add("checkConstraints() " + ex.getMessage());
ex.printStackTrace();
}
new JTuple(rs1, rs2);
}
  /**
   * Read constraint semantics, i.e. a Signature.
   * Returns a map of validators and a list of errors that may have occurred.
   * Furthermore, the counter for the next new Id is returned.
   * (The counter may be removed, since this information is also saved by the call to "Editor.saveSignatureFKey".)
   */
private def readConstraintSemantics(cs: JSignature, sName: String, path: String): (Map[Id, Validator], Int, JList[JTuple[EObject, String]]) = {
try {
var nId = cs.getVersion().getNextNumber()
def getNewId(): RId = { val newId = RId(Set(nId)); nId += 1; newId }
//Vals:
val rs = MMap[Id, Validator]();
val errors = new JLList[JTuple[EObject, String]];
for (jv <- cs.getValidators()) {
try {
val idO = jId2RId(jv.getId());
val n = jv.getId().getName();
var rList: List[RElement] = Nil;
var errorFound = false;
for (e <- jv.getArity()) {
e match {
case n: JNodeSimple =>
//Consume Node:
convertJNodeSimple(n) match {
case rs @ RParseResult(None, Some(m)) => errors.add(new JTuple(n, m)); errorFound = true;
case RParseResult(Some(rn: RNode), None) => rList = rn :: rList;
case _ => EclipseLog.log("readConstraintSemantics(cs:JSignature) Error 1")
}
case a: JArrow =>
//Consume Arrow:
convertJArrow(a) match {
case rs @ RParseResult(None, Some(m)) => errors.add(new JTuple(a, m)); errorFound = true;
case RParseResult(Some(ra: RArrow), None) => rList = ra :: rList;
case _ => EclipseLog.log("readConstraintSemantics(cs:JSignature) Error 2")
}
case _ => EclipseLog.log("readConstraintSemantics(cs:JSignature) Error 3")
}
}
//Resolve new or existing rid
val id = idO match {
case Some(exId) => exId
case None => getNewId()
}
createValidator(id, n, jv.getParameter_vars().toList, rList.reverse, cs.getInput().getName(), jv.getOcl(), jv.getErrorMsg()) match {
case (Some(validator), None) => rs += (validator.id -> validator)
case (None, Some(msg)) => errors.add(new JTuple(jv.getId(), msg));
case _ => EclipseLog.log("readConstraintSemantics(cs:JSignature) Error 4")
}
} catch {
case ex: Throwable => //ex.printStackTrace();
EclipseLog.log(ex);
errors.add(new JTuple(jv.getId(), ex.getMessage()));
}
}
//Save SignatureKey
try {
Editor.saveSignatureFKey(sName, path,
cs.getVersion().getName(),
cs.getVersion().getNumber(), nId)
} catch {
case t: Throwable => EclipseLog.log(t);
}
(rs.toMap, nId, errors); //Constraints, next constraintId, errors
} catch {
case t: Throwable =>
EclipseLog.log(t);
throw t;
}
}
/**
* Read all constraint sections (there can be more than one) of a specification into Scala case classes.
*/
private def readConstraints(s: JSpecification, path: String, errors: JList[JTuple[EObject, String]]): List[Set[Constraint]] = {
//
//Read constraint semantics:
//
var validatorList: List[Map[Id, Validator]] = Nil;
for (sT <- DPFTextCore.readSignatures(s, JCommonUtil.iResourceFromDirectory(path))) {
try {
validatorList = readConstraintSemantics(sT.y, sT.x, path)._1 :: validatorList
} catch {
case ex: Throwable =>
val msg = "Signature could not be read! " + ex.getMessage();
errors.add(new JTuple(s, msg));
println(msg)
//ex.printStackTrace();
}
}
validatorList = validatorList.reverse
//Check versions of constraint semantics
for (l <- s.getAtomicConstraints()) {
val key = FKey(l.getConstraintSemantic().getId(), path, l.getConstraintSemantic().getVersionName())
checkSignatureVersion(key) match {
case Some(errorMsg) => errors += new JTuple[EObject, String](l.getConstraintSemantic(), errorMsg)
case None => /* do nothing */
}
}
//
//Read constraints + add semantics:
//(also check if arrow with type inheritance are correctly read)
//
var rs: List[Set[Constraint]] = Nil;
var i = 0;
for (l <- s.getAtomicConstraints()) { //invariant
val cSet = MSet[Constraint]();
val validator = validatorList(i);
i += 1;
for (c <- l.getConstraints()) {
var arity_d: List[Element] = Nil;
try {
var errorFound = false;
for (e <- c.getArity_d()) {
e match {
case n: JNodeSimple =>
//
//Consume Node:
//
convertJNodeSimple(n) match {
case rs @ RParseResult(None, Some(m)) => errors.add(new JTuple(e, m)); errorFound = true;
case RParseResult(Some(rn: RNode), None) => findNode(rn) match {
case (Some(n), None) => arity_d = n :: arity_d;
case (None, Some(m)) => errors.add(new JTuple(e, m)); errorFound = true;
case _ => sys.error("Constraint Programming Error 1")
}
case _ => sys.error("Constraint Programming Error 2")
}
case a: JArrow =>
//
//Consume Arrow:
//
convertJArrow(a) match {
case rs @ RParseResult(None, Some(m)) => errors.add(new JTuple(e, m)); errorFound = true;
case RParseResult(Some(ra: RArrow), None) =>
//Inheritance arrow:
if (ra.t == TypeArrow.TInheritance) {
checkInheritanceArrow(ra) match {
case ParseResult(Some(a), None) => arity_d = a :: arity_d;
case ParseResult(None, Some(m)) => errors.add(new JTuple(e, m)); errorFound = true;
case _ => sys.error("Constraint Programming Error 3")
}
} else {
//Usual arrow:
findArrow(ra) match {
case (Some(a), None) => arity_d = a :: arity_d;
case (None, Some(m)) => errors.add(new JTuple(e, m)); errorFound = true;
case _ => sys.error("Constraint Programming Error 4")
}
}
case _ => sys.error("Constraint Programming Error 5")
}
case _ => sys.error("Constraint Programming Error 6")
}
}
if (!errorFound) {
jId2RId(c.getType()) match {
case Some(id) =>
//
//Add validator by id:
//
try {
val newC = Constraint(List(c.getParameter(): _*), arity_d.reverse, validator(id))
val msgO = checkConstraintArity(newC);
msgO match {
case None => cSet += newC
case Some(msg) => errors.add(new JTuple(c, msg));
}
} catch {
case ex: Throwable => errors.add(new JTuple(c, "Constraint not found!"));
}
case None =>
//
//Find validator by name:
//
validator.values.find(_.n == c.getType().getName()) match {
case Some(v) =>
//Add found validator by name:
try {
val newC = Constraint(List(c.getParameter(): _*), arity_d.reverse, v)
val msgO = checkConstraintArity(newC);
msgO match {
case None => cSet += newC
case Some(msg) => errors.add(new JTuple(c, msg));
}
} catch {
case ex: Throwable => errors.add(new JTuple(c, "Constraint not found!"));
}
case None =>
errors.add(new JTuple(c, "Constraint does not have a validator!"));
}
}
} else {
EclipseLog.log("1. Warning: Constraint not added:>" + c.getParameter())
}
} catch {
case ex: Throwable =>
EclipseLog.log("2. Warning: Constraint not added:>" + ex.getMessage()) /* do nothing */
EclipseLog.log(ex);
}
}
rs = cSet.toSet :: rs;
}
rs.reverse;
}
}
|
fmantz/DPF_Text
|
no.hib.dpf.text/src_scala/no/hib/dpf/text/scala/bridge/ScalaBridge.scala
|
Scala
|
epl-1.0
| 26,568
|
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.async
package run
package toughtype
import language.{reflectiveCalls, postfixOps}
import scala.concurrent._
import scala.concurrent.duration._
import scala.async.Async._
import org.junit.{Assert, Test}
import scala.async.internal.AsyncId
object ToughTypeObject {
import ExecutionContext.Implicits.global
class Inner
def m2 = async[(List[_], ToughTypeObject.Inner)] {
val y = await(Future[List[_]](Nil))
val z = await(Future[Inner](new Inner))
(y, z)
}
}
class ToughTypeSpec {
  @Test def `propagates tough types`(): Unit = {
val fut = ToughTypeObject.m2
val res: (List[_], scala.async.run.toughtype.ToughTypeObject.Inner) = Await.result(fut, 2 seconds)
res._1 mustBe (Nil)
}
@Test def patternMatchingPartialFunction(): Unit = {
import AsyncId.{await, async}
async {
await(1)
val a = await(1)
val f = { case x => x + a }: PartialFunction[Int, Int]
await(f(2))
} mustBe 3
}
@Test def patternMatchingPartialFunctionNested(): Unit = {
import AsyncId.{await, async}
async {
await(1)
val neg1 = -1
val a = await(1)
val f = { case x => ({case x => neg1 * x}: PartialFunction[Int, Int])(x + a) }: PartialFunction[Int, Int]
await(f(2))
} mustBe -3
}
@Test def patternMatchingFunction(): Unit = {
import AsyncId.{await, async}
async {
await(1)
val a = await(1)
val f = { case x => x + a }: Function[Int, Int]
await(f(2))
} mustBe 3
}
@Test def existentialBindIssue19(): Unit = {
import AsyncId.{await, async}
def m7(a: Any) = async {
a match {
case s: Seq[_] =>
val x = s.size
var ss = s
ss = s
await(x)
}
}
m7(Nil) mustBe 0
}
@Test def existentialBind2Issue19(): Unit = {
import scala.async.Async._, scala.concurrent.ExecutionContext.Implicits.global
def conjure[T]: T = null.asInstanceOf[T]
def m3 = async {
val p: List[Option[_]] = conjure[List[Option[_]]]
await(Future(1))
}
def m4 = async {
await(Future[List[_]](Nil))
}
}
@Test def singletonTypeIssue17(): Unit = {
import AsyncId.{async, await}
class A { class B }
async {
val a = new A
def foo(b: a.B) = 0
await(foo(new a.B))
}
}
@Test def existentialMatch(): Unit = {
import AsyncId.{async, await}
trait Container[+A]
case class ContainerImpl[A](value: A) extends Container[A]
def foo: Container[_] = async {
val a: Any = List(1)
a match {
case buf: Seq[_] =>
val foo = await(5)
val e0 = buf(0)
ContainerImpl(e0)
}
}
foo
}
@Test def existentialIfElse0(): Unit = {
import AsyncId.{async, await}
trait Container[+A]
case class ContainerImpl[A](value: A) extends Container[A]
def foo: Container[_] = async {
val a: Any = List(1)
if (true) {
val buf: Seq[_] = List(1)
val foo = await(5)
val e0 = buf(0)
ContainerImpl(e0)
} else ???
}
foo
}
// This test was failing when lifting `def r` with:
// symbol value m#10864 does not exist in r$1
//
// We generated:
//
// private[this] def r$1#5727[A#5728 >: Nothing#157 <: Any#156](m#5731: Foo#2349[A#5728]): Unit#208 = Bippy#2352.this.bar#5532({
// m#5730;
// ()
// });
//
// Notice the incorrect reference to `m`.
//
// We compensated in `Lifter` by copying `ValDef` parameter symbols directly across.
//
// Turns out the behaviour stems from `thisMethodType` in `Namers`, which treats type parameter skolem symbols.
@Test def nestedMethodWithInconsistencyTreeAndInfoParamSymbols(): Unit = {
import language.{reflectiveCalls, postfixOps}
import scala.concurrent.{Future, ExecutionContext, Await}
import scala.concurrent.duration._
import scala.async.Async.{async, await}
import scala.async.internal.AsyncId
class Foo[A]
object Bippy {
import ExecutionContext.Implicits.global
def bar(f: => Unit): Unit = f
def quux: Future[String] = ???
def foo = async {
def r[A](m: Foo[A])(n: A) = {
bar {
locally(m)
locally(n)
identity[A] _
}
}
await(quux)
r(new Foo[String])("")
}
}
Bippy
}
@Test
def ticket63(): Unit = {
import scala.async.Async._
import scala.concurrent.{ ExecutionContext, Future }
object SomeExecutionContext extends ExecutionContext {
def reportFailure(t: Throwable): Unit = ???
def execute(runnable: Runnable): Unit = ???
}
trait FunDep[W, S, R] {
def method(w: W, s: S): Future[R]
}
object FunDep {
implicit def `Something to do with List`[W, S, R](implicit funDep: FunDep[W, S, R]) =
new FunDep[W, List[S], W] {
def method(w: W, l: List[S]) = async {
val it = l.iterator
while (it.hasNext) {
await(funDep.method(w, it.next()))
}
w
}(SomeExecutionContext)
}
}
}
@Test def ticket66Nothing(): Unit = {
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
val e = new Exception()
val f: Future[Nothing] = Future.failed(e)
val f1 = async {
await(f)
}
try {
Await.result(f1, 5.seconds)
} catch {
case `e` =>
}
}
@Test def ticket83ValueClass(): Unit = {
import scala.async.Async._
import scala.concurrent._, duration._, ExecutionContext.Implicits.global
val f = async {
val uid = new IntWrapper("foo")
await(Future(uid))
}
val result = Await.result(f, 5.seconds)
result mustEqual (new IntWrapper("foo"))
}
@Test def ticket86NestedValueClass(): Unit = {
import ExecutionContext.Implicits.global
val f = async {
val a = Future.successful(new IntWrapper("42"))
await(await(a).plusStr)
}
val result = Await.result(f, 5.seconds)
result mustEqual "42!"
}
@Test def ticket86MatchedValueClass(): Unit = {
import ExecutionContext.Implicits.global
def doAThing(param: IntWrapper) = Future(None)
val fut = async {
Option(new IntWrapper("value!")) match {
case Some(valueHolder) =>
await(doAThing(valueHolder))
case None =>
None
}
}
val result = Await.result(fut, 5.seconds)
result mustBe None
}
@Test def ticket86MatchedParameterizedValueClass(): Unit = {
import ExecutionContext.Implicits.global
def doAThing(param: ParamWrapper[String]) = Future(None)
val fut = async {
Option(new ParamWrapper("value!")) match {
case Some(valueHolder) =>
await(doAThing(valueHolder))
case None =>
None
}
}
val result = Await.result(fut, 5.seconds)
result mustBe None
}
@Test def ticket86PrivateValueClass(): Unit = {
import ExecutionContext.Implicits.global
def doAThing(param: PrivateWrapper) = Future(None)
val fut = async {
Option(PrivateWrapper.Instance) match {
case Some(valueHolder) =>
await(doAThing(valueHolder))
case None =>
None
}
}
val result = Await.result(fut, 5.seconds)
result mustBe None
}
@Test def awaitOfAbstractType(): Unit = {
import ExecutionContext.Implicits.global
def combine[A](a1: A, a2: A): A = a1
def combineAsync[A](a1: Future[A], a2: Future[A]) = async {
combine(await(a1), await(a2))
}
val fut = combineAsync(Future(1), Future(2))
val result = Await.result(fut, 5.seconds)
result mustEqual 1
}
// https://github.com/scala/async/issues/106
@Test def valueClassT106(): Unit = {
import scala.async.internal.AsyncId._
async {
"whatever value" match {
case _ =>
await("whatever return type")
new IntWrapper("value class matters")
}
"whatever return type"
}
}
}
class IntWrapper(val value: String) extends AnyVal {
def plusStr = Future.successful(value + "!")
}
class ParamWrapper[T](val value: T) extends AnyVal
class PrivateWrapper private (private val value: String) extends AnyVal
object PrivateWrapper {
def Instance = new PrivateWrapper("")
}
trait A
trait B
trait L[A2, B2 <: A2] {
def bar(a: Any, b: Any) = 0
}
|
scala/async
|
src/test/scala/scala/async/run/toughtype/ToughType.scala
|
Scala
|
bsd-3-clause
| 8,731
|
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.utils.tf.loaders
import com.intel.analytics.bigdl.tensor.Tensor
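// Loader spec for TensorFlow's "Lgamma" (element-wise log-gamma) op. The shared
// UnaryOpBaseSpec presumably drives the check, given only the op name and a
// random 4x32x32x3 float input tensor.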
class LgammaSpec extends UnaryOpBaseSpec {
override def getOpName: String = "Lgamma"
override def getInput: Tensor[_] = Tensor[Float](4, 32, 32, 3).rand()
}
|
yiheng/BigDL
|
spark/dl/src/test/scala/com/intel/analytics/bigdl/utils/tf/loaders/LgammaSpec.scala
|
Scala
|
apache-2.0
| 864
|
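// Exercises a type projection on an abstract type member (T#ArrayType) together
// with a covariant type parameter: the MySeq[Floats] below is upcast to
// MySeq[Undefined], yet `seq.array` still exposes the underlying Array[Float]
// through the bound ArrayType <: Array[_].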
object Test {
def main(args: Array[String]): Unit = {
val seq: MySeq[Undefined] = new MySeq[Floats](new Array[Float](10))
println(10 == seq.array.length)
}
}
sealed trait Undefined { type ArrayType <: Array[_] }
sealed trait Floats extends Undefined { type ArrayType = Array[Float] }
class MySeq[+T <: Undefined](val array: T#ArrayType)
|
yusuke2255/dotty
|
tests/pending/run/t3798.scala
|
Scala
|
bsd-3-clause
| 350
|
package yokohama.holdem
import akka.actor.{ Actor, ActorLogging, ActorRef, FSM, Props, Terminated }
import akka.contrib.pattern.ClusterSingletonProxy
import akka.routing.FromConfig
import akka.util.Timeout
import scala.concurrent.duration.FiniteDuration
/**
* A game engine organizes, creates, and manages games.
*/
object GameEngine {
sealed trait State
object State {
case object Pausing extends State
case object Running extends State
}
  case class GameData(game: Option[ActorRef] = None)
  val name: String = "game-engine"
  def props(askTimeout: Timeout, startGameInterval: FiniteDuration, maxPlayers: Int): Props =
    Props(new GameEngine(askTimeout, startGameInterval, maxPlayers))
}
class GameEngine(askTimeout: Timeout, startGameInterval: FiniteDuration, maxPlayers: Int)
  extends Actor with FSM[GameEngine.State, GameEngine.GameData] with ActorLogging with SettingsActor {
import GameEngine._
private val playerRepository =
context.actorOf(
ClusterSingletonProxy.props(s"/user/singleton/${PlayerRepository.name}", Some(PlayerRepository.name)), PlayerRepository.name)
startWith(State.Pausing, GameData())
when(State.Pausing, startGameInterval) {
case Event(StateTimeout, data) => goto(State.Running) using data.copy(game = Some(startGame()))
}
when(State.Running) {
case Event(Terminated(_), data) => goto(State.Pausing) using data.copy(game = None)
}
onTransition {
case _ -> State.Pausing => log.debug("Transitioning to pausing gamestate")
case _ -> State.Running => log.debug("Transitioning to running gamestate")
}
initialize()
private def startGame(): ActorRef = {
log.info("Starting a new poker game")
context.watch(createGame())
}
  protected def createGame(): ActorRef = {
    // Let Akka auto-generate a unique child name; a fixed literal name would
    // collide if a previous game actor had not been fully stopped yet.
    context.actorOf(Game.props())
  }
}
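// A minimal wiring sketch (an editor's assumption, not part of the original
// file): starting the engine inside an ActorSystem. The system name and the
// concrete timeout/interval values below are illustrative.
object GameEngineApp {
  import akka.actor.ActorSystem
  import scala.concurrent.duration._
  def main(args: Array[String]): Unit = {
    val system = ActorSystem("yokohama-holdem")
    // 5s ask timeout, a new game at most every 30s, nine seats per table.
    system.actorOf(GameEngine.props(Timeout(5.seconds), 30.seconds, maxPlayers = 9), GameEngine.name)
  }
}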
|
jeffusan/yokohama-holdem
|
src/main/scala/yokohama/holdem/GameEngine.scala
|
Scala
|
gpl-2.0
| 1,842
|
package com.ftchinese.jobs.common
/**
 * Connection settings for an Apple push (APNs) server: production and
 * development host/port pairs, left empty here for concrete configurations
 * to override.
 * Created by wanbo on 16/4/5.
 */
trait AppleServer {
val pro_host: String = ""
val pro_port: Int = 0
val dev_host: String = ""
val dev_port: Int = 0
}
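// A minimal concrete config sketch (an editor's assumption, not in the original
// source). The hosts/ports below are the legacy APNs binary-gateway endpoints,
// given purely as illustrative values.
object ApnsGateway extends AppleServer {
  override val pro_host: String = "gateway.push.apple.com"
  override val pro_port: Int = 2195
  override val dev_host: String = "gateway.sandbox.push.apple.com"
  override val dev_port: Int = 2195
}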
|
FTChinese/push
|
src/main/scala/com/ftchinese/jobs/common/AppleServer.scala
|
Scala
|
mit
| 228
|
package org.oedura.scavro.plugin
import java.io.{FileOutputStream, InputStream}
import sbt._
import scala.io.Source
import scala.util.Random
class TestUtils(workingDir: File) {
(workingDir / "in").mkdir
(workingDir / "out").mkdir
def tmpDir = workingDir
def tmpPath = workingDir.getAbsolutePath
private def extractResource(resourceName: String): File = {
val is: InputStream = getClass.getResourceAsStream(s"/$resourceName")
val text = Source.fromInputStream(is).mkString
val os: FileOutputStream = new FileOutputStream(workingDir / "in" / resourceName)
os.write(text.getBytes)
os.close()
is.close()
workingDir / "in" / resourceName
}
lazy val schemaFile: File = extractResource("Number.avsc")
  lazy val protocolFile: File = {
    schemaFile // force extraction of Number.avsc first (side effect of the lazy val)
    extractResource("NumberSystem.avdl")
  }
  def cleanup() = {
    // Depth-first listing: a subdirectory's contents come before the directory
    // itself, so every directory is empty by the time it is deleted.
    def getRecursively(f: File): Seq[File] =
      f.listFiles.filter(_.isDirectory).flatMap(getRecursively) ++ f.listFiles
getRecursively(workingDir).foreach { f =>
if (!f.delete()) throw new RuntimeException("Failed to delete " + f.getAbsolutePath)
}
tmpDir.delete()
}
}
object TestUtils {
private val alphabet = ('a' to 'z') ++ ('A' to 'Z') ++ ('0' to '9')
def randomFile(dir: File, prefix: String = "", suffix: String = "", maxTries: Int = 100, nameSize: Int = 10): File = {
def randomFileImpl(triesLeft: Int): String = {
val testName: String = (1 to nameSize).map(_ => alphabet(Random.nextInt(alphabet.size))).mkString
if (!(dir / (prefix + testName + suffix)).exists) prefix + testName + suffix
else if (triesLeft < 0) throw new Exception("Unable to find empty random file path.")
else randomFileImpl(triesLeft - 1)
}
dir / randomFileImpl(maxTries)
}
def randomFileName(prefix: String, suffix: String = "", maxTries: Int = 100, nameSize: Int = 10): String = {
def randomFileNameImpl(triesLeft: Int): String = {
val testName: String = (1 to nameSize).map(_ => alphabet(Random.nextInt(alphabet.size))).mkString
if (!file(prefix + testName + suffix).exists) prefix + testName + suffix
else if (triesLeft < 0) throw new Exception("Unable to find empty random file path.")
else randomFileNameImpl(triesLeft - 1)
}
randomFileNameImpl(maxTries)
}
def apply(workingDir: File) = {
if (workingDir.exists && workingDir.isDirectory) new TestUtils(workingDir)
else if (!workingDir.exists) {
val success = workingDir.mkdirs
if (success) new TestUtils(workingDir)
else throw new Exception("Cannot initialize working directory")
} else throw new Exception("Requested directory is occupied by ordinary file")
}
}
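// Usage sketch (an editor's assumption, not part of the original file): create
// a scratch workspace, pick a collision-free file name, then tear it all down.
object TestUtilsExample {
  def main(args: Array[String]): Unit = {
    val utils = TestUtils(file("target") / "scavro-tmp") // creates in/ and out/
    val avsc = TestUtils.randomFile(utils.tmpDir, prefix = "schema-", suffix = ".avsc")
    println(s"unique scratch file: $avsc")
    utils.cleanup() // deletes the whole workspace
  }
}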
|
oedura/scavro
|
plugin/src/test/scala/org/oedura/scavro/plugin/TestUtils.scala
|
Scala
|
apache-2.0
| 2,699
|