| code (stringlengths 5-1M) | repo_name (stringlengths 5-109) | path (stringlengths 6-208) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 5-1M) |
|---|---|---|---|---|---|
/*
* The MIT License (MIT)
* <p/>
* Copyright (c) 2016 SWEeneyThreads
* <p/>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p/>
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* <p/>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
* <p/>
*
* @author SWEeneyThreads
* @version 0.0.1
* @since 0.0.1
*/
package server.messages.query.user
import server.messages.query.PermissionMessages.{ReadMessage, ReadWriteMessage}
import server.messages.query.ReplyInfo
/**
* RowMessages are used to manage operations on items.
*/
object RowMessages {
/**
* Trait that every message belonging to row operations has to extend.
*
* @see UserMessage
*/
trait RowMessage extends UserMessage
/**
* An InsertRowMessage is used to request the insertion of an item into the selected map. A user needs Write permission
* to request this operation, therefore this message extends ReadWriteMessage.
* @param key The key of the item to insert
* @param value The value of the item to insert
*
* @see RowMessage
* @see ReadWriteMessage
*/
case class InsertRowMessage(key: String, value: Array[Byte]) extends RowMessage with ReadWriteMessage
/**
* An UpdateRowMessage is used to request the update of an item in the selected map. A user needs Write permission
* to request this operation, therefore this message extends ReadWriteMessage.
* @param key The key of the item to update
* @param value The value of the item to update
*
* @see RowMessage
* @see ReadWriteMessage
*/
case class UpdateRowMessage(key: String, value: Array[Byte]) extends RowMessage with ReadWriteMessage
/**
* A RemoveRowMessage is used to request the removal of an item from the selected map. A user needs Write permission
* to request this operation, therefore this message extends ReadWriteMessage.
* @param key The key of the item to remove
*
* @see RowMessage
* @see ReadWriteMessage
*/
case class RemoveRowMessage(key: String) extends RowMessage with ReadWriteMessage
/**
* A FindRowMessage is used to request the value of an item in the selected map. A user needs Read permission
* to request this operation, therefore this message extends ReadMessage.
* @param key The key of the item to find
*
* @see RowMessage
* @see ReadMessage
*/
case class FindRowMessage(key: String) extends RowMessage with ReadMessage
/**
* A ListKeysMessage is used to request the list of keys that compose the selected map. A user needs Read permission
* to request this operation, therefore this message extends ReadMessage.
*
* @see RowMessage
* @see ReadMessage
*/
case class ListKeysMessage() extends RowMessage with ReadMessage
/**
* A StorefinderRowMessage is used to pass the RowMessage to the Storefinder which represents the selected map.
* @param mapName The name of the selected map
* @param rowMessage The RowMessage
*
* @see RowMessage
*/
case class StorefinderRowMessage(mapName: String, rowMessage: RowMessage) extends RowMessage
/**
* A KeyAlreadyExistInfo is used as the response to an insert item request if the item requested for creation already
* exists.
*
* @see ReplyInfo
*/
case class KeyAlreadyExistInfo() extends ReplyInfo
/**
* A KeyDoesNotExistInfo is used as the response to an item request if the requested item does not exist.
*
* @see ReplyInfo
*/
case class KeyDoesNotExistInfo() extends ReplyInfo
/**
* A ListKeyInfo is used as the response to a list keys request.
* @param keys The list of keys
*
* @see ReplyInfo
*/
case class ListKeyInfo(keys: List[String]) extends ReplyInfo
/**
* A NoKeysInfo is used as the response to a list keys request if no keys are present in the selected map.
*
* @see ReplyInfo
*/
case class NoKeysInfo() extends ReplyInfo
/**
* A FindInfo is used as the response to a find item request, returning the value of the item.
* @param value The value of the item requested
*
* @see ReplyInfo
*/
case class FindInfo(value: Array[Byte]) extends ReplyInfo
}
| SweeneyThreads/Actorbase | src/main/scala/server/messages/query/user/RowMessages.scala | Scala | mit | 5,116 |
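A minimal consumption sketch for the protocol above. The `handle` function is hypothetical; only the message types come from RowMessages:

```scala
import server.messages.query.user.RowMessages._

// Hypothetical dispatcher; the permission notes mirror the Scaladoc above.
def handle(msg: RowMessage): Unit = msg match {
  case InsertRowMessage(key, value)      => println(s"insert $key (${value.length} bytes)") // Write
  case UpdateRowMessage(key, _)          => println(s"update $key")                         // Write
  case RemoveRowMessage(key)             => println(s"remove $key")                         // Write
  case FindRowMessage(key)               => println(s"find $key")                           // Read
  case ListKeysMessage()                 => println("list keys")                            // Read
  case StorefinderRowMessage(map, inner) => println(s"route $inner to map '$map'")
}
```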
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rpc.netty
import scala.concurrent.Promise
import org.apache.spark.Logging
import org.apache.spark.network.client.RpcResponseCallback
import org.apache.spark.rpc.{RpcAddress, RpcCallContext}
private[netty] abstract class NettyRpcCallContext(override val senderAddress: RpcAddress)
extends RpcCallContext with Logging {
protected def send(message: Any): Unit
override def reply(response: Any): Unit = {
send(response)
}
override def sendFailure(e: Throwable): Unit = {
send(RpcFailure(e))
}
}
/**
* If the sender and the receiver are in the same process, the reply can be sent back via `Promise`.
*/
private[netty] class LocalNettyRpcCallContext(
senderAddress: RpcAddress,
p: Promise[Any])
extends NettyRpcCallContext(senderAddress) {
override protected def send(message: Any): Unit = {
p.success(message)
}
}
/**
* A [[RpcCallContext]] that will call [[RpcResponseCallback]] to send the reply back.
*/
private[netty] class RemoteNettyRpcCallContext(
nettyEnv: NettyRpcEnv,
callback: RpcResponseCallback,
senderAddress: RpcAddress)
extends NettyRpcCallContext(senderAddress) {
override protected def send(message: Any): Unit = {
val reply = nettyEnv.serialize(message)
callback.onSuccess(reply)
}
}
| chenc10/Spark-PAF | core/src/main/scala/org/apache/spark/rpc/netty/NettyRpcCallContext.scala | Scala | apache-2.0 | 2,104 |
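The split between the two subclasses is easy to see in isolation: a local reply completes a `Promise` in-process, while a remote reply serializes first and hands the bytes to a callback. A sketch of that shape (the names below are illustrative, not Spark's API):

```scala
import scala.concurrent.Promise

// Local path (cf. LocalNettyRpcCallContext): replying completes a Promise.
val p = Promise[Any]()
val localSend: Any => Unit = msg => p.success(msg)

// Remote path (cf. RemoteNettyRpcCallContext): serialize, then hand off.
// serialize() is a stand-in for nettyEnv.serialize.
def serialize(msg: Any): Array[Byte] = msg.toString.getBytes("UTF-8")
val remoteSend: Any => Unit = msg => println(s"onSuccess(${serialize(msg).length} bytes)")

localSend("pong")  // p.future is now completed with "pong"
remoteSend("pong") // prints: onSuccess(4 bytes)
```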
package techex.domain
object notifications {
}
case class DeviceToken(value:String){
override def toString = "DeviceToken("+value.take(10)+")"
}
sealed trait NotificationTarget
case class Android(maybeToken:Option[DeviceToken]) extends NotificationTarget
case class iOS(maybeToken:Option[DeviceToken]) extends NotificationTarget
case class Web() extends NotificationTarget
case class Slack() extends NotificationTarget
case class Device() extends NotificationTarget
case class SysOut() extends NotificationTarget
trait AttentionLevel{
def asColor =
this match{
case Info => "#36a64f"
case Good => "good"
case Attention => "warning"
case Alert => "danger"
case _ => ""
}
}
case object Info extends AttentionLevel
case object Good extends AttentionLevel
case object Attention extends AttentionLevel
case object Alert extends AttentionLevel
case class Notification(platform:NotificationTarget, message:String,severity:AttentionLevel = Info)
| kantega/tech-ex-2015 | backend/src/main/scala/techex/domain/notifications.scala | Scala | mit | 984 |
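A small usage sketch of the types above; the token value is made up:

```scala
val token = DeviceToken("abcdef1234567890")
println(token) // DeviceToken(abcdef1234) - toString keeps only the first 10 chars

val n1 = Notification(Android(Some(token)), "Build passed", Good)
val n2 = Notification(Slack(), "Disk almost full", Attention)
val n3 = Notification(SysOut(), "FYI") // severity defaults to Info

println(n2.severity.asColor) // warning
println(n3.severity.asColor) // #36a64f
```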
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.process.analytic
import java.util.Date
import org.geotools.data.collection.ListFeatureCollection
import org.geotools.data.simple.SimpleFeatureCollection
import org.geotools.process.ProcessException
import org.geotools.process.factory.{DescribeParameter, DescribeProcess, DescribeResult}
import org.locationtech.geomesa.process.GeoMesaProcess
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.util.ProgressListener
/**
* Returns a single feature that is the head of a track of related simple features
*/
@DescribeProcess(
title = "Track Label Process",
description = "Returns a single feature appropriate for labelling a track of features"
)
class TrackLabelProcess extends GeoMesaProcess {
@throws(classOf[ProcessException])
@DescribeResult(name = "result", description = "Label features")
def execute(@DescribeParameter(name = "data", description = "Input features")
featureCollection: SimpleFeatureCollection,
@DescribeParameter(name = "track", description = "Track attribute to use for grouping features")
track: String,
@DescribeParameter(name = "dtg", description = "Date attribute to use for ordering tracks", min = 0)
dtg: String,
monitor: ProgressListener): SimpleFeatureCollection = {
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
val sft = featureCollection.getSchema
lazy val sftString = s"${sft.getTypeName}: ${SimpleFeatureTypes.encodeType(sft)}"
val trackField = Option(track).map(sft.indexOf).filter(_ != -1).getOrElse {
throw new IllegalArgumentException(s"Invalid track field $track for schema $sftString")
}
val dtgField = Option(dtg).map(sft.indexOf).orElse(sft.getDtgIndex)
// noinspection ExistsEquals
if (dtgField.exists(_ == -1)) {
throw new IllegalArgumentException(s"Invalid track field $track for schema $sftString")
}
val results = new ListFeatureCollection(sft)
val grouped = SelfClosingIterator(featureCollection).toSeq.groupBy(_.getAttribute(trackField))
dtgField match {
case None => grouped.foreach { case (_, features) => results.add(features.head) }
case Some(d) => grouped.foreach { case (_, features) => results.add(features.maxBy(_.getAttribute(d).asInstanceOf[Date])) }
}
results
}
}
| ronq/geomesa | geomesa-process/geomesa-process-vector/src/main/scala/org/locationtech/geomesa/process/analytic/TrackLabelProcess.scala | Scala | apache-2.0 | 2,960 |
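The heart of `execute` is the group-then-pick step: features are grouped by their track attribute, and each group keeps either its first feature (no date field) or its most recent one. The same logic on plain tuples, with made-up data:

```scala
// (track, millis, name) tuples stand in for simple features.
val features = Seq(("t1", 1L, "a"), ("t1", 3L, "b"), ("t2", 2L, "c"))
val grouped  = features.groupBy(_._1)

// No date attribute: keep the first feature of each track.
val heads  = grouped.map { case (_, fs) => fs.head }
// Date attribute present: keep the most recent feature of each track.
val latest = grouped.map { case (_, fs) => fs.maxBy(_._2) }
// latest contains ("t1", 3L, "b") and ("t2", 2L, "c")
```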
/**
* This file is based on GitHubProvider.scala
* Original work: Copyright 2012-2014 Jorge Aliss (jaliss at gmail dot com) - twitter: @jaliss
* Modifications: Copyright 2015 KASHIMA Kazuo (k4200 at kazu dot tv) - twitter: @k4200
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package securesocial.core.providers
import play.api.libs.ws.{ WSResponse, WSAuthScheme }
import play.api.libs.json.{ Reads, Json, JsValue }
import securesocial.core._
import securesocial.core.services.{ CacheService, HttpService, RoutesService }
import scala.concurrent.{ ExecutionContext, Future }
import BitbucketProvider.{ ErrorResponse, UserResponse }
class BitbucketOAuth2Client(
httpService: HttpService, settings: OAuth2Settings)(implicit executionContext: ExecutionContext) extends OAuth2Client.Default(httpService, settings)(executionContext) {
override def exchangeCodeForToken(code: String, callBackUrl: String, builder: OAuth2InfoBuilder): Future[OAuth2Info] = {
val params = Map(
OAuth2Constants.GrantType -> Seq(OAuth2Constants.AuthorizationCode),
OAuth2Constants.Code -> Seq(code),
OAuth2Constants.RedirectUri -> Seq(callBackUrl)) ++ settings.accessTokenUrlParams.mapValues(Seq(_))
httpService.url(settings.accessTokenUrl)
.withAuth(settings.clientId, settings.clientSecret, WSAuthScheme.BASIC)
.post(params).map(builder)
}
}
/**
* A Bitbucket provider
*/
class BitbucketProvider(
routesService: RoutesService,
cacheService: CacheService,
client: OAuth2Client)
extends OAuth2Provider(routesService, client, cacheService) {
val GetAuthenticatedUser = "https://api.bitbucket.org/2.0/user?access_token=%s"
implicit val errorResponseReads: Reads[ErrorResponse] = Json.reads[ErrorResponse]
implicit val userTestReads: Reads[UserResponse] = Json.reads[UserResponse]
override val id = BitbucketProvider.Bitbucket
override protected def buildInfo(response: WSResponse): OAuth2Info = {
val error = (response.json \ "error").asOpt[ErrorResponse]
if (error.isDefined) {
logger.error("[securesocial] An error occurred while getting an access token: " + error.get.message)
throw new AuthenticationException()
}
super.buildInfo(response)
}
def fillProfile(info: OAuth2Info): Future[BasicProfile] = {
client.retrieveProfile(GetAuthenticatedUser.format(info.accessToken)).map { me =>
val optError = (me \ "error").asOpt[ErrorResponse]
optError match {
case Some(error) =>
logger.error(s"[securesocial] error retrieving profile information from Bitbucket. Message = ${error.message}")
throw new AuthenticationException()
case _ =>
val userInfo = me.as[UserResponse]
val extraInfo = Map(
"username" -> userInfo.username)
BasicProfile(id, userInfo.uuid, None, None, Some(userInfo.display_name), None, None, authMethod, oAuth2Info = Some(info), extraInfo = Some(extraInfo))
}
} recover {
case e: AuthenticationException => throw e
case e =>
logger.error("[securesocial] error retrieving profile information from github", e)
throw new AuthenticationException()
}
}
}
object BitbucketProvider {
val Bitbucket = "bitbucket"
case class ErrorResponse(
message: String,
detail: String,
id: Option[String])
case class UserResponse(
uuid: String,
display_name: String,
username: String)
def apply(routesService: RoutesService, cacheService: CacheService, dummyClient: OAuth2Client)(implicit executionContext: ExecutionContext): BitbucketProvider = {
val client = new BitbucketOAuth2Client(dummyClient.httpService, dummyClient.settings)
new BitbucketProvider(routesService, cacheService, client)
}
}
| k4200/securesocial | module-code/app/securesocial/core/providers/BitbucketProvider.scala | Scala | apache-2.0 | 4,271 |
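Both `buildInfo` and `fillProfile` follow the same pattern: probe the JSON for an `error` member before decoding the expected payload. A self-contained sketch of that pattern, assuming play-json on the classpath (the payload is made up and the error is simplified to a string):

```scala
import play.api.libs.json.{ Json, Reads }

case class UserResponse(uuid: String, display_name: String, username: String)
implicit val userReads: Reads[UserResponse] = Json.reads[UserResponse]

val json = Json.parse("""{"uuid":"u-1","display_name":"K","username":"k4200"}""")
(json \ "error").asOpt[String] match {
  case Some(msg) => sys.error(s"provider returned an error: $msg")
  case None      => println(json.as[UserResponse]) // UserResponse(u-1,K,k4200)
}
```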
package PACKAGE_MODELS
case class DatabaseVersion(version: Long, appliedIn: Long)
| luismfonseca/agile-scala-android | src/main/resources/create/src/main/scala/PACKAGE_NAME_AS_DIR/models/DatabaseVersion.scala | Scala | mit | 94 |
package isel.leic.ps.CardioStream.WebApp.sparkJobs
import org.apache.spark.sql.SparkSession
/**
* Created by isaac on 09-06-2017.
*/
class ProcessHeartRate (val x : String) {
def method(): Unit = {
}
}
| isaacboucinha/CardioStream | web-app/src/main/scala/isel/leic/ps/CardioStream/WebApp/sparkJobs/ProcessHeartRate.scala | Scala | apache-2.0 | 214 |
/*
* Copyright 2015 Heiko Seeberger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.heikoseeberger.constructr
import akka.Done
import akka.actor.{ ActorSystem, Address, FSM, Props }
import akka.cluster.Cluster
import akka.pattern.{ after => delayed }
import akka.stream.ActorMaterializer
import akka.testkit.{ TestDuration, TestProbe }
import de.heikoseeberger.constructr.coordination.Coordination
import org.mockito.Mockito
import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpec }
import scala.concurrent.duration.{ Duration, DurationInt }
import scala.concurrent.{ Await, Future }
final class ConstructrMachineSpec extends WordSpec with Matchers with BeforeAndAfterAll {
import ConstructrMachine._
import Mockito._
private implicit val system = ActorSystem()
private implicit val mat = ActorMaterializer()
import system.dispatcher
private val address = Cluster(system).selfAddress
"ConstructrMachine" should {
"retry the given number of retries and then fail" in {
val coordination = mock(classOf[Coordination])
when(coordination.getNodes()).thenReturn(
boom(),
delayed(1.hour.dilated, system.scheduler)(noNodes())
)
val monitor = TestProbe()
val machine = system.actorOf(
Props(
new ConstructrMachine(
selfNode = address,
coordination = coordination,
coordinationTimeout = 100.millis.dilated,
nrOfRetries = 1,
retryDelay = 100.millis.dilated,
refreshInterval = 1.second.dilated,
ttlFactor = 1.5,
maxNrOfSeedNodes = 3,
joinTimeout = 100.millis.dilated,
ignoreRefreshFailures = false
)
)
)
machine ! FSM.SubscribeTransitionCallBack(monitor.ref)
monitor.watch(machine)
monitor.expectMsgPF(hint = "Current state GettingNodes") {
case FSM.CurrentState(_, State.GettingNodes) => ()
}
monitor.expectMsgPF(hint = "GettingNodes -> RetryScheduled") {
case FSM.Transition(_, State.GettingNodes, State.RetryScheduled) => ()
}
monitor.expectMsgPF(hint = "RetryScheduled -> GettingNodes") {
case FSM.Transition(_, State.RetryScheduled, State.GettingNodes) => ()
}
monitor.expectTerminated(machine)
}
"correctly work down the happy path (including retries)" in {
val coordination = mock(classOf[Coordination])
when(coordination.getNodes()).thenReturn(
delayed(1.hour.dilated, system.scheduler)(noNodes()),
boom(),
noNodes(),
noNodes()
)
when(coordination.lock(address, 1650.millis.dilated)).thenReturn(
delayed(1.hour.dilated, system.scheduler)(boom()),
boom(),
Future.successful(false),
Future.successful(true)
)
when(coordination.addSelf(address, 1500.millis.dilated)).thenReturn(
delayed(1.hour.dilated, system.scheduler)(boom()),
boom(),
Future.successful(Done)
)
when(coordination.refresh(address, 1500.millis.dilated)).thenReturn(
delayed(1.hour.dilated, system.scheduler)(boom()),
boom(),
Future.successful(Done)
)
val monitor = TestProbe()
val machine = system.actorOf(
Props(
new ConstructrMachine(
selfNode = address,
coordination = coordination,
coordinationTimeout = 100.millis.dilated,
nrOfRetries = 2,
retryDelay = 100.millis.dilated,
refreshInterval = 1.second.dilated,
ttlFactor = 1.5,
maxNrOfSeedNodes = 3,
joinTimeout = 100.millis.dilated,
ignoreRefreshFailures = false
)
)
)
machine ! FSM.SubscribeTransitionCallBack(monitor.ref)
monitor.expectMsgPF(hint = "Current state GettingNodes") {
case FSM.CurrentState(_, State.GettingNodes) => ()
}
monitor.expectMsgPF(hint = "GettingNodes -> RetryScheduled") {
case FSM.Transition(_, State.GettingNodes, State.RetryScheduled) => ()
}
monitor.expectMsgPF(hint = "RetryScheduled -> GettingNodes") {
case FSM.Transition(_, State.RetryScheduled, State.GettingNodes) => ()
}
monitor.expectMsgPF(hint = "GettingNodes -> RetryScheduled") {
case FSM.Transition(_, State.GettingNodes, State.RetryScheduled) => ()
}
monitor.expectMsgPF(hint = "RetryScheduled -> GettingNodes") {
case FSM.Transition(_, State.RetryScheduled, State.GettingNodes) => ()
}
monitor.expectMsgPF(hint = "GettingNodes -> Locking") {
case FSM.Transition(_, State.GettingNodes, State.Locking) => ()
}
monitor.expectMsgPF(hint = "Locking -> RetryScheduled") {
case FSM.Transition(_, State.Locking, State.RetryScheduled) => ()
}
monitor.expectMsgPF(hint = "RetryScheduled -> Locking") {
case FSM.Transition(_, State.RetryScheduled, State.Locking) => ()
}
monitor.expectMsgPF(hint = "Locking -> RetryScheduled") {
case FSM.Transition(_, State.Locking, State.RetryScheduled) => ()
}
monitor.expectMsgPF(hint = "RetryScheduled -> Locking") {
case FSM.Transition(_, State.RetryScheduled, State.Locking) => ()
}
monitor.expectMsgPF(hint = "Locking -> GettingNodes") {
case FSM.Transition(_, State.Locking, State.GettingNodes) => ()
}
monitor.expectMsgPF(hint = "GettingNodes -> Locking") {
case FSM.Transition(_, State.GettingNodes, State.Locking) => ()
}
monitor.expectMsgPF(hint = "Locking -> Joining") {
case FSM.Transition(_, State.Locking, State.Joining) => ()
}
monitor.expectMsgPF(hint = "Joining -> AddingSelf") {
case FSM.Transition(_, State.Joining, State.AddingSelf) => ()
}
monitor.expectMsgPF(hint = "AddingSelf -> RetryScheduled") {
case FSM.Transition(_, State.AddingSelf, State.RetryScheduled) => ()
}
monitor.expectMsgPF(hint = "RetryScheduled -> AddingSelf") {
case FSM.Transition(_, State.RetryScheduled, State.AddingSelf) => ()
}
monitor.expectMsgPF(hint = "AddingSelf -> RetryScheduled") {
case FSM.Transition(_, State.AddingSelf, State.RetryScheduled) => ()
}
monitor.expectMsgPF(hint = "RetryScheduled -> AddingSelf") {
case FSM.Transition(_, State.RetryScheduled, State.AddingSelf) => ()
}
monitor.expectMsgPF(hint = "AddingSelf -> RefreshScheduled") {
case FSM.Transition(_, State.AddingSelf, State.RefreshScheduled) => ()
}
monitor.expectMsgPF(hint = "RefreshScheduled -> Refreshing") {
case FSM.Transition(_, State.RefreshScheduled, State.Refreshing) => ()
}
monitor.expectMsgPF(hint = "Refreshing -> RetryScheduled") {
case FSM.Transition(_, State.Refreshing, State.RetryScheduled) => ()
}
monitor.expectMsgPF(hint = "RetryScheduled -> Refreshing") {
case FSM.Transition(_, State.RetryScheduled, State.Refreshing) => ()
}
monitor.expectMsgPF(hint = "Refreshing -> RetryScheduled") {
case FSM.Transition(_, State.Refreshing, State.RetryScheduled) => ()
}
monitor.expectMsgPF(hint = "RetryScheduled -> Refreshing") {
case FSM.Transition(_, State.RetryScheduled, State.Refreshing) => ()
}
monitor.expectMsgPF(hint = "Refreshing -> RefreshScheduled") {
case FSM.Transition(_, State.Refreshing, State.RefreshScheduled) => ()
}
}
"machine won't terminate on exceeded number of retries in Refreshing (if it's specified in configuration)" in {
val coordination = mock(classOf[Coordination])
when(coordination.getNodes()).thenReturn(noNodes())
when(coordination.lock(address, 1650.millis.dilated)).thenReturn(
Future.successful(true)
)
when(coordination.addSelf(address, 1500.millis.dilated)).thenReturn(
Future.successful(Done)
)
when(coordination.refresh(address, 1500.millis.dilated)).thenReturn(
Future.successful(Done),
delayed(1.hour.dilated, system.scheduler)(boom()),
boom(),
boom(),
Future.successful(Done)
)
val nrOfRetries = 2
val monitor = TestProbe()
val machine = system.actorOf(
Props(
new ConstructrMachine(
selfNode = address,
coordination = coordination,
coordinationTimeout = 100.millis.dilated,
nrOfRetries = nrOfRetries,
retryDelay = 100.millis.dilated,
refreshInterval = 1.second.dilated,
ttlFactor = 1.5,
maxNrOfSeedNodes = 3,
joinTimeout = 100.millis.dilated,
ignoreRefreshFailures = true
)
)
)
machine ! FSM.SubscribeTransitionCallBack(monitor.ref)
monitor.expectMsgPF(hint = "Current state GettingNodes") {
case FSM.CurrentState(_, State.GettingNodes) => ()
}
monitor.expectMsgPF(hint = "GettingNodes -> Locking") {
case FSM.Transition(_, State.GettingNodes, State.Locking) => ()
}
monitor.expectMsgPF(hint = "Locking -> Joining") {
case FSM.Transition(_, State.Locking, State.Joining) => ()
}
monitor.expectMsgPF(hint = "Joining -> AddingSelf") {
case FSM.Transition(_, State.Joining, State.AddingSelf) => ()
}
monitor.expectMsgPF(hint = "AddingSelf -> RefreshScheduled") {
case FSM.Transition(_, State.AddingSelf, State.RefreshScheduled) => ()
}
monitor.expectMsgPF(hint = "RefreshScheduled -> Refreshing") {
case FSM.Transition(_, State.RefreshScheduled, State.Refreshing) => ()
}
monitor.expectMsgPF(hint = "Refreshing -> RefreshScheduled") {
case FSM.Transition(_, State.Refreshing, State.RefreshScheduled) => ()
}
monitor.expectMsgPF(hint = "RefreshScheduled -> Refreshing") {
case FSM.Transition(_, State.RefreshScheduled, State.Refreshing) => ()
}
(1 to nrOfRetries + 1) foreach { _ =>
monitor.expectMsgPF(hint = "Refreshing -> RetryScheduled") {
case FSM.Transition(_, State.Refreshing, State.RetryScheduled) => ()
}
monitor.expectMsgPF(hint = "RetryScheduled -> Refreshing") {
case FSM.Transition(_, State.RetryScheduled, State.Refreshing) => ()
}
}
monitor.expectMsgPF(hint = "Refreshing -> RefreshScheduled") {
case FSM.Transition(_, State.Refreshing, State.RefreshScheduled) => ()
}
}
}
override protected def afterAll() = {
Await.ready(system.terminate, Duration.Inf)
super.afterAll()
}
private def boom() = Future.failed(new Exception("BOOM"))
private def noNodes() = Future.successful(Set.empty[Address])
}
| Tecsisa/constructr | core/src/test/scala/de/heikoseeberger/constructr/ConstructrMachineSpec.scala | Scala | apache-2.0 | 11,367 |
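The spec drives the FSM through its retry states with Mockito's consecutive stubbing: each call to a stubbed method returns the next value in the list. A minimal sketch of that mechanism, with a stand-in `Coordination` trait:

```scala
import scala.concurrent.Future
import org.mockito.Mockito.{ mock, when }

trait Coordination { def getNodes(): Future[Set[String]] }

val coordination = mock(classOf[Coordination])
// Call 1 fails, call 2 succeeds - this is what forces the machine through
// GettingNodes -> RetryScheduled -> GettingNodes in the tests above.
when(coordination.getNodes()).thenReturn(
  Future.failed(new Exception("BOOM")),  // boom()
  Future.successful(Set.empty[String])   // noNodes()
)
```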
// scalastyle:off
/*
* Copyright 2015 Ayasdi Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:on
package com.databricks.spark.csv.readers
import java.io.StringReader
import com.univocity.parsers.csv._
/**
* Read and parse CSV-like input
* @param fieldSep the delimiter used to separate fields in a line
* @param lineSep the delimiter used to separate lines
* @param quote character used to quote fields
* @param escape character used to escape the quote character
* @param ignoreLeadingSpace ignore white space before a field
* @param ignoreTrailingSpace ignore white space after a field
* @param headers headers for the columns
* @param inputBufSize size of buffer to use for parsing input, tune for performance
* @param maxCols maximum number of columns allowed, for safety against bad inputs
*/
private[readers] abstract class CsvReader(
fieldSep: Char = ',',
lineSep: String = "\n",
quote: Char = '"',
escape: Char = '\\',
commentMarker: Char = '#',
ignoreLeadingSpace: Boolean = true,
ignoreTrailingSpace: Boolean = true,
headers: Seq[String],
inputBufSize: Int = 128,
maxCols: Int = 20480) {
protected lazy val parser: CsvParser = {
val settings = new CsvParserSettings()
val format = settings.getFormat
format.setDelimiter(fieldSep)
format.setLineSeparator(lineSep)
format.setQuote(quote)
format.setQuoteEscape(escape)
format.setComment(commentMarker)
settings.setIgnoreLeadingWhitespaces(ignoreLeadingSpace)
settings.setIgnoreTrailingWhitespaces(ignoreTrailingSpace)
settings.setReadInputOnSeparateThread(false)
settings.setInputBufferSize(inputBufSize)
settings.setMaxColumns(maxCols)
settings.setNullValue("")
settings.setMaxCharsPerColumn(100000)
if (headers != null) settings.setHeaders(headers: _*)
new CsvParser(settings)
}
}
/**
* Parser for parsing a line at a time. Not efficient for bulk data.
* @param fieldSep the delimiter used to separate fields in a line
* @param lineSep the delimiter used to separate lines
* @param quote character used to quote fields
* @param escape character used to escape the quote character
* @param ignoreLeadingSpace ignore white space before a field
* @param ignoreTrailingSpace ignore white space after a field
* @param inputBufSize size of buffer to use for parsing input, tune for performance
* @param maxCols maximum number of columns allowed, for safety against bad inputs
*/
private[csv] class LineCsvReader(
fieldSep: Char = ',',
lineSep: String = "\n",
quote: Char = '"',
escape: Char = '\\',
commentMarker: Char = '#',
ignoreLeadingSpace: Boolean = true,
ignoreTrailingSpace: Boolean = true,
inputBufSize: Int = 128,
maxCols: Int = 20480)
extends CsvReader(
fieldSep,
lineSep,
quote,
escape,
commentMarker,
ignoreLeadingSpace,
ignoreTrailingSpace,
null,
inputBufSize,
maxCols) {
/**
* parse a line
* @param line a String with no newline at the end
* @return array of strings where each string is a field in the CSV record
*/
def parseLine(line: String): Array[String] = {
parser.beginParsing(new StringReader(line))
val parsed = parser.parseNext()
parser.stopParsing()
parsed
}
}
/**
* Parser for parsing lines in bulk. Use this when efficiency is desired.
* @param iter iterator over lines in the file
* @param fieldSep the delimiter used to separate fields in a line
* @param lineSep the delimiter used to separate lines
* @param quote character used to quote fields
* @param escape character used to escape the quote character
* @param ignoreLeadingSpace ignore white space before a field
* @param ignoreTrailingSpace ignore white space after a field
* @param headers headers for the columns
* @param inputBufSize size of buffer to use for parsing input, tune for performance
* @param maxCols maximum number of columns allowed, for safety against bad inputs
*/
private[csv] class BulkCsvReader(
iter: Iterator[String],
split: Int, // for debugging
fieldSep: Char = ',',
lineSep: String = "\n",
quote: Char = '"',
escape: Char = '\\',
commentMarker: Char = '#',
ignoreLeadingSpace: Boolean = true,
ignoreTrailingSpace: Boolean = true,
headers: Seq[String],
inputBufSize: Int = 128,
maxCols: Int = 20480)
extends CsvReader(
fieldSep,
lineSep,
quote,
escape,
commentMarker,
ignoreLeadingSpace,
ignoreTrailingSpace,
headers,
inputBufSize,
maxCols)
with Iterator[Array[String]] {
private val reader = new StringIteratorReader(iter)
parser.beginParsing(reader)
private var nextRecord = parser.parseNext()
/**
* get the next parsed line.
* @return array of strings where each string is a field in the CSV record
*/
override def next(): Array[String] = {
val curRecord = nextRecord
if(curRecord != null) {
nextRecord = parser.parseNext()
} else {
throw new NoSuchElementException("next record is null")
}
curRecord
}
override def hasNext: Boolean = nextRecord != null
}
/**
* A Reader that "reads" from a sequence of lines. Spark's textFile method removes the newline at
* the end of each line, but Univocity's parser requires a Reader that provides access to the data
* to be parsed and needs those newlines to be present.
* @param iter iterator over RDD[String]
*/
private class StringIteratorReader(val iter: Iterator[String]) extends java.io.Reader {
private var next: Long = 0
private var length: Long = 0 // length of input so far
private var start: Long = 0
private var str: String = null // current string from iter
/**
* fetch the next string from iter, if done with the current one.
* Pretend there is a newline at the end of every string we get from iter.
*/
private def refill(): Unit = {
if (length == next) {
if (iter.hasNext) {
str = iter.next()
start = length
length += (str.length + 1) // allowance for newline removed by SparkContext.textFile()
} else {
str = null
}
}
}
/**
* read the next character, if at end of string pretend there is a new line
*/
override def read(): Int = {
refill()
if (next >= length) {
-1
} else {
val cur = next - start
next += 1
if (cur == str.length) '\n' else str.charAt(cur.toInt)
}
}
/**
* read from str into cbuf
*/
override def read(cbuf: Array[Char], off: Int, len: Int): Int = {
refill()
var n = 0
if ((off < 0) || (off > cbuf.length) || (len < 0) ||
((off + len) > cbuf.length) || ((off + len) < 0)) {
throw new IndexOutOfBoundsException()
} else if (len == 0) {
n = 0
} else {
if (next >= length) { // end of input
n = -1
} else {
n = Math.min(length - next, len).toInt // lesser of amount of input available or buf size
if (n == length - next) {
str.getChars((next - start).toInt, (next - start + n - 1).toInt, cbuf, off)
cbuf(off + n - 1) = '\n'
} else {
str.getChars((next - start).toInt, (next - start + n).toInt, cbuf, off)
}
next += n
if (n < len) {
val m = read(cbuf, off + n, len - n) // have more space, fetch more input from iter
if(m != -1) n += m
}
}
}
n
}
override def skip(ns: Long): Long = {
throw new IllegalArgumentException("Skip not implemented")
}
override def ready: Boolean = {
refill()
true
}
override def markSupported: Boolean = false
override def mark(readAheadLimit: Int): Unit = {
throw new IllegalArgumentException("Mark not implemented")
}
override def reset(): Unit = {
throw new IllegalArgumentException("Mark and hence reset not implemented")
}
override def close(): Unit = { }
}
| abridgett/spark-csv | src/main/scala/com/databricks/spark/csv/readers/readers.scala | Scala | apache-2.0 | 8,437 |
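Since `LineCsvReader` is `private[csv]`, the sketch below would have to live inside the `com.databricks.spark.csv` package; it only illustrates the call pattern with the defaults above:

```scala
// Parse a single record; quote handling comes from the default settings.
val reader = new LineCsvReader()
val fields = reader.parseLine("""1,"hello, world",3""")
// fields: Array("1", "hello, world", "3") - the quoted comma is preserved
fields.foreach(println)
```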
package exerciseOne
import java.io.File
import akka.actor.ActorSystem
import org.reactivestreams.api.{Consumer, Producer}
object DisplayMp4 {
/**
* run:
* ./activator 'runMain exerciseOne.DisplayMp4'
*
*/
def main(args: Array[String]): Unit = {
// ActorSystem represents the "engine" we run in, including threading configuration and concurrency semantics.
val system = ActorSystem()
// Given - The location of the mp4 we can display (note first few seconds are still frame).
val mp4 = new File("goose.mp4")
// ------------
// EXERCISE 1.2
// ------------
// Fill in the code necessary to construct a UI display and read the mp4 file and
// play it in the UI display.
// TODO - Your code here.
}
}
| retroryan/streams-workshop | src/exercises/exerciseOne/DisplayMp4.scala | Scala | cc0-1.0 | 766 |
package jigg.pipeline
/*
Copyright 2013-2016 Hiroshi Noji
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import scala.collection.JavaConverters._
import scala.util.control
import java.lang.Process
import java.io._
/** IOCommunicator abstracts IO communication mechanism, and provides several utility
* functions.
*
* An annotator, which relies on an external command, may have a member of this trait.
* When unit testing such annotator, we may use an annotator class which overrides
* a communicator variable to eliminate dependencies for external resource.
*
* See MecabAnnotator, for example.
*/
trait IOCommunicator {
def write(line: String): Unit
def writeln(line: String): Unit
def flush(): Unit
def isAlive: Boolean
def readingIter: Iterator[String]
def closeResource() = {}
/** This is the basic method for writing.
* If an error is detected, return that on the left of Either.
*/
def safeWriteWithFlush(lines: TraversableOnce[String]): Either[Throwable, Unit] =
control.Exception.allCatch either {
for (line <- lines) writeln(line)
flush()
}
final def safeWriteWithFlush(line: String): Either[Throwable, Unit] =
safeWriteWithFlush(Seq(line))
def safeWrite(lines: TraversableOnce[String]): Either[Throwable, Unit] =
control.Exception.allCatch either { for (line <- lines) writeln(line) }
/** Call `readUntil` if the first line matches to `firstLine`.
* Otherwise, return the (unmatched) first line and the remaining input iterator
* on the left of Either.
*/
def readUntilIf(
firstLine: String=>Boolean,
lastLine: String=>Boolean,
errorLine: String=>Boolean = _ == null):
Either[(Seq[String], Iterator[String]), Seq[String]] = {
val iter = readingIter
iter.next match {
case l if firstLine(l) =>
readUntil(Iterator(l) ++ iter, lastLine, errorLine)
case l => Left((Seq(l), iter))
}
}
/** Read until a line matching `lastLine` or `errorLine` is detected.
* Return the sequence of lines until the last line on the right of Either
* if lastLine is found or stream is empty.
* Return the sequence of lines and remaining input as an iterator on the
* left of Either if errorLine is detected or all inputs are read.
*
* WARNING: the last element of returned seq is the line matching lastLine
* or errorLine. This means, for example, if we set _==null to errorLine,
* the last element is null. Use `dropRight(1)` appropriately if you want
* to ignore the last element!
*/
def readUntil(lastLine: String=>Boolean, errorLine: String=>Boolean = _ == null)
: Either[(Seq[String], Iterator[String]), Seq[String]] =
readUntil(readingIter, lastLine, errorLine)
/** Read all output until null is detected (no error check).
*/
def readAll(): Seq[String] =
readingIter.takeWhile(_ != null).toVector
protected def readUntil(
iter: Iterator[String],
lastLine: String=>Boolean,
errorLine: String=>Boolean): Either[(Seq[String], Iterator[String]), Seq[String]] = {
def readIter(cond: String=>Boolean): Seq[String] = {
var last = ""
val ret = iter.takeWhile { l => last = l; !cond(l) }.toVector
ret :+ last
}
if (iter.isEmpty) Right(Array[String]())
else {
val result = readIter(l => lastLine(l) || errorLine(l)) // = iter.takeWhile { l => lastLine(l) || errorLine(l) }.toVector
result.last match {
case l if lastLine(l) => Right(result)
case l if errorLine(l) => Left(result, iter)
case _ => Left(result, iter)
}
}
}
}
/** The basic IOCommunicator using java's Process.
*/
trait ProcessCommunicator extends IOCommunicator {
def cmd: String
def args: Seq[String]
val process: Process = startProcess()
val processIn = new BufferedReader(new InputStreamReader(process.getInputStream, "UTF-8"))
val processOut = new BufferedWriter(new OutputStreamWriter(process.getOutputStream, "UTF-8"))
checkStartError()
override def closeResource() = {
processIn.close()
processOut.close()
process.destroy()
}
def write(line: String) = processOut.write(line)
def writeln(line: String) {
processOut.write(line)
processOut.newLine()
}
def flush() = processOut.flush()
def isAlive: Boolean = !isExited
def readingIter = Iterator.continually(processIn.readLine())
protected def startProcess(): Process =
control.Exception.allCatch either startWithRedirectError() match {
case Right(process)
if (!ProcessCommunicator.isExited(process)) => process
case Right(deadProcess) => startError(new RuntimeException)
case Left(error) => startError(error)
}
private def startWithRedirectError() = {
val fullCmd = (cmd.split("\\s+") ++ args).toSeq.asJava
val pb = new ProcessBuilder(fullCmd)
pb.redirectErrorStream(true)
pb.start
}
/** Called when failing to launch the software with given command
*/
protected def startError(e: Throwable) = throw e
protected def checkStartError() = {}
protected def isExited = ProcessCommunicator.isExited(process)
}
object ProcessCommunicator {
private def isExited(p: Process) =
try { p.exitValue; true }
catch { case e: IllegalThreadStateException => false }
}
/** An example class of IOCommunicator
*/
class CommonProcessCommunicator(val cmd: String, val args: Seq[String])
extends ProcessCommunicator
/** A communicator, which may be used in a unit test.
* Writing does nothing. By reading, it reads the given output lines.
*/
class StubExternalCommunicator(outputs: Seq[String]) extends IOCommunicator {
def this(output: String) = this(Seq(output))
def isAlive = true
def write(line: String) = {}
def writeln(line: String) = {}
def flush() = {}
var i = 0
def readingIter = {
val iter = if (i < outputs.size) outputs(i).split("\n").toIterator else Iterator[String]()
i += 1
iter
}
}
class MapStubExternalCommunicator(responces: Map[String, String]) extends IOCommunicator {
var currentIn = ""
def isAlive = true
def write(line: String) = currentIn = line.trim() // assuming line ends with `\n`, which is generally true
def writeln(line: String) = currentIn = line
def flush() = {}
def readingIter = {
val o = responces(currentIn)
o.split("\n").toIterator
}
}
| tomeken-yoshinaga/jigg | src/main/scala/jigg/pipeline/IOCommunicator.scala | Scala | apache-2.0 | 6,884 |
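A quick sketch of `readUntil` against the stub communicator defined above. Note, per the WARNING in the Scaladoc, that the matching last line is included in the result:

```scala
val comm = new StubExternalCommunicator("line1\nline2\nEOS")

comm.readUntil(_ == "EOS") match {
  case Right(lines)       => println(lines) // Vector(line1, line2, EOS)
  case Left((read, rest)) => println(s"stopped early after: $read")
}
```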
package com.twitter.finagle.tracing
import com.twitter.conversions.DurationOps._
import com.twitter.io.Buf
import com.twitter.util.Time
import com.twitter.util.{Return, Throw}
import org.mockito.Matchers.any
import org.mockito.Mockito.{never, times, verify, when, atLeast}
import org.scalatest.{OneInstancePerTest, BeforeAndAfter, FunSuite}
import org.scalatestplus.mockito.MockitoSugar
import scala.util.Random
class TraceTest extends FunSuite with MockitoSugar with BeforeAndAfter with OneInstancePerTest {
val Seq(id0, id1, id2) = 0 until 3 map { i =>
TraceId(Some(SpanId(i)), Some(SpanId(i)), SpanId(i), None, Flags(i))
}
test("have a default id without parents, etc.") {
assert(Trace.id match {
case TraceId(None, None, _, None, Flags(0), None, _) => true
case _ => false
})
}
test("Trace.letTracer") {
var runs = 0
val tracer = mock[Tracer]
assert(Trace.tracers.isEmpty)
Trace.letTracer(tracer) {
assert(Trace.tracers == List(tracer))
runs += 1
}
assert(runs == 1)
}
test("Trace.letId") {
var didRun = false
val priorId = Trace.id
Trace.letId(id0) {
assert(Trace.id == id0)
didRun = true
}
assert(didRun)
assert(Trace.id == priorId)
}
test("Trace.letId: set a fresh id when none exist") {
assert(Trace.idOption == None)
val defaultId = Trace.id
Trace.letId(Trace.nextId) {
assert(Trace.id != defaultId)
assert(Trace.id match {
case TraceId(None, None, _, None, Flags(0), None, _) => true
case _ => false
})
}
}
test("Trace.letId: set a derived id when one exists") {
Trace.letId(Trace.nextId) {
val topId = Trace.id
Trace.letId(Trace.nextId) {
assert(Trace.id match {
case TraceId(Some(traceId), Some(parentId), _, None, Flags(0), _, _)
if traceId == topId.traceId && parentId == topId.spanId =>
true
case _ => false
})
}
}
}
test("Trace.letId: not set additional terminal id") {
Trace.letId(Trace.nextId, true) {
val topId = Trace.id
Trace.letId(Trace.nextId, true) {
assert(Trace.id == topId)
}
}
}
test("Trace.letId: not set id when terminal id exists") {
Trace.letId(Trace.nextId, true) {
val topId = Trace.id
Trace.letId(Trace.nextId) {
assert(Trace.id == topId)
}
}
}
val tracer1 = mock[Tracer]
val tracer2 = mock[Tracer]
when(tracer1.isActivelyTracing(any[TraceId])).thenReturn(true)
when(tracer2.isActivelyTracing(any[TraceId])).thenReturn(true)
test("Trace.traceService") {
var didRun = false
Trace.letTracer(tracer1) {
val priorId = Trace.id
Trace.traceService("service", "rpcname") {
assert(Trace.id != priorId)
didRun = true
}
verify(tracer1, atLeast(3)).record(any[Record])
assert(Trace.id == priorId)
}
assert(didRun)
}
test("Trace.record: report topmost id to all tracers") {
Time.withCurrentTimeFrozen { tc =>
Trace.letTracerAndId(tracer1, id0) {
val ann = Annotation.Message("hello")
Trace.record(ann)
verify(tracer1, times(1)).record(any[Record])
Trace.letId(id1) {
Trace.record(ann)
verify(tracer1, times(1)).record(Record(id1, Time.now, ann))
tc.advance(1.second)
Trace.letId(id2) {
Trace.record(ann)
verify(tracer1, times(1)).record(Record(id2, Time.now, ann))
tc.advance(1.second)
Trace.letTracerAndId(tracer2, id0) {
Trace.record(ann)
verify(tracer1, times(1)).record(Record(id0, Time.now, ann))
verify(tracer2, times(1)).record(Record(id0, Time.now, ann))
}
}
}
}
}
}
test("Trace.record: record IDs not in the stack to all tracers") {
Time.withCurrentTimeFrozen { tc =>
Trace.letTracerAndId(tracer1, id0) {
Trace.letTracer(tracer2) {
val rec1 = Record(id1, Time.now, Annotation.Message("wtf"))
Trace.record(rec1)
verify(tracer1, times(1)).record(rec1)
verify(tracer2, times(1)).record(rec1)
val rec0 = Record(id0, Time.now, Annotation.Message("wtf0"))
Trace.record(rec0)
verify(tracer1, times(1)).record(rec0)
verify(tracer2, times(1)).record(rec0)
}
}
}
}
test("Trace.record: record binary annotations") {
Time.withCurrentTimeFrozen { tc =>
Trace.letTracerAndId(tracer1, id0) {
val rec1 = Record(id0, Time.now, Annotation.BinaryAnnotation("key", "test"))
Trace.recordBinary("key", "test")
verify(tracer1, times(1)).record(rec1)
}
}
}
/* TODO temporarily disabled until we can mock stopwatches
"Trace.time" in Time.withCurrentTimeFrozen { tc =>
val tracer = new BufferingTracer()
val duration = 1.second
Trace.pushTracer(tracer)
Trace.time("msg") {
tc.advance(duration)
}
tracer.iterator foreach { r =>
r.annotation mustEqual Annotation.Message("msg")
r.duration mustEqual Some(duration)
}
}
*/
test("pass flags to next id") {
val flags = Flags().setDebug
val id = TraceId(Some(SpanId(1L)), Some(SpanId(2L)), SpanId(3L), None, flags)
Trace.letId(id) {
val nextId = Trace.nextId
assert(id.flags == nextId.flags)
}
}
test("set empty flags in next id if no current id set") {
val nextId = Trace.nextId
assert(nextId.flags == Flags())
}
test("generates 64-bit SpanIDs by default") {
val nextId = Trace.nextId
assert(nextId.spanId.toString.length == 16)
}
test("Trace.letTracerAndNextId: start with a default TraceId") {
Time.withCurrentTimeFrozen { tc =>
val tracer = mock[Tracer]
when(tracer.sampleTrace(any[TraceId])).thenReturn(None)
when(tracer.isActivelyTracing(any[TraceId])).thenReturn(true)
Trace.letTracerAndNextId(tracer) {
val currentId = Trace.id
assert(currentId match {
case TraceId(None, None, _, None, Flags(0), None, _) => true
case _ => false
})
assert(Trace.isTerminal == false)
assert(Trace.tracers == List(tracer))
Trace.record("Hello world")
verify(tracer, times(1)).sampleTrace(currentId)
verify(tracer, times(1))
.record(Record(currentId, Time.now, Annotation.Message("Hello world"), None))
}
}
}
test("Trace.letTracerAndNextId: use parent's sampled if it is defined") {
Time.withCurrentTimeFrozen { tc =>
val tracer = mock[Tracer]
when(tracer.sampleTrace(any[TraceId])).thenReturn(Some(true))
val parentId =
TraceId(Some(SpanId(123)), Some(SpanId(456)), SpanId(789), Some(false), Flags(0))
Trace.letId(parentId) {
Trace.letTracerAndNextId(tracer) {
val currentId = Trace.id
assert(currentId match {
case TraceId(Some(_traceId), Some(_parentId), _, Some(_sampled), Flags(0), _, _)
if (_traceId == parentId.traceId) && (_parentId == parentId.spanId) &&
(_sampled == parentId.sampled.get) =>
true
case _ => false
})
when(tracer.isActivelyTracing(currentId)).thenReturn(currentId.sampled.getOrElse(true))
assert(Trace.isTerminal == false)
assert(Trace.tracers == List(tracer))
verify(tracer, never()).sampleTrace(currentId)
}
}
}
}
test("Trace.letTracerAndNextId: call with terminal=true") {
Time.withCurrentTimeFrozen { tc =>
val tracer = mock[Tracer]
when(tracer.sampleTrace(any[TraceId])).thenReturn(None)
when(tracer.isActivelyTracing(any[TraceId])).thenReturn(true)
Trace.letTracerAndNextId(tracer, true) {
val currentId = Trace.id
assert(currentId match {
case TraceId(None, None, _, None, Flags(0), None, _) => true
case _ => false
})
assert(Trace.isTerminal == true)
assert(Trace.tracers == List(tracer))
verify(tracer, times(1)).sampleTrace(currentId)
Trace.record("Hello world")
verify(tracer, times(1))
.record(Record(currentId, Time.now, Annotation.Message("Hello world"), None))
}
}
}
test("Trace.letTracerAndNextId: trace with terminal set for the current state") {
Time.withCurrentTimeFrozen { tc =>
val tracer = mock[Tracer]
when(tracer.sampleTrace(any[TraceId])).thenReturn(Some(true))
when(tracer.isActivelyTracing(any[TraceId])).thenReturn(true)
val parentId =
TraceId(Some(SpanId(123)), Some(SpanId(456)), SpanId(789), Some(true), Flags(0))
Trace.letId(parentId, terminal = true) {
Trace.letTracerAndNextId(tracer) {
val currentId = Trace.id
assert(currentId == parentId)
assert(Trace.isTerminal == true)
assert(Trace.tracers == List(tracer))
verify(tracer, never()).sampleTrace(currentId)
Trace.record("Hello world")
verify(tracer, times(1))
.record(Record(currentId, Time.now, Annotation.Message("Hello world"), None))
}
}
}
}
test("Trace.isActivelyTracing") {
val id = TraceId(Some(SpanId(12)), Some(SpanId(13)), SpanId(14), None, Flags(0L))
val tracer1 = mock[Tracer]
val tracer2 = mock[Tracer]
val tracer = BroadcastTracer(Seq(tracer1, tracer2))
// no tracers, not tracing
assert(!Trace.isActivelyTracing)
// only the null tracer, still false
Trace.letTracerAndId(NullTracer, id) {
assert(!Trace.isActivelyTracing)
}
Trace.letTracer(tracer) {
Trace.letId(id) {
// Even if one tracer returns, then true
when(tracer1.isActivelyTracing(any[TraceId])).thenReturn(false)
when(tracer2.isActivelyTracing(any[TraceId])).thenReturn(true)
assert(Trace.isActivelyTracing)
// when everything returns true, then true
when(tracer1.isActivelyTracing(any[TraceId])).thenReturn(true)
when(tracer2.isActivelyTracing(any[TraceId])).thenReturn(true)
assert(Trace.isActivelyTracing)
// tracing enabled flag overrides individual tracer decisions
when(tracer1.isActivelyTracing(any[TraceId])).thenReturn(true)
when(tracer2.isActivelyTracing(any[TraceId])).thenReturn(true)
Trace.disable()
assert(!Trace.isActivelyTracing)
Trace.enable()
assert(Trace.isActivelyTracing)
// when everything returns false, then false
when(tracer1.isActivelyTracing(any[TraceId])).thenReturn(false)
when(tracer2.isActivelyTracing(any[TraceId])).thenReturn(false)
assert(!Trace.isActivelyTracing)
}
}
}
test("trace ID serialization: valid ids (64-bit)") {
// TODO: Consider using scalacheck here. (CSL-595)
def longs(seed: Long) = {
val rng = new Random(seed)
Seq.fill(10) { rng.nextLong() }
}
def spanIds(seed: Long): Seq[Option[SpanId]] =
None +: (longs(seed) map (l => Some(SpanId(l))))
val traceIds = for {
traceId <- spanIds(1L)
parentId <- traceId +: spanIds(2L)
maybeSpanId <- parentId +: spanIds(3L)
spanId <- maybeSpanId.toSeq
flags <- Seq(Flags(0L), Flags(Flags.Debug))
sampled <- Seq(None, Some(false), Some(true))
} yield TraceId(traceId, parentId, spanId, sampled, flags)
for (id <- traceIds)
assert(Trace.TraceIdContext.tryUnmarshal(Trace.TraceIdContext.marshal(id)) == Return(id))
}
test("trace ID serialization: valid ids (128-bit)") {
val traceId = TraceId(
Some(SpanId(1L)),
Some(SpanId(1L)),
SpanId(2L),
None,
Flags(Flags.Debug),
Some(SpanId(2L))
)
assert(
Trace.TraceIdContext.tryUnmarshal(Trace.TraceIdContext.marshal(traceId)) == Return(traceId))
}
// example from X-Amzn-Trace-Id: Root=1-5759e988-bd862e3fe1be46a994272793;Sampled=1
test("Trace.nextTraceIdHigh: encodes epoch seconds") {
Time.withTimeAt(Time.fromSeconds(1465510280)) { tc => // Thursday, June 9, 2016 10:11:20 PM
val traceIdHigh = Tracing.nextTraceIdHigh()
assert(traceIdHigh.toString.startsWith("5759e988"))
}
}
test("trace ID serialization: throw in handle on invalid size") {
val bytes = new Array[Byte](33)
Trace.TraceIdContext.tryUnmarshal(Buf.ByteArray.Owned(bytes)) match {
case Throw(_: IllegalArgumentException) =>
case rv => fail(s"Got $rv")
}
}
}
| luciferous/finagle | finagle-core/src/test/scala/com/twitter/finagle/tracing/TraceTest.scala | Scala | apache-2.0 | 12,631 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package statements
import com.intellij.lang.ASTNode
import com.intellij.openapi.progress.ProgressManager
import com.intellij.psi._
import com.intellij.psi.scope.PsiScopeProcessor
import org.jetbrains.plugins.scala.extensions.PsiElementExt
import org.jetbrains.plugins.scala.lang.lexer._
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypeParametersOwner
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScMember
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.createIdentifier
import org.jetbrains.plugins.scala.lang.psi.stubs.ScFunctionStub
import org.jetbrains.plugins.scala.lang.psi.stubs.elements.ScFunctionElementType
import org.jetbrains.plugins.scala.macroAnnotations.{Cached, ModCount}
/**
* @author ilyas
*/
abstract class ScFunctionImpl protected (stub: ScFunctionStub, nodeType: ScFunctionElementType, node: ASTNode)
extends ScalaStubBasedElementImpl(stub, nodeType, node) with ScMember
with ScFunction with ScTypeParametersOwner {
override def isStable = false
def nameId: PsiElement = {
val n = getNode.findChildByType(ScalaTokenTypes.tIDENTIFIER) match {
case null => getNode.findChildByType(ScalaTokenTypes.kTHIS)
case notNull => notNull
}
if (n == null) {
return createIdentifier(getGreenStub.getName).getPsi
}
n.getPsi
}
@Cached(synchronized = false, ModCount.anyScalaPsiModificationCount, this)
def paramClauses: ScParameters = getStubOrPsiChild(ScalaElementTypes.PARAM_CLAUSES)
override def processDeclarations(processor: PsiScopeProcessor, state: ResolveState,
lastParent: PsiElement, place: PsiElement): Boolean = {
// process the function's type parameters
if (!super[ScTypeParametersOwner].processDeclarations(processor, state, lastParent, place)) return false
lazy val parameterIncludingSynthetic: Seq[ScParameter] = effectiveParameterClauses.flatMap(_.effectiveParameters)
if (getStub == null) {
returnTypeElement match {
case Some(x) if lastParent != null && x.startOffsetInParent == lastParent.startOffsetInParent =>
for (p <- parameterIncludingSynthetic) {
ProgressManager.checkCanceled()
if (!processor.execute(p, state)) return false
}
case _ =>
}
} else {
if (lastParent != null && lastParent.getContext != lastParent.getParent) {
for (p <- parameterIncludingSynthetic) {
ProgressManager.checkCanceled()
if (!processor.execute(p, state)) return false
}
}
}
true
}
@Cached(synchronized = false, ModCount.anyScalaPsiModificationCount, this)
def returnTypeElement: Option[ScTypeElement] = byPsiOrStub(findChild(classOf[ScTypeElement]))(_.typeElement)
}
| ilinum/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScFunctionImpl.scala | Scala | apache-2.0 | 3,133 |
/*
* Copyright (c) 2015, Nightfall Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package moe.nightfall.instrumentality
import org.lwjgl.util.vector.{Matrix4f, Vector3f}
/**
* I'd replace half of this with a quaternion if I wasn't afraid it wouldn't interpolate.
* Created on 24/07/15.
*/
class PoseBoneTransform {
// TODO SCALA make this immutable / more OPS!
/**
* The rotation values (applied in the order X0, Y0, Z0, X1, Y1, X2)
* The reason for having 3 sets of values is to allow some rotations to happen after/before others.
* (for example: rotation on one axis to raise/lower head should be applied after left/right rotation)
*/
var X0, Y0, Z0: Double = _
var X1, Y1: Double = _
var X2: Double = _
/**
* The translation values (applied before rotation)
*/
var TX0, TY0, TZ0: Double = _
// Alpha multiplier
var alphaMul = 1.0d
/**
* My Little Miku
* Interpolation Is Magic
*
* @param A The pose to interpolate from.
* @param B The pose to interpolate to.
* @param i The interpolation value.
*/
def this(A0: PoseBoneTransform, B0: PoseBoneTransform, i0: Float) {
this()
val i = 1.0f - i0
val A = if (A0 == null) new PoseBoneTransform() else A0
val B = if (B0 == null) new PoseBoneTransform() else B0
X0 = (A.X0 * i) + (B.X0 * (1.0f - i))
Y0 = (A.Y0 * i) + (B.Y0 * (1.0f - i))
Z0 = (A.Z0 * i) + (B.Z0 * (1.0f - i))
X1 = (A.X1 * i) + (B.X1 * (1.0f - i))
Y1 = (A.Y1 * i) + (B.Y1 * (1.0f - i))
X2 = (A.X2 * i) + (B.X2 * (1.0f - i))
TX0 = (A.TX0 * i) + (B.TX0 * (1.0f - i))
TY0 = (A.TY0 * i) + (B.TY0 * (1.0f - i))
TZ0 = (A.TZ0 * i) + (B.TZ0 * (1.0f - i))
alphaMul = (A.alphaMul * i) + (B.alphaMul * (1.0f - i))
}
def this(boneTransform: PoseBoneTransform) {
this()
X0 = boneTransform.X0
Y0 = boneTransform.Y0
Z0 = boneTransform.Z0
X1 = boneTransform.X1
Y1 = boneTransform.Y1
X2 = boneTransform.X2
TX0 = boneTransform.TX0
TY0 = boneTransform.TY0
TZ0 = boneTransform.TZ0
alphaMul = boneTransform.alphaMul
}
def this(v: Float, v1: Float, v2: Float, v3: Float, v4: Float) {
this()
X0 = v
Y0 = v1
Z0 = v2
X1 = v3
Y1 = v4
}
// Accumulates `other` into this transform in place and returns this; alpha multipliers are multiplied together.
def +=(other: PoseBoneTransform): PoseBoneTransform = {
X0 += other.X0
Y0 += other.Y0
Z0 += other.Z0
X1 += other.X1
Y1 += other.Y1
X2 += other.X2
TX0 += other.TX0
TY0 += other.TY0
TZ0 += other.TZ0
alphaMul *= other.alphaMul
this
}
// Scales this transform in place; alphaMul is interpolated toward 1 rather than multiplied directly.
def *=(other: Double): PoseBoneTransform = {
X0 *= other
Y0 *= other
Z0 *= other
X1 *= other
Y1 *= other
X2 *= other
TX0 *= other
TY0 *= other
TZ0 *= other
alphaMul = (other * (alphaMul - 1)) + 1
this
}
def isZero(): Boolean = !isNotZero
def isNotZero(): Boolean = (X0 != 0) || (Y0 != 0) || (Z0 != 0) ||
(X1 != 0) || (Y1 != 0) ||
(X2 != 0) ||
(TX0 != 0) || (TY0 != 0) || (TZ0 != 0) || (alphaMul != 1)
def apply(boneMatrix: Matrix4f) {
boneMatrix.translate(new Vector3f(TX0.toFloat, TY0.toFloat, TZ0.toFloat))
boneMatrix.rotate(X0.toFloat, new Vector3f(1, 0, 0))
boneMatrix.rotate(Y0.toFloat, new Vector3f(0, 1, 0))
boneMatrix.rotate(Z0.toFloat, new Vector3f(0, 0, 1))
boneMatrix.rotate(X1.toFloat, new Vector3f(1, 0, 0))
boneMatrix.rotate(Y1.toFloat, new Vector3f(0, 1, 0))
boneMatrix.rotate(X2.toFloat, new Vector3f(1, 0, 0))
}
}
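// A minimal usage sketch (not part of the original file; names are illustrative only):
// interpolating between two poses. The interpolation constructor inverts its weight
// internally (i = 1 - i0), so i0 = 0 yields pose A and i0 = 1 yields pose B.
//
//   val rest = new PoseBoneTransform(0f, 0f, 0f, 0f, 0f)
//   val nod  = new PoseBoneTransform(0.5f, 0f, 0f, 0f, 0f)
//   val mid  = new PoseBoneTransform(rest, nod, 0.5f) // halfway: mid.X0 == 0.25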
|
Nightfall/Instrumentality
|
core/src/main/scala/moe/nightfall/instrumentality/PoseBoneTransform.scala
|
Scala
|
bsd-2-clause
| 5,066
|
package es.upm.fi.oeg.morph.tc
class D001Test extends R2RMLTest("D001-1table1column1row") {
"TC0001a" should "generate 1 triple" in{
val dg=generate("R2RMLTC0001a")
dg.getDefaultGraph.size should be (1)
}
"TC0001b" should "generate 1 graph" in{
val dg=generate("R2RMLTC0001b")
dg.getDefaultGraph.size should be (1)
//println(ds.getDefaultModel.listStatements.nextStatement.getSubject.getId)
}
}
|
jpcik/morph
|
morph-r2rml-tc/src/test/scala/es/upm/fi/oeg/morph/tc/D001Test.scala
|
Scala
|
apache-2.0
| 427
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.scaledaction.core.spark
import com.typesafe.config.Config
import com.scaledaction.core.config.{ AppConfig, HasAppConfig }
/**
* Application settings. First attempts to acquire settings from the deploy environment;
* if not found there, falls back to -D Java system properties, and finally to a default config.
*
* Settings in the environment, such as SPARK_HA_MASTER=local[10], are picked up first.
*
* Settings passed on the command line via -D override settings in the deploy environment.
* For example: sbt -Dspark.master="local[12]" run
*
* If you have not used Typesafe Config before, you can pass in overrides like so:
*
* {{{
* new Settings(ConfigFactory.parseString("""
* spark.master = "some.ip"
* """))
* }}}
*
* Any of these can also be overridden by your own application.conf.
*
* @param conf Optional config for test
*/
class SparkConfig(
val master: String,
rootConfig: Config) extends AppConfig(rootConfig: Config) {
override def toString(): String = s"master: ${master}"
}
trait HasSparkConfig extends HasAppConfig {
def getSparkConfig: SparkConfig = getSparkConfig(rootConfig.getConfig("spark"))
def getSparkConfig(rootName: String): SparkConfig = getSparkConfig(rootConfig.getConfig(rootName))
private def getSparkConfig(spark: Config): SparkConfig = {
val master = getRequiredValue("SPARK_MASTER", (spark, "master"), "local[2]")
new SparkConfig(master, spark)
}
}
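// A minimal usage sketch (assumed, not part of the original file): mixing in HasSparkConfig
// yields the resolved Spark settings. Resolution order is the SPARK_MASTER environment
// variable, then the "spark.master" config key, then the "local[2]" default.
//
//   object MyApp extends App with HasSparkConfig {
//     val spark = getSparkConfig
//     println(spark.master)
//   }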
|
scaledaction/weather-service
|
core/src/main/scala/com/scaledaction/core/spark/SparkConfig.scala
|
Scala
|
apache-2.0
| 2,376
|
package drt.client.services
import autowire._
import diode.Implicits.runAfterImpl
import diode._
import diode.data._
import diode.react.ReactConnector
import drt.client.{SPAMain, TableViewUtils}
import drt.client.logger._
import drt.client.services.HandyStuff._
import drt.client.services.RootModel.{FlightCode, mergeTerminalQueues}
import drt.client.services.RootModel.QueueCrunchResults
import drt.shared.FlightsApi.{TerminalName, _}
import drt.shared._
import boopickle.Default._
import diode.ActionResult.NoChange
import drt.client.services.JSDateConversions.SDate
import drt.client.services.JSDateConversions.SDate.JSSDate
import drt.shared.PassengerSplits.{FlightNotFound, VoyagePaxSplits}
import drt.client.components.TerminalDeploymentsTable.TerminalDeploymentsRow
import drt.client.actions.Actions._
import drt.client.components.FlightsWithSplitsTable
import scala.collection.immutable.{Iterable, Map, NumericRange, Seq}
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.language.postfixOps
import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue
import scala.scalajs.js
import scala.scalajs.js.Date
import scala.util.{Failure, Success, Try}
case class DeskRecTimeslot(timeInMillis: Long, deskRec: Int)
trait WorkloadsUtil {
def labelsFromAllQueues(startTime: Long) = {
val oneMinute: Long = 60000
val allMins = startTime until (startTime + oneMinute * 60 * 24) by oneMinute
allMins.map(millis => {
val d = new js.Date(millis)
f"${d.getHours()}%02d:${d.getMinutes()}%02d"
})
}
def firstFlightTimeQueue(workloads: Map[String, (Seq[WL], Seq[Pax])]): Long = {
workloads.values.flatMap(_._1.map(_.time)).min
}
def timeStampsFromAllQueues(workloads: Map[String, QueuePaxAndWorkLoads]) = {
val timesMin: Long = firstFlightTimeQueue(workloads)
minuteNumericRange(timesMin, 24)
}
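/**
* One timestamp per minute, in millis, starting at `start` and covering `numberOfHours`.
* For example, minuteNumericRange(0L, 1) yields 0, 60000, ..., 3540000 (60 values).
*/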
def minuteNumericRange(start: Long, numberOfHours: Int = 24): NumericRange[Long] = {
val oneMinute: Long = 60000
val allMins = start until (start + oneMinute * 60 * numberOfHours) by oneMinute
allMins
}
}
// The base model of our application
case class Workloads(workloads: Map[TerminalName, Map[QueueName, QueuePaxAndWorkLoads]]) extends WorkloadsUtil {
lazy val labels = labelsFromAllQueues(startTime)
def timeStamps(): NumericRange[Long] = minuteNumericRange(startTime, 24)
def startTime: Long = {
val now = new Date()
val thisMorning = new Date(now.getFullYear(), now.getMonth(), now.getDate())
thisMorning.getTime().toLong
}
def firstFlightTimeAcrossTerminals: Long = workloads.values.map(firstFlightTimeQueue(_)).min
}
case class RootModel(
motd: Pot[String] = Empty,
workloadPot: Pot[Workloads] = Empty,
queueCrunchResults: Map[TerminalName, Map[QueueName, Pot[PotCrunchResult]]] = Map(),
simulationResult: Map[TerminalName, Map[QueueName, Pot[SimulationResult]]] = Map(),
flights: Pot[FlightsWithSplits] = Empty,
airportInfos: Map[String, Pot[AirportInfo]] = Map(),
airportConfig: Pot[AirportConfig] = Empty,
minutesInASlot: Int = 15,
shiftsRaw: Pot[String] = Empty,
staffMovements: Seq[StaffMovement] = Seq(),
slotsInADay: Int = 96,
flightSplits: Map[FlightCode, Map[MilliDate, VoyagePaxSplits]] = Map()
) {
lazy val flightsWithApiSplits: Pot[List[js.Dynamic]] = {
flights map { fs =>
FlightsWithSplitsTable.reactTableFlightsAsJsonDynamic(fs)
}
}
lazy val staffDeploymentsByTerminalAndQueue: Map[TerminalName, QueueStaffDeployments] = {
val rawShiftsString = shiftsRaw match {
case Ready(rawShifts) => rawShifts
case _ => ""
}
val shifts = ShiftParser(rawShiftsString).parsedShifts.toList
//todo we have essentially this code elsewhere, look for successfulShifts
val staffFromShiftsAndMovementsAt = if (shifts.exists(s => s.isFailure)) {
log.error("Couldn't parse raw shifts")
(t: TerminalName, m: MilliDate) => 0
} else {
val successfulShifts = shifts.collect { case Success(s) => s }
val ss = ShiftService(successfulShifts)
StaffMovements.terminalStaffAt(ss)(staffMovements) _
}
val pdr = PortDeployment.portDeskRecs(queueCrunchResults)
val pd = PortDeployment.terminalDeployments(pdr, staffFromShiftsAndMovementsAt)
val tsa = PortDeployment.terminalStaffAvailable(pd) _
StaffDeploymentCalculator(tsa, queueCrunchResults).getOrElse(Map())
}
lazy val calculatedDeploymentRows: Pot[Map[TerminalName, Pot[List[TerminalDeploymentsRow]]]] = {
airportConfig.map(ac => ac.terminalNames.map(terminalName => {
calculateTerminalDeploymentRows(terminalName)
}).toMap)
}
def calculateTerminalDeploymentRows(terminalName: TerminalName): (TerminalName, Pot[List[TerminalDeploymentsRow]]) = {
val crv = queueCrunchResults.getOrElse(terminalName, Map())
val srv = simulationResult.getOrElse(terminalName, Map())
val udr = staffDeploymentsByTerminalAndQueue.getOrElse(terminalName, Map())
log.info(s"tud: ${terminalName}")
val terminalDeploymentRows: Pot[List[TerminalDeploymentsRow]] = workloadPot.map(workloads => {
workloads.workloads.get(terminalName) match {
case Some(terminalWorkloads) =>
val timestamps = workloads.timeStamps()
val startFromMilli = WorkloadsHelpers.midnightBeforeNow()
val minutesRangeInMillis: NumericRange[Long] = WorkloadsHelpers.minutesForPeriod(startFromMilli, 24)
val paxLoad: Map[String, List[Double]] = WorkloadsHelpers.paxloadPeriodByQueue(terminalWorkloads, minutesRangeInMillis)
TableViewUtils.terminalDeploymentsRows(terminalName, airportConfig, timestamps, paxLoad, crv, srv, udr)
case None =>
Nil
}
})
terminalName -> terminalDeploymentRows
}
override def toString: String =
s"""
|RootModel(
|motd: $motd
|paxload: $workloadPot
|queueCrunchResults: $queueCrunchResults
|userDeskRec: $staffDeploymentsByTerminalAndQueue
|simulationResult: $simulationResult
|flights: $flights
|airportInfos: $airportInfos
|flightPaxSplits: ${flightSplits}
|)
""".stripMargin
}
object RootModel {
type FlightCode = String
type QueueCrunchResults = Map[QueueName, Pot[PotCrunchResult]]
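// Merges two terminal -> queue maps; when both sides define the same queue under a terminal,
// the entry from m2 wins. For example:
//   mergeTerminalQueues(Map("T1" -> Map("eea" -> 1)), Map("T1" -> Map("eea" -> 2, "egate" -> 3)))
//   == Map("T1" -> Map("eea" -> 2, "egate" -> 3))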
def mergeTerminalQueues[A](m1: Map[TerminalName, Map[QueueName, A]], m2: Map[TerminalName, Map[QueueName, A]]): Map[TerminalName, Map[QueueName, A]] = {
val merged = m1.toSeq ++ m2.toSeq
val grouped = merged.groupBy(_._1)
val cleaned = grouped.mapValues(_.flatMap(_._2).toMap)
cleaned
}
}
case class DeskRecTimeSlots(items: Seq[DeskRecTimeslot]) {
def updated(newItem: DeskRecTimeslot): DeskRecTimeSlots = {
log.info(s"will update ${newItem} into ${items.take(5)}...")
items.indexWhere(_.timeInMillis == newItem.timeInMillis) match {
case -1 =>
log.info("add new")
DeskRecTimeSlots(items :+ newItem)
case idx =>
log.info(s"add old: idx: $idx, newItem: $newItem, ${items(idx)}")
DeskRecTimeSlots(items.updated(idx, newItem))
}
}
}
/**
* Handles actions related to desk recommendation time slots
*
* @param modelRW Reader/Writer to access the model
*/
class DeskTimesHandler[M](modelRW: ModelRW[M, Map[TerminalName, QueueStaffDeployments]]) extends LoggingActionHandler(modelRW) {
override def handle = {
case UpdateDeskRecsTime(terminalName, queueName, deskRecTimeSlot) =>
val newDesksPot: Pot[DeskRecTimeSlots] = value(terminalName)(queueName).map(_.updated(deskRecTimeSlot))
val desks = newDesksPot.get.items.map(_.deskRec).toList
updated(
mergeTerminalQueues(value, Map(terminalName -> Map(queueName -> newDesksPot))),
Effect(Future(RunSimulation(terminalName, queueName, desks))))
}
}
abstract class LoggingActionHandler[M, T](modelRW: ModelRW[M, T]) extends ActionHandler(modelRW) {
override def handleAction(model: M, action: Any): Option[ActionResult[M]] = {
// log.info(s"finding handler for ${action.toString.take(100)}")
Try(super.handleAction(model, action)) match {
case Failure(f) =>
log.error(s"Exception from ${getClass} ${f.getMessage()} while handling $action")
val cause = f.getCause()
cause match {
case null => log.error(s"no cause")
case c => log.error(s"Exception from ${getClass} ${c.getMessage()}")
}
throw f
case Success(s) =>
s
}
}
}
class AirportConfigHandler[M](modelRW: ModelRW[M, Pot[AirportConfig]]) extends LoggingActionHandler(modelRW) {
protected def handle = {
case action: GetAirportConfig =>
log.info("requesting workloadsWrapper from server")
updated(Pending(), Effect(AjaxClient[Api].airportConfiguration().call().map(UpdateAirportConfig)))
case UpdateAirportConfig(configHolder) =>
log.info(s"Received airportConfig $configHolder")
log.info("Subscribing to crunches for terminal/queues")
val effects: Effect = createCrunchRequestEffects(configHolder)
updated(Ready(configHolder), effects)
}
def createCrunchRequestEffects(configHolder: AirportConfig): Effect = {
val crunchRequests: Seq[Effect] = for {
tn <- configHolder.terminalNames
qn <- configHolder.queues(tn)
} yield {
Effect(Future(GetLatestCrunch(tn, qn)))
}
val effects = seqOfEffectsToEffectSeq(crunchRequests.toList)
effects
}
}
class WorkloadHandler[M](modelRW: ModelRW[M, Pot[Workloads]]) extends LoggingActionHandler(modelRW) {
protected def handle = {
case action: GetWorkloads =>
log.info("requesting workloadsWrapper from server")
updated(Pending(), Effect(AjaxClient[Api].getWorkloads().call().map(UpdateWorkloads)))
case UpdateWorkloads(terminalQueueWorkloads) =>
updated(Ready(Workloads(terminalQueueWorkloads)))
}
}
object HandyStuff {
type PotCrunchResult = Pot[CrunchResult]
type QueueStaffDeployments = Map[String, Pot[DeskRecTimeSlots]]
type TerminalQueueStaffDeployments = Map[TerminalName, QueueStaffDeployments]
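/**
* Folds a list of effects into a single Effect: an empty list becomes a NoAction effect,
* a single effect is returned as-is, and multiple effects run sequentially via EffectSeq.
*/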
def seqOfEffectsToEffectSeq(crunchRequests: List[Effect]): Effect = {
crunchRequests match {
case Nil =>
Effect(Future{NoAction})
case h :: Nil =>
h
case h :: ts =>
new EffectSeq(h, ts, queue)
}
}
}
class SimulationHandler[M](staffDeployments: ModelR[M, TerminalQueueStaffDeployments],
modelR: ModelR[M, Pot[Workloads]],
modelRW: ModelRW[M, Map[TerminalName, Map[QueueName, Pot[SimulationResult]]]])
extends LoggingActionHandler(modelRW) {
protected def handle = {
case RunAllSimulations() =>
log.info(s"run simulation for ${staffDeployments.value}")
val actions = staffDeployments.value.flatMap {
case (terminalName, queueMapPot) => {
queueMapPot.map {
case (queueName, deployedDesks) =>
val desks = deployedDesks.get.items.map(_.deskRec).toList
Effect(Future(RunSimulation(terminalName, queueName, desks)))
}
}
}.toList
log.info(s"runAllSimulations effects ${actions}")
effectOnly(seqOfEffectsToEffectSeq(actions))
case RunSimulation(terminalName, queueName, desks) =>
log.info(s"Requesting simulation for $terminalName, $queueName")
val queueWorkload = getTerminalQueueWorkload(terminalName, queueName)
val simulationResult: Future[SimulationResult] = AjaxClient[Api].processWork(terminalName, queueName, queueWorkload, desks).call()
effectOnly(
Effect(simulationResult.map(resp => UpdateSimulationResult(terminalName, queueName, resp)))
)
case UpdateSimulationResult(terminalName, queueName, simResult) =>
updated(mergeTerminalQueues(value, Map(terminalName -> Map(queueName -> Ready(simResult)))))
}
private def getTerminalQueueWorkload(terminalName: TerminalName, queueName: QueueName): List[Double] = {
val startFromMilli = WorkloadsHelpers.midnightBeforeNow()
val minutesRangeInMillis: NumericRange[Long] = WorkloadsHelpers.minutesForPeriod(startFromMilli, 24)
val workloadPot = modelR.value
workloadPot match {
case Ready(workload) =>
workload.workloads.get(terminalName) match {
case Some(terminalWorkload) =>
val queueWorkload: List[Double] = WorkloadsHelpers.workloadPeriodByQueue(terminalWorkload, minutesRangeInMillis)(queueName)
queueWorkload
case None => Nil
}
case _ => Nil
}
}
}
case class GetLatestCrunch(terminalName: TerminalName, queueName: QueueName) extends Action
case class RequestFlights(from: Long, to: Long) extends Action
case class UpdateFlights(flights: Flights) extends Action
case class UpdateFlightsWithSplits(flights: FlightsWithSplits) extends Action
case class UpdateFlightPaxSplits(splitsEither: Either[FlightNotFound, VoyagePaxSplits]) extends Action
class FlightsHandler[M](modelRW: ModelRW[M, Pot[FlightsWithSplits]]) extends LoggingActionHandler(modelRW) {
protected def handle = {
case RequestFlights(from, to) =>
log.info(s"client requesting flights $from $to")
val flightsEffect = Effect(Future(RequestFlights(0, 0))).after(60L seconds)
val fe = Effect(AjaxClient[Api].flightsWithSplits(from, to).call().map(UpdateFlightsWithSplits(_)))
effectOnly(fe + flightsEffect)
case UpdateFlightsWithSplits(flightsWithSplits) =>
val flights = flightsWithSplits.flights.map(_.apiFlight)
val result = if (value.isReady) {
val oldFlights = value.get
val oldFlightsSet = oldFlights.flights.toSet
val newFlightsSet = flights.toSet
if (oldFlightsSet != newFlightsSet) {
val airportCodes = flights.map(_.Origin).toSet
val airportInfos = Effect(Future(GetAirportInfos(airportCodes)))
val allEffects = airportInfos
updated(Ready(flightsWithSplits), allEffects)
} else {
log.info("no changes to flights")
noChange
}
} else {
val airportCodes = flights.map(_.Origin).toSet
updated(Ready(flightsWithSplits), Effect(Future(GetAirportInfos(airportCodes))))
}
result
case UpdateFlightPaxSplits(Left(failure)) =>
log.info(s"Did not find flightPaxSplits for ${failure}")
noChange
case UpdateFlightPaxSplits(Right(result)) =>
log.info(s"Found flightPaxSplits ${result}")
noChange
}
}
class CrunchHandler[M](modelRW: ModelRW[M, Map[TerminalName, Map[QueueName, Pot[PotCrunchResult]]]])
extends LoggingActionHandler(modelRW) {
def modelQueueCrunchResults = value
override def handle = {
case GetLatestCrunch(terminalName, queueName) =>
val crunchEffect = Effect(Future(GetLatestCrunch(terminalName, queueName))).after(10L seconds)
val fe: Future[Action] = AjaxClient[Api].getLatestCrunchResult(terminalName, queueName).call().map {
case Right(crunchResultWithTimeAndInterval) =>
UpdateCrunchResult(terminalName, queueName, crunchResultWithTimeAndInterval)
case Left(ncr) =>
log.info(s"Failed to fetch crunch - has a crunch run yet? $ncr")
NoAction
}
effectOnly(Effect(fe) + crunchEffect)
case UpdateCrunchResult(terminalName, queueName, crunchResultWithTimeAndInterval) =>
log.info(s"UpdateCrunchResult $queueName. firstTimeMillis: ${crunchResultWithTimeAndInterval.firstTimeMillis}")
val crunchResultsByQueue = mergeTerminalQueues(value, Map(terminalName -> Map(queueName -> Ready(Ready(crunchResultWithTimeAndInterval)))))
if (modelQueueCrunchResults != crunchResultsByQueue) updated(crunchResultsByQueue)
else noChange
}
}
object StaffDeploymentCalculator {
type TerminalQueueStaffDeployments = Map[TerminalName, QueueStaffDeployments]
def apply[M](staffAvailable: (TerminalName) => (MilliDate) => Int, terminalQueueCrunchResultsModel: Map[TerminalName, QueueCrunchResults]):
Try[TerminalQueueStaffDeployments] = {
val terminalQueueCrunchResults = terminalQueueCrunchResultsModel
val firstTerminalName = terminalQueueCrunchResults.keys.headOption.getOrElse("")
val crunchResultWithTimeAndIntervalTry = Try(terminalQueueCrunchResults(firstTerminalName).head._2.get.get)
crunchResultWithTimeAndIntervalTry.map(crunchResultWithTimeAndInterval => {
val drts: DeskRecTimeSlots = calculateDeskRecTimeSlots(crunchResultWithTimeAndInterval)
val newSuggestedStaffDeployments: Map[TerminalName, Map[QueueName, Ready[DeskRecTimeSlots]]] = terminalQueueCrunchResults.map((terminalQueueCrunchResult: (TerminalName, QueueCrunchResults)) => {
val terminalName: TerminalName = terminalQueueCrunchResult._1
val terminalStaffAvailable = staffAvailable(terminalName)
val queueCrunchResult = terminalQueueCrunchResult._2
/*
Fixme: This transpose loses the queue name and thus certainty of order
*/
val queueDeskRecsOverTime: Iterable[Iterable[DeskRecTimeslot]] = queueCrunchResult.transpose {
case (_, Ready(Ready(cr))) => calculateDeskRecTimeSlots(cr).items
}
val timeslotsToInts = (deskRecTimeSlots: Iterable[DeskRecTimeslot]) => {
val timeInMillis = MilliDate(deskRecTimeSlots.headOption.map(_.timeInMillis).getOrElse(0L))
queueRecsToDeployments(_.toInt)(deskRecTimeSlots.map(_.deskRec).toList, terminalStaffAvailable(timeInMillis))
}
val deployments = queueDeskRecsOverTime.map(timeslotsToInts).transpose
val times: Seq[Long] = drts.items.map(_.timeInMillis)
val zipped = queueCrunchResult.keys.zip({
deployments.map(times.zip(_).map { case (t, r) => DeskRecTimeslot(t, r) })
})
(terminalName, zipped.toMap.mapValues((x: Seq[DeskRecTimeslot]) => Ready(DeskRecTimeSlots(x))))
})
newSuggestedStaffDeployments
})
}
def calculateDeskRecTimeSlots(crunchResultWithTimeAndInterval: CrunchResult) = {
val timeIntervalMinutes = 15
val millis = Iterator.iterate(crunchResultWithTimeAndInterval.firstTimeMillis)(_ + timeIntervalMinutes * crunchResultWithTimeAndInterval.intervalMillis).toIterable
val updatedDeskRecTimeSlots: DeskRecTimeSlots = DeskRecTimeSlots(
TableViewUtils
.takeEveryNth(timeIntervalMinutes)(crunchResultWithTimeAndInterval.recommendedDesks)
.zip(millis).map {
case (deskRec, timeInMillis) => DeskRecTimeslot(timeInMillis = timeInMillis, deskRec = deskRec)
}.toList)
updatedDeskRecTimeSlots
}
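/**
* Distributes `staffAvailable` across queues in proportion to each queue's recommendation,
* giving the last queue whatever remains so the deployments always sum to `staffAvailable`.
* Worked example: queueRecsToDeployments(_.toInt)(List(10, 5, 5), 8) yields List(4, 2, 2):
* round(8 * 10/20) = 4, round(8 * 5/20) = 2, then 8 - (4 + 2) = 2 for the last queue.
*/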
def queueRecsToDeployments(round: Double => Int)(queueRecs: Seq[Int], staffAvailable: Int): Seq[Int] = {
val totalStaffRec = queueRecs.sum
queueRecs.foldLeft(List[Int]()) {
case (agg, queueRec) if (agg.length < queueRecs.length - 1) =>
agg :+ round(staffAvailable * (queueRec.toDouble / totalStaffRec))
case (agg, _) =>
agg :+ staffAvailable - agg.sum
}
}
}
class AirportCountryHandler[M](timeProvider: () => Long, modelRW: ModelRW[M, Map[String, Pot[AirportInfo]]]) extends LoggingActionHandler(modelRW) {
def mkPending = Pending(timeProvider())
override def handle = {
case GetAirportInfos(codes) =>
val stringToObject: Map[String, Pot[AirportInfo]] = value ++ Map("BHX" -> mkPending, "EDI" -> mkPending)
updated(stringToObject, Effect(AjaxClient[Api].airportInfosByAirportCodes(codes).call().map(UpdateAirportInfos(_))))
case UpdateAirportInfos(infos) =>
val infosReady = infos.map(kv => (kv._1, Ready(kv._2)))
updated(value ++ infosReady)
case GetAirportInfo(code) =>
value.get(code) match {
case None =>
val stringToObject = value + (code -> Empty)
log.info(s"sending request for info for ${code}")
updated(stringToObject, Effect(AjaxClient[Api].airportInfoByAirportCode(code).call().map(res => UpdateAirportInfo(code, res))))
case Some(v) =>
noChange
}
case UpdateAirportInfo(code, Some(airportInfo)) =>
val newValue = value + ((code -> Ready(airportInfo)))
log.info(s"got a new value for ${code} ${airportInfo}")
updated(newValue)
}
}
class ShiftsHandler[M](modelRW: ModelRW[M, Pot[String]]) extends LoggingActionHandler(modelRW) {
protected def handle = {
case SetShifts(shifts: String) =>
updated(Ready(shifts), Effect(Future(RunAllSimulations())))
case SaveShifts(shifts: String) =>
AjaxClient[Api].saveShifts(shifts).call()
noChange
case AddShift(shift) =>
updated(Ready(s"${value.getOrElse("")}\\n${shift.toCsv}"))
case GetShifts() =>
effectOnly(Effect(AjaxClient[Api].getShifts().call().map(res => SetShifts(res))))
}
}
class StaffMovementsHandler[M](modelRW: ModelRW[M, Seq[StaffMovement]]) extends LoggingActionHandler(modelRW) {
protected def handle: PartialFunction[Any, ActionResult[M]] = {
case AddStaffMovement(staffMovement) =>
val v: Seq[StaffMovement] = value
val updatedValue: Seq[StaffMovement] = (v :+ staffMovement).sortBy(_.time)
updated(updatedValue)
case RemoveStaffMovement(idx, uUID) =>
val updatedValue = value.filter(_.uUID != uUID)
updated(updatedValue, Effect(Future(SaveStaffMovements())))
case SetStaffMovements(staffMovements: Seq[StaffMovement]) =>
updated(staffMovements, Effect(Future(RunAllSimulations())))
case GetStaffMovements() =>
effectOnly(Effect(AjaxClient[Api].getStaffMovements().call().map(res => SetStaffMovements(res))))
case SaveStaffMovements() =>
AjaxClient[Api].saveStaffMovements(value).call()
noChange
}
}
trait DrtCircuit extends Circuit[RootModel] with ReactConnector[RootModel] {
val blockWidth = 15
def timeProvider() = new Date().getTime.toLong
// initial application model
override protected def initialModel = RootModel()
// combine all handlers into one
override val actionHandler = {
println("composing handlers")
val composedhandlers: HandlerFunction = composeHandlers(
new WorkloadHandler(zoomRW(_.workloadPot)((m, v) => {
m.copy(workloadPot = v)
})),
new CrunchHandler(zoomRW(m => m.queueCrunchResults)((m, v) => m.copy(queueCrunchResults = v))),
new SimulationHandler(zoom(_.staffDeploymentsByTerminalAndQueue), zoom(_.workloadPot), zoomRW(_.simulationResult)((m, v) => m.copy(simulationResult = v))),
new FlightsHandler(zoomRW(_.flights)((m, v) => m.copy(flights = v))),
new AirportCountryHandler(timeProvider, zoomRW(_.airportInfos)((m, v) => m.copy(airportInfos = v))),
new AirportConfigHandler(zoomRW(_.airportConfig)((m, v) => m.copy(airportConfig = v))),
new ShiftsHandler(zoomRW(_.shiftsRaw)((m, v) => m.copy(shiftsRaw = v))),
new StaffMovementsHandler(zoomRW(_.staffMovements)((m, v) => m.copy(staffMovements = v)))
)
val loggedhandlers: HandlerFunction = (m, t) => {
log.debug(s"functional handler for ${m.toString.take(100)}")
composedhandlers(m, t)
}
loggedhandlers
}
}
object SPACircuit extends DrtCircuit
case class GetAirportInfos(code: Set[String]) extends Action
case class GetAirportInfo(code: String) extends Action
case class UpdateAirportInfo(code: String, info: Option[AirportInfo]) extends Action
case class UpdateAirportInfos(infos: Map[String, AirportInfo]) extends Action
|
somanythings/drt-scalajs-spa-exploration
|
client/src/main/scala/spatutorial/client/services/SPACircuit.scala
|
Scala
|
apache-2.0
| 23,728
|
package responses
object IngressMM {
val BaseURL = "http://ingressmm.com/"
}
|
ponkotuy/mission-recommender
|
app/responses/IngressMM.scala
|
Scala
|
apache-2.0
| 80
|
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.subjects
import monix.execution.Scheduler
import monix.reactive.MulticastStrategy
import monix.reactive.OverflowStrategy.Unbounded
object ConcurrentReplaySubjectSuite extends BaseConcurrentSubjectSuite {
def alreadyTerminatedTest(expectedElems: Seq[Long])(implicit s: Scheduler) = {
val c = ConcurrentSubject[Long](MulticastStrategy.replay, Unbounded)
Sample(c, expectedElems.sum)
}
def continuousStreamingTest(expectedElems: Seq[Long])(implicit s: Scheduler) = {
val c = ConcurrentSubject.replay[Long](Unbounded)
Some(Sample(c, expectedElems.sum))
}
}
|
Wogan/monix
|
monix-reactive/shared/src/test/scala/monix/reactive/subjects/ConcurrentReplaySubjectSuite.scala
|
Scala
|
apache-2.0
| 1,278
|
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v2.calculations
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.computations.calculations.QualifyingExpenditureOnMachineryCalculation
import uk.gov.hmrc.ct.computations._
class QualifyingExpenditureOnMachineryCalculationSpec extends WordSpec with Matchers {
"QualifyingExpenditureOnMachinery" should {
"add cp82 and cp83" in new QualifyingExpenditureOnMachineryCalculation {
qualifyingExpenditureCalculation(CP82(3), CP83(4)) shouldBe CP253(7)
}
}
}
|
ahudspith-equalexperts/ct-calculations
|
src/test/scala/uk/gov/hmrc/ct/ct600/v2/calculations/QualifyingExpenditureOnMachineryCalculationSpec.scala
|
Scala
|
apache-2.0
| 1,122
|
object Test extends App {
println(f"%")
println(f"%%")
println(f"%%%")
println(f"%%%%")
println(f"%%%%%")
println(f"%%%%%%")
println(f"%%n")
println(f"%%%n")
println(f"%%%%n")
println(f"%%%%%n")
println(f"%%%%%%n")
println(f"%%%%%%%n")
println(f"${0}%")
println(f"${0}%d")
println(f"${0}%%d")
println(f"${0}%%%d")
println(f"${0}%%%%d")
println(f"${0}%%%%%d")
println(f"${0}%n")
println(f"${0}%d%n")
}
|
felixmulder/scala
|
test/files/neg/t7325.scala
|
Scala
|
bsd-3-clause
| 443
|
package zzb.xmpp.ebus
import org.jivesoftware.smackx.muc.ParticipantStatusListener
/**
* Created by Simon on 2014/8/12
*/
abstract class RoomMemberListener extends ParticipantStatusListener {
override def kicked(participant: String, actor: String, reason: String): Unit = ()
override def voiceGranted(participant: String): Unit = ()
override def voiceRevoked(participant: String): Unit = ()
override def banned(participant: String, actor: String, reason: String): Unit = ()
override def membershipGranted(participant: String): Unit = ()
override def membershipRevoked(participant: String): Unit = ()
override def moderatorGranted(participant: String): Unit = ()
override def moderatorRevoked(participant: String): Unit = ()
override def ownershipGranted(participant: String): Unit = ()
override def ownershipRevoked(participant: String): Unit = ()
override def adminGranted(participant: String): Unit = ()
override def adminRevoked(participant: String): Unit = ()
override def nicknameChanged(participant: String, newNickname: String): Unit = ()
}
|
stepover/zzb-xmpp
|
src/main/scala/zzb/xmpp/ebus/RoomMemberListener.scala
|
Scala
|
mit
| 1,089
|
/**
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt.protocol
abstract class SettingQueryResponse() extends sbt.protocol.EventMessage() with Serializable {
override def equals(o: Any): Boolean = o match {
case x: SettingQueryResponse => true
case _ => false
}
override def hashCode: Int = {
37 * (17 + "SettingQueryResponse".##)
}
override def toString: String = {
"SettingQueryResponse()"
}
}
object SettingQueryResponse {
}
|
Duhemm/sbt
|
protocol/src/main/contraband-scala/sbt/protocol/SettingQueryResponse.scala
|
Scala
|
bsd-3-clause
| 522
|
package org.cddcore.carers
import org.joda.time.DateTime
case class TimeLineItem(events: List[(DateRange, KeyAndParams)]) {
val startDate = events.head._1.from
val endDate = events.last._1.to
val daysInWhichIWasOk = events.foldLeft[Int](0)((acc, tuple) => tuple match {
case (dr, keyAndParams) if keyAndParams.key == "ENT" => acc + dr.days
case _ => acc
})
val wasOk = daysInWhichIWasOk >= 2
override def toString = s"TimeLineItem($startDate, $endDate, days=$daysInWhichIWasOk, wasOK=$wasOk, dateRange=\\n ${events.mkString("\\n ")})"
def eventToJsonString(event: (DateRange, KeyAndParams)) =
event match { case (_, KeyAndParams(key, _)) => s"'$key'" }
def jsonToString = {
val renderedStartDate = Claim.toString(startDate)
val renderedEndDate = Claim.toString(endDate)
val renderedEvents = events.map(eventToJsonString(_)).mkString("[", ",", "]")
val result = s"{'startDate': '$renderedStartDate','endDate': '$renderedEndDate', 'wasOk':$wasOk, 'events':$renderedEvents}"
result
}
}
object TimeLineCalcs {
def toJson(list: TimeLine): String =
list.map(_.jsonToString).mkString("[", ",\n", "]").replaceAll("\'", "\"")
type TimeLine = List[TimeLineItem]
/** Returns the claim's timeline: one TimeLineItem per group of date ranges that are processed together */
def findTimeLine(c: CarersXmlSituation): TimeLine = {
val dates = InterestingDates.interestingDates(c)
val result = DateRanges.interestingDatesToDateRangesToBeProcessedTogether(dates, c.world.dayToSplitOn)
result.map((dateRangeToBeProcessedTogether: DateRangesToBeProcessedTogether) => {
TimeLineItem(dateRangeToBeProcessedTogether.dateRanges.map((dr) => {
val result = Carers.engine(dr.from, c)
(dr, result)
}))
})
}
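/**
* Collapses adjacent single-event TimeLineItems that share the same KeyAndParams into one
* item spanning the combined date range; items with differing keys are left as they are.
*/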
def foldTimelineOnItemKeys(tl: TimeLine): TimeLine = {
type Accumulator = (List[TimeLineItem], Option[TimeLineItem])
val initialValue: Accumulator = (List[TimeLineItem](), None)
val foldFn: ((Accumulator, TimeLineItem) => Accumulator) =
(acc: Accumulator, v: TimeLineItem) => {
(acc, v) match {
case ((list, None), v) => (list, Some(v))
case ((list, Some(TimeLineItem((DateRange(fromM, toM, reasonM), kAndPM) :: Nil))), TimeLineItem((DateRange(from, to, reason), kAndP) :: Nil)) if kAndPM == kAndP => {
val newTli = TimeLineItem(List((DateRange(fromM, to, reasonM), kAndP)))
(list, Some(newTli))
}
case ((list, Some(mergeV)), v) => ((list :+ mergeV, Some(v)))
}
}
val result = tl.foldLeft[Accumulator](initialValue)(foldFn)
result._2 match {
case None => result._1
case Some(tli) => result._1 :+ tli
}
}
def main(args: Array[String]) {
println(findTimeLine(CarersXmlSituation(World(), Claim.getXml("CL800119A"))).mkString("\n"))
}
}
|
scott-thomson/carers
|
src/main/scala/org/cddcore/carers/TimeLineCalcs.scala
|
Scala
|
bsd-2-clause
| 2,843
|
package japgolly.scalajs.react.extra
import scala.annotation.tailrec
import scala.reflect.ClassTag
import java.util.{Date, UUID}
import org.scalajs.dom.console
import scala.scalajs.js.{Date => JsDate}
import japgolly.scalajs.react._
import japgolly.scalajs.react.extra.internal.ReusabilityMacros
import japgolly.scalajs.react.internal.OptionLike
/**
* Tests whether one instance can be used in place of another.
* Used mostly to compare properties and state of a component to avoid unnecessary updates.
*
* If you imagine a class with 8 fields, equality would compare all 8 fields, whereas this would typically just
* compare the ID field, the update-date, or the revision number.
* You might think of this as a very quick version of equality.
*
* Don't miss `Reusability.shouldComponentUpdate` which can be applied to a component via
* `ScalaComponent.build#configure`.
*
* @since 0.9.0
*/
final class Reusability[A](val test: (A, A) => Boolean) extends AnyVal {
def contramap[B](f: B => A): Reusability[B] =
new Reusability((x, y) => test(f(x), f(y)))
def narrow[B <: A]: Reusability[B] =
new Reusability[B](test)
def testNot: (A, A) => Boolean =
!test(_, _)
def ||[B <: A](tryNext: Reusability[B]): Reusability[B] =
Reusability[B]((x, y) => test(x, y) || tryNext.test(x, y))
def &&[B <: A](tryNext: Reusability[B]): Reusability[B] =
Reusability[B]((x, y) => test(x, y) && tryNext.test(x, y))
def reusable(a: A)(implicit c: ClassTag[A]): Reusable[A] =
Reusable.explicitly(a)(this)(c)
def logNonReusable: Reusability[A] = logNonReusable()
def logNonReusable(show: A => String = _.toString,
log : String => Unit = console.warn(_),
title: String = "Non-reusability:",
fmt : (String, => String, => String) => String = (t, x, y) => s"$t\n- $x\n- $y"): Reusability[A] =
Reusability { (a, b) =>
val r = test(a, b)
if (!r)
log(fmt(title, show(a), show(b)))
r
}
}
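// A minimal usage sketch (assumed, not part of the original file; Picture is illustrative):
// a fast Reusability for a case class that compares only its id field.
//
//   case class Picture(id: Long, url: String, title: String)
//   implicit val reusePicture: Reusability[Picture] = Reusability.by(_.id)
//
// Two Pictures with the same id are then treated as reusable even if url or title differ.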
object Reusability {
def apply[A](f: (A, A) => Boolean): Reusability[A] =
new Reusability(f)
private[this] val alwaysInstance: Reusability[Any] =
apply((_, _) => true)
def always[A]: Reusability[A] =
alwaysInstance.asInstanceOf[Reusability[A]]
private[this] val neverInstance: Reusability[Any] =
apply((_, _) => false)
def never[A]: Reusability[A] =
neverInstance.asInstanceOf[Reusability[A]]
def const[A](r: Boolean): Reusability[A] =
if (r) always else never
/** Compare by reference. Reuse if both values are the same instance. */
def byRef[A <: AnyRef]: Reusability[A] =
new Reusability((a, b) => a eq b)
/** Compare using universal equality (Scala's == operator). */
def by_==[A]: Reusability[A] =
new Reusability((a, b) => a == b)
/** Compare by reference and if different, compare using universal equality (Scala's == operator). */
def byRefOr_==[A <: AnyRef]: Reusability[A] =
byRef[A] || by_==[A]
def by[A, B](f: A => B)(implicit r: Reusability[B]): Reusability[A] =
r contramap f
def byIterator[I[X] <: Iterable[X], A: Reusability]: Reusability[I[A]] =
apply { (x, y) =>
val i = x.iterator
val j = y.iterator
@tailrec
def go: Boolean = {
val hasNext = i.hasNext
if (hasNext != j.hasNext)
false
else if (!hasNext)
true
else if (i.next() ~/~ j.next())
false
else
go
}
go
}
def indexedSeq[S[X] <: IndexedSeq[X], A: Reusability]: Reusability[S[A]] =
apply((x, y) =>
(x.length == y.length) && x.indices.forall(i => x(i) ~=~ y(i)))
/**
* Generate an instance for A.
*
* If A is a sealed trait or sealed abstract class, Reusability is determined by sub-class reusability (which will
* be derived when it doesn't exist).
*
* If A is a case class, Reusability is determined by each field's Reusability.
*/
def derive[A]: Reusability[A] =
macro ReusabilityMacros.quietDerive[A]
/**
* Same as [[derive]] except the code generated by the macro is printed to stdout.
*/
def deriveDebug[A]: Reusability[A] =
macro ReusabilityMacros.debugDerive[A]
/**
* Generate an instance for a case class by comparing each case field.
*
* @tparam A The case class type.
*/
@deprecated("Use Reusability.derive instead.", "1.1.1")
def caseClass[A]: Reusability[A] =
macro ReusabilityMacros.quietCaseClass[A]
/**
* Same as [[caseClass]] except the code generated by the macro is printed to stdout.
*/
@deprecated("Use Reusability.deriveDebug instead.", "1.1.1")
def caseClassDebug[A]: Reusability[A] =
macro ReusabilityMacros.debugCaseClass[A]
/**
* Generate an instance for a case class by comparing each case field except those specified.
*
* Example:
* ```
* case class Picture(id: Long, url: String, title: String)
*
* implicit val picReuse = Reusability.caseClassExcept[Picture]('url, 'title)
* ```
*
* @tparam A The case class type.
*/
def caseClassExcept[A](field1: Symbol, fieldN: Symbol*): Reusability[A] =
macro ReusabilityMacros.quietCaseClassExcept[A]
/**
* Same as [[caseClassExcept]] except the code generated by the macro is printed to stdout.
*/
def caseClassExceptDebug[A](field1: Symbol, fieldN: Symbol*): Reusability[A] =
macro ReusabilityMacros.debugCaseClassExcept[A]
def double(tolerance: Double): Reusability[Double] =
apply((x, y) => (x - y).abs <= tolerance)
def float(tolerance: Float): Reusability[Float] =
apply((x, y) => (x - y).abs <= tolerance)
/**
* This is not implicit because the point of Reusability is to be fast, whereas full comparison of all keys and
* values in a map is usually not desirable; in some cases it will probably even be faster to just rerender and
* have React determine that nothing has changed.
*
* Nonetheless, there are cases where a full comparison is desired, so use this as needed. `Reusability[K]` isn't
* needed because a key's existence in the map (and thus universal equality) is all that's necessary.
* Time is O(|m₁|+|m₂|).
*/
def map[K, V](implicit rv: Reusability[V]): Reusability[Map[K, V]] =
byRef[Map[K, V]] || apply((m, n) =>
if (m.isEmpty)
n.isEmpty
else if (n.isEmpty)
false
else {
var ok = true
var msize = 0
val mi = m.iterator
while (ok && mi.hasNext) {
val (k, v) = mi.next()
msize += 1
ok = n.get(k).exists(rv.test(v, _))
}
ok && msize == n.size
}
)
/** Declare a type reusable when both values pass a given predicate. */
def when[A](f: A => Boolean): Reusability[A] =
apply((a, b) => f(a) && f(b))
/** Declare a type reusable when both values fail a given predicate. */
def unless[A](f: A => Boolean): Reusability[A] =
when(!f(_))
// -------------------------------------------------------------------------------------------------------------------
// Implicit Instances
// Prohibited:
// ===========
// Array - it's mutable. Reusability & mutability are incompatible.
// Stream - it's lazy. Reusability & non-strictness are incompatible.
@inline implicit def unit : Reusability[Unit ] = always
@inline implicit def boolean: Reusability[Boolean] = by_==
@inline implicit def byte : Reusability[Byte ] = by_==
@inline implicit def char : Reusability[Char ] = by_==
@inline implicit def short : Reusability[Short ] = by_==
@inline implicit def int : Reusability[Int ] = by_==
@inline implicit def long : Reusability[Long ] = by_==
@inline implicit def string : Reusability[String ] = by_==
@inline implicit def date : Reusability[Date ] = by_==
@inline implicit def uuid : Reusability[UUID ] = by_==
implicit def jsDate: Reusability[JsDate] =
apply((x, y) => x.getTime == y.getTime)
@inline implicit def option[A: Reusability]: Reusability[Option[A]] =
optionLike
implicit def optionLike[O[_], A](implicit o: OptionLike[O], r: Reusability[A]): Reusability[O[A]] =
apply((x, y) =>
o.fold(x, o isEmpty y)(xa =>
o.fold(y, false)(ya =>
xa ~=~ ya)))
implicit def either[A: Reusability, B: Reusability]: Reusability[Either[A, B]] =
apply((x, y) =>
x.fold[Boolean](
a => y.fold(a ~=~ _, _ => false),
b => y.fold(_ => false, b ~=~ _)))
implicit def list[A: Reusability]: Reusability[List[A]] =
byRef[List[A]] || byIterator[List, A]
implicit def vector[A: Reusability]: Reusability[Vector[A]] =
byRef[Vector[A]] || indexedSeq[Vector, A]
implicit def set[A]: Reusability[Set[A]] =
byRefOr_== // universal equality must hold for Sets
// Generated by bin/gen-reusable
implicit def tuple2[A:Reusability, B:Reusability]: Reusability[(A,B)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2))
implicit def tuple3[A:Reusability, B:Reusability, C:Reusability]: Reusability[(A,B,C)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3))
implicit def tuple4[A:Reusability, B:Reusability, C:Reusability, D:Reusability]: Reusability[(A,B,C,D)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3) && (x._4 ~=~ y._4))
implicit def tuple5[A:Reusability, B:Reusability, C:Reusability, D:Reusability, E:Reusability]: Reusability[(A,B,C,D,E)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3) && (x._4 ~=~ y._4) && (x._5 ~=~ y._5))
implicit def tuple6[A:Reusability, B:Reusability, C:Reusability, D:Reusability, E:Reusability, F:Reusability]: Reusability[(A,B,C,D,E,F)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3) && (x._4 ~=~ y._4) && (x._5 ~=~ y._5) && (x._6 ~=~ y._6))
implicit def tuple7[A:Reusability, B:Reusability, C:Reusability, D:Reusability, E:Reusability, F:Reusability, G:Reusability]: Reusability[(A,B,C,D,E,F,G)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3) && (x._4 ~=~ y._4) && (x._5 ~=~ y._5) && (x._6 ~=~ y._6) && (x._7 ~=~ y._7))
implicit def tuple8[A:Reusability, B:Reusability, C:Reusability, D:Reusability, E:Reusability, F:Reusability, G:Reusability, H:Reusability]: Reusability[(A,B,C,D,E,F,G,H)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3) && (x._4 ~=~ y._4) && (x._5 ~=~ y._5) && (x._6 ~=~ y._6) && (x._7 ~=~ y._7) && (x._8 ~=~ y._8))
implicit def tuple9[A:Reusability, B:Reusability, C:Reusability, D:Reusability, E:Reusability, F:Reusability, G:Reusability, H:Reusability, I:Reusability]: Reusability[(A,B,C,D,E,F,G,H,I)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3) && (x._4 ~=~ y._4) && (x._5 ~=~ y._5) && (x._6 ~=~ y._6) && (x._7 ~=~ y._7) && (x._8 ~=~ y._8) && (x._9 ~=~ y._9))
implicit def tuple10[A:Reusability, B:Reusability, C:Reusability, D:Reusability, E:Reusability, F:Reusability, G:Reusability, H:Reusability, I:Reusability, J:Reusability]: Reusability[(A,B,C,D,E,F,G,H,I,J)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3) && (x._4 ~=~ y._4) && (x._5 ~=~ y._5) && (x._6 ~=~ y._6) && (x._7 ~=~ y._7) && (x._8 ~=~ y._8) && (x._9 ~=~ y._9) && (x._10 ~=~ y._10))
implicit def tuple11[A:Reusability, B:Reusability, C:Reusability, D:Reusability, E:Reusability, F:Reusability, G:Reusability, H:Reusability, I:Reusability, J:Reusability, K:Reusability]: Reusability[(A,B,C,D,E,F,G,H,I,J,K)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3) && (x._4 ~=~ y._4) && (x._5 ~=~ y._5) && (x._6 ~=~ y._6) && (x._7 ~=~ y._7) && (x._8 ~=~ y._8) && (x._9 ~=~ y._9) && (x._10 ~=~ y._10) && (x._11 ~=~ y._11))
implicit def tuple12[A:Reusability, B:Reusability, C:Reusability, D:Reusability, E:Reusability, F:Reusability, G:Reusability, H:Reusability, I:Reusability, J:Reusability, K:Reusability, L:Reusability]: Reusability[(A,B,C,D,E,F,G,H,I,J,K,L)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3) && (x._4 ~=~ y._4) && (x._5 ~=~ y._5) && (x._6 ~=~ y._6) && (x._7 ~=~ y._7) && (x._8 ~=~ y._8) && (x._9 ~=~ y._9) && (x._10 ~=~ y._10) && (x._11 ~=~ y._11) && (x._12 ~=~ y._12))
implicit def tuple13[A:Reusability, B:Reusability, C:Reusability, D:Reusability, E:Reusability, F:Reusability, G:Reusability, H:Reusability, I:Reusability, J:Reusability, K:Reusability, L:Reusability, M:Reusability]: Reusability[(A,B,C,D,E,F,G,H,I,J,K,L,M)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3) && (x._4 ~=~ y._4) && (x._5 ~=~ y._5) && (x._6 ~=~ y._6) && (x._7 ~=~ y._7) && (x._8 ~=~ y._8) && (x._9 ~=~ y._9) && (x._10 ~=~ y._10) && (x._11 ~=~ y._11) && (x._12 ~=~ y._12) && (x._13 ~=~ y._13))
implicit def tuple14[A:Reusability, B:Reusability, C:Reusability, D:Reusability, E:Reusability, F:Reusability, G:Reusability, H:Reusability, I:Reusability, J:Reusability, K:Reusability, L:Reusability, M:Reusability, N:Reusability]: Reusability[(A,B,C,D,E,F,G,H,I,J,K,L,M,N)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3) && (x._4 ~=~ y._4) && (x._5 ~=~ y._5) && (x._6 ~=~ y._6) && (x._7 ~=~ y._7) && (x._8 ~=~ y._8) && (x._9 ~=~ y._9) && (x._10 ~=~ y._10) && (x._11 ~=~ y._11) && (x._12 ~=~ y._12) && (x._13 ~=~ y._13) && (x._14 ~=~ y._14))
implicit def tuple15[A:Reusability, B:Reusability, C:Reusability, D:Reusability, E:Reusability, F:Reusability, G:Reusability, H:Reusability, I:Reusability, J:Reusability, K:Reusability, L:Reusability, M:Reusability, N:Reusability, O:Reusability]: Reusability[(A,B,C,D,E,F,G,H,I,J,K,L,M,N,O)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3) && (x._4 ~=~ y._4) && (x._5 ~=~ y._5) && (x._6 ~=~ y._6) && (x._7 ~=~ y._7) && (x._8 ~=~ y._8) && (x._9 ~=~ y._9) && (x._10 ~=~ y._10) && (x._11 ~=~ y._11) && (x._12 ~=~ y._12) && (x._13 ~=~ y._13) && (x._14 ~=~ y._14) && (x._15 ~=~ y._15))
implicit def tuple16[A:Reusability, B:Reusability, C:Reusability, D:Reusability, E:Reusability, F:Reusability, G:Reusability, H:Reusability, I:Reusability, J:Reusability, K:Reusability, L:Reusability, M:Reusability, N:Reusability, O:Reusability, P:Reusability]: Reusability[(A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3) && (x._4 ~=~ y._4) && (x._5 ~=~ y._5) && (x._6 ~=~ y._6) && (x._7 ~=~ y._7) && (x._8 ~=~ y._8) && (x._9 ~=~ y._9) && (x._10 ~=~ y._10) && (x._11 ~=~ y._11) && (x._12 ~=~ y._12) && (x._13 ~=~ y._13) && (x._14 ~=~ y._14) && (x._15 ~=~ y._15) && (x._16 ~=~ y._16))
implicit def tuple17[A:Reusability, B:Reusability, C:Reusability, D:Reusability, E:Reusability, F:Reusability, G:Reusability, H:Reusability, I:Reusability, J:Reusability, K:Reusability, L:Reusability, M:Reusability, N:Reusability, O:Reusability, P:Reusability, Q:Reusability]: Reusability[(A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3) && (x._4 ~=~ y._4) && (x._5 ~=~ y._5) && (x._6 ~=~ y._6) && (x._7 ~=~ y._7) && (x._8 ~=~ y._8) && (x._9 ~=~ y._9) && (x._10 ~=~ y._10) && (x._11 ~=~ y._11) && (x._12 ~=~ y._12) && (x._13 ~=~ y._13) && (x._14 ~=~ y._14) && (x._15 ~=~ y._15) && (x._16 ~=~ y._16) && (x._17 ~=~ y._17))
implicit def tuple18[A:Reusability, B:Reusability, C:Reusability, D:Reusability, E:Reusability, F:Reusability, G:Reusability, H:Reusability, I:Reusability, J:Reusability, K:Reusability, L:Reusability, M:Reusability, N:Reusability, O:Reusability, P:Reusability, Q:Reusability, R:Reusability]: Reusability[(A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3) && (x._4 ~=~ y._4) && (x._5 ~=~ y._5) && (x._6 ~=~ y._6) && (x._7 ~=~ y._7) && (x._8 ~=~ y._8) && (x._9 ~=~ y._9) && (x._10 ~=~ y._10) && (x._11 ~=~ y._11) && (x._12 ~=~ y._12) && (x._13 ~=~ y._13) && (x._14 ~=~ y._14) && (x._15 ~=~ y._15) && (x._16 ~=~ y._16) && (x._17 ~=~ y._17) && (x._18 ~=~ y._18))
implicit def tuple19[A:Reusability, B:Reusability, C:Reusability, D:Reusability, E:Reusability, F:Reusability, G:Reusability, H:Reusability, I:Reusability, J:Reusability, K:Reusability, L:Reusability, M:Reusability, N:Reusability, O:Reusability, P:Reusability, Q:Reusability, R:Reusability, S:Reusability]: Reusability[(A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3) && (x._4 ~=~ y._4) && (x._5 ~=~ y._5) && (x._6 ~=~ y._6) && (x._7 ~=~ y._7) && (x._8 ~=~ y._8) && (x._9 ~=~ y._9) && (x._10 ~=~ y._10) && (x._11 ~=~ y._11) && (x._12 ~=~ y._12) && (x._13 ~=~ y._13) && (x._14 ~=~ y._14) && (x._15 ~=~ y._15) && (x._16 ~=~ y._16) && (x._17 ~=~ y._17) && (x._18 ~=~ y._18) && (x._19 ~=~ y._19))
implicit def tuple20[A:Reusability, B:Reusability, C:Reusability, D:Reusability, E:Reusability, F:Reusability, G:Reusability, H:Reusability, I:Reusability, J:Reusability, K:Reusability, L:Reusability, M:Reusability, N:Reusability, O:Reusability, P:Reusability, Q:Reusability, R:Reusability, S:Reusability, T:Reusability]: Reusability[(A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3) && (x._4 ~=~ y._4) && (x._5 ~=~ y._5) && (x._6 ~=~ y._6) && (x._7 ~=~ y._7) && (x._8 ~=~ y._8) && (x._9 ~=~ y._9) && (x._10 ~=~ y._10) && (x._11 ~=~ y._11) && (x._12 ~=~ y._12) && (x._13 ~=~ y._13) && (x._14 ~=~ y._14) && (x._15 ~=~ y._15) && (x._16 ~=~ y._16) && (x._17 ~=~ y._17) && (x._18 ~=~ y._18) && (x._19 ~=~ y._19) && (x._20 ~=~ y._20))
implicit def tuple21[A:Reusability, B:Reusability, C:Reusability, D:Reusability, E:Reusability, F:Reusability, G:Reusability, H:Reusability, I:Reusability, J:Reusability, K:Reusability, L:Reusability, M:Reusability, N:Reusability, O:Reusability, P:Reusability, Q:Reusability, R:Reusability, S:Reusability, T:Reusability, U:Reusability]: Reusability[(A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3) && (x._4 ~=~ y._4) && (x._5 ~=~ y._5) && (x._6 ~=~ y._6) && (x._7 ~=~ y._7) && (x._8 ~=~ y._8) && (x._9 ~=~ y._9) && (x._10 ~=~ y._10) && (x._11 ~=~ y._11) && (x._12 ~=~ y._12) && (x._13 ~=~ y._13) && (x._14 ~=~ y._14) && (x._15 ~=~ y._15) && (x._16 ~=~ y._16) && (x._17 ~=~ y._17) && (x._18 ~=~ y._18) && (x._19 ~=~ y._19) && (x._20 ~=~ y._20) && (x._21 ~=~ y._21))
implicit def tuple22[A:Reusability, B:Reusability, C:Reusability, D:Reusability, E:Reusability, F:Reusability, G:Reusability, H:Reusability, I:Reusability, J:Reusability, K:Reusability, L:Reusability, M:Reusability, N:Reusability, O:Reusability, P:Reusability, Q:Reusability, R:Reusability, S:Reusability, T:Reusability, U:Reusability, V:Reusability]: Reusability[(A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V)] =
apply((x,y) ⇒ (x._1 ~=~ y._1) && (x._2 ~=~ y._2) && (x._3 ~=~ y._3) && (x._4 ~=~ y._4) && (x._5 ~=~ y._5) && (x._6 ~=~ y._6) && (x._7 ~=~ y._7) && (x._8 ~=~ y._8) && (x._9 ~=~ y._9) && (x._10 ~=~ y._10) && (x._11 ~=~ y._11) && (x._12 ~=~ y._12) && (x._13 ~=~ y._13) && (x._14 ~=~ y._14) && (x._15 ~=~ y._15) && (x._16 ~=~ y._16) && (x._17 ~=~ y._17) && (x._18 ~=~ y._18) && (x._19 ~=~ y._19) && (x._20 ~=~ y._20) && (x._21 ~=~ y._21) && (x._22 ~=~ y._22))
// ===================================================================================================================
def shouldComponentUpdate[P: Reusability, C <: Children, S: Reusability, B]: ScalaComponent.Config[P, C, S, B] =
_.shouldComponentUpdatePure(i =>
(i.currentProps ~/~ i.nextProps) || (i.currentState ~/~ i.nextState))
def shouldComponentUpdateAnd[P: Reusability, C <: Children, S: Reusability, B](f: ShouldComponentUpdateResult[P, S, B] => Callback): ScalaComponent.Config[P, C, S, B] =
_.shouldComponentUpdate { i =>
val r = ShouldComponentUpdateResult(i)
f(r).map(_ => r.update)
}
def shouldComponentUpdateAndLog[P: Reusability, C <: Children, S: Reusability, B](name: String): ScalaComponent.Config[P, C, S, B] =
shouldComponentUpdateAnd(_ log name)
def shouldComponentUpdateWithOverlay[P: Reusability, C <: Children, S: Reusability, B]: ScalaComponent.Config[P, C, S, B] =
ReusabilityOverlay.install(DefaultReusabilityOverlay.defaults)
final case class ShouldComponentUpdateResult[P: Reusability, S: Reusability, B](self: ScalaComponent.Lifecycle.ShouldComponentUpdate[P, S, B]) {
def mounted = self.mountedImpure
def backend = self.backend
def propsChildren = self.propsChildren
def currentProps = self.currentProps
def currentState = self.currentState
def nextProps = self.nextProps
def nextState = self.nextState
def getDOMNode = self.getDOMNode
val updateProps: Boolean = currentProps ~/~ nextProps
val updateState: Boolean = currentState ~/~ nextState
val update : Boolean = updateProps || updateState
def log(name: String): Callback =
Callback.log(
s"""
|s"$name.shouldComponentUpdate = $update
| Props: $updateProps. [$currentProps] ⇒ [$nextProps]
| State: $updateState. [$currentState] ⇒ [$nextState]
""".stripMargin)
}
}
|
matthughes/scalajs-react
|
extra/src/main/scala/japgolly/scalajs/react/extra/Reusability.scala
|
Scala
|
apache-2.0
| 21,311
|
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.feature.image
import com.intel.analytics.bigdl.dllib.feature.transform.vision.image.ImageFeature
import com.intel.analytics.bigdl.dllib.feature.transform.vision.image.augmentation
/**
* Fill part of an image with a certain pixel value.
*
* @param startX start x, as a ratio of the image width
* @param startY start y, as a ratio of the image height
* @param endX end x, as a ratio of the image width
* @param endY end y, as a ratio of the image height
* @param value the pixel value to fill with (default 255)
*/
class ImageFiller(startX: Float, startY: Float, endX: Float, endY: Float, value: Int = 255)
extends ImageProcessing {
// Despite the "fill" semantics, this delegates to the vision library's Filler transform.
private val internalFiller = new augmentation.Filler(startX, startY, endX, endY, value)
override def apply(prev: Iterator[ImageFeature]): Iterator[ImageFeature] = {
internalFiller.apply(prev)
}
override def transformMat(feature: ImageFeature): Unit = {
internalFiller.transformMat(feature)
}
}
object ImageFiller {
def apply(startX: Float, startY: Float, endX: Float, endY: Float, value: Int = 255): ImageFiller =
new ImageFiller(startX, startY, endX, endY, value)
}
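// A minimal usage sketch (assumed, not part of the original file): the coordinates are ratios
// of the image dimensions, so this fills the top-left quadrant with the default value 255.
//
//   val fillTopLeft = ImageFiller(0.0f, 0.0f, 0.5f, 0.5f)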
|
intel-analytics/BigDL
|
scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/feature/image/ImageFiller.scala
|
Scala
|
apache-2.0
| 1,634
|
package stock
import akka.stream.scaladsl._
import akka.util.{ByteString, ByteStringBuilder}
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{FlatSpec, Matchers}
import streams.AkkaStreamsTest
import scala.concurrent.Future
class FlowGraphsSpec extends FlatSpec with AkkaStreamsTest with Matchers with ScalaFutures {
import stock.FlowGraphs._
val inCsv =
"""
|Date, Open, High, Low, Close, Volume, Adj Close
|2014-12-31, 25.3, 25.3, 24.19, 24.84, 1438600, 24.84
|2014-12-30, 26.28, 26.37, 25.29, 25.36, 766100, 25.36
|2014-12-29, 26.64, 26.8, 26.13, 26.42, 619700, 26.42
|2014-12-26, 27.25, 27.25, 26.42, 26.71, 360400, 26.71
""".stripMargin.trim
val expCsv =
"""
|Date, Open, High, Low, Close, Volume, Adj Close, Adj Close SMA(3)
|2014-12-31, 25.3, 25.3, 24.19, 24.84, 1438600, 24.84, 25.54
|2014-12-30, 26.28, 26.37, 25.29, 25.36, 766100, 25.36, 26.16
""".stripMargin.trim
"SMA flow" should "calculate properly" in {
val future = Source(1 to 5)
.map(n => (n*n).toDouble)
.via(calculate.sma(3))
.runFold(List.empty[Double])(_ :+ _)
// .runForeach(sma => println(f"$sma%1.2f")
whenReady(future)(_.map(_.formatted("%1.2f")) shouldBe List("4.67", "9.67", "16.67"))
}
"flow" should "append SMA" in {
// val inSource = SynchronousFileSource(new File("input.csv"))
// val expSource = SynchronousFileSource(new File("expected.csv"))
// val outSink = SynchronousFileSink(new File("output.csv"))
// val outSource = SynchronousFileSource(new File("output.csv"))
val inSource = Source.single(ByteString(inCsv))
val expSource = Source.single(ByteString(expCsv))
val builder = new ByteStringBuilder()
val outSink = Sink.foreach[ByteString](builder ++= _)
val outSource = Source(() => Iterator.single(builder.result()))
// println(s"inputCsv: $inputCsv")
val window = 3
val smaName = s"Adj Close SMA($window)"
val future = inSource.via(csv.parse().via(quote.appendSma(window)).via(csv.format)).runWith(outSink)
whenReady(future) { _ =>
// println(s"output: ${builder.result.utf8String}")
// Inspect row counts
val countRows = csv.parse().fold(0)((c, _) => c + 1).toMat(Sink.head)(Keep.right)
val inCount = inSource.runWith(countRows)
val outCount = outSource.runWith(countRows)
whenReady(Future.sequence(Seq(inCount, outCount))) {
case inLines :: outLines :: Nil =>
outLines shouldBe (inLines - window + 1)
}
// Inspect header fields
val inFieldsFuture = inSource.via(csv.parse()).runWith(Sink.head)
val outFieldsFuture = outSource.via(csv.parse()).runWith(Sink.head)
whenReady(Future.sequence(Seq(inFieldsFuture, outFieldsFuture))) {
case inFields :: outFields :: Nil =>
outFields shouldBe (inFields :+ smaName)
}
// Compare SMA column from output and expected
val selectSma = csv.parse().via(csv.select(smaName)).drop(1).map(_.toDouble)
val outFuture = outSource.via(selectSma).runFold(List.empty[Double])(_ :+ _)
val expFuture = expSource.via(selectSma).runFold(List.empty[Double])(_ :+ _)
whenReady(Future.sequence(Seq(outFuture, expFuture))) {
case out :: exp :: Nil =>
out should have size exp.size
out.zip(exp).foreach { case (out, exp) =>
out shouldBe exp
}
}
}
}
}
|
linearregression/akka-streams-http-intro
|
src/test/scala/stock/FlowGraphsSpec.scala
|
Scala
|
apache-2.0
| 3,506
|
/*
* Licensed to Cloudera, Inc. under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Cloudera, Inc. licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cloudera.hue.livy.spark.interactive
import com.cloudera.hue.livy.sessions.interactive.InteractiveSession
import com.cloudera.hue.livy.spark.{SparkProcess, SparkProcessBuilderFactory}
import com.cloudera.hue.livy.yarn.Client
import scala.concurrent.ExecutionContext
class InteractiveSessionYarnFactory(client: Client, processFactory: SparkProcessBuilderFactory)
extends InteractiveSessionFactory(processFactory) {
implicit def executor: ExecutionContext = ExecutionContext.global
protected override def create(id: Int,
process: SparkProcess,
request: CreateInteractiveRequest): InteractiveSession = {
InteractiveSessionYarn(client, id, process, request)
}
override def sparkBuilder(id: Int, request: CreateInteractiveRequest) = {
val builder = super.sparkBuilder(id, request)
builder.master("yarn-cluster")
builder
}
}
|
MobinRanjbar/hue
|
apps/spark/java/livy-spark/src/main/scala/com/cloudera/hue/livy/spark/interactive/InteractiveSessionYarnFactory.scala
|
Scala
|
apache-2.0
| 1,721
|
package io.questions.testdata
import io.questions.model.questionnaire.FieldName.FieldNameStringSyntax
import io.questions.model.questionnaire.QuestionText.QuestionTextSyntax
import io.questions.model.questionnaire.nodekey.NodeKey
import io.questions.model.questionnaire.{ Element, QuestionnaireNode }
object CompanyIdentifierQuestionnaire {
// don't use Components.standard as we don't want metadata etc. here
val questionnaires: List[QuestionnaireNode] = List(
QuestionnaireNode(
NodeKey.random,
"company".fieldName,
"Company".text,
Element.NonRepeatingParent(
CompanyQuestionnaire.companyName,
CompanyQuestionnaire.companyNumber
)
)
)
}
|
channingwalton/qanda
|
questionnaire/src/test/scala/io/questions/testdata/CompanyIdentifierQuestionnaire.scala
|
Scala
|
mit
| 701
|
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.iterators
import java.util
import com.google.common.primitives.{Bytes, Longs}
import org.apache.accumulo.core.data.{ByteSequence, Key, Range, Value}
import org.apache.accumulo.core.iterators.{IteratorEnvironment, SortedKeyValueIterator}
import org.apache.hadoop.io.Text
import org.junit.runner.RunWith
import org.locationtech.geomesa.curve.Z3SFC
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
@RunWith(classOf[JUnitRunner])
class Z3IteratorTest extends Specification {
"Z3Iterator" should {
sequential
val (lx, ly, lt) = (-78.0, 38, 300)
val (ux, uy, ut) = (-75.0, 40, 800)
val Z3Curve = new Z3SFC
val zmin = Z3Curve.index(lx, ly, lt)
val zmax = Z3Curve.index(ux, uy, ut)
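    // zmin and zmax are the Z3 space-filling-curve indices of the query box
    // corners; the iterator should keep only rows whose z-value falls between them.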
val srcIter = new SortedKeyValueIterator[Key, Value] {
var key: Key = null
var staged: Key = null
override def deepCopy(iteratorEnvironment: IteratorEnvironment): SortedKeyValueIterator[Key, Value] = this
override def next(): Unit = {
staged = key
key = null
}
override def getTopValue: Value = null
override def getTopKey: Key = staged
override def init(sortedKeyValueIterator: SortedKeyValueIterator[Key, Value],
map: util.Map[String, String],
iteratorEnvironment: IteratorEnvironment): Unit = {}
override def seek(range: Range, collection: util.Collection[ByteSequence], b: Boolean): Unit = {
key = null
staged = null
}
override def hasTop: Boolean = staged != null
}
val zMap = Map(0.toShort -> (zmin.z, zmax.z))
val iter = new Z3Iterator
iter.init(srcIter, Map(Z3Iterator.zKey -> Z3Iterator.mapToString(zMap)), null)
"keep in bounds values" >> {
val test1 = Z3Curve.index(-76.0, 38.5, 500)
val prefix = Array[Byte](0, 0)
val row = Bytes.concat(prefix, Longs.toByteArray(test1.z))
srcIter.key = new Key(new Text(row))
iter.next()
iter.hasTop must beTrue
}
"drop out of bounds values" >> {
val test2 = Z3Curve.index(-70.0, 38.5, 500)
val prefix = Array[Byte](0, 0)
val row = Bytes.concat(prefix, Longs.toByteArray(test2.z))
srcIter.key = new Key(new Text(row))
iter.next()
iter.hasTop must beFalse
}
}
}
|
giserh/geomesa
|
geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/iterators/Z3IteratorTest.scala
|
Scala
|
apache-2.0
| 2,846
|
package twotails
import org.scalatest.{ FlatSpec, Matchers }
import annotation.{tailrec, switch}
import java.lang.StackOverflowError
class Foo{
@mutualrec final def yo(x: Int): Int = if(0 < x) yo(x-1) else 0
}
class Foo2{
@tailrec final def yo(x: Int): Int = if(0 < x) yo(x-1) else 0
}
class Bar{
@mutualrec final def one(x: Int): Int = if(0 < x) two(x-1) else x
@mutualrec final def two(x: Int): Int = if(0 < x) one(x-2) else x
}
class Rabbit{
@mutualrec final def one(count: Int): Either[String,Int] =
if (count < 0) Left("0") else two(count)
@mutualrec final def two(count: Int): Either[String,Int] =
if (count < 0) Left("0") else if (count == 0) Right(0) else three(count)
@mutualrec final def three(count: Int): Either[String,Int] = if (count == 0) Right(0) else one(count-1)
}
class Dog{
@mutualrec final def dog1(count: Int): Int = if (count < 0) 0 else dog2(count - 1)
@mutualrec final def dog2(count: Int): Int = if (count < 0) dog1(count) else dog2(count - 1)
}
/*class Husky{
@mutualrec final def dog1(count: Int): Int = if (count == 0) count else dog2(count - 1)
@mutualrec final def dog2(count: Int): Int = try{
if (count == 0) count else dog1(count - 1)
}
catch{
case _ => 1
}
}*/
class Moose{
@mutualrec final def one(count: Int): Int = (count: @switch) match{
case 0 => 0
case 1 => three(0)
case _ => two(count-1)
}
@mutualrec final def two(count: Int): Int = (count: @switch) match{
case 0 => 0
case 1 => one(0)
case _ => three(count-1)
}
@mutualrec final def three(count: Int): Int = (count: @switch) match{
case 0 => 0
case 1 => two(0)
case _ => one(count-1)
}
}
final class Chipmunk{
@mutualrec def one(x: Int): Int = if(x < 0) throw new Exception("boom!") else two(x-1)
@mutualrec def two(x: Int): Int = if(x < 0){
throw new Exception("bam!")
}
else one(x-1)
}
class BasicTest extends FlatSpec with Matchers{
val fourHundredK = 400000
"A single argument, annotated method" should "be equivalent to a tailrec" in{
val foo = new Foo
val foo2 = new Foo2
foo.yo(fourHundredK) should equal {
foo2.yo(fourHundredK)
}
}
"Two mutually recursive, single argument, annotated methods" should "not throw a StackOverflow" in{
val b = new Bar
b.one(fourHundredK) should equal (0)
}
"Two mutually recursive, single escape, annotated methods" should "not throw a StackOverflow" in{
val dog = new Dog
dog.dog1(fourHundredK) should equal (dog.dog2(fourHundredK))
}
"A switch statement" should "not prevent mutual tail recursion" in{
val moose = new Moose
moose.one(fourHundredK) should equal (0)
}
"An exception thrown by a mutually recursive function" should "have the correct position" in{
val chip = new Chipmunk
val ex = intercept[Exception]{
chip.one(fourHundredK)
}
val ex2 = intercept[Exception]{
chip.two(fourHundredK)
}
ex.getStackTrace()(0).getLineNumber() should equal(64)
ex2.getStackTrace()(0).getLineNumber() should equal(62)
}
}
|
wheaties/TwoTails
|
core/src/test/scala/twotails/BasicTest.scala
|
Scala
|
apache-2.0
| 3,014
|
object Test extends dotty.runtime.LegacyApp {
def test1(n: Int) = {
println(s"""Bob is $n years old""")
println(f"""Bob is $n%2d years old""")
println(s"""Bob will be ${n+1} years old""")
println(f"""Bob will be ${n+1}%2d years old""")
println(s"""$n+1 = ${n+1}""")
println(f"""$n%d+1 = ${n+1}%d""")
}
def test2(f: Float) = {
println(s"""Best price: $f""")
println(f"""Best price: $f%.2f""")
println(s"""$f% discount included""")
println(f"""$f%3.2f%% discount included""")
}
test1(1)
test1(12)
test1(123)
test2(10.0f)
test2(13.345f)
}
|
yusuke2255/dotty
|
tests/pending/run/interpolationMultiline1.scala
|
Scala
|
bsd-3-clause
| 599
|
package com.nulabinc.backlog.migration.common.utils
import com.nulabinc.backlog.migration.common.dsl.ConsoleDSL
import com.nulabinc.backlog.migration.common.utils.ConsoleOut.outStream
import com.osinka.i18n.Messages
import monix.eval.Task
import monix.execution.Scheduler
import org.fusesource.jansi.Ansi
import org.fusesource.jansi.Ansi.ansi
/**
* @author
* uchida
*/
object ProgressBar extends Logging {
def progress(
name: String,
progressMessage: String,
completeMessage: String
)(implicit consoleDSL: ConsoleDSL[Task], s: Scheduler) = {
var initFlag = true
(index: Int, total: Int) => {
val message =
progressValue(name, progressMessage, completeMessage, index, total)
logger.info(message)
synchronized {
if (initFlag) {
outStream.println()
initFlag = false
}
outStream.print(
ansi.cursorLeft(999).cursorUp(1).eraseLine(Ansi.Erase.ALL)
)
outStream.flush()
outStream.println(
s" ${progressBar(index, total)}${ConsoleDSL[Task].bold(message).runSyncUnsafe()}"
)
}
}
}
private[this] def progressValue(
name: String,
progressMessage: String,
completeMessage: String,
index: Int,
total: Int
) = {
if (index == total)
Messages("message.progress.executed", completeMessage, name)
else Messages("message.progress.executed", progressMessage, name)
}
def progressBar(index: Int, total: Int): String = {
val decile = (10.0 * (index.toFloat / total.toFloat)).toInt
val rate = index.toFloat / total.toFloat
val progress = Messages("message.progress.value", index, total)
val value =
s"${progress} [${("#" * decile)}${(" " * (10 - decile))}] " + f"${100.0 * rate}%5.1f%% "
padLeft(value, 30)
}
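  // Worked example (assuming "message.progress.value" renders as "5/10"):
  // progressBar(5, 10) yields "5/10 [#####     ]  50.0% ", left-padded to 30 chars.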
private[this] def padLeft(value: String, length: Int): String = {
if (value.length < length) {
(" " * (length - value.length)) + value
} else value
}
}
|
nulab/backlog-migration-common
|
core/src/main/scala/com/nulabinc/backlog/migration/common/utils/ProgressBar.scala
|
Scala
|
mit
| 2,016
|
package io.getquill.norm.capture
import io.getquill.Spec
import io.getquill.testContext._
import io.getquill.Query
class AvoidAliasConflictSpec extends Spec {
"renames alias to avoid conflict between entities during normalization" - {
"flatMap" in {
val q = quote {
qr1.flatMap(a => qr2.flatMap(a => qr3))
}
val n = quote {
qr1.flatMap(a => qr2.flatMap(a1 => qr3))
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
"concatMap" in {
val q = quote {
qr1.flatMap(a => qr2.concatMap(a => a.s.split(" ")))
}
val n = quote {
qr1.flatMap(a => qr2.concatMap(a1 => a1.s.split(" ")))
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
"map" in {
val q = quote {
qr1.flatMap(a => qr2.map(a => a.s))
}
val n = quote {
qr1.flatMap(a => qr2.map(a1 => a1.s))
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
"filter" in {
val q = quote {
qr1.flatMap(a => qr2.filter(a => a.i == 1))
}
val n = quote {
qr1.flatMap(a => qr2.filter(a1 => a1.i == 1))
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
"sortBy" in {
val q = quote {
qr1.flatMap(a => qr2.sortBy(a => a.s))
}
val n = quote {
qr1.flatMap(a => qr2.sortBy(a1 => a1.s))
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
"groupBy" in {
val q = quote {
qr1.flatMap(a => qr2.groupBy(a => a.s))
}
val n = quote {
qr1.flatMap(a => qr2.groupBy(a1 => a1.s))
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
"outer join" - {
"both sides" in {
val q = quote {
for {
a <- qr1
b <- qr1
c <- qr1.leftJoin(qr2).on((a, b) => a.s == b.s)
} yield {
(a, b, c)
}
}
val n = quote {
for {
a <- qr1
b <- qr1
c <- qr1.leftJoin(qr2).on((a1, b1) => a1.s == b1.s)
} yield {
(a, b, c)
}
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
"left" in {
val q = quote {
for {
a <- qr1
c <- qr1.leftJoin(qr2).on((a, b) => a.s == b.s)
} yield {
(a, c)
}
}
val n = quote {
for {
a <- qr1
c <- qr1.leftJoin(qr2).on((a1, b) => a1.s == b.s)
} yield {
(a, c)
}
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
"right" in {
val q = quote {
for {
b <- qr1
c <- qr1.leftJoin(qr2).on((a, b) => a.s == b.s)
} yield {
(b, c)
}
}
val n = quote {
for {
b <- qr1
c <- qr1.leftJoin(qr2).on((a, b1) => a.s == b1.s)
} yield {
(b, c)
}
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
"nested" in {
val q = quote {
qr1.map(t => t.i).leftJoin(qr2.map(t => t.i)).on((a, b) => a == b)
}
val n = quote {
qr1.map(t => t.i).leftJoin(qr2.map(t1 => t1.i)).on((a, b) => a == b)
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
"nested unaliased" in {
val q = quote {
for {
a <- qr1.nested.groupBy(a => a.i).map(t => (t._1, t._2.map(v => v.i).sum))
b <- qr1.nested.groupBy(a => a.i).map(t => (t._1, t._2.map(v => v.i).sum))
} yield {
(a, b)
}
}
val n = quote {
for {
a <- qr1.nested.groupBy(a => a.i).map(t => (t._1, t._2.map(v => v.i).sum))
b <- qr1.nested.groupBy(a1 => a1.i).map(t1 => (t1._1, t1._2.map(v1 => v1.i).sum))
} yield {
(a, b)
}
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
"multiple" in {
val q = quote {
qr1.leftJoin(qr2).on((a, b) => a.i == b.i)
.leftJoin(qr1).on((a, b) => a._2.forall(v => v.i == b.i))
.map(t => 1)
}
val n = quote {
qr1.leftJoin(qr2).on((a, b) => a.i == b.i)
.leftJoin(qr1).on((a1, b1) => a1._2.forall(v => v.i == b1.i))
.map(t => 1)
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
}
}
"considers infix as unaliased" in {
val i = quote {
infix"$qr1".as[Query[TestEntity]]
}
val q = quote {
i.flatMap(a => qr2.flatMap(a => qr3))
}
val n = quote {
i.flatMap(a => qr2.flatMap(a1 => qr3))
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
"takes in consideration the aliases already defined" - {
"flatMap" in {
val q = quote {
qr1.flatMap(a => qr2.flatMap(a => qr3))
}
val n = quote {
qr1.flatMap(a => qr2.flatMap(a1 => qr3))
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
"map" in {
val q = quote {
qr1.map(a => a.s).flatMap(s => qr2.map(a => a.s))
}
val n = quote {
qr1.map(a => a.s).flatMap(s => qr2.map(a1 => a1.s))
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
"filter" in {
val q = quote {
qr1.filter(a => a.s == "s").flatMap(s => qr2.map(a => a.s))
}
val n = quote {
qr1.filter(a => a.s == "s").flatMap(s => qr2.map(a1 => a1.s))
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
"sortBy" in {
val q = quote {
qr1.sortBy(a => a.s).flatMap(s => qr2.map(a => a.s))
}
val n = quote {
qr1.sortBy(a => a.s).flatMap(s => qr2.map(a1 => a1.s))
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
"outer join" - {
"left" in {
val q = quote {
qr1.fullJoin(qr2.filter(a => a.i == 1)).on((b, c) => b.s == c.s).flatMap(d => qr2.map(b => b.s))
}
val n = quote {
qr1.fullJoin(qr2.filter(a => a.i == 1)).on((b, c) => b.s == c.s).flatMap(d => qr2.map(b1 => b1.s))
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
"right" in {
val q = quote {
qr1.filter(a => a.i == 1).fullJoin(qr2).on((b, c) => b.s == c.s).flatMap(d => qr2.map(c => c.s))
}
val n = quote {
qr1.filter(a => a.i == 1).fullJoin(qr2).on((b, c) => b.s == c.s).flatMap(d => qr2.map(c1 => c1.s))
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
"both" in {
val q = quote {
qr1.fullJoin(qr2).on((a, b) => a.s == b.s).flatMap(c => qr1.flatMap(a => qr2.map(b => b.s)))
}
val n = quote {
qr1.fullJoin(qr2).on((a, b) => a.s == b.s).flatMap(c => qr1.flatMap(a1 => qr2.map(b1 => b1.s)))
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
}
"join + filter" in {
val q = quote {
qr1.filter(x1 => x1.i == 1)
.join(qr2.filter(x1 => x1.i == 1))
.on((a, b) => a.i == b.i)
}
val n = quote {
qr1.filter(x1 => x1.i == 1)
.join(qr2.filter(x11 => x11.i == 1))
.on((a, b) => a.i == b.i)
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
}
"handles many alias conflicts" in {
val q = quote {
qr1.flatMap(a => qr2.flatMap(a => qr2.flatMap(a => qr1)))
}
val n = quote {
qr1.flatMap(a => qr2.flatMap(a1 => qr2.flatMap(a2 => qr1)))
}
AvoidAliasConflict(q.ast) mustEqual n.ast
}
"doesn't change the query if it doesn't have conflicts" in {
val q = quote {
qr1.flatMap(a => qr2.sortBy(b => b.s).filter(c => c.s == "s1")).flatMap(d => qr3.map(e => e.s))
}
AvoidAliasConflict(q.ast) mustEqual q.ast
}
}
|
getquill/quill
|
quill-core/src/test/scala/io/getquill/norm/capture/AvoidAliasConflictSpec.scala
|
Scala
|
apache-2.0
| 7,808
|
package mesosphere.marathon
package api.v2
import javax.servlet.http.HttpServletRequest
import javax.ws.rs.core.{ Context, MediaType, Response }
import javax.ws.rs.{ Consumes, GET, Path, Produces }
import com.google.inject.Inject
import mesosphere.chaos.http.HttpConf
import mesosphere.marathon.api.{ AuthResource, MarathonMediaType }
import mesosphere.marathon.core.election.ElectionService
import mesosphere.marathon.plugin.auth._
import mesosphere.marathon.storage.repository.FrameworkIdRepository
import mesosphere.util.state.MesosLeaderInfo
import play.api.libs.json.Json
@Path("v2/info")
@Consumes(Array(MediaType.APPLICATION_JSON))
class InfoResource @Inject() (
mesosLeaderInfo: MesosLeaderInfo,
frameworkIdRepository: FrameworkIdRepository,
electionService: ElectionService,
val authenticator: Authenticator,
val authorizer: Authorizer,
protected val config: MarathonConf with HttpConf
) extends AuthResource {
// Marathon configurations
private[this] lazy val marathonConfigValues = Json.obj(
"master" -> config.mesosMaster.get,
"failover_timeout" -> config.mesosFailoverTimeout.get,
"framework_name" -> config.frameworkName.get,
"ha" -> config.highlyAvailable.get,
"checkpoint" -> config.checkpoint.get,
"local_port_min" -> config.localPortMin.get,
"local_port_max" -> config.localPortMax.get,
"executor" -> config.defaultExecutor.get,
"hostname" -> config.hostname.get,
"webui_url" -> config.webuiUrl.get,
"mesos_role" -> config.mesosRole.get,
"task_launch_timeout" -> config.taskLaunchTimeout.get,
"task_reservation_timeout" -> config.taskReservationTimeout.get,
"reconciliation_initial_delay" -> config.reconciliationInitialDelay.get,
"reconciliation_interval" -> config.reconciliationInterval.get,
"mesos_user" -> config.mesosUser.get,
"leader_proxy_connection_timeout_ms" -> config.leaderProxyConnectionTimeout.get,
"leader_proxy_read_timeout_ms" -> config.leaderProxyReadTimeout.get,
"features" -> config.availableFeatures
)
// ZooKeeper configurations
private[this] lazy val zookeeperConfigValues = Json.obj(
"zk" -> s"zk://${config.zkHosts}${config.zkPath}",
"zk_timeout" -> config.zooKeeperTimeout(),
"zk_connection_timeout" -> config.zooKeeperConnectionTimeout(),
"zk_session_timeout" -> config.zooKeeperSessionTimeout(),
"zk_max_versions" -> config.maxVersions()
)
private[this] lazy val httpConfigValues = Json.obj(
"http_port" -> config.httpPort.get,
"https_port" -> config.httpsPort.get
)
@GET
@Produces(Array(MarathonMediaType.PREFERRED_APPLICATION_JSON))
def index(@Context req: HttpServletRequest): Response = authenticated(req) { implicit identity =>
withAuthorization(ViewResource, AuthorizedResource.SystemConfig) {
val mesosLeaderUiUrl = Json.obj("mesos_leader_ui_url" -> mesosLeaderInfo.currentLeaderUrl)
Response.ok(
jsonObjString(
"name" -> BuildInfo.name,
"version" -> BuildInfo.version,
"buildref" -> BuildInfo.buildref,
"elected" -> electionService.isLeader,
"leader" -> electionService.leaderHostPort,
"frameworkId" -> result(frameworkIdRepository.get()).map(_.id),
"marathon_config" -> (marathonConfigValues ++ mesosLeaderUiUrl),
"zookeeper_config" -> zookeeperConfigValues,
"http_config" -> httpConfigValues)).build()
}
}
}
|
Caerostris/marathon
|
src/main/scala/mesosphere/marathon/api/v2/InfoResource.scala
|
Scala
|
apache-2.0
| 3,441
|
package zzz.akka.avionics
import akka.actor.{FSM, Actor, ActorRef}
import scala.concurrent.duration._
object FlyingBehaviour {
import ControlSurfaces._
// The states governing behavioural transitions
sealed trait State
case object Idle extends State
case object Flying extends State
case object PreparingToFly extends State
// Helper classes to hold course data
case class CourseTarget(altitude: Double, heading: Float,
byMillis: Long)
case class CourseStatus(altitude: Double, heading: Float,
headingSinceMS: Long,
altitudeSinceMS: Long)
// We're going to allow the FSM to vary the behaviour that
// calculates the control changes using this function
// definition
type Calculator = (CourseTarget, CourseStatus) => Any
// The Data that our FlyingBehaviour can hold
sealed trait Data
case object Uninitialized extends Data
  // This is the 'real' data. We're going to stay entirely
  // immutable and, in doing so, we're going to encapsulate
  // all of the changing state data inside this class
case class FlightData(controls: ActorRef,
elevCalc: Calculator,
bankCalc: Calculator,
target: CourseTarget,
status: CourseStatus) extends Data
// Someone can tell the FlyingBehaviour to fly
case class Fly(target: CourseTarget)
def currentMS = System.currentTimeMillis
// Calculates the amount of elevator change we need to
// make and returns it
def calcElevator(target: CourseTarget,
status: CourseStatus): Any = {
val alt = (target.altitude - status.altitude).toFloat
val dur = target.byMillis - status.altitudeSinceMS
if (alt < 0) StickForward((alt / dur) * -1)
else StickBack(alt / dur)
}
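  // Worked example: a target 1000 altitude units above the current position,
  // to be reached within 10,000 ms, yields StickBack(0.1); the same deficit
  // downwards yields StickForward(0.1).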
// Calculates the amount of bank change we need to make
// and returns it
def calcAilerons(target: CourseTarget,
status: CourseStatus): Any = {
import scala.math.{abs, signum}
val diff = target.heading - status.heading
val dur = target.byMillis - status.headingSinceMS
val amount = if (abs(diff) < 180) diff
else signum(diff) * (abs(diff) - 360f)
if (amount > 0) StickRight(amount / dur)
else StickLeft((amount / dur) * -1)
}
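  // Worked example: turning from heading 350 to heading 10 gives diff = -340,
  // which wraps to amount = 20 (the short way around), i.e. StickRight(20 / dur).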
case class NewElevatorCalculator(f: Calculator)
case class NewBankCalculator(f: Calculator)
}
class FlyingBehaviour(plane: ActorRef,
heading: ActorRef,
altimeter: ActorRef) extends Actor
with FSM[FlyingBehaviour.State, FlyingBehaviour.Data] {
import FSM._
import FlyingBehaviour._
import Pilots._
import Plane._
import Altimeter._
import HeadingIndicator._
import EventSource._
case object Adjust
// Sets up the initial values for state and data in the FSM
startWith(Idle, Uninitialized)
// Adjusts the plane's heading and altitude according to
// calculations It also returns the new FlightData to be
// passed to the next state
def adjust(flightData: FlightData): FlightData = {
val FlightData(c, elevCalc, bankCalc, t, s) = flightData
c ! elevCalc(t, s)
c ! bankCalc(t, s)
flightData
}
when(Idle) {
case Event(Fly(target), _) =>
goto(PreparingToFly) using FlightData(
context.system.deadLetters,
calcElevator,
calcAilerons,
target,
CourseStatus(-1, -1, 0, 0))
}
onTransition {
case Idle -> PreparingToFly =>
plane ! GiveMeControl
heading ! RegisterListener(self)
altimeter ! RegisterListener(self)
}
def prepComplete(data: Data): Boolean = {
data match {
      case FlightData(c, _, _, _, s) =>
        !c.isTerminated &&
          s.heading != -1f && s.altitude != -1f
case _ =>
false
}
}
when (PreparingToFly, stateTimeout = 5.seconds)(transform {
case Event(HeadingUpdate(head), d: FlightData) =>
stay using d.copy(status =
d.status.copy(heading = head,
headingSinceMS = currentMS))
case Event(AltitudeUpdate(alt), d: FlightData) =>
stay using d.copy(status =
d.status.copy(altitude = alt,
altitudeSinceMS = currentMS))
case Event(Controls(ctrls), d: FlightData) =>
stay using d.copy(controls = ctrls)
case Event(StateTimeout, _) =>
plane ! LostControl
goto (Idle)
} using {
case s if prepComplete(s.stateData) =>
s.copy(stateName = Flying)
})
onTransition {
case PreparingToFly -> Flying =>
setTimer("Adjustment", Adjust, 200.milliseconds,
repeat = true)
}
when(Flying) {
case Event(AltitudeUpdate(alt), d: FlightData) =>
stay using d.copy(status =
d.status.copy(altitude = alt,
altitudeSinceMS = currentMS))
case Event(HeadingUpdate(head), d: FlightData) =>
stay using d.copy(status =
d.status.copy(heading = head,
headingSinceMS = currentMS))
case Event(Adjust, flightData: FlightData) =>
stay using adjust(flightData)
case Event(NewBankCalculator(f), d: FlightData) =>
stay using d.copy(bankCalc = f)
case Event(NewElevatorCalculator(f), d: FlightData) =>
stay using d.copy(elevCalc = f)
}
onTransition {
case Flying -> _ =>
cancelTimer("Adjustment")
}
onTransition {
case _ -> Idle =>
heading ! UnregisterListener(self)
altimeter ! UnregisterListener(self)
}
whenUnhandled {
case Event(RelinquishControl, _) =>
goto(Idle)
}
initialize
}
|
kevyin/akka-book-wyatt
|
src/main/scala/zzz/akka/avionics/FlyingBehaviour.scala
|
Scala
|
mit
| 5,626
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.api.scala
import com.esotericsoftware.kryo.Serializer
import org.apache.flink.annotation.{Experimental, Internal, Public, PublicEvolving}
import org.apache.flink.api.common.RuntimeExecutionMode
import org.apache.flink.api.common.eventtime.WatermarkStrategy
import org.apache.flink.api.common.io.{FileInputFormat, FilePathFilter, InputFormat}
import org.apache.flink.api.common.restartstrategy.RestartStrategies.RestartStrategyConfiguration
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.connector.source.{Source, SourceSplit}
import org.apache.flink.api.connector.source.lib.NumberSequenceSource
import org.apache.flink.api.java.typeutils.ResultTypeQueryable
import org.apache.flink.api.java.typeutils.runtime.kryo.KryoSerializer
import org.apache.flink.api.scala.ClosureCleaner
import org.apache.flink.configuration.{Configuration, ReadableConfig}
import org.apache.flink.core.execution.{JobClient, JobListener}
import org.apache.flink.runtime.state.StateBackend
import org.apache.flink.streaming.api.environment.{StreamExecutionEnvironment => JavaEnv}
import org.apache.flink.streaming.api.functions.source.SourceFunction.SourceContext
import org.apache.flink.streaming.api.functions.source._
import org.apache.flink.streaming.api.{CheckpointingMode, TimeCharacteristic}
import org.apache.flink.util.SplittableIterator
import _root_.scala.language.implicitConversions
import scala.collection.JavaConverters._
@Public
class StreamExecutionEnvironment(javaEnv: JavaEnv) {
/**
* @return the wrapped Java environment
*/
def getJavaEnv: JavaEnv = javaEnv
/**
* Gets the config object.
*/
def getConfig = javaEnv.getConfig
/**
* Gets cache files.
*/
def getCachedFiles = javaEnv.getCachedFiles
/**
* Gets the config JobListeners.
*/
@PublicEvolving
def getJobListeners = javaEnv.getJobListeners
/**
* Sets the parallelism for operations executed through this environment.
* Setting a parallelism of x here will cause all operators (such as join, map, reduce) to run
* with x parallel instances. This value can be overridden by specific operations using
* [[DataStream#setParallelism(int)]].
*/
def setParallelism(parallelism: Int): Unit = {
javaEnv.setParallelism(parallelism)
}
/**
* Sets the runtime execution mode for the application (see [[RuntimeExecutionMode]]).
* This is equivalent to setting the "execution.runtime-mode" in your application's
* configuration file.
*
   * We recommend users NOT to use this method but to set the "execution.runtime-mode"
   * using the command line when submitting the application. Keeping the application code
* configuration-free allows for more flexibility as the same application will be able to
* be executed in any execution mode.
*
* @param executionMode the desired execution mode.
* @return The execution environment of your application.
*/
@PublicEvolving
def setRuntimeMode(executionMode: RuntimeExecutionMode): StreamExecutionEnvironment = {
javaEnv.setRuntimeMode(executionMode)
this
}
/**
* Sets the maximum degree of parallelism defined for the program.
* The maximum degree of parallelism specifies the upper limit for dynamic scaling. It also
* defines the number of key groups used for partitioned state.
**/
def setMaxParallelism(maxParallelism: Int): Unit = {
javaEnv.setMaxParallelism(maxParallelism)
}
/**
* Returns the default parallelism for this execution environment. Note that this
* value can be overridden by individual operations using [[DataStream#setParallelism(int)]]
*/
def getParallelism = javaEnv.getParallelism
/**
* Returns the maximum degree of parallelism defined for the program.
*
* The maximum degree of parallelism specifies the upper limit for dynamic scaling. It also
* defines the number of key groups used for partitioned state.
*
*/
def getMaxParallelism = javaEnv.getMaxParallelism
/**
* Sets the maximum time frequency (milliseconds) for the flushing of the
* output buffers. By default the output buffers flush frequently to provide
* low latency and to aid smooth developer experience. Setting the parameter
* can result in three logical modes:
*
* <ul>
   * <li>A positive integer triggers flushing periodically at that interval (in milliseconds)</li>
* <li>0 triggers flushing after every record thus minimizing latency</li>
* <li>-1 triggers flushing only when the output buffer is full thus maximizing throughput</li>
* </ul>
*/
def setBufferTimeout(timeoutMillis: Long): StreamExecutionEnvironment = {
javaEnv.setBufferTimeout(timeoutMillis)
this
}
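  // Hedged usage sketch of the three modes above (`env` stands for this environment):
  //   env.setBufferTimeout(100)  // flush at most every 100 ms
  //   env.setBufferTimeout(0)    // flush after every record (lowest latency)
  //   env.setBufferTimeout(-1)   // flush only when buffers are full (highest throughput)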
/**
* Gets the default buffer timeout set for this environment
*/
def getBufferTimeout = javaEnv.getBufferTimeout
/**
* Disables operator chaining for streaming operators. Operator chaining
   * allows non-shuffle operations to be co-located in the same thread, fully
* avoiding serialization and de-serialization.
*
*/
@PublicEvolving
def disableOperatorChaining(): StreamExecutionEnvironment = {
javaEnv.disableOperatorChaining()
this
}
// ------------------------------------------------------------------------
// Checkpointing Settings
// ------------------------------------------------------------------------
/**
* Gets the checkpoint config, which defines values like checkpoint interval, delay between
* checkpoints, etc.
*/
def getCheckpointConfig = javaEnv.getCheckpointConfig()
/**
* Enables checkpointing for the streaming job. The distributed state of the streaming
* dataflow will be periodically snapshotted. In case of a failure, the streaming
* dataflow will be restarted from the latest completed checkpoint.
*
* The job draws checkpoints periodically, in the given interval. The state will be
* stored in the configured state backend.
*
   * NOTE: Checkpointing iterative streaming dataflows is not properly supported at
* the moment. If the "force" parameter is set to true, the system will execute the
* job nonetheless.
*
* @param interval
* Time interval between state checkpoints in millis.
* @param mode
* The checkpointing mode, selecting between "exactly once" and "at least once" guarantees.
* @param force
* If true checkpointing will be enabled for iterative jobs as well.
*/
@deprecated
@PublicEvolving
def enableCheckpointing(interval : Long,
mode: CheckpointingMode,
force: Boolean) : StreamExecutionEnvironment = {
javaEnv.enableCheckpointing(interval, mode, force)
this
}
/**
* Enables checkpointing for the streaming job. The distributed state of the streaming
* dataflow will be periodically snapshotted. In case of a failure, the streaming
* dataflow will be restarted from the latest completed checkpoint.
*
* The job draws checkpoints periodically, in the given interval. The system uses the
* given [[CheckpointingMode]] for the checkpointing ("exactly once" vs "at least once").
* The state will be stored in the configured state backend.
*
   * NOTE: Checkpointing iterative streaming dataflows is not properly supported at
   * the moment. For that reason, iterative jobs will not be started if
   * checkpointing is enabled. To override this mechanism, use the
* [[enableCheckpointing(long, CheckpointingMode, boolean)]] method.
*
* @param interval
* Time interval between state checkpoints in milliseconds.
* @param mode
* The checkpointing mode, selecting between "exactly once" and "at least once" guarantees.
*/
def enableCheckpointing(interval : Long,
mode: CheckpointingMode) : StreamExecutionEnvironment = {
javaEnv.enableCheckpointing(interval, mode)
this
}
/**
* Enables checkpointing for the streaming job. The distributed state of the streaming
* dataflow will be periodically snapshotted. In case of a failure, the streaming
* dataflow will be restarted from the latest completed checkpoint.
*
* The job draws checkpoints periodically, in the given interval. The program will use
* [[CheckpointingMode.EXACTLY_ONCE]] mode. The state will be stored in the
* configured state backend.
*
   * NOTE: Checkpointing iterative streaming dataflows is not properly supported at
   * the moment. For that reason, iterative jobs will not be started if
   * checkpointing is enabled. To override this mechanism, use the
* [[enableCheckpointing(long, CheckpointingMode, boolean)]] method.
*
* @param interval
* Time interval between state checkpoints in milliseconds.
*/
def enableCheckpointing(interval : Long) : StreamExecutionEnvironment = {
enableCheckpointing(interval, CheckpointingMode.EXACTLY_ONCE)
}
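  // Hedged usage sketch (illustrative interval): draw an exactly-once
  // checkpoint every 10 seconds.
  //   env.enableCheckpointing(10000)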
/**
* Method for enabling fault-tolerance. Activates monitoring and backup of streaming
   * operator states. Time interval between state checkpoints is specified in millis.
*
   * Setting this option assumes that the job is used in production; unless stated
   * otherwise by calling the [[setRestartStrategy]] method, in case of failure
   * the job will be resubmitted to the cluster indefinitely.
*/
@deprecated
@PublicEvolving
def enableCheckpointing() : StreamExecutionEnvironment = {
javaEnv.enableCheckpointing()
this
}
def getCheckpointingMode = javaEnv.getCheckpointingMode()
/**
* Sets the state backend that describes how to store and checkpoint operator state. It defines
   * both which data structures hold state during execution (for example hash tables, RocksDB,
* or other data stores) as well as where checkpointed data will be persisted.
*
* State managed by the state backend includes both keyed state that is accessible on
* [[org.apache.flink.streaming.api.datastream.KeyedStream keyed streams]], as well as
* state maintained directly by the user code that implements
* [[org.apache.flink.streaming.api.checkpoint.CheckpointedFunction CheckpointedFunction]].
*
* The [[org.apache.flink.runtime.state.memory.MemoryStateBackend]], for example,
* maintains the state in heap memory, as objects. It is lightweight without extra dependencies,
* but can checkpoint only small states (some counters).
*
* In contrast, the [[org.apache.flink.runtime.state.filesystem.FsStateBackend]]
* stores checkpoints of the state (also maintained as heap objects) in files.
* When using a replicated file system (like HDFS, S3, MapR FS, Alluxio, etc) this will guarantee
   * that state is not lost upon failures of individual nodes and that the streaming
   * program can be executed in a highly available and strongly consistent fashion.
*/
@PublicEvolving
def setStateBackend(backend: StateBackend): StreamExecutionEnvironment = {
javaEnv.setStateBackend(backend)
this
}
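  // Hedged usage sketch (illustrative URI): keep working state on the heap and
  // persist checkpoints to a replicated filesystem.
  //   env.setStateBackend(
  //     new org.apache.flink.runtime.state.filesystem.FsStateBackend(
  //       "hdfs://namenode:8020/flink/checkpoints"))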
/**
* Returns the state backend that defines how to store and checkpoint state.
*/
@PublicEvolving
def getStateBackend: StateBackend = javaEnv.getStateBackend()
/**
* Sets the restart strategy configuration. The configuration specifies which restart strategy
* will be used for the execution graph in case of a restart.
*
* @param restartStrategyConfiguration Restart strategy configuration to be set
*/
@PublicEvolving
def setRestartStrategy(restartStrategyConfiguration: RestartStrategyConfiguration): Unit = {
javaEnv.setRestartStrategy(restartStrategyConfiguration)
}
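  // Hedged usage sketch (illustrative values, assuming RestartStrategies is
  // imported): restart a failed job up to 3 times, waiting 10 s between attempts.
  //   env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 10000))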
/**
* Returns the specified restart strategy configuration.
*
* @return The restart strategy configuration to be used
*/
@PublicEvolving
def getRestartStrategy: RestartStrategyConfiguration = {
javaEnv.getRestartStrategy()
}
/**
* Sets the number of times that failed tasks are re-executed. A value of zero
* effectively disables fault tolerance. A value of "-1" indicates that the system
* default value (as defined in the configuration) should be used.
*
* @deprecated This method will be replaced by [[setRestartStrategy()]]. The
* FixedDelayRestartStrategyConfiguration contains the number of execution retries.
*/
@PublicEvolving
def setNumberOfExecutionRetries(numRetries: Int): Unit = {
javaEnv.setNumberOfExecutionRetries(numRetries)
}
/**
* Gets the number of times the system will try to re-execute failed tasks. A value
* of "-1" indicates that the system default value (as defined in the configuration)
* should be used.
*
* @deprecated This method will be replaced by [[getRestartStrategy]]. The
* FixedDelayRestartStrategyConfiguration contains the number of execution retries.
*/
@PublicEvolving
def getNumberOfExecutionRetries = javaEnv.getNumberOfExecutionRetries
// --------------------------------------------------------------------------------------------
// Registry for types and serializers
// --------------------------------------------------------------------------------------------
/**
* Adds a new Kryo default serializer to the Runtime.
* <p/>
* Note that the serializer instance must be serializable (as defined by
* java.io.Serializable), because it may be distributed to the worker nodes
* by java serialization.
*
* @param type
* The class of the types serialized with the given serializer.
* @param serializer
* The serializer to use.
*/
def addDefaultKryoSerializer[T <: Serializer[_] with Serializable](
`type`: Class[_],
serializer: T)
: Unit = {
javaEnv.addDefaultKryoSerializer(`type`, serializer)
}
/**
* Adds a new Kryo default serializer to the Runtime.
*
* @param type
* The class of the types serialized with the given serializer.
* @param serializerClass
* The class of the serializer to use.
*/
def addDefaultKryoSerializer(`type`: Class[_], serializerClass: Class[_ <: Serializer[_]]) {
javaEnv.addDefaultKryoSerializer(`type`, serializerClass)
}
/**
* Registers the given type with the serializer at the [[KryoSerializer]].
*
* Note that the serializer instance must be serializable (as defined by java.io.Serializable),
* because it may be distributed to the worker nodes by java serialization.
*/
def registerTypeWithKryoSerializer[T <: Serializer[_] with Serializable](
clazz: Class[_],
serializer: T)
: Unit = {
javaEnv.registerTypeWithKryoSerializer(clazz, serializer)
}
/**
* Registers the given type with the serializer at the [[KryoSerializer]].
*/
def registerTypeWithKryoSerializer(clazz: Class[_], serializer: Class[_ <: Serializer[_]]) {
javaEnv.registerTypeWithKryoSerializer(clazz, serializer)
}
/**
* Registers the given type with the serialization stack. If the type is eventually
* serialized as a POJO, then the type is registered with the POJO serializer. If the
* type ends up being serialized with Kryo, then it will be registered at Kryo to make
* sure that only tags are written.
*
*/
def registerType(typeClass: Class[_]) {
javaEnv.registerType(typeClass)
}
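  // Hedged usage sketch (hypothetical class): pre-register an event type so a
  // concrete serializer can be chosen for it up front.
  //   env.registerType(classOf[MyEvent])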
// --------------------------------------------------------------------------------------------
// Time characteristic
// --------------------------------------------------------------------------------------------
/**
   * Sets the time characteristic for all streams created from this environment, e.g., processing
* time, event time, or ingestion time.
*
   * If you set the characteristic to IngestionTime or EventTime this will set a default
* watermark update interval of 200 ms. If this is not applicable for your application
* you should change it using
* [[org.apache.flink.api.common.ExecutionConfig#setAutoWatermarkInterval(long)]]
*
* @param characteristic The time characteristic.
* @deprecated In Flink 1.12 the default stream time characteristic has been changed to
* [[TimeCharacteristic.EventTime]], thus you don't need to call this method for
* enabling event-time support anymore. Explicitly using processing-time windows and
* timers works in event-time mode. If you need to disable watermarks, please use
   * [[org.apache.flink.api.common.ExecutionConfig#setAutoWatermarkInterval(long)]]. If
* you are using [[TimeCharacteristic.IngestionTime]], please manually set an
* appropriate [[WatermarkStrategy]]. If you are using generic "time window"
* operations (for example [[KeyedStream.timeWindow()]] that change behaviour based
* on the time characteristic, please use equivalent operations that explicitly
* specify processing time or event time.
*/
@deprecated
@PublicEvolving
def setStreamTimeCharacteristic(characteristic: TimeCharacteristic) : Unit = {
javaEnv.setStreamTimeCharacteristic(characteristic)
}
/**
   * Gets the time characteristic.
*
* @see #setStreamTimeCharacteristic
* @return The time characteristic.
*/
@PublicEvolving
def getStreamTimeCharacteristic = javaEnv.getStreamTimeCharacteristic()
/**
* Sets all relevant options contained in the [[ReadableConfig]] such as e.g.
* [[org.apache.flink.streaming.api.environment.StreamPipelineOptions#TIME_CHARACTERISTIC]].
* It will reconfigure [[StreamExecutionEnvironment]],
* [[org.apache.flink.api.common.ExecutionConfig]] and
* [[org.apache.flink.streaming.api.environment.CheckpointConfig]].
*
* It will change the value of a setting only if a corresponding option was set in the
* `configuration`. If a key is not present, the current value of a field will remain
* untouched.
*
* @param configuration a configuration to read the values from
* @param classLoader a class loader to use when loading classes
*/
@PublicEvolving
def configure(configuration: ReadableConfig, classLoader: ClassLoader): Unit = {
javaEnv.configure(configuration, classLoader)
}
// --------------------------------------------------------------------------------------------
// Data stream creations
// --------------------------------------------------------------------------------------------
/**
* Creates a new DataStream that contains a sequence of numbers. This source is a parallel source.
* If you manually set the parallelism to `1` the emitted elements are in order.
*
* @deprecated Use [[fromSequence(long, long)]] instead to create a new data stream
* that contains [[NumberSequenceSource]].
*/
@deprecated
def generateSequence(from: Long, to: Long): DataStream[Long] = {
new DataStream[java.lang.Long](javaEnv.generateSequence(from, to))
.asInstanceOf[DataStream[Long]]
}
/**
* Creates a new data stream that contains a sequence of numbers (longs) and is useful for
* testing and for cases that just need a stream of N events of any kind.
*
* The generated source splits the sequence into as many parallel sub-sequences as there are
* parallel source readers. Each sub-sequence will be produced in order. If the parallelism is
* limited to one, the source will produce one sequence in order.
*
* This source is always bounded. For very long sequences (for example over the entire domain
* of long integer values), you may consider executing the application in a streaming manner
* because of the end bound that is pretty far away.
*
* Use [[fromSource(Source,WatermarkStrategy, String)]] together with
   * [[NumberSequenceSource]] if you require more control over the created sources. For
* example, if you want to set a [[WatermarkStrategy]].
*/
def fromSequence(from: Long, to: Long): DataStream[Long] = {
new DataStream[java.lang.Long](javaEnv.fromSequence(from, to))
.asInstanceOf[DataStream[Long]]
}
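  // Hedged usage sketch: a bounded stream of 100 longs; with parallelism 1 the
  // sequence is produced in order.
  //   val nums: DataStream[Long] = env.fromSequence(1L, 100L)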
/**
* Creates a DataStream that contains the given elements. The elements must all be of the
* same type.
*
* Note that this operation will result in a non-parallel data source, i.e. a data source with
* a parallelism of one.
*/
def fromElements[T: TypeInformation](data: T*): DataStream[T] = {
fromCollection(data)
}
/**
* Creates a DataStream from the given non-empty [[Seq]]. The elements need to be serializable
* because the framework may move the elements into the cluster if needed.
*
* Note that this operation will result in a non-parallel data source, i.e. a data source with
* a parallelism of one.
*/
def fromCollection[T: TypeInformation](data: Seq[T]): DataStream[T] = {
require(data != null, "Data must not be null.")
val typeInfo = implicitly[TypeInformation[T]]
val collection = scala.collection.JavaConversions.asJavaCollection(data)
asScalaStream(javaEnv.fromCollection(collection, typeInfo))
}
/**
* Creates a DataStream from the given [[Iterator]].
*
* Note that this operation will result in a non-parallel data source, i.e. a data source with
* a parallelism of one.
*/
def fromCollection[T: TypeInformation] (data: Iterator[T]): DataStream[T] = {
val typeInfo = implicitly[TypeInformation[T]]
asScalaStream(javaEnv.fromCollection(data.asJava, typeInfo))
}
/**
* Creates a DataStream from the given [[SplittableIterator]].
*/
def fromParallelCollection[T: TypeInformation] (data: SplittableIterator[T]):
DataStream[T] = {
val typeInfo = implicitly[TypeInformation[T]]
asScalaStream(javaEnv.fromParallelCollection(data, typeInfo))
}
/**
* Creates a DataStream that represents the Strings produced by reading the
* given file line wise. The file will be read with the system's default
* character set.
*/
def readTextFile(filePath: String): DataStream[String] =
asScalaStream(javaEnv.readTextFile(filePath))
/**
* Creates a data stream that represents the Strings produced by reading the given file
* line wise. The character set with the given name will be used to read the files.
*/
def readTextFile(filePath: String, charsetName: String): DataStream[String] =
asScalaStream(javaEnv.readTextFile(filePath, charsetName))
/**
* Reads the given file with the given input format. The file path should be passed
* as a URI (e.g., "file:///some/local/file" or "hdfs://host:port/file/path").
*/
def readFile[T: TypeInformation](inputFormat: FileInputFormat[T], filePath: String):
DataStream[T] =
asScalaStream(javaEnv.readFile(inputFormat, filePath))
/**
   * Creates a DataStream that contains the contents of files created while the
   * system watches the given path. The files will be read with the system's
   * default character set. The user can configure the monitoring interval in
   * milliseconds and the way file modifications are handled. By default it
   * checks only for new files every 100 milliseconds.
*
*/
@Deprecated
def readFileStream(StreamPath: String, intervalMillis: Long = 100,
watchType: FileMonitoringFunction.WatchType =
FileMonitoringFunction.WatchType.ONLY_NEW_FILES): DataStream[String] =
asScalaStream(javaEnv.readFileStream(StreamPath, intervalMillis, watchType))
/**
* Reads the contents of the user-specified path based on the given [[FileInputFormat]].
* Depending on the provided [[FileProcessingMode]].
*
* @param inputFormat
* The input format used to create the data stream
* @param filePath
* The path of the file, as a URI (e.g., "file:///some/local/file" or
* "hdfs://host:port/file/path")
* @param watchType
* The mode in which the source should operate, i.e. monitor path and react
* to new data, or process once and exit
* @param interval
* In the case of periodic path monitoring, this specifies the interval (in millis)
* between consecutive path scans
* @param filter
* The files to be excluded from the processing
* @return The data stream that represents the data read from the given file
* @deprecated Use [[FileInputFormat#setFilesFilter(FilePathFilter)]] to set a filter and
* [[StreamExecutionEnvironment#readFile(FileInputFormat, String, FileProcessingMode, long)]]
*/
@PublicEvolving
@Deprecated
def readFile[T: TypeInformation](
inputFormat: FileInputFormat[T],
filePath: String,
watchType: FileProcessingMode,
interval: Long,
filter: FilePathFilter): DataStream[T] = {
asScalaStream(javaEnv.readFile(inputFormat, filePath, watchType, interval, filter))
}
/**
* Reads the contents of the user-specified path based on the given [[FileInputFormat]].
* Depending on the provided [[FileProcessingMode]], the source
* may periodically monitor (every `interval` ms) the path for new data
* ([[FileProcessingMode.PROCESS_CONTINUOUSLY]]), or process
   * the data currently in the path once and exit
* ([[FileProcessingMode.PROCESS_ONCE]]). In addition,
* if the path contains files not to be processed, the user can specify a custom
* [[FilePathFilter]]. As a default implementation you can use
* [[FilePathFilter.createDefaultFilter()]].
*
* ** NOTES ON CHECKPOINTING: ** If the `watchType` is set to
* [[FileProcessingMode#PROCESS_ONCE]], the source monitors the path ** once **,
* creates the [[org.apache.flink.core.fs.FileInputSplit FileInputSplits]]
* to be processed, forwards them to the downstream
* [[ContinuousFileReaderOperator readers]] to read the actual data,
* and exits, without waiting for the readers to finish reading. This
* implies that no more checkpoint barriers are going to be forwarded
* after the source exits, thus having no checkpoints after that point.
*
* @param inputFormat
* The input format used to create the data stream
* @param filePath
* The path of the file, as a URI (e.g., "file:///some/local/file" or
* "hdfs://host:port/file/path")
* @param watchType
* The mode in which the source should operate, i.e. monitor path and react
* to new data, or process once and exit
* @param interval
* In the case of periodic path monitoring, this specifies the interval (in millis)
* between consecutive path scans
* @return The data stream that represents the data read from the given file
*/
@PublicEvolving
def readFile[T: TypeInformation](
inputFormat: FileInputFormat[T],
filePath: String,
watchType: FileProcessingMode,
interval: Long): DataStream[T] = {
val typeInfo = implicitly[TypeInformation[T]]
asScalaStream(javaEnv.readFile(inputFormat, filePath, watchType, interval, typeInfo))
}
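  // Hedged usage sketch (hypothetical path): rescan the directory every second
  // and keep streaming newly appearing files.
  //   val fmt = new org.apache.flink.api.java.io.TextInputFormat(
  //     new org.apache.flink.core.fs.Path("file:///tmp/in"))
  //   env.readFile(fmt, "file:///tmp/in", FileProcessingMode.PROCESS_CONTINUOUSLY, 1000)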
/**
   * Creates a new DataStream that contains the strings received infinitely
   * from a socket. Received strings are decoded using the system's default
   * character set. The maximum retry interval is specified in seconds; in case
   * of a temporary service outage, reconnection is attempted every second.
*/
@PublicEvolving
  def socketTextStream(hostname: String, port: Int, delimiter: Char = '\n', maxRetry: Long = 0):
    DataStream[String] =
    asScalaStream(javaEnv.socketTextStream(hostname, port, delimiter, maxRetry))
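  // Hedged usage sketch: stream newline-delimited text from a local socket
  // (e.g. one fed by `nc -lk 9999`).
  //   val lines: DataStream[String] = env.socketTextStream("localhost", 9999)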
/**
* Generic method to create an input data stream with a specific input format.
* Since all data streams need specific information about their types, this method needs to
* determine the type of the data produced by the input format. It will attempt to determine the
* data type by reflection, unless the input format implements the ResultTypeQueryable interface.
*/
@PublicEvolving
def createInput[T: TypeInformation](inputFormat: InputFormat[T, _]): DataStream[T] =
if (inputFormat.isInstanceOf[ResultTypeQueryable[_]]) {
asScalaStream(javaEnv.createInput(inputFormat))
} else {
asScalaStream(javaEnv.createInput(inputFormat, implicitly[TypeInformation[T]]))
}
/**
* Create a DataStream using a user defined source function for arbitrary
* source functionality. By default sources have a parallelism of 1.
* To enable parallel execution, the user defined source should implement
* ParallelSourceFunction or extend RichParallelSourceFunction.
* In these cases the resulting source will have the parallelism of the environment.
* To change this afterwards call DataStreamSource.setParallelism(int)
*
*/
def addSource[T: TypeInformation](function: SourceFunction[T]): DataStream[T] = {
require(function != null, "Function must not be null.")
val cleanFun = scalaClean(function)
val typeInfo = implicitly[TypeInformation[T]]
asScalaStream(javaEnv.addSource(cleanFun, typeInfo))
}
/**
* Create a DataStream using a user defined source function for arbitrary
* source functionality.
*/
def addSource[T: TypeInformation](function: SourceContext[T] => Unit): DataStream[T] = {
require(function != null, "Function must not be null.")
val sourceFunction = new SourceFunction[T] {
val cleanFun = scalaClean(function)
override def run(ctx: SourceContext[T]) {
cleanFun(ctx)
}
override def cancel() = {}
}
addSource(sourceFunction)
}
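  // Hedged usage sketch: a one-shot, non-parallel source built from a function
  // literal (assumes the scala API implicits for TypeInformation are in scope).
  //   val letters: DataStream[String] = env.addSource[String] { ctx =>
  //     Seq("a", "b", "c").foreach(ctx.collect)
  //   }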
/**
* Create a DataStream using a [[Source]].
*/
@Experimental
def fromSource[T: TypeInformation](
source: Source[T, _ <: SourceSplit, _],
watermarkStrategy: WatermarkStrategy[T],
sourceName: String): DataStream[T] = {
val typeInfo = implicitly[TypeInformation[T]]
asScalaStream(javaEnv.fromSource(source, watermarkStrategy, sourceName, typeInfo))
}
/**
* Triggers the program execution. The environment will execute all parts of
* the program that have resulted in a "sink" operation. Sink operations are
* for example printing results or forwarding them to a message queue.
*
* The program execution will be logged and displayed with a generated
* default name.
*
* @return The result of the job execution, containing elapsed time and accumulators.
*/
def execute() = javaEnv.execute()
/**
* Triggers the program execution. The environment will execute all parts of
* the program that have resulted in a "sink" operation. Sink operations are
* for example printing results or forwarding them to a message queue.
*
* The program execution will be logged and displayed with the provided name.
*
* @return The result of the job execution, containing elapsed time and accumulators.
*/
def execute(jobName: String) = javaEnv.execute(jobName)
/**
* Register a [[JobListener]] in this environment. The [[JobListener]] will be
* notified on specific job status changed.
*/
@PublicEvolving
def registerJobListener(jobListener: JobListener): Unit = {
javaEnv.registerJobListener(jobListener)
}
/**
* Clear all registered [[JobListener]]s.
*/
@PublicEvolving def clearJobListeners(): Unit = {
javaEnv.clearJobListeners()
}
/**
* Triggers the program execution asynchronously. The environment will execute all parts of
* the program that have resulted in a "sink" operation. Sink operations are
* for example printing results or forwarding them to a message queue.
*
* The program execution will be logged and displayed with a generated
* default name.
*
* <b>ATTENTION:</b> The caller of this method is responsible for managing the lifecycle
* of the returned [[JobClient]]. This means calling [[JobClient#close()]] at the end of
   * its usage. Otherwise, there may be resource leaks depending on the JobClient
* implementation.
*
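 * A sketch of the expected lifecycle:
 * {{{
 *   val client = env.executeAsync()
 *   try {
 *     println(s"Submitted job ${client.getJobID}")
 *   } finally {
 *     client.close()
 *   }
 * }}}
 *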
* @return A [[JobClient]] that can be used to communicate with the submitted job,
 *         completed once submission has succeeded.
*/
@PublicEvolving
def executeAsync(): JobClient = javaEnv.executeAsync()
/**
* Triggers the program execution asynchronously. The environment will execute all parts of
* the program that have resulted in a "sink" operation. Sink operations are
* for example printing results or forwarding them to a message queue.
*
* The program execution will be logged and displayed with the provided name.
*
 * <b>ATTENTION:</b> The caller of this method is responsible for managing the lifecycle
 * of the returned [[JobClient]]. This means calling [[JobClient#close()]] at the end of
 * its usage. Otherwise, there may be resource leaks depending on the JobClient
 * implementation.
*
* @return A [[JobClient]] that can be used to communicate with the submitted job,
 *         completed once submission has succeeded.
*/
@PublicEvolving
def executeAsync(jobName: String): JobClient = javaEnv.executeAsync(jobName)
/**
* Creates the plan with which the system will execute the program, and
* returns it as a String using a JSON representation of the execution data
 * flow graph. Note that this needs to be called before the plan is
 * executed.
*/
def getExecutionPlan = javaEnv.getExecutionPlan
/**
* Getter of the [[org.apache.flink.streaming.api.graph.StreamGraph]] of the streaming job.
* This call clears previously registered
* [[org.apache.flink.api.dag.Transformation transformations]].
*
* @return The StreamGraph representing the transformations
*/
@Internal
def getStreamGraph = javaEnv.getStreamGraph
/**
* Getter of the [[org.apache.flink.streaming.api.graph.StreamGraph]] of the streaming job.
* This call clears previously registered
* [[org.apache.flink.api.dag.Transformation transformations]].
*
* @param jobName Desired name of the job
* @return The StreamGraph representing the transformations
*/
@Internal
def getStreamGraph(jobName: String) = javaEnv.getStreamGraph(jobName)
/**
* Getter of the [[org.apache.flink.streaming.api.graph.StreamGraph]] of the streaming job
* with the option to clear previously registered
 * [[org.apache.flink.api.dag.Transformation transformations]]. Clearing the transformations
 * makes it possible, for example, to avoid re-executing the same operations
 * when calling [[execute()]] multiple times.
*
* @param jobName Desired name of the job
* @param clearTransformations Whether or not to clear previously registered transformations
* @return The StreamGraph representing the transformations
*/
@Internal
def getStreamGraph(jobName: String, clearTransformations: Boolean) =
javaEnv.getStreamGraph(jobName, clearTransformations)
/**
* Getter of the wrapped [[org.apache.flink.streaming.api.environment.StreamExecutionEnvironment]]
*
 * @return The wrapped Java StreamExecutionEnvironment
*/
@Internal
def getWrappedStreamExecutionEnvironment = javaEnv
/**
* Returns a "closure-cleaned" version of the given function. Cleans only if closure cleaning
* is not disabled in the [[org.apache.flink.api.common.ExecutionConfig]]
*/
private[flink] def scalaClean[F <: AnyRef](f: F): F = {
if (getConfig.isClosureCleanerEnabled) {
ClosureCleaner.clean(f, true, getConfig.getClosureCleanerLevel)
} else {
ClosureCleaner.ensureSerializable(f)
}
f
}
/**
* Registers a file at the distributed cache under the given name. The file will be accessible
* from any user-defined function in the (distributed) runtime under a local path. Files
* may be local files (which will be distributed via BlobServer), or files in a distributed file
* system. The runtime will copy the files temporarily to a local cache, if needed.
*
* The [[org.apache.flink.api.common.functions.RuntimeContext]] can be obtained inside UDFs
* via [[org.apache.flink.api.common.functions.RichFunction#getRuntimeContext()]] and
 * provides access to the [[org.apache.flink.api.common.cache.DistributedCache]] via
* [[org.apache.flink.api.common.functions.RuntimeContext#getDistributedCache()]].
*
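 * For example (path and name are illustrative):
 * {{{
 *   env.registerCachedFile("hdfs:///cache/model.bin", "model")
 *   // later, inside a RichFunction:
 *   //   val file = getRuntimeContext.getDistributedCache.getFile("model")
 * }}}
 *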
* @param filePath The path of the file, as a URI (e.g. "file:///some/path" or
* "hdfs://host:port/and/path")
* @param name The name under which the file is registered.
*/
def registerCachedFile(filePath: String, name: String): Unit = {
javaEnv.registerCachedFile(filePath, name)
}
/**
* Registers a file at the distributed cache under the given name. The file will be accessible
* from any user-defined function in the (distributed) runtime under a local path. Files
* may be local files (which will be distributed via BlobServer), or files in a distributed file
* system. The runtime will copy the files temporarily to a local cache, if needed.
*
* The [[org.apache.flink.api.common.functions.RuntimeContext]] can be obtained inside UDFs
* via [[org.apache.flink.api.common.functions.RichFunction#getRuntimeContext()]] and
 * provides access to the [[org.apache.flink.api.common.cache.DistributedCache]] via
* [[org.apache.flink.api.common.functions.RuntimeContext#getDistributedCache()]].
*
* @param filePath The path of the file, as a URI (e.g. "file:///some/path" or
* "hdfs://host:port/and/path")
* @param name The name under which the file is registered.
* @param executable flag indicating whether the file should be executable
*/
def registerCachedFile(filePath: String, name: String, executable: Boolean): Unit = {
javaEnv.registerCachedFile(filePath, name, executable)
}
/**
* Returns whether Unaligned Checkpoints are enabled.
*/
def isUnalignedCheckpointsEnabled: Boolean = javaEnv.isUnalignedCheckpointsEnabled
/**
* Returns whether Unaligned Checkpoints are force-enabled.
*/
def isForceUnalignedCheckpoints: Boolean = javaEnv.isForceUnalignedCheckpoints
}
object StreamExecutionEnvironment {
/**
* Sets the default parallelism that will be used for the local execution
* environment created by [[createLocalEnvironment()]].
*
* @param parallelism The default parallelism to use for local execution.
*/
@PublicEvolving
def setDefaultLocalParallelism(parallelism: Int) : Unit =
JavaEnv.setDefaultLocalParallelism(parallelism)
/**
* Gets the default parallelism that will be used for the local execution environment created by
* [[createLocalEnvironment()]].
*/
@PublicEvolving
def getDefaultLocalParallelism: Int = JavaEnv.getDefaultLocalParallelism
// --------------------------------------------------------------------------
// context environment
// --------------------------------------------------------------------------
/**
* Creates an execution environment that represents the context in which the program is
* currently executed. If the program is invoked standalone, this method returns a local
* execution environment. If the program is invoked from within the command line client
* to be submitted to a cluster, this method returns the execution environment of this cluster.
*/
def getExecutionEnvironment: StreamExecutionEnvironment = {
new StreamExecutionEnvironment(JavaEnv.getExecutionEnvironment)
}
// --------------------------------------------------------------------------
// local environment
// --------------------------------------------------------------------------
/**
* Creates a local execution environment. The local execution environment will run the
* program in a multi-threaded fashion in the same JVM as the environment was created in.
*
 * This method sets the environment's default parallelism to the given parameter, which
* defaults to the value set via [[setDefaultLocalParallelism(Int)]].
*/
def createLocalEnvironment(parallelism: Int = JavaEnv.getDefaultLocalParallelism):
StreamExecutionEnvironment = {
new StreamExecutionEnvironment(JavaEnv.createLocalEnvironment(parallelism))
}
/**
* Creates a local execution environment. The local execution environment will run the
* program in a multi-threaded fashion in the same JVM as the environment was created in.
*
* @param parallelism The parallelism for the local environment.
* @param configuration Pass a custom configuration into the cluster.
*/
def createLocalEnvironment(parallelism: Int, configuration: Configuration):
StreamExecutionEnvironment = {
new StreamExecutionEnvironment(JavaEnv.createLocalEnvironment(parallelism, configuration))
}
/**
* Creates a [[StreamExecutionEnvironment]] for local program execution that also starts the
* web monitoring UI.
*
* The local execution environment will run the program in a multi-threaded fashion in
* the same JVM as the environment was created in. It will use the parallelism specified in the
* parameter.
*
* If the configuration key 'rest.port' was set in the configuration, that particular
* port will be used for the web UI. Otherwise, the default port (8081) will be used.
*
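 * For example:
 * {{{
 *   val conf = new Configuration()
 *   conf.setInteger("rest.port", 8082) // instead of the default 8081
 *   val env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(conf)
 * }}}
 *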
* @param config optional config for the local execution
* @return The created StreamExecutionEnvironment
*/
@PublicEvolving
def createLocalEnvironmentWithWebUI(config: Configuration = null): StreamExecutionEnvironment = {
val conf: Configuration = if (config == null) new Configuration() else config
new StreamExecutionEnvironment(JavaEnv.createLocalEnvironmentWithWebUI(conf))
}
// --------------------------------------------------------------------------
// remote environment
// --------------------------------------------------------------------------
/**
* Creates a remote execution environment. The remote environment sends (parts of) the program to
* a cluster for execution. Note that all file paths used in the program must be accessible from
* the cluster. The execution will use the cluster's default parallelism, unless the
* parallelism is set explicitly via [[StreamExecutionEnvironment.setParallelism()]].
*
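 * For example (host, port, and jar path are illustrative):
 * {{{
 *   val env = StreamExecutionEnvironment.createRemoteEnvironment(
 *     "jobmanager.example.com", 6123, "target/my-job.jar")
 * }}}
 *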
* @param host The host name or address of the master (JobManager),
* where the program should be executed.
* @param port The port of the master (JobManager), where the program should be executed.
 * @param jarFiles The JAR files with code that needs to be shipped to the cluster. If the
 *                 program uses user-defined functions, user-defined input formats, or any
 *                 libraries, those must be provided in the JAR files.
*/
def createRemoteEnvironment(host: String, port: Int, jarFiles: String*):
StreamExecutionEnvironment = {
new StreamExecutionEnvironment(JavaEnv.createRemoteEnvironment(host, port, jarFiles: _*))
}
/**
* Creates a remote execution environment. The remote environment sends (parts of) the program
* to a cluster for execution. Note that all file paths used in the program must be accessible
* from the cluster. The execution will use the specified parallelism.
*
* @param host The host name or address of the master (JobManager),
* where the program should be executed.
* @param port The port of the master (JobManager), where the program should be executed.
* @param parallelism The parallelism to use during the execution.
 * @param jarFiles The JAR files with code that needs to be shipped to the cluster. If the
 *                 program uses user-defined functions, user-defined input formats, or any
 *                 libraries, those must be provided in the JAR files.
*/
def createRemoteEnvironment(
host: String,
port: Int,
parallelism: Int,
jarFiles: String*): StreamExecutionEnvironment = {
val javaEnv = JavaEnv.createRemoteEnvironment(host, port, jarFiles: _*)
javaEnv.setParallelism(parallelism)
new StreamExecutionEnvironment(javaEnv)
}
}
|
aljoscha/flink
|
flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/StreamExecutionEnvironment.scala
|
Scala
|
apache-2.0
| 45,075
|
package aecor.schedule
import java.time.{ Clock => _, _ }
import java.util.UUID
import aecor.data._
import aecor.runtime.KeyValueStore
import aecor.runtime.akkapersistence._
import aecor.runtime.akkapersistence.readside.JournalEntry
import aecor.schedule.process.{
DefaultScheduleEventJournal,
PeriodicProcessRuntime,
ScheduleProcess
}
import aecor.util.Clock
import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.Materializer
import akka.stream.scaladsl.Source
import cats.effect.{ ContextShift, Effect }
import cats.implicits._
import com.datastax.driver.core.utils.UUIDs
import scala.concurrent.duration._
trait Schedule[F[_]] {
def addScheduleEntry(scheduleName: String,
entryId: String,
correlationId: String,
dueDate: LocalDateTime): F[Unit]
def committableScheduleEvents(
scheduleName: String,
consumerId: ConsumerId
): Source[Committable[F, JournalEntry[UUID, ScheduleBucketId, ScheduleEvent]], NotUsed]
}
object Schedule {
final case class ScheduleSettings(bucketLength: FiniteDuration,
refreshInterval: FiniteDuration,
eventualConsistencyDelay: FiniteDuration,
consumerId: ConsumerId)
def start[F[_]: Effect: ContextShift](
entityName: String,
dayZero: LocalDate,
clock: Clock[F],
repository: ScheduleEntryRepository[F],
offsetStore: KeyValueStore[F, TagConsumer, UUID],
settings: ScheduleSettings = ScheduleSettings(
1.day,
10.seconds,
40.seconds,
ConsumerId("io.aecor.schedule.ScheduleProcess")
)
)(implicit system: ActorSystem, materializer: Materializer): F[Schedule[F]] = {
val eventTag = EventTag(entityName)
val runtime = AkkaPersistenceRuntime(system, CassandraJournalAdapter(system))
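// The journal offset store keys by time-based UUIDs; adapt it to
// LocalDateTime by converting through the UUID's embedded unix timestamp.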
def uuidToLocalDateTime(zoneId: ZoneId): KeyValueStore[F, TagConsumer, LocalDateTime] =
offsetStore.imap(
uuid => LocalDateTime.ofInstant(Instant.ofEpochMilli(UUIDs.unixTimestamp(uuid)), zoneId)
)(value => UUIDs.startOf(value.atZone(zoneId).toInstant.toEpochMilli))
def deployBuckets =
runtime
.deploy(
entityName,
DefaultScheduleBucket.behavior(clock.zonedDateTime),
Tagging.const[ScheduleBucketId](eventTag)
)
def startProcess(buckets: ScheduleBucketId => ScheduleBucket[F]) = clock.zone.flatMap { zone =>
val journal =
DefaultScheduleEventJournal[F](
consumerId = settings.consumerId,
parallelism = 8,
aggregateJournal = runtime.journal[ScheduleBucketId, ScheduleEvent].committable(offsetStore),
eventTag = eventTag
)
val process = ScheduleProcess(
journal = journal,
dayZero = dayZero,
consumerId = settings.consumerId,
offsetStore = uuidToLocalDateTime(zone),
eventualConsistencyDelay = settings.eventualConsistencyDelay,
repository = repository,
buckets = buckets,
clock = clock.localDateTime,
parallelism = 8
)
PeriodicProcessRuntime(
name = entityName,
tickInterval = settings.refreshInterval,
processCycle = process
).run(system)
}
def createSchedule(buckets: ScheduleBucketId => ScheduleBucket[F]): Schedule[F] =
new DefaultSchedule(
clock,
buckets,
settings.bucketLength,
runtime.journal[ScheduleBucketId, ScheduleEvent].committable(offsetStore),
eventTag
)
for {
buckets <- deployBuckets
_ <- startProcess(buckets)
} yield createSchedule(buckets)
}
}
|
notxcain/aecor
|
modules/schedule/src/main/scala/aecor/schedule/Schedule.scala
|
Scala
|
mit
| 3,708
|
package com.gh.helper.config
import com.typesafe.config.ConfigFactory
import util.Try
/**
* Holds service configuration settings.
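 *
 * A matching `application.conf` might look like this (values illustrative;
 * each key falls back to the default given in the corresponding field below):
 * {{{
 *   service.host = "0.0.0.0"
 *   service.port = 8080
 *   db.host = "localhost"
 *   db.port = 3306
 *   db.name = "rest"
 *   db.user = "app"
 *   db.password = "secret"
 * }}}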
*/
trait Configuration {
/**
* Application config object.
*/
val config = ConfigFactory.load()
/** Host name/address to start service on. */
lazy val serviceHost = Try(config.getString("service.host")).getOrElse("localhost")
/** Port to start service on. */
lazy val servicePort = Try(config.getInt("service.port")).getOrElse(8080)
/** Database host name/address. */
lazy val dbHost = Try(config.getString("db.host")).getOrElse("localhost")
/** Database host port number. */
lazy val dbPort = Try(config.getInt("db.port")).getOrElse(3306)
/** Service database name. */
lazy val dbName = Try(config.getString("db.name")).getOrElse("rest")
/** User name used to access database. */
lazy val dbUser = Try(config.getString("db.user")).toOption.orNull
/** Password for specified user and database. */
lazy val dbPassword = Try(config.getString("db.password")).toOption.orNull
}
|
saetar/grocery-backend
|
src/main/scala/com/gh/helper/config/Configuration.scala
|
Scala
|
unlicense
| 1,053
|
package slogger.services.processing.aggregation.aggregators.onefield
import slogger.services.processing.aggregation.Aggregator
import play.api.libs.iteratee.Enumerator
import play.api.libs.json.JsObject
import play.api.libs.iteratee.Iteratee
import scala.concurrent.ExecutionContext
import play.api.libs.json.JsArray
import play.api.libs.json.JsValue
import scala.concurrent.Future
import slogger.services.processing.aggregation.aggregators.AggregatorUtils
import slogger.model.processing.Slice
import slogger.model.processing.SliceResult
import slogger.utils.IterateeUtils
import play.api.libs.json.Json
import play.api.libs.json.Format
import slogger.services.processing.aggregation.aggregators.FoldAggregator
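/**
 * Computes the average of a numeric field by folding a (count, sum) pair over
 * each slice. Averages themselves cannot be merged across slices, but their
 * (count, sum) pairs can simply be added, which is why the raw pair is kept
 * in `meta`: e.g. (2, 10) + (3, 20) = (5, 30), an overall average of 6.
 */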
class AverageAggregator(config: JsObject) extends FoldAggregator[AverageAggregator.TmpRez] {
import AverageAggregator._
val cfg = config.as[Config]
val resultKey = "[AVERAGE]"
override def name = "SimpleAverageAggregator"
//Slice aggregation
protected def foldInitState = TmpRez()
protected def folder(state: TmpRez, json: JsObject) = {
val values = AggregatorUtils.numberValues(cfg.extractField(json))
if (values.isEmpty) {
state
} else {
TmpRez(
count = state.count + values.length,
sum = state.sum + values.reduce(_ + _)
)
}
}
protected def resultMapper(slice: Slice, tmpRez: TmpRez) =
SliceResult(
slice,
results = Map(resultKey -> safeDiv(tmpRez.sum, tmpRez.count)),
meta = Json.toJson(tmpRez).as[JsObject]
)
def safeDiv(bd: BigDecimal, div: BigDecimal): BigDecimal = if (div == 0) 0 else bd / div
//Total aggregation
override def isSliceMergingSupported = true
override def mergeSlices(slices: Seq[SliceResult]): Map[String, BigDecimal] = {
val tmpRez = slices.map(_.meta.as[TmpRez]).reduce(_ + _)
Map(resultKey -> safeDiv(tmpRez.sum, tmpRez.count))
}
}
object AverageAggregator {
case class TmpRez(count: BigDecimal = 0, sum: BigDecimal = 0) {
def + (other: TmpRez) = this.copy(
count = this.count + other.count,
sum = this.sum + other.sum
)
}
implicit val TmpRezFormat: Format[TmpRez] = Json.format[TmpRez]
}
|
IvanMykhailov/stats-logger
|
core/src/main/scala/slogger/services/processing/aggregation/aggregators/onefield/AverageAggregator.scala
|
Scala
|
mit
| 2,273
|
/*
* fsc-analytics
*/
package de.pc2.dedup.analysis
import scala.collection.mutable.Map
trait FrequencyEstimator[T] {
def add(value: T)
def apply(value: T): Int
def getMaxValue(): Option[T]
}
class MapFrequencyEstimator[T] extends FrequencyEstimator[T] {
val map = Map.empty[T, Int]
def add(value: T) {
if (map.contains(value)) {
map(value) = map(value) + 1
} else {
map(value) = 1
}
}
def apply(value: T): Int = {
if (map.contains(value)) {
map(value)
} else {
0
}
}
def getMaxValue(): Option[T] = {
if (map.isEmpty) {
None
} else {
val m = map.maxBy(_._2)
Some(m._1)
}
}
}
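/**
 * Misra-Gries heavy-hitters sketch with at most `k` counters. When a value
 * arrives while all `k` slots are occupied, every counter is decremented
 * (zeroed counters are evicted) and the new value is dropped for that step.
 * Any value occurring more than n/(k+1) times in a stream of length n is
 * guaranteed to keep a (possibly underestimating) counter.
 *
 * Usage sketch:
 * {{{
 *   val mg = new MisraGries[String](2)
 *   Seq("a", "b", "a", "c", "a").foreach(mg.add)
 *   mg.getMaxValue() // Some("a")
 * }}}
 */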
class MisraGries[T](val k: Int) extends FrequencyEstimator[T] {
if (k <= 0) {
throw new IllegalArgumentException("k")
}
val map = Map.empty[T, Int]
override def toString =
"[k %s, %s]".format(k, map)
def add(value: T) {
if (map.contains(value))
map(value) = map(value) + 1
else if (map.size < k)
map(value) = 1
else {
for (key <- map.keys.toList) {
val newValue = map(key) - 1
if (newValue > 0)
map(key) = newValue
else
map -= key
}
}
}
def apply(value: T): Int =
if (map.contains(value))
map(value)
else
0
def getMaxValue(): Option[T] = {
if (map.isEmpty) {
None
} else {
val m = map.maxBy(_._2)
Some(m._1)
}
}
}
|
dmeister/fs-c-analytics
|
src/main/scala/de/pc2/dedup/analysis/MisraGries.scala
|
Scala
|
bsd-3-clause
| 1,459
|
// This benchmark is important because it shows a nice counterexample.
import stainless.annotation._
import stainless.collection._
import stainless.lang._
import stainless.lang.Option._
import stainless.lang.StaticChecks._
import stainless.proof.check
object TreeImmutMapGenericExample {
case class Cell[T](var value: T) extends AnyHeapRef
case class Leaf[T](data: Cell[T]) extends Tree[T]
case class Branch[T](left: Tree[T], right: Tree[T]) extends Tree[T]
sealed abstract class Tree[T] {
@ghost def repr: Set[AnyHeapRef] =
this match {
case Leaf(data) => Set[AnyHeapRef](data)
case Branch(left, right) => left.repr ++ right.repr
}
@opaque
def tmap(f: T => T): Unit = {
reads(repr)
modifies(repr)
decreases(this)
this match {
case Leaf(data) =>
data.value = f(data.value)
case Branch(left, right) =>
left.tmap(f)
right.tmap(f)
}
}
}
/* gives counterexample, such as:
[info] [Warning ] Found counter-example:
[info] [Warning ] t: Tree[T] -> Leaf[Object](HeapRef(12))
[info] [Warning ] c: HeapRef -> HeapRef(12)
[info] [Warning ] y: T -> Open(10)
[info] [Warning ] heap0: Map[HeapRef, Object] -> {HeapRef(12) -> Cell(Cell[Object](Open(7))), * -> SignedBitvector32(2)}
*/
def test[T](t: Tree[T], c: Cell[T], y: T) = {
reads(t.repr ++ Set[AnyHeapRef](c))
modifies(t.repr)
t.tmap(x => y)
} ensuring(_ => c.value == old(c.value))
}
|
epfl-lara/stainless
|
frontends/benchmarks/full-imperative/invalid/OpaqueEffectsGeneric.scala
|
Scala
|
apache-2.0
| 1,549
|
package com.twitter.app
import scala.util.control.NoStackTrace
/**
* An exception that represents collected errors which occurred on close of the app.
*
* @note When execution of the `App#nonExitingMain` throws a [[CloseException]], the app will not
* attempt to call `App#close()` again in the `App#exitOnError(t: Throwable)` function since
 *       this Exception is assumed to be a result of already calling `App#close()`.
*
* @note Collected exceptions which occurred during closing are added as "suppressed" exceptions.
*
* @see [[https://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html#getSuppressed()]]
*/
final class CloseException private[twitter] (message: String)
extends Exception(message)
with NoStackTrace
|
twitter/util
|
util-app/src/main/scala/com/twitter/app/CloseException.scala
|
Scala
|
apache-2.0
| 753
|
/**
* MIT License
*
* Copyright (c) 2016-2018 James Sherwood-Jones <james.sherwoodjones@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.jsherz.luskydive.itest.dao
import com.jsherz.luskydive.core.CommitteeMember
import com.jsherz.luskydive.dao._
import com.jsherz.luskydive.itest.util.{TestDatabase, TestUtil, Util}
import com.jsherz.luskydive.json.CommitteeMembersJsonSupport.CommitteeMemberFormat
import org.scalatest.concurrent.ScalaFutures._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpec}
import scalaz.-\/
import scala.concurrent.ExecutionContext.Implicits.global
class AuthDaoSpec extends WordSpec with Matchers with BeforeAndAfterAll {
implicit val patienceConfig: PatienceConfig = TestUtil.defaultPatienceConfig
private var dao: AuthDao = _
private var cleanup: () => Unit = _
"AuthDao#login" should {
"return Left(error.invalidEmailPass) if the e-mail does not exist" in {
val fakeEmails = Seq(
"amorissette@yahoo.com",
"nborer@yahoo.com",
"irolfson@gmail.com"
)
fakeEmails.foreach { email =>
val result = dao.login(email, email + "_Hunter2").futureValue
result shouldBe a[-\/[_]]
result.leftMap { error =>
error shouldEqual "error.invalidEmailPass"
}
}
}
"return Left(error.invalidEmailPass) if the e-mail is valid but the password is wrong" in {
val validEmails = Seq(
"davisjohn@hodge-davis.com",
"butlerlawrence@hotmail.com",
"jlambert@gutierrez-lester.com"
)
validEmails.foreach { email =>
val result = dao.login(email, email.reverse).futureValue
result shouldBe a[-\/[_]]
result.leftMap { error =>
error shouldEqual "error.invalidEmailPass"
}
}
}
"return Left(error.accountLocked) if the e-mail and password are valid but the account is locked" in {
val lockedAccounts = Seq(
"solisomar@gmail.com",
"probertson@hotmail.com",
"zamorajennifer@hotmail.com"
)
lockedAccounts.foreach { email =>
val result = dao.login(email, email + "_Hunter2").futureValue
result shouldBe a[-\/[_]]
result.leftMap { error =>
error shouldEqual "error.accountLocked"
}
}
}
"return Right(committee member) if the e-mail and password are valid" in {
val result = dao.login("brittney20@robinson.info", "brittney20@robinson.info_Hunter2").futureValue
result.isRight shouldBe true
// Verify the returned committee member matches the stored fixture
result.map { cm => cm shouldEqual Util.fixture[CommitteeMember]("5c1140a2.json") }
}
}
override protected def beforeAll(): Unit = {
val TestDatabase(dbService, cleanupFn) = Util.setupGoldTestDb()
cleanup = cleanupFn
dao = new AuthDaoImpl(dbService)
}
override protected def afterAll(): Unit = cleanup()
}
|
jSherz/lsd-members
|
backend/src/it/scala/com/jsherz/luskydive/itest/dao/AuthDaoSpec.scala
|
Scala
|
mit
| 3,984
|
package feh.tec.util
import scala.concurrent.duration.Duration
trait Debugging {
outer =>
def debug: Boolean
protected implicit class DebugLogWrapper[R](r: => R){
def debugLog(f: R => String): R = {
val a = r
outer.debugLog(f(a))
a
}
def debugLog(msg: String): R = debugLog(_ => msg + ": " + r)
def debugLogElapsedTime(msg: Duration => String): R = {
val (res, time) = elapsed(r)
outer.debugLog(msg(time))
res
}
}
def debugMessagePrefix: String
protected def debugLog(a: => Any) = if(debug) println(debugMessagePrefix + a)
}
trait GlobalDebugging extends Debugging{
protected def setup: DebuggingSetup
def debug: Boolean = setup.debug
}
trait DebuggingSetup{
def debug: Boolean
}
trait GlobalDebuggingSetup extends DebuggingSetup{
private var _debug: Boolean = false
def debug: Boolean = _debug
def apply = debug
def update(d: Boolean) = _debug = d
}
trait ThreadLocalDebuggingSetup extends ThreadLocal[Boolean] with DebuggingSetup {
def debug: Boolean = get()
}
|
fehu/agent-tareas
|
agent/src/main/scala/feh/tec/util/Debugging.scala
|
Scala
|
mit
| 1,059
|
/* Copyright (C) 2015 University of Massachusetts Amherst.
This file is part of “author_coref”
http://github.com/iesl/author_coref
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package edu.umass.cs.iesl.author_coref.coreference
import cc.factorie._
import cc.factorie.app.nlp.hcoref.{DebuggableTemplate, Node, NodeVariables}
import cc.factorie.model.TupleTemplateWithStatistics3
import cc.factorie.variable.BagOfWordsVariable
import scala.reflect.ClassTag
class SizeLimitingEntityNameTemplate[Vars <: NodeVariables[Vars]](
  val firstLetterWeight: Double = 4.0,
  val fullNameWeight: Double = 4.0,
  val weight: Double = 64,
  val saturation: Double = 128.0,
  val penaltyOnNoName: Double = 2.0,
  val sizeLimit: Int = 5,
  val exceedLimitPenalty: Double = 100,
  getBag: (Vars => BagOfWordsVariable),
  bagName: String = "")(implicit ct: ClassTag[Vars], params: Parameters)
  extends TupleTemplateWithStatistics3[Node[Vars]#Exists, Node[Vars]#IsRoot, Vars]
  with DebuggableTemplate {
val name = "SizeLimitingEntityNameTemplate: %s".format(bagName)
def unroll1(exists: Node[Vars]#Exists) = Factor(exists, exists.node.isRootVar, exists.node.variables)
def unroll2(isRoot: Node[Vars]#IsRoot) = Factor(isRoot.node.existsVar, isRoot, isRoot.node.variables)
def unroll3(vars: Vars) = Factor(vars.node.existsVar, vars.node.isRootVar, vars)
override def score(exists: Node[Vars]#Exists#Value, isRoot: Node[Vars]#IsRoot#Value, vars: Vars) = {
var score = 0.0
var firstLetterMismatches = 0
var nameMismatches = 0
val bag = getBag(vars)
val uniqueEntries = bag.value.asHashMap.keySet
if (uniqueEntries.size > sizeLimit) {
score -= math.min(saturation,exceedLimitPenalty)
} else {
bag.value.asHashMap.keySet.pairs.foreach { case (tokI, tokJ) =>
if (tokI.charAt(0) != tokJ.charAt(0)) {
firstLetterMismatches += 1
}
if (tokI.length > 1 && tokJ.length > 1) {
nameMismatches += tokI editDistance tokJ
}
}
score -= math.min(saturation, firstLetterMismatches * firstLetterWeight)
score -= math.min(saturation, nameMismatches * fullNameWeight)
if (bag.size == 0 && isRoot.booleanValue) {
score -= penaltyOnNoName
}
}
report(score, weight)
score * weight
}
}
|
iesl/author_coref
|
src/main/scala/edu/umass/cs/iesl/author_coref/coreference/SizeLimitingEntityNameTemplate.scala
|
Scala
|
apache-2.0
| 2,774
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import org.apache.spark.sql.functions._
import org.apache.spark.sql.test.SharedSQLContext
/**
* A test suite to test DataFrame/SQL functionalities with complex types (i.e. array, struct, map).
*/
class DataFrameComplexTypeSuite extends QueryTest with SharedSQLContext {
import testImplicits._
test("UDF on struct") {
val f = udf((a: String) => a)
val df = sparkContext.parallelize(Seq((1, 1))).toDF("a", "b")
df.select(struct($"a").as("s")).select(f($"s.a")).collect()
}
test("UDF on named_struct") {
val f = udf((a: String) => a)
val df = sparkContext.parallelize(Seq((1, 1))).toDF("a", "b")
df.selectExpr("named_struct('a', a) s").select(f($"s.a")).collect()
}
test("UDF on array") {
val f = udf((a: String) => a)
val df = sparkContext.parallelize(Seq((1, 1))).toDF("a", "b")
df.select(array($"a").as("s")).select(f(expr("s[0]"))).collect()
}
}
|
pronix/spark
|
sql/core/src/test/scala/org/apache/spark/sql/DataFrameComplexTypeSuite.scala
|
Scala
|
apache-2.0
| 1,735
|
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// Example: Word Count Example with Assertions
// Usage:
// `sbt "runMain com.spotify.scio.examples.DebuggingWordCount
// --project=[PROJECT] --runner=DataflowRunner --zone=[ZONE]
// --input=gs://apache-beam-samples/shakespeare/kinglear.txt
// --output=gs://[BUCKET]/[PATH]/wordcount"`
package com.spotify.scio.examples
import java.util.regex.Pattern
import com.spotify.scio._
import com.spotify.scio.examples.common.ExampleData
import org.apache.beam.sdk.testing.PAssert
import org.slf4j.LoggerFactory
import scala.jdk.CollectionConverters._
object DebuggingWordCount {
// Logger is an object instance, i.e. statically initialized and thus can be used safely in an
// anonymous function without serialization issues
private val logger = LoggerFactory.getLogger(this.getClass)
def main(cmdlineArgs: Array[String]): Unit = {
// Create `ScioContext` and `Args`
val (sc, args) = ContextAndArgs(cmdlineArgs)
val filter =
Pattern.compile(args.getOrElse("filterPattern", "Flourish|stomach"))
// Create two counter metrics
val matchedWords = ScioMetrics.counter("matchedWords")
val unmatchedWords = ScioMetrics.counter("unmatchedWords")
val filteredWords = sc
.textFile(args.getOrElse("input", ExampleData.KING_LEAR))
// Split input lines, filter out empty tokens and expand into a collection of tokens
.flatMap(_.split("[^a-zA-Z']+").filter(_.nonEmpty))
// Count occurrences of each unique `String` to get `(String, Long)`
.countByValue
// Keep only the tokens that match the pattern, logging and incrementing counters
.filter { case (k, _) =>
val matched = filter.matcher(k).matches()
if (matched) {
logger.debug(s"Matched $k")
matchedWords.inc()
} else {
logger.trace(s"Did not match: $k")
unmatchedWords.inc()
}
matched
}
// Verify internal Beam `PCollection` with `PAssert`
PAssert
.that(filteredWords.internal)
.containsInAnyOrder(List(("Flourish", 3L), ("stomach", 1L)).asJava)
// Execute the pipeline and block until it finishes
val result = sc.run().waitUntilFinish()
// Retrieve metric values
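// `countByValue` emits one element per unique token, so exactly two elements
// ("Flourish" and "stomach") take the matched branch, while the text
// contributes well over 100 distinct unmatched tokens.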
require(result.counter(matchedWords).committed.get == 2)
require(result.counter(unmatchedWords).committed.get > 100)
}
}
|
spotify/scio
|
scio-examples/src/main/scala/com/spotify/scio/examples/DebuggingWordCount.scala
|
Scala
|
apache-2.0
| 2,933
|
package sclack.domain.factories
import javax.swing.ImageIcon
import sclack.domain.Map
import sclack.domain.NonPlayableCharacter
import sclack.domain.Entity
import sclack.tech.TileManager
/**
 * Factory for creating various maps that we may or may not use. I know that
 * this class looks horrible due to all the hard-coded arrays, but it is
 * acceptable for now. If this were a `good' game with things programmed
 * properly, assets would be loaded from a separate location. I don't have
 * the luxury of time to do it that way, however, so this will have to do.
*
* @author Simon Symeonidis
*/
object MapFactory {
/** Create a map with no portals */
def createSingleMap : sclack.domain.Map = {
var map : sclack.domain.Map = new sclack.domain.Map()
map.data = Array[Array[Int]](
Array(140, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 117, 117, 117, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 116, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(140, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120,
120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 140),
Array(131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131))
var npcs = createGenericNPCs
map.obstructions = createGenericObstructions
map.entities = Array[(Int, Int, NonPlayableCharacter)](
(160 , 160, npcs(0)),
(160 + 1 * 16, 160, npcs(1)),
(160 + 2 * 16, 160, npcs(2)),
(160 + 3 * 16, 160, npcs(3)),
(160 + 4 * 16, 160, npcs(4)))
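// Entity coordinates are in pixels; dividing by the 16px tile size marks
// the tile each entity stands on as an obstruction.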
for(ent <- map.entities)
map.obstructions(ent._2 / 16)(ent._1 / 16) = 1
map
}
/**
* Create generic NPCs. This is mainly for testing out speech capabilities,
* etc.
*
* @return an array of non playable characters.
*/
def createGenericNPCs : Array[NonPlayableCharacter] = {
Array[String](
"Oh, why hello there!",
"Don't mind us, we're just some random NPCs for testing",
"Hopefully we'll be included in the real game one day!",
"Actually it would be nice to see this thing be finished at some point",
"Hi, my name is Harry and I'M GOING TO KILL YOU"
).map(new NonPlayableCharacter(_,TileManager.tile("fan",110)))
}
/**
* Simple method to create an array representing where the player can and
* cannot go.
*
* @return Array representing the obstructions. 1 is for obstruction, 0 is
* for free things.
*/
def createGenericObstructions : Array[Array[Int]] = {
Array[Array[Int]](
Array[Int](1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1),
Array[Int](1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1))
}
}
|
psyomn/sclack
|
src/main/scala/domain/factories/MapFactory.scala
|
Scala
|
gpl-3.0
| 8,244
|
package mesosphere.marathon.tasks
import org.apache.mesos.Protos.TaskID
/**
* Utility functions for dealing with TaskIDs
*
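 * For example, `taskId("web", 3)` yields something like
 * `"web_3-1400000000000"` (the suffix is the creation time in millis), and
 * `appID` applied to that task ID recovers `"web"`.
 *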
* @author Tobi Knaup
*/
object TaskIDUtil {
val taskDelimiter = "_"
def taskId(appName: String, sequence: Int) = {
"%s%s%d-%d".format(appName, taskDelimiter, sequence, System.currentTimeMillis())
}
def appID(taskId: TaskID) = {
val taskIdString = taskId.getValue
taskIdString.substring(0, taskIdString.lastIndexOf(taskDelimiter))
}
}
|
MiLk/marathon
|
src/main/scala/mesosphere/marathon/tasks/TaskIDUtil.scala
|
Scala
|
apache-2.0
| 488
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import SharedHelpers.thisLineNumber
import exceptions.TestFailedException
import org.scalactic.Prettifier
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers._
class ShouldBeDefinedLogicalOrSpec extends AnyFunSpec {
private val prettifier = Prettifier.default
val fileName: String = "ShouldBeDefinedLogicalOrSpec.scala"
def wasEqualTo(left: Any, right: Any): String =
FailureMessages.wasEqualTo(prettifier, left, right)
def wasNotEqualTo(left: Any, right: Any): String =
FailureMessages.wasNotEqualTo(prettifier, left, right)
def equaled(left: Any, right: Any): String =
FailureMessages.equaled(prettifier, left, right)
def didNotEqual(left: Any, right: Any): String =
FailureMessages.didNotEqual(prettifier, left, right)
def wasNotDefined(left: Any): String =
FailureMessages.wasNotDefined(prettifier, left)
def wasDefined(left: Any): String =
FailureMessages.wasDefined(prettifier, left)
def allError(message: String, lineNumber: Int, left: Any): String = {
val messageWithIndex = UnquotedString(" " + FailureMessages.forAssertionsGenTraversableMessageWithStackDepth(prettifier, 0, UnquotedString(message), UnquotedString(fileName + ":" + lineNumber)))
FailureMessages.allShorthandFailed(prettifier, messageWithIndex, left)
}
val something = Some("Something")
val nothing = None
describe("Defined matcher") {
describe("when work with 'opt should be (defined)'") {
it("should do nothing when opt is defined") {
something should (be (defined) or be (something))
nothing should (be (defined) or be (nothing))
something should (be (defined) or be (nothing))
something should (be (something) or be (defined))
something should (be (nothing) or be (defined))
nothing should (be (nothing) or be (defined))
something should (be (defined) or equal (something))
nothing should (be (defined) or equal (nothing))
something should (be (defined) or equal (nothing))
something should (equal (something) or be (defined))
something should (equal (nothing) or be (defined))
nothing should (equal (nothing) or be (defined))
}
it("should throw TestFailedException with correct stack depth when opt is not defined") {
val caught1 = intercept[TestFailedException] {
nothing should (be (defined) or be (something))
}
assert(caught1.message === Some(wasNotDefined(nothing) + ", and " + wasNotEqualTo(nothing, something)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught2 = intercept[TestFailedException] {
nothing should (be (something) or be (defined))
}
assert(caught2.message === Some(wasNotEqualTo(nothing, something) + ", and " + wasNotDefined(nothing)))
assert(caught2.failedCodeFileName === Some(fileName))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught3 = intercept[TestFailedException] {
nothing should (be (defined) or equal (something))
}
assert(caught3.message === Some(wasNotDefined(nothing) + ", and " + didNotEqual(nothing, something)))
assert(caught3.failedCodeFileName === Some(fileName))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught4 = intercept[TestFailedException] {
nothing should (equal (something) or be (defined))
}
assert(caught4.message === Some(didNotEqual(nothing, something) + ", and " + wasNotDefined(nothing)))
assert(caught4.failedCodeFileName === Some(fileName))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
describe("when work with 'opt should not be defined'") {
it("should do nothing when opt is not defined") {
nothing should (not be defined or not be something)
something should (not be defined or not be nothing)
nothing should (not be defined or not be nothing)
nothing should (not be something or not be defined)
nothing should (not be nothing or not be defined)
something should (not be nothing or not be defined)
nothing should (not be defined or not equal something)
something should (not be defined or not equal nothing)
nothing should (not be defined or not equal nothing)
nothing should (not equal something or not be defined)
nothing should (not equal nothing or not be defined)
something should (not equal nothing or not be defined)
}
it("should throw TestFailedException with correct stack depth when opt is defined") {
val caught1 = intercept[TestFailedException] {
something should (not be defined or not be something)
}
assert(caught1.message === Some(wasDefined(something) + ", and " + wasEqualTo(something, something)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught2 = intercept[TestFailedException] {
something should (not be something or not be defined)
}
assert(caught2.message === Some(wasEqualTo(something, something) + ", and " + wasDefined(something)))
assert(caught2.failedCodeFileName === Some(fileName))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught3 = intercept[TestFailedException] {
something should (not be defined or not equal something)
}
assert(caught3.message === Some(wasDefined(something) + ", and " + equaled(something, something)))
assert(caught3.failedCodeFileName === Some(fileName))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught4 = intercept[TestFailedException] {
something should (not equal something or not be defined)
}
assert(caught4.message === Some(equaled(something, something) + ", and " + wasDefined(something)))
assert(caught4.failedCodeFileName === Some(fileName))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
describe("when work with 'all(xs) should be (defined)'") {
it("should do nothing when all(xs) is defined") {
all(List(something)) should (be (defined) or be (something))
all(List(nothing)) should (be (defined) or be (nothing))
all(List(something)) should (be (defined) or be (nothing))
all(List(something)) should (be (something) or be (defined))
all(List(something)) should (be (nothing) or be (defined))
all(List(nothing)) should (be (nothing) or be (defined))
all(List(something)) should (be (defined) or equal (something))
all(List(nothing)) should (be (defined) or equal (nothing))
all(List(something)) should (be (defined) or equal (nothing))
all(List(something)) should (equal (something) or be (defined))
all(List(something)) should (equal (nothing) or be (defined))
all(List(nothing)) should (equal (nothing) or be (defined))
}
it("should throw TestFailedException with correct stack depth when xs is not sorted") {
val left1 = List(nothing)
val caught1 = intercept[TestFailedException] {
all(left1) should (be (something) or be (defined))
}
assert(caught1.message === Some(allError(wasNotEqualTo(nothing, something) + ", and " + wasNotDefined(nothing), thisLineNumber - 2, left1)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val left2 = List(nothing)
val caught2 = intercept[TestFailedException] {
all(left2) should (be (defined) or be (something))
}
assert(caught2.message === Some(allError(wasNotDefined(nothing) + ", and " + wasNotEqualTo(nothing, something), thisLineNumber - 2, left2)))
assert(caught2.failedCodeFileName === Some(fileName))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val left3 = List(nothing)
val caught3 = intercept[TestFailedException] {
all(left3) should (equal (something) or be (defined))
}
assert(caught3.message === Some(allError(didNotEqual(nothing, something) + ", and " + wasNotDefined(nothing), thisLineNumber - 2, left3)))
assert(caught3.failedCodeFileName === Some(fileName))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val left4 = List(nothing)
val caught4 = intercept[TestFailedException] {
all(left4) should (be (defined) or equal (something))
}
assert(caught4.message === Some(allError(wasNotDefined(nothing) + ", and " + didNotEqual(nothing, something), thisLineNumber - 2, left4)))
assert(caught4.failedCodeFileName === Some(fileName))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
describe("when work with 'all(xs) should not be sorted'") {
it("should do nothing when xs is not sorted") {
all(List(nothing)) should (not be defined or not be something)
all(List(something)) should (not be defined or not be nothing)
all(List(nothing)) should (not be defined or not be nothing)
all(List(nothing)) should (not be something or not be defined)
all(List(nothing)) should (not be nothing or not be defined)
all(List(something)) should (not be nothing or not be defined)
all(List(nothing)) should (not be defined or not equal something)
all(List(something)) should (not be defined or not equal nothing)
all(List(nothing)) should (not be defined or not equal nothing)
all(List(nothing)) should (not equal something or not be defined)
all(List(nothing)) should (not equal nothing or not be defined)
all(List(something)) should (not equal nothing or not be defined)
}
it("should throw TestFailedException with correct stack depth when xs is not sorted") {
val left1 = List(something)
val caught1 = intercept[TestFailedException] {
all(left1) should (not be something or not be defined)
}
assert(caught1.message === Some(allError(wasEqualTo(something, something) + ", and " + wasDefined(something), thisLineNumber - 2, left1)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val left2 = List(something)
val caught2 = intercept[TestFailedException] {
all(left2) should (not be defined or not be something)
}
assert(caught2.message === Some(allError(wasDefined(something) + ", and " + wasEqualTo(something, something), thisLineNumber - 2, left2)))
assert(caught2.failedCodeFileName === Some(fileName))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val left3 = List(something)
val caught3 = intercept[TestFailedException] {
all(left3) should (not equal something or not be defined)
}
assert(caught3.message === Some(allError(equaled(something, something) + ", and " + wasDefined(something), thisLineNumber - 2, left3)))
assert(caught3.failedCodeFileName === Some(fileName))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val left4 = List(something)
val caught4 = intercept[TestFailedException] {
all(left4) should (not be defined or not equal something)
}
assert(caught4.message === Some(allError(wasDefined(something) + ", and " + equaled(something, something), thisLineNumber - 2, left4)))
assert(caught4.failedCodeFileName === Some(fileName))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
}
}
|
scalatest/scalatest
|
jvm/scalatest-test/src/test/scala/org/scalatest/ShouldBeDefinedLogicalOrSpec.scala
|
Scala
|
apache-2.0
| 12,971
|
package controllers.cadmin
import views._
import loom.models.admin.Admin
import controllers._
/**
*
* @author chaosky
*/
object Settings extends AbstractSettings {
def password() = AdminAction {
implicit request =>
Ok(html.admin.settings.password(passwordForm))
}
def changePassword() = AdminAction { implicit request =>
passwordForm.bindFromRequest().fold(
formWithErrors => {
BadRequest(html.admin.settings.password(formWithErrors))
},
passwords => {
val (ret, i18nMsg) = Admin.changePassword(request.aSession.admin.id
, passwords._1, passwords._2)
ret match {
case false =>
val nForm = passwordForm.withGlobalError(i18nMsg)
BadRequest(html.admin.settings.password(nForm))
case true =>
Redirect(routes.Settings.password()).flashing(
"success" -> i18nMsg
)
}
}
)
}
}
|
chaosky/loom
|
app/controllers/cadmin/Settings.scala
|
Scala
|
mit
| 951
|
package fix
package v0_7_0
import scalafix.v1._
import scala.meta._
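/**
 * Rewrites usages of the pre-0.7.0 `BigQueryClient` to the
 * `com.spotify.scio.bigquery.client.BigQuery` API introduced in 0.7.0: e.g.
 * `bq.getQueryRows(...)` becomes `bq.query.rows(...)` and `bq.createTable(...)`
 * becomes `bq.tables.create(...)`, with the required import added once per file.
 */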
class BQClientRefactoring extends SyntacticRule("BQClientRefactoring") {
private val imports =
scala.collection.mutable.ArrayBuffer.empty[(String, String)]
// Ensure the import is added at most once per source file
def addImport(p: Position, i: Importer) = {
val Importer(s) = i
val Input.VirtualFile(path, _) = p.input
val t = (s.toString, path)
if (!imports.contains(t)) {
imports += t
Patch.addGlobalImport(i)
} else Patch.empty
}
object BQDef {
def unapply(t: Tree) =
t match {
case Term.Apply(Term.Select(_, t @ Term.Name(c)), _) =>
Option((t, c))
case _ => None
}
}
def addBQImport(i: Tree) =
addImport(i.pos, importer"com.spotify.scio.bigquery.client.BigQuery")
override def fix(implicit doc: SyntacticDocument): Patch =
doc.tree.collect {
case i @ Importee.Name(Name.Indeterminate("BigQueryClient")) =>
Patch.removeImportee(i) + addBQImport(i)
case BQDef(t, "extractLocation" | "extractTables") =>
Patch.addLeft(t, "query.") + addBQImport(t)
case Term.Apply(
Term.Select(n @ Term.Name("BigQueryClient"), Term.Name("defaultInstance")),
_
) =>
Patch.replaceTree(n, "BigQuery") + addBQImport(n)
case BQDef(t, "getQuerySchema") =>
Patch.replaceTree(t, "query.schema") + addBQImport(t)
case BQDef(t, "getQueryRows") =>
Patch.replaceTree(t, "query.rows") + addBQImport(t)
case BQDef(t, "getTableSchema") =>
Patch.replaceTree(t, "tables.schema") + addBQImport(t)
case BQDef(t, "createTable") =>
Patch.replaceTree(t, "tables.create") + addBQImport(t)
case BQDef(t, "getTable") =>
Patch.replaceTree(t, "tables.table") + addBQImport(t)
case BQDef(t, "getTables") =>
Patch.replaceTree(t, "tables.tableReferences") + addBQImport(t)
case BQDef(t, "getTableRows") =>
Patch.replaceTree(t, "tables.rows") + addBQImport(t)
case ap @ BQDef(t, "loadTableFromCsv") =>
Patch.addRight(ap, ".get") + Patch.replaceTree(t, "load.csv") + addBQImport(t)
case ap @ BQDef(t, "loadTableFromJson") =>
Patch.addRight(ap, ".get") + Patch.replaceTree(t, "load.json") + addBQImport(t)
case ap @ BQDef(t, "loadTableFromAvro") =>
Patch.addRight(ap, ".get") + Patch.replaceTree(t, "load.avro") + addBQImport(t)
case BQDef(t, "exportTableAsCsv") =>
Patch.replaceTree(t, "extract.asCsv") + addBQImport(t)
case BQDef(t, "exportTableAsJson") =>
Patch.replaceTree(t, "extract.asJson") + addBQImport(t)
case BQDef(t, "exportTableAsAvro") =>
Patch.replaceTree(t, "extract.asAvro") + addBQImport(t)
case c if c.toString.contains("BigQueryClient") =>
Patch.empty
}.asPatch
}
|
spotify/scio
|
scalafix/rules/src/main/scala/fix/BQClientRefactoring.scala
|
Scala
|
apache-2.0
| 2,904
|
/*
* This file is part of BioScala
*
* BioScala is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* BioScala is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with BioScala. If not, see <http://www.gnu.org/licenses/>.
*
* (c) 2009, Jean-Luc Falcone, jean-luc.falcone@unige.ch
*
*/
import bio.seq.Sequence
import bio.sym._
import bio.sym.dna._
import org.scalatest._
class SequenceTest extends FunSuite with BeforeAndAfter {
test("Annotation") {
val symList = new SymbolList( List( A, T, C, G ) )
val seq = new Sequence( "machin", symList )
val key = "XX"
val value = "yyyyy"
val annot = seq.annotation( key ) = value
val seq2 = seq.updateAnnotation( annot )
assert( value == seq2.annotation(key) )
}
}
|
paradigmatic/BioScala
|
src/test/scala/SequenceTest.scala
|
Scala
|
gpl-3.0
| 1,213
|
package scala.util.parsing.combinator
import scala.util.parsing.input.CharArrayReader
import org.junit.Test
import org.junit.Assert.assertEquals
class JavaTokenParsersTest {
@Test
def parseDecimalNumber: Unit = {
object TestJavaTokenParsers extends JavaTokenParsers
import TestJavaTokenParsers._
assertEquals("1.1", decimalNumber(new CharArrayReader("1.1".toCharArray)).get)
assertEquals("1.", decimalNumber(new CharArrayReader("1.".toCharArray)).get)
assertEquals(".1", decimalNumber(new CharArrayReader(".1".toCharArray)).get)
// should fail to parse and we should get Failure as ParseResult
val failure = decimalNumber(new CharArrayReader("!1".toCharArray)).asInstanceOf[Failure]
assertEquals("""string matching regex `(\\d+(\\.\\d*)?|\\d*\\.\\d+)' expected but `!' found""", failure.msg)
}
@Test
def parseJavaIdent: Unit = {
object javaTokenParser extends JavaTokenParsers
import javaTokenParser._
def parseSuccess(s: String): Unit = {
val parseResult = parseAll(ident, s)
parseResult match {
case Success(r, _) => assertEquals(s, r)
case _ => sys.error(parseResult.toString)
}
}
def parseFailure(s: String, errorColPos: Int): Unit = {
val parseResult = parseAll(ident, s)
parseResult match {
case Failure(_, next) =>
val pos = next.pos
assertEquals(1, pos.line)
assertEquals(errorColPos, pos.column)
case _ => sys.error(parseResult.toString)
}
}
parseSuccess("simple")
parseSuccess("with123")
parseSuccess("with$")
parseSuccess("with\\u00f8\\u00df\\u00f6\\u00e8\\u00e6")
parseSuccess("with_")
parseSuccess("_with")
parseFailure("3start", 1)
parseFailure("-start", 1)
parseFailure("with-s", 5)
// we♥scala
parseFailure("we\\u2665scala", 3)
parseFailure("with space", 6)
}
}
|
l15k4/scala-parser-combinators
|
src/test/scala/scala/util/parsing/combinator/JavaTokenParsersTest.scala
|
Scala
|
bsd-3-clause
| 1,889
|
package ls
object Sbt {
  def lib(l: Library, cross: Boolean) =
    """ "%s" %s "%s" %% "%s" """ format(l.organization, if (cross) "%%" else "%", l.name, l.version)
  def configuration(l: Library) = {
    """libraryDependencies += %s""" format(lib(l, true))
  }
}
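// Usage sketch, assuming a Library case class with organization, name and
// version fields (not shown in this file):
//
//   Sbt.lib(Library("net.databinder", "unfiltered", "0.6.8"), cross = true)
//   // => """ "net.databinder" %% "unfiltered" % "0.6.8" """
//   Sbt.configuration(Library("net.databinder", "unfiltered", "0.6.8"))
//   // => """libraryDependencies += "net.databinder" %% "unfiltered" % "0.6.8" """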
|
softprops/ls-server
|
src/main/scala/sbt.scala
|
Scala
|
mit
| 265
|
/*
* Copyright (c) 2008, Michael Pradel
* All rights reserved. See LICENSE for details.
*/
package collaborations
import scala.roles._
import scala.collection.mutable.HashSet
import scala.collection.mutable.HashMap
import scala.collection.Set
// TODO use implicit conversions
class HiddenUniversity extends TransientCollaboration {
val studentIds = new HashMap[Integer, Student]()
var maxId = 0
protected trait Student extends Role[Person] {
var supervisor: Professor = _
}
protected object student extends RoleMapper[Person, Student] {
def createRole = new Student{}
}
protected trait Professor extends Role[Person] {
val students = new HashSet[Student]()
}
protected object professor extends RoleMapper[Person, Professor] {
def createRole = new Professor{}
}
def enroll(stud: Person) = {
maxId = maxId + 1
studentIds put (maxId, stud -: student)
maxId
}
def supervise(prof: Person, stud: Person): Unit = {
(stud -: student).supervisor = (prof -: professor)
(prof -: professor).students += (stud -: student)
}
def getStudents(prof: Person): Iterable[Person] = {
for {
student <- (prof -: professor).students
} yield student.core
}
def getStudent(id: Int): Option[Person] = {
studentIds.get(id) match {
case Some(s) => Some(s.core)
case None => None
}
}
}
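// A minimal usage sketch, assuming a Person class compatible with the roles
// library used above (all names here are illustrative):
//
//   val uni = new HiddenUniversity
//   val alice = new Person
//   val bob = new Person
//   val id = uni.enroll(alice) // alice now plays the Student role
//   uni.supervise(bob, alice) // bob plays Professor, supervising alice
//   assert(uni.getStudent(id) == Some(alice))
//   assert(uni.getStudents(bob).exists(_ == alice))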
|
tupshin/Scala-Roles
|
examples/collaborations/HiddenUniversity.scala
|
Scala
|
bsd-3-clause
| 1,370
|
/*
* Copyright 2013 Maurício Linhares
*
* Maurício Linhares licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.github.mauricio.async.db.mysql
import com.github.mauricio.async.db.util.FutureUtils.awaitFuture
import com.github.mauricio.async.db._
import com.github.mauricio.async.db.pool.{PoolConfiguration, ConnectionPool}
import com.github.mauricio.async.db.mysql.pool.MySQLConnectionFactory
import scala.Some
trait ConnectionHelper {
val createTableNumericColumns =
"""
|create temporary table numbers (
|id int auto_increment not null,
|number_tinyint tinyint not null,
|number_smallint smallint not null,
|number_mediumint mediumint not null,
|number_int int not null,
|number_bigint bigint not null,
|number_decimal decimal(9,6),
|number_float float,
|number_double double,
|primary key (id)
|)
""".stripMargin
val insertTableNumericColumns =
"""
|insert into numbers (
|number_tinyint,
|number_smallint,
|number_mediumint,
|number_int,
|number_bigint,
|number_decimal,
|number_float,
|number_double
|) values
|(-100, 32766, 8388607, 2147483647, 9223372036854775807, 450.764491, 14.7, 87650.9876)
""".stripMargin
val preparedInsertTableNumericColumns =
"""
|insert into numbers (
|number_tinyint,
|number_smallint,
|number_mediumint,
|number_int,
|number_bigint,
|number_decimal,
|number_float,
|number_double
|) values
|(?, ?, ?, ?, ?, ?, ?, ?)
""".stripMargin
val createTableTimeColumns =
"""CREATE TEMPORARY TABLE posts (
id INT NOT NULL AUTO_INCREMENT,
created_at_date DATE not null,
created_at_datetime DATETIME not null,
created_at_timestamp TIMESTAMP not null,
created_at_time TIME not null,
created_at_year YEAR not null,
primary key (id)
)"""
val insertTableTimeColumns =
"""
|insert into posts (created_at_date, created_at_datetime, created_at_timestamp, created_at_time, created_at_year)
|values ( '2038-01-19', '2013-01-19 03:14:07', '2020-01-19 03:14:07', '03:14:07', '1999' )
""".stripMargin
final val createTable = """CREATE TEMPORARY TABLE users (
id INT NOT NULL AUTO_INCREMENT ,
name VARCHAR(255) CHARACTER SET 'utf8' NOT NULL ,
PRIMARY KEY (id) );"""
final val insert = """INSERT INTO users (name) VALUES ('Maurício Aragão')"""
final val select = """SELECT * FROM users"""
def defaultConfiguration = new Configuration(
"mysql_async",
"localhost",
port = 3306,
password = Some("root"),
database = Some("mysql_async_tests"),
preparedStatementCacheSize = 10
)
def withPool[T](fn: (ConnectionPool[MySQLConnection]) => T): T = {
val factory = new MySQLConnectionFactory(this.defaultConfiguration)
val pool =
new ConnectionPool[MySQLConnection](factory, PoolConfiguration.Default)
try {
fn(pool)
} finally {
awaitFuture(pool.close)
}
}
def withConfigurablePool[T](
configuration: Configuration
)(fn: (ConnectionPool[MySQLConnection]) => T): T = {
val factory = new MySQLConnectionFactory(configuration)
val pool =
new ConnectionPool[MySQLConnection](factory, PoolConfiguration.Default)
try {
fn(pool)
} finally {
awaitFuture(pool.close)
}
}
def withConnection[T](fn: (MySQLConnection) => T): T =
withConfigurableConnection(this.defaultConfiguration)(fn)
def withConfigurableConnection[T](
configuration: Configuration
)(fn: (MySQLConnection) => T): T = {
val connection = new MySQLConnection(configuration)
try {
awaitFuture(connection.connect)
fn(connection)
} finally {
awaitFuture(connection.close)
}
}
def executeQuery(connection: Connection, query: String): QueryResult = {
awaitFuture(connection.sendQuery(query))
}
def executePreparedStatement(
connection: Connection,
query: String,
values: Any*
): QueryResult = {
awaitFuture(connection.sendPreparedStatement(query, values))
}
}
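// Example of how a spec would typically mix this helper in; the spec base
// class and test body are a hypothetical illustration, not part of this file:
//
//   class QuerySpec extends SomeSpecBase with ConnectionHelper {
//     "connection" should "create a table and insert a row" in {
//       withConnection { connection =>
//         executeQuery(connection, this.createTable)
//         executeQuery(connection, this.insert)
//         executeQuery(connection, this.select).rows.get.size === 1
//       }
//     }
//   }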
|
dripower/postgresql-async
|
mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/ConnectionHelper.scala
|
Scala
|
apache-2.0
| 4,791
|
/*******************************************************************************/
/* */
/* Copyright (C) 2017 by Max Lv <max.c.lv@gmail.com> */
/* Copyright (C) 2017 by Mygod Studio <contact-shadowsocks-android@mygod.be> */
/* */
/* This program is free software: you can redistribute it and/or modify */
/* it under the terms of the GNU General Public License as published by */
/* the Free Software Foundation, either version 3 of the License, or */
/* (at your option) any later version. */
/* */
/* This program is distributed in the hope that it will be useful, */
/* but WITHOUT ANY WARRANTY; without even the implied warranty of */
/* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the */
/* GNU General Public License for more details. */
/* */
/* You should have received a copy of the GNU General Public License */
/* along with this program. If not, see <http://www.gnu.org/licenses/>. */
/* */
/*******************************************************************************/
package com.github.shadowsocks
import java.io.File
import java.util.Locale
import java.net.InetAddress
import java.net.Inet6Address
import android.content._
import android.content.pm.PackageManager.NameNotFoundException
import android.net.VpnService
import android.os._
import android.util.Log
import com.github.shadowsocks.ShadowsocksApplication.app
import com.github.shadowsocks.acl.{Acl, AclSyncJob, Subnet}
import com.github.shadowsocks.database.Profile
import com.github.shadowsocks.utils._
import scala.collection.mutable.ArrayBuffer
class ShadowsocksVpnService extends VpnService with BaseService {
val TAG = "ShadowsocksVpnService"
val VPN_MTU = 1500
val PRIVATE_VLAN = "26.26.26.%s"
val PRIVATE_VLAN6 = "fdfe:dcba:9876::%s"
var conn: ParcelFileDescriptor = _
var vpnThread: ShadowsocksVpnThread = _
private var notification: ShadowsocksNotification = _
var sslocalProcess: GuardedProcess = _
var overtureProcess: GuardedProcess = _
var tun2socksProcess: GuardedProcess = _
override def onBind(intent: Intent): IBinder = {
val action = intent.getAction
if (VpnService.SERVICE_INTERFACE == action) {
return super.onBind(intent)
} else if (Action.SERVICE == action) {
return binder
}
null
}
override def onRevoke() {
stopRunner(stopService = true)
}
override def stopRunner(stopService: Boolean, msg: String = null) {
if (vpnThread != null) {
vpnThread.stopThread()
vpnThread = null
}
if (notification != null) notification.destroy()
    // change the state
changeState(State.STOPPING)
app.track(TAG, "stop")
// reset VPN
killProcesses()
// close connections
if (conn != null) {
conn.close()
conn = null
}
super.stopRunner(stopService, msg)
}
def killProcesses() {
if (sslocalProcess != null) {
sslocalProcess.destroy()
sslocalProcess = null
}
if (tun2socksProcess != null) {
tun2socksProcess.destroy()
tun2socksProcess = null
}
if (overtureProcess != null) {
overtureProcess.destroy()
overtureProcess = null
}
}
override def startRunner(profile: Profile) {
// ensure the VPNService is prepared
if (VpnService.prepare(this) != null) {
val i = new Intent(this, classOf[ShadowsocksRunnerActivity])
i.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK)
startActivity(i)
stopRunner(stopService = true)
return
}
super.startRunner(profile)
}
override def connect() {
super.connect()
vpnThread = new ShadowsocksVpnThread(this)
vpnThread.start()
// reset the context
killProcesses()
// Resolve the server address
if (!Utils.isNumeric(profile.host)) Utils.resolve(profile.host, enableIPv6 = true) match {
case Some(addr) => profile.host = addr
case None => throw NameNotResolvedException()
}
handleConnection()
changeState(State.CONNECTED)
if (profile.route != Acl.ALL && profile.route != Acl.CUSTOM_RULES)
AclSyncJob.schedule(profile.route)
notification = new ShadowsocksNotification(this, profile.name)
}
  /** Starts the local daemons and the VPN interface, then hands the tun fd to tun2socks. */
def handleConnection() {
startShadowsocksDaemon()
if (!profile.udpdns) {
startDnsDaemon()
}
val fd = startVpn()
if (!sendFd(fd)) throw new Exception("sendFd failed")
}
override protected def buildPluginCommandLine(): ArrayBuffer[String] = super.buildPluginCommandLine() += "-V"
def startShadowsocksDaemon() {
val cmd = ArrayBuffer[String](getApplicationInfo.nativeLibraryDir + "/libss-local.so",
"-V",
"-u",
"-b", "127.0.0.1",
"-l", profile.localPort.toString,
"-t", "600",
"-c", buildShadowsocksConfig("ss-local-vpn.conf"))
if (profile.route != Acl.ALL) {
cmd += "--acl"
cmd += Acl.getFile(profile.route).getAbsolutePath
}
if (TcpFastOpen.sendEnabled) cmd += "--fast-open"
sslocalProcess = new GuardedProcess(cmd: _*).start()
}
def startDnsDaemon() {
overtureProcess = new GuardedProcess(getApplicationInfo.nativeLibraryDir + "/liboverture.so",
"-c", buildOvertureConfig("overture-vpn.conf"), "-V")
.start()
}
def startVpn(): Int = {
val builder = new Builder()
builder
.setSession(profile.getName)
.setMtu(VPN_MTU)
.addAddress(PRIVATE_VLAN.formatLocal(Locale.ENGLISH, "1"), 24)
builder.addDnsServer("8.8.8.8") // It's fake DNS for tun2socks, not the real remote DNS
if (profile.ipv6) {
builder.addAddress(PRIVATE_VLAN6.formatLocal(Locale.ENGLISH, "1"), 126)
builder.addRoute("::", 0)
}
if (Utils.isLollipopOrAbove) {
if (profile.proxyApps) {
for (pkg <- profile.individual.split('\\n')) {
try {
if (!profile.bypass) {
builder.addAllowedApplication(pkg)
} else {
builder.addDisallowedApplication(pkg)
}
} catch {
case ex: NameNotFoundException =>
Log.e(TAG, "Invalid package name", ex)
}
}
}
}
if (profile.route == Acl.ALL || profile.route == Acl.BYPASS_CHN) {
builder.addRoute("0.0.0.0", 0)
} else {
getResources.getStringArray(R.array.bypass_private_route).foreach(cidr => {
val subnet = Subnet.fromString(cidr)
builder.addRoute(subnet.address.getHostAddress, subnet.prefixSize)
})
val addr = InetAddress.getByName(profile.remoteDns.trim)
if (addr.isInstanceOf[Inet6Address])
builder.addRoute(addr, 128)
else if (addr.isInstanceOf[InetAddress])
builder.addRoute(addr, 32)
}
conn = builder.establish()
if (conn == null) throw new NullConnectionException
val fd = conn.getFd
var cmd = ArrayBuffer[String](getApplicationInfo.nativeLibraryDir + "/libtun2socks.so",
"--netif-ipaddr", PRIVATE_VLAN.formatLocal(Locale.ENGLISH, "2"),
"--netif-netmask", "255.255.255.0",
"--socks-server-addr", "127.0.0.1:" + profile.localPort,
"--tunfd", fd.toString,
"--tunmtu", VPN_MTU.toString,
"--sock-path", "sock_path",
"--loglevel", "3")
if (profile.ipv6)
cmd += ("--netif-ip6addr", PRIVATE_VLAN6.formatLocal(Locale.ENGLISH, "2"))
cmd += "--enable-udprelay"
if (!profile.udpdns)
cmd += ("--dnsgw", "%s:%d".formatLocal(Locale.ENGLISH, PRIVATE_VLAN.formatLocal(Locale.ENGLISH, "1"),
profile.localPort + 53))
tun2socksProcess = new GuardedProcess(cmd: _*).start(() => sendFd(fd))
fd
}
def sendFd(fd: Int): Boolean = {
if (fd != -1) {
var tries = 1
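      // Linear backoff: wait 1s, 2s, 3s, 4s (roughly 10s in total) for
      // tun2socks to create the unix socket before giving up.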
while (tries < 5) {
Thread.sleep(1000 * tries)
if (JniHelper.sendFd(fd, new File(getFilesDir, "sock_path").getAbsolutePath) != -1) {
return true
}
tries += 1
}
}
false
}
}
|
weiwenqiang/GitHub
|
Linux/shadowsocks-android-master/mobile/src/main/scala/com/github/shadowsocks/ShadowsocksVpnService.scala
|
Scala
|
apache-2.0
| 8,469
|
package com.cloudray.scalapress.folder.controller.admin
import org.springframework.stereotype.Controller
import org.springframework.web.bind.annotation.{PathVariable, ModelAttribute, RequestMethod, RequestMapping}
import scala.Array
import org.springframework.beans.factory.annotation.Autowired
import com.cloudray.scalapress.section.SectionDao
import com.cloudray.scalapress.folder.section.FolderContentSection
/** @author Stephen Samuel */
@Controller
@Autowired
@RequestMapping(Array("backoffice/folder/section/content/{id}"))
class FolderContentSectionController(sectionDao: SectionDao) {
@RequestMapping(method = Array(RequestMethod.GET))
def edit(@ModelAttribute("section") section: FolderContentSection) = "admin/folder/section/content.vm"
@RequestMapping(method = Array(RequestMethod.POST))
def save(@ModelAttribute("section") section: FolderContentSection) = {
sectionDao.save(section)
edit(section)
}
@ModelAttribute("section")
def section(@PathVariable("id") id: Long): FolderContentSection =
sectionDao.find(id).asInstanceOf[FolderContentSection]
}
|
vidyacraghav/scalapress
|
src/main/scala/com/cloudray/scalapress/folder/controller/admin/FolderContentSectionController.scala
|
Scala
|
apache-2.0
| 1,092
|
package cromwell.database.sql.tables
case class SummaryStatusEntry
(
summaryTableName: String,
summarizedTableName: String,
maximumId: Long,
summaryStatusEntryId: Option[Int] = None
)
|
ohsu-comp-bio/cromwell
|
database/sql/src/main/scala/cromwell/database/sql/tables/SummaryStatusEntry.scala
|
Scala
|
bsd-3-clause
| 193
|
package unof.cv.tools
import unof.cv.base.DrawingContext
import unof.cv.base.charLib.{CMPart, CMShape, CurveTo, DrawCommand, MoveTo}
import unof.cv.utils.Transforme
import unof.cv.utils.Algebra._
object ShapeManipulator {
def drawShapeHandles(shape: CMShape, transforms: Seq[Transforme], context: DrawingContext, settings: CvSetting, selectedCommand: Int) = {
val ctx = context.ctx
val handleSize = settings.shapeHandleSize.doubleValue()
val curves = shape.commands
ctx.setTransform(1, 0, 0, 1, 0, 0)
val t = reduceTransforms(shape, transforms)
val center = t * (0, 0)
curves.zipWithIndex.foldLeft((0.0, 0.0)) {
(prev, curveindex) =>
drawCurve(curveindex._1, prev, t, curveindex._2)
}
drawCenter(center)
def drawCenter(pos: Vec) = {
val hsHalf = handleSize / 2
def drawReticula = {
ctx.beginPath();
ctx.arc(pos.x, pos.y, hsHalf, 0, 2 * math.Pi, false);
ctx.closePath()
ctx.stroke();
var start: Vec = (hsHalf / 2, 0)
var end: Vec = (hsHalf / 2 + hsHalf, 0)
(1 to 4) foreach {
i =>
val s = pos + start
val e = pos + end
ctx.beginPath()
ctx.moveTo(s.x, s.y)
ctx.lineTo(e.x, e.y)
ctx.closePath()
ctx.stroke()
start = start.halfPiRotate
end = end.halfPiRotate
}
}
ctx.strokeStyle = "black";
ctx.lineWidth = 3
drawReticula
ctx.strokeStyle = "white";
ctx.lineWidth = 1
drawReticula
}
def drawBoundingPoint(pos: Vec, selected: Boolean) = {
ctx.beginPath();
ctx.arc(pos.x, pos.y, handleSize / 2, 0, 2 * math.Pi, false);
ctx.fillStyle = "white"
ctx.globalAlpha = 0.7
if (!selected)
ctx.fill();
ctx.lineWidth = 2;
ctx.globalAlpha = 1
ctx.strokeStyle = "black";
ctx.closePath()
ctx.stroke();
if (selected) {
ctx.lineWidth = 1;
ctx.globalAlpha = 1
ctx.strokeStyle = "white";
ctx.stroke();
}
}
def drawControlHandle(boundingPoint: Vec, handlePos: Vec) = {
ctx.beginPath()
ctx.moveTo(boundingPoint.x, boundingPoint.y)
ctx.lineTo(handlePos.x, handlePos.y);
ctx.strokeStyle = "black";
ctx.lineWidth = 2
ctx.stroke()
ctx.strokeStyle = "white";
ctx.lineWidth = 1
ctx.closePath()
ctx.stroke();
ctx.fillStyle = "white"
ctx.globalAlpha = 0.7
val handleCenter = handlePos - (handleSize, handleSize) / 2
ctx.fillRect(handleCenter.x, handleCenter.y, handleSize, handleSize)
ctx.lineWidth = 2;
ctx.globalAlpha = 1
ctx.strokeStyle = "black";
ctx.strokeRect(handleCenter.x, handleCenter.y, handleSize, handleSize)
}
def drawCurve(curve: DrawCommand, lastPoint: Vec, t: Transforme, index: Int) = {
curve match {
case mt: MoveTo =>
val to = t * mt.pos
drawBoundingPoint(to, selectedCommand < 0 || index == selectedCommand)
to
case curve: CurveTo =>
val cp1 = t * curve.cp1
val cp2 = t * curve.cp2
val end = t * curve.end
if (selectedCommand < 0 || index - 1 == selectedCommand)
drawControlHandle(lastPoint, cp1)
if (selectedCommand < 0 || index == selectedCommand)
drawControlHandle(end, cp2)
drawBoundingPoint(end, index == selectedCommand)
end
}
}
}
private def reduceTransforms(s: CMShape, transforms: Seq[Transforme]) = {
(transforms :+ s.transform).reduce(_ * _)
}
def click(
mousePos: Vec,
shape: CMShape,
transforms: Seq[Transforme],
settings: CvSetting,
selectedIndex: Int): Option[(Int, Int)] = {
val hHandleSize = settings.shapeHandleSize.intValue() / 2
val squareHandleRadius = hHandleSize * hHandleSize
def inCircleBounds(circleCenter: Vec) = {
(mousePos <<->> circleCenter) < squareHandleRadius
}
def inSquareBound(squareCenter: Vec) = {
val dif = (mousePos - squareCenter).abs
val hDim: Vec = (hHandleSize, hHandleSize)
dif < hDim
}
val transform = reduceTransforms(shape, transforms)
val center = transform * (0, 0)
if (inCircleBounds(center))
Some((-1, -1))
else
shape.commands.zipWithIndex.flatMap {
case (mt: MoveTo, i) =>
Seq((i, 0, mt.pos))
case (ct: CurveTo, i) =>
Seq((i, 1, ct.cp1), (i, 2, ct.cp2), (i, 0, ct.end))
}
.find {
case (index, inPos, point) =>
if (selectedIndex < 0 ||
inPos == 0 || inPos == 2 && index == selectedIndex ||
inPos == 1 && index - 1 == selectedIndex) {
if (inPos > 0) {
inSquareBound(transform * point)
} else {
inCircleBounds(transform * point)
}
} else false
}
.map(t => (t._1, t._2))
}
  def projectOnCurve(target: Vec, squareMargin: Double, p0: Vec, p1: Vec, p2: Vec, p3: Vec) = {
    // Coarse-to-fine search: sample the curve, keep the segments closest to
    // the target, then re-sample those segments at finer resolution.
    val split = splitBezierCurve(60, p0, p1, p2, p3, 0, 1)
    val v = getCloseSegment(target, squareMargin * 4, split)
      .flatMap {
        case (((start, end), d)) =>
          val subsplit = splitBezierCurve(60, p0, p1, p2, p3, start, end)
          val vv = getCloseSegment(target, squareMargin, subsplit).map {
case (((startT, endT), err)) =>
(
(startT + endT) / 2,
target <<->> interpolateAt(((startT + endT) / 2), p0, p1, p2, p3))
}
vv.headOption
}
.sortBy(_._2)
v.headOption
}
def projectOnShape(
shape: CMShape,
projected: Vec,
transforms: Seq[Transforme],
    errorMargin: Float) = {
    val sqrMargin = errorMargin * errorMargin
val t = reduceTransforms(shape, transforms)
val res = shape.commands.zipWithIndex.foldLeft(((0.0, 0.0), None: Option[(Double, Int)], Double.PositiveInfinity)) {
case ((prev, best, dist), (mt: MoveTo, idx)) =>
(t*mt.pos, best, dist)
case ((prev, None, dist), (ct: CurveTo, idx)) =>
val end = t * ct.end
        projectOnCurve(projected, sqrMargin, prev, t * ct.cp1, t * ct.cp2, end) match {
case None => (end, None, dist)
case Some((t, error)) =>
(end, Some((t, idx)), error)
}
case ((prev, Some((bestT, idxBt)), dist), (ct: CurveTo, idx)) =>
val end = t * ct.end
        projectOnCurve(projected, sqrMargin, prev, t * ct.cp1, t * ct.cp2, end) match {
case None => (end, Some((bestT, idxBt)), dist)
case Some((t, error)) =>
if (error < dist)
(end, Some((t, idx)), error)
else
(end, Some((bestT, idxBt)), dist)
}
}
(res._2, res._3)
}
def addHandle(t: Double, start: Vec, curve: DrawCommand): Seq[CurveTo] = {
val (p1, p2, p3) = curve match {
case ct: CurveTo => (ct.cp1, ct.cp2, ct.end)
case mt: MoveTo => (start, mt.pos, mt.pos)
}
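    // De Casteljau subdivision at parameter t: each level below linearly
    // interpolates between adjacent control points; p0112_1223 is the point
    // on the curve at t, and the two returned CurveTo segments together
    // trace exactly the original curve.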
val p0_1 = start * (1 - t) + p1 * t
val p1_2 = p1 * (1 - t) + p2 * t
val p2_3 = p2 * (1 - t) + p3 * t
val p01_12 = p0_1 * (1 - t) + p1_2 * t
val p12_23 = p1_2 * (1 - t) + p2_3 * t
val p0112_1223 = p01_12 * (1 - t) + p12_23 * t
Seq(
new CurveTo(p0_1, p01_12, p0112_1223),
new CurveTo(p12_23, p2_3, p3))
}
def move(mousePos: Vec, movedPoint: (Int, Int), pointOwner: CMShape, callback: CallbackCenter, invertScreenMatrix: Transforme, invertPartMatrix: Transforme) = {
val localMousePos = invertScreenMatrix * mousePos
if (movedPoint._1 < 0) {
callback.onShapeOriginChanged(invertPartMatrix * mousePos)
} else
callback.onShapeManipulated(localMousePos, movedPoint._1, movedPoint._2)
}
def addCommande(
hostPart: CMPart,
deltaGroup: Int,
selectedCommand: Int,
commandPos: Vec): (CMPart, Int) = {
    var sourceNewCommandPos = -1
    val newShapes = hostPart.shapes.map {
      s =>
        if (s.deltaLink.key == deltaGroup) {
          val v = addCommande(s, selectedCommand, commandPos)
          if (s.deltaLink.isSource)
            sourceNewCommandPos = v._2
          v._1
        } else s
    }
    (hostPart.setShapes(newShapes), sourceNewCommandPos)
}
private def addCommande(
shape: CMShape,
selectedCommand: Int,
commandPos: Vec): (CMShape, Int) = {
val commands = shape.commands
def curveTo(from: Int) = new CurveTo(
commands(from).last,
commandPos,
commandPos)
if (selectedCommand < 0 || commands.isEmpty)
(shape.setDrawCommands(commands :+ new MoveTo(commandPos)), commands.size)
else if (selectedCommand == commands.size - 1)
(shape.setDrawCommands(commands :+ curveTo(selectedCommand)), commands.size)
else {
shape.commands(selectedCommand) match {
case mt: MoveTo =>
          val newCommands = (commands.take(selectedCommand) :+
            new MoveTo(commandPos) :+
            new CurveTo(commandPos, mt.pos, mt.pos)) ++ commands.drop(selectedCommand + 1)
          (shape.setDrawCommands(newCommands), selectedCommand)
case ct: CurveTo =>
commands(selectedCommand + 1) match {
case _: CurveTo =>
(shape.setDrawCommands(commands :+ new MoveTo(commandPos)), commands.size)
case mt: MoveTo =>
              val newCommands = (commands.take(selectedCommand + 1) :+
                new CurveTo(ct.end, commandPos, commandPos)) ++
                commands.drop(selectedCommand + 1)
              (shape.setDrawCommands(newCommands), selectedCommand + 1)
}
}
}
}
def removeCommand(hostPart: CMPart, deltaGroupKey: Int, commandIndex: Int) = {
val newShapes = hostPart.shapes.map {
s =>
val delta = s.deltaLink
if (delta.key == deltaGroupKey) {
removeCommande(s, commandIndex)
} else s
}
hostPart.setShapes(newShapes)
}
private def interpolateAt(t: Double, p0: Vec, p1: Vec, p2: Vec, p3: Vec): Vec = {
assert(t >= 0 && t <= 1, "t :" + t)
val one_t = 1 - t
val one_t2 = one_t * one_t
val one_t3 = one_t2 * one_t
p0 * one_t3 + p1 * 3 * t * one_t2 + p2 * 3 * t * t * one_t + p3 * t * t * t
}
def splitBezierCurve(segmentsCount: Int, p0: Vec, p1: Vec, p2: Vec, p3: Vec, startT: Double = 0, endT: Double = 1) = {
    def interpolate(t: Double): Vec = interpolateAt(t, p0, p1, p2, p3)
val step = (endT - startT) / segmentsCount
(1 to segmentsCount - 1) map {
i => (interrpolateAt(step * i + startT), step * i + startT)
}
}
def getCloseSegment(target: Vec, sqrMargin: Double, segments: Seq[(Vec, Double)]): Seq[((Double, Double), Double)] = {
    segments.zip(segments.tail).map {
      case ((v1, f1), (v2, f2)) =>
        val d1 = target <<->> v1
        val d2 = target <<->> v2
        val v12 = (v2 - v1).direction
        val v1t = target - v1
        val dot = v1t.dot(v12)
        // If the projection falls outside the segment, use the distance to the
        // nearest endpoint; otherwise use the orthogonal distance to the segment.
        val dist = if (dot < 0 || v12.dot(v12) < dot) {
          d1 min d2
        } else {
          v1t <<->> (v12 * (v1t dot v12))
        }
        ((f1, f2), dist): ((Double, Double), Double)
    }.filter(_._2 < sqrMargin).sortBy(_._2).take(4)
}
private def removeCommande(targetShape: CMShape, commandIndex: Int): CMShape = {
val oldCommands = targetShape.commands
val newCommands = {
if (commandIndex == oldCommands.size - 1)
oldCommands.dropRight(1)
else oldCommands(commandIndex) match {
case mt: MoveTo =>
val next = oldCommands(commandIndex + 1) match {
case ct: CurveTo => new MoveTo(ct.end)
case other: DrawCommand => other
}
(oldCommands.take(commandIndex) :+ next) ++ oldCommands.drop(commandIndex + 2)
case other =>
oldCommands.take(commandIndex) ++ oldCommands.drop(commandIndex + 1)
}
}
targetShape.setDrawCommands(newCommands)
}
  // B(t)  = (1-t)^3 * P0 + 3*t*(1-t)^2 * P1 + 3*t^2*(1-t) * P2 + t^3 * P3
  // B'(t) = -3*(1-t)^2 * P0 + (3*(1-t)^2 - 6*t*(1-t)) * P1 + (6*t*(1-t) - 3*t^2) * P2 + 3*t^2 * P3
class BezierFun(p0: Vec, p1: Vec, p2: Vec, p3: Vec) {
def pointAt(t: Float) = {
assert(t >= 0 && t <= 1)
val one_t = 1 - t
val one_t2 = one_t * one_t
val one_t3 = one_t2 * one_t
p0 * one_t3 + p1 * 3 * t * one_t2 + p2 * 3 * t * t * one_t + p3 * t * t * t
}
def tangentAt(t: Float) = {
val one_t = 1 - t
val one_t2 = one_t * one_t
p0 * (-3 * one_t2) +
p1 * (3 * one_t2 - 6 * t * one_t) +
p2 * (6 * t * one_t - 3 * t * t) +
p3 * 3 * t * t
}
}
}
|
Hgjj/CharViewer
|
js/src/main/scala/unof/cv/tools/ShapeManipulator.scala
|
Scala
|
bsd-3-clause
| 14,252
|
package templemore.liftjson.provider
import net.liftweb.json._
import io.Source
import java.io.{InputStream, OutputStream, OutputStreamWriter}
trait LiftJsonIntegration {
protected def config: ProviderConfig
protected def convertToJson(value: AnyRef,
entityStream: OutputStream,
transformerClass: Option[Class[_ <: JsonASTTransformer]]): Unit = {
val transform = createTransform(transformerClass)_
val jsonAst = transform(Extraction.decompose(value)(DefaultFormats))
Printer.compact(render(jsonAst), new OutputStreamWriter(entityStream))
}
protected def convertFromJson(classType: Class[AnyRef],
entityStream: InputStream,
transformerClass: Option[Class[_ <: JsonASTTransformer]]): Either[MappingError, AnyRef] = {
def extract(jsonAST: JValue, classType: Class[_]): AnyRef =
jsonAST.extract(DefaultFormats, Manifest.classType(classType))
val transform = createTransform(transformerClass)_
val buf = new scala.collection.mutable.StringBuilder()
Source.createBufferedSource(entityStream).getLines().foreach(buf.append)
try {
val jsonAST = transform(parse(buf.toString()))
Right(classType.cast(extract(jsonAST, classType)))
}
catch {
case e: MappingException => Left(MappingError(e))
}
}
private def createTransform(transformerClass: Option[Class[_ <: JsonASTTransformer]])
(jsonAST: JValue): JValue = {
val transformer = transformerClass.map(config.transformerFactory.transformer(_))
transformer.map(_.transform(jsonAST)).getOrElse(jsonAST)
}
}
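// A minimal sketch of how a concrete provider would use this trait; the
// config value and Person class are hypothetical:
//
//   case class Person(name: String, age: Int)
//   class MyProvider extends LiftJsonIntegration {
//     protected def config: ProviderConfig = myProviderConfig
//     def writePerson(p: Person, out: java.io.OutputStream): Unit =
//       convertToJson(p, out, None) // serializes p as {"name":...,"age":...}
//   }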
|
skipoleschris/lift-json-jsr311-provider
|
provider/src/main/scala/templemore/liftjson/provider/LiftJsonIntegration.scala
|
Scala
|
apache-2.0
| 1,695
|
package org.scalacoin.marshallers.script
import org.scalacoin.protocol.BitcoinAddress
import org.scalatest.{FlatSpec, MustMatchers}
import spray.json._
import DefaultJsonProtocol._
/**
* Created by chris on 12/27/15.
*/
class ScriptPubKeyMarshallerTest extends FlatSpec with MustMatchers {
val str =
"""
|{
| "asm" : "OP_DUP OP_HASH160 7ecaa33ef3cd6169517e43188ad3c034db091f5e OP_EQUALVERIFY OP_CHECKSIG",
| "hex" : "76a9147ecaa33ef3cd6169517e43188ad3c034db091f5e88ac",
| "reqSigs" : 1,
| "type" : "pubkeyhash",
| "addresses" : [
| "1CZQtge31s59Evu716oP3teYWjcGhX8oKn"
| ]
|}
""".stripMargin
val json = str.parseJson
"ScriptPubKeyMarshaller" must "parse a script pub key " in {
val scriptPubKey = ScriptPubKeyMarshaller.ScriptPubKeyFormatter.read(json)
scriptPubKey.asm must be ("OP_DUP OP_HASH160 7ecaa33ef3cd6169517e43188ad3c034db091f5e OP_EQUALVERIFY OP_CHECKSIG")
scriptPubKey.hex must be ("76a9147ecaa33ef3cd6169517e43188ad3c034db091f5e88ac")
scriptPubKey.reqSigs must be (1)
scriptPubKey.addressType must be ("pubkeyhash")
scriptPubKey.addresses must be (Seq(BitcoinAddress("1CZQtge31s59Evu716oP3teYWjcGhX8oKn")))
}
}
|
scalacoin/scalacoin
|
src/test/scala/org/scalacoin/marshallers/script/ScriptPubKeyMarshallerTest.scala
|
Scala
|
mit
| 1,225
|
object Test {
val x: 1 = 1
final val y = x
val z: 1 = y
object O { final val x = 42 }
val fourtyTwo: 42 = O.x
final val a = { println("x"); 2 } // side effects don't matter
val b: 2 = a
def f: 3 = 3
final val c = f
val dc: 3.0 = 3.0
final val dc1 = dc
val fc: 3.0f = 3.0f
final val fc1 = fc
val t: true = true
val str: "" = ""
final val str2 = str
}
/* To do: test that after erasure we have generated code like this:
*
package <empty> {
final lazy module val Test: Test$ = new Test$()
final module class Test$() extends Object() { this: <notype> =>
<accessor> def x(): Int = 1
final <accessor> def y(): Int = 1
<accessor> def z(): Int = 1
final lazy module val O: Test.O$ = new Test.O$()
final module class O$() extends Object() { this: <notype> =>
final <accessor> def x(): Int = 42
}
<accessor> def fourtyTwo(): Int = 42
final <accessor> def a(): Int = {
println("x")
2
}
<accessor> def b(): Int = 2
def f(): Int = 3
final <accessor> def c(): Int = Test.f()
<accessor> def dc(): Double = 3.0
final <accessor> def dc1(): Double = 3.0
<accessor> def fc(): Float = 3.0
final <accessor> def fc1(): Float = 3.0
<accessor> def t(): Boolean = true
<accessor> def str(): String = ""
final <accessor> def str2(): String = ""
}
}
*/
|
lampepfl/dotty
|
tests/pos/singletons.scala
|
Scala
|
apache-2.0
| 1,367
|
package com.tsukaby.bean_validation_scala
import javax.validation.{ConstraintValidator, ConstraintValidatorContext}
import org.hibernate.validator.constraints.LuhnCheck
import org.hibernate.validator.internal.constraintvalidators.hv.LuhnCheckValidator
/**
* Luhn algorithm checksum validator for scala.
*
* http://en.wikipedia.org/wiki/Luhn_algorithm
* http://en.wikipedia.org/wiki/Check_digit
*/
class LuhnCheckValidatorForOption extends ConstraintValidator[LuhnCheck, Option[_]] {
private var constraintAnnotation: LuhnCheck = null
override def initialize(constraintAnnotation: LuhnCheck): Unit = {
this.constraintAnnotation = constraintAnnotation
}
override def isValid(value: Option[_], context: ConstraintValidatorContext): Boolean = {
value match {
case Some(x: CharSequence) =>
val v = new LuhnCheckValidator
v.initialize(constraintAnnotation)
v.isValid(x, context)
case None =>
true
case Some(_) =>
throw new IllegalStateException("oops.")
}
}
}
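// Usage sketch: this validator lets the standard @LuhnCheck constraint apply
// to Option-wrapped fields; the bean shown is illustrative:
//
//   import org.hibernate.validator.constraints.LuhnCheck
//   import scala.annotation.meta.field
//   case class CreditCard(@(LuhnCheck @field) number: Option[String])
//   // Some("79927398713") passes, Some("79927398714") fails, None passes.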
|
bean-validation-scala/bean-validation-scala
|
src/main/scala/com/tsukaby/bean_validation_scala/LuhnCheckValidatorForOption.scala
|
Scala
|
mit
| 1,045
|
package scalariform.lexer
import scalariform._
import scalariform.lexer.Tokens._
import org.scalatest.FlatSpec
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.TestFailedException
import org.scalatest.TestPendingException
import scalariform.utils.Utils._
class RedundantSemicolonDetectorTest extends FlatSpec with ShouldMatchers {
implicit def stringToCheckable(s: String)(implicit scalaVersion: String = ScalaVersions.DEFAULT_VERSION) =
new { def check() = checkSemis(s, scalaVersion) }; // Expected redundant semicolons are indicated with <;>
"""
class A {
def foo = 42<;>
def bar = 123; def baz = 1234
}<;>
""".check();
"""
{
println("Foo")<;>
}
""".check();
"""
class A {
for (
x <- 1 to 10;
y <- 1 to 10
) yield x + y<;>
}
""".check()
{
implicit val scalaVersion = "2.10.0";
"""
s"my name is ${person.name<;>}"
""".check
}
private def checkSemis(encodedSource: String, scalaVersion: String) {
val ordinarySource = encodedSource.replace("<;>", ";")
val semis = RedundantSemicolonDetector.findRedundantSemis(ordinarySource, scalaVersion)
val encodedSourceAgain = semis.reverse.foldLeft(ordinarySource) { (s, semi) ⇒ replaceRange(s, semi.range, "<;>") }
encodedSourceAgain should equal(encodedSource)
}
}
|
triggerNZ/scalariform
|
scalariform/src/test/scala/com/danieltrinh/scalariform/lexer/RedundantSemicolonDetectorTest.scala
|
Scala
|
mit
| 1,370
|
package controllers
import controllers.Application._
import play.api.mvc.Action
import scala.concurrent.Future
object RoutingController {
def index = Action.async {
Future.successful(Ok(views.html.index()))
}
def menu = Action.async {
Future.successful(Ok(views.html.menu()))
}
}
|
Hajtosek/ggEasy
|
app/controllers/RoutingController.scala
|
Scala
|
apache-2.0
| 300
|
/*
* ******************************************************************************
* * Copyright (C) 2013 Christopher Harris (Itszuvalex)
* * Itszuvalex@gmail.com
* *
* * This program is free software; you can redistribute it and/or
* * modify it under the terms of the GNU General Public License
* * as published by the Free Software Foundation; either version 2
* * of the License, or (at your option) any later version.
* *
* * This program is distributed in the hope that it will be useful,
* * but WITHOUT ANY WARRANTY; without even the implied warranty of
* * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* * GNU General Public License for more details.
* *
* * You should have received a copy of the GNU General Public License
* * along with this program; if not, write to the Free Software
* * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* *****************************************************************************
*/
package com.itszuvalex.femtocraft.industry.blocks
import java.util.Random
import com.itszuvalex.femtocraft.Femtocraft
import com.itszuvalex.femtocraft.core.blocks.TileContainer
import com.itszuvalex.femtocraft.core.traits.block.{DroppableInventory, RotateOnPlace}
import com.itszuvalex.femtocraft.industry.tiles.TileEntityBaseEntityNanoHorologe
import com.itszuvalex.femtocraft.render.RenderSimpleMachine
import com.itszuvalex.femtocraft.utils.FemtocraftUtils
import cpw.mods.fml.relauncher.{Side, SideOnly}
import net.minecraft.block.Block
import net.minecraft.block.material.Material
import net.minecraft.client.renderer.texture.IIconRegister
import net.minecraft.inventory.{Container, IInventory}
import net.minecraft.util.IIcon
import net.minecraft.world.World
class BlockNanoHorologe extends TileContainer(Material.iron) with DroppableInventory with RotateOnPlace {
/**
* Is the random generator used by furnace to drop the inventory contents in random directions.
*/
private val rand = new Random
private var frontIcon: IIcon = null
setBlockName("BlockNanoHorologe")
setHardness(3.5f)
setStepSound(Block.soundTypeMetal)
setCreativeTab(Femtocraft.femtocraftTab)
override def renderAsNormalBlock = false
override def getRenderType = RenderSimpleMachine.renderID
@SideOnly(Side.CLIENT) override def getIcon(par1: Int, par2: Int) = if (par1 == par2) frontIcon else blockIcon
/**
* If this returns true, then comparators facing away from this block will use the value from
* getComparatorInputOverride instead of the actual redstone signal strength.
*/
override def hasComparatorInputOverride = true
/**
* If hasComparatorInputOverride returns true, the return value from this is used instead of the redstone signal
* strength when this block inputs to a comparator.
*/
override def getComparatorInputOverride(par1World: World, par2: Int, par3: Int, par4: Int, par5: Int) = Container.calcRedstoneFromInventory(par1World.getTileEntity(par2, par3, par4).asInstanceOf[IInventory])
@SideOnly(Side.CLIENT) override def registerBlockIcons(par1IconRegister: IIconRegister) {
blockIcon = par1IconRegister.registerIcon(Femtocraft.ID.toLowerCase + ":" + "NanoMachineBlock_side")
frontIcon = par1IconRegister.registerIcon(Femtocraft.ID.toLowerCase + ":" + "NanoHorologe_front")
}
/**
* Returns a new instance of a block's tile entity class. Called on placing the block.
*/
override def createNewTileEntity(par1World: World, metadata: Int) = new TileEntityBaseEntityNanoHorologe
/**
* ejects contained items into the world, and notifies neighbours of an update, as appropriate
*/
override def breakBlock(world: World, x: Int, y: Int, z: Int, block: Block, metadata: Int) {
world.getTileEntity(x, y, z) match {
case te: TileEntityBaseEntityNanoHorologe if te.isWorking => FemtocraftUtils.dropItem(te.chronoStack, world, x, y, z, rand)
case _ =>
}
world.func_147453_f(x, y, z, block)
super.breakBlock(world, x, y, z, block, metadata)
}
}
|
Itszuvalex/Femtocraft-alpha-1
|
src/main/java/com/itszuvalex/femtocraft/industry/blocks/BlockNanoHorologe.scala
|
Scala
|
gpl-2.0
| 4,135
|
package net.batyuk.akkahttp.examples.api
import com.typesafe.config.{ ConfigFactory, Config }
import akka.stream.FlowMaterializer
import akka.actor.ActorSystem
import akka.pattern._
import akka.stream.scaladsl.Flow
import akka.http.Http
import akka.http.server._
import akka.http.model.{HttpResponse, HttpRequest, StatusCodes}
import akka.http.server.directives.AuthenticationDirectives._
import scala.concurrent.duration._
import akka.util.Timeout
import spray.json.DefaultJsonProtocol
import net.batyuk.akkahttp.examples.model.Record
import net.batyuk.akkahttp.examples.core.PongActor
import scala.concurrent.duration.Duration
import scala.collection.JavaConversions._
object TestServer extends App {
val testConf: Config = ConfigFactory.parseString("""
akka.loglevel = INFO
akka.log-dead-letters = off""")
implicit val system = ActorSystem("ServerTest", testConf)
import system.dispatcher
implicit val materializer = FlowMaterializer()
implicit val timeout = Timeout(5 seconds)
import akka.http.marshallers.xml.ScalaXmlSupport._
import Directives._
import akka.http.marshallers.sprayjson.SprayJsonSupport._
object RecordProtocol extends DefaultJsonProtocol {
implicit val recordFormat = jsonFormat3(Record.apply)
}
import RecordProtocol._
def recordList = for(id <- 1 to 5) yield Record(id, "test-"+id, "category-"+id)
def auth =
HttpBasicAuthenticator.provideUserName {
case p @ UserCredentials.Provided(name) ⇒ p.verifySecret(name + "-password")
case _ ⇒ false
}
val binding = Http().bind(interface = "localhost", port = 8080)
val materializedMap = binding startHandlingWith Route.handlerFlow {
get {
path("") {
redirect("web/index.html", StatusCodes.Found)
} ~ pathPrefix("web") {
getFromResourceDirectory(".")
} ~
path("secure") {
HttpBasicAuthentication("My very secure site")(auth) { user ⇒
complete(<html><body>Hello <b>{ user }</b>. Access has been granted!</body></html>)
}
} ~
path("ping") {
complete("PONG directly from the Route!")
} ~
path("coreping") {
complete((system.actorOf(PongActor.props) ? "ping").mapTo[String])
} ~
path("crash") {
complete(sys.error("BOOM!"))
} ~
path("json") {
complete(recordList)
}~
path("shutdown") {
shutdown
complete("SHUTDOWN")
}
}
}
def shutdown(): Unit = binding.unbind(materializedMap).onComplete(_ ⇒ system.shutdown())
}
|
abatyuk/akka-http-examples
|
multiProjectWebPlugin/api/src/main/scala/net/batyuk/akkahttp/examples/api/TestServer.scala
|
Scala
|
apache-2.0
| 2,633
|
/*
Copyright (c) 2017, Qvantel
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the Qvantel nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL Qvantel BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.qvantel.jsonapi
import cats.effect.IO
import com.netaporter.uri.Uri
/**
* Represents a relationship to zero or more objects of type A
* [[com.qvantel.jsonapi.ToMany.IdsReference]] case class is used to represent a ToMany relationship where
* the objects have not been loaded
* [[com.qvantel.jsonapi.ToMany.Loaded]] case class is used to represent a ToMany relationship where
* the objects have been loaded
*
* @tparam A Type of the object the relationships point to
*/
sealed trait ToMany[A] {
def ids: Set[String]
/** Loaded biased get method as a helper when you don't want to pattern match like crazy */
def get: List[A]
def load(implicit jac: JsonApiClient,
rt: ResourceType[A],
identifiable: Identifiable[A],
pt: PathToId[A],
reader: JsonApiReader[A]): IO[List[A]]
}
object ToMany {
final case class IdsReference[A](ids: Set[String]) extends ToMany[A] {
override def get: List[A] = List.empty
def load(implicit jac: JsonApiClient,
rt: ResourceType[A],
identifiable: Identifiable[A],
pt: PathToId[A],
reader: JsonApiReader[A]): IO[List[A]] =
jac.many[A](ids).flatMap { entities =>
entities.filterNot(x => ids(identifiable.identify(x))) match {
case Nil => IO.pure(entities)
case missing =>
IO.raiseError(ApiError.NoEntityForIds(missing.map(x => (identifiable.identify(x), rt.resourceType)).toSet))
}
}
}
final case class PathReference[A](path: Option[Uri]) extends ToMany[A] {
override def ids: Set[String] = Set.empty
/** Loaded biased get method as a helper when you don't want to pattern match like crazy */
override def get: List[A] = List.empty
def load(implicit jac: JsonApiClient,
rt: ResourceType[A],
identifiable: Identifiable[A],
pt: PathToId[A],
reader: JsonApiReader[A]): IO[List[A]] =
path match {
case Some(uri) => jac.pathMany[A](uri)
case None => IO.pure(List.empty)
}
}
final case class Loaded[A: Identifiable](entities: Iterable[A]) extends ToMany[A] {
val ids = entities.map(implicitly[Identifiable[A]].identify).toSet
override def get: List[A] = entities.toList
def load(implicit jac: JsonApiClient,
rt: ResourceType[A],
identifiable: Identifiable[A],
pt: PathToId[A],
reader: JsonApiReader[A]): IO[List[A]] =
IO.pure(entities.toList)
}
def reference[A]: ToMany[A] = PathReference[A](None)
def reference[A](ids: Set[String]): ToMany[A] = IdsReference[A](ids)
def reference[A](uri: Uri): ToMany[A] = PathReference[A](Some(uri))
def reference[A](uri: String): ToMany[A] = PathReference[A](Some(Uri.parse(uri)))
def loaded[A: Identifiable](entities: Iterable[A]): ToMany[A] = Loaded[A](entities)
}
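// A minimal sketch of the three relationship states (Article and the article
// values are hypothetical):
//
//   val unloadedByIds = ToMany.reference[Article](Set("1", "2")) // IdsReference
//   val unloadedByUri = ToMany.reference[Article]("/articles")   // PathReference
//   val loaded = ToMany.loaded(Seq(article1, article2))          // Loaded
//   // .get is Loaded-biased: List.empty for references, the entities when loaded;
//   // .load fetches through an implicit JsonApiClient in all three cases.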
|
Doikor/jsonapi-scala
|
core/src/main/scala/com/qvantel/jsonapi/ToMany.scala
|
Scala
|
bsd-3-clause
| 4,494
|
/*
* Copyright © 2015 Reactific Software LLC. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package rxmongo.driver
import java.net.InetSocketAddress
import akka.actor.{ Terminated, ActorRef }
import akka.io.Inet.SocketOption
import akka.io.{ IO, Tcp }
import akka.util.ByteString
import rxmongo.messages.{ ReplyMessage, RequestMessage }
import scala.collection.mutable
import scala.concurrent.duration.Duration
object AkkaIOChannel {
case object Ack extends Tcp.Event
}
/** An RxMongo Channel that is implemented with Akka IO
*
* This kind of Channel implements the Channel protocol with RxMongo by managing back pressure manually
* with the Akka IO Tcp Manager.
*/
class AkkaIOChannel(remote : InetSocketAddress, options : ConnectionOptions, listener : ActorRef, isPrimary : Boolean)
extends Channel(remote, options, listener, isPrimary) {
import context.system
import Tcp._
import AkkaIOChannel._
val manager = IO(Tcp)
var connection : ActorRef = null
var ackPending : Boolean = false
var responsePending : Boolean = false
val connectionMsg = Tcp.Connect(
remoteAddress = remote,
localAddress = Some(
new InetSocketAddress(options.localIP.orNull, options.localPort)
),
options = List[SocketOption](
SO.KeepAlive(options.tcpKeepAlive),
SO.OOBInline(options.tcpOOBInline),
SO.TcpNoDelay(options.tcpNoDelay)
),
timeout = options.connectTimeoutMS match { case 0 ⇒ None; case x : Long ⇒ Some(Duration(x, "ms")) },
pullMode = true
)
manager ! connectionMsg
log.debug("Connection request to TCP Manager sent")
val pendingRequests = mutable.Queue.empty[RequestMessage]
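  // Back-pressure invariant: at most one un-acked Write and at most one
  // outstanding Mongo reply at a time; requests arriving while either is
  // pending are queued here and drained by the Ack and handleReply handlers.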
@inline def sendMessage(requestMsg : RequestMessage, msg_to_send : ByteString) : Unit = {
val msg = Write(msg_to_send, Ack)
connection ! msg
ackPending = true
responsePending = requestMsg.requiresResponse
log.debug("Sent Request: {} ({} bytes, queuelen={})", requestMsg, msg_to_send.length, pendingRequests.length)
}
@inline def handleRequest(requestMsg : RequestMessage, msg_to_send : ByteString) : Unit = {
if (ackPending || responsePending) {
pendingRequests += requestMsg
log.debug("Qued Request: {} (queuelen={})", requestMsg, pendingRequests.length)
} else {
sendMessage(requestMsg, msg_to_send)
}
}
@inline override def handleReply(replyMsg : ReplyMessage, toActor : ActorRef) : Unit = {
toActor ! replyMsg
connection ! ResumeReading
responsePending = false
if (!ackPending && pendingRequests.nonEmpty) {
val msg = pendingRequests.dequeue()
sendMessage(msg, msg.finish)
}
}
@inline override def handleClose() : Unit = {
connection ! Close
super.handleClose()
}
override def unconnected : Receive = {
case Ack ⇒ // Receive Write Ack from connection actor
log.warning("In unconnected, got unexpected Ack ")
case CommandFailed(conn : Connect) ⇒
val msg = Channel.ConnectionFailed(s"CommandFailed for connection: $conn")
log.debug(msg.toString)
listener ! msg
context stop self
case c @ Connected(remote_addr, local_addr) ⇒
log.debug("Connected to {} at {}", remote_addr, local_addr)
connection = sender()
connection ! Register(self)
connection ! ResumeReading
context watch connection
context become connected
listener ! Channel.ConnectionSucceeded(remote_addr)
case Terminated(actor) ⇒ // The TCP
if (actor == manager) {
val msg = Channel.ConnectionFailed(s"TCP Manager terminated while connecting: $actor")
log.error(msg.toString)
listener ! msg
context stop self
} else {
log.warning(s"Spurious termination of: $actor")
spuriousMessages += 1
}
case Received(data : ByteString) ⇒ // Receive a reply from Mongo
doReply(data)
case x ⇒
super.unconnected(x)
}
override def connected : Receive = {
case Ack ⇒ // Receive Write Ack from connection actor
ackPending = false
log.debug("Ack with queuelen={}", pendingRequests.length)
if (!responsePending && pendingRequests.nonEmpty) {
val msg = pendingRequests.dequeue()
sendMessage(msg, msg.finish)
}
case Received(data : ByteString) ⇒ // Receive a reply from Mongo
doReply(data)
case Terminated(actor) ⇒ // The TCP connection has terminated
if (actor == connection)
log.debug("TCP Connection terminated unexpectedly: {}", actor)
else
log.debug("Spurious termination: {}", actor)
case CommandFailed(w : Write) ⇒ // A write has failed
writeFailures += 1
val msg = Channel.WriteFailed(s"Command Failed: $w")
log.warning(msg.toString)
// O/S buffer was full
listener ! msg
case x ⇒
super.connected(x)
}
override def closing : Receive = {
case Received(data : ByteString) ⇒
doReply(data)
case Tcp.Closed ⇒
log.debug("Closed")
/** The connection has been closed normally in response to a [[Close]] command. */
context stop self
case Tcp.Aborted ⇒
log.debug("Aborted")
/** The connection has been aborted in response to an [[Abort]] command. */
context stop self
case Tcp.ConfirmedClosed ⇒
log.debug("ConfirmedClosed")
/** The connection has been half-closed by us and then half-close by the peer
* in response to a [[ConfirmedClose]] command.
*/
context stop self
case Tcp.PeerClosed ⇒
log.debug("PeerClosed")
/** The peer has closed its writing half of the connection. */
context stop self
case Tcp.ErrorClosed(cause : String) ⇒
log.debug(s"ErrorClosed: $cause")
/** The connection has been closed due to an IO error. */
context stop self
case _ : Tcp.ConnectionClosed ⇒
log.debug("Other ConnectionClosed")
context stop self
case x ⇒
super.closing(x)
}
}
|
reactific/RxMongo
|
driver/src/main/scala/rxmongo/driver/AkkaIOChannel.scala
|
Scala
|
mit
| 7,070
|
/**
* Copyright (c) 2015, Cloudera, Inc. All Rights Reserved.
*
* Cloudera, Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"). You may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the
* License.
*/
package com.cloudera.sparkts
import scala.Double.NaN
import breeze.linalg._
import com.cloudera.sparkts.DateTimeIndex._
import com.cloudera.sparkts.TimeSeriesUtils._
import com.github.nscala_time.time.Imports._
import org.scalatest.{FunSuite, ShouldMatchers}
class RebaseSuite extends FunSuite with ShouldMatchers {
test("iterateWithUniformFrequency single value") {
val baseDT = new DateTime("2015-4-8")
val dts = Array(baseDT)
val values = Array(1.0)
val iter = iterateWithUniformFrequency(dts.zip(values).iterator, 1.days, 47.0)
iter.toArray should be (Array((baseDT, 1.0)))
}
test("iterateWithUniformFrequency no gaps") {
val baseDT = new DateTime("2015-4-8")
val dts = Array(baseDT, baseDT + 1.days, baseDT + 2.days, baseDT + 3.days)
val values = Array(1.0, 2.0, 3.0, 4.0)
val iter = iterateWithUniformFrequency(dts.zip(values).iterator, 1.days)
iter.toArray should be (Array((baseDT, 1.0), (baseDT + 1.days, 2.0), (baseDT + 2.days, 3.0),
(baseDT + 3.days, 4.0)))
}
test("iterateWithUniformFrequency multiple gaps") {
val baseDT = new DateTime("2015-4-8")
val dts = Array(baseDT, baseDT + 2.days, baseDT + 5.days)
val values = Array(1.0, 2.0, 3.0)
val iter = iterateWithUniformFrequency(dts.zip(values).iterator, 1.days, 47.0)
iter.toArray should be (Array((baseDT, 1.0), (baseDT + 1.days, 47.0), (baseDT + 2.days, 2.0),
(baseDT + 3.days, 47.0), (baseDT + 4.days, 47.0), (baseDT + 5.days, 3.0)))
}
test("uniform source same range") {
val vec = new DenseVector((0 until 10).map(_.toDouble).toArray)
val source = uniform(new DateTime("2015-4-8"), vec.length, 1.days)
val target = source
val rebased = rebase(source, target, vec, NaN)
rebased.length should be (vec.length)
rebased should be (vec)
}
test("uniform source, target fits in source") {
val vec = new DenseVector((0 until 10).map(_.toDouble).toArray)
val source = uniform(new DateTime("2015-4-8"), vec.length, 1.days)
val target = uniform(new DateTime("2015-4-9"), 5, 1.days)
val rebased = rebase(source, target, vec, NaN)
rebased should be (new DenseVector(Array(1.0, 2.0, 3.0, 4.0, 5.0)))
}
test("uniform source, target overlaps source ") {
val vec = new DenseVector((0 until 10).map(_.toDouble).toArray)
val source = uniform(new DateTime("2015-4-8"), vec.length, 1.days)
val targetBefore = uniform(new DateTime("2015-4-4"), 8, 1.days)
val targetAfter = uniform(new DateTime("2015-4-11"), 8, 1.days)
val rebasedBefore = rebase(source, targetBefore, vec, NaN)
val rebasedAfter = rebase(source, targetAfter, vec, NaN)
assertArraysEqualWithNaN(
rebasedBefore.valuesIterator.toArray,
Array(NaN, NaN, NaN, NaN, 0.0, 1.0, 2.0, 3.0))
assertArraysEqualWithNaN(
rebasedAfter.valuesIterator.toArray,
Array(3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, NaN))
}
test("uniform source, source fits in target") {
val vec = new DenseVector((0 until 4).map(_.toDouble).toArray)
val source = uniform(new DateTime("2015-4-8"), vec.length, 1.days)
val target = uniform(new DateTime("2015-4-7"), 8, 1.days)
val rebased = rebase(source, target, vec, NaN)
assertArraysEqualWithNaN(
rebased.valuesIterator.toArray,
Array(NaN, 0.0, 1.0, 2.0, 3.0, NaN, NaN, NaN))
}
test("irregular source same range") {
val vec = new DenseVector((4 until 10).map(_.toDouble).toArray)
val source = irregular((4 until 10).map(d => new DateTime(s"2015-4-$d")).toArray)
vec.size should be (source.size)
val target = uniform(new DateTime("2015-4-4"), vec.length, 1.days)
val rebased = rebase(source, target, vec, NaN)
rebased should be (vec)
}
test("irregular source, hole gets filled default value") {
val dt = new DateTime("2015-4-10")
val source = irregular(Array(dt, dt + 1.days, dt + 3.days))
val target = uniform(dt, 4, 1.days)
val vec = new DenseVector(Array(1.0, 2.0, 3.0))
val rebased = rebase(source, target, vec, 47.0)
rebased.toArray should be (Array(1.0, 2.0, 47.0, 3.0))
}
test("irregular source, target fits in source") {
val dt = new DateTime("2015-4-10")
val source = irregular(Array(dt, dt + 1.days, dt + 3.days))
val target = uniform(dt + 1.days, 2, 1.days)
val vec = new DenseVector(Array(1.0, 2.0, 3.0))
val rebased = rebase(source, target, vec, 47.0)
rebased.toArray should be (Array(2.0, 47.0))
}
test("irregular source, target overlaps source ") {
val dt = new DateTime("2015-4-10")
val source = irregular(Array(dt, dt + 1.days, dt + 3.days))
val targetBefore = uniform(new DateTime("2015-4-8"), 4, 1.days)
val vec = new DenseVector(Array(1.0, 2.0, 3.0))
val rebasedBefore = rebase(source, targetBefore, vec, 47.0)
rebasedBefore.toArray should be (Array(47.0, 47.0, 1.0, 2.0))
val targetAfter = uniform(new DateTime("2015-4-11"), 5, 1.days)
val rebasedAfter = rebase(source, targetAfter, vec, 47.0)
rebasedAfter.toArray should be (Array(2.0, 47.0, 3.0, 47.0, 47.0))
}
test("irregular source, source fits in target") {
val dt = new DateTime("2015-4-10")
val source = irregular(Array(dt, dt + 1.days, dt + 3.days))
val target = uniform(dt - 2.days, 7, 1.days)
val vec = new DenseVector(Array(1.0, 2.0, 3.0))
val rebased = rebase(source, target, vec, 47.0)
rebased.toArray should be (Array(47.0, 47.0, 1.0, 2.0, 47.0, 3.0, 47.0))
}
test("irregular source, irregular target") {
// Triples of source index, target index, expected output
// Assumes that at time i, value of source series is i
val cases = Array(
(Array(1, 2, 3), Array(1, 2, 3), Array(1, 2, 3)),
(Array(1, 2, 3), Array(1, 2), Array(1, 2)),
(Array(1, 2), Array(1, 2, 3), Array(1, 2, -1)),
(Array(2, 3), Array(1, 2, 3), Array(-1, 2, 3)),
(Array(1, 2), Array(2, 3), Array(2, -1)),
(Array(1, 2, 3), Array(1, 3), Array(1, 3)),
(Array(1, 2, 3, 4), Array(1, 3), Array(1, 3)),
(Array(1, 2, 3, 4), Array(1, 4), Array(1, 4)),
(Array(1, 2, 3, 4), Array(2, 4), Array(2, 4)),
(Array(1, 2, 3, 4), Array(2, 3), Array(2, 3)),
(Array(1, 2, 3, 4), Array(1, 3, 4), Array(1, 3, 4))
)
cases.foreach { case (source, target, expected) =>
val sourceIndex = irregular(source.map(x => new DateTime(s"2015-04-0$x")))
val targetIndex = irregular(target.map(x => new DateTime(s"2015-04-0$x")))
val vec = new DenseVector[Double](source.map(_.toDouble))
val expectedVec = new DenseVector[Double](expected.map(_.toDouble))
rebase(sourceIndex, targetIndex, vec, -1) should be (expectedVec)
}
}
private def assertArraysEqualWithNaN(arr1: Array[Double], arr2: Array[Double]): Unit = {
assert(arr1.zip(arr2).forall { case (d1, d2) =>
d1 == d2 || (d1.isNaN && d2.isNaN)
}, s"${arr1.mkString(",")} != ${arr2.mkString(",")}")
}
}
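// A minimal sketch (not part of the original suite) of the rebase semantics exercised
// above: values are re-aligned from a source DateTimeIndex onto a target index, and any
// target instant missing from the source is padded with the fill value. It reuses the
// imports at the top of this file.
object RebaseSketch {
  def main(args: Array[String]): Unit = {
    val source = uniform(new DateTime("2015-4-8"), 3, 1.days) // 4-8 .. 4-10
    val target = uniform(new DateTime("2015-4-7"), 5, 1.days) // 4-7 .. 4-11
    val vec = new DenseVector(Array(1.0, 2.0, 3.0))
    // Prints -1.0, 1.0, 2.0, 3.0, -1.0 -- the fill value pads both ends.
    println(rebase(source, target, vec, -1.0).toArray.mkString(", "))
  }
}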
|
aba1476/spark-timeseries
|
src/test/scala/com/cloudera/sparkts/RebaseSuite.scala
|
Scala
|
apache-2.0
| 7,498
|
package filodb.query.exec
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration._
import com.typesafe.config.ConfigFactory
import monix.eval.Task
import monix.execution.Scheduler
import monix.execution.Scheduler.Implicits.global
import org.scalatest.BeforeAndAfterAll
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.time.{Millis, Seconds, Span}
import filodb.core.MetricsTestData._
import filodb.core.TestData
import filodb.core.binaryrecord2.BinaryRecordRowReader
import filodb.core.memstore.{FixedMaxPartitionsEvictionPolicy, SomeData, TimeSeriesMemStore}
import filodb.core.metadata.Schemas
import filodb.core.query._
import filodb.core.store.{InMemoryMetaStore, NullColumnStore}
import filodb.memory.format.{SeqRowReader, ZeroCopyUTF8String}
import filodb.query._
import filodb.query.exec.TsCardExec.CardCounts
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers
class MetadataExecSpec extends AnyFunSpec with Matchers with ScalaFutures with BeforeAndAfterAll {
import ZeroCopyUTF8String._
implicit val defaultPatience = PatienceConfig(timeout = Span(30, Seconds), interval = Span(250, Millis))
val config = ConfigFactory.load("application_test.conf").getConfig("filodb")
val queryConfig = new QueryConfig(config.getConfig("query"))
val querySession = QuerySession(QueryContext(), queryConfig)
val policy = new FixedMaxPartitionsEvictionPolicy(20)
val memStore = new TimeSeriesMemStore(config, new NullColumnStore, new InMemoryMetaStore(), Some(policy))
val now = System.currentTimeMillis()
val numRawSamples = 1000
val reportingInterval = 10000
val limit = 2
val tuples = (numRawSamples until 0).by(-1).map { n =>
(now - n * reportingInterval, n.toDouble)
}
val shardPartKeyLabelValues = Seq(
Seq( // shard 0
("http_req_total", Map("instance"->"someHost:8787", "job"->"myCoolService",
"unicode_tag" -> "uni\u03C0tag", "_ws_" -> "demo", "_ns_" -> "App-0")),
("http_foo_total", Map("instance"->"someHost:8787", "job"->"myCoolService",
"unicode_tag" -> "uni\u03BCtag", "_ws_" -> "demo", "_ns_" -> "App-0"))
),
Seq ( // shard 1
("http_req_total", Map("instance"->"someHost:9090", "job"->"myCoolService",
"unicode_tag" -> "uni\u03C0tag", "_ws_" -> "demo", "_ns_" -> "App-0")),
("http_bar_total", Map("instance"->"someHost:8787", "job"->"myCoolService",
"unicode_tag" -> "uni\u03C0tag", "_ws_" -> "demo", "_ns_" -> "App-0")),
("http_req_total-A", Map("instance"->"someHost:9090", "job"->"myCoolService",
"unicode_tag" -> "uni\u03C0tag", "_ws_" -> "demo-A", "_ns_" -> "App-A")),
)
)
val addlLabels = Map("_type_" -> "prom-counter")
val expectedLabelValues = shardPartKeyLabelValues.flatMap { shardSeq =>
shardSeq.map(pair => pair._2 + ("_metric_" -> pair._1) ++ addlLabels)
}
val jobQueryResult1 = ArrayBuffer(("job", "myCoolService"), ("unicode_tag", "uni\u03C0tag"))
val jobQueryResult2 = ArrayBuffer(("job", "myCoolService"), ("unicode_tag", "uni\u03BCtag"))
implicit val execTimeout = 5.seconds
def initShard(memStore: TimeSeriesMemStore,
partKeyLabelValues: Seq[Tuple2[String, Map[String, String]]],
ishard: Int): Unit = {
val partTagsUTF8s = partKeyLabelValues.map{case (m, t) => (m, t.map { case (k, v) => (k.utf8, v.utf8)})}
// NOTE: due to max-chunk-size in storeConf = 100, this will make (numRawSamples / 100) chunks
// Be sure to reset the builder; it is in an Object so static and shared amongst tests
builder.reset()
partTagsUTF8s.map { case (metric, partTagsUTF8) =>
tuples.map { t => SeqRowReader(Seq(t._1, t._2, metric, partTagsUTF8)) }
.foreach(builder.addFromReader(_, Schemas.promCounter))
}
memStore.setup(timeseriesDatasetMultipleShardKeys.ref, Schemas(Schemas.promCounter), ishard, TestData.storeConf)
memStore.ingest(timeseriesDatasetMultipleShardKeys.ref, ishard, SomeData(builder.allContainers.head, 0))
}
override def beforeAll(): Unit = {
for (ishard <- 0 until shardPartKeyLabelValues.size) {
initShard(memStore, shardPartKeyLabelValues(ishard), ishard)
}
memStore.refreshIndexForTesting(timeseriesDatasetMultipleShardKeys.ref)
}
override def afterAll(): Unit = {
memStore.shutdown()
}
val dummyDispatcher = new PlanDispatcher {
override def dispatch(plan: ExecPlan)
(implicit sched: Scheduler): Task[QueryResponse] = plan.execute(memStore,
QuerySession(QueryContext(), queryConfig))(sched)
override def clusterName: String = ???
override def isLocalCall: Boolean = ???
}
val executeDispatcher = new PlanDispatcher {
override def isLocalCall: Boolean = ???
override def clusterName: String = ???
override def dispatch(plan: ExecPlan)
(implicit sched: Scheduler): Task[QueryResponse] = {
plan.execute(memStore, querySession)(sched)
}
}
it ("should read the job names from timeseriesindex matching the columnfilters") {
import ZeroCopyUTF8String._
val filters = Seq(ColumnFilter("_metric_", Filter.Equals("http_req_total".utf8)),
ColumnFilter("job", Filter.Equals("myCoolService".utf8)))
val leaves = (0 until shardPartKeyLabelValues.size).map{ ishard =>
LabelValuesExec(QueryContext(), executeDispatcher, timeseriesDatasetMultipleShardKeys.ref,
ishard, filters, Seq("job", "unicode_tag"), now-5000, now)
}.toSeq
val execPlan = LabelValuesDistConcatExec(QueryContext(), executeDispatcher, leaves)
val resp = execPlan.execute(memStore, querySession).runToFuture.futureValue
val result = (resp: @unchecked) match {
case QueryResult(id, _, response, _, _, _) => {
val rv = response(0)
rv.rows.size shouldEqual 1
val record = rv.rows.next().asInstanceOf[BinaryRecordRowReader]
rv.asInstanceOf[SerializedRangeVector].schema.toStringPairs(record.recordBase, record.recordOffset)
}
}
result shouldEqual jobQueryResult1
}
it("should not return any rows for wrong column filters") {
import ZeroCopyUTF8String._
val filters = Seq (ColumnFilter("__name__", Filter.Equals("http_req_total1".utf8)),
ColumnFilter("job", Filter.Equals("myCoolService".utf8)))
val leaves = (0 until shardPartKeyLabelValues.size).map { ishard =>
PartKeysExec(QueryContext(), executeDispatcher, timeseriesDatasetMultipleShardKeys.ref,
ishard, filters, false, now-5000, now)
}.toSeq
val execPlan = PartKeysDistConcatExec(QueryContext(), executeDispatcher, leaves)
val resp = execPlan.execute(memStore, querySession).runToFuture.futureValue
(resp: @unchecked) match {
case QueryResult(_, _, results, _, _, _) => results.size shouldEqual 0
}
}
it("should read the label names/values from timeseriesindex matching the columnfilters") {
import ZeroCopyUTF8String._
val filters = Seq (ColumnFilter("job", Filter.Equals("myCoolService".utf8)))
val leaves = (0 until shardPartKeyLabelValues.size).map{ ishard =>
PartKeysExec(QueryContext(), executeDispatcher, timeseriesDatasetMultipleShardKeys.ref,
ishard, filters, false, now-5000, now)
}.toSeq
val execPlan = PartKeysDistConcatExec(QueryContext(), executeDispatcher, leaves)
val resp = execPlan.execute(memStore, querySession).runToFuture.futureValue
val result = (resp: @unchecked) match {
case QueryResult(id, _, response, _, _, _) =>
response.size shouldEqual 1
response(0).rows.map { row =>
val r = row.asInstanceOf[BinaryRecordRowReader]
response(0).asInstanceOf[SerializedRangeVector]
.schema.toStringPairs(r.recordBase, r.recordOffset).toMap
}.toSet
}
result shouldEqual expectedLabelValues.toSet
}
it("should return one matching row (limit 1)") {
import ZeroCopyUTF8String._
val filters = Seq(ColumnFilter("job", Filter.Equals("myCoolService".utf8)))
// Reducing limit results in truncated metadata response
val execPlan = PartKeysExec(QueryContext(plannerParams = PlannerParams(sampleLimit = limit - 1)), executeDispatcher,
timeseriesDatasetMultipleShardKeys.ref, 0, filters, false, now - 5000, now)
val resp = execPlan.execute(memStore, querySession).runToFuture.futureValue
val result = (resp: @unchecked) match {
case QueryResult(id, _, response, _, _, _) => {
response.size shouldEqual 1
response(0).rows.map { row =>
val r = row.asInstanceOf[BinaryRecordRowReader]
response(0).asInstanceOf[SerializedRangeVector]
.schema.toStringPairs(r.recordBase, r.recordOffset).toMap
}.toList
}
}
result shouldEqual List(expectedLabelValues(0))
}
it ("should be able to query labels with filter") {
val expectedLabels = Set("job", "_metric_", "unicode_tag", "instance", "_ws_", "_ns_")
val filters = Seq (ColumnFilter("job", Filter.Equals("myCoolService".utf8)))
val leaves = (0 until shardPartKeyLabelValues.size).map{ ishard =>
LabelNamesExec(QueryContext(), executeDispatcher,
timeseriesDatasetMultipleShardKeys.ref, ishard, filters, now-5000, now)
}.toSeq
val execPlan = LabelNamesDistConcatExec(QueryContext(), executeDispatcher, leaves)
val resp = execPlan.execute(memStore, querySession).runToFuture.futureValue
val result = (resp: @unchecked) match {
case QueryResult(id, _, response, _, _, _) => {
val rv = response(0)
rv.rows.size shouldEqual expectedLabels.size
rv.rows.map(row => {
val br = row.asInstanceOf[BinaryRecordRowReader]
br.schema.colValues(br.recordBase, br.recordOffset, br.schema.colNames).head
})
}
}
result.toSet shouldEqual expectedLabels
}
it ("should be able to query with unicode filter") {
val filters = Seq (ColumnFilter("unicode_tag", Filter.Equals("uni\u03BCtag".utf8)))
val leaves = (0 until shardPartKeyLabelValues.size).map{ ishard =>
LabelValuesExec(QueryContext(), executeDispatcher, timeseriesDatasetMultipleShardKeys.ref,
ishard, filters, Seq("job", "unicode_tag"), now-5000, now)
}.toSeq
val execPlan = LabelValuesDistConcatExec(QueryContext(), executeDispatcher, leaves)
val resp = execPlan.execute(memStore, querySession).runToFuture.futureValue
val result = (resp: @unchecked) match {
case QueryResult(id, _, response, _, _, _) => {
val rv = response(0)
rv.rows.size shouldEqual 1
val record = rv.rows.next().asInstanceOf[BinaryRecordRowReader]
rv.asInstanceOf[SerializedRangeVector].schema.toStringPairs(record.recordBase, record.recordOffset)
}
}
result shouldEqual jobQueryResult2
}
it("should be able to query label cardinality") {
// Tests all, LabelCardinalityExec, LabelCardinalityDistConcatExec and LabelCardinalityPresenter
// Though we will search by ns, ws and metric name, technically we can search by any label in index
val filters = Seq(ColumnFilter("instance", Filter.Equals("someHost:8787".utf8)))
val qContext = QueryContext()
val leaves = (0 until shardPartKeyLabelValues.size).map{ ishard =>
LabelCardinalityExec(qContext, dummyDispatcher,
timeseriesDatasetMultipleShardKeys.ref, ishard, filters, now - 5000, now)
}.toSeq
val execPlan = LabelCardinalityReduceExec(qContext, dummyDispatcher, leaves)
execPlan.addRangeVectorTransformer(new LabelCardinalityPresenter())
val resp = execPlan.execute(memStore, querySession).runToFuture.futureValue
(resp: @unchecked) match {
case QueryResult(id, _, response, _, _, _) => {
response.size shouldEqual 2
val rv1 = response(0)
val rv2 = response(1)
rv1.rows.size shouldEqual 1
rv2.rows.size shouldEqual 1
val record1 = rv1.rows.next().asInstanceOf[BinaryRecordRowReader]
val result1 = rv1.asInstanceOf[SerializedRangeVector]
.schema.toStringPairs(record1.recordBase, record1.recordOffset).toMap
val record2 = rv2.rows.next().asInstanceOf[BinaryRecordRowReader]
val result2 = rv2.asInstanceOf[SerializedRangeVector]
.schema.toStringPairs(record2.recordBase, record2.recordOffset).toMap
result1 shouldEqual Map("_ns_" -> "1",
"unicode_tag" -> "1",
"_type_" -> "1",
"job" -> "1",
"instance" -> "1",
"_metric_" -> "1",
"_ws_" -> "1")
result2 shouldEqual Map("_ns_" -> "1",
"unicode_tag" -> "1",
"_type_" -> "1",
"job" -> "1",
"instance" -> "1",
"_metric_" -> "1",
"_ws_" -> "1")
}
}
}
it ("should correctly execute TsCardExec") {
import filodb.query.exec.TsCardExec._
case class TestSpec(shardKeyPrefix: Seq[String], numGroupByFields: Int, exp: Map[Seq[String], CardCounts])
// Note: expected strings are eventually concatenated with a delimiter
// and converted to ZeroCopyUTF8Strings.
Seq(
TestSpec(Seq(), 1, Map(
Seq("demo-A") -> CardCounts(1,1),
Seq("demo") -> CardCounts(4,4))),
TestSpec(Seq(), 2, Map(
Seq("demo", "App-0") -> CardCounts(4,4),
Seq("demo-A", "App-A") -> CardCounts(1,1))),
TestSpec(Seq(), 3, Map(
Seq("demo", "App-0", "http_foo_total") -> CardCounts(1,1),
Seq("demo", "App-0", "http_req_total") -> CardCounts(2,2),
Seq("demo", "App-0", "http_bar_total") -> CardCounts(1,1),
Seq("demo-A", "App-A", "http_req_total-A") -> CardCounts(1,1))),
TestSpec(Seq("demo"), 1, Map(
Seq("demo") -> CardCounts(4,4))),
TestSpec(Seq("demo"), 2, Map(
Seq("demo", "App-0") -> CardCounts(4,4))),
TestSpec(Seq("demo"), 3, Map(
Seq("demo", "App-0", "http_foo_total") -> CardCounts(1,1),
Seq("demo", "App-0", "http_req_total") -> CardCounts(2,2),
Seq("demo", "App-0", "http_bar_total") -> CardCounts(1,1))),
TestSpec(Seq("demo", "App-0"), 2, Map(
Seq("demo", "App-0") -> CardCounts(4,4))),
TestSpec(Seq("demo", "App-0"), 3, Map(
Seq("demo", "App-0", "http_foo_total") -> CardCounts(1,1),
Seq("demo", "App-0", "http_req_total") -> CardCounts(2,2),
Seq("demo", "App-0", "http_bar_total") -> CardCounts(1,1))),
TestSpec(Seq("demo", "App-0", "http_req_total"), 3, Map(
Seq("demo", "App-0", "http_req_total") -> CardCounts(2,2)))
).foreach{ testSpec =>
val leaves = (0 until shardPartKeyLabelValues.size).map{ ishard =>
new TsCardExec(QueryContext(), executeDispatcher,
timeseriesDatasetMultipleShardKeys.ref, ishard, testSpec.shardKeyPrefix, testSpec.numGroupByFields)
}.toSeq
val execPlan = TsCardReduceExec(QueryContext(), executeDispatcher, leaves)
val resp = execPlan.execute(memStore, querySession).runToFuture.futureValue
val result = (resp: @unchecked) match {
case QueryResult(id, _, response, _, _, _) =>
// should only have a single RangeVector
response.size shouldEqual 1
val resultMap = response(0).rows().map{r =>
val data = RowData.fromRowReader(r)
data.group -> data.counts
}.toMap
resultMap shouldEqual testSpec.exp.map { case (prefix, counts) =>
prefixToGroup(prefix) -> counts
}.toMap
}
}
}
}
|
filodb/FiloDB
|
query/src/test/scala/filodb/query/exec/MetadataExecSpec.scala
|
Scala
|
apache-2.0
| 15,753
|
package de.csmath.scalog.substitution
import de.csmath.scalog.Types._
import de.csmath.scalog.AstToProlog._
import scala.collection.immutable.HashMap
trait Substitution {
def apply(term: Term): Term = term match {
    case v: Var if mapping.contains(v) =>
mapping(v)
case s@Struct(functor,terms) =>
Struct(functor,terms.map(apply))
case PlCons(head,tail) =>
PlCons(apply(head),subList(tail))
case y => y
}
def apply(terms: List[Term]): List[Term] = terms map apply
def subPred(predicate: Struct): Struct = apply(predicate).asInstanceOf[Struct]
def subPred(predicates: List[Struct]): List[Struct] = predicates map subPred
def subList(list: PlList): PlList = apply(list).asInstanceOf[PlList]
def compose(other: Substitution): Substitution
def restrict(vars: Set[Var]): Substitution
def mapping: Map[Var,Term]
override def toString: String = {
mapping.toList.map { case (k,v) => k.name + " <- " + transBack(v)}.mkString("{ ",", "," }")
}
}
object Substitution {
def apply(): Substitution = new HashMapSubstitution
def apply(aMap: Map[Var,Term]): Substitution = new HashMapSubstitution(HashMap.empty ++ aMap)
}
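// A small usage sketch (not in the original source). The Var/Struct constructor shapes
// are assumed from their use above (Var carries a name; Struct carries a functor and a
// term list).
object SubstitutionSketch {
  def main(args: Array[String]): Unit = {
    val sub = Substitution(Map(Var("X") -> Struct("a", Nil)))
    // f(X, Y) becomes f(a, Y): bound variables are replaced, free ones pass through.
    println(sub(Struct("f", List(Var("X"), Var("Y")))))
  }
}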
|
lpcsmath/scalog
|
src/main/scala/de/csmath/scalog/substitution/Substitution.scala
|
Scala
|
bsd-2-clause
| 1,185
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources
import org.apache.hadoop.mapreduce.TaskAttemptContext
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.types.StructType
/**
 * A factory that produces [[OutputWriter]]s. A new [[OutputWriterFactory]] is created on the
 * driver side for each write job issued when writing to a [[HadoopFsRelation]], and then gets
 * serialized to the executor side to create the actual [[OutputWriter]]s on the fly.
*/
abstract class OutputWriterFactory extends Serializable {
/** Returns the file extension to be used when writing files out. */
def getFileExtension(context: TaskAttemptContext): String
/**
   * When writing to a [[HadoopFsRelation]], this method gets called by each task on the
   * executor side to instantiate new [[OutputWriter]]s.
*
* @param path Path to write the file.
* @param dataSchema Schema of the rows to be written. Partition columns are not included in the
* schema if the relation being written is partitioned.
* @param context The Hadoop MapReduce task context.
*/
def newInstance(
path: String,
dataSchema: StructType,
context: TaskAttemptContext): OutputWriter
}
/**
* [[OutputWriter]] is used together with [[HadoopFsRelation]] for persisting rows to the
* underlying file system. Subclasses of [[OutputWriter]] must provide a zero-argument constructor.
* An [[OutputWriter]] instance is created and initialized when a new output file is opened on
* executor side. This instance is used to persist rows to this single output file.
*/
abstract class OutputWriter {
/**
* Persists a single row. Invoked on the executor side. When writing to dynamically partitioned
* tables, dynamic partition columns are not included in rows to be written.
*/
def write(row: InternalRow): Unit
/**
* Closes the [[OutputWriter]]. Invoked on the executor side after all rows are persisted, before
* the task output is committed.
*/
def close(): Unit
}
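// A minimal sketch (not part of Spark) of a concrete factory/writer pair under the
// contract above: the factory is built on the driver, serialized, and newInstance is
// invoked per task on the executors. The CSV rendering here is a toy illustration; a
// real writer would go through the Hadoop output machinery rather than java.io.
class ToyCsvWriterFactory extends OutputWriterFactory {
  override def getFileExtension(context: TaskAttemptContext): String = ".csv"
  override def newInstance(
      path: String,
      dataSchema: StructType,
      context: TaskAttemptContext): OutputWriter = new ToyCsvWriter(path, dataSchema)
}

class ToyCsvWriter(path: String, dataSchema: StructType) extends OutputWriter {
  private val out = new java.io.PrintWriter(new java.io.File(path))
  override def write(row: InternalRow): Unit = {
    // Render each field via the schema's data types; partition columns never appear here.
    val fields = dataSchema.fields.indices.map(i => row.get(i, dataSchema.fields(i).dataType))
    out.println(fields.mkString(","))
  }
  override def close(): Unit = out.close()
}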
|
witgo/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/OutputWriter.scala
|
Scala
|
apache-2.0
| 2,815
|
package org.jetbrains.sbt
package project.structure
import com.intellij.openapi.util.Key
/**
* @author Pavel Fatin
*/
sealed abstract class OutputType
object OutputType {
object StdOut extends OutputType
object StdErr extends OutputType
object MySystem extends OutputType
final case class Other(key: Key[_]) extends OutputType
}
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/sbt/project/structure/OutputType.scala
|
Scala
|
apache-2.0
| 342
|
package epfl.pacman
package maze
trait Thingies { this: Models =>
private var id = 0
private def freshId = { id += 1; id }
abstract class Thingy { val pos: Position }
abstract class Figure extends Thingy {
override val pos: OffsetPosition
val dir: Direction
val stopped: Boolean
    def incrOffset: Unit = {
dir match {
case Up => pos.yo -= 1
case Left => pos.xo -= 1
case Down => pos.yo += 1
case Right => pos.xo += 1
}
}
}
/**
* PacMan
*/
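  // counter is stepped by 2 through [0, 60); value then traces a triangle wave 0 -> 30 -> 0,
  // presumably driving PacMan's mouth as it opens and closes.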
case class Angle(var counter: Int) {
    def value = if (counter > 30) 60 - counter else counter
}
case class PacMan(pos: OffsetPosition,
dir: Direction,
stopped: Boolean = true,
hunter: Boolean = false,
angle: Angle = Angle(0),
lives: Int = Settings.nbLives) extends Figure {
    def incrAngle: Unit = {
angle.counter = (angle.counter + 2) % 60
}
}
/**
* Monsters
*/
sealed abstract class MonsterTyp
object Cerebro extends MonsterTyp
object Info extends MonsterTyp
case class AnimationSettings(var status: Boolean, var animOffset: Int)
case class Monster(pos: OffsetPosition,
dir: Direction,
typ: MonsterTyp,
stopped: Boolean = false,
anim: AnimationSettings = AnimationSettings(true, 0),
id: Int = freshId) extends Figure {
    def incrAnimOffset: Unit = {
anim.animOffset = (anim.animOffset + 1) % 6
}
    def activateAnim: Unit = {
anim.status = true
}
    def deactivateAnim: Unit = {
anim.status = false
}
}
/**
* Walls
*/
case class Wall(pos: Position, tpe: WallType = BlueWall) extends Thingy
sealed abstract class WallType
case object BlueWall extends WallType
case object RedWall extends WallType
case object NoWall extends WallType
/**
* Points
*/
case class NormalPoint(pos: Position) extends Thingy
case class SuperPoint(pos: Position) extends Thingy
}
|
lrytz/pacman
|
src/main/scala/epfl/pacman/maze/Thingies.scala
|
Scala
|
bsd-2-clause
| 2,076
|
package nodes.util
import breeze.linalg.SparseVector
import org.apache.spark.rdd.RDD
import pipelines.Estimator
import scala.reflect.ClassTag
/**
* An Estimator that chooses all sparse features observed when training,
* and produces a transformer which builds a sparse vector out of them
*/
case class AllSparseFeatures[T: ClassTag]() extends Estimator[Seq[(T, Double)], SparseVector[Double]] {
override def fit(data: RDD[Seq[(T, Double)]]): SparseFeatureVectorizer[T] = {
val featureSpace = data.flatMap(_.map(_._1)).distinct()
.zipWithIndex().collect().map(x => (x._1, x._2.toInt)).toMap
new SparseFeatureVectorizer(featureSpace)
}
}
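// A usage sketch (not in the original source), assuming SparseFeatureVectorizer is the
// Transformer produced by fit and exposes the usual apply(RDD) of the pipelines API:
//
//   val data: RDD[Seq[(String, Double)]] = sc.parallelize(Seq(
//     Seq("a" -> 1.0, "b" -> 2.0),
//     Seq("b" -> 3.0)))
//   val vectorizer = AllSparseFeatures[String]().fit(data)   // feature space: {a, b}
//   val vectors = vectorizer(data)                           // RDD[SparseVector[Double]]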
|
shivaram/keystone
|
src/main/scala/nodes/util/AllSparseFeatures.scala
|
Scala
|
apache-2.0
| 662
|
package cc.factorie.directed
import cc.factorie.util.FastLogging
import cc.factorie.variable.DoubleVariable
import org.junit.Test
import org.scalatest.junit.JUnitSuite
class TestDirectedModel extends JUnitSuite with FastLogging {
@Test
def testDirectedModel(): Unit = {
implicit val model = DirectedModel()
implicit val random = new scala.util.Random(0)
assert(model.isInstanceOf[ItemizedDirectedModel])
val mean = new DoubleVariable(1)
val variance = new DoubleVariable(2.0)
val data = for (i <- 1 to 10) yield new DoubleVariable :~ Gaussian(mean, variance)
assert(model.factors(mean).size == 10)
assert(model.childFactors(mean).size == 10)
}
}
|
hlin117/factorie
|
src/test/scala/cc/factorie/directed/TestDirectedModel.scala
|
Scala
|
apache-2.0
| 693
|
package com.imaginea.activegrid.core.models
/**
* Created by nagulmeeras on 01/11/16.
*/
sealed trait GroupType {
def groupType: String
}
case object GroupType {
  def toGroupType(groupType: String): GroupType = {
    groupType match {
      case "role" => Role
      // Fail fast with a descriptive error instead of a bare scala.MatchError
      case other => throw new IllegalArgumentException(s"Unknown group type: $other")
    }
  }
case object Role extends GroupType {
override def groupType: String = "role"
}
}
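// Round-trip sketch (not in the original source): the string form and the ADT stay in sync.
//   GroupType.toGroupType("role") == GroupType.Role
//   GroupType.Role.groupType == "role"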
|
eklavya/activeGrid
|
src/main/scala/com/imaginea/activegrid/core/models/GroupType.scala
|
Scala
|
apache-2.0
| 372
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import scala.beans.BeanInfo
import org.apache.spark.SparkFunSuite
import org.apache.spark.ml.param.ParamsSuite
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.sql.{DataFrame, Row}
@BeanInfo
case class TokenizerTestData(rawText: String, wantedTokens: Array[String])
class TokenizerSuite extends SparkFunSuite {
test("params") {
ParamsSuite.checkParams(new Tokenizer)
}
}
class RegexTokenizerSuite extends SparkFunSuite with MLlibTestSparkContext {
import org.apache.spark.ml.feature.RegexTokenizerSuite._
test("params") {
ParamsSuite.checkParams(new RegexTokenizer)
}
test("RegexTokenizer") {
val tokenizer0 = new RegexTokenizer()
.setGaps(false)
.setPattern("\\\\w+|\\\\p{Punct}")
.setInputCol("rawText")
.setOutputCol("tokens")
val dataset0 = sqlContext.createDataFrame(Seq(
TokenizerTestData("Test for tokenization.", Array("Test", "for", "tokenization", ".")),
TokenizerTestData("Te,st. punct", Array("Te", ",", "st", ".", "punct"))
))
testRegexTokenizer(tokenizer0, dataset0)
val dataset1 = sqlContext.createDataFrame(Seq(
TokenizerTestData("Test for tokenization.", Array("Test", "for", "tokenization")),
TokenizerTestData("Te,st. punct", Array("punct"))
))
tokenizer0.setMinTokenLength(3)
testRegexTokenizer(tokenizer0, dataset1)
val tokenizer2 = new RegexTokenizer()
.setInputCol("rawText")
.setOutputCol("tokens")
val dataset2 = sqlContext.createDataFrame(Seq(
TokenizerTestData("Test for tokenization.", Array("Test", "for", "tokenization.")),
TokenizerTestData("Te,st. punct", Array("Te,st.", "punct"))
))
testRegexTokenizer(tokenizer2, dataset2)
}
}
object RegexTokenizerSuite extends SparkFunSuite {
def testRegexTokenizer(t: RegexTokenizer, dataset: DataFrame): Unit = {
t.transform(dataset)
.select("tokens", "wantedTokens")
.collect()
.foreach { case Row(tokens, wantedTokens) =>
assert(tokens === wantedTokens)
}
}
}
|
pronix/spark
|
mllib/src/test/scala/org/apache/spark/ml/feature/TokenizerSuite.scala
|
Scala
|
apache-2.0
| 2,895
|
package com.donniewest.titan.tent
object identityJson {
  // Contains JSON strings for authentication
val registration = """{
"type": "https://tent.io/types/app/v0#",
"content": {
"name": "Titan",
"url": "https://tent.donniewest.com",
"types": {
"read": [
"https://tent.io/types/app/v0"
],
"write": [
"https://tent.io/types/status/v0",
"https://tent.io/types/photo/v0"
]
},
"redirect_uri": "titan://oauth"
},
"permissions": {
"public": false
}
}"""
}
|
DonnieWest/Titan
|
src/main/scala/com/donniewest/titan/tent/identityJson.scala
|
Scala
|
apache-2.0
| 516
|
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v2
import uk.gov.hmrc.ct.box.{Linked, CtOptionalInteger, CtBoxIdentifier}
import uk.gov.hmrc.ct.computations.CP670
case class B106(value: Option[Int]) extends CtBoxIdentifier("Balancing charges(Machinery & Plant - special rate pool)") with CtOptionalInteger
object B106 extends Linked[CP670, B106] {
override def apply(source: CP670): B106 = B106(source.value)
}
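// A wiring sketch (not in the original source): Linked boxes derive a CT600 box from a
// computations box, assuming CP670 wraps an Option[Int] the same way B106 does.
//   val b106 = B106(CP670(Some(1500)))
//   b106.value == Some(1500)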
|
ahudspith-equalexperts/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/ct600/v2/B106.scala
|
Scala
|
apache-2.0
| 1,003
|
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.macrotestkit
import scala.language.experimental.macros
import java.util.regex.Pattern
import scala.reflect.macros.TypecheckException
import scala.reflect.macros.blackbox
/**
* A macro that ensures that a code snippet does not typecheck.
*/
object ShouldNotTypecheck {
def apply(name: String, code: String): Unit = macro ShouldNotTypecheck.applyImplNoExp
def apply(name: String, code: String, expected: String): Unit = macro ShouldNotTypecheck.applyImpl
}
final class ShouldNotTypecheck(val c: blackbox.Context) {
import c.universe._
def applyImplNoExp(name: Expr[String], code: Expr[String]): Expr[Unit] = applyImpl(name, code, c.Expr(EmptyTree))
def applyImpl(name: Expr[String], code: Expr[String], expected: Expr[String]): Expr[Unit] = {
val Expr(Literal(Constant(codeStr: String))) = code
val Expr(Literal(Constant(nameStr: String))) = name
val (expPat, expMsg) = expected.tree match {
case EmptyTree => (Pattern.compile(".*"), "Expected some error.")
case Literal(Constant(s: String)) =>
(Pattern.compile(s, Pattern.CASE_INSENSITIVE), "Expected error matching: " + s)
}
try c.typecheck(c.parse("{ " + codeStr + " }"))
catch {
case e: TypecheckException =>
val msg = e.getMessage
if (!expPat.matcher(msg).matches) {
c.abort(c.enclosingPosition, s"$nameStr failed in an unexpected way.\n$expMsg\nActual error: $msg")
} else {
println(s"$nameStr passed.")
return reify(())
}
}
c.abort(c.enclosingPosition, s"$nameStr succeeded unexpectedly.\n$expMsg")
}
}
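// A usage sketch (not in the original source): compilation of the enclosing code fails
// unless the quoted snippet itself fails to typecheck; the optional pattern is matched
// case-insensitively against the full error message, so (?s) lets it span lines.
object ShouldNotTypecheckSketch {
  ShouldNotTypecheck("int from string", """val x: Int = "nope" """)
  ShouldNotTypecheck("int from string, checked message", """val x: Int = "nope" """, "(?s)type mismatch.*")
}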
|
rcavalcanti/lagom
|
macro-testkit/src/main/scala/com/lightbend/lagom/macrotestkit/ShouldNotTypecheck.scala
|
Scala
|
apache-2.0
| 1,706
|
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//package cogdebugger.ui.fieldvisualizations.vector
//
//import cogx._
//import scala.swing._
//import cogx.platform.cpumemory.{VectorFieldMemory, ComplexFieldMemory, ScalarFieldMemory}
//import cogdebugger.ui.fieldvisualizations.scalar.ScalarMemoryView
//
///** Stand-alone testing of a field viewer.
// *
// * @author Greg Snider
// */
//object TestComplexFieldSuperPanel extends SimpleSwingApplication {
// // Create a simple complex field
// val Rows = 20
// val Columns = 20
// val vectorField = VectorFieldMemory(Rows, Columns,
// (r, c) => new Vector(r, c)
// )
//
// lazy val top = new MainFrame {
// title = "Test Geometric2DVectorView"
// contents = new BoxPanel(Orientation.Horizontal) {
// contents += new Geometric2DVectorView(null, vectorField.fieldShape,
// vectorField.tensorShape)
// {
// update(null, vectorField, 0L)
// }
// // contents += new Geometric2DVectorView(field0D)
// }
// minimumSize = new Dimension(250, 100)
// }
//}
|
hpe-cct/cct-core
|
src/test/scala/cogdebugger/ui/fieldvisualizations/vector/TestGeometric2DVectorViewer.scala
|
Scala
|
apache-2.0
| 1,639
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy
import scala.collection.immutable.List
import org.apache.spark.deploy.ExecutorState.ExecutorState
import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo}
import org.apache.spark.deploy.master.DriverState.DriverState
import org.apache.spark.deploy.master.RecoveryState.MasterState
import org.apache.spark.deploy.worker.{DriverRunner, ExecutorRunner}
import org.apache.spark.resource.ResourceInformation
import org.apache.spark.rpc.{RpcAddress, RpcEndpointRef}
import org.apache.spark.util.Utils
private[deploy] sealed trait DeployMessage extends Serializable
/** Contains messages sent between Scheduler endpoint nodes. */
private[deploy] object DeployMessages {
// Worker to Master
/**
   * @param id the worker id
   * @param host the worker host
   * @param port the worker port
   * @param worker the worker endpoint ref
   * @param cores the number of cores on the worker
   * @param memory the memory size of the worker
   * @param workerWebUiUrl the worker Web UI address
   * @param masterAddress the master address used by the worker to connect
   * @param resources the resources of the worker
*/
case class RegisterWorker(
id: String,
host: String,
port: Int,
worker: RpcEndpointRef,
cores: Int,
memory: Int,
workerWebUiUrl: String,
masterAddress: RpcAddress,
resources: Map[String, ResourceInformation] = Map.empty)
extends DeployMessage {
Utils.checkHost(host)
assert (port > 0)
}
/**
   * An internal message used by the Master itself to handle the
   * `DecommissionWorkersOnHosts` request from `MasterWebUI` asynchronously.
   * @param ids a collection of Worker ids that should be decommissioned.
*/
case class DecommissionWorkers(ids: Seq[String]) extends DeployMessage
/**
   * A message sent from the Master to a Worker to decommission that Worker.
   * It's used for the case where decommissioning is triggered at the MasterWebUI.
   *
   * Note that decommissioning a Worker will cause all the executors on that Worker
   * to be decommissioned as well.
*/
object DecommissionWorker extends DeployMessage
/**
   * A message sent by the Worker to itself when it receives a signal,
* indicating the Worker starts to decommission.
*/
object WorkerDecommissionSigReceived extends DeployMessage
/**
   * A message sent from a Worker to the Master to tell the Master that the Worker has started
   * decommissioning. It's used for the case where decommissioning is triggered at the Worker.
*
* @param id the worker id
* @param workerRef the worker endpoint ref
*/
case class WorkerDecommissioning(id: String, workerRef: RpcEndpointRef) extends DeployMessage
case class ExecutorStateChanged(
appId: String,
execId: Int,
state: ExecutorState,
message: Option[String],
exitStatus: Option[Int])
extends DeployMessage
case class DriverStateChanged(
driverId: String,
state: DriverState,
exception: Option[Exception])
extends DeployMessage
case class WorkerExecutorStateResponse(
desc: ExecutorDescription,
resources: Map[String, ResourceInformation])
case class WorkerDriverStateResponse(
driverId: String,
resources: Map[String, ResourceInformation])
case class WorkerSchedulerStateResponse(
id: String,
execResponses: List[WorkerExecutorStateResponse],
driverResponses: Seq[WorkerDriverStateResponse])
/**
   * A worker will send this message to the master when it registers with the master. The
   * master will then compare the reported executors and drivers with its own records and
   * tell the worker to kill any executors and drivers it does not know about.
*/
case class WorkerLatestState(
id: String,
executors: Seq[ExecutorDescription],
driverIds: Seq[String]) extends DeployMessage
case class Heartbeat(workerId: String, worker: RpcEndpointRef) extends DeployMessage
/**
* Used by the MasterWebUI to request the master to decommission all workers that are active on
* any of the given hostnames.
   * @param hostnames a list of hostnames without ports, e.g. "localhost", "foo.bar.com"
*/
case class DecommissionWorkersOnHosts(hostnames: Seq[String])
// Master to Worker
sealed trait RegisterWorkerResponse
/**
* @param master the master ref
* @param masterWebUiUrl the master Web UI address
* @param masterAddress the master address used by the worker to connect. It should be
* [[RegisterWorker.masterAddress]].
* @param duplicate whether it is a duplicate register request from the worker
*/
case class RegisteredWorker(
master: RpcEndpointRef,
masterWebUiUrl: String,
masterAddress: RpcAddress,
duplicate: Boolean) extends DeployMessage with RegisterWorkerResponse
case class RegisterWorkerFailed(message: String) extends DeployMessage with RegisterWorkerResponse
case object MasterInStandby extends DeployMessage with RegisterWorkerResponse
case class ReconnectWorker(masterUrl: String) extends DeployMessage
case class KillExecutor(masterUrl: String, appId: String, execId: Int) extends DeployMessage
case class LaunchExecutor(
masterUrl: String,
appId: String,
execId: Int,
appDesc: ApplicationDescription,
cores: Int,
memory: Int,
resources: Map[String, ResourceInformation] = Map.empty)
extends DeployMessage
case class LaunchDriver(
driverId: String,
driverDesc: DriverDescription,
resources: Map[String, ResourceInformation] = Map.empty) extends DeployMessage
case class KillDriver(driverId: String) extends DeployMessage
case class ApplicationFinished(id: String)
// Worker internal
case object WorkDirCleanup // Sent to Worker endpoint periodically for cleaning up app folders
case object ReregisterWithMaster // used when a worker attempts to reconnect to a master
// AppClient to Master
case class RegisterApplication(appDescription: ApplicationDescription, driver: RpcEndpointRef)
extends DeployMessage
case class UnregisterApplication(appId: String)
case class MasterChangeAcknowledged(appId: String)
case class RequestExecutors(appId: String, requestedTotal: Int)
case class KillExecutors(appId: String, executorIds: Seq[String])
// Master to AppClient
case class RegisteredApplication(appId: String, master: RpcEndpointRef) extends DeployMessage
// TODO(matei): replace hostPort with host
case class ExecutorAdded(id: Int, workerId: String, hostPort: String, cores: Int, memory: Int) {
Utils.checkHostPort(hostPort)
}
// When the host of Worker is lost or decommissioned, the `workerHost` is the host address
// of that Worker. Otherwise, it's None.
case class ExecutorUpdated(id: Int, state: ExecutorState, message: Option[String],
exitStatus: Option[Int], workerHost: Option[String])
case class ApplicationRemoved(message: String)
case class WorkerRemoved(id: String, host: String, message: String)
// DriverClient <-> Master
case class RequestSubmitDriver(driverDescription: DriverDescription) extends DeployMessage
case class SubmitDriverResponse(
master: RpcEndpointRef, success: Boolean, driverId: Option[String], message: String)
extends DeployMessage
case class RequestKillDriver(driverId: String) extends DeployMessage
case class KillDriverResponse(
master: RpcEndpointRef, driverId: String, success: Boolean, message: String)
extends DeployMessage
case class RequestDriverStatus(driverId: String) extends DeployMessage
case class DriverStatusResponse(found: Boolean, state: Option[DriverState],
workerId: Option[String], workerHostPort: Option[String], exception: Option[Exception])
// Internal message in AppClient
case object StopAppClient
// Master to Worker & AppClient
case class MasterChanged(master: RpcEndpointRef, masterWebUiUrl: String)
// MasterWebUI To Master
case object RequestMasterState
// Master to MasterWebUI
case class MasterStateResponse(
host: String,
port: Int,
restPort: Option[Int],
workers: Array[WorkerInfo],
activeApps: Array[ApplicationInfo],
completedApps: Array[ApplicationInfo],
activeDrivers: Array[DriverInfo],
completedDrivers: Array[DriverInfo],
status: MasterState) {
Utils.checkHost(host)
assert (port > 0)
def uri: String = "spark://" + host + ":" + port
def restUri: Option[String] = restPort.map { p => "spark://" + host + ":" + p }
}
// WorkerWebUI to Worker
case object RequestWorkerState
// Worker to WorkerWebUI
case class WorkerStateResponse(host: String, port: Int, workerId: String,
executors: List[ExecutorRunner], finishedExecutors: List[ExecutorRunner],
drivers: List[DriverRunner], finishedDrivers: List[DriverRunner], masterUrl: String,
cores: Int, memory: Int, coresUsed: Int, memoryUsed: Int, masterWebUiUrl: String,
resources: Map[String, ResourceInformation] = Map.empty,
resourcesUsed: Map[String, ResourceInformation] = Map.empty) {
Utils.checkHost(host)
assert (port > 0)
}
// Liveness checks in various places
case object SendHeartbeat
}
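// A sketch (not in the original source) of how a worker-side endpoint might dispatch on
// the registration handshake defined above; the handler bodies are stand-ins.
//
//   def handleRegisterResponse(msg: RegisterWorkerResponse): Unit = msg match {
//     case RegisteredWorker(master, webUiUrl, masterAddress, duplicate) =>
//       // connected; remember the master ref and start sending Heartbeat messages
//     case RegisterWorkerFailed(message) => // log the message and exit
//     case MasterInStandby               => // ignore; another master is active
//   }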
|
ueshin/apache-spark
|
core/src/main/scala/org/apache/spark/deploy/DeployMessage.scala
|
Scala
|
apache-2.0
| 10,072
|
package scalaj.http
/** scalaj.http
Copyright 2010 Jonathan Hoffman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import collection.immutable.TreeMap
import java.lang.reflect.Field
import java.net.{HttpCookie, HttpURLConnection, InetSocketAddress, Proxy, URL, URLEncoder, URLDecoder}
import java.io.{DataOutputStream, InputStream, BufferedReader, InputStreamReader, ByteArrayInputStream,
ByteArrayOutputStream}
import java.security.cert.X509Certificate
import javax.net.ssl.HttpsURLConnection
import javax.net.ssl.SSLContext
import javax.net.ssl.SSLSession
import javax.net.ssl.SSLSocketFactory
import javax.net.ssl.TrustManager
import javax.net.ssl.X509TrustManager
import javax.net.ssl.HostnameVerifier
import java.util.zip.{GZIPInputStream, InflaterInputStream}
import scala.collection.JavaConverters._
import scala.util.matching.Regex
/** Helper functions for modifying the underlying HttpURLConnection */
object HttpOptions {
type HttpOption = HttpURLConnection => Unit
val officialHttpMethods = Set("GET", "POST", "HEAD", "OPTIONS", "PUT", "DELETE", "TRACE")
private lazy val methodField: Field = {
val m = classOf[HttpURLConnection].getDeclaredField("method")
m.setAccessible(true)
m
}
def method(methodOrig: String): HttpOption = c => {
val method = methodOrig.toUpperCase
if (officialHttpMethods.contains(method)) {
c.setRequestMethod(method)
} else {
        // HttpURLConnection enforces a fixed list of official HTTP methods, but not everyone
        // abides by the spec; this hack allows us to set an unofficial http method
c match {
case cs: HttpsURLConnection =>
cs.getClass.getDeclaredFields.find(_.getName == "delegate").foreach{ del =>
del.setAccessible(true)
methodField.set(del.get(cs), method)
}
case c =>
methodField.set(c, method)
}
}
}
def connTimeout(timeout: Int): HttpOption = c => c.setConnectTimeout(timeout)
def readTimeout(timeout: Int): HttpOption = c => c.setReadTimeout(timeout)
def followRedirects(shouldFollow: Boolean): HttpOption = c => c.setInstanceFollowRedirects(shouldFollow)
/** Ignore the cert chain */
def allowUnsafeSSL: HttpOption = c => c match {
case httpsConn: HttpsURLConnection =>
val hv = new HostnameVerifier() {
def verify(urlHostName: String, session: SSLSession) = true
}
httpsConn.setHostnameVerifier(hv)
val trustAllCerts = Array[TrustManager](new X509TrustManager() {
def getAcceptedIssuers: Array[X509Certificate] = null
def checkClientTrusted(certs: Array[X509Certificate], authType: String) = {}
def checkServerTrusted(certs: Array[X509Certificate], authType: String) = {}
})
val sc = SSLContext.getInstance("SSL")
sc.init(null, trustAllCerts, new java.security.SecureRandom())
httpsConn.setSSLSocketFactory(sc.getSocketFactory())
case _ => // do nothing
}
/** Add your own SSLSocketFactory to do certificate authorization or pinning */
def sslSocketFactory(sslSocketFactory: SSLSocketFactory): HttpOption = c => c match {
case httpsConn: HttpsURLConnection =>
httpsConn.setSSLSocketFactory(sslSocketFactory)
case _ => // do nothing
}
}
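// A usage sketch (not in the original source): options are plain HttpURLConnection
// mutators, so they compose on the request builder, e.g.
//
//   Http("https://example.com/api")
//     .option(HttpOptions.connTimeout(2000))
//     .option(HttpOptions.readTimeout(10000))
//     .option(HttpOptions.followRedirects(true))
//     .asString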
object MultiPart {
def apply(name: String, filename: String, mime: String, data: String): MultiPart = {
apply(name, filename, mime, data.getBytes(HttpConstants.utf8))
}
def apply(name: String, filename: String, mime: String, data: Array[Byte]): MultiPart = {
MultiPart(name, filename, mime, new ByteArrayInputStream(data), data.length, n => ())
}
}
case class MultiPart(name: String, filename: String, mime: String, data: InputStream, numBytes: Long,
                     writeCallBack: Long => Unit)
case class HttpStatusException(
code: Int,
statusLine: String,
body: String
) extends RuntimeException(s"${code} Error: ${statusLine}")
/** Result of executing a [[scalaj.http.HttpRequest]]
  * @tparam T the type of the response body, since it can be parsed directly to things other than String
* @param body the Http response body
* @param code the http response code from the status line
* @param headers the response headers
*/
case class HttpResponse[T](body: T, code: Int, headers: Map[String, IndexedSeq[String]]) {
/** test if code is in between lower and upper inclusive */
def isCodeInRange(lower: Int, upper: Int): Boolean = lower <= code && code <= upper
/** is response code 2xx */
def is2xx: Boolean = isCodeInRange(200, 299)
/** same as is2xx */
def isSuccess: Boolean = is2xx
/** is response code 3xx */
def is3xx: Boolean = isCodeInRange(300, 399)
/** same as is3xx */
def isRedirect: Boolean = is3xx
/** is response code 4xx */
def is4xx: Boolean = isCodeInRange(400, 499)
/** same as is4xx */
def isClientError: Boolean = is4xx
/** is response code 5xx */
def is5xx: Boolean = isCodeInRange(500, 599)
/** same as is5xx */
def isServerError: Boolean = is5xx
/** same as (is4xx || is5xx) */
def isError: Boolean = is4xx || is5xx
/** same as !isError */
def isNotError: Boolean = !isError
/** helper method for throwing status exceptions */
private def throwIf(condition: Boolean): HttpResponse[T] = {
if (condition) {
throw HttpStatusException(code, header("Status").getOrElse("UNKNOWN"), body.toString)
}
this
}
  /** Throws a {{{scalaj.http.HttpStatusException}}} if {{{isError}}} is true. Otherwise returns a reference to self.
   *
   * Useful if you don't want to handle 4xx or 5xx error codes from the server and just want to bubble up an Exception
   * instead. HttpStatusException.body will just be body.toString.
   *
   * Allows for chaining like this: {{{val result: String = Http(url).asString.throwError.body}}}
*/
def throwError: HttpResponse[T] = throwIf(isError)
  /** Throws a {{{scalaj.http.HttpStatusException}}} if {{{isServerError}}} is true. Otherwise returns a reference to self.
   *
   * Useful if you don't want to handle 5xx error codes from the server and just want to bubble up an Exception instead.
   * HttpStatusException.body will just be body.toString.
   *
   * Allows for chaining like this: {{{val result: String = Http(url).asString.throwServerError.body}}}
*/
def throwServerError: HttpResponse[T] = throwIf(isServerError)
/** Get the response header value for a key */
def header(key: String): Option[String] = headers.get(key).flatMap(_.headOption)
/** Get all the response header values for a repeated key */
def headerSeq(key: String): IndexedSeq[String] = headers.getOrElse(key, IndexedSeq.empty)
/** The full status line. like "HTTP/1.1 200 OK"
* throws a RuntimeException if "Status" is not in headers
*/
def statusLine: String = header("Status").getOrElse(throw new RuntimeException("headers doesn't contain Status"))
/** Location header value sent for redirects. By default, this library will not follow redirects. */
def location: Option[String] = header("Location")
/** Content-Type header value */
def contentType: Option[String] = header("Content-Type")
/** Get the parsed cookies from the "Set-Cookie" header **/
def cookies: IndexedSeq[HttpCookie] = headerSeq("Set-Cookie").flatMap(HttpCookie.parse(_).asScala)
}
/** Immutable builder for creating an http request
*
* This is the workhorse of the scalaj-http library.
*
* You shouldn't need to construct this manually. Use [[scalaj.http.Http.apply]] to get an instance
*
* The params, headers and options methods are all additive. They will always add things to the request. If you want to
* replace those things completely, you can do something like {{{.copy(params=newparams)}}}
*
*/
case class HttpRequest(
url: String,
method: String,
connectFunc: HttpConstants.HttpExec,
params: Seq[(String,String)],
headers: Seq[(String,String)],
options: Seq[HttpOptions.HttpOption],
proxyConfig: Option[Proxy],
charset: String,
sendBufferSize: Int,
urlBuilder: (HttpRequest => String),
compress: Boolean,
digestCreds: Option[(String, String)]
) {
/** Add params to the GET querystring or POST form request */
def params(p: Map[String, String]): HttpRequest = params(p.toSeq)
/** Add params to the GET querystring or POST form request */
def params(p: Seq[(String,String)]): HttpRequest = copy(params = params ++ p)
/** Add params to the GET querystring or POST form request */
def params(p: (String,String), rest: (String, String)*): HttpRequest = params(p +: rest)
/** Add a param to the GET querystring or POST form request */
def param(key: String, value: String): HttpRequest = params(key -> value)
/** Add http headers to the request */
def headers(h: Map[String, String]): HttpRequest = headers(h.toSeq)
/** Add http headers to the request */
def headers(h: Seq[(String,String)]): HttpRequest = copy(headers = headers ++ h)
/** Add http headers to the request */
def headers(h: (String,String), rest: (String, String)*): HttpRequest = headers(h +: rest)
/** Add a http header to the request */
def header(key: String, value: String): HttpRequest = headers(key -> value)
/** Add Cookie header to the request */
def cookie(name: String, value: String): HttpRequest = header("Cookie", name + "=" + value + ";")
/** Add Cookie header to the request */
def cookie(ck: HttpCookie): HttpRequest = cookie(ck.getName, ck.getValue)
  /** Add multiple cookies to the request. Useful for round-tripping cookies from HttpResponse.cookies */
def cookies(cks: Seq[HttpCookie]): HttpRequest = header(
"Cookie",
cks.map(ck => ck.getName + "=" + ck.getValue).mkString("; ")
)
/** Entry point for modifying the [[java.net.HttpURLConnection]] before the request is executed */
def options(o: Seq[HttpOptions.HttpOption]): HttpRequest = copy(options = o ++ options)
/** Entry point for modifying the [[java.net.HttpURLConnection]] before the request is executed */
def options(o: HttpOptions.HttpOption, rest: HttpOptions.HttpOption*): HttpRequest = options(o +: rest)
/** Entry point for modifying the [[java.net.HttpURLConnection]] before the request is executed */
def option(o: HttpOptions.HttpOption): HttpRequest = options(o)
/** Add a standard basic authorization header */
def auth(user: String, password: String) = header("Authorization", HttpConstants.basicAuthValue(user, password))
/** Add a proxy basic authorization header */
def proxyAuth(user: String, password: String) = header("Proxy-Authorization", HttpConstants.basicAuthValue(user, password))
/** Add digest authentication credentials */
def digestAuth(user: String, password: String) = copy(digestCreds = Some(user -> password))
/** OAuth v1 sign the request with the consumer token */
def oauth(consumer: Token): HttpRequest = oauth(consumer, None, None)
  /** OAuth v1 sign the request with both the consumer and client token */
def oauth(consumer: Token, token: Token): HttpRequest = oauth(consumer, Some(token), None)
  /** OAuth v1 sign the request with both the consumer and client token and a verifier */
def oauth(consumer: Token, token: Token, verifier: String): HttpRequest = oauth(consumer, Some(token), Some(verifier))
  /** OAuth v1 sign the request with both the consumer and client token and a verifier */
def oauth(consumer: Token, token: Option[Token], verifier: Option[String]): HttpRequest = {
OAuth.sign(this, consumer, token, verifier)
}
/** Change the http request method.
* The library will allow you to set this to whatever you want. If you want to do a POST, just use the
    * postData, postForm, or postMulti methods. If you want to set up your request as a form, data or multi request, but
* want to change the method type, call this method after the post method:
*
* {{{Http(url).postData(dataBytes).method("PUT").asString}}}
*/
def method(m: String): HttpRequest = copy(method=m)
/** Should HTTP compression be used
    * If true, Accept-Encoding: gzip,deflate will be sent with the request.
    * If the server responds with Content-Encoding: (gzip|deflate), the client will automatically handle decompression
*
* This is on by default
*
* @param c should compress
*/
def compress(c: Boolean): HttpRequest = copy(compress=c)
/** Send request via a standard http proxy */
def proxy(host: String, port: Int): HttpRequest = proxy(host, port, Proxy.Type.HTTP)
/** Send request via a proxy. You choose the type (HTTP or SOCKS) */
def proxy(host: String, port: Int, proxyType: Proxy.Type): HttpRequest = {
copy(proxyConfig = Some(HttpConstants.proxy(host, port, proxyType)))
}
/** Send request via a proxy */
def proxy(proxy: Proxy): HttpRequest = {
copy(proxyConfig = Some(proxy))
}
/** Change the charset used to encode the request and decode the response. UTF-8 by default */
def charset(cs: String): HttpRequest = copy(charset = cs)
/** The buffer size to use when sending Multipart posts */
def sendBufferSize(numBytes: Int): HttpRequest = copy(sendBufferSize = numBytes)
/** The socket connection and read timeouts in milliseconds. Defaults are 1000 and 5000 respectively */
def timeout(connTimeoutMs: Int, readTimeoutMs: Int): HttpRequest = options(
Seq(HttpOptions.connTimeout(connTimeoutMs), HttpOptions.readTimeout(readTimeoutMs))
)
/** Executes this request
*
    * Keep in mind that if you're parsing the response to something other than String, you may hit a parsing error if
* the server responds with a different content type for error cases.
*
* @tparam T the type returned by the input stream parser
* @param parser function to process the response body InputStream. Will be used for all response codes
*/
def execute[T](
parser: InputStream => T = (is: InputStream) => HttpConstants.readString(is, charset)
): HttpResponse[T] = {
exec((code: Int, headers: Map[String, IndexedSeq[String]], is: InputStream) => parser(is))
}
/** Executes this request
*
* This is a power user method for parsing the response body. The parser function will be passed the response code,
* response headers and the InputStream
*
* @tparam T the type returned by the input stream parser
* @param parser function to process the response body InputStream
*/
def exec[T](parser: (Int, Map[String, IndexedSeq[String]], InputStream) => T): HttpResponse[T] = {
doConnection(parser, new URL(urlBuilder(this)), connectFunc)
}
private def doConnection[T](
parser: (Int, Map[String, IndexedSeq[String]], InputStream) => T,
urlToFetch: URL,
connectFunc: (HttpRequest, HttpURLConnection) => Unit
): HttpResponse[T] = {
proxyConfig.map(urlToFetch.openConnection).getOrElse(urlToFetch.openConnection) match {
case conn: HttpURLConnection =>
conn.setInstanceFollowRedirects(false)
HttpOptions.method(method)(conn)
if (compress) {
conn.setRequestProperty("Accept-Encoding", "gzip,deflate")
}
headers.reverse.foreach{ case (name, value) =>
conn.setRequestProperty(name, value)
}
options.reverse.foreach(_(conn))
try {
connectFunc(this, conn)
toResponse(conn, parser, conn.getInputStream)
} catch {
case e: java.io.IOException if conn.getResponseCode > 0 =>
toResponse(conn, parser, conn.getErrorStream)
} finally {
closeStreams(conn)
}
}
}
private def toResponse[T](
conn: HttpURLConnection,
parser: (Int, Map[String, IndexedSeq[String]], InputStream) => T,
inputStream: InputStream
): HttpResponse[T] = {
val responseCode: Int = conn.getResponseCode
val headers: Map[String, IndexedSeq[String]] = getResponseHeaders(conn)
val encoding: Option[String] = headers.get("Content-Encoding").flatMap(_.headOption)
// handle a WWW-Authenticate digest round-trip
// check if digest header already exists to prevent infinite loops
val AuthHeaderName = "Authorization"
(if (responseCode == 401 && !this.headers.exists(p => p._1 == AuthHeaderName && p._2.startsWith(DigestAuth.DigestPrefix))) {
def toUri(url: URL): String = {
url.getPath + Option(url.getQuery).map(q => "?" + q).getOrElse("")
}
for {
(username, password) <- digestCreds
authParams: WwwAuthenticate <- {
headers.get("WWW-Authenticate").flatMap(_.headOption).flatMap(DigestAuth.getAuthDetails)
}
if authParams.authType.equalsIgnoreCase(DigestAuth.DigestPrefix)
url = new URL(urlBuilder(this))
digestResult <- DigestAuth.createHeaderValue(
username,
password,
method,
toUri(url),
HttpConstants.readBytes(inputStream),
authParams.params
)
} yield {
header(AuthHeaderName, digestResult).doConnection(parser, url, connectFunc)
}
} else None).getOrElse {
// HttpURLConnection won't redirect from https <-> http, so we handle manually here
(if (conn.getInstanceFollowRedirects && (responseCode == 301 || responseCode == 302 || responseCode == 307 || responseCode == 308)) {
headers.get("Location").flatMap(_.headOption).map(location => {
doConnection(parser, new URL(location), connectFunc)
})
} else None).getOrElse {
val body: T = {
val shouldDecompress = compress && inputStream != null
val theStream = if (shouldDecompress && encoding.exists(_.equalsIgnoreCase("gzip"))) {
new GZIPInputStream(inputStream)
} else if (shouldDecompress && encoding.exists(_.equalsIgnoreCase("deflate"))) {
new InflaterInputStream(inputStream)
} else inputStream
parser(responseCode, headers, theStream)
}
HttpResponse[T](body, responseCode, headers)
}
}
}
private def getResponseHeaders(conn: HttpURLConnection): Map[String, IndexedSeq[String]] = {
// There can be multiple values for the same response header key (this is common with Set-Cookie)
// http://stackoverflow.com/questions/4371328/are-duplicate-http-response-headers-acceptable
// according to javadoc, there can be a headerField value where the HeaderFieldKey is null
// at the 0th row in some implementations. In that case it's the http status line
new TreeMap[String, IndexedSeq[String]]()(Ordering.by(_.toLowerCase)) ++ {
Stream.from(0).map(i => i -> conn.getHeaderField(i)).takeWhile(_._2 != null).map{ case (i, value) =>
Option(conn.getHeaderFieldKey(i)).getOrElse("Status") -> value
}.groupBy(_._1).mapValues(_.map(_._2).toIndexedSeq)
}
}
private def closeStreams(conn: HttpURLConnection): Unit = {
try {
conn.getInputStream.close
} catch {
case e: Exception => //ignore
}
try {
if(conn.getErrorStream != null) {
conn.getErrorStream.close
}
} catch {
case e: Exception => //ignore
}
}
/** Standard form POST request */
def postForm: HttpRequest = postForm(Nil)
/** Standard form POST request and set some parameters. Same as .postForm.params(params) */
def postForm(params: Seq[(String, String)]): HttpRequest = {
copy(method="POST", connectFunc=FormBodyConnectFunc, urlBuilder=PlainUrlFunc)
.header("content-type", "application/x-www-form-urlencoded").params(params)
}
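  // Example (hypothetical url and fields): a url-encoded form POST:
  //   Http("https://example.com/login").postForm(Seq("user" -> "alice", "pass" -> "s3cret")).asString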
/** Raw data POST request. String bytes written out using configured charset */
def postData(data: String): HttpRequest = body(data).method("POST")
/** Raw byte data POST request */
def postData(data: Array[Byte]): HttpRequest = body(data).method("POST")
/** Raw data PUT request. String bytes written out using configured charset */
def put(data: String): HttpRequest = body(data).method("PUT")
/** Raw byte data PUT request */
def put(data: Array[Byte]): HttpRequest = body(data).method("PUT")
private def body(data: String): HttpRequest = copy(connectFunc=StringBodyConnectFunc(data))
private def body(data: Array[Byte]): HttpRequest = copy(connectFunc=ByteBodyConnectFunc(data))
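  // Example (hypothetical url): a raw JSON POST. Note that postData does not set a Content-Type
  // header for you, so add one explicitly:
  //   Http("https://example.com/api")
  //     .postData("""{"key": "value"}""")
  //     .header("content-type", "application/json")
  //     .asString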
/** Multipart POST request.
*
* This is probably what you want if you need to upload a mix of form data and binary data (like a photo)
*/
def postMulti(parts: MultiPart*): HttpRequest = {
copy(method="POST", connectFunc=MultiPartConnectFunc(parts), urlBuilder=PlainUrlFunc)
}
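  // Example (hypothetical url and file): mix a form field with a binary upload:
  //   val photoBytes = java.nio.file.Files.readAllBytes(java.nio.file.Paths.get("photo.png"))
  //   Http("https://example.com/upload")
  //     .postMulti(MultiPart("photo", "photo.png", "image/png", photoBytes))
  //     .param("title", "My photo")
  //     .asString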
/** Execute this request and parse http body as Array[Byte] */
def asBytes: HttpResponse[Array[Byte]] = execute(HttpConstants.readBytes)
  /** Execute this request and parse the http body as a String using the server charset or the configured charset */
def asString: HttpResponse[String] = exec((code: Int, headers: Map[String, IndexedSeq[String]], is: InputStream) => {
val reqCharset: String = headers.get("content-type").flatMap(_.headOption).flatMap(ct => {
HttpConstants.CharsetRegex.findFirstMatchIn(ct).map(_.group(1))
}).getOrElse(charset)
HttpConstants.readString(is, reqCharset)
})
/** Execute this request and parse http body as query string key-value pairs */
def asParams: HttpResponse[Seq[(String, String)]] = execute(HttpConstants.readParams(_, charset))
/** Execute this request and parse http body as query string key-value pairs */
def asParamMap: HttpResponse[Map[String, String]] = execute(HttpConstants.readParamMap(_, charset))
  /** Execute this request and parse the http body as a query string containing the oauth_token and oauth_token_secret tuple */
def asToken: HttpResponse[Token] = execute(HttpConstants.readToken)
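  // Example (hypothetical provider): the OAuth v1 request-token step returns a querystring body
  // that parses directly into a Token:
  //   val requestToken: Token =
  //     Http("https://api.example.com/oauth/request_token").oauth(consumer, None, None).asToken.body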
}
case object DefaultConnectFunc extends Function2[HttpRequest, HttpURLConnection, Unit] {
def apply(req: HttpRequest, conn: HttpURLConnection): Unit = {
conn.connect
}
override def toString = "DefaultConnectFunc"
}
case object FormBodyConnectFunc extends Function2[HttpRequest, HttpURLConnection, Unit] {
def apply(req: HttpRequest, conn: HttpURLConnection): Unit = {
conn.setDoOutput(true)
conn.connect
conn.getOutputStream.write(HttpConstants.toQs(req.params, req.charset).getBytes(req.charset))
}
override def toString = "FormBodyConnectFunc"
}
case class ByteBodyConnectFunc(data: Array[Byte]) extends Function2[HttpRequest, HttpURLConnection, Unit] {
def apply(req: HttpRequest, conn: HttpURLConnection): Unit = {
conn.setDoOutput(true)
conn.connect
conn.getOutputStream.write(data)
}
override def toString = "ByteBodyConnectFunc(Array[Byte]{" + data.length + "})"
}
case class StringBodyConnectFunc(data: String) extends Function2[HttpRequest, HttpURLConnection, Unit] {
def apply(req: HttpRequest, conn: HttpURLConnection): Unit = {
conn.setDoOutput(true)
conn.connect
conn.getOutputStream.write(data.getBytes(req.charset))
}
override def toString = "StringBodyConnectFunc(" + data + ")"
}
case class MultiPartConnectFunc(parts: Seq[MultiPart]) extends Function2[HttpRequest, HttpURLConnection, Unit] {
def apply(req: HttpRequest, conn: HttpURLConnection): Unit = {
    val CrLf = "\r\n"
    val Pref = "--"
    val Boundary = "--gc0pMUlT1B0uNdArYc0p"
    val ContentDisposition = "Content-Disposition: form-data; name=\""
    val Filename = "\"; filename=\""
    val ContentType = "Content-Type: "
conn.setDoOutput(true)
conn.setDoInput(true)
conn.setUseCaches(false)
val contentType = req.headers.find(_._1 == "Content-Type").map(_._2).getOrElse("multipart/form-data")
conn.setRequestProperty("Content-Type", contentType + "; boundary=" + Boundary)
conn.setRequestProperty("MIME-Version", "1.0")
// encode params up front for the length calculation
val paramBytes = req.params.map(p => (p._1.getBytes(req.charset) -> p._2.getBytes(req.charset)))
val partBytes = parts.map(p => (p.name.getBytes(req.charset),
p.filename.getBytes(req.charset),
p))
// we need to pre-calculate the Content-Length of this HttpRequest because most servers don't
// support chunked transfer
val totalBytesToSend: Long = {
val paramOverhead = Pref.length + Boundary.length + ContentDisposition.length + 1 + (CrLf.length * 4)
val paramsLength = paramBytes.map(p => p._1.length + p._2.length + paramOverhead).sum
val fileOverhead = Pref.length + Boundary.length + ContentDisposition.length + Filename.length + 1 +
(CrLf.length * 5) + ContentType.length
val filesLength =
partBytes.map(p => fileOverhead + p._1.length + p._2.length + p._3.mime.length + p._3.numBytes).sum
val finaleBoundaryLength = (Pref.length * 2) + Boundary.length + CrLf.length
paramsLength + filesLength + finaleBoundaryLength
}
HttpConstants.setFixedLengthStreamingMode(conn, totalBytesToSend)
val out = conn.getOutputStream()
def writeBytes(s: String): Unit = {
// this is only used for the structural pieces, not user input, so should be plain old ascii
out.write(s.getBytes(HttpConstants.utf8))
}
paramBytes.foreach {
case (name, value) =>
writeBytes(Pref + Boundary + CrLf)
writeBytes(ContentDisposition)
out.write(name)
writeBytes("\\"" + CrLf)
writeBytes(CrLf)
out.write(value)
writeBytes(CrLf)
}
val buffer = new Array[Byte](req.sendBufferSize)
partBytes.foreach {
case(name, filename, part) =>
writeBytes(Pref + Boundary + CrLf)
writeBytes(ContentDisposition)
out.write(name)
writeBytes(Filename)
out.write(filename)
writeBytes("\\"" + CrLf)
writeBytes(ContentType + part.mime + CrLf + CrLf)
var bytesWritten: Long = 0L
def readOnce(): Unit = {
val len = part.data.read(buffer)
if (len > 0) {
out.write(buffer, 0, len)
bytesWritten += len
part.writeCallBack(bytesWritten)
}
if (len >= 0) {
readOnce()
}
}
readOnce()
writeBytes(CrLf)
}
writeBytes(Pref + Boundary + Pref + CrLf)
out.flush()
out.close()
}
override def toString = "MultiPartConnectFunc(" + parts + ")"
}
case object QueryStringUrlFunc extends Function1[HttpRequest, String] {
def apply(req: HttpRequest): String = {
HttpConstants.appendQs(req.url, req.params, req.charset)
}
override def toString = "QueryStringUrlFunc"
}
case object PlainUrlFunc extends Function1[HttpRequest, String] {
def apply(req: HttpRequest): String = req.url
override def toString = "PlainUrlFunc"
}
/**
* Mostly helper methods
*/
object HttpConstants {
  val CharsetRegex = new Regex("(?i)\\bcharset=\\s*\"?([^\\s;\"]*)")
type HttpExec = (HttpRequest, HttpURLConnection) => Unit
def defaultOptions: Seq[HttpOptions.HttpOption] = Seq(
HttpOptions.connTimeout(1000),
HttpOptions.readTimeout(5000),
HttpOptions.followRedirects(false)
)
val setFixedLengthStreamingMode: (HttpURLConnection, Long) => Unit = {
case (connection, contentLength) => connection.setFixedLengthStreamingMode(contentLength)
}
def urlEncode(name: String, charset: String): String = URLEncoder.encode(name, charset)
def urlDecode(name: String, charset: String): String = URLDecoder.decode(name, charset)
def base64(bytes: Array[Byte]): String = new String(Base64.encode(bytes))
def base64(in: String): String = base64(in.getBytes(utf8))
def basicAuthValue(user: String, password: String): String = {
"Basic " + base64(user + ":" + password)
}
def toQs(params: Seq[(String,String)], charset: String): String = {
params.map(p => urlEncode(p._1, charset) + "=" + urlEncode(p._2, charset)).mkString("&")
}
def appendQs(url:String, params: Seq[(String,String)], charset: String): String = {
url + (if(params.isEmpty) "" else {
(if(url.contains("?")) "&" else "?") + toQs(params, charset)
})
}
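  // Example of toQs/appendQs behavior (values are url-encoded with the given charset):
  //   toQs(Seq("q" -> "scala http"), "UTF-8")                                  // "q=scala+http"
  //   appendQs("https://example.com/search?safe=on", Seq("q" -> "x"), "UTF-8") // "https://example.com/search?safe=on&q=x"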
def readString(is: InputStream): String = readString(is, utf8)
/**
* [lifted from lift]
*/
def readString(is: InputStream, charset: String): String = {
if (is == null) {
""
} else {
val in = new InputStreamReader(is, charset)
val bos = new StringBuilder
val ba = new Array[Char](4096)
def readOnce(): Unit = {
val len = in.read(ba)
if (len > 0) bos.appendAll(ba, 0, len)
if (len >= 0) readOnce()
}
readOnce()
bos.toString
}
}
/**
* [lifted from lift]
* Read all data from a stream into an Array[Byte]
*/
def readBytes(in: InputStream): Array[Byte] = {
if (in == null) {
Array[Byte]()
} else {
val bos = new ByteArrayOutputStream
val ba = new Array[Byte](4096)
def readOnce(): Unit = {
val len = in.read(ba)
if (len > 0) bos.write(ba, 0, len)
if (len >= 0) readOnce()
}
readOnce()
bos.toByteArray
}
}
def readParams(in: InputStream, charset: String = utf8): Seq[(String,String)] = {
readString(in, charset).split("&").flatMap(_.split("=") match {
case Array(k,v) => Some((urlDecode(k, charset), urlDecode(v, charset)))
case _ => None
}).toList
}
def readParamMap(in: InputStream, charset: String = utf8): Map[String, String] = Map(readParams(in, charset):_*)
def readToken(in: InputStream): Token = {
val params = readParamMap(in)
Token(params("oauth_token"), params("oauth_token_secret"))
}
def proxy(host: String, port: Int, proxyType: Proxy.Type = Proxy.Type.HTTP): Proxy = {
new Proxy(proxyType, new InetSocketAddress(host, port))
}
val utf8 = "UTF-8"
}
/** Default entry point to this library */
object Http extends BaseHttp
/**
* Extends and override this class to setup your own defaults
*
* @param proxyConfig http proxy; defaults to the Java default proxy (see http://docs.oracle.com/javase/8/docs/technotes/guides/net/proxies.html).
* You can use [[scalaj.http.HttpConstants.proxy]] to specify an alternate proxy, or specify
  *                    [[java.net.Proxy.NO_PROXY]] to explicitly not use a proxy.
* @param options set things like timeouts, ssl handling, redirect following
* @param charset charset to use for encoding request and decoding response
* @param sendBufferSize buffer size for multipart posts
* @param userAgent User-Agent request header
* @param compress use HTTP Compression
*/
class BaseHttp (
proxyConfig: Option[Proxy] = None,
options: Seq[HttpOptions.HttpOption] = HttpConstants.defaultOptions,
charset: String = HttpConstants.utf8,
sendBufferSize: Int = 4096,
userAgent: String = s"scalaj-http/${BuildInfo.version}",
compress: Boolean = true
) {
/** Create a new [[scalaj.http.HttpRequest]]
*
* @param url the full url of the request. Querystring params can be added to a get request with the .params methods
*/
def apply(url: String): HttpRequest = HttpRequest(
url = url,
method = "GET",
connectFunc = DefaultConnectFunc,
params = Nil,
headers = Seq("User-Agent" -> userAgent),
options = options,
proxyConfig = proxyConfig,
charset = charset,
sendBufferSize = sendBufferSize,
urlBuilder = QueryStringUrlFunc,
compress = compress,
digestCreds = None
)
}
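// A sketch of customizing defaults by extending BaseHttp (hypothetical values):
//   object MyHttp extends BaseHttp(
//     userAgent = "my-service/1.0",
//     options = HttpConstants.defaultOptions :+ HttpOptions.followRedirects(true)
//   )
//   MyHttp("https://example.com").asString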
| scalaj/scalaj-http | src/main/scala/scalaj/http/Http.scala | Scala | apache-2.0 | 31,853 |
package edu.gemini.spModel.core
import org.scalacheck.{Arbitrary, Gen}
import org.scalacheck.Gen._
import java.text.SimpleDateFormat
import java.util.Date
/**
* ProgramId Generator.
*/
object ProgramIdGen {
val genYear: Gen[Int] = choose(2000, 2020)
val genSite: Gen[Site] = oneOf(Site.GN, Site.GS)
val genSemester: Gen[Semester] =
for {
year <- genYear
half <- oneOf(Semester.Half.values())
} yield new Semester(year, half)
val genScienceId: Gen[SPProgramID] =
for {
site <- genSite
sem <- genSemester
pt <- oneOf(ProgramType.All.filter(_.isScience).map(_.abbreviation))
num <- choose(1, 999)
} yield SPProgramID.toProgramID(s"$site-$sem-$pt-$num")
def genDate(site: Site): Gen[Date] =
for {
sem <- genSemester
time <- choose(sem.getStartDate(site).getTime, sem.getEndDate(site).getTime - 1)
} yield new Date(time)
val genDailyId: Gen[SPProgramID] = {
def format(d: Date): String = new SimpleDateFormat("yyyyMMdd").format(d)
for {
site <- genSite
pt <- oneOf(ProgramType.All.filterNot(_.isScience).map(_.abbreviation))
date <- genDate(site)
} yield SPProgramID.toProgramID(s"$site-$pt${format(date)}")
}
val unstructuredId: Gen[SPProgramID] =
for {
i <- choose(1,10)
s <- listOfN(i, oneOf[Char](alphaNumChar, '-')).map(t => t.map(_.toString).mkString)
} yield SPProgramID.toProgramID(s)
val genSomeId: Gen[SPProgramID] = frequency(
(4, genScienceId),
(5, genDailyId),
(1, unstructuredId)
)
implicit val arbId = Arbitrary(genSomeId)
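  // Sampling sketch: ids can also be drawn directly from the generator, e.g.
  //   ProgramIdGen.genSomeId.sample  // => Option[SPProgramID]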
}
| fnussber/ocs | bundle/edu.gemini.spModel.core/src/test/scala/edu/gemini/spModel/core/ProgramIdGen.scala | Scala | bsd-3-clause | 1,614 |
package org.jetbrains.plugins.scala.failed.typeInference
import org.jetbrains.plugins.scala.PerfCycleTests
import org.jetbrains.plugins.scala.base.ScalaLightCodeInsightFixtureTestAdapter
import org.junit.experimental.categories.Category
/**
* @author Anton Yalyshev
* @since 10.01.2018.
*/
@Category(Array(classOf[PerfCycleTests]))
class TypeAliasInferenceTest extends ScalaLightCodeInsightFixtureTestAdapter {
override protected def shouldPass: Boolean = false
def testSCL13137(): Unit = {
checkTextHasNoErrors(
"""
|trait A[T] {
| def f(x : T => T) : Unit = { }
| def g(a: A[_]) : Unit = a.f(z => z)
|}
""".stripMargin)
}
def testSCL13139(): Unit = {
checkTextHasNoErrors(
"""
|trait A {
| type T
| def f(a: A)(x : a.T => a.T)
| def g(a: A) : Unit = a.f(a)((z : a.T) => z)
|}
""".stripMargin)
}
def testSCL13607(): Unit = {
checkTextHasNoErrors(
"""
|trait Foo {
| type Bar
|}
|
|def apply[A](foo: Foo { type Bar = A }): Unit = ()
|
|def test(f: Foo): Unit = apply[f.Bar](f)
""".stripMargin)
}
def testSCL13797(): Unit = {
checkTextHasNoErrors(
"""
|trait Test {
| type X
| def self: Test { type X = Test.this.X } = this
|}
""".stripMargin)
}
}
| jastice/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/failed/typeInference/TypeAliasInferenceTest.scala | Scala | apache-2.0 | 1,412 |
package monocle.state
import cats.{Eval, Now}
import monocle.PLens
import cats.data.{IndexedStateT, State}
trait StateLensSyntax {
implicit def toStateLensOps[S, T, A, B](lens: PLens[S, T, A, B]): StateLensOps[S, T, A, B] =
new StateLensOps[S, T, A, B](lens)
}
final class StateLensOps[S, T, A, B](private val lens: PLens[S, T, A, B]) extends AnyVal {
/** transforms a PLens into a State */
def toState: State[S, A] =
State(s => (s, lens.get(s)))
/** alias for toState */
def st: State[S, A] =
toState
/** extracts the value viewed through the lens */
def extract: State[S, A] =
toState
/** extracts the value viewed through the lens and applies `f` over it */
def extracts[B](f: A => B): State[S, B] =
extract.map(f)
/** modify the value viewed through the lens and returns its *new* value */
def mod(f: A => B): IndexedStateT[Eval, S, T, B] =
IndexedStateT { s =>
val a = lens.get(s)
val b = f(a)
Now((lens.set(b)(s), b))
}
/** modify the value viewed through the lens and returns its *old* value */
def modo(f: A => B): IndexedStateT[Eval, S, T, A] =
toState.bimap(lens.modify(f), identity)
/** modify the value viewed through the lens and ignores both values */
def mod_(f: A => B): IndexedStateT[Eval, S, T, Unit] =
IndexedStateT(s => Now((lens.modify(f)(s), ())))
/** set the value viewed through the lens and returns its *new* value */
def assign(b: B): IndexedStateT[Eval, S, T, B] =
mod(_ => b)
/** set the value viewed through the lens and returns its *old* value */
def assigno(b: B): IndexedStateT[Eval, S, T, A] =
modo(_ => b)
/** set the value viewed through the lens and ignores both values */
def assign_(b: B): IndexedStateT[Eval, S, T, Unit] =
mod_(_ => b)
}
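// A minimal usage sketch (hypothetical case class and lens; assumes the state syntax is in scope,
// e.g. via monocle.state.all._):
//   case class Person(name: String, age: Int)
//   val age = monocle.Lens[Person, Int](_.age)(a => p => p.copy(age = a))
//   age.mod(_ + 1).run(Person("Ann", 41)).value   // (Person("Ann", 42), 42)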
| aoiroaoino/Monocle | state/src/main/scala/monocle/state/StateLensSyntax.scala | Scala | mit | 1,798 |
package pw.ian.sysadmincraft.listeners
import org.bukkit.entity.EntityType
import org.bukkit.event.block.BlockBreakEvent
import org.bukkit.event.entity.CreatureSpawnEvent
import org.bukkit.event.entity.CreatureSpawnEvent.SpawnReason
import org.bukkit.event.player.PlayerJoinEvent
import org.bukkit.event.{EventHandler, Listener}
import org.bukkit.inventory.ItemStack
import org.bukkit.{Material, GameMode, Location}
import pw.ian.sysadmincraft.SysAdmincraft
import pw.ian.sysadmincraft.world.WorldConstants._
case class MiscListener(plugin: SysAdmincraft) extends Listener {
@EventHandler
def onPlayerJoin(event: PlayerJoinEvent): Unit = {
event.getPlayer.setGameMode(GameMode.CREATIVE)
event.getPlayer.teleport(new Location(plugin.world, 0, START_HEIGHT + 1, 0))
event.getPlayer.getInventory.addItem(new ItemStack(Material.IRON_SWORD, 1))
}
@EventHandler
def onBlockBreak(event: BlockBreakEvent): Unit = {
event.setCancelled(true)
}
@EventHandler
def onCreatureSpawn(event: CreatureSpawnEvent): Unit = {
if (event.getSpawnReason == SpawnReason.NATURAL || event.getEntityType == EntityType.SLIME) {
event.setCancelled(true)
}
}
}
| simplyianm/sysadmincraft | src/main/scala/pw/ian/sysadmincraft/listeners/MiscListener.scala | Scala | isc | 1,184 |
/* sbt -- Simple Build Tool
* Copyright 2009 Mark Harrah
*/
package sbt
import java.lang.{Process => JProcess, ProcessBuilder => JProcessBuilder}
import java.io.{Closeable, File, IOException}
import java.io.{BufferedReader, InputStream, InputStreamReader, OutputStream, PipedInputStream, PipedOutputStream}
import java.net.URL
trait ProcessExtra
{
import Process._
implicit def builderToProcess(builder: JProcessBuilder): ProcessBuilder = apply(builder)
implicit def fileToProcess(file: File): FilePartialBuilder = apply(file)
implicit def urlToProcess(url: URL): URLPartialBuilder = apply(url)
implicit def xmlToProcess(command: scala.xml.Elem): ProcessBuilder = apply(command)
implicit def buildersToProcess[T](builders: Seq[T])(implicit convert: T => SourcePartialBuilder): Seq[SourcePartialBuilder] = applySeq(builders)
implicit def stringToProcess(command: String): ProcessBuilder = apply(command)
implicit def stringSeqToProcess(command: Seq[String]): ProcessBuilder = apply(command)
}
/** Methods for constructing simple commands that can then be combined. */
object Process extends ProcessExtra
{
def apply(command: String): ProcessBuilder = apply(command, None)
def apply(command: Seq[String]): ProcessBuilder = apply (command.toArray, None)
def apply(command: String, arguments: Seq[String]): ProcessBuilder = apply(command :: arguments.toList, None)
/** create ProcessBuilder with working dir set to File and extra environment variables */
def apply(command: String, cwd: File, extraEnv: (String,String)*): ProcessBuilder =
apply(command, Some(cwd), extraEnv : _*)
/** create ProcessBuilder with working dir set to File and extra environment variables */
def apply(command: Seq[String], cwd: File, extraEnv: (String,String)*): ProcessBuilder =
apply(command, Some(cwd), extraEnv : _*)
	/** create ProcessBuilder with working dir optionally set to File and extra environment variables */
def apply(command: String, cwd: Option[File], extraEnv: (String,String)*): ProcessBuilder = {
apply(command.split("""\s+"""), cwd, extraEnv : _*)
// not smart to use this on windows, because CommandParser uses \ to escape ".
/*CommandParser.parse(command) match {
case Left(errorMsg) => error(errorMsg)
case Right((cmd, args)) => apply(cmd :: args, cwd, extraEnv : _*)
}*/
}
	/** create ProcessBuilder with working dir optionally set to File and extra environment variables */
def apply(command: Seq[String], cwd: Option[File], extraEnv: (String,String)*): ProcessBuilder = {
val jpb = new JProcessBuilder(command.toArray : _*)
cwd.foreach(jpb directory _)
extraEnv.foreach { case (k, v) => jpb.environment.put(k, v) }
apply(jpb)
}
def apply(builder: JProcessBuilder): ProcessBuilder = new SimpleProcessBuilder(builder)
def apply(file: File): FilePartialBuilder = new FileBuilder(file)
def apply(url: URL): URLPartialBuilder = new URLBuilder(url)
def apply(command: scala.xml.Elem): ProcessBuilder = apply(command.text.trim)
def applySeq[T](builders: Seq[T])(implicit convert: T => SourcePartialBuilder): Seq[SourcePartialBuilder] = builders.map(convert)
def apply(value: Boolean): ProcessBuilder = apply(value.toString, if(value) 0 else 1)
def apply(name: String, exitValue: => Int): ProcessBuilder = new DummyProcessBuilder(name, exitValue)
def cat(file: SourcePartialBuilder, files: SourcePartialBuilder*): ProcessBuilder = cat(file :: files.toList)
def cat(files: Seq[SourcePartialBuilder]): ProcessBuilder =
{
require(!files.isEmpty)
files.map(_.cat).reduceLeft(_ #&& _)
}
}
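// A minimal usage sketch of the process DSL (hypothetical commands; relies on the implicit
// conversions in ProcessExtra and the combinators defined on ProcessBuilder below):
//   import Process._
//   val exitCode: Int  = "ls" #| "grep .scala" !             // pipe one command into another
//   val output: String = Process("git", Seq("status")) !!    // capture stdout; throws on non-zero exit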
trait SourcePartialBuilder extends NotNull
{
/** Writes the output stream of this process to the given file. */
def #> (f: File): ProcessBuilder = toFile(f, false)
/** Appends the output stream of this process to the given file. */
def #>> (f: File): ProcessBuilder = toFile(f, true)
/** Writes the output stream of this process to the given OutputStream. The
* argument is call-by-name, so the stream is recreated, written, and closed each
* time this process is executed. */
def #>(out: => OutputStream): ProcessBuilder = #> (new OutputStreamBuilder(out))
def #>(b: ProcessBuilder): ProcessBuilder = new PipedProcessBuilder(toSource, b, false)
private def toFile(f: File, append: Boolean) = #> (new FileOutput(f, append))
def cat = toSource
protected def toSource: ProcessBuilder
}
trait SinkPartialBuilder extends NotNull
{
/** Reads the given file into the input stream of this process. */
def #< (f: File): ProcessBuilder = #< (new FileInput(f))
/** Reads the given URL into the input stream of this process. */
def #< (f: URL): ProcessBuilder = #< (new URLInput(f))
/** Reads the given InputStream into the input stream of this process. The
* argument is call-by-name, so the stream is recreated, read, and closed each
* time this process is executed. */
def #<(in: => InputStream): ProcessBuilder = #< (new InputStreamBuilder(in))
def #<(b: ProcessBuilder): ProcessBuilder = new PipedProcessBuilder(b, toSink, false)
protected def toSink: ProcessBuilder
}
trait URLPartialBuilder extends SourcePartialBuilder
trait FilePartialBuilder extends SinkPartialBuilder with SourcePartialBuilder
{
def #<<(f: File): ProcessBuilder
def #<<(u: URL): ProcessBuilder
def #<<(i: => InputStream): ProcessBuilder
def #<<(p: ProcessBuilder): ProcessBuilder
}
/** Represents a process that is running or has finished running.
* It may be a compound process with several underlying native processes (such as `a #&& b`).*/
trait Process extends NotNull
{
/** Blocks until this process exits and returns the exit code.*/
def exitValue(): Int
/** Destroys this process. */
def destroy(): Unit
}
/** Represents a runnable process. */
trait ProcessBuilder extends SourcePartialBuilder with SinkPartialBuilder
{
/** Starts the process represented by this builder, blocks until it exits, and returns the output as a String. Standard error is
* sent to the console. If the exit code is non-zero, an exception is thrown.*/
def !! : String
/** Starts the process represented by this builder, blocks until it exits, and returns the output as a String. Standard error is
* sent to the provided ProcessLogger. If the exit code is non-zero, an exception is thrown.*/
def !!(log: ProcessLogger) : String
/** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available
* but the process has not completed. Standard error is sent to the console. If the process exits with a non-zero value,
* the Stream will provide all lines up to termination and then throw an exception. */
def lines: Stream[String]
/** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available
* but the process has not completed. Standard error is sent to the provided ProcessLogger. If the process exits with a non-zero value,
* the Stream will provide all lines up to termination but will not throw an exception. */
def lines(log: ProcessLogger): Stream[String]
/** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available
* but the process has not completed. Standard error is sent to the console. If the process exits with a non-zero value,
* the Stream will provide all lines up to termination but will not throw an exception. */
def lines_! : Stream[String]
/** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available
* but the process has not completed. Standard error is sent to the provided ProcessLogger. If the process exits with a non-zero value,
* the Stream will provide all lines up to termination but will not throw an exception. */
def lines_!(log: ProcessLogger): Stream[String]
/** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are
* sent to the console.*/
def ! : Int
/** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are
* sent to the given ProcessLogger.*/
def !(log: ProcessLogger): Int
/** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are
* sent to the console. The newly started process reads from standard input of the current process.*/
def !< : Int
/** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are
* sent to the given ProcessLogger. The newly started process reads from standard input of the current process.*/
def !<(log: ProcessLogger) : Int
/** Starts the process represented by this builder. Standard output and error are sent to the console.*/
def run(): Process
/** Starts the process represented by this builder. Standard output and error are sent to the given ProcessLogger.*/
def run(log: ProcessLogger): Process
/** Starts the process represented by this builder. I/O is handled by the given ProcessIO instance.*/
def run(io: ProcessIO): Process
/** Starts the process represented by this builder. Standard output and error are sent to the console.
* The newly started process reads from standard input of the current process if `connectInput` is true.*/
def run(connectInput: Boolean): Process
/** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are
* sent to the given ProcessLogger.
* The newly started process reads from standard input of the current process if `connectInput` is true.*/
def run(log: ProcessLogger, connectInput: Boolean): Process
def runBuffered(log: ProcessLogger, connectInput: Boolean): Process
/** Constructs a command that runs this command first and then `other` if this command succeeds.*/
def #&& (other: ProcessBuilder): ProcessBuilder
/** Constructs a command that runs this command first and then `other` if this command does not succeed.*/
def #|| (other: ProcessBuilder): ProcessBuilder
/** Constructs a command that will run this command and pipes the output to `other`. `other` must be a simple command.*/
def #| (other: ProcessBuilder): ProcessBuilder
/** Constructs a command that will run this command and then `other`. The exit code will be the exit code of `other`.*/
def ### (other: ProcessBuilder): ProcessBuilder
def canPipeTo: Boolean
}
/** Each method will be called in a separate thread.*/
final class ProcessIO(val writeInput: OutputStream => Unit, val processOutput: InputStream => Unit, val processError: InputStream => Unit) extends NotNull
{
def withOutput(process: InputStream => Unit): ProcessIO = new ProcessIO(writeInput, process, processError)
def withError(process: InputStream => Unit): ProcessIO = new ProcessIO(writeInput, processOutput, process)
def withInput(write: OutputStream => Unit): ProcessIO = new ProcessIO(write, processOutput, processError)
}
trait ProcessLogger
{
def info(s: => String): Unit
def error(s: => String): Unit
def buffer[T](f: => T): T
}
| kuochaoyi/xsbt | util/process/Process.scala | Scala | bsd-3-clause | 11,137 |
/*
* Copyright (c) <2015-2016>, see CONTRIBUTORS
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the <organization> nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package ch.usi.inf.l3.usi.test
import ch.usi.inf.l3.sana
import sana.tiny.dsl._
import sana.tiny.core._
import sana.tiny.core.Implicits._
/*
Nate's comments:
1- Add a case for default action, i.e. when a syntax component is missing
2- A macro that generates boilerplate, for example define a list
of all AST trees once, then tell it what the transformation
components are named, and the macro generates the instances
for you.
3- Try to implement a language that needs name resolution with
this approach.
*/
// Syntax
trait Expr {
def tpe: Type
}
case object NoExpr extends Expr {
def tpe: Type = ErrorType
}
case class IntLit(value: Int) extends Expr {
val tpe: Type = IntType
}
case class Add(lhs: Expr, rhs: Expr, tpe: Type = NoType) extends Expr
case class Mul(lhs: Expr, rhs: Expr, tpe: Type = NoType) extends Expr
trait Type
case object ErrorType extends Type
case object NoType extends Type
case object IntType extends Type
// Transformation Component #1
trait TypeCheckerComponent extends TransformationComponent[Expr, Expr] {
def typeCheck: Expr => Expr
}
trait IntLitTypeCheckerComponent extends TypeCheckerComponent {
def apply(expr: Expr): Expr = expr match {
case lit: IntLit => lit
}
def isDefinedAt(expr: Expr): Boolean = expr match {
case lit: IntLit => true
case _ => false
}
}
trait AddTypeCheckerComponent extends TypeCheckerComponent {
def apply(expr: Expr): Expr = expr match {
case Add(l, r, _) =>
val nl = typeCheck(l)
val nr = typeCheck(r)
val ty1 = nl.tpe
val ty2 = nr.tpe
if(ty1 == IntType && ty2 == IntType)
Add(nl, nr, IntType)
else
Add(nl, nr, ErrorType)
}
def isDefinedAt(expr: Expr): Boolean = expr match {
case add: Add => true
case _ => false
}
}
trait MulTypeCheckerComponent extends TypeCheckerComponent {
def apply(expr: Expr): Expr = expr match {
case Mul(l, r, _) =>
val nl = typeCheck(l)
val nr = typeCheck(r)
val ty1 = nl.tpe
val ty2 = nr.tpe
if(ty1 == IntType && ty2 == IntType)
Mul(nl, nr, IntType)
else
Mul(nl, nr, ErrorType)
}
def isDefinedAt(expr: Expr): Boolean = expr match {
case mul: Mul => true
case _ => false
}
}
// Transformation Component #2
trait PrettyPrinterComponent extends TransformationComponent[Expr, String] {
def pprint: Expr => String
}
trait IntLitPrettyPrinterComponent extends PrettyPrinterComponent {
def apply(expr: Expr): String = expr match {
case IntLit(v) => v.toString
}
def isDefinedAt(expr: Expr): Boolean = expr match {
case lit: IntLit => true
case _ => false
}
}
trait AddPrettyPrinterComponent extends PrettyPrinterComponent {
def apply(expr: Expr): String = expr match {
case Add(l, r, _) =>
val sl = pprint(l)
val sr = pprint(r)
s"($sl + $sr)"
}
def isDefinedAt(expr: Expr): Boolean = expr match {
case add: Add => true
case _ => false
}
}
trait MulPrettyPrinterComponent extends PrettyPrinterComponent {
def apply(expr: Expr): String = expr match {
case Mul(l, r, _) =>
val sl = pprint(l)
val sr = pprint(r)
s"($sl * $sr)"
}
def isDefinedAt(expr: Expr): Boolean = expr match {
case mul: Mul => true
case _ => false
}
}
// Third family, checker
trait TestCheckerComponent extends CheckerComponent[Expr] {
def check: Expr => Unit
}
trait IntLitTestCheckerComponent extends TestCheckerComponent {
def apply(expr: Expr): Unit = expr match {
case IntLit(v) => println(v.toString)
}
def isDefinedAt(expr: Expr): Boolean = expr match {
case lit: IntLit => true
case _ => false
}
}
trait AddTestCheckerComponent extends TestCheckerComponent {
def apply(expr: Expr): Unit = expr match {
case Add(l, r, _) =>
val sl = check(l)
val sr = check(r)
println(s"($sl + $sr)")
}
def isDefinedAt(expr: Expr): Boolean = expr match {
case add: Add => true
case _ => false
}
}
trait MulTestCheckerComponent extends TestCheckerComponent {
def apply(expr: Expr): Unit = expr match {
case Mul(l, r, _) =>
val sl = check(l)
val sr = check(r)
println(s"($sl * $sr)")
}
def isDefinedAt(expr: Expr): Boolean = expr match {
case mul: Mul => true
case _ => false
}
}
// Transformation Family #1
trait TypeCheckerFamily extends TransformationFamily[Expr, Expr] {
self =>
def components: List[PartialFunction[Expr, Expr]] =
List(new IntLitTypeCheckerComponent {
def typeCheck: Expr => Expr = self.typeCheck
def compiler: CompilerInterface = ???
},
new AddTypeCheckerComponent{
def typeCheck: Expr => Expr = self.typeCheck
def compiler: CompilerInterface = ???
},
new MulTypeCheckerComponent{
def typeCheck: Expr => Expr = self.typeCheck
def compiler: CompilerInterface = ???
})
def typeCheck: Expr => Expr = family
def compiler: CompilerInterface = ???
}
object TypeCheckerFamily extends TypeCheckerFamily
// Transformation Family #2
trait PrettyPrinterFamily extends TransformationFamily[Expr, String] {
self =>
def compiler: CompilerInterface = ???
def components: List[PartialFunction[Expr, String]] =
List(new IntLitPrettyPrinterComponent {
def pprint: Expr => String = self.pprint
def compiler: CompilerInterface = ???
},
new AddPrettyPrinterComponent{
def pprint: Expr => String = self.pprint
def compiler: CompilerInterface = ???
},
new MulPrettyPrinterComponent{
def pprint: Expr => String = self.pprint
def compiler: CompilerInterface = ???
})
def pprint: Expr => String = family
}
object PrettyPrinterFamily extends PrettyPrinterFamily
// Checker Family #1
trait TestCheckerFamily extends CheckerFamily[Expr] {
self =>
def components: List[PartialFunction[Expr, Unit]] =
    List(new IntLitTestCheckerComponent {
def check: Expr => Unit = self.check
def compiler: CompilerInterface = ???
},
new AddTestCheckerComponent{
def check: Expr => Unit = self.check
def compiler: CompilerInterface = ???
},
new MulTestCheckerComponent{
def check: Expr => Unit = self.check
def compiler: CompilerInterface = ???
})
def check: Expr => Unit = family
def compiler: CompilerInterface = ???
}
object TestCheckerFamily extends TestCheckerFamily
// A complex language module
trait ComplexExprLang extends LanguageModule[Expr, String] {
def compile: Expr => String = {
TypeCheckerFamily.typeCheck join TestCheckerFamily.check join PrettyPrinterFamily.pprint
}
}
object ComplexExprLang extends ComplexExprLang
// Make the language simpler (reduce its elements)
trait SimpleTypeCheckerFamily extends TypeCheckerFamily {
self =>
override def components: List[PartialFunction[Expr, Expr]] =
List(new IntLitTypeCheckerComponent {
def typeCheck: Expr => Expr = self.typeCheck
def compiler: CompilerInterface = ???
},
new AddTypeCheckerComponent{
def typeCheck: Expr => Expr = self.typeCheck
def compiler: CompilerInterface = ???
})
}
object SimpleTypeCheckerFamily extends SimpleTypeCheckerFamily
trait SimplePrettyPrinterFamily extends PrettyPrinterFamily {
self =>
// private[this] final val CMP = "IntLit,Add" //Array("IntLit", "Add")
override val components = {
generateComponents[Expr, String]("IntLit,Add",
"PrettyPrinterComponent", "pprint", "")
}
// def pprint: Expr => String = ???
}
object SimplePrettyPrinterFamily extends SimplePrettyPrinterFamily
trait SimpleExprLang extends LanguageModule[Expr, String] {
def compile: Expr => String = {
SimpleTypeCheckerFamily.typeCheck join SimplePrettyPrinterFamily.pprint
}
}
object SimpleExprLang extends SimpleExprLang
// object Main {
// def main(args: Array[String]): Unit = {
// // Testing complex lang
// val expr1 = Add(Mul(IntLit(1), IntLit(2)), IntLit(3))
// val res = ComplexExprLang.compile(expr1)
// println(expr1)
// println("Evaluates to...")
// println(res)
// // Testing simple
// // Testing complex lang
// val expr2 = Add(Add(IntLit(1), IntLit(2)), IntLit(3))
// val res1 = SimpleExprLang.compile(expr2)
// val res2 = ComplexExprLang.compile(expr2)
// println(expr2)
// println("Using simple lang, evaluates to...")
// println(res1)
// println("Using complex lang, evaluates to...")
// println(res2)
//
// }
// }
| amanjpro/languages-a-la-carte | testLang/src/main/scala/components/Langs.scala | Scala | bsd-3-clause | 10,618 |
/*
* Copyright 2011-2017 Chris de Vreeze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.cdevreeze.yaidom.integrationtest
import java.{util => jutil}
import eu.cdevreeze.yaidom.core.EName
import eu.cdevreeze.yaidom.core.QName
import eu.cdevreeze.yaidom.core.Scope
import eu.cdevreeze.yaidom.indexed
import eu.cdevreeze.yaidom.parse.DocumentParserUsingDom
import eu.cdevreeze.yaidom.print.DocumentPrinterUsingDom
import eu.cdevreeze.yaidom.queryapi.ClarkElemApi
import eu.cdevreeze.yaidom.queryapi.ClarkNodes
import eu.cdevreeze.yaidom.queryapi.UpdatableElemLike
import eu.cdevreeze.yaidom.resolved
import eu.cdevreeze.yaidom.simple.Document
import eu.cdevreeze.yaidom.simple.Elem
import eu.cdevreeze.yaidom.simple.Node
import eu.cdevreeze.yaidom.simple.Text
import javax.xml.parsers.DocumentBuilderFactory
import javax.xml.transform.TransformerFactory
import org.scalatest.funsuite.AnyFunSuite
import scala.collection.immutable
/**
* XML functional update test case.
*
* @author Chris de Vreeze
*/
class UpdateTest extends AnyFunSuite {
private val logger: jutil.logging.Logger = jutil.logging.Logger.getLogger("eu.cdevreeze.yaidom.integrationtest")
private val docParser = DocumentParserUsingDom.newInstance()
private val docPrinter = {
val dbf = DocumentBuilderFactory.newInstance
val tf = TransformerFactory.newInstance
try {
tf.getAttribute("indent-number") // Throws an exception if "indent-number" is not supported
tf.setAttribute("indent-number", java.lang.Integer.valueOf(4))
} catch {
case _: Exception => () // Ignore
}
DocumentPrinterUsingDom.newInstance(dbf, tf)
}
test("testUpdateUsingPaths") {
val is = classOf[UpdateTest].getResourceAsStream("books.xml")
val doc1: Document = docParser.parse(is)
assertResult(Set(EName("Price"), EName("Edition"))) {
attrNames[Elem](doc1.documentElement).intersect(Set(EName("Price"), EName("Edition")))
}
assertResult(Set()) {
elemNames[Elem](doc1.documentElement)
.intersect(Set(EName("{http://bookstore}Price"), EName("{http://bookstore}Edition")))
}
val updElem = { (e: Elem, attr: String) =>
updateBook(e, attr)
}
val doc2 = Document(turnBookAttributeIntoElem(
turnBookAttributeIntoElem(doc1.documentElement, "Price", updElem),
"Edition",
updElem).removeAllInterElementWhitespace)
assertResult(Set()) {
attrNames[Elem](doc2.documentElement).intersect(Set(EName("Price"), EName("Edition")))
}
assertResult(Set(EName("{http://bookstore}Price"), EName("{http://bookstore}Edition"))) {
elemNames[Elem](doc2.documentElement)
.intersect(Set(EName("{http://bookstore}Price"), EName("{http://bookstore}Edition")))
}
}
test("testUpdateUsingPathSet") {
val is = classOf[UpdateTest].getResourceAsStream("books.xml")
val doc1: Document = docParser.parse(is)
assertResult(Set(EName("Price"), EName("Edition"))) {
attrNames[Elem](doc1.documentElement).intersect(Set(EName("Price"), EName("Edition")))
}
assertResult(Set()) {
elemNames[Elem](doc1.documentElement)
.intersect(Set(EName("{http://bookstore}Price"), EName("{http://bookstore}Edition")))
}
val updElem = { (e: Elem, attr: String) =>
updateBook(e, attr)
}
val doc2 = Document(
turnBookAttributeIntoElemUsingPathSet(
turnBookAttributeIntoElemUsingPathSet(doc1.documentElement, "Price", updElem),
"Edition",
updElem).removeAllInterElementWhitespace)
assertResult(Set()) {
attrNames[Elem](doc2.documentElement).intersect(Set(EName("Price"), EName("Edition")))
}
assertResult(Set(EName("{http://bookstore}Price"), EName("{http://bookstore}Edition"))) {
elemNames[Elem](doc2.documentElement)
.intersect(Set(EName("{http://bookstore}Price"), EName("{http://bookstore}Edition")))
}
}
test("testUpdateUsingTransform") {
val is = classOf[UpdateTest].getResourceAsStream("books.xml")
val doc1: Document = docParser.parse(is)
assertResult(Set(EName("Price"), EName("Edition"))) {
attrNames[Elem](doc1.documentElement).intersect(Set(EName("Price"), EName("Edition")))
}
assertResult(Set()) {
elemNames[Elem](doc1.documentElement)
.intersect(Set(EName("{http://bookstore}Price"), EName("{http://bookstore}Edition")))
}
val updElem = { (e: Elem, attr: String) =>
updateBook(e, attr)
}
val doc2 = Document(
turnBookAttributeIntoElemUsingTransform(
turnBookAttributeIntoElemUsingTransform(doc1.documentElement, "Price", updElem),
"Edition",
updElem).removeAllInterElementWhitespace)
assertResult(Set()) {
attrNames[Elem](doc2.documentElement).intersect(Set(EName("Price"), EName("Edition")))
}
assertResult(Set(EName("{http://bookstore}Price"), EName("{http://bookstore}Edition"))) {
elemNames[Elem](doc2.documentElement)
.intersect(Set(EName("{http://bookstore}Price"), EName("{http://bookstore}Edition")))
}
val resolvedOriginalElm = resolved.Elem.from(doc1.documentElement)
val resolvedUpdatedElm = resolved.Elem.from(doc2.documentElement)
val updResolvedElem = { (e: resolved.Elem, attr: String) =>
updateBook(e, attr)
}
val updatedResolvedElm =
turnBookAttributeIntoElemUsingTransform(
turnBookAttributeIntoElemUsingTransform(resolvedOriginalElm, "Price", updResolvedElem),
"Edition",
updResolvedElem).removeAllInterElementWhitespace
assertResult(false) {
resolvedOriginalElm == resolvedUpdatedElm
}
assertResult(true) {
resolvedUpdatedElm == updatedResolvedElm
}
}
/** Same example as http://www.journaldev.com/901/how-to-edit-xml-file-in-java-dom-parser, but now using yaidom functional updates */
test("testAnotherUpdate") {
val is = classOf[UpdateTest].getResourceAsStream("employee.xml")
val doc: Document = docParser.parse(is)
import Node._
// Updates on Employee elements:
    // 1. Id attribute prefixed with the (one char) gender
// 2. Name element made uppercase. This is coded as a separate case in the partial function below!
// 3. Element gender removed
// 4. Element salary added (with value 10000)
val f: Elem => Elem = {
case e @ Elem(QName(_, "Employee"), _, _, _) =>
val gender = (e \ (_.localName == "gender")).map(_.text).mkString("")
val genderPrefix = if (gender == "Male") "M" else "F"
val newId = genderPrefix + (e \@ EName("id")).head
val scope = e.scope ++ Scope.from("" -> "http://www.journaldev.com/901/how-to-edit-xml-file-in-java-dom-parser")
val salaryElem = textElem(QName("salary"), scope, "10000")
val newChildren = e.children.collect { case che: Elem if che.localName != "gender" => che } :+ salaryElem
e.plusAttribute(QName("id"), newId).withChildren(newChildren)
case e @ Elem(QName(_, "name"), _, _, _) =>
e.withChildren(Vector(Text(e.text.toUpperCase, isCData = false)))
case e: Elem => e
}
val updatedDoc = doc.transformElemsOrSelf(f)
val formattedUpdatedDoc = updatedDoc.withDocumentElement(updatedDoc.documentElement.prettify(4))
logger.info(
"Result of update (using function updated):%n%s".format(docPrinter.print(formattedUpdatedDoc.documentElement)))
// Parse and check the file with the expected updated result
val expectedNewDoc = docParser.parse(classOf[UpdateTest].getResourceAsStream("updatedEmployee.xml"))
val expectedResolvedNewRoot = resolved.Elem.from(expectedNewDoc.documentElement.removeAllInterElementWhitespace)
// Is the parsed expected update result indeed as expected?
assertResult(Seq("M1", "F2")) {
(expectedResolvedNewRoot \\ (_.localName == "Employee")).flatMap(_ \@ EName("id"))
}
assertResult(Seq("PANKAJ", "LISA")) {
(expectedResolvedNewRoot \\ (_.localName == "name")).map(_.text)
}
assertResult(Seq()) {
expectedResolvedNewRoot \\ (_.localName == "gender")
}
assertResult(Seq("10000", "10000")) {
(expectedResolvedNewRoot \\ (_.localName == "salary")).map(_.text)
}
// Finally we check the result of the functional update against this parsed expected update result
assertResult(expectedResolvedNewRoot) {
resolved.Elem.from(formattedUpdatedDoc.documentElement.removeAllInterElementWhitespace)
}
// Same check, but invoking plusAttribute and minusAttribute as well.
assertResult(expectedResolvedNewRoot) {
resolved.Elem.from(
formattedUpdatedDoc.documentElement.removeAllInterElementWhitespace
.minusAttribute(QName("x"))
.plusAttribute(QName("x"), "v")
.minusAttribute(QName("x")))
}
}
/** Same example as http://www.journaldev.com/901/how-to-edit-xml-file-in-java-dom-parser, but now using yaidom function topmostUpdated */
test("testAnotherUpdateUsingTransformTopmost") {
val is = classOf[UpdateTest].getResourceAsStream("employee.xml")
val doc: Document = docParser.parse(is)
import Node._
// Updates on Employee elements:
    // 1. Id attribute prefixed with the (one char) gender
// 2. Tried but not picked up: name element made uppercase. This is coded as a separate case in the partial function below!
// 3. Element gender removed
// 4. Element salary added (with value 10000)
val f: Elem => Elem = {
case e @ Elem(QName(_, "Employee"), _, _, _) =>
val gender = (e \ (_.localName == "gender")).map(_.text).mkString("")
val genderPrefix = if (gender == "Male") "M" else "F"
val newId = genderPrefix + (e \@ EName("id")).head
val scope = e.scope ++ Scope.from("" -> "http://www.journaldev.com/901/how-to-edit-xml-file-in-java-dom-parser")
val salaryElem = textElem(QName("salary"), scope, "10000")
val newChildren = e.children.collect { case che: Elem if che.localName != "gender" => che } :+ salaryElem
e.plusAttribute(QName("id"), newId).withChildren(newChildren)
case e @ Elem(QName(_, "name"), _, _, _) =>
e.withChildren(Vector(Text(e.text.toUpperCase, isCData = false)))
case e: Elem => e
}
// The name update is also picked up.
val updatedDoc = doc.transformElemsOrSelf(f)
val formattedUpdatedDoc = updatedDoc.withDocumentElement(updatedDoc.documentElement.prettify(4))
logger.info(
"Result of update (using function topmostUpdated):%n%s".format(
docPrinter.print(formattedUpdatedDoc.documentElement)))
// Parse and check the file with the expected updated result
val expectedNewDoc = docParser.parse(classOf[UpdateTest].getResourceAsStream("updatedEmployee.xml"))
val expectedResolvedNewRoot = resolved.Elem.from(expectedNewDoc.documentElement.removeAllInterElementWhitespace)
// Is the parsed expected update result indeed as expected?
assertResult(Seq("M1", "F2")) {
(expectedResolvedNewRoot \\ (_.localName == "Employee")).flatMap(_ \@ EName("id"))
}
assertResult(Seq("PANKAJ", "LISA")) {
(expectedResolvedNewRoot \\ (_.localName == "name")).map(_.text)
}
assertResult(Seq()) {
expectedResolvedNewRoot \\ (_.localName == "gender")
}
assertResult(Seq("10000", "10000")) {
(expectedResolvedNewRoot \\ (_.localName == "salary")).map(_.text)
}
// Finally we check the result of the functional update against this parsed expected update result
assertResult(expectedResolvedNewRoot.findAllElemsOrSelf.map(_.resolvedName)) {
resolved.Elem
.from(formattedUpdatedDoc.documentElement.removeAllInterElementWhitespace)
.findAllElemsOrSelf
.map(_.resolvedName)
}
assertResult(expectedResolvedNewRoot.findAllElemsOrSelf.flatMap(_.resolvedAttributes)) {
resolved.Elem
.from(formattedUpdatedDoc.documentElement.removeAllInterElementWhitespace)
.findAllElemsOrSelf
.flatMap(_.resolvedAttributes)
}
assertResult(expectedResolvedNewRoot) {
resolved.Elem.from(formattedUpdatedDoc.documentElement.removeAllInterElementWhitespace)
}
}
private def attrNames[E <: ClarkNodes.Elem.Aux[_, E] with UpdatableElemLike.Aux[_, E]](rootElm: E): Set[EName] = {
val allElems = rootElm.findAllElemsOrSelf.asInstanceOf[immutable.IndexedSeq[ClarkElemApi]]
val result = allElems.flatMap(e => e.resolvedAttributes.toMap.keySet)
result.toSet
}
private def elemNames[E <: ClarkNodes.Elem.Aux[_, E] with UpdatableElemLike.Aux[_, E]](rootElm: E): Set[EName] = {
val allElems = rootElm.findAllElemsOrSelf.asInstanceOf[immutable.IndexedSeq[ClarkElemApi]]
val result = allElems.map { e =>
e.resolvedName
}
result.toSet
}
private def turnBookAttributeIntoElem(rootElm: Elem, attrName: String, upd: (Elem, String) => Elem): Elem = {
// Regression in Scala 2.13.0-M3:
// Cannot construct a collection of type That with elements of type eu.cdevreeze.yaidom.core.Path based on
// a collection of type scala.collection.immutable.IndexedSeq[eu.cdevreeze.yaidom.indexed.IndexedScopedNode.Elem[eu.cdevreeze.yaidom.simple.Elem]].
// Circumventing this compilation error by introducing an extra variable for the indexed.Elem.
val indexedRootElm = indexed.Elem(rootElm)
val matchingPaths =
indexedRootElm
.filterElems { e =>
e.attributeOption(EName(attrName)).isDefined && e.path.endsWithName(EName("{http://bookstore}Book"))
}
.map(_.path)
matchingPaths.reverse.foldLeft(rootElm) { (acc, path) =>
require(rootElm.findElemOrSelfByPath(path).isDefined)
acc.updateElemOrSelf(path) { e =>
upd(e, attrName)
}
}
}
private def turnBookAttributeIntoElemUsingPathSet(
rootElm: Elem,
attrName: String,
upd: (Elem, String) => Elem): Elem = {
// Regression in Scala 2.13.0-M3:
// Cannot construct a collection of type That with elements of type eu.cdevreeze.yaidom.core.Path based on
// a collection of type scala.collection.immutable.IndexedSeq[eu.cdevreeze.yaidom.indexed.IndexedScopedNode.Elem[eu.cdevreeze.yaidom.simple.Elem]].
// Circumventing this compilation error by introducing an extra variable for the indexed.Elem.
val indexedRootElm = indexed.Elem(rootElm)
val matchingPaths =
indexedRootElm
.filterElems { e =>
e.attributeOption(EName(attrName)).isDefined && e.path.endsWithName(EName("{http://bookstore}Book"))
}
.map(_.path)
rootElm.updateElemsOrSelf(matchingPaths.toSet) { (elem, path) =>
require(rootElm.findElemOrSelfByPath(path).isDefined)
upd(elem, attrName)
}
}
private def turnBookAttributeIntoElemUsingTransform(
rootElm: Elem,
attrName: String,
upd: (Elem, String) => Elem): Elem = {
val f: Elem => Elem = {
case e: Elem
if e.resolvedName == EName("{http://bookstore}Book") && e.attributeOption(EName(attrName)).isDefined =>
upd(e, attrName)
case e => e
}
rootElm.transformElemsOrSelf(f)
}
private def turnBookAttributeIntoElemUsingTransform(
rootElm: resolved.Elem,
attrName: String,
upd: (resolved.Elem, String) => resolved.Elem): resolved.Elem = {
val f: resolved.Elem => resolved.Elem = {
case e: resolved.Elem
if e.resolvedName == EName("{http://bookstore}Book") && e.attributeOption(EName(attrName)).isDefined =>
upd(e, attrName)
case e => e
}
rootElm.transformElemsOrSelf(f)
}
def updateBook(bookElm: Elem, attrName: String): Elem = {
require(bookElm.localName == "Book")
require(bookElm.attributeOption(EName(attrName)).isDefined)
val attrValue = bookElm.attribute(EName(attrName))
import Node._
elem(
qname = bookElm.qname,
attributes = bookElm.attributes.filterNot { case (qn, _) => qn == QName(attrName) },
scope = bookElm.scope,
children = bookElm.children :+ textElem(qname = QName(attrName), scope = bookElm.scope, txt = attrValue)
)
}
def updateBook(bookElm: resolved.Elem, attrName: String): resolved.Elem = {
require(bookElm.localName == "Book")
require(bookElm.attributeOption(EName(attrName)).isDefined)
val attrValue = bookElm.attribute(EName(attrName))
resolved.Elem(
resolvedName = bookElm.resolvedName,
resolvedAttributes = bookElm.resolvedAttributes.filterNot { case (en, _) => en == EName(attrName) },
children = bookElm.children :+ resolved.Elem(
resolvedName = EName("http://bookstore", attrName),
resolvedAttributes = Map(),
children = Vector(resolved.Text(attrValue)))
)
}
}
| dvreeze/yaidom | jvm/src/test/scala/eu/cdevreeze/yaidom/integrationtest/UpdateTest.scala | Scala | apache-2.0 | 17,251 |
package org.eichelberger.sfc.examples.quickstart

object Example2 extends App {
  import org.eichelberger.sfc._
  import org.eichelberger.sfc.SpaceFillingCurve._

  // create a 4D curve
  val zCurve = new ZCurve(OrdinalVector(10, 20, 15, 4))
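  // The constructor arguments are (presumably) the per-dimension precisions in bits, so this
  // curve interleaves 10 + 20 + 15 + 4 = 49 bits of input coordinates into a single index.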
  // map from an input point to a hashed point
  val idx = zCurve.index(OrdinalVector(7, 28, 2001, 8))
  println(s"(7, 28, 2001, 8) -> $idx")

  // invert the map back to inputs
  val point = zCurve.inverseIndex(idx)
  println(s"$idx <- $point")
}
|
cne1x/sfseize
|
src/main/scala/org/eichelberger/sfc/examples/quickstart/Example2.scala
|
Scala
|
apache-2.0
| 491
|
/*
 * Copyright (c) 2014-2020 by The Monix Project Developers.
 * See the project homepage at: https://monix.io
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package monix.reactive.consumers

import cats.laws._
import cats.laws.discipline._
import monix.execution.exceptions.DummyException
import monix.reactive.{BaseTestSuite, Consumer, Observable}

import scala.util.Failure

object MapConsumerSuite extends BaseTestSuite {
  test("consumer.map equivalence with task.map") { implicit s =>
    check1 { (obs: Observable[Int]) =>
      val consumer = Consumer.foldLeft[Long, Int](0L)(_ + _)
      val t1 = obs.consumeWith(consumer.map(_ + 100))
      val t2 = obs.consumeWith(consumer).map(_ + 100)
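      // `<->` (from cats-laws) pairs the two tasks as an IsEq; the property check then asserts
      // that mapping the consumer is equivalent to mapping the task it produces, for arbitrary
      // observables.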
      t1 <-> t2
    }
  }

  test("consumer.map streams error") { implicit s =>
    check2 { (obs: Observable[Int], ex: Throwable) =>
      val withError = obs.endWithError(ex)
      val consumer = Consumer.foldLeft[Long, Int](0L)(_ + _)
      val t1 = withError.consumeWith(consumer.map(_ + 100))
      val t2 = withError.consumeWith(consumer).map(_ + 100)
      t1 <-> t2
    }
  }

  test("consumer.map protects against user code") { implicit s =>
    val ex = DummyException("dummy")
    val f = Observable(1)
      .consumeWith(Consumer.head[Int].map(_ => throw ex))
      .runToFuture

    s.tick()
    assertEquals(f.value, Some(Failure(ex)))
  }
}
|
alexandru/monifu
|
monix-reactive/shared/src/test/scala/monix/reactive/consumers/MapConsumerSuite.scala
|
Scala
|
apache-2.0
| 1,872
|
// scalac: -Xplugin:. -Yrangepos:false

import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context

object Macros {
  def impl1(c: Context) = {
    import c.universe._
    q"""println("impl1")"""
  }

  def impl2(c: Context) = {
    import c.universe._
    q"""println("impl2")"""
  }
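  // The literal macro bodies below are not valid references on their own; this test relies on
  // the compiler plugin loaded via -Xplugin (see the scalac flags above) to typecheck them,
  // presumably resolving `macro 1` to impl1 and `macro 2` to impl2.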
  def foo1: Unit = macro 1
  def foo2: Unit = macro 2
}
|
scala/scala
|
test/files/run/macroPlugins-typedMacroBody/Macros_2.scala
|
Scala
|
apache-2.0
| 370
|
package io.buoyant.k8s.v1beta1

import com.twitter.finagle.http._
import com.twitter.finagle.{Service => FService}
import com.twitter.io.Buf
import com.twitter.util._
import io.buoyant.test.{Awaits, Exceptions}
import org.scalatest.FunSuite

class ApiTest extends FunSuite with Awaits with Exceptions {

  val ingressResource = Buf.Utf8("""{
    "kind":"Ingress",
    "apiVersion":"extensions/v1beta",
    "metadata":{"name":"test-ingress","namespace":"srv","selfLink":"/apis/extensions/v1beta1/namespaces/srv/ingresses/test-ingress","uid":"1b0e7393-4d10-11e5-9859-42010af01815","resourceVersion":"4430527","creationTimestamp":"2015-08-27T23:05:27Z"},
    "spec": {
      "rules": [{
        "http": {
          "paths": [{
            "path": "/fooPath",
            "backend": {
              "serviceName": "/fooService",
              "servicePort": "/fooPort"
            }
          }]
        }
      }]
    }
  }""")
test("get ingress") {
@volatile var reqCount = 0
@volatile var failure: Throwable = null
val service = FService.mk[Request, Response] { req =>
reqCount += 1
reqCount match {
case 1 =>
try {
assert(req.uri == s"/apis/extensions/v1beta1/namespaces/srv/ingresses/test-ingress")
val rsp = Response()
rsp.version = req.version
rsp.setContentTypeJson()
rsp.headerMap("Transfer-Encoding") = "chunked"
rsp.writer.write(ingressResource) before rsp.writer.close()
Future.value(rsp)
} catch {
case e: Throwable =>
failure = e
Future.exception(e)
}
case _ => Future.never
}
}
val ns = Api(service).withNamespace("srv")
val ingress = await(ns.ingresses.named("test-ingress").get)
val paths = for (
spec <- ingress.spec.toSeq;
rules <- spec.rules.toSeq;
rule <- rules;
http <- rule.http.toSeq;
path <- http.paths
) yield {
assert(path.path == Some("/fooPath"))
assert(path.backend.serviceName == "/fooService")
assert(path.backend.servicePort == "/fooPort")
path
}
assert(paths.size == 1)
}
}
|
denverwilliams/linkerd
|
k8s/src/test/scala/io/buoyant/k8s/v1beta1/ApiTest.scala
|
Scala
|
apache-2.0
| 2,195
|
package net.scalax.ubw.database.test
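
// `Friend` is defined elsewhere in this sample project; judging from the usage below it is
// presumably a case class along the lines of:
//   case class Friend(name: String, nick: String, age: Option[Int], grade: Int)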
object Sample01 {
  val friend1 = Friend(name = "魔理沙", nick = "小莎莎", age = Option(2333), grade = 3)
  val friend2 = Friend(name = "jilen", nick = "jilen 酱", age = Option(30), grade = 4)
  val friend3 = Friend(name = "品神", nick = "kerr", age = Option(28), grade = 5)
  val friend4 = Friend(name = "廖师虎", nick = "shihu", age = None, grade = 6)
}
|
scalax/fsn
|
sample/commonSlick/src/main/scala/Sample01.scala
|
Scala
|
mit
| 416