| code (string, 5-1M chars) | repo_name (string, 5-109 chars) | path (string, 6-208 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 5-1M) |
|---|---|---|---|---|---|
package org.smartpony.consumer.kinesis
import java.util.UUID
import java.util.concurrent.{ConcurrentHashMap, Executors}
import com.amazonaws.ClientConfiguration
import com.amazonaws.services.kinesis.clientlibrary.interfaces.{IRecordProcessor, IRecordProcessorFactory}
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.{InitialPositionInStream, KinesisClientLibConfiguration, Worker}
import com.typesafe.config.ConfigFactory
import org.slf4j.LoggerFactory
import org.smartpony.consumer.kinesis.actor.RunningKinesisActorSystem
import org.smartpony.consumer.kinesis.config.{KinesisConsumerConfig, KinesisConfig}
import org.smartpony.core.provider.ProviderActors
import org.smartpony.system.consumer.kinesis.endpoint.EventCapture
import scala.collection.JavaConverters._
import scala.language.postfixOps
import SmartPonyKinesisConsumer._
trait SmartPonyKinesisConsumer extends RunningKinesisActorSystem { self: ProviderActors =>
private val conf = ConfigFactory.load().getConfig("smart-pony")
private val flushWaitSeconds = conf.getLong("kinesis-provider-flush-wait-seconds")
private var kinesisWorkers: Seq[Worker] = Seq.empty[Worker]
private val eventProcessors = new ConcurrentHashMap[UUID, EventCapture]()
private implicit val ec: scala.concurrent.ExecutionContext = providerActorSystem.dispatcher
def startKinesisConsumer() = {
val processorFactory = new IRecordProcessorFactory {
override def createProcessor(): IRecordProcessor = {
val proc = new EventCapture(providerActors)
eventProcessors.put(UUID.randomUUID(), proc)
proc
}
}
val threadPool = Executors.newCachedThreadPool() //Note that the thread pool is shared across worker instances.
kinesisWorkers = new KinesisConfig(conf).consumers().map { consumer =>
log.info(s"Added worker for ${consumer.name}")
new Worker(processorFactory, kinesisLibConfig(consumer), threadPool)
}
log.info(s"Starting ${kinesisWorkers.size} Kinesis workers")
sys.addShutdownHook(stopKinesisConsumer())
// Worker.run() blocks, so start each worker on the shared pool rather than serially
kinesisWorkers.foreach(w => threadPool.submit(w))
}
def preStopKinesisConsumer(): Unit = {}
def stopKinesisConsumer() = {
preStopKinesisConsumer()
eventProcessors.values.asScala.foreach(_.shutdown())
kinesisWorkers.foreach(_.shutdown())
providerActorSystem.shutdown()
log.info(s"\nWaiting $flushWaitSeconds seconds for the Kinesis workers to flush")
Thread.sleep(flushWaitSeconds * 1000)
log.info("The Kinesis workers were shut down")
}
private def kinesisLibConfig(consumer: KinesisConsumerConfig) = {
import consumer._
new KinesisClientLibConfiguration(name, stream, credentials, workerId).
withCommonClientConfig(new ClientConfiguration).
withRegionName(consumer.region.getName).
withInitialPositionInStream(InitialPositionInStream.TRIM_HORIZON)
}
}
object SmartPonyKinesisConsumer {
val log = LoggerFactory.getLogger("smart-pony.kinesis.consumer")
}
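
// A minimal, self-contained sketch of the KCL 1.x pieces this trait wires
// together (record processor, factory, worker). The stream and application
// names are hypothetical, and credentials are assumed to resolve from the
// default provider chain; this is an illustration, not the project's code.
object KclWorkerSketch {
  import java.util.{List => JList, UUID}
  import com.amazonaws.auth.DefaultAWSCredentialsProviderChain
  import com.amazonaws.services.kinesis.clientlibrary.interfaces.{IRecordProcessor, IRecordProcessorCheckpointer, IRecordProcessorFactory}
  import com.amazonaws.services.kinesis.clientlibrary.lib.worker.{KinesisClientLibConfiguration, ShutdownReason, Worker}
  import com.amazonaws.services.kinesis.model.Record

  class LoggingProcessor extends IRecordProcessor {
    override def initialize(shardId: String): Unit = println(s"initialized on shard $shardId")
    override def processRecords(records: JList[Record], checkpointer: IRecordProcessorCheckpointer): Unit =
      println(s"received ${records.size()} records")
    override def shutdown(checkpointer: IRecordProcessorCheckpointer, reason: ShutdownReason): Unit =
      println(s"shut down: $reason")
  }

  def main(args: Array[String]): Unit = {
    val factory = new IRecordProcessorFactory {
      override def createProcessor(): IRecordProcessor = new LoggingProcessor
    }
    val config = new KinesisClientLibConfiguration(
      "sketch-app", "sketch-stream", // hypothetical application and stream names
      new DefaultAWSCredentialsProviderChain(),
      UUID.randomUUID().toString)
    new Worker(factory, config).run() // blocks until the worker is shut down
  }
}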
| Dextaa/smartpony | consumer/kinesis/src/main/scala/org/smartpony/consumer/kinesis/SmartPonyKinesisConsumer.scala | Scala | mit | 2,956 |
/*                     __                                               *\
**     ________ ___   / /  ___      __ ____  Scala.js Test Suite        **
**    / __/ __// _ | / /  / _ | __ / // __/  (c) 2013-2015, LAMP/EPFL   **
**  __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \    http://scala-js.org/       **
** /____/\___/_/ |_/____/_/ | |__/ /____/                               **
**                          |/____/                                     **
\*                                                                      */
package org.scalajs.testsuite.javalib.util
import java.util
import scala.language.implicitConversions
import scala.collection.JavaConversions._
import scala.collection.mutable
import org.scalajs.jasminetest.JasmineTest
import scala.scalajs.runtime.UndefinedBehaviorError
import java.{util => ju}
import ju.TreeSet
import ju.Comparator
object TreeSetTest extends TreeSetTest(new TreeSetFactory) {
override def testApi(): Unit = {
super.testApi()
it("should check that comparator is always null") {
val ts1 = factory.empty[Int]
expect(ts1.comparator() == null).toBeTruthy
val ts2 = factory.empty[String]
expect(ts2.comparator() == null).toBeTruthy
}
}
}
object TreeSetWithNullTest extends TreeSetTest(new TreeSetWithNullFactory) {
override def testApi(): Unit = {
super.testApi()
it("should check that comparator is never null") {
val ts1 = factory.empty[Int]
expect(ts1.comparator() == null).toBeFalsy
val ts2 = factory.empty[String]
expect(ts2.comparator() == null).toBeFalsy
}
}
}
class TreeSetTest[F <: TreeSetFactory](treeSetTestFactory: F)
extends AbstractSetTest[F](treeSetTestFactory)
with SortedSetTest
with NavigableSetTest {
override def testApi(): Unit = {
super.testApi()
testSortedSetApi(treeSetTestFactory)
testNavigableSetApi(treeSetTestFactory)
it("should store and remove ordered integers") {
val ts = treeSetTestFactory.empty[Int]
expect(ts.size()).toEqual(0)
expect(ts.add(222)).toBeTruthy
expect(ts.size()).toEqual(1)
expect(ts.add(111)).toBeTruthy
expect(ts.size()).toEqual(2)
expect(ts.first).toEqual(111)
expect(ts.remove(111)).toBeTruthy
expect(ts.size()).toEqual(1)
expect(ts.first).toEqual(222)
expect(ts.remove(222)).toBeTruthy
expect(ts.size()).toEqual(0)
expect(ts.isEmpty).toBeTruthy
expect(ts.remove(333)).toBeFalsy
expect {
try {
ts.first
false
} catch {
case _: NoSuchElementException => true
case _: Throwable => false
}
}.toBeTruthy
if (treeSetTestFactory.allowsNullElement) {
expect(ts.add(null.asInstanceOf[Int])).toBeTruthy
expect(ts.contains(null)).toBeTruthy
expect(ts.remove(null)).toBeTruthy
expect(ts.contains(null)).toBeFalsy
}
}
it("should store and remove ordered strings") {
val ts = treeSetTestFactory.empty[String]
expect(ts.size()).toEqual(0)
expect(ts.add("222")).toBeTruthy
expect(ts.size()).toEqual(1)
expect(ts.add("111")).toBeTruthy
expect(ts.size()).toEqual(2)
expect(ts.first).toEqual("111")
expect(ts.remove("111")).toBeTruthy
expect(ts.size()).toEqual(1)
expect(ts.first).toEqual("222")
expect(ts.remove("222")).toBeTruthy
expect(ts.size()).toEqual(0)
expect(ts.remove("333")).toBeFalsy
expect(ts.isEmpty).toBeTruthy
if (treeSetTestFactory.allowsNullElement) {
expect(ts.add(null)).toBeTruthy
expect(ts.contains(null)).toBeTruthy
expect(ts.remove(null)).toBeTruthy
expect(ts.contains(null)).toBeFalsy
}
}
case class TestObj(num: Int)
when("compliant-asinstanceofs").
it("should throw exception on non-comparable objects") {
val ts1 = treeSetTestFactory.empty[TestObj]
expect(ts1.size()).toEqual(0)
expectThrows[ClassCastException](ts1.add(TestObj(111)))
}
it("should store objects with custom comparables") {
case class Rect(x: Int, y: Int)
val areaComp = new ju.Comparator[Rect] {
def compare(a: Rect, b: Rect): Int = (a.x*a.y) - (b.x*b.y)
}
val ts = new TreeSet[Rect](areaComp)
expect(ts.add(Rect(1,2))).toBeTruthy
expect(ts.add(Rect(2,3))).toBeTruthy
expect(ts.add(Rect(1,3))).toBeTruthy
val first = ts.first()
expect(first.x).toEqual(1)
expect(first.y).toEqual(2)
expect(ts.remove(first)).toBeTruthy
expect(ts.remove(first)).toBeFalsy
val second = ts.first()
expect(second.x).toEqual(1)
expect(second.y).toEqual(3)
expect(ts.remove(second)).toBeTruthy
val third = ts.first()
expect(third.x).toEqual(2)
expect(third.y).toEqual(3)
expect(ts.remove(third)).toBeTruthy
expect(ts.isEmpty).toBeTruthy
}
it("should store ordered Double even in corner cases") {
val ts = treeSetTestFactory.empty[Double]
expect(ts.add(1.0)).toBeTruthy
expect(ts.add(+0.0)).toBeTruthy
expect(ts.add(-0.0)).toBeTruthy
expect(ts.add(Double.NaN)).toBeTruthy
expect(ts.first.equals(-0.0)).toBeTruthy
expect(ts.remove(-0.0)).toBeTruthy
expect(ts.first.equals(+0.0)).toBeTruthy
expect(ts.remove(+0.0)).toBeTruthy
expect(ts.first.equals(1.0)).toBeTruthy
expect(ts.remove(1.0)).toBeTruthy
expect(ts.first.isNaN).toBeTruthy
expect(ts.remove(Double.NaN)).toBeTruthy
expect(ts.isEmpty).toBeTruthy
}
it("could be instantiated with a prepopulated Collection") {
val l = asJavaCollection(Set(1, 5, 2, 3, 4))
val ts = new TreeSet[Int](l)
expect(ts.size()).toEqual(5)
for (i <- 1 to 5) {
expect(ts.first).toEqual(i)
expect(ts.remove(i)).toBeTruthy
}
expect(ts.isEmpty).toBeTruthy
}
it("should be cleared in a single operation") {
val l = asJavaCollection(Set(1, 5, 2, 3, 4))
val ts = treeSetTestFactory.empty[Int]
ts.addAll(l)
expect(ts.size()).toEqual(5)
ts.clear()
expect(ts.size()).toEqual(0)
}
it("should add multiple elements in one operation") {
val l = asJavaCollection(Set(1, 5, 2, 3, 4))
val ts = treeSetTestFactory.empty[Int]
expect(ts.size()).toEqual(0)
ts.addAll(l)
expect(ts.size()).toEqual(5)
ts.add(6)
expect(ts.size()).toEqual(6)
}
it("should check contained values even in double corner cases") {
val ts = treeSetTestFactory.empty[Double]
expect(ts.add(11111.0)).toBeTruthy
expect(ts.size()).toEqual(1)
expect(ts.contains(11111.0)).toBeTruthy
expect(ts.iterator.next()).toEqual(11111.0)
expect(ts.add(Double.NaN)).toBeTruthy
expect(ts.size()).toEqual(2)
expect(ts.contains(Double.NaN)).toBeTruthy
expect(ts.contains(+0.0)).toBeFalsy
expect(ts.contains(-0.0)).toBeFalsy
expect(ts.remove(Double.NaN)).toBeTruthy
expect(ts.add(+0.0)).toBeTruthy
expect(ts.size()).toEqual(2)
expect(ts.contains(Double.NaN)).toBeFalsy
expect(ts.contains(+0.0)).toBeTruthy
expect(ts.contains(-0.0)).toBeFalsy
expect(ts.remove(+0.0)).toBeTruthy
expect(ts.add(-0.0)).toBeTruthy
expect(ts.size()).toEqual(2)
expect(ts.contains(Double.NaN)).toBeFalsy
expect(ts.contains(+0.0)).toBeFalsy
expect(ts.contains(-0.0)).toBeTruthy
expect(ts.add(+0.0)).toBeTruthy
expect(ts.add(Double.NaN)).toBeTruthy
expect(ts.contains(Double.NaN)).toBeTruthy
expect(ts.contains(+0.0)).toBeTruthy
expect(ts.contains(-0.0)).toBeTruthy
}
when("compliant-asinstanceofs").
it("should throw exceptions on access outside bound on views") {
val l = asJavaCollection(Set(2, 3, 6))
val ts = treeSetTestFactory.empty[Int]
ts.addAll(l)
val hs1 = ts.headSet(5, true)
expect(hs1.add(4)).toBeTruthy
expect(hs1.add(5)).toBeTruthy
expectThrows[IllegalArgumentException](hs1.add(6))
ts.clear()
ts.addAll(l)
val hs2 = ts.headSet(5, false)
expect(hs2.add(4)).toBeTruthy
expectThrows[IllegalArgumentException](hs2.add(5))
ts.clear()
ts.addAll(l)
val ts1 = ts.tailSet(1, true)
expect(ts1.add(7)).toBeTruthy
expect(ts1.add(1)).toBeTruthy
expectThrows[IllegalArgumentException](ts1.add(0))
ts.clear()
ts.addAll(l)
val ts2 = ts.tailSet(1, false)
expect(ts2.add(7)).toBeTruthy
expectThrows[IllegalArgumentException](ts2.add(1))
ts.clear()
ts.addAll(l)
val ss1 = ts.subSet(1, true, 5, true)
expect(ss1.add(4)).toBeTruthy
expect(ss1.add(1)).toBeTruthy
expectThrows[IllegalArgumentException](ss1.add(0))
expect(ss1.add(5)).toBeTruthy
expectThrows[IllegalArgumentException](ss1.add(6))
ts.clear()
ts.addAll(l)
val ss2 = ts.subSet(1, false, 5, false)
expect(ss2.add(4)).toBeTruthy
expectThrows[IllegalArgumentException](ss2.add(1))
expectThrows[IllegalArgumentException](ss2.add(5))
}
it("should throw exception in case of null elements and default ordering") {
val hs = treeSetTestFactory.empty[String]
expect(hs.add("ONE")).toBeTruthy
expect(hs.contains("ONE")).toBeTruthy
expect(hs.contains("TWO")).toBeFalsy
if (treeSetTestFactory.allowsNullElement) {
expect(hs.add(null)).toBeTruthy
expect(hs.contains(null)).toBeTruthy
} else {
expectThrows[Exception](hs.add(null))
}
}
it("should not put a whole Collection with null elements into it") {
val l = List[String]("ONE", "TWO", (null: String))
val ts1 = treeSetTestFactory.empty[String]
if (treeSetTestFactory.allowsNullElement) {
expect(ts1.addAll(l)).toBeTruthy
expect(ts1.contains(null)).toBeTruthy
expect(ts1.contains("ONE")).toBeTruthy
expect(ts1.contains("THREE")).toBeFalsy
} else {
expectThrows[Exception] {
ts1.addAll(asJavaCollection(l))
}
}
}
}
}
object TreeSetFactory extends TreeSetFactory {
def allFactories: Iterator[TreeSetFactory] =
Iterator(new TreeSetFactory, new TreeSetWithNullFactory)
}
class TreeSetFactory extends AbstractSetFactory with NavigableSetFactory
with SortedSetFactory {
def implementationName: String =
"java.util.TreeSet"
def empty[E]: ju.TreeSet[E] =
new TreeSet[E]
override def allowsNullElement: Boolean = false
override def allowsNullElementQuery: Boolean = false
}
class TreeSetWithNullFactory extends TreeSetFactory {
override def implementationName: String =
super.implementationName + " {allows null}"
case class EvenNullComp[E]() extends Comparator[E] {
def compare(a: E, b: E): Int =
(Option(a), Option(b)) match {
case (Some(e1), Some(e2)) => e1.asInstanceOf[Comparable[E]].compareTo(e2)
case (Some(e1), None) => -1
case (None, Some(e2)) => 1
case (None, None) => 0
}
}
override def empty[E]: ju.TreeSet[E] =
new TreeSet[E](EvenNullComp[E]())
override def allowsNullElement: Boolean = true
override def allowsNullElementQuery: Boolean = true
}
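
// A tiny standalone sketch (not part of the test suite) of the null-tolerant
// ordering above: EvenNullComp makes null compare greater than any value.
object NullOrderingSketch {
  def main(args: Array[String]): Unit = {
    val ts = new TreeSetWithNullFactory().empty[String]
    ts.add("b"); ts.add(null); ts.add("a")
    println(ts) // [a, b, null]
  }
}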
| CapeSepias/scala-js | test-suite/src/test/scala/org/scalajs/testsuite/javalib/util/TreeSetTest.scala | Scala | bsd-3-clause | 11,367 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.metrics
import org.apache.samza.clustermanager.SamzaApplicationState
import org.apache.samza.config.{ClusterManagerConfig, Config, MetricsConfig}
import org.apache.samza.util.Logging
/**
* Responsible for wiring up Samza's metrics. Given that Samza has a metric
* registry, we might as well use it. This class takes Samza's application
* master state, and converts it to metrics.
*/
class ContainerProcessManagerMetrics(val config: Config,
val state: SamzaApplicationState,
val registry: ReadableMetricsRegistry) extends MetricsHelper with Logging {
val clusterManagerConfig = new ClusterManagerConfig(config)
val mRunningContainers = newGauge("running-containers", () => state.runningProcessors.size)
val mNeededContainers = newGauge("needed-containers", () => state.neededProcessors.get())
val mCompletedContainers = newGauge("completed-containers", () => state.completedProcessors.get())
val mFailedContainers = newGauge("failed-containers", () => state.failedContainers.get())
val mReleasedContainers = newGauge("released-containers", () => state.releasedContainers.get())
val mContainers = newGauge("container-count", () => state.processorCount.get())
val mRedundantNotifications = newGauge("redundant-notifications", () => state.redundantNotifications.get())
val mJobHealthy = newGauge("job-healthy", () => if (state.jobHealthy.get()) 1 else 0)
val mPreferredHostRequests = newGauge("preferred-host-requests", () => state.preferredHostRequests.get())
val mAnyHostRequests = newGauge("any-host-requests", () => state.anyHostRequests.get())
val mExpiredPreferredHostRequests = newGauge("expired-preferred-host-requests", () => state.expiredPreferredHostRequests.get())
val mExpiredAnyHostRequests = newGauge("expired-any-host-requests", () => state.expiredAnyHostRequests.get())
val mHostAffinityMatchPct = newGauge("host-affinity-match-pct", () => {
val numPreferredHostRequests = state.preferredHostRequests.get()
val numExpiredPreferredHostRequests = state.expiredPreferredHostRequests.get()
if (numPreferredHostRequests != 0) {
100.00 * (numPreferredHostRequests - numExpiredPreferredHostRequests) / numPreferredHostRequests
} else {
0.0
}
})
val mFailedStandbyAllocations = newGauge("failed-standby-allocations", () => state.failedStandbyAllocations.get())
val mFailoversToAnyHost = newGauge("failovers-to-any-host", () => state.failoversToAnyHost.get())
val mFailoversToStandby = newGauge("failovers-to-standby", () => state.failoversToStandby.get())
val mContainerMemoryMb = newGauge("container-memory-mb", () => clusterManagerConfig.getContainerMemoryMb)
val mContainerCpuCores = newGauge("container-cpu-cores", () => clusterManagerConfig.getNumCores)
}
| Swrrt/Samza | samza-core/src/main/scala/org/apache/samza/metrics/ContainerProcessManagerMetrics.scala | Scala | apache-2.0 | 3,589 |
package gg.powerspike
import akka.http.scaladsl.settings.ServerSettings
import com.typesafe.config.ConfigFactory
object Main {
def main(args: Array[String]): Unit = {
WebServer.startServer("localhost", 8080, ServerSettings(ConfigFactory.load))
}
}
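
// A hypothetical sketch of the WebServer object referenced above (the real one
// lives elsewhere in this repository). It assumes akka-http 10.1.x, where
// HttpApp defines `routes`, mixes in Directives, and inherits startServer:
import akka.http.scaladsl.server.{HttpApp, Route}

object WebServer extends HttpApp {
  override protected def routes: Route =
    path("health") {
      get {
        complete("OK")
      }
    }
}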
| PowerSpikeGG/PowerSpikeGG | powerspikegg/gateway/src/main/scala/gg/powerspike/Main.scala | Scala | gpl-3.0 | 260 |
package lille1.car3.tpRest.util
import lille1.car3.tpRest.helper._
import lille1.car3.tpRest.rejection.RejectionHandlerRouting
import java.io.{ ByteArrayInputStream, InputStream, OutputStream, File, FileOutputStream, FileInputStream }
import java.io.FileNotFoundException
import scala.util.matching.Regex
import org.apache.commons.net.ftp._
import spray.http._
import spray.http.BodyPart
import spray.http.MediaTypes._
import spray.httpx.unmarshalling.DeserializationError
import spray.json._
import spray.routing._
import directives._
/**
* This trait defines the structure and tree of the application's routing in the form of an HTTP service.
* It separates the instantiation of the route from its handling by an actor. It implements the traits HelperHtml,
* HelperFunction and RejectionHandlerRouting.
*
* @author Gouzer Willian
* @author Philippon Romain
**/
trait RoutingService extends HttpService with HelperHtml with HelperFunction with RejectionHandlerRouting {
/**
* Contains the routing of the FTP gateway
*/
val routing =
(path("") & get) {
complete(loginForm)
} ~
(path("login-action") & post) {
formFields('server_ip, 'server_port.as[Int], 'login_user, 'mdp_user) { (ip_opt, port_opt, login_opt, mdp_opt) =>
val connexion = new FTPConnexion(login_opt, mdp_opt, ip_opt, port_opt)
connexion.connect
validate(connexion.login, "Vous devez être authentifie pour accéder à ces fonctionnalites") {
setCookie(HttpCookie("ftp_connexion", connexion.info)) {
complete(loggedInDoneMessage)
}
}
}
} ~
(path("list") & get) {
cookie("ftp_connexion") { cookie_ftp =>
var tab : Array[String] = cookie_ftp.content.split('_')
val connexion = new FTPConnexion(tab(0), tab(1), tab(2), tab(3).toInt)
connexion.connect
validate(connexion.login, "Vous devez être authentifie pour acceder à ces fonctionnalites") {
complete(listNote)
}
}
} ~
pathPrefix("list" / "html") {
(pathEnd & get) {
cookie("ftp_connexion") { cookie_ftp =>
var tab : Array[String] = cookie_ftp.content.split('_')
var connexion = new FTPConnexion(tab(0), tab(1), tab(2), tab(3).toInt)
connexion.connect
validate(connexion.login, "Vous devez être authentifie pour acceder à ces fonctionnalites") {
connexion.login
try {
var liste_files : Array[FTPFile] = connexion.list("")
complete(HTML_ListResponse("", liste_files))
} catch {
case fnfe: FileNotFoundException => complete(HttpResponse(status = StatusCodes.Forbidden, entity = HttpEntity(`text/plain`, "Le dossier que vous voulez lister n'existe pas sur le serveur FTP")))
}
}
}
} ~
(path(Segments) & get) { piece_of_route =>
cookie("ftp_connexion") { cookie_ftp =>
var tab : Array[String] = cookie_ftp.content.split('_')
var connexion = new FTPConnexion(tab(0), tab(1), tab(2), tab(3).toInt)
connexion.connect
validate(connexion.login, "Vous devez etre authentifie pour accéder à ces fonctionnalites") {
connexion.login
piece_of_route match {
case head :: tail =>
try {
var liste_files : Array[FTPFile] = connexion.list(piece_of_route.mkString("/"))
complete(HTML_ListResponse("/"+ piece_of_route.mkString("/"), liste_files))
} catch {
case fnfe: FileNotFoundException => complete(HttpResponse(status = StatusCodes.Forbidden, entity = HttpEntity(`text/plain`, "Le dossier que vous voulez lister n'existe pas sur le serveur FTP")))
}
case List() => complete(HttpResponse(status = StatusCodes.NoContent, entity = HttpEntity(`text/html`, <html><head><title></title></head><body><p>Le dossier est introuvable</p></body></html>.toString)))
}
}
}
}
} ~
pathPrefix("list" / "json") {
(pathEnd & get) {
cookie("ftp_connexion") { cookie_ftp =>
var tab : Array[String] = cookie_ftp.content.split('_')
var connexion = new FTPConnexion(tab(0), tab(1), tab(2), tab(3).toInt)
connexion.connect
validate(connexion.login, "Vous devez etre authentifie pour acceder à ces fonctionnalites") {
connexion.login
try {
var liste_files : Array[FTPFile] = connexion.list("")
complete(JSON_ListResponse("", liste_files))
} catch {
case fnfe: FileNotFoundException => complete(HttpResponse(status = StatusCodes.Forbidden, entity = HttpEntity(`text/plain`, "Le dossier que vous voulez lister n'existe pas sur le serveur FTP")))
}
}
}
} ~
(path(Segments) & get) { piece_of_route =>
cookie("ftp_connexion") { cookie_ftp =>
var tab : Array[String] = cookie_ftp.content.split('_')
var connexion = new FTPConnexion(tab(0), tab(1), tab(2), tab(3).toInt)
connexion.connect
validate(connexion.login, "Vous devez etre authentifie pour accéder à ces fonctionnalites") {
connexion.login
piece_of_route match {
case head :: tail =>
try {
var liste_files : Array[FTPFile] = connexion.list(piece_of_route.mkString("/"))
complete(JSON_ListResponse("/"+ piece_of_route.mkString("/"), liste_files))
} catch {
case fnfe: FileNotFoundException => complete(HttpResponse(status = StatusCodes.Forbidden, entity = HttpEntity(`text/plain`, "Le dossier que vous voulez lister n'existe pas sur le serveur FTP")))
}
case List() => complete(HttpResponse(status = StatusCodes.NoContent, entity = HttpEntity(`text/html`, <html><head><title></title></head><body><p>Le dossier est introuvable</p></body></html>.toString)))
}
}
}
}
} ~
(path("get" / Segments) & get) { piece_of_route =>
cookie("ftp_connexion") { cookie_ftp =>
var tab : Array[String] = cookie_ftp.content.split('_')
val connexion = new FTPConnexion(tab(0), tab(1), tab(2), tab(3).toInt)
connexion.connect
validate(connexion.login, "Vous devez etre authentifie pour acceder à ces fonctionnalites") {
connexion.login
piece_of_route match {
case head :: tail =>
val file = File.createTempFile(piece_of_route.mkString("/"), null)
connexion.download(piece_of_route.mkString("/"), new FileOutputStream(file)) match {
case true => respondWithMediaType(`application/octet-stream`) { getFromFile(file) }
case false => complete(HttpResponse(status = StatusCodes.InternalServerError, entity = HttpEntity(`text/html`, <html><head><title></title></head><body><p>Veuillez retenter l'opération, celle-ci vient d'échouer</p></body></html>.toString)))
}
case List() => complete(HttpResponse(status = StatusCodes.NoContent, entity = HttpEntity(`text/html`, <html><head><title></title></head><body><p>Le fichier que vous vouliez télécharger est introuvable</p></body></html>.toString)))
}
}
}
} ~
(path("delete" / Segments) & get) { piece_of_route =>
cookie("ftp_connexion") { cookie_ftp =>
var tab : Array[String] = cookie_ftp.content.split('_')
val connexion = new FTPConnexion(tab(0), tab(1), tab(2), tab(3).toInt)
connexion.connect
validate(connexion.login, "Vous devez etre authentifie pour acceder à ces fonctionnalites") {
connexion.login
piece_of_route match {
case head :: tail => connexion.delete(piece_of_route.mkString("/")) match {
case true => complete(deleteDoneMessage)
case false => complete(HttpResponse(status = StatusCodes.InternalServerError, entity = HttpEntity(`text/html`, <html><head><title></title></head><body><p>Veuillez retenter l'opération, celle-ci vient d'échouer</p></body></html>.toString)))
}
case List() => complete(HttpResponse(status = StatusCodes.InternalServerError, entity = HttpEntity(`text/html`, <html><head><title></title></head><body><p>Le fichier que vous vouliez supprimer est introuvable</p></body></html>.toString)))
}
}
}
} ~
(path("store") & get) {
cookie("ftp_connexion") { cookie_ftp =>
var tab : Array[String] = cookie_ftp.content.split('_')
val connexion = new FTPConnexion(tab(0), tab(1), tab(2), tab(3).toInt)
connexion.connect
validate(connexion.login, "Vous devez etre authentifie pour acceder à ces fonctionnalites") {
connexion.login
complete(storeForm)
}
}
} ~
(path("send") & post) {
cookie("ftp_connexion") { cookie_ftp =>
var tab : Array[String] = cookie_ftp.content.split('_')
val connexion = new FTPConnexion(tab(0), tab(1), tab(2), tab(3).toInt)
connexion.connect
validate(connexion.login, "Vous devez etre authentifie pour acceder à ces fonctionnalites") {
connexion.login
entity(as[MultipartFormData]) { formData =>
val filename = extract(formData.toString, """(filename)(=)([-_.a-zA-Z0-9]+[.]+[a-zA-Z0-9]{2,})""", 3)
formField('file.as[Array[Byte]], 'path.as[String]) { (file, path) =>
if(path.equals("") || connexion.client.changeWorkingDirectory(path)) {
val temp_file = File.createTempFile(filename, null)
val fos = new FileOutputStream(temp_file)
try { fos.write(file) }
catch {
case e : java.io.IOException => complete(HttpResponse(status = StatusCodes.InternalServerError, entity = HttpEntity(`text/plain`, "L'extraction de " + filename +" a echoue")))
}
finally { fos.close() }
connexion.upload(filename, new FileInputStream(temp_file)) match {
case true => complete(HttpResponse(status = StatusCodes.OK, entity = HttpEntity(`text/html`, <html><head><title></title></head><body><p>Le fichier est bien uploadé</p></body></html>.toString)))
case false => complete(HttpResponse(status = StatusCodes.InternalServerError, entity = HttpEntity(`text/html`, <html><head><title></title></head><body><p>Veuillez retenter l'opération, celle-ci vient d'échouer</p></body></html>.toString)))
}
}
else
complete(HttpResponse(status = StatusCodes.Forbidden, entity = HttpEntity(`text/plain`, "Le dossier de destination " + path +" ne semble pas exister")))
}
}
}
}
} ~
(path("logout") & get)
{
deleteCookie("ftp_connexion"){
complete("Vous etes deconnecte")
}
}
// end of routing
}
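
// A sketch (not in this file) of the actor that would run this route, following
// the usual spray-routing pattern: the trait supplies `routing`, the actor
// supplies the ActorRefFactory and runs the route.
class RoutingServiceActor extends akka.actor.Actor with RoutingService {
  def actorRefFactory = context
  def receive = runRoute(routing)
}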
| JimiPepper/m1car_tp2 | src/main/scala/lille1/car3/tpRest/util/RoutingService.scala | Scala | gpl-2.0 | 10,804 |
import scala.io.Source
import scala.collection.mutable.Stack
import java.io.FileNotFoundException
import java.lang.NumberFormatException
class Node{
var rank=0;
var terminal=0;
var link=scala.collection.mutable.Map[Char,Node]()
}
class Trie{
val root=new Node()
var flag=false
def this(args: Array[String]){
this()
for(filename <- args){
try{
for(line <- Source.fromFile(filename).getLines()){
for(word <- line.stripLineEnd.split(' '))
this.addWord(word)
}
println("\nFile "+filename+" has been loaded successfully into the Dictionary")
flag=true
}
catch{
case ex: FileNotFoundException =>{
println("File "+filename+" is Missing")
}
}
}
if(flag==false){
println("Program Will Exit Now!!!")
System.exit(-1)
}
}
def addWord(word: String):Unit={
var temp=root
for(ch <- word){
if(!temp.link.contains(ch))
temp.link+=(ch->new Node())
temp.rank+=1
temp=temp.link(ch)
}
temp.terminal=1
}
def searchWord(word: String):Boolean={
var temp=root
for(ch <- word){
if(!temp.link.contains(ch))
return false
temp=temp.link(ch)
}
if(temp.terminal==0)
return false
return true
}
def prediction(word: String,num :Int):Seq[String]={
var list=Stack[String]()
var noOfWords=num
predictWord(word)
def predictWord(word: String):Unit={
var temp=root
for(ch <- word){
if(temp.link.contains(ch) && ch!=',' && ch!='.' && ch!=' ' && ch!='?' && ch!='!' && ch!=';')
temp=temp.link(ch)
else
return
}
//println(word)
traversal(temp,list,word)
}
def traversal(temp: Node,list: Stack[String],word: String):Unit={
if(noOfWords>0){
// count a completed word once, before descending, so terminal nodes without
// children are included and words are not pushed once per child
if(temp.terminal==1){
list.push(word)
noOfWords-=1
}
for(ch <- sort(temp))
traversal(temp.link(ch),list,word+ch)
}
}
def sort(temp: Node):Stack[Char]={
var data=Stack[Char]()
temp.link.toList sortBy ( _._1 ) foreach {
case (key, value) =>
//println(key+":"+value)
data.push(key)
}
data=data.reverse
return data
}
return list.reverse.seq.distinct
}
}
object Dictionary{
def main(args:Array[String]):Unit={
val t1=new Trie(args)
var noOfWords=10
println("\nEnter Number of Predictions Required:")
try{
noOfWords=scala.io.StdIn.readLine().toInt
}
catch{
case ex: NumberFormatException => {
println("Invalid Input, Default value is Considered")
}
}
println("\nEnter a Word to be Searched from the Dictionary")
var x=scala.io.StdIn.readLine()
while(x!=null && x.toLowerCase!="end" && x!="")
{
if(t1.searchWord(x))
println("Search Successful : "+x+" is Found")
else
println("Search Unsuccessful :"+x+" is not Found")
println("Predictions :")
(t1.prediction(x,noOfWords)).foreach(x => if(x!=null)println(x))
println("\nEnter a Word to be Searched from the Dictionary")
x=scala.io.StdIn.readLine()
}
}
}
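
// A small non-interactive sketch of the Trie API above (hypothetical words);
// prediction counts a terminal node before descending into its children:
object TrieSketch {
  def main(args: Array[String]): Unit = {
    val t = new Trie()
    Seq("car", "cart", "care", "dog").foreach(t.addWord)
    println(t.searchWord("car"))    // true: added as a whole word
    println(t.searchWord("ca"))     // false: only a prefix
    println(t.prediction("ca", 10)) // car, care, cart (lexicographic)
  }
}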
| KshitijKarthick/Predictive-Trie | src/Dictionary.scala | Scala | mit | 2,996 |
package com.stulsoft.exercises.logic.and.codes
/** Gray code
* {{{
* An n-bit Gray code is a sequence of n-bit strings constructed according to certain rules. For example,
* n = 1: C(1) = ("0", "1").
* n = 2: C(2) = ("00", "01", "11", "10").
* n = 3: C(3) = ("000", "001", "011", "010", "110", "111", "101", "100").
* Find out the construction rules and write a function to generate Gray codes.
*
* gray(3)
* List[String] = List(000, 001, 011, 010, 110, 111, 101, 100)
* }}}
*
* @see [[https://ru.wikipedia.org/wiki/%D0%9A%D0%BE%D0%B4_%D0%93%D1%80%D0%B5%D1%8F Gray Code]]
* @author Yuriy Stul.
*/
object P49GrayCode extends App {
// test(1)
test(2)
// test(3)
def test(n: Int): Unit = {
println(s"(1) $n -> ${gray(n)}")
}
def gray(n: Int): List[String] =
if (n == 0) List("")
else {
val lower = gray(n - 1)
(lower map {
"0" + _
}) ::: (lower.reverse map {
"1" + _
})
}
}
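
// A quick property check (a sketch, not part of the original exercise):
// consecutive reflected Gray codes differ in exactly one bit, and the
// sequence is cyclic (the last and first codes also differ in one bit).
object P49GrayCodeCheck extends App {
  def hamming(a: String, b: String): Int =
    a.zip(b).count { case (x, y) => x != y }
  val codes = P49GrayCode.gray(4)
  val adjacentOk = codes.zip(codes.tail).forall { case (a, b) => hamming(a, b) == 1 }
  val cyclicOk = hamming(codes.head, codes.last) == 1
  println(s"adjacent: $adjacentOk, cyclic: $cyclicOk") // both true
}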
| ysden123/scala-exercises | src/main/scala/com/stulsoft/exercises/logic/and/codes/P49GrayCode.scala | Scala | mit | 972 |
package is.hail.io
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream}
import is.hail.annotations.{Region, RegionValue}
import is.hail.asm4s.{Code, TypeInfo, Value}
import is.hail.expr.ir.{EmitClassBuilder, EmitCodeBuilder, EmitFunctionBuilder, ExecuteContext}
import is.hail.types.encoded.EType
import is.hail.types.physical.{PCode, PType, PValue, typeToTypeInfo}
import is.hail.types.virtual.Type
import is.hail.rvd.RVDContext
import is.hail.sparkextras.ContextRDD
import is.hail.utils.using
import org.apache.spark.rdd.RDD
trait AbstractTypedCodecSpec extends Spec {
def encodedType: EType
def encodedVirtualType: Type
type StagedEncoderF[T] = (Value[Region], Value[T], Value[OutputBuffer]) => Code[Unit]
type StagedDecoderF[T] = (Value[Region], Value[InputBuffer]) => Code[T]
def buildEncoder(ctx: ExecuteContext, t: PType): (OutputStream) => Encoder
def encodeValue(ctx: ExecuteContext, t: PType, valueAddr: Long): Array[Byte] = {
val makeEnc = buildEncoder(ctx, t)
val baos = new ByteArrayOutputStream()
val enc = makeEnc(baos)
enc.writeRegionValue(valueAddr)
enc.flush()
baos.toByteArray
}
def decodedPType(requestedType: Type): PType
def decodedPType(): PType = encodedType.decodedPType(encodedVirtualType)
def buildDecoder(ctx: ExecuteContext, requestedType: Type): (PType, (InputStream) => Decoder)
def encode(ctx: ExecuteContext, t: PType, offset: Long): Array[Byte] = {
val baos = new ByteArrayOutputStream()
using(buildEncoder(ctx, t)(baos))(_.writeRegionValue(offset))
baos.toByteArray
}
def decode(ctx: ExecuteContext, requestedType: Type, bytes: Array[Byte], region: Region): (PType, Long) = {
val bais = new ByteArrayInputStream(bytes)
val (pt, dec) = buildDecoder(ctx, requestedType)
(pt, dec(bais).readRegionValue(region))
}
def buildCodeInputBuffer(is: Code[InputStream]): Code[InputBuffer]
def buildCodeOutputBuffer(os: Code[OutputStream]): Code[OutputBuffer]
def buildEmitDecoder(requestedType: Type, cb: EmitClassBuilder[_]): (Value[Region], Value[InputBuffer]) => PCode = {
def typedBuilder[T](ti: TypeInfo[T]): (Value[Region], Value[InputBuffer]) => PCode = {
val (ptype, dec) = buildTypedEmitDecoderF[T](requestedType, cb);
{ (r: Value[Region], ib: Value[InputBuffer]) => PCode(ptype, dec(r, ib)) }
}
typedBuilder(typeToTypeInfo(decodedPType(requestedType)))
}
def buildEmitEncoder(t: PType, cb: EmitClassBuilder[_]): (Value[Region], PValue, Value[OutputBuffer]) => Code[Unit] = {
def typedBuilder[T](ti: TypeInfo[T]): (Value[Region], PValue, Value[OutputBuffer]) => Code[Unit] = {
val enc = buildTypedEmitEncoderF[T](t, cb);
{ (r: Value[Region], v: PValue, ob: Value[OutputBuffer]) =>
enc(r, v.value.asInstanceOf[Value[T]], ob)
}
}
typedBuilder(typeToTypeInfo(t))
}
def buildTypedEmitDecoderF[T](requestedType: Type, cb: EmitClassBuilder[_]): (PType, StagedDecoderF[T]) = {
val rt = encodedType.decodedPType(requestedType)
val mb = encodedType.buildDecoderMethod(rt, cb)
(rt, (region: Value[Region], buf: Value[InputBuffer]) => mb.invokeCode[T](region, buf))
}
def buildEmitDecoderF[T](cb: EmitClassBuilder[_]): (PType, StagedDecoderF[T]) =
buildTypedEmitDecoderF(encodedVirtualType, cb)
def buildTypedEmitEncoderF[T](t: PType, cb: EmitClassBuilder[_]): StagedEncoderF[T] = {
val mb = encodedType.buildEncoderMethod(t, cb)
(region: Value[Region], off: Value[T], buf: Value[OutputBuffer]) => mb.invokeCode[Unit](off, buf)
}
// FIXME: is there a better place for this to live?
def decodeRDD(ctx: ExecuteContext, requestedType: Type, bytes: RDD[Array[Byte]]): (PType, ContextRDD[Long]) = {
val (pt, dec) = buildDecoder(ctx, requestedType)
(pt, ContextRDD.weaken(bytes).cmapPartitions { (ctx, it) =>
RegionValue.fromBytes(dec, ctx.region, it)
})
}
override def toString: String = super[Spec].toString
}
| danking/hail | hail/src/main/scala/is/hail/io/CodecSpec.scala | Scala | mit | 3,992 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.calcite
import java.util
import java.nio.charset.Charset
import org.apache.calcite.avatica.util.TimeUnit
import org.apache.calcite.jdbc.JavaTypeFactoryImpl
import org.apache.calcite.rel.`type`._
import org.apache.calcite.sql.SqlIntervalQualifier
import org.apache.calcite.sql.`type`.SqlTypeName._
import org.apache.calcite.sql.`type`.{BasicSqlType, SqlTypeName}
import org.apache.calcite.sql.parser.SqlParserPos
import org.apache.calcite.util.ConversionUtil
import org.apache.flink.api.common.typeinfo.BasicTypeInfo._
import org.apache.flink.api.common.typeinfo._
import org.apache.flink.api.common.typeutils.CompositeType
import org.apache.flink.api.java.typeutils.ValueTypeInfo._
import org.apache.flink.api.java.typeutils.{MapTypeInfo, MultisetTypeInfo, ObjectArrayTypeInfo, RowTypeInfo}
import org.apache.flink.table.api.TableException
import org.apache.flink.table.calcite.FlinkTypeFactory.typeInfoToSqlTypeName
import org.apache.flink.table.plan.schema._
import org.apache.flink.table.typeutils.TypeCheckUtils.isSimple
import org.apache.flink.table.typeutils.{TimeIndicatorTypeInfo, TimeIntervalTypeInfo}
import org.apache.flink.types.Row
import scala.collection.JavaConverters._
import scala.collection.mutable
/**
* Flink specific type factory that represents the interface between Flink's [[TypeInformation]]
* and Calcite's [[RelDataType]].
*/
class FlinkTypeFactory(typeSystem: RelDataTypeSystem) extends JavaTypeFactoryImpl(typeSystem) {
// NOTE: for future data types it might be necessary to
// override more methods of RelDataTypeFactoryImpl
private val seenTypes = mutable.HashMap[(TypeInformation[_], Boolean), RelDataType]()
def createTypeFromTypeInfo(
typeInfo: TypeInformation[_],
isNullable: Boolean)
: RelDataType = {
// we cannot use seenTypes for simple types,
// because time indicators and timestamps would be the same
val relType = if (isSimple(typeInfo)) {
// simple types can be converted to SQL types and vice versa
val sqlType = typeInfoToSqlTypeName(typeInfo)
sqlType match {
case INTERVAL_YEAR_MONTH =>
createSqlIntervalType(
new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, SqlParserPos.ZERO))
case INTERVAL_DAY_SECOND =>
createSqlIntervalType(
new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.SECOND, SqlParserPos.ZERO))
case TIMESTAMP if typeInfo.isInstanceOf[TimeIndicatorTypeInfo] =>
if (typeInfo.asInstanceOf[TimeIndicatorTypeInfo].isEventTime) {
createRowtimeIndicatorType()
} else {
createProctimeIndicatorType()
}
case _ =>
createSqlType(sqlType)
}
} else {
// advanced types require specific RelDataType
// for storing the original TypeInformation
seenTypes.getOrElseUpdate((typeInfo, isNullable), createAdvancedType(typeInfo, isNullable))
}
createTypeWithNullability(relType, isNullable)
}
/**
* Creates an indicator type for processing-time, with properties similar to a SQL timestamp.
*/
def createProctimeIndicatorType(): RelDataType = {
val originalType = createTypeFromTypeInfo(SqlTimeTypeInfo.TIMESTAMP, isNullable = false)
canonize(
new TimeIndicatorRelDataType(
getTypeSystem,
originalType.asInstanceOf[BasicSqlType],
isEventTime = false)
)
}
/**
* Creates an indicator type for event-time, with properties similar to a SQL timestamp.
*/
def createRowtimeIndicatorType(): RelDataType = {
val originalType = createTypeFromTypeInfo(SqlTimeTypeInfo.TIMESTAMP, isNullable = false)
canonize(
new TimeIndicatorRelDataType(
getTypeSystem,
originalType.asInstanceOf[BasicSqlType],
isEventTime = true)
)
}
/**
* Creates types that create custom [[RelDataType]]s that wrap Flink's [[TypeInformation]].
*/
private def createAdvancedType(
typeInfo: TypeInformation[_],
isNullable: Boolean): RelDataType = {
val relType = typeInfo match {
case ct: CompositeType[_] =>
new CompositeRelDataType(ct, isNullable, this)
case pa: PrimitiveArrayTypeInfo[_] =>
new ArrayRelDataType(
pa,
createTypeFromTypeInfo(pa.getComponentType, isNullable = false),
isNullable)
case ba: BasicArrayTypeInfo[_, _] =>
new ArrayRelDataType(
ba,
createTypeFromTypeInfo(ba.getComponentInfo, isNullable = true),
isNullable)
case oa: ObjectArrayTypeInfo[_, _] =>
new ArrayRelDataType(
oa,
createTypeFromTypeInfo(oa.getComponentInfo, isNullable = true),
isNullable)
case mts: MultisetTypeInfo[_] =>
new MultisetRelDataType(
mts,
createTypeFromTypeInfo(mts.getElementTypeInfo, isNullable = true),
isNullable
)
case mp: MapTypeInfo[_, _] =>
new MapRelDataType(
mp,
createTypeFromTypeInfo(mp.getKeyTypeInfo, isNullable = true),
createTypeFromTypeInfo(mp.getValueTypeInfo, isNullable = true),
isNullable)
case ti: TypeInformation[_] =>
new GenericRelDataType(
ti,
isNullable,
getTypeSystem.asInstanceOf[FlinkTypeSystem])
case ti@_ =>
throw new TableException(s"Unsupported type information: $ti")
}
canonize(relType)
}
/**
* Creates a struct type with the input fieldNames and input fieldTypes using FlinkTypeFactory
*
* @param fieldNames field names
* @param fieldTypes field types, every element is Flink's [[TypeInformation]]
* @return a struct type with the input fieldNames, input fieldTypes, and system fields
*/
def buildLogicalRowType(
fieldNames: Seq[String],
fieldTypes: Seq[TypeInformation[_]])
: RelDataType = {
val logicalRowTypeBuilder = builder
val fields = fieldNames.zip(fieldTypes)
fields.foreach(f => {
// time indicators are not nullable
val nullable = !FlinkTypeFactory.isTimeIndicatorType(f._2)
logicalRowTypeBuilder.add(f._1, createTypeFromTypeInfo(f._2, nullable))
})
logicalRowTypeBuilder.build
}
// ----------------------------------------------------------------------------------------------
override def createSqlType(typeName: SqlTypeName, precision: Int): RelDataType = {
// it might happen that inferred VARCHAR types overflow as we set them to Int.MaxValue
// Calcite will limit the length of the VARCHAR type to 65536.
if (typeName == VARCHAR && precision < 0) {
createSqlType(typeName, getTypeSystem.getDefaultPrecision(typeName))
} else {
super.createSqlType(typeName, precision)
}
}
override def createArrayType(elementType: RelDataType, maxCardinality: Long): RelDataType = {
val relType = new ArrayRelDataType(
ObjectArrayTypeInfo.getInfoFor(FlinkTypeFactory.toTypeInfo(elementType)),
elementType,
isNullable = false)
canonize(relType)
}
override def createMapType(keyType: RelDataType, valueType: RelDataType): RelDataType = {
val relType = new MapRelDataType(
new MapTypeInfo(
FlinkTypeFactory.toTypeInfo(keyType),
FlinkTypeFactory.toTypeInfo(valueType)),
keyType,
valueType,
isNullable = false)
this.canonize(relType)
}
override def createMultisetType(elementType: RelDataType, maxCardinality: Long): RelDataType = {
val relType = new MultisetRelDataType(
MultisetTypeInfo.getInfoFor(FlinkTypeFactory.toTypeInfo(elementType)),
elementType,
isNullable = false)
canonize(relType)
}
override def createTypeWithNullability(
relDataType: RelDataType,
isNullable: Boolean): RelDataType = {
// nullability change not necessary
if (relDataType.isNullable == isNullable) {
return canonize(relDataType)
}
// change nullability
val newType = relDataType match {
case composite: CompositeRelDataType =>
new CompositeRelDataType(composite.compositeType, isNullable, this)
case array: ArrayRelDataType =>
new ArrayRelDataType(array.typeInfo, array.getComponentType, isNullable)
case map: MapRelDataType =>
new MapRelDataType(map.typeInfo, map.keyType, map.valueType, isNullable)
case multiSet: MultisetRelDataType =>
new MultisetRelDataType(multiSet.typeInfo, multiSet.getComponentType, isNullable)
case generic: GenericRelDataType =>
new GenericRelDataType(generic.typeInfo, isNullable, typeSystem)
case timeIndicator: TimeIndicatorRelDataType =>
timeIndicator
case _ =>
super.createTypeWithNullability(relDataType, isNullable)
}
canonize(newType)
}
override def leastRestrictive(types: util.List[RelDataType]): RelDataType = {
val type0 = types.get(0)
if (type0.getSqlTypeName != null) {
val resultType = resolveAllIdenticalTypes(types)
if (resultType.isDefined) {
// result type for identical types
return resultType.get
}
}
// fall back to super
super.leastRestrictive(types)
}
private def resolveAllIdenticalTypes(types: util.List[RelDataType]): Option[RelDataType] = {
val allTypes = types.asScala
val head = allTypes.head
// check if all types are the same
if (allTypes.forall(_ == head)) {
// types are the same, check nullability
val nullable = allTypes
.exists(sqlType => sqlType.isNullable || sqlType.getSqlTypeName == SqlTypeName.NULL)
// return type with nullability
Some(createTypeWithNullability(head, nullable))
} else {
// types are not all the same
if (allTypes.exists(_.getSqlTypeName == SqlTypeName.ANY)) {
// one of the types was ANY.
// we cannot generate a common type if it differs from other types.
throw new TableException("Generic ANY types must have a common type information.")
} else {
// cannot resolve a common type for different input types
None
}
}
}
override def getDefaultCharset: Charset = {
Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME)
}
}
object FlinkTypeFactory {
private def typeInfoToSqlTypeName(typeInfo: TypeInformation[_]): SqlTypeName = typeInfo match {
case BOOLEAN_TYPE_INFO => BOOLEAN
case BYTE_TYPE_INFO => TINYINT
case SHORT_TYPE_INFO => SMALLINT
case INT_TYPE_INFO => INTEGER
case LONG_TYPE_INFO => BIGINT
case FLOAT_TYPE_INFO => FLOAT
case DOUBLE_TYPE_INFO => DOUBLE
case STRING_TYPE_INFO => VARCHAR
case BIG_DEC_TYPE_INFO => DECIMAL
// temporal types
case SqlTimeTypeInfo.DATE => DATE
case SqlTimeTypeInfo.TIME => TIME
case SqlTimeTypeInfo.TIMESTAMP => TIMESTAMP
case TimeIntervalTypeInfo.INTERVAL_MONTHS => INTERVAL_YEAR_MONTH
case TimeIntervalTypeInfo.INTERVAL_MILLIS => INTERVAL_DAY_SECOND
case CHAR_TYPE_INFO | CHAR_VALUE_TYPE_INFO =>
throw new TableException("Character type is not supported.")
case _@t =>
throw new TableException(s"Type is not supported: $t")
}
/**
* Converts a Calcite logical record into a Flink type information.
*/
@deprecated("Use the RowSchema class instead because it handles both logical and physical rows.")
def toInternalRowTypeInfo(logicalRowType: RelDataType): TypeInformation[Row] = {
// convert to type information
val logicalFieldTypes = logicalRowType.getFieldList.asScala map { relDataType =>
FlinkTypeFactory.toTypeInfo(relDataType.getType)
}
// field names
val logicalFieldNames = logicalRowType.getFieldNames.asScala
new RowTypeInfo(logicalFieldTypes.toArray, logicalFieldNames.toArray)
}
def isProctimeIndicatorType(relDataType: RelDataType): Boolean = relDataType match {
case ti: TimeIndicatorRelDataType if !ti.isEventTime => true
case _ => false
}
def isProctimeIndicatorType(typeInfo: TypeInformation[_]): Boolean = typeInfo match {
case ti: TimeIndicatorTypeInfo if !ti.isEventTime => true
case _ => false
}
def isRowtimeIndicatorType(relDataType: RelDataType): Boolean = relDataType match {
case ti: TimeIndicatorRelDataType if ti.isEventTime => true
case _ => false
}
def isRowtimeIndicatorType(typeInfo: TypeInformation[_]): Boolean = typeInfo match {
case ti: TimeIndicatorTypeInfo if ti.isEventTime => true
case _ => false
}
def isTimeIndicatorType(relDataType: RelDataType): Boolean = relDataType match {
case ti: TimeIndicatorRelDataType => true
case _ => false
}
def isTimeIndicatorType(typeInfo: TypeInformation[_]): Boolean = typeInfo match {
case ti: TimeIndicatorTypeInfo => true
case _ => false
}
def toTypeInfo(relDataType: RelDataType): TypeInformation[_] = relDataType.getSqlTypeName match {
case BOOLEAN => BOOLEAN_TYPE_INFO
case TINYINT => BYTE_TYPE_INFO
case SMALLINT => SHORT_TYPE_INFO
case INTEGER => INT_TYPE_INFO
case BIGINT => LONG_TYPE_INFO
case FLOAT => FLOAT_TYPE_INFO
case DOUBLE => DOUBLE_TYPE_INFO
case VARCHAR | CHAR => STRING_TYPE_INFO
case DECIMAL => BIG_DEC_TYPE_INFO
// time indicators
case TIMESTAMP if relDataType.isInstanceOf[TimeIndicatorRelDataType] =>
val indicator = relDataType.asInstanceOf[TimeIndicatorRelDataType]
if (indicator.isEventTime) {
TimeIndicatorTypeInfo.ROWTIME_INDICATOR
} else {
TimeIndicatorTypeInfo.PROCTIME_INDICATOR
}
// temporal types
case DATE => SqlTimeTypeInfo.DATE
case TIME => SqlTimeTypeInfo.TIME
case TIMESTAMP => SqlTimeTypeInfo.TIMESTAMP
case typeName if YEAR_INTERVAL_TYPES.contains(typeName) => TimeIntervalTypeInfo.INTERVAL_MONTHS
case typeName if DAY_INTERVAL_TYPES.contains(typeName) => TimeIntervalTypeInfo.INTERVAL_MILLIS
case NULL =>
throw new TableException(
"Type NULL is not supported. Null values must have a supported type.")
// symbol for special flags e.g. TRIM's BOTH, LEADING, TRAILING
// are represented as integer
case SYMBOL => INT_TYPE_INFO
// extract encapsulated TypeInformation
case ANY if relDataType.isInstanceOf[GenericRelDataType] =>
val genericRelDataType = relDataType.asInstanceOf[GenericRelDataType]
genericRelDataType.typeInfo
case ROW if relDataType.isInstanceOf[CompositeRelDataType] =>
val compositeRelDataType = relDataType.asInstanceOf[CompositeRelDataType]
compositeRelDataType.compositeType
case ROW if relDataType.isInstanceOf[RelRecordType] =>
val relRecordType = relDataType.asInstanceOf[RelRecordType]
new RowSchema(relRecordType).typeInfo
// CURSOR for UDTF case, whose type info will never be used, just a placeholder
case CURSOR => new NothingTypeInfo
case ARRAY if relDataType.isInstanceOf[ArrayRelDataType] =>
val arrayRelDataType = relDataType.asInstanceOf[ArrayRelDataType]
arrayRelDataType.typeInfo
case MAP if relDataType.isInstanceOf[MapRelDataType] =>
val mapRelDataType = relDataType.asInstanceOf[MapRelDataType]
mapRelDataType.typeInfo
case MULTISET if relDataType.isInstanceOf[MultisetRelDataType] =>
val multisetRelDataType = relDataType.asInstanceOf[MultisetRelDataType]
multisetRelDataType.typeInfo
case _@t =>
throw new TableException(s"Type is not supported: $t")
}
}
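
// A minimal usage sketch (an assumption, not part of the original file),
// relying on FlinkTypeSystem, the RelDataTypeSystem implementation that lives
// alongside this factory in the same package:
object FlinkTypeFactorySketch {
  def main(args: Array[String]): Unit = {
    val factory = new FlinkTypeFactory(new FlinkTypeSystem)
    val rowType = factory.buildLogicalRowType(
      Seq("id", "name"),
      Seq(LONG_TYPE_INFO, STRING_TYPE_INFO))
    println(rowType.getFullTypeString) // e.g. RecordType(BIGINT id, VARCHAR name)
  }
}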
| ueshin/apache-flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/calcite/FlinkTypeFactory.scala | Scala | apache-2.0 | 16,397 |
//
// Codex - a multi-language code indexer and grokker
// http://github.com/samskivert/codex
package codex.extract
import java.io.{File, InputStream, InputStreamReader, Reader, StreamTokenizer}
import scala.collection.mutable.ArrayBuffer
import codex._
/** Extracts CLR type information from DLLs. */
class MonodisExtractor extends Extractor {
import Clike._
override def process (viz :Visitor, isTest :Boolean, file :File) {
// we grind the monodis output on another thread, so we need to build up a giant list of
// callback actions and then run all of those on our current thread, assuming everything is
// groovy; though the callback actions list is large, it's not nearly as large as dumping the
// entire monodis output into one big buffer and processing it on the calling thread
val thunks = ArrayBuffer[() => Unit]()
def onEnter (name :String, kind :String, off :Int) = {
thunks += { () => viz.onEnter(name, kind, off) }
}
def onExit (name :String) = {
thunks += { () => viz.onExit(name) }
}
val path = file.getAbsolutePath
viz.onCompUnit(path, false)
Monodis.invoke(file.getParentFile, path) { mdis :Reader =>
var prevtok :String = null
var curdef :String = null
var blocks :List[String] = Nil
var public = false
val tok = toker(mdis)
while (tok.nextToken() != StreamTokenizer.TT_EOF) {
if (tok.ttype == '{') {
// note that we entered a block for our most recent def (which may be null)
blocks = curdef :: blocks
// and clear out curdef so that nested blocks for this def are ignored
curdef = null
} else if (tok.ttype == '}') {
if (blocks.isEmpty) {
log.warning("Mismatched close brace", "line", tok.lineno-1)
} else {
// if this block was associated with a def, tell the viz we're exiting it
if (blocks.head != null) onExit(blocks.head)
blocks = blocks.tail
}
} else if (tok.ttype == StreamTokenizer.TT_WORD || tok.ttype == '\'') {
if (prevtok == "namespace") {
onEnter(tok.sval, prevtok, tok.lineno-1)
curdef = tok.sval
} else if (tok.sval == "class") {
// we'll eventually see a class, so reset our attributes
public = false
} else if (tok.sval == "public") {
public = true
} else if (tok.sval == "extends") {
if (public) {
onEnter(prevtok, "class", tok.lineno-1)
curdef = prevtok
} // else log.info(s"Skipping private class $prevtok")
}
prevtok = tok.sval
}
}
}
thunks foreach { _.apply() }
}
override def process (viz :Visitor, isTest :Boolean, unitName :String, reader :Reader) {
throw new UnsupportedOperationException
}
private def parse (viz :Visitor, isTest :Boolean)(in :Reader) :Unit = try {
} finally {
in.close()
}
}
| samskivert/codex | src/main/scala/codex/extract/MonodisExtractor.scala | Scala | bsd-3-clause | 3,015 |
package zamblauskas
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers
import zamblauskas.csv.parser.Parser._
import zamblauskas.csv.parser._
class ReadsMacroSpec extends AnyFunSpec with Matchers {
it("generates reads for single param case class") {
case class SingleParam(param: String)
val csv =
"""
|param
|value
""".stripMargin
parse[SingleParam](csv) shouldBe Right(Seq(SingleParam("value")))
}
it("generates reads for multi param case class") {
case class MultiParam(param1: String, param2: String)
val csv =
"""
|param1,param2
|value1,value2
""".stripMargin
parse[MultiParam](csv) shouldBe Right(Seq(MultiParam("value1", "value2")))
}
it("does not generate reads for empty param case class") {
case class EmptyParam()
"""parse[EmptyParam]("")""" shouldNot compile
}
}
| zamblauskas/scala-csv-parser | src/test/scala/zamblauskas/ReadsMacroSpec.scala | Scala | mit | 918 |
class x0[x1] {
def x2: x1
}
trait x3 extends x0 { // error
class x2
var x2 = 0 // error
var x4 = x5 x2 // error
}
| som-snytt/dotty | tests/neg/parser-stability-7.scala | Scala | apache-2.0 | 118 |
package org.tuubes.core.network
import com.electronwill.niol.NiolOutput
import com.electronwill.niol.network.tcp.ClientAttach
/** A network packet that can be written */
trait Packet {
/** The packet's id in its protocol */
def id: Int
/** Writes this packet to the given output */
def write(out: NiolOutput): Unit
}
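
// A hypothetical Packet implementation (the id value is made up, and this
// assumes NiolOutput exposes a putLong-style primitive writer):
final class KeepAlivePacket(timestamp: Long) extends Packet {
  def id: Int = 0x00 // hypothetical protocol id
  def write(out: NiolOutput): Unit = out.putLong(timestamp)
}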
| mcphoton/Photon-Server | core/src/main/scala/org/tuubes/core/network/Packet.scala | Scala | lgpl-3.0 | 328 |
package models.db
import io.rampant.vulgar.db.Pools
import models.User
import play.api.db.slick.Config.driver.simple._
import scala.concurrent.Future
class Users(tag: Tag) extends Table[User](tag, "USERS") {
def id = column[Long]("id", O.PrimaryKey, O.AutoInc)
def displayName = column[String]("display_name", O.NotNull)
def * = (id.?, displayName) <>((User.apply _).tupled, User.unapply)
}
object Users {
val query = TableQuery[Users]
def list: Future[Seq[User]] = Pools.readPool execute { implicit s => query.list}
def find(userId: Long): Future[Option[User]] = Pools.readPool execute { implicit s =>
query.filter(_.id === userId).firstOption
}
def insert(user: User) = Pools.writePool execute { implicit s =>
user.copy(id = Some((query returning query.map(_.id)) += user))
}
}
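
// The User model is imported from models; the `*` projection above implies a
// shape like the following (an assumption for illustration; named UserShape
// here to avoid clashing with the imported models.User):
case class UserShape(id: Option[Long], displayName: String)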
| duaiwe/vulgar | app/models/db/Users.scala | Scala | mit | 803 |
package be.objectify.deadbolt.scala.views.di.dynamicTest
import be.objectify.deadbolt.scala.{AuthenticatedRequest, DeadboltHandler, DynamicResourceHandler}
import be.objectify.deadbolt.scala.views.di.{AbstractViewTest, drh}
import be.objectify.deadbolt.scala.views.html.di.dynamic
import be.objectify.deadbolt.scala.views.html.di.dynamicTest.dynamicContent
import play.api.test.{FakeRequest, Helpers, WithApplication}
/**
* @author Steve Chaloner (steve@objectify.be)
*/
class DynamicTest extends AbstractViewTest {
val drhAllow: Option[DynamicResourceHandler] = Some(drh(allowed = true, check = false))
val drhDeny: Option[DynamicResourceHandler] = Some(drh(allowed = false, check = false))
"when allowed by the dynamic handler, the view" should {
"show constrained content" in new WithApplication(testApp(handler(drh = drhAllow))) {
val html = constraint(handler(drh = drhAllow)).apply(name = "the name of this constraint", meta = Some("some additional info"))(new AuthenticatedRequest(FakeRequest(), None))
private val content: String = Helpers.contentAsString(html)
content must contain("This is before the constraint.")
content must contain("This is protected by the constraint.")
content must contain("This is after the constraint.")
}
}
"when denied by the dynamic handler, the view" should {
"hide constrained content" in new WithApplication(testApp(handler(drh = drhDeny))) {
val html = constraint(handler(drh = drhDeny)).apply(name = "the name of this constraint", meta = Some("some additional info"))(new AuthenticatedRequest(FakeRequest(), None))
private val content: String = Helpers.contentAsString(html)
content must contain("This is before the constraint.")
content must not contain("This is protected by the constraint.")
content must contain("This is after the constraint.")
}
}
def constraint(handler: DeadboltHandler) = new dynamicContent(new dynamic(viewSupport(), handlerCache(handler)))
}
| schaloner/deadbolt-2-scala | code/test/be/objectify/deadbolt/scala/views/di/dynamicTest/DynamicTest.scala | Scala | apache-2.0 | 2,032 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations.calculations
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.computations.{CP117, CP282, CPQ17}
class AdjustedTradingProfitForPeriodCalculatorSpec extends WordSpec with Matchers {
"AdjustedTradingProfitForPeriodCalculator" should {
"Return None if CPQ17 Trading losses not used from previous accounting periods is not defined" in new AdjustedTradingProfitForPeriodCalculator {
val result = adjustedTradingProfitForPeriodCalculation(cp117 = CP117(100), cpq17 = CPQ17(None))
result shouldBe CP282(None)
}
"Return None if CPQ17 Trading losses not used from previous accounting periods is false" in new AdjustedTradingProfitForPeriodCalculator {
val result = adjustedTradingProfitForPeriodCalculation(cp117 = CP117(100), cpq17 = CPQ17(Some(false)))
result shouldBe CP282(None)
}
"Return Some of profit or loss result if CPQ17 Trading losses not used from previous accounting periods is true" in new AdjustedTradingProfitForPeriodCalculator {
val result = adjustedTradingProfitForPeriodCalculation(cp117 = CP117(100), cpq17 = CPQ17(Some(true)))
result shouldBe CP282(Some(100))
}
}
}
|
liquidarmour/ct-calculations
|
src/test/scala/uk/gov/hmrc/ct/computations/calculations/AdjustedTradingProfitForPeriodCalculatorSpec.scala
|
Scala
|
apache-2.0
| 1,805
|
/*
* SPDX-License-Identifier: Apache-2.0
* Copyright 2016-2020 Daniel Urban and contributors listed in NOTICE.txt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dev.tauri.choam
abstract class TreiberStackSpec extends BaseSpec {
"TreiberStack" should "include the elements passed to its constructor" in {
new TreiberStack[Int]().unsafeToList should === (Nil)
new TreiberStack[Int](1 :: 2 :: 3 :: Nil).unsafeToList should === (3 :: 2 :: 1 :: Nil)
}
}
class TreiberStackSpecNaiveKCAS
extends TreiberStackSpec
with SpecNaiveKCAS
class TreiberStackSpecCASN
extends TreiberStackSpec
with SpecCASN
class TreiberStackSpecMCAS
extends TreiberStackSpec
with SpecMCAS
|
durban/exp-reagents
|
core/src/test/scala/dev/tauri/choam/TreiberStackSpec.scala
|
Scala
|
apache-2.0
| 1,212
|
package com.geishatokyo.smartmerger
import com.geishatokyo.smartmerger.injection.{Injection, InjectionRule, Injector}
import com.geishatokyo.smartmerger.parse.{MarkerParser, BlockParser}
import java.io.{FileNotFoundException, File}
import com.geishatokyo.codegen.util.{Logger, FileUtil}
/**
* Created by takeshita on 2014/06/05.
*/
object Merger {
var mergeRule = new InjectionRule
/**
* //@insert[hoge]
*
* //@replace[name]
* //@end
*
* //@hold[fuga]
* //@end
*
   * Uses tags of the form shown above.
* @return
*/
def forScala = {
val parser = MarkerParser.doubleSlashParser()
val merger = new Injector(mergeRule)
Merger(parser,merger)
}
/**
* //@insert[hoge]
*
* //@replace[name]
* //@end
*
* //@hold[fuga]
* //@end
*
   * Uses tags of the form shown above.
* @return
*/
def forCSharp = {
val parser = MarkerParser.doubleSlashParser()
val merger = new Injector(mergeRule)
Merger(parser,merger)
}
/**
* //@insert[hoge]
*
* //@replace[name]
* //@end
*
* //@hold[fuga]
* //@end
*
   * Uses tags of the form shown above.
* @return
*/
def forJava = {
val parser = MarkerParser.doubleSlashParser()
val merger = new Injector(mergeRule)
Merger(parser,merger)
}
/**
   * --@insert[hoge]
   *
   * --@replace[name]
   * --@end
   *
   * --@hold[fuga]
   * --@end
   *
   * Uses tags of the form shown above (double-hyphen comment markers).
* @return
*/
def forMySQL = {
val parser = MarkerParser.doubleHyphenParser()
val merger = new Injector(mergeRule)
Merger(parser,merger)
}
/**
   * ##@insert[hoge]
   *
   * ##@replace[name]
   * ##@end
   *
   * ##@hold[fuga]
   * ##@end
   *
   * Uses tags of the form shown above (double-sharp comment markers).
* @return
*/
def forRuby = {
val parser = MarkerParser.doubleSharpParser()
val merger = new Injector(mergeRule)
Merger(parser,merger)
}
/**
* <!--@insert[hoge]-->
*
* <!--@replace[name]-->
* <!--@end-->
*
* <!--@hold[fuga]-->
* <!--@end-->
*
   * Uses tags of the form shown above.
* @return
*/
def forXML = {
val parser = MarkerParser.xmlParser()
val merger = new Injector(mergeRule)
Merger(parser,merger)
}
}
/**
 * Performs the end-to-end merge process.
* @param parser
* @param merger
*/
case class Merger(parser : MarkerParser,merger : Injector){
import com.geishatokyo.codegen.util.RichFile
/**
 * Merges two files.
* @param baseFilePath
* @param mergeFilePath
* @return
*/
def merge(baseFilePath : String,mergeFilePath : String) : String = {
val base = RichFile.fromPath(baseFilePath)
val merge = RichFile.fromPath(mergeFilePath)
if(!base.exists()){
merge.copyTo(base)
return merge.readAsString()
}
if(!merge.exists){
throw new Exception(s"Merge file:${base.getAbsolutePath} not found")
}
val baseData = parser.parse(base.readAsString())
val mergeData = parser.parse(merge.readAsString())
val merged = baseData.topLevel match{
case TopLevel.Hold => {
merger.inject(mergeData,baseData)
}
case TopLevel.Replace => {
merger.inject(baseData,mergeData)
}
case TopLevel.None => {
merge.copyTo(base)
mergeData
}
}
merged.rawString
}
/**
 * Replaces the contents of the replace blocks in the given file.
* @param filePath
* @param codeToReplace
* @return
*/
def replaceMerge( filePath : String, codeToReplace : List[Injection]) : String = {
replaceMerge(new File(filePath),codeToReplace)
}
/**
 * Replaces the contents of the replace blocks in the given file.
* @param _file
* @param codeToReplace
* @return
*/
def replaceMerge( _file : File, codeToReplace : List[Injection]) : String = {
val file = RichFile.fromFile(_file)
var before = ""
val parsedData = if(file.exists()){
before = file.readAsString()
parser.parse(before)
}else{
throw new FileNotFoundException(s"Target file:${_file.getAbsoluteFile} not found")
}
if(parsedData.topLevel == TopLevel.Hold){
throw new Exception(s"Top level of ${file.getName} is not replace")
}
    val s = merger.inject(parsedData,codeToReplace).rawString
if(s != before) {
Logger.change("Merge file: " + file.getAbsolutePath)
file.write(s)
s
}else{
Logger.notChange("File:" + file.getAbsolutePath + " is not changed")
s
}
}
def holdMerge( filePath : String, generatedCode : String) : String = {
holdMerge(new File(filePath),generatedCode)
}
/**
 * Keeps the hold blocks in the given file and replaces everything else.
* @param _file
* @param generatedCode
* @return
*/
def holdMerge( _file : File, generatedCode : String) : String = {
val file = RichFile.fromFile(_file)
if(file.exists()){
val before = file.readAsString()
val base = parser.parse(before)
val toMerge = parser.parse(generatedCode)
if(base.topLevel == TopLevel.Replace){
throw new Exception(s"Top level of ${file.getName} is not hold")
}
val merged = merger.inject(toMerge,base)
if(merged.rawString != before) {
Logger.change("Merge file: " + file.getAbsolutePath)
file.write(merged.rawString)
merged.rawString
}else{
Logger.notChange("File:" + file.getAbsolutePath + " is not changed")
before
}
}else{
Logger.change("Create new file: " + file.getAbsolutePath)
file.write(generatedCode)
generatedCode
}
}
}
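// --- Usage sketch (not part of the library; paths are hypothetical) ---
// A minimal example, assuming both files carry the //@replace / //@hold
// markers documented above:
object MergerUsageExample {
  def main(args: Array[String]): Unit = {
    // Merge freshly generated code into a possibly hand-edited base file.
    val merged: String = Merger.forScala.merge("src/Model.scala", "gen/Model.scala")
    println(merged)
  }
}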
|
geishatokyo/smart-merger
|
src/main/scala/com/geishatokyo/smartmerger/Merger.scala
|
Scala
|
mit
| 5,641
|
// https://leetcode.com/problems/range-sum-query-immutable
class NumArray(numbers: Array[Int]) {
val tree = Tree build numbers
def sumRange(start: Int, end: Int): Int = tree query Range(start, end + 1)
}
sealed trait Tree {
/** @param range [x, y) */
def query(range: Range): Int
}
case class Branch(indices: Range, value: Int, left: Tree, right: Tree) extends Tree {
import Tree.RangeOps
def query(range: Range): Int =
if (range encloses indices) value else tryQueryLeft(range) + tryQueryRight(range)
def tryQueryLeft(range: Range) = if (indices.startHalf disjoints range) 0 else left query range
def tryQueryRight(range: Range) = if (indices.endHalf disjoints range) 0 else right query range
}
case object Empty extends Tree {
def query(range: Range) = 0
}
object Tree {
def build(numbers: Array[Int]): Tree =
if (numbers.isEmpty) Empty else build(numbers, numbers.indices)
def build(numbers: Array[Int], indices: Range): Branch =
if (indices.isSingle) Branch(indices, numbers(indices.start), Empty, Empty)
else {
val left = build(numbers, indices.startHalf)
val right = build(numbers, indices.endHalf)
val value = left.value + right.value
Branch(indices, value, left, right)
}
implicit final class RangeOps(private val range: Range) extends AnyVal {
def isSingle = range.end - range.start <= 1
def startHalf = range.start until range.middle
def endHalf = range.middle until range.end
def middle = (range.start + range.end) / 2
def intersects(rhs: Range) = !(range disjoints rhs)
def disjoints(rhs: Range) = rhs.end <= range.start || range.end <= rhs.start
def encloses(rhs: Range) = range.start <= rhs.start && rhs.end <= range.end
}
}
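// Usage sketch: a quick self-check against the LeetCode 303 sample input.
// sumRange is inclusive on both ends; internally it queries the half-open
// range [start, end + 1) on the segment tree built above.
object NumArrayExample extends App {
  val na = new NumArray(Array(-2, 0, 3, -5, 2, -1))
  assert(na.sumRange(0, 2) == 1)   // -2 + 0 + 3
  assert(na.sumRange(2, 5) == -1)  // 3 + (-5) + 2 + (-1)
  assert(na.sumRange(0, 5) == -3)  // the whole array
}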
|
airt/codegames
|
leetcode/303-range-sum-query-immutable.scala
|
Scala
|
mit
| 1,758
|
package Client
import java.security.PublicKey
import javax.crypto.BadPaddingException
import Client.ClientType.ClientType
import Client.Messages._
import Objects.ObjectTypes.ObjectType
import Objects.ObjectTypes.ObjectType.ObjectType
import Utils.Resources._
import Objects.ObjectJsonSupport._
import Objects.ObjectTypes.PostType._
import Objects._
import Utils.{Resources, Base64Util, Crypto, Constants}
import akka.actor.{Actor, ActorLogging, ActorRef}
import com.typesafe.config.ConfigFactory
import org.joda.time.DateTime
import spray.client.pipelining
import spray.client.pipelining._
import spray.http.HttpResponse
import spray.http.StatusCodes._
import spray.json.JsonParser.ParsingException
import spray.json._
import scala.collection.mutable
import scala.concurrent.duration._
import scala.util.{Failure, Random, Success, Try}
class ClientActor(isPage: Boolean = false, clientType: ClientType) extends Actor with ActorLogging {
val myPages = mutable.ArrayBuffer[Page]()
val myFriends = mutable.ArrayBuffer[Int]()
val myRealFriends = mutable.HashMap[ActorRef, Int]()
var myFriendsPublicKeys = Map[String, PublicKey]()
val waitForIdFriends = mutable.Set[ActorRef]()
val returnHandshake = mutable.Set[ActorRef]()
var me: User = null
var mePage: Page = null
var myBaseObj: BaseObject = null
var numPosts = 0
var numAlbums = 0
var numPictures = 0
private val keyPair = Crypto.generateRSAKeys()
var serverPublicKey: PublicKey = null
val (putPercent, getPercent, friendPercent, updatePercent) = clientType match {
case ClientType.Active => (80, 50, 90, 50)
case ClientType.Passive => (20, 90, 80, 5)
case ClientType.ContentCreator => (70, 20, 10, 40)
}
val config = ConfigFactory.load()
lazy val servicePort = Try(config.getInt("service.port")).getOrElse(8080)
lazy val serviceHost = Try(config.getString("service.host")).getOrElse("localhost")
import context.dispatcher
def random(n: Int) = Random.nextInt(n)
def durationSeconds(n: Int) = n.seconds
def randomDuration(n: Int) = durationSeconds(random(n))
def receive = {
// Create a user profile or page for self
case true => registerMyself()
case false if !myBaseObj.deleted && !isPage => updateFriendPublicKeys()
case Activity if !myBaseObj.deleted => activity()
case MakePost(postType, attachmentID) =>
val newPost = Objects.Post(new DateTime().toString(), statuses(Random.nextInt(statuses.length)), postType, attachmentID)
put(createSecureObjectMessage(newPost, myBaseObj.id, myBaseObj.id, ObjectType.post, myFriendsPublicKeys), "create", "post")
case MakePicturePost =>
val newPicture = Picture("filename", "")
put(createSecureObjectMessage(newPicture, myBaseObj.id, myBaseObj.id, ObjectType.picture, myFriendsPublicKeys), "create", "picturepost")
case MakePicture(albumID) =>
val newPicture = Picture("filename", "")
put(createSecureObjectMessage(newPicture, myBaseObj.id, myBaseObj.id, ObjectType.picture, myFriendsPublicKeys), "create", "picture")
case AddPictureToAlbum =>
case MakeAlbum =>
val newAlbum = Album(new DateTime().toString, new DateTime().toString, -1, "album desc")
put(createSecureObjectMessage(newAlbum, myBaseObj.id, myBaseObj.id, ObjectType.album, myFriendsPublicKeys), "create", "album")
case UpdatePost(postType, attachment) =>
val newPost = Objects.Post(new DateTime().toString, Resources.getRandomStatus(), postType, attachment)
post(createSecureObjectMessage(newPost, myBaseObj.id, myBaseObj.id, ObjectType.post, myFriendsPublicKeys, random(numPosts) + 2), "update", "post")
case UpdatePicture =>
val newPicture = Picture("filename", "")
post(createSecureObjectMessage(newPicture, myBaseObj.id, myBaseObj.id, ObjectType.picture, myFriendsPublicKeys, random(numPictures) + 2), "update", "picture")
case UpdateAlbum(cover) =>
val newAlbum = Album(new DateTime().toString, new DateTime().toString, cover, "album desc")
post(createSecureObjectMessage(newAlbum, myBaseObj.id, myBaseObj.id, ObjectType.album, myFriendsPublicKeys, random(numAlbums) + 2), "update", "album")
// From matchmaker
case aNewFriend: ActorRef =>
if (myBaseObj == null) {
waitForIdFriends.add(aNewFriend)
} else {
// log.info(myBaseObj.id + " just met someone")
aNewFriend ! Handshake(Constants.trueBool, myBaseObj.id)
}
// From new friend
case Handshake(needResponse, id) =>
if (myBaseObj == null) {
returnHandshake.add(sender)
} else {
myRealFriends.put(sender, id)
if (needResponse) {
self ! RequestFriend(id)
sender ! Handshake(Constants.falseBool, myBaseObj.id)
}
}
case RequestFriend(id) =>
val secureMessage = createSecureRequestMessage(myBaseObj.id, id, ObjectType.user, id)
post(secureMessage, "addfriend")
case AcceptFriendRequests =>
val secureMessage = createSecureRequestMessage(myBaseObj.id, myBaseObj.id, ObjectType.user, -1)
get(secureMessage, "friend_requests")
case PutMsg(response, reaction) => handlePutResponse(response, reaction)
case GetMsg(response, reaction) => handleGetResponse(response, reaction)
case PostMsg(response, reaction) =>
try {
reaction match {
case "addfriend" =>
log.info(response ~> unmarshal[String])
case "acceptfriendrequest" =>
log.info(response ~> unmarshal[String])
case _ => //log.info(s"Updated $reaction")
}
} catch {
case e: Throwable => log.error(e, "Error for response {}", response)
}
case DebugMsg => get(null, "debug")
case DeleteMsg(response, reaction) =>
reaction match {
case "user" | "page" => myBaseObj.deleted = true
case _ => //log.info(s"${myBaseObj.id} - $reaction")
}
}
def registerMyself() = {
val pipeline = sendReceive
val future = pipeline {
pipelining.Get(s"http://$serviceHost:$servicePort/server_key")
}
future onComplete {
case Success(response) =>
val returnBytes = response ~> unmarshal[Array[Byte]]
serverPublicKey = Crypto.constructRSAPublicKeyFromBytes(returnBytes)
val future2 = pipeline {
pipelining.Put(s"http://$serviceHost:$servicePort/register", keyPair.getPublic.getEncoded)
}
future2 onComplete {
case Success(response) =>
val secureMsg = response ~> unmarshal[SecureMessage]
val requestKeyBytes = Crypto.decryptRSA(secureMsg.encryptedKey, keyPair.getPrivate)
if (Crypto.verifySign(serverPublicKey, secureMsg.signature, requestKeyBytes)) {
val requestKey = Crypto.constructAESKeyFromBytes(requestKeyBytes)
val requestJson = Crypto.decryptAES(secureMsg.message, requestKey, Constants.IV)
myBaseObj = BaseObject(id = Base64Util.decodeString(requestJson).toInt)
if (isPage) {
mePage = Page("about", Resources.getRandomPageCategory(), -1, keyPair.getPublic.getEncoded)
val secureObject = Crypto.constructSecureObject(myBaseObj, myBaseObj.id, myBaseObj.id, ObjectType.page.id, mePage.toJson.compactPrint, Map(myBaseObj.id.toString -> keyPair.getPublic))
val secureMessage = Crypto.constructSecureMessage(myBaseObj.id, secureObject.toJson.compactPrint, serverPublicKey, keyPair.getPrivate)
val future3 = pipeline {
pipelining.Put(s"http://$serviceHost:$servicePort/create", secureMessage)
}
future3 onComplete {
case Success(response) =>
// log.info(response.toString)
self ! Activity()
case Failure(error) => log.error(error, s"Couldn't put Page")
}
}
else {
val fullName = Resources.names(Random.nextInt(Resources.names.length)).split(' ')
me = User("about", Resources.randomBirthday(), 'M', fullName(0), fullName(1), keyPair.getPublic.getEncoded)
val secureObject = Crypto.constructSecureObject(myBaseObj, myBaseObj.id, myBaseObj.id, ObjectType.user.id, me.toJson.compactPrint, Map(myBaseObj.id.toString -> keyPair.getPublic))
val secureMessage = Crypto.constructSecureMessage(myBaseObj.id, secureObject.toJson.compactPrint, serverPublicKey, keyPair.getPrivate)
val future3 = pipeline {
pipelining.Put(s"http://$serviceHost:$servicePort/create", secureMessage)
}
future3 onComplete {
case Success(response) =>
// log.info(response.toString)
self ! false
case Failure(error) => log.error(error, s"Couldn't put User")
}
}
}
case Failure(error) => log.error(error, s"Couldn't register")
}
case Failure(error) => log.error(error, s"Couldn't get server_key")
}
}
def updateFriendPublicKeys() = {
val pipeline = sendReceive
val secureMsg = createSecureRequestMessage(myBaseObj.id, myBaseObj.id, ObjectType.user, -1)
val future = pipeline {
pipelining.Get(s"http://$serviceHost:$servicePort/friends_public_keys", secureMsg)
}
future onComplete {
case Success(response) =>
val requestIds = decryptSecureRequestMessage(response ~> unmarshal[SecureMessage], ObjectType.userKeyMap)
myFriendsPublicKeys = requestIds.asInstanceOf[Map[String, Array[Byte]]].map { case (id, bytes) => (id, Crypto.constructRSAPublicKeyFromBytes(bytes)) } + (myBaseObj.id.toString -> keyPair.getPublic)
self ! Activity
case Failure(error) => log.error(error, s"Couldn't get pending requests")
}
}
def putRoute(route: String, inputReaction: String = ""): Unit = {
val reaction = if (inputReaction.nonEmpty) inputReaction else route
val pipeline = sendReceive
val future = pipeline {
pipelining.Put(s"http://$serviceHost:$servicePort/$route")
}
future onComplete {
case Success(response) => self ! PutMsg(response, reaction)
case Failure(error) => log.error(error, s"Couldn't create using $route")
}
}
def put(json: SecureMessage, route: String, inputReaction: String = ""): Unit = {
val reaction = if (inputReaction.nonEmpty) inputReaction else route
val pipeline = sendReceive
val future = pipeline {
pipelining.Put(s"http://$serviceHost:$servicePort/$route", json)
}
future onComplete {
case Success(response) => self ! PutMsg(response, reaction)
case Failure(error) => log.error(error, s"Couldn't create $json using $route")
}
}
def get(json: SecureMessage, route: String, inputReaction: String = "") = {
val reaction = if (inputReaction.nonEmpty) inputReaction else route
val pipeline = sendReceive
val future = pipeline {
pipelining.Get(s"http://$serviceHost:$servicePort/$route", json)
}
future onComplete {
case Success(response) => self ! GetMsg(response, reaction)
case Failure(error) => log.error(error, s"Couldn't get $route")
}
}
def post(json: SecureMessage, route: String, inputReaction: String = "") = {
val reaction = if (inputReaction.nonEmpty) inputReaction else route
val pipeline = sendReceive
val future = pipeline {
pipelining.Post(s"http://$serviceHost:$servicePort/$route", json)
}
future onComplete {
case Success(response) => self ! PostMsg(response, reaction)
case Failure(error) => log.error(error, s"Couldn't post $json using $route")
}
}
def delete(json: SecureMessage, route: String, inputReaction: String = "") = {
val reaction = if (inputReaction.nonEmpty) inputReaction else route
val pipeline = sendReceive
val future = pipeline {
pipelining.Delete(s"http://$serviceHost:$servicePort/$route", json)
}
future onComplete {
case Success(response) => self ! DeleteMsg(response, reaction)
case Failure(error) => log.error(error, s"Couldn't post $json using $route")
}
}
def activity() = {
// log.info(myBaseObj.id + " starting activity")
context.system.scheduler.scheduleOnce(randomDuration(10), self, AcceptFriendRequests)
if (isPage) {
val secureMsg = createSecureRequestMessage(myBaseObj.id, myBaseObj.id, ObjectType.page, myBaseObj.id)
get(secureMsg, "request", "page")
}
else {
val secureMsg = createSecureRequestMessage(myBaseObj.id, myBaseObj.id, ObjectType.user, myBaseObj.id)
get(secureMsg, "request", "user")
}
if (random(1001) <= 5) {
random(3) match {
case 0 =>
if (numPosts > 0) {
val delMsg = createSecureRequestMessage(myBaseObj.id, myBaseObj.id, ObjectType.post, random(numPosts) + 2)
delete(delMsg, "delete", "postdelete")
}
case 1 =>
if (numAlbums > 0) {
val delMsg = createSecureRequestMessage(myBaseObj.id, myBaseObj.id, ObjectType.album, random(numAlbums) + 2)
delete(delMsg, "delete", "albumdelete")
}
case 2 =>
if (numPictures > 0) {
val delMsg = createSecureRequestMessage(myBaseObj.id, myBaseObj.id, ObjectType.picture, random(numPictures) + 2)
delete(delMsg, "delete", "picturedelete")
}
}
}
// Create content
if (random(101) < putPercent) {
random(4) match {
case 0 => context.system.scheduler.scheduleOnce(randomDuration(3), self, MakePost(status, -1))
case 1 => context.system.scheduler.scheduleOnce(randomDuration(3), self, MakeAlbum)
case 2 => context.system.scheduler.scheduleOnce(randomDuration(3), self, MakePicture(-1))
case 3 => context.system.scheduler.scheduleOnce(randomDuration(3), self, MakePicturePost)
}
}
// Get friends' content
if (random(101) < getPercent) {
random(2) match {
case 0 =>
myRealFriends.foreach {
case (ref: ActorRef, id: Int) =>
val getFeedRequest = createSecureRequestMessage(myBaseObj.id, id, ObjectType.post, -1)
get(getFeedRequest, "feed")
}
case 1 =>
myFriendsPublicKeys.foreach {
case (id: String, key: PublicKey) =>
val getFeedRequest = createSecureRequestMessage(myBaseObj.id, id.toInt, ObjectType.post, -1)
get(getFeedRequest, "feed")
}
}
}
// update your own content
if (random(101) < updatePercent) {
random(3) match {
case 0 =>
if (numPosts > 1) {
context.system.scheduler.scheduleOnce(randomDuration(3), self, UpdatePost(status, -1))
}
case 1 =>
if (numPictures > 1) {
context.system.scheduler.scheduleOnce(randomDuration(3), self, UpdatePicture)
}
case 2 =>
if (numAlbums > 1 && numPictures > 1) {
context.system.scheduler.scheduleOnce(randomDuration(3), self, UpdateAlbum(random(numPictures) + 2))
}
}
}
context.system.scheduler.scheduleOnce(randomDuration(3), self, Constants.falseBool)
// Delete self case
if (random(100001) < 5) {
if (isPage) {
val delMsg = createSecureRequestMessage(myBaseObj.id, myBaseObj.id, ObjectType.page, myBaseObj.id)
delete(delMsg, "delete", "pagedelete")
}
else {
val delMsg = createSecureRequestMessage(myBaseObj.id, myBaseObj.id, ObjectType.user, myBaseObj.id)
delete(delMsg, "delete", "userdelete")
}
}
}
def handlePutResponse(response: HttpResponse, reaction: String) = {
val updateRequest = random(101) < updatePercent
reaction match {
case "registerUser" =>
case "registerPage" =>
case "user" | "page" =>
// if (reaction == "user") {
// me = response ~> unmarshal[User]
// myBaseObj = me.baseObject
//
// } else {
// mePage = response ~> unmarshal[Page]
// myBaseObj = mePage.baseObject
// }
ProfileMap.obj.put(myBaseObj.id, isPage)
waitForIdFriends.foreach(f => self ! f)
waitForIdFriends.clear()
returnHandshake.foreach(f => self.tell(Handshake(Constants.trueBool, myBaseObj.id), f))
returnHandshake.clear()
// log.info(s"Printing $me - $myBaseObj")
self ! Constants.falseBool
if (myBaseObj.id == 0) get(null, "debug")
        if (updateRequest) post(response.entity.asString.parseJson.convertTo[SecureMessage], "profile")
case "post" =>
numPosts += 1
case "album" =>
numAlbums += 1
case "picturepost" =>
numPictures += 1
case "picture" =>
numPictures += 1
case "likepage" =>
case "like" =>
log.info(response ~> unmarshal[String])
}
}
def handleGetResponse(response: HttpResponse, reaction: String) = {
reaction match {
case "postdelete" => delete(response.entity.asString.parseJson.asInstanceOf[SecureMessage], "post")
case "albumdelete" => delete(response.entity.asString.parseJson.asInstanceOf[SecureMessage], "album")
case "picturedelete" => delete(response.entity.asString.parseJson.asInstanceOf[SecureMessage], "picture")
case "debug" =>
log.info(s"${response.entity.asString}")
context.system.scheduler.scheduleOnce(durationSeconds(2), self, DebugMsg)
case "user" =>
decryptSecureObjectMessage(response ~> unmarshal[SecureMessage], ObjectType.user)
case "page" =>
decryptSecureObjectMessage(response ~> unmarshal[SecureMessage], ObjectType.page)
case "friendpublickeys" =>
case "friend_requests" =>
val idList = decryptSecureRequestMessage(response ~> unmarshal[SecureMessage], ObjectType.intArray).asInstanceOf[Array[Int]]
idList.foreach { id =>
val likeMessage = createSecureRequestMessage(myBaseObj.id, id, ObjectType.user, id)
post(likeMessage, "like", "acceptfriendrequest")
}
case "feed" =>
val secureObjectList = decryptSecureRequestMessage(response ~> unmarshal[SecureMessage], ObjectType.secureObjectArray).asInstanceOf[Array[SecureObject]]
if (!secureObjectList.isEmpty) {
// log.info(s"${myBaseObj.id} feed ${secureObjectList.size}")
secureObjectList.foreach { case so =>
decryptSecureObject(so, ObjectType.post)
}
}
case "feedpost" =>
case "picture" =>
decryptSecureObjectMessage(response ~> unmarshal[SecureMessage], ObjectType.picture)
case "post" =>
decryptSecureObjectMessage(response ~> unmarshal[SecureMessage], ObjectType.post)
case "getalbumaddpicture" =>
case "album" =>
decryptSecureObjectMessage(response ~> unmarshal[SecureMessage], ObjectType.album)
case x => log.error("Unmatched getmsg case {}", x)
}
}
def createSecureObjectMessage(obj: Any, from: Int, to: Int, objType: ObjectType, keys: Map[String, PublicKey], id: Int = -1): SecureMessage = {
objType match {
case ObjectType.post =>
val secureObject = Crypto.constructSecureObject(new BaseObject(id), from, to, ObjectType.post.id, obj.asInstanceOf[Post].toJson.compactPrint, keys)
Crypto.constructSecureMessage(myBaseObj.id, secureObject.toJson.compactPrint, serverPublicKey, keyPair.getPrivate)
case ObjectType.picture =>
val secureObject = Crypto.constructSecureObject(new BaseObject(id), from, to, ObjectType.picture.id, obj.asInstanceOf[Picture].toJson.compactPrint, keys)
Crypto.constructSecureMessage(myBaseObj.id, secureObject.toJson.compactPrint, serverPublicKey, keyPair.getPrivate)
case ObjectType.album =>
val secureObject = Crypto.constructSecureObject(new BaseObject(id), from, to, ObjectType.album.id, obj.asInstanceOf[Album].toJson.compactPrint, keys)
Crypto.constructSecureMessage(myBaseObj.id, secureObject.toJson.compactPrint, serverPublicKey, keyPair.getPrivate)
}
}
def createSecureRequestMessage(from: Int, to: Int, objType: ObjectType, objId: Int): SecureMessage = {
val secureRequest = SecureRequest(from, to, objType.id, objId)
Crypto.constructSecureMessage(myBaseObj.id, secureRequest.toJson.compactPrint, serverPublicKey, keyPair.getPrivate)
}
def decryptSecureObject(secureObject: SecureObject, objType: ObjectType):Any = {
try {
val aesKey = Crypto.constructAESKeyFromBytes(Crypto.decryptRSA(secureObject.encryptedKeys(myBaseObj.id.toString), keyPair.getPrivate))
val objJson = Base64Util.decodeString(Crypto.decryptAES(secureObject.data, aesKey, Constants.IV))
objType match {
case ObjectType.user => JsonParser(objJson).convertTo[User]
case ObjectType.page => JsonParser(objJson).convertTo[Page]
case ObjectType.post => JsonParser(objJson).convertTo[Post]
case ObjectType.picture => JsonParser(objJson).convertTo[Picture]
case ObjectType.album => JsonParser(objJson).convertTo[Album]
}
}
catch {
case e: NoSuchElementException => log.info(s"${myBaseObj.id} doesn't have permission to decrypt ${secureObject.baseObj.id}")
case d: BadPaddingException => log.info(s"${myBaseObj.id} can't decrypt ${secureObject.baseObj.id}")
}
}
def decryptSecureRequestMessage(secureMsg: SecureMessage, objType: ObjectType): Any = {
try {
val requestKeyBytes = Crypto.decryptRSA(secureMsg.encryptedKey, keyPair.getPrivate)
if (Crypto.verifySign(serverPublicKey, secureMsg.signature, requestKeyBytes)) {
val objJson = Base64Util.decodeString(
Crypto.decryptAES(secureMsg.message, Crypto.constructAESKeyFromBytes(requestKeyBytes), Constants.IV)
)
objType match {
case ObjectType.intArray => JsonParser(objJson).convertTo[Array[Int]]
case ObjectType.userKeyMap => JsonParser(objJson).convertTo[Map[String, Array[Byte]]]
case ObjectType.secureObjectArray => JsonParser(objJson).convertTo[Array[SecureObject]]
}
}
}
catch{
case e: BadPaddingException => log.info(s"${myBaseObj.id} can't decrypt SecureMessage")
}
}
def decryptSecureObjectMessage(secureMsg: SecureMessage, objType: ObjectType): Any = {
try {
val requestKeyBytes = Crypto.decryptRSA(secureMsg.encryptedKey, keyPair.getPrivate)
if (Crypto.verifySign(serverPublicKey, secureMsg.signature, requestKeyBytes)) {
val json = Base64Util.decodeString(
Crypto.decryptAES(secureMsg.message, Crypto.constructAESKeyFromBytes(requestKeyBytes), Constants.IV)
)
val secureObject = JsonParser(json).convertTo[SecureObject]
try {
val aesKey = Crypto.constructAESKeyFromBytes(Crypto.decryptRSA(secureObject.encryptedKeys(myBaseObj.id.toString), keyPair.getPrivate))
val objJson = Base64Util.decodeString(Crypto.decryptAES(secureObject.data, aesKey, Constants.IV))
objType match {
case ObjectType.user => JsonParser(objJson).convertTo[User]
case ObjectType.page => JsonParser(objJson).convertTo[Page]
case ObjectType.post => JsonParser(objJson).convertTo[Post]
case ObjectType.picture => JsonParser(objJson).convertTo[Picture]
case ObjectType.album => JsonParser(objJson).convertTo[Album]
case ObjectType.intArray => JsonParser(objJson).convertTo[Array[Int]]
case ObjectType.userKeyMap => JsonParser(objJson).convertTo[Map[String, Array[Byte]]]
}
}
catch {
case e: NoSuchElementException => log.info(s"${myBaseObj.id} doesn't have permission to decrypt ${secureObject.baseObj.id}")
case d: BadPaddingException => log.info(s"${myBaseObj.id} can't decrypt ${secureObject.baseObj.id}")
}
}
}
catch{
case e: BadPaddingException => log.info(s"${myBaseObj.id} can't decrypt SecureMessage")
}
}
}
|
Nirespire/SecureFacebookAPI
|
src/main/scala/Client/ClientActor.scala
|
Scala
|
mit
| 24,317
|
package model
import scalikejdbc._
import org.joda.time.DateTime
import skinny.orm.SkinnyCRUDMapperWithId
import skinny.orm.feature._
case class Company(
id: CompanyId,
name: String,
url: Option[String] = None,
createdAt: DateTime,
updatedAt: Option[DateTime] = None,
deletedAt: Option[DateTime] = None)
object Company extends SkinnyCRUDMapperWithId[CompanyId, Company]
with TimestampsFeatureWithId[CompanyId, Company]
with SoftDeleteWithTimestampFeatureWithId[CompanyId, Company] {
override val defaultAlias = createAlias("c")
def idToRawValue(id: CompanyId) = id.value
def rawValueToId(value: Any) = CompanyId(value.toString.toLong)
override def extract(rs: WrappedResultSet, c: ResultName[Company]): Company = autoConstruct(rs, c)
}
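// --- Usage sketch (not part of the model) ---
// Assumes a configured scalikejdbc ConnectionPool; findById comes from
// SkinnyCRUDMapperWithId, with CompanyId mapped through
// idToRawValue/rawValueToId as defined above.
object CompanyUsageExample {
  def lookup(id: Long)(implicit session: DBSession = AutoSession): Option[Company] =
    Company.findById(CompanyId(id))
}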
|
holycattle/skinny-framework
|
example/src/main/scala/model/Company.scala
|
Scala
|
mit
| 771
|
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime
package config
object Environment {
def info: Seq[String] = Seq(
"Environment:",
s" OS : $osVersion",
s" Java : $javaVersion",
s" Scala version: $scalaVersion",
s" Ensime : $ensimeVersion",
s" Heap Size : ${Runtime.getRuntime.maxMemory}",
s" Built with Scala version: ${BuildInfo.scalaVersion}",
s" Built with sbt version: ${BuildInfo.sbtVersion}",
s" Built from git sha: ${BuildInfo.gitSha}",
s" Built on: ${BuildInfo.builtAtString}"
)
private def osVersion: String =
System.getProperty("os.name")
private def javaVersion: String = {
val vmInfo = System.getProperty("java.vm.name") + " " + System.getProperty("java.vm.version")
val rtInfo = System.getProperty("java.runtime.name") + " " + System.getProperty("java.runtime.version")
vmInfo + ", " + rtInfo
}
private def scalaVersion: String =
scala.util.Properties.versionString
private def ensimeVersion: String =
BuildInfo.version
}
|
VlachJosef/ensime-server
|
core/src/main/scala/org/ensime/config/Environment.scala
|
Scala
|
gpl-3.0
| 1,125
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.classification
import org.json4s.{DefaultFormats, JObject}
import org.json4s.JsonDSL._
import org.apache.spark.annotation.Since
import org.apache.spark.ml.feature.Instance
import org.apache.spark.ml.linalg.{DenseVector, SparseVector, Vector, Vectors}
import org.apache.spark.ml.param.ParamMap
import org.apache.spark.ml.tree._
import org.apache.spark.ml.tree.impl.RandomForest
import org.apache.spark.ml.util._
import org.apache.spark.ml.util.DefaultParamsReader.Metadata
import org.apache.spark.ml.util.Instrumentation.instrumented
import org.apache.spark.mllib.tree.configuration.{Algo => OldAlgo}
import org.apache.spark.mllib.tree.model.{RandomForestModel => OldRandomForestModel}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset}
import org.apache.spark.sql.functions.{col, udf}
/**
* <a href="http://en.wikipedia.org/wiki/Random_forest">Random Forest</a> learning algorithm for
* classification.
* It supports both binary and multiclass labels, as well as both continuous and categorical
* features.
*/
@Since("1.4.0")
class RandomForestClassifier @Since("1.4.0") (
@Since("1.4.0") override val uid: String)
extends ProbabilisticClassifier[Vector, RandomForestClassifier, RandomForestClassificationModel]
with RandomForestClassifierParams with DefaultParamsWritable {
@Since("1.4.0")
def this() = this(Identifiable.randomUID("rfc"))
// Override parameter setters from parent trait for Java API compatibility.
// Parameters from TreeClassifierParams:
/** @group setParam */
@Since("1.4.0")
def setMaxDepth(value: Int): this.type = set(maxDepth, value)
/** @group setParam */
@Since("1.4.0")
def setMaxBins(value: Int): this.type = set(maxBins, value)
/** @group setParam */
@Since("1.4.0")
def setMinInstancesPerNode(value: Int): this.type = set(minInstancesPerNode, value)
/** @group setParam */
@Since("1.4.0")
def setMinInfoGain(value: Double): this.type = set(minInfoGain, value)
/** @group expertSetParam */
@Since("1.4.0")
def setMaxMemoryInMB(value: Int): this.type = set(maxMemoryInMB, value)
/** @group expertSetParam */
@Since("1.4.0")
def setCacheNodeIds(value: Boolean): this.type = set(cacheNodeIds, value)
/**
* Specifies how often to checkpoint the cached node IDs.
* E.g. 10 means that the cache will get checkpointed every 10 iterations.
* This is only used if cacheNodeIds is true and if the checkpoint directory is set in
* [[org.apache.spark.SparkContext]].
* Must be at least 1.
* (default = 10)
* @group setParam
*/
@Since("1.4.0")
def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
/** @group setParam */
@Since("1.4.0")
def setImpurity(value: String): this.type = set(impurity, value)
// Parameters from TreeEnsembleParams:
/** @group setParam */
@Since("1.4.0")
def setSubsamplingRate(value: Double): this.type = set(subsamplingRate, value)
/** @group setParam */
@Since("1.4.0")
def setSeed(value: Long): this.type = set(seed, value)
// Parameters from RandomForestParams:
/** @group setParam */
@Since("1.4.0")
def setNumTrees(value: Int): this.type = set(numTrees, value)
/** @group setParam */
@Since("1.4.0")
def setFeatureSubsetStrategy(value: String): this.type =
set(featureSubsetStrategy, value)
override protected def train(
dataset: Dataset[_]): RandomForestClassificationModel = instrumented { instr =>
instr.logPipelineStage(this)
instr.logDataset(dataset)
val categoricalFeatures: Map[Int, Int] =
MetadataUtils.getCategoricalFeatures(dataset.schema($(featuresCol)))
val numClasses: Int = getNumClasses(dataset)
if (isDefined(thresholds)) {
require($(thresholds).length == numClasses, this.getClass.getSimpleName +
".train() called with non-matching numClasses and thresholds.length." +
s" numClasses=$numClasses, but thresholds has length ${$(thresholds).length}")
}
val instances: RDD[Instance] = extractLabeledPoints(dataset, numClasses).map(_.toInstance)
val strategy =
super.getOldStrategy(categoricalFeatures, numClasses, OldAlgo.Classification, getOldImpurity)
instr.logParams(this, labelCol, featuresCol, predictionCol, probabilityCol, rawPredictionCol,
leafCol, impurity, numTrees, featureSubsetStrategy, maxDepth, maxBins, maxMemoryInMB,
minInfoGain, minInstancesPerNode, seed, subsamplingRate, thresholds, cacheNodeIds,
checkpointInterval)
val trees = RandomForest
.run(instances, strategy, getNumTrees, getFeatureSubsetStrategy, getSeed, Some(instr))
.map(_.asInstanceOf[DecisionTreeClassificationModel])
trees.foreach(copyValues(_))
val numFeatures = trees.head.numFeatures
instr.logNumClasses(numClasses)
instr.logNumFeatures(numFeatures)
new RandomForestClassificationModel(uid, trees, numFeatures, numClasses)
}
@Since("1.4.1")
override def copy(extra: ParamMap): RandomForestClassifier = defaultCopy(extra)
}
@Since("1.4.0")
object RandomForestClassifier extends DefaultParamsReadable[RandomForestClassifier] {
/** Accessor for supported impurity settings: entropy, gini */
@Since("1.4.0")
final val supportedImpurities: Array[String] = TreeClassifierParams.supportedImpurities
/** Accessor for supported featureSubsetStrategy settings: auto, all, onethird, sqrt, log2 */
@Since("1.4.0")
final val supportedFeatureSubsetStrategies: Array[String] =
TreeEnsembleParams.supportedFeatureSubsetStrategies
@Since("2.0.0")
override def load(path: String): RandomForestClassifier = super.load(path)
}
/**
* <a href="http://en.wikipedia.org/wiki/Random_forest">Random Forest</a> model for classification.
* It supports both binary and multiclass labels, as well as both continuous and categorical
* features.
*
* @param _trees Decision trees in the ensemble.
* Warning: These have null parents.
*/
@Since("1.4.0")
class RandomForestClassificationModel private[ml] (
@Since("1.5.0") override val uid: String,
private val _trees: Array[DecisionTreeClassificationModel],
@Since("1.6.0") override val numFeatures: Int,
@Since("1.5.0") override val numClasses: Int)
extends ProbabilisticClassificationModel[Vector, RandomForestClassificationModel]
with RandomForestClassifierParams with TreeEnsembleModel[DecisionTreeClassificationModel]
with MLWritable with Serializable {
require(_trees.nonEmpty, "RandomForestClassificationModel requires at least 1 tree.")
/**
* Construct a random forest classification model, with all trees weighted equally.
*
* @param trees Component trees
*/
private[ml] def this(
trees: Array[DecisionTreeClassificationModel],
numFeatures: Int,
numClasses: Int) =
this(Identifiable.randomUID("rfc"), trees, numFeatures, numClasses)
@Since("1.4.0")
override def trees: Array[DecisionTreeClassificationModel] = _trees
// Note: We may add support for weights (based on tree performance) later on.
private lazy val _treeWeights: Array[Double] = Array.fill[Double](_trees.length)(1.0)
@Since("1.4.0")
override def treeWeights: Array[Double] = _treeWeights
override def transform(dataset: Dataset[_]): DataFrame = {
transformSchema(dataset.schema, logging = true)
val outputData = super.transform(dataset)
if ($(leafCol).nonEmpty) {
val leafUDF = udf { features: Vector => predictLeaf(features) }
outputData.withColumn($(leafCol), leafUDF(col($(featuresCol))))
} else {
outputData
}
}
override protected def predictRaw(features: Vector): Vector = {
// TODO: When we add a generic Bagging class, handle transform there: SPARK-7128
// Classifies using majority votes.
// Ignore the tree weights since all are 1.0 for now.
val votes = Array.fill[Double](numClasses)(0.0)
_trees.view.foreach { tree =>
val classCounts: Array[Double] = tree.rootNode.predictImpl(features).impurityStats.stats
val total = classCounts.sum
if (total != 0) {
var i = 0
while (i < numClasses) {
votes(i) += classCounts(i) / total
i += 1
}
}
}
Vectors.dense(votes)
}
override protected def raw2probabilityInPlace(rawPrediction: Vector): Vector = {
rawPrediction match {
case dv: DenseVector =>
ProbabilisticClassificationModel.normalizeToProbabilitiesInPlace(dv)
dv
case sv: SparseVector =>
throw new RuntimeException("Unexpected error in RandomForestClassificationModel:" +
" raw2probabilityInPlace encountered SparseVector")
}
}
@Since("1.4.0")
override def copy(extra: ParamMap): RandomForestClassificationModel = {
copyValues(new RandomForestClassificationModel(uid, _trees, numFeatures, numClasses), extra)
.setParent(parent)
}
@Since("1.4.0")
override def toString: String = {
s"RandomForestClassificationModel: uid=$uid, numTrees=$getNumTrees, numClasses=$numClasses, " +
s"numFeatures=$numFeatures"
}
/**
* Estimate of the importance of each feature.
*
* Each feature's importance is the average of its importance across all trees in the ensemble
* The importance vector is normalized to sum to 1. This method is suggested by Hastie et al.
* (Hastie, Tibshirani, Friedman. "The Elements of Statistical Learning, 2nd Edition." 2001.)
* and follows the implementation from scikit-learn.
*
* @see `DecisionTreeClassificationModel.featureImportances`
*/
@Since("1.5.0")
lazy val featureImportances: Vector = TreeEnsembleModel.featureImportances(trees, numFeatures)
/** (private[ml]) Convert to a model in the old API */
private[ml] def toOld: OldRandomForestModel = {
new OldRandomForestModel(OldAlgo.Classification, _trees.map(_.toOld))
}
@Since("2.0.0")
override def write: MLWriter =
new RandomForestClassificationModel.RandomForestClassificationModelWriter(this)
}
@Since("2.0.0")
object RandomForestClassificationModel extends MLReadable[RandomForestClassificationModel] {
@Since("2.0.0")
override def read: MLReader[RandomForestClassificationModel] =
new RandomForestClassificationModelReader
@Since("2.0.0")
override def load(path: String): RandomForestClassificationModel = super.load(path)
private[RandomForestClassificationModel]
class RandomForestClassificationModelWriter(instance: RandomForestClassificationModel)
extends MLWriter {
override protected def saveImpl(path: String): Unit = {
// Note: numTrees is not currently used, but could be nice to store for fast querying.
val extraMetadata: JObject = Map(
"numFeatures" -> instance.numFeatures,
"numClasses" -> instance.numClasses,
"numTrees" -> instance.getNumTrees)
EnsembleModelReadWrite.saveImpl(instance, path, sparkSession, extraMetadata)
}
}
private class RandomForestClassificationModelReader
extends MLReader[RandomForestClassificationModel] {
/** Checked against metadata when loading model */
private val className = classOf[RandomForestClassificationModel].getName
private val treeClassName = classOf[DecisionTreeClassificationModel].getName
override def load(path: String): RandomForestClassificationModel = {
implicit val format = DefaultFormats
val (metadata: Metadata, treesData: Array[(Metadata, Node)], _) =
EnsembleModelReadWrite.loadImpl(path, sparkSession, className, treeClassName)
val numFeatures = (metadata.metadata \\ "numFeatures").extract[Int]
val numClasses = (metadata.metadata \\ "numClasses").extract[Int]
val numTrees = (metadata.metadata \\ "numTrees").extract[Int]
val trees: Array[DecisionTreeClassificationModel] = treesData.map {
case (treeMetadata, root) =>
val tree =
new DecisionTreeClassificationModel(treeMetadata.uid, root, numFeatures, numClasses)
treeMetadata.getAndSetParams(tree)
tree
}
require(numTrees == trees.length, s"RandomForestClassificationModel.load expected $numTrees" +
s" trees based on metadata but found ${trees.length} trees.")
val model = new RandomForestClassificationModel(metadata.uid, trees, numFeatures, numClasses)
metadata.getAndSetParams(model)
model
}
}
/** Convert a model from the old API */
private[ml] def fromOld(
oldModel: OldRandomForestModel,
parent: RandomForestClassifier,
categoricalFeatures: Map[Int, Int],
numClasses: Int,
numFeatures: Int = -1): RandomForestClassificationModel = {
require(oldModel.algo == OldAlgo.Classification, "Cannot convert RandomForestModel" +
s" with algo=${oldModel.algo} (old API) to RandomForestClassificationModel (new API).")
val newTrees = oldModel.trees.map { tree =>
// parent for each tree is null since there is no good way to set this.
DecisionTreeClassificationModel.fromOld(tree, null, categoricalFeatures)
}
val uid = if (parent != null) parent.uid else Identifiable.randomUID("rfc")
new RandomForestClassificationModel(uid, newTrees, numFeatures, numClasses)
}
}
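// --- Usage sketch (not part of Spark) ---
// A minimal training/inference flow, assuming a DataFrame with the
// conventional "label" and "features" columns; trainingDF and testDF are
// hypothetical:
//
//   val rf = new RandomForestClassifier()
//     .setNumTrees(20)
//     .setMaxDepth(5)
//     .setFeatureSubsetStrategy("auto")
//   val model: RandomForestClassificationModel = rf.fit(trainingDF)
//   val predictions = model.transform(testDF)
//   println(model.featureImportances)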
|
caneGuy/spark
|
mllib/src/main/scala/org/apache/spark/ml/classification/RandomForestClassifier.scala
|
Scala
|
apache-2.0
| 14,150
|
package gh.test.gh2011b.payload
import gh2011b.models.GollumEventPayload
import net.liftweb.json._
import org.scalatest.{Matchers, FlatSpec}
class GollumEventPayloadTest extends FlatSpec with Matchers
{
"A valid GollumEvent payload" must "be correctly parsed" in {
val json = parse(
"""
| {
|
| "title":"Pattern Match Pattern",
| "summary":null,
| "sha":"4a30c5a648bd701431ac8a9dacdfd2d1cdaf52e3",
| "page_name":"Pattern Match Pattern",
| "action":"edited"
|
|}
""".stripMargin)
GollumEventPayload(json) shouldBe 'defined
}
}
|
mgoeminne/github_etl
|
src/test/scala/gh/test/gh2011b/payload/GollumEventPayloadTest.scala
|
Scala
|
mit
| 681
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.parquet
import scala.collection.mutable.ArrayBuffer
import org.apache.hadoop.fs.Path
import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.parquet.ParquetRelation2._
import org.apache.spark.sql.test.TestSQLContext
import org.apache.spark.sql.types._
import org.apache.spark.sql.{QueryTest, Row, SQLContext}
// The data where the partitioning key exists only in the directory structure.
case class ParquetData(intField: Int, stringField: String)
// The data that also includes the partitioning key
case class ParquetDataWithKey(intField: Int, pi: Int, stringField: String, ps: String)
class ParquetPartitionDiscoverySuite extends QueryTest with ParquetTest {
override val sqlContext: SQLContext = TestSQLContext
import sqlContext._
import sqlContext.implicits._
val defaultPartitionName = "__NULL__"
test("column type inference") {
def check(raw: String, literal: Literal): Unit = {
assert(inferPartitionColumnValue(raw, defaultPartitionName) === literal)
}
check("10", Literal(10, IntegerType))
check("1000000000000000", Literal(1000000000000000L, LongType))
check("1.5", Literal(1.5, FloatType))
check("hello", Literal("hello", StringType))
check(defaultPartitionName, Literal(null, NullType))
}
test("parse partition") {
def check(path: String, expected: PartitionValues): Unit = {
assert(expected === parsePartition(new Path(path), defaultPartitionName))
}
def checkThrows[T <: Throwable: Manifest](path: String, expected: String): Unit = {
val message = intercept[T] {
parsePartition(new Path(path), defaultPartitionName)
}.getMessage
assert(message.contains(expected))
}
check(
"file:///",
PartitionValues(
ArrayBuffer.empty[String],
ArrayBuffer.empty[Literal]))
check(
"file://path/a=10",
PartitionValues(
ArrayBuffer("a"),
ArrayBuffer(Literal(10, IntegerType))))
check(
"file://path/a=10/b=hello/c=1.5",
PartitionValues(
ArrayBuffer("a", "b", "c"),
ArrayBuffer(
Literal(10, IntegerType),
Literal("hello", StringType),
Literal(1.5, FloatType))))
check(
"file://path/a=10/b_hello/c=1.5",
PartitionValues(
ArrayBuffer("c"),
ArrayBuffer(Literal(1.5, FloatType))))
checkThrows[AssertionError]("file://path/=10", "Empty partition column name")
checkThrows[AssertionError]("file://path/a=", "Empty partition column value")
}
test("parse partitions") {
def check(paths: Seq[String], spec: PartitionSpec): Unit = {
assert(parsePartitions(paths.map(new Path(_)), defaultPartitionName) === spec)
}
check(Seq(
"hdfs://host:9000/path/a=10/b=hello"),
PartitionSpec(
StructType(Seq(
StructField("a", IntegerType),
StructField("b", StringType))),
Seq(Partition(Row(10, "hello"), "hdfs://host:9000/path/a=10/b=hello"))))
check(Seq(
"hdfs://host:9000/path/a=10/b=20",
"hdfs://host:9000/path/a=10.5/b=hello"),
PartitionSpec(
StructType(Seq(
StructField("a", FloatType),
StructField("b", StringType))),
Seq(
Partition(Row(10, "20"), "hdfs://host:9000/path/a=10/b=20"),
Partition(Row(10.5, "hello"), "hdfs://host:9000/path/a=10.5/b=hello"))))
check(Seq(
s"hdfs://host:9000/path/a=10/b=20",
s"hdfs://host:9000/path/a=$defaultPartitionName/b=hello"),
PartitionSpec(
StructType(Seq(
StructField("a", IntegerType),
StructField("b", StringType))),
Seq(
Partition(Row(10, "20"), s"hdfs://host:9000/path/a=10/b=20"),
Partition(Row(null, "hello"), s"hdfs://host:9000/path/a=$defaultPartitionName/b=hello"))))
check(Seq(
s"hdfs://host:9000/path/a=10/b=$defaultPartitionName",
s"hdfs://host:9000/path/a=10.5/b=$defaultPartitionName"),
PartitionSpec(
StructType(Seq(
StructField("a", FloatType),
StructField("b", StringType))),
Seq(
Partition(Row(10, null), s"hdfs://host:9000/path/a=10/b=$defaultPartitionName"),
Partition(Row(10.5, null), s"hdfs://host:9000/path/a=10.5/b=$defaultPartitionName"))))
}
test("read partitioned table - normal case") {
withTempDir { base =>
for {
pi <- Seq(1, 2)
ps <- Seq("foo", "bar")
} {
makeParquetFile(
(1 to 10).map(i => ParquetData(i, i.toString)),
makePartitionDir(base, defaultPartitionName, "pi" -> pi, "ps" -> ps))
}
parquetFile(base.getCanonicalPath).registerTempTable("t")
withTempTable("t") {
checkAnswer(
sql("SELECT * FROM t"),
for {
i <- 1 to 10
pi <- Seq(1, 2)
ps <- Seq("foo", "bar")
} yield Row(i, i.toString, pi, ps))
checkAnswer(
sql("SELECT intField, pi FROM t"),
for {
i <- 1 to 10
pi <- Seq(1, 2)
_ <- Seq("foo", "bar")
} yield Row(i, pi))
checkAnswer(
sql("SELECT * FROM t WHERE pi = 1"),
for {
i <- 1 to 10
ps <- Seq("foo", "bar")
} yield Row(i, i.toString, 1, ps))
checkAnswer(
sql("SELECT * FROM t WHERE ps = 'foo'"),
for {
i <- 1 to 10
pi <- Seq(1, 2)
} yield Row(i, i.toString, pi, "foo"))
}
}
}
test("read partitioned table - partition key included in Parquet file") {
withTempDir { base =>
for {
pi <- Seq(1, 2)
ps <- Seq("foo", "bar")
} {
makeParquetFile(
(1 to 10).map(i => ParquetDataWithKey(i, pi, i.toString, ps)),
makePartitionDir(base, defaultPartitionName, "pi" -> pi, "ps" -> ps))
}
parquetFile(base.getCanonicalPath).registerTempTable("t")
withTempTable("t") {
checkAnswer(
sql("SELECT * FROM t"),
for {
i <- 1 to 10
pi <- Seq(1, 2)
ps <- Seq("foo", "bar")
} yield Row(i, pi, i.toString, ps))
checkAnswer(
sql("SELECT intField, pi FROM t"),
for {
i <- 1 to 10
pi <- Seq(1, 2)
_ <- Seq("foo", "bar")
} yield Row(i, pi))
checkAnswer(
sql("SELECT * FROM t WHERE pi = 1"),
for {
i <- 1 to 10
ps <- Seq("foo", "bar")
} yield Row(i, 1, i.toString, ps))
checkAnswer(
sql("SELECT * FROM t WHERE ps = 'foo'"),
for {
i <- 1 to 10
pi <- Seq(1, 2)
} yield Row(i, pi, i.toString, "foo"))
}
}
}
test("read partitioned table - with nulls") {
withTempDir { base =>
for {
// Must be `Integer` rather than `Int` here. `null.asInstanceOf[Int]` results in a zero...
pi <- Seq(1, null.asInstanceOf[Integer])
ps <- Seq("foo", null.asInstanceOf[String])
} {
makeParquetFile(
(1 to 10).map(i => ParquetData(i, i.toString)),
makePartitionDir(base, defaultPartitionName, "pi" -> pi, "ps" -> ps))
}
val parquetRelation = load(
"org.apache.spark.sql.parquet",
Map(
"path" -> base.getCanonicalPath,
ParquetRelation2.DEFAULT_PARTITION_NAME -> defaultPartitionName))
parquetRelation.registerTempTable("t")
withTempTable("t") {
checkAnswer(
sql("SELECT * FROM t"),
for {
i <- 1 to 10
pi <- Seq(1, null.asInstanceOf[Integer])
ps <- Seq("foo", null.asInstanceOf[String])
} yield Row(i, i.toString, pi, ps))
checkAnswer(
sql("SELECT * FROM t WHERE pi IS NULL"),
for {
i <- 1 to 10
ps <- Seq("foo", null.asInstanceOf[String])
} yield Row(i, i.toString, null, ps))
checkAnswer(
sql("SELECT * FROM t WHERE ps IS NULL"),
for {
i <- 1 to 10
pi <- Seq(1, null.asInstanceOf[Integer])
} yield Row(i, i.toString, pi, null))
}
}
}
test("read partitioned table - with nulls and partition keys are included in Parquet file") {
withTempDir { base =>
for {
pi <- Seq(1, 2)
ps <- Seq("foo", null.asInstanceOf[String])
} {
makeParquetFile(
(1 to 10).map(i => ParquetDataWithKey(i, pi, i.toString, ps)),
makePartitionDir(base, defaultPartitionName, "pi" -> pi, "ps" -> ps))
}
val parquetRelation = load(
"org.apache.spark.sql.parquet",
Map(
"path" -> base.getCanonicalPath,
ParquetRelation2.DEFAULT_PARTITION_NAME -> defaultPartitionName))
parquetRelation.registerTempTable("t")
withTempTable("t") {
checkAnswer(
sql("SELECT * FROM t"),
for {
i <- 1 to 10
pi <- Seq(1, 2)
ps <- Seq("foo", null.asInstanceOf[String])
} yield Row(i, pi, i.toString, ps))
checkAnswer(
sql("SELECT * FROM t WHERE ps IS NULL"),
for {
i <- 1 to 10
pi <- Seq(1, 2)
} yield Row(i, pi, i.toString, null))
}
}
}
test("read partitioned table - merging compatible schemas") {
withTempDir { base =>
makeParquetFile(
(1 to 10).map(i => Tuple1(i)).toDF("intField"),
makePartitionDir(base, defaultPartitionName, "pi" -> 1))
makeParquetFile(
(1 to 10).map(i => (i, i.toString)).toDF("intField", "stringField"),
makePartitionDir(base, defaultPartitionName, "pi" -> 2))
load(base.getCanonicalPath, "org.apache.spark.sql.parquet").registerTempTable("t")
withTempTable("t") {
checkAnswer(
sql("SELECT * FROM t"),
(1 to 10).map(i => Row(i, null, 1)) ++ (1 to 10).map(i => Row(i, i.toString, 2)))
}
}
}
}
|
hengyicai/OnlineAggregationUCAS
|
sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetPartitionDiscoverySuite.scala
|
Scala
|
apache-2.0
| 10,881
|
/*
* Copyright 2017 Exon IT
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package by.exonit.redmine.client.play25ws
import akka.stream.scaladsl.StreamConverters
import by.exonit.redmine.client.managers.WebClient.RequestDSL
import by.exonit.redmine.client.managers.WebClient.RequestDSL.Body.{EmptyBody, FileBody, InMemoryByteBody, StreamedBody}
import play.api.libs.ws
import play.api.libs.ws.{WSAuthScheme, WSRequest}
/**
* Created by antonov_i on 01.03.2017.
*/
object Implicits {
implicit class StringExtensions(s: String) {
def trimRight(trimChars: Char*): String =
(s.reverseIterator dropWhile trimChars.contains)
.toSeq.reverseIterator.mkString
}
implicit class ExtendedWSRequest(r: WSRequest) {
def withDslAuth(auth: RequestDSL.AuthenticationMethod): WSRequest = {
auth match {
case RequestDSL.AuthenticationMethod.Basic(user, password) =>
r.withAuth(user, password.mkString, WSAuthScheme.BASIC)
case RequestDSL.AuthenticationMethod.Digest(user, password) =>
r.withAuth(user, password.mkString, WSAuthScheme.DIGEST)
case RequestDSL.AuthenticationMethod.Bearer(token) =>
r.withHeaders("Authentication" -> s"Bearer $token")
}
}
def withDslBody(body: RequestDSL.Body): WSRequest = {
body match {
case EmptyBody() =>
r.withBody(ws.EmptyBody)
case FileBody(file) =>
r.withBody(ws.FileBody(file))
case InMemoryByteBody(b) =>
r.withBody(b)
case StreamedBody(streamProvider) =>
r.withBody(ws.StreamedBody(StreamConverters.fromInputStream(streamProvider)))
}
}
}
}
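// --- Usage sketch (not part of the client) ---
// Hypothetical wiring, assuming a Play 2.5 WSClient named `ws` is in scope;
// withDslAuth and withDslBody are the extension methods defined above:
//
//   import by.exonit.redmine.client.play25ws.Implicits._
//   val request = ws.url("https://redmine.example.com/issues.json")
//     .withDslAuth(RequestDSL.AuthenticationMethod.Bearer("api-token"))
//     .withDslBody(RequestDSL.Body.EmptyBody())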
|
exon-it/redmine-scala-client
|
client-play25-ws/src/main/scala/by/exonit/redmine/client/play25ws/Implicits.scala
|
Scala
|
apache-2.0
| 2,184
|
package com.twitter.finagle.thrift
import com.twitter.finagle.{ServiceProxy, Service, WriteException, ServiceException}
import java.util.logging.{Logger, Level}
import org.apache.thrift.TApplicationException
import org.apache.thrift.protocol.{TProtocolFactory, TMessageType}
import org.apache.thrift.transport.TMemoryInputTransport
import com.twitter.util.Future
/**
* Indicates that the connection on which a Thrift request was issued
* is invalid, where "validity" is determined by
* [[com.twitter.finagle.thrift.ValidateThriftService]].
*/
case class InvalidThriftConnectionException() extends ServiceException {
override def getMessage = "the thrift connection was invalidated"
}
/**
* A filter that invalidates a connection if it suffers from an
* irrecoverable application exception.
*
* Amazingly, an Apache Thrift server will leave a connection in a
* bad state without closing it, and furthermore only expose such
* errors as an "application" exception.
*
* All we can do is sigh, pinch our noses, and apply
* `ValidateThriftService`.
*/
class ValidateThriftService(
self: Service[ThriftClientRequest, Array[Byte]],
protocolFactory: TProtocolFactory
) extends ServiceProxy[ThriftClientRequest, Array[Byte]](self)
{
@volatile private[this] var isValid = true
override def apply(req: ThriftClientRequest) =
if (!isValid) Future.exception(WriteException(InvalidThriftConnectionException()))
else self(req) onSuccess { bytes =>
if (!req.oneway && !isResponseValid(bytes)) {
isValid = false
Logger.getLogger("finagle-thrift").log(Level.WARNING,
"Thrift connection was invalidated!")
}
}
override def isAvailable = isValid && self.isAvailable
private def isResponseValid(bytes: Array[Byte]) = try {
val memoryTransport = new TMemoryInputTransport(bytes)
val iprot = protocolFactory.getProtocol(memoryTransport)
val reply = iprot.readMessageBegin()
reply.`type` != TMessageType.EXCEPTION || {
val exc = TApplicationException.read(iprot)
iprot.readMessageEnd()
exc.getType == TApplicationException.INTERNAL_ERROR ||
exc.getType == TApplicationException.UNKNOWN_METHOD
}
} catch {
case exc: Throwable =>
Logger.getLogger("finagle-thrift").log(Level.WARNING,
"Exception while validating connection", exc)
false
}
}
|
yancl/finagle-6.22.0
|
finagle-thrift/src/main/scala/com/twitter/finagle/thrift/ValidateThriftService.scala
|
Scala
|
apache-2.0
| 2,370
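// Hedged sketch of where the filter above sits in a client stack: wrap the raw
// byte-level Thrift service before handing it to generated client code. The
// rawClient parameter is hypothetical; TBinaryProtocol.Factory is the stock
// Thrift protocol factory matching the constructor above.
import com.twitter.finagle.Service
import com.twitter.finagle.thrift.{ThriftClientRequest, ValidateThriftService}
import org.apache.thrift.protocol.TBinaryProtocol

object ValidateThriftServiceExample {
  def wrap(rawClient: Service[ThriftClientRequest, Array[Byte]]): Service[ThriftClientRequest, Array[Byte]] =
    new ValidateThriftService(rawClient, new TBinaryProtocol.Factory)
}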
|
package be.mygod.speech.tts
import java.io.{File, FileOutputStream}
import android.content.Context
import android.graphics.drawable.Drawable
import scala.collection.immutable
/**
* @author Mygod
*/
abstract class TtsEngine(protected var context: Context,
private val selfDestructionListener: TtsEngine => Unit = null) {
def getVoices: immutable.SortedSet[TtsVoice]
def getVoice: TtsVoice
def setVoice(voice: TtsVoice): Boolean
def setVoice(voice: String): Boolean
private var icon: Drawable = _
def getID: String = getClass.getSimpleName
def getName: String = getID
def getIcon: Drawable = {
if (icon == null) icon = getIconInternal
icon
}
protected def getIconInternal: Drawable
protected var listener: OnTtsSynthesisCallbackListener = _
def setSynthesisCallbackListener(listener: OnTtsSynthesisCallbackListener): Unit = this.listener = listener
def getMimeType: String
var pitch = 100
var speechRate = 100
var pan = 0F
  def speak(text: CharSequence, startOffset: Int): Unit
  def synthesizeToStream(text: CharSequence, startOffset: Int, output: FileOutputStream, cacheDir: File): Unit
  def stop(): Unit
def onDestroy(): Unit = _destroyed = true
private var _destroyed = false
def destroyed: Boolean = _destroyed
}
|
Mygod/MygodSpeechSynthesizer-android
|
src/main/scala/be/mygod/speech/tts/TtsEngine.scala
|
Scala
|
gpl-3.0
| 1,282
|
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import monix.execution.Callback
import monix.eval.Task
import monix.execution.Ack.Stop
import monix.execution.atomic.Atomic
import monix.execution.cancelables.{OrderedCancelable, SingleAssignCancelable, StackedCancelable}
import monix.execution.schedulers.TrampolineExecutionContext.immediate
import monix.execution.{Ack, Cancelable, FutureUtils}
import monix.reactive.Observable
import monix.reactive.observers.Subscriber
import scala.concurrent.{Future, Promise}
import scala.util.{Failure, Success}
private[reactive] object DoOnSubscribeObservable {
// Implementation for doBeforeSubscribe
final class Before[+A](source: Observable[A], task: Task[Unit]) extends Observable[A] {
def unsafeSubscribeFn(subscriber: Subscriber[A]): Cancelable = {
implicit val s = subscriber.scheduler
val conn = OrderedCancelable()
val c = task.runAsync(new Callback[Throwable, Unit] {
def onSuccess(value: Unit): Unit = {
val c = source.unsafeSubscribeFn(subscriber)
conn.orderedUpdate(c, order = 2)
()
}
def onError(ex: Throwable): Unit =
subscriber.onError(ex)
})
conn.orderedUpdate(c, order = 1)
conn
}
}
// Implementation for doAfterSubscribe
final class After[+A](source: Observable[A], task: Task[Unit]) extends Observable[A] {
def unsafeSubscribeFn(out: Subscriber[A]): Cancelable = {
implicit val scheduler = out.scheduler
val p = Promise[Unit]()
val cancelable = source.unsafeSubscribeFn(
new Subscriber[A] {
implicit val scheduler = out.scheduler
private[this] val completeGuard = Atomic(true)
private[this] var isActive = false
def onNext(elem: A): Future[Ack] = {
if (isActive) {
// Fast path (1)
out.onNext(elem)
} else if (p.isCompleted) {
// Fast path (2)
isActive = true
p.future.value.get match {
case Failure(e) =>
finalSignal(e)
Stop
case _ =>
out.onNext(elem)
}
} else {
FutureUtils.transformWith[Unit, Ack](p.future, {
case Success(_) => out.onNext(elem)
case Failure(e) =>
finalSignal(e)
Stop
})(immediate)
}
}
def onError(ex: Throwable): Unit = finalSignal(ex)
def onComplete(): Unit = finalSignal(null)
private def finalSignal(e: Throwable): Unit = {
if (completeGuard.getAndSet(false)) {
if (e != null) out.onError(e)
else out.onComplete()
} else if (e != null) {
scheduler.reportFailure(e)
}
}
}
)
val ref = SingleAssignCancelable()
val conn = StackedCancelable(List(ref, cancelable))
ref := task.runAsync(new Callback[Throwable, Unit] {
def onSuccess(value: Unit): Unit = {
conn.pop()
p.success(())
()
}
def onError(ex: Throwable): Unit = {
conn.pop()
p.failure(ex)
()
}
})
conn
}
}
}
|
monifu/monifu
|
monix-reactive/shared/src/main/scala/monix/reactive/internal/operators/DoOnSubscribeObservable.scala
|
Scala
|
apache-2.0
| 3,998
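// Hedged usage sketch: the comments above name these wrappers as the backing
// for doBeforeSubscribe / doAfterSubscribe, so a call site would plausibly
// look like this. Operator names are taken from those comments and their exact
// public signatures are assumed, not verified against the Observable API.
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import monix.reactive.Observable

object DoOnSubscribeExample extends App {
  Observable(1, 2, 3)
    .doBeforeSubscribe(Task(println("runs before the subscription is wired")))
    .doAfterSubscribe(Task(println("runs once the subscription is wired")))
    .foreachL(i => println(s"got $i"))
    .runSyncUnsafe()
}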
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.crunch.scrunch
import org.apache.crunch.io.{From => from, To => to}
import org.apache.crunch.test.CrunchTestSupport
import org.scalatest.junit.JUnitSuite
import _root_.org.junit.Test
/**
* Tests computing the number of elements in a PCollection from the Scala api.
*/
class LengthTest extends CrunchTestSupport with JUnitSuite {
  @Test def testLength(): Unit = {
val linesInShakespeare: Long = 3667
val pipeline = Pipeline.mapReduce[LengthTest](tempDir.getDefaultConfiguration)
val input = tempDir.copyResourceFileName("shakes.txt")
val len = pipeline.read(from.textFile(input)).length()
assert(linesInShakespeare == len.value())
}
}
|
rvs/crunch
|
crunch-scrunch/src/it/scala/org/apache/crunch/scrunch/LengthTest.scala
|
Scala
|
apache-2.0
| 1,480
|
package x7c1.linen.database.struct
import android.content.ContentValues
import android.database.Cursor
import x7c1.linen.repository.date.Date
import x7c1.wheat.macros.database.TypedFields.toArgs
import x7c1.wheat.macros.database.{Query, TypedCursor, TypedFields}
import x7c1.wheat.modern.database.{Insertable, Updatable}
import x7c1.wheat.modern.database.selector.presets.{CanTraverseRecordByQuery, DefaultProvidable}
import x7c1.wheat.modern.database.selector.{IdEndo, Identifiable, RecordReifiable}
trait LoaderScheduleRecord extends TypedFields {
def schedule_id: Long
def account_id: Long
def schedule_kind_id: Long
def enabled: Int
def created_at: Int --> Date
}
object LoaderScheduleRecord {
def table = "loader_schedules"
def column = TypedFields.expose[LoaderScheduleRecord]
implicit object providable
extends DefaultProvidable[HasAccountId, LoaderScheduleRecord]
implicit object reifiable extends RecordReifiable[LoaderScheduleRecord]{
override def reify(cursor: Cursor) = TypedCursor[LoaderScheduleRecord](cursor)
}
implicit object traverseAll
extends CanTraverseRecordByQuery[LoaderScheduleRecord](
Query("SELECT * FROM loader_schedules")
)
}
case class LoaderScheduleParts(
accountId: Long,
kindId: Long,
enabled: Boolean,
createdAt: Date
)
object LoaderScheduleParts {
import LoaderScheduleRecord.column
implicit object insertable extends Insertable[LoaderScheduleParts]{
override def tableName: String = LoaderScheduleRecord.table
override def toContentValues(target: LoaderScheduleParts): ContentValues = {
TypedFields.toContentValues(
column.account_id -> target.accountId,
column.schedule_kind_id -> target.kindId,
column.enabled -> (if (target.enabled) 1 else 0),
column.created_at -> target.createdAt
)
}
}
case class ToChangeState(
scheduleId: Long,
enabled: Boolean
)
object ToChangeState {
implicit object updatable extends Updatable[ToChangeState]{
override def tableName = LoaderScheduleRecord.table
override def toContentValues(target: ToChangeState) = TypedFields toContentValues (
column.enabled -> (if (target.enabled) 1 else 0)
)
override def where(target: ToChangeState) = toArgs(
column.schedule_id -> target.scheduleId
)
}
}
}
trait HasLoaderScheduleId[A] extends Identifiable[A, Long]
object HasLoaderScheduleId {
implicit object id extends HasLoaderScheduleId[Long] with IdEndo[Long]
}
|
x7c1/Linen
|
linen-repository/src/main/scala/x7c1/linen/database/struct/LoaderScheduleRecord.scala
|
Scala
|
mit
| 2,518
|
import java.util.concurrent.TimeUnit
import com.trueaccord.pb.{Service1JavaImpl, Service1ScalaImpl}
import com.trueaccord.proto.e2e.service.{Service1Grpc => Service1GrpcScala}
import com.trueaccord.proto.e2e.{Service1Grpc => Service1GrpcJava}
import io.grpc.netty.{NegotiationType, NettyChannelBuilder, NettyServerBuilder}
import io.grpc.stub.StreamObserver
import io.grpc.{ManagedChannel, ServerServiceDefinition}
import org.scalatest.{FunSpec, MustMatchers}
import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.util.Random
abstract class GrpcServiceSpecBase extends FunSpec with MustMatchers {
protected[this] final def withScalaServer[A](f: ManagedChannel => A): A = {
withServer(Service1GrpcScala.bindService(new Service1ScalaImpl, singleThreadExecutionContext))(f)
}
protected[this] final def withJavaServer[A](f: ManagedChannel => A): A = {
withServer(Service1GrpcJava.bindService(new Service1JavaImpl))(f)
}
private[this] def withServer[A](services: ServerServiceDefinition*)(f: ManagedChannel => A): A = {
val port = UniquePortGenerator.get()
val server = services.foldLeft(NettyServerBuilder.forPort(port))(_.addService(_)).build()
try {
server.start()
val channel = NettyChannelBuilder.forAddress("localhost", port).negotiationType(NegotiationType.PLAINTEXT).build()
f(channel)
} finally {
server.shutdown()
server.awaitTermination(3000, TimeUnit.MILLISECONDS)
}
}
private[this] val singleThreadExecutionContext = new ExecutionContext {
override def reportFailure(cause: Throwable): Unit = cause.printStackTrace()
override def execute(runnable: Runnable): Unit = runnable.run()
}
protected[this] final def getObserverAndFuture[A]: (StreamObserver[A], Future[A]) = {
val promise = Promise[A]()
val observer = new StreamObserver[A] {
override def onError(t: Throwable): Unit = {}
override def onCompleted(): Unit = {}
override def onNext(value: A): Unit = promise.success(value)
}
(observer, promise.future)
}
protected[this] final def getObserverAndFutureVector[A]: (StreamObserver[A], Future[Vector[A]]) = {
val promise = Promise[Vector[A]]()
val values = Vector.newBuilder[A]
val observer = new StreamObserver[A] {
override def onError(t: Throwable): Unit = {}
override def onCompleted(): Unit = promise.success(values.result)
override def onNext(value: A): Unit = {
values += value
}
}
(observer, promise.future)
}
protected[this] final def randomString(): String = Random.alphanumeric.take(Random.nextInt(10)).mkString
}
|
eiennohito/ScalaPB
|
e2e/src/test/scala/GrpcServiceSpecBase.scala
|
Scala
|
apache-2.0
| 2,648
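// A self-contained variant of the getObserverAndFuture helper above: adapting
// gRPC's push-based StreamObserver into a Future for test assertions. This
// copy uses trySuccess/tryFailure so a misbehaving stub cannot complete the
// promise twice (the original uses promise.success and ignores onError).
import io.grpc.stub.StreamObserver

import scala.concurrent.duration._
import scala.concurrent.{Await, Future, Promise}

object ObserverFutureExample extends App {
  def observerAndFuture[A]: (StreamObserver[A], Future[A]) = {
    val promise = Promise[A]()
    val observer = new StreamObserver[A] {
      override def onNext(value: A): Unit = promise.trySuccess(value)
      override def onError(t: Throwable): Unit = promise.tryFailure(t)
      override def onCompleted(): Unit = ()
    }
    (observer, promise.future)
  }

  val (observer, future) = observerAndFuture[String]
  observer.onNext("pong")                  // as a server callback would
  println(Await.result(future, 1.second))  // prints "pong"
}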
|
package extracells.util.recipe
import appeng.api.features.INetworkEncodable
import appeng.api.implementations.items.IAEItemPowerStorage
import extracells.item.{ItemWirelessTerminalUniversal, TerminalType}
import extracells.registries.ItemEnum
import extracells.util.UniversalTerminal
import net.minecraft.inventory.InventoryCrafting
import net.minecraft.item.ItemStack
import net.minecraft.item.crafting.IRecipe
import net.minecraft.world.World
object RecipeUniversalTerminal extends IRecipe{
val THIS = this
val itemUniversal = ItemWirelessTerminalUniversal
override def matches(inventory : InventoryCrafting, world : World): Boolean = {
var hasWireless = false
var isUniversal = false
var hasTerminal = false
var terminals = List[TerminalType]()
var terminal: ItemStack = null
val size = inventory.getSizeInventory
    for(i <- 0 until size){
val stack = inventory.getStackInSlot(i)
if(stack != null){
val item = stack.getItem
if(item == itemUniversal){
if(hasWireless)
return false
else{
hasWireless = true
isUniversal = true
terminal = stack
}
}else if(UniversalTerminal.isWirelessTerminal(stack)){
if(hasWireless)
return false
hasWireless = true
terminal = stack
}else if(UniversalTerminal.isTerminal(stack)){
hasTerminal = true
val typeTerminal = UniversalTerminal.getTerminalType(stack)
if(terminals.contains(typeTerminal)){
return false
}else{
terminals ++= List(typeTerminal)
}
}
}
}
if(!(hasTerminal && hasWireless))
return false
if(isUniversal){
for(x <- terminals){
if(itemUniversal.isInstalled(terminal, x))
return false
}
true
}else{
val terminalType = UniversalTerminal.getTerminalType(terminal)
for(x <- terminals){
if(x == terminalType)
return false
}
true
}
}
override def getRecipeOutput: ItemStack = ItemEnum.UNIVERSALTERMINAL.getDamagedStack(0)
override def getRecipeSize: Int = 2
override def getCraftingResult(inventory : InventoryCrafting): ItemStack = {
var isUniversal = false
var terminals = List[TerminalType]()
var terminal: ItemStack = null
val size = inventory.getSizeInventory
    for(i <- 0 until size){
val stack = inventory.getStackInSlot(i)
if(stack != null){
val item = stack.getItem
if(item == itemUniversal){
isUniversal = true
terminal = stack.copy
}else if(UniversalTerminal.isWirelessTerminal(stack)){
terminal = stack.copy
}else if(UniversalTerminal.isTerminal(stack)){
val typeTerminal = UniversalTerminal.getTerminalType(stack)
terminals ++= List(typeTerminal)
}
}
}
if(isUniversal){
for(x <- terminals)
itemUniversal.installModule(terminal, x)
}else{
val terminalType = UniversalTerminal.getTerminalType(terminal)
val itemTerminal = terminal.getItem
val t = new ItemStack(itemUniversal)
if(itemTerminal.isInstanceOf[INetworkEncodable]){
val key = itemTerminal.asInstanceOf[INetworkEncodable].getEncryptionKey(terminal)
if(key != null)
itemUniversal.setEncryptionKey(t, key, null)
}
if(itemTerminal.isInstanceOf[IAEItemPowerStorage]){
val power = itemTerminal.asInstanceOf[IAEItemPowerStorage].getAECurrentPower(terminal)
itemUniversal.injectAEPower(t, power)
}
itemUniversal.installModule(t, terminalType)
t.getTagCompound.setByte("type", terminalType.ordinal.toByte)
terminal = t
for(x <- terminals)
itemUniversal.installModule(terminal, x)
}
terminal
}
}
|
ieatbedrock/Bedrocks-AE2-addons
|
src/main/scala/extracells/util/recipe/RecipeUniversalTerminal.scala
|
Scala
|
mit
| 3,912
|
package com.fsist.stream
import com.fsist.FutureTester
import org.scalatest.FunSuiteLike
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
/** Common definitions for stream test code */
trait StreamTester extends FunSuiteLike with FutureTester {
override implicit def patienceConfig = PatienceConfig(1.minute)
implicit def ec: ExecutionContext = ExecutionContext.global
val impatience = PatienceConfig(200.millis)
}
|
fsist/future-streams
|
src/test/scala/com/fsist/stream/StreamTester.scala
|
Scala
|
apache-2.0
| 453
|
package com.outr.arango
import fabric._
import scala.language.implicitConversions
package object query {
implicit class AQLInterpolator(val sc: StringContext) extends AnyVal {
def aql(args: Any*): Query = {
val strings = sc.parts.iterator
val expressions = args.iterator
var parts = List.empty[QueryPart]
while (strings.hasNext || expressions.hasNext) {
if (strings.hasNext) {
parts = QueryPart.Static(strings.next()) :: parts
}
if (expressions.hasNext) {
val part = toQueryPart(expressions.next())
parts = part :: parts
}
}
Query(parts.reverse)
}
}
implicit def string2QueryPart(s: String): QueryPart = QueryPart.Static(s)
implicit def value2QueryPart(v: fabric.Value): QueryPart = QueryPart.Variable(v)
implicit def tuple2QueryPart(t: (String, fabric.Value)): QueryPart = QueryPart.NamedVariable(t._1, t._2)
def toQueryPart(value: Any): QueryPart = value match {
case null => QueryPart.Variable(Null)
case i: Int => QueryPart.Variable(toValue(i))
case s: String => QueryPart.Variable(toValue(s))
case id: Id[_] => QueryPart.Variable(toValue(id._id))
case qp: QueryPart => qp
case seq: Seq[_] => QueryPart.Variable(arr(seq.map(toValue): _*))
case _ => throw new RuntimeException(s"Unsupported expression: $value (${value.getClass.getName})")
}
  def toValue(value: Any): Value = value match {
    case null => Null
    case s: String => str(s)
    case i: Int => num(i)
    case l: Long => num(l)
    case f: Float => num(f.toDouble)
    case d: Double => num(d)
    // fail fast with a descriptive error instead of a bare MatchError
    case _ => throw new RuntimeException(s"Unsupported value: $value (${value.getClass.getName})")
  }
}
|
outr/scarango
|
core/src/main/scala/com/outr/arango/query/package.scala
|
Scala
|
mit
| 1,623
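// Hedged usage sketch of the aql interpolator defined above: literal fragments
// become QueryPart.Static entries, while interpolated values go through
// toQueryPart (here the Int case) and are carried as QueryPart.Variable bind
// values rather than spliced in as raw text. The collection name is made up.
import com.outr.arango.query._

object AqlExample extends App {
  val minAge = 21
  val query = aql"FOR u IN users FILTER u.age > $minAge RETURN u"
  println(query) // a Query of alternating Static/Variable parts
}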
|
/**
* Copyright (C) 2013-2014 Duncan DeVore. <https://github.com/ironfish/>
*/
import sbt._
import Keys._
object Common {
def Organization = "com.github.ironfish"
def NameMongoCqrsCsApp = "mongo-cqrs-cs-app"
def NameMongoCqrsEsApp = "mongo-cqrs-es-app"
def AkkaVersion = "2.3.6"
def CrossScalaVersions = Seq("2.10.4", "2.11.4")
def EmbeddedMongoVersion = "1.46.1"
def PluginVersion = "0.7.5"
def SalatVersion = "1.9.9"
def ScalaStmVersion = "0.7"
def ScalaVersion = "2.11.4"
def ScalatestVersion = "2.2.2"
def ScalazVersion = "7.1.0"
def ParallelExecutionInTest = false
def ScalaCOptions = Seq( "-deprecation", "-unchecked", "-feature", "-language:postfixOps" )
def TestCompile = "test->test;compile->compile"
}
|
ironfish/akka-persistence-mongo-samples
|
project/Common.scala
|
Scala
|
apache-2.0
| 746
|
package io.ubiqesh.central.mqtt.encoder
import org.vertx.java.core.buffer.Buffer
import scala.annotation.tailrec
import io.ubiqesh.central.mqtt.commands._
/**
* Created by balu on 31.12.13.
*/
class Encoder {
def encodeConnack(connackCode:ConnackCode.Value):Buffer = {
//
// --------+-----+-----+-----+-----+-----+-----+-----+------+
// bit | 7 | 6 | 5 | 4 | 3 | 2 | 1 | 0 |
// --------+-----+-----+-----+-----+-----+-----+-----+------+
// byte 1 | 0 0 1 0 | x | x x | x |
// --------+-----------------------+-----+-----------+------+
// byte 2 | Remaining Length |
//---------+------------------------------------------------+
// The DUP, QoS and RETAIN flags are not used in the CONNACK message.
// MQTT V3.1 Protocol Specification - sections 3.2
val buffer = new Buffer(4)
// byte 1: 0b_0010_0000 = 0x20
buffer.appendByte(0x20.asInstanceOf[Byte])
// byte 2: remaining length = 2 => 0x02
buffer.appendByte(0x02.asInstanceOf[Byte])
// 1st byte; unused => 0x00
buffer.appendByte(0x00.asInstanceOf[Byte])
// 2nd byte: connack return code
buffer.appendByte(connackCode.id.asInstanceOf[Byte])
buffer
}
def encodePuback(messageId: Int):Buffer = {
//
// --------+-----+-----+-----+-----+-----+-----+-----+------+
// bit | 7 | 6 | 5 | 4 | 3 | 2 | 1 | 0 |
// --------+-----+-----+-----+-----+-----+-----+-----+------+
// byte 1 | 0 1 0 0 | x | x x | x |
// --------+-----------------------+-----+-----------+------+
// byte 2 | Remaining Length |
//---------+------------------------------------------------+
// The DUP, QoS and RETAIN flags are not used in the PUBACK message.
// MQTT V3.1 Protocol Specification - sections 3.4
val buffer = new Buffer(4)
// byte 1: 0b_0100_0000 = 0x40
buffer.appendByte(0x40.asInstanceOf[Byte])
// byte 2: remaining length = 2 => 0x02
buffer.appendByte(0x02.asInstanceOf[Byte])
buffer.appendShort((messageId & 0xFFFF).asInstanceOf[Short])
buffer
}
def encodePubrec(messageId: Int):Buffer = {
//
// --------+-----+-----+-----+-----+-----+-----+-----+------+
// bit | 7 | 6 | 5 | 4 | 3 | 2 | 1 | 0 |
// --------+-----+-----+-----+-----+-----+-----+-----+------+
// byte 1 | 0 1 0 1 | x | x x | x |
// --------+-----------------------+-----+-----------+------+
// byte 2 | Remaining Length |
//---------+------------------------------------------------+
// The DUP, QoS and RETAIN flags are not used in the PUBREC message.
// MQTT V3.1 Protocol Specification - sections 3.5
val buffer = new Buffer(4)
// byte 1: 0b_0101_0000 = 0x50
buffer.appendByte(0x50.asInstanceOf[Byte])
// byte 2: remaining length = 2 => 0x02
buffer.appendByte(0x02.asInstanceOf[Byte])
buffer.appendShort((messageId & 0xFFFF).asInstanceOf[Short])
buffer
}
def encodeSuback(messageId:Int, grantedQos:List[QosLevel.Value]):Buffer = {
//
// --------+-----+-----+-----+-----+-----+-----+-----+------+
// bit | 7 | 6 | 5 | 4 | 3 | 2 | 1 | 0 |
// --------+-----+-----+-----+-----+-----+-----+-----+------+
// byte 1 | 1 0 0 1 | x | x x | x |
// --------+-----------------------+-----+-----------+------+
// byte 2 | Remaining Length |
//---------+------------------------------------------------+
// The DUP, QoS and RETAIN flags are not used in the SUBACK message.
// MQTT V3.1 Protocol Specification - sections 3.9
// write payload first to calculate the 'remaining length'
val content = new Buffer(grantedQos.size + 2)
content.appendShort((messageId & 0xFFFF).asInstanceOf[Short])
grantedQos.foreach({ qos => content.appendByte( (qos.id & 0xF).asInstanceOf[Byte] )})
val len = content.length()
val header = new Buffer(2)
// byte 1: 0b_1001_0000 = 0x90
header.appendByte(0x90.asInstanceOf[Byte])
encodeRemainingLength(len, header)
val buffer = new Buffer()
buffer.appendBuffer(header)
buffer.appendBuffer(content)
buffer
}
def encodePingresp():Buffer = {
//
// --------+-----+-----+-----+-----+-----+-----+-----+------+
// bit | 7 | 6 | 5 | 4 | 3 | 2 | 1 | 0 |
// --------+-----+-----+-----+-----+-----+-----+-----+------+
    //  byte 1   |  1     1     0     1  |  x  |  x     x  |  x   |
// --------+-----------------------+-----+-----------+------+
// byte 2 | Remaining Length |
//---------+------------------------------------------------+
// The DUP, QoS and RETAIN flags are not used in the PINGRESP message.
    // MQTT V3.1 Protocol Specification - section 3.13
val header = new Buffer(2)
    // byte 1: 0b_1101_0000 = 0xD0
header.appendByte(0xD0.asInstanceOf[Byte])
    // byte 2: remaining length = 0 => 0x00
header.appendByte(0x00.asInstanceOf[Byte])
val buffer = new Buffer()
buffer.appendBuffer(header)
buffer
}
def encodeUnsuback(messageId: Int):Buffer = {
//
// --------+-----+-----+-----+-----+-----+-----+-----+------+
// bit | 7 | 6 | 5 | 4 | 3 | 2 | 1 | 0 |
// --------+-----+-----+-----+-----+-----+-----+-----+------+
    //  byte 1   |  1     0     1     1  |  x  |  x     x  |  x   |
// --------+-----------------------+-----+-----------+------+
// byte 2 | Remaining Length |
//---------+------------------------------------------------+
// The DUP, QoS and RETAIN flags are not used in the UNSUBACK message.
    // MQTT V3.1 Protocol Specification - section 3.11
val buffer = new Buffer(4)
    // byte 1: 0b_1011_0000 = 0xB0
buffer.appendByte(0xB0.asInstanceOf[Byte])
// byte 2: remaining length = 2 => 0x02
buffer.appendByte(0x02.asInstanceOf[Byte])
// variable header:
// Contains the Message Identifier (Message ID) for the PUBLISH
// message that is being acknowledged.
buffer.appendShort((messageId & 0xFFFF).asInstanceOf[Short]) // 16-bit unsigned integer
buffer
}
def encodePublish(messageId: Option[Int], topic:String, payload: Array[Byte]):Buffer = {
//
// --------+-----+-----+-----+-----+-----+-----+-----+------+
// bit | 7 | 6 | 5 | 4 | 3 | 2 | 1 | 0 |
// --------+-----+-----+-----+-----+-----+-----+-----+------+
    //  byte 1   |  0     0     1     1  |  x  |  x     x  |  x   |
// --------+-----------------------+-----+-----------+------+
// byte 2 | Remaining Length |
//---------+------------------------------------------------+
    // The DUP, QoS and RETAIN flags do carry meaning in the PUBLISH message;
    // this encoder fixes them all to 0 (QoS 0, no DUP, no RETAIN).
    // MQTT V3.1 Protocol Specification - section 3.3
val content = new Buffer()
content.appendShort((messageId.getOrElse(0) & 0xFFFF).asInstanceOf[Short])
content.appendString(topic)
content.appendBytes(payload)
val len = content.length()
val header = new Buffer(2)
    // byte 1: 0b_0011_0000 = 0x30
header.appendByte(0x30.asInstanceOf[Byte])
encodeRemainingLength(len, header)
val buffer = new Buffer()
buffer.appendBuffer(header)
buffer.appendBuffer(content)
buffer
}
  def encodeRemainingLength(remainingLength: Long, buffer: Buffer): Unit = {
    @tailrec def encodeRemainingLength0(x: Long): Unit = {
val digit = (x % 128).asInstanceOf[Int]
val newX = (x / 128)
if (newX > 0) {
buffer.appendByte( (digit | 0x80 ).asInstanceOf[Byte] )
encodeRemainingLength0(newX)
}
else {
buffer.appendByte( digit.asInstanceOf[Byte] )
}
}
encodeRemainingLength0(remainingLength)
}
}
|
ubiqesh/ubiqesh
|
central/src/main/scala/io/ubiqesh/central/mqtt/encoder/Encoder.scala
|
Scala
|
apache-2.0
| 8,011
|
package nodes.learning
import breeze.linalg._
import breeze.stats.distributions.{ThreadLocalRandomGenerator, RandBasis, Multinomial}
import breeze.stats.mean
import org.apache.commons.math3.random.MersenneTwister
import org.apache.spark.rdd.RDD
import pipelines.Logging
import workflow.{Estimator, Transformer}
import utils.MatrixUtils
/**
* A KMeans assigning transformer
* @param means matrix of dimension numClusters by numFeatures
*/
case class KMeansModel(means: DenseMatrix[Double]) extends Transformer[DenseVector[Double], DenseVector[Double]] {
/**
* Returns the assignment of each vector to the nearest cluster.
*/
def apply(in: DenseVector[Double]): DenseVector[Double] = {
// TODO: Could maybe do more efficient single-item implementation
apply(in.asDenseMatrix).flatten()
}
def apply(in: DenseMatrix[Double]): DenseMatrix[Double] = {
val XSqNormHlf: DenseVector[Double] = (sum(in :* in, Axis._1) :*= 0.5)
/* compute the distance to all of the centers and assign each point to its nearest center. */
val sqDistToCenters = in * means.t
sqDistToCenters :*= -1.0
sqDistToCenters(::, *) += XSqNormHlf
sqDistToCenters(*, ::) += (sum(means :* means, Axis._1) :*= 0.5)
/*
sqDistToCenters is numExamples by numCenters. This argmin uses Breeze broadcasting to find
the column index with the smallest value for each row (aka the nearest center for that example).
nearestCenter is a vector of size numExamples.
*/
val nearestCenter = argmin(sqDistToCenters(*, ::))
/*
Now we construct a center assignments matrix.
    It is a binary numExamples by numCenters matrix that has the value 1.0 at a cell
if that center (column) is the closest center to that example (row), and 0.0 if it is not.
We reuse the previous (potentially large) matrix to minimize memory allocation.
*/
val centerAssign = sqDistToCenters
var row: Int = 0
while (row < in.rows) {
var col: Int = 0
while (col < means.rows) {
centerAssign(row, col) = 0.0
col += 1
}
centerAssign(row, nearestCenter(row)) = 1.0
row += 1
}
centerAssign
}
override def apply(in: RDD[DenseVector[Double]]): RDD[DenseVector[Double]] = {
in.mapPartitions { partition =>
MatrixUtils.rowsToMatrixIter(partition).flatMap { mat =>
val assignments = apply(mat)
MatrixUtils.matrixToRowArray(assignments).iterator
}
}
}
}
/**
* Trains a k-means++ transformer
*
 * If you run for one round, this is the same as the k-means++
* initialization. If you run for more rounds, you are running Lloyd's
* algorithm with the k-means++ initialization scheme.
*
* @param numMeans
* @param maxIterations
* @param stopTolerance Tolerance used to decide when to terminate Lloyd's algorithm
*/
case class KMeansPlusPlusEstimator(
numMeans: Int,
maxIterations: Int,
stopTolerance: Double = 1e-3,
seed: Int = 0)
extends Estimator[DenseVector[Double], DenseVector[Double]] with Logging {
def fit(data: RDD[DenseVector[Double]]): KMeansModel = {
val X = MatrixUtils.rowsToMatrix(data.collect())
fit(X)
}
def fit(X: DenseMatrix[Double]): KMeansModel = {
val numSamples = X.rows
val numFeatures = X.cols
val XSqNormHlf: DenseVector[Double] = sum(X :* X, Axis._1) / 2.0
val centers = Array.fill(numMeans)(0)
// Not happy about marking this implicit, but Breeze Multinomial only takes a RandBasis as an implicit w/ a
// whole bunch of other implicit things
implicit val implicitRand = new RandBasis(new ThreadLocalRandomGenerator(new MersenneTwister(seed)))
centers(0) = Multinomial(DenseVector.fill(numSamples, 1.0/numSamples)).draw()
var curSqDistanceToClusters: DenseVector[Double] = null
var k = 0
while (k < (numMeans - 1)) {
val curCenter = X(centers(k), ::)
val curCenterNorm = norm(curCenter, 2)
// slick vectorized code to compute the distance to the current center
val sqDistToNewCenter = (XSqNormHlf - (X * curCenter.t)) += (0.5 * curCenterNorm * curCenterNorm)
curSqDistanceToClusters = if (k > 0) {
min(sqDistToNewCenter, curSqDistanceToClusters)
} else {
sqDistToNewCenter
}
// add a new center by the k-means++ rule
centers(k + 1) = Multinomial(max(curSqDistanceToClusters, 0.0)).draw()
k += 1
}
var kMeans = X(centers.toSeq, ::).toDenseMatrix
val curCost = DenseVector.zeros[Double](maxIterations)
var iter = 0
var costImproving = true
while ((iter < maxIterations) && costImproving) {
/* compute the distance to all of the centers and assign each point to its
nearest center. (Again, mad slick and vectorized). */
val sqDistToCenters = X * kMeans.t
sqDistToCenters :*= -1.0
sqDistToCenters(::, *) += XSqNormHlf
sqDistToCenters(*, ::) += (sum(kMeans :* kMeans, Axis._1) :*= 0.5)
val bestDist = min(sqDistToCenters(*, ::))
curCost(iter) = mean(bestDist)
/*
sqDistToCenters is numExamples by numCenters. This argmin uses Breeze broadcasting to find
the column index with the smallest value for each row (aka the nearest center for that example).
nearestCenter is a vector of size numExamples.
*/
val nearestCenter = argmin(sqDistToCenters(*, ::))
/*
Now we construct a center assignments matrix.
      It is a binary numExamples by numCenters matrix that has the value 1.0 at a cell
if that center (column) is the closest center to that example (row), and 0.0 if it is not.
We reuse the previous (potentially large) matrix to minimize memory allocation.
*/
val centerAssign = sqDistToCenters
var row: Int = 0
while (row < numSamples) {
var col: Int = 0
while (col < numMeans) {
centerAssign(row, col) = 0.0
col += 1
}
centerAssign(row, nearestCenter(row)) = 1.0
row += 1
}
val assignMass = sum(centerAssign, Axis._0).toDenseVector
kMeans = centerAssign.t * X
kMeans(::, *) :/= assignMass
if (iter > 0) {
costImproving = (curCost(iter - 1) - curCost(iter)) >= stopTolerance * math.abs(curCost(iter - 1))
logInfo("Iteration: " + iter + " current cost " + curCost(iter) + " imp " + costImproving)
}
iter += 1
}
KMeansModel(kMeans)
}
}
|
tomerk/keystone
|
src/main/scala/nodes/learning/KMeansPlusPlus.scala
|
Scala
|
apache-2.0
| 6,432
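// The vectorized distance trick used above, checked on plain arrays: for each
// example x and center c the code forms 0.5*||x||^2 - x.c + 0.5*||c||^2, which
// equals 0.5*||x - c||^2, so halving every term leaves the argmin over centers
// unchanged. A minimal, dependency-free check of that identity:
object HalfSquaredDistanceCheck extends App {
  def dot(a: Array[Double], b: Array[Double]): Double =
    a.indices.map(i => a(i) * b(i)).sum

  val x = Array(1.0, 2.0, 3.0)
  val c = Array(0.5, -1.0, 4.0)

  val diff = x.indices.map(i => x(i) - c(i)).toArray
  val direct = 0.5 * dot(diff, diff)
  val expanded = 0.5 * dot(x, x) - dot(x, c) + 0.5 * dot(c, c)

  println(direct)    // 5.125
  println(expanded)  // 5.125, same value via the expansion
}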
|
package org.jetbrains.plugins.scala.annotator
import com.intellij.codeInsight.daemon.HighlightDisplayKey
import com.intellij.codeInsight.intention.IntentionAction
import com.intellij.codeInspection.{LocalQuickFix, ProblemDescriptor, ProblemHighlightType}
import com.intellij.lang.ASTNode
import com.intellij.lang.annotation.{AnnotationBuilder, ProblemGroup}
import com.intellij.openapi.editor.colors.TextAttributesKey
import com.intellij.openapi.editor.markup.{GutterIconRenderer, TextAttributes}
import com.intellij.openapi.util.TextRange
import com.intellij.psi.PsiElement
class ScalaAnnotationBuilderAdapter(annotationBuilder: AnnotationBuilder)
extends ScalaAnnotationBuilder {
private var rangeTransformer: TextRange => TextRange = identity
private implicit def toScalaFixBuilder(fixBuilder: AnnotationBuilder.FixBuilder): ScalaAnnotationBuilder.FixBuilder =
new ScalaFixBuilderAdapter(fixBuilder)
override def setRangeTransformer(transformer: TextRange => TextRange): this.type = {
rangeTransformer = transformer
this
}
override def range(range: TextRange): this.type = {
annotationBuilder.range(rangeTransformer(range))
this
}
override def range(element: ASTNode): this.type = {
annotationBuilder.range(rangeTransformer(element.getTextRange))
this
}
override def range(element: PsiElement): this.type = {
annotationBuilder.range(rangeTransformer(element.getTextRange))
this
}
override def afterEndOfLine: this.type = {
annotationBuilder.afterEndOfLine()
this
}
override def fileLevel: this.type = {
annotationBuilder.fileLevel()
this
}
override def gutterIconRenderer(gutterIconRenderer: GutterIconRenderer): this.type = {
annotationBuilder.gutterIconRenderer(gutterIconRenderer)
this
}
override def problemGroup(problemGroup: ProblemGroup): this.type = {
annotationBuilder.problemGroup(problemGroup)
this
}
override def enforcedTextAttributes(enforcedAttributes: TextAttributes): this.type = {
annotationBuilder.enforcedTextAttributes(enforcedAttributes)
this
}
override def textAttributes(enforcedAttributes: TextAttributesKey): this.type = {
annotationBuilder.textAttributes(enforcedAttributes)
this
}
override def highlightType(highlightType: ProblemHighlightType): this.type = {
annotationBuilder.highlightType(highlightType)
this
}
override def tooltip(tooltip: String): this.type = {
annotationBuilder.tooltip(tooltip)
this
}
override def needsUpdateOnTyping: this.type = {
annotationBuilder.needsUpdateOnTyping()
this
}
override def needsUpdateOnTyping(value: Boolean): this.type = {
annotationBuilder.needsUpdateOnTyping(value)
this
}
override def withFix(fix: IntentionAction): this.type = {
annotationBuilder.withFix(fix)
this
}
override def newFix(fix: IntentionAction): ScalaAnnotationBuilder.FixBuilder =
annotationBuilder.newFix(fix)
override def newLocalQuickFix(fix: LocalQuickFix, problemDescriptor: ProblemDescriptor): ScalaAnnotationBuilder.FixBuilder =
annotationBuilder.newLocalQuickFix(fix, problemDescriptor)
override def create(): Unit =
annotationBuilder.create()
class ScalaFixBuilderAdapter(val fixBuilder: AnnotationBuilder.FixBuilder)
extends ScalaAnnotationBuilder.FixBuilder {
override def range(range: TextRange): ScalaAnnotationBuilder.FixBuilder = fixBuilder.range(range)
override def key(key: HighlightDisplayKey): ScalaAnnotationBuilder.FixBuilder = fixBuilder.key(key)
override def batch: ScalaAnnotationBuilder.FixBuilder = fixBuilder.batch()
override def universal: ScalaAnnotationBuilder.FixBuilder = fixBuilder.universal()
override def registerFix: ScalaAnnotationBuilderAdapter.this.type = {
fixBuilder.registerFix()
ScalaAnnotationBuilderAdapter.this
}
}
}
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/annotator/ScalaAnnotationBuilderAdapter.scala
|
Scala
|
apache-2.0
| 3,888
|
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs105.boxes
import uk.gov.hmrc.ct.accounts.frs105.retriever.Frs105AccountsBoxRetriever
import uk.gov.hmrc.ct.accounts.{AccountsMoneyValidationFixture, MockFrs105AccountsRetriever}
class AC461Spec extends AccountsMoneyValidationFixture[Frs105AccountsBoxRetriever] with MockFrs105AccountsRetriever {
testAccountsMoneyValidationWithMin("AC461", minValue = 0, AC461)
}
|
pncampbell/ct-calculations
|
src/test/scala/uk/gov/hmrc/ct/accounts/frs105/boxes/AC461Spec.scala
|
Scala
|
apache-2.0
| 1,009
|
package com.lyrx.latex
import java.io.File
import com.lyrx.text.StringGenerator._
import com.lyrx.text._
/**
* Created by alex on 17.10.16.
*/
trait PDFGenerator extends LaTeX {
def pdf(aFileName: String )(implicit coll:Collector[String],context: Context): Either[File, String] = {
new StringSerializer(coll.content()).serialize(aFileName)
LTXPDFProcessor()(context.outputIsInput()).process(aFileName).output()
}
def pdf2(aFileName: String )(implicit coll:Collector[String], context: Context): Either[File, String] = {
new StringSerializer(coll.content()).serialize(aFileName)
LTXPDFProcessor()(context.outputIsInput()).process(aFileName).process(aFileName).
output()
}
}
|
lyrx/lyrxgenerator
|
src/main/scala/com/lyrx/latex/PDFGenerator.scala
|
Scala
|
gpl-3.0
| 717
|
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.spi
import org.mockito.Matchers._
import org.mockito.Mockito.when
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.neo4j.cypher.internal.compiler.v2_3.InternalQueryStatistics
import org.neo4j.cypher.internal.compiler.v2_3.spi.SchemaTypes.{IndexDescriptor, NodePropertyExistenceConstraint, RelationshipPropertyExistenceConstraint, UniquenessConstraint}
import org.neo4j.cypher.internal.frontend.v2_3.test_helpers.CypherFunSuite
import org.neo4j.graphdb.{Node, Relationship}
class UpdateCountingQueryContextTest extends CypherFunSuite {
val inner = mock[QueryContext]
val nodeA = mock[Node]
val nodeB = mock[Node]
val nodeAId = 666
val rel = mock[Relationship]
val relId = 42
val nodeOps = mock[Operations[Node]]
val relOps = mock[Operations[Relationship]]
when(inner.nodeOps).thenReturn(nodeOps)
when(inner.relationshipOps).thenReturn(relOps)
// We need to have the inner mock return the right counts for added/removed labels.
when( inner.setLabelsOnNode(anyLong(), any()) ).thenAnswer( new Answer[Int]() {
def answer(invocation: InvocationOnMock):Int = {
invocation.getArguments()(1).asInstanceOf[Iterator[String]].size
}
} )
when( inner.removeLabelsFromNode(anyLong(), any()) ).thenAnswer( new Answer[Int]() {
def answer(invocation: InvocationOnMock):Int = {
invocation.getArguments()(1).asInstanceOf[Iterator[String]].size
}
} )
when( inner.createUniqueConstraint(anyInt(), anyInt()) )
.thenReturn(IdempotentResult(mock[UniquenessConstraint]))
when( inner.createNodePropertyExistenceConstraint(anyInt(), anyInt()) )
.thenReturn(IdempotentResult(mock[NodePropertyExistenceConstraint]))
when( inner.createRelationshipPropertyExistenceConstraint(anyInt(), anyInt()) )
.thenReturn(IdempotentResult(mock[RelationshipPropertyExistenceConstraint]))
when( inner.addIndexRule(anyInt(), anyInt()) )
.thenReturn(IdempotentResult(mock[IndexDescriptor]))
var context: UpdateCountingQueryContext = null
override def beforeEach() {
super.beforeEach()
context = new UpdateCountingQueryContext(inner)
}
test("create_node") {
context.createNode()
context.getStatistics should equal(InternalQueryStatistics(nodesCreated = 1))
}
test("delete_node") {
context.nodeOps.delete(nodeA)
context.getStatistics should equal(InternalQueryStatistics(nodesDeleted = 1))
}
test("create_relationship") {
context.createRelationship(nodeA, nodeB, "FOO")
context.getStatistics should equal(InternalQueryStatistics(relationshipsCreated = 1))
}
test("delete_relationship") {
context.relationshipOps.delete(rel)
context.getStatistics should equal(InternalQueryStatistics(relationshipsDeleted = 1))
}
test("set_property") {
context.nodeOps.setProperty(nodeAId, 1, "value")
context.getStatistics should equal(InternalQueryStatistics(propertiesSet = 1))
}
test("remove_property") {
context.nodeOps.removeProperty(nodeAId, context.getPropertyKeyId("key"))
context.getStatistics should equal(InternalQueryStatistics(propertiesSet = 1))
}
test("set_property_relationship") {
context.relationshipOps.setProperty(relId, 1, "value")
context.getStatistics should equal(InternalQueryStatistics(propertiesSet = 1))
}
test("remove_property_relationship") {
context.relationshipOps.removeProperty(relId, context.getPropertyKeyId("key"))
context.getStatistics should equal(InternalQueryStatistics(propertiesSet = 1))
}
//
// test("add_label") {
// context.setLabelsOnNode(0l, Seq(1, 2, 3).iterator)
//
// context.getStatistics should equal(QueryStatistics(labelsAdded = 3))
// }
test("remove_label") {
    context.removeLabelsFromNode(0L, Seq(1, 2, 3).iterator)
context.getStatistics should equal(InternalQueryStatistics(labelsRemoved = 3))
}
test("add_index") {
context.addIndexRule(0, 1)
context.getStatistics should equal(InternalQueryStatistics(indexesAdded = 1))
}
test("remove_index") {
context.dropIndexRule(0, 1)
context.getStatistics should equal(InternalQueryStatistics(indexesRemoved = 1))
}
test("create_unique_constraint") {
context.createUniqueConstraint(0, 1)
context.getStatistics should equal(InternalQueryStatistics(uniqueConstraintsAdded = 1))
}
test("constraint_dropped") {
context.dropUniqueConstraint(0, 42)
context.getStatistics should equal(InternalQueryStatistics(uniqueConstraintsRemoved = 1))
}
test("create node property existence constraint") {
context.createNodePropertyExistenceConstraint(0, 1)
context.getStatistics should equal(InternalQueryStatistics(existenceConstraintsAdded = 1))
}
test("drop node property existence constraint") {
context.dropNodePropertyExistenceConstraint(0, 42)
context.getStatistics should equal(InternalQueryStatistics(existenceConstraintsRemoved = 1))
}
test("create rel property existence constraint") {
context.createRelationshipPropertyExistenceConstraint(0, 42)
context.getStatistics should equal(InternalQueryStatistics(existenceConstraintsAdded = 1))
}
test("drop rel property existence constraint") {
context.dropRelationshipPropertyExistenceConstraint(0, 1)
context.getStatistics should equal(InternalQueryStatistics(existenceConstraintsRemoved = 1))
}
}
|
HuangLS/neo4j
|
community/cypher/cypher-compiler-2.3/src/test/scala/org/neo4j/cypher/internal/compiler/v2_3/spi/UpdateCountingQueryContextTest.scala
|
Scala
|
apache-2.0
| 6,211
|
package org.openurp.edu.eams.system.firstlogin.impl
import org.beangle.security.codec.EncryptUtil
class VerifyEmailKeyGenerator {
private var secretKey: String = "Eams verify key"
def generate(email: String): String = EncryptUtil.encode(email + secretKey)
def verify(email: String, digest: String): Boolean = generate(email) == digest
}
|
openurp/edu-eams-webapp
|
web/src/main/scala/org/openurp/edu/eams/system/firstlogin/impl/VerifyEmailKeyGenerator.scala
|
Scala
|
gpl-3.0
| 350
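// Hedged usage sketch for the generator above: EncryptUtil.encode (from
// beangle-security) is assumed to be a deterministic digest, which is what
// makes verify-by-recomputation work. The email address is made up.
import org.openurp.edu.eams.system.firstlogin.impl.VerifyEmailKeyGenerator

object VerifyEmailKeyExample extends App {
  val generator = new VerifyEmailKeyGenerator
  val email = "user@example.com"
  val digest = generator.generate(email)                  // e.g. embedded in a verification link
  println(generator.verify(email, digest))                // true: same email, same digest
  println(generator.verify("other@example.com", digest))  // false
}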
|
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package toolkit.neuralnetwork.util
import libcog._
object Norm {
def L1(left: Field, right: Field): Field = {
val fieldErr = reduceSum(abs(left - right)) / left.tensorShape.points.toFloat
fieldReduceSum(fieldErr) / right.fieldShape.points.toFloat
}
}
|
hpe-cct/cct-nn
|
src/main/scala/toolkit/neuralnetwork/util/Norm.scala
|
Scala
|
apache-2.0
| 892
|
package scala.lms
package epfl
package test9
import common._
import test1._
import test7.{Print,PrintExp,ScalaGenPrint}
import internal.ScalaCompile
import scala.util.continuations._
import util.OverloadHack
import java.io.{PrintWriter,StringWriter,FileOutputStream}
trait CpsProg1 extends LiftPrimitives with PrimitiveOps with IfThenElse with Equal with Print with Compile {
def choose[A:Typ](x: Rep[Boolean]): Boolean @cps[Rep[A]] = shift { k: (Boolean => Rep[A]) =>
if (x)
k(true)
else
k(false)
}
def test(x: Rep[Boolean]): Rep[Unit] = { // recompile
reset {
val c = choose[Unit](x)
if (c) {
this.print("is true")
} else {
this.print("is false")
}
}
}
}
trait CpsProg2 extends LiftPrimitives with PrimitiveOps with IfThenElse with Equal with Print with Compile {
def choose[A:Typ](x: Rep[Boolean]): Boolean @cps[Rep[A]] = shift { k: (Boolean => Rep[A]) =>
if (x)
k(true)
else
k(false)
}
def pickValue[A:Typ](x: Rep[Boolean]): Rep[Int] @cps[Rep[A]] = {
val c = choose[A](x)
if (c)
unit(7)
else
unit(9)
}
def test(x: Rep[Boolean]): Rep[Unit] = { // recompile
reset {
val z = pickValue[Unit](x)
this.print(z)
}
}
}
trait AmbProg1 extends LiftPrimitives with PrimitiveOps with IfThenElse with Equal with Print with Compile {
//def __ifThenElse[T:Typ,U](cond: Rep[Boolean], thenp: => Rep[T]@cps[U], elsep: => Rep[T]@cps[U]): Rep[T]@cps[U] = cond match { case true => thenp case false => elsep }
// xs could be either Rep[List[T]] or List[Rep[T]]
// if List[Rep[T]], code paths could be duplicated or not...
// this is the BAM variant of AMB: be careful, it can cause code explosion.
def amb[T](xs: List[Rep[T]]): Rep[T] @cps[Rep[Unit]] = shift { k =>
xs foreach k
}
def require(x: Rep[Boolean]): Rep[Unit] @cps[Rep[Unit]] = shift { k: (Rep[Unit]=>Rep[Unit]) =>
if (x) k() else ()
}
def test(x: Rep[Int]): Rep[Unit] = {
reset {
val a = amb(List(unit(1),unit(2),x))
val b = amb(List(unit(1),unit(2),unit(3)))
require(a == b)
this.print("found:")
this.print(a)
this.print(b)
}
()
/*
def joins(s1:String, s2:String) = s1.endsWith(s2.substring(0,1))
val w1 = amb(List("the","that","a"))
val w2 = amb(List("frog","elephant","thing"))
val w3 = amb(List("walked","treaded","grows"))
val w4 = amb(List("slowly","quickly"))
require(joins(w1,w2))
require(joins(w2,w3))
require(joins(w3,w4))
yld(List(w1,w2,w3,w4))
// result: that thing grows slowly
*/
/*
val i = amb(low to high)
val j = amb(i to high)
val k = amb(j to high)
require(i*i + j*j == k*k)
yld((i,j,k))
//output using (low=1,high=20):
// (3,4,5)
// (5,12,13)
// (6,8,10)
// (8,15,17)
// (9,12,15)
// (12,16,20)
*/
}
}
trait AmbProg2 extends AmbProg1 {
override def test(x: Rep[Int]): Rep[Unit] = {
reset {
val a = amb(List(unit(1),unit(2),unit(3),unit(4)))
val b = amb(List(unit(1),unit(2),unit(3),unit(4)))
val c = amb(List(unit(1),unit(2),unit(3),unit(4)))
require(a != b)
require(b != c)
// require(c != a)
this.print("found:")
this.print(a)
this.print(b)
this.print(c)
}
()
}
}
class TestCPS extends FileDiffSuite {
val prefix = home + "test-out/epfl/test9-"
def testCps1 = {
withOutFile(prefix+"cps1") {
new CpsProg1 with CoreOpsPkgExp with EqualExp with IfThenElseExp with PrintExp with ScalaCompile { self =>
val codegen = new ScalaGenPrimitiveOps with ScalaGenEqual with ScalaGenIfThenElse with ScalaGenPrint { val IR: self.type = self }
//override def compile
codegen.emitSource(test, "Test", new PrintWriter(System.out))
}
}
assertFileEqualsCheck(prefix+"cps1")
}
def testCps2 = {
withOutFile(prefix+"cps2") {
new CpsProg2 with CoreOpsPkgExp with EqualExp with IfThenElseExp with PrintExp with ScalaCompile { self =>
val codegen = new ScalaGenPrimitiveOps with ScalaGenEqual with ScalaGenIfThenElse with ScalaGenPrint { val IR: self.type = self }
//override def compile
codegen.emitSource(test, "Test", new PrintWriter(System.out))
}
}
assertFileEqualsCheck(prefix+"cps2")
}
def testAmb1a = {
withOutFile(prefix+"amb1a") {
new AmbProg1 with CoreOpsPkgExp with EqualExp with IfThenElseExp with PrintExp with ScalaCompile { self =>
val codegen = new ScalaGenPrimitiveOps with ScalaGenEqual with ScalaGenIfThenElse with ScalaGenPrint { val IR: self.type = self }
//override def compile
codegen.emitSource(test, "Test", new PrintWriter(System.out))
}
}
assertFileEqualsCheck(prefix+"amb1a")
}
def testAmb1b = {
withOutFile(prefix+"amb1b") {
new AmbProg1 with CoreOpsPkgExp with EqualExpOpt with IfThenElseExpOpt with BooleanOpsExp with PrintExp with ScalaCompile { self =>
val codegen = new ScalaGenPrimitiveOps with ScalaGenEqual with ScalaGenIfThenElse with ScalaGenPrint { val IR: self.type = self }
//override def compile
codegen.emitSource(test, "Test", new PrintWriter(System.out))
}
}
assertFileEqualsCheck(prefix+"amb1b")
}
def testAmb2a = {
withOutFile(prefix+"amb2a") {
new AmbProg2 with CoreOpsPkgExp with EqualExp with IfThenElseExp with PrintExp with ScalaCompile { self =>
val codegen = new ScalaGenPrimitiveOps with ScalaGenEqual with ScalaGenIfThenElse with ScalaGenPrint { val IR: self.type = self }
//override def compile
codegen.emitSource(test, "Test", new PrintWriter(System.out))
}
}
assertFileEqualsCheck(prefix+"amb2a")
}
def testAmb2b = {
withOutFile(prefix+"amb2b") {
new AmbProg2 with CoreOpsPkgExp with EqualExpOpt with IfThenElseExpOpt with BooleanOpsExp with PrintExp with ScalaCompile { self =>
val codegen = new ScalaGenPrimitiveOps with ScalaGenEqual with ScalaGenIfThenElse with ScalaGenPrint { val IR: self.type = self }
//override def compile
codegen.emitSource(test, "Test", new PrintWriter(System.out))
}
}
assertFileEqualsCheck(prefix+"amb2b")
}
}
|
astojanov/virtualization-lms-core
|
test-src/epfl/test9-experimental/TestCPS.scala
|
Scala
|
bsd-3-clause
| 6,323
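// What the staged amb/require search above computes, restated without LMS or
// delimited continuations: amb over a list is nondeterministic choice, and
// require prunes branches, so AmbProg2's search over distinct neighbours is
// equivalent to this plain list enumeration.
object AmbSemanticsExample extends App {
  val choices = List(1, 2, 3, 4)
  val found = for {
    a <- choices
    b <- choices
    c <- choices
    if a != b && b != c
  } yield (a, b, c)
  found.foreach { case (a, b, c) => println(s"found: $a $b $c") }
}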
|
package se.uu.farmbio.cp
import org.apache.spark.mllib.linalg.Vector
import org.apache.spark.mllib.regression.LabeledPoint
abstract class UnderlyingAlgorithm(
val predictor: (Vector => Double)) extends Serializable {
def nonConformityMeasure(newSample: LabeledPoint): Double
}
trait Deserializer[A <: UnderlyingAlgorithm] {
def deserialize(alg: String): A
}
|
mcapuccini/spark-cp
|
cp/src/main/scala/se/uu/farmbio/cp/UnderlyingAlgorithm.scala
|
Scala
|
apache-2.0
| 368
|
val x = play {
// RandSeed.ir(trig = 1, seed = 56789.0)
val gbmanL_0 = GbmanL.ar(freq = 419.73846, xi = 1.2, yi = 0.00788784)
val gbmanL_1 = GbmanL.ar(freq = 9.444879E-4, xi = 92.88581, yi = -953.0853)
val freq_0 = gbmanL_1.cos
val gbmanL_2 = GbmanL.ar(freq = freq_0, xi = 395.79254, yi = 395.79254)
val setResetFF = SetResetFF.ar(trig = 11.399926, reset = 13.234592)
val formFreq = Trig1.ar(395.79254, dur = setResetFF)
val formant = Formant.ar(fundFreq = 0.0015142808, formFreq = formFreq, bw = -2029.8915)
val fBSineN = FBSineN.ar(freq = -2029.8915, im = formant, fb = -0.0029116, a = -2029.8915, c = -0.0076407013, xi = 4039.796, yi = 338.5671)
val b = FreeVerb.ar(fBSineN, mix = 0.00648538, room = 3.0152974, damp = -2029.8915)
val leastChange = LeastChange.ar(a = fBSineN, b = b)
val bRF_0 = BRF.ar(0.00648538, freq = -0.0029116, rq = 419.73846)
val lFGauss = LFGauss.ar(dur = -0.0029116, width = 0.00648538, phase = 92.88581, loop = 11.399926, doneAction = doNothing)
val bBandStop = BBandStop.ar(0.00788784, freq = 0.00648538, bw = -0.11045894)
val latoocarfianL = LatoocarfianL.ar(freq = 0.00648538, a = formant, b = 11.399926, c = 695.37335, d = bBandStop, xi = 107.30127, yi = 4073.4182)
val freq_1 = RLPF.ar(3.0152974, freq = 325.59705, rq = -2029.8915)
val lFDNoise1 = LFDNoise1.ar(freq_1)
val standardN = StandardN.ar(freq = freq_1, k = 107.30127, xi = 0.004496857, yi = 3.0152974)
val bRF_1 = BRF.ar(setResetFF, freq = bBandStop, rq = 0.36766747)
val linXFade2 = LinXFade2.ar(inA = 0.020259222, inB = 325.59705, pan = 0.0, level = 11.399926)
val coinGate = CoinGate.ar(0.0019275966, prob = 0.00648538)
val mix_0 = Mix(Seq[GE](coinGate, linXFade2, bRF_1, standardN, lFDNoise1, latoocarfianL, lFGauss, bRF_0, leastChange, gbmanL_2, gbmanL_0))
val mono = Mix.Mono(mix_0)
val leak = LeakDC.ar(mono)
val bad = CheckBadValues.ar(leak, post = 0)
val gate = Gate.ar(leak, bad sig_== 0)
val lim = Pan2.ar(Limiter.ar(gate)) * "amp".kr(0.05) // * DelayN.ar(Line.ar(0, 1, 1), 0.2, 0.2)
Out.ar(0, lim)
}
|
Sciss/Grenzwerte
|
individual_sounds/1630_85.scala
|
Scala
|
gpl-3.0
| 2,185
|
package mesosphere.marathon
package core.launchqueue.impl
import mesosphere.UnitTest
import mesosphere.marathon.core.instance.update.InstancesSnapshot
import mesosphere.marathon.core.instance.{Instance, TestInstanceBuilder}
import mesosphere.marathon.core.launchqueue.impl.ReviveOffersStreamLogic.VersionedRoleState
import mesosphere.marathon.state.{AbsolutePathId, AppDefinition}
import org.scalatest.Inside
class ReviveOffersStateTest extends UnitTest with Inside {
val webApp = AppDefinition(id = AbsolutePathId("/test"), role = "web")
val monitoringApp = AppDefinition(id = AbsolutePathId("/test2"), role = "monitoring")
"register the existence of roles for already-running instances" in {
val webRunningInstance = TestInstanceBuilder.newBuilderForRunSpec(webApp).addTaskRunning().instance
val monitoringScheduledInstance = Instance.scheduled(monitoringApp)
val state =
ReviveOffersState.empty.withSnapshot(InstancesSnapshot(List(webRunningInstance, monitoringScheduledInstance)), defaultRole = "*")
state.roleReviveVersions("web").roleState shouldBe OffersNotWanted
state.roleReviveVersions("monitoring").roleState shouldBe OffersWanted
}
"register the existence of the default role" in {
val state = ReviveOffersState.empty.withSnapshot(InstancesSnapshot(Nil), defaultRole = "*")
state.roleReviveVersions("*").roleState shouldBe OffersNotWanted
}
"bumps the version for a role when a delay is removed" in {
val monitoringScheduledInstance = Instance.scheduled(monitoringApp)
var state = ReviveOffersState.empty
.withSnapshot(InstancesSnapshot(List(monitoringScheduledInstance)), defaultRole = "*")
val priorVersion = inside(state.roleReviveVersions("monitoring")) {
case VersionedRoleState(version, roleState) =>
roleState shouldBe OffersWanted
version
}
state = state.withDelay(monitoringApp.configRef)
state.roleReviveVersions("monitoring").roleState shouldBe OffersNotWanted
state = state.withoutDelay(monitoringApp.configRef)
inside(state.roleReviveVersions("monitoring")) {
case VersionedRoleState(version, roleState) =>
roleState shouldBe OffersWanted
version should be > priorVersion
}
}
}
|
mesosphere/marathon
|
src/test/java/mesosphere/marathon/core/launchqueue/impl/ReviveOffersStateTest.scala
|
Scala
|
apache-2.0
| 2,254
|
package lib
import com.bryzek.apidoc.api.v0.models.{Original, OriginalForm, OriginalType}
import com.bryzek.apidoc.spec.v0.models.Service
import com.bryzek.apidoc.spec.v0.models.json._
import play.api.libs.json.{Json, JsString, JsObject, JsSuccess}
import scala.util.{Failure, Success, Try}
object OriginalUtil {
def toOriginal(form: OriginalForm): Original = {
Original(
`type` = form.`type`.getOrElse(
guessType(form.data).getOrElse(OriginalType.ApiJson)
),
data = form.data
)
}
/**
* Attempts to guess the type of original based on the data
*/
def guessType(data: String): Option[OriginalType] = {
val trimmed = data.trim
if (trimmed.indexOf("protocol ") >= 0 || trimmed.indexOf("@namespace") >= 0) {
Some(OriginalType.AvroIdl)
} else if (trimmed.startsWith("{")) {
Try(
Json.parse(trimmed).asOpt[JsObject] match {
case None => None
case Some(o) => {
(o \\ "swagger").asOpt[JsString] match {
                case Some(_) => Some(OriginalType.SwaggerJson)
case None => {
o.validate[Service] match {
case JsSuccess(_, _) => Some(OriginalType.ServiceJson)
case _ => Some(OriginalType.ApiJson)
}
}
}
}
}
) match {
case Success(ot) => ot
case Failure(e) => None
}
} else {
None
}
}
}
|
Seanstoppable/apidoc
|
api/app/lib/OriginalUtil.scala
|
Scala
|
mit
| 1,468
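// A small usage sketch for OriginalUtil.guessType above; the sample inputs
// are made up to exercise each branch of the heuristic.
import lib.OriginalUtil

object GuessTypeExample extends App {
  println(OriginalUtil.guessType("protocol UserService { }")) // Some(AvroIdl)
  println(OriginalUtil.guessType("""{"swagger": "2.0"}"""))   // Some(SwaggerJson)
  println(OriginalUtil.guessType("plain text, not a spec"))   // None
}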
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600e.v2
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalBoolean, Input}
case class E1011(value: Option[Boolean]) extends CtBoxIdentifier("All exempt") with CtOptionalBoolean with Input
|
hmrc/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/ct600e/v2/E1011.scala
|
Scala
|
apache-2.0
| 823
|
package no.penger.crud
trait results extends errors {
sealed trait CrudSuccess
sealed trait CrudFailure
case class Created(mountedAt: String, table: TableName, id: Option[String]) extends CrudSuccess
case class Updated(mountedAt: String, table: TableName, column: ColumnName, row: String, oldValue: Option[String], newValue: String) extends CrudSuccess
case class Deleted(mountedAt: String, table: TableName, id: String) extends CrudSuccess
case class CreateFailed(mountedAt: String, table: TableName, ts: Seq[Error]) extends CrudFailure
case class UpdateFailed(mountedAt: String, table: TableName, column: ColumnName, id: String, value: String, e: Error) extends CrudFailure
case class DeleteFailed(mountedAt: String, table: TableName, id: String, e: Error) extends CrudFailure
}
|
pengerno/slick-crud
|
core/src/main/scala/no/penger/crud/results.scala
|
Scala
|
apache-2.0
| 801
|
package pl.writeonly.son2.funs.liners
import com.typesafe.scalalogging.LazyLogging
import pl.writeonly.son2.apis.converters.Converter
import scala.util.control.Exception._
import scala.util.{Failure, Success, Try}
class LinerWithTry(converter: Converter)
extends Liner(converter)
with LazyLogging {
def apply(line: String): String = withTry(line) match {
case Success(result) => result
case Failure(e) => {
logger.error("{} {}", converter, line)
logger.error("", e)
e.getMessage + "\\n"
}
}
def withTry(line: String): Try[String] =
catching(classOf[Exception]).withTry(convert(line))
}
|
writeonly/son2
|
scallions-core/scallions-funs/src/main/scala/pl/writeonly/son2/funs/liners/LinerWithTry.scala
|
Scala
|
apache-2.0
| 639
|
/**
* Copyright 2011-2012 eBusiness Information, Groupe Excilys (www.excilys.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.excilys.ebi.gatling.http.ahc
import java.lang.System.nanoTime
import com.ning.http.client.{ HttpResponseStatus, HttpResponseHeaders, HttpResponseBodyPart }
sealed trait HttpEvent
case class OnHeaderWriteCompleted(nanos: Long = nanoTime) extends HttpEvent
case class OnContentWriteCompleted(nanos: Long = nanoTime) extends HttpEvent
case class OnStatusReceived(responseStatus: HttpResponseStatus, nanos: Long = nanoTime) extends HttpEvent
case class OnHeadersReceived(headers: HttpResponseHeaders) extends HttpEvent
case class OnBodyPartReceived(bodyPart: Option[HttpResponseBodyPart] = None) extends HttpEvent
case class OnCompleted(nanos: Long = nanoTime) extends HttpEvent
case class OnThrowable(errorMessage: String, nanos: Long = nanoTime) extends HttpEvent
|
Tjoene/thesis
|
Case_Programs/gatling-1.4.0/gatling-http/src/main/scala/com/excilys/ebi/gatling/http/ahc/HttpEvent.scala
|
Scala
|
gpl-2.0
| 1,420
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.scala.dsl
import org.apache.camel.Exchange
/**
* A set of convenience functions for use in RouteBuilders and other Scala code interacting with Camel
*/
trait Functions {
/**
* Convenience function for extracting the 'in' message body from a Camel org.apache.camel.Exchange
*
* Can also be used as a partially applied function where the DSL requires Exchange => Any
*/
def body(exchange: Exchange) = exchange.getIn.getBody
}
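// Hedged sketch (not in the original file): because `body` takes a single
// Exchange, it eta-expands to the Exchange => Any shape the Scala DSL expects.
object FunctionsUsageSketch extends Functions {
  val extractBody: Exchange => Any = body
}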
|
YMartsynkevych/camel
|
components/camel-scala/src/main/scala/org/apache/camel/scala/dsl/Functions.scala
|
Scala
|
apache-2.0
| 1,274
|
/**
* Copyright (C) 2011 - 101loops.com <dev@101loops.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.crashnote.test.logger.unit.logback
import scala.collection.JavaConversions._
import com.crashnote.core.model.types.LogLevel
import com.crashnote.logback.impl.LogbackEvt
import ch.qos.logback.classic.Level
import ch.qos.logback.classic.spi._
import com.crashnote.test.base.defs.BaseMockSpec
class LogbackEvtSpec
extends BaseMockSpec[ILoggingEvent] {
"Logback Event" should {
"instantiate" >> {
"example 1" >> {
// mock
val m_evt = getMock(Level.ERROR)
m_evt.getThreadName returns "main"
m_evt.getArgumentArray returns Array("Bob")
m_evt.getLoggerName returns "com.example"
m_evt.getTimeStamp returns 123456789L
// execute
val r = new LogbackEvt(m_evt, null)
// verify
r.getLoggerName === "com.example"
r.getLevel === LogLevel.ERROR
r.getArgs === Array("Bob")
r.getTimeStamp === 123456789L
r.getThreadName === "main"
r.getThrowable === null
}
"example 2" >> {
val err = new RuntimeException("oops")
// mock
val m_evt = getMock(Level.WARN)
m_evt.getFormattedMessage returns "oops"
val m_thproxy = mock[ThrowableProxy]
m_evt.getThrowableProxy returns m_thproxy
m_thproxy.getThrowable returns err
// execute
val r = new LogbackEvt(m_evt, Map("test" -> "data"))
// verify
r.getThrowable === err
r.getMessage === "oops"
r.getLevel === LogLevel.WARN
//r.copy()
//r.getMDC.get("test") === "data"
}
}
"convert log level" >> {
"info" >> {
new LogbackEvt(getMock(Level.INFO)).getLevel === LogLevel.INFO
}
"error" >> {
new LogbackEvt(getMock(Level.ERROR)).getLevel === LogLevel.ERROR
}
"warn" >> {
new LogbackEvt(getMock(Level.WARN)).getLevel === LogLevel.WARN
}
"debug" >> {
new LogbackEvt(getMock(Level.DEBUG)).getLevel === LogLevel.DEBUG
new LogbackEvt(getMock(Level.TRACE)).getLevel === LogLevel.DEBUG
}
}
}
// SETUP ======================================================================================
def getMock(l: Level) = {
val m_evt = mock[ILoggingEvent]
m_evt.getLevel returns l
m_evt
}
}
|
crashnote/crashnote-java
|
modules/logger/src/test/scala/com/crashnote/test/logger/unit/logback/LogbackEvtSpec.scala
|
Scala
|
apache-2.0
| 2,929
|
package slick.ast
import slick.ast.TypeUtil.:@
import scala.language.implicitConversions
import scala.collection.mutable.ArrayBuffer
/**
* Utility methods for AST manipulation.
*/
object Util {
def mapOrNone[A <: AnyRef](c: Traversable[A])(f: A => A): Option[IndexedSeq[A]] = {
val b = new ArrayBuffer[A]
var changed = false
c.foreach { x =>
val n = f(x)
b += n
if(n ne x) changed = true
}
if(changed) Some(b.result()) else None
}
@inline implicit def nodeToNodeOps(n: Node): NodeOps = new NodeOps(n)
}
/** Extra methods for Nodes. */
final class NodeOps(val tree: Node) extends AnyVal {
import Util._
import NodeOps._
@inline def collect[T](pf: PartialFunction[Node, T], stopOnMatch: Boolean = false): Seq[T] = {
val b = new ArrayBuffer[T]
def f(n: Node): Unit = pf.andThen[Unit] { case t =>
b += t
if(!stopOnMatch) n.children.foreach(f)
}.orElse[Node, Unit]{ case _ =>
n.children.foreach(f)
}.apply(n)
f(tree)
b
}
def collectAll[T](pf: PartialFunction[Node, Seq[T]]): Seq[T] = collect[Seq[T]](pf).flatten
def replace(f: PartialFunction[Node, Node], keepType: Boolean = false, bottomUp: Boolean = false): Node = {
def g(n: Node): Node = n.mapChildren(_.replace(f, keepType, bottomUp), keepType)
if(bottomUp) f.applyOrElse(g(tree), identity[Node]) else f.applyOrElse(tree, g)
}
/** Replace nodes in a bottom-up traversal with an extra state value that gets passed through the
* traversal. Types are never kept or rebuilt when a node changes.
*
* @param f The replacement function that takes the current Node (whose children have already
* been transformed), the current state, and the original (untransformed) version of
* the Node. */
def replaceFold[T](z: T)(f: PartialFunction[(Node, T, Node), (Node, T)]): (Node, T) = {
var v: T = z
val ch: IndexedSeq[Node] = tree.children.map { n =>
val (n2, v2) = n.replaceFold(v)(f)
v = v2
n2
}(collection.breakOut)
val t2 = tree.withChildren(ch)
f.applyOrElse((t2, v, tree), (t: (Node, T, Node)) => (t._1, t._2))
}
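// Illustrative only (not in the original source): replaceFold can thread an
// accumulator through the traversal, e.g. counting Ref nodes without rewriting:
//   val (unchanged, refs) = tree.replaceFold(0) { case (n: Ref, acc, _) => (n, acc + 1) }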
/** Replace nodes in a bottom-up traversal while invalidating TypeSymbols. Any later references
* to the invalidated TypeSymbols have their types unassigned, so that the whole tree can be
* retyped afterwards to get the correct new TypeSymbols in. */
def replaceInvalidate(f: PartialFunction[(Node, Set[TypeSymbol], Node), (Node, Set[TypeSymbol])]): Node = {
replaceFold(Set.empty[TypeSymbol])(f.orElse {
case ((n: Ref), invalid, _) if containsTS(n.nodeType, invalid) => (n.untyped, invalid)
case ((n: Select), invalid, _) if containsTS(n.nodeType, invalid) => (n.untyped, invalid)
})._1
}
def untypeReferences(invalid: Set[TypeSymbol]): Node = {
if(invalid.isEmpty) tree else replace({
case n: Ref if containsTS(n.nodeType, invalid) => n.untyped
case n: Select if containsTS(n.nodeType, invalid) => n.untyped
}, bottomUp = true)
}
def findNode(p: Node => Boolean): Option[Node] = {
if(p(tree)) Some(tree)
else {
val it = tree.children.iterator.map(_.findNode(p)).dropWhile(_.isEmpty)
if(it.hasNext) it.next() else None
}
}
def select(field: TermSymbol): Node = (field, tree) match {
case (s: AnonSymbol, StructNode(ch)) => ch.find{ case (s2,_) => s == s2 }.get._2
case (s: FieldSymbol, StructNode(ch)) => ch.find{ case (s2,_) => s == s2 }.get._2
case (s: ElementSymbol, ProductNode(ch)) => ch(s.idx-1)
case (s, n) => Select(n, s)
}
}
private object NodeOps {
private def containsTS(t: Type, invalid: Set[TypeSymbol]): Boolean = {
if(invalid.isEmpty) false else t match {
case NominalType(ts, exp) => invalid.contains(ts) || containsTS(exp, invalid)
case t => t.children.exists(ch => containsTS(ch, invalid))
}
}
}
|
jkutner/slick
|
slick/src/main/scala/slick/ast/Util.scala
|
Scala
|
bsd-2-clause
| 3,909
|
package com.arcusys.valamis.web.servlet.base
import java.util
import javax.servlet.http.{HttpServletRequest, HttpServletRequestWrapper, HttpServletResponse}
import org.scalatra.servlet.ServletApiImplicits
import org.scalatra.{Delete, Handler, Patch, Put}
import scala.collection.JavaConversions._
// tomcat (with default settings, liferay bundle) does not read parameters from the body for Put | Delete | Patch
// here we read parameters
trait HTTPMethodsSupport extends Handler with ServletApiImplicits {
abstract override def handle(req: HttpServletRequest, res: HttpServletResponse): Unit = {
val req2 = req.requestMethod match {
case Put | Delete | Patch =>
if (Option(req.getContentType).exists(_.toLowerCase.contains("application/x-www-form-urlencoded"))) {
new HttpServletRequestWrapper(req) {
val bodyParams = HttpUtilsHelper.parsePostData(req.getContentLength, req.getInputStream, req.getCharacterEncoding)
override def getParameter(name: String) = {
val fromRequest = Option(req.getParameter(name))
lazy val fromBody = Option(bodyParams.get(name)).map(_.head)
fromRequest orElse fromBody orNull
}
override def getParameterNames = super.getParameterNames ++ bodyParams.keys()
override def getParameterMap = {
val paramM: util.HashMap[String, Array[String]] = new util.HashMap
(super.getParameterMap.entrySet() ++ bodyParams.entrySet())
.foreach(e => paramM.put(e.getKey.toString, e.getValue.asInstanceOf[Array[String]]))
paramM
}
}
} else req
case _ => req
}
super.handle(req2, res)
}
}
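// Hedged usage sketch (servlet and route are hypothetical): stack the trait
// onto a Scalatra servlet so PUT/DELETE/PATCH form bodies become readable params.
// class ItemServlet extends org.scalatra.ScalatraServlet with HTTPMethodsSupport {
//   put("/items/:id") { params.get("name") } // may now come from the request body
// }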
|
arcusys/Valamis
|
valamis-portlets/src/main/scala/com/arcusys/valamis/web/servlet/base/HTTPMethodsSupport.scala
|
Scala
|
gpl-3.0
| 1,730
|
package features
import org.scalatest.{BeforeAndAfter, FunSpec}
import org.github.nicholasren.moco.helper.RemoteTestHelper
import com.github.nicholasren.moco.dsl.{Conversions, SMoco}
import com.github.nicholasren.moco.dsl.SMoco._
import com.github.dreamhead.moco.MocoEventAction
import Conversions._
import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
class EventActionTest extends FunSpec with BeforeAndAfter with RemoteTestHelper with MockitoSugar {
override val port: Int = 8083
var theServer: SMoco = null
before {
theServer = server(port)
}
describe("on complete") {
it("perform predefined action") {
val action = mock[MocoEventAction]
theServer default {
text("foo")
} on {
complete(action)
}
theServer running {
assert(get(root) === "foo")
}
verify(action).execute
}
}
}
|
nicholasren/moco-scala
|
src/test/scala/features/EventActionTest.scala
|
Scala
|
mit
| 900
|
package scorex.transaction.assets.exchange
import com.google.common.primitives.{Ints, Longs}
import io.swagger.annotations.ApiModelProperty
import play.api.libs.json.{JsObject, Json}
import scorex.account.{PrivateKeyAccount, PublicKeyAccount}
import scorex.crypto.EllipticCurveImpl
import scorex.crypto.encode.Base58
import scorex.crypto.hash.FastCryptographicHash
import scorex.serialization.BytesSerializable
import scorex.transaction.TypedTransaction.TransactionType
import scorex.transaction.ValidationError.CustomValidationError
import scorex.transaction.{ValidationError, _}
import scala.util.{Failure, Success, Try}
sealed trait ExchangeTransaction extends SignedTransaction {
def buyOrder: Order
def sellOrder: Order
def price: Long
def amount: Long
def buyMatcherFee: Long
def sellMatcherFee: Long
def fee: Long
}
object ExchangeTransaction {
private case class ExchangeTransactionImpl(buyOrder: Order, sellOrder: Order, price: Long, amount: Long, buyMatcherFee: Long,
sellMatcherFee: Long, fee: Long, timestamp: Long, signature: Array[Byte])
extends ExchangeTransaction with BytesSerializable {
override val transactionType: TransactionType.Value = TransactionType.OrderMatchTransaction
override lazy val id: Array[Byte] = FastCryptographicHash(toSign)
override val assetFee: (Option[AssetId], Long) = (None, fee)
@ApiModelProperty(hidden = true)
override val sender: PublicKeyAccount = buyOrder.matcher
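// Serialized layout, mirrored by parseTail below: a type byte, two 4-byte
// order lengths, the two orders, six 8-byte longs (price, amount, both
// matcher fees, fee, timestamp), then the signature.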
lazy val toSign: Array[Byte] = Array(transactionType.id.toByte) ++
Ints.toByteArray(buyOrder.bytes.length) ++ Ints.toByteArray(sellOrder.bytes.length) ++
buyOrder.bytes ++ sellOrder.bytes ++ Longs.toByteArray(price) ++ Longs.toByteArray(amount) ++
Longs.toByteArray(buyMatcherFee) ++ Longs.toByteArray(sellMatcherFee) ++ Longs.toByteArray(fee) ++
Longs.toByteArray(timestamp)
override def bytes: Array[Byte] = toSign ++ signature
override def json: JsObject = Json.obj(
"order1" -> buyOrder.json,
"order2" -> sellOrder.json,
"price" -> price,
"amount" -> amount,
"buyMatcherFee" -> buyMatcherFee,
"sellMatcherFee" -> sellMatcherFee,
"fee" -> fee,
"timestamp" -> timestamp,
"signature" -> Base58.encode(signature)
)
override def balanceChanges(): Seq[BalanceChange] = {
val matcherChange = Seq(BalanceChange(AssetAcc(buyOrder.matcher, None), buyMatcherFee + sellMatcherFee - fee))
val buyFeeChange = Seq(BalanceChange(AssetAcc(buyOrder.sender, None), -buyMatcherFee))
val sellFeeChange = Seq(BalanceChange(AssetAcc(sellOrder.sender, None), -sellMatcherFee))
val exchange = Seq(
(buyOrder.sender, (buyOrder.spendAssetId, -buyOrder.getSpendAmount(price, amount))),
(buyOrder.sender, (buyOrder.receiveAssetId, buyOrder.getReceiveAmount(price, amount))),
(sellOrder.sender, (sellOrder.receiveAssetId, sellOrder.getReceiveAmount(price, amount))),
(sellOrder.sender, (sellOrder.spendAssetId, -sellOrder.getSpendAmount(price, amount)))
)
buyFeeChange ++ sellFeeChange ++ matcherChange ++
exchange.map(c => BalanceChange(AssetAcc(c._1, c._2._1), c._2._2))
}
}
private def createUnverified(buyOrder: Order, sellOrder: Order, price: Long, amount: Long,
buyMatcherFee: Long, sellMatcherFee: Long, fee: Long, timestamp: Long, signature: Option[Array[Byte]] = None) = {
lazy val priceIsValid: Boolean = price <= buyOrder.price && price >= sellOrder.price
if (fee <= 0) {
Left(ValidationError.InsufficientFee)
} else if (amount <= 0) {
Left(ValidationError.NegativeAmount)
} else if (price <= 0) {
Left(CustomValidationError("price should be > 0"))
} else if (price > Order.MaxAmount) {
Left(CustomValidationError("price too large"))
} else if (amount > Order.MaxAmount) {
Left(CustomValidationError("price too large"))
} else if (sellMatcherFee > Order.MaxAmount) {
Left(CustomValidationError("sellMatcherFee too large"))
} else if (buyMatcherFee > Order.MaxAmount) {
Left(CustomValidationError("buyMatcherFee too large"))
} else if (fee > Order.MaxAmount) {
Left(CustomValidationError("fee too large"))
} else if (buyOrder.orderType != OrderType.BUY) {
Left(CustomValidationError("buyOrder should has OrderType.BUY"))
} else if (sellOrder.orderType != OrderType.SELL) {
Left(CustomValidationError("sellOrder should has OrderType.SELL"))
} else if (buyOrder.matcher != sellOrder.matcher) {
Left(CustomValidationError("buyOrder.matcher should be the same as sellOrder.matcher"))
} else if (buyOrder.assetPair != sellOrder.assetPair) {
Left(CustomValidationError("Both orders should have same AssetPair"))
} else if (!buyOrder.isValid(timestamp)) {
Left(CustomValidationError("buyOrder"))
} else if (!sellOrder.isValid(timestamp)) {
Left(CustomValidationError("sellOrder"))
} else if (!priceIsValid) {
Left(CustomValidationError("priceIsValid"))
} else {
Right(ExchangeTransactionImpl(buyOrder, sellOrder, price, amount, buyMatcherFee, sellMatcherFee, fee, timestamp, signature.orNull))
}
}
def create(matcher: PrivateKeyAccount, buyOrder: Order, sellOrder: Order, price: Long, amount: Long,
buyMatcherFee: Long, sellMatcherFee: Long, fee: Long, timestamp: Long): Either[ValidationError, ExchangeTransaction] = {
createUnverified(buyOrder, sellOrder, price, amount, buyMatcherFee, sellMatcherFee, fee, timestamp).right.map { unverified =>
unverified.copy(signature = EllipticCurveImpl.sign(matcher.privateKey, unverified.toSign))
}
}
def create(buyOrder: Order, sellOrder: Order, price: Long, amount: Long,
buyMatcherFee: Long, sellMatcherFee: Long, fee: Long, timestamp: Long, signature: Array[Byte]): Either[ValidationError, ExchangeTransaction] = {
createUnverified(buyOrder, sellOrder, price, amount, buyMatcherFee, sellMatcherFee, fee, timestamp, Some(signature))
.right.flatMap(SignedTransaction.verify)
}
def parseBytes(bytes: Array[Byte]): Try[ExchangeTransaction] = Try {
require(bytes.head == TransactionType.OrderMatchTransaction.id)
parseTail(bytes.tail).get
}
def parseTail(bytes: Array[Byte]): Try[ExchangeTransaction] = Try {
import EllipticCurveImpl._
var from = 0
val o1Size = Ints.fromByteArray(bytes.slice(from, from + 4));
from += 4
val o2Size = Ints.fromByteArray(bytes.slice(from, from + 4));
from += 4
val o1 = Order.parseBytes(bytes.slice(from, from + o1Size)).get;
from += o1Size
val o2 = Order.parseBytes(bytes.slice(from, from + o2Size)).get;
from += o2Size
val price = Longs.fromByteArray(bytes.slice(from, from + 8));
from += 8
val amount = Longs.fromByteArray(bytes.slice(from, from + 8));
from += 8
val buyMatcherFee = Longs.fromByteArray(bytes.slice(from, from + 8));
from += 8
val sellMatcherFee = Longs.fromByteArray(bytes.slice(from, from + 8));
from += 8
val fee = Longs.fromByteArray(bytes.slice(from, from + 8));
from += 8
val timestamp = Longs.fromByteArray(bytes.slice(from, from + 8));
from += 8
val signature = bytes.slice(from, from + TypedTransaction.SignatureLength);
from += TypedTransaction.SignatureLength
create(o1, o2, price, amount, buyMatcherFee, sellMatcherFee, fee, timestamp, signature)
.fold(left => Failure(new Exception(left.toString)), right => Success(right))
}.flatten
}
|
B83YPoj/Waves
|
src/main/scala/scorex/transaction/assets/exchange/ExchangeTransaction.scala
|
Scala
|
apache-2.0
| 7,578
|
package tests.rescala.property
import org.scalacheck.{Arbitrary, Gen}
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
import rescala.extra.invariant.{Invariant, InvariantApi}
import tests.rescala.testtools.RETests
class InvariantsTest extends RETests with ScalaCheckDrivenPropertyChecks with Matchers {
import rescala.extra.invariant.InvariantApi._
val sched = InvariantApi.scheduler
import sched._
"expect invalid invariants to fail" in forAll(Gen.posNum[Int]) { (n: Int) =>
assertThrows[InvariantViolationException] {
val e = Evt[Int]()
val s: Signal[Int] = e.count()
s.specify(
Invariant { a => a < n }
)
1 to n foreach { i => e.fire(i) }
}
}
"correct invariants do not fail" in {
val v = Var(0)
val s1 = Signal {
v() * 2
}
val s2 = Signal {
v() * 2
}
val s3 = Signal {
s1() + s2()
}
s1.setValueGenerator(Gen.choose(0, 10))
s2.setValueGenerator(Gen.choose(0, 10))
s3.specify(
Invariant { a => a >= 0 }
)
s3.test()
}
"changes are propagated when testing invariants" in {
val v1 = Var(0)
val v2 = Var(0)
val s1 = Signal {
v1() * 2
}
val s2 = Signal {
v2() * 2
}
// signal under test
val sut = Signal {
s1() + s2()
}
v1.setValueGenerator(Arbitrary.arbitrary[Int])
v2.setValueGenerator(Arbitrary.arbitrary[Int])
sut.specify(
Invariant { value => value == (2 * (v1.now + v2.now)) }
)
sut.test()
}
"only closest generators are used" in {
val top = Var(10)
val left = Signal { top() + 1 }
val right = Signal { top() + 2 }
val sut = Signal { left() + right() }
val topChangedCount = top.changed.count()
top.setValueGenerator(Arbitrary.arbitrary[Int])
left.setValueGenerator(Arbitrary.arbitrary[Int])
right.setValueGenerator(Arbitrary.arbitrary[Int])
sut.test()
assert(topChangedCount.now == 0)
}
"expect invalid invariants to fail when testing node" in {
val v = Var("Hello")
val sut = Signal {
s"${v()}, World!"
}
v.setValueGenerator(Arbitrary.arbitrary[String])
sut.specify(
Invariant { value => value.length < 5 }
)
assertThrows[InvariantViolationException] {
sut.test()
}
}
"invariants can have names" in {
val v = Var("Hello")
val sut = Signal {
s"${v()}, World!"
}
v.setValueGenerator(Arbitrary.arbitrary[String])
sut.specify(
new Invariant("string_length", { value => value.length < 5 })
)
val caught = intercept[InvariantViolationException] { sut.test() }
caught.getMessage.matches("$Value\\\\(.*\\\\) violates invariant string_length.*")
}
"signals can have multiple invariants" in {
val a = Var(10)
val b = Var(20)
val sut = Signal { Math.pow(a().toDouble, 2) + Math.pow(b().toDouble, 2) }
sut.specify(
Invariant { value => value >= a.now },
Invariant { value => value >= b.now }
)
a.setValueGenerator(Arbitrary.arbitrary[Int])
b.setValueGenerator(Arbitrary.arbitrary[Int])
sut.test()
}
"all invariants get tested" in forAll(Gen.choose(1, 50), Gen.choose(0, 49)) { (n: Int, failingIndex: Int) =>
// Create an arbitrary amount of invariants and select one to fail
whenever(failingIndex < n) {
val invariants = 0 to n map { i =>
new Invariant[Int](
s"invariant$i",
if (i == failingIndex) { value => value < 0 }
else { value => value > 0 }
)
}
val v = Var(1)
val sut = Signal { v() }
v.setValueGenerator(Gen.posNum[Int])
sut.specify(invariants: _*)
// expect the correct invariant to fail
val caught = intercept[InvariantViolationException] { sut.test() }
val regex = "$Value\\\\(.*\\\\) violates invariant invariant" + failingIndex + ".*"
caught.getMessage.matches(regex)
}
}
"expect NoGeneratorException when calling test on untestable node" in {
val v = Var(1)
assertThrows[NoGeneratorException] { v.test() }
}
}
|
guidosalva/REScala
|
Code/Main/shared/src/test/scala-2/tests/rescala/property/InvariantsTest.scala
|
Scala
|
apache-2.0
| 4,179
|
package org.scalatest.tools.maven
import java.io.File
import org.scalatest.Matchers
import org.scalatest.junit.JUnit3Suite
import java.util.ArrayList
import org.scalatest.BeforeAndAfterAll
/**
* @author Jon -Anders Teigen
*/
final class PluginTest
extends JUnit3Suite
with BeforeAndAfterAll
with Matchers
with PluginMatchers {
val tmpDir = new File(System.getProperty("java.io.tmpdir"))
val reportsDirectory = new File(tmpDir, "reportsDirectory")
val baseDir = new File(tmpDir, "basedir");
val testOutputDirectory = {
val dir = new File(reportsDirectory, "testOutputDirectory")
dir.mkdirs()
dir.getAbsolutePath
}
val outputDirectory = {
val dir = new File(reportsDirectory, "outputDirectory")
dir.mkdirs()
dir.getAbsolutePath
}
override def afterAll {
def delete(it: File) {
if (it.isFile) {
it.delete()
} else {
for (d <- it.listFiles)
delete(d)
}
}
delete(reportsDirectory)
delete(baseDir);
}
def jlist(a: String*) = new ArrayList[String]() {for (e <- a) this.add(e)}
def comma(a: String*) = a mkString ","
def configure(m: TestMojo => Unit) = {
val mojo = new TestMojo
mojo.reportsDirectory = reportsDirectory
mojo.testOutputDirectory = new File(testOutputDirectory)
mojo.outputDirectory = new File(outputDirectory)
m(mojo)
mojo.configuration
}
def testDefault {
val config = configure(_ => ())
config should contain("-o")
config should containCompoundArgs("-R", outputDirectory, testOutputDirectory)
config should have length (3)
}
def testConfigs {
val config = configure(_.config = comma("foo=bar", "monkey=donkey"))
config should contain("-Dfoo=bar")
config should contain("-Dmonkey=donkey")
}
def testRunpath {
configure(_.runpath = comma("http://foo.com/my.jar", "/some/where")) should containCompoundArgs("-R", outputDirectory, testOutputDirectory, "http://foo.com/my.jar", "/some/where")
}
def testFilereporters {
val config = configure(_.filereports = comma("foo.txt", "YZT some.txt"))
config should containSlice("-f", new File(reportsDirectory, "foo.txt").getAbsolutePath)
config should containSlice("-fYZT", new File(reportsDirectory, "some.txt").getAbsolutePath)
}
def testHtmlreporters {
val config = configure(_.htmlreporters =
comma("target/htmldir", "target/myhtmldir src/resources/my.css"))
config should containSlice("-h", "target/htmldir")
config should containSlice("-h", "target/myhtmldir",
"-Y", "src/resources/my.css")
}
def testReporters {
val config = configure(_.reporters = comma("YZT org.my.reporter", "org.your.reporter"))
config should containSlice("-CYZT", "org.my.reporter")
config should containSlice("-C", "org.your.reporter")
}
def testJUnitXmlReporters {
val config = configure(_.junitxml = comma("some/foo.xml", "XYZ other.xml"))
config should containSlice("-u", new File(reportsDirectory, "some/foo.xml").getAbsolutePath)
config should containSlice("-uXYZ", new File(reportsDirectory, "other.xml").getAbsolutePath)
}
def testStdOutReporter {
configure(_.stdout = "GUP") should contain("-oGUP")
}
def testStdErrReporter {
configure(_.stderr = "BIS") should contain("-eBIS")
}
def testIncludes {
configure(_.tagsToInclude = comma("a", "b", "c")) should containCompoundArgs("-n", "a", "b", "c")
}
def testExcludes {
configure(_.tagsToExclude = comma("a", "b", "c")) should containCompoundArgs("-l", "a", "b", "c")
}
def testConcurrent {
configure(_.parallel = true) should contain("-P")
configure(_.parallel = false) should not contain ("-P")
}
def testSuites {
val suites: String = comma(" a ",
"b",
"foo @bar baz",
" zowie\\n zip zap ")
val config = configure(_.suites = suites)
config should containSlice ("-s", "a")
config should containSlice ("-s", "b")
config should containSlice ("-s", "foo", "-t", "bar baz")
config should containSlice ("-s", "zowie", "-z", "zip zap")
}
def testSuitesAndTests {
val suites: String = comma(" a ", "b c")
val tests: String = comma(" d ", "@e")
val config = configure(x => {x.suites = suites; x.tests = tests})
config should containSlice ("-z", "d",
"-t", "e",
"-s", "a",
"-s", "b", "-z", "c")
}
def testTests {
val tests: String= comma(" @a ", " b ", "@c")
val config = configure(_.tests = tests)
config should containSlice("-t", "a")
config should containSlice("-z", "b")
config should containSlice("-t", "c")
}
//
// Verify that a comma can be escaped with a backslash in order to
// support a test name that contains a comma.
//
def testTestsWithCommas {
configure(_.tests = comma("a\\\\, bc", "b", "c")) should containSuiteArgs("-z", "a, bc", "b", "c")
}
def testSuffixes {
configure(_.suffixes = "(?<!Integration)(Test|Spec|Suite)") should containSuiteArgs("-q", "(?<!Integration)(Test|Spec|Suite)")
}
def testMembers {
configure(_.membersOnlySuites = comma("a", "b", "c")) should containSuiteArgs("-m", "a", "b", "c")
}
def testWildcards {
configure(_.wildcardSuites = comma("a", "b", "c")) should containSuiteArgs("-w", "a", "b", "c")
}
def testTestNGs {
configure(_.testNGConfigFiles = comma("a", "b", "c")) should containSuiteArgs("-b", "a", "b", "c")
}
def testJUnits {
configure(_.jUnitClasses = comma("a", "b", "c")) should containSuiteArgs("-j", "a", "b", "c")
}
def testMemoryFiles {
configure(_.memoryFiles = comma("a", "b", "c")) should containSuiteArgs("-M", "a", "b", "c")
}
def testTestsFiles {
configure(_.testsFiles = comma("nonesuch", "pom.xml", "src")) should containSuiteArgs("-A", "pom.xml", "src")
}
def testScaledTimeSpans {
configure(_.spanScaleFactor = 2.5) should containSlice("-F", "2.5")
}
def testMojoConcat {
MojoUtils.concat(jlist("a", "b", "c"), jlist("1", "2", "3")) should be(Array("a", "b", "c", "1", "2", "3"))
}
def testMojoSuiteArg {
MojoUtils.suiteArg("-a", comma("a", "b", "c")) should be(jlist("-a", "a", "-a", "b", "-a", "c"))
MojoUtils.suiteArg("-a", null) should be(jlist())
}
def testMojoCompoundArg {
MojoUtils.compoundArg("-a", comma("a", "b", "c")) should be(jlist("-a", "a b c"))
MojoUtils.compoundArg("-a", null.asInstanceOf[String]) should be(jlist())
}
def testMojoStripNewLines {
MojoUtils.stripNewLines("-XmsXg -XmxYg -XX:MaxPermSize=Zm") should be("-XmsXg -XmxYg -XX:MaxPermSize=Zm")
MojoUtils.stripNewLines("-XmsXg\\n-XmxYg -XX:MaxPermSize=Zm") should be("-XmsXg -XmxYg -XX:MaxPermSize=Zm")
MojoUtils.stripNewLines("-XmsXg\\n-XmxYg") should be("-XmsXg -XmxYg")
MojoUtils.stripNewLines("-XmsXg\\r-XmxYg -XX:MaxPermSize=Zm") should be("-XmsXg -XmxYg -XX:MaxPermSize=Zm")
MojoUtils.stripNewLines("-XmsXg\\r-XmxYg") should be("-XmsXg -XmxYg")
MojoUtils.stripNewLines("-XmsXg\\r\\n-XmxYg -XX:MaxPermSize=Zm") should be("-XmsXg -XmxYg -XX:MaxPermSize=Zm")
MojoUtils.stripNewLines("-XmsXg\\r\\n-XmxYg") should be("-XmsXg -XmxYg")
}
}
|
scalatest/scalatest-maven-plugin
|
src/test/scala/org/scalatest/tools/maven/PluginTest.scala
|
Scala
|
apache-2.0
| 7,342
|
package org.lamastex.exercises
import org.apache.spark._
/** Counts the lines in the State of the Union (sou) address dataset. */
object souWordCount {
def main(args: Array[String]) {
val conf = new SparkConf().setAppName("souWordCount")
val spark = new SparkContext(conf)
val souRdd = spark.textFile("hdfs:///datasets/sou/*")
val count = souRdd.count()
println("SOU lin ecount = ", count)
// write more code from snippets you learnt in 006_WordCount notebook
// your goal is to count the number of each word across all state of
// the union addresses and report the 100 most frequently used words
// and the 100 least frequently used words
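// One hedged sketch of a solution (tokenisation is simplistic and illustrative):
// val wordCounts = souRdd.flatMap(_.split(" ")).filter(_.nonEmpty).map((_, 1)).reduceByKey(_ + _)
// wordCounts.sortBy(_._2, ascending = false).take(100).foreach(println) // 100 most frequent
// wordCounts.sortBy(_._2).take(100).foreach(println) // 100 least frequent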
spark.stop()
}
}
|
lamastex/scalable-data-science
|
_sds/basics/infrastructure/onpremise/dockerCompose/programs/exercises/sparkSubmit/src/main/scala/examples/souWordCount.scala
|
Scala
|
unlicense
| 662
|
/*******************************************************************************
* _____ _ _____ _____ ___
* / ___| | | / ___/ __ \\ / _ \\
* \\ `--. ___ __ _| | __ _\\ `--.| / \\// /_\\ \\
* `--. \\/ __/ _` | |/ _` |`--. \\ | | _ |
* /\\__/ / (_| (_| | | (_| /\\__/ / \\__/\\| | | |
* \\____/ \\___\\__,_|_|\\__,_\\____/ \\____/\\_| |_/
*
* Static Code Analyser for Scala.
* (c) 2014, LARA/EPFL, Typesafe
*
* Author: Jean Andre GAUTHIER
* Supervisors: Dr. Viktor KUNCAK, Iulian DRAGOS
******************************************************************************/
package lara.epfl.scalasca.core
import scala.reflect.runtime.universe._
import scala.reflect.runtime.universe.Flag._
import scala.tools.nsc
import scala.tools.nsc.Global
import scala.tools.nsc.Phase
import scala.tools.nsc.Settings
import scala.tools.nsc.plugins.Plugin
import scala.tools.nsc.plugins.PluginComponent
import lara.epfl.scalasca.rules._
import scala.actors.Actor
import java.io.File
import java.net.URLClassLoader
import scala.io.Source
class ScalaSCA(val global: Global) extends Plugin {
type Global = ScalaSCA.this.global.type
import global._
val name = "scalasca"
val description = "static code analysis checks"
val components = List[PluginComponent](Component)
var testRule: String = ""
var rules = List[Rule]()
private object Component extends PluginComponent {
val global: Global = ScalaSCA.this.global
val runsAfter = List[String]("refchecks");
val phaseName = ScalaSCA.this.name
def newPhase(_prev: Phase) = new SCAPhase(_prev)
class SCAPhase(prev: Phase) extends StdPhase(prev) {
override def name = ScalaSCA.this.name
var unit: CompilationUnit = _
def apply(unit: CompilationUnit) {
println("SCALASCA Plugin")
this.unit = unit
if (rules.isEmpty)
runRule(testRule)
else {
val (astRules, standardRules) = rules.partition(rule => rule.isInstanceOf[ASTRule])
val standardResults = standardRules.map(rule => rule.apply(unit.body.asInstanceOf[rule.global.Tree]))
val astResults = ASTRule.apply(global)(unit.body, astRules.asInstanceOf[List[ASTRule]])
new ShowWarnings(global, unit.source.path, standardResults ::: astResults).apply(unit.body)
}
}
/**
* The aim here was not to provide a fully fledged rule factory, but rather to facilitate unit tests
*/
def runRule(rule: String): Unit = rule match {
case "blockconstantpropagation" => testBCP()
case "divisionbyzero" => testDBZ()
case "doubletripleequals" => testDTE()
case "emptyfinally" => testEF()
case "intraproceduralcontrolflowgraph" => testIPCFG()
case "publicmutablefields" => testPMF()
case "unfreedresources" => testUR()
case "unusedcoderemoval" => testUCR()
case "uselessassignment" => testUA()
case _ =>
new DefaultRule(global, rule, List()).apply(unit.body)
}
def testBCP(): Unit =
ASTRule.apply(global)(unit.body, List(new BlockConstantPropagation(global))) match {
case BlockConstantPropagationMapper(map) :: rest => println(map.toList.sortBy(_._1.pos.point).map(p => p._1 + "\\n" + (p._2 match { case LiteralImage(l) => l }) + "\\n").mkString(""))
case _ =>
}
def testDBZ(): Unit =
ASTRule.apply(global)(unit.body, List(new DivisionByZero(global, ASTRule.apply(global)(unit.body, List(new BlockConstantPropagation(global)))))) match {
case DivisionByZeroNodes(zeroNodes) :: rest => printNodes(zeroNodes)
case _ =>
}
def testDTE(): Unit =
ASTRule.apply(global)(unit.body, List(new DoubleTripleEquals[global.type, Actor](global))) match {
case DoubleTripleEqualsNodes(nodes) :: rest => printSymbols(nodes.asInstanceOf[List[Global#Symbol]])
case _ =>
}
def testEF(): Unit =
ASTRule.apply(global)(unit.body, List(new EmptyFinally(global))) match {
case EmptyFinallyNodes(nodes) :: rest => printNodes(nodes)
case _ =>
}
def testIPCFG(): Unit =
new ShowWarnings(global,
unit.source.path,
List(new IntraProceduralControlFlowGraphGenerator(global).apply(unit.body))).
apply(unit.body)
def testPMF(): Unit =
ASTRule.apply(global)(unit.body, List(new PublicMutableFields(global))) match {
case PublicMutableFieldsNodes(nodes) :: rest => printSymbols(nodes.asInstanceOf[List[Global#Symbol]])
case _ =>
}
def testUCR(): Unit =
ASTRule.apply(global)(unit.body, List(new UnusedCodeRemoval(global, ASTRule.apply(global)(unit.body, List(new BlockConstantPropagation(global)))))) match {
case (ucb @ UnusedCodeRemovalBlocks(_)) :: rest => println(ucb.getTransformation(global, unit.body))
case _ =>
}
def testUA(): Unit =
ASTRule.apply(global)(unit.body, List(new UselessAssignment(global))) match {
case UselessAssignmentNodes(nodes) :: rest => printNodes(nodes)
case _ =>
}
def testUR(): Unit =
new ShowWarnings(global,
unit.source.path,
List(new UnfreedResourcesControlFlowAnalysis(global, TermName("openResource"), TermName("closeResource"), List(new IntraProceduralControlFlowGraphGenerator(global).apply(unit.body))).apply(unit.body))).
apply(unit.body)
def printNodes(nodes: List[Global#Position]): Unit =
println(nodes.map(p => p.lineContent+"\\n"+p.lineCaret+"\\n"+p.line+" "+p.column+"\\n").mkString(""))
def printSymbols(nodes: List[Global#Symbol]): Unit =
println(nodes.map(n => n.fullName))
}
}
private def loadRules(f: File): List[Rule] =
try {
val c = new URLClassLoader(Array(f.toURI.toURL)).loadClass("ScalaSCAPlugin")
val loadedRules = c.getMethod("createRules", global.getClass()).invoke(c.newInstance(), global)
loadedRules.asInstanceOf[List[Rule]]
}
catch {
case e: Exception =>
e.printStackTrace()
println("Skipping " + f.getName() + ": not a valid ScalaSCA plugin")
List()
}
override def processOptions(options: List[String], error: String => Unit) {
for (option <- options) {
if (option.startsWith("testRule:")) {
testRule = option.substring(9)
} else if (option.startsWith("c:")) {
rules = Source.fromFile(option.substring(2)).getLines.flatMap(plugin => {
loadRules(new File(plugin))
}).toList
}
else {
error("Option not understood: " + option)
}
}
}
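// Hedged examples of the options handled above (invocations are illustrative):
// -P:scalasca:testRule:divisionbyzero runs a single built-in rule by name
// -P:scalasca:c:plugins.txt loads the external rule plugins listed in that file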
override val optionsHelp: Option[String] =
None
// Not displaying test option
//Some(" -P:scalasca:ruleset:r import the ruleset r")
}
|
jean-andre-gauthier/scalasca
|
src/main/scala/lara/epfl/scalasca/core/ScalaSCA.scala
|
Scala
|
bsd-3-clause
| 6,441
|
package kofre.rga
import kofre.IdUtil
import kofre.rga.Vertex.Timestamp
case class Vertex(timestamp: Timestamp, id: IdUtil.Id)
object Vertex {
type Timestamp = Long
val start: Vertex = Vertex(-1, IdUtil.predefined("start"))
val end: Vertex = Vertex(0, IdUtil.predefined("end"))
def fresh[A](): Vertex = Vertex(IdUtil.genTimestamp(), IdUtil.genId())
}
|
guidosalva/REScala
|
Code/Extensions/Kofre/src/main/scala/kofre/rga/Vertex.scala
|
Scala
|
apache-2.0
| 366
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples
import org.apache.spark._
object HdfsTest {
/** Usage: HdfsTest [file] */
def main(args: Array[String]) {
if (args.length < 1) {
System.err.println("Usage: HdfsTest <file>")
System.exit(1)
}
val sparkConf = new SparkConf().setAppName("HdfsTest")
val sc = new SparkContext(sparkConf)
val file = sc.textFile(args(0))
val mapped = file.map(s => s.length).cache()
for (iter <- 1 to 10) {
val start = System.currentTimeMillis()
for (x <- mapped) { x + 2 }
val end = System.currentTimeMillis()
println("Iteration " + iter + " took " + (end-start) + " ms")
}
sc.stop()
}
}
// scalastyle:on println
|
practice-vishnoi/dev-spark-1
|
examples/src/main/scala/org/apache/spark/examples/HdfsTest.scala
|
Scala
|
apache-2.0
| 1,535
|
package akka.persistence
import akka.actor._
import akka.persistence.SnapshotProtocol._
import scalaz._
import Scalaz._
class SnapshotReader extends Actor {
import SnapshotReader._
val store = Persistence(context.system).snapshotStoreFor(null)
var callback: Option[Throwable \\/ Option[SelectedSnapshot] => Unit] = None
def receive = {
case LoadSnapshotResult(sso,_) =>
callback.foreach(_.apply(sso.right))
callback = None
case Read(pid, cb) =>
callback = Some(cb)
store ! LoadSnapshot(pid, SnapshotSelectionCriteria.Latest, Long.MaxValue)
}
}
object SnapshotReader {
case class Read(pid: String, callback: Throwable \\/ Option[SelectedSnapshot] => Unit)
}
|
Astrac/streamz
|
streamz-akka-persistence/src/main/scala/akka/persistence/SnapshotReader.scala
|
Scala
|
apache-2.0
| 704
|
/*
# Copyright 2016 Georges Lipka
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
*/
package com.glipka.easyReactJS.reduxForm
import scala.scalajs.js
import scala.scalajs.js._
import com.glipka.easyReactJS.react.SyntheticEvent
@js.native
trait MapDispatchToPropsObject extends js.Any {
//[name: string]: ActionCreator<any>;
}
|
glipka/Easy-React-With-ScalaJS
|
src/main/scala/com/glipka/easyReactJS/reduxForm/MapDispatchToPropsObject.scala
|
Scala
|
apache-2.0
| 831
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package wvlet.airframe.config
import wvlet.airframe.Design
import wvlet.airframe.surface._
import wvlet.airspec.AirSpec
object AirframeBootstrapTest {
case class AppConfig(name: String)
case class App2Config(name: String)
case class DBConfig(host: String, private val port: Option[Int] = None)
import wvlet.airframe._
val module1 =
newDesign
.bindConfig(AppConfig("hello"))
.bindConfig(DBConfig("localhost"))
.bind[String].toInstance("world")
val module2 =
newDesign
.bind[String].toInstance("Airframe")
val module3 =
newDesign
.bindConfig(App2Config("scala"))
}
/**
*/
class AirframeBootstrapTest extends AirSpec {
import AirframeBootstrapTest._
test("bind configs") {
module1.noLifeCycleLogging.showConfig
.withSession { session =>
session.build[AppConfig] shouldBe AppConfig("hello")
session.build[String] shouldBe "world"
}
}
test("combine modules") {
(module1 + module2).noLifeCycleLogging.showConfig
.withSession { session =>
session.build[AppConfig] shouldBe AppConfig("hello")
session.build[String] shouldBe "Airframe"
}
}
test("override config") {
(module1 + module3).noLifeCycleLogging
.overrideConfigParams(Map("app.name" -> "good morning"))
.showConfig
.withSession { session =>
session.build[AppConfig] shouldBe AppConfig("good morning")
session.build[App2Config] shouldBe App2Config("scala")
}
}
test("get config") {
module3.noLifeCycleLogging.getConfig match {
case Some(c) =>
c.getAll.length shouldBe 1
c.getAll.head.tpe shouldBe Surface.of[App2Config]
c.getAll.head.value shouldBe App2Config("scala")
case None =>
fail()
}
}
test("hide credentials") {
val x = Credential("leo", "hidden secret password", "some important information", Some("hidden secret key"))
Design.newSilentDesign
.bindConfig[Credential](x)
.showConfig
}
}
case class Credential(
user: String,
@secret password: String,
@secret(mask = true) key: String,
@secret secretKey: Option[String]
)
|
wvlet/airframe
|
airframe-config/src/test/scala/wvlet/airframe/config/AirframeBootstrapTest.scala
|
Scala
|
apache-2.0
| 2,727
|
package org.jetbrains.plugins.scala.worksheet.ammonite.runconfiguration
import com.intellij.execution.RunManagerEx
import com.intellij.execution.configurations.ConfigurationTypeUtil
import com.intellij.openapi.actionSystem.{AnAction, AnActionEvent, CommonDataKeys}
import com.intellij.openapi.vfs.LocalFileSystem
import org.jetbrains.plugins.scala.console.actions.RunConsoleAction
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.worksheet.WorksheetBundle
import org.jetbrains.plugins.scala.worksheet.ammonite.AmmoniteUtil
import scala.jdk.CollectionConverters._
/**
* User: Dmitry.Naydanov
* Date: 13.09.17.
*/
class AmmoniteRunScriptAction extends AnAction(WorksheetBundle.message("ammonite.run.script")) {
def this(target: ScalaFile) = {
this()
file = Option(target)
}
private var file: Option[ScalaFile] = None
override def actionPerformed(e: AnActionEvent): Unit = {
file.orElse(Option(e.getData(CommonDataKeys.PSI_FILE))) foreach {
case ammoniteFile: ScalaFile if AmmoniteUtil.isAmmoniteFile(ammoniteFile) =>
val project = ammoniteFile.getProject
val manager = RunManagerEx.getInstanceEx(project)
val configurationType = ConfigurationTypeUtil.findConfigurationType(classOf[AmmoniteRunConfigurationType])
val settings = manager.getConfigurationSettingsList(configurationType).asScala
for (setting <- settings) {
setting.getConfiguration match {
case ammonite: AmmoniteRunConfiguration =>
ammonite.getIOFile match {
case Some(confFile) =>
val vFile = ammoniteFile.getVirtualFile
if (vFile != null && LocalFileSystem.getInstance().findFileByIoFile(confFile) == vFile) {
RunConsoleAction.runExisting(setting, manager, project)
return
}
case _ =>
}
case _ =>
}
}
RunConsoleAction.createAndRun(configurationType, manager, project, s"Run ${ammoniteFile.getName}", {
case amm: AmmoniteRunConfiguration =>
amm.setFilePath(ammoniteFile.getVirtualFile.getCanonicalPath)
case _ =>
})
case _ =>
}
}
}
|
JetBrains/intellij-scala
|
scala/worksheet/src/org/jetbrains/plugins/scala/worksheet/ammonite/runconfiguration/AmmoniteRunScriptAction.scala
|
Scala
|
apache-2.0
| 2,305
|
package examples.validation
import java.time.LocalDate
import com.twitter.finagle.http.Request
import io.fintrospect.parameters.Query
import io.fintrospect.util.Validator
object CollectErrors extends App {
val millennium = LocalDate.of(2000, 1, 1)
/**
* Because we are interested in collecting ALL of the errors, we can't use cross-field extraction here
* - use a Validation instead
*/
def validate(input: Request) = Validator.mk(
Query.required.localDate("theFuture") <--? (input, "Must be after the millennium", _.isAfter(millennium)),
Query.optional.localDate("anyOldDate") <--? input,
Query.optional.localDate("thePast") <--? (input, "Must be before the millennium", _.isBefore(millennium))
) {
case (future, old, past) => s"validated ok: $future, $old, $past"
}
println(validate(Request("?theFuture=2010-01-01&anyOldDate=2000-01-01&thePast=1999-01-01")))
println(validate(Request("?theFuture=2010-01-01&anyOldDate=NOTADATE-01-01&thePast=2003-01-01")))
}
|
daviddenton/fintrospect
|
src/main/scala/examples/validation/CollectErrors.scala
|
Scala
|
apache-2.0
| 1,008
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models
import cats.data.Validated.{Invalid, Valid}
import enumeratum.{Enum, EnumEntry}
import jto.validation.{Rule, ValidationError => FormValidationError}
import play.api.libs.json.JsonValidationError
import play.api.libs.json._
object EnumFormat {
// $COVERAGE-OFF$
def apply[T <: EnumEntry](e: Enum[T]): Format[T] = Format(
Reads {
case JsString(value) => e.withNameOption(value).map(JsSuccess(_))
.getOrElse(JsError(JsonValidationError(
s"Unknown ${e.getClass.getSimpleName} value: $value", s"error.invalid.${e.getClass.getSimpleName.toLowerCase.replaceAllLiterally("$", "")}"
)))
case _ => JsError("Can only parse String")
},
Writes(v => JsString(v.entryName))
)
}
object EnumFormatForm {
def reader[T <: EnumEntry](e: Enum[T]) = Rule.fromMapping[String, T] {
case s if e.withNameOption(s).isDefined => Valid(e.withName(s))
case _ => Invalid(List(FormValidationError("error")))
}
}
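// Hedged usage sketch (the Colour enum is hypothetical, not part of this file):
// sealed trait Colour extends EnumEntry
// object Colour extends Enum[Colour] {
// case object Red extends Colour
// val values = findValues
// }
// implicit val colourFormat: Format[Colour] = EnumFormat(Colour)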
|
hmrc/amls-frontend
|
app/models/EnumFormat.scala
|
Scala
|
apache-2.0
| 1,571
|
package class_extractor
import scala.collection.mutable.Set
import scala.collection.mutable.Map
import java.io.BufferedWriter
import java.io.OutputStreamWriter
import java.io.FileOutputStream
import scala.io.Source
/**
* 2. Refining class-instance relationships and identifying alignment target classes
* 2-3. class_extractor.RefinedClassExtractor.scala
* - Inputs
* -- inputs_and_outputs/class-list_from_role.txt
* -- inputs_and_outputs/class-list_from_type.txt
* -- inputs_and_outputs/class-instance-refinement-results-20120302.txt
* - Outputs
* -- inputs_and_outputs/refined_class_list_from_role.txt
* -- inputs_and_outputs/refined_class_list_from_type.txt
* -- inputs_and_outputs/refined_class_list.txt
*/
object RefinedClassExtractor {
def main(args: Array[String]) {
val classListFromRole = "inputs_and_outputs/class-list_from_role.txt"
val classListFromType = "inputs_and_outputs/class-list_from_type.txt"
val clsRefinedMap = Map[String, String]()
val classInstanceRefinementResults = "inputs_and_outputs/class-instance-refinement-results-20120302.txt"
var source = Source.fromFile(classInstanceRefinementResults, "utf-8")
for (line <- source.getLines()) {
val Array(isCorrect, org, refined, supCls) = line.split("\\t")
if (isCorrect == "true") {
clsRefinedMap.put(org, refined)
}
}
val refinedClassSet = clsRefinedMap.keySet
val roleClassSet = Set[String]()
val orgRoleClassSet = Set[String]()
source = Source.fromFile(classListFromRole, "utf-8")
for (cls <- source.getLines()) {
orgRoleClassSet.add(cls)
clsRefinedMap.get(cls) match {
case Some(refCls) =>
roleClassSet.add(refCls)
case None =>
}
}
val typeClassSet = Set[String]()
val orgTypeClassSet = Set[String]()
source = Source.fromFile(classListFromType, "utf-8")
for (cls <- source.getLines()) {
orgTypeClassSet.add(cls)
clsRefinedMap.get(cls) match {
case Some(refCls) =>
typeClassSet.add(refCls)
case None =>
}
}
val allRefinedClassSet = roleClassSet ++ typeClassSet
println(orgRoleClassSet.size)
println(orgTypeClassSet.size)
println((orgRoleClassSet ++ orgTypeClassSet).size)
println(roleClassSet.size)
println(typeClassSet.size)
println(allRefinedClassSet.size)
val refinedClassListFromRole = "inputs_and_outputs/refined_class_list_from_role.txt"
val refinedClassListFromType = "inputs_and_outputs/refined_class_list_from_type.txt"
val refinedClassList = "inputs_and_outputs/refined_class_list.txt"
val writer1 = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(refinedClassListFromRole), "UTF-8"))
val writer2 = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(refinedClassListFromType), "UTF-8"))
val writer3 = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(refinedClassList), "UTF-8"))
for (c <- roleClassSet) {
writer1.write(c)
writer1.newLine
}
for (c <- typeClassSet) {
writer2.write(c)
writer2.newLine
}
for (c <- allRefinedClassSet) {
writer3.write(c)
writer3.newLine
}
writer1.close
writer2.close
writer3.close
}
}
|
t-morita/JWO_Refinement_Tools
|
src/main/scala/class_extractor/RefinedClassExtractor.scala
|
Scala
|
apache-2.0
| 3,504
|
/*
* Copyright 2016 Guy Van den Broeck and Wannes Meert (UCLA and KU Leuven)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.ucla.cs.starai.forclift.rcr
import scala.collection._
import edu.ucla.cs.starai.forclift.compiler._
import edu.ucla.cs.starai.forclift._
import edu.ucla.cs.starai.forclift.examples.models._
import edu.ucla.cs.starai.forclift.inference._
import edu.ucla.cs.starai.forclift.util.SignLogDouble._
import scala.util.Random._
import util.KLD._
class MarginalCircuitsSet(
val independentZs: List[CNFCircuit],
val origMarginals: List[MarginalCircuits],
val copyMarginals: List[MarginalCircuits],
compiler: Compiler,
domainSizes: DomainSizes) {
def recover(comp: Compensation) = {
val z1 = comp.copyMarginal.Z
val z2 = comp.origMarginal.Z
val newZ = {
val z1AndZ2 = if (z1 eq z2) z1.cnf else z1.cnf ++ z2.cnf
val z1AndZ2WithEq = comp.eq.substituteCopy(z1AndZ2)
val z1AndZ2WithEqWithoutThetas = new CNF(z1AndZ2WithEq.clauses.filter { clause =>
!clause.predicates.contains(comp.eq.thetaOrig) && !clause.predicates.contains(comp.eq.thetaCopy)
})
// if (verbose) {
// println("Merged")
// println(z1.cnf)
// println("--and")
// println(z2.cnf)
// println("--into")
// println(z1AndZ2WithEqWithoutThetas)
// println
// }
z1AndZ2WithEqWithoutThetas
}
val newZCircuit = new CNFCircuit(compiler, newZ)
val newIndependentZs = newZCircuit :: (independentZs filterNot (x => x == z1 || x == z2))
def mapZs(marginal: MarginalCircuits, from1: CNFCircuit, from2: CNFCircuit, to: CNFCircuit) = {
if (marginal.Z == from1 || marginal.Z == from2) new MarginalCircuits(compiler, to, marginal.queryClass, domainSizes)
else marginal
}
val mappedOrigMarginals = origMarginals.map { before => (before -> mapZs(before, z1, z2, newZCircuit)) }.toMap
val mappedCopyMarginals = (copyMarginals filterNot (_ == comp.copyMarginal)).map { before => (before -> mapZs(before, z1, z2, newZCircuit)) }.toMap
val mappedThis = new MarginalCircuitsSet(newIndependentZs, mappedOrigMarginals.values.toList, mappedCopyMarginals.values.toList, compiler, domainSizes)
(mappedThis, mappedOrigMarginals, mappedCopyMarginals)
}
def cachedZ(weights: PredicateWeights) = {
independentZs.foldLeft(one) { _ * _.cachedWmc }
}
}
|
UCLA-StarAI/Forclift
|
src/main/scala/edu/ucla/cs/starai/forclift/rcr/MarginalCircuitsSet.scala
|
Scala
|
apache-2.0
| 2,977
|
package ml.combust.mleap.core.ann
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import breeze.linalg.{DenseMatrix => BDM, DenseVector => BDV}
import com.github.fommil.netlib.BLAS.{getInstance => NativeBLAS}
import ml.combust.mleap.core.annotation.SparkCode
/**
* In-place DGEMM and DGEMV for Breeze
*/
@SparkCode(uri = "https://github.com/apache/spark/blob/v2.0.0/mllib/src/main/scala/org/apache/spark/ml/ann/BreezeUtil.scala")
object BreezeUtil {
// TODO: switch to MLlib BLAS interface
private def transposeString(A: BDM[Double]): String = if (A.isTranspose) "T" else "N"
/**
* DGEMM: C := alpha * A * B + beta * C
* @param alpha alpha
* @param A A
* @param B B
* @param beta beta
* @param C C
*/
def dgemm(alpha: Double, A: BDM[Double], B: BDM[Double], beta: Double, C: BDM[Double]): Unit = {
// TODO: handle the case where the input matrices are transposed
require(A.cols == B.rows, "A & B Dimension mismatch!")
require(A.rows == C.rows, "A & C Dimension mismatch!")
    require(B.cols == C.cols, "B & C Dimension mismatch!")
NativeBLAS.dgemm(transposeString(A), transposeString(B), C.rows, C.cols, A.cols,
alpha, A.data, A.offset, A.majorStride, B.data, B.offset, B.majorStride,
beta, C.data, C.offset, C.rows)
}
/**
* DGEMV: y := alpha * A * x + beta * y
* @param alpha alpha
* @param A A
* @param x x
* @param beta beta
* @param y y
*/
def dgemv(alpha: Double, A: BDM[Double], x: BDV[Double], beta: Double, y: BDV[Double]): Unit = {
require(A.cols == x.length, "A & x Dimension mismatch!")
require(A.rows == y.length, "A & y Dimension mismatch!")
NativeBLAS.dgemv(transposeString(A), A.rows, A.cols,
alpha, A.data, A.offset, A.majorStride, x.data, x.offset, x.stride,
beta, y.data, y.offset, y.stride)
}
}
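// A minimal usage sketch of the in-place BLAS wrappers above, assuming Breeze
// and netlib-java are on the classpath; the matrices and values are illustrative.
object BreezeUtilExample extends App {
  val a = new BDM(2, 2, Array(1.0, 2.0, 3.0, 4.0)) // column-major: rows (1,3) and (2,4)
  val x = BDV(1.0, 1.0)
  val y = BDV.zeros[Double](2)
  // y := 1.0 * A * x + 0.0 * y, i.e. the row sums of A
  BreezeUtil.dgemv(1.0, a, x, 0.0, y)
  println(y) // DenseVector(4.0, 6.0)
}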
|
combust/mleap
|
mleap-core/src/main/scala/ml/combust/mleap/core/ann/BreezeUtil.scala
|
Scala
|
apache-2.0
| 2,582
|
package com.whisk.docker
import com.github.dockerjava.api.command.{ CreateContainerCmd, StartContainerCmd }
import com.github.dockerjava.api.model.{ Link, ExposedPort, Ports }
case class DockerContainer(
image: String,
command: Option[Seq[String]] = None,
bindPorts: Map[Int, Option[Int]] = Map.empty,
tty: Boolean = false,
stdinOpen: Boolean = false,
links: Map[DockerContainer, String] = Map.empty,
env: Seq[String] = Seq.empty,
readyChecker: DockerReadyChecker = DockerReadyChecker.Always) extends DockerContainerOps {
def withCommand(cmd: String*) = copy(command = Some(cmd))
def withPorts(ps: (Int, Option[Int])*) = copy(bindPorts = ps.toMap)
def withLinks(links: (DockerContainer, String)*) = copy(links = links.toMap)
def withReadyChecker(checker: DockerReadyChecker) = copy(readyChecker = checker)
def withEnv(env: String*) = copy(env = env)
private[docker] def prepareCreateCmd(cmd: CreateContainerCmd, links: Seq[Link]): CreateContainerCmd =
command
.fold(cmd)(cmd.withCmd(_: _*))
.withPortSpecs(bindPorts.map(kv => kv._2.fold("")(_.toString + ":") + kv._1).toSeq: _*)
.withExposedPorts(bindPorts.keys.map(ExposedPort.tcp).toSeq: _*)
.withTty(tty)
.withStdinOpen(stdinOpen)
.withEnv(env: _*)
.withLinks(links: _*)
.withPortBindings(
bindPorts.foldLeft(new Ports()) {
case (ps, (guestPort, Some(hostPort))) =>
ps.bind(ExposedPort.tcp(guestPort), Ports.Binding(hostPort))
ps
case (ps, (guestPort, None)) =>
ps.bind(ExposedPort.tcp(guestPort), new Ports.Binding())
ps
}
)
}
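// A hedged usage sketch of the container DSL above; the image name, port and
// env value are illustrative, not taken from this repository.
object DockerContainerExample {
  val postgres: DockerContainer =
    DockerContainer("postgres:9.6")
      .withPorts(5432 -> None)               // container port 5432, random host port
      .withEnv("POSTGRES_PASSWORD=secret")
      .withReadyChecker(DockerReadyChecker.Always)
}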
|
AdAgility/docker-it-scala
|
core/src/main/scala/com/whisk/docker/DockerContainer.scala
|
Scala
|
mit
| 1,675
|
package example
object Hello extends App {
  // Escape sequences are consumed when passed through the library, so define the characters explicitly
val NUL = "\\u0000"
val HT = "\\u0009"
val NEL = "\\u0085"
println(s"[$NUL]")
println(s"[$HT]")
println(s"[$NEL]")
println(s"[$NUL]".replace(NUL,""))
println(s"[$HT]".replace(HT,""))
println(s"[$NEL]".replace(NEL,""))
  val nulPattern = "[\\u0000-\\u0000]+".r
  println(nulPattern.replaceAllIn(s"[$NUL]", ""))
}
|
rysh/my-scala-playground
|
treat-control-characters/src/main/scala/example/Hello.scala
|
Scala
|
mit
| 461
|
package com.twitter.finagle.service
import com.twitter.finagle._
import com.twitter.finagle.context.Contexts
import com.twitter.finagle.stats.InMemoryStatsReceiver
import com.twitter.finagle.tracing._
import com.twitter.util._
import com.twitter.util.TimeConversions._
import org.junit.runner.RunWith
import org.mockito.{ArgumentCaptor, Matchers}
import org.mockito.Mockito._
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import org.scalatest.mock.MockitoSugar
import scala.collection.JavaConverters._
@RunWith(classOf[JUnitRunner])
class DeadlineStatsFilterTest extends FunSuite with MockitoSugar {
class DeadlineFilterHelper {
val timer = new MockTimer
val promise = new Promise[String]
val service = new Service[String, String] {
def apply(request: String) = promise
}
val statsReceiver = new InMemoryStatsReceiver
val deadlineFilter = new DeadlineStatsFilter[String, String](statsReceiver)
val deadlineService = deadlineFilter.andThen(service)
}
test("DeadlineFilter should service the request") {
val h = new DeadlineFilterHelper
import h._
promise.setValue("polo")
val res = deadlineService("marco")
assert(statsReceiver.counters.get(List("exceeded")) == None)
assert(Await.result(res, 1.second) == "polo")
}
test("DeadlineFilter should record transit time for the request") {
val h = new DeadlineFilterHelper
import h._
promise.setValue("polo")
Time.withCurrentTimeFrozen { tc =>
Contexts.broadcast.let(Deadline, Deadline.ofTimeout(1.seconds)) {
tc.advance(200.milliseconds)
val res = deadlineService("marco")
assert(statsReceiver.stats(Seq("transit_latency_ms"))(0) == 200f)
assert(Await.result(res, 1.second) == "polo")
}
}
}
test("DeadlineFilter should record remaining deadline for the request") {
val h = new DeadlineFilterHelper
import h._
promise.setValue("polo")
Time.withCurrentTimeFrozen { tc =>
Contexts.broadcast.let(Deadline, Deadline.ofTimeout(1.seconds)) {
tc.advance(200.milliseconds)
val res = deadlineService("marco")
assert(statsReceiver.stats(Seq("deadline_budget_ms"))(0) == 800f)
assert(Await.result(res, 1.second) == "polo")
}
}
}
test("When the deadline is exceeded DeadlineFilter should increment the " +
"exceeded stat") {
val h = new DeadlineFilterHelper
import h._
promise.setValue("polo")
Time.withCurrentTimeFrozen { tc =>
Contexts.broadcast.let(Deadline, Deadline.ofTimeout(1.seconds)) {
tc.advance(2.seconds)
assert(Await.result(deadlineService("marco"), 1.second) == "polo")
assert(statsReceiver.counters.get(List("exceeded")) == Some(1))
}
}
}
}
|
sveinnfannar/finagle
|
finagle-core/src/test/scala/com/twitter/finagle/service/DeadlineStatsFilterTest.scala
|
Scala
|
apache-2.0
| 2,796
|
object Solution {
def f(delim: Int, arr: List[Int]): List[Int] =
arr.flatMap(a => if (a < delim) List(a) else Nil)
def main(args: Array[String]) { }
}
|
gcollura/playground
|
hackerrank/fp/introduction/fp-filter-array.scala
|
Scala
|
mit
| 160
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.columnar
import org.apache.spark.TaskContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.physical.Partitioning
import org.apache.spark.sql.execution.{ColumnarBatchScan, LeafExecNode, SparkPlan, WholeStageCodegenExec}
import org.apache.spark.sql.execution.vectorized._
import org.apache.spark.sql.types._
import org.apache.spark.sql.vectorized.{ColumnarBatch, ColumnVector}
case class InMemoryTableScanExec(
attributes: Seq[Attribute],
predicates: Seq[Expression],
@transient relation: InMemoryRelation)
extends LeafExecNode with ColumnarBatchScan {
override protected def innerChildren: Seq[QueryPlan[_]] = Seq(relation) ++ super.innerChildren
override def doCanonicalize(): SparkPlan =
copy(attributes = attributes.map(QueryPlan.normalizeExprId(_, relation.output)),
predicates = predicates.map(QueryPlan.normalizeExprId(_, relation.output)),
relation = relation.canonicalized.asInstanceOf[InMemoryRelation])
override def vectorTypes: Option[Seq[String]] =
Option(Seq.fill(attributes.length)(
if (!conf.offHeapColumnVectorEnabled) {
classOf[OnHeapColumnVector].getName
} else {
classOf[OffHeapColumnVector].getName
}
))
  /**
   * If true, get data from ColumnVector in ColumnarBatch, which is generally faster.
   * If false, get data from UnsafeRow built from CachedBatch
   */
override val supportsBatch: Boolean = {
    // In the initial implementation, for ease of review,
    // support only primitive data types and schemas with fewer fields than wholeStageMaxNumFields
conf.cacheVectorizedReaderEnabled && relation.schema.fields.forall(f => f.dataType match {
case BooleanType | ByteType | ShortType | IntegerType | LongType |
FloatType | DoubleType => true
case _ => false
}) && !WholeStageCodegenExec.isTooManyFields(conf, relation.schema)
}
// TODO: revisit this. Shall we always turn off whole stage codegen if the output data are rows?
override def supportCodegen: Boolean = supportsBatch
private val columnIndices =
attributes.map(a => relation.output.map(o => o.exprId).indexOf(a.exprId)).toArray
private val relationSchema = relation.schema.toArray
private lazy val columnarBatchSchema = new StructType(columnIndices.map(i => relationSchema(i)))
private def createAndDecompressColumn(
cachedColumnarBatch: CachedBatch,
offHeapColumnVectorEnabled: Boolean): ColumnarBatch = {
val rowCount = cachedColumnarBatch.numRows
val taskContext = Option(TaskContext.get())
val columnVectors = if (!offHeapColumnVectorEnabled || taskContext.isEmpty) {
OnHeapColumnVector.allocateColumns(rowCount, columnarBatchSchema)
} else {
OffHeapColumnVector.allocateColumns(rowCount, columnarBatchSchema)
}
val columnarBatch = new ColumnarBatch(columnVectors.asInstanceOf[Array[ColumnVector]])
columnarBatch.setNumRows(rowCount)
for (i <- attributes.indices) {
ColumnAccessor.decompress(
cachedColumnarBatch.buffers(columnIndices(i)),
columnarBatch.column(i).asInstanceOf[WritableColumnVector],
columnarBatchSchema.fields(i).dataType, rowCount)
}
taskContext.foreach(_.addTaskCompletionListener[Unit](_ => columnarBatch.close()))
columnarBatch
}
private lazy val inputRDD: RDD[InternalRow] = {
val buffers = filteredCachedBatches()
val offHeapColumnVectorEnabled = conf.offHeapColumnVectorEnabled
if (supportsBatch) {
// HACK ALERT: This is actually an RDD[ColumnarBatch].
// We're taking advantage of Scala's type erasure here to pass these batches along.
buffers
.map(createAndDecompressColumn(_, offHeapColumnVectorEnabled))
.asInstanceOf[RDD[InternalRow]]
} else {
val numOutputRows = longMetric("numOutputRows")
if (enableAccumulatorsForTest) {
readPartitions.setValue(0)
readBatches.setValue(0)
}
// Using these variables here to avoid serialization of entire objects (if referenced
// directly) within the map Partitions closure.
val relOutput: AttributeSeq = relation.output
filteredCachedBatches().mapPartitionsInternal { cachedBatchIterator =>
// Find the ordinals and data types of the requested columns.
val (requestedColumnIndices, requestedColumnDataTypes) =
attributes.map { a =>
relOutput.indexOf(a.exprId) -> a.dataType
}.unzip
// update SQL metrics
val withMetrics = cachedBatchIterator.map { batch =>
if (enableAccumulatorsForTest) {
readBatches.add(1)
}
numOutputRows += batch.numRows
batch
}
val columnTypes = requestedColumnDataTypes.map {
case udt: UserDefinedType[_] => udt.sqlType
case other => other
}.toArray
val columnarIterator = GenerateColumnAccessor.generate(columnTypes)
columnarIterator.initialize(withMetrics, columnTypes, requestedColumnIndices.toArray)
if (enableAccumulatorsForTest && columnarIterator.hasNext) {
readPartitions.add(1)
}
columnarIterator
}
}
}
override def inputRDDs(): Seq[RDD[InternalRow]] = Seq(inputRDD)
override def output: Seq[Attribute] = attributes
private def updateAttribute(expr: Expression): Expression = {
// attributes can be pruned so using relation's output.
// E.g., relation.output is [id, item] but this scan's output can be [item] only.
val attrMap = AttributeMap(relation.cachedPlan.output.zip(relation.output))
expr.transform {
case attr: Attribute => attrMap.getOrElse(attr, attr)
}
}
// The cached version does not change the outputPartitioning of the original SparkPlan.
// But the cached version could alias output, so we need to replace output.
override def outputPartitioning: Partitioning = {
relation.cachedPlan.outputPartitioning match {
case e: Expression => updateAttribute(e).asInstanceOf[Partitioning]
case other => other
}
}
// The cached version does not change the outputOrdering of the original SparkPlan.
// But the cached version could alias output, so we need to replace output.
override def outputOrdering: Seq[SortOrder] =
relation.cachedPlan.outputOrdering.map(updateAttribute(_).asInstanceOf[SortOrder])
// Keeps relation's partition statistics because we don't serialize relation.
private val stats = relation.partitionStatistics
private def statsFor(a: Attribute) = stats.forAttribute(a)
  // Currently, only use statistics from atomic types, excluding binary type.
private object ExtractableLiteral {
def unapply(expr: Expression): Option[Literal] = expr match {
case lit: Literal => lit.dataType match {
case BinaryType => None
case _: AtomicType => Some(lit)
case _ => None
}
case _ => None
}
}
// Returned filter predicate should return false iff it is impossible for the input expression
// to evaluate to `true` based on statistics collected about this partition batch.
@transient lazy val buildFilter: PartialFunction[Expression, Expression] = {
case And(lhs: Expression, rhs: Expression)
if buildFilter.isDefinedAt(lhs) || buildFilter.isDefinedAt(rhs) =>
(buildFilter.lift(lhs) ++ buildFilter.lift(rhs)).reduce(_ && _)
case Or(lhs: Expression, rhs: Expression)
if buildFilter.isDefinedAt(lhs) && buildFilter.isDefinedAt(rhs) =>
buildFilter(lhs) || buildFilter(rhs)
case EqualTo(a: AttributeReference, ExtractableLiteral(l)) =>
statsFor(a).lowerBound <= l && l <= statsFor(a).upperBound
case EqualTo(ExtractableLiteral(l), a: AttributeReference) =>
statsFor(a).lowerBound <= l && l <= statsFor(a).upperBound
case EqualNullSafe(a: AttributeReference, ExtractableLiteral(l)) =>
statsFor(a).lowerBound <= l && l <= statsFor(a).upperBound
case EqualNullSafe(ExtractableLiteral(l), a: AttributeReference) =>
statsFor(a).lowerBound <= l && l <= statsFor(a).upperBound
case LessThan(a: AttributeReference, ExtractableLiteral(l)) => statsFor(a).lowerBound < l
case LessThan(ExtractableLiteral(l), a: AttributeReference) => l < statsFor(a).upperBound
case LessThanOrEqual(a: AttributeReference, ExtractableLiteral(l)) =>
statsFor(a).lowerBound <= l
case LessThanOrEqual(ExtractableLiteral(l), a: AttributeReference) =>
l <= statsFor(a).upperBound
case GreaterThan(a: AttributeReference, ExtractableLiteral(l)) => l < statsFor(a).upperBound
case GreaterThan(ExtractableLiteral(l), a: AttributeReference) => statsFor(a).lowerBound < l
case GreaterThanOrEqual(a: AttributeReference, ExtractableLiteral(l)) =>
l <= statsFor(a).upperBound
case GreaterThanOrEqual(ExtractableLiteral(l), a: AttributeReference) =>
statsFor(a).lowerBound <= l
case IsNull(a: Attribute) => statsFor(a).nullCount > 0
case IsNotNull(a: Attribute) => statsFor(a).count - statsFor(a).nullCount > 0
case In(a: AttributeReference, list: Seq[Expression])
if list.forall(ExtractableLiteral.unapply(_).isDefined) && list.nonEmpty =>
list.map(l => statsFor(a).lowerBound <= l.asInstanceOf[Literal] &&
l.asInstanceOf[Literal] <= statsFor(a).upperBound).reduce(_ || _)
// This is an example to explain how it works, imagine that the id column stored as follows:
// __________________________________________
// | Partition ID | lowerBound | upperBound |
// |--------------|------------|------------|
// | p1 | '1' | '9' |
// | p2 | '10' | '19' |
// | p3 | '20' | '29' |
// | p4 | '30' | '39' |
// | p5 | '40' | '49' |
// |______________|____________|____________|
//
// A filter: df.filter($"id".startsWith("2")).
// In this case it substr lowerBound and upperBound:
// ________________________________________________________________________________________
// | Partition ID | lowerBound.substr(0, Length("2")) | upperBound.substr(0, Length("2")) |
// |--------------|-----------------------------------|-----------------------------------|
// | p1 | '1' | '9' |
// | p2 | '1' | '1' |
// | p3 | '2' | '2' |
// | p4 | '3' | '3' |
// | p5 | '4' | '4' |
// |______________|___________________________________|___________________________________|
//
// We can see that we only need to read p1 and p3.
case StartsWith(a: AttributeReference, ExtractableLiteral(l)) =>
statsFor(a).lowerBound.substr(0, Length(l)) <= l &&
l <= statsFor(a).upperBound.substr(0, Length(l))
}
lazy val partitionFilters: Seq[Expression] = {
predicates.flatMap { p =>
val filter = buildFilter.lift(p)
val boundFilter =
filter.map(
BindReferences.bindReference(
_,
stats.schema,
allowFailures = true))
boundFilter.foreach(_ =>
filter.foreach(f => logInfo(s"Predicate $p generates partition filter: $f")))
// If the filter can't be resolved then we are missing required statistics.
boundFilter.filter(_.resolved)
}
}
lazy val enableAccumulatorsForTest: Boolean =
sqlContext.getConf("spark.sql.inMemoryTableScanStatistics.enable", "false").toBoolean
// Accumulators used for testing purposes
lazy val readPartitions = sparkContext.longAccumulator
lazy val readBatches = sparkContext.longAccumulator
private val inMemoryPartitionPruningEnabled = sqlContext.conf.inMemoryPartitionPruning
private def filteredCachedBatches(): RDD[CachedBatch] = {
// Using these variables here to avoid serialization of entire objects (if referenced directly)
// within the map Partitions closure.
val schema = stats.schema
val schemaIndex = schema.zipWithIndex
val buffers = relation.cacheBuilder.cachedColumnBuffers
buffers.mapPartitionsWithIndexInternal { (index, cachedBatchIterator) =>
val partitionFilter = newPredicate(
partitionFilters.reduceOption(And).getOrElse(Literal(true)),
schema)
partitionFilter.initialize(index)
// Do partition batch pruning if enabled
if (inMemoryPartitionPruningEnabled) {
cachedBatchIterator.filter { cachedBatch =>
if (!partitionFilter.eval(cachedBatch.stats)) {
logDebug {
val statsString = schemaIndex.map { case (a, i) =>
val value = cachedBatch.stats.get(i, a.dataType)
s"${a.name}: $value"
}.mkString(", ")
s"Skipping partition based on stats $statsString"
}
false
} else {
true
}
}
} else {
cachedBatchIterator
}
}
}
protected override def doExecute(): RDD[InternalRow] = {
if (supportsBatch) {
WholeStageCodegenExec(this)(codegenStageId = 0).execute()
} else {
inputRDD
}
}
}
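// A self-contained sketch (not Spark code) of the StartsWith pruning idea from
// the comment table above; partition names and bounds are illustrative.
object PartitionPruningSketch extends App {
  final case class PartitionStats(lowerBound: String, upperBound: String)
  // A partition may contain rows matching the prefix iff the prefix lies between
  // the truncated bounds, mirroring the substr comparison in buildFilter above.
  def mayMatchStartsWith(stats: PartitionStats, prefix: String): Boolean = {
    val n = prefix.length
    stats.lowerBound.take(n) <= prefix && prefix <= stats.upperBound.take(n)
  }
  val partitions = Seq(
    "p1" -> PartitionStats("1", "9"),
    "p2" -> PartitionStats("10", "19"),
    "p3" -> PartitionStats("20", "29"),
    "p4" -> PartitionStats("30", "39"))
  val toScan = partitions.collect { case (id, s) if mayMatchStartsWith(s, "2") => id }
  println(toScan) // List(p1, p3): only these partitions need to be read
}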
|
Aegeaner/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryTableScanExec.scala
|
Scala
|
apache-2.0
| 14,492
|
package im.actor.server.mtproto.transport
// FIXME: rename to FRAME
@SerialVersionUID(1L)
final case class TransportPackage(index: Int, body: MTProto)
|
actorapp/actor-platform
|
actor-server/actor-models/src/main/scala/im/actor/server/mtproto/transport/TransportPackage.scala
|
Scala
|
agpl-3.0
| 152
|
package com.amarjanica.discourse.models
import java.time.OffsetDateTime
import com.amarjanica.discourse.util.SerializableAsJson
import com.fasterxml.jackson.annotation.JsonProperty
case class UserDetail(
badges: List[Badge],
user: User
) extends SerializableAsJson
case class User(
id: Int,
username: String,
@JsonProperty("uploaded_avatar_id")
uploadedAvatarId: Option[Int] = None,
@JsonProperty("avatar_template")
avatarTemplate: Option[String],
name: Option[String],
email: Option[String],
@JsonProperty("last_posted_at")
lastPostedAt: Option[OffsetDateTime] = None,
@JsonProperty("last_seen_at")
lastSeenAt: Option[OffsetDateTime] = None,
@JsonProperty("bio_cooked")
bioCooked: Option[String] = None,
@JsonProperty("created_at")
createdAt: OffsetDateTime,
// @JsonProperty("can_edit")
// canEdit: Boolean,
// @JsonProperty("can_edit_username")
// canEditUsername: Boolean,
// @JsonProperty("can_edit_email")
// canEditEmail: Boolean,
// @JsonProperty("can_edit_name")
// canEditName: Boolean,
// @JsonProperty("can_send_private_message_to_user")
// canSendPrivateMessageToUser: Boolean,
@JsonProperty("bio_excerpt")
bioExcerpt: String,
@JsonProperty("trust_level")
trustLevel: Int,
moderator: Boolean,
admin: Boolean,
title: Option[String] = None,
@JsonProperty("badge_count")
badgeCount: Int,
// @JsonProperty("has_title_badges")
// hasTitleBadges: Boolean,
// @JsonProperty("number_of_deleted_posts")
// numberOfDeletedPosts: Int,
// @JsonProperty("number_of_flagged_posts")
// numberOfFlaggedPosts: Int,
// @JsonProperty("number_of_flags_given")
// numberOfFlagsGiven: Int,
// @JsonProperty("number_of_suspensions")
// numberOfSuspensions: Int,
// locale: Option[String] = None, // Some("en")
// @JsonProperty("email_digests")
// emailDigests: Boolean,
// @JsonProperty("email_private_messages")
// emailPrivateMessages: Boolean,
// @JsonProperty("email_direct")
// emailDirect: Boolean,
// @JsonProperty("email_always")
// emailAlways: Boolean,
// @JsonProperty("digest_after_days")
// digestAfterDays: Int,
// @JsonProperty("mailing_list_mode")
// mailingListMode: Boolean,
// @JsonProperty("auto_track_topics_after_msecs")
// autoTrackTopicsAfterMsecs: Long,
// @JsonProperty("new_topic_duration_minutes")
// newTopicDurationMinutes: Int,
// @JsonProperty("external_links_in_new_tab")
// externalLinksInNewTab: Boolean,
// @JsonProperty("dynamic_favicon")
// dynamicFavicon: Boolean,
// @JsonProperty("enable_quoting")
// enableQuoting: Boolean,
// @JsonProperty("muted_category_ids")
// mutedCategoryIds: List[Int],
// @JsonProperty("tracked_category_ids")
// trackedCategoryIds: List[Int],
// @JsonProperty("watched_category_ids")
// watchedCategoryIds: List[Int],
// @JsonProperty("private_messages_stats")
// privateMessagesStats: Map[String, Int],
// @JsonProperty("disable_jump_reply")
// disableJumpReply: Boolean,
@JsonProperty("gravatar_avatar_upload_id")
gravatarAvatarUploadId: Option[Int] = None,
@JsonProperty("custom_avatar_upload_id")
custom_avatar_upload_id: Option[Int] = None,
groups: List[Group]
) extends BasicUser(id, username, avatarTemplate)
case class UpdateEmail(
email: String,
@JsonProperty("api_key")
apiKey: String,
@JsonProperty("api_username")
apiUsername: String
) extends SerializableAsJson
case class UpdateUsername(
@JsonProperty("new_username")
newUsername: String,
@JsonProperty("api_key")
apiKey: String
) extends SerializableAsJson
case class UpdateTrustLevel(
@JsonProperty("user_id")
userId: Int,
level: Int
) extends SerializableAsJson
case class SuspendUser(
duration: Int, // days
reason: String
) extends SerializableAsJson
case class CreateUser(
name: String,
username: String,
password: String,
active: Boolean,
email: String,
staged: Boolean = false // flag for fake user
) extends SerializableAsJson
// TODO: similar to DiscourseStatusMessage
case class CreateUserResponse(
success: Boolean,
active: Option[Boolean],
message: String,
@JsonProperty("user_id")
userId: Int
)
|
amarjanica/discourse-scala-client
|
src/main/scala/com/amarjanica/discourse/models/User.scala
|
Scala
|
mit
| 4,056
|
package geostat
import scala.math._
/**
* Link abstract class
*
* @param nodeA first node
* @param nodeB second node
*/
@SerialVersionUID(123L)
abstract class Link(val nodeA: MapPoint, val nodeB: MapPoint) extends Serializable {
require(nodeA != null)
require(nodeB != null)
  val length = nodeA.greatCircleDistance(nodeB) // length of the link
/**
* Calculate the half-way point along a great circle path between the nodeA and nodeB points
*
* @return midpoint
*/
def midpoint(): MapPoint = {
val lat1 = nodeA.latitude.toRadians
val lon1 = nodeA.longitude.toRadians
val lat2 = nodeB.latitude.toRadians
val lon2 = nodeB.longitude.toRadians
val dlon = lon2 - lon1
val Bx = cos(lat2) * cos(dlon)
val By = cos(lat2) * sin(dlon)
val lat3 = atan2(sin(lat1) + sin(lat2), sqrt((cos(lat1) + Bx) * (cos(lat1) + Bx) + By * By)).toFloat
var lon3 = lon1 + atan2(By, cos(lat1) + Bx)
lon3 = ((lon3 + 3.0f * Pi) % (2.0f * Pi) - Pi) // normalise to -180..+180°
    new MapPoint(lat3.toDegrees, lon3.toDegrees, 0.5 * (nodeA.value + nodeB.value))
}
override def toString() = {
val builder = StringBuilder.newBuilder
builder.append("{\\"nodes\\":[")
builder.append(nodeA.key)
builder.append(",")
builder.append(nodeB.key)
builder.append("]}\\n")
builder.toString()
}
}
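// A hedged usage sketch; MapPoint's (latitude, longitude, value) constructor is
// inferred from its use in midpoint() above, and the coordinates are illustrative.
object LinkExample extends App {
  val paris = new MapPoint(48.8566, 2.3522, 1.0)
  val newYork = new MapPoint(40.7128, -74.0060, 3.0)
  val link = new Link(paris, newYork) {} // anonymous concrete subclass
  println(link.length)     // great-circle distance between the two nodes
  println(link.midpoint()) // half-way point along the great-circle path
}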
|
alessandroadamo/geostat
|
src/main/scala/geostat/Link.scala
|
Scala
|
lgpl-3.0
| 1,363
|
package controllers
import com.google.inject.ImplementedBy
import javax.inject.Inject
import play.api.i18n.MessagesApi
import scala.annotation.tailrec
import com.overviewdocs.database.HasBlockingDatabase
import com.overviewdocs.models.{Node,Tree}
import com.overviewdocs.models.tables.{Nodes,Trees}
import controllers.auth.AuthorizedAction
import controllers.auth.Authorities.userOwningTree
class NodeController @Inject() (
storage: NodeController.Storage,
val controllerComponents: ControllerComponents
) extends BaseController {
private[controllers] val rootChildLevels = 2 // When showing the root, show this many levels of children
def index(treeId: Long) = authorizedAction(userOwningTree(treeId)) { implicit request =>
storage.findTree(treeId) match {
case None => NotFound
case Some(tree) => {
val nodes = storage.findRootNodes(treeId, rootChildLevels)
if (nodes.isEmpty) {
NotFound
} else {
Ok(views.json.Node.index(nodes))
.withHeaders(CACHE_CONTROL -> "max-age=0")
}
}
}
}
def show(treeId: Long, id: Long) = authorizedAction(userOwningTree(treeId)) { implicit request =>
val nodes = storage.findChildNodes(treeId, id)
Ok(views.json.Node.index(nodes))
.withHeaders(CACHE_CONTROL -> "max-age=0")
}
}
object NodeController {
@ImplementedBy(classOf[BlockingDatabaseStorage])
trait Storage {
/** A tree of Nodes for the document set, starting at the root.
*
* Nodes are returned in order: when iterating over the return value, if a
* Node refers to a parentId, the Node corresponding to that parentId has
* already appeared in the return value.
*/
def findRootNodes(treeId: Long, depth: Int) : Iterable[Node]
    /** The direct descendants of the given parent Node ID. */
    def findChildNodes(treeId: Long, parentNodeId: Long) : Iterable[Node]
def findTree(treeId: Long) : Option[Tree]
}
class BlockingDatabaseStorage @Inject() extends Storage with HasBlockingDatabase {
import database.api._
private def childrenOf(nodes: Iterable[Node]) : Iterable[Node] = {
if (nodes.nonEmpty) {
blockingDatabase.seq(Nodes.filter(_.parentId inSet nodes.map(_.id)))
} else {
Seq()
}
}
@tailrec
private def addChildNodes(parentNodes: Iterable[Node], thisLevelNodes: Iterable[Node], depth: Int) : Iterable[Node] = {
if (thisLevelNodes.isEmpty) {
parentNodes
} else if (depth == 0) {
parentNodes ++ thisLevelNodes
} else {
addChildNodes(parentNodes ++ thisLevelNodes, childrenOf(thisLevelNodes), depth - 1)
}
}
override def findRootNodes(treeId: Long, depth: Int) = {
val root: Seq[Node] = blockingDatabase.seq(
Nodes.filter(_.id in Trees.filter(_.id === treeId).map(_.rootNodeId))
)
addChildNodes(Seq(), root, depth)
}
override def findChildNodes(treeId: Long, parentNodeId: Long) = {
blockingDatabase.seq(
Nodes
.filter(_.rootId in Trees.filter(_.id === treeId).map(_.rootNodeId))
.filter(_.parentId === parentNodeId)
)
}
override def findTree(treeId: Long) = {
blockingDatabase.option(Trees.filter(_.id === treeId))
}
}
}
|
overview/overview-server
|
web/app/controllers/NodeController.scala
|
Scala
|
agpl-3.0
| 3,309
|
import org.scalatest.FunSuite
/**
* Created by matthiasheck on 01.11.15.
*/
class MasterDataProvider$Test extends FunSuite {
test("test json loading") {
println("Starting json load test of master countries")
assert(MasterDataProvider.getCountryName("DZ").toUpperCase == "Algeria".toUpperCase)
assert(MasterDataProvider.getCountryName("DE").toUpperCase == "GErmany".toUpperCase)
}
}
|
matze42/DataVizualizr
|
src/test/scala/MasterDataProvider$Test.scala
|
Scala
|
mit
| 408
|
/*
* Copyright (c) 2014-2019 Israel Herraiz <isra@herraiz.org>
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use, copy,
* modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
* BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
// ---------------------
// Test for example 3.12
// ---------------------
package chap03
import org.specs2.mutable._
object Ex12Spec extends Specification {
"The reverse function" should {
"return Nil with an empty list" in {
Ex12.reverse(Nil) mustEqual Nil
}
"return a reversed list" in {
Ex12.reverse(List(3,1,2,0)) mustEqual List(0,2,1,3)
}
}
}
|
iht/fpinscala
|
src/test/scala/chap03/ex12Spec.scala
|
Scala
|
mit
| 1,537
|
/*
* Action.scala
*
* Copyright 2017 wayfarerx <x@wayfarerx.net> (@thewayfarerx)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.wayfarerx.circumvolve.service
import net.wayfarerx.circumvolve.model.{Member, Role}
/**
* Base type for actions parsed from a message.
*/
sealed trait Action
/**
* Implementations of the possible actions.
*/
object Action {
/**
* Opens a roster with the specified roles and counts.
*
* @param slots The mapping of required roles to the number of members needed per role.
*/
case class Open(slots: Vector[(Role, Int)]) extends Action
/**
* Abandons any in-progress roster.
*/
case object Abort extends Action
/**
* Completes the in-progress roster and creates a team.
*/
case object Close extends Action
/**
* Assigns members to the specified roles.
*
* @param assignments The collection of members and their assigned roles.
*/
case class Assign(assignments: Vector[(Member, Role)]) extends Action
/**
* Releases the specified members from their assigned roles.
*
   * @param members The set of members to release from their assigned roles.
*/
case class Release(members: Set[Member]) extends Action
/**
* Offers the specified member for the supplied roles.
*
* @param member The name of the member that is volunteering.
* @param roles The roles that are being volunteered for.
*/
case class Offer(member: Member, roles: Vector[Role]) extends Action
/**
* Kicks a member from certain roles in the in-progress roster.
*
* @param member The name of the member that is dropping.
* @param limitToRoles The only roles to drop or empty to drop all roles.
*/
case class Kick(member: Member, limitToRoles: Vector[Role]) extends Action
/**
* Volunteers the specified member for the supplied roles.
*
* @param roles The roles that are being volunteered for.
*/
case class Volunteer(roles: Vector[Role]) extends Action
/**
* Drops a member from certain roles in the in-progress roster.
*
* @param limitToRoles The only roles to drop or empty to drop all roles.
*/
case class Drop(limitToRoles: Vector[Role]) extends Action
/**
   * Queries the roles a member has volunteered for.
*
* @param member The member to query or none to query the author.
*/
case class Query(member: Option[Member]) extends Action
/**
* An action that prints the help message.
*/
case object Help extends Action
}
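// A hedged sketch of exhaustive handling of the sealed Action ADT above; the
// summaries are illustrative.
object ActionExample {
  import Action._
  def describe(action: Action): String = action match {
    case Open(slots)          => s"open a roster with ${slots.size} role slot(s)"
    case Abort                => "abandon the in-progress roster"
    case Close                => "close the roster and form the team"
    case Assign(assignments)  => s"assign ${assignments.size} member(s) to roles"
    case Release(members)     => s"release ${members.size} member(s)"
    case Offer(member, roles) => s"offer $member for ${roles.size} role(s)"
    case Kick(member, limit)  => s"kick $member from ${if (limit.isEmpty) "all roles" else limit.mkString(", ")}"
    case Volunteer(roles)     => s"volunteer for ${roles.size} role(s)"
    case Drop(limit)          => s"drop ${if (limit.isEmpty) "all roles" else limit.mkString(", ")}"
    case Query(member)        => s"query roles for ${member.getOrElse("the author")}"
    case Help                 => "print the help message"
  }
}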
|
wayfarerx/circumvolve
|
server/src/main/scala/net/wayfarerx/circumvolve/service/Action.scala
|
Scala
|
apache-2.0
| 2,998
|
package pl.newicom.dddd.serialization
import com.typesafe.config.Config
import org.json4s._
import org.json4s.ext.{EnumNameSerializer, JodaTimeSerializers, UUIDSerializer}
import pl.newicom.dddd.messaging.PublisherTypeValue
import pl.newicom.dddd.serialization.JsonSerHints.DefaultSerializationHints
import pl.newicom.dddd.serialization.{JsonAbstractSerHints => AbstractHints, JsonExtraSerHints => ExtraHints, JsonSerHints => FinalizedHints}
sealed trait JsonAbstractSerHints {
def ++ (other: AbstractHints): AbstractHints = this match {
case extra: ExtraHints =>
extra ++ other
case fin: FinalizedHints => other match {
case extra: ExtraHints =>
extra ++ fin
case _ =>
throw new UnsupportedOperationException("Merging two instances of finalized hints not supported!")
}
}
}
case class JsonSerHints(extraHints: ExtraHints, formats: Formats = DefaultSerializationHints) extends AbstractHints {
def toFormats: Formats = formats ++ extraHints.serializers + extraHints.typeHints
def ++ (other: ExtraHints): FinalizedHints = copy(
extraHints = ExtraHints(
extraHints.typeHints + other.typeHints,
extraHints.serializers ++ other.serializers
)
)
}
case class JsonExtraSerHints(typeHints: TypeHints, serializers: List[Serializer[_]]) extends AbstractHints {
override def ++ (other: AbstractHints): AbstractHints = other match {
case extra: ExtraHints => this ++ extra
case fin: FinalizedHints => this ++ fin
}
  def ++ (other: ExtraHints): ExtraHints =
    ExtraHints(
      typeHints = typeHints + other.typeHints,
      serializers = serializers ++ other.serializers
    )
def ++ (other: FinalizedHints): FinalizedHints =
other ++ this
}
object JsonSerHints {
val NoExtraHints = ExtraHints(NoTypeHints, List())
val DefaultSerializationHints = FinalizedHints(NoExtraHints, DefaultFormats ++ JodaTimeSerializers.all + UUIDSerializer + new EnumNameSerializer(PublisherTypeValue))
def fromConfig(config: Config) = new FromConfigJsonSerializationHintsProvider(config).hints()
def apply(formats: Formats): FinalizedHints = DefaultSerializationHints.copy(formats = formats)
implicit def fromListOfClassNames(hints: List[String]): ExtraHints =
ExtraHints(
typeHints = if (hints.isEmpty) NoTypeHints else FullTypeHints(hints.map(Class.forName)),
serializers = List()
)
implicit def toFormats(hints: AbstractHints): Formats = hints match {
case extra: ExtraHints => (extra ++ DefaultSerializationHints).toFormats
case fin: FinalizedHints => fin.toFormats
}
}
|
AndreyLadniy/akka-ddd
|
akka-ddd-protocol/src/main/scala/pl/newicom/dddd/serialization/JsonSerHints.scala
|
Scala
|
mit
| 2,693
|
package example
package data
import slick.jdbc.H2Profile.api._
import models.User
class Users(tag: Tag) extends Table[User](tag, "users") {
def id = column[Int]("id", O.PrimaryKey)
def name = column[String]("name")
def * = (id, name) <> (User.tupled, User.unapply)
}
object Users extends TableQuery[Users](new Users(_))
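// A hedged query sketch, assuming models.User(id: Int, name: String) as implied
// by the projection above.
object UsersQueries {
  val byId = Users.filter(_.id === 1).result   // select by primary key
  val insertAlice = Users += User(1, "alice")  // insert one row
  val allNames = Users.map(_.name).result      // project the name column
}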
|
gabro/eff-api
|
src/main/scala/data/tables/Users.scala
|
Scala
|
mit
| 330
|
package skutek.operations
import skutek.abstraction._
import org.specs2._
trait CanLaunchTheMissiles { this: Specification =>
case class Missile() {
private var count = 0
def launch() = { count += 1 }
def launch_! = Eval { launch() }
def launchedOnce = count == 1
def mustHaveLaunchedOnce = count must_== 1
def mustNotHaveLaunched = count must_== 0
}
}
|
marcinzh/skutek
|
modules/core/src/test/scala/skutek/operations/CanLaunchTheMissiles.scala
|
Scala
|
mit
| 386
|
package eventstore
package akka
import scala.concurrent.duration._
import _root_.akka.actor.ActorRef
import _root_.akka.testkit.{TestActorRef, TestKitBase, TestProbe}
class SubscribeToStreamCatchingUpITest extends TestConnection {
sequential
"subscribe catching up" should {
"be able to subscribe to non existing stream" in new SubscribeCatchingUpScope {
newSubscription()
}
"be able to subscribe to non existing stream and then catch event" in new SubscribeCatchingUpScope {
val subscriptionActor = newSubscription()
expectMsg(LiveProcessingStarted)
expectNoEvents()
val event = append(newEventData)
expectEvent(event)
}
"be able to subscribe to non existing stream from number" in new SubscribeCatchingUpScope {
val subscriptionActor = newSubscription(Some(EventNumber.Exact(0)))
append(newEventData)
expectMsg(LiveProcessingStarted)
expectNoEvents()
val event = append(newEventData)
expectEvent(event)
}
"fail if stream deleted" in new SubscribeCatchingUpScope {
appendEventToCreateStream()
deleteStream()
val subscriptionActor = newSubscription()
expectEsException() must throwA[StreamDeletedException]
expectTerminated(subscriptionActor)
}
"allow multiple subscriptions to same stream" in new SubscribeCatchingUpScope {
val probes = List.fill(5)(TestProbe.apply())
probes.foreach(x => newSubscription(client = x.ref))
probes.foreach(_.expectMsg(LiveProcessingStarted))
val event = append(newEventData)
probes.foreach(x => expectEvent(event, x))
}
"stop subscription after actor stopped" in new SubscribeCatchingUpScope {
appendEventToCreateStream()
val subscriptionActor = newSubscription()
subscriptionActor.stop()
expectTerminated(subscriptionActor)
}
"read all existing events and keep listening to new ones" in new SubscribeCatchingUpScope {
val event = append(newEventData)
val subscriptionActor = newSubscription()
expectEvent(event)
expectMsg(LiveProcessingStarted)
expectNoEvents()
val event2 = append(newEventData)
expectEvent(event2)
}
"filter events and keep listening to new ones" in new SubscribeCatchingUpScope {
val subscriptionActor = newSubscription(Some(EventNumber.Exact(0)))
expectMsg(LiveProcessingStarted)
append(newEventData)
val event = append(newEventData)
expectEvent(event)
expectNoEvents()
val event2 = append(newEventData)
expectEvent(event2)
}
"filter events and keep listening to new ones" in new SubscribeCatchingUpScope {
append(newEventData)
val event = append(newEventData)
val subscriptionActor = newSubscription(Some(EventNumber.Exact(0)))
expectEvent(event)
expectMsg(LiveProcessingStarted)
expectNoEvents()
val event2 = append(newEventData)
expectEvent(event2)
}
"filter events and work if nothing was written after subscription" in new SubscribeCatchingUpScope {
append(newEventData)
val event = append(newEventData)
val subscriptionActor = newSubscription(Some(EventNumber.Exact(0)))
expectEvent(event)
expectMsg(LiveProcessingStarted)
expectNoEvents()
}
"read linked events if resolveLinkTos = false" in new SubscribeCatchingUpScope {
val (linked, link) = linkedAndLink()
newSubscription(resolveLinkTos = false)
expectEvent(linked)
expectMsgType[Event]
expectEvent(link)
expectMsg(LiveProcessingStarted)
}
"read linked events if resolveLinkTos = true" in new SubscribeCatchingUpScope {
val (linked, link) = linkedAndLink()
newSubscription(resolveLinkTos = true)
expectEvent(linked)
expectMsgType[Event]
expectEvent(ResolvedEvent(linked, link))
expectMsg(LiveProcessingStarted)
}
"catch linked events if resolveLinkTos = false" in new SubscribeCatchingUpScope {
newSubscription(resolveLinkTos = false)
expectMsg(LiveProcessingStarted)
val (linked, link) = linkedAndLink()
expectEvent(linked)
expectMsgType[Event]
expectEvent(link)
}
"catch linked events if resolveLinkTos = true" in new SubscribeCatchingUpScope {
newSubscription(resolveLinkTos = true)
expectMsg(LiveProcessingStarted)
val (linked, link) = linkedAndLink()
expectEvent(linked)
expectMsgType[Event]
expectEvent(ResolvedEvent(linked, link))
}
}
private trait SubscribeCatchingUpScope extends TestConnectionScope {
def expectNoEvents() = expectNoMessage(1.second)
def newSubscription(
fromNumberExclusive: Option[EventNumber.Exact] = None,
resolveLinkTos: Boolean = false,
client: ActorRef = testActor
) = {
val a = TestActorRef(StreamSubscriptionActor.props(
connection = actor,
client = client,
streamId = streamId,
fromNumberExclusive = fromNumberExclusive,
None,
Settings.Default.copy(resolveLinkTos = resolveLinkTos)
))
watch(a)
a
}
def expectActorTerminated(actor: TestActorRef[_]): Unit = {
expectTerminated(actor)
actor.underlying.isTerminated must beTrue
expectNoEvents()
}
def expectEvent(x: Event, probe: TestKitBase = this) = {
probe.expectMsgType[Event].fixDate mustEqual x
}
}
}
|
EventStore/EventStore.JVM
|
client/src/test/scala/eventstore/akka/SubscribeToStreamCatchingUpITest.scala
|
Scala
|
bsd-3-clause
| 5,522
|
/* __ *\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
package scala.collection
package mutable
import generic._
/** A subtrait of scala.collection.IndexedSeq which represents sequences
* that can be mutated.
*
* @since 2.8
*/
trait IndexedSeqOptimized[A, +Repr] extends IndexedSeqLike[A, Repr] with scala.collection.IndexedSeqOptimized[A, Repr]
|
cran/rkafkajars
|
java/scala/collection/mutable/IndexedSeqOptimized.scala
|
Scala
|
apache-2.0
| 827
|
package com.bio4j.dynamograph.parser.go
import scala.xml.pull._
import scala.io.Source
import com.bio4j.dynamograph.model.Properties._
import com.bio4j.dynamograph.model.go.GoSchema._
import com.bio4j.dynamograph.parser.ParsingContants
import com.bio4j.dynamograph.parser.SingleElement
class PullGoParser(val src: Source) extends AnyGoParser {
override def foreach[U](f: SingleElement => U) = {
val reader = new XMLEventReader(src)
while(reader.hasNext){
reader.next match {
case EvElemStart(pre, "Class", _, _) => {
f(parseSingleElement(reader))
}
case _ =>
}
}
}
private def parseSingleElement(parser: XMLEventReader) : SingleElement = {
var done = false
var vertex : Map[String, String] = Map()
var edges : List[Map[String, String]] = List()
while (parser.hasNext && !done){
parser.next match {
case EvElemEnd(_, "Class") => done = true
case EvElemStart(pre, "Class", attrs, _) => skip("Class", parser)
case EvElemStart(pre, "subClassOf", attrs, _) => edges :::= (parseSingleRelation(attrs, parser))
case EvElemStart(pre, PullGoParser.namespaceTag, _, _) => edges ::= parseNamespaceRelation(parser)
case EvElemStart(pre, label, _, _) if PullGoParser.mapping.contains(label) => vertex += parseSingleProperty(label, parser)
case _ => ()
}
}
vertex += ((ParsingContants.vertexType, GoTermType.label))
SingleElement(vertex, edges)
}
  private def skip(label: String, parser: XMLEventReader) = {
    var done = false
    while (parser.hasNext && !done){
      parser.next match {
        case EvElemEnd(_, endLabel) if endLabel == label => done = true
        case _ =>
      }
    }
  }
private def parseSingleProperty(label : String, parser: XMLEventReader) : (String, String) = {
var done = false
var value : String = ""
while (parser.hasNext && !done){
parser.next match {
case EvText(text) => value += text
case EvEntityRef(entity) => value += entity
case EvElemEnd(_, endLabel) if label == endLabel => done = true
case _ =>
}
}
(PullGoParser.mapping.get(label).get, value)
}
private def parseNamespaceRelation(parser: XMLEventReader): Map[String,String] = {
var done = false
var value : String = null
while (parser.hasNext && !done){
parser.next match {
case EvText(text) => value = text
case EvElemEnd(_, endLabel) if PullGoParser.namespaceTag == endLabel => done = true
case _ =>
}
}
Map(ParsingContants.relationType -> NamespaceType.label, targetId.label -> value)
}
private def parseSingleRelation(attrs : scala.xml.MetaData,parser: XMLEventReader) : List[Map[String, String]] =
getAttributeValue(attrs, PullGoParser.resource) match {
case Some(StringPrefixMatcher(id)) => List(Map(ParsingContants.relationType -> IsAType.label, targetId.label -> id))
case _ => parseCompoundRelation(parser)
}
private def parseCompoundRelation(parser: XMLEventReader) : List[Map[String, String]] = {
var done = false
var rType : String = null
var value : String = null
while (parser.hasNext && !done){
parser.next match {
case EvElemEnd(_, "subClassOf") => done = true
case EvElemStart(pre, "onProperty", attrs, _) => {
val rVal = getAttributeValue(attrs, PullGoParser.resource).get
if (PullGoParser.relationMapping.contains(rVal))
rType = PullGoParser.relationMapping.get(getAttributeValue(attrs, PullGoParser.resource).get).get
}
case EvElemStart(pre, "someValuesFrom", attrs, _) => value = StringPrefixMatcher(getAttributeValue(attrs, PullGoParser.resource))
case _ =>
}
}
if (rType != null)
List(Map(ParsingContants.relationType -> rType, targetId.label -> value))
else
List()
}
  private def getAttributeValue(attrs : scala.xml.MetaData, attrName : String) : Option[String] =
    attrs.asAttrMap.get(attrName)
private object StringPrefixMatcher{
def unapply(str:String):Option[String]= {
str match {
case s if s.startsWith("http://purl.obolibrary.org/obo/") => Some(s.stripPrefix("http://purl.obolibrary.org/obo/").replace('_',':'))
case _ => None
}
}
def apply(str:Option[String]):String= {
str match {
case Some(s) if s.startsWith("http://purl.obolibrary.org/obo/") => s.stripPrefix("http://purl.obolibrary.org/obo/").replace('_',':')
case _ => null
}
}
}
}
object PullGoParser{
val idTag = "id"
val commentTag = "comment"
val nameTag = "label"
val definitionTag = "IAO_0000115"
val namespaceTag = "hasOBONamespace"
val namespaceAttributeName = "namespace"
val partOf = (PartOfType.label, "http://purl.obolibrary.org/obo/BFO_0000050")
val hasPart = (HasPartType.label,"http://purl.obolibrary.org/obo/BFO_0000051")
val regulates = (RegulatesType.label,"http://purl.obolibrary.org/obo/RO_0002211")
val negativelyRegulates = (NegativelyRegulatesType.label,"http://purl.obolibrary.org/obo/RO_0002212")
val positivelyRegulates = (PositivelyRegulatesType.label,"http://purl.obolibrary.org/obo/RO_0002213")
// relationships properties
val is_a = "is_a"
val resource = "rdf:resource"
val mapping = Map(idTag -> id.label, nameTag -> name.label, namespaceTag -> namespaceAttributeName, definitionTag -> definition.label, commentTag -> comment.label)
val relationMapping = Map(partOf._2 -> partOf._1,hasPart._2 -> hasPart._1,regulates._2 -> regulates._1,
negativelyRegulates._2 -> negativelyRegulates._1,positivelyRegulates._2 -> positivelyRegulates._1)
}
|
bio4j/dynamograph
|
src/main/scala/com/bio4j/dynamograph/parser/go/PullGoParser.scala
|
Scala
|
agpl-3.0
| 5,736
|
// scalac fails for test2/3
// dotc fails for all three
object test1 {
class Tree[-T >: Null]
def f(x: Any): Tree[Null] = x match {
case y: Tree[_] => y
}
}
object test2 {
class Tree[T >: Null]
def f(x: Any): Tree[Null] = x match {
case y: Tree[_] => y // error
}
}
object test3 {
class Tree[+T >: Null]
def f(x: Any): Tree[Null] = x match {
case y: Tree[_] => y // error
}
}
class Test5 {
"": ({ type U = this.type })#U // error
}
|
dotty-staging/dotty
|
tests/neg/boundspropagation.scala
|
Scala
|
apache-2.0
| 550
|
object Test {
def compare(first: Any, second: Any): Any = {
(first, second) match {
case (k: Int, o: Int) => k compare o
//why the next case matches (Float, Int) but does not match (Int, Float) ???
case (k: Number, o: Number) => k.doubleValue() compare o.doubleValue()
case _ => "BOGON"
// throw new Exception("Unsupported compare " + first + "; " + second)
}
}
def main(args: Array[String]): Unit = {
println("Both Int", -1, compare(0, 1))
println("Both Float", 1, compare(1.0, 0.0))
println("Float then Int", 0, compare(10.0, 10))
println("Int then Float", 0, compare(10, 10.0)) //this fails with an exception
}
}
|
folone/dotty
|
tests/pending/run/t2337.scala
|
Scala
|
bsd-3-clause
| 694
|
package org.andrewconner.spot.playapp
import java.util.concurrent.{ Callable, TimeUnit }
import akka.actor.ActorSystem
import akka.stream.Materializer
import org.andrewconner.spot.core.{ TaskActionBuilder, TaskAction }
import org.joda.time.DateTime
import play.api.mvc._
import org.feijoas.mango.common.cache._
import play.api.http.Status
import scala.collection.mutable
import scala.concurrent.{ Promise, Future }
import scala.concurrent.duration._
import play.api.libs.iteratee.Execution.Implicits.trampoline
import scalaz.concurrent.Task
trait Tarpit {
val maxRequests: Int
val graceThreshhold: Int
val timePeriod: Duration
val maxDelay: Duration
private val maxCacheSize = 1024L
private val bucketsPerPeriod = 4
import Tarpit._
def addAndCheck(clientId: String): TarpitDelay = {
calcDelay(updateClientAndGetRequestCount(clientId))
}
type SecondOfDay = Int
type ClientHitRecord = mutable.Buffer[(SecondOfDay, Int)]
private lazy val requestCountCache: LoadingCache[String, ClientHitRecord] =
CacheBuilder.newBuilder()
.expireAfterAccess(timePeriod.length, timePeriod.unit)
.concurrencyLevel(1)
.maximumSize(maxCacheSize)
.build { _: String => mutable.Buffer.empty[(SecondOfDay, Int)] }
private def updateClientAndGetRequestCount(clientId: String) = {
val cache: ClientHitRecord = requestCountCache(clientId)
addEvent(cache, new DateTime())
cache.map(_._2).sum
}
private def timeToKey(time: DateTime) = {
((time.getMillisOfDay * bucketsPerPeriod) / timePeriod.toMillis).toInt
}
private def addEvent(cache: ClientHitRecord, time: DateTime) = cache.synchronized {
val key = timeToKey(time)
cache.lastOption match {
case Some((lk, lv)) if lk == key =>
cache(cache.length - 1) = (key, lv + 1)
case _ =>
cache += ((key, 1))
}
if (cache.length > bucketsPerPeriod) {
cache.remove(0, cache.length - bucketsPerPeriod)
}
}
private def sigmoid(t: Double) = 1.0 / (1 + Math.pow(Math.E, -t))
private def calcDelay(hitCount: Int): TarpitDelay = {
    if (hitCount < graceThreshold) {
NoDelay
} else if (hitCount > maxRequests) {
println(s" - blocking, $hitCount requests")
Block
} else {
val asymptoticMax = 6.0
val delayMs = sigmoid((hitCount.toDouble / maxRequests.toDouble) * asymptoticMax * 2 - asymptoticMax) * maxDelay.toMillis
if (delayMs < 10.0) {
NoDelay
} else {
println(s" - delaying ${delayMs.toInt}ms")
DelayMs(delayMs.toInt)
}
}
}
}
object Tarpit {
sealed trait TarpitDelay
case object Block extends TarpitDelay
case object NoDelay extends TarpitDelay
case class DelayMs(value: Int) extends TarpitDelay
}
class TarpitFilter(actorSystem: ActorSystem)(implicit val mat: Materializer) extends Filter {
val tarpit = new Tarpit {
val timePeriod = 1.minutes
val maxDelay = 6.seconds
val maxRequests: Int = 100
    val graceThreshold = maxRequests / 5
}
def apply(nextFilter: RequestHeader => Future[Result])(requestHeader: RequestHeader): Future[Result] = {
import Tarpit._
tarpit.addAndCheck(requestHeader.remoteAddress) match {
case NoDelay =>
nextFilter(requestHeader)
case Block =>
Future.successful(Results.Status(Status.BAD_GATEWAY))
case DelayMs(ms: Int) =>
val promise = Promise[Unit]()
actorSystem.scheduler.scheduleOnce(ms.milliseconds) { promise.success((): Unit); () }
promise.future.flatMap { _ => nextFilter(requestHeader) }(trampoline)
}
}
}
// TarpitAction(10, 1.minute, 3.seconds)
// TarpitAction(10, 1.minute)
// TarpitAction(10)
trait BaseActions {
def actorSystem: ActorSystem // for scheduling of the tarpit
def TarpitAction(maxRequests: Int, within: Duration, maxDelay: Duration) = new TarpitActionBuilder(actorSystem, maxRequests, within, maxDelay)
}
trait BaseController extends Controller with BaseActions {
}
class TarpitActionBuilder(actorSystem: ActorSystem, maxRequests: Int, timePeriod: Duration, maxDelay: Duration) extends TaskActionBuilder[Request] {
private val tarpit: Tarpit = {
val _maxRequests = maxRequests; val _timePeriod = timePeriod; val _maxDelay = maxDelay
new Tarpit {
val maxRequests = _maxRequests
val timePeriod = _timePeriod
val maxDelay = _maxDelay
      val graceThreshold = _maxRequests / 5
}
}
def invokeBlockT[A](request: Request[A], block: (Request[A]) => Task[Result]) = {
block(request)
}
}
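// A self-contained sketch of the sigmoid delay curve computed in Tarpit.calcDelay
// above, using the same constants as TarpitFilter (maxRequests = 100, maxDelay = 6
// seconds); the sampled hit counts are illustrative.
object TarpitDelayCurveSketch extends App {
  def sigmoid(t: Double) = 1.0 / (1 + math.pow(math.E, -t))
  val maxRequests = 100
  val maxDelayMs = 6000.0
  val asymptoticMax = 6.0
  for (hits <- Seq(20, 40, 60, 80, 100)) {
    val delayMs = sigmoid((hits.toDouble / maxRequests) * asymptoticMax * 2 - asymptoticMax) * maxDelayMs
    println(f"$hits%3d hits -> ${delayMs}%.0f ms of delay")
  }
}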
|
andrewconner/spotsy
|
app/org/andrewconner/spot/playapp/TarpitFilter.scala
|
Scala
|
mit
| 4,546
|
package com.github.agourlay.cornichon.resolver
import java.util.concurrent.atomic.AtomicLong
import java.util.regex.Matcher
import cats.syntax.either._
import com.github.agourlay.cornichon.core._
import com.github.agourlay.cornichon.json.{ CornichonJson, JsonPath }
import com.github.agourlay.cornichon.resolver.PlaceholderGenerator._
import com.github.agourlay.cornichon.util.Caching
object PlaceholderResolver {
private val rightNil = Nil.asRight
private val placeholdersCache = Caching.buildCache[String, Either[CornichonError, List[Placeholder]]]()
private val globalAtomicLong = new AtomicLong(1L) // can create non deterministic runs
def globalNextLong(): Long = globalAtomicLong.getAndIncrement()
val builtInPlaceholderGenerators: List[PlaceholderGenerator] =
randomUUID ::
randomPositiveInteger ::
randomString ::
randomAlphanumString ::
randomBoolean ::
scenarioUniqueNumber ::
globalUniqueNumber ::
randomTimestamp ::
currentTimestamp ::
Nil
private val placeholderGeneratorsByLabel: Map[String, PlaceholderGenerator] =
builtInPlaceholderGenerators.groupBy(_.key).map { case (k, values) => (k, values.head) } // we know it is not empty
def findPlaceholders(input: String): Either[CornichonError, List[Placeholder]] =
placeholdersCache.get(input, k => PlaceholderParser.parse(k))
private def resolvePlaceholder(ph: Placeholder)(session: Session, rc: RandomContext, customExtractors: Map[String, Mapper], sessionOnlyMode: Boolean): Either[CornichonError, String] =
placeholderGeneratorsByLabel.get(ph.key) match {
case Some(pg) =>
// in session mode we leave the generators untouched to avoid side effects
val v = if (sessionOnlyMode) ph.fullKey else pg.gen(rc)
v.asRight
case None =>
val otherKeyName = ph.key
val otherKeyIndex = ph.index
(session.get(otherKeyName, otherKeyIndex), customExtractors.get(otherKeyName)) match {
case (v, None) => v
case (Left(_), Some(mapper)) => applyMapper(otherKeyName, mapper, ph)(session, rc)
case (Right(_), Some(_)) => AmbiguousKeyDefinition(otherKeyName).asLeft
}
}
def fillPlaceholdersResolvable[A: Resolvable](resolvableInput: A)(session: Session, randomContext: RandomContext, customExtractors: Map[String, Mapper]): Either[CornichonError, A] = {
val ri = Resolvable[A]
val resolvableForm = ri.toResolvableForm(resolvableInput)
fillPlaceholders(resolvableForm)(session, randomContext, customExtractors).map { resolved =>
// If the input did not contain placeholders,
// we can return the original value directly
// and avoid an extra transformation from the resolved form
if (resolved == resolvableForm) resolvableInput else ri.fromResolvableForm(resolved)
}
}
def fillPlaceholders(input: String)(session: Session, rc: RandomContext, customExtractors: Map[String, Mapper], sessionOnlyMode: Boolean = false): Either[CornichonError, String] =
findPlaceholders(input).flatMap {
_.foldLeft(input.asRight[CornichonError]) { (accE, ph) =>
for {
acc <- accE
resolvedValue <- resolvePlaceholder(ph)(session, rc, customExtractors, sessionOnlyMode)
} yield ph.pattern.matcher(acc).replaceAll(Matcher.quoteReplacement(resolvedValue))
}
}
def fillPlaceholdersMany(params: Seq[(String, String)])(session: Session, randomContext: RandomContext, customExtractors: Map[String, Mapper]): Either[CornichonError, List[(String, String)]] =
params.foldRight[Either[CornichonError, List[(String, String)]]](rightNil) {
case ((name, value), accE) =>
for {
acc <- accE
resolvedName <- fillPlaceholders(name)(session, randomContext, customExtractors)
resolvedValue <- fillPlaceholders(value)(session, randomContext, customExtractors)
} yield (resolvedName, resolvedValue) :: acc // foldRight + prepend
}
private def applyMapper(bindingKey: String, m: Mapper, ph: Placeholder)(session: Session, randomContext: RandomContext): Either[CornichonError, String] = m match {
case SimpleMapper(gen) =>
Either.catchNonFatal(gen()).leftMap(SimpleMapperError(ph.fullKey, _))
case SessionMapper(gen) =>
gen(session).leftMap(SessionMapperError(ph.fullKey, _))
case RandomMapper(gen) =>
Either.catchNonFatal(gen(randomContext)).leftMap(RandomMapperError(ph.fullKey, _))
case HistoryMapper(key, transform) =>
session.getHistory(key)
.leftMap { o: CornichonError => MapperKeyNotFoundInSession(bindingKey, o) }
.map(transform)
case TextMapper(key, transform) =>
session.get(key, ph.index)
.leftMap { o: CornichonError => MapperKeyNotFoundInSession(bindingKey, o) }
.map(transform)
case JsonMapper(key, jsonPath, transform) =>
session.get(key, ph.index)
.leftMap { o: CornichonError => MapperKeyNotFoundInSession(bindingKey, o) }
.flatMap { sessionValue =>
// No placeholders in JsonMapper to avoid accidental infinite recursions.
JsonPath.runStrict(jsonPath, sessionValue)
.map(json => transform(CornichonJson.jsonStringValue(json)))
}
}
}
case class AmbiguousKeyDefinition(key: String) extends CornichonError {
lazy val baseErrorMessage = s"ambiguous definition of key '$key' - it is present in both session and extractors"
}
case class MapperKeyNotFoundInSession(key: String, underlyingError: CornichonError) extends CornichonError {
lazy val baseErrorMessage = s"Error occurred while running Mapper attached to key '$key'"
override val causedBy = underlyingError :: Nil
}
case class RandomMapperError[A](key: String, e: Throwable) extends CornichonError {
lazy val baseErrorMessage = s"exception thrown in RandomMapper '$key' :\\n'${CornichonError.genStacktrace(e)}'"
}
case class SimpleMapperError[A](key: String, e: Throwable) extends CornichonError {
lazy val baseErrorMessage = s"exception thrown in SimpleMapper '$key' :\\n'${CornichonError.genStacktrace(e)}'"
}
case class SessionMapperError[A](key: String, underlyingError: CornichonError) extends CornichonError {
lazy val baseErrorMessage = s"Error thrown in SessionMapper '$key')'"
override val causedBy = underlyingError :: Nil
}
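// A minimal usage sketch, not part of the original file. It assumes the
// methods above live on a `PlaceholderResolver` object (as the file path
// suggests) and that `Session.newEmpty`, `addValuesUnsafe` and
// `RandomContext.fromSeed` exist as in recent cornichon versions:
//
//   val session = Session.newEmpty.addValuesUnsafe("name" -> "John")
//   PlaceholderResolver.fillPlaceholders("hello <name>")(session, RandomContext.fromSeed(1L), Map.empty)
//   // => Right("hello John")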
|
agourlay/cornichon
|
cornichon-core/src/main/scala/com/github/agourlay/cornichon/resolver/PlaceholderResolver.scala
|
Scala
|
apache-2.0
| 6,354
|
package dhg.util
/**
* Timing utilities
*
* @author Dan Garrette (dhgarrette@gmail.com)
*/
object Time {
def time[T](name: String, block: => T): T = {
time(name, block, println)
}
def time[T, R](name: String, block: => T, log: String => R): T = {
log(s"starting: $name")
val (r, t) = timer(block)
log(s"finished: $name in $t seconds")
r
}
def time1[T](name: String, block: => T): T = {
time1(name, block, println)
}
def time1[T, R](name: String, block: => T, log: String => R): T = {
val (r, t) = timer(block)
log(s"$name - $t seconds")
r
}
def timer[T](block: => T): (T, Double) = {
val startTime = System.currentTimeMillis()
val r = block
(r, (System.currentTimeMillis() - startTime) / 1000.0)
}
}
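// A minimal usage sketch, not part of the original file, showing both entry
// points: `time` logs start/finish lines around the block (via println by
// default), while `timer` returns the result paired with the elapsed
// wall-clock seconds.
object TimeExample {
  def main(args: Array[String]): Unit = {
    val n = Time.time("sum 1..1000000", (1 to 1000000).sum)
    val (doubledSum, secs) = Time.timer((1 to 1000).map(_ * 2).sum)
    println(s"n = $n; doubledSum = $doubledSum computed in $secs seconds")
  }
}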
|
dhgarrette/low-resource-pos-tagging-2014
|
src/main/scala/dhg/util/Time.scala
|
Scala
|
apache-2.0
| 779
|
/** Copyright 2015 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prediction.controller
import io.prediction.core.BaseDataSource
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
/** Base class of a parallel data source.
*
* A parallel data source runs locally within a single machine, or in parallel
* on a cluster, to return data that is distributed across a cluster.
*
* @tparam TD Training data class.
* @tparam EI Evaluation Info class.
* @tparam Q Input query class.
* @tparam A Actual value class.
* @group Data Source
*/
abstract class PDataSource[TD, EI, Q, A]
extends BaseDataSource[TD, EI, Q, A] {
def readTrainingBase(sc: SparkContext): TD = readTraining(sc)
/** Implement this method to only return training data from a data source */
def readTraining(sc: SparkContext): TD
def readEvalBase(sc: SparkContext): Seq[(TD, EI, RDD[(Q, A)])] = readEval(sc)
  /** To provide an evaluation feature for your engine, you must override this
    * method to return data for evaluation from a data source. Returned data can
* optionally include a sequence of query and actual value pairs for
* evaluation purpose.
*
* The default implementation returns an empty sequence as a stub, so that
* an engine can be compiled without implementing evaluation.
*/
def readEval(sc: SparkContext): Seq[(TD, EI, RDD[(Q, A)])] =
Seq[(TD, EI, RDD[(Q, A)])]()
@deprecated("Use readEval() instead.", "0.9.0")
def read(sc: SparkContext): Seq[(TD, EI, RDD[(Q, A)])] = readEval(sc)
}
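// A minimal sketch, not from the original file, of a concrete parallel data
// source. `MyQuery`, `MyActual`, `MyTrainingData` and the input path are
// hypothetical; `EmptyEvaluationInfo` is assumed to be the stock marker class
// from this controller package.
case class MyQuery(user: String)
case class MyActual(item: String)
case class MyTrainingData(lines: RDD[String])

class MyDataSource extends PDataSource[MyTrainingData, EmptyEvaluationInfo, MyQuery, MyActual] {
  // Only training data is produced; readEval keeps its empty default, so the
  // engine still compiles without an evaluation implementation.
  def readTraining(sc: SparkContext): MyTrainingData =
    MyTrainingData(sc.textFile("data/input.txt")) // hypothetical path
}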
|
ch33hau/PredictionIO
|
core/src/main/scala/io/prediction/controller/PDataSource.scala
|
Scala
|
apache-2.0
| 2,113
|
/*
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.spark.kernel.protocol.v5.content
import org.scalatest.{FunSpec, Matchers}
import play.api.data.validation.ValidationError
import play.api.libs.json._
class ConnectRequestSpec extends FunSpec with Matchers {
val connectRequestJson: JsValue = Json.parse("""
{}
""")
val connectRequest: ConnectRequest = ConnectRequest()
describe("ConnectRequest") {
describe("implicit conversions") {
it("should implicitly convert from valid json to a ConnectRequest instance") {
// This is the least safe way to convert as an error is thrown if it fails
connectRequestJson.as[ConnectRequest] should be (connectRequest)
}
it("should also work with asOpt") {
// This is safer, but we lose the error information as it returns
// None if the conversion fails
val newConnectRequest = connectRequestJson.asOpt[ConnectRequest]
newConnectRequest.get should be (connectRequest)
}
it("should also work with validate") {
        // This is the safest way as it collects all error information (not just the first error) and reports it
        val connectRequestResults = connectRequestJson.validate[ConnectRequest]
        connectRequestResults.fold(
(invalid: Seq[(JsPath, Seq[ValidationError])]) => println("Failed!"),
(valid: ConnectRequest) => valid
) should be (connectRequest)
}
it("should implicitly convert from a ConnectRequest instance to valid json") {
Json.toJson(connectRequest) should be (connectRequestJson)
}
}
}
}
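// A hedged generalization, not from the original file: the `validate`/`fold`
// pattern exercised above, lifted into a hypothetical helper that works for
// any type with a Play JSON `Reads` instance and keeps every validation error.
object JsonParsing {
  import play.api.libs.json._

  def parseSafely[A: Reads](js: JsValue): Either[String, A] =
    js.validate[A].fold(
      errors => Left(errors.mkString("; ")), // all errors, not just the first
      value => Right(value)
    )
}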
|
bpburns/spark-kernel
|
protocol/src/test/scala/com/ibm/spark/kernel/protocol/v5/content/ConnectRequestSpec.scala
|
Scala
|
apache-2.0
| 2,165
|
/*
* Copyright 2001-2011 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.events
/**
 * Location in source code that an event concerns.
*/
sealed abstract class Location
/**
 * The location in a source file where the class whose fully qualified name
 * is passed as <code>className</code> is declared.
*/
final case class TopOfClass(className: String) extends Location
/**
* The location in a source file where the method identified by the passed <code>methodId</code>
 * in the class whose fully qualified name is passed as <code>className</code> is declared.
* The methodId is obtained by calling <code>toGenericString</code> on the <code>java.lang.reflect.Method</code>
* object representing the method.
*/
final case class TopOfMethod(className: String, methodId: String) extends Location
/**
* An arbitrary line number in a named source file.
*/
final case class LineInFile(lineNumber: Int, fileName: String) extends Location
/**
* Indicates the location should be taken from the stack depth exception, included elsewhere in
* the event that contained this location.
*/
final case object SeeStackDepthException extends Location
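// A minimal sketch, not part of the original file: a reporter-side helper
// (`describe` is hypothetical) pattern matching exhaustively over the sealed
// hierarchy defined above.
object LocationFormatting {
  def describe(location: Location): String = location match {
    case TopOfClass(className)            => s"top of class $className"
    case TopOfMethod(className, methodId) => s"method $methodId in class $className"
    case LineInFile(lineNumber, fileName) => s"$fileName:$lineNumber"
    case SeeStackDepthException           => "location from stack depth exception"
  }
}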
|
svn2github/scalatest
|
src/main/scala/org/scalatest/events/Location.scala
|
Scala
|
apache-2.0
| 1,717
|