// Dataset dump header — columns: code | repo_name | path | language | license | size
package com.despegar.soffheap.perftest
import com.despegar.soffheap.ProviderMappingInfo
import java.util.Random
/** Performance scenario: concurrent readers and writers over lists of
  * synthetic `ProviderMappingInfo` objects. Requires the system properties
  * `-Delements=N -Dreaders=N -Dwriters=N` (missing properties will NPE on
  * `toInt`).
  */
object DbsProviderMappingTest extends App {
  // Number of mappings each writer materialises per list.
  val elements = System.getProperty("elements").toInt
  // Number of concurrent reader threads.
  val readers = System.getProperty("readers").toInt
  // Number of concurrent writer threads.
  val writers = System.getProperty("writers").toInt

  /** Builds a fresh list of `elements` synthetic provider mappings.
    * The lambda parameter was previously named `Unit`, which shadowed the
    * `Unit` type and read like a type ascription; `_` makes the intent clear.
    */
  val arrayWriter: (Unit => java.util.ArrayList[ProviderMappingInfo]) = { _ =>
    val list = new java.util.ArrayList[ProviderMappingInfo]()
    (1 to elements) foreach { i =>
      val mapping = new ProviderMappingInfo()
      mapping.setSupplierCode(s"ABC$i")
      mapping.setHotelQuantity(600 + i)
      mapping.setExternalHotelCode(s"ABCDEFGHIJK$i")
      mapping.setExternalCityCode(s"qwertyuiop$i")
      list.add(mapping)
    }
    list
  }

  val rand = new Random()

  // Readers draw pseudo-random keys in [0, 200000]; writers regenerate lists.
  val scenario =
    new ReadersWriterScenario[Long, java.util.ArrayList[ProviderMappingInfo]](readers, writers, arrayWriter, { _ =>
      rand.nextInt(200001)
    })

  scenario.start()
  Thread.sleep(60000) // let the scenario run for one minute
  scenario.stopWriters()
} // source: despegar/soffheap — src/test/java/com/despegar/soffheap/perftest/DbsProviderMappingTest.scala (BSD-2-Clause)
import org.portablescala.sbtplatformdeps.PlatformDepsPlugin.autoImport._
import sbt._
/** Central catalogue of library dependencies for the sbt build.
  *
  * `V` pins version numbers, `Compile` and `Test` declare the actual
  * `ModuleID` coordinates, and the remaining members group those modules
  * into the per-subproject dependency lists referenced from the build
  * definition. Members wrapped in `Def.setting` use `%%%` and must be
  * resolved per-platform (JVM vs Scala.js).
  */
object Deps {

  // Pinned version numbers, shared by every coordinate declared below.
  object V {
    val bouncyCastle = "1.70"
    val dropwizardMetricsV = "4.2.7" //https://github.com/dropwizard/metrics
    val logback = "1.2.10"
    val grizzledSlf4j = "1.3.4"
    val scalacheck = "1.15.4"
    val scalaTest = "3.2.10"
    val scalaTestPlus =
      "3.2.2.0" //super annoying... https://oss.sonatype.org/content/groups/public/org/scalatestplus/
    val slf4j = "1.7.33"
    val spray = "1.3.6"
    val zeromq = "0.5.2"
    val akkav = "10.2.7"
    val playv = "2.9.2"
    val akkaStreamv = "2.6.18"
    val jUnixSocketV = "2.4.0"
    val scodecV = "1.1.30"
    val junitV = "0.13.3"
    val nativeLoaderV = "2.4.0"
    val typesafeConfigV = "1.4.1"
    val scalaFxV = "16.0.0-R25"
    val javaFxV = "18-ea+10"
    val asyncNewScalaV = "1.0.1"
    val flywayV = "6.4.2"
    val postgresV = "42.3.1"
    // akka-actor is kept in lockstep with akka-stream.
    val akkaActorV = akkaStreamv
    val slickV = "3.3.3"
    val sqliteV = "3.36.0.3"
    val scalameterV = "0.17"
    val scalamockV = "5.2.0"
    val scalaCollectionCompatV = "2.6.0"
    val pgEmbeddedV = "0.13.4"
    val breezeV = "1.3"
    val newMicroPickleV = "1.3.8"
    // ujson ships with upickle, so the two versions must match.
    val newMicroJsonV = newMicroPickleV

    // akka-http-upickle is not yet published
    // to Maven central. There's a PR for adding
    // suport, https://github.com/hseeberger/akka-http-json/pull/314.
    // Until that's merged, you'll have to pull down
    // that PR, do `sbt publishLocal` and replace the
    // value here with whatever is in there. This
    // obviously has to be changed before this is
    // merged.

    val sourcecodeV = "0.2.7"
    val scalaJsStubsV = "1.1.0"
    // CLI deps
    val scoptV = "4.0.1"
    val sttpV = "1.7.2"
    val codehausV = "3.1.6"
    val scalaJsTimeV = "2.3.0"
    val zxingV = "3.4.1"
    val monixV = "3.4.0"
  }

  // Modules pulled into the main (compile) configuration.
  object Compile {
    val bouncycastle =
      "org.bouncycastle" % "bcprov-jdk15on" % V.bouncyCastle withSources () withJavadoc ()
    val scodec =
      Def.setting(
        "org.scodec" %%% "scodec-bits" % V.scodecV withSources () withJavadoc ())
    val slf4j =
      "org.slf4j" % "slf4j-api" % V.slf4j % "provided" withSources () withJavadoc ()
    val zeromq =
      "org.zeromq" % "jeromq" % V.zeromq withSources () withJavadoc ()
    val akkaHttp =
      "com.typesafe.akka" %% "akka-http" % V.akkav withSources () withJavadoc ()
    val akkaHttp2 =
      "com.typesafe.akka" %% "akka-http2-support" % V.akkav withSources () withJavadoc ()
    val akkaStream =
      "com.typesafe.akka" %% "akka-stream" % V.akkaStreamv withSources () withJavadoc ()
    val akkaDiscovery =
      "com.typesafe.akka" %% "akka-discovery" % V.akkaStreamv withSources () withJavadoc ()
    val akkaActor =
      "com.typesafe.akka" %% "akka-actor" % V.akkaStreamv withSources () withJavadoc ()
    val akkaSlf4j =
      "com.typesafe.akka" %% "akka-slf4j" % V.akkaStreamv withSources () withJavadoc ()
    val akkaTestkit =
      "com.typesafe.akka" %% "akka-testkit" % V.akkaActorV withSources () withJavadoc ()
    val jUnixSocket =
      "com.kohlschutter.junixsocket" % "junixsocket-core" % V.jUnixSocketV
    val scalaFx =
      "org.scalafx" %% "scalafx" % V.scalaFxV withSources () withJavadoc ()

    // JavaFX artifacts are platform-specific; pick the classifier from the
    // host OS/architecture at build-definition load time.
    lazy val arch = System.getProperty("os.arch")

    lazy val osName = System.getProperty("os.name") match {
      case n if n.startsWith("Linux") => "linux"
      case n if n.startsWith("Mac") =>
        if (arch == "aarch64") {
          //needed to accommodate the different chip
          //arch for M1
          s"mac-${arch}"
        } else {
          "mac"
        }
      case n if n.startsWith("Windows") => "win"
      case x => throw new Exception(s"Unknown platform $x!")
    }

    // Not sure if all of these are needed, some might be possible to remove
    lazy val javaFxBase =
      "org.openjfx" % s"javafx-base" % V.javaFxV classifier osName withSources () withJavadoc ()
    lazy val javaFxControls =
      "org.openjfx" % s"javafx-controls" % V.javaFxV classifier osName withSources () withJavadoc ()
    lazy val javaFxGraphics = "org.openjfx" % s"javafx-graphics" % V.javaFxV classifier osName withSources () withJavadoc ()
    lazy val javaFxMedia = "org.openjfx" % s"javafx-media" % V.javaFxV classifier osName withSources () withJavadoc ()
    lazy val javaFxDeps = List(javaFxBase,
                               javaFxControls,
                               javaFxGraphics,
                               javaFxMedia)

    val breezeViz =
      ("org.scalanlp" %% "breeze-viz" % V.breezeV withSources () withJavadoc ())
        .exclude("bouncycastle", "bcprov-jdk14")
    val playJson =
      "com.typesafe.play" %% "play-json" % V.playv withSources () withJavadoc ()
    val typesafeConfig =
      "com.typesafe" % "config" % V.typesafeConfigV withSources () withJavadoc ()
    val logback =
      "ch.qos.logback" % "logback-classic" % V.logback withSources () withJavadoc ()
    val grizzledSlf4j =
      "org.clapper" %% "grizzled-slf4j" % V.grizzledSlf4j withSources () withJavadoc ()
    val codehaus = "org.codehaus.janino" % "janino" % V.codehausV

    //for loading secp256k1 natively
    val nativeLoader =
      "org.scijava" % "native-lib-loader" % V.nativeLoaderV withSources () withJavadoc ()

    //node deps
    val slick =
      "com.typesafe.slick" %% "slick" % V.slickV withSources () withJavadoc ()
    val slickHikari = "com.typesafe.slick" %% "slick-hikaricp" % V.slickV
    val sqlite = "org.xerial" % "sqlite-jdbc" % V.sqliteV
    val postgres = "org.postgresql" % "postgresql" % V.postgresV
    val flyway = "org.flywaydb" % "flyway-core" % V.flywayV
    val newMicroJson = "com.lihaoyi" %% "ujson" % V.newMicroJsonV
    val newMicroPickle =
      Def.setting("com.lihaoyi" %%% "upickle" % V.newMicroPickleV)

    // get access to reflection data at compile-time
    val sourcecode = "com.lihaoyi" %% "sourcecode" % V.sourcecodeV

    // parsing of CLI opts and args
    val scopt = "com.github.scopt" %% "scopt" % V.scoptV

    // HTTP client lib
    val sttp = "com.softwaremill.sttp" %% "core" % V.sttpV

    val scalaCollectionCompat =
      "org.scala-lang.modules" %% "scala-collection-compat" % V.scalaCollectionCompatV
    val scalacheck =
      Def.setting(
        "org.scalacheck" %%% "scalacheck" % V.scalacheck withSources () withJavadoc ())
    val scalaJsStubs =
      "org.scala-js" %% "scalajs-stubs" % V.scalaJsStubsV % "provided"
    val scalaJsTime =
      Def.setting(
        "io.github.cquiroz" %%% "scala-java-time" % V.scalaJsTimeV withSources () withJavadoc ())
    val scalaTest =
      Def.setting(
        "org.scalatest" %%% "scalatest" % V.scalaTest withSources () withJavadoc ())
    val scalaTestPlus = Def.setting(
      "org.scalatestplus" %%% "scalacheck-1-14" % V.scalaTestPlus withSources () withJavadoc ())
    val pgEmbedded =
      "com.opentable.components" % "otj-pg-embedded" % V.pgEmbeddedV withSources () withJavadoc ()
    val dropwizardMetrics =
      "io.dropwizard.metrics" % "metrics-core" % V.dropwizardMetricsV withSources () withJavadoc ()
    val zxingCore =
      "com.google.zxing" % "core" % V.zxingV withSources () withJavadoc ()
    val zxingJ2SE =
      "com.google.zxing" % "javase" % V.zxingV withSources () withJavadoc ()
    val monixExecution =
      Def.setting(
        "io.monix" %%% "monix-execution" % V.monixV withSources () withJavadoc ())
  }

  // Modules scoped to the "test" configuration (or reused Compile modules
  // re-scoped with `% "test"`).
  object Test {
    val newAsync =
      "org.scala-lang.modules" %% "scala-async" % V.asyncNewScalaV % "test" withSources () withJavadoc ()
    val junitInterface =
      "com.github.sbt" % "junit-interface" % V.junitV % "test" withSources () withJavadoc ()
    val logback = Compile.logback % "test"
    val grizzledSlf4j = Compile.grizzledSlf4j % "test"
    val scalacheck = Def.setting(
      "org.scalacheck" %%% "scalacheck" % V.scalacheck % "test" withSources () withJavadoc ())
    val scalaTest = Def.setting(
      "org.scalatest" %%% "scalatest" % V.scalaTest % "test" withSources () withJavadoc ())
    val scalaMock = "org.scalamock" %% "scalamock" % V.scalamockV
    val spray =
      "io.spray" %% "spray-json" % V.spray % "test" withSources () withJavadoc ()
    val akkaHttpTestkit =
      "com.typesafe.akka" %% "akka-http-testkit" % V.akkav % "test" withSources () withJavadoc ()
    val akkaStream =
      "com.typesafe.akka" %% "akka-stream-testkit" % V.akkaStreamv % "test" withSources () withJavadoc ()
    val playJson = Compile.playJson % "test"
    val scalameter =
      "com.storm-enroute" %% "scalameter" % V.scalameterV % "test" withSources () withJavadoc ()
    val scalaCollectionCompat =
      "org.scala-lang.modules" %% "scala-collection-compat" % V.scalaCollectionCompatV
    val pgEmbedded =
      "com.opentable.components" % "otj-pg-embedded" % V.pgEmbeddedV % "test" withSources () withJavadoc ()
    val akkaTestkit =
      "com.typesafe.akka" %% "akka-testkit" % V.akkaActorV withSources () withJavadoc ()
  }

  // ---------------------------------------------------------------------
  // Per-subproject dependency groupings. `def ... = Def.setting { ... }`
  // members contain platform-dependent (`%%%`) modules; plain `val`s are
  // JVM-only lists.
  // ---------------------------------------------------------------------

  def asyncUtils = Def.setting {
    Vector(Compile.monixExecution.value)
  }

  val chain = List(
    Compile.logback
  )

  val chainTest = List(
    Test.pgEmbedded
  )

  val appCommons = Def.setting {
    List(
      Compile.newMicroPickle.value,
      Compile.playJson,
      Compile.slf4j,
      Compile.grizzledSlf4j,
      Compile.typesafeConfig
    )
  }

  def core = Def.setting {
    List(
      Compile.bouncycastle,
      Compile.scodec.value
    )
  }

  val cryptoJVM = List(
    Compile.bouncycastle,
    Compile.scalaJsStubs
  )

  def crypto: Def.Initialize[Seq[ModuleID]] = {
    Def.setting {
      List(
        Compile.scodec.value
      )
    }
  }

  val dlcWallet =
    List(
      Compile.newMicroJson,
      Compile.grizzledSlf4j
    )

  val dlcNode =
    List(
      Compile.newMicroJson,
      Compile.grizzledSlf4j,
      Compile.akkaActor
    )

  val dlcNodeTest =
    List(
      Test.akkaTestkit
    )

  val dlcWalletTest =
    List(
      Test.akkaTestkit,
      Test.pgEmbedded
    )

  val secp256k1jni = List(
    Compile.nativeLoader,
    Test.junitInterface
  )

  def coreTest = Def.setting {
    List(
      Test.junitInterface,
      Test.scalaTest.value,
      Test.scalaCollectionCompat,
      Compile.newMicroPickle.value
    )
  }

  val coreTestJVM = Def.setting {
    List(
      Test.junitInterface,
      Test.scalaTest.value,
      Test.scalaCollectionCompat
    )
  }

  val coreJs = Def.setting {
    List(
      Compile.scalaJsTime.value
    )
  }

  def cryptoTest = Def.setting {
    List(
      Test.scalaTest.value,
      Test.scalacheck.value,
      Compile.scalaTestPlus.value
    )
  }

  def bitcoindZmq = Def.setting {
    List(
      Compile.zeromq,
      Compile.slf4j,
      Compile.grizzledSlf4j,
      Test.logback,
      Test.scalacheck.value,
      Test.scalaTest.value
    )
  }

  val bitcoindRpc = List(
    Compile.akkaHttp,
    Compile.akkaStream,
    Compile.typesafeConfig,
    Compile.slf4j,
    Compile.grizzledSlf4j
  )

  def bitcoindRpcTest = Def.setting {
    List(
      Test.akkaHttpTestkit,
      Test.akkaStream,
      Test.logback,
      Test.scalaTest.value,
      Test.scalacheck.value,
      Test.newAsync,
      Test.scalaCollectionCompat
    )
  }

  val bench = List(
    "org.slf4j" % "slf4j-api" % V.slf4j withSources () withJavadoc (),
    Compile.logback
  )

  def dbCommons = Def.setting {
    List(
      Compile.dropwizardMetrics,
      Compile.flyway,
      Compile.slick,
      Compile.sourcecode,
      Compile.logback,
      Compile.sqlite,
      Compile.postgres,
      Compile.slickHikari,
      Compile.slf4j,
      Compile.grizzledSlf4j,
      Test.scalaTest.value,
      Test.pgEmbedded
    )
  }

  val cli = Def.setting {
    List(
      Compile.sttp,
      Compile.newMicroPickle.value,
      Compile.scopt,
      //we can remove this dependency when this is fixed
      //https://github.com/oracle/graal/issues/1943
      //see https://github.com/bitcoin-s/bitcoin-s/issues/1100
      Compile.codehaus
    )
  }

  val gui = List(Compile.akkaActor,
                 Compile.breezeViz,
                 Compile.scalaFx,
                 Compile.zxingCore,
                 Compile.zxingJ2SE) ++ Compile.javaFxDeps

  val server = Def.setting {
    List(
      Compile.newMicroPickle.value,
      Compile.logback,
      Compile.akkaActor,
      Compile.akkaHttp,
      Compile.akkaStream,
      Compile.akkaSlf4j
    )
  }

  val oracleServer = Def.setting {
    List(
      Compile.newMicroPickle.value,
      Compile.logback,
      Compile.akkaActor,
      Compile.akkaHttp,
      Compile.akkaSlf4j
    )
  }

  val eclairRpc = List(
    Compile.akkaHttp,
    Compile.akkaStream,
    Compile.playJson,
    Compile.slf4j,
    Compile.grizzledSlf4j
  )

  val clightningRpc = List(
    Compile.jUnixSocket,
    Compile.playJson,
    Compile.grizzledSlf4j
  )

  val clightningRpcTest = Def.setting {
    List(
      Test.logback,
      Test.scalaTest.value,
      Test.scalacheck.value
    )
  }

  val tor: Def.Initialize[List[ModuleID]] = Def.setting {
    List(
      Compile.akkaStream,
      Compile.akkaHttp,
      Compile.scodec.value,
      Compile.grizzledSlf4j
    )
  }

  val lndRpc = List(
    Compile.akkaHttp,
    Compile.akkaHttp2,
    Compile.akkaStream,
    Compile.akkaDiscovery,
    Compile.playJson,
    Compile.slf4j,
    Compile.grizzledSlf4j
  )

  def eclairRpcTest = Def.setting {
    List(
      Test.akkaHttpTestkit,
      Test.akkaStream,
      Test.logback,
      Test.scalaTest.value,
      Test.scalacheck.value
    )
  }

  def feeProvider = Def.setting {
    List(
      Compile.akkaHttp,
      Compile.akkaActor,
      Compile.akkaStream
    )
  }

  def feeProviderTest = Def.setting {
    List(
      Test.akkaTestkit,
      Test.scalaTest.value
    )
  }

  val esplora = Def.setting {
    List(
      Compile.akkaHttp,
      Compile.akkaActor,
      Compile.akkaStream
    )
  }

  val esploraTest = Def.setting {
    List(
      Test.akkaTestkit,
      Test.scalaTest.value
    )
  }

  val node = List(
    Compile.akkaActor,
    Compile.logback,
    Compile.slick,
    Compile.slickHikari,
    Compile.sqlite,
    Compile.slf4j,
    Compile.grizzledSlf4j
  )

  val nodeTest = Def.setting {
    List(
      Test.akkaTestkit,
      Test.scalaTest.value,
      Test.pgEmbedded
    )
  }

  def testkitCore = Def.setting {
    List(
      Compile.newMicroPickle.value,
      Compile.scalaCollectionCompat,
      Compile.scalacheck.value,
      Compile.scalaTest.value,
      Compile.scalaTestPlus.value
    )
  }

  def testkit = Def.setting {
    List(
      Compile.scalacheck.value,
      Compile.scalaTest.value,
      Compile.scalaTestPlus.value,
      Compile.pgEmbedded,
      Compile.slf4j,
      Compile.grizzledSlf4j,
      Compile.akkaTestkit
    )
  }

  // scalaVersion parameter currently unused; kept for call-site compatibility.
  def keyManager(scalaVersion: String) =
    List(
      Compile.newMicroJson
    )

  val keyManagerTest = List(
    Compile.slf4j,
    Test.logback
  )

  // scalaVersion parameter currently unused; kept for call-site compatibility.
  def wallet(scalaVersion: String) =
    List(
      Compile.newMicroJson,
      Compile.logback,
      Compile.slf4j,
      Compile.grizzledSlf4j
    )

  val walletTest = List(
    Test.pgEmbedded
  )

  def docs = Def.setting {
    List(
      Compile.logback,
      Test.scalaTest.value,
      Test.logback
    )
  }

  val walletServerTest = List(
    Compile.typesafeConfig,
    Test.scalaMock,
    Test.akkaHttpTestkit,
    Test.akkaStream
  )

  val dlcOracle =
    List(
      Compile.newMicroJson,
      Compile.logback,
      Compile.slf4j,
      Compile.grizzledSlf4j
    )

  val dlcOracleTest =
    List(
      Compile.newMicroJson,
      Compile.logback
    )

  val serverRoutes = List(
    Compile.akkaHttp,
    Compile.akkaActor,
    Compile.akkaSlf4j,
    Compile.akkaStream,
    Compile.slf4j,
    Compile.grizzledSlf4j
  )

  val oracleExplorerClient = Vector(
    Compile.akkaActor,
    Compile.akkaHttp,
    Compile.akkaStream,
    Compile.playJson
  )

  val dlcTest = Vector(
    Compile.playJson
  )
}
// source: bitcoin-s/bitcoin-s — project/Deps.scala (Scala, MIT, 16,023 bytes)
/*
Copyright (c) 2017 KAPSARC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.economicsl.auctions.actors
import org.economicsl.auctions.Contract
/** Stub `SettlementActor` that logs contracts.
  *
  * On construction it installs [[contractLogger]] as the actor's behavior;
  * every received [[Contract]] is written to the log at info level.
  *
  * @todo this Actor should be moved down to the esl-settlement.
  */
class LoggingSettlementActor
    extends StackableActor {

  wrappedBecome(contractLogger)

  /** Behavior that logs the string form of each incoming contract. */
  def contractLogger: Receive = {
    case received: Contract =>
      log.info(received.toString)
  }

}
// source: EconomicSL/auctions — src/main/scala/org/economicsl/auctions/actors/LoggingSettlementActor.scala (Scala, Apache-2.0, 953 bytes)
import _root_.runtime.RosalindSolution
/** Rosalind "deg" problem: the degree array of an undirected graph. */
object DegreeArray extends RosalindSolution {

  /** Rosalind problem identifier used to locate the input file. */
  def problem_name: String = "deg"

  /** Computes the degree of each vertex (row sum of the parsed edge list)
    * and joins the results with real newlines.
    *
    * Fix: the separator was previously the two-character literal "\\n",
    * which emitted a backslash followed by 'n' instead of breaking lines;
    * Rosalind expects whitespace/newline-separated degrees.
    */
  def get_answer(input_lines: Iterator[String]): String = {
    val graph = parsed_edge_list(input_lines)
    graph.map(_.sum).mkString("\n")
  }
}
// source: ChrisCooper/rosalind-algorithmic-heights — src/DegreeArray.scala (Scala, GPL-2.0, 272 bytes)
package scalan.primitives
import scalan.common.Lazy
import scalan.staged.BaseExp
import scalan.{ ScalanExp, Scalan, ScalanStd }
/** Staged `loopUntil` combinators for the Scalan DSL.
  *
  * The abstract primitive is `loopUntil`; the numbered variants
  * (`loopUntil2` .. `loopUntil8`) tuple several loop variables into one
  * nested-pair state, and the `from(...)(...).until(...)` builders give a
  * fluent syntax over the same combinators.
  */
trait Loops { self: Scalan =>

  /** Abstract staged loop: repeatedly applies `step` to the state starting
    * from `s1` until `isMatch` holds; implemented by `LoopsStd`/`LoopsExp`. */
  def loopUntil[A:Elem](s1: Rep[A])(isMatch: Rep[A => Boolean], step: Rep[A => A]): Rep[A]

  /** Variant taking plain Scala functions and lifting them to staged
    * functions with `fun` before delegating to `loopUntil`. */
  def loopUntilAux[A:Elem](s1: Rep[A])(isMatch: Rep[A] => Rep[Boolean], step: Rep[A] => Rep[A]): Rep[A] = {
    val eA = elemFromRep(s1)
    val leA = Lazy(eA)
    loopUntil(s1)(fun(isMatch)(leA, BooleanElement), fun(step)(leA, eA))
  }

  /** Recovers the most precise element descriptor for `x`, descending into
    * pair components; falls back to the implicit `eA` for leaf cases. */
  private def elemFromRep[A](x: Rep[A])(implicit eA: Elem[A]): Elem[A] = eA match {
    case ve: ViewElem[_,_] =>
      x.unsafeElem
    case pe: PairElem[a,b] =>
      implicit val ea = pe.eFst
      implicit val eb = pe.eSnd
      val pair = x.asRep[(a, b)]
      pairElement(elemFromRep(pair._1)(ea), elemFromRep(pair._2)(eb))
    case _ => eA
  }

  // Multi-variable loops: state is packed into nested pairs and unpacked
  // with Tuple/Pair extractors inside the predicate and step functions.

  def loopUntil2[A:Elem, B:Elem](s1: Rep[A], s2: Rep[B])
                (isMatch: (Rep[A],Rep[B]) => Rep[Boolean],
                 step: (Rep[A], Rep[B]) => (Rep[A], Rep[B])): Rep[(A,B)]
  = loopUntilAux(Pair(s1, s2))({case Pair(a,b) => isMatch(a,b)}, {case Pair(a,b) => step(a,b)})

  def loopUntil3[A:Elem, B:Elem, C:Elem](s1: Rep[A], s2: Rep[B], s3: Rep[C])
                (isMatch: (Rep[A],Rep[B],Rep[C]) => Rep[Boolean],
                 step: (Rep[A], Rep[B], Rep[C]) => (Rep[A], Rep[B], Rep[C])): Rep[(A,(B,C))]
  = loopUntilAux(Tuple(s1, s2, s3))({case Tuple(a,b,c) => isMatch(a,b,c) }, {case Tuple(a,b,c) => step(a,b,c)})

  def loopUntil4[A:Elem, B:Elem, C:Elem, D:Elem](s1: Rep[A], s2: Rep[B], s3: Rep[C], s4: Rep[D])
                (isMatch: (Rep[A],Rep[B],Rep[C],Rep[D]) => Rep[Boolean],
                 step: (Rep[A], Rep[B], Rep[C], Rep[D]) => (Rep[A], Rep[B], Rep[C], Rep[D])): Rep[(A,(B,(C,D)))]
  = loopUntilAux(Tuple(s1, s2, s3,s4))({case Tuple(a,b,c,d) => isMatch(a,b,c,d) }, {case Tuple(a,b,c,d) => step(a,b,c,d)})

  def loopUntil5[A:Elem, B:Elem, C:Elem, D:Elem, E:Elem](s1: Rep[A], s2: Rep[B], s3: Rep[C], s4: Rep[D], s5: Rep[E])
                (isMatch: (Rep[A],Rep[B],Rep[C],Rep[D],Rep[E]) => Rep[Boolean],
                 step: (Rep[A], Rep[B], Rep[C], Rep[D], Rep[E]) => (Rep[A], Rep[B], Rep[C], Rep[D], Rep[E])): Rep[(A,(B,(C,(D,E))))]
  = loopUntilAux(Tuple(s1, s2, s3,s4, s5))({case Tuple(a,b,c,d,e) => isMatch(a,b,c,d,e) }, {case Tuple(a,b,c,d,e) => step(a,b,c,d,e)})

  def loopUntil6[A:Elem, B:Elem, C:Elem, D:Elem, E:Elem, F:Elem](s1: Rep[A], s2: Rep[B], s3: Rep[C], s4: Rep[D], s5: Rep[E], s6: Rep[F])
                (isMatch: (Rep[A],Rep[B],Rep[C],Rep[D],Rep[E],Rep[F]) => Rep[Boolean],
                 step: (Rep[A], Rep[B], Rep[C], Rep[D], Rep[E],Rep[F]) => (Rep[A], Rep[B], Rep[C], Rep[D], Rep[E],Rep[F])): Rep[(A,(B,(C,(D,(E,F)))))]
  = loopUntilAux(Tuple(s1, s2, s3,s4, s5,s6))({case Tuple(a,b,c,d,e,f) => isMatch(a,b,c,d,e,f) }, {case Tuple(a,b,c,d,e,f) => step(a,b,c,d,e,f)})

  def loopUntil7[A:Elem, B:Elem, C:Elem, D:Elem, E:Elem, F:Elem, G:Elem](s1: Rep[A], s2: Rep[B], s3: Rep[C], s4: Rep[D], s5: Rep[E], s6: Rep[F], s7: Rep[G])
                (isMatch: (Rep[A],Rep[B],Rep[C],Rep[D],Rep[E],Rep[F],Rep[G]) => Rep[Boolean],
                 step: (Rep[A], Rep[B], Rep[C], Rep[D], Rep[E],Rep[F],Rep[G]) => (Rep[A], Rep[B], Rep[C], Rep[D], Rep[E],Rep[F],Rep[G])): Rep[(A,(B,(C,(D,(E,(F,G))))))]
  = loopUntilAux(Tuple(s1, s2, s3,s4, s5,s6,s7))({case Tuple(a,b,c,d,e,f,g) => isMatch(a,b,c,d,e,f,g) }, {case Tuple(a,b,c,d,e,f,g) => step(a,b,c,d,e,f,g)})

  def loopUntil8[A:Elem, B:Elem, C:Elem, D:Elem, E:Elem, F:Elem, G:Elem, H:Elem](s1: Rep[A], s2: Rep[B], s3: Rep[C], s4: Rep[D], s5: Rep[E], s6: Rep[F], s7: Rep[G], s8: Rep[H])
                (isMatch: (Rep[A],Rep[B],Rep[C],Rep[D],Rep[E],Rep[F],Rep[G],Rep[H]) => Rep[Boolean],
                 step: (Rep[A], Rep[B], Rep[C], Rep[D], Rep[E],Rep[F],Rep[G],Rep[H]) => (Rep[A], Rep[B], Rep[C], Rep[D], Rep[E],Rep[F],Rep[G],Rep[H])): Rep[(A,(B,(C,(D,(E,(F,(G,H)))))))]
  = loopUntilAux(Tuple(s1, s2, s3,s4, s5, s6, s7, s8))({case Tuple(a,b,c,d,e,f,g,h) => isMatch(a,b,c,d,e,f,g,h) }, {case Tuple(a,b,c,d,e,f,g,h) => step(a,b,c,d,e,f,g,h)})

  // Fluent builder entry points: `from(state...).until(pred)(step)`.

  def from[A:Elem](s1: Rep[A]) = new From1(s1)
  def from[A:Elem, B:Elem](s1: Rep[A], s2: Rep[B]) = new From2(s1, s2)
  def from[A:Elem, B:Elem, C:Elem](s1: Rep[A], s2: Rep[B], s3: Rep[C]) =
    new From3(s1, s2, s3)
  def from[A:Elem, B:Elem, C:Elem, D:Elem](s1: Rep[A], s2: Rep[B], s3: Rep[C], s4: Rep[D]) =
    new From4(s1, s2, s3, s4)
  def from[A:Elem, B:Elem, C:Elem, D:Elem, E:Elem](s1: Rep[A], s2: Rep[B], s3: Rep[C], s4: Rep[D], s5: Rep[E]) =
    new From5(s1, s2, s3, s4, s5)
  def from[A:Elem, B:Elem, C:Elem, D:Elem, E:Elem, F:Elem](s1: Rep[A], s2: Rep[B], s3: Rep[C], s4: Rep[D], s5: Rep[E], s6: Rep[F]) =
    new From6(s1, s2, s3, s4, s5, s6)
  def from[A:Elem, B:Elem, C:Elem, D:Elem, E:Elem, F:Elem,G:Elem](s1: Rep[A], s2: Rep[B], s3: Rep[C], s4: Rep[D], s5: Rep[E], s6: Rep[F], s7:Rep[G]) =
    new From7(s1, s2, s3, s4, s5, s6, s7)
  def from[A:Elem, B:Elem, C:Elem, D:Elem, E:Elem, F:Elem, G:Elem, H:Elem](s1: Rep[A], s2: Rep[B], s3: Rep[C], s4: Rep[D], s5: Rep[E], s6: Rep[F], s7:Rep[G], s8:Rep[H]) =
    new From8(s1, s2, s3, s4, s5, s6, s7, s8)

  // Builder classes capturing the initial state; `until` supplies the
  // predicate and step and delegates to the matching loopUntilN.

  class From1[A:Elem](s1: Rep[A]) {
    def until(isMatch: Rep[A] => Rep[Boolean])(step: Rep[A] => Rep[A]) =
      loopUntilAux(s1)({case a => isMatch(a) }, {case a => step(a)})
  }
  class From2[A:Elem, B:Elem](s1: Rep[A], s2: Rep[B]) {
    def until(isMatch: (Rep[A],Rep[B]) => Rep[Boolean])(step: (Rep[A], Rep[B]) => (Rep[A], Rep[B])) =
      loopUntil2(s1, s2)(isMatch, step)
  }
  class From3[A:Elem, B:Elem, C:Elem](s1: Rep[A], s2: Rep[B], s3: Rep[C]) {
    def until(isMatch: (Rep[A],Rep[B],Rep[C]) => Rep[Boolean])(step: (Rep[A],Rep[B],Rep[C]) => (Rep[A],Rep[B],Rep[C])) =
      loopUntil3(s1, s2, s3)(isMatch, step)
  }
  class From4[A:Elem, B:Elem, C:Elem, D:Elem](s1: Rep[A], s2: Rep[B], s3: Rep[C], s4: Rep[D]) {
    def until(isMatch: (Rep[A],Rep[B],Rep[C],Rep[D]) => Rep[Boolean])(step: (Rep[A],Rep[B],Rep[C],Rep[D]) => (Rep[A],Rep[B],Rep[C],Rep[D])) =
      loopUntil4(s1, s2, s3, s4)(isMatch, step)
  }
  class From5[A:Elem, B:Elem, C:Elem, D:Elem, E:Elem](s1: Rep[A], s2: Rep[B], s3: Rep[C], s4: Rep[D], s5: Rep[E]) {
    def until(isMatch: (Rep[A],Rep[B],Rep[C],Rep[D],Rep[E]) => Rep[Boolean])(step: (Rep[A],Rep[B],Rep[C],Rep[D],Rep[E]) => (Rep[A],Rep[B],Rep[C],Rep[D],Rep[E])) =
      loopUntil5(s1, s2, s3, s4, s5)(isMatch, step)
  }
  class From6[A:Elem, B:Elem, C:Elem, D:Elem, E:Elem, F:Elem](s1: Rep[A], s2: Rep[B], s3: Rep[C], s4: Rep[D], s5: Rep[E], s6: Rep[F]) {
    def until(isMatch: (Rep[A],Rep[B],Rep[C],Rep[D],Rep[E],Rep[F]) => Rep[Boolean])(step: (Rep[A],Rep[B],Rep[C],Rep[D],Rep[E],Rep[F]) => (Rep[A],Rep[B],Rep[C],Rep[D],Rep[E],Rep[F])) =
      loopUntil6(s1, s2, s3, s4, s5,s6)(isMatch, step)
  }
  class From7[A:Elem, B:Elem, C:Elem, D:Elem, E:Elem, F:Elem,G:Elem](s1: Rep[A], s2: Rep[B], s3: Rep[C], s4: Rep[D], s5: Rep[E], s6: Rep[F], s7:Rep[G]) {
    def until(isMatch: (Rep[A],Rep[B],Rep[C],Rep[D],Rep[E],Rep[F],Rep[G]) => Rep[Boolean])(step: (Rep[A],Rep[B],Rep[C],Rep[D],Rep[E],Rep[F],Rep[G]) => (Rep[A],Rep[B],Rep[C],Rep[D],Rep[E],Rep[F],Rep[G])) =
      loopUntil7(s1, s2, s3, s4, s5,s6,s7)(isMatch, step)
  }
  class From8[A:Elem, B:Elem, C:Elem, D:Elem, E:Elem, F:Elem, G:Elem, H:Elem](s1: Rep[A], s2: Rep[B], s3: Rep[C], s4: Rep[D], s5: Rep[E], s6: Rep[F], s7:Rep[G], s8: Rep[H]) {
    def until(isMatch: (Rep[A],Rep[B],Rep[C],Rep[D],Rep[E],Rep[F],Rep[G],Rep[H]) => Rep[Boolean])(step: (Rep[A],Rep[B],Rep[C],Rep[D],Rep[E],Rep[F],Rep[G],Rep[H]) => (Rep[A],Rep[B],Rep[C],Rep[D],Rep[E],Rep[F],Rep[G],Rep[H])) =
      loopUntil8(s1, s2, s3, s4, s5, s6, s7, s8)(isMatch, step)
  }
}
/** Direct (unstaged) interpretation of `loopUntil` for the standard
  * evaluation context, where `Rep[A]` is just `A`.
  */
trait LoopsStd extends Loops { self: ScalanStd =>
  /** Repeatedly applies `step` until `isMatch` holds, returning the first
    * matching state (which may be `s1` itself).
    *
    * The previous version checked `isMatch(s1)` and used an early `return`
    * before entering the loop; the `while` condition already performs that
    * first check, so the pre-check was redundant and has been removed.
    */
  def loopUntil[A:Elem](s1: Rep[A])(isMatch: Rep[A => Boolean], step: Rep[A => A]): Rep[A] = {
    var state = s1
    while (!isMatch(state)) {
      state = step(state)
    }
    state
  }
}
/** Staged interpretation of `loopUntil`: instead of running the loop it
  * records a `LoopUntil` node in the program graph.
  */
trait LoopsExp extends Loops with BaseExp { self: ScalanExp =>
  def loopUntil[A: Elem](s1: Rep[A])(isMatch: Rep[A => Boolean], step: Rep[A => A]): Rep[A] = LoopUntil(s1, step, isMatch)

  /** Graph node representing a loop. Its element is the least upper bound of
    * the initial state's element and the step function's range element. */
  case class LoopUntil[A](s1: Rep[A], step: Rep[A => A], isMatch: Rep[A => Boolean]) extends Def[A] {
    lazy val selfType = s1.elem.leastUpperBound(step.elem.eRange).asElem[A]
    // Iteration order (step, isMatch, s1) is relied on by graph traversals.
    override def productIterator = List(step, isMatch, s1).toIterator
  }
}
// source: scalan/scalan — core/src/main/scala/scalan/primitives/Loops.scala (Scala, Apache-2.0, 8,973 bytes)
// Negative compiler test: the clash below is INTENTIONAL and must not be
// "fixed" — the compiler is expected to reject class D.
import annotation.varargs

trait C {
  // @varargs makes the compiler synthesize a Java-friendly bridge
  // `v(i: Array[Int])` alongside the Scala varargs method.
  @varargs def v(i: Int*) = ()
}

class D extends C { // error: name clash between defined and inherited member
  // Collides with the bridge method generated for C.v above.
  def v(i: Array[Int]) = ()
} // source: dotty-staging/dotty — tests/neg/varargs-annot-2.scala (Apache-2.0)
package com.twitter.inject.module
import com.google.inject.{AbstractModule, Provides}
import javax.inject.Singleton
import net.codingwell.scalaguice.ScalaModule
/** Guice module providing an empty `Seq[Array[Byte]]` binding; used in
  * injection tests. Bound as a singleton so every injection point shares
  * the same instance.
  */
object SeqAbstractModule extends AbstractModule with ScalaModule {

  /** Provider for the (empty) byte-array sequence binding. */
  @Singleton
  @Provides
  def provideSeq: Seq[Array[Byte]] =
    Seq.empty[Array[Byte]]
}
// source: twitter/util — util-inject/src/test/scala/com/twitter/util/inject/module/SeqAbstractModule.scala (Scala, Apache-2.0, 321 bytes)
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.aggregate
import java.lang.Iterable
import org.apache.flink.types.Row
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.windowing.RichAllWindowFunction
import org.apache.flink.streaming.api.windowing.windows.Window
import org.apache.flink.util.Collector
/**
  * Computes the final aggregate value from incrementally computed aggregates.
  *
  * @param finalRowArity The arity of the final output row.
  */
class IncrementalAggregateAllWindowFunction[W <: Window](
    private val finalRowArity: Int)
  extends RichAllWindowFunction[Row, Row, W] {

  // Reused across invocations to avoid allocating a Row per window firing.
  private var output: Row = _

  override def open(parameters: Configuration): Unit = {
    output = new Row(finalRowArity)
  }

  /**
    * Copies every field of the window's first (pre-aggregated) record into
    * the reusable output row and emits it. A window that contains no
    * records produces no output.
    */
  override def apply(
      window: W,
      records: Iterable[Row],
      out: Collector[Row]): Unit = {

    val it = records.iterator
    if (it.hasNext) {
      val aggregated = it.next()
      for (fieldIndex <- 0 until aggregated.getArity) {
        output.setField(fieldIndex, aggregated.getField(fieldIndex))
      }
      out.collect(output)
    }
  }
}
// source: DieBauer/flink — flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/IncrementalAggregateAllWindowFunction.scala (Scala, Apache-2.0, 2,135 bytes)
/*
* Copyright 2016 sadikovi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.sadikovi.parsebox
import org.scalatest.ConfigMap
import org.apache.spark.sql.SQLContext
import com.github.sadikovi.testutil.{UnitTestSpec, SparkLocal}
import com.github.sadikovi.testutil.implicits._
/** Unit tests for `ParseboxContext` running against a local Spark context. */
class ParseboxContextSuite extends UnitTestSpec with SparkLocal {
  // Shared Spark SQL context, created once for the whole suite.
  var sqlContext: SQLContext = null

  val jsonPath1 = testDirectory / "resources" / "json" / "options.json"

  override def beforeAll(configMap: ConfigMap): Unit = {
    startSparkContext()
    sqlContext = new SQLContext(sc)
  }

  override def afterAll(configMap: ConfigMap): Unit = {
    stopSparkContext()
    sqlContext = null
  }

  test("set and get configuration") {
    val ctx = new ParseboxContext(sqlContext)
    // Unset key yields None; set overwrites on repeated calls.
    ctx.getConf("key1") should be (None)
    ctx.setConf("key2", "val2").getConf("key2") should be (Some("val2"))
    ctx.setConf("key3", "val3").setConf("key3", "val4").getConf("key3") should be (Some("val4"))
  }

  test("read correct format") {
    val ctx = new ParseboxContext(sqlContext)
    val formatReader = ctx.format("com.github.sadikovi.parsebox.examples.json")
    formatReader.isInstanceOf[FormatReader] should be (true)
  }

  test("fail if format is unresolved") {
    val ctx = new ParseboxContext(sqlContext)
    intercept[ClassNotFoundException] {
      ctx.format("xyz.test")
    }
  }

  test("load correct format") {
    val ctx = new ParseboxContext(sqlContext)
    val frame = ctx.format("com.github.sadikovi.parsebox.examples.json").load(jsonPath1)
    frame.count() should be (10)
  }

  test("create context") {
    // `create` always builds a fresh instance.
    val first = ParseboxContext.create(sqlContext)
    val second = ParseboxContext.create(sqlContext)
    (first eq second) should be (false)
  }

  test("get or create context") {
    // `getOrCreate` caches and returns the same instance.
    val first = ParseboxContext.getOrCreate(sqlContext)
    val second = ParseboxContext.getOrCreate(sqlContext)
    (first eq second) should be (true)
  }
}
// source: sadikovi/parsebox — src/test/scala/com/github/sadikovi/parsebox/ParseboxContextSuite.scala (Scala, Apache-2.0, 2,453 bytes)
package au.com.dius.pact.server
import au.com.dius.pact.com.typesafe.scalalogging.StrictLogging
import au.com.dius.pact.consumer.DefaultMockProvider
import au.com.dius.pact.model.{MockProviderConfig, Pact, Request, Response}
import org.jboss.netty.handler.codec.http.QueryStringDecoder
import org.json4s.JsonDSL.{int2jvalue, map2jvalue, string2jvalue}
import org.json4s.jackson.JsonMethods.pretty
import scala.collection.JavaConversions._
/** HTTP handler for starting a new mock provider from a posted pact. */
object Create extends StrictLogging {

  /** Parses `requestBody` as a pact, starts a mock provider on a free port
    * in the configured [portLowerBound, portUpperBound] range on
    * `config.host`, registers it in the server state keyed by port, and
    * answers 201 with `{"port": <port>}`.
    *
    * NOTE(review): the `state` parameter is currently unused here; it is
    * accepted so `apply` can pass the query parameter through — confirm
    * whether it should seed provider state.
    */
  def create(state: String, requestBody: String, oldState: ServerState, config: Config): Result = {
    val pact = Pact.from(requestBody)
    val mockConfig: MockProviderConfig = MockProviderConfig.create(config.portLowerBound, config.portUpperBound)
      .copy(hostname = config.host)
    val server = DefaultMockProvider.apply(mockConfig)
    val port = server.config.port
    val entry = port -> server
    val body = pretty(map2jvalue(Map("port" -> port)))
    server.start(pact)
    Result(Response(201, Response.CrossSiteHeaders ++ Map("Content-Type" -> "application/json"), body, null), oldState + entry)
  }

  /** Entry point: requires a `state` query parameter and a non-empty pact
    * body; delegates to [[create]], otherwise answers 400 with a JSON
    * error message.
    */
  def apply(request: Request, oldState: ServerState, config: Config): Result = {
    def errorJson = pretty(map2jvalue(Map("error" -> "please provide state param and pact body")))
    def clientError = Result(Response(400, Response.CrossSiteHeaders, errorJson, null), oldState)

    // QueryStringDecoder needs a path prefix; parameters map is converted
    // via the JavaConversions implicits imported at file level.
    val params = new QueryStringDecoder(s"/?${request.query.getOrElse("")}").getParameters.toMap

    logger.debug(s"path=${request.path}")
    logger.debug(s"query=${request.query.toString}")
    logger.debug(request.body.toString)

    // All three pieces must be present; any missing one falls through to 400.
    val result = for {
      stateList <- params.get("state")
      state <- stateList.toList.headOption
      body <- request.body
    } yield create(state, body, oldState, config)

    result getOrElse clientError
  }
}
// source: sangohan/pact-jvm — pact-jvm-server/src/main/scala/au/com/dius/pact/server/Create.scala (Scala, Apache-2.0, 1,838 bytes)
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy
import java.io.{ByteArrayOutputStream, PrintStream}
import java.lang.reflect.InvocationTargetException
import java.net.URI
import java.nio.charset.StandardCharsets
import java.util.{List => JList}
import java.util.jar.JarFile
import scala.collection.JavaConverters._
import scala.collection.mutable.{ArrayBuffer, HashMap}
import scala.io.Source
import scala.util.Try
import org.apache.spark.deploy.SparkSubmitAction._
import org.apache.spark.launcher.SparkSubmitArgumentsParser
import org.apache.spark.network.util.JavaUtils
import org.apache.spark.util.Utils
/**
* Parses and encapsulates arguments from the spark-submit script.
* The env argument is used for testing.
*/
private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, String] = sys.env)
extends SparkSubmitArgumentsParser {
var master: String = null
var deployMode: String = null
var executorMemory: String = null
var executorCores: String = null
var totalExecutorCores: String = null
var propertiesFile: String = null
var driverMemory: String = null
var driverExtraClassPath: String = null
var driverExtraLibraryPath: String = null
var driverExtraJavaOptions: String = null
var queue: String = null
var numExecutors: String = null
var files: String = null
var archives: String = null
var mainClass: String = null
var primaryResource: String = null
var name: String = null
var childArgs: ArrayBuffer[String] = new ArrayBuffer[String]()
var jars: String = null
var packages: String = null
var repositories: String = null
var ivyRepoPath: String = null
var packagesExclusions: String = null
var verbose: Boolean = false
var isPython: Boolean = false
var pyFiles: String = null
var isR: Boolean = false
var action: SparkSubmitAction = null
val sparkProperties: HashMap[String, String] = new HashMap[String, String]()
var proxyUser: String = null
var principal: String = null
var keytab: String = null
// Standalone cluster mode only
var supervise: Boolean = false
var driverCores: String = null
var submissionToKill: String = null
var submissionToRequestStatusFor: String = null
var useRest: Boolean = true // used internally
  /** Default properties present in the currently defined defaults file. */
  lazy val defaultSparkProperties: HashMap[String, String] = {
    // Lazy so that `propertiesFile` has been resolved (possibly to the common
    // defaults file) by the time this is first accessed.
    val defaultProperties = new HashMap[String, String]()
    // scalastyle:off println
    if (verbose) SparkSubmit.printStream.println(s"Using properties file: $propertiesFile")
    Option(propertiesFile).foreach { filename =>
      val properties = Utils.getPropertiesFromFile(filename)
      properties.foreach { case (k, v) =>
        defaultProperties(k) = v
      }
      // Property files may contain sensitive information, so redact before printing
      if (verbose) {
        Utils.redact(properties).foreach { case (k, v) =>
          SparkSubmit.printStream.println(s"Adding default property: $k=$v")
        }
      }
    }
    // scalastyle:on println
    defaultProperties
  }
// Set parameters from command line arguments
try {
parse(args.asJava)
} catch {
case e: IllegalArgumentException =>
SparkSubmit.printErrorAndExit(e.getMessage())
}
// Populate `sparkProperties` map from properties file
mergeDefaultSparkProperties()
// Remove keys that don't start with "spark." from `sparkProperties`.
ignoreNonSparkProperties()
// Use `sparkProperties` map along with env vars to fill in any missing parameters
loadEnvironmentArguments()
validateArguments()
  /**
   * Merge values from the default properties file with those specified through --conf.
   * When this is called, `sparkProperties` is already filled with configs from the latter.
   */
  private def mergeDefaultSparkProperties(): Unit = {
    // Use common defaults file, if not specified by user
    propertiesFile = Option(propertiesFile).getOrElse(Utils.getDefaultPropertiesFile(env))
    // Honor --conf before the defaults file: only keys not already set by the
    // user are copied in from the defaults. Accessing `defaultSparkProperties`
    // here forces the lazy val, reading the file resolved just above.
    defaultSparkProperties.foreach { case (k, v) =>
      if (!sparkProperties.contains(k)) {
        sparkProperties(k) = v
      }
    }
  }
/**
* Remove keys that don't start with "spark." from `sparkProperties`.
*/
private def ignoreNonSparkProperties(): Unit = {
sparkProperties.foreach { case (k, v) =>
if (!k.startsWith("spark.")) {
sparkProperties -= k
SparkSubmit.printWarning(s"Ignoring non-spark config property: $k=$v")
}
}
}
/**
* Load arguments from environment variables, Spark properties etc.
*/
private def loadEnvironmentArguments(): Unit = {
master = Option(master)
.orElse(sparkProperties.get("spark.master"))
.orElse(env.get("MASTER"))
.orNull
driverExtraClassPath = Option(driverExtraClassPath)
.orElse(sparkProperties.get("spark.driver.extraClassPath"))
.orNull
driverExtraJavaOptions = Option(driverExtraJavaOptions)
.orElse(sparkProperties.get("spark.driver.extraJavaOptions"))
.orNull
driverExtraLibraryPath = Option(driverExtraLibraryPath)
.orElse(sparkProperties.get("spark.driver.extraLibraryPath"))
.orNull
driverMemory = Option(driverMemory)
.orElse(sparkProperties.get("spark.driver.memory"))
.orElse(env.get("SPARK_DRIVER_MEMORY"))
.orNull
driverCores = Option(driverCores)
.orElse(sparkProperties.get("spark.driver.cores"))
.orNull
executorMemory = Option(executorMemory)
.orElse(sparkProperties.get("spark.executor.memory"))
.orElse(env.get("SPARK_EXECUTOR_MEMORY"))
.orNull
executorCores = Option(executorCores)
.orElse(sparkProperties.get("spark.executor.cores"))
.orElse(env.get("SPARK_EXECUTOR_CORES"))
.orNull
totalExecutorCores = Option(totalExecutorCores)
.orElse(sparkProperties.get("spark.cores.max"))
.orNull
name = Option(name).orElse(sparkProperties.get("spark.app.name")).orNull
jars = Option(jars).orElse(sparkProperties.get("spark.jars")).orNull
files = Option(files).orElse(sparkProperties.get("spark.files")).orNull
ivyRepoPath = sparkProperties.get("spark.jars.ivy").orNull
packages = Option(packages).orElse(sparkProperties.get("spark.jars.packages")).orNull
packagesExclusions = Option(packagesExclusions)
.orElse(sparkProperties.get("spark.jars.excludes")).orNull
repositories = Option(repositories)
.orElse(sparkProperties.get("spark.jars.repositories")).orNull
deployMode = Option(deployMode)
.orElse(sparkProperties.get("spark.submit.deployMode"))
.orElse(env.get("DEPLOY_MODE"))
.orNull
numExecutors = Option(numExecutors)
.getOrElse(sparkProperties.get("spark.executor.instances").orNull)
queue = Option(queue).orElse(sparkProperties.get("spark.yarn.queue")).orNull
keytab = Option(keytab).orElse(sparkProperties.get("spark.yarn.keytab")).orNull
principal = Option(principal).orElse(sparkProperties.get("spark.yarn.principal")).orNull
// Try to set main class from JAR if no --class argument is given
if (mainClass == null && !isPython && !isR && primaryResource != null) {
val uri = new URI(primaryResource)
val uriScheme = uri.getScheme()
uriScheme match {
case "file" =>
try {
val jar = new JarFile(uri.getPath)
// Note that this might still return null if no main-class is set; we catch that later
mainClass = jar.getManifest.getMainAttributes.getValue("Main-Class")
} catch {
case e: Exception =>
SparkSubmit.printErrorAndExit(s"Cannot load main class from JAR $primaryResource")
}
case _ =>
SparkSubmit.printErrorAndExit(
s"Cannot load main class from JAR $primaryResource with URI $uriScheme. " +
"Please specify a class through --class.")
}
}
// Global defaults. These should be keep to minimum to avoid confusing behavior.
master = Option(master).getOrElse("local[*]")
// In YARN mode, app name can be set via SPARK_YARN_APP_NAME (see SPARK-5222)
if (master.startsWith("yarn")) {
name = Option(name).orElse(env.get("SPARK_YARN_APP_NAME")).orNull
}
// Set name from main class if not given
name = Option(name).orElse(Option(mainClass)).orNull
if (name == null && primaryResource != null) {
name = Utils.stripDirectory(primaryResource)
}
// Action should be SUBMIT unless otherwise specified
action = Option(action).getOrElse(SUBMIT)
}
  /** Ensure that required fields exists. Call this only once all defaults are loaded. */
  private def validateArguments(): Unit = {
    // `action` is guaranteed non-null here: loadEnvironmentArguments() defaults it to SUBMIT.
    action match {
      case SUBMIT => validateSubmitArguments()
      case KILL => validateKillArguments()
      case REQUEST_STATUS => validateStatusRequestArguments()
    }
  }
private def validateSubmitArguments(): Unit = {
if (args.length == 0) {
printUsageAndExit(-1)
}
if (primaryResource == null) {
SparkSubmit.printErrorAndExit("Must specify a primary resource (JAR or Python or R file)")
}
if (mainClass == null && SparkSubmit.isUserJar(primaryResource)) {
SparkSubmit.printErrorAndExit("No main class set in JAR; please specify one with --class")
}
if (driverMemory != null
&& Try(JavaUtils.byteStringAsBytes(driverMemory)).getOrElse(-1L) <= 0) {
SparkSubmit.printErrorAndExit("Driver Memory must be a positive number")
}
if (executorMemory != null
&& Try(JavaUtils.byteStringAsBytes(executorMemory)).getOrElse(-1L) <= 0) {
SparkSubmit.printErrorAndExit("Executor Memory cores must be a positive number")
}
if (executorCores != null && Try(executorCores.toInt).getOrElse(-1) <= 0) {
SparkSubmit.printErrorAndExit("Executor cores must be a positive number")
}
if (totalExecutorCores != null && Try(totalExecutorCores.toInt).getOrElse(-1) <= 0) {
SparkSubmit.printErrorAndExit("Total executor cores must be a positive number")
}
if (numExecutors != null && Try(numExecutors.toInt).getOrElse(-1) <= 0) {
SparkSubmit.printErrorAndExit("Number of executors must be a positive number")
}
if (pyFiles != null && !isPython) {
SparkSubmit.printErrorAndExit("--py-files given but primary resource is not a Python script")
}
if (master.startsWith("yarn")) {
val hasHadoopEnv = env.contains("HADOOP_CONF_DIR") || env.contains("YARN_CONF_DIR")
if (!hasHadoopEnv && !Utils.isTesting) {
throw new Exception(s"When running with master '$master' " +
"either HADOOP_CONF_DIR or YARN_CONF_DIR must be set in the environment.")
}
}
if (proxyUser != null && principal != null) {
SparkSubmit.printErrorAndExit("Only one of --proxy-user or --principal can be provided.")
}
}
private def validateKillArguments(): Unit = {
if (!master.startsWith("spark://") && !master.startsWith("mesos://")) {
SparkSubmit.printErrorAndExit(
"Killing submissions is only supported in standalone or Mesos mode!")
}
if (submissionToKill == null) {
SparkSubmit.printErrorAndExit("Please specify a submission to kill.")
}
}
  /**
   * Validate arguments for the REQUEST_STATUS action: requires a standalone or
   * Mesos master and a submission id to query.
   */
  private def validateStatusRequestArguments(): Unit = {
    if (!master.startsWith("spark://") && !master.startsWith("mesos://")) {
      SparkSubmit.printErrorAndExit(
        "Requesting submission statuses is only supported in standalone or Mesos mode!")
    }
    if (submissionToRequestStatusFor == null) {
      SparkSubmit.printErrorAndExit("Please specify a submission to request status for.")
    }
  }
def isStandaloneCluster: Boolean = {
master.startsWith("spark://") && deployMode == "cluster"
}
override def toString: String = {
s"""Parsed arguments:
| master $master
| deployMode $deployMode
| executorMemory $executorMemory
| executorCores $executorCores
| totalExecutorCores $totalExecutorCores
| propertiesFile $propertiesFile
| driverMemory $driverMemory
| driverCores $driverCores
| driverExtraClassPath $driverExtraClassPath
| driverExtraLibraryPath $driverExtraLibraryPath
| driverExtraJavaOptions $driverExtraJavaOptions
| supervise $supervise
| queue $queue
| numExecutors $numExecutors
| files $files
| pyFiles $pyFiles
| archives $archives
| mainClass $mainClass
| primaryResource $primaryResource
| name $name
| childArgs [${childArgs.mkString(" ")}]
| jars $jars
| packages $packages
| packagesExclusions $packagesExclusions
| repositories $repositories
| verbose $verbose
|
|Spark properties used, including those specified through
| --conf and those from the properties file $propertiesFile:
|${Utils.redact(sparkProperties).mkString(" ", "\\n ", "\\n")}
""".stripMargin
}
/** Fill in values by parsing user options. */
override protected def handle(opt: String, value: String): Boolean = {
opt match {
case NAME =>
name = value
case MASTER =>
master = value
case CLASS =>
mainClass = value
case DEPLOY_MODE =>
if (value != "client" && value != "cluster") {
SparkSubmit.printErrorAndExit("--deploy-mode must be either \\"client\\" or \\"cluster\\"")
}
deployMode = value
case NUM_EXECUTORS =>
numExecutors = value
case TOTAL_EXECUTOR_CORES =>
totalExecutorCores = value
case EXECUTOR_CORES =>
executorCores = value
case EXECUTOR_MEMORY =>
executorMemory = value
case DRIVER_MEMORY =>
driverMemory = value
case DRIVER_CORES =>
driverCores = value
case DRIVER_CLASS_PATH =>
driverExtraClassPath = value
case DRIVER_JAVA_OPTIONS =>
driverExtraJavaOptions = value
case DRIVER_LIBRARY_PATH =>
driverExtraLibraryPath = value
case PROPERTIES_FILE =>
propertiesFile = value
case KILL_SUBMISSION =>
submissionToKill = value
if (action != null) {
SparkSubmit.printErrorAndExit(s"Action cannot be both $action and $KILL.")
}
action = KILL
case STATUS =>
submissionToRequestStatusFor = value
if (action != null) {
SparkSubmit.printErrorAndExit(s"Action cannot be both $action and $REQUEST_STATUS.")
}
action = REQUEST_STATUS
case SUPERVISE =>
supervise = true
case QUEUE =>
queue = value
case FILES =>
files = Utils.resolveURIs(value)
case PY_FILES =>
pyFiles = Utils.resolveURIs(value)
case ARCHIVES =>
archives = Utils.resolveURIs(value)
case JARS =>
jars = Utils.resolveURIs(value)
case PACKAGES =>
packages = value
case PACKAGES_EXCLUDE =>
packagesExclusions = value
case REPOSITORIES =>
repositories = value
case CONF =>
val (confName, confValue) = SparkSubmit.parseSparkConfProperty(value)
sparkProperties(confName) = confValue
case PROXY_USER =>
proxyUser = value
case PRINCIPAL =>
principal = value
case KEYTAB =>
keytab = value
case HELP =>
printUsageAndExit(0)
case VERBOSE =>
verbose = true
case VERSION =>
SparkSubmit.printVersionAndExit()
case USAGE_ERROR =>
printUsageAndExit(1)
case _ =>
throw new IllegalArgumentException(s"Unexpected argument '$opt'.")
}
true
}
  /**
   * Handle unrecognized command line options.
   *
   * The first unrecognized option is treated as the "primary resource". Everything else is
   * treated as application arguments.
   */
  override protected def handleUnknown(opt: String): Boolean = {
    // An unknown flag (leading "-") is a user error, not a resource.
    if (opt.startsWith("-")) {
      SparkSubmit.printErrorAndExit(s"Unrecognized option '$opt'.")
    }

    // Shell/internal pseudo-resources are kept verbatim; real paths are
    // normalized to URIs.
    primaryResource =
      if (!SparkSubmit.isShell(opt) && !SparkSubmit.isInternal(opt)) {
        Utils.resolveURI(opt).toString
      } else {
        opt
      }
    isPython = SparkSubmit.isPython(opt)
    isR = SparkSubmit.isR(opt)
    // NOTE(review): returning false presumably tells the parser to stop option
    // parsing so remaining args become application args — confirm against
    // SparkSubmitArgumentsParser.
    false
  }
override protected def handleExtraArgs(extra: JList[String]): Unit = {
childArgs ++= extra.asScala
}
private def printUsageAndExit(exitCode: Int, unknownParam: Any = null): Unit = {
// scalastyle:off println
val outStream = SparkSubmit.printStream
if (unknownParam != null) {
outStream.println("Unknown/unsupported param " + unknownParam)
}
val command = sys.env.get("_SPARK_CMD_USAGE").getOrElse(
"""Usage: spark-submit [options] <app jar | python file | R file> [app arguments]
|Usage: spark-submit --kill [submission ID] --master [spark://...]
|Usage: spark-submit --status [submission ID] --master [spark://...]
|Usage: spark-submit run-example [options] example-class [example args]""".stripMargin)
outStream.println(command)
val mem_mb = Utils.DEFAULT_DRIVER_MEM_MB
outStream.println(
s"""
|Options:
| --master MASTER_URL spark://host:port, mesos://host:port, yarn, or local
| (Default: local[*]).
| --deploy-mode DEPLOY_MODE Whether to launch the driver program locally ("client") or
| on one of the worker machines inside the cluster ("cluster")
| (Default: client).
| --class CLASS_NAME Your application's main class (for Java / Scala apps).
| --name NAME A name of your application.
| --jars JARS Comma-separated list of jars to include on the driver
| and executor classpaths.
| --packages Comma-separated list of maven coordinates of jars to include
| on the driver and executor classpaths. Will search the local
| maven repo, then maven central and any additional remote
| repositories given by --repositories. The format for the
| coordinates should be groupId:artifactId:version.
| --exclude-packages Comma-separated list of groupId:artifactId, to exclude while
| resolving the dependencies provided in --packages to avoid
| dependency conflicts.
| --repositories Comma-separated list of additional remote repositories to
| search for the maven coordinates given with --packages.
| --py-files PY_FILES Comma-separated list of .zip, .egg, or .py files to place
| on the PYTHONPATH for Python apps.
| --files FILES Comma-separated list of files to be placed in the working
| directory of each executor. File paths of these files
| in executors can be accessed via SparkFiles.get(fileName).
|
| --conf PROP=VALUE Arbitrary Spark configuration property.
| --properties-file FILE Path to a file from which to load extra properties. If not
| specified, this will look for conf/spark-defaults.conf.
|
| --driver-memory MEM Memory for driver (e.g. 1000M, 2G) (Default: ${mem_mb}M).
| --driver-java-options Extra Java options to pass to the driver.
| --driver-library-path Extra library path entries to pass to the driver.
| --driver-class-path Extra class path entries to pass to the driver. Note that
| jars added with --jars are automatically included in the
| classpath.
|
| --executor-memory MEM Memory per executor (e.g. 1000M, 2G) (Default: 1G).
|
| --proxy-user NAME User to impersonate when submitting the application.
| This argument does not work with --principal / --keytab.
|
| --help, -h Show this help message and exit.
| --verbose, -v Print additional debug output.
| --version, Print the version of current Spark.
|
| Cluster deploy mode only:
| --driver-cores NUM Number of cores used by the driver, only in cluster mode
| (Default: 1).
|
| Spark standalone or Mesos with cluster deploy mode only:
| --supervise If given, restarts the driver on failure.
| --kill SUBMISSION_ID If given, kills the driver specified.
| --status SUBMISSION_ID If given, requests the status of the driver specified.
|
| Spark standalone and Mesos only:
| --total-executor-cores NUM Total cores for all executors.
|
| Spark standalone and YARN only:
| --executor-cores NUM Number of cores per executor. (Default: 1 in YARN mode,
| or all available cores on the worker in standalone mode)
|
| YARN-only:
| --queue QUEUE_NAME The YARN queue to submit to (Default: "default").
| --num-executors NUM Number of executors to launch (Default: 2).
| If dynamic allocation is enabled, the initial number of
| executors will be at least NUM.
| --archives ARCHIVES Comma separated list of archives to be extracted into the
| working directory of each executor.
| --principal PRINCIPAL Principal to be used to login to KDC, while running on
| secure HDFS.
| --keytab KEYTAB The full path to the file that contains the keytab for the
| principal specified above. This keytab will be copied to
| the node running the Application Master via the Secure
| Distributed Cache, for renewing the login tickets and the
| delegation tokens periodically.
""".stripMargin
)
if (SparkSubmit.isSqlShell(mainClass)) {
outStream.println("CLI options:")
outStream.println(getSqlShellOptions())
}
// scalastyle:on println
SparkSubmit.exitFn(exitCode)
}
/**
* Run the Spark SQL CLI main class with the "--help" option and catch its output. Then filter
* the results to remove unwanted lines.
*
* Since the CLI will call `System.exit()`, we install a security manager to prevent that call
* from working, and restore the original one afterwards.
*/
private def getSqlShellOptions(): String = {
val currentOut = System.out
val currentErr = System.err
val currentSm = System.getSecurityManager()
try {
val out = new ByteArrayOutputStream()
val stream = new PrintStream(out)
System.setOut(stream)
System.setErr(stream)
val sm = new SecurityManager() {
override def checkExit(status: Int): Unit = {
throw new SecurityException()
}
override def checkPermission(perm: java.security.Permission): Unit = {}
}
System.setSecurityManager(sm)
try {
Utils.classForName(mainClass).getMethod("main", classOf[Array[String]])
.invoke(null, Array(HELP))
} catch {
case e: InvocationTargetException =>
// Ignore SecurityException, since we throw it above.
if (!e.getCause().isInstanceOf[SecurityException]) {
throw e
}
}
stream.flush()
// Get the output and discard any unnecessary lines from it.
Source.fromString(new String(out.toByteArray(), StandardCharsets.UTF_8)).getLines
.filter { line =>
!line.startsWith("log4j") && !line.startsWith("usage")
}
.mkString("\\n")
} finally {
System.setSecurityManager(currentSm)
System.setOut(currentOut)
System.setErr(currentErr)
}
}
}
| ajaysaini725/spark | core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala | Scala | apache-2.0 | 25,666 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.kafka09
import java.util.concurrent.CountDownLatch
import com.google.common.util.concurrent.AtomicLongMap
import com.typesafe.scalalogging.LazyLogging
import com.vividsolutions.jts.geom.{Coordinate, Point}
import org.geotools.data._
import org.geotools.factory.CommonFactoryFinder
import org.geotools.filter.identity.FeatureIdImpl
import org.geotools.filter.text.ecql.ECQL
import org.geotools.geometry.jts.JTSFactoryFinder
import org.joda.time.DateTime
import org.junit.runner.RunWith
import org.locationtech.geomesa.kafka.{KafkaFeatureEvent, TestLambdaFeatureListener, KafkaDataStoreHelper}
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.filter.Filter
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
@RunWith(classOf[JUnitRunner])
class LiveKafkaConsumerFeatureSourceTest extends Specification with HasEmbeddedKafka with LazyLogging {
sequential // this doesn't really need to be sequential, but we're trying to reduce zk load
// skip embedded kafka tests unless explicitly enabled, they often fail randomly
skipAllUnless(sys.props.get(SYS_PROP_RUN_TESTS).exists(_.toBoolean))
val gf = JTSFactoryFinder.getGeometryFactory
"LiveKafkaConsumerFeatureSource" should {
"allow for configurable expiration" >> {
val zkPath = "/geomesa/kafka/testexpiry"
val producerParams = Map(
"brokers" -> brokerConnect,
"zookeepers" -> zkConnect,
"zkPath" -> zkPath,
"isProducer" -> true)
val consumerParams = Map(
"brokers" -> brokerConnect,
"zookeepers" -> zkConnect,
"zkPath" -> zkPath,
"isProducer" -> false)
val sft = {
val sft = SimpleFeatureTypes.createType("expiry", "name:String,age:Int,dtg:Date,*geom:Point:srid=4326")
KafkaDataStoreHelper.createStreamingSFT(sft, zkPath)
}
val producerDS = DataStoreFinder.getDataStore(producerParams)
producerDS.createSchema(sft)
val expiryConsumer = DataStoreFinder.getDataStore(consumerParams ++ Map(KafkaDataStoreFactoryParams.EXPIRATION_PERIOD.getName -> 1000L))
val consumerFC = expiryConsumer.getFeatureSource("expiry")
val fw = producerDS.getFeatureWriter("expiry", null, Transaction.AUTO_COMMIT)
val sf = fw.next()
sf.setAttributes(Array("smith", 30, DateTime.now().toDate).asInstanceOf[Array[AnyRef]])
sf.setDefaultGeometry(gf.createPoint(new Coordinate(0.0, 0.0)))
fw.write()
Thread.sleep(500)
val bbox = ECQL.toFilter("bbox(geom,-10,-10,10,10)")
// verify the feature is written - hit the cache directly
{
val features = consumerFC.getFeatures(Filter.INCLUDE).features()
features.hasNext must beTrue
val readSF = features.next()
sf.getID must be equalTo readSF.getID
sf.getAttribute("dtg") must be equalTo readSF.getAttribute("dtg")
features.hasNext must beFalse
}
// verify the feature is written - hit the spatial index
{
val features = consumerFC.getFeatures(bbox).features()
features.hasNext must beTrue
val readSF = features.next()
sf.getID must be equalTo readSF.getID
sf.getAttribute("dtg") must be equalTo readSF.getAttribute("dtg")
features.hasNext must beFalse
}
// allow the cache to expire
Thread.sleep(500)
// verify feature has expired - hit the cache directly
{
val features = consumerFC.getFeatures(Filter.INCLUDE).features()
features.hasNext must beFalse
}
// force the cache cleanup - normally this would happen during additional reads and writes
consumerFC.asInstanceOf[LiveKafkaConsumerFeatureSource].featureCache.cleanUp()
// verify feature has expired - hit the spatial index
{
val features = consumerFC.getFeatures(bbox).features()
features.hasNext must beFalse
}
}
"support listeners" >> {
val zkPath = "/geomesa/kafka/testlisteners"
val producerParams = Map(
"brokers" -> brokerConnect,
"zookeepers" -> zkConnect,
"zkPath" -> zkPath,
"isProducer" -> true)
val consumerParams = Map(
"brokers" -> brokerConnect,
"zookeepers" -> zkConnect,
"zkPath" -> zkPath,
"isProducer" -> false)
val m = AtomicLongMap.create[String]()
val id = "testlistener"
val numUpdates = 100
val maxLon = 80.0
var latestLon = -1.0
val sft = {
val sft = SimpleFeatureTypes.createType("listeners", "name:String,age:Int,dtg:Date,*geom:Point:srid=4326")
KafkaDataStoreHelper.createStreamingSFT(sft, zkPath)
}
val producerDS = DataStoreFinder.getDataStore(producerParams)
producerDS.createSchema(sft)
val listenerConsumerDS = DataStoreFinder.getDataStore(consumerParams)
val consumerFC = listenerConsumerDS.getFeatureSource("listeners")
val latch = new CountDownLatch(numUpdates)
val featureListener = new TestLambdaFeatureListener((fe: KafkaFeatureEvent) => {
val f = fe.feature
val geom: Point = f.getDefaultGeometry.asInstanceOf[Point]
latestLon = geom.getX
m.incrementAndGet(f.getID)
latch.countDown()
})
consumerFC.addFeatureListener(featureListener)
val fw = producerDS.getFeatureWriter("listeners", null, Transaction.AUTO_COMMIT)
(numUpdates to 1 by -1).foreach { writeUpdate }
def writeUpdate(i: Int) = {
val ll = maxLon - maxLon/i
val sf = fw.next()
sf.getIdentifier.asInstanceOf[FeatureIdImpl].setID("testlistener")
sf.setAttributes(Array("smith", 30, DateTime.now().toDate).asInstanceOf[Array[AnyRef]])
sf.setDefaultGeometry(gf.createPoint(new Coordinate(ll, ll)))
fw.write()
}
logger.debug("Wrote feature")
while(latch.getCount > 0) {
Thread.sleep(100)
}
logger.debug("getting id")
m.get(id) must be equalTo numUpdates
latestLon must be equalTo 0.0
}
"handle filters" >> {
val zkPath = "/geomesa/kafka/testfilters"
val producerParams = Map(
"brokers" -> brokerConnect,
"zookeepers" -> zkConnect,
"zkPath" -> zkPath,
"isProducer" -> true)
val consumerParams = Map(
"brokers" -> brokerConnect,
"zookeepers" -> zkConnect,
"zkPath" -> zkPath,
"isProducer" -> false)
val sft = {
val sft = SimpleFeatureTypes.createType("filts", "name:String,age:Int,dtg:Date,*geom:Point:srid=4326")
KafkaDataStoreHelper.createStreamingSFT(sft, zkPath)
}
val producerDS = DataStoreFinder.getDataStore(producerParams)
producerDS.createSchema(sft)
val ff = CommonFactoryFinder.getFilterFactory2
val filt = ff.bbox("geom", -80, 35, -75, 40, "EPSG:4326")
val listenerConsumerDS = DataStoreFinder.getDataStore(consumerParams).asInstanceOf[KafkaDataStore]
val consumerFC = listenerConsumerDS.getFeatureSource("filts", filt)
val fw = producerDS.getFeatureWriter("filts", null, Transaction.AUTO_COMMIT)
var sf = fw.next()
sf.getIdentifier.asInstanceOf[FeatureIdImpl].setID("testfilt-1")
sf.setAttributes(Array("smith", 30, DateTime.now().toDate).asInstanceOf[Array[AnyRef]])
sf.setDefaultGeometry(gf.createPoint(new Coordinate(-77, 38)))
fw.write()
logger.debug("Wrote feature")
sf = fw.next()
sf.getIdentifier.asInstanceOf[FeatureIdImpl].setID("testfilt-2")
sf.setAttributes(Array("smith", 30, DateTime.now().toDate).asInstanceOf[Array[AnyRef]])
sf.setDefaultGeometry(gf.createPoint(new Coordinate(-88, 38)))
fw.write()
Thread.sleep(1000)
import org.locationtech.geomesa.utils.geotools.Conversions._
val features = consumerFC.getFeatures.features().toList
features.size must be equalTo 1
features.head.getID must be equalTo "testfilt-1"
}
}
step {
shutdown()
}
}
| tkunicki/geomesa | geomesa-kafka/geomesa-kafka-datastore/geomesa-kafka-09-datastore/src/test/scala/org/locationtech/geomesa/kafka09/LiveKafkaConsumerFeatureSourceTest.scala | Scala | apache-2.0 | 8,602 |
package com.sksamuel.scapegoat.inspections
import com.sksamuel.scapegoat.{ Inspection, InspectionContext, Inspector, Levels }
/** @author Stephen Samuel */
/** @author Stephen Samuel */
class DoubleNegation extends Inspection {

  def inspector(context: InspectionContext): Inspector = new Inspector(context) {

    override def postTyperTraverser = Some(new context.Traverser {
      import context.global._

      // Stable identifier (capitalized) for the `unary_!` method name so it
      // can be matched by equality inside the pattern below.
      private val Negation = TermName("unary_$bang")

      override def inspect(tree: Tree): Unit = tree match {
        // A selection of `unary_!` on the result of another `unary_!` is `!!x`.
        case Select(Select(_, Negation), Negation) =>
          context.warn(
            "Double negation",
            tree.pos,
            Levels.Info,
            "Double negation can be removed: " + tree.toString().take(200),
            DoubleNegation.this)
        case _ => continue(tree)
      }
    })
  }
}
| pwwpche/scalac-scapegoat-plugin | src/main/scala/com/sksamuel/scapegoat/inspections/DoubleNegation.scala | Scala | apache-2.0 | 830 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.computations.calculations.PoolPercentageCalculator
import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever
// Box holding the apportioned main rate as a BigDecimal; NotInPdf keeps it out of the rendered PDF.
case class CATO21(value: BigDecimal) extends CtBoxIdentifier(name = "Apportioned Main Rate") with CtBigDecimal with NotInPdf
object CATO21 extends Calculated[CATO21, ComputationsBoxRetriever] {

  /** Derives CATO21 from the CP1/CP2 boxes via the pool percentage calculator. */
  override def calculate(fieldValueRetriever: ComputationsBoxRetriever): CATO21 = {
    val apportionedRate =
      PoolPercentageCalculator().apportionedMainRate(fieldValueRetriever.cp1, fieldValueRetriever.cp2)
    CATO21(apportionedRate)
  }
}
| pncampbell/ct-calculations | src/main/scala/uk/gov/hmrc/ct/CATO21.scala | Scala | apache-2.0 | 1,187 |
package app.circumstances
import utils.WithJsBrowser
import app.FunctionalTestCommon
import utils.pageobjects.circumstances.start_of_process.GReportChangesPage
import utils.pageobjects.xml_validation.{XMLBusinessValidation, XMLCircumstancesBusinessValidation}
import utils.pageobjects.{Page, PageObjects, TestData, XmlPage}
/**
* End-to-End functional tests using input files created by Steve Moody.
* @author Jorge Migueis
* Date: 02/08/2013
*/
class FunctionalTestCase12Spec extends FunctionalTestCommon {
  isolated
  section("functional")
  "The application Circumstances" should {
    "Successfully run absolute Circumstances Test Case 12" in new WithJsBrowser with PageObjects {
      // Drive the claim through the browser using Steve Moody's CSV scenario.
      val page = GReportChangesPage(context)
      val circs = TestData.readTestDataFromFile("/functional_scenarios/circumstances/TestCase12.csv")
      page goToThePage()
      val lastPage = page runClaimWith(circs)
      lastPage match {
        // Reaching the XML page means submission succeeded: validate its content.
        case p: XmlPage => {
          val validator: XMLBusinessValidation = new XMLCircumstancesBusinessValidation
          validateAndPrintErrors(p, circs, validator) should beTrue
        }
        // Any other page is unexpected; dump its source for debugging.
        case p: Page => println(p.source)
      }
    }
  }
  section("functional")
}
| Department-for-Work-and-Pensions/ClaimCapture | c3/test/app/circumstances/FunctionalTestCase12Spec.scala | Scala | mit | 1,224 |
// Negative compilation test: a `given` pattern may not appear on the left-hand
// side of a `val` definition, so each marked line below must fail to compile
// (and the TreeSet constructions then lack an implicit Ordering).
// Do not "fix" this file; the markers are checked by the compiler test suite.
class Test {
  import scala.collection.immutable.{TreeSet, HashSet}

  def f2[T](x: Ordering[T]) = {
    val (given Ordering[T]) = x // error: given Ordering[T] not allowed here
    new TreeSet[T] // error: no implicit ordering defined for T
  }
  def f3[T](x: Ordering[T]) = {
    val given Ordering[T] = x // error: given Ordering[T] not allowed here
    new TreeSet[T] // error: no implicit ordering defined for T
  }
}
| dotty-staging/dotty | tests/neg/given-pattern.scala | Scala | apache-2.0 | 425 |
package cbb.gmtrainner
import scala.collection.mutable
import Strand._
/**
 * One annotated gene over a genomic sequence: partitions `seq` into exons and
 * introns according to `features` (assumed ordered by position), and derives
 * fixed-width training windows around splice sites and translation starts.
 *
 * NOTE(review): feature coordinates appear to be 1-based inclusive, and windows
 * that run off either end of the sequence are padded with 'N' (see safeSub) —
 * confirm against Utils.extactSeq before relying on exact offsets.
 */
case class Gene(accession: String, seq: String, features: List[Feature]) {
  var exons = mutable.ListBuffer[String]()
  var introns = mutable.ListBuffer[String]()
  // Exon i spans feature i; intron i is the gap between feature i and i+1.
  for (i <- features.indices) {
    val feature = features(i)
    exons append Utils.extactSeq(seq, feature)
    if (i < features.size - 1) {
      val nextFeature = features(i+1)
      introns append Utils.extactSeq(seq, feature.end+1, nextFeature.start-1, feature.strand)
    }
  }
  val head = features.head
  val last = features.last
  // A gene on the antisense strand is processed on the reverse complement.
  val reverse = head.strand == ANTISENSE
  val tseq = if (reverse) Utils.complementReverseSeq(seq) else seq
  var utr5 = Utils.extactSeq(seq, 1, head.start-1, SENSE)
  var utr3 = Utils.extactSeq(seq, last.end+1, seq.length, SENSE)
  if (reverse) {
    // Transcription order is reversed and the 5'/3' UTR ends swap.
    exons = exons.reverse
    introns = introns.reverse
    utr5 = Utils.extactSeq(seq, last.end+1, seq.length, ANTISENSE)
    utr3 = Utils.extactSeq(seq, 1, head.start-1, ANTISENSE)
  }

  /** Coding sequence: all exons concatenated in transcription order. */
  def cds_seq: String = exons.mkString

  /** All introns concatenated in transcription order. */
  def introns_seq: String = introns.mkString

  /** The idx-th intron (0-based, transcription order). */
  def intron(idx: Int): String = {
    introns(idx)
  }

  /**
   * Windows of `flank` bases around each annotated donor site — a "GT"
   * dinucleotide coinciding with an internal exon boundary. Genes whose first
   * feature is typed "Single" (single exon) have no donors.
   */
  def donors(flank: Int):List[String] = {
    if (features.head.ftype == "Single") {
      return List[String]()
    }
    val fd = mutable.ListBuffer[String]()
    val q = "GT"
    var index = tseq.indexOf(q)
    while (index >= 0) {
      // Keep only GTs at an exon end (coordinates mirrored when on the antisense strand).
      if ( (!reverse && features.exists((f) => f.end == index && f!= features.last) )
        || (reverse && features.exists((f) => f.start == tseq.length-index+1 && f!= features.head) ) ) {
        val start = index - flank - 1
        val end = index + flank - 1
        val s = safeSub(index, start, end, tseq)
        fd.append(s)
      }
      index = tseq.indexOf(q, index + 1)
    }
    fd.toList
  }

  /** Windows around "GT" occurrences that are NOT annotated exon ends (negative examples). */
  def falseDornors(flank: Int): List[String] = {
    val fd = mutable.ListBuffer[String]()
    val q = "GT"
    var index = seq.indexOf(q)
    while (index >= 0) {
      if ( !features.exists((f) => f.end == index) ) {
        val start = index - flank - 1
        val end = index + flank - 1
        fd.append(safeSub(index, start, end, seq))
      }
      index = seq.indexOf(q, index + 1)
    }
    fd.toList
  }

  /**
   * Windows of `flank` bases around each annotated acceptor site — an "AG"
   * dinucleotide immediately preceding an internal exon start.
   */
  def acceptors(flank: Int): List[String] = {
    val fd = mutable.ListBuffer[String]()
    val q = "AG"
    var index = tseq.indexOf(q)
    while (index >= 0) {
      if ( (!reverse && features.exists((f) => f.start-3 == index && f!= features.head))
        || (reverse && features.exists((f) => f.end+3 == tseq.length-index+1 && f!= features.last)) ) {
        val start = index - flank + 2
        val end = index + flank + 2
        fd.append(safeSub(index, start, end, tseq))
      }
      index = tseq.indexOf(q, index + 1)
    }
    fd.toList
  }

  /** Windows around "AG" occurrences that are NOT annotated exon starts (negative examples). */
  def falseAcceptors(flank: Int): List[String] = {
    val fd = mutable.ListBuffer[String]()
    val q = "AG"
    var index = seq.indexOf(q)
    while (index >= 0) {
      if ( !features.exists((f) => f.start-3 == index) ) {
        val start = index - flank + 2
        val end = index + flank + 2
        fd.append(safeSub(index, start, end, seq))
      }
      index = seq.indexOf(q, index + 1)
    }
    fd.toList
  }

  /** Window of `flank` bases around the annotated translation start (strand-adjusted). */
  def startSite(flank: Int): String = {
    var index = 0
    var start = 0
    var end = 0
    if (reverse) {
      index = tseq.length - features.last.end
      start = index - flank
      end = index + flank
    } else {
      index = features.head.start - 2
      start = index - flank + 1
      end = index + flank + 1
    }
    safeSub(index, start, end, tseq)
  }

  /** Windows around "ATG" occurrences that are NOT the annotated start (negative examples). */
  def falseStarters(flank: Int): List[String] = {
    val fd = mutable.ListBuffer[String]()
    val q = "ATG"
    var index = seq.indexOf(q)
    while (index >= 0) {
      if ( !features.exists((f) => f.start-1 == index )) {
        val start = index - flank
        val end = index + flank + 2
        fd.append(safeSub(index, start, end, seq))
      }
      index = seq.indexOf(q, index + 1)
    }
    fd.toList
  }

  // One-sided substring with 'N' padding when subAt falls outside x.
  // NOTE(review): not referenced anywhere in this file — possibly dead code.
  def safeSub2(x: String, subAt: Int, leftPad: Boolean, idx: Int=0): String = {
    var prefix = ""
    var content = ""
    var suffix = ""
    var j = subAt
    if (leftPad) {
      if (subAt < 0) {
        prefix = "N"*(-subAt)
        j = 0
      }
      content = x.substring(j)
    } else {
      if (subAt > x.length) {
        j = x.length
        val remain = subAt-j
        suffix = "N"*(remain)
      }
      content = x.substring(0, j)
    }
    prefix + content + suffix
  }

  // Substring [start, end) of seq, padding with 'N' on either side when the
  // requested range extends beyond the sequence; `index` itself is unused here.
  def safeSub(index:Int, start: Int, end: Int, seq: String): String = {
    var prefix = ""
    var suffix = ""
    var sstart = start
    var send = end
    if (sstart < 0) {
      prefix = "N"*(-sstart)
      sstart = 0
    }
    if (send > seq.length) {
      suffix = "N"*(send - seq.length)
      send = seq.length
    }
    prefix + seq.substring(sstart, send) + suffix
  }
}
| XingjianXu/gmtrainer | src/main/scala/cbb/gmtrainner/Gene.scala | Scala | lgpl-3.0 | 4,874 |
package io.iohk.ethereum.db.storage
import io.iohk.ethereum.ObjectGenerators
import io.iohk.ethereum.db.dataSource.EphemDataSource
import io.iohk.ethereum.domain.Receipt
import org.scalacheck.Gen
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
import org.scalatest.funsuite.AnyFunSuite
/**
 * Property tests for ReceiptStorage over an in-memory data source: batched
 * inserts must be retrievable by block hash, and batched removals must leave
 * the remaining entries intact.
 */
class ReceiptStorageSuite extends AnyFunSuite with ScalaCheckPropertyChecks with ObjectGenerators {

  test("ReceiptStorage insert") {
    forAll(Gen.listOf(byteStringOfLengthNGen(32))) { rawHashes =>
      val hashes = rawHashes.distinct
      val receiptLists = receiptsGen(hashes.length).sample.get
      val pairs = receiptLists.zip(hashes)

      val storage = new ReceiptStorage(EphemDataSource())
      // Fold all puts into a single batch, then commit it atomically.
      val insertions = pairs.foldLeft(storage.emptyBatchUpdate) { case (batch, (receiptList, hash)) =>
        batch.and(storage.put(hash, receiptList))
      }
      insertions.commit()

      // Every inserted receipt list must come back under its block hash.
      pairs.foreach { case (expected, hash) =>
        val fetched: Option[Seq[Receipt]] = storage.get(hash)
        assert(fetched.contains(expected))
      }
    }
  }

  test("ReceiptStorage delete") {
    forAll(Gen.listOf(byteStringOfLengthNGen(32))) { rawHashes =>
      val hashes = rawHashes.distinct
      val receiptLists = receiptsGen(hashes.length).sample.get
      val pairs = receiptLists.zip(hashes)

      // Seed the storage with every pair.
      val storage = new ReceiptStorage(EphemDataSource())
      val insertions = pairs.foldLeft(storage.emptyBatchUpdate) { case (batch, (receiptList, hash)) =>
        batch.and(storage.put(hash, receiptList))
      }
      insertions.commit()

      // Remove a randomly sized prefix and keep the remainder.
      val (removed, kept) = pairs.splitAt(Gen.choose(0, pairs.size).sample.get)
      val deletions = removed.foldLeft(storage.emptyBatchUpdate) { case (batch, (_, hash)) =>
        batch.and(storage.remove(hash))
      }
      deletions.commit()

      kept.foreach { case (expected, hash) =>
        assert(storage.get(hash).contains(expected))
      }
      removed.foreach { case (_, hash) => assert(storage.get(hash).isEmpty) }
    }
  }
}
| input-output-hk/etc-client | src/test/scala/io/iohk/ethereum/db/storage/ReceiptStorageSuite.scala | Scala | mit | 2,376 |
import scala.reflect.runtime.universe._
import scala.tools.reflect.ToolBox
import scala.tools.reflect.Eval
// Fixture: an inner trait accessed through a type projection (A#I).
trait O { trait I }
class A extends O {
  val impl = new I {}
}

// Run-test: reifies an expression typed with the projection A#I, prints the raw
// tree, then evaluates the reified code with the toolbox. Output is compared by
// the test runner, so the code must stay behaviorally unchanged.
object Test extends App {
  val code = reify {
    val v: A#I = (new A).impl
    v
  }
  println(showRaw(code.tree))
  val v: A#I = code.eval
}
| som-snytt/dotty | tests/disabled/macro/run/t6591_2.scala | Scala | apache-2.0 | 319 |
import sbt._
/** Centralised third-party dependency coordinates for the build. */
object Dependencies {
  // Keep the version next to the artifact so bumps stay a one-line change.
  private val ScalaTestVersion = "3.2.7"

  /** ScalaTest core artifact, cross-built for the project's Scala version. */
  lazy val scalaTest = "org.scalatest" %% "scalatest" % ScalaTestVersion
}
| MerHS/biryo | project/Dependencies.scala | Scala | apache-2.0 | 102 |
package org.scalafmt.config
import metaconfig._
import org.scalafmt.config.SpaceBeforeContextBound.Never
/**
*
* @param beforeContextBoundColon formats [A: T] as [A : T]
* @param afterTripleEquals If true, formats ===( as === (
* @param inImportCurlyBraces
* If true, formats `import a.b.{ c, d }`.
* If false, formats `import a.b.{c, d}`.
* @param inParentheses If true, formats `foo(a, b)` as `foo( a, b )`.
* @param neverAroundInfixTypes
* If ["##"] is specified as operator then
* formats `Generic[Foo] ## Repr` as `Generic[Foo]##Repr`.
* @param afterKeywordBeforeParen if false, does not add a space between a keyword and a parenthesis.
* For example:
* if(a) println("HELLO!")
* while(a) println("HELLO!")
* @param inByNameTypes
* If false, removes space in by-name parameter.
* `def foo(a: =>A)`
* @param afterSymbolicDefs If true, adds a single space after an operator method
* For example:
* def <=> [T](that: T): Boolean
*/
// Configuration for the `spaces` section; each field is documented in the
// scaladoc above. Defaults reproduce scalafmt's standard spacing.
case class Spaces(
    beforeContextBoundColon: SpaceBeforeContextBound = Never,
    afterTripleEquals: Boolean = false,
    inImportCurlyBraces: Boolean = false,
    inParentheses: Boolean = false,
    neverAroundInfixTypes: Seq[String] = Nil,
    afterKeywordBeforeParen: Boolean = true,
    inByNameTypes: Boolean = true,
    afterSymbolicDefs: Boolean = false
) {
  // Decoder for this section; `noTypos` turns unknown keys into config errors.
  implicit val reader: ConfDecoder[Spaces] = generic.deriveDecoder(this).noTypos
}

object Spaces {
  // Metaconfig derivations used to parse and re-emit this settings section.
  implicit lazy val surface: generic.Surface[Spaces] = generic.deriveSurface
  implicit lazy val encoder: ConfEncoder[Spaces] = generic.deriveEncoder
}
| olafurpg/scalafmt | scalafmt-core/shared/src/main/scala/org/scalafmt/config/Spaces.scala | Scala | apache-2.0 | 1,619 |
package test_data.v24
import scala.xml.Elem
/**
 * View over the EvidenceList section of a claim XML document, exposing the
 * recipient address lines, postcode node, and the flattened evidence texts.
 *
 * Fix: the address and evidence extractions duplicated the same child-text
 * walk and relied on `collection.breakOut` (removed in Scala 2.13); both now
 * share one helper with an explicit result type.
 */
case class SectionEvidenceList(xml:Elem) {
  // Root of the evidence section within the claim transaction.
  val rootPath = xml \\ "DWPCATransaction" \\ "DWPCAClaim" \\ "EvidenceList"

  // Trimmed text of every direct child of every node in `nodes`,
  // in document order.
  private def childTexts(nodes: scala.xml.NodeSeq): Seq[String] =
    nodes.flatMap(_.child.map(_.text.trim))

  /** One entry per address-line child node, trimmed. */
  val address: Seq[String] = childTexts(rootPath \\ "RecipientAddress" \\ "Answer" \\ "Line")

  /** The postcode node(s), left as XML for callers that need the raw node. */
  val postCode = rootPath \\ "RecipientAddress" \\ "Answer" \\ "PostCode"

  /** Trimmed text of every child of every Evidence element, at any depth. */
  val evidenceList: Seq[String] = childTexts(rootPath \\\\ "Evidence")
}
| Department-for-Work-and-Pensions/RenderingService | test/test_data/v24/SectionEvidenceList.scala | Scala | mit | 688 |
package yugioh.card
import sun.reflect.generics.reflectiveObjects.NotImplementedException
import yugioh._
import yugioh.action._
import yugioh.card.state.{ControlledState, SpellOrTrapControlledState}
import yugioh.events.EventsModule
object Card {
  // A card regardless of the concrete controlled-state type it carries.
  type AnyCard = Card[_ <: ControlledState]
}

/**
 * Base type for every card. `CS` is the kind of state the card carries while
 * it is on the field under someone's control.
 */
trait Card[CS <: ControlledState] {
  val PrintedName: String
  val Owner: Player

  // Current zone; every card starts in its owner's deck.
  var location: Location = InDeck
  // May differ from Owner while a control-changing effect applies.
  var controller: Player = Owner

  /** Actions this card currently offers, given the game state. */
  def actions(implicit gameState: GameState, eventsModule: EventsModule, actionModule: ActionModule): Seq[Action]

  private[this] var _maybeControlledState: Option[CS] = None
  def maybeControlledState: Option[CS] = _maybeControlledState
  // Replacing the controlled state closes the previous one first, if any.
  def maybeControlledState_=(controlledState: Option[CS]): Unit = {
    for (controlledState <- maybeControlledState) {
      controlledState.close()
    }
    _maybeControlledState = controlledState
  }

  /** Display name; defaults to the printed name. */
  def name: String = PrintedName
  override def toString: String = name

  /** Name as seen by `viewer`: a face-down (set) card is hidden from the opponent. */
  def toString(viewer: Player): String = {
    maybeControlledState.map { controlledState =>
      if (controlledState.faceup) {
        name
      } else {
        if (viewer == Owner) {
          s"Set($this)"
        } else {
          "<Set>"
        }
      }
    }.getOrElse(name)
  }

  // Zone transitions, all delegated to the owner's field.
  def discard(cause: Cause)(implicit gameState: GameState, eventsModule: EventsModule, actionModule: ActionModule): Unit = Owner.field.discard(cause, this)
  def destroy(cause: Cause)(implicit gameState: GameState, eventsModule: EventsModule, actionModule: ActionModule): Unit = Owner.field.destroy(cause, this)
  def sendToGrave(cause: Cause)(implicit gameState: GameState, eventsModule: EventsModule, actionModule: ActionModule): Unit = Owner.field.sendToGrave(cause, this)
}
object EffectCard {
  // An effect card regardless of its concrete controlled-state type.
  type AnyEffectCard = EffectCard[_ <: ControlledState]
}

/** A card carrying one or more activatable effects. */
trait EffectCard[CS <: ControlledState] extends Card[CS] {
  // Whether this card has already been activated.
  var activated = false
  val Effects: Seq[Effect]
}

/** Behaviour common to spell and trap cards. */
trait SpellOrTrap extends EffectCard[SpellOrTrapControlledState] {
  /**
   * The turn this card was set on the field, if at all.
   */
  var maybeTurnSet: Option[Int] = None

  val spellSpeed: SpellSpeed

  // A spell/trap in hand may be set during its owner's open main phase.
  override def actions(implicit gameState: GameState, eventsModule: EventsModule, actionModule: ActionModule): Seq[Action] = {
    gameState match {
      case GameState(_, TurnPlayers(Owner, _), OpenGameState, _: MainPhase, _, _) if InHand(this) =>
        Seq(actionModule.newSetAsSpellOrTrap(Owner, this))
      case _ => Seq()
    }
  }
}
/**
 * A spell or trap that does not remain on the field: once a chain it took part
 * in resolves, it is sent to the grave.
 *
 * Should not be inherited from outside of game mechanics. Instead, use one of
 * the concrete subtypes: NormalSpell, QuickPlaySpell, RitualSpell, NormalTrap
 * or CounterTrap.
 */
sealed trait NonContinuousSpellOrTrap extends SpellOrTrap {
  /**
   * After a chain has resolved that involved this card, and it remains on the field, send it to grave.
   */
  def afterChainCleanup()(implicit gameState: GameState, eventsModule: EventsModule, actionModule: ActionModule): Unit = {
    if (InSpellTrapZone(this)) {
      sendToGrave(GameMechanics)
    }
  }
}

/** A spell or trap that stays on the field after resolving. */
trait ContinuousSpellOrTrap extends SpellOrTrap

// spells

sealed trait Spell extends SpellOrTrap {
  override def actions(implicit gameState: GameState, eventsModule: EventsModule, actionModule: ActionModule): Seq[Action] = {
    // Only single-effect spells have generic activation support so far.
    val maybeActivation = if (Effects.size == 1) {
      gameState match {
        case GameState(_, TurnPlayers(Owner, _), OpenGameState, _: MainPhase, _, _) if canActivate =>
          Seq(CardActivation(this, Owner))
        case _ => Seq()
      }
    } else {
      throw new NotImplementedException()
    }
    // super allows setting
    super.actions ++ maybeActivation
  }

  private def canActivate(implicit gameState: GameState): Boolean = {
    // also assumes a single effect
    assert(Effects.length == 1, "Code assumes only a single effect will be present.")
    // if activation condition is met, and the spell is either already on the field or it's in hand and there's space to place it...
    Effects.head.ActivationConditions.met && (InSpellTrapZone(this) || (InHand(this) && controller.field.hasFreeSpellOrTrapZone))
  }
}
// Concrete spell categories: they differ only in spell speed and in whether
// they persist on the field (via NonContinuousSpellOrTrap).

trait NormalSpell extends Spell with NonContinuousSpellOrTrap {
  override val spellSpeed: SpellSpeed = SpellSpeed1
}

trait QuickPlaySpell extends Spell with NonContinuousSpellOrTrap {
  override val spellSpeed: SpellSpeed = SpellSpeed2
}

trait RitualSpell extends Spell with NonContinuousSpellOrTrap {
  override val spellSpeed: SpellSpeed = SpellSpeed1
}

trait EquipSpell extends Spell {
  override val spellSpeed: SpellSpeed = SpellSpeed1
}

trait ContinuousSpell extends Spell {
  override val spellSpeed: SpellSpeed = SpellSpeed1
}

trait FieldSpell extends Spell {
  override val spellSpeed: SpellSpeed = SpellSpeed1
}

/** Effect carried by a spell: timing is correct in the owner's open main phase. */
trait SpellEffect extends Effect {
  override def activationTimingCorrect(implicit gameState: GameState): Boolean = {
    gameState match {
      case GameState(_, TurnPlayers(Card.Owner, _), OpenGameState, _: MainPhase, _, _) => true
      case _ => false
    }
  }
}
// traps

sealed trait Trap extends SpellOrTrap

trait NormalTrap extends Trap with NonContinuousSpellOrTrap {
  override val spellSpeed: SpellSpeed = SpellSpeed2

  override def actions(implicit gameState: GameState, eventsModule: EventsModule, actionModule: ActionModule): Seq[Action] = {
    // Single-effect traps get generic activation; multi-effect traps need bespoke logic.
    val maybeActivation = Effects match {
      case Seq(effect) if canActivate && effect.activationTimingCorrect =>
        Seq(CardActivation(this, Owner))
      case _ =>
        Seq() // subclass will have to write specific logic for this
    }
    // super handles setting
    super.actions ++ maybeActivation
  }

  private def canActivate(implicit gameState: GameState): Boolean = {
    // if activation condition is met, and the spell is already set on the field, and it wasn't set this turn...
    // NOTE(review): `.map(!_.faceup).get` throws if the card has no controlled
    // state; the preceding InSpellTrapZone check presumably guarantees one — verify.
    Effects.head.ActivationConditions.met && InSpellTrapZone(this) && maybeControlledState.map(!_.faceup).get && maybeTurnSet.exists(_ < gameState.turnCount)
  }
}

trait CounterTrap extends Trap with NonContinuousSpellOrTrap {
  override val spellSpeed: SpellSpeed = SpellSpeed3
}

trait ContinuousTrap extends Trap {
  override val spellSpeed: SpellSpeed = SpellSpeed2
}

trait TrapEffect extends Effect {
  /**
   * Applicable for non-counter traps.
   */
  override def activationTimingCorrect(implicit gameState: GameState): Boolean = {
    gameState match {
      // Never during the damage step.
      case GameState(_, _, _, _, _: DamageStep, _) =>
        false
      // Otherwise only at fast-effect timing windows.
      case GameState(_, _, _: CheckForTrigger | _: PlayerFastEffects | _: ChainRules, _, _, _) =>
        true
      case _ =>
        false
    }
  }
}
| micseydel/yugioh | src/main/scala/yugioh/card/Card.scala | Scala | mit | 6,557 |
// Vulpix unit test: `foo` and `bar` recurse into each other with no base case,
// so running this is EXPECTED to throw StackOverflowError. Do not "fix" the
// recursion — the crash is the behaviour under test.
object Test {
  def foo: Int = bar
  def bar: Int = foo
  def main(args: Array[String]): Unit =
    println(foo)
}
| som-snytt/dotty | tests/vulpix-tests/unit/stackOverflow.scala | Scala | apache-2.0 | 116 |
/*
* Copyright 2015 Scopely
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.scopely.affqis
import java.util.concurrent.CountDownLatch
/**
 * Entry point: starts a WAMP router for the "hive" realm, connects the Hive
 * WAMP database client, then parks the main thread forever to keep the JVM up.
 */
object Main extends App {
  JawampaRouter(Seq("hive"))
  new HiveWampDBClient()()

  // This is your life now.
  // The latch is never counted down, so await() blocks indefinitely.
  val latch = new CountDownLatch(1)
  latch.await()
}
| scopely/affqis | src/main/scala/com/scopely/affqis/Main.scala | Scala | apache-2.0 | 860 |
package sandbox
import com.mongodb.casbah.Imports._
import scala.io.Source.fromFile
/**
* @author Rafic
*/
/**
 * Sandbox script: loads a tab-separated enrollment dump into a MongoDB
 * collection (first line is the header row), prints every document, then
 * drops the collection again.
 *
 * Fix: the Source returned by fromFile was never closed, leaking the file
 * handle; it is now closed in a finally block once ingestion completes.
 */
object StudentCourseEnrollment extends App {
  val DataFile = "src/main/resources/courses/harvardx-2013-04-07/ER22x/spring-student-course-enrollment.sql"

  val mongoClient = MongoClient()
  val mongoDB = mongoClient("casbah_test")("test_data")

  val source = fromFile(DataFile)
  try {
    val lines = source.getLines()
    // Header row: column names used as document field names.
    val fields = lines.next().split("\t")
    for (line <- lines) {
      val newObj = MongoDBObject()
      // zip truncates to the shorter side, so short rows simply omit trailing fields.
      for ((field, value) <- fields.zip(line.split("\t"))) {
        newObj += (field -> value)
      }
      mongoDB += newObj
    }
  } finally {
    source.close()
  }

  mongoDB.foreach(println)
  mongoDB.dropCollection()
}
| jimwaldo/HarvardX-Tools | src/main/scala/sandbox/StudentCourseEnrollment.scala | Scala | bsd-3-clause | 689 |
package pl.edu.agh.mplt.parser.declaration.data
import scala.util.parsing.combinator.JavaTokenParsers
import pl.edu.agh.mplt.parser.phrase.set.Indexing
import language.postfixOps
/**
 * Parser for AMPL data declarations (`param`, `set`, `var`). The attribute,
 * indexing and identifier parsers are abstract and supplied by the grammar
 * that mixes this trait in.
 */
trait DatatypeDeclarationAMPLParser extends JavaTokenParsers {
  def indexing: Parser[Indexing]

  // Attribute parsers, one per declaration kind.
  def setAttribute: Parser[Attribute]
  def paramAttribute: Parser[Attribute]
  def varAttribute: Parser[Attribute]

  // Identifiers that are not reserved words (nonAttributeKeyword parses the optional alias).
  def nonKeyword: Parser[String]
  def nonAttributeKeyword: Parser[String]

  // Shared declaration prefix: name, optional alias, optional indexing expression.
  private[this] def common = nonKeyword ~ (nonAttributeKeyword ?) ~ (indexing ?)

  // Grammar: ("param" | "set" | "var") name [alias] [indexing] attr ("," attr)* ";"
  // — commas between attributes are optional (`"," ?` as the repsep separator).
  def datatypeDeclaration: Parser[DataDeclaration] =
    "param" ~> common ~ repsep(paramAttribute, "," ?) <~ ";" ^^ {
      case name ~ optAlias ~ optIndexing ~ optAttributes => ParameterDeclaration(name, optAlias, optIndexing,
        optAttributes)
    } | "set" ~> common ~ repsep(setAttribute, "," ?) <~ ";" ^^ {
      case name ~ optAlias ~ optIndexing ~ optAttributes => SetDeclaration(name, optAlias, optIndexing,
        optAttributes)
    } | "var" ~> common ~ repsep(varAttribute, "," ?) <~ ";" ^^ {
      case name ~ optAlias ~ optIndexing ~ optAttributes => VariableDeclaration(name, optAlias, optIndexing,
        optAttributes)
    }
}
| marek1840/MPLT | src/main/scala/pl/edu/agh/mplt/parser/declaration/data/DatatypeDeclarationAMPLParser.scala | Scala | mit | 1,281 |
/* NSC -- new Scala compiler
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools
package nsc
import scala.language.postfixOps
/** The main class for NSC, a compiler for the programming
* language Scala.
*/
class MainClass extends Driver with EvalLoop {
  // Resident mode: keep one compiler instance alive and recompile the files
  // named on each input line, resetting the reporter between runs.
  def resident(compiler: Global): Unit = loop { line =>
    val command = new CompilerCommand(line split "\\\\s+" toList, new Settings(scalacError))
    compiler.reporter.reset()
    new compiler.Run() compile command.files
  }

  override def newCompiler(): Global = Global(settings)

  // Dispatch: resident loop when -resident is set, otherwise a single batch compile.
  override def doCompile(compiler: Global) {
    if (settings.resident) resident(compiler)
    else super.doCompile(compiler)
  }
}

/** The scalac entry point. */
object Main extends MainClass { }
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.process
import java.util.Date
import com.vividsolutions.jts.geom.Point
import org.geotools.data.collection.ListFeatureCollection
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo.TestWithDataStore
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.filter.function.{BasicValues, Convert2ViewerFunction, EncodedValues, ExtendedValues}
import org.opengis.filter.Filter
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class BinConversionProcessTest extends TestWithDataStore {
import scala.collection.JavaConversions._
sequential
override val spec = "name:String,track:String,dtg:Date,dtg2:Date,*geom:Point:srid=4326,geom2:Point:srid=4326"
val process = new BinConversionProcess
val features = (0 until 10).map { i =>
val sf = new ScalaSimpleFeature(s"0$i", sft)
sf.setAttribute("name", s"name$i")
sf.setAttribute("track", s"$i")
sf.setAttribute("dtg", s"2017-02-20T00:00:0$i.000Z")
sf.setAttribute("dtg2", s"2017-02-21T00:00:0$i.000Z")
sf.setAttribute("geom", s"POINT(40 ${50 + i})")
sf.setAttribute("geom2", s"POINT(20 ${30 + i})")
sf
}
val ids = features.map(_.getID.hashCode)
val names = features.map(_.getAttribute("name").hashCode)
val tracks = features.map(_.getAttribute("track").hashCode)
val dates = features.map(_.getAttribute("dtg").asInstanceOf[Date].getTime)
val dates2 = features.map(_.getAttribute("dtg2").asInstanceOf[Date].getTime)
val lonlat = features.map(_.getAttribute("geom").asInstanceOf[Point]).map(p => (p.getY.toFloat, p.getX.toFloat))
val latlon = lonlat.map(_.swap)
val lonlat2 = features.map(_.getAttribute("geom2").asInstanceOf[Point]).map(p => (p.getY.toFloat, p.getX.toFloat))
val latlon2 = lonlat2.map(_.swap)
val listCollection = new ListFeatureCollection(sft, features)
addFeatures(features)
// converts to tuples that we can compare to zipped values
def toTuples(value: EncodedValues): Any = value match {
case BasicValues(lat, lon, dtg, trackId) => ((trackId, dtg), (lat, lon))
case ExtendedValues(lat, lon, dtg, trackId, label) => (((trackId, dtg), (lat, lon)), label)
}
"BinConversionProcess" should {
"encode an empty feature collection" in {
val bytes = process.execute(new ListFeatureCollection(sft), null, null, null, null, "lonlat")
bytes must beEmpty
}
"encode a generic feature collection" in {
val bytes = process.execute(listCollection, null, null, null, null, "lonlat").toList
bytes must haveLength(10)
val decoded = bytes.map(Convert2ViewerFunction.decode).map(toTuples)
decoded must containTheSameElementsAs(ids.zip(dates).zip(lonlat))
}
"encode a generic feature collection with alternate values" in {
val bytes = process.execute(listCollection, "name", "geom2", "dtg2", null, "lonlat").toList
bytes must haveLength(10)
val decoded = bytes.map(Convert2ViewerFunction.decode).map(toTuples)
decoded must containTheSameElementsAs(names.zip(dates2).zip(lonlat2))
}
"encode a generic feature collection with labels" in {
val bytes = process.execute(listCollection, null, null, null, "track", "lonlat").toList
bytes must haveLength(10)
val decoded = bytes.map(Convert2ViewerFunction.decode).map(toTuples)
decoded must containTheSameElementsAs(ids.zip(dates).zip(lonlat).zip(tracks))
}
"encode an accumulo feature collection in distributed fashion" in {
val bytes = process.execute(fs.getFeatures(Filter.INCLUDE), "name", null, null, null, "lonlat").toList
bytes.length must beLessThan(10)
val decoded = bytes.reduceLeft(_ ++ _).grouped(16).toSeq.map(Convert2ViewerFunction.decode).map(toTuples)
decoded must containTheSameElementsAs(names.zip(dates).zip(lonlat))
}
"encode an accumulo feature collection in distributed fashion with alternate values" in {
val bytes = process.execute(fs.getFeatures(Filter.INCLUDE), "name", "geom2", "dtg2", null, "lonlat").toList
bytes.length must beLessThan(10)
val decoded = bytes.reduceLeft(_ ++ _).grouped(16).toSeq.map(Convert2ViewerFunction.decode).map(toTuples)
decoded must containTheSameElementsAs(names.zip(dates2).zip(lonlat2))
}
"encode an accumulo feature collection in distributed fashion with labels" in {
val bytes = process.execute(fs.getFeatures(Filter.INCLUDE), "name", null, null, "track", "lonlat").toList
bytes.length must beLessThan(10)
val decoded = bytes.reduceLeft(_ ++ _).grouped(24).toSeq.map(Convert2ViewerFunction.decode).map(toTuples)
decoded must containTheSameElementsAs(names.zip(dates).zip(lonlat).zip(tracks))
}
"encode an accumulo feature collection using feature id" in {
failure("not implemented")
val bytes = process.execute(fs.getFeatures(Filter.INCLUDE), null, null, null, "track", "lonlat").toList
bytes.length must beLessThan(10)
val decoded = bytes.reduceLeft(_ ++ _).grouped(24).toSeq.map(Convert2ViewerFunction.decode).map(toTuples)
decoded must containTheSameElementsAs(ids.zip(dates).zip(lonlat).zip(tracks))
}.pendingUntilFixed
}
}
| tkunicki/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/process/BinConversionProcessTest.scala | Scala | apache-2.0 | 5,712 |
package org.jetbrains.plugins.scala.lang.psi.api
import com.intellij.psi.{PsiElement, PsiNamedElement}
import com.intellij.psi.tree.IElementType
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.base.ScEnd
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScNamedElement
/**
 * A PSI element that opens a region which may be closed by a Scala 3 `end`
 * marker (e.g. a class, object or method definition).
 */
trait ScBegin extends ScalaPsiElement {
  /** @return the definition keyword (such as "class") */
  final def keyword: PsiElement = findFirstChildByType(keywordTokenType).get

  /** @return the name or keyword (for navigation from an end marker) */
  def tag: PsiElement = namedTag.getOrElse(keyword)

  // Named element to use as the tag, when the definition has a name.
  protected def namedTag: Option[ScNamedElement] = None

  protected def keywordTokenType: IElementType

  /** @return the corresponding ScEnd element, if present */
  def end: Option[ScEnd] = endParent.flatMap(_.lastChild).filterByType[ScEnd]

  /** @return an element that contains ScEnd, if present */
  protected def endParent: Option[PsiElement] = Some(this)
}

object ScBegin {
  // Extractor pairing a begin element with its optional end marker.
  def unapply(begin: ScBegin): Option[(ScBegin, Option[ScEnd])] = Some((begin, begin.end))
}
import _root_.runtime.RosalindSolution
/**
 * Rosalind "bins": for each query value, report its 1-based position in a
 * sorted array, or -1 when absent.
 *
 * Fixes: the midpoint was computed as (lo + hi) / 2, which can overflow Int
 * for large bounds — now lo + (hi - lo) / 2; the var/return loop is replaced
 * by a tail-recursive helper with identical semantics.
 */
object BinarySearch extends RosalindSolution {
  def problem_name: String = "bins"

  /**
   * Parses the four input lines (sizes, pool, queries) and returns the
   * space-separated search results for every query.
   */
  def get_answer(input_lines: Iterator[String]): String = {
    // The two size lines must be consumed even though the values are unused.
    val search_pool_length = input_lines.next().toInt
    val num_queries = input_lines.next().toInt
    val pool = input_lines.next().split("\\\\s").map{_.toInt}
    val queries = input_lines.next().split("\\\\s").map{_.toInt}
    queries.map(binary_search(_, pool)).mkString(" ")
  }

  /**
   * Classic binary search over a sorted array.
   *
   * @return the 1-based index of `query` in `pool` (the question asks for
   *         1 to N), or -1 when the value is not present.
   */
  def binary_search(query: Int, pool: Array[Int]): Int = {
    @annotation.tailrec
    def loop(lower_bound: Int, upper_bound: Int): Int =
      if (lower_bound >= upper_bound) -1
      else {
        // Overflow-safe midpoint, equivalent to (lower + upper) / 2 for valid indices.
        val test_i = lower_bound + (upper_bound - lower_bound) / 2
        val elem = pool(test_i)
        if (query > elem) loop(test_i + 1, upper_bound)
        else if (query < elem) loop(lower_bound, test_i)
        else test_i + 1
      }
    loop(0, pool.length)
  }
}
| ChrisCooper/rosalind-algorithmic-heights | src/BinarySearch.scala | Scala | gpl-2.0 | 924 |
package com.aerospike.spark
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import com.aerospike.client.Bin
import com.aerospike.client.Key
import com.aerospike.client.policy.WritePolicy
import com.aerospike.spark.sql.AerospikeConnection
/**
 * Integration spec for session-level Aerospike/Spark functions: seeds two sets
 * ("selector" string-keyed, "selectorInt" integer-keyed) with 100 cyclic
 * records each, scans one of them through the Spark session, and finally
 * deletes everything it wrote.
 */
class AeroSparkSessionFunctionsSpec extends FlatSpec with Matchers with SparkASITSpecBase {

  // Attribute values cycled (period 5) across the generated records.
  val ages = Array(25, 26, 27, 28, 29)
  val colours = Array("blue", "red", "yellow", "green", "orange")
  val animals = Array("cat", "dog", "mouse", "snake", "lion")

  // TTL'd write policy (expiration 600) — NOTE(review): not actually passed to
  // the puts below, which use the client's default policy (null).
  val wp = new WritePolicy()
  wp.expiration = 600

  behavior of " Session Func for spark"

  it should "Insert data" in {
    val cl = AerospikeConnection.getClient(conf)
    var i = 0
    // String-keyed set "selector": keys selector-test:1..100.
    for (x <- 1 to 100) {
      val name = new Bin("name", "name:" + i)
      val age = new Bin("age", ages(i))
      val colour = new Bin("color", colours(i))
      val animal = new Bin("animal", animals(i))
      val key = new Key(Globals.namespace, "selector", "selector-test:" + x)
      cl.put(null, key, name, age, colour, animal)
      i += 1
      if (i == 5) i = 0
    }
    i = 0
    // Integer-keyed set "selectorInt": keys 1..100, same cyclic values.
    for (x <- 1 to 100) {
      val name = new Bin("name", "name:" + i)
      val age = new Bin("age", ages(i))
      val colour = new Bin("color", colours(i))
      val animal = new Bin("animal", animals(i))
      val key = new Key(Globals.namespace, "selectorInt", x)
      cl.put(null, key, name, age, colour, animal)
      i += 1
      if (i == 5) i = 0
    }
  }

  it should "Scan Set" in {
    // A full scan of "selector" must see exactly the 100 records inserted above.
    val c = session.scanSet("selector").count
    assert(c == 100)
  }

  // Teardown written as a test so it runs after the assertions; the Bin vals
  // are vestigial copies from the insert loops — only the keys matter here.
  it should "clean up because it's mother doesn't work here" in {
    val cl = AerospikeConnection.getClient(conf)
    var i = 0
    for (x <- 1 to 100) {
      val name = new Bin("name", "name:" + i)
      val age = new Bin("age", ages(i))
      val colour = new Bin("color", colours(i))
      val animal = new Bin("animal", animals(i))
      val key = new Key(Globals.namespace, "selector", "selector-test:" + x)
      cl.delete(null, key)
      i += 1
      if (i == 5) i = 0
    }
    i = 0
    for (x <- 1 to 100) {
      val name = new Bin("name", "name:" + i)
      val age = new Bin("age", ages(i))
      val colour = new Bin("color", colours(i))
      val animal = new Bin("animal", animals(i))
      val key = new Key(Globals.namespace, "selectorInt", x)
      cl.delete(null, key)
      i += 1
      if (i == 5) i = 0
    }
  }
}
} | xiaotiejiang888/scalamongoas | src/test/scala/com/aerospike/spark/AeroSparkSessionFunctionsSpec.scala | Scala | apache-2.0 | 2,403 |
package org.scalatra
package servlet
import javax.servlet.{ServletContextEvent, ServletContextListener}
import grizzled.slf4j.Logger
/**
 * Servlet context listener that bootstraps a Scalatra application: on context
 * start it instantiates the configured LifeCycle class (init parameter
 * "org.scalatra.LifeCycle", defaulting to a class named "Scalatra") and calls
 * its init; on shutdown it calls destroy on the same instance.
 */
class ScalatraListener extends ServletContextListener {
  import ScalatraListener._

  private val logger: Logger = Logger[this.type]

  // Set during contextInitialized; null until then.
  private var cycle: LifeCycle = _
  private var appCtx: ServletApplicationContext = _

  def contextInitialized(sce: ServletContextEvent) = {
    appCtx = ServletApplicationContext(sce.getServletContext)
    // Fully-qualified LifeCycle class name, overridable via the context init parameter.
    val cycleClassName =
      Option(appCtx.getInitParameter(LifeCycleKey)) getOrElse DefaultLifeCycle
    val cycleClass = Class.forName(cycleClassName)
    cycle = cycleClass.newInstance.asInstanceOf[LifeCycle]
    logger.info("Initializing life cycle class: %s".format(cycleClassName))
    cycle.init(appCtx)
  }

  def contextDestroyed(sce: ServletContextEvent) = {
    // cycle is null if contextInitialized failed before assignment.
    if (cycle != null) {
      logger.info("Destroying life cycle class: %s".format(cycle.getClass.getName))
      cycle.destroy(appCtx)
    }
  }
}

object ScalatraListener {
  // Default bootstrap class name and the init-parameter key that overrides it.
  val DefaultLifeCycle = "Scalatra"
  val LifeCycleKey = "org.scalatra.LifeCycle"
}
| louk/scalatra | core/src/main/scala/org/scalatra/servlet/ScalatraListener.scala | Scala | bsd-2-clause | 1,110 |
import scala.concurrent.Future
import init.Init
import org.apache.commons.logging.LogFactory
import play.api._
import play.api.mvc._
import play.api.mvc.Results._
/**
 * Play global settings: maps framework-level failures to HTTP responses and
 * ties application start/stop to Init.
 */
object Global extends GlobalSettings {

  // NOTE(review): `Log` appears unused — the Play `Logger` is used below instead.
  private[this] final val Log = LogFactory.getLog(this.getClass)

  // Unknown route: bare 404.
  override def onHandlerNotFound(request: RequestHeader) = {
    Future.successful(NotFound)
  }

  override def onBadRequest(request: RequestHeader, error: String) = {
    Future.successful(BadRequest("Bad Request: " + error))
  }

  // Unhandled exception: log it and surface the message as a 500.
  override def onError(request: RequestHeader, ex: Throwable) = {
    Logger.error(ex.toString, ex)
    Future.successful(InternalServerError(ex.toString))
  }

  override def onStart(app: Application) {
    Init.init()
  }

  override def onStop(app: Application) {
    Init.shutdown()
  }
}
| gilt/cave | scheduler/app/Global.scala | Scala | mit | 798 |
/* Copyright 2009-2021 EPFL, Lausanne */
package stainless
package termination
/** Result of running termination checking over a program's functions. */
trait TerminationAnalysis extends AbstractAnalysis {
  val program: Program
  import program._
  import program.trees._

  type Duration = Long
  type Record = TerminationReport.Status

  // Per-function termination verdicts.
  val results: Map[FunDef, Record]
  val sources: Set[Identifier] // set of functions that were considered for the analysis

  override val name: String = TerminationComponent.name

  override type Report = TerminationReport
  override def toReport = new TerminationReport(records, sources)

  // One report record per analyzed function, tagged with its source function.
  private lazy val records = results.toSeq map { case (fd, status) =>
    TerminationReport.Record(fd.id, fd.getPos, status, derivedFrom = fd.source)
  }
}
| epfl-lara/stainless | core/src/main/scala/stainless/termination/TerminationAnalysis.scala | Scala | apache-2.0 | 717 |
/**
* Licensed to the Minutemen Group under one or more contributor license
* agreements. See the COPYRIGHT file distributed with this work for
* additional information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package silhouette.authenticator
import java.time.{ Clock, Instant, ZoneId }
import org.specs2.concurrent.ExecutionEnv
import org.specs2.mock.Mockito
import org.specs2.mutable.Specification
import org.specs2.specification.Scope
import silhouette.LoginInfo
import silhouette.RichInstant._
import silhouette.authenticator.Validator.{ Invalid, Valid }
import silhouette.http.{ Fake, RequestPipeline, SilhouetteRequest }
import silhouette.specs2.WaitPatience
import scala.concurrent.Future
import scala.concurrent.duration._
/**
* Test case for the [[Authenticator]] class.
*
* @param ev The execution environment.
*/
class AuthenticatorSpec(implicit ev: ExecutionEnv) extends Specification with Mockito with WaitPatience {

  "The `expiresIn` method" should {
    "return the duration the authenticator expires in" in new Context {
      // Clock pinned 10s *before* the expiry instant -> 10s remaining.
      authenticator.copy(expires = Some(instant)).expiresIn(Clock.fixed(instant.minusSeconds(10), UTC)) must
        beSome(10.seconds)
    }
    "return a negative duration if the authenticator is already expired" in new Context {
      authenticator.copy(expires = Some(instant)).expiresIn(Clock.fixed(instant.plusSeconds(10), UTC)) must
        beSome(-10.seconds)
    }
  }

  "The `touchedAt` method" should {
    "return the duration the authenticator was last touched at" in new Context {
      authenticator.copy(touched = Some(instant)).touchedAt(Clock.fixed(instant.plusSeconds(10), UTC)) must
        beSome(10.seconds)
    }
    "return a negative duration if the authenticator wasn't used" in new Context {
      authenticator.copy(touched = Some(instant)).touchedAt(Clock.fixed(instant.minusSeconds(10), UTC)) must
        beSome(-10.seconds)
    }
  }

  "The `touch` method" should {
    "touch an authenticator" in new Context {
      authenticator.touch(clock).touched must beSome(instant)
    }
  }

  "The `withExpiry` method" should {
    "set an authenticator expiry" in new Context {
      authenticator.withExpiry(10.hours, clock).expires must beSome(clock.instant() + 10.hours)
    }
  }

  "The `withFingerPrint` method" should {
    "set a default fingerprint" in new Context {
      implicit val request: RequestPipeline[SilhouetteRequest] = Fake.request
      authenticator.withFingerPrint().fingerprint must beSome(request.fingerprint())
    }
    "set a custom fingerprint" in new Context {
      val fingerPrintGenerator = (_: RequestPipeline[SilhouetteRequest]) => "test.fingerprint"
      implicit val request: RequestPipeline[SilhouetteRequest] = Fake.request
      authenticator.withFingerPrint(fingerPrintGenerator).fingerprint must
        beSome(request.fingerprint(fingerPrintGenerator))
    }
  }

  "The `withTags` method" should {
    "add new tags" in new Context {
      authenticator.withTags("test1", "test2").tags should be equalTo Seq("test1", "test2")
    }
  }

  "The `isTouched` method" should {
    "return true if the authenticator was touched" in new Context {
      authenticator.touch(clock).isTouched should beTrue
    }
    "return false if the authenticator was not touched" in new Context {
      authenticator.isTouched should beFalse
    }
  }

  "The `isTaggedWith` method" should {
    "return true if the authenticator was tagged with the given tag" in new Context {
      authenticator.withTags("test").isTaggedWith("test") should beTrue
    }
    "return true if the authenticator was tagged with all the given tags" in new Context {
      authenticator.withTags("test1", "test2", "test3").isTaggedWith("test1", "test2") should beTrue
    }
    "return false if the authenticator was not tagged with the given tag" in new Context {
      authenticator.withTags("test").isTaggedWith("test1") should beFalse
    }
    "return false if the authenticator was not tagged with any of the given tags" in new Context {
      authenticator.withTags("test1", "test2").isTaggedWith("test1", "test3") should beFalse
    }
  }

  "The `isValid` method" should {
    "return Invalid if at least one validator fails" in new Context {
      val errors = Seq("error1", "error2")
      // One passing and one failing validator: the Invalid result must win.
      val validator1 = mock[Validator].smart
      val validator2 = mock[Validator].smart
      validator1.isValid(authenticator) returns Future.successful(Valid)
      validator2.isValid(authenticator) returns Future.successful(Invalid(errors))
      val validators = Set(validator1, validator2)
      authenticator.isValid(validators) must beEqualTo(Invalid(errors)).awaitWithPatience
    }
    "return Valid if all validators are successful" in new Context {
      val validator1 = mock[Validator].smart
      val validator2 = mock[Validator].smart
      validator1.isValid(authenticator) returns Future.successful(Valid)
      validator2.isValid(authenticator) returns Future.successful(Valid)
      val validators = Set(validator1, validator2)
      authenticator.isValid(validators) must beEqualTo(Valid).awaitWithPatience
    }
    "combine all error messages from all validators" in new Context {
      val validator1 = mock[Validator].smart
      val validator2 = mock[Validator].smart
      validator1.isValid(authenticator) returns Future.successful(Invalid(Seq("error1", "error2")))
      validator2.isValid(authenticator) returns Future.successful(Invalid(Seq("error3", "error4")))
      val validators = Set(validator1, validator2)
      authenticator.isValid(validators) must beEqualTo(
        Invalid(Seq("error1", "error2", "error3", "error4"))
      ).awaitWithPatience
    }
  }

  /**
   * The context.
   */
  trait Context extends Scope {

    /**
     * The UTC time zone.
     */
    val UTC = ZoneId.of("UTC")

    /**
     * An instant of time (fixed, so duration assertions are deterministic).
     */
    val instant = Instant.parse("2017-10-22T20:50:45.0Z")

    /**
     * A clock instance.
     */
    val clock: Clock = Clock.fixed(instant, UTC)

    /**
     * The authenticator instance to test.
     */
    val authenticator = Authenticator(
      id = "test-id",
      loginInfo = LoginInfo("credentials", "john@doe.com")
    )
  }
}
| mohiva/silhouette | modules/authenticator/src/test/scala/silhouette/authenticator/AuthenticatorSpec.scala | Scala | apache-2.0 | 6,768 |
// /////////////////////////////////////////// //
// Fureteur - https://github.com/gip/fureteur //
// /////////////////////////////////////////// //
//package fureteur.sync
// We are using Akka actors
import akka.actor._
import akka.event.Logging
//import akka.util.duration._
import scala.concurrent.duration._
/*import fureteur.collection.FIFO
import fureteur.control.Control*/
// Control messages exchanged between producer, processor and reseller actors.
abstract class Ctrl
// Request stats; the handler is invoked with them (gets stats out of the actor model).
case class StatsReq(handler: List[(String, String)] => Unit) extends Ctrl
// Stats delivered as a message.
case class Stats(l: List[(String, String)]) extends Ctrl
case class NoData() extends Ctrl
case class PseudoTimeout() extends Ctrl
// Request for up to n elements, to be answered to `req`.
case class DataReq(req: ActorRef, n: Int) extends Ctrl
// Batch of elements flowing into a processor.
case class DataIn[T](req: ActorRef, e: List[T]) extends Ctrl
// Batch of processed elements flowing out to a reseller.
case class DataOut[T](req: ActorRef, e: List[T]) extends Ctrl
// Generic processor base class
// The processor gets data of type T and process them into data of type U
/**
 * Pulls batches of `T` from `producer`, transforms each element with
 * `process`, and forwards batches of results (`U`) to `reseller` once
 * `thres_out` results have accumulated (or on receive timeout / input
 * exhaustion).
 */
abstract class genericProcessor[T, U](thres_in: Int, // Input threshold
                                      thres_out: Int, // Output threshold
                                      producer: ActorRef,
                                      reseller: ActorRef,
                                      timeout: Option[Long]
                                     ) extends Actor {
  val log = Logging(context.system, this)
  // FIX: the original `timeout match { case Some(ms) => ... }` was not
  // exhaustive and threw a MatchError whenever `timeout` was None.
  timeout.foreach(ms => context.setReceiveTimeout(ms milliseconds))
  // Input queue; when it drains below thres_in, `request` asks for more data.
  val fifo = new FIFO[T](Some(thres_in, request))
  var fifo_max = 0            // queue high-water mark, reported in stats
  var processed = List[U]()   // results accumulated since the last send
  var processed_count = 0
  var total_count = 0
  var partial_send_count = 0  // sends triggered with fewer than thres_out items
  var send_count = 0
  /** Ask the producer for n more elements. */
  def request(n: Int): Unit = {
    producer ! DataReq(self, n)
  }
  override def preStart(): Unit = {
    fifo.init()
  }
  def receive = {
    // NOTE: the List[T] element type is erased at runtime; only the List
    // shape is actually checked here.
    case DataIn(_, elems: List[T]) => {
      fifo pushn elems
      processShift()
    }
    case StatsReq(handler) => handler(stats())
    case ReceiveTimeout => {
      processShift()
    }
    case _ => log.info("received unknown message")
  }
  /** Drain the input queue, processing elements and emitting full batches. */
  def processShift(): Unit = {
    if (fifo.isEmpty()) {
      // No input: flush whatever partial batch we are holding.
      if (processed_count > 0) {
        send()
      }
      return
    }
    if (fifo.length > fifo_max) {
      fifo_max = fifo.length()
    }
    // FIX: drain the queue explicitly. The original looped forever and relied
    // on FIFO.pop() throwing NoSuchElementException to terminate, which also
    // silently swallowed any NoSuchElementException raised inside process(e).
    while (!fifo.isEmpty()) {
      val e = fifo.pop()
      processed = process(e) :: processed // Queueing up the answers
      processed_count += 1
      if (processed_count >= thres_out) {
        send()
      }
      total_count += 1
    }
  }
  /** Forward the accumulated batch to the reseller and reset the buffer. */
  def send() = {
    reseller ! DataOut(self, processed)
    processed = List[U]()
    send_count += 1
    if (processed_count < thres_out) {
      partial_send_count += 1
    }
    processed_count = 0
  }
  def stats(): List[(String, String)] = {
    var l = getStats()
    l = ("total_processed", total_count.toString) :: l
    l = ("fifo_max_length", fifo_max.toString) :: l
    l = ("partial_send_count", partial_send_count.toString) :: l
    l = ("send_count", send_count.toString) :: l
    l
  }
  /** Transform a single input element; implemented by concrete processors. */
  def process(in: T): U
  /** Subclass hook contributing extra stats entries. */
  def getStats(): List[(String, String)] = {
    List()
  }
}
// Generic producer base class (working in batches)
//
/**
 * Produces batches of `T` for downstream consumers. Keeps up to `thres`
 * batches buffered in a FIFO and answers each `DataReq` message with one
 * batch; `control` tracks the number of in-flight elements.
 */
abstract class genericBatchProducer[T](size: Int, // Size of a batch
                                       thres: Int, // Treshold (expressed in number of batches)
                                       timeout: Option[Long], // Timeout in ms
                                       control: Control // Control class
                                      ) extends Actor {
  val log = Logging(context.system, this)
  // FIX: the original non-exhaustive `timeout match { case Some(ms) => ... }`
  // threw a MatchError whenever `timeout` was None.
  timeout.foreach(ms => context.setReceiveTimeout(ms milliseconds))
  var timeouts = 0
  var batches_sent = 0
  // Buffered batches; dropping below `thres` triggers singleRequest.
  val fifo = new FIFO[List[T]](Some(thres, singleRequest))
  // Requests waiting for a batch to become available.
  val reqfifo = new FIFO[ActorRef](None)
  /** Fetch one more batch if the buffer is at or below the threshold. */
  def singleRequest(n: Int): Unit = {
    if (fifo.length <= thres) {
      requestBatch()
    }
  }
  /** Fetch batches until the buffer is above the threshold or the source dries up. */
  def multiRequest(): Unit = {
    if (fifo.length <= thres && requestBatch()) {
      multiRequest()
    }
  }
  def receive = {
    case DataReq(req, _) => {
      reqfifo push req
      handleRequests()
    }
    case ReceiveTimeout => {
      timeouts += 1
      handleRequests()
    }
    case StatsReq(handler) => handler(stats())
    case _ => log.info("received unknown message")
  }
  /** Pair up waiting requesters with buffered batches. */
  def handleRequests(): Unit = {
    // if(!control.acceptInput()) { return }
    (fifo.isEmpty(), reqfifo.isEmpty()) match {
      case (false, false) => {
        // Both a batch and a requester available: deliver and recurse.
        batches_sent += 1
        reqfifo.pop() ! DataIn(self, fifo.pop())
        handleRequests()
      }
      case (true, false) => {
        // Requester waiting but no data: try to refill once.
        singleRequest(0)
        if (!fifo.isEmpty) {
          handleRequests()
        }
      }
      case (_, true) => multiRequest()
    }
  }
  override def preStart(): Unit = {
    init()
    multiRequest()
  }
  /** Pull one batch from the source into the buffer; false when exhausted. */
  def requestBatch(): Boolean = {
    getBatch(size) match {
      case Some(l: List[T]) => {
        fifo push l
        control.addCounter(l.length)
        true
      }
      case None => false
    }
  }
  def stats(): List[(String, String)] = {
    ("timeouts", timeouts.toString) :: ("batches_sent", batches_sent.toString) :: getStats()
  }
  /** Produce the next batch of up to n elements. This function MUST not block. */
  def getBatch(n: Int): Option[List[T]]
  /** Subclass hook contributing extra stats entries. */
  def getStats(): List[(String, String)] = {
    List()
  }
  def init() = {}
}
// Generic batch reseller
//
/**
 * Receives processed batches (`DataOut`) and hands them to `resell`,
 * decrementing the shared in-flight counter on `control` afterwards.
 */
abstract class genericBatchReseller[T](control: Control) extends Actor {
  val log = Logging(context.system, this)
  // Total number of elements resold so far.
  var c = 0
  def receive = {
    case DataOut(req, out: List[T]) => {
      val sz = out.length
      c += sz
      resell(out)
      control.subCounter(sz)
    }
    case StatsReq(handler) => handler(stats())
    case _ => log.info("received unknown message")
  }
  def stats(): List[(String, String)] = {
    getStats()
  }
  /** Deliver a batch downstream; implemented by concrete resellers. */
  def resell(d: List[T])
  /** Subclass hook contributing extra stats entries. */
  def getStats(): List[(String, String)] = {
    List()
  }
}
| njosephef/fetcher | src/main/scala/sync.scala | Scala | mit | 6,079 |
import org.junit.Assert._
import org.junit.Test
import org.junit.Ignore
import Chisel._
import chiselutils.utils._
import scala.util.Random
import scala.collection.mutable.ArrayBuffer
class SerializerSuite extends TestSuite {
  // NOTE(review): method name has a typo ("Serailizer"); harmless since JUnit
  // discovers tests by the @Test annotation, not by name.
  @Test def testSerailizer {
    // Wrapper module exposing the Serializer under test through a Decoupled
    // input, a flush signal and a Valid output.
    class UserMod( val vecInSize : Int, val vecOutSize : Int, val bitWidth : Int) extends Module {
      val io = new Bundle {
        val dataIn = Decoupled( Vec.fill( vecInSize ) { UInt(INPUT, bitWidth) } ).flip
        val flush = Bool(INPUT)
        val dataOut = Valid( Vec.fill( vecOutSize ) { UInt(OUTPUT, bitWidth) } )
        val flushed = Bool(OUTPUT)
      }
      val genType = UInt( width = bitWidth )
      val serMod = Module(new Serializer(genType, vecInSize, vecOutSize))
      io <> serMod.io
    }
    // Drives randomized valid/flush patterns and checks that the serialized
    // output preserves element order (tracking positions via outCount).
    class UserTests(c : UserMod, cycles : Int) extends Tester(c) {
      val myRand = new Random
      val inputData = ArrayBuffer.fill( cycles ) { ArrayBuffer.fill( c.vecInSize ) { myRand.nextInt( 1 << c.bitWidth ) } }
      var count = 0;
      var countOld = count
      var lastVld = false
      var outCount = 0;
      var flushOutCount = count*c.vecInSize
      while ( count < cycles ) {
        // Once valid is asserted, keep it asserted until the word is accepted.
        val inputVld = if ( count != countOld || !lastVld) (myRand.nextInt(5) != 0) else lastVld
        lastVld = inputVld
        val flush = (myRand.nextInt(15) == 0)
        if ( flush ) {
          // Elements past this point are allowed to be dropped by the flush.
          flushOutCount = { if (inputVld) (count + 1)*c.vecInSize else count*c.vecInSize }
          println("flushOutCount = " + flushOutCount)
        }
        poke( c.io.flush, flush )
        poke( c.io.dataIn.valid, inputVld )
        (0 until c.vecInSize).foreach( i => poke( c.io.dataIn.bits(i), inputData(count)(i) ) )
        val outputValid = peek( c.io.dataOut.valid )
        val flushed = ( peek( c.io.flushed ) == BigInt(1) )
        if ( outputValid == 1) {
          for ( i <- 0 until c.vecOutSize ) {
            println("outCount = " + outCount)
            val outCyc = (outCount - (outCount % c.vecInSize)) / c.vecInSize
            if ( ( flushed && ( outCount < flushOutCount ) ) || !flushed ) {
              expect( c.io.dataOut.bits(i), inputData( outCyc )( outCount - (outCyc * c.vecInSize )) )
              outCount = outCount + 1
            } else
              peek( c.io.dataOut.bits(i) )
          }
        }
        val ready = peek(c.io.dataIn.ready)
        countOld = count
        if ( ready == 1 && inputVld ) {
          count = count + 1
        }
        step(1)
      }
    }
    // Sweep bit widths and vector in/out size combinations.
    for ( bitWidth <- 1 until 10 ) {
      for ( vecOutSize <- 1 until 20 ) {
        for ( vecInSize <- 1 until 20 ) {
          chiselMainTest(Array("--genHarness", "--compile", "--test", "--backend", "c",
            "--targetDir", dir.getPath.toString()), () => Module(
            new UserMod( vecInSize, vecOutSize, bitWidth )) ) { c => new UserTests(c, scala.math.max( vecOutSize, vecInSize )*5 ) }
        }
      }
    }
  }
}
| da-steve101/chisel-utils | src/test/scala/SerializerSuite.scala | Scala | lgpl-3.0 | 2,923 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.runtime.stream.sql
import org.apache.flink.api.scala._
import org.apache.flink.table.planner.factories.TestValuesTableFactory
import org.apache.flink.table.planner.runtime.utils.{StreamingWithStateTestBase, TestData, TestingRetractSink}
import org.apache.flink.table.planner.runtime.utils.StreamingWithStateTestBase.StateBackendMode
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.types.{Row, RowKind}
import org.junit.Assert.assertEquals
import org.junit.{Before, Test}
import org.junit.runner.RunWith
import org.junit.runners.Parameterized
import scala.collection.JavaConversions._
import scala.collection.Seq
@RunWith(classOf[Parameterized])
class ChangelogSourceITCase(state: StateBackendMode) extends StreamingWithStateTestBase(state) {

  // Register the shared changelog rows with the test "values" connector.
  val dataId: String = TestValuesTableFactory.registerData(TestData.userChangelog)

  @Before
  override def before(): Unit = {
    super.before()
    // Source table backed by the registered changelog; accepts inserts,
    // update-before/after and deletes ('I,UA,UB,D').
    val ddl =
      s"""
         |CREATE TABLE user_logs (
         | user_id STRING,
         | user_name STRING,
         | email STRING,
         | balance DECIMAL(18,2),
         | balance2 AS balance * 2
         |) WITH (
         | 'connector' = 'values',
         | 'data-id' = '$dataId',
         | 'changelog-mode' = 'I,UA,UB,D'
         |)
         |""".stripMargin
    tEnv.executeSql(ddl)
  }

  @Test
  def testChangelogSourceAndToRetractStream(): Unit = {
    val result = tEnv.sqlQuery("SELECT * FROM user_logs").toRetractStream[Row]
    val sink = new TestingRetractSink()
    result.addSink(sink).setParallelism(result.parallelism)
    env.execute()
    // Final state after all updates/deletes have been applied.
    val expected = Seq(
      "user1,Tom,tom123@gmail.com,8.10,16.20",
      "user3,Bailey,bailey@qq.com,9.99,19.98",
      "user4,Tina,tina@gmail.com,11.30,22.60")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }

  @Test
  def testChangelogSourceAndUpsertSink(): Unit = {
    val sinkDDL =
      s"""
         |CREATE TABLE user_sink (
         | user_id STRING PRIMARY KEY NOT ENFORCED,
         | user_name STRING,
         | email STRING,
         | balance DECIMAL(18,2),
         | balance2 DECIMAL(18,2)
         |) WITH (
         | 'connector' = 'values',
         | 'sink-insert-only' = 'false'
         |)
         |""".stripMargin
    val dml =
      s"""
         |INSERT INTO user_sink
         |SELECT * FROM user_logs
         |""".stripMargin
    tEnv.executeSql(sinkDDL)
    execInsertSqlAndWaitResult(dml)
    val expected = Seq(
      "user1,Tom,tom123@gmail.com,8.10,16.20",
      "user3,Bailey,bailey@qq.com,9.99,19.98",
      "user4,Tina,tina@gmail.com,11.30,22.60")
    assertEquals(expected.sorted, TestValuesTableFactory.getResults("user_sink").sorted)
  }

  @Test
  def testAggregateOnChangelogSource(): Unit = {
    val query =
      s"""
         |SELECT count(*), sum(balance), max(email)
         |FROM user_logs
         |""".stripMargin
    val result = tEnv.sqlQuery(query).toRetractStream[Row]
    val sink = new TestingRetractSink()
    result.addSink(sink).setParallelism(result.parallelism)
    env.execute()
    val expected = Seq("3,29.39,tom123@gmail.com")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }

  @Test
  def testAggregateOnChangelogSourceAndUpsertSink(): Unit = {
    val sinkDDL =
      s"""
         |CREATE TABLE user_sink (
         | `scope` STRING,
         | cnt BIGINT,
         | sum_balance DECIMAL(18,2),
         | max_email STRING,
         | PRIMARY KEY (`scope`) NOT ENFORCED
         |) WITH (
         | 'connector' = 'values',
         | 'sink-insert-only' = 'false'
         |)
         |""".stripMargin
    val dml =
      s"""
         |INSERT INTO user_sink
         |SELECT 'ALL', count(*), sum(balance), max(email)
         |FROM user_logs
         |GROUP BY 'ALL'
         |""".stripMargin
    tEnv.executeSql(sinkDDL)
    execInsertSqlAndWaitResult(dml)
    val expected = Seq("ALL,3,29.39,tom123@gmail.com")
    assertEquals(expected.sorted, TestValuesTableFactory.getResults("user_sink").sorted)
  }

  @Test
  def testAggregateOnInsertDeleteChangelogSource(): Unit = {
    // only contains INSERT and DELETE
    // Rewrite UPDATE_BEFORE/UPDATE_AFTER into DELETE/INSERT pairs.
    val userChangelog = TestData.userChangelog.map { row =>
      row.getKind match {
        case RowKind.INSERT | RowKind.DELETE => row
        case RowKind.UPDATE_BEFORE =>
          val ret = Row.copy(row)
          ret.setKind(RowKind.DELETE)
          ret
        case RowKind.UPDATE_AFTER =>
          val ret = Row.copy(row)
          ret.setKind(RowKind.INSERT)
          ret
      }
    }
    val dataId = TestValuesTableFactory.registerData(userChangelog)
    val ddl =
      s"""
         |CREATE TABLE user_logs2 (
         | user_id STRING,
         | user_name STRING,
         | email STRING,
         | balance DECIMAL(18,2)
         |) WITH (
         | 'connector' = 'values',
         | 'data-id' = '$dataId',
         | 'changelog-mode' = 'I,D'
         |)
         |""".stripMargin
    tEnv.executeSql(ddl)
    val query =
      s"""
         |SELECT count(*), sum(balance), max(email)
         |FROM user_logs2
         |""".stripMargin
    val result = tEnv.sqlQuery(query).toRetractStream[Row]
    val sink = new TestingRetractSink()
    result.addSink(sink).setParallelism(result.parallelism)
    env.execute()
    // Same aggregate result as the UA/UB variant above.
    val expected = Seq("3,29.39,tom123@gmail.com")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }
}
| tzulitai/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/ChangelogSourceITCase.scala | Scala | apache-2.0 | 6,252 |
/**
* CopyrightΒ (c)Β 2016 IntelΒ CorporationΒ
*
* LicensedΒ underΒ theΒ ApacheΒ License,Β VersionΒ 2.0Β (theΒ "License");
* youΒ mayΒ notΒ useΒ thisΒ fileΒ exceptΒ inΒ complianceΒ withΒ theΒ License.
* YouΒ mayΒ obtainΒ aΒ copyΒ ofΒ theΒ LicenseΒ at
*
* Β Β Β Β Β http://www.apache.org/licenses/LICENSE-2.0
*
* UnlessΒ requiredΒ byΒ applicableΒ lawΒ orΒ agreedΒ toΒ inΒ writing,Β software
* distributedΒ underΒ theΒ LicenseΒ isΒ distributedΒ onΒ anΒ "ASΒ IS"Β BASIS,
* WITHOUTΒ WARRANTIESΒ ORΒ CONDITIONSΒ OFΒ ANYΒ KIND,Β eitherΒ expressΒ orΒ implied.
* SeeΒ theΒ LicenseΒ forΒ theΒ specificΒ languageΒ governingΒ permissionsΒ and
* limitationsΒ underΒ theΒ License.
*/
package org.trustedanalytics.sparktk.dicom.internal.ops
import org.trustedanalytics.sparktk.dicom.internal.{ BaseDicom, DicomTransform, DicomState }
/** Mixes the `dropRowsByKeywords` operation into DICOM collections. */
trait DropRowsByKeywordsTransform extends BaseDicom {
  /**
   * Drop the rows based on Map(keyword, value) from column holding xml string
   *
   * @param keywordsValuesMap Map with keyword and associated value from xml string
   */
  def dropRowsByKeywords(keywordsValuesMap: Map[String, String]) = {
    execute(DropRowsByKeywords(keywordsValuesMap))
  }
}
/** Transform implementing the drop-rows-by-keywords operation. */
case class DropRowsByKeywords(keywordsValuesMap: Map[String, String]) extends DicomTransform {
  override def work(state: DicomState): DicomState = {
    // NOTE(review): the return value of this call is discarded; this only has
    // an effect if filterOrDropByKeywordsImpl mutates state.metadata in place —
    // confirm, otherwise the drop below operates on unfiltered metadata.
    FilterByKeywords.filterOrDropByKeywordsImpl(state.metadata, keywordsValuesMap, isDropRows = true)
    // Project the metadata frame down to its "id" column…
    val filteredIdFrame = state.metadata.copy(Some(Map("id" -> "id")))
    // …and restrict the pixeldata frame to the surviving ids.
    val filteredPixeldata = filteredIdFrame.joinInner(state.pixeldata, List("id"))
    DicomState(state.metadata, filteredPixeldata)
  }
}
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala
package reflect
package runtime
/**
* This symbol table trait fills in the definitions so that class information is obtained by reflection.
* It can be used either from a reflexive universe (class scala.reflect.runtime.JavaUniverse), or else from
* a runtime compiler that uses reflection to get a class information (class scala.tools.reflect.ReflectGlobal)
*/
private[scala] trait SymbolTable extends internal.SymbolTable with JavaMirrors with SymbolLoaders with SynchronizedOps with Gil with ThreadLocalStorage {
  // Progress logging, printed only when -verbose is enabled.
  def info(msg: => String) =
    if (settings.verbose) println("[reflect-compiler] "+msg)
  // Emitted only under debug settings; by-name msg avoids building the string otherwise.
  def debugInfo(msg: => String) =
    if (settings.isDebug) info(msg)
  /** Declares that this is a runtime reflection universe.
   *
   *  This means that we can make certain assumptions to optimize the universe.
   *  For example, we may auto-initialize symbols on flag and annotation requests
   *  (see `shouldTriggerCompleter` below for more details).
   *
   *  On the other hand, this also means that usage scenarios of the universe
   *  will differ from the conventional ones. For example, we have to do additional cleanup
   *  in order to prevent memory leaks: https://groups.google.com/group/scala-internals/browse_thread/thread/eabcf3d406dab8b2.
   */
  override def isCompilerUniverse = false
}
| lrytz/scala | src/reflect/scala/reflect/runtime/SymbolTable.scala | Scala | apache-2.0 | 1,629 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.repl
import java.io.File
import java.net.{URI, URL, URLClassLoader}
import java.nio.channels.{FileChannel, ReadableByteChannel}
import java.nio.charset.StandardCharsets
import java.nio.file.{Paths, StandardOpenOption}
import java.util
import java.util.Collections
import javax.tools.{JavaFileObject, SimpleJavaFileObject, ToolProvider}
import scala.io.Source
import scala.language.implicitConversions
import com.google.common.io.Files
import org.mockito.Matchers.anyString
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.scalatest.BeforeAndAfterAll
import org.scalatest.mockito.MockitoSugar
import org.apache.spark._
import org.apache.spark.internal.Logging
import org.apache.spark.rpc.RpcEnv
import org.apache.spark.util.Utils
class ExecutorClassLoaderSuite
  extends SparkFunSuite
  with BeforeAndAfterAll
  with MockitoSugar
  with Logging {

  val childClassNames = List("ReplFakeClass1", "ReplFakeClass2")
  val parentClassNames = List("ReplFakeClass1", "ReplFakeClass2", "ReplFakeClass3")
  val parentResourceNames = List("fake-resource.txt")
  var tempDir1: File = _
  var tempDir2: File = _
  var url1: String = _
  var urls2: Array[URL] = _

  override def beforeAll() {
    super.beforeAll()
    tempDir1 = Utils.createTempDir()
    tempDir2 = Utils.createTempDir()
    url1 = tempDir1.toURI.toURL.toString
    urls2 = List(tempDir2.toURI.toURL).toArray
    // Classes compiled into tempDir1 (the "child" URL) report version "1";
    // classes/resources in tempDir2 (the parent loader) report version "2".
    childClassNames.foreach(TestUtils.createCompiledClass(_, tempDir1, "1"))
    parentResourceNames.foreach { x =>
      Files.write("resource".getBytes(StandardCharsets.UTF_8), new File(tempDir2, x))
    }
    parentClassNames.foreach(TestUtils.createCompiledClass(_, tempDir2, "2"))
  }

  override def afterAll() {
    try {
      Utils.deleteRecursively(tempDir1)
      Utils.deleteRecursively(tempDir2)
      SparkEnv.set(null)
    } finally {
      super.afterAll()
    }
  }

  test("child over system classloader") {
    // JavaFileObject for scala.Option class
    val scalaOptionFile = new SimpleJavaFileObject(
      URI.create(s"string:///scala/Option.java"),
      JavaFileObject.Kind.SOURCE) {
      override def getCharContent(ignoreEncodingErrors: Boolean): CharSequence = {
        "package scala; class Option {}"
      }
    }
    // compile fake scala.Option class
    ToolProvider
      .getSystemJavaCompiler
      .getTask(null, null, null, null, null, Collections.singletonList(scalaOptionFile)).call()
    // create 'scala' dir in tempDir1
    val scalaDir = new File(tempDir1, "scala")
    assert(scalaDir.mkdir(), s"Failed to create 'scala' directory in $tempDir1")
    // move the generated class into scala dir
    val filename = "Option.class"
    val result = new File(filename)
    assert(result.exists(), "Compiled file not found: " + result.getAbsolutePath)
    val out = new File(scalaDir, filename)
    Files.move(result, out)
    assert(out.exists(), "Destination file not moved: " + out.getAbsolutePath)
    // construct class loader tree
    val parentLoader = new URLClassLoader(urls2, null)
    val classLoader = new ExecutorClassLoader(
      new SparkConf(), null, url1, parentLoader, true)
    // load 'scala.Option', using ClassforName to do the exact same behavior as
    // what JavaDeserializationStream does
    // scalastyle:off classforname
    val optionClass = Class.forName("scala.Option", false, classLoader)
    // scalastyle:on classforname
    assert(optionClass.getClassLoader == classLoader,
      "scala.Option didn't come from ExecutorClassLoader")
  }

  test("child first") {
    // userClassPathFirst = true: the child URL (version "1") must win.
    val parentLoader = new URLClassLoader(urls2, null)
    val classLoader = new ExecutorClassLoader(new SparkConf(), null, url1, parentLoader, true)
    val fakeClass = classLoader.loadClass("ReplFakeClass2").getConstructor().newInstance()
    val fakeClassVersion = fakeClass.toString
    assert(fakeClassVersion === "1")
  }

  test("parent first") {
    val parentLoader = new URLClassLoader(urls2, null)
    val classLoader = new ExecutorClassLoader(new SparkConf(), null, url1, parentLoader, false)
    val fakeClass = classLoader.loadClass("ReplFakeClass1").getConstructor().newInstance()
    val fakeClassVersion = fakeClass.toString
    assert(fakeClassVersion === "2")
  }

  test("child first can fall back") {
    // ReplFakeClass3 only exists on the parent classpath.
    val parentLoader = new URLClassLoader(urls2, null)
    val classLoader = new ExecutorClassLoader(new SparkConf(), null, url1, parentLoader, true)
    val fakeClass = classLoader.loadClass("ReplFakeClass3").getConstructor().newInstance()
    val fakeClassVersion = fakeClass.toString
    assert(fakeClassVersion === "2")
  }

  test("child first can fail") {
    val parentLoader = new URLClassLoader(urls2, null)
    val classLoader = new ExecutorClassLoader(new SparkConf(), null, url1, parentLoader, true)
    intercept[java.lang.ClassNotFoundException] {
      classLoader.loadClass("ReplFakeClassDoesNotExist").getConstructor().newInstance()
    }
  }

  test("resource from parent") {
    val parentLoader = new URLClassLoader(urls2, null)
    val classLoader = new ExecutorClassLoader(new SparkConf(), null, url1, parentLoader, true)
    val resourceName: String = parentResourceNames.head
    val is = classLoader.getResourceAsStream(resourceName)
    assert(is != null, s"Resource $resourceName not found")
    val bufferedSource = Source.fromInputStream(is, "UTF-8")
    Utils.tryWithSafeFinally {
      val content = bufferedSource.getLines().next()
      assert(content.contains("resource"), "File doesn't contain 'resource'")
    } {
      bufferedSource.close()
    }
  }

  test("resources from parent") {
    val parentLoader = new URLClassLoader(urls2, null)
    val classLoader = new ExecutorClassLoader(new SparkConf(), null, url1, parentLoader, true)
    val resourceName: String = parentResourceNames.head
    val resources: util.Enumeration[URL] = classLoader.getResources(resourceName)
    assert(resources.hasMoreElements, s"Resource $resourceName not found")
    val bufferedSource = Source.fromInputStream(resources.nextElement().openStream())
    Utils.tryWithSafeFinally {
      val fileReader = bufferedSource.bufferedReader()
      assert(fileReader.readLine().contains("resource"), "File doesn't contain 'resource'")
    } {
      bufferedSource.close()
    }
  }

  test("fetch classes using Spark's RpcEnv") {
    // The mocked RpcEnv serves class files straight out of tempDir1.
    val env = mock[SparkEnv]
    val rpcEnv = mock[RpcEnv]
    when(env.rpcEnv).thenReturn(rpcEnv)
    when(rpcEnv.openChannel(anyString())).thenAnswer(new Answer[ReadableByteChannel]() {
      override def answer(invocation: InvocationOnMock): ReadableByteChannel = {
        val uri = new URI(invocation.getArguments()(0).asInstanceOf[String])
        val path = Paths.get(tempDir1.getAbsolutePath(), uri.getPath().stripPrefix("/"))
        FileChannel.open(path, StandardOpenOption.READ)
      }
    })
    val classLoader = new ExecutorClassLoader(new SparkConf(), env, "spark://localhost:1234",
      getClass().getClassLoader(), false)
    val fakeClass = classLoader.loadClass("ReplFakeClass2").getConstructor().newInstance()
    val fakeClassVersion = fakeClass.toString
    assert(fakeClassVersion === "1")
    intercept[java.lang.ClassNotFoundException] {
      classLoader.loadClass("ReplFakeClassDoesNotExist").getConstructor().newInstance()
    }
  }
}
| facaiy/spark | repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala | Scala | apache-2.0 | 8,158 |
package utils
import com.mongodb.spark.MongoConnector
import com.mongodb.spark.config.WriteConfig
import org.apache.spark.{SparkConf, SparkContext}
/**
* Created by jyothi on 11/12/16.
*/
@deprecated
trait MongoSparkContext {

  /**
   * Builds a local SparkContext wired for the given MongoDB database and
   * collection (same URI is used for input and output).
   *
   * @param db         Database Name
   * @param collection Collection Name
   * @param host       Host name (optional)
   * @return a configured SparkContext
   */
  def getMongoSparkContext(db: String, collection: String, host: String = "localhost"): SparkContext = {
    val uri: String = s"mongodb://$host/$db.$collection"
    val appName = "SparkMongoScalaPlay"
    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(appName)
      .set("spark.app.id", appName)
      .set("spark.mongodb.input.uri", uri)
      .set("spark.mongodb.output.uri", uri)
      .set("spark.driver.allowMultipleContexts", "true")
    val sparkContext = new SparkContext(sparkConf)
    // Touch the database once so any bootstrapping hooks can run.
    MongoConnector(sparkContext).withDatabaseDo(WriteConfig(sparkContext), { database => /*database.drop()//drops the entire db*/ }) //for bootstrapping if any
    sparkContext
  }
}
| arajajyothibabu/MongoDB-Cassandra-Migration | app/utils/MongoSparkContext.scala | Scala | mit | 1,009 |
/*
* Copyright 2016 OrgSync.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orgsync.oskr.events.streams
import java.time.{Duration, Instant}
import java.util.UUID
import com.orgsync.oskr.events.messages.{Message, ExpandedPart}
import com.orgsync.oskr.events.streams.grouping.ExpandedPartGroupingWindows
import org.apache.flink.api.scala._
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.scala.DataStream
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.util.Collector
import org.json4s.JsonAST.{JArray, JValue}
import org.threeten.extra.Interval
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
/**
 * Builds the stream stage that folds individual [[ExpandedPart]]s into
 * aggregated [[Message]]s, keyed by (recipient id, grouping key).
 *
 * Parts are windowed by a custom gap-based assigner (see
 * [[ExpandedPartGroupingWindows]]); late parts are still merged in up to
 * `allowedLateness` after the window closes.
 */
class GroupStream(parameters: Configuration) {
  // Gap between parts that still land in the same window (ISO-8601, default 5 min).
  private val groupingGap = Duration.parse(parameters.getString("groupingGap", "PT5M"))

  // How long after window end late events are still accepted (default 1 hour).
  private val allowedLateness = Time.milliseconds(
    Duration.parse(parameters.getString("allowedLateness", "PT1H")).toMillis
  )

  // Window function: folds every part of one window into a single Message.
  private val reducePartsWindow = (
    key         : (String, String),
    window      : TimeWindow,
    partIterable: Iterable[ExpandedPart],
    out         : Collector[Message]
  ) => {
    // Process parts in sentAt order so the accumulated fields are deterministic.
    val parts = partIterable.toList.sortBy(_.sentAt)
    val emittedAt = Instant.ofEpochMilli(window.getEnd)

    val idBuf = new StringBuilder              // feeds the deterministic message UUID
    val senderIds = mutable.Set[String]()
    var lastPart = Option.empty[ExpandedPart]  // last part supplies recipient/digestKey/templates
    val sentAt = mutable.TreeSet[Instant]()    // sorted, so first/last give the interval bounds
    val tags = mutable.Set[String]()
    val partIds = mutable.Set[String]()
    val partData = ListBuffer[JValue]()

    parts.foreach(part => {
      idBuf ++= part.recipient.id
      idBuf ++= part.id
      senderIds += part.senderId
      sentAt += part.sentAt
      tags ++= part.tags.getOrElse(Set())
      partIds += part.id
      partData += part.data
      lastPart = Option(part)
    })

    // Only emit if the window actually contained at least one part.
    lastPart.foreach(part => {
      // Name-based UUID over all (recipient, part) ids => stable across replays.
      val id = UUID.nameUUIDFromBytes(idBuf.toString.getBytes)
      val sentInterval = Interval.of(sentAt.firstKey, sentAt.lastKey)

      out.collect(Message(
        id, emittedAt, senderIds.toSet, part.recipient, sentInterval,
        part.digestKey, tags.toSet, part.templates, partIds.toSet,
        JArray(partData.toList)
      ))
    })
  }

  /** Wires the grouping window onto the incoming part stream. */
  def getStream(partStream: DataStream[ExpandedPart]): DataStream[Message] = {
    partStream
      .keyBy(p => (p.recipient.id, p.groupingKey.getOrElse("default")))
      .window(new ExpandedPartGroupingWindows(groupingGap))
      .allowedLateness(allowedLateness)
      .apply(reducePartsWindow).name("group_window")
      .uid("grouped_messages")
  }
}
| orgsync/oskr-events | src/main/scala/com/orgsync/oskr/events/streams/GroupStream.scala | Scala | apache-2.0 | 3,167 |
package lang.lightweightjava.ast
import name.namegraph.NameGraphExtended
import name.{Name, Nominal, Renaming}
/** Helpers for validating and generating identifier names. */
object AST {
  // Reserved words of the language. Hoisted to a val so the Set is allocated
  // once instead of being rebuilt on every isKeyword call.
  private val keywords = Set("this", "class", "public", "private", "extends", "return", "if", "else", "new", "null")

  /**
   * A legal name is non-empty, shorter than 256 characters, forms a valid
   * Java identifier (start char + part chars), and is not a reserved keyword.
   */
  def isLegalName(name: Name) = name.length > 0 && name.length < 256 &&
    Character.isJavaIdentifierStart(name.charAt(0)) &&
    name.forall(Character.isJavaIdentifierPart) && !isKeyword(name)

  /** Whether `name` is one of the language's reserved words. */
  def isKeyword(name: Name) = keywords.contains(name)

  /**
   * Generates a fresh name of the form `oldName + counter`, incrementing the
   * counter until the candidate does not occur in `usedNames`.
   */
  @scala.annotation.tailrec
  def genFreshName(usedNames: Set[Name], oldName: Name, counter : Int = 0) : Name = {
    val currentName = oldName + counter
    if (usedNames.contains(currentName)) genFreshName(usedNames, oldName, counter + 1)
    else currentName
  }
}
/** Base trait of all syntax-tree nodes; nodes are nominal (carry names). */
trait AST extends Nominal {
  /** Returns this node with `renaming` applied to its names. */
  override def rename(renaming: Renaming): AST

  /** Resolves the names in this node against the given class-name environment. */
  def resolveNames(nameEnvironment : ClassNameEnvironment): NameGraphExtended

  // Nominal's no-argument resolution delegates to an empty environment.
  override def resolveNames = resolveNames(Map())
}
| matthisk/hygienic-transformations | scala/src/main/scala/lang/lightweightjava/ast/AST.scala | Scala | lgpl-3.0 | 908 |
package org.springframework.core
import java.util.concurrent.Callable
package object task {

  /** Adapts a by-name block of side-effecting code to a [[java.lang.Runnable]]. */
  implicit def asRunnable(func: => Unit): Runnable = new Runnable {
    override def run(): Unit = func
  }

  /** Adapts a by-name computation to a [[java.util.concurrent.Callable]]. */
  implicit def asCallable[T](func: => T): Callable[T] = new Callable[T] {
    override def call(): T = func
  }
}
| orthanner/spring4scala | spring4scala-core/src/main/scala/org/springframework/core/task/package.scala | Scala | mit | 295 |
package com.pwootage.metroidprime.formats.mrea
import com.pwootage.metroidprime.formats.BinarySerializable
import com.pwootage.metroidprime.formats.io.PrimeDataFile
/**
 * Header of one compressed data block inside an MREA file.
 *
 * Fields default to -1 ("not yet populated") until read() runs or the
 * caller assigns them. The field order in write()/read() defines the
 * on-disk layout and must stay in sync between the two methods.
 */
class MREACompressedBlockHeader extends BinarySerializable {
  var bufferSize: Int = -1
  var uncompressedSize: Int = -1
  var compressedSize: Int = -1
  var dataSectionCount: Int = -1

  // Serializes the four 32-bit fields in layout order.
  override def write(f: PrimeDataFile): Unit = {
    f.write32(bufferSize)
    f.write32(uncompressedSize)
    f.write32(compressedSize)
    f.write32(dataSectionCount)
  }

  // Populates the four fields from the stream in the same order write() emits them.
  override def read(f: PrimeDataFile): Unit = {
    bufferSize = f.read32()
    uncompressedSize = f.read32()
    compressedSize = f.read32()
    dataSectionCount = f.read32()
  }
}
| Pwootage/prime-patcher | src/main/scala/com/pwootage/metroidprime/formats/mrea/MREACompressedBlockHeader.scala | Scala | gpl-3.0 | 709 |
package utils
import models.{Folder, Link}
/**
* Avoid the creation of a pickler every time 'read' or 'write' is called
*/
object ImplicitPicklers {
  // Derived upickle reader/writer pairs (macroRW yields both) for shared models.
  implicit val folderPkl = upickle.default.macroRW[Folder]
  implicit val linkPkl = upickle.default.macroRW[Link]
  implicit val reqResPkl = upickle.default.macroRW[RequestResult]
}
| agoetschm/linkmanager | shared/src/main/scala/utils/ImplicitPicklers.scala | Scala | gpl-3.0 | 337 |
package lore.compiler.feedback
import lore.compiler.core.Position
import lore.compiler.semantics.NamePath
import lore.compiler.semantics.scopes.{StructBinding, StructObjectBinding}
import lore.compiler.semantics.structures.{StructDefinition, StructPropertyDefinition}
import lore.compiler.types.{ShapeType, TypeVariable}
/** Compiler errors reported while validating struct declarations. */
object StructFeedback {

  case class DuplicateProperty(definition: StructDefinition, property: StructPropertyDefinition) extends Feedback.Error(property) {
    override def message = s"The property ${property.name} is declared twice in the struct ${definition.name}."
  }

  case class ConstructorExpected(name: NamePath, override val position: Position) extends Feedback.Error(position) {
    override def message: String = s"The type $name doesn't have an associated constructor. It must be a struct."
  }

  case class CompanionModuleExpected(binding: StructBinding, memberName: String, override val position: Position) extends Feedback.Error(position) {
    override def message: String = s"The struct ${binding.definition.name} does not have a companion module which" +
      s" might define a member $memberName."
  }

  /** Errors about conformance to an inherited shape type. */
  object Shape {
    case class MissingProperty(definition: StructDefinition, property: ShapeType.Property) extends Feedback.Error(definition) {
      override def message: String = s"The struct ${definition.name} should declare a property '${property.name}' of type " +
        s"${property.tpe} (or a subtype thereof) according to the struct's inherited shape type. Such a property is missing."
    }

    case class InvalidPropertyType(
      definition: StructDefinition,
      structProperty: StructPropertyDefinition,
      shapeProperty: ShapeType.Property,
    ) extends Feedback.Error(structProperty) {
      override def message: String = s"The property '${structProperty.name}' should have the type ${shapeProperty.tpe} " +
        s"(or a subtype thereof), but actually has the type ${structProperty.tpe}."
    }
  }

  /** Errors specific to open type parameters of structs. */
  object OpenTypeParameter {
    case class CovarianceRequired(typeParameter: TypeVariable, override val position: Position) extends Feedback.Error(position) {
      override def message: String = s"The open type parameter $typeParameter must be covariant."
    }

    case class IllegalLowerBound(typeParameter: TypeVariable, override val position: Position) extends Feedback.Error(position) {
      override def message: String = s"The open type parameter $typeParameter may not have a lower bound."
    }

    case class NotUniquelyDeducible(typeParameter: TypeVariable, override val position: Position) extends Feedback.Error(position) {
      override def message: String = s"The open type parameter $typeParameter is not uniquely deducible. It may only be" +
        s" used once in a single property, and not within a sum or intersection type."
    }

    case class MutableProperty(typeParameter: TypeVariable, property: StructPropertyDefinition) extends Feedback.Error(property) {
      override def message: String = s"The open type parameter $typeParameter is used in a mutable property $property." +
        s" It may only be used in an immutable property."
    }
  }

  /** Errors specific to struct objects (singleton structs). */
  object Object {
    case class MissingDefault(struct: StructDefinition, property: StructPropertyDefinition) extends Feedback.Error(property) {
      override def message: String = s"The property ${property.name} must have a default value because ${struct.name}" +
        s" is an object. Objects cannot be instantiated directly, so the default value ensures that each property is" +
        s" assigned an appropriate value."
    }

    case class NoConstructor(name: NamePath, override val position: Position) extends Feedback.Error(position) {
      override def message: String = s"The type $name is an object, which doesn't have a constructor. Objects cannot be" +
        s" constructed. You can refer to the object value simply by the variable $name."
    }

    case class MemberNameTaken(struct: StructDefinition, name: String, memberPosition: Position) extends Feedback.Error(memberPosition) {
      override def message: String = s"The struct object ${struct.name.simpleName} already has a property $name." +
        s" Companion module members and struct properties may not share names."
    }

    case class CompanionModuleExpected(binding: StructObjectBinding, memberName: String, override val position: Position) extends Feedback.Error(position) {
      override def message: String = s"The struct object ${binding.definition.name} does not have a property $memberName," +
        s" nor a companion module which might define such a member."
    }
  }
}
| marcopennekamp/lore | compiler/src/lore/compiler/feedback/StructFeedback.scala | Scala | mit | 4,633 |
package ru.avhaliullin.whatever.frontend
/**
* @author avhaliullin
*/
package object syntax {

  /** A parsed compilation unit: top-level definitions plus its imports. */
  case class SyntaxTree(nodes: Seq[Definition], imports: Seq[Import])

  /** A (possibly parameterised) type reference. */
  case class TypeExpression(name: QualifiedName, args: Seq[TypeExpression])

  /** An import of a qualified name. */
  case class Import(name: QualifiedName)

  // Aliases re-exporting Definition's nested members at package level,
  // so callers can write e.g. FnDefinition instead of Definition.FnDefinition.
  type FnDefinition = Definition.FnDefinition
  val FnDefinition = Definition.FnDefinition

  type StructDefinition = Definition.StructDefinition
  val StructDefinition = Definition.StructDefinition

  type StructImplementation = Definition.StructImplementation
  val StructImplementation = Definition.StructImplementation
}
| avhaliullin/whatever-compiler | compiler/src/main/scala/ru/avhaliullin/whatever/frontend/syntax/package.scala | Scala | mit | 615 |
package com.dt.scala.type_parameterization
/**
* Author: Wang Jialin
* Date 2015/7/5
* Contact Information:
* WeChat: 18610086859
* QQ: 1740415547
* Email: 18610086859@126.com
* Tel: 18610086859
*/
//class Pair[T](val first : T, val second : T)
/**
 * An immutable pair of values of an upper-bounded type: T must be
 * comparable to itself so the elements can be ordered.
 */
class Pair[T <: Comparable[T]](val first: T, val second: T) {
  /** The larger of the two elements; `second` wins ties. */
  def bigger: T = {
    val firstIsGreater = first.compareTo(second) > 0
    if (firstIsGreater) first else second
  }
}
/**
 * A pair whose first element may be replaced by a value of any supertype
 * of T (a lower type bound), widening the pair's element type.
 */
class Pair_Lower_Bound[T](val first: T, val second: T) {
  /** Returns a new pair with `newFirst` in front, widened to `R`; `second` is kept. */
  def replaceFirst[R >: T](newFirst: R): Pair_Lower_Bound[R] =
    new Pair_Lower_Bound[R](newFirst, second)
}
/** Demo of upper type bounds: Pair requires T <: Comparable[T]. */
object Typy_Variables_Bounds {
  /** Builds a string pair and prints the lexicographically larger element. */
  def main(args: Array[String]): Unit = {
    val pair = new Pair("Spark", "Hadoop")
    val larger = pair.bigger
    println(larger)
  }
} | slieer/scala-tutorials | src/main/scala/com/dt/scala/type_parameterization/Typy_Variables_Bounds.scala | Scala | apache-2.0 | 700 |
package model
import org.scalatest.FlatSpec
import org.scalatest.Matchers._
/**
* Created by ghseeli on 1/17/17.
*/
class NDimlCoordinateTest extends FlatSpec {
  behavior of "NDimlCoordinate"

  // Neighbors differ by at most 1 in each coordinate; in 1-D the coordinate
  // (1) has a single neighbor (2), so 0 appears to be excluded here —
  // consistent with the "positive coordinates" wording of the test name.
  it should "list all n-dimensional neighbors with positive coordinates" in {
    NDimlCoordinate(Seq(2)).neighbors.toSet shouldEqual Set(NDimlCoordinate(Seq(1)), NDimlCoordinate(Seq(3)))
    NDimlCoordinate(Seq(1)).neighbors.toSet shouldEqual Set(NDimlCoordinate(Seq(2)))
    NDimlCoordinate(Seq(1,1,1)).neighbors.toSet shouldEqual Set(Seq(2,2,2),Seq(2,2,1),Seq(2,1,2),Seq(2,1,1),Seq(1,2,2),Seq(1,2,1),Seq(1,1,2)).map(NDimlCoordinate)
  }

  // Component-wise addition.
  it should "be able to add coordinates" in {
    NDimlCoordinate(Seq(1,1)).add(NDimlCoordinate(Seq(2,-1))) shouldEqual NDimlCoordinate(Seq(3,0))
  }

  // Component-wise negation.
  it should "be able to negate coordinates" in {
    NDimlCoordinate(Seq(-2,1,0,3)).negate shouldEqual NDimlCoordinate(Seq(2,-1,0,-3))
  }

  // Tuple-style rendering of the coordinates.
  it should "be able to print in standard coordinate notation" in {
    NDimlCoordinate(Seq(1,0,-1)).display shouldEqual "(1,0,-1)"
  }
}
| ghseeli/four-dim-tic-tac-toe | src/test/scala/model/NDimlCoordinateTest.scala | Scala | gpl-3.0 | 1,069 |
package mgoeminne.scalaggplot.stat
import mgoeminne.scalaggplot.position
import mgoeminne.scalaggplot.geom.{area, Geom}
import mgoeminne.scalaggplot.{aes, geom}
import mgoeminne.scalaggplot.position.{stack, Position}
import org.saddle.Frame
/**
* 1d kernel density estimate.
*
* == Aesthetics ==
*
* This function understands the following aesthetics (required aesthetics are in bold):
*
* - '''x'''
* - fill
* - y
*
* == Examples ==
*
* TODO
*
* @param mapping The aesthetic mapping, usually constructed with [[aes.aes]] or [[aes.string]].
* Only needs to be set at the layer level if you are overriding the plot defaults.
* @param data A layer specific dataset - only needed if you want to override the plot defaults.
* @param geom The geometric object to use display the data.
* @param position The position adjustment to use for overlappling points on this layer.
* @param adjust See density for details.
* @param kernel Kernel used for density estimation, see density for details
* @param trim If true, the default, densities are trimmed to the actual range of the data.
* If false, they are extended by the default 3 bandwidths (as specified by the cut parameter to density).
* @param removeNA If false (the default), removes missing values with a warning. If true, silently removes missing values.
* @tparam T
* @return data.frame with additional columns: densitydensity estimate countdensity * number of points - useful for
* stacked density plots scaleddensity estimate, scaled to maximum of 1.
*/
case class density[T]( mapping: Option[(Seq[Numeric[T]], Seq[Numeric[T]])] = None,
                       data: Option[Frame[Any,Any,T]] = None,
                       geom: Geom = densityUtil.defaultGeom, // geom.area by default
                       position: Position = densityUtil.defaultPos, // position.stack by default
                       adjust: Double = 1,
                       kernel: String = "gaussian",
                       trim: Boolean = false,
                       removeNA: Boolean = false) extends Statistic
// Singleton default geom/position instances shared by every `density` value.
private object densityUtil
{
  val defaultGeom = new geom.area()
  val defaultPos = new position.stack()
} | mgoeminne/scala-ggplot | src/main/scala/mgoeminne/scalaggplot/stat/density.scala | Scala | lgpl-3.0 | 2,204 |
package scan
import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._
import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._
import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global
/** End-to-end spec for Scanner.pathScan over an in-memory mock filesystem. */
class ScannerSpec extends mutable.Specification {

  // In-memory Filesystem: directory listings and file sizes come from maps;
  // unknown paths raise the same exception types a real filesystem would.
  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  // Fixture tree: base/{1.txt, 2.txt, subdir/{1.txt, 3.txt}} with sizes 1,2,1,3.
  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  // Effect stack: async Task + filesystem/config readers + log writer.
  type R = Fx.fx4[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?], Writer[Log, ?]]

  // Interprets the Eff program: ScanConfig(2) = keep the top-2 largest files;
  // blocks for at most 3 seconds.
  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).taskAttempt.runWriter.runAsync.runSyncUnsafe(3.seconds)

  // Top-2 files are sub3 (3) and base2 (2); total size 7 across 4 files.
  val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))

  val expectedLogs = Set(
    Log.info("Scan started on Directory(base)"),
    Log.debug("Scanning directory 'Directory(base)': 1 subdirectories and 2 files"),
    Log.debug("File base/1.txt Size 1 B"),
    Log.debug("File base/2.txt Size 2 B"),
    Log.debug("Scanning directory 'Directory(base/subdir)': 0 subdirectories and 2 files"),
    Log.debug("File base/subdir/1.txt Size 1 B"),
    Log.debug("File base/subdir/3.txt Size 3 B")
  )

  // Runs once at spec construction; both examples below assert on the result.
  val (actual, logs) = run(Scanner.pathScan(base), fs)

  "Report Format" ! {actual.mustEqual(expected)}

  "Logs messages are emitted (ignores order due to non-determinstic concurrent execution)" ! {
    logs.forall(expectedLogs.contains)
  }
}
| benhutchison/GettingWorkDoneWithExtensibleEffects | solutions/exerciseConcurrent/src/test/scala/scan/ScannerSpec.scala | Scala | apache-2.0 | 2,650 |
package org.vaadin.addons.rinne
import com.vaadin.server.ThemeResource
import org.scalatest.FunSpec
/** Spec for VVideo's Option-wrapped poster property. */
class VVideoSpec extends FunSpec {
  describe("A VVideo") {
    describe("should allow to set") {
      it("poster") {
        val video = new VVideo
        val res = new ThemeResource("img.png")
        // Starts unset, accepts a resource, and can be cleared again.
        assert(video.poster === None)
        video.poster = res
        assert(video.poster === Some(res))
        video.poster = None
        assert(video.poster === None)
      }
    }
  }
} | LukaszByczynski/rinne | src/test/scala/org/vaadin/addons/rinne/VVideoSpec.scala | Scala | apache-2.0 | 497 |
/*
* Copyright (c) 2009 Sony Pictures Imageworks Inc.
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the
* distribution. Neither the name of Sony Pictures Imageworks nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.imageworks.migration
/**
 * Java-friendly factory functions for the database adapters.
 *
 * The Scala adapter constructors take an `Option[String]` default schema
 * name, which is awkward to build from pure Java. Each adapter therefore
 * gets a pair of overloads here: one with no default schema and one that
 * accepts the schema name directly.
 */
object JavaDatabaseAdapter {

  /** Creates a Derby adapter with no default schema. */
  def getDerbyDatabaseAdapter: DerbyDatabaseAdapter =
    new DerbyDatabaseAdapter(None)

  /** Creates a Derby adapter using `schemaName` as the default schema. */
  def getDerbyDatabaseAdapter(schemaName: String): DerbyDatabaseAdapter =
    new DerbyDatabaseAdapter(Some(schemaName))

  /** Creates a MySQL adapter with no default schema. */
  def getMysqlDatabaseAdapter: MysqlDatabaseAdapter =
    new MysqlDatabaseAdapter(None)

  /** Creates a MySQL adapter using `schemaName` as the default schema. */
  def getMysqlDatabaseAdapter(schemaName: String): MysqlDatabaseAdapter =
    new MysqlDatabaseAdapter(Some(schemaName))

  /** Creates an Oracle adapter with no default schema. */
  def getOracleDatabaseAdapter: OracleDatabaseAdapter =
    new OracleDatabaseAdapter(None)

  /** Creates an Oracle adapter using `schemaName` as the default schema. */
  def getOracleDatabaseAdapter(schemaName: String): OracleDatabaseAdapter =
    new OracleDatabaseAdapter(Some(schemaName))

  /** Creates a PostgreSQL adapter with no default schema. */
  def getPostgresqlDatabaseAdapter: PostgresqlDatabaseAdapter =
    new PostgresqlDatabaseAdapter(None)

  /** Creates a PostgreSQL adapter using `schemaName` as the default schema. */
  def getPostgresqlDatabaseAdapter(schemaName: String): PostgresqlDatabaseAdapter =
    new PostgresqlDatabaseAdapter(Some(schemaName))

  /** Creates an H2 adapter with no default schema. */
  def getH2DatabaseAdapter: H2DatabaseAdapter =
    new H2DatabaseAdapter(None)

  /** Creates an H2 adapter using `schemaName` as the default schema. */
  def getH2DatabaseAdapter(schemaName: String): H2DatabaseAdapter =
    new H2DatabaseAdapter(Some(schemaName))
}
| jcai/scala-migrations | src/main/scala/com/imageworks/migration/JavaDatabaseAdapter.scala | Scala | bsd-3-clause | 4,445 |
package controllers
import models.{Event, Events}
import play.api.data.Form
import play.api.libs.json.Json
import play.api.mvc.{Action, Controller}
import play.api.data.Forms._
import play.api.libs.concurrent.Execution.Implicits.defaultContext
/**
* Created by henrique on 23/06/15.
*/
/** CRUD JSON endpoints for events. */
object EventController extends Controller {

  // JSON (de)serialisers for Event, derived by Play's macros.
  implicit val writes = Json.writes[Event]
  implicit val reads = Json.reads[Event]

  // Binds and validates the incoming JSON body into an Event.
  val eventForm = Form(
    mapping(
      "name" -> text,
      "date" -> sqlDate("dd/MM/yyyy"),
      "clientMax" -> number,
      "maleTicket" -> number,
      "femaleTicket" -> number,
      "description" -> optional(text),
      "style" -> optional(text)
    )(Event.apply)(Event.unapply)
  )

  // GET: all stored events as a JSON array.
  def list = Action(parse.empty) { implicit request => Ok(Json.toJson(Events.all)) }

  // POST: create an event from the JSON body; 400 on validation or store failure.
  def create = Action(parse.json) { implicit request =>
    eventForm.bindFromRequest.fold(
      formWithErrors => BadRequest(formWithErrors.errorsAsJson),
      (model: Event) => {
        Events create(model) match {
          case true => Ok
          case _ => BadRequest
        }
      }
    )
  }

  // GET: look up a single event by name.
  def find(name: String) = Action(parse.empty) { implicit request =>
    Events find(name) match {
      // NOTE(review): `case club` is an irrefutable pattern, so the NotFound
      // branch below is unreachable and this endpoint can never answer 404.
      // Confirm the return type of Events.find and match its empty case
      // (e.g. None) explicitly.
      case club => Ok(Json.toJson(club))
      case _ => NotFound("There is no event with the given id")
    }
  }

  // PUT: update an event; 404 when no row was affected.
  def update = Action(parse.json) { implicit request =>
    eventForm.bindFromRequest.fold(
      formWithErrors => BadRequest(formWithErrors.errorsAsJson),
      (model: Event) => {
        Events update(model) match {
          case 1 => Ok
          case _ => NotFound("There is no event with the given id")
        }
      }
    )
  }

  // DELETE: remove an event by name; 404 when no row was affected.
  def delete(name: String) = Action(parse.empty) { implicit request =>
    Events delete(name) match {
      case 1 => Ok
      case _ => NotFound("There is no event with the given id")
    }
  }
}
| neomartins/iPary-backEnd | app/controllers/EventController.scala | Scala | mit | 1,867 |
import leon.annotation._
import leon.lang._
import leon.collection._
// A synchronisable record: `version` advances on every update while
// `versionSynced` remembers the last version that was propagated.
case class Entry(id: Int, version: Int, versionSynced: Int, f1: Int, f2: Int) {
  // Copy with new payload fields and the version bumped; versionSynced is
  // kept, so the entry becomes "dirty" until markSynced is called.
  def update(f1: Int, f2: Int): Entry = {
    Entry(id, version+1, versionSynced, f1, f2)
  }

  // Copy whose synced version equals the current version (Leon-checked).
  def markSynced = {
    Entry(id, version, version, f1, f2)
  } ensuring { _.isSynced }

  def isSynced = {
    version == versionSynced
  }
}
// Verified merge of two id-sorted entry lists (Leon verification case study).
// The `require`/`ensuring` clauses are formal contracts checked by Leon; the
// code deliberately uses Leon's collection.List/Cons/Nil.
object Sync {
  // True iff entry ids are strictly increasing.
  def idSorted(l: collection.List[Entry]): Boolean = l match {
    case Cons(v1, Cons(v2, xs)) => v1.id < v2.id && idSorted(collection.Cons(v2, xs))
    case _ => true
  }

  // Raw content (ignoring version/versionSynced)
  def content(l: collection.List[Entry]): Set[(Int, Int, Int)] = l match {
    case Cons(h, t) => Set((h.id, h.f1, h.f2)) ++ content(t)
    case Nil() => Set()
  }

  // Set of all entry ids in the list.
  def ids(l: collection.List[Entry]): Set[Int] = l match {
    case Cons(h, t) => Set(h.id) ++ ids(t)
    case _ => Set()
  }

  // Marks every entry synced; contract: ordering, content and ids preserved.
  def markSynced(l1: collection.List[Entry]): collection.List[Entry] = {
    require(idSorted(l1))
    (l1 match {
      case Cons(e1, t1) => collection.Cons(e1.markSynced, markSynced(t1))
      case Nil() => collection.Nil()
    }) : collection.List[Entry]
  } ensuring { res =>
    idSorted(res) &&
    content(res) == content(l1) &&
    ids(res) == ids(l1) &&
    allSynced(res)
  }

  // True iff every entry in the list is synced.
  def allSynced(l1: collection.List[Entry]): Boolean = {
    l1 match {
      case Cons(h1, t1) => h1.isSynced && allSynced(t1)
      case Nil() => true
    }
  }

  // Merge of two id-sorted lists; for a shared id the higher version wins
  // (e2 wins ties). Every emitted entry is marked synced.
  def sync(v1: collection.List[Entry], v2: collection.List[Entry]): collection.List[Entry] = {
    require(idSorted(v1) && idSorted(v2))
    ((v1, v2) match {
      case (Cons(e1, t1), Cons(e2, t2)) =>
        if (e1.id < e2.id) {
          collection.Cons(e1.markSynced, sync(t1, v2))
        } else if (e1.id > e2.id) {
          collection.Cons(e2.markSynced, sync(v1, t2))
        } else {
          if (e1.version > e2.version) {
            collection.Cons(e1.markSynced, sync(t1, t2))
          } else {
            collection.Cons(e2.markSynced, sync(t1, t2))
          }
        }
      case (Nil(), l2) => markSynced(l2)
      case (l1, Nil()) => markSynced(l1)
    }): collection.List[Entry]
  } ensuring {
    res =>
      idSorted(res) &&
      (content(res) subsetOf (content(v1) ++ content(v2))) &&
      (ids(res) == ids(v1) ++ ids(v2)) &&
      allSynced(res)
  }

  // Small concrete scenario: entry 2 was updated on one side and must win.
  def test() = {
    val e1 = Entry(1, 1, 0, 1, 1)
    val e2 = Entry(2, 1, 0, 2, 2)
    val e3 = Entry(3, 1, 0, 3, 3)

    val l1 = collection.Cons(e1, collection.Cons(e2, collection.Nil()))
    val l2 = collection.Cons(e2.update(5, 5), collection.Cons(e3, collection.Nil()))

    sync(l1, l2)
  }
}
| ericpony/scala-examples | testcases/verification/case-studies/Sync.scala | Scala | mit | 2,652 |
package net.sansa_stack.rdf.common.partition.layout
import scala.reflect.runtime.universe.Type
import scala.reflect.runtime.universe.typeOf
import net.sansa_stack.rdf.common.partition.core.RdfPartitionerDefault
import net.sansa_stack.rdf.common.partition.schema.SchemaStringLong
import org.apache.jena.graph.Triple
/** Partition layout mapping triples with numeric-literal objects to (subject, Long) rows. */
object TripleLayoutLong
  extends TripleLayout {
  /** Row schema produced by this layout: (subject string, long value). */
  override def schema: Type = typeOf[SchemaStringLong]

  /**
   * Converts an RDF triple whose object is a numeric literal into a
   * (subject, long) row, truncating the numeric value to a Long.
   *
   * @throws RuntimeException if the triple's object is not a numeric literal
   */
  override def fromTriple(t: Triple): SchemaStringLong = {
    val s = t.getSubject
    val o = t.getObject

    val v = if (o.isLiteral && o.getLiteralValue.isInstanceOf[Number]) {
      o.getLiteralValue.asInstanceOf[Number]
    } else throw new RuntimeException("Layout only for longs: " + t) // fixed: message previously said "doubles" (copy-paste from the double layout)

    val sStr = RdfPartitionerDefault.getUriOrBNodeString(s)

    SchemaStringLong(sStr, v.longValue)
  }
}
| SANSA-Stack/SANSA-RDF | sansa-rdf/sansa-rdf-common/src/main/scala/net/sansa_stack/rdf/common/partition/layout/TripleLayoutLong.scala | Scala | apache-2.0 | 826 |
/*
* Copyright 2014β2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.connector.datasource
import quasar.RenderTreeT
import quasar.common.PhaseResultTell
import quasar.connector.{Datasource, QScriptEvaluator}
import quasar.qscript.{MonadPlannerErr, QScriptEducated}
import matryoshka.{BirecursiveT, EqualT, ShowT}
import scalaz.Monad
/**
 * A Datasource capable of executing QScript.
 *
 * Combines [[QScriptEvaluator]] (evaluating educated QScript to a result
 * `R` within effect `F`) with the [[Datasource]] interface, using educated
 * QScript terms `T[QScriptEducated[T, ?]]` as the query representation.
 */
abstract class HeavyweightDatasource[
    T[_[_]]: BirecursiveT: EqualT: ShowT: RenderTreeT,
    F[_]: Monad: MonadPlannerErr: PhaseResultTell,
    G[_],
    R]
    extends QScriptEvaluator[T, F, R]
    with Datasource[F, G, T[QScriptEducated[T, ?]], R]
| slamdata/slamengine | connector/src/main/scala/quasar/connector/datasource/HeavyweightDatasource.scala | Scala | apache-2.0 | 1,191 |
/**
* Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
* See accompanying LICENSE file.
*/
package kafka.manager.utils
import TopicErrors._
import kafka.manager.ActorModel.TopicDescription
import kafka.manager.{Kafka_0_8_2_0, TopicIdentity}
/**
* @author hiral
*/
/** Validation tests for AdminUtils.createTopic against a Curator-backed ZooKeeper. */
class TestCreateTopic extends CuratorAwareTest {

  val adminUtils = new AdminUtils(Kafka_0_8_2_0)

  // Each negative case asserts the specific TopicError that must be raised.
  test("create topic with empty name") {
    checkError[TopicNameEmpty] {
      withCurator { curator =>
        val brokerList = IndexedSeq(1,2)
        adminUtils.createTopic(curator,brokerList,"",10,2)
      }
    }
  }

  test("create topic with invalid name") {
    withCurator { curator =>
      val brokerList = IndexedSeq(1,2)
      checkError[InvalidTopicName] {
        adminUtils.createTopic(curator,brokerList,".",10,2)
      }
      checkError[InvalidTopicName] {
        adminUtils.createTopic(curator,brokerList,"..",10,2)
      }
    }
  }

  // Name exceeding the maximum topic-name length must be rejected.
  test("create topic with name too long") {
    checkError[InvalidTopicLength] {
      withCurator { curator =>
        val brokerList = IndexedSeq(1,2)
        adminUtils.createTopic(curator,brokerList,"adfasfdsafsfasdfsadfasfsdfasffsdfsadfsdfsdfsfasdfdsfdsafasdfsfdsafasdfdsfdsafsdfdsafasdfsdafasdfadsfdsfsdafsdfsadfdsfasfdfasfsdafsdfdsfdsfasdfdsfsdfsadfsdfasdfdsafasdfsadfdfdsfdsfsfsfdsfdsfdssafsdfdsafadfasdfsdafsdfasdffasfdfadsfasdfasfadfafsdfasfdssafffffffffffdsadfsafdasdfsafsfsfsdfafs",10,2)
      }
    }
  }

  test("create topic with bad chars in name") {
    checkError[IllegalCharacterInName] {
      withCurator { curator =>
        val brokerList = IndexedSeq(1,2)
        adminUtils.createTopic(curator,brokerList,"bad!Topic!",10,2)
      }
    }
  }

  test("create topic with invalid partitions") {
    checkError[PartitionsGreaterThanZero] {
      withCurator { curator =>
        val brokerList = IndexedSeq(1,2)
        adminUtils.createTopic(curator,brokerList,"mytopic",0,2)
      }
    }
  }

  test("create topic with invalid replication") {
    checkError[ReplicationGreaterThanZero] {
      withCurator { curator =>
        val brokerList = IndexedSeq(1,2)
        adminUtils.createTopic(curator,brokerList,"mytopic",10,0)
      }
    }
  }

  test("create topic with # of brokers < replication") {
    checkError[ReplicationGreaterThanNumBrokers] {
      withCurator { curator =>
        val brokerList = IndexedSeq.empty[Int]
        adminUtils.createTopic(curator,brokerList,"mytopic",10,3)
      }
    }
  }

  // Happy path: topic metadata written to ZK round-trips through TopicIdentity.
  test("create topic") {
    withCurator { curator =>
      val brokerList = IndexedSeq(1,2,3)
      adminUtils.createTopic(curator,brokerList,"mytopic",10,3)
      val json:String = curator.getData.forPath(ZkUtils.getTopicPath("mytopic"))
      val configJson : String = curator.getData.forPath(ZkUtils.getTopicConfigPath("mytopic"))
      val td = TopicIdentity.from(3,TopicDescription("mytopic",json,None,Option(configJson),false))
      assert(td.partitions == 10)
      assert(td.replicationFactor == 3)
    }
  }

  // NOTE(review): this test expects the single createTopic call below to fail,
  // which only happens because "mytopic" was already created by the previous
  // test — it depends on shared ZK state and on test execution order.
  test("create topic - topic already exists") {
    checkError[TopicAlreadyExists] {
      withCurator { curator =>
        val brokerList = IndexedSeq(1,2,3)
        adminUtils.createTopic(curator, brokerList, "mytopic", 10, 3)
        val json: String = curator.getData.forPath(ZkUtils.getTopicPath("mytopic"))
        assert(json == "{\\"version\\":1,\\"partitions\\":{\\"8\\":[2,3,1],\\"4\\":[1,3,2],\\"9\\":[3,2,1],\\"5\\":[2,1,3],\\"6\\":[3,1,2],\\"1\\":[1,2,3],\\"0\\":[3,1,2],\\"2\\":[2,3,1],\\"7\\":[1,2,3],\\"3\\":[3,2,1]}}")
      }
    }
  }
}
| vvutharkar/kafka-manager | test/kafka/manager/utils/TestCreateTopic.scala | Scala | apache-2.0 | 3,563 |
package org.sisioh.aws4s.eb
import com.amazonaws.services.elasticbeanstalk.AWSElasticBeanstalkClient
import com.amazonaws.services.elasticbeanstalk.model._
import org.sisioh.aws4s.eb.model._
object Implicits extends Implicits
trait Implicits extends ModelImplicits {
implicit def richAWSElasticBeanstalkClient(underlying: AWSElasticBeanstalkClient): RichAWSElasticBeanstalkClient =
new RichAWSElasticBeanstalkClient(underlying)
}
/** Implicit conversions from the raw AWS Elastic Beanstalk SDK model classes
  * to their `Rich*` wrappers.
  *
  * Every conversion below is purely mechanical: it delegates to the wrapper's
  * constructor and adds no behavior of its own. One enrichment per model type.
  */
trait ModelImplicits {
  implicit def richApplicationDescription(underlying: ApplicationDescription): RichApplicationDescription =
    new RichApplicationDescription(underlying)
  implicit def richApplicationVersionDescription(
    underlying: ApplicationVersionDescription
  ): RichApplicationVersionDescription =
    new RichApplicationVersionDescription(underlying)
  implicit def richAutoScalingGroup(underlying: AutoScalingGroup): RichAutoScalingGroup =
    new RichAutoScalingGroup(underlying)
  implicit def richCheckDNSAvailabilityRequest(
    underlying: CheckDNSAvailabilityRequest
  ): RichCheckDNSAvailabilityRequest =
    new RichCheckDNSAvailabilityRequest(underlying)
  implicit def richCheckDNSAvailabilityResult(underlying: CheckDNSAvailabilityResult): RichCheckDNSAvailabilityResult =
    new RichCheckDNSAvailabilityResult(underlying)
  implicit def richConfigurationOptionDescription(
    underlying: ConfigurationOptionDescription
  ): RichConfigurationOptionDescription =
    new RichConfigurationOptionDescription(underlying)
  implicit def richConfigurationOptionSetting(underlying: ConfigurationOptionSetting): RichConfigurationOptionSetting =
    new RichConfigurationOptionSetting(underlying)
  implicit def richConfigurationSettingsDescription(
    underlying: ConfigurationSettingsDescription
  ): RichConfigurationSettingsDescription =
    new RichConfigurationSettingsDescription(underlying)
  implicit def richCreateApplicationRequest(underlying: CreateApplicationRequest): RichCreateApplicationRequest =
    new RichCreateApplicationRequest(underlying)
  implicit def richCreateApplicationResult(underlying: CreateApplicationResult): RichCreateApplicationResult =
    new RichCreateApplicationResult(underlying)
  implicit def richCreateApplicationVersionRequest(
    underlying: CreateApplicationVersionRequest
  ): RichCreateApplicationVersionRequest =
    new RichCreateApplicationVersionRequest(underlying)
  implicit def richCreateApplicationVersionResult(
    underlying: CreateApplicationVersionResult
  ): RichCreateApplicationVersionResult =
    new RichCreateApplicationVersionResult(underlying)
  implicit def richCreateConfigurationTemplateRequest(
    underlying: CreateConfigurationTemplateRequest
  ): RichCreateConfigurationTemplateRequest =
    new RichCreateConfigurationTemplateRequest(underlying)
  implicit def richCreateConfigurationTemplateResult(
    underlying: CreateConfigurationTemplateResult
  ): RichCreateConfigurationTemplateResult =
    new RichCreateConfigurationTemplateResult(underlying)
  implicit def richCreateEnvironmentRequest(underlying: CreateEnvironmentRequest): RichCreateEnvironmentRequest =
    new RichCreateEnvironmentRequest(underlying)
  implicit def richCreateEnvironmentResult(underlying: CreateEnvironmentResult): RichCreateEnvironmentResult =
    new RichCreateEnvironmentResult(underlying)
  implicit def richCreateStorageLocationResult(
    underlying: CreateStorageLocationResult
  ): RichCreateStorageLocationResult =
    new RichCreateStorageLocationResult(underlying)
  implicit def richDeleteApplicationRequest(underlying: DeleteApplicationRequest): RichDeleteApplicationRequest =
    new RichDeleteApplicationRequest(underlying)
  implicit def richDeleteApplicationVersionRequest(
    underlying: DeleteApplicationVersionRequest
  ): RichDeleteApplicationVersionRequest =
    new RichDeleteApplicationVersionRequest(underlying)
  implicit def richDeleteConfigurationTemplateRequest(
    underlying: DeleteConfigurationTemplateRequest
  ): RichDeleteConfigurationTemplateRequest =
    new RichDeleteConfigurationTemplateRequest(underlying)
  implicit def richDeleteEnvironmentConfigurationRequest(
    underlying: DeleteEnvironmentConfigurationRequest
  ): RichDeleteEnvironmentConfigurationRequest =
    new RichDeleteEnvironmentConfigurationRequest(underlying)
  implicit def richDescribeApplicationsRequest(
    underlying: DescribeApplicationsRequest
  ): RichDescribeApplicationsRequest =
    new RichDescribeApplicationsRequest(underlying)
  implicit def richDescribeApplicationsResult(underlying: DescribeApplicationsResult): RichDescribeApplicationsResult =
    new RichDescribeApplicationsResult(underlying)
  implicit def richDescribeApplicationVersionsRequest(
    underlying: DescribeApplicationVersionsRequest
  ): RichDescribeApplicationVersionsRequest =
    new RichDescribeApplicationVersionsRequest(underlying)
  implicit def richDescribeApplicationVersionsResult(
    underlying: DescribeApplicationVersionsResult
  ): RichDescribeApplicationVersionsResult =
    new RichDescribeApplicationVersionsResult(underlying)
  implicit def richDescribeConfigurationOptionsRequest(
    underlying: DescribeConfigurationOptionsRequest
  ): RichDescribeConfigurationOptionsRequest =
    new RichDescribeConfigurationOptionsRequest(underlying)
  implicit def richDescribeConfigurationOptionsResult(
    underlying: DescribeConfigurationOptionsResult
  ): RichDescribeConfigurationOptionsResult =
    new RichDescribeConfigurationOptionsResult(underlying)
  implicit def richDescribeConfigurationSettingsRequest(
    underlying: DescribeConfigurationSettingsRequest
  ): RichDescribeConfigurationSettingsRequest =
    new RichDescribeConfigurationSettingsRequest(underlying)
  implicit def richDescribeConfigurationSettingsResult(
    underlying: DescribeConfigurationSettingsResult
  ): RichDescribeConfigurationSettingsResult =
    new RichDescribeConfigurationSettingsResult(underlying)
  implicit def richDescribeEnvironmentResourcesRequest(
    underlying: DescribeEnvironmentResourcesRequest
  ): RichDescribeEnvironmentResourcesRequest =
    new RichDescribeEnvironmentResourcesRequest(underlying)
  implicit def richDescribeEnvironmentResourcesResult(
    underlying: DescribeEnvironmentResourcesResult
  ): RichDescribeEnvironmentResourcesResult =
    new RichDescribeEnvironmentResourcesResult(underlying)
  implicit def richDescribeEnvironmentsRequest(
    underlying: DescribeEnvironmentsRequest
  ): RichDescribeEnvironmentsRequest =
    new RichDescribeEnvironmentsRequest(underlying)
  implicit def richDescribeEnvironmentsResult(underlying: DescribeEnvironmentsResult): RichDescribeEnvironmentsResult =
    new RichDescribeEnvironmentsResult(underlying)
  implicit def richDescribeEventsRequest(underlying: DescribeEventsRequest): RichDescribeEventsRequest =
    new RichDescribeEventsRequest(underlying)
  implicit def richDescribeEventsResult(underlying: DescribeEventsResult): RichDescribeEventsResult =
    new RichDescribeEventsResult(underlying)
  implicit def richEnvironmentDescription(underlying: EnvironmentDescription): RichEnvironmentDescription =
    new RichEnvironmentDescription(underlying)
  implicit def richEnvironmentInfoDescription(underlying: EnvironmentInfoDescription): RichEnvironmentInfoDescription =
    new RichEnvironmentInfoDescription(underlying)
  implicit def richEnvironmentResourceDescription(
    underlying: EnvironmentResourceDescription
  ): RichEnvironmentResourceDescription =
    new RichEnvironmentResourceDescription(underlying)
  implicit def richEnvironmentResourcesDescription(
    underlying: EnvironmentResourcesDescription
  ): RichEnvironmentResourcesDescription =
    new RichEnvironmentResourcesDescription(underlying)
  implicit def richEnvironmentTier(underlying: EnvironmentTier): RichEnvironmentTier =
    new RichEnvironmentTier(underlying)
  implicit def richEventDescription(underlying: EventDescription): RichEventDescription =
    new RichEventDescription(underlying)
  implicit def richInstance(underlying: Instance): RichInstance =
    new RichInstance(underlying)
  implicit def richLaunchConfiguration(underlying: LaunchConfiguration): RichLaunchConfiguration =
    new RichLaunchConfiguration(underlying)
  implicit def richListAvailableSolutionStacksResult(
    underlying: ListAvailableSolutionStacksResult
  ): RichListAvailableSolutionStacksResult =
    new RichListAvailableSolutionStacksResult(underlying)
  implicit def richListener(underlying: Listener): RichListener =
    new RichListener(underlying)
  implicit def richLoadBalancer(underlying: LoadBalancer): RichLoadBalancer =
    new RichLoadBalancer(underlying)
  implicit def richLoadBalancerDescription(underlying: LoadBalancerDescription): RichLoadBalancerDescription =
    new RichLoadBalancerDescription(underlying)
  implicit def richOptionRestrictionRegex(underlying: OptionRestrictionRegex): RichOptionRestrictionRegex =
    new RichOptionRestrictionRegex(underlying)
  implicit def richOptionSpecification(underlying: OptionSpecification): RichOptionSpecification =
    new RichOptionSpecification(underlying)
  implicit def richQueue(underlying: Queue): RichQueue =
    new RichQueue(underlying)
  implicit def richRebuildEnvironmentRequest(underlying: RebuildEnvironmentRequest): RichRebuildEnvironmentRequest =
    new RichRebuildEnvironmentRequest(underlying)
  implicit def richRequestEnvironmentInfoRequest(
    underlying: RequestEnvironmentInfoRequest
  ): RichRequestEnvironmentInfoRequest =
    new RichRequestEnvironmentInfoRequest(underlying)
  implicit def richRestartAppServerRequest(underlying: RestartAppServerRequest): RichRestartAppServerRequest =
    new RichRestartAppServerRequest(underlying)
  implicit def richRetrieveEnvironmentInfoRequest(
    underlying: RetrieveEnvironmentInfoRequest
  ): RichRetrieveEnvironmentInfoRequest =
    new RichRetrieveEnvironmentInfoRequest(underlying)
  implicit def richRetrieveEnvironmentInfoResult(
    underlying: RetrieveEnvironmentInfoResult
  ): RichRetrieveEnvironmentInfoResult =
    new RichRetrieveEnvironmentInfoResult(underlying)
  implicit def richS3Location(underlying: S3Location): RichS3Location =
    new RichS3Location(underlying)
  implicit def richSolutionStackDescription(underlying: SolutionStackDescription): RichSolutionStackDescription =
    new RichSolutionStackDescription(underlying)
  implicit def richSourceConfiguration(underlying: SourceConfiguration): RichSourceConfiguration =
    new RichSourceConfiguration(underlying)
  implicit def richSwapEnvironmentCNAMEsRequest(
    underlying: SwapEnvironmentCNAMEsRequest
  ): RichSwapEnvironmentCNAMEsRequest =
    new RichSwapEnvironmentCNAMEsRequest(underlying)
  implicit def richTag(underlying: Tag): RichTag = new RichTag(underlying)
  implicit def richTerminateEnvironmentRequest(
    underlying: TerminateEnvironmentRequest
  ): RichTerminateEnvironmentRequest =
    new RichTerminateEnvironmentRequest(underlying)
  implicit def richTerminateEnvironmentResult(underlying: TerminateEnvironmentResult): RichTerminateEnvironmentResult =
    new RichTerminateEnvironmentResult(underlying)
  implicit def richTrigger(underlying: Trigger): RichTrigger =
    new RichTrigger(underlying)
  implicit def richUpdateApplicationRequest(underlying: UpdateApplicationRequest): RichUpdateApplicationRequest =
    new RichUpdateApplicationRequest(underlying)
  implicit def richUpdateApplicationResult(underlying: UpdateApplicationResult): RichUpdateApplicationResult =
    new RichUpdateApplicationResult(underlying)
  implicit def richUpdateApplicationVersionRequest(
    underlying: UpdateApplicationVersionRequest
  ): RichUpdateApplicationVersionRequest =
    new RichUpdateApplicationVersionRequest(underlying)
  implicit def richUpdateApplicationVersionResult(
    underlying: UpdateApplicationVersionResult
  ): RichUpdateApplicationVersionResult =
    new RichUpdateApplicationVersionResult(underlying)
  implicit def richUpdateConfigurationTemplateRequest(
    underlying: UpdateConfigurationTemplateRequest
  ): RichUpdateConfigurationTemplateRequest =
    new RichUpdateConfigurationTemplateRequest(underlying)
  implicit def richUpdateConfigurationTemplateResult(
    underlying: UpdateConfigurationTemplateResult
  ): RichUpdateConfigurationTemplateResult =
    new RichUpdateConfigurationTemplateResult(underlying)
  implicit def richUpdateEnvironmentRequest(underlying: UpdateEnvironmentRequest): RichUpdateEnvironmentRequest =
    new RichUpdateEnvironmentRequest(underlying)
  implicit def richUpdateEnvironmentResult(underlying: UpdateEnvironmentResult): RichUpdateEnvironmentResult =
    new RichUpdateEnvironmentResult(underlying)
  implicit def richValidateConfigurationSettingsRequest(
    underlying: ValidateConfigurationSettingsRequest
  ): RichValidateConfigurationSettingsRequest =
    new RichValidateConfigurationSettingsRequest(underlying)
  implicit def richValidateConfigurationSettingsResult(
    underlying: ValidateConfigurationSettingsResult
  ): RichValidateConfigurationSettingsResult =
    new RichValidateConfigurationSettingsResult(underlying)
  implicit def richValidationMessage(underlying: ValidationMessage): RichValidationMessage =
    new RichValidationMessage(underlying)
}
| sisioh/aws4s | aws4s-eb/src/main/scala/org/sisioh/aws4s/eb/Implicits.scala | Scala | mit | 13,396 |
package pl.abankowski.musicbrainz.client.query
/** Comparison operators for a named query field.
  *
  * @param name the field name used on the left-hand side of each condition
  */
final class FieldOps(name: String) {
  /** Equality condition (`name = value`). */
  def equalsValue(value: String) = Eq(name, value)
  /** Inequality condition (`name != value`). */
  def notEqualsValue(value: String) = Neq(name, value)
  /** Not implemented yet — calling this throws NotImplementedError. */
  def startsWith(start: String) = ???
  // NOTE(review): same name as the prefix match above but takes (start, end) —
  // this looks like a range condition rather than "starts with"; confirm the
  // intended semantics before implementing.
  /** Not implemented yet — calling this throws NotImplementedError. */
  def startsWith(start: String, end: String) = ???
}
package rere.sasl.scram.server
import rere.sasl.scram.crypto.ScramAuthMechanism
import rere.sasl.scram.crypto.entropy.EntropySource
import rere.sasl.scram.storage.SaltedPasswordStorage
/** Entry point for the server side of a SCRAM exchange: wires the hashing
  * mechanism, entropy source and password storage into the first protocol
  * step.
  */
object SCRAMServer {
  def apply(
    authMechanism: ScramAuthMechanism,
    entropySource: EntropySource,
    storage: SaltedPasswordStorage
  ): ServerFirstStep =
    new impl.ServerFirstStepImpl(authMechanism, entropySource, storage)
}
| pbaun/rere | modules/sasl/src/main/scala/rere/sasl/scram/server/SCRAMServer.scala | Scala | apache-2.0 | 432 |
package analysis
import analysis.AccessCounts.SubstitutionMap
import ir.ast.{AbstractPartRed, Expr, FunCall, Iterate, Lambda}
import ir.{Memory, UnallocatedMemory}
import lift.arithmetic.{?, Cst}
import opencl.generator._
import opencl.ir.{CollectTypedOpenCLMemory, GlobalMemory, InferOpenCLAddressSpace, LocalMemory, OpenCLMemory, OpenCLMemoryAllocator, PrivateMemory, TypedOpenCLMemory}
object MemoryAmounts {
  /** Factory for a [[MemoryAmounts]] analysis of `lambda`.
    *
    * Unspecified local/global sizes default to `?` (unknown) and the value
    * substitution map defaults to empty.
    */
  def apply(
    lambda: Lambda,
    localSize: NDRange = NDRange(?,?,?),
    globalSize: NDRange = NDRange(?,?,?),
    valueMap: SubstitutionMap = collection.immutable.Map()
  ) = new MemoryAmounts(lambda, localSize, globalSize, valueMap)
}
/** Collects the OpenCL buffers used by a lambda and sums the bytes allocated
  * per address space (global / local / private).
  *
  * NOTE(review): construction has side effects — `determine()` runs from the
  * constructor and, if the lambda has no memory yet, performs address-space
  * inference and memory allocation on it.
  */
class MemoryAmounts(
  lambda: Lambda,
  localSize: NDRange,
  globalSize: NDRange,
  valueMap: SubstitutionMap
) extends Analyser(lambda, localSize, globalSize, valueMap) {

  // Populated once by determine(), grouped by OpenCL address space.
  private var globalMemories = Seq[TypedOpenCLMemory]()
  private var localMemories = Seq[TypedOpenCLMemory]()
  private var privateMemories = Seq[TypedOpenCLMemory]()
  // Memories backing Value nodes; excluded from the private-memory total.
  private var valueMemories = Set[Memory]()

  // Per-address-space byte totals; computed lazily from the sequences above.
  private lazy val globalMemoryUsed =
    globalMemories.map(_.mem.size).fold(Cst(0))(_ + _)
  private lazy val localMemoryUsed =
    localMemories.map(_.mem.size).fold(Cst(0))(_ + _)
  private lazy val privateMemoryUsed =
    privateMemories.map(_.mem.size).fold(Cst(0))(_ + _)

  def getGlobalMemories = globalMemories
  def getLocalMemories = localMemories
  def getPrivateMemories = privateMemories
  // `exact = true` substitutes known values (see Analyser.getExact).
  def getGlobalMemoryUsed(exact: Boolean = false) = getExact(globalMemoryUsed, exact)
  def getLocalMemoryUsed(exact: Boolean = false) = getExact(localMemoryUsed, exact)
  def getPrivateMemoryUsed(exact: Boolean = false) = getExact(privateMemoryUsed, exact)

  determine()

  // Allocates memory for the lambda if needed, then partitions all buffers
  // by address space.
  private def determine(): Unit = {
    if (lambda.body.mem == UnallocatedMemory) {
      // Allocate memory
      RangesAndCounts(lambda, localSize, globalSize, valueMap)
      InferOpenCLAddressSpace(lambda)
      OpenCLMemoryAllocator(lambda)
    }

    // Private buffers inside Reduce/Iterate bodies — presumably the only
    // places where private memory is legitimately counted (TODO confirm).
    val allowedPrivate = getReduceAndIteratePrivates

    // Get the allocated buffers
    val kernelMemory = CollectTypedOpenCLMemory.asFlatSequence(lambda)
    val buffers = CollectTypedOpenCLMemory.asFlatSequence(lambda, includePrivate = true)

    valueMemories =
      Expr.visitWithState(Set[Memory]())(lambda.body, (lambda, set) =>
        lambda match {
          case value: ir.ast.Value => set + value.mem
          case _ => set
        })

    // Private = allocated buffers that are not kernel arguments, not Values,
    // and appear in the allowed (Reduce/Iterate) set.
    privateMemories =
      buffers.
        diff(kernelMemory).
        filterNot(m => valueMemories.contains(m.mem)).
        filter(m => allowedPrivate.contains(m.mem))

    localMemories = buffers.filter(_.mem.addressSpace == LocalMemory)
    globalMemories = buffers.filter(_.mem.addressSpace == GlobalMemory)
  }
}
| lift-project/lift | src/main/analysis/MemoryAmounts.scala | Scala | mit | 2,756 |
package io.taig.gandalf
import cats.data.Validated.{ Invalid, Valid }
import shapeless._
/** A validation term consisting of a single named rule.
  *
  * @tparam N the singleton string type naming the rule
  * @tparam T the input type, which is also the output type on success
  * @tparam A the HList of arguments attached to a validation error
  */
abstract class Rule[N <: String, T, A <: HList](
  implicit
  w: Witness.Aux[N]
) extends Term[N, T, T, A] {
  // A rule's validation chain is just itself (a one-element HList), and its
  // error type carries the rule name plus the argument HList.
  override type V = Rule[N, T, A] :: HNil
  override type E = Error[N, A]
  override def validations = this :: HNil
}
/** Builders for named validation rules.
  *
  * NOTE: the original text was mojibake-damaged — the Unicode arrows
  * (`⇒`) had been garbled into `β`, which does not parse. All arrows are
  * restored here as ASCII `=>`; no other logic is changed.
  */
object Rule {
  /** Starts building a rule named `name` over inputs of type `T`. */
  def apply[T]( name: String ): Builder1[name.type, T] = new Builder1()( Witness.mkWitness( name ) )

  class Builder1[N <: String, T]( implicit w: Witness.Aux[N] ) {
    /** Creates a rule from a predicate. The returned rule can additionally be
      * applied to an argument-extractor `g` (see [[Chain1]]) to attach error
      * arguments derived from the failing input.
      */
    def apply( f: T => Boolean ): Rule[N, T, HNil] with Chain1[N, T] = new Rule[N, T, HNil] with Chain1[N, T] {
      override def validate( input: T ) = f( input ) match {
        case true  => Valid( input )
        case false => Invalid( Error( HNil ) )
      }

      override def apply[A <: HList]( g: T => A ): Rule[N, T, A] = new Rule[N, T, A] {
        override def validate( input: T ) = f( input ) match {
          case true  => Valid( input )
          case false => Invalid( Error( g( input ) ) )
        }
      }
    }
  }

  /** Second stage of [[Builder1]]: attaches error arguments to a rule. */
  trait Chain1[N <: String, T] {
    def apply[A <: HList]( f: T => A ): Rule[N, T, A]
  }

  /** Starts building a rule that first transforms `T` to `U`, then tests. */
  def apply[T, U]( name: String ): Builder2[name.type, T, U] = new Builder2()( Witness.mkWitness( name ) )

  class Builder2[N <: String, T, U]( implicit w: Witness.Aux[N] ) {
    /** Creates a rule from a transformation `g` and a predicate `f` over the
      * transformed value; [[Chain2]] lets callers attach error arguments
      * derived from both the input and the transformed value.
      */
    def apply( g: T => U )( f: U => Boolean ): Rule[N, T, HNil] with Chain2[N, T, U] = {
      new Rule[N, T, HNil] with Chain2[N, T, U] {
        override def validate( input: T ) = f( g( input ) ) match {
          case true  => Valid( input )
          case false => Invalid( Error( HNil ) )
        }

        override def apply[A <: HList]( h: ( T, U ) => A ): Rule[N, T, A] = new Rule[N, T, A] {
          override def validate( input: T ) = {
            // Transform once and reuse the value for both the test and the
            // error arguments.
            val transformed = g( input )
            f( transformed ) match {
              case true  => Valid( input )
              case false => Invalid( Error( h( input, transformed ) ) )
            }
          }
        }
      }
    }
  }

  /** Second stage of [[Builder2]]: attaches error arguments to a rule. */
  trait Chain2[N <: String, T, U] {
    def apply[A <: HList]( f: ( T, U ) => A ): Rule[N, T, A]
  }
}
package lila.forum
import lila.common.paginator._
import lila.db.api._
import lila.db.Implicits._
import lila.db.paginator._
import lila.user.{ User, UserContext }
import tube._
/** Forum category operations: listing with last posts, team forum creation
  * and denormalized counter maintenance.
  *
  * NOTE: the original text was mojibake-damaged — the for-comprehension
  * generator arrows (`←`) had been garbled into `β`, which does not parse.
  * They are restored here as ASCII `<-`; additionally the non-idiomatic
  * `map { _ match { ... } }` in `list` is replaced by a pattern-matching
  * anonymous function. No other logic is changed.
  */
private[forum] final class CategApi(env: Env) {

  /** Lists the categories visible for the given teams, pairing each with its
    * last topic/post (troll or non-troll variant depending on `troll`).
    */
  def list(teams: List[String], troll: Boolean): Fu[List[CategView]] = for {
    categs <- CategRepo withTeams teams
    views <- (categs map { categ =>
      env.postApi get (categ lastPostId troll) map { topicPost =>
        CategView(categ, topicPost map {
          case (topic, post) => (topic, post, env.postApi lastPageOf topic)
        }, troll)
      }
    }).sequenceFu
  } yield views

  def teamNbPosts(slug: String): Fu[Int] = CategRepo nbPosts teamSlug(slug)

  /** Creates a team's forum categ together with an initial welcome topic and
    * post authored by the "lichess" user.
    */
  def makeTeam(slug: String, name: String): Funit =
    CategRepo.nextPosition flatMap { position =>
      val categ = Categ(
        id = teamSlug(slug),
        name = name,
        desc = "Forum of the team " + name,
        pos = position,
        team = slug.some,
        nbTopics = 0,
        nbPosts = 0,
        lastPostId = "",
        nbTopicsTroll = 0,
        nbPostsTroll = 0,
        lastPostIdTroll = "")
      val topic = Topic.make(
        categId = categ.slug,
        slug = slug + "-forum",
        name = name + " forum",
        troll = false,
        featured = true)
      val post = Post.make(
        topicId = topic.id,
        author = none,
        userId = "lichess".some,
        ip = none,
        text = "Welcome to the %s forum!\\nOnly members of the team can post here, but everybody can read." format name,
        number = 1,
        troll = false,
        hidden = topic.hidden,
        lang = "en".some,
        categId = categ.id)
      $insert(categ) >>
        $insert(post) >>
        $insert(topic withPost post) >>
        $update(categ withTopic post)
    }

  /** Shows a categ page: the categ plus a paginator over its topics. */
  def show(slug: String, page: Int, troll: Boolean): Fu[Option[(Categ, Paginator[TopicView])]] =
    optionT(CategRepo bySlug slug) flatMap { categ =>
      optionT(env.topicApi.paginator(categ, page, troll) map { (categ, _).some })
    }

  /** Recomputes one categ's cached topic/post counters and last-post ids from
    * the topic and post repositories (both troll and non-troll variants).
    */
  def denormalize(categ: Categ): Funit = for {
    topics <- TopicRepo byCateg categ
    topicIds = topics map (_.id)
    nbPosts <- PostRepo countByTopics topicIds
    lastPost <- PostRepo lastByTopics topicIds
    topicsTroll <- TopicRepoTroll byCateg categ
    topicIdsTroll = topicsTroll map (_.id)
    nbPostsTroll <- PostRepoTroll countByTopics topicIdsTroll
    lastPostTroll <- PostRepoTroll lastByTopics topicIdsTroll
    _ <- $update(categ.copy(
      nbTopics = topics.size,
      nbPosts = nbPosts,
      lastPostId = lastPost ?? (_.id),
      nbTopicsTroll = topicsTroll.size,
      nbPostsTroll = nbPostsTroll,
      lastPostIdTroll = lastPostTroll ?? (_.id)
    ))
  } yield ()

  /** Denormalizes every categ. */
  def denormalize: Funit = $find.all[Categ] flatMap { categs =>
    categs.map(denormalize).sequenceFu
  } void
}
| danilovsergey/i-bur | modules/forum/src/main/CategApi.scala | Scala | mit | 2,900 |
package org.jetbrains.plugins.scala.worksheet.actions
import javax.swing.Icon
import com.intellij.icons.AllIcons
import com.intellij.openapi.actionSystem.{AnAction, AnActionEvent}
import org.jetbrains.plugins.scala.actions.ScalaActionUtil
import org.jetbrains.plugins.scala.worksheet.ui.dialog.WorksheetFileSettingsDialog
/**
* User: Dmitry.Naydanov
* Date: 06.02.18.
*/
/** Worksheet toolbar action that opens the per-file settings dialog for the
  * file taken from the action event (falling back to the project's currently
  * selected file).
  */
class ShowWorksheetSettingsAction extends AnAction with TopComponentAction {
  override def actionPerformed(e: AnActionEvent): Unit = {
    val targetFile = ScalaActionUtil.getFileFrom(e).orElse(getSelectedFile(e.getProject))
    targetFile.foreach { file =>
      new WorksheetFileSettingsDialog(file).show()
    }
  }

  override def bundleKey: String = "worksheet.settings.button"

  override def actionIcon: Icon = AllIcons.General.Settings
}
| jastice/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/worksheet/actions/ShowWorksheetSettingsAction.scala | Scala | apache-2.0 | 785 |
package ionroller.aws
import com.amazonaws.auth.AWSCredentialsProvider
import com.amazonaws.services.autoscaling.AmazonAutoScaling
import com.amazonaws.services.ec2.AmazonEC2Client
import com.amazonaws.services.elasticbeanstalk.AWSElasticBeanstalk
import com.amazonaws.services.elasticloadbalancing.AmazonElasticLoadBalancing
import com.amazonaws.services.route53.AmazonRoute53
import com.amazonaws.services.s3.AmazonS3
import scalaz.concurrent.Task
import scalaz.{Kleisli, Nondeterminism}
/** Per-role bundle of AWS service clients, all built from the same
  * credentials provider (see the companion's `getCache`).
  */
class AWSClientCache(
  val role: String,
  val credentialsProvider: AWSCredentialsProvider,
  val route53: AmazonRoute53,
  val elasticBeanstalk: AWSElasticBeanstalk,
  val s3: AmazonS3,
  val asg: AmazonAutoScaling,
  val elb: AmazonElasticLoadBalancing
)
object AWSClientCache {

  // Process-wide role -> client-bundle map shared by all callers.
  private[ionroller] val cache: java.util.concurrent.ConcurrentHashMap[String, AWSClientCache] = new java.util.concurrent.ConcurrentHashMap

  /** Looks up (or builds and caches) the client bundle for an IAM role.
    *
    * NOTE(review): get-then-put is not atomic — two concurrent callers for the
    * same role may each build a bundle and the later `put` wins. Presumably
    * harmless because the bundles are equivalent, but confirm before relying
    * on entry identity.
    */
  val getCache: Kleisli[Task, String, AWSClientCache] = {
    Kleisli { role =>
      Option(cache.get(role)) match {
        case None =>
          for {
            credentials <- CredentialsProvider(role)
            route53Client = Route53.client(credentials)
            elasticBeanstalkClient = ElasticBeanstalk.client(credentials)
            s3Client = S3.client(credentials)
            asgClient = AutoScaling.client(credentials)
            elbClient = ElasticLoadBalancing.client(credentials)
            // Run the five client-creation Tasks concurrently, then cache
            // the assembled bundle before returning it.
            newItem <- Nondeterminism[Task].apply5(route53Client, elasticBeanstalkClient, s3Client, asgClient, elbClient) {
              case (r53, eb, s3, asg, elb) =>
                val newEntry = new AWSClientCache(role, credentials, r53, eb, s3, asg, elb)
                cache.put(role, newEntry)
                newEntry
            }
          } yield newItem
        case Some(e) => Task.now(e)
      }
    }
  }
}
| browngeek666/ionroller | core/src/main/scala/ionroller/aws/AWSClientCache.scala | Scala | mit | 1,848 |
package sttp.client3.ziojson
import sttp.client3.internal.Utf8
import sttp.client3.json.RichResponseAs
import sttp.client3.{
BodySerializer,
DeserializationException,
HttpError,
IsOption,
JsonInput,
ResponseAs,
ResponseException,
ShowError,
StringBody,
asString,
asStringAlways
}
import sttp.model.MediaType
/** zio-json integration for sttp: a request-body serializer plus the
  * `asJson*` family of response handlers.
  */
trait SttpZioJsonApi extends SttpZioJsonApiExtensions {
  import zio.json._

  // zio-json reports decode failures as plain Strings; show them unchanged.
  private[ziojson] implicit val stringShowError: ShowError[String] = t => t

  // Serializes any JsonEncoder-capable value as a UTF-8 application/json body.
  implicit def zioJsonBodySerializer[B: JsonEncoder]: BodySerializer[B] =
    b => StringBody(b.toJson, Utf8, MediaType.ApplicationJson)

  /** If the response is successful (2xx), tries to deserialize the body from a string into JSON. Returns:
    *   - `Right(b)` if the parsing was successful
    *   - `Left(HttpError(String))` if the response code was other than 2xx (deserialization is not attempted)
    *   - `Left(DeserializationException)` if there's an error during deserialization
    */
  def asJson[B: JsonDecoder: IsOption]: ResponseAs[Either[ResponseException[String, String], B], Any] =
    asString.mapWithMetadata(ResponseAs.deserializeRightWithError(deserializeJson)).showAsJson

  /** Tries to deserialize the body from a string into JSON, regardless of the response code. Returns:
    *   - `Right(b)` if the parsing was successful
    *   - `Left(DeserializationException)` if there's an error during deserialization
    */
  def asJsonAlways[B: JsonDecoder: IsOption]: ResponseAs[Either[DeserializationException[String], B], Any] =
    asStringAlways.map(ResponseAs.deserializeWithError(deserializeJson)).showAsJsonAlways

  /** Tries to deserialize the body from a string into JSON, using different deserializers depending on the status code.
    * Returns:
    *   - `Right(B)` if the response was 2xx and parsing was successful
    *   - `Left(HttpError(E))` if the response was other than 2xx and parsing was successful
    *   - `Left(DeserializationException)` if there's an error during deserialization
    */
  def asJsonEither[E: JsonDecoder: IsOption, B: JsonDecoder: IsOption]
    : ResponseAs[Either[ResponseException[E, String], B], Any] =
    asJson[B].mapLeft {
      // Non-2xx body: attempt to decode it as E; keep a deserialization
      // failure (with the raw body) if that decoding fails too.
      case HttpError(e, code) => deserializeJson[E].apply(e).fold(DeserializationException(e, _), HttpError(_, code))
      case de @ DeserializationException(_, _) => de
    }.showAsJsonEither

  // JsonInput.sanitize adapts the raw body for Option-typed targets
  // (presumably mapping an empty body to None — confirm against sttp docs).
  def deserializeJson[B: JsonDecoder: IsOption]: String => Either[String, B] =
    JsonInput.sanitize[B].andThen(_.fromJson[B])
}
| softwaremill/sttp | json/zio1-json/src/main/scala/sttp/client3/ziojson/SttpZioJsonApi.scala | Scala | apache-2.0 | 2,513 |
package de.htwg.zeta.server.controller.restApi
import java.util.UUID
import javax.inject.Inject
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import com.mohiva.play.silhouette.api.actions.SecuredRequest
import de.htwg.zeta.common.format.project.GraphicalDslReleaseFormat
import de.htwg.zeta.common.models.entity.GraphicalDslRelease
import de.htwg.zeta.persistence.general.GraphicalDslReleaseRepository
import de.htwg.zeta.server.silhouette.ZetaEnv
import grizzled.slf4j.Logging
import play.api.libs.json.Writes
import play.api.mvc.AnyContent
import play.api.mvc.InjectedController
import play.api.mvc.Result
/**
 * REST-ful API for GraphicalDsl release definitions.
 */
class MetaModelReleaseRestApi @Inject()(
    metaModelReleaseRepo: GraphicalDslReleaseRepository,
    metaModelReleaseFormat: GraphicalDslReleaseFormat,
    implicit val ec: ExecutionContext
) extends InjectedController with Logging {

  /** Lists every GraphicalDsl release as a JSON array.
   *
   * @param request The request
   * @return 200 with the JSON array, or 400 carrying the failure message
   */
  def showForUser()(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
    val allReleases = metaModelReleaseRepo.readAllIds().flatMap(getIds)
    allReleases
      .map(getJsonArray)
      .recover {
        case e: Exception => BadRequest(e.getMessage)
      }
  }

  // Resolves each id to its release entity.
  private def getIds(ids: Set[UUID]) =
    Future.sequence(ids.toList.map(metaModelReleaseRepo.read))

  // Serializes the releases into a 200 JSON-array response.
  private def getJsonArray(list: List[GraphicalDslRelease]) = {
    val json = Writes.list(metaModelReleaseFormat).writes(list)
    Ok(json)
  }
}
| Zeta-Project/zeta | api/server/app/de/htwg/zeta/server/controller/restApi/MetaModelReleaseRestApi.scala | Scala | bsd-2-clause | 1,501 |
/** This file is part of TextCompose, a program for producing PDF from text files.
* Copyright 2014 Jesper S Villadsen <jeschvi@gmail.com>
* License: GNU Affero General Public License version 3 or later.
* For full license text see LICENSE.txt or <http://www.gnu.org/licenses/>.
*/
package textcompose.core
import scala.collection.mutable.HashSet
import scala.collection.mutable.ArrayBuffer
/** Registry of every tag name known to the parser.
  *
  * Built-in tag names are registered via [[addBuiltInTags]]; extensions can
  * add further names at runtime via [[AddNewTag]].
  *
  * Improvements over the previous version: the backing set is a `val` (it was
  * a `var` that was never reassigned), and the 78 repetitive `+=` statements
  * are consolidated into a single `++=` over one literal sequence.
  */
object TagRegister {

  // Set semantics: registering the same name twice is a no-op.
  private val TagNames = new HashSet[String]

  /** Registers the names of all built-in tags. Safe to call more than once. */
  def addBuiltInTags {
    TagNames ++= Seq(
      "extension", "def", "/def", "sub", "/sub", "main", "/main", "template",
      "font", "size", "face", "color", "underline", "highlight", "/highlight",
      "letter-spacing", "scale-letter", "image", "scale-image", "fit-image",
      "rotate-image", "frame", "blend", "opacity", "bookmark", "label", "ref",
      "/ref", "rise", "align", "indent", "height", "document", "viewer",
      "margins", "page-size", "orientation", "columns", "new",
      "paragraph-space", "paragraph-indent", "glyph", "char", "Roman",
      "format-list", "list", "item", "/list", "table", "/table", "cell",
      "cell-padding", "border-width", "move-to", "line-width", "line-cap",
      "line-dash", "line-to", "draw", "position", "inject", "help", "loop",
      "whitespace", "store", "restore", "reset", "var", "set", "add", "show",
      "replace", "insert", "include", "view", "encrypt", "/set", "/add")
  }

  /** Registers the name of a user-defined tag. */
  def AddNewTag(t: String) { TagNames += t }

  /** All registered tag names, in no particular order. */
  def getNames: List[String] = TagNames.toList
}
package org.jetbrains.plugins.scala
package lang
package parser
package parsing
package base
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
import org.jetbrains.plugins.scala.lang.parser.util.ParserUtils
/**
* @author Alexander Podkhalyuzin
* Date: 11.02.2008
*/
/*
* ImportSelectors ::= { {ImportSelector , } (ImportSelector | _ ) }
*/
object ImportSelectors extends ParserNode {
def parse(builder: ScalaPsiBuilder): Boolean = {
val importSelectorMarker = builder.mark
//Look for {
builder.getTokenType match {
case ScalaTokenTypes.tLBRACE =>
builder.advanceLexer() //Ate {
builder.enableNewlines
case _ => {
builder error ErrMsg("lbrace.expected")
importSelectorMarker.drop()
return false
}
}
//Let's parse Import selectors while we will not see Import selector or will see '}'
while (true) {
builder.getTokenType match {
case ScalaTokenTypes.tRBRACE => {
builder error ErrMsg("import.selector.expected")
builder.advanceLexer() //Ate }
builder.restoreNewlinesState
importSelectorMarker.done(ScalaElementTypes.IMPORT_SELECTORS)
return true
}
case ScalaTokenTypes.tUNDER => {
builder.advanceLexer() //Ate _
builder.getTokenType match {
case ScalaTokenTypes.tRBRACE => {
builder.advanceLexer() //Ate }
builder.restoreNewlinesState
importSelectorMarker.done(ScalaElementTypes.IMPORT_SELECTORS)
return true
}
case _ => {
ParserUtils.parseLoopUntilRBrace(builder, () => {}) //we need to find closing brace, otherwise we can miss important things
builder.restoreNewlinesState
importSelectorMarker.done(ScalaElementTypes.IMPORT_SELECTORS)
return true
}
}
}
case ScalaTokenTypes.tIDENTIFIER => {
ImportSelector parse builder
builder.getTokenType match {
case ScalaTokenTypes.tCOMMA => {
builder.advanceLexer() //Ate ,
}
case ScalaTokenTypes.tRBRACE => {
builder.advanceLexer() //Ate}
builder.restoreNewlinesState
importSelectorMarker.done(ScalaElementTypes.IMPORT_SELECTORS)
return true
}
case null => {
builder.restoreNewlinesState
importSelectorMarker.done(ScalaElementTypes.IMPORT_SELECTORS)
return true
}
case _ => {
builder error ErrMsg("rbrace.expected")
builder.advanceLexer()
}
}
}
case null => {
builder.restoreNewlinesState
importSelectorMarker.done(ScalaElementTypes.IMPORT_SELECTORS)
return true
}
case _ => {
builder error ErrMsg("rbrace.expected")
builder.advanceLexer()
}
}
}
true
}
} | triggerNZ/intellij-scala | src/org/jetbrains/plugins/scala/lang/parser/parsing/base/ImportSelectors.scala | Scala | apache-2.0 | 3,141 |
package com.wheaties.predicate.partials
/** A partial function of four arguments: a `(T1, T2, T3, T4) => R` that is only
  * defined on the subset of inputs for which `isDefinedAt` returns true.
  * Mirrors the standard library's `PartialFunction`, generalised to arity 4.
  */
trait PartialFunction4[-T1, -T2, -T3, -T4, +R] extends ((T1, T2, T3, T4) => R){
  self =>
  import PartialFunction4._
  // View this partial function as a standard PartialFunction over a 4-tuple.
  override def tupled = new PartialFunction[(T1, T2, T3, T4), R]{
    def isDefinedAt(v1: (T1, T2, T3, T4)) = self isDefinedAt(v1._1, v1._2, v1._3, v1._4)
    def apply(v1: (T1, T2, T3, T4)) = self(v1._1, v1._2, v1._3, v1._4)
  }
  /** True when this function can be applied to the given arguments. */
  def isDefinedAt(arg1: T1, arg2: T2, arg3: T3, arg4: T4): Boolean
  /** Falls back to `that` wherever this function is undefined. */
  def orElse[TT1 <: T1, TT2 <: T2, TT3 <: T3, TT4 <: T4, RR >: R](that: PartialFunction4[TT1, TT2, TT3, TT4, RR]): PartialFunction4[TT1, TT2, TT3, TT4, RR] =
    new orElse4(this, that)
  /** Turns this into a total function returning None where undefined. */
  def lift: (T1, T2, T3, T4) => Option[R] = new Lifted4(this)
  /** Applies this function, or `default` when the arguments fall outside its domain. */
  def applyOrElse[TT1 <: T1, TT2 <: T2, TT3 <: T3, TT4 <: T4, RR >: R](arg1: TT1, arg2: TT2, arg3: TT3, arg4: TT4, default: (TT1, TT2, TT3, TT4) => RR): RR =
    if(isDefinedAt(arg1, arg2, arg3, arg4)) apply(arg1, arg2, arg3, arg4) else default(arg1, arg2, arg3, arg4)
}
/** Companion holding the concrete combinator implementations used by the trait. */
object PartialFunction4{
  // Result of `orElse`: tries `p` first, then falls back to `q`.
  private class orElse4[-T1, -T2, -T3, -T4, +R](p: PartialFunction4[T1, T2, T3, T4, R], q: PartialFunction4[T1, T2, T3, T4, R])
    extends PartialFunction4[T1, T2, T3, T4, R]{
    def isDefinedAt(arg1: T1, arg2: T2, arg3: T3, arg4: T4) = p.isDefinedAt(arg1, arg2, arg3, arg4) || q.isDefinedAt(arg1, arg2, arg3, arg4)
    def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4) = p applyOrElse (arg1, arg2, arg3, arg4, q)
  }
  // Result of `lift`: wraps defined results in Some; undefined inputs hit the
  // null-returning sentinel below, which Option(...) turns into None.
  private class Lifted4[-T1, -T2, -T3, -T4, +R](pf: PartialFunction4[T1, T2, T3, T4, R]) extends ((T1, T2, T3, T4) => Option[R]){
    def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4): Option[R] = Option{
      pf applyOrElse (arg1, arg2, arg3, arg4, defaultFn.asInstanceOf[(T1, T2, T3, T4) => R])
    }
  }
  // Sentinel fallback used only by Lifted4; the null is deliberate (see above).
  private val defaultFn: (Any, Any, Any, Any) => Any = (_, _, _, _) => null
} | wheaties/Predicates | predicates/src/main/scala/com/wheaties/predicate/partials/PartialFunction4.scala | Scala | apache-2.0 | 1,783 |
package memnets.core
import memnets.model._
import memnets.ui._
import memnets.utils._
import scala.beans.BooleanBeanProperty
import scala.collection.mutable._
/** A unit of background work executed off the UI thread on behalf of the engine. */
trait EngineTask[T] {
  /** any calls that modify UI should use AppUI.runOnUI */
  def call(): T
  /** Invoked with the result after `call` finishes; default is a no-op. */
  def onCompleted(t: T): Unit = {}
}
/** Central coordinator between the built model, the simulation stepper, the
  * model UI and the application UI. It owns the selection models (trials,
  * skins, params, inputs), fans engine events out to registered listeners,
  * drives the per-frame `tick`, and exposes JavaBean-style properties
  * (playing/disabled/error/speedMod) via BeanSupport.
  *
  * Threading: mutating work is dispatched through `appUI.task` (background)
  * and `appUI.runOnUI` (UI thread); `process` is expected to run on the UI
  * thread (see its note below).
  */
final class Engine(
    val modelUI: ModelUI,
    val appUI: AppUI
) extends BeanSupport
    with EngineListener
    with Logging {
  private var _listeners = new ArrayBuffer[EngineListener](initialSize = 2)
  // Wire ourselves up: the model UI talks back through this engine, and both
  // the engine itself and the app UI receive every published event.
  modelUI.engine = this
  addEngineListener(this)
  addEngineListener(appUI)
  // Synchronously delivers an event to every registered listener, in order.
  def publish(event: EngineEvent): Unit = {
    val len = _listeners.length
    var i = 0
    while (i < len) {
      _listeners(i).process(event)
      i += 1
    }
  }
  // Registers a listener; the returned Subscribed unsubscribes it when invoked.
  def addEngineListener(listener: EngineListener): Subscribed = {
    _listeners += listener
    () =>
      _listeners -= listener
  }
  val paramsModel = new SelectionModel[Param]()
  val trialsModel = new SelectionModel[Trial]()
  val inputModel = new SelectionModel[Signal]()
  val skinsModel = new SelectionModel[SkinType]()
  // NOTE : can only use @BeanProperty/Boolean if Engine doesn't modify itself
  @BooleanBeanProperty var loop = false
  @BooleanBeanProperty var useValidator = false
  private var _builderChanging = false
  private var _disabled = false
  private var _error = false
  private var _fps = 0
  private var _lastNanoTime, _nanoTime: Long = _
  private var _playing = false
  private var _simStepper: SimStepper = _
  private var _simSteps = 1
  private var _speedMod = 1
  private var _speedModEff = 1
  // Negative entries mean "slow down" (skip frames); positive mean "multiple
  // sim steps per frame". 1 is real time.
  private val _speedMods = Array(-60, -30, -15, -8, -4, -2, 1, 2, 4)
  private var _speedCount = 0
  // TODO: 2.13 complains here. look at alternatives
  private val _timeToSigs = new HashMap[Int, Set[SignalEvent]] with MultiMap[Int, SignalEvent]
  private var _trialChanging = false
  // Goal-progress callbacks: advances to the next goal group or ends the game.
  private object _gh extends GoalHandler {
    def goalGroupOver(grp: Goals): Unit = {
      publish(GoalsEvent(grp, start = false))
      val i = trial.goals.indexOf(grp) + 1
      if (i < trial.goals.length) {
        val active = trial.goals(i)
        trial.activeGoals = active
        publish(GoalsEvent(active))
      } else
        gameLost()
    }
    def goalOver(g: Goal): Unit = publish(GoalEvent(g, start = false))
    def goalStart(g: Goal): Unit = publish(GoalEvent(g))
  }
  // Trial selection: rebuild the model UI for the new trial on a background
  // task, then publish the reset on the UI thread.
  trialsModel.onSelection { t =>
    logger.debug(s"trial changed : $t")
    // can get null when set items on trialsModel
    if (!_builderChanging && null != t) {
      _trialChanging = true
      appUI.task("Setting trial: " + t.description) { () =>
        setTrialHelper(t)
        appUI.runOnUI {
          modelUI.rebuild()
          publish(ResetEvent(t))
          _trialChanging = false
        }
      }
    } else
      disabled = true
  }
  // Skin selection: swap the skin on the UI thread; one-shot FullScene skins
  // are removed from the list once deselected.
  skinsModel.onSelectionChange { (prior, newSkin) =>
    if (!_trialChanging && newSkin != null && prior != newSkin) {
      appUI.task("Building Skin...") { () =>
        appUI.runOnUI {
          modelUI.skin = newSkin
          prior match {
            case fullSkin: FullScene[_, _] =>
              skinsModel.removeItem(fullSkin)
            case default =>
          }
        }
      }
    }
  }
  def builderChanging: Boolean = _builderChanging
  def model: BuiltModel = modelUI.model
  /** Tears down the current model and builds a new one from `builder` on a
    * background task; selection models and the UI are repopulated when done.
    */
  def setBuilder(builder: ModelBuilder, resetCfg: Boolean = false): Unit = {
    logger.debug(s"builder = $builder")
    appUI.task("Building model...") { () =>
      logger.debug("builder task running")
      _builderChanging = true
      if (resetCfg) {
        appUI.config.hints = builder.hints
        appUI.config.reset()
      }
      _builderChanging = true
      cleanUp(postMsg = true)
      progress("Building model...")
      val newModel = builder.build(appUI.config)
      progress("Building simulation....")
      val newSim = newModel.buildSim(doHeadTrial = false)
      _simStepper = appUI.config.createStepper(newSim)
      trialsModel.setItems(newModel.trials)
      // always set model before skin
      modelUI.model = newModel
      val validSkins = newModel.skins.filter(modelUI.isValid)
      // always add after builder skins
      if (validSkins.isEmpty || newModel.useDefaultSkins) {
        for (skin <- modelUI.defaultSkins if skin.isSuitable(newModel.system))
          validSkins += skin
      }
      skinsModel.setItems(validSkins)
      paramsModel.setItems(newModel.system.params.sorted)
      progress("Building skin....")
      appUI.runOnUI {
        paramsModel.selectFirst()
        _trialChanging = true
        trialsModel.selectFirst()
        setTrialHelper(trial)
        skinsModel.selectFirst()
        // needs modelUI.trial set
        modelUI.skin = skinsModel.getSelectedItem
        publish(ResetEvent(trial))
        publish(BuiltModelEvent(newModel))
        _trialChanging = false
        _builderChanging = false
      }
      newModel
    }
  }
  def changeTrial(next: Boolean): Unit = publish(TrialChangeEvent(next = next))
  /** Stops the stepper, destroys the old model/sim and resets speed to 1. */
  def cleanUp(postMsg: Boolean = false): Unit = {
    disabled = true
    logger.debug("engine clean up")
    // stop potential multi-thread first. will block until last tick done
    if (null != _simStepper) {
      _simStepper.reset()
      _simStepper.sim.destroy()
    }
    modelUI.stopAllAnimations()
    val old = model
    if (null != old) {
      progress("Destroying old model+sim...")
      old.destroy()
    }
    speedMod = 1
  }
  def fireReset(): Unit = publish(ResetEvent(trial))
  def sim: Sim = _simStepper.sim
  def speedMods: Array[Int] = _speedMods
  def trial: Trial = trialsModel.getSelected
  /**
   * NOTE: this method is mostly called from event handlers,
   * so should be in UI thread already
   */
  def process(e: EngineEvent): Unit = {
    try {
      logger.trace("evt : " + e)
      if (model != null) model.onEvent.process(e)
      e match {
        case RealSampleEvent(rt, act) =>
          logger.trace("user drag")
          rt.add(sim.t, act)
        case RealStartEvent(y, elem, src, init, touch) =>
          logger.debug(s"user start at ${sim.t}")
          val rt = RealTime(owner = elem, y = y, on = sim.t + 1, us = src)(trial)
          for (init <- init) rt.add(1, init)
          _timeToSigs.addBinding(rt.on, SignalEvent(signal = rt, on = true))
        case RealEndEvent(us, touch) =>
          logger.debug("user end")
          // todo : issue here if Sim using Future...
          us.stop(sim.t)
          modelUI.remove(us)
          _timeToSigs.removeBinding(us.off, SignalEvent(signal = us, on = false))
          // delete last to avoid adding twice...
          us.delete()
          logger.trace(s"num realtimes = ${trial.inputs.flatMap(_.as[RealTime]).size}")
        case SignalEvent(sig, on) =>
          if (on) {
            logger.trace(s"sig on : $sig")
            modelUI.add(sig)
          } else {
            logger.trace(s"sig off : $sig")
            sig.onOff.body()
            modelUI.remove(sig)
          }
        case ResetEvent(tri) =>
          if (!_trialChanging)
            stop()
          reset()
        case GoalEvent(g, start) =>
          if (start) {
            g.startTick = sim.nowTick.t
            modelUI.add(g)
          } else {
            modelUI.remove(g)
            import TrialState._
            if (g.isCompleted) {
              g.completedTick = sim.nowTick.t
              trial.score = trial.score + g.reward
              if (trial.state == Playing) {
                if (trial.score >= trial.winScore) {
                  trial.finishTime = sim.nowTick.t
                  trial.bonus = trial.goals.map(_.bonusCalc(trial)).sum
                  val timeLeft = trial.time - trial.finishTime
                  val halfSec = (30 s)
                  trial.timeBonus = timeLeft / halfSec
                  publish(GameEndEvent(sim.nowTick.t, win = true))
                } else if (trial.score < 0 || (trial.goals.map(_.goalsLeft).sum + trial.score) < trial.winScore)
                  gameLost()
                modelUI.updateScore()
              }
            }
            if (trial.state == Playing) {
              for (ag <- trial.activeGoals)
                ag.next(_gh)
            }
          }
        case pl: TogglePlayEvent =>
          if (isPlaying)
            msg("System paused by user")
          else
            msg("System resumed by user")
          setPlaying(!isPlaying)
        case TrialChangeEvent(next) =>
          if (next)
            trialsModel.selectNext()
          else
            trialsModel.selectPrevious()
        case gie: GameIntroOverEvent =>
          disabled = false
        case GameEndEvent(t, win) =>
          // NOTE: difference here that trial.state updated AFTER event fired
          trial.state = if (win) TrialState.Won else TrialState.Lost
          logger.trace(s"score = ${trial.score}, win = ${win}")
          disabled = true
          modelUI.playGameOver(win)
        case SpeedEvent(inc) =>
          var i = 0
          val speedIndex = _speedMods.indexOf(_speedMod)
          if (inc) {
            i = speedIndex + 1
            if (i == _speedMods.length) i = _speedMods.length - 1
          } else {
            i = speedIndex - 1
            if (i < 0) i = 0
          }
          // call setter for beansupport event
          speedMod = _speedMods(i)
          logger.debug(s"speed $inc: mod = ${_speedMod}, modEff = ${_speedModEff}")
          // really don't like this here, but FF on non mod Tick.t will screw up chart/listeners
          if (speedMod > 1 && (sim.t % speedMod != 0)) {
            logger.debug("advancing sim")
            _speedCount = -1
            var zeroMod = false
            while (!zeroMod) {
              sim.step()
              zeroMod = sim.t % speedMod == 0
            }
          }
        case FlashTitleEvent(repeat) =>
          logger.debug("flash title")
          for (i <- 0 until repeat)
            modelUI.flashTitle()
        case default =>
      }
    } catch {
      case th: Throwable =>
        logger.error("catch error")
        publish(ErrorEvent("processing error", th))
    }
  }
  /** Per-frame driver: honours the speed modifier, steps the simulation,
    * publishes scheduled signal events, advances goals, updates FPS, and
    * handles end-of-trial (loop, done, or game lost).
    */
  def tick(now: Long): Unit = {
    try {
      if (_playing && !_disabled) {
        _speedCount += 1
        if (_speedCount > 0 && (_speedCount % _speedModEff == 0)) {
          _speedCount = if (_speedModEff == 1) 0 else if (appUI.config.slowExtraTick) -1 else 0
          _nanoTime = System.nanoTime
          if (_nanoTime > _lastNanoTime + 1000000000) {
            modelUI.setFPS(_fps)
            _fps = 0
            _lastNanoTime = _nanoTime
          }
          _simStepper.step(_simSteps)
          _fps += _simSteps
          if (trial.forceLose)
            gameLost()
          val tick = sim.nowTick
          val t = tick.t
          if (_timeToSigs.contains(t)) {
            for (se <- _timeToSigs(t))
              publish(se)
          }
          if (trial.activeGoals.isDefined)
            trial.activeGoals.get.tick(tick, _gh)
          modelUI.tick(tick)
          appUI.tick(tick)
          if (useValidator)
            model.validator.tick(tick)
          if (tick.end) {
            if (sim.system.game)
              gameLost()
            else {
              if (loop)
                fireReset()
              else {
                disabled = true
                trial.state = TrialState.Done
                publish(TrialDoneEvent(trial))
              }
            }
          } else
            _simStepper.next(isPlaying)
        }
      }
      //      else if (sysUI.dirty)
      //        sysUI.tick(sim.nowTick)
    } catch {
      case th: Throwable =>
        publish(ErrorEvent("engine tick error", th))
    }
  }
  def error: Boolean = _error
  // Bean-style setter: fires a property-change event; entering the error state
  // also disables the engine.
  def error_=(value: Boolean): Unit = {
    logger.error("error: " + value)
    this.synchronized {
      val oldValue = this._error
      if (oldValue != value) {
        this._error = value
        this._pcs.firePropertyChange("error", oldValue, value)
        if (error) disabled = true
      }
    }
  }
  /** user modifiable */
  def playing: Boolean = _playing
  def playing_=(value: Boolean): Unit = {
    logger.trace("playing: " + value)
    this.synchronized {
      val oldValue = this._playing
      if (oldValue != value) {
        this._playing = value
        this._pcs.firePropertyChange("playing", oldValue, value)
      }
    }
  }
  def disabled: Boolean = _disabled
  // Disabling pauses playback; re-enabling is refused while in the error state.
  def disabled_=(value: Boolean): Unit = {
    logger.trace("disabled: " + value)
    this.synchronized {
      val oldValue = this._disabled
      if (oldValue != value) {
        if (!error || value) {
          this._disabled = value
          this._pcs.firePropertyChange("disabled", oldValue, value)
          playing = !value
        } else if (error)
          logger.warn("when error, disable = false ignored")
      }
    }
  }
  def speedMod: Int = _speedMod
  // Accepts only values from _speedMods: negative => skip frames (slow),
  // positive => multiple sim steps per frame (fast).
  def speedMod_=(value: Int): Unit = {
    require(_speedMods.indexOf(value) != -1, "invalid speed")
    val oldValue = this._speedMod
    this._speedMod = value
    _speedCount = 0
    // defaults
    _speedModEff = 1
    _simSteps = 1
    if (value < 0) {
      _speedModEff = -value
    } else
      _simSteps = value
    this._pcs.firePropertyChange("speedMod", oldValue, value)
  }
  private def gameLost(): Unit = publish(GameEndEvent(t = sim.nowTick.t, win = false))
  private def progress(msg: String): Unit = { publish(ProgressEvent(msg)) }
  private def msg(msg: String): Unit = { publish(MessageEvent(msg)) }
  // Resets the sim, reschedules all trial input signals, then resets the UI
  // and replays the active goal-group start events on the UI thread.
  private def reset(): Unit = {
    sim.reset()
    _timeToSigs.clear()
    for (in <- trial.inputs if in.on != 0) {
      _timeToSigs.addBinding(in.on, SignalEvent(signal = in, on = true))
      _timeToSigs.addBinding(in.off, SignalEvent(signal = in, on = false))
    }
    appUI.runOnUI {
      modelUI.reset(fullReset = true)
      for (grp <- trial.activeGoals) {
        publish(GoalsEvent(grp, start = true))
        for (g <- grp.startGoals)
          publish(GoalEvent(goal = g, start = true))
      }
      modelUI.playGameIntro()
    }
  }
  private def setTrialHelper(t: Trial): Unit = {
    stop()
    _simStepper.sim.trial = t
    inputModel.setItems(t.inputs)
    modelUI.trial = t
    publish(TrialEvent(t))
  }
  private def stop(): Unit = {
    disabled = true
    _simStepper.reset() // will wait if threaded old sim
  }
  // Java
  def isDisabled: Boolean = disabled
  def setDisabled(value: Boolean): Unit = disabled = value
  def isError: Boolean = error
  def setError(value: Boolean): Unit = error = value
  def isPlaying: Boolean = playing
  def setPlaying(value: Boolean): Unit = playing = value
  def getSpeedMod: Int = speedMod
  def setSpeedMod(value: Int): Unit = speedMod = value
}
| MemoryNetworks/memnets | api/src/main/scala/memnets/core/Engine.scala | Scala | apache-2.0 | 14,701 |
// Compiler regression test (positive): by-name implicit parameters must
// suppress the divergence check. Do not "clean up" — the exact shapes of the
// implicits are the point of the test.
object Test {
  trait Generic[T] {
    type Repr
  }
  object Generic {
    type Aux[T, R] = Generic[T] { type Repr = R }
    implicit def genTuple3[T, U, V]: Aux[(T, U, V), (T, (U, (V, Unit)))] = ???
    implicit def genTuple5[T, U, V, W, X]: Aux[(T, U, V, W, X), (T, (U, (V, (W, (X, Unit)))))] = ???
  }
  trait Show[T]
  object Show {
    implicit val showUnit: Show[Unit] = ???
    implicit val showInt: Show[Int] = ???
    implicit def showPair[T, U](implicit st: Show[T], su: Show[U]): Show[(T, U)] = ???
    // sr is by-name: this is what allows the recursive resolution below.
    implicit def showGen[T, R](implicit gen: Generic.Aux[T, R], sr: => Show[R]): Show[T] = ???
  }
  type I5 = (Int, Int, Int, Int, Int)
  // Demonstrates that the bynamity of sr suppresses the false positive divergence test
  // which would otherwise see 5 nested pairs dominating 3 nested pairs.
  implicitly[Show[(I5, I5, I5)]]
  implicitly[Show[(Int, I5, Int)]]
  implicitly[Show[(I5, (I5, I5, I5), Int)]]
}
| lampepfl/dotty | tests/pos/byname-implicits-15.scala | Scala | apache-2.0 | 927 |
// Regression test for scala/bug#5974: a Scala wrapper produced by
// CollectionConverters (asScala over a Java list) must be serializable.
object Test extends App {
  import scala.jdk.CollectionConverters._
  // Serializes `a` into a throwaway byte stream; throws if not serializable.
  def ser(a: AnyRef) =
    (new java.io.ObjectOutputStream(new java.io.ByteArrayOutputStream())).writeObject(a)
  val l = java.util.Arrays.asList("pigdog").asScala
  ser(l)
  println("ok")
}
| scala/scala | test/files/run/t5974.scala | Scala | apache-2.0 | 261 |
package highchair.datastore
/** Central access point for App Engine datastore connections.
  * Both services are lazy, so nothing is obtained until first use, and
  * implicit, so they can be picked up automatically by datastore operations.
  */
object Connection {
  import com.google.appengine.api.datastore.{
    DatastoreServiceFactory => Factory
  }
  /** A default synchronous connection. */
  implicit lazy val default = Factory.getDatastoreService()
  /** A default asynchronous connection. */
  implicit lazy val defaultAsync = Factory.getAsyncDatastoreService()
}
| chrislewis/highchair | datastore/src/main/scala/Connection.scala | Scala | mit | 357 |
//package mlbigbook.ml
//
//import mlbigbook.data.Labeled
//import mlbigbook.wordcount.LocalSparkContext
//import org.scalatest.FunSuite
//
//class KnnLshClassifierTest extends FunSuite with LocalSparkContext {
//
// import KnnLshClassifierTest._
//
// ignore("classify simple addresses") {
// fail("unimplemented")
// }
//
//}
//
//object KnnLshClassifierTest {
//
// import NearestNeighborsLSHTest._
//
// def classificationTest[T](c: Learning[T, Labeled]#Classifier, input: T, expected: Labeled): Err = {
// val actual = c(input)
// if (actual.label != expected.label)
// Some(s"Expected and actual labels dont match. Expecting: ${actual.label} . Actual: ${actual.label}")
// else
// None
// }
//
//}
//
| malcolmgreaves/bigmlbook | fp4ml-main/src/test/scala/mlbigbook/ml/KnnLshClassifierTest.scala | Scala | lgpl-3.0 | 735 |
/*
* Copyright 2012-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package laika.render.epub
import cats.effect.IO
import laika.format.EPUB
import laika.io.model.RenderedTreeRoot
import munit.FunSuite
/** Verifies that ContainerWriter assembles the expected set of EPUB container
  * entries (the fixed boilerplate in `standardFiles` plus one XHTML/static
  * input per rendered document) for various tree shapes.
  */
class ContainerWriterSpec extends FunSuite {
  val writer = new ContainerWriter
  // Entries every EPUB container must contain, regardless of content.
  val standardFiles = Seq(
    "/mimetype",
    "/META-INF/container.xml",
    "/META-INF/com.apple.ibooks.display-options.xml",
    "/EPUB/content.opf",
    "/EPUB/nav.xhtml",
    "/EPUB/toc.ncx"
  )
  // Collects the container paths produced for a rendered tree.
  def collectInputs (renderResult: RenderedTreeRoot[IO]): Seq[String] =
    writer
      .collectInputs(renderResult, EPUB.BookConfig())
      .map(_.path.toString)
  test("collect a single target document") {
    assertEquals(collectInputs(SingleDocument.input), standardFiles :+ "/EPUB/content/foo.xhtml")
  }
  test("render a tree with a two documents") {
    val result = Seq(
      "/EPUB/content/foo.xhtml",
      "/EPUB/content/bar.xhtml"
    )
    assertEquals(collectInputs(TwoDocuments.input), standardFiles ++ result)
  }
  test("render a tree with a nested tree") {
    val result = Seq(
      "/EPUB/content/foo.xhtml",
      "/EPUB/content/sub/bar.xhtml"
    )
    assertEquals(collectInputs(NestedTree.input), standardFiles ++ result)
  }
  test("render a tree with two nested trees") {
    val result = Seq(
      "/EPUB/content/foo.xhtml",
      "/EPUB/content/sub1/bar.xhtml",
      "/EPUB/content/sub1/baz.xhtml",
      "/EPUB/content/sub2/bar.xhtml",
      "/EPUB/content/sub2/baz.xhtml"
    )
    assertEquals(collectInputs(TwoNestedTrees.input), standardFiles ++ result)
  }
  test("render a tree with a nested tree and static documents") {
    val result = Seq(
      "/EPUB/content/foo.xhtml",
      "/EPUB/content/sub/bar.xhtml",
      "/EPUB/content/sub/image-1.5x.jpg",
      "/EPUB/content/sub/styles.epub.css"
    )
    assertEquals(collectInputs(TreeWithStaticDocuments.input), standardFiles ++ result)
  }
}
| planet42/Laika | io/src/test/scala/laika/render/epub/ContainerWriterSpec.scala | Scala | apache-2.0 | 2,493 |
package com.microsoft.partnercatalyst.fortis.spark.sinks.kafka
import java.util.Properties
import com.esotericsoftware.kryo.serializers.DefaultSerializers.StringSerializer
import com.github.benfradet.spark.kafka010.writer.dStreamToKafkaWriter
import com.microsoft.partnercatalyst.fortis.spark.dto.FortisEvent
import net.liftweb.json
import net.liftweb.json.Extraction.decompose
import net.liftweb.json.JsonAST.{JArray, JNothing, JString, compactRender}
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.spark.streaming.dstream.DStream
object KafkaSink {

  /** Publishes every event of the (optional) stream to a Kafka topic as JSON.
    *
    * @param dstream the stream of events to sink; no-op when None
    * @param host    Kafka bootstrap servers, e.g. "broker1:9092"
    * @param topic   target Kafka topic (records have no key, JSON value)
    */
  def apply(dstream: Option[DStream[FortisEvent]], host: String, topic: String): Unit = {
    dstream.foreach { stream =>
      val kafkaConf = new Properties
      kafkaConf.setProperty("bootstrap.servers", host)
      // Bug fix: the unqualified StringSerializer previously in scope here was
      // Kryo's (com.esotericsoftware.kryo.serializers.DefaultSerializers),
      // which does not implement org.apache.kafka.common.serialization.Serializer
      // and would make the producer fail at construction time. Use Kafka's own
      // string serializer, fully qualified to avoid the import clash.
      kafkaConf.setProperty(
        "key.serializer",
        classOf[org.apache.kafka.common.serialization.StringSerializer].getName)
      kafkaConf.setProperty(
        "value.serializer",
        classOf[org.apache.kafka.common.serialization.StringSerializer].getName)
      stream.map(KafkaSchema(_)).writeToKafka(kafkaConf, row => new ProducerRecord[String, String](topic, row))
    }
  }
}
/** Flat, JSON-serializable projection of a FortisEvent as written to Kafka.
  * Field names become the JSON keys; empty strings and empty arrays are
  * stripped out by KafkaSchema before rendering.
  */
case class KafkaRow(
  language: String,
  locations: List[String],
  sentiments: List[Double],
  keywords: List[String],
  entities: List[String],
  summary: String,
  pipelinekey: String,
  eventid: String,
  eventtime: Long,
  body: String,
  title: String,
  externalsourceid: String,
  sourceurl: String
)
/** Renders a FortisEvent as the compact JSON string sent to Kafka. */
object KafkaSchema {
  def apply(event: FortisEvent): String = {
    implicit val defaults = json.DefaultFormats
    // Build a KafkaRow, decompose it to a JSON AST, then drop empty fields
    // (empty arrays and empty strings become JNothing, i.e. omitted keys).
    compactRender(decompose(KafkaRow(
      language = event.analysis.language.getOrElse(""),
      locations = event.analysis.locations.map(_.wofId),
      sentiments = event.analysis.sentiments,
      //todo genders = event.analysis.genders.map(_.name),
      keywords = event.analysis.keywords.map(_.name),
      pipelinekey = event.details.pipelinekey,
      entities = event.analysis.entities.map(_.name),
      summary = event.analysis.summary.getOrElse(""),
      eventid = event.details.eventid.toString,
      eventtime = event.details.eventtime,
      body = event.details.body,
      title = event.details.title,
      externalsourceid = event.details.externalsourceid,
      sourceurl = event.details.sourceurl
    )).transform({
      case JArray(Nil) => JNothing
      case JString("") => JNothing
    }))
  }
}
| CatalystCode/project-fortis-spark | src/main/scala/com/microsoft/partnercatalyst/fortis/spark/sinks/kafka/KafkaSink.scala | Scala | mit | 2,339 |
package algebra
package laws
import algebra.lattice._
import algebra.ring._
import algebra.instances.all._
import algebra.instances.BigDecimalAlgebra
import catalysts.Platform
import catalysts.macros.TypeTagM // need this import for implicit macros
import cats.kernel.laws._
import org.typelevel.discipline.Laws
import org.typelevel.discipline.scalatest.Discipline
import org.scalacheck.{Arbitrary, Cogen}
import Arbitrary.arbitrary
import org.scalactic.anyvals.{PosZDouble, PosInt, PosZInt}
import org.scalatest.FunSuite
import org.scalatest.prop.Configuration
import scala.collection.immutable.BitSet
import scala.util.Random
/** Discipline-based law checks for the algebra type-class instances.
  * Each `laws[...]` call registers a ScalaCheck rule set; the implicit defs
  * near the top wire up the law bundles, and the optional string tag
  * disambiguates multiple rule sets for the same type.
  */
class LawTests extends FunSuite with Configuration with Discipline {
  lazy val checkConfiguration: PropertyCheckConfiguration =
    PropertyCheckConfiguration(
      minSuccessful = if (Platform.isJvm) PosInt(50) else PosInt(5),
      maxDiscardedFactor = if (Platform.isJvm) PosZDouble(5.0) else PosZDouble(50.0),
      minSize = PosZInt(0),
      sizeRange = if (Platform.isJvm) PosZInt(10) else PosZInt(5),
      workers = PosInt(1))
  // The scalacheck defaults (100,100) are too high for Scala-js, so we reduce to 10/100.
  implicit override val generatorDrivenConfig: PropertyCheckConfiguration =
    if (Platform.isJvm) PropertyCheckConfiguration(sizeRange = 100, minSuccessful = 100)
    else PropertyCheckConfiguration(sizeRange = 10, minSuccessful = 100)
  implicit val byteLattice: Lattice[Byte] = ByteMinMaxLattice
  implicit val shortLattice: Lattice[Short] = ShortMinMaxLattice
  implicit val intLattice: BoundedDistributiveLattice[Int] = IntMinMaxLattice
  implicit val longLattice: BoundedDistributiveLattice[Long] = LongMinMaxLattice
  implicit def orderLaws[A: Cogen: Eq: Arbitrary] = OrderLaws[A]
  implicit def groupLaws[A: Eq: Arbitrary] = GroupLaws[A]
  implicit def logicLaws[A: Eq: Arbitrary] = LogicLaws[A]
  implicit def latticeLaws[A: Eq: Arbitrary] = LatticeLaws[A]
  implicit def ringLaws[A: Eq: Arbitrary: AdditiveMonoid] = RingLaws[A]
  implicit def baseLaws[A: Eq: Arbitrary] = BaseLaws[A]
  implicit def latticePartialOrderLaws[A: Eq: Arbitrary] = LatticePartialOrderLaws[A]
  // Wrapper that only exposes Eq, so Eq-only laws can be tested in isolation.
  case class HasEq[A](a: A)
  object HasEq {
    implicit def hasEq[A: Eq]: Eq[HasEq[A]] =
      Eq[A].on(_.a)
    implicit def hasEqArbitrary[A: Arbitrary]: Arbitrary[HasEq[A]] =
      Arbitrary(arbitrary[A].map(HasEq(_)))
    implicit def hasEqCogen[A: Cogen]: Cogen[HasEq[A]] =
      Cogen[A].contramap[HasEq[A]](_.a)
  }
  // Wrapper that only exposes PartialOrder, analogous to HasEq.
  case class HasPartialOrder[A](a: A)
  object HasPartialOrder {
    implicit def hasPartialOrder[A: PartialOrder]: PartialOrder[HasPartialOrder[A]] =
      PartialOrder[A].on(_.a)
    implicit def hasPartialOrderArbitrary[A: Arbitrary]: Arbitrary[HasPartialOrder[A]] =
      Arbitrary(arbitrary[A].map(HasPartialOrder(_)))
    implicit def hasPartialOrderCogen[A: Cogen]: Cogen[HasPartialOrder[A]] =
      Cogen[A].contramap[HasPartialOrder[A]](_.a)
  }
  case class LawChecker[L <: Laws](name: String, laws: L) {
    def check(f: L => L#RuleSet): Unit = checkAll(name, f(laws))
  }
  private[laws] def laws[L[_] <: Laws, A](implicit
    lws: L[A], tag: TypeTagM[A]): LawChecker[L[A]] = laws[L, A]("")
  private[laws] def laws[L[_] <: Laws, A](extraTag: String)(implicit
    laws: L[A], tag: TypeTagM[A]): LawChecker[L[A]] =
    LawChecker("[" + tag.name.toString + (if(extraTag != "") "@@" + extraTag else "") + "]", laws)
  laws[OrderLaws, Boolean].check(_.order)
  laws[LogicLaws, Boolean].check(_.bool)
  laws[LogicLaws, SimpleHeyting].check(_.heyting)
  laws[LatticePartialOrderLaws, Boolean].check(_.boundedLatticePartialOrder)
  laws[RingLaws, Boolean].check(_.boolRing(booleanRing))
  // ensure that Bool[A].asBoolRing is a valid BoolRing
  laws[RingLaws, Boolean]("ring-from-bool").check(_.boolRing(Bool[Boolean].asBoolRing))
  // ensure that BoolRing[A].asBool is a valid Bool
  laws[LogicLaws, Boolean]("bool-from-ring").check(_.bool(new BoolFromBoolRing(booleanRing)))
  laws[OrderLaws, String].check(_.order)
  laws[GroupLaws, String].check(_.monoid)
  {
    implicit val g: Group[Int] = AdditiveGroup.additive[Int]
    laws[OrderLaws, Option[HasEq[Int]]].check(_.eqv)
    laws[OrderLaws, Option[HasPartialOrder[Int]]].check(_.partialOrder)
    laws[OrderLaws, Option[Int]].check(_.order)
    laws[GroupLaws, Option[Int]].check(_.monoid)
    laws[OrderLaws, Option[HasEq[String]]].check(_.eqv)
    laws[OrderLaws, Option[HasPartialOrder[String]]].check(_.partialOrder)
    laws[OrderLaws, Option[String]].check(_.order)
    laws[GroupLaws, Option[String]].check(_.monoid)
  }
  laws[OrderLaws, List[HasEq[Int]]].check(_.eqv)
  laws[OrderLaws, List[HasPartialOrder[Int]]].check(_.partialOrder)
  laws[OrderLaws, List[Int]].check(_.order)
  laws[GroupLaws, List[Int]].check(_.monoid)
  laws[OrderLaws, List[HasEq[String]]].check(_.eqv)
  laws[OrderLaws, List[HasPartialOrder[String]]].check(_.partialOrder)
  laws[OrderLaws, List[String]].check(_.order)
  laws[GroupLaws, List[String]].check(_.monoid)
  laws[LogicLaws, Set[Byte]].check(_.generalizedBool)
  laws[RingLaws, Set[Byte]].check(_.boolRng(setBoolRng[Byte]))
  laws[LogicLaws, Set[Byte]]("bool-from-rng").check(_.generalizedBool(new GenBoolFromBoolRng(setBoolRng)))
  laws[RingLaws, Set[Byte]]("rng-from-bool").check(_.boolRng(GenBool[Set[Byte]].asBoolRing))
  laws[OrderLaws, Set[Int]].check(_.partialOrder)
  laws[RingLaws, Set[Int]].check(_.semiring)
  laws[RingLaws, Set[String]].check(_.semiring)
  laws[OrderLaws, Map[Char, Int]].check(_.eqv)
  laws[RingLaws, Map[Char, Int]].check(_.semiring)
  laws[OrderLaws, Map[Int, BigInt]].check(_.eqv)
  laws[RingLaws, Map[Int, BigInt]].check(_.semiring)
  laws[OrderLaws, Byte].check(_.order)
  laws[RingLaws, Byte].check(_.commutativeRing)
  laws[LatticeLaws, Byte].check(_.lattice)
  laws[OrderLaws, Short].check(_.order)
  laws[RingLaws, Short].check(_.commutativeRing)
  laws[LatticeLaws, Short].check(_.lattice)
  laws[OrderLaws, Char].check(_.order)
  laws[OrderLaws, Int].check(_.order)
  laws[RingLaws, Int].check(_.commutativeRing)
  laws[LatticeLaws, Int].check(_.boundedDistributiveLattice)
  {
    implicit val comrig: CommutativeRig[Int] = IntMinMaxLattice.asCommutativeRig
    laws[RingLaws, Int].check(_.commutativeRig)
  }
  laws[OrderLaws, Long].check(_.order)
  laws[RingLaws, Long].check(_.commutativeRing)
  laws[LatticeLaws, Long].check(_.boundedDistributiveLattice)
  laws[RingLaws, BigInt].check(_.commutativeRing)
  laws[RingLaws, FPApprox[Float]].check(_.field)
  laws[RingLaws, FPApprox[Double]].check(_.field)
  // let's limit our BigDecimal-related tests to the JVM for now.
  if (Platform.isJvm) {
    {
      // we need a less intense arbitrary big decimal implementation.
      // this keeps the values relatively small/simple and avoids some
      // of the numerical errors we might hit.
      implicit val arbBigDecimal: Arbitrary[BigDecimal] =
        Arbitrary(arbitrary[Int].map(x => BigDecimal(x, java.math.MathContext.UNLIMITED)))
      // BigDecimal does have numerical errors, so we can't pass all of
      // the field laws.
      laws[RingLaws, BigDecimal].check(_.ring)
    }
    {
      // We check the full field laws using a FPApprox.
      val mc = java.math.MathContext.DECIMAL32
      implicit val arbBigDecimal: Arbitrary[BigDecimal] =
        Arbitrary(arbitrary[Double].map(x => BigDecimal(x, mc)))
      implicit val epsBigDecimal = FPApprox.Epsilon.bigDecimalEpsilon(mc)
      implicit val algebra = FPApprox.fpApproxAlgebra(new BigDecimalAlgebra(mc), Order[BigDecimal], epsBigDecimal)
      laws[RingLaws, FPApprox[BigDecimal]].check(_.field(algebra))
    }
  } else ()
  {
    implicit val arbBitSet: Arbitrary[BitSet] =
      Arbitrary(arbitrary[List[Byte]].map(s => BitSet(s.map(_ & 0xff): _*)))
    laws[LogicLaws, BitSet].check(_.generalizedBool)
  }
  laws[RingLaws, (Int, Int)].check(_.ring)
  {
    implicit val band = new Band[(Int, Int)] {
      def combine(a: (Int, Int), b: (Int, Int)) = (a._1, b._2)
    }
    checkAll("(Int, Int) Band", GroupLaws[(Int, Int)].band)
  }
  laws[OrderLaws, Unit].check(_.order)
  laws[RingLaws, Unit].check(_.commutativeRing)
  laws[RingLaws, Unit].check(_.multiplicativeMonoid)
  laws[LatticeLaws, Unit].check(_.boundedSemilattice)
  {
    // In order to check the monoid laws for `Order[N]`, we need
    // `Arbitrary[Order[N]]` and `Eq[Order[N]]` instances.
    // Here we have a bit of a hack to create these instances.
    val nMax: Int = 13
    final case class N(n: Int) { require(n >= 0 && n < nMax) }
    // The arbitrary `Order[N]` values are created by mapping N values to random
    // integers.
    implicit val arbNOrder: Arbitrary[Order[N]] = Arbitrary(arbitrary[Int].map { seed =>
      val order = new Random(seed).shuffle(Vector.range(0, nMax))
      Order.by { (n: N) => order(n.n) }
    })
    // The arbitrary `Eq[N]` values are created by mapping N values to random
    // integers.
    implicit val arbNEq: Arbitrary[Eq[N]] = Arbitrary(arbitrary[Int].map { seed =>
      val mapping = new Random(seed).shuffle(Vector.range(0, nMax))
      Eq.by { (n: N) => mapping(n.n) }
    })
    // needed because currently we don't have Vector instances
    implicit val vectorNEq: Eq[Vector[N]] = Eq.fromUniversalEquals
    // The `Eq[Order[N]]` instance enumerates all possible `N` values in a
    // `Vector` and considers two `Order[N]` instances to be equal if they
    // result in the same sorting of that vector.
    implicit val NOrderEq: Eq[Order[N]] = Eq.by { order: Order[N] =>
      Vector.tabulate(nMax)(N).sorted(order.toOrdering)
    }
    implicit val NEqEq: Eq[Eq[N]] = new Eq[Eq[N]] {
      def eqv(a: Eq[N], b: Eq[N]) =
        Iterator.tabulate(nMax)(N)
          .flatMap { x => Iterator.tabulate(nMax)(N).map((x, _)) }
          .forall { case (x, y) => a.eqv(x, y) == b.eqv(x, y) }
    }
    implicit val monoidOrderN: Monoid[Order[N]] = Order.whenEqualMonoid[N]
    laws[GroupLaws, Order[N]].check(_.monoid)
    {
      implicit val bsEqN: BoundedSemilattice[Eq[N]] = Eq.allEqualBoundedSemilattice[N]
      laws[GroupLaws, Eq[N]].check(_.boundedSemilattice)
    }
    {
      implicit val sEqN: Semilattice[Eq[N]] = Eq.anyEqualSemilattice[N]
      laws[GroupLaws, Eq[N]].check(_.semilattice)
    }
  }
  laws[OrderLaws, Int]("fromOrdering").check(_.order(Order.fromOrdering[Int]))
  laws[OrderLaws, Array[Int]].check(_.order)
  laws[OrderLaws, Array[Int]].check(_.partialOrder)
  // Rational tests do not return on Scala-js, so we make them JVM only.
  if (Platform.isJvm) laws[RingLaws, Rat].check(_.field)
  else ()
}
| sritchie/algebra | laws/src/test/scala/algebra/laws/LawTests.scala | Scala | mit | 10,588 |
// Copyright 2012 Foursquare Labs Inc. All Rights Reserved.
package io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime
import org.joda.time.format.{DateTimeFormat, DateTimeFormatter}
/** Joda-time based helpers for printing/parsing java.util.Date values. */
object DateFieldHelpers {
  /** Used primarily to parse/format the java.util.Date enhanced type. */
  // NOTE(review): the 'Z' in the pattern is a literal and no zone is forced on
  // the formatter, so printing/parsing uses the JVM default time zone while
  // the output claims UTC — confirm this is intended before relying on
  // round-trips across machines in different zones.
  private val javaUtilDateFormatter: DateTimeFormatter = DateTimeFormat.forPattern("yyyy-MM-dd\'T\'HH:mm:ss\'Z\'")
  /** Formats a date as e.g. "2012-01-31T10:15:30Z". */
  def printJavaDate(date: java.util.Date): String = {
    javaUtilDateFormatter.print(date.getTime)
  }
  /** Parses a string produced by [[printJavaDate]] back into a java.util.Date. */
  def parseJavaDate(dateString: String): java.util.Date = {
    javaUtilDateFormatter.parseDateTime(dateString).toDate
  }
}
| foursquare/fsqio | src/jvm/io/fsq/spindle/codegen/__shaded_for_spindle_bootstrap__/runtime/DateFieldHelpers.scala | Scala | apache-2.0 | 639 |
package chapter4
import scala.io.Source
object FileRead {

  /** Prints every line of `filename` longer than `width` characters,
    * prefixed with the file name.
    *
    * Bug fixes vs. the original: the loop now reads from the `filename`
    * argument (it previously iterated a hard-coded "/home/nikhil/test.txt"),
    * and the single Source that is opened is always closed (the original
    * opened an extra Source that was never used or released).
    */
  private def processFile(filename: String, width: Int): Unit = {
    def processLine(line: String): Unit = {
      if (line.length > width)
        println(filename + ":" + line)
    }
    val source = Source.fromFile(filename)
    try source.getLines().foreach(processLine)
    finally source.close()
  }

  def main(args: Array[String]): Unit = {
    // The original defined processFile but never invoked it, so the program
    // did nothing; now each command-line argument is processed (no args is
    // still a no-op, preserving the previous observable behavior).
    args.foreach(processFile(_, 72))
  }
}
| NikhilJose/ScalaPractice | src/main/scala/chapter4/FileRead.scala | Scala | apache-2.0 | 491 |
package br.com.caelum.hibernatequerydsl
import net.sf.cglib.proxy.InvocationHandler
import java.lang.reflect.Method
import java.lang.{ThreadLocal, Boolean}
import org.hibernate.criterion.{Projections, Order, MatchMode, Restrictions}
object Pig{
  // Thread-local handle to the callback currently recording property accesses;
  // it is installed by InvocationMemorizingCallback's constructor (see below).
  val tl = new ThreadLocal[InvocationMemorizingCallback]
}
class StringWithRubyPowers(str: String) {

  /**
   * Returns the string with its first character lower-cased.
   * Returns the empty string unchanged (the original implementation threw
   * StringIndexOutOfBoundsException on "", e.g. for a zero-length match group).
   */
  def withFirstCharLowered =
    if (str.isEmpty) str
    else str.substring(0, 1).toLowerCase + str.substring(1, str.length)
}
class InvocationMemorizingCallback(val prefix:String = "") extends InvocationHandler {

  // Publish this recorder to the current thread (read back via Pig.tl).
  Pig.tl.set(this)

  implicit def string2WithRubyPowers(str: String) = new StringWithRubyPowers(str)

  private var _invoked: String = ""
  // Property names recorded so far, most recent first.
  var properties = List[String]()

  /** Dot-joined path of the properties accessed through this proxy. */
  def invokedPath = properties.mkString(".")

  def invoke(proxy: AnyRef, method: java.lang.reflect.Method, args: Array[AnyRef]) = {
    _invoked = method.getName
    // Strip an optional JavaBean "get"/"is" prefix and keep the property name.
    // FIX: the original pattern was """(get|is)?(\\w*){1}""" — inside triple
    // quotes `\\w` is a literal backslash followed by 'w', so the extractor
    // never matched real method names and the match threw MatchError.
    val GetterExpression = """(get|is)?(\w*)""".r
    _invoked match {
      case GetterExpression(_, part2) => {
        properties = part2.withFirstCharLowered :: properties
      }
    }
    // Boolean-returning methods must receive a real boolean so the proxy can
    // unbox it; every other return type can safely receive null.
    // FIX: the original used `name.eq("boolean")` — reference equality on
    // Strings — which is false for distinct instances, so boolean getters
    // were handed null and blew up on unboxing.
    if (method.getReturnType.getName == "boolean") java.lang.Boolean.FALSE else null
  }
}
| asouza/hibernate-query-dsl | src/main/scala/br/com/caelum/hibernatequerydsl/InvocationMemorizingCallback.scala | Scala | lgpl-2.1 | 1,291 |
package recfun
import org.scalatest.FunSuite
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class BalanceSuite extends FunSuite {
  import Main.balance

  // Each case: (test name, input text, whether the parentheses are balanced).
  private val cases: Seq[(String, String, Boolean)] = Seq(
    ("'(if (zero? x) max (/ 1 x))' is balanced.", "(if (zero? x) max (/ 1 x))", true),
    ("'I told him ...' is balanced.", "I told him (that it's not (yet) done).\n(But he wasn't listening)", true),
    ("':-)' is unbalanced.", ":-)", false),
    ("'unmatched parenthesis leave weird tension all day long)' is unbalanced.", "unmatched parenthesis leave weird tension all day long)", false),
    ("Counting is not enough.", "())(", false),
    ("Counting is not enough - part 2.", "()(", false),
    ("Counting is not enough - part 3.", "()((", false),
    ("Counting is not enough - part 4.", "()()", true),
    ("Counting is not enough - part 5.", "((()()", false),
    ("Counting is not enough - part 6.", "(", false),
    ("Counting is not enough - part 7.", ")", false),
    ("Counting is not enough - part 8.", ")(", false),
    ("Counting is not enough - part 9.", ")()", false),
    ("Counting is not enough - part 10.", ")()(", false),
    ("Counting is not enough - part 11.", "(()", false),
    ("Counting is not enough - part 12.", "(())", true)
  )

  // Register one FunSuite test per case, preserving the original names and order.
  for ((name, input, expected) <- cases) {
    test(name) {
      assert(balance(input.toList) == expected)
    }
  }
}
| keshavbashyal/playground-notes | functional-programming-principles-in-scala/recfun/src/test/scala/recfun/BalanceSuite.scala | Scala | mit | 1,830 |
package travelservice.webclient.snippet
import _root_.scala.xml.NodeSeq
import _root_.net.liftweb.http._
import _root_.net.liftweb.util._
import _root_.net.liftweb.mapper._
import _root_.net.liftweb.common._
import Helpers._
import travelservice.model._
class TicketDisplay {

  /**
   * Renders the ticket named by the "id" request parameter into the given
   * template, binding uid, travelers, flights, price and payment status.
   * When the parameter is missing or no ticket with that uid exists, flashes
   * an error and redirects to /index.html (S.redirectTo aborts the render).
   */
  def show(xhtml:NodeSeq):NodeSeq={
    S.param("id") match {
      case Full(ident) => {
        // Look the ticket up by its public uid (not the database primary key).
        val ticket = Ticket.find(By(Ticket.uid, ident))
        ticket match {
          case Full(t) => {
            bind("ticket", xhtml, "main" -> t.uid, "travelers"->t.travelers.get.map(e => e.toXHTML), "flights" -> t.flights.get.map(e => e.toXHTMLTable), "price" -> t.price, "paymentStatus" -> t.paymentStatus)
          }
          case _ => S.error("Ticket not found.");S.redirectTo("/index.html")
        }
      }
      case _ => S.error("No ID specified.");S.redirectTo("/index.html")
    }
  }
}
| jwachter/travel-service | src/main/scala/travelservice/webclient/snippet/TicketDisplay.scala | Scala | apache-2.0 | 874 |
/*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.core.validation
import com.typesafe.config.Config
/**
* Verifies that the keys and values only use alpha-numeric, underscore, dash, and period.
*/
class ValidCharactersRule(config: Config) extends TagRule {
  import ValidCharactersRule._

  /**
   * Fails if the key or the value contains any character outside the
   * supported set [-_.A-Za-z0-9]; passes otherwise.
   */
  def validate(k: String, v: String): ValidationResult = {
    if (!k.forall(isSupported))
      failure(s"invalid characters in key: [$k] ([-_.A-Za-z0-9] are allowed)")
    else if (!v.forall(isSupported))
      failure(s"invalid characters in value: $k = [$v] ([-_.A-Za-z0-9] are allowed)")
    else
      ValidationResult.Pass
  }
}
object ValidCharactersRule {

  /**
   * Lookup table indexed by ASCII code point: true for the characters allowed
   * in tag keys and values ([-_.A-Za-z0-9]).
   */
  private final val supported = {
    // JVM boolean arrays are zero-initialized, so only the allowed entries
    // need to be set (the original also explicitly cleared every slot first).
    val cs = new Array[Boolean](128)
    ('A' to 'Z').foreach { c => cs(c) = true }
    ('a' to 'z').foreach { c => cs(c) = true }
    ('0' to '9').foreach { c => cs(c) = true }
    cs('-') = true
    cs('_') = true
    cs('.') = true
    cs
  }

  /** True if `c` is in the supported ASCII subset. */
  private final def isSupported(c: Char): Boolean = {
    // Char is unsigned, so the original `c >= 0` guard was always true and
    // has been dropped; only the upper bound matters before indexing.
    c < 128 && supported(c)
  }
}
| jasimmk/atlas | atlas-core/src/main/scala/com/netflix/atlas/core/validation/ValidCharactersRule.scala | Scala | apache-2.0 | 1,865 |
/*
* Copyright (c) 2014, Brook 'redattack34' Heisler
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the ModularRayguns team nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.castlebravostudios.rayguns.items.chambers
import com.castlebravostudios.rayguns.api.items.ItemModule
import com.castlebravostudios.rayguns.entities.effects.TractorEffect
import com.castlebravostudios.rayguns.mod.ModularRayguns
import com.castlebravostudios.rayguns.items.emitters.Emitters
import com.castlebravostudios.rayguns.items.misc.Tier2EmptyChamber
object TractorChamber extends BaseChamber {

  // Unique key identifying this module type.
  val moduleKey = "TractorChamber"
  // Multiplier applied to the shot power for this chamber.
  val powerModifier = 1.5
  // Effect entity spawned for shots fired from this chamber.
  val shotEffect = TractorEffect
  // Localization key for this chamber's gun-name segment.
  val nameSegmentKey = "rayguns.TractorChamber.segment"

  // Builds the single-stack chamber item, crafted from the tractor emitter
  // and a tier-2 empty chamber.
  def createItem() : ItemModule = new ItemChamber( this,
    Emitters.tractorEmitter, Tier2EmptyChamber )
    .setUnlocalizedName("rayguns.TractorChamber")
    .setTextureName("rayguns:chamber_tractor")
    .setCreativeTab( ModularRayguns.raygunsTab )
    .setMaxStackSize(1)

  // Registers handlers for every fire mode this chamber supports.
  def registerShotHandlers() : Unit = {
    registerSingleShotHandlers()
    registerScatterShotHandler()
    registerChargedShotHandler()
    registerPreciseShotHandler()
  }
} | Redattack34/ModularRayguns | src/main/scala/com/castlebravostudios/rayguns/items/chambers/TractorChamber.scala | Scala | bsd-3-clause | 2,620 |
package ucesoft.cbm.misc
import java.io.{ObjectInputStream, ObjectOutputStream}
import ucesoft.cbm.{CBMComponent, CBMComponentType}
object Switcher {
  // Bit flags identifying which source asserted a line on the switcher bus.
  final val VIC = 0x01
  final val CIA = 0x02
  final val CRT = 0x04
  final val KB = 0x08
  final val EXT = 0x10
}
class Switcher(name:String,handler: (Boolean) => Unit) extends CBMComponent {
  val componentID = s"$name Switcher"
  val componentType = CBMComponentType.INTERNAL

  // Bitmask of currently asserted lines (see the Switcher companion flags).
  private[this] var bus = 0

  override def getProperties = {
    // Expose the raw bus state in binary for diagnostics.
    properties.setProperty(componentID, bus.toBinaryString)
    properties
  }

  def init : Unit = {}

  def reset : Unit = {
    bus = 0
  }

  /** Sets or clears `line` in the bus mask and notifies the handler. */
  final def setLine(line:Int,set:Boolean) : Unit = {
    if (set) bus |= line
    else bus &= ~line
    handler(bus > 0)
  }

  protected def saveState(out:ObjectOutputStream) : Unit = out.writeInt(bus)

  protected def loadState(in:ObjectInputStream) : Unit = {
    bus = in.readInt()
  }

  protected def allowsStateRestoring : Boolean = true
}
| abbruzze/kernal64 | Kernal64/src/ucesoft/cbm/misc/Switcher.scala | Scala | mit | 997 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.online.joins
import java.util.{HashSet => JHashSet}
import org.apache.spark.sql.catalyst.expressions.{MutableProjection, Row}
import scala.collection.JavaConversions._
object HashedSet {

  /**
   * Builds the set of distinct, fully-defined join keys produced by applying
   * `keyGenerator` to each input row, returning an iterator over them.
   */
  def apply(iter: Iterator[Row], keyGenerator: MutableProjection): Iterator[Row] = {
    val hashSet = new JHashSet[Row]()
    // Create a Hash set of buildKeys
    while (iter.hasNext) {
      val currentRow = iter.next()
      val rowKey = keyGenerator(currentRow)
      // Rows with null key columns can never match in an equi-join, so skip them.
      if (!rowKey.anyNull && !hashSet.contains(rowKey)) {
        // Copy before storing: the mutable projection reuses its output row.
        hashSet.add(rowKey.copy())
      }
    }
    hashSet.iterator()
  }

  /** Collects the distinct, fully-defined key rows into a HashSet. */
  def apply(iter: Iterator[Row]): JHashSet[Row] = {
    val hashSet = new JHashSet[Row]()
    while (iter.hasNext) {
      val rowKey = iter.next()
      // HashSet.add is already a no-op for duplicates, so no contains() check
      // is needed (the original did a redundant contains-then-add).
      if (!rowKey.anyNull) {
        hashSet.add(rowKey)
      }
    }
    hashSet
  }
}
| andrewor14/iolap | sql/hive/src/main/scala/org/apache/spark/sql/hive/online/joins/HashedSet.scala | Scala | apache-2.0 | 1,849 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.conversions
import com.twitter.algebird.Moments
import com.twitter.util.Time
import com.twitter.zipkin.common._
import com.twitter.zipkin.query._
import com.twitter.zipkin.thriftscala
import scala.collection.breakOut
import scala.language.implicitConversions
/**
* Convenience implicits for converting between common classes and Thrift.
*/
object thrift {

  /* Endpoint */
  // Domain -> thrift wrapper.
  class ThriftEndpoint(e: Endpoint) {
    lazy val toThrift = thriftscala.Endpoint(e.ipv4, e.port, e.serviceName)
  }
  // Thrift -> domain wrapper.
  class WrappedEndpoint(e: thriftscala.Endpoint) {
    lazy val toEndpoint = {
      // Thrift may carry a null or empty service name; normalize it to the
      // shared placeholder so downstream code never sees null.
      val serviceName = e.serviceName match {
        case (null | "") => Endpoint.UnknownServiceName
        case _ => e.serviceName
      }
      new Endpoint(e.ipv4, e.port, serviceName)
    }
  }
  implicit def endpointToThriftEndpoint(e: Endpoint) = new ThriftEndpoint(e)
  implicit def thriftEndpointToEndpoint(e: thriftscala.Endpoint) = new WrappedEndpoint(e)

  /* AnnotationType */
  class ThriftAnnotationType(a: AnnotationType) {
    lazy val toThrift = thriftscala.AnnotationType(a.value)
  }
  class WrappedAnnotationType(a: thriftscala.AnnotationType) {
    lazy val toAnnotationType = AnnotationType(a.value, a.name)
  }
  implicit def annotationTypeToThriftAnnotationType(a: AnnotationType) = new ThriftAnnotationType(a)
  implicit def thriftAnnotationTypeToAnnotationType(a: thriftscala.AnnotationType) = new WrappedAnnotationType(a)

  /* Annotation */
  class ThriftAnnotation(a: Annotation) {
    lazy val toThrift = {
      thriftscala.Annotation(a.timestamp, a.value, a.host.map(_.toThrift))
    }
  }
  class WrappedAnnotation(a: thriftscala.Annotation) {
    // Conversion validates required fields and fails fast on bad thrift data.
    lazy val toAnnotation = {
      if (a.timestamp <= 0)
        throw new IllegalArgumentException("Annotation must have a timestamp: %s".format(a.toString))
      if ("".equals(a.value))
        throw new IllegalArgumentException("Annotation must have a value: %s".format(a.toString))
      new Annotation(a.timestamp, a.value, a.host.map(_.toEndpoint))
    }
  }
  implicit def annotationToThriftAnnotation(a: Annotation) = new ThriftAnnotation(a)
  implicit def thriftAnnotationToAnnotation(a: thriftscala.Annotation) = new WrappedAnnotation(a)

  /* BinaryAnnotation */
  class ThriftBinaryAnnotation(b: BinaryAnnotation) {
    lazy val toThrift = {
      thriftscala.BinaryAnnotation(b.key, b.value, b.annotationType.toThrift, b.host.map(_.toThrift))
    }
  }
  class WrappedBinaryAnnotation(b: thriftscala.BinaryAnnotation) {
    lazy val toBinaryAnnotation = {
      BinaryAnnotation(b.key, b.value, b.annotationType.toAnnotationType, b.host.map(_.toEndpoint))
    }
  }
  implicit def binaryAnnotationToThriftBinaryAnnotation(b: BinaryAnnotation) = new ThriftBinaryAnnotation(b)
  implicit def thriftBinaryAnnotationToBinaryAnnotation(b: thriftscala.BinaryAnnotation) = new WrappedBinaryAnnotation(b)

  /* Span */
  class ThriftSpan(s: Span) {
    lazy val toThrift = {
      thriftscala.Span(s.traceId, s.name, s.id, s.parentId, s.annotations.map { _.toThrift },
        s.binaryAnnotations.map { _.toThrift }, s.debug)
    }
  }
  class WrappedSpan(s: thriftscala.Span) {
    lazy val toSpan = {
      // A span without a name is considered unusable trace data.
      s.name match {
        case null => throw new IncompleteTraceDataException("No name set in Span")
        case _ => ()
      }
      Span(
        s.traceId,
        s.name,
        s.id,
        s.parentId,
        // Thrift collections may arrive null; treat them as empty.
        s.annotations match {
          case null => List.empty[Annotation]
          case as => as.map(_.toAnnotation)(breakOut)
        },
        s.binaryAnnotations match {
          case null => List.empty[BinaryAnnotation]
          case b => b.map(_.toBinaryAnnotation)(breakOut)
        },
        s.debug
      )
    }
  }
  implicit def spanToThriftSpan(s: Span) = new ThriftSpan(s)
  implicit def thriftSpanToSpan(s: thriftscala.Span) = new WrappedSpan(s)

  /* Trace */
  class WrappedTrace(t: Trace) {
    lazy val toThrift = thriftscala.Trace(t.spans.map{ _.toThrift })
  }
  class ThriftTrace(t: thriftscala.Trace) {
    lazy val toTrace = Trace(t.spans.map { _.toSpan })
  }
  implicit def traceToThrift(t: Trace) = new WrappedTrace(t)
  implicit def thriftToTrace(t: thriftscala.Trace) = new ThriftTrace(t)

  /* DependencyLink */
  class WrappedDependencyLink(dl: DependencyLink) {
    lazy val toThrift = thriftscala.DependencyLink(dl.parent, dl.child, dl.callCount)
  }
  class ThriftDependencyLink(dl: thriftscala.DependencyLink) {
    lazy val toDependencyLink = DependencyLink(dl.parent, dl.child, dl.callCount)
  }
  implicit def dependencyLinkToThrift(dl: DependencyLink) = new WrappedDependencyLink(dl)
  implicit def thriftToDependencyLink(dl: thriftscala.DependencyLink) = new ThriftDependencyLink(dl)

  /* Dependencies (timestamps cross the wire as microseconds) */
  class WrappedDependencies(d: Dependencies) {
    lazy val toThrift = thriftscala.Dependencies(d.startTime.inMicroseconds, d.endTime.inMicroseconds, d.links.map {_.toThrift}.toSeq )
  }
  class ThriftDependencies(d: thriftscala.Dependencies) {
    lazy val toDependencies = Dependencies(
      Time.fromMicroseconds(d.startTime),
      Time.fromMicroseconds(d.endTime),
      d.links.map {_.toDependencyLink}
    )
  }
  implicit def dependenciesToThrift(d: Dependencies) = new WrappedDependencies(d)
  implicit def thriftToDependencies(d: thriftscala.Dependencies) = new ThriftDependencies(d)
}
| jstanier/zipkin | zipkin-scrooge/src/main/scala/com/twitter/zipkin/conversions/thrift.scala | Scala | apache-2.0 | 5,908 |
package fs2
package util
// Leibniz-style type-equality witness: the only way to obtain an Eq[A,B] is
// the reflexive instance (Eq.refl), so the unchecked casts below never lie.
private[fs2] sealed trait Eq[A,B] {
  def flip: Eq[B,A] = this.asInstanceOf[Eq[B,A]]
  // Coerce a value of A to B using the identity type constructor.
  def apply(a: A): B = Eq.subst[({type f[x] = x})#f, A, B](a)(this)
}
object Eq {
  // Single shared instance backing every reflexive witness; safe because an
  // Eq[A,B] can only ever be produced when A and B are the same type.
  private val _instance = new Eq[Unit,Unit] {}
  implicit def refl[A]: Eq[A,A] = _instance.asInstanceOf[Eq[A,A]]

  // Substitute B for A under an arbitrary type constructor F.
  def subst[F[_],A,B](f: F[A])(implicit Eq: Eq[A,B]): F[B] =
    f.asInstanceOf[F[B]]

  // Convenience specializations of subst for common fs2 shapes.
  def substStream[F[_],A,B](s: Stream[F,A])(implicit Eq: Eq[A,B]): Stream[F,B] =
    subst[({ type f[x] = Stream[F,x] })#f, A, B](s)

  def substPull[F[_],W,A,B](p: Pull[F,W,A])(implicit Eq: Eq[A,B]): Pull[F,W,B] =
    subst[({ type f[x] = Pull[F,W,x] })#f, A, B](p)

  def substHandler[F[_],A,B](h: Throwable => Stream[F,A])(implicit Eq: Eq[A,B]): Throwable => Stream[F,B] =
    subst[({ type f[x] = Throwable => Stream[F,x] })#f, A, B](h)
}
| japgolly/scalaz-stream | core/src/main/scala/fs2/util/Eq.scala | Scala | mit | 838 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn.tf
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.T
import com.intel.analytics.bigdl.utils.serializer.ModuleSerializationTest
import scala.util.Random
class SoftplusGradSerialTest extends ModuleSerializationTest {
  override def test(): Unit = {
    // A freshly random-filled 2x2x2 float tensor per call.
    def randomTensor: Tensor[Float] = Tensor[Float](2, 2, 2).apply1(_ => Random.nextFloat())
    val module = SoftplusGrad[Float, Float].setName("sofplusGrad")
    val input = T(randomTensor, randomTensor)
    runSerializationTest(module, input)
  }
}
| yiheng/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/nn/tf/SoftplusGradSpec.scala | Scala | apache-2.0 | 1,186 |
package io.getquill
import io.getquill.idiom.{ Idiom => BaseIdiom }
import io.getquill.context.sql.SqlContext
import io.getquill.context.sql.encoding.mirror.ArrayMirrorEncoding
/**
 * A [[MirrorContext]] specialized for SQL: mixes in the SQL context and the
 * mirror array encodings so queries built against the given idiom and naming
 * strategy can be inspected rather than executed.
 */
class SqlMirrorContext[Idiom <: BaseIdiom, Naming <: NamingStrategy](idiom: Idiom, naming: Naming)
  extends MirrorContext(idiom, naming)
  with SqlContext[Idiom, Naming]
with ArrayMirrorEncoding | getquill/quill | quill-sql/src/main/scala/io/getquill/SqlMirrorContext.scala | Scala | apache-2.0 | 376 |
package tv.camfire.media_server.server
import akka.actor.{ActorRef, Actor}
import scala.Some
import org.webrtc.{MediaStream, IceCandidate, SessionDescription}
import org.slf4j.{Logger, LoggerFactory}
import tv.camfire.media_server.util.BCryptMap
import tv.camfire.media_server.ErrorMessages._
/**
* User: jonathan
* Date: 5/2/13
* Time: 8:06 PM
*/
// Marker for all messages understood by MediaManager / session companions.
sealed trait SessionCompanionEvent

// WebRTC signalling messages relayed to a session's companion actor.
case class ClientOffer(sessionId: String, remoteDescription: SessionDescription) extends SessionCompanionEvent {}
case class ClientAnswer(sessionId: String, remoteDescription: SessionDescription) extends SessionCompanionEvent
case class ClientIceCandidate(sessionId: String, iceCandidate: IceCandidate) extends SessionCompanionEvent

// Media-stream management messages.
case class SubscribeToMediaStream(userId: String, targetSessionIdHash: String) extends SessionCompanionEvent
case class GetSessionIdHash(sessionId: String) extends SessionCompanionEvent
case class GetAvailableStreams() extends SessionCompanionEvent
case class RegisterMediaStream(sessionId: String, mediaStream: MediaStream) extends SessionCompanionEvent
case class UnRegisterAllMediaStreams(sessionId: String) extends SessionCompanionEvent
case class GetMediaStream() extends SessionCompanionEvent
case class TellActorMediaStream(actorRef: ActorRef) extends SessionCompanionEvent
case class ReceiveActorMediaStream(mediaStream: MediaStream) extends SessionCompanionEvent
trait MediaManager extends Actor {

  private val log: Logger = LoggerFactory.getLogger(getClass)

  /** Companion actors keyed by session id (BCryptMap — presumably hashed keys; confirm semantics). */
  val sessions: BCryptMap[ActorRef]

  /** Routes signalling and stream-management events to the owning companion actor. */
  protected def mediaManagement: Receive = {
    // TODO: Find a way to make this more generic
    case msg@ClientOffer(sessionId, remoteSessionDescription) =>
      log.debug("Received ClientOffer from [%s]".format(sessionId))
      forwardToSession(sessionId, msg)
    case msg@ClientAnswer(sessionId, remoteSessionDescription) =>
      forwardToSession(sessionId, msg)
    case msg@ClientIceCandidate(sessionId, iceCandidate) =>
      forwardToSession(sessionId, msg)
    case msg@SubscribeToMediaStream(sessionId, targetSessionIdHash) =>
      val requestingActor = sessions.get(sessionId)
      val targetActor = sessions.get(targetSessionIdHash)
      targetActor ! TellActorMediaStream(requestingActor)
      // TODO: The Following is broken, clean up
      // if (requestingActor.isDefined) {
      //   sendToSession(sessionId, TellActorMediaStream(requestingActor.get))
      // } else {
      //   log.warn(NO_SESSION_COMPANION(sessionId, msg))
      // }
    case msg@RegisterMediaStream(sessionId, mediaStream) =>
      forwardToSession(sessionId, msg)
    case msg@UnRegisterAllMediaStreams(sessionId) =>
      forwardToSession(sessionId, msg)
    case msg@GetAvailableStreams() =>
      // val streams = redisClient.keys("stream:*")
      // val all = StreamModel.findAll()
  }

  /** Sends `msg` to the companion for `sessionId`; logs a warning when none exists. */
  def sendToSession(sessionId: String, msg: SessionCompanionEvent) {
    // FIX: removed leftover debug output (println("ARRGG:") / println(session)).
    val session = getSession(sessionId)
    if (session.isDefined) {
      session.get ! msg
    } else {
      log.warn(NO_SESSION_COMPANION(sessionId, msg))
    }
  }

  /** Forwards `msg` (preserving the original sender) to the companion for `sessionId`. */
  def forwardToSession(sessionId: String, msg: AnyRef) {
    val session = getSession(sessionId)
    if (session.isDefined) {
      log.debug("Forwarding message to session companion...")
      session.get forward msg
    } else {
      log.warn(NO_SESSION_COMPANION(sessionId, msg))
    }
  }

  /** Looks up the companion actor for `sessionId`, if registered. */
  private def getSession(sessionId: String): Option[ActorRef] = {
    if (sessions.containsKey(sessionId))
      Some(sessions.get(sessionId))
    else {
      None
    }
  }
}
| jgrowl/camfire-signaling | signaling-server/src/main/scala/tv/camfire/media_server/server/MediaManager.scala | Scala | mit | 3,814 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.tools.nsc.classpath
import java.io.{Closeable, File}
import java.net.URL
import scala.reflect.io.{AbstractFile, PlainFile, PlainNioFile}
import scala.tools.nsc.util.{ClassPath, ClassRepresentation, EfficientClassPath}
import FileUtils._
import scala.jdk.CollectionConverters._
import scala.reflect.internal.JDK9Reflectors
import scala.tools.nsc.CloseableRegistry
import scala.tools.nsc.classpath.PackageNameUtils.{packageContains, separatePkgAndClassNames}
/**
* A trait allowing to look for classpath entries in directories. It provides common logic for
* classes handling class and source files.
* It makes use of the fact that in the case of nested directories it's easy to find a file
* when we have a name of a package.
* It abstracts over the file representation to work with both JFile and AbstractFile.
*/
trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends EfficientClassPath {
  // Abstract file representation (JFile or AbstractFile in subclasses).
  type F

  // Root directory of this classpath entry.
  val dir: F

  protected def emptyFiles: Array[F] // avoids reifying ClassTag[F]
  protected def getSubDir(dirName: String): Option[F]
  protected def listChildren(dir: F, filter: Option[F => Boolean] = None): Array[F]
  protected def getName(f: F): String
  protected def toAbstractFile(f: F): AbstractFile
  protected def isPackage(f: F): Boolean

  protected def createFileEntry(file: AbstractFile): FileEntryType
  protected def isMatchingFile(f: F): Boolean

  // Directory corresponding to `forPackage`, or None if it does not exist.
  private def getDirectory(forPackage: PackageName): Option[F] = {
    if (forPackage.isRoot) {
      Some(dir)
    } else {
      getSubDir(forPackage.dirPathTrailingSlash)
    }
  }
  override private[nsc] def hasPackage(pkg: PackageName) = getDirectory(pkg).isDefined

  private[nsc] def packages(inPackage: PackageName): Seq[PackageEntry] = {
    val dirForPackage = getDirectory(inPackage)
    val nestedDirs: Array[F] = dirForPackage match {
      case None => emptyFiles
      case Some(directory) => listChildren(directory, Some(isPackage))
    }
    scala.collection.immutable.ArraySeq.unsafeWrapArray(
      nestedDirs.map(f => PackageEntryImpl(inPackage.entryName(getName(f))))
    )
  }

  protected def files(inPackage: PackageName): Seq[FileEntryType] = {
    val dirForPackage = getDirectory(inPackage)
    val files: Array[F] = dirForPackage match {
      case None => emptyFiles
      case Some(directory) => listChildren(directory, Some(isMatchingFile))
    }
    files.iterator.map(f => createFileEntry(toAbstractFile(f))).toSeq
  }

  override private[nsc] def list(inPackage: PackageName, onPackageEntry: PackageEntry => Unit, onClassesAndSources: ClassRepresentation => Unit): Unit = {
    val dirForPackage = getDirectory(inPackage)
    dirForPackage match {
      case None =>
      case Some(directory) =>
        // Single listing pass: classify each child as either a sub-package
        // or a class/source entry and report it through the callbacks.
        for (file <- listChildren(directory)) {
          if (isPackage(file))
            onPackageEntry(PackageEntryImpl(inPackage.entryName(getName(file))))
          else if (isMatchingFile(file))
            onClassesAndSources(createFileEntry(toAbstractFile(file)))
        }
    }
  }
}
trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends DirectoryLookup[FileEntryType] {
  type F = File

  protected def emptyFiles: Array[File] = Array.empty

  // Resolve a package-relative subdirectory; only readable directories count.
  protected def getSubDir(packageDirName: String): Option[File] = {
    val packageDir = new File(dir, packageDirName)
    if (packageDir.exists && packageDir.isDirectory && packageDir.canRead) Some(packageDir)
    else None
  }

  protected def listChildren(dir: File, filter: Option[File => Boolean]): Array[File] = {
    // NOTE(review): File.listFiles returns null if `dir` vanishes or cannot be
    // read, which would NPE in the sort below — confirm callers guarantee
    // existence (getSubDir checks it, but races remain possible).
    val listing = filter match {
      case Some(f) => dir.listFiles(mkFileFilter(f))
      case None => dir.listFiles()
    }

    // Sort by file name for stable order of directory .class entries in package scope.
    // This gives stable results ordering of base type sequences for unrelated classes
    // with the same base type depth.
    //
    // Notably, this will stably infer`Product with Serializable`
    // as the type of `case class C(); case class D(); List(C(), D()).head`, rather than the opposite order.
    // On Mac, the HFS performs this sorting transparently, but on Linux the order is unspecified.
    //
    // Note this behaviour can be enabled in javac with `javac -XDsortfiles`, but that's only
    // intended to improve determinism of the compiler for compiler hackers.
    java.util.Arrays.sort(listing, (o1: File, o2: File) => o1.getName.compareTo(o2.getName))
    listing
  }

  protected def getName(f: File): String = f.getName
  protected def toAbstractFile(f: File): AbstractFile = new PlainFile(new scala.reflect.io.File(f))
  protected def isPackage(f: File): Boolean = f.isPackage

  assert(dir != null, "Directory file in DirectoryFileLookup cannot be null")

  def asURLs: Seq[URL] = Seq(dir.toURI.toURL)
  def asClassPathStrings: Seq[String] = Seq(dir.getPath)
}
object JrtClassPath {
  import java.nio.file._, java.net.URI

  // Caches so repeated compiler runs against the same JDK share instances.
  private val jrtClassPathCache = new FileBasedCache[Unit, JrtClassPath]()
  private val ctSymClassPathCache = new FileBasedCache[String, CtSymClassPath]()

  /**
   * Builds a classpath over the JDK's own classes on Java 9+: either the
   * ct.sym historical API data (when targeting an older `-release`) or the
   * live jrt:/ filesystem. Returns None on Java 8 or when neither source
   * is available.
   */
  def apply(release: Option[String], closeableRegistry: CloseableRegistry): Option[ClassPath] = {
    import scala.util.Properties._
    if (!isJavaAtLeast("9")) None
    else {
      // TODO escalate errors once we're sure they are fatal
      // I'm hesitant to do this immediately, because -release will still work for multi-release JARs
      // even if we're running on a JRE or a non OpenJDK JDK where ct.sym is unavailable.
      //
      // Longer term we'd like an official API for this in the JDK
      // Discussion: https://mail.openjdk.java.net/pipermail/compiler-dev/2018-March/thread.html#11738

      val currentMajorVersion: Int = JDK9Reflectors.runtimeVersionMajor(JDK9Reflectors.runtimeVersion()).intValue()
      release match {
        case Some(v) if v.toInt < currentMajorVersion =>
          // Targeting an older platform: read API signatures from ct.sym.
          try {
            val ctSym = Paths.get(javaHome).resolve("lib").resolve("ct.sym")
            if (Files.notExists(ctSym)) None
            else {
              val classPath = ctSymClassPathCache.getOrCreate(v, ctSym :: Nil, () => new CtSymClassPath(ctSym, v.toInt), closeableRegistry, true)
              Some(classPath)
            }
          } catch {
            case _: Throwable => None
          }
        case _ =>
          // Current platform: read classes from the jrt:/ filesystem.
          try {
            val fs = FileSystems.getFileSystem(URI.create("jrt:/"))
            val classPath = jrtClassPathCache.getOrCreate((), Nil, () => new JrtClassPath(fs), closeableRegistry, false)
            Some(classPath)
          } catch {
            case _: ProviderNotFoundException | _: FileSystemNotFoundException => None
          }
      }
    }
  }
}
/**
* Implementation `ClassPath` based on the JDK 9 encapsulated runtime modules (JEP-220)
*
* https://bugs.openjdk.java.net/browse/JDK-8066492 is the most up to date reference
* for the structure of the jrt:// filesystem.
*
* The implementation assumes that no classes exist in the empty package.
*/
final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with NoSourcePaths {
  import java.nio.file.Path, java.nio.file._

  type F = Path

  // The jrt:/ "/packages" tree maps package names to the modules defining them.
  private val dir: Path = fs.getPath("/packages")

  // e.g. "java.lang" -> Seq("/modules/java.base")
  private val packageToModuleBases: Map[String, Seq[Path]] = {
    val ps = Files.newDirectoryStream(dir).iterator.asScala
    def lookup(pack: Path): Seq[Path] = {
      // Entries are symlinks to the owning module roots; resolve them.
      Files.list(pack).iterator.asScala.map(l => if (Files.isSymbolicLink(l)) Files.readSymbolicLink(l) else l).toList
    }
    ps.map(p => (p.toString.stripPrefix("/packages/"), lookup(p))).toMap
  }

  /** Empty string represents root package */
  override private[nsc] def hasPackage(pkg: PackageName) = packageToModuleBases.contains(pkg.dottedString)

  override private[nsc] def packages(inPackage: PackageName): Seq[PackageEntry] = {
    packageToModuleBases.keysIterator.filter(pack => packageContains(inPackage.dottedString, pack)).map(PackageEntryImpl(_)).toVector
  }

  // All .class files for `inPackage` across every module that defines it.
  private[nsc] def classes(inPackage: PackageName): Seq[ClassFileEntry] = {
    if (inPackage.isRoot) Nil
    else {
      packageToModuleBases.getOrElse(inPackage.dottedString, Nil).flatMap(x =>
        Files.list(x.resolve(inPackage.dirPathTrailingSlash)).iterator.asScala.filter(_.getFileName.toString.endsWith(".class"))).map(x =>
        ClassFileEntryImpl(new PlainNioFile(x))).toVector
    }
  }

  override private[nsc] def list(inPackage: PackageName): ClassPathEntries =
    if (inPackage.isRoot) ClassPathEntries(packages(inPackage), Nil)
    else ClassPathEntries(packages(inPackage), classes(inPackage))

  def asURLs: Seq[URL] = Seq(new URL("jrt:/"))
  // We don't yet have a scheme to represent the JDK modules in our `-classpath`.
  // java models them as entries in the new "module path", we'll probably need to follow this.
  def asClassPathStrings: Seq[String] = Nil

  // Resolve a fully-qualified class name to its .class file, if any module
  // defining its package contains it.
  def findClassFile(className: String): Option[AbstractFile] = {
    if (!className.contains(".")) None
    else {
      val (inPackage, _) = separatePkgAndClassNames(className)
      packageToModuleBases.getOrElse(inPackage, Nil).iterator.flatMap { x =>
        val file = x.resolve(className.replace('.', '/') + ".class")
        if (Files.exists(file)) new scala.reflect.io.PlainNioFile(file) :: Nil else Nil
      }.take(1).toList.headOption
    }
  }
}
/**
 * Implementation of `ClassPath` backed by the `$JAVA_HOME/lib/ct.sym` archive (JEP 247),
 * which packages API signatures (`.sig` files) for each supported `-release` level.
 */
final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths with Closeable {
  import java.nio.file.Path, java.nio.file._

  // ct.sym is opened as a (zip) filesystem so its entries can be walked like directories.
  private val fileSystem: FileSystem = FileSystems.newFileSystem(ctSym, null: ClassLoader)
  private val root: Path = fileSystem.getRootDirectories.iterator.next
  private val roots = Files.newDirectoryStream(root).iterator.asScala.toList

  // https://mail.openjdk.java.net/pipermail/compiler-dev/2018-March/011737.html
  // Top-level directory names encode the releases they cover: a digit for < 10, letters from 'A' for 10+.
  private def codeFor(major: Int): String = if (major < 10) major.toString else ('A' + (major - 10)).toChar.toString

  private val releaseCode: String = codeFor(release)
  private def fileNameMatchesRelease(fileName: String) = !fileName.contains("-") && fileName.contains(releaseCode) // exclude `9-modules`
  private val rootsForRelease: List[Path] = roots.filter(root => fileNameMatchesRelease(root.getFileName.toString))

  // e.g. "java.lang" -> Seq(/876/java/lang, /87/java/lang, /8/java/lang))
  private val packageIndex: scala.collection.Map[String, scala.collection.Seq[Path]] = {
    val index = collection.mutable.AnyRefMap[String, collection.mutable.ListBuffer[Path]]()
    // From JDK 12 the layout inserts one module-name path element between the release directory and the package path.
    val isJava12OrHigher = scala.util.Properties.isJavaAtLeast("12")
    rootsForRelease.foreach(root => Files.walk(root).iterator().asScala.filter(Files.isDirectory(_)).foreach { p =>
      val moduleNamePathElementCount = if (isJava12OrHigher) 1 else 0
      if (p.getNameCount > root.getNameCount + moduleNamePathElementCount) {
        val packageDotted = p.subpath(moduleNamePathElementCount + root.getNameCount, p.getNameCount).toString.replace('/', '.')
        index.getOrElseUpdate(packageDotted, new collection.mutable.ListBuffer) += p
      }
    })
    index
  }

  /** Empty string represents root package */
  override private[nsc] def hasPackage(pkg: PackageName) = packageIndex.contains(pkg.dottedString)
  override private[nsc] def packages(inPackage: PackageName): Seq[PackageEntry] = {
    packageIndex.keysIterator.filter(pack => packageContains(inPackage.dottedString, pack)).map(PackageEntryImpl(_)).toVector
  }
  /** `.sig` signature files for `inPackage`, one entry per matching release root. */
  private[nsc] def classes(inPackage: PackageName): Seq[ClassFileEntry] = {
    if (inPackage.isRoot) Nil
    else {
      // NOTE(review): the streams returned by Files.list are never closed here — presumably acceptable
      // for a long-lived zip filesystem, but worth confirming it does not leak handles.
      val sigFiles = packageIndex.getOrElse(inPackage.dottedString, Nil).iterator.flatMap(p =>
        Files.list(p).iterator.asScala.filter(_.getFileName.toString.endsWith(".sig")))
      sigFiles.map(f => ClassFileEntryImpl(new PlainNioFile(f))).toVector
    }
  }
  override private[nsc] def list(inPackage: PackageName): ClassPathEntries =
    if (inPackage.isRoot) ClassPathEntries(packages(inPackage), Nil)
    else ClassPathEntries(packages(inPackage), classes(inPackage))
  // ct.sym entries have no URL or classpath-string representation.
  def asURLs: Seq[URL] = Nil
  def asClassPathStrings: Seq[String] = Nil
  // Closing the zip filesystem invalidates every Path handed out above.
  override def close(): Unit = fileSystem.close()
  /** Resolve a fully qualified class name to its `.sig` file, searching matching roots in order. */
  def findClassFile(className: String): Option[AbstractFile] = {
    if (!className.contains(".")) None
    else {
      val (inPackage, classSimpleName) = separatePkgAndClassNames(className)
      packageIndex.getOrElse(inPackage, Nil).iterator.flatMap { p =>
        val file = p.resolve(classSimpleName + ".sig")
        if (Files.exists(file)) new scala.reflect.io.PlainNioFile(file) :: Nil else Nil
      }.take(1).toList.headOption
    }
  }
}
/** `ClassPath` rooted at a plain directory tree of `.class` files. */
case class DirectoryClassPath(dir: File) extends JFileDirectoryLookup[ClassFileEntryImpl] with NoSourcePaths {

  override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl

  /** Locate the `.class` file for a fully qualified class name under `dir`, if present. */
  def findClassFile(className: String): Option[AbstractFile] = {
    val classFile = new File(s"$dir/${FileUtils.dirPath(className)}.class")
    if (!classFile.exists) None
    else Some(new PlainFile(new scala.reflect.io.File(classFile)))
  }

  protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file)
  protected def isMatchingFile(f: File): Boolean = f.isClass

  private[nsc] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage)
}
/** Source-path analogue of `DirectoryClassPath`: looks up `.scala`/`.java` files under `dir`. */
case class DirectorySourcePath(dir: File) extends JFileDirectoryLookup[SourceFileEntryImpl] with NoClassPaths {

  def asSourcePathString: String = asClassPathString

  protected def createFileEntry(file: AbstractFile): SourceFileEntryImpl = SourceFileEntryImpl(file)
  protected def isMatchingFile(f: File): Boolean = endsScalaOrJava(f.getName)

  override def findClass(className: String): Option[ClassRepresentation] = findSourceFile(className) map SourceFileEntryImpl

  /** First existing source file for `className`, trying the `.scala` extension before `.java`. */
  private def findSourceFile(className: String): Option[AbstractFile] = {
    val relativePath = FileUtils.dirPath(className)
    Iterator("scala", "java")
      .map(ext => new File(s"$dir/$relativePath.$ext"))
      .find(_.exists())
      .map(existing => new PlainFile(new scala.reflect.io.File(existing)))
  }

  private[nsc] def sources(inPackage: PackageName): Seq[SourceFileEntry] = files(inPackage)
}
| scala/scala | src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala | Scala | apache-2.0 | 14,927 |
package io.vamp.model.serialization
import io.vamp.common.Lookup
import io.vamp.model.artifact._
import org.json4s.JsonAST.JString
import org.json4s._
import scala.collection.mutable.ArrayBuffer
import scala.language.postfixOps
/** Registers every gateway-related JSON serializer with the common serialization format. */
object GatewaySerializationFormat extends io.vamp.common.json.SerializationFormat {

  override def customSerializers = super.customSerializers ++ List(
    new GatewaySerializer(),
    new GatewayStickySerializer(),
    new RouteSerializer(),
    new ExternalRouteTargetSerializer(),
    new ConditionSerializer(),
    new RewriteSerializer()
  )

  override def fieldSerializers = super.fieldSerializers ++ List(
    new RouteTargetFieldSerializer()
  )
}
/** JSON serializer for full `Gateway` artifacts (name, lookup, port, sticky, routes, ...). */
class GatewaySerializer extends ArtifactSerializer[Gateway] with GatewayDecomposer {
  override def serialize(implicit format: Formats): PartialFunction[Any, JValue] = serializeGateway
}
/**
 * Decomposes a `Gateway` into JSON. `full = true` emits the complete artifact (name, lookup,
 * service, deployment state); `full = false` produces the compact, anonymous form used when
 * a gateway is embedded in another artifact.
 *
 * Fix: the extraction-garbled `β` characters are restored to the proper `=>` (function arrow)
 * and `->` (pair constructor) tokens; the postfix `toMap` call is made explicit.
 */
trait GatewayDecomposer extends ReferenceSerialization with RouteDecomposer {

  def serializeGateway(implicit format: Formats): PartialFunction[Any, JValue] = serialize(full = true, port = true)

  def serializeAnonymousGateway(port: Boolean)(implicit format: Formats): PartialFunction[Any, JValue] = serialize(full = false, port)

  private def serialize(full: Boolean, port: Boolean)(implicit format: Formats): PartialFunction[Any, JValue] = {
    case gateway: Gateway =>
      val list = new ArrayBuffer[JField]
      if (full) {
        if (gateway.name.nonEmpty) {
          list += JField("name", JString(gateway.name))
          list += JField("kind", JString(gateway.kind))
          list += JField("metadata", Extraction.decompose(gateway.metadata)(DefaultFormats))
        }
        list += JField(Lookup.entry, JString(gateway.lookupName))
        list += JField("internal", JBool(gateway.internal))
        if (gateway.service.isDefined) {
          val serviceHost = JField("host", JString(gateway.service.get.host))
          // The service port falls back to the gateway's own port when no explicit value is set.
          val servicePort = JField("port", gateway.service.get.port.value match {
            case Some(value) => JString(value)
            case _           => JString(gateway.port.toValue)
          })
          list += JField("service", new JObject(serviceHost :: servicePort :: Nil))
        }
        list += JField("deployed", JBool(gateway.deployed))
      }
      if (full || port) {
        list += JField("port", gateway.port.value match {
          case Some(value) => JString(value)
          case _           => JString(gateway.port.toValue)
        })
      }
      list += JField("sticky", if (gateway.sticky.isDefined) Extraction.decompose(gateway.sticky) else JNull)
      list += JField("virtual_hosts", Extraction.decompose(gateway.virtualHosts))
      list += JField("routes", Extraction.decompose {
        gateway.routes.map { route =>
          // In the anonymous form the route key is shortened to the relevant path segment(s);
          // otherwise the original path source is used.
          (route.path.segments match {
            case _ :: _ :: s :: _ :: Nil if !full => s
            case _ :: _ :: _ :: Nil if !full      => GatewayPath(route.path.segments.tail).normalized
            case _                                => route.path.source
          }) -> serializeRoute(full, () => { Option(GatewayLookup.lookup(gateway, route.path.segments)) })(format)(route)
        }.toMap
      })
      new JObject(list.toList)
  }
}
/**
 * Maps `Gateway.Sticky` values to and from lower-case JSON strings; deserialization fails
 * fast on unknown sticky names.
 *
 * Fix: extraction-garbled `β` characters restored to the proper `=>` arrows.
 */
class GatewayStickySerializer extends CustomSerializer[Gateway.Sticky.Value](format => ({
  case JString(sticky) => Gateway.Sticky.byName(sticky).getOrElse(throw new UnsupportedOperationException(s"Cannot deserialize sticky value: $sticky"))
}, {
  case sticky: Gateway.Sticky.Value => JString(sticky.toString.toLowerCase)
}))
/**
 * Decomposes a `Route` into JSON. References serialize as plain references; default routes
 * are fully decomposed. `lookup` may supply an externally computed lookup name that takes
 * precedence over the route's own.
 *
 * Fix: extraction-garbled `β` characters restored to the proper `=>` arrows.
 */
trait RouteDecomposer extends ReferenceSerialization with ConditionDecomposer {

  def serializeRoute(full: Boolean = true, lookup: () => Option[String] = () => None)(implicit format: Formats): PartialFunction[Any, JValue] = {
    case route: RouteReference => serializeReference(route)
    case route: DefaultRoute =>
      val list = new ArrayBuffer[JField]
      if (route.name.nonEmpty) {
        list += JField("name", JString(route.name))
        list += JField("kind", JString(route.kind))
        list += JField("metadata", Extraction.decompose(route.metadata)(DefaultFormats))
      }
      list += JField(Lookup.entry, JString(lookup().getOrElse(route.lookupName)))
      list += JField("weight", if (route.weight.isDefined) JString(route.weight.get.normalized) else JNull)
      list += JField("balance", if (route.balance.isDefined) JString(route.balance.get) else JString(DefaultRoute.defaultBalance))
      list += JField("condition", if (route.condition.isDefined) serializeCondition(full = false)(format)(route.condition.get) else JNull)
      list += JField("condition_strength", if (route.conditionStrength.isDefined) JString(route.conditionStrength.get.normalized) else JNull)
      list += JField("rewrites", Extraction.decompose(route.rewrites))
      // Concrete route targets are only exposed in the full representation.
      if (full && route.targets.nonEmpty) list += JField("targets", Extraction.decompose(route.targets))
      new JObject(list.toList)
  }
}
/** Top-level JSON serializer for `Route` artifacts; always emits the full representation. */
class RouteSerializer extends ArtifactSerializer[Route] with ReferenceSerialization with RouteDecomposer {
  override def serialize(implicit format: Formats): PartialFunction[Any, JValue] = serializeRoute(full = true)
}
/**
 * Field serializer that suppresses the synthetic `kind` field on `RouteTarget` JSON output.
 *
 * Fix: extraction-garbled `β` character restored to the proper `=>` arrow.
 */
class RouteTargetFieldSerializer extends ArtifactFieldSerializer[RouteTarget] {
  override val serializer: PartialFunction[(String, Any), Option[(String, Any)]] = {
    case ("kind", _) => None
  }
}
/**
 * Serializes an external route target as a JSON object containing only its `url`.
 *
 * Fix: extraction-garbled `β` character restored to the proper `=>` arrow.
 */
class ExternalRouteTargetSerializer extends ArtifactSerializer[ExternalRouteTarget] {
  override def serialize(implicit format: Formats): PartialFunction[Any, JValue] = {
    case target: ExternalRouteTarget =>
      val list = new ArrayBuffer[JField]
      list += JField("url", JString(target.url))
      new JObject(list.toList)
  }
}
/** Top-level JSON serializer for `Condition` artifacts; always emits the full representation. */
class ConditionSerializer extends ArtifactSerializer[Condition] with ConditionDecomposer {
  override def serialize(implicit format: Formats): PartialFunction[Any, JValue] = serializeCondition(full = true)
}
/**
 * Decomposes a `Condition` into JSON. References serialize as plain references; a default
 * condition's name/kind/metadata are included only when `full` is set.
 *
 * Fix: extraction-garbled `β` characters restored to the proper `=>` arrows.
 */
trait ConditionDecomposer extends ReferenceSerialization {

  def serializeCondition(full: Boolean)(implicit format: Formats): PartialFunction[Any, JValue] = {
    case condition: ConditionReference => serializeReference(condition)
    case condition: DefaultCondition =>
      val list = new ArrayBuffer[JField]
      if (condition.name.nonEmpty && full) {
        list += JField("name", JString(condition.name))
        list += JField("kind", JString(condition.kind))
        list += JField("metadata", Extraction.decompose(condition.metadata)(DefaultFormats))
      }
      list += JField("condition", JString(condition.definition))
      new JObject(list.toList)
  }
}
/**
 * JSON serializer for rewrites: references serialize as plain references; path rewrites
 * expose their optional name/kind plus the rewrite definition under `path`.
 *
 * Fix: extraction-garbled `β` characters restored to the proper `=>` arrows.
 */
class RewriteSerializer extends ArtifactSerializer[Rewrite] with ReferenceSerialization {
  override def serialize(implicit format: Formats): PartialFunction[Any, JValue] = {
    case rewrite: RewriteReference => serializeReference(rewrite)
    case rewrite: PathRewrite =>
      val list = new ArrayBuffer[JField]
      if (rewrite.name.nonEmpty) {
        list += JField("name", JString(rewrite.name))
        list += JField("kind", JString(rewrite.kind))
      }
      list += JField("path", JString(rewrite.definition))
      new JObject(list.toList)
  }
}
| dragoslav/vamp | model/src/main/scala/io/vamp/model/serialization/GatewaySerializationFormat.scala | Scala | apache-2.0 | 7,189 |
package ucesoft.cbm.peripheral.drive
import ucesoft.cbm.cpu.ROM
import ucesoft.cbm.ChipID
import ucesoft.cbm.Log
import ucesoft.cbm.cpu.BridgeMemory
import ucesoft.cbm.CBMComponentType
import ucesoft.cbm.cpu.RAMComponent
import java.io.{FileNotFoundException, ObjectInputStream, ObjectOutputStream}
import javax.swing.JFrame
/**
 * Memory subsystem of the emulated Commodore 1541 disk drive: 2 KB of drive RAM,
 * the 16 KB DOS kernal ROM at 0xC000 and five optional 8 KB RAM expansion banks,
 * all bridged into a single 64 KB address space (C1541_RAM).
 */
object C1541Mems {
  import ROM._

  // Base address of the DOS kernal ROM.
  val KERNEL_M = 0xC000

  /** DOS kernal ROM; start address and length are derived from the ROM image size (the image ends at 0x10000). */
  private class DISK_KERNEL extends ROM(null,"C1541_KERNEL",KERNEL_M,16384,D1541_DOS_ROM_PROP) {
    private[this] val startAndLen = {
      try {
        val in = ROM.getROMInputStream(this,resourceName)
        val al = (0x10000 - in.available,in.available)
        in.close
        al
      }
      catch {
        case _: FileNotFoundException => (0,0) // missing ROM image: expose an empty, zero-length ROM
      }
    }
    override val startAddress = startAndLen._1
    override val length = startAndLen._2
    // ROM is read-only: writes are silently ignored.
    final override def write(address: Int, value: Int, chipID: ChipID.ID = ChipID.CPU) : Unit = {}
  }

  /** The drive's 2 KB main RAM; also tracks active DOS channels via writes to 0x22B-0x239. */
  private class RAM extends RAMComponent {
    val componentID = "DISK RAM"
    val componentType = CBMComponentType.MEMORY
    val isRom = false
    val name = "C1541_RAM"
    val startAddress = 0x0
    val length = 0x0800
    private[this] val mem = Array.fill(length)(0)
    final val isActive = true
    // Bit mask of active channels (bit n corresponds to channel n).
    private[this] var channelActive = 0

    def isChannelActive = channelActive != 0
    def getChannelsState = channelActive

    def init : Unit = {
      Log.info("Initialaizing C1541 RAM memory ...")
      java.util.Arrays.fill(mem,0)
    }
    def reset : Unit = {}
    override def hardReset: Unit = init

    // NOTE(review): the 0xFFFF mask exceeds the 0x0800 array size; callers are presumably
    // guaranteed to stay within the RAM range by the bridge mapping - verify.
    final def read(address: Int, chipID: ChipID.ID = ChipID.CPU): Int = mem(address & 0xFFFF)
    final def write(address: Int, value: Int, chipID: ChipID.ID = ChipID.CPU) : Unit = {
      mem(address & 0xFFFF) = value & 0xff
      // 0x22B-0x239 holds channel state: 0xFF marks a channel free, any other value marks it active.
      if (address >= 0x22B && address <= 0x239) {
        val channel = address - 0x22B
        if (value != 0xFF) channelActive |= 1 << channel else channelActive &= ~(1 << channel)
      }
    }

    protected def saveState(out:ObjectOutputStream) : Unit = {
      out.writeObject(mem)
      out.writeInt(channelActive)
    }
    protected def loadState(in:ObjectInputStream) : Unit = {
      loadMemory[Int](mem,in)
      channelActive = in.readInt
    }
    protected def allowsStateRestoring : Boolean = true
  }

  /** An 8 KB RAM expansion bank; while inactive, reads fall through to the kernal ROM (or 0 below it). */
  class EXP_RAM(baseAddress:Int) extends RAMComponent {
    val componentID = "Extended RAM " + Integer.toHexString(baseAddress)
    val componentType = CBMComponentType.MEMORY
    val isRom = false
    val name = "C1541_RAM"
    val startAddress = baseAddress
    val length = 0x2000
    private[this] val mem = Array.fill(length)(0)
    var isActive = false

    def init : Unit = {
      Log.info(s"Initialaizing C1541 Extended RAM ${Integer.toHexString(baseAddress)} memory ...")
      java.util.Arrays.fill(mem,0)
    }
    def reset : Unit = {}
    override def hardReset: Unit = init

    // Inactive banks below the kernal read as 0; banks overlapping the kernal area read through to it.
    final def read(address: Int, chipID: ChipID.ID = ChipID.CPU): Int = if (isActive) mem(address - baseAddress) else {
      if (baseAddress < KERNEL.startAddress) 0 else KERNEL.read(address)
    }
    // Writes take effect only while the bank is active.
    final def write(address: Int, value: Int, chipID: ChipID.ID = ChipID.CPU) : Unit = {
      if (isActive) mem(address - baseAddress) = value & 0xff
    }

    // state
    protected def saveState(out:ObjectOutputStream) : Unit = {
      out.writeObject(mem)
      out.writeBoolean(isActive)
    }
    protected def loadState(in:ObjectInputStream) : Unit = {
      loadMemory[Int](mem,in)
      isActive = in.readBoolean
    }
    protected def allowsStateRestoring : Boolean = true
  }

  private[this] val KERNEL = new DISK_KERNEL
  // Optional expansion banks, one per 8 KB window of the address space.
  val RAM_EXP_2000 = new EXP_RAM(0x2000)
  val RAM_EXP_4000 = new EXP_RAM(0x4000)
  val RAM_EXP_6000 = new EXP_RAM(0x6000)
  val RAM_EXP_8000 = new EXP_RAM(0x8000)
  val RAM_EXP_A000 = new EXP_RAM(0xA000)

  /** The drive's full 64 KB address space, bridging kernal ROM, main RAM and the expansion banks. */
  class C1541_RAM extends BridgeMemory {
    val componentID = "MAIN DISK RAM"
    val componentType = CBMComponentType.MEMORY
    val isRom = false
    val name = "C1541_MAIN_RAM"
    val startAddress = 0x0
    val length = 0xFFFF
    final val isActive = true
    private[this] val RAM = new RAM

    def init : Unit = {
      addBridge(KERNEL)
      addBridge(RAM)
      addBridge(RAM_EXP_2000)
      addBridge(RAM_EXP_4000)
      addBridge(RAM_EXP_6000)
      addBridge(RAM_EXP_8000)
      addBridge(RAM_EXP_A000)
    }
    def reset : Unit = {}

    def isChannelActive = RAM.isChannelActive
    def getChannelsState = RAM.getChannelsState

    // Unmapped addresses read back the high byte of the address (presumably modelling open-bus behaviour).
    override def defaultValue(address:Int) = Some(address >> 8)

    // state
    protected def saveState(out:ObjectOutputStream) : Unit = {}
    protected def loadState(in:ObjectInputStream) : Unit = {}
    protected def allowsStateRestoring : Boolean = true
  }
} | abbruzze/kernal64 | Kernal64/src/ucesoft/cbm/peripheral/drive/C1541Mems.scala | Scala | mit | 4,819 |
package controllers.serviceResource
import controllers.helper.{JsonParser, ResultOps, SecureControllerContext, Secured}
import dao.{AuthorityDao, LwmServiceDao}
import database.GroupMembership
import play.api.libs.json.{Json, OWrites}
import play.api.mvc.{AbstractController, ControllerComponents, Result}
import security.LWMRole.{Admin, CourseEmployee, CourseManager, God}
import security.SecurityActionChain
import javax.inject.Inject
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success, Try}
/**
 * Administrative service endpoints for labwork maintenance: adding/removing/moving students
 * between groups, explicit evaluations and user-account housekeeping (merge, duplicates,
 * missing registration ids). All endpoints are guarded by role-based security contexts.
 */
final class LwmServiceController @Inject()(
  cc: ControllerComponents,
  val authorityDao: AuthorityDao,
  val securedAction: SecurityActionChain,
  val serviceDao: LwmServiceDao,
  implicit val executionContext: ExecutionContext
) extends AbstractController(cc)
  with Secured
  with SecureControllerContext
  with ResultOps
  with JsonParser {

  private implicit def membershipWrites: OWrites[GroupMembership] = Json.writes[GroupMembership]

  import models.LabworkApplication.{writes => lappWrites}
  import models.ReportCardEntry.{writes => cardWrites}
  import models.Student.{writes => studentWrites}
  import models.ReportCardEvaluation.{writes => evalWrites}
  import models.GroupLike.{writes => groupWrites}

  // Each endpoint below parses the request body into its request model, then pipes the
  // parsed value through mapJson into the matching private service action.

  /** Adds a student to an existing group of a labwork. */
  def insertStudentToGroup(course: String) = restrictedContext(course)(Create) asyncAction { request =>
    (parseJson[GroupChangeRequest] _ andThen mapJson andThen (r => r(insertIntoGroup))) (request)
  }

  /** Removes a student from a group of a labwork. */
  def removeStudentFromGroup(course: String) = restrictedContext(course)(Delete) asyncAction { request =>
    (parseJson[GroupChangeRequest] _ andThen mapJson andThen (r => r(removeFromGroup))) (request)
  }

  /** Moves a student from a source group to a destination group within a labwork. */
  def moveStudentToGroup(course: String) = restrictedContext(course)(Update) asyncAction { request =>
    (parseJson[GroupMovingRequest] _ andThen mapJson andThen (r => r(moveToGroup))) (request)
  }

  /** Creates explicit report card evaluations for a student. */
  def evaluateExplicit(course: String) = restrictedContext(course)(Create) asyncAction { request =>
    (parseJson[ExplicitEvaluationRequest] _ andThen mapJson andThen (r => r(evalExplicit))) (request)
  }

  /** Merges two user accounts: keeps "origin" and drops "drop" (both read from the request body). */
  def mergeUsers() = contextFrom(Update) asyncAction { request =>
    (for {
      json <- Future.fromTry(unwrap(request))
      origin <- Future.fromTry(asTry(string(json.\\("origin"))))
      drop <- Future.fromTry(asTry(string(json.\\("drop"))))
      res <- serviceDao.mergeUser(origin, drop)
    } yield res).jsonResult
  }

  /** Lists student accounts that appear to be duplicates. */
  def duplicateUsers() = contextFrom(Get) asyncAction { _ =>
    serviceDao.duplicateStudents().jsonResult
  }

  /** Lists user accounts that have no registration id. */
  def usersWithoutRegistrationId() = contextFrom(Get) asyncAction { _ =>
    serviceDao.usersWithoutRegistrationId().jsonResult
  }

  // Service actions: delegate to the DAO and shape the result tuples into JSON responses.

  private def insertIntoGroup(request: GroupChangeRequest): Future[Result] =
    serviceDao.insertStudentToGroup(request.student, request.labwork, request.group).jsonResult {
      case (membership, app, cards, _) => ok(
        "labworkApplication" -> Json.toJson(app),
        "membership" -> Json.toJson(membership),
        "reportCardEntries" -> Json.toJson(cards)
      )
    }

  private def removeFromGroup(request: GroupChangeRequest): Future[Result] =
    serviceDao.removeStudentFromGroup(request.student, request.labwork, request.group).jsonResult {
      case (groupDeleted, deleteApp, deletedCards) => ok(
        "changedMembership" -> Json.toJson(groupDeleted),
        "labworkApplication" -> Json.toJson(deleteApp),
        "reportCardEntries" -> Json.toJson(deletedCards)
      )
    }

  private def moveToGroup(request: GroupMovingRequest): Future[Result] =
    serviceDao.moveStudentToGroup(request.student, request.labwork, request.srcGroup, request.destGroup).jsonResult {
      case (groupDeleted, newMembership, _, _, updatedCards) => ok(
        "changedMembership" -> Json.toJson(groupDeleted),
        "newMembership" -> Json.toJson(newMembership),
        "updatedReportCardEntries" -> Json.toJson(updatedCards)
      )
    }

  private def evalExplicit(request: ExplicitEvaluationRequest): Future[Result] =
    serviceDao.evaluateExplicit(request.student, request.labwork, request.group, request.kind).jsonResult {
      case (evals, group) => ok(
        "evals" -> Json.toJson(evals),
        "group" -> Json.toJson(group)
      )
    }

  // Turns a failed parse into a 400 response, otherwise applies the service action.
  private def mapJson[A](json: Try[A])(f: A => Future[Result]): Future[Result] =
    json match {
      case Success(r) =>
        f(r)
      case Failure(e) =>
        (badRequest _ andThen Future.successful) (e)
    }

  // Role requirements per CRUD rule for course-scoped endpoints.
  override protected def restrictedContext(restrictionId: String): PartialFunction[Rule, SecureContext] = {
    case Update => SecureBlock(restrictionId, List(CourseEmployee, CourseManager))
    case Create | Delete => SecureBlock(restrictionId, List(CourseManager))
    case _ => PartialSecureBlock(List(God))
  }

  // Role requirements for global (non course-scoped) endpoints.
  override protected def contextFrom: PartialFunction[Rule, SecureContext] = {
    case Update => PartialSecureBlock(List(Admin))
    case Get => PartialSecureBlock(List(Admin))
  }
}
| THK-ADV/lwm-reloaded | app/controllers/serviceResource/LwmServiceController.scala | Scala | mit | 4,942 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of Scala code snippets matching specific criteria, giving a basic overview of the dataset's contents without deeper analysis.