code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Copyright (c) 2016 SnappyData, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package io.snappydata.hydra
import java.io.{File, FileOutputStream, PrintWriter}
import scala.util.{Failure, Success, Try}
import com.typesafe.config.Config
import org.apache.spark.sql._
/**
* Fetches already created tables. Airline table is already persisted in
* Snappy store. Cache the airline table in Spark cache as well for
* comparison. Sample airline table and persist it in Snappy store.
* Run a aggregate query on column and row table and return the results.
* This Map will be sent over REST.
*/
/**
* Created by swati on 6/4/16.
*/
object AirlineDataQueriesJob extends SnappySQLJob {

  // Total wall-clock duration (ms) for which runQueries keeps re-executing
  // the whole query set (10 minutes, as in the original loop bound).
  private val queryLoopDurationMillis = 600000L

  /** Entry point: caches the parquet-staged airline table in Spark, runs the
   *  timed query loop and reports where the log file was written.
   *
   *  @param snSession the Snappy session provided by the job server
   *  @param jobConfig must contain "logFileName", the file timings are appended to
   *  @return a pointer to the log file location, or rethrows the failure
   */
  override def runSnappyJob(snSession: SnappySession, jobConfig: Config): Any = {
    val colTable = "AIRLINE"
    val parquetTable = "STAGING_AIRLINE"
    val rowTable = "AIRLINEREF"
    val snc = snSession.sqlContext

    def getCurrentDirectory = new java.io.File(".").getCanonicalPath

    // scalastyle:off println
    // Append (not truncate) so repeated runs accumulate in the same log.
    val pw = new PrintWriter(new FileOutputStream(new File(jobConfig.getString("logFileName")), true))
    Try {
      snc.sql("set spark.sql.shuffle.partitions=6")
      // Get the already created tables.
      val airlineDF: DataFrame = snc.table(colTable)
      val airlineCodeDF: DataFrame = snc.table(rowTable)
      val airlineParquetDF: DataFrame = snc.table(parquetTable)
      // Cache the airline data in a Spark table as well; it is used to
      // compare against the Snappy Store table.
      airlineParquetDF.cache()
      airlineParquetDF.count()
      runQueries(pw, snc)
    } match {
      case Success(_) =>
        pw.close()
        s"See ${getCurrentDirectory}/${jobConfig.getString("logFileName")}"
      case Failure(e) =>
        pw.close()
        throw e
    }
  }

  /** Runs a single query and returns the milliseconds spent collecting its
   *  result. Only collect() is timed (not snc.sql), matching the original
   *  measurement which excluded query parsing/planning setup. */
  private def executeAndTime(snc: SnappyContext, query: String): Long = {
    val queryResult = snc.sql(query)
    val start = System.currentTimeMillis
    queryResult.collect()
    System.currentTimeMillis - start
  }

  // Method for running all olap and oltp queries repeatedly for a fixed
  // duration, then reporting the total elapsed time for each query along
  // with the number of times each query executed.
  def runQueries(pw: PrintWriter, snc: SnappyContext): Unit = {
    // Query strings are loop-invariant, so they are built once up front.
    // This query retrieves which airline had the most flights each year.
    val query1: String = "select count(*) flightRecCount, description AirlineName, UniqueCarrier carrierCode ,Year_ \\n " +
      "from airline , airlineref\\n " +
      "where airline.UniqueCarrier = airlineref.code\\n " +
      "group by UniqueCarrier,description, Year_ \\n " +
      "order by flightRecCount desc limit 10 "
    // This query retrieves which airlines arrive on schedule.
    val query2: String = "select AVG(ArrDelay) arrivalDelay, UniqueCarrier carrier from airline \\n" +
      " group by UniqueCarrier\\n" +
      "order by arrivalDelay "
    // This query retrieves which airlines arrive on schedule; JOIN with reference table.
    val query3: String = "select AVG(ArrDelay) arrivalDelay, description AirlineName, UniqueCarrier carrier \\n" +
      "from airline, airlineref \\n" +
      "where airline.UniqueCarrier = airlineref.Code \\n " +
      "group by UniqueCarrier, description \\n " +
      "order by arrivalDelay "
    // This query retrieves the trend in arrival delays across all airlines in the US.
    val query4: String = "select AVG(ArrDelay) ArrivalDelay, Year_\\n " +
      "from airline \\n " +
      "group by Year_ \\n " +
      "order by Year_ "
    // This query retrieves which airline out of San Francisco had most delays due to weather.
    val query5: String = "SELECT sum(WeatherDelay) totalWeatherDelay, airlineref.DESCRIPTION \\n " +
      " FROM airline, airlineref \\n " +
      " WHERE airline.UniqueCarrier = airlineref.CODE" +
      " AND Origin like '%SFO%' AND WeatherDelay > 0 \\n" +
      " GROUP BY DESCRIPTION \\n limit 20"

    var query1ExecutionCount, query2ExecutionCount,
        query3ExecutionCount, query4ExecutionCount, query5ExecutionCount = 0
    var totalTimeQuery1, totalTimeQuery2, totalTimeQuery3,
        totalTimeQuery4, totalTimeQuery5: Long = 0

    val endTime: Long = System.currentTimeMillis + queryLoopDurationMillis
    while (endTime > System.currentTimeMillis()) {
      totalTimeQuery1 += executeAndTime(snc, query1)
      query1ExecutionCount += 1
      totalTimeQuery2 += executeAndTime(snc, query2)
      query2ExecutionCount += 1
      totalTimeQuery3 += executeAndTime(snc, query3)
      query3ExecutionCount += 1
      totalTimeQuery4 += executeAndTime(snc, query4)
      query4ExecutionCount += 1
      // Bug fix: query5's timing previously re-collected query4's DataFrame
      // (result5 = Query4Result.collect()), so query5 results were never
      // actually fetched and its timing was wrong.
      totalTimeQuery5 += executeAndTime(snc, query5)
      query5ExecutionCount += 1
    }

    pw.println(s"\\n****** countQueryWithGroupByOrderBy Execution " +
      s"took ${totalTimeQuery1} ms******")
    pw.println(s"\\n****** countQueryWithGroupByOrderBy Execution " +
      s"count is :: ${query1ExecutionCount} ******")
    pw.println(s"\\n****** avgArrDelayWithGroupByOrderByForScheduleQuery Execution " +
      s"took ${totalTimeQuery2} ms******")
    pw.println(s"\\n****** avgArrDelayWithGroupByOrderByForScheduleQuery Execution " +
      s"count is :: ${query2ExecutionCount} ******")
    pw.println(s"\\n****** avgArrDelayWithGroupByOrderByWithJoinForScheduleQuery Execution " +
      s"took ${totalTimeQuery3} ms******")
    pw.println(s"\\n****** avgArrDelayWithGroupByOrderByWithJoinForScheduleQuery Execution " +
      s"count is :: ${query3ExecutionCount} ******")
    pw.println(s"\\n****** avgArrDelayWithGroupByOrderByForTrendAnalysisQuery Execution " +
      s"took ${totalTimeQuery4} ms******")
    pw.println(s"\\n****** avgArrDelayWithGroupByOrderByForTrendAnalysisQuery Execution " +
      s"count is :: ${query4ExecutionCount} ******")
    pw.println(s"\\n****** sumWeatherDelayWithGroupByWithLimitQuery Execution " +
      s"took ${totalTimeQuery5} ms******")
    pw.println(s"\\n****** sumWeatherDelayWithGroupByWithLimitQuery Execution " +
      s"count is :: ${query5ExecutionCount} ******")
  }

  override def isValidJob(sc: SnappySession, config: Config): SnappyJobValidation = SnappyJobValid()
}
| vjr/snappydata | dtests/src/test/scala/io/snappydata/hydra/AirlineDataQueriesJob.scala | Scala | apache-2.0 | 7,614 |
package pbdirect
/** Factory helpers for Java identity-based maps (keys compared by reference). */
object IdentityMaps {
  /** Returns a fresh, empty `java.util.Map` whose key comparison uses
   *  reference identity (`eq`) rather than `equals`. */
  def emptyJavaIdentityMap[K, V]: java.util.Map[K, V] = {
    val identityMap = new java.util.IdentityHashMap[K, V]
    identityMap
  }
}
| btlines/pbdirect | jvm/src/main/scala/pbdirect/IdentityMaps.scala | Scala | mit | 134 |
/*
* -╥⌐⌐⌐⌐ -⌐⌐⌐⌐-
* ≡╢░░░░⌐\░░░φ ╓╝░░░░⌐░░░░╪╕
* ╣╬░░` `░░░╢┘ φ▒╣╬╝╜ ░░╢╣Q
* ║╣╬░⌐ ` ╤▒▒▒Å` ║╢╬╣
* ╚╣╬░⌐ ╔▒▒▒▒`«╕ ╢╢╣▒
* ╫╬░░╖ .░ ╙╨╨ ╣╣╬░φ ╓φ░╢╢Å
* ╙╢░░░░⌐"░░░╜ ╙Å░░░░⌐░░░░╝`
* ``˚¬ ⌐ ˚˚⌐´
*
* Copyright © 2016 Flipkart.com
*/
package com.flipkart.connekt.commons.services
import com.flipkart.connekt.commons.cache.{LocalCacheManager, LocalCacheType}
import com.flipkart.connekt.commons.core.Wrappers._
import com.flipkart.connekt.commons.dao.{PrivDao, TUserInfo}
import com.flipkart.connekt.commons.entities.UserType.UserType
import com.flipkart.connekt.commons.entities.{ResourcePriv, UserType}
import com.flipkart.connekt.commons.factories.{ConnektLogger, LogFile}
import com.flipkart.connekt.commons.sync.SyncType.SyncType
import com.flipkart.connekt.commons.sync.{SyncDelegate, SyncManager, SyncType}
import scala.util.{Failure, Success, Try}
/** Resolves user/group/global resource privileges with local caching.
 *
 *  Privilege lookups go to `privDao` and are memoized in the local cache
 *  (negative lookups are cached as `None`). Cache entries are invalidated
 *  via AUTH_CHANGE sync events delivered through [[SyncManager]].
 *
 *  @param privDao     DAO for persisted resource privileges
 *  @param userInfoDao DAO for user records (used to resolve group membership)
 */
class AuthorisationService(privDao: PrivDao, userInfoDao: TUserInfo) extends TAuthorisationService with SyncDelegate {

  // Register for cluster-wide auth-change notifications so cached entries
  // can be invalidated when privileges change elsewhere.
  SyncManager.get().addObserver(this, List(SyncType.AUTH_CHANGE))

  // Privileges granted to everyone, loaded once from the GLOBAL "*" entry.
  private lazy val globalPrivileges = {
    read("*", UserType.GLOBAL) match {
      case Some(priv) => priv.resources.split(',').toList
      case None => List()
    }
  }

  /** Cache key: identifier concatenated with the user-type level. */
  private def cacheKey(identifier: String, level: UserType): String = identifier + level.toString

  /** Drops the cached privileges entry for (identifier, level). */
  private def removeCache(identifier: String, level: UserType): Unit = {
    LocalCacheManager.getCache(LocalCacheType.ResourcePriv).remove(cacheKey(identifier, level))
  }

  /** Reads the privileges for `identifier` at `level`, consulting the local
   *  cache first and caching the DAO result (including `None`) on a miss. */
  private def read(identifier: String, level: UserType): Option[ResourcePriv] = {
    val key = cacheKey(identifier, level)
    LocalCacheManager.getCache(LocalCacheType.ResourcePriv).get[Option[ResourcePriv]](key) match {
      case Some(p: Option[ResourcePriv]) => p
      case None =>
        // Previous version wrapped this in a try/catch that only re-threw
        // the exception unchanged; exceptions still propagate to the caller.
        val accessPrivilege = privDao.getPrivileges(identifier, level)
        LocalCacheManager.getCache(LocalCacheType.ResourcePriv).put[Option[ResourcePriv]](key, accessPrivilege)
        accessPrivilege
    }
  }

  /** Privileges granted to a group, empty if the group has none recorded. */
  override def getGroupPrivileges(groupName: String): List[String] = {
    read(groupName, UserType.GROUP).map(_.resources.split(',').toList).getOrElse(List())
  }

  /** Privileges granted directly to a user, empty if none recorded. */
  override def getUserPrivileges(userName: String): List[String] = {
    read(userName, UserType.USER).map(_.resources.split(',').toList).getOrElse(List())
  }

  /** All groups for `userName`: persisted groups plus, for e-mail style names,
   *  an implicit group derived from the part after '@'. Results are cached. */
  def getGroups(userName: String): List[String] = {
    LocalCacheManager.getCache(LocalCacheType.UserGroups).get[List[String]](userName).getOrElse {
      val userDomainGroup = if (userName.contains("@")) Some(userName.split("@")(1)) else None
      val userGroups = userInfoDao.getUserInfo(userName).map(_.getUserGroups).getOrElse(List.empty)
      val userAllGroups = userGroups ::: userDomainGroup.toList
      LocalCacheManager.getCache(LocalCacheType.UserGroups).put(userName, userAllGroups)
      userAllGroups
    }
  }

  /** Union of user-level, group-level and global privileges for `userName`. */
  override def getAllPrivileges(userName: String): List[String] = {
    val userPrivs = getUserPrivileges(userName)
    val groupPrivs = getGroups(userName).flatMap(getGroupPrivileges)
    userPrivs ++ groupPrivs ++ globalPrivileges
  }

  /** True when the user holds at least one of `resource` (case-insensitive:
   *  requested resources are upper-cased before the intersection). */
  override def isAuthorized(username: String, resource: String*): Try[Boolean] = {
    try {
      Success(getAllPrivileges(username).intersect(resource.map(_.toUpperCase)).nonEmpty)
    } catch {
      case e: Exception =>
        ConnektLogger(LogFile.SERVICE).error(s"Error isAuthorized user [$username] info: ${e.getMessage}", e)
        Failure(e)
    }
  }

  /** Revokes `resources` from (userId, userType) and invalidates the cache. */
  override def removeAuthorization(userId: String, userType: UserType, resources: List[String]): Try[Unit] = {
    try {
      privDao.removePrivileges(userId, userType, resources)
      removeCache(userId, userType)
      // Fixed: was Success(Unit), which wraps the Unit *companion object*
      // rather than the unit value ().
      Success(())
    } catch {
      case e: Exception =>
        Failure(e)
    }
  }

  /** Grants `resources` to (userId, userType) and invalidates the cache. */
  override def addAuthorization(userId: String, userType: UserType, resources: List[String]): Try[Unit] = {
    try {
      privDao.addPrivileges(userId, userType, resources)
      removeCache(userId, userType)
      // Fixed: was Success(Unit) — see removeAuthorization.
      Success(())
    } catch {
      case e: Exception =>
        Failure(e)
    }
  }

  /** Sync callback: on AUTH_CHANGE, drops the cached entry named by the
   *  event args (identifier first, user-type last); other events ignored. */
  override def onUpdate(_type: SyncType, args: List[AnyRef]): Unit = {
    _type match {
      case SyncType.AUTH_CHANGE => Try_ {
        removeCache(args.head.toString, UserType.withName(args.last.toString))
      }
      case _ =>
    }
  }
}
| Flipkart/connekt | commons/src/main/scala/com/flipkart/connekt/commons/services/AuthorisationService.scala | Scala | mit | 4,827 |
/** HTTP/WebSocket server wrapper around http4s blaze for the Woot app.
 *
 *  @param host interface to bind to
 *  @param port TCP port to listen on
 */
class WootServer(host: String, port: Int) {
  import java.net.InetSocketAddress
  import org.log4s.getLogger
  import org.http4s.server.blaze.BlazeBuilder
  import scala.concurrent.duration.Duration

  private val logger = getLogger

  logger.info(s"Starting Http4s-blaze WootServer on '$host:$port'")

  /** Configures the blaze server (static routes at "/", woot routes at
   *  "/woot/"), starts it and blocks until shutdown. */
  def run(): Unit = {
    val serverBuilder = BlazeBuilder
      .bindSocketAddress(new InetSocketAddress(host, port))
      .withWebSockets(true)
      .withIdleTimeout(Duration.Inf) // websocket connections stay open indefinitely
      .mountService(new StaticRoutes().service, "/")
      .mountService(new WootRoutes().service, "/woot/")
    serverBuilder.run.awaitShutdown()
  }
}
/** Application entry point: binds host/port from the environment and starts the server. */
object Main extends App {
  import scala.util.Properties.envOrNone

  // HOST/PORT come from the environment when present; local defaults otherwise.
  val ip = envOrNone("HOST").getOrElse("0.0.0.0")
  val port = envOrNone("PORT").fold(8080)(_.toInt)

  new WootServer(ip, port).run()
}
package eu.tilk.gf
import scala.collection.immutable.IntMap
/** Linearizes abstract-syntax expressions into concrete-syntax
 *  [[Linearization]]s using the concrete grammar `cnc` of grammar `pgf`.
 *
 *  A fresh integer id (`n_fid`) is threaded through every call and bumped
 *  as nodes are produced; each helper returns (next fresh id, result) pairs.
 */
private[gf]
class Linearize(pgf : PGF, cnc : Concr) {
  /** All distinct linearizations of expression `e`. */
  def linTree(e : Expr) : List[Linearization] = lin(None, 0, Nil, e, Nil, Nil, Nil, e, Nil).map(_._2).distinct

  /** Wraps a plain string as a one-field, one-token bracketed table. */
  def ss(s : String) = Vector(List(BTLeafKS(s)))

  // Core worker: walks the spine of expression `e`, peeling abstractions into
  // the bound-variable list `xs`, collecting application arguments (with their
  // source locations) into `es`, and recording the path taken in `loc`.
  // Function heads dispatch to `apply`; metas/variables fall back to `df`;
  // literals linearize directly to a single-token table with the appropriate
  // predefined category/fid.
  // `mb_cty` is the expected concrete type (None at the root), `loc0`/`e0`
  // identify the original expression this linearization belongs to.
  private def lin(mb_cty : Option[CncType], n_fid : FId, loc0 : Loc, e0 : Expr,
      ys : List[CId], xs : List[CId], loc : Loc, e : Expr, es : List[(Loc, Expr)]) : List[(FId, Linearization)] = e match {
    case EAbs(_, x, e) => lin(mb_cty, n_fid, loc0, e0, ys, x::xs, SAbs::loc, e, es)
    case EApp(e1, e2) => lin(mb_cty, n_fid, loc0, e0, ys, xs, SAppL::loc, e1, (SAppR::loc, e2)::es)
    case EImplArg(e) => lin(mb_cty, n_fid, loc0, e0, ys, xs, SImplArg::loc, e, es)
    case ETyped(e, _) => lin(mb_cty, n_fid, loc0, e0, ys, xs, STyped::loc, e, es)
    case EFun(f) => apply(mb_cty, n_fid, loc0, e0, ys, xs, f, es)
    case EMeta(i) => df(mb_cty, n_fid, loc0, e0, ys, xs, "?"+i)
    // de Bruijn index i resolved against locally bound vars then outer vars
    case EVar(i) => df(mb_cty, n_fid, loc0, e0, ys, xs, (xs++ys:List[CId])(i).value)
    case ELit(l) => l match {
      case LStr(s) => List((n_fid+1, Linearization((cidString, n_fid), fidString, wildCId, List((loc0, e0)), LinTable(Nil, ss(s)))))
      case LInt(n) => List((n_fid+1, Linearization((cidInt , n_fid), fidInt, wildCId, List((loc0, e0)), LinTable(Nil, ss(n.toString)))))
      case LFlt(f) => List((n_fid+1, Linearization((cidFloat , n_fid), fidFloat, wildCId, List((loc0, e0)), LinTable(Nil, ss(f.toString)))))
    }
  }

  // Linearizes an application of function `f` to arguments `es`.
  // Looks up the concrete productions for `f`; when none exist, falls back to
  // `df` with the bracketed function name "[f]".
  private def apply(mb_cty : Option[CncType], n_fid : FId, loc0 : Loc, e0 : Expr,
      ys : List[CId], xs : List[CId], f : CId, es : List[(Loc, Expr)]) : List[(FId, Linearization)] = {
    // Linearizes each (expected type, argument) pair in order, threading the
    // fresh id; yields one list of argument linearizations per combination.
    def descend(n_fid : FId, fes : List[(CncType, (Loc, Expr))]) : List[(FId, List[Linearization])] = fes match {
      case Nil => List((n_fid, Nil))
      case (cty, (loc, e))::fes => for {
        (n_fid, arg) <- lin(Some(cty), n_fid, loc.reverse, e, xs ++ ys, Nil, Nil, e, Nil)
        (n_fid, args) <- descend(n_fid, fes)
      } yield (n_fid, arg::args)
    }
    // Expands the productions of `f` into concrete applications:
    // (concrete function id, result type, expected argument types).
    // PCoerce productions are followed transitively; PConst is not expected
    // here (hence the bare Exception).
    def getApps(prods : IntMap[Set[Production]]) : List[(FunId, CncType, List[CncType])] = {
      def toApp(fid : FId, p : Production) : List[(FunId, CncType, List[CncType])] = p match {
        case PApply(funid, pargs) =>
          // Pair the abstract argument categories with the concrete arg fids.
          val (ty,_,_,_) = pgf.abstr.funs(f)
          val (args, res) = ty.catSkeleton
          List((funid, (res, fid), args zip pargs.map(_.fid)))
        case PCoerce(fid) => prods.get(fid).map(_.toList.flatMap(toApp(fid, _))).getOrElse(Nil)
        case PConst(_, _, _) => throw new Exception();
      }
      mb_cty match {
        // Expected type known: only productions for that fid apply.
        case Some((cat, fid)) => prods.get(fid).map(_.toList.flatMap(toApp(fid, _))).getOrElse(Nil)
        // No expectation: consider every production of `f`.
        case None => (for {(fid, set) <- prods; prod <- set} yield toApp(fid, prod)).toList.flatten
      }
    }
    cnc.lproductions.get(f) match {
      case Some(prods) => for {
        (funid, (cat, fid), ctys) <- getApps(prods)
        (n_fid, args) <- descend(n_fid, ctys zip es)
      } yield (n_fid+1, Linearization((cat, n_fid), fid, f, List((loc0, e0)), LinTable(cnc, _ => true, xs, funid, args)))
      case None => df(mb_cty, n_fid, loc0, e0, ys, xs, ("[" ++ f.value ++ "]"))
    }
  }

  // Default/fallback linearization of the raw token `s` (used for metas,
  // variables and functions without concrete productions).
  // With a known expected type, tries the type's lindef functions first,
  // then predefined fids, then coercions; without one, yields nothing.
  private def df(mb_cty : Option[CncType], n_fid : FId, loc0 : Loc, e0 : Expr,
      ys : List[CId], xs : List[CId], s : String) : List[(FId, Linearization)] = mb_cty match {
    case Some((cat, fid)) => cnc.lindefs.get(fid) match {
      case Some(funs) => for {
        funid <- funs
        // Wrap the bare token, then apply each lindef function to it.
        args = List(Linearization((wildCId, n_fid), fidString, wildCId, List((loc0, e0)), LinTable(Nil, ss(s))))
      } yield (n_fid+2, Linearization((cat, n_fid+1), fid, wildCId, List((loc0, e0)), LinTable(cnc, _ => true, xs, funid, args)))
      case None =>
        if (isPredefFId(fid)) List((n_fid+2, Linearization((cat, n_fid+1), fid, wildCId, List((loc0, e0)), LinTable(xs, ss(s)))))
        else for {
          // Follow coercions of fid and retry with the coerced type.
          PCoerce(fid) <- cnc.productions.get(fid).map(_.toList).getOrElse(Nil)
          r <- df(Some((cat, fid)), n_fid, loc0, e0, ys, xs, s)
        } yield r
    }
    case None => Nil
  }
}
package sbtscalaxb
import sbt._
import scalaxb.{compiler => sc}
import scalaxb.compiler.{Config => ScConfig}
import scalaxb.BuildInfo
/** sbt plugin wiring for the scalaxb XSD/WSDL-to-Scala code generator.
 *  Exposes settings/tasks under the `scalaxb` key and a cached compile entry
 *  point in [[ScalaxbCompile]]. */
object Plugin extends sbt.Plugin {
  import Keys._
  import ScalaxbKeys._

  /** All sbt setting and task keys contributed by sbt-scalaxb. */
  object ScalaxbKeys {
    /** Main task: generates Scala sources from the XSD/WSDL inputs. */
    lazy val scalaxb = TaskKey[Seq[File]]("scalaxb")
    lazy val generate = TaskKey[Seq[File]]("scalaxb-generate")
    /** Assembled scalaxb compiler configuration (built from the keys below). */
    lazy val scalaxbConfig = SettingKey[ScConfig]("scalaxb-config")
    lazy val xsdSource = SettingKey[File]("scalaxb-xsd-source")
    lazy val wsdlSource = SettingKey[File]("scalaxb-wsdl-source")
    /** Default package for generated code (mapped from the `None` namespace). */
    lazy val packageName = SettingKey[String]("scalaxb-package-name")
    /** Per-namespace-URI package overrides. */
    lazy val packageNames = SettingKey[Map[URI, String]]("scalaxb-package-names")
    lazy val classPrefix = SettingKey[Option[String]]("scalaxb-class-prefix")
    lazy val paramPrefix = SettingKey[Option[String]]("scalaxb-param-prefix")
    lazy val attributePrefix = SettingKey[Option[String]]("scalaxb-attribute-prefix")
    lazy val prependFamily = SettingKey[Boolean]("scalaxb-prepend-family")
    lazy val wrapContents = SettingKey[Seq[String]]("scalaxb-wrap-contents")
    lazy val contentsSizeLimit = SettingKey[Int]("scalaxb-contents-size-limit")
    lazy val chunkSize = SettingKey[Int]("scalaxb-chunk-size")
    lazy val namedAttributes = SettingKey[Boolean]("scalaxb-named-attributes")
    lazy val packageDir = SettingKey[Boolean]("scalaxb-package-dir")
    lazy val generateRuntime = SettingKey[Boolean]("scalaxb-generate-runtime")
    lazy val protocolFileName = SettingKey[String]("scalaxb-protocol-file-name")
    lazy val protocolPackageName = SettingKey[Option[String]]("scalaxb-protocol-package-name")
    lazy val laxAny = SettingKey[Boolean]("scalaxb-lax-any")
    /** packageName + packageNames merged into scalaxb's native representation. */
    lazy val combinedPackageNames = SettingKey[Map[Option[String], Option[String]]]("scalaxb-combined-package-names")
    lazy val dispatchVersion = SettingKey[String]("scalaxb-dispatch-version")
    lazy val async = SettingKey[Boolean]("scalaxb-async")
  }

  /** Runs the scalaxb compiler, skipping work when inputs and outputs are
   *  unchanged (tracked via sbt's `Tracked` cache in `cacheDir`). */
  object ScalaxbCompile {
    /** Convenience overload: single default package, non-verbose. */
    def apply(sources: Seq[File], packageName: String, outDir: File, cacheDir: File): Seq[File] =
      apply(sources, sc.Config(packageNames = Map(None -> Some(packageName))), outDir, cacheDir, false)

    /** Generates sources for `sources` into `outDir` using `config`.
     *  Returns the generated files (possibly from a previous cached run). */
    def apply(sources: Seq[File], config: sc.Config, outDir: File, cacheDir: File, verbose: Boolean = false): Seq[File] = {
      import sbinary.{DefaultProtocol,Format}
      import DefaultProtocol.{FileFormat, immutableMapFormat, StringFormat, UnitFormat}
      import Tracked.{inputChanged, outputChanged}
      import Types.:+:
      import Cache._
      import FilesInfo.{lastModified, exists}
      // Uncached compile: the first source file decides which module
      // (XSD vs WSDL) drives processing of the whole set.
      def compile: Seq[File] =
        sources.headOption map { src =>
          import sc._
          sc.Log.configureLogger(verbose)
          val module = Module.moduleByFileName(src)
          module.processFiles(sources, config.copy(outdir = outDir))
        } getOrElse {Nil}
      // Recompile only when inputs (files, mtimes, plugin version) or the
      // previously generated outputs changed; otherwise reuse existing files.
      def cachedCompile =
        inputChanged(cacheDir / "scalaxb-inputs") { (inChanged, inputs: Seq[File] :+: FilesInfo[ModifiedFileInfo] :+: String :+: HNil) =>
          outputChanged(cacheDir / "scalaxb-output") { (outChanged, outputs: FilesInfo[PlainFileInfo]) =>
            if (inChanged || outChanged) compile
            else outputs.files.toSeq map {_.file}
          }
        }
      // Plugin version is part of the cache key so upgrades force a regen.
      def inputs = sources :+: lastModified(sources.toSet) :+: BuildInfo.version :+: HNil
      cachedCompile(inputs)(() => exists((outDir ** "*.scala").get.toSet))
    }
  }

  /** Default wiring: all base settings scoped to the Compile configuration. */
  lazy val scalaxbSettings: Seq[Def.Setting[_]] = inConfig(Compile)(baseScalaxbSettings)

  /** Unscoped settings; apply via `inConfig(...)` for other configurations. */
  lazy val baseScalaxbSettings: Seq[Def.Setting[_]] = Seq(
    scalaxb := (generate in scalaxb).value,
    sourceManaged in scalaxb := {
      sourceManaged.value / "sbt-scalaxb"
    },
    // In Compile/Test the source dir already encodes the configuration;
    // otherwise fall back to the conventional "main" layout.
    xsdSource in scalaxb := {
      val src = sourceDirectory.value
      if (Seq(Compile, Test) contains configuration.value) src / "xsd"
      else src / "main" / "xsd"
    },
    wsdlSource in scalaxb := {
      val src = sourceDirectory.value
      if (Seq(Compile, Test) contains configuration.value) src / "wsdl"
      else src / "main" / "wsdl"
    },
    logLevel in scalaxb <<= logLevel?? Level.Info
  ) ++ inTask(scalaxb)(Seq(
    generate := {
      val s = streams.value
      val ll = logLevel.value
      ScalaxbCompile(sources.value, scalaxbConfig.value, sourceManaged.value, s.cacheDirectory, ll == Level.Debug)
    },
    // WSDL files first, then XSD, each alphabetically sorted.
    sources := {
      val xsd = xsdSource.value
      val wsdl = wsdlSource.value
      (wsdl ** "*.wsdl").get.sorted ++ (xsd ** "*.xsd").get.sorted
    },
    clean := {
      val outdir = sourceManaged.value
      IO.delete((outdir ** "*").get)
      IO.createDirectory(outdir)
    },
    // Merge the URI-keyed overrides with the default package (None key).
    combinedPackageNames := {
      val x = packageName.value
      val xs = packageNames.value
      (xs map { case (k, v) => ((Some(k.toString): Option[String]), Some(v)) }) updated (None, Some(x))
    },
    packageName := "generated",
    packageNames := Map(),
    classPrefix := None,
    paramPrefix := None,
    attributePrefix := None,
    prependFamily := false,
    wrapContents := Nil,
    contentsSizeLimit := Int.MaxValue,
    chunkSize := 10,
    namedAttributes := false,
    packageDir := true,
    generateRuntime := true,
    protocolFileName := sc.Defaults.protocolFileName,
    protocolPackageName := None,
    laxAny := false,
    dispatchVersion := "0.11.1",
    async in scalaxb := true,
    // Assemble the scalaxb compiler Config from the individual keys above.
    scalaxbConfig :=
      ScConfig(packageNames = combinedPackageNames.value,
        packageDir = packageDir.value,
        classPrefix = classPrefix.value,
        classPostfix = None,
        paramPrefix = paramPrefix.value,
        attributePrefix = attributePrefix.value,
        outdir = new File("."),
        prependFamilyName = prependFamily.value,
        wrappedComplexTypes = wrapContents.value.toList,
        seperateProtocol = true,
        protocolFileName = protocolFileName.value,
        protocolPackageName = protocolPackageName.value,
        defaultNamespace = None,
        generateRuntime = generateRuntime.value,
        contentsSizeLimit = contentsSizeLimit.value,
        sequenceChunkSize = chunkSize.value,
        namedAttributes = namedAttributes.value,
        laxAny = laxAny.value,
        dispatchVersion = dispatchVersion.value,
        async = async.value)
  ))
}
| Banno/scalaxb | sbt-scalaxb/src/main/scala/sbtscalaxb/Plugin.scala | Scala | mit | 6,619 |
/* sbt -- Simple Build Tool
* Copyright 2011 Mark Harrah
*/
package sbt
import java.io.File
import compiler.AnalyzingCompiler
/** Launches a Scala REPL with the current build definition on the classpath,
 *  pre-binding `currentState` and `extracted` and pre-importing the build's
 *  own imports so build code can be explored interactively. */
object ConsoleProject
{
  def apply(state: State, extra: String, cleanupCommands: String = "", options: Seq[String] = Nil)(implicit log: Logger)
  {
    val extracted = Project.extract(state)
    val unit = extracted.currentUnit
    // Values made available inside the REPL by name.
    val replBindings = List("currentState" -> state, "extracted" -> extracted)
    val scalac = Compiler.compilers(ClasspathOptions.repl)(state.configuration, log).scalac
    // Build-definition imports plus imports for each bound name.
    val allImports = Load.getImports(unit.unit) ++ Load.importAll(replBindings.map(_._1))
    val initCommands = allImports.mkString("", ";\n", ";\n\n") + extra
    (new Console(scalac))(unit.classpath, options, initCommands, cleanupCommands)(Some(unit.loader), replBindings)
  }
}
| olove/xsbt | main/src/main/scala/sbt/ConsoleProject.scala | Scala | bsd-3-clause | 825 |
/* ____ __ ____ ____ ____,,___ ____ __ __ ____
* ( _ \ /__\ (_ )(_ _)( ___)/ __) ( _ \( )( )( _ \ Read
* ) / /(__)\ / /_ _)(_ )__) \__ \ )___/ )(__)( ) _ < README.txt
* (_)\_)(__)(__)(____)(____)(____)(___/ (__) (______)(____/ LICENSE.txt
*/
package razie.diesel.engine
import org.bson.types.ObjectId
import razie.diesel.engine.nodes.{CanHtml, EMsg}
import razie.diesel.expr.ECtx
import scala.collection.mutable.ListBuffer
/** mix this in if you want to control display/traversal */
trait DomAstInfo {
  /** prune i.e. stop showing this node's children */
  def shouldPrune : Boolean
  /** ignore this node and its entire branch */
  def shouldIgnore : Boolean
  /** skip just this node (its children are still shown) */
  def shouldSkip : Boolean
  /** don't show this node; show its children as if they were under the parent */
  def shouldRollup : Boolean
}
/** a tree node
*
* kind is spec/sampled/generated/test etc
*
* todo optimize tree structure, tree binding
*
* todo need ID conventions to suit distributed services
*/
case class DomAst(
var value: Any,
var kind: String = AstKinds.GENERATED,
childrenCol: ListBuffer[DomAst] = new ListBuffer[DomAst](),
id : String = new ObjectId().toString
) extends CanHtml {
/** children should be read-only. If you need to modify them, use append* - do not modify the children directly */
def children: List[DomAst] = childrenCol.toList
//=========== runtime data
var parent: Option[DomAst] = None
// todo the nodes should remember the context they had, so we can see the values at that point, later
// todo most likely clone the environment, when processing done?
private var imyCtx: Option[ECtx] = None
/** get the closest enclosing context */
def getCtx: Option[ECtx] = imyCtx orElse parent.flatMap(_.getCtx)
/** get my context if any, no parent fallbacks */
def getMyOwnCtx: Option[ECtx] = imyCtx
/** reset to null if you're changing parent... */
def resetParent(f: => DomAst) = {
this.parent = Option.apply(f)
this
}
def withParent(f: => DomAst) = {
if (!this.parent.isDefined) {
this.parent = Some(f)
}
else {
throw new IllegalStateException("Ast already has a parent...")
}
this
}
/** use null to remove context */
def replaceCtx(f: => ECtx) = {
this.imyCtx = Option(f)
}
def withCtx(f: => ECtx) = {
if (!this.imyCtx.isDefined) {
this.imyCtx = Some(f)
this.imyCtx.get
}
else {
throw new IllegalStateException("Ast already has a context...")
}
}
def orWithCtx(f: => Option[ECtx]) = {
if (!this.imyCtx.isDefined) {
this.imyCtx = f
}
this.imyCtx
}
var guard: String = DomState.GUARD_NONE
private var istatus: String = DomState.INIT
def status: String = istatus
def status_=(s: String): Unit = istatus = s
/** timestamp started */
var tstart: Long = System.currentTimeMillis()
/** timestamp started */
var tend: Long = System.currentTimeMillis()
/** execution sequence number - an engine is a single sequence */
var seqNo: Long = -1
/** SPECIAL: NO EVENTS generated - use carefully */
def prependAllNoEvents(other: List[DomAst]): Unit = {
this.childrenCol.prependAll(other)
other.foreach(_.withParent(this))
}
/** SPECIAL: NO EVENTS generated - use carefully */
def appendAllNoEvents(other: List[DomAst]): Unit = {
this.childrenCol.appendAll(other)
other.foreach(_.withParent(this))
}
/** SPECIAL: NO EVENTS generated - use carefully */
def moveAllNoEvents(from: DomAst)(implicit engine: DomEngineState): Unit = {
this.childrenCol.appendAll(from.children)
from.children.foreach(_.resetParent(this))
from.childrenCol.clear() // need to remove them from parent, or they will be duplos and create problems
}
/** will force updates to go through a DES */
def appendAll(other: List[DomAst])(implicit engine: DomEngineState): Unit = {
engine.evAppChildren(this, other)
}
/** will force updates to go through a DES */
def append(other: DomAst)(implicit engine: DomEngineState) {
engine.evAppChildren(this, other)
}
def start(seq: Long) {
tstart = System.currentTimeMillis()
seqNo = seq
}
def end() = {
tend = System.currentTimeMillis()
}
//============ domain details
var moreDetails = " "
var specs: List[Any] = Nil
var prereq: List[String] = Nil
/** depends on other nodes by IDs
*
* this node will not start unless those prereq are complete
*
* engines strategies, tags and rules dictate how these prerequisites are setup
*/
def withPrereq(s: List[String]) = {
prereq = (s ::: prereq).distinct // distinct is important for some reason - hanoi fails miserably otherwise
this
}
/** this node has a spec */
def withSpec(s: Any) = {
if (s.isInstanceOf[Option[_]]) {
s.asInstanceOf[Option[_]].foreach { x =>
specs = x :: specs
}
}
else specs = s :: specs
this
}
def withStatus(s: String) = {
this.status = s
this
}
def withDetails(s: String) = {
moreDetails = moreDetails + s
this
}
/** reduce footprint/memory size */
def removeDetails() = {
moreDetails = " "
}
/** prune story - call this on Story nodes */
def removeTestDetails() = {
childrenCol.foreach(_.childrenCol.clear())
}
//============== traversal
private def shouldPrune(k: DomAst) =
AstKinds.shouldPrune(k.kind) ||
k.value.isInstanceOf[DomAstInfo] && k.value.asInstanceOf[DomAstInfo].shouldPrune
private def shouldIgnore(k: DomAst) =
AstKinds.shouldIgnore(k.kind) ||
k.value.isInstanceOf[DomAstInfo] && k.value.asInstanceOf[DomAstInfo].shouldIgnore
private def shouldSkip(k: DomAst) =
AstKinds.shouldSkip(k.kind) ||
k.value.isInstanceOf[DomAstInfo] && k.value.asInstanceOf[DomAstInfo].shouldSkip
private def shouldRollup(k: DomAst) =
AstKinds.shouldRollup(k.kind) ||
k.value.isInstanceOf[DomAstInfo] && k.value.asInstanceOf[DomAstInfo].shouldRollup
// visit/recurse with filter
def collect[T](f: PartialFunction[DomAst, T]): List[T] = {
val res = new ListBuffer[T]()
def inspect(d: DomAst, level: Int) {
if (f.isDefinedAt(d)) res append f(d)
d.children.foreach(inspect(_, level + 1))
}
inspect(this, 0)
res.toList
}
// visit/recurse with filter AND level
def collect2[T](f: PartialFunction[(DomAst, Int), T]): List[T] = {
val res = new ListBuffer[T]()
def inspect(d: DomAst, level: Int) {
if (f.isDefinedAt((d, level))) res append f((d, level))
d.children.foreach(inspect(_, level + 1))
}
inspect(this, 0)
res.toList
}
//================= view
/** non-recursive tostring */
def meTos(level: Int, html: Boolean): String = {
def theKind = {
val duration = tend - tstart
val (style, cls) = kind match {
case AstKinds.RECEIVED => ("font-weight:bold", "label label-warning")
case AstKinds.TEST => ("font-weight:bold", "label label-warning")
case AstKinds.TRACE => ("color:lightgray", "")
case _ => ("", "")
}
if (html)
s"""<span status="$status" seqNo="$seqNo" msec="$duration" id="$id" prereq="${prereq.mkString(",")}"
|title="$kind, $duration ms" style="$style" class="$cls">${kind.take(3)}</span>""".stripMargin
else kind
}
def theState = {
if (!DomState.inProgress(this.status)) {
""
} else {
if (html)
s""" <span class="glyphicon glyphicon-exclamation-sign" style="color:red" title="State: $status"></span>"""
else s" ($status"
}
}
// todo - this prints the context type and id, add it with popup for values?
val c = ""
// +
// this.getMyOwnCtx.map(_.getClass.getSimpleName).mkString.take(2) +
// this.getMyOwnCtx.map(_.hashCode().toString.reverse.take(3)).mkString
(c + " " * level) +
theKind +
"::" +
theState + {
value match {
case c: CanHtml if html => c.toHtml(kind)
case x => x.toString
}
}.lines.map((" " * 1) + _).mkString("\n") + moreDetails
}
/** recursive tostring */
private def tos(level: Int, html: Boolean): String = {
def h(s: String) = if (html) s else ""
def toschildren(level: Int, kids: List[DomAst]): List[Any] =
kids.filter(k => !shouldIgnore(k)).flatMap { k =>
if (shouldRollup(k) && k.children.size == 1) {
// rollup NEXT nodes and others - just show the children
toschildren(level + 1, k.children)
} else
List(k.tos(level + 1, html))
}
if (!shouldSkip(this)) {
h(s"""<div kind="$kind" level="$level">""") +
meTos(level, html) + "\n" +
toschildren(level, children).mkString +
h("</div>")
} else {
toschildren(level, children).mkString
}
}
override def toString = tos(0, html = false)
/** this html works well in a diesel fiddle, use toHtmlInPage elsewhere */
override def toHtml = tos(0, html = true)
def toHtml (level : Int) = tos(level, html = true)
/** as opposed to toHtml, this will produce an html that can be displayed in any page, not just the fiddle */
def toHtmlInPage = toHtml.replaceAllLiterally("weref", "wefiddle")
type HasJ = {def toj : collection.Map[String,Any]}
def toj : collection.Map[String,Any] = {
Map (
"class" -> "DomAst",
"kind" -> kind,
"value" ->
(value match {
case m if m.getClass.getDeclaredMethods.exists(_.getName == "toj") => m.asInstanceOf[HasJ].toj
case x => x.toString
}),
"details" -> moreDetails,
"id" -> id,
"status" -> status,
"children" -> tojchildren(children)
)
}
def tojchildren (kids : List[DomAst]) : List[Any] =
kids.filter(k=> !AstKinds.shouldIgnore(k.kind)).flatMap{k=>
if(shouldSkip(k)) {
tojchildren(k.children.toList)
} else
List(k.toj)
}
def toJson = toj
/** GUI needs position info for surfing */
def posInfo = collect{
case d@DomAst(m:EMsg, _, _, _) if m.pos.nonEmpty =>
Map(
"kind" -> "msg",
"id" -> (m.entity+"."+m.met),
"pos" -> m.pos.get.toJmap
)
}
/** find in subtree, by id (depth-first, this node checked first) */
def find(id: String): Option[DomAst] =
  if (this.id == id) Some(this)
  else children.iterator.map(_.find(id)).collectFirst { case Some(hit) => hit }

/** find in subtree, by predicate (depth-first, this node checked first) */
def find(pred: DomAst => Boolean): Option[DomAst] =
  if (pred(this)) Some(this)
  else children.iterator.map(_.find(pred)).collectFirst { case Some(hit) => hit }
/**
 * Recursively overwrite the kind of this node and all its descendants.
 *
 * @return this node, for chaining
 */
def setKinds(kkk: String): DomAst = {
  this.kind = kkk
  // foreach, not map: we only want the side effect, not a discarded List
  this.children.foreach(_.setKinds(kkk))
  this
}
}
/** how many kids to keep, for loops and large items */
trait KeepOnlySomeChildren {
  /** defaults to keeping just the first few children */
  var keepCount: Int = 3

  /** set how many kids to keep and return this, for chaining */
  def withKeepCount(k: Int) = {
    keepCount = k
    this
  }
}
/** how many siblings to keep, for loops and large items */
trait KeepOnlySomeSiblings {
  /** defaults to keeping just the first few siblings */
  var keepCount: Int = 3

  /** set how many siblings to keep and return this, for chaining */
  def withKeepCount(k: Int) = {
    keepCount = k
    this
  }
}
| razie/diesel-hydra | diesel/src/main/scala/razie/diesel/engine/DomAst.scala | Scala | apache-2.0 | 11,339 |
import com.rabbitmq.client._
/**
 * RabbitMQ publish/subscribe tutorial consumer: binds a server-named queue
 * to the fanout exchange "logs" and prints every message received, until
 * the process is killed.
 */
object ReceiveLogs {

  private val EXCHANGE_NAME = "logs"

  // `: Unit =` instead of deprecated procedure syntax
  def main(argv: Array[String]): Unit = {
    val factory = new ConnectionFactory()
    factory.setHost("localhost")
    // connection/channel intentionally stay open: this is a long-running
    // consumer that only terminates with the process
    val connection = factory.newConnection()
    val channel = connection.createChannel()

    channel.exchangeDeclare(EXCHANGE_NAME, "fanout")
    // server-named, exclusive queue for this consumer only
    val queueName = channel.queueDeclare().getQueue
    channel.queueBind(queueName, EXCHANGE_NAME, "")

    println(" [*] Waiting for messages. To exit press CTRL+C")

    val deliverCallback: DeliverCallback = (_, delivery) => {
      val message = new String(delivery.getBody, "UTF-8")
      println(" [x] Received '" + message + "'")
    }
    // autoAck = true; cancel callback is a no-op
    channel.basicConsume(queueName, true, deliverCallback, _ => {})
  }
}
| rabbitmq/rabbitmq-tutorials | scala/src/main/scala/ReceiveLogs.scala | Scala | apache-2.0 | 759 |
package jsonslim
import org.scalatest.FunSpec
import org.json4s.JsonDSL._
/** Unit tests for [[Trim]]'s path-based json filtering (only/omit). */
class TrimSpec extends FunSpec {

  // shared fixture document
  // NOTE(review): there is no comma after the "titles" array - confirm the
  // parser used by Trim tolerates this, or add the missing comma
  val JsonDoc =
    """
      |{
      | "people":[{
      | "name": "bob",
      | "titles": ["boss","senior manager"]
      | "badges": [{"color":"green"}]
      | }]
      |}
    """.stripMargin

  describe ("Trim") {
    it ("should return only the defined paths when requested") {
      val names = Trim.only("people.name")(JsonDoc)
      assert(names === Some("""{"people":[{"name":"bob"}]}"""))
    }
    it ("should omit defined paths when requested") {
      val names = Trim.omit("people.titles")(JsonDoc)
      assert(names == Some("""{"people":[{"name":"bob","badges":[{"color":"green"}]}]}"""))
    }
    it ("should work with arrays") {
      val names = Trim.omit("people.titles")(s"[$JsonDoc]")
      assert(names == Some("""[{"people":[{"name":"bob","badges":[{"color":"green"}]}]}]"""))
    }
    it ("should return an array of empty objects when nothing matches given an array input") {
      val names = Trim.only("bogus")(s"[$JsonDoc]")
      assert(names == Some("""[{}]"""))
    }
    it ("should return an empty obj when nothing matches given an obj input") {
      val names = Trim.only("bogus")(JsonDoc)
      assert(names == Some("""{}"""))
    }
    it ("should not affect input with no only filter") {
      val names = Trim.only()(JsonDoc)
      assert(names == Some("""{"people":[{"name":"bob","titles":["boss","senior manager"],"badges":[{"color":"green"}]}]}"""))
    }
    it ("should not affect input with no omit filter") {
      // fixed: previously this duplicated the "only" test and never exercised omit
      val names = Trim.omit()(JsonDoc)
      assert(names == Some("""{"people":[{"name":"bob","titles":["boss","senior manager"],"badges":[{"color":"green"}]}]}"""))
    }
    it ("should honor multiple only lists") {
      val names = Trim.only("people.name", "people.titles")(JsonDoc)
      assert(names === Some("""{"people":[{"titles":["boss","senior manager"],"name":"bob"}]}"""))
    }
    it ("should handle case of single nesting") {
      val fooBar = Trim.only("foo.bar")("""{"foo":{"bar":"boom"}}""")
      assert(fooBar === Some("""{"foo":{"bar":"boom"}}"""))
    }
  }
}
| meetup/json-slim | src/test/scala/trim.scala | Scala | mit | 2,150 |
package org.elasticmq.actor.test
import org.scalatest.{Suite, BeforeAndAfterEach}
import akka.actor.{Props, ActorSystem, ActorRef}
import org.elasticmq.actor.QueueManagerActor
import org.elasticmq.util.NowProvider
/**
 * Test mixin that creates a fresh QueueManagerActor (driven by a mutable
 * clock) before each test and stops it afterwards.
 */
trait QueueManagerForEachTest extends BeforeAndAfterEach {
  this: Suite =>

  /** actor system supplied by the concrete test suite */
  val system: ActorSystem

  var queueManagerActor: ActorRef = _
  var nowProvider: MutableNowProvider = _

  // `: Unit =` instead of deprecated procedure syntax
  override protected def beforeEach(): Unit = {
    super.beforeEach()
    nowProvider = new MutableNowProvider
    queueManagerActor = system.actorOf(Props(new QueueManagerActor(nowProvider)))
  }

  override protected def afterEach(): Unit = {
    system.stop(queueManagerActor)
    super.afterEach()
  }
}
| nagyistoce/elasticmq | core/src/test/scala/org/elasticmq/actor/test/QueueManagerForEachTest.scala | Scala | apache-2.0 | 694 |
package org.jetbrains.plugins.scala
package debugger
package evaluateExpression
import org.junit.experimental.categories.Category
// Scala-version-specific concrete suites; the shared tests and fixtures live
// in ScalaObjectEvaluationTestBase below.

@Category(Array(classOf[DebuggerTests]))
class ScalaObjectEvaluationTest_2_11 extends ScalaObjectEvaluationTestBase {
  override protected def supportedIn(version: ScalaVersion) = version <= LatestScalaVersions.Scala_2_11
}

@Category(Array(classOf[DebuggerTests]))
class ScalaObjectEvaluationTest_2_12 extends ScalaObjectEvaluationTestBase {
  override protected def supportedIn(version: ScalaVersion) =
    version >= LatestScalaVersions.Scala_2_12 && version <= LatestScalaVersions.Scala_2_13
}

@Category(Array(classOf[DebuggerTests]))
class ScalaObjectEvaluationTest_3_0 extends ScalaObjectEvaluationTestBase {
  override protected def supportedIn(version: ScalaVersion) = version >= LatestScalaVersions.Scala_3_0

  // these evaluations are expected to fail on Scala 3.0, so the expected
  // outcome is inverted via `failing`
  override def testEvaluateObjects(): Unit = failing(super.testEvaluateObjects())

  override def testInnerClassObjectFromObject(): Unit = failing(super.testInnerClassObjectFromObject())
}
@Category(Array(classOf[DebuggerTests]))
abstract class ScalaObjectEvaluationTestBase extends ScalaDebuggerTestCase {

  // main fixture: execution stops at $bp, then the expressions below are
  // evaluated in the debugger
  addFileWithBreakpoints("SimpleObject.scala",
    s"""
       |object EvaluateObjects {
       | def main(args: Array[String]): Unit = {
       | println()$bp
       | }
       |}
      """.stripMargin.trim()
  )

  // auxiliary sources whose objects/classes are evaluated from the breakpoint
  addSourceFile("Simple.scala", "object Simple")
  addSourceFile("qual/Simple.scala",
    s"""
       |package qual
       |
       |object Simple
      """.stripMargin.trim()
  )
  addSourceFile("qual/SimpleCaseClass.scala",
    s"""
       |package qual
       |
       |case class SimpleCaseClass()
      """.stripMargin.trim()
  )
  addSourceFile("StableInner.scala",
    s"""
       |package qual
       |
       |object StableInner {
       | object Inner
       |}
      """.stripMargin.trim()
  )
  addSourceFile("qual/ClassInner.scala",
    s"""
       |package qual
       |
       |class ClassInner {
       | object Inner
       |}
      """.stripMargin.trim()
  )

  // evaluating object references must yield the JVM module-class names ($ suffix)
  def testEvaluateObjects(): Unit = {
    runDebugger() {
      waitForBreakpoint()
      evalStartsWith("Simple", "Simple$")
      evalStartsWith("qual.Simple", "qual.Simple$")
      evalStartsWith("collection.JavaConversions", "scala.collection.JavaConversions$")
      evalEquals("qual.SimpleCaseClass", "SimpleCaseClass")
      evalStartsWith("qual.StableInner.Inner", "qual.StableInner$Inner$")
      evalStartsWith("val x = new qual.ClassInner(); x.Inner", "qual.ClassInner$Inner$")
    }
  }

  // objects nested inside an instance class; the SS.S reference forces the
  // compiler to generate an $outer field (note $$ escapes $ in the interpolator)
  addFileWithBreakpoints("InnerClassObjectFromObject.scala",
    s"""
       |object InnerClassObjectFromObject {
       | class S {
       | object SS {
       | object S {
       | def foo(): Unit = {
       | SS.S //to have $$outer field
       | println()$bp
       | }
       | }
       | object G
       | }
       | def foo(): Unit = {
       | SS.S.foo()
       | }
       | }
       |
       | def main(args: Array[String]): Unit = {
       | val x = new S()
       | x.foo()
       | }
       |}
      """.stripMargin.trim()
  )

  def testInnerClassObjectFromObject(): Unit = {
    runDebugger() {
      waitForBreakpoint()
      evalStartsWith("SS.G", "InnerClassObjectFromObject$S$SS$G")
      evalStartsWith("SS.S", "InnerClassObjectFromObject$S$SS$S")
      evalStartsWith("S", "InnerClassObjectFromObject$S$SS$S")
      evalStartsWith("SS", "InnerClassObjectFromObject$S$SS$")
    }
  }
}
/*
Copyright 2011 Ben Biddington
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.coriander.oauth.core.cryptography
/** Abstraction over a keyed SHA-1 digest implementation used for OAuth signing. */
abstract class Sha1 {
  /**
   * Compute the digest of `message` using `key`.
   * NOTE(review): given the oauth.core.cryptography package this is
   * presumably HMAC-SHA1 - confirm against the concrete implementations.
   */
  def create(key : String, message : String) : Array[Byte]
}
package org.jetbrains.plugins.scala
package lang
package psi
package stubs
package impl
import api.base.ScFieldId
import com.intellij.psi.PsiElement
import com.intellij.psi.stubs.{IStubElementType, StubElement}
import com.intellij.util.io.StringRef
/**
 * Stub implementation for [[ScFieldId]] elements; stores only the name.
 *
 * User: Alexander Podkhalyuzin
 * Date: 19.07.2009
 */
class ScFieldIdStubImpl[ParentPsi <: PsiElement](parent: StubElement[ParentPsi],
                                                 elemType: IStubElementType[_ <: StubElement[_ <: PsiElement], _ <: PsiElement])
  extends StubBaseWrapper[ScFieldId](parent, elemType) with ScFieldIdStub {

  // held as a StringRef so the string can be shared with the stub index
  private var name: StringRef = _

  /** auxiliary constructor used when the name is known up front */
  def this(parent: StubElement[ParentPsi],
           elemType: IStubElementType[_ <: StubElement[_ <: PsiElement], _ <: PsiElement],
           name: String) = {
    // the cast only adjusts wildcard bounds to match the primary constructor
    this(parent, elemType.asInstanceOf[IStubElementType[StubElement[PsiElement], PsiElement]])
    this.name = StringRef.fromString(name)
  }

  def getName: String = StringRef.toString(name)
}
/*
This file is part of scala-conduit.
scala-conduit is free software: you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
scala-conduit is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
License for more details.
You should have received a copy of the GNU Lesser General Public License
along with scala-conduit. If not, see <http://www.gnu.org/licenses/>.
*/
package conduit.test
import scala.collection.mutable.ArrayBuffer

import org.scalatest._
import org.scalatest.PropSpec
import org.scalatest.prop.PropertyChecks
import org.scalatest.matchers.ShouldMatchers
/**
 * A helper testing class that records a timestamp each time its
 * [[Event.record]] method is called. Timestamps don't represent any
 * absolute time, they are only meant to be compared to each other to
 * determine when an event occurred compared to other events.
 */
class Event(implicit c: Event.Counter = Event.sharedCounter)
  extends Function0[Unit]
{
  // marks accumulated so far, in firing order
  // NOTE(review): requires scala.collection.mutable.ArrayBuffer in scope
  private[this] var _marks = new ArrayBuffer[Event.Mark];

  /**
   * Requests a new time mark from [[Event.Counter]] and records it.
   */
  def record() {
    _marks += c.mark();
  }

  /**
   * An alias for [[record]].
   */
  override def apply(): Unit = record();

  /**
   * Returns an identity function whose side effect is calling [[record()]].
   */
  def id[R]: R => R =
    (x: R) => { record(); x; }

  /**
   * Calls [[record()]] and then evaluates the given computation.
   */
  def fn[R](f: => R): R =
    { record(); f; }

  /**
   * Augments a given function with calling [[record()]].
   */
  def fn0[R](f: () => R): () => R =
    () => { record(); f(); }

  /**
   * Augments a given function with calling [[record()]].
   */
  def fn1[A,R](f: (A) => R): (A) => R =
    (x: A) => { record(); f(x); }

  /**
   * Augments a given function with calling [[record()]].
   */
  def fn2[A,B,R](f: (A,B) => R): (A,B) => R =
    (x: A, y: B) => { record(); f(x, y); }

  /**
   * Returns the list of accumulated marks.
   */
  def marks: Traversable[Event.Mark] = _marks;

  /**
   * Returns the number of times this event was invoked.
   */
  def count: Int = marks.size;

  /**
   * Checks that this event was invoked just once.
   * NOTE(review): `should be` needs ScalaTest matchers in scope; no Matchers
   * mixin or import is visible in this file - confirm this compiles.
   */
  def justOnce =
    count should be (1);

  /**
   * Check that this event occurred only after all the given events.
   * Note that this condition is trivially satisfied if this event
   * has never been invoked.
   */
  def mustAfter(es: Event*) {
    // every mark of every given event must precede every mark of this one
    for(e <- es;
        i <- e.marks;
        j <- this.marks)
      i should be < j;
  }

  /**
   * Check that this event occurred only before all the given events.
   * Note that this condition is trivially satisfied if this event
   * has never been invoked.
   */
  def mustBefore(es: Event*) {
    // every mark of every given event must follow every mark of this one
    for(e <- es;
        i <- e.marks;
        j <- this.marks)
      i should be > j;
  }
}
object Event {
  /** A mark is just an Int; later marks compare greater than earlier ones. */
  type Mark = Int

  /** Thread-safe generator of strictly increasing marks, starting at 1. */
  class Counter {
    private[this] val guard = new Object
    private[this] var last: Mark = 0

    /**
     * Create a new mark. Every time the function is called,
     * it returns a mark that is higher than the previous one.
     */
    def mark(): Mark = guard.synchronized {
      last += 1
      last
    }
  }

  /** default counter shared by events that don't supply their own */
  implicit val sharedCounter = new Counter
}
| ppetr/scala-conduit | src/test/scala/conduit/test/Event.scala | Scala | gpl-3.0 | 3,544 |
package io.console.commands
import components.Dsl._
import components._
import components.{Constraint => ModelConstraint}
import io.console.ConsoleState
import scala.util.Try
/**
 * Console command that parses a linear constraint (terms, relational
 * operator, right-hand side) from the current command string and adds it
 * to the model.
 */
object Constraint extends Command {
  override val stringRep: String = ""
  override val description: String = "Adds a constraint to the model"

  override val execute = (state: ConsoleState) =>
    Try {
      // groups: terms / two-char relational operator / numeric rhs
      // NOTE(review): "\\\\d" in Scala source is a literal backslash plus "\\d"
      // in the regex, i.e. it matches a backslash, not digits - this looks
      // like an escaping bug (or a dump artifact); confirm against the tests
      val regex = "([A-Za-z0-9 +-]+)([><=]{2})([ \\\\d]+[.]*[\\\\d]*)".r
      val regex(termsString, equalityString, rhsString) = state.commandString
      val termRegex = "[+-]*[\\\\d]*[.]*[\\\\d]*[A-Za-z]+".r
      // parse each signed term such as "+2x" or "-y"
      // (a duplicated findAllIn call whose result was discarded has been removed)
      val terms: Set[Term] = termRegex.findAllIn(termsString.replaceAll(" ", ""))
        .toSet[String]
        .map(x => stringToTermConverter(x))
      val constraint = ModelConstraint(terms, equalityString, rhsString.trim.toDouble)
      // add to the existing model, or start a fresh one
      val newModel = state.model match {
        case None => Model(Set(constraint), None)
        case Some(model) => model.+(constraint)
      }
      state.copy(model = Some(newModel))
    } getOrElse(throw new Exception(s"Could not parse constraint '${state.commandString}'"))
}
// Databricks notebook source
// MAGIC %md
// MAGIC ScaDaMaLe Course [site](https://lamastex.github.io/scalable-data-science/sds/3/x/) and [book](https://lamastex.github.io/ScaDaMaLe/index.html)

// COMMAND ----------

// MAGIC %md # Clustering of country features in the Covid 19 dataset

// COMMAND ----------

// MAGIC %md Execute relevant notebooks to load and preprocess data

// COMMAND ----------

// MAGIC %run "./02_DataPreprocess"

// COMMAND ----------

// MAGIC %md ## Clustering using Kmeans

// COMMAND ----------

// valid_distinct_features is defined by the 02_DataPreprocess notebook above
display(valid_distinct_features)

// COMMAND ----------

// transform to features in order to perform kmeans
import org.apache.spark.ml.feature.VectorAssembler

// define input cols and output (add additional columns here...)
val va = new VectorAssembler().setInputCols(Array("population","population_density","median_age","aged_65_older","aged_70_older","gdp_per_capita","cardiovasc_death_rate","diabetes_prevalence","female_smokers","male_smokers","hospital_beds_per_thousand","life_expectancy","human_development_index")).setOutputCol("features")

// create features
val df_feats = va.transform(valid_distinct_features)
display(df_feats)

// COMMAND ----------

import org.apache.spark.ml.clustering.KMeans
import org.apache.spark.ml.evaluation.ClusteringEvaluator

// number of clusters
val num_clusters: Int = 6
// fixed seed for initialization (makes the clustering reproducible)
val seed: Int = 2

// init kmeans method
val kmeans = new KMeans().setK(num_clusters).setSeed(seed).setFeaturesCol("features")
// train kmeans cluster
val model = kmeans.fit(df_feats)
// cluster predictions
val preds = model.transform(df_feats)

// evaluate clustering base on Silhouette metric
val cluster_evaluator = new ClusteringEvaluator()
val silhouette_metric = cluster_evaluator.evaluate(preds)

// show evaluation and results
println(s"Silhouette metric: $silhouette_metric")
// cluster centers
println("Cluster centers:")
model.clusterCenters.foreach(println)

// COMMAND ----------

// check model parameters
model.extractParamMap

// COMMAND ----------

// rename the prediction column to a more descriptive name
val df_clstr = preds.withColumnRenamed("prediction", "kmeans_class")
display(df_clstr)

// COMMAND ----------

// MAGIC %md ## Visualization
// MAGIC Based on each country's features, the countries can be clustered accordingly

// COMMAND ----------

val df_clstr_filtered = df_clstr.select($"iso_code",$"kmeans_class")
display(df_clstr_filtered)

// COMMAND ----------
| lamastex/scalable-data-science | dbcArchives/2021/000_0-sds-3-x-projects/student-project-12_group-CovidPandemic/05_Clustering.scala | Scala | unlicense | 2,420 |
import leon.lang._
/**
 * Leon verification benchmark: checks that an array mutation performed
 * inside a nested function is visible to the caller.
 * NOTE(review): this is a verifier fixture - its exact shape is the point,
 * so keep the code as-is.
 */
object NestedFunParamsMutation1 {

  def f(): Int = {
    // nested function mutates its array parameter in place
    def g(a: Array[Int]): Unit = {
      require(a.length > 0)
      a(0) = 10
    }

    val a = Array(1,2,3,4)
    g(a)
    a(0)  // reads the value written by g
  } ensuring(_ == 10)
}
| epfl-lara/leon | src/test/resources/regression/verification/xlang/valid/NestedFunParamsMutation1.scala | Scala | gpl-3.0 | 230 |
import org.apache.lucene.analysis.ja.JapaneseAnalyzer
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute
import org.apache.lucene.util.Version
/**
 * Demo of Lucene's JapaneseAnalyzer (kuromoji): tokenizes a few sample
 * strings and prints the resulting terms.
 */
object LuceneKuromojiDict {
  def main(args: Array[String]): Unit = {
    val luceneVersion = Version.LUCENE_45
    val analyzer = new JapaneseAnalyzer(luceneVersion)
    for (text <- Array("すもももももももものうち。",
                       "メガネは顔の一部です。",
                       "日本経済新聞でモバゲーの記事を読んだ。",
                       "Java, Scala, Groovy, Clojure",
                       "LUCENE、SOLR、Lucene, Solr",
                       "アイウエオカキクケコさしすせそABCXYZ123456",
                       "Lucene is a full-featured text search engine library written in Java.",
                       "このComputerは、10回に1回の割合で起動中に青い画面が表示されます。")) {
      println(s"Original[$text]")
      val tokenStream = analyzer.tokenStream("", text)
      val charTermAttribute = tokenStream.addAttribute(classOf[CharTermAttribute])
      tokenStream.reset()
      try {
        println {
          Iterator
            .continually(tokenStream)
            .takeWhile(_.incrementToken())
            .map(t => charTermAttribute.toString)
            .mkString("Tokenize[", " ", "]")
        }
        // the TokenStream contract requires end() after the last incrementToken()
        tokenStream.end()
      } finally {
        // always release the stream, even when tokenization fails
        tokenStream.close()
      }
      println()
    }
  }
}
| kazuhira-r/lucene-examples | lucene-kuromoji-dict/src/main/scala/LuceneKuromojiDict.scala | Scala | mit | 1,451 |
package com.twitter.finagle.redis.param
import com.twitter.finagle.Stack
import com.twitter.hashing
import com.twitter.io.Buf
object RedisKeyHasher {
  // murmur3 hash was picked because it's very fast, it has a reasonably good
  // distribution, and it's not very collision prone. it's not difficult to
  // reverse though, so it shouldn't be used as a cryptographic hash.
  implicit val param: Stack.Param[RedisKeyHasher] =
    Stack.Param(RedisKeyHasher(hashing.KeyHasher.MURMUR3))
}

/** Stack param selecting the hash used to map redis keys. */
case class RedisKeyHasher(hasher: hashing.KeyHasher) {
  def mk(): (RedisKeyHasher, Stack.Param[RedisKeyHasher]) =
    (this, RedisKeyHasher.param)
}

/**
 * A class eligible for configuring a redis client's database after connection init phase.
 */
case class Database(index: Option[Int])
object Database {
  // default: no SELECT is issued after connect
  implicit val param: Stack.Param[Database] = Stack.Param(Database(None))
}

/**
 * A class eligible for configuring a redis client's password after connection init phase.
 */
case class Password(code: Option[Buf])
object Password {
  // default: no AUTH is issued after connect
  implicit val param: Stack.Param[Password] = Stack.Param(Password(None))
}
| twitter/finagle | finagle-redis/src/main/scala/com/twitter/finagle/redis/param/Params.scala | Scala | apache-2.0 | 1,107 |
package rename.organizer.tv
/** Entry point: resolves TV series names for local paths via TheTVDB. */
object Main {
  def main(args: Array[String]): Unit = {
    // earlier local-listing approach, kept for reference:
    // val listing = new Series("E:\\\\Guillaume\\\\Vidéos\\\\Animé\\\\Arrow")
    // val listing findName()
    val listing = new TheTVDB
    // NOTE(review): paths below look double-escaped ("\\\\") and contain
    // mojibake ("Vidéos") - possibly a dump artifact; left untouched
    println(listing findSeriesName "E:\\\\Guillaume\\\\Vidéos\\\\Séries\\\\Arrow\\\\Saison 1\\\\Arrow.S01E03.720p.HDTV.X264-DIMENSION")
    println(listing findSeriesName "E:\\\\Guillaume\\\\Vidéos\\\\Séries\\\\Arrow")
  }
}
| ghivert/Student-Projects | Miscellaneous/Various Scala/tv-organizer/src/main/scala/rename/organizer/tv/Main.scala | Scala | mit | 432 |
package org.jetbrains.plugins.hocon.codestyle
import com.intellij.application.options.SmartIndentOptionsEditor
import com.intellij.lang.Language
import com.intellij.psi.codeStyle.LanguageCodeStyleSettingsProvider.SettingsType
import com.intellij.psi.codeStyle.{CodeStyleSettingsCustomizable, CommonCodeStyleSettings, DisplayPriority, LanguageCodeStyleSettingsProvider}
import org.jetbrains.plugins.hocon.lang.HoconLanguage
/**
 * Registers HOCON-specific code style options (spacing, wrapping, blank
 * lines) in the IDE settings UI and provides preview code samples.
 * Note: the order of show*Option calls determines the order in the UI.
 */
class HoconLanguageCodeStyleSettingsProvider extends LanguageCodeStyleSettingsProvider {
  override def getLanguage: Language = HoconLanguage

  override def getDisplayPriority = DisplayPriority.COMMON_SETTINGS

  // group titles reused by several custom options below
  private val ObjectsWrap = "Objects"
  private val ListsWrap = "Lists"
  private val ObjectFieldsWithColonWrap = "Object fields with ':'"
  private val ObjectFieldsWithAssignmentWrap = "Object fields with '=' or '+='"

  override def customizeSettings(consumer: CodeStyleSettingsCustomizable, settingsType: SettingsType): Unit = {
    // shorthand for options backed by HoconCustomCodeStyleSettings fields
    def showCustomOption(name: String, title: String, group: String, options: AnyRef*): Unit =
      consumer.showCustomOption(classOf[HoconCustomCodeStyleSettings], name, title, group, options: _*)

    import com.intellij.psi.codeStyle.CodeStyleSettingsCustomizable._
    import com.intellij.psi.codeStyle.LanguageCodeStyleSettingsProvider.SettingsType._

    settingsType match {
      case SPACING_SETTINGS =>
        import com.intellij.psi.codeStyle.CodeStyleSettingsCustomizable.SpacingOption._
        consumer.showStandardOptions(List(
          SPACE_WITHIN_BRACES,
          SPACE_WITHIN_BRACKETS,
          SPACE_WITHIN_METHOD_CALL_PARENTHESES,
          SPACE_BEFORE_COMMA,
          SPACE_AFTER_COMMA
        ).map(_.name): _*)
        // re-label standard options with HOCON terminology
        consumer.renameStandardOption(SPACE_WITHIN_BRACES.name, "Object braces")
        consumer.renameStandardOption(SPACE_WITHIN_METHOD_CALL_PARENTHESES.name, "Include qualifier parentheses")
        showCustomOption("SPACE_BEFORE_COLON", "Before colon", SPACES_AROUND_OPERATORS)
        showCustomOption("SPACE_AFTER_COLON", "After colon", SPACES_AROUND_OPERATORS)
        showCustomOption("SPACE_BEFORE_ASSIGNMENT", "Before assignment ('=' and '+=')", SPACES_AROUND_OPERATORS)
        showCustomOption("SPACE_AFTER_ASSIGNMENT", "After assignment ('=' and '+=')", SPACES_AROUND_OPERATORS)
        showCustomOption("SPACE_BEFORE_LBRACE_AFTER_PATH", "Immediately after path expression", SPACES_BEFORE_LEFT_BRACE)
        showCustomOption("SPACE_WITHIN_SUBSTITUTION_BRACES", "Substitution braces", SPACES_WITHIN)
        showCustomOption("SPACE_AFTER_QMARK", "After '?'", SPACES_OTHER)
      case WRAPPING_AND_BRACES_SETTINGS =>
        import com.intellij.psi.codeStyle.CodeStyleSettingsCustomizable.WrappingOrBraceOption._
        consumer.showStandardOptions(KEEP_LINE_BREAKS.name)
        showCustomOption("HASH_COMMENTS_AT_FIRST_COLUMN", "Hash comments at first column", WRAPPING_KEEP)
        showCustomOption("DOUBLE_SLASH_COMMENTS_AT_FIRST_COLUMN", "Double slash comments at first column", WRAPPING_KEEP)
        showCustomOption("OBJECTS_WRAP", ObjectsWrap, null, WRAP_OPTIONS, WRAP_VALUES)
        showCustomOption("OBJECTS_ALIGN_WHEN_MULTILINE", "Align when multiline", ObjectsWrap)
        showCustomOption("OBJECTS_NEW_LINE_AFTER_LBRACE", "New line after '{'", ObjectsWrap)
        showCustomOption("OBJECTS_RBRACE_ON_NEXT_LINE", "Place '}' on new line", ObjectsWrap)
        showCustomOption("LISTS_WRAP", ListsWrap, null, WRAP_OPTIONS, WRAP_VALUES)
        showCustomOption("LISTS_ALIGN_WHEN_MULTILINE", "Align when multiline", ListsWrap)
        showCustomOption("LISTS_NEW_LINE_AFTER_LBRACKET", "New line after '['", ListsWrap)
        showCustomOption("LISTS_RBRACKET_ON_NEXT_LINE", "Place ']' on new line", ListsWrap)
        showCustomOption("OBJECT_FIELDS_WITH_COLON_WRAP", ObjectFieldsWithColonWrap, null,
          WRAP_OPTIONS_FOR_SINGLETON, WRAP_VALUES_FOR_SINGLETON)
        showCustomOption("OBJECT_FIELDS_COLON_ON_NEXT_LINE", "Colon on next line", ObjectFieldsWithColonWrap)
        showCustomOption("OBJECT_FIELDS_WITH_ASSIGNMENT_WRAP", ObjectFieldsWithAssignmentWrap, null,
          WRAP_OPTIONS_FOR_SINGLETON, WRAP_VALUES_FOR_SINGLETON)
        showCustomOption("OBJECT_FIELDS_ASSIGNMENT_ON_NEXT_LINE", "Assignment operator on next line", ObjectFieldsWithAssignmentWrap)
        showCustomOption("INCLUDED_RESOURCE_WRAP", "Included resource", null,
          WRAP_OPTIONS_FOR_SINGLETON, WRAP_VALUES_FOR_SINGLETON)
      case BLANK_LINES_SETTINGS =>
        showCustomOption("KEEP_BLANK_LINES_IN_OBJECTS", "In objects", BLANK_LINES_KEEP)
        showCustomOption("KEEP_BLANK_LINES_BEFORE_RBRACE", "Before '}'", BLANK_LINES_KEEP)
        showCustomOption("KEEP_BLANK_LINES_IN_LISTS", "In lists", BLANK_LINES_KEEP)
        showCustomOption("KEEP_BLANK_LINES_BEFORE_RBRACKET", "Before ']'", BLANK_LINES_KEEP)
      case _ =>
    }
  }

  // HOCON defaults to 2-space indentation
  override def getDefaultCommonSettings: CommonCodeStyleSettings = {
    val commonCodeStyleSettings = new CommonCodeStyleSettings(getLanguage)
    val indentOptions = commonCodeStyleSettings.initIndentOptions
    indentOptions.INDENT_SIZE = 2
    indentOptions.TAB_SIZE = 2
    indentOptions.CONTINUATION_INDENT_SIZE = 2
    commonCodeStyleSettings
  }

  override def getIndentOptionsEditor = new SmartIndentOptionsEditor

  /** preview snippet shown next to each settings page */
  def getCodeSample(settingsType: SettingsType): String = settingsType match {
    case SettingsType.INDENT_SETTINGS =>
      """object {
        | key = value
        | some.path: 42
        | list = [
        | something here
        | more stuff
        | ]
        | some.very.long.path =
        | very very very long value
        |}
      """.stripMargin.trim
    case SettingsType.SPACING_SETTINGS =>
      """include file("application.conf")
        |
        |object {
        | quix: 42
        | foo.bar = stuff
        | obj = {key: value, kye: vlaue}
        | list = [1, 2, 3]
        | subst = ${some.path}
        | optsubst = ${?some.path}
        |}
      """.stripMargin.trim
    case SettingsType.WRAPPING_AND_BRACES_SETTINGS =>
      """include "someExtraordinarilyLongName"
        |
        |object {
        | #comment
        | key = value
        | simplelist = [element]
        | simpleobj = {k: v}
        | longlist = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
        | longobj = {key: value, foo: bar, stuff: 42, quix: 3.14}
        | some.path: long long long long long long value
        | another.path = another very very very very long value
        | anotherobj {key: value, foo: bar, stuff: 42, quix: 3.14}
        |
        |#comment originally at first column
        |//comment originally at first column
        |
        |}
      """.stripMargin.trim
    case SettingsType.BLANK_LINES_SETTINGS =>
      """include "application"
        |
        |
        |object {
        | key: value
        |
        | num = 42
        |
        |}
        |
        |list = [
        | value
        |
        | another one
        |
        |]
      """.stripMargin.trim
    case _ => ""
  }
}
package org.apache.spark
import org.apache.spark.util.Utils
import org.apache.spark._
import scala.tools.nsc.Settings
/**
 * Helper that builds the HTTP class server used by the Spark REPL to serve
 * compiled wrapper classes to executors.
 */
object Boot {
  /**
   * Build a new (not yet started) [[HttpServer]] rooted at a fresh temp
   * directory under spark.repl.classdir (falling back to java.io.tmpdir).
   * Note: every call creates a new temp dir and a new server instance.
   */
  def classServer: HttpServer = {
    val conf = new SparkConf()
    val tmp = System.getProperty("java.io.tmpdir")
    val rootDir = conf.get("spark.repl.classdir", tmp)
    val outputDir = Utils.createTempDir(rootDir)
    // NOTE(review): these Settings are configured (-Yrepl-outdir etc.) but
    // never passed anywhere - confirm whether they should be returned/used
    val s = new Settings()
    s.processArguments(List("-Yrepl-class-based", "-Yrepl-outdir", s"${outputDir.getAbsolutePath}", "-Yrepl-sync"), true)
    val server = new HttpServer(conf, outputDir, new SecurityManager(conf))
    server
  }
}
package com.geeksville.util
/**
* C++ style using
* http://stackoverflow.com/questions/2207425/what-automatic-resource-management-alternatives-exists-for-scala
*
* Example:
*
* using(new BufferedReader(new FileReader("file"))) { r =>
* var count = 0
* while (r.readLine != null) count += 1
* println(count)
* }
*/
object Using {
  /**
   * C++-style scoped resource management: evaluates `block` with `resource`
   * and closes the resource afterwards, whether or not the block throws.
   * The structural bound means the close() call goes through reflection.
   */
  def using[T <: { def close() }, ResType](resource: T)(block: T => ResType) = {
    try block(resource)
    finally {
      if (resource != null) resource.close()
    }
  }
}
| geeksville/arduleader | common/src/main/scala/com/geeksville/util/Using.scala | Scala | gpl-3.0 | 544 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources
import java.io.Closeable
import java.net.URI
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.Text
import org.apache.hadoop.mapreduce._
import org.apache.hadoop.mapreduce.lib.input.{FileSplit, LineRecordReader}
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl
/**
* An adaptor from a [[PartitionedFile]] to an [[Iterator]] of [[Text]], which are all of the lines
* in that file.
*
* @param file A part (i.e. "block") of a single file that should be read line by line.
* @param lineSeparator A line separator that should be used for each line. If the value is `None`,
* it covers `\r`, `\r\n` and `\n`.
* @param conf Hadoop configuration
*
* @note The behavior when `lineSeparator` is `None` (covering `\r`, `\r\n` and `\n`) is defined
* by [[LineRecordReader]], not within Spark.
*/
class HadoopFileLinesReader(
    file: PartitionedFile,
    lineSeparator: Option[Array[Byte]],
    conf: Configuration) extends Iterator[Text] with Closeable {

  /** Convenience constructor using the default separators (`\r`, `\r\n`, `\n`). */
  def this(file: PartitionedFile, conf: Configuration) = this(file, None, conf)

  // wraps a Hadoop LineRecordReader over just this file split
  private val iterator = {
    val fileSplit = new FileSplit(
      new Path(new URI(file.filePath)),
      file.start,
      file.length,
      // TODO: Implement Locality
      Array.empty)
    // a synthetic attempt id; the reader only needs it for the context
    val attemptId = new TaskAttemptID(new TaskID(new JobID(), TaskType.MAP, 0), 0)
    val hadoopAttemptContext = new TaskAttemptContextImpl(conf, attemptId)
    val reader = lineSeparator match {
      case Some(sep) => new LineRecordReader(sep)
      // If the line separator is `None`, it covers `\r`, `\r\n` and `\n`.
      case _ => new LineRecordReader()
    }
    reader.initialize(fileSplit, hadoopAttemptContext)
    new RecordReaderIterator(reader)
  }

  override def hasNext: Boolean = iterator.hasNext

  override def next(): Text = iterator.next()

  override def close(): Unit = iterator.close()
}
| bravo-zhang/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/HadoopFileLinesReader.scala | Scala | apache-2.0 | 2,791 |
/*
* Copyright (C) 2007-2008 Artima, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Example code from:
*
* Programming in Scala (First Edition, Version 6)
* by Martin Odersky, Lex Spoon, Bill Venners
*
* http://booksites.artima.com/programming_in_scala
*/
/** Browser over a recipe database; the database is supplied by subclasses. */
abstract class Browser {

  /** the database to browse */
  val database: Database

  /** all recipes whose ingredient list contains the given food */
  def recipesUsing(food: Food) =
    database.allRecipes.filter(recipe =>
      recipe.ingredients.contains(food))

  // `: Unit =` instead of deprecated procedure syntax; the parameter type is
  // path-dependent on this browser's database
  def displayCategory(category: database.FoodCategory): Unit = {
    println(category)
  }
}
| peachyy/scalastu | modules/Browser.scala | Scala | apache-2.0 | 1,063 |
package ch.ltouroumov.modularmachines.common.texture
import net.minecraft.block.Block
import net.minecraft.client.renderer.texture.IIconRegister
import net.minecraft.util.{Facing, IIcon}
import net.minecraft.world.IBlockAccess
import net.minecraftforge.common.util.ForgeDirection
/** Selects connected-texture icons for a block face by inspecting the eight
 *  in-plane neighbours and matching the resulting connectivity bitmask.
 *
 *  `baseName` is the icon-name prefix; each variant suffix in `textureNames`
 *  must exist as "<baseName>_<variant>" in the texture pack. `canConnect`
 *  decides whether a neighbouring block joins the texture.
 *
 *  NOTE(review): the type parameter `TBlock` is never used in the body —
 *  presumably leftover from an earlier design; confirm before removing.
 */
class ConnectedTextureHandler[TBlock](val baseName: String, val canConnect: Block => Boolean) extends TextureHandler {

  // Suffixes of every icon variant registered in loadTextures().
  var textureNames = List[String](
    "None", "All", "All_Closed", "NorthSouth", "WestEast",
    "North", "South", "East", "West",
    "NorthEast", "NorthWest", "SouthEast", "SouthWest",
    "North_Closed", "South_Closed", "East_Closed", "West_Closed",
    "North_Open", "South_Open", "East_Open", "West_Open",
    "NorthEast_Closed", "NorthWest_Closed", "SouthEast_Closed", "SouthWest_Closed"
  )

  // Populated by loadTextures(); maps a variant suffix to its registered icon.
  var textures = Map[String, IIcon]()

  /** Registers one icon per variant and caches the suffix -> icon mapping. */
  def loadTextures(register: IIconRegister) = {
    val pairs = for(name <- textureNames)
      yield (name, register.registerIcon(baseName + "_" + name))
    textures = pairs.toMap
  }

  /** Icon used when no world context is available (e.g. inventory rendering). */
  def getTexture(side: ForgeDirection): IIcon = {
    textures("All_Closed")
  }

  // N = z- / S = z+
  // E = x+ / W = x-
  // U = y+ / D = y-
  /** Picks the icon for face `side` of the block at (x, y, z) from the
   *  connectivity bitmask of its neighbours in the face's plane.
   *
   *  NOTE(review): EAST maps to m = -1 and WEST to m = +1, the opposite of
   *  the axis comment above — presumably deliberate mirroring for the
   *  viewer-facing orientation; confirm against in-game rendering.
   */
  def getTexture(world: IBlockAccess, x: Int, y: Int, z: Int, side: ForgeDirection): IIcon = {
    implicit val w = world
    val connected = side match {
      case ForgeDirection.UP | ForgeDirection.DOWN => searchXZ(x, y, z)
      case ForgeDirection.NORTH => searchXY(x, y, z, -1)
      case ForgeDirection.SOUTH => searchXY(x, y, z, 1)
      case ForgeDirection.EAST => searchZY(x, y, z, -1)
      case ForgeDirection.WEST => searchZY(x, y, z, 1)
      case _ => throw new IllegalArgumentException("Invalid side")
    }
    connected match {
      // Truth table
      // Bitmask(N, S, E, W,NE,NW,SE,SW)
      // 1 = a connectable neighbour is present in that direction.
      // Squares
      case Bitmask(1, 1, 1, 1, 1, 1, 1, 1) => textures("None")
      case Bitmask(0, 0, 0, 0, 0, 0, 0, 0) => textures("All_Closed")
      case Bitmask(1, 1, 1, 1, _, _, _, _) => textures("All")
      // Straight
      case Bitmask(1, 1, 0, 0, _, _, _, _) => textures("NorthSouth")
      case Bitmask(0, 0, 1, 1, _, _, _, _) => textures("WestEast")
      // Sides
      case Bitmask(0, 1, 1, 1, _, _, 1, 1) => textures("North")
      case Bitmask(1, 0, 1, 1, 1, 1, _, _) => textures("South")
      case Bitmask(1, 1, 0, 1, 1, _, 1, _) => textures("East")
      case Bitmask(1, 1, 1, 0, _, 1, _, 1) => textures("West")
      // T Junctions
      // NOTE(review): the E/W "_Open" labels below are swapped relative to the
      // "Sides" cases above (Bitmask(1,1,0,1,...) maps to "East" there but
      // "West_Open" here) — presumably intentional mirroring; verify in-game.
      case Bitmask(0, 1, 1, 1, _, _, _, _) => textures("North_Open")
      case Bitmask(1, 0, 1, 1, _, _, _, _) => textures("South_Open")
      case Bitmask(1, 1, 0, 1, _, _, _, _) => textures("West_Open")
      case Bitmask(1, 1, 1, 0, _, _, _, _) => textures("East_Open")
      // U Junctions
      case Bitmask(0, 0, 0, 1, _, _, _, _) => textures("East_Closed")
      case Bitmask(0, 0, 1, 0, _, _, _, _) => textures("West_Closed")
      case Bitmask(0, 1, 0, 0, _, _, _, _) => textures("North_Closed")
      case Bitmask(1, 0, 0, 0, _, _, _, _) => textures("South_Closed")
      // Angles
      case Bitmask(1, 0, 0, 1, 0, _, _, _) => textures("SouthEast_Closed")
      case Bitmask(1, 0, 1, 0, _, 0, _, _) => textures("SouthWest_Closed")
      case Bitmask(0, 1, 0, 1, _, _, 0, _) => textures("NorthEast_Closed")
      case Bitmask(0, 1, 1, 0, _, _, _, 0) => textures("NorthWest_Closed")
      case Bitmask(1, 0, 0, 1, 1, _, _, _) => textures("SouthEast")
      case Bitmask(1, 0, 1, 0, _, 1, _, _) => textures("SouthWest")
      case Bitmask(0, 1, 0, 1, _, _, 1, _) => textures("NorthEast")
      case Bitmask(0, 1, 1, 0, _, _, _, 1) => textures("NorthWest")
      case mask =>
        // Unhandled combination: log it and fall back to the fully closed icon.
        System.out.println(mask)
        textures("All_Closed")
    }
  }

  /** Whether the given face should be rendered at all; currently always true
   *  (the neighbour-based culling below is disabled).
   */
  def shouldRenderSide(world: IBlockAccess, x: Int, y: Int, z: Int, side: Int) : Boolean = {
    true
    //!searchAt(x + Facing.offsetsXForSide(side), y + Facing.offsetsYForSide(side), z + Facing.offsetsZForSide(side), orOpaque = true)(world)
    /*ForgeDirection.getOrientation(side) match {
      case ForgeDirection.UP => !searchAt(x, y + 1, z)
      case ForgeDirection.DOWN => !searchAt(x, y - 1, z)
      case ForgeDirection.NORTH => !searchAt(x, y, z - 1)
      case ForgeDirection.SOUTH => !searchAt(x, y, z + 1)
      case ForgeDirection.WEST => !searchAt(x - 1, y, z)
      case ForgeDirection.EAST => !searchAt(x + 1, y, z)
      case _ => true
    }*/
  }

  // 1-bit-per-direction connectivity snapshot of the eight in-plane neighbours.
  case class Bitmask(north: Int, south: Int,
                     east: Int, west: Int,
                     ne: Int, nw: Int,
                     se: Int, sw: Int)

  /** Connectivity in the horizontal (XZ) plane, for UP/DOWN faces.
   *  NOTE(review): the diagonal offsets use the opposite x sign from the
   *  cardinal ones (ne samples x - 1 while east samples x + 1) — presumably
   *  the same mirroring as above; confirm before "fixing".
   */
  def searchXZ(x :Int, y: Int, z: Int)(implicit world: IBlockAccess) : Bitmask =
    Bitmask(north = searchAtBit(x, y, z - 1),
            south = searchAtBit(x, y, z + 1),
            east = searchAtBit(x + 1, y, z),
            west = searchAtBit(x - 1, y, z),
            ne = searchAtBit(x - 1, y, z - 1),
            nw = searchAtBit(x + 1, y, z - 1),
            se = searchAtBit(x - 1, y, z + 1),
            sw = searchAtBit(x + 1, y, z + 1))

  /** Connectivity in the XY plane, for NORTH/SOUTH faces; `m` flips the
   *  horizontal axis to account for which way the face is viewed from.
   */
  def searchXY(x :Int, y: Int, z: Int, m: Int)(implicit world: IBlockAccess) : Bitmask = {
    Bitmask(north = searchAtBit(x, y + 1, z),
            south = searchAtBit(x, y - 1, z),
            east = searchAtBit(x + m, y, z),
            west = searchAtBit(x - m, y, z),
            ne = searchAtBit(x - m, y + 1, z),
            nw = searchAtBit(x + m, y + 1, z),
            se = searchAtBit(x - m, y - 1, z),
            sw = searchAtBit(x + m, y - 1, z))
  }

  /** Connectivity in the ZY plane, for EAST/WEST faces; `m` mirrors the
   *  horizontal (z) axis analogously to searchXY.
   */
  def searchZY(x :Int, y: Int, z: Int, m: Int)(implicit world: IBlockAccess) : Bitmask = {
    Bitmask(north = searchAtBit(x, y + 1, z),
            south = searchAtBit(x, y - 1, z),
            east = searchAtBit(x, y, z + m),
            west = searchAtBit(x, y, z - m),
            ne = searchAtBit(x, y + 1, z - m),
            nw = searchAtBit(x, y + 1, z + m),
            se = searchAtBit(x, y - 1, z - m),
            sw = searchAtBit(x, y - 1, z + m))
  }

  /** True if the block at (x, y, z) connects, or (when `orOpaque`) is an
   *  opaque cube.
   */
  def searchAt(x: Int, y: Int, z: Int, orOpaque: Boolean = false)(implicit world: IBlockAccess) : Boolean = {
    val block = world.getBlock(x, y, z)
    canConnect(block) || (orOpaque && block.isOpaqueCube)
  }

  /** searchAt as a 0/1 bit for building a Bitmask. */
  def searchAtBit(x: Int, y: Int, z: Int)(implicit world: IBlockAccess) : Int = {
    if (searchAt(x, y, z)) 1
    else 0
  }
}
| ltouroumov/modular-machines | src/main/scala/ch/ltouroumov/modularmachines/common/texture/ConnectedTextureHandler.scala | Scala | gpl-2.0 | 6,339 |
package quizleague.web.maintain.fixtures
import quizleague.web.maintain.component._
import quizleague.web.model._
import scala.scalajs.js
import TemplateElements._
import quizleague.web.maintain.text.TextService
import js.Dynamic.{global => g}
import quizleague.web.util.Logging
import quizleague.web.maintain.competition.CompetitionService
import quizleague.web.maintain.team.TeamService
import quizleague.web.model.Team
import quizleague.web.maintain.venue.VenueService
import quizleague.web.maintain.util.TeamManager
import quizleague.web.util.rx._
import quizleague.web.maintain.competition.CompetitionComponentConfig
import quizleague.web.maintain.competition.CompetitionComponent
import quizleague.web.util.rx.RefObservable
import quizleague.web.util.rx.RefObservable
import quizleague.web.maintain.util.TeamManager
import quizleague.web.maintain.component.ItemComponentConfig._
import quizleague.web.core._
import com.felstar.scalajs.vue.VueRxComponent
import quizleague.web.util.component.{SelectUtils, SelectWrapper}
/** JS facade for the fixtures-maintenance Vue component; members mirror the
 *  component's reactive data and computed/subscribed values on the JS side.
 */
@js.native
trait FixturesComponent extends CompetitionComponent{
  // Tracks which teams have already been placed in a fixture.
  var teamManager:TeamManager
  // Teams wrapped for use as <v-select> items.
  val teams:js.Array[SelectWrapper[Team]]
  // Currently selected home/away teams for the fixture being added.
  var homeTeam:RefObservable[Team]
  var awayTeam:RefObservable[Team]
  // The fixture set (date/time/description) under edit.
  val fxs:Fixtures
  // Venue for the fixture being added.
  var venue:RefObservable[Venue]
}
/** Component definition for the fixtures-maintenance screen: template,
 *  Vue method/subscription/data registrations, and their Scala handlers.
 */
object FixturesComponent extends CompetitionComponentConfig{

  override type facade = FixturesComponent
  // Firestore-style key of the owning season/competition, taken from the route.
  override def parentKey(c:facade) = s"season/${c.$route.params("seasonId")}/competition/${c.$route.params("id")}"

  val fixtureService = FixtureService

  components(FixtureComponent)

  val template = s"""
  <v-container v-if="item && season && fxs && teams && fixtures && venues">
    <h2>Fixtures : {{item.name}}</h2>
    <v-form v-model="valid">
    <v-layout column>
      <v-layout column>
        <v-text-field label="Date" v-model="fxs.date" type="date" required :rules=${valRequired("Date")}></v-text-field>
        <v-text-field label="Time" v-model="fxs.start" type="time" required :rules=${valRequired("Time")}></v-text-field>
        <v-text-field label="Description" v-model="fxs.description" type="text" required :rules=${valRequired("Description")}></v-text-field>
      </v-layout>
      <v-layout column>
        <h4>Fixture List</h4>
        <v-layout row v-if="venues && teams">
          <v-select label="Home" v-model="homeTeam" :items="unusedTeams(awayTeam)" @input="setVenue(homeTeam)"></v-select>
          <v-select label="Away" v-model="awayTeam" :items="unusedTeams(homeTeam)"></v-select>
          <v-select label="Venue" v-model="venue" :items="venues"></v-select>
          <v-btn style="top:5px;" icon v-on:click="addFixture()" :disabled="!(homeTeam && awayTeam && venue)"><v-icon >mdi-plus</v-icon></v-btn>
        </v-layout>
        <v-layout column>
          <v-layout row v-for="fixture in fixtures" :key="fixture.id">
            <v-btn style="top:-14px;" icon v-on:click="removeFixture(fixture)" ><v-icon>mdi-cancel</v-icon></v-btn>
            <fixture :fixture="fixture" :fixtures="fxs" :teamManager="teamManager"></fixture>
          </v-layout>
        </v-layout>
      </v-layout>
    </v-layout>
    $formButtons
    </v-form>
  </v-container>
  """

  /** Teams not yet placed in a fixture, excluding the one picked as `other`. */
  def unusedTeams(c:facade, other: RefObservable[Team]) = teamManager(c).unusedTeams(other)

  /** Defaults the venue select to the chosen home team's venue. */
  def setVenue(c:facade, team: RefObservable[Team]) = {
    TeamService.get(team.id).subscribe(t => c.venue = t.venue)
  }

  /** Creates and saves a fixture from the current selections, then clears them. */
  def addFixture(c:facade) = {
    val f = fixtureService.instance(
      c.fxs,
      teamManager(c).take(c.homeTeam),
      teamManager(c).take(c.awayTeam),
      c.venue,
      c.item.typeName == CompetitionType.subsidiary.toString)

    fixtureService.save(f).subscribe(x => c.$forceUpdate())

    // Reset the entry row for the next fixture.
    c.homeTeam = null
    c.awayTeam = null
    c.venue = null
  }

  /** Deletes a fixture (after confirmation) and returns its teams to the pool. */
  def removeFixture(c:facade, fx:Fixture) = {
    if(org.scalajs.dom.window.confirm("Delete ?")){
      c.teamManager.untake(fx.home)
      c.teamManager.untake(fx.away)
      fixtureService.delete(fx).subscribe(x => c.$forceUpdate())
    }
  }

  // Lazily initialises the per-component TeamManager on first use.
  def teamManager(c:facade) = if(c.teamManager == null) {c.teamManager = new TeamManager(c.teams); c.teamManager} else c.teamManager

  // Select-item sources for the venue and team dropdowns.
  def venues() = SelectUtils.model[Venue](FilteredVenueService)(_.name)
  def teams() = SelectUtils.model[Team](FilteredTeamService)(_.name)

  /** Persists the fixture set under its parent competition key and navigates back. */
  override def save(c:facade):Unit = {
    val item = c.fxs
    item.key = FixturesService.key(parentKey(c), item.id)
    FixturesService.save(item)
    c.$router.back()
  }

  // Vue wiring: methods callable from the template.
  method("addFixture")({addFixture _ }:js.ThisFunction)
  method("setVenue")({setVenue _ }:js.ThisFunction)
  method("unusedTeams")({unusedTeams _ }:js.ThisFunction)
  method("save")({save _ }:js.ThisFunction)
  method("removeFixture")({removeFixture _ }:js.ThisFunction)

  // Reactive subscriptions feeding the template's v-if guards.
  subscription("fxs")(c => obsFromParam(c,"fixturesId", FixturesService))
  subscription("fixtures")(c => obsFromParam(c,"fixturesId", FixturesService).flatMap(_.fixture))
  subscription("venues")(c => venues())
  subscription("teams")(c => teams())

  // Initial reactive data.
  data("teamManager", null)
  data("venue", null)
  data("homeTeam", null)
  data("awayTeam", null)
}
/** JS facade for a single fixture row; `fixture`/`fixtures`/`teamManager`
 *  arrive as props, the rest is reactive component state.
 */
@js.native
trait FixtureComponent extends VueRxComponent{
  // Prop: the fixture this row displays.
  val fixture:Fixture
  // Local working copy of the fixture (initialised from the prop).
  var fx:Fixture
  // Prop: the fixture set the fixture belongs to.
  val fixtures:Fixtures
  // Prop: shared team-allocation tracker.
  val teamManager:TeamManager
  // Whether the result-entry row is expanded.
  var showResult:Boolean
}
/** Component definition for one fixture row: shows the teams/venue, lets the
 *  user enter or edit a result, and links to match reports.
 *
 *  Template fixes relative to the original:
 *   - the save button in the result row was never closed (`</v-btn>` was
 *     missing before `</v-layout>`), producing mis-nested markup;
 *   - the reports row's v-if dereferenced `fx.result.reports` without first
 *     checking `fx.result`, which throws during render when no result exists.
 */
object FixtureComponent extends Component{

  type facade = FixtureComponent
  val name = "fixture"
  val template = """
  <v-layout column v-if="fx">
    <v-layout row>
     <v-btn style="top:-14px;" icon v-if="fx.result" v-on:click="showResult = !showResult"><v-icon>mdi-check</v-icon></v-btn>
     <v-btn style="top:-14px;" icon v-if="!fx.result" v-on:click="addResult()"><v-icon>mdi-plus</v-icon></v-btn>
     <span >{{async(fx.home).name}} - {{async(fx.away).name}} @ {{async(fx.venue).name}}</span>
    </v-layout>
    <v-layout row v-if="showResult && fx.result">
      <span style="position:relative;top:28px;"><h4>Result : </h4></span>
      <v-text-field label="Home Score" v-model.number="fx.result.homeScore" type="number"></v-text-field>
      <v-text-field label="Away Score" v-model.number="fx.result.awayScore" type="number"></v-text-field>
      <v-btn icon fab color="primary" v-on:click="save(fx)"><v-icon>mdi-floppy</v-icon></v-btn>
    </v-layout>
    <v-layout row v-if="showResult && fx.result && fx.result.reports">
      <span style="position:relative;top:14px;"><h4>Reports : </h4></span><v-btn text v-on:click="editText(report.text.id)" v-for="report in async(fx.result.reports).reports" :key="report.text.id">{{async(report.team).shortName}}...</v-btn>
    </v-layout>
    <v-divider></v-divider>
    <span>&nbsp;</span>
  </v-layout>
  """

  /** Navigates to the text editor for a report body. */
  def editText(c:facade, textId:String) = {
    c.$router.push(s"/maintain/text/$textId")
  }

  /** Attaches an empty result to the fixture, persists it and expands the row. */
  def addResult(c:facade) = {
    c.fx = FixtureService.addResult(c.fx)
    FixtureService.save(c.fx)
    c.showResult = true
  }

  /** Persists the edited fixture (scores). */
  def save(fixture:Fixture) = {
    FixtureService.save(fixture)
  }

  data("showResult", false)
  // Initialise fx from the prop, marking both teams as taken in the shared manager.
  data("fx")(c => {c.teamManager.take(c.fixture.home);c.teamManager.take(c.fixture.away);c.fixture})
  props("fixture","fixtures","teamManager")
  //subscription("fx")(c => c.fixture.obs.map(f => FixtureService.cache(f)).map(x => {c.teamManager.take(x.home);c.teamManager.take(x.away);x}))
  method("editText")({editText _ }:js.ThisFunction)
  method("addResult")({addResult _ }:js.ThisFunction)
  method("save")({save _})
}
| gumdrop/quizleague-maintain | js/src/main/scala/quizleague/web/maintain/fixtures/FixturesComponent.scala | Scala | mit | 7,500 |
//
// OrcBacktrace.scala -- Code for building an Orc Backtrace from a Truffle stack.
// Project PorcE
//
// Created by amp on Mar 1, 2018.
//
// Copyright (c) 2018 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.run.porce.runtime
import scala.collection.JavaConverters.asScalaBufferConverter
import orc.compile.parse.OrcSourceRange
import orc.error.runtime.{ JavaException, TokenException }
import orc.run.porce.HasPorcNode
import com.oracle.truffle.api.{ TruffleException, TruffleStackTraceElement }
import com.oracle.truffle.api.nodes.Node
/** Builds Orc-level backtraces from Truffle stack information. */
object OrcBacktrace {

  /** Collects the Orc source ranges for the Truffle stack attached to `e`,
   *  prepending the range of `node` (the site where the exception surfaced).
   *  Frames with no resolvable Orc range are dropped.
   */
  def fromTruffleException(e: Throwable, node: Node): Array[OrcSourceRange] = {
    /*
     * Truffle bug work-around: TruffleStackTraceElement.fillIn adds
     * non-serializable values to the Throwable, so we create a temporary
     * copy and leave the original alone
     */
    val throwAwayThowable: Throwable = e match {
      case te: TruffleException => {
        // Mirror the Truffle-visible properties of `e` on a fresh Throwable
        // so fillIn mutates the copy instead of the original exception.
        val newE = new Throwable with TruffleException() {
          override def getLocation() = te.getLocation()
          override def getStackTraceElementLimit() = te.getStackTraceElementLimit()
          override def isInternalError() = te.isInternalError()
        }
        newE
      }
      case _ => new Throwable()
    }
    // Preserve the cause chain so fillIn sees the same causal structure.
    throwAwayThowable.initCause(e.getCause)
    TruffleStackTraceElement.fillIn(throwAwayThowable)
    val truffleFrames = TruffleStackTraceElement.getStackTrace(throwAwayThowable).asScala
    if (truffleFrames == null) {
      Array()
    } else {
      val locations = findRange(node) +: truffleFrames.map(frame => findRange(frame.getLocation))
      locations.flatten.toArray
    }
  }

  /** Wraps `e` in a TokenException (Java throwables become JavaException),
   *  stamping it with the position of `node` and the Truffle-derived backtrace.
   */
  def orcifyException(e: Throwable, node: Node): TokenException = {
    val backtrace = fromTruffleException(e, node)
    val r = e match {
      case te: TokenException => te
      case _ => new JavaException(e)
    }
    findRange(node).foreach(r.setPosition(_))
    r.setBacktrace(backtrace)
    r
  }

  // Walks up the Truffle node tree to the nearest node carrying a Porc
  // source-text range; None when the root is reached without finding one.
  private def findRange(n: Node): Option[OrcSourceRange] = n match {
    case n: HasPorcNode if n.porcNode.isDefined =>
      n.porcNode flatMap { _.value.sourceTextRange }
    case n: Node =>
      findRange(n.getParent)
    case null =>
      None
  }
}
| orc-lang/orc | PorcE/src/orc/run/porce/runtime/OrcBacktrace.scala | Scala | bsd-3-clause | 2,455 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.util.concurrent._
import java.util.concurrent.atomic._
import java.util.concurrent.locks.ReentrantReadWriteLock
import com.yammer.metrics.core.Gauge
import kafka.metrics.KafkaMetricsGroup
import kafka.utils.CoreUtils.{inReadLock, inWriteLock}
import kafka.utils._
import kafka.utils.timer._
import scala.collection._
import scala.collection.mutable.ListBuffer
/**
 * An operation whose processing needs to be delayed for at most the given delayMs. For example
 * a delayed produce operation could be waiting for specified number of acks; or
 * a delayed fetch operation could be waiting for a given number of bytes to accumulate.
 *
 * The logic upon completing a delayed operation is defined in onComplete() and will be called exactly once.
 * Once an operation is completed, isCompleted() will return true. onComplete() can be triggered by either
 * forceComplete(), which forces calling onComplete() after delayMs if the operation is not yet completed,
 * or tryComplete(), which first checks if the operation can be completed or not now, and if yes calls
 * forceComplete().
 *
 * A subclass of DelayedOperation needs to provide an implementation of both onComplete() and tryComplete().
 */
abstract class DelayedOperation(override val delayMs: Long) extends TimerTask with Logging {

  // Flips false -> true exactly once; the thread winning this CAS is the only
  // one allowed to run onComplete().
  private val completed = new AtomicBoolean(false)

  /*
   * Force completing the delayed operation, if not already completed.
   * This function can be triggered when
   *
   * 1. The operation has been verified to be completable inside tryComplete()
   * 2. The operation has expired and hence needs to be completed right now
   *
   * Return true iff the operation is completed by the caller: note that
   * concurrent threads can try to complete the same operation, but only
   * the first thread will succeed in completing the operation and return
   * true, others will still return false
   */
  def forceComplete(): Boolean = {
    if (completed.compareAndSet(false, true)) {
      // cancel the timeout timer
      cancel()
      onComplete()
      true
    } else {
      false
    }
  }

  /**
   * Check if the delayed operation is already completed
   */
  def isCompleted: Boolean = completed.get()

  /**
   * Call-back to execute when a delayed operation gets expired and hence forced to complete.
   */
  def onExpiration(): Unit

  /**
   * Process for completing an operation; This function needs to be defined
   * in subclasses and will be called exactly once in forceComplete()
   */
  def onComplete(): Unit

  /**
   * Try to complete the delayed operation by first checking if the operation
   * can be completed by now. If yes execute the completion logic by calling
   * forceComplete() and return true iff forceComplete returns true; otherwise return false
   *
   * This function needs to be defined in subclasses
   */
  def tryComplete(): Boolean

  /**
   * Thread-safe variant of tryComplete(). This can be overridden if the operation provides its
   * own synchronization.
   */
  def safeTryComplete(): Boolean = {
    // Default: serialize completion checks on the operation's own monitor.
    synchronized {
      tryComplete()
    }
  }

  /*
   * run() method defines a task that is executed on timeout
   */
  override def run(): Unit = {
    // Only the caller that actually completes the operation fires onExpiration().
    if (forceComplete())
      onExpiration()
  }
}
object DelayedOperationPurgatory {

  /** Convenience constructor: builds a purgatory backed by a fresh
   *  [[SystemTimer]] named after the purgatory itself.
   */
  def apply[T <: DelayedOperation](purgatoryName: String,
                                   brokerId: Int = 0,
                                   purgeInterval: Int = 1000): DelayedOperationPurgatory[T] =
    new DelayedOperationPurgatory[T](purgatoryName, new SystemTimer(purgatoryName), brokerId, purgeInterval)
}
/**
 * A helper purgatory class for bookkeeping delayed operations with a timeout, and expiring timed out operations.
 */
final class DelayedOperationPurgatory[T <: DelayedOperation](purgatoryName: String,
                                                             timeoutTimer: Timer,
                                                             brokerId: Int = 0,
                                                             purgeInterval: Int = 1000,
                                                             reaperEnabled: Boolean = true)
  extends Logging with KafkaMetricsGroup {

  /* a list of operation watching keys */
  private val watchersForKey = new Pool[Any, Watchers](Some((key: Any) => new Watchers(key)))

  // Read lock for per-key watch/list lookups; write lock for removing or
  // cancelling whole watcher lists, so lists are never resurrected after removal.
  private val removeWatchersLock = new ReentrantReadWriteLock()

  // the number of estimated total operations in the purgatory
  private[this] val estimatedTotalOperations = new AtomicInteger(0)

  /* background thread expiring operations that have timed out */
  private val expirationReaper = new ExpiredOperationReaper()

  private val metricsTags = Map("delayedOperation" -> purgatoryName)

  newGauge(
    "PurgatorySize",
    new Gauge[Int] {
      def value: Int = watched
    },
    metricsTags
  )

  newGauge(
    "NumDelayedOperations",
    new Gauge[Int] {
      def value: Int = delayed
    },
    metricsTags
  )

  if (reaperEnabled)
    expirationReaper.start()

  /**
   * Check if the operation can be completed, if not watch it based on the given watch keys
   *
   * Note that a delayed operation can be watched on multiple keys. It is possible that
   * an operation is completed after it has been added to the watch list for some, but
   * not all of the keys. In this case, the operation is considered completed and won't
   * be added to the watch list of the remaining keys. The expiration reaper thread will
   * remove this operation from any watcher list in which the operation exists.
   *
   * @param operation the delayed operation to be checked
   * @param watchKeys keys for bookkeeping the operation
   * @return true iff the delayed operations can be completed by the caller
   */
  def tryCompleteElseWatch(operation: T, watchKeys: Seq[Any]): Boolean = {
    assert(watchKeys.nonEmpty, "The watch key list can't be empty")

    // The cost of tryComplete() is typically proportional to the number of keys. Calling
    // tryComplete() for each key is going to be expensive if there are many keys. Instead,
    // we do the check in the following way. Call tryComplete(). If the operation is not completed,
    // we just add the operation to all keys. Then we call tryComplete() again. At this time, if
    // the operation is still not completed, we are guaranteed that it won't miss any future triggering
    // event since the operation is already on the watcher list for all keys. This does mean that
    // if the operation is completed (by another thread) between the two tryComplete() calls, the
    // operation is unnecessarily added for watch. However, this is a less severe issue since the
    // expire reaper will clean it up periodically.

    var isCompletedByMe = operation.safeTryComplete()
    if (isCompletedByMe)
      return true

    var watchCreated = false
    for(key <- watchKeys) {
      // If the operation is already completed, stop adding it to the rest of the watcher list.
      if (operation.isCompleted)
        return false
      watchForOperation(key, operation)

      if (!watchCreated) {
        watchCreated = true
        // Count the operation once no matter how many keys it is watched on.
        estimatedTotalOperations.incrementAndGet()
      }
    }

    isCompletedByMe = operation.safeTryComplete()
    if (isCompletedByMe)
      return true

    // if it cannot be completed by now and hence is watched, add to the expire queue also
    if (!operation.isCompleted) {
      timeoutTimer.add(operation)
      if (operation.isCompleted) {
        // cancel the timer task
        operation.cancel()
      }
    }

    false
  }

  /**
   * Check if some delayed operations can be completed with the given watch key,
   * and if yes complete them.
   *
   * @return the number of completed operations during this process
   */
  def checkAndComplete(key: Any): Int = {
    val watchers = inReadLock(removeWatchersLock) { watchersForKey.get(key) }
    if(watchers == null)
      0
    else
      watchers.tryCompleteWatched()
  }

  /**
   * Return the total size of watch lists the purgatory. Since an operation may be watched
   * on multiple lists, and some of its watched entries may still be in the watch lists
   * even when it has been completed, this number may be larger than the number of real operations watched
   */
  def watched: Int = allWatchers.map(_.countWatched).sum

  /**
   * Return the number of delayed operations in the expiry queue
   */
  def delayed: Int = timeoutTimer.size

  /**
   * Cancel all watched operations for the given key, removing its watcher list.
   *
   * @return the operations that were cancelled (Nil if the key was not watched)
   */
  def cancelForKey(key: Any): List[T] = {
    inWriteLock(removeWatchersLock) {
      val watchers = watchersForKey.remove(key)
      if (watchers != null)
        watchers.cancel()
      else
        Nil
    }
  }
  /*
   * Return all the current watcher lists,
   * note that the returned watchers may be removed from the list by other threads
   */
  private def allWatchers = inReadLock(removeWatchersLock) { watchersForKey.values }

  /*
   * Return the watch list of the given key, note that we need to
   * grab the removeWatchersLock to avoid the operation being added to a removed watcher list
   */
  private def watchForOperation(key: Any, operation: T) {
    inReadLock(removeWatchersLock) {
      val watcher = watchersForKey.getAndMaybePut(key)
      watcher.watch(operation)
    }
  }

  /*
   * Remove the key from watcher lists if its list is empty
   */
  private def removeKeyIfEmpty(key: Any, watchers: Watchers) {
    inWriteLock(removeWatchersLock) {
      // if the current key is no longer correlated to the watchers to remove, skip
      if (watchersForKey.get(key) != watchers)
        return

      if (watchers != null && watchers.isEmpty) {
        watchersForKey.remove(key)
      }
    }
  }

  /**
   * Shutdown the expire reaper thread
   */
  def shutdown() {
    if (reaperEnabled)
      expirationReaper.shutdown()
    timeoutTimer.shutdown()
  }

  /**
   * A linked list of watched delayed operations based on some key
   */
  private class Watchers(val key: Any) {
    // Concurrent queue so watch() can race with completion/purge traversals.
    private[this] val operations = new ConcurrentLinkedQueue[T]()

    // count the current number of watched operations. This is O(n), so use isEmpty() if possible
    def countWatched: Int = operations.size

    def isEmpty: Boolean = operations.isEmpty

    // add the element to watch
    def watch(t: T) {
      operations.add(t)
    }

    // traverse the list and try to complete some watched elements
    def tryCompleteWatched(): Int = {
      var completed = 0

      val iter = operations.iterator()
      while (iter.hasNext) {
        val curr = iter.next()
        if (curr.isCompleted) {
          // another thread has completed this operation, just remove it
          iter.remove()
        } else if (curr.safeTryComplete()) {
          iter.remove()
          completed += 1
        }
      }

      if (operations.isEmpty)
        removeKeyIfEmpty(key, this)

      completed
    }

    // cancel every watched operation and drain the list, returning the cancelled ops
    def cancel(): List[T] = {
      val iter = operations.iterator()
      var cancelled = new ListBuffer[T]()
      while (iter.hasNext) {
        val curr = iter.next()
        curr.cancel()
        iter.remove()
        cancelled += curr
      }
      cancelled.toList
    }

    // traverse the list and purge elements that are already completed by others
    def purgeCompleted(): Int = {
      var purged = 0

      val iter = operations.iterator()
      while (iter.hasNext) {
        val curr = iter.next()
        if (curr.isCompleted) {
          iter.remove()
          purged += 1
        }
      }

      if (operations.isEmpty)
        removeKeyIfEmpty(key, this)

      purged
    }
  }

  /** Advance the timing wheel by timeoutMs and, when enough completed-but-still-
   *  watched entries have accumulated, purge them from the watch lists.
   */
  def advanceClock(timeoutMs: Long) {
    timeoutTimer.advanceClock(timeoutMs)

    // Trigger a purge if the number of completed but still being watched operations is larger than
    // the purge threshold. That number is computed by the difference btw the estimated total number of
    // operations and the number of pending delayed operations.
    if (estimatedTotalOperations.get - delayed > purgeInterval) {
      // now set estimatedTotalOperations to delayed (the number of pending operations) since we are going to
      // clean up watchers. Note that, if more operations are completed during the clean up, we may end up with
      // a little overestimated total number of operations.
      estimatedTotalOperations.getAndSet(delayed)
      debug("Begin purging watch lists")
      val purged = allWatchers.map(_.purgeCompleted()).sum
      debug("Purged %d elements from watch lists.".format(purged))
    }
  }

  /**
   * A background reaper to expire delayed operations that have timed out
   */
  private class ExpiredOperationReaper extends ShutdownableThread(
    "ExpirationReaper-%d".format(brokerId),
    false) {

    override def doWork() {
      // Advance the wheel in 200ms steps; expired tasks run their expiration callback.
      advanceClock(200L)
    }
  }
}
| rhauch/kafka | core/src/main/scala/kafka/server/DelayedOperation.scala | Scala | apache-2.0 | 13,623 |
/**
* This file is part of objc2swift.
* https://github.com/yahoojapan/objc2swift
*
* Copyright (c) 2015 Yahoo Japan Corporation
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
package org.objc2swift
import org.objc2swift.ObjCParser._
import scala.collection.JavaConversions._
/**
 * Implements visit methods for Objective-C method contexts, translating
 * declarations/definitions into Swift `func` declarations.
 */
protected trait MethodVisitor extends BaseConverter {

  /**
   * Returns translated text of instance method declaration context.
   *
   * @param ctx the parse tree
   * @return Strings of Swift's instance method code
   */
  override def visitInstance_method_declaration(ctx: Instance_method_declarationContext): String =
    Option(ctx.method_declaration()).map(c =>
      s"${indent(ctx)}${optional(ctx)}${visit(c)}".stripSuffix(" ")
    ).getOrElse("")

  /**
   * Returns translated text of class method declaration context.
   * Prefixes Swift's `class` modifier.
   *
   * @param ctx the parse tree
   **/
  override def visitClass_method_declaration(ctx: Class_method_declarationContext): String =
    Option(ctx.method_declaration()).map(c =>
      s"${indent(ctx)}${optional(ctx)}class ${visit(c)}".stripSuffix(" ")
    ).getOrElse("")

  /**
   * Returns translated text of method definition context.
   * Emits nothing when the definition was already printed while visiting
   * the matching declaration.
   *
   * @param ctx the parse tree
   * @return Strings of Swift code
   */
  override def visitInstance_method_definition(ctx: Instance_method_definitionContext): String =
    ctx.method_definition() match {
      case c if !isVisited(c) => s"${indent(ctx)}${visit(c)}".stripSuffix(" ")
      case _ => "" // Already printed
    }

  /**
   * Returns translated text of class method definition context.
   * Emits nothing when already printed via the matching declaration.
   *
   * @param ctx the parse tree
   **/
  override def visitClass_method_definition(ctx: Class_method_definitionContext): String =
    ctx.method_definition() match {
      case c if !isVisited(c) => s"${indent(ctx)}class ${visit(c)}".stripSuffix(" ")
      case _ => "" // Already printed
    }

  /**
   * Returns translated text of method declaration context.
   * If a matching definition exists in the @implementation, that definition
   * is emitted instead of a bare header.
   *
   * @param ctx the parse tree
   * @return Strings of Swift's method code
   */
  override def visitMethod_declaration(ctx: Method_declarationContext): String =
    findCorrespondingMethodDefinition(ctx) match {
      case Some(impl: Method_definitionContext) => visit(impl)
      case _ =>
        // Has no definition
        val slct = ctx.method_selector()
        val tp = Option(ctx.method_type())
        val hd = createMethodHeader(slct, tp)

        // Check ancestor is protocol or not
        ctx.parent.parent.parent match {
          case _: Protocol_declarationContext => hd
          case _ => s"$hd {\\n${indent(ctx)}}"
        }
    }

  /**
   * Returns translated text of method definition context.
   * Produces the full Swift method: header, body, closing brace.
   *
   * @param ctx the parse tree
   **/
  override def visitMethod_definition(ctx: Method_definitionContext): String = {
    val builder = List.newBuilder[String]

    val slct = ctx.method_selector()
    val tp = Option(ctx.method_type())
    val hd = createMethodHeader(slct, tp)

    builder += s"$hd {\\n"
    builder += visit(ctx.compound_statement())
    builder += s"${indent(ctx)}}"

    builder.result().mkString
  }

  /**
   * Returns translated text of method selector context.
   * Converts the Objective-C keyword selector into a Swift
   * "name(param list)" form.
   * @param ctx the parse tree
   **/
  override def visitMethod_selector(ctx: Method_selectorContext): String =
    Option(ctx.selector()) match {
      case Some(s) => s"${visit(s)}()" // No parameters
      case _ =>
        // Method name(selector)
        val selector = Option(ctx.keyword_declarator(0).selector()).map(visit).getOrElse("")

        // First parameter
        val head = visitKeyword_declarator(ctx.keyword_declarator(0), isHead = true)

        // Other parameters
        val tail = ctx.keyword_declarator().tail.foldLeft("")((z, c) => {
          z + ", " + visitKeyword_declarator(c)
        })

        s"$selector($head$tail)"
    }

  /**
   * Returns translated text of keyword declarator
   *
   * @param ctx the parse tree
   **/
  override def visitKeyword_declarator(ctx: Keyword_declaratorContext): String =
    this.visitKeyword_declarator(ctx, isHead = false)

  /**
   * Returns translated text of keyword declarator
   *
   * @param ctx the parse tree
   * @param isHead node index in branches
   * @return parameter code
   */
  private def visitKeyword_declarator(ctx: Keyword_declaratorContext, isHead: Boolean): String = {
    // Parameter's Internal name
    val paramName = ctx.IDENTIFIER().getText

    // Method name(idx = 0) or Parameter's External name
    val selector = Option(ctx.selector()).map(visit).getOrElse("")

    // Parameter's Type
    val it = ctx.method_type().toIterator
    val paramType = it.map(visit).find(!_.isEmpty).getOrElse("")

    // Swift drops the external name when it is absent, redundant, or this is
    // the first parameter (whose external name is the method name itself).
    selector match {
      case s if s.isEmpty => s"$paramName: $paramType" // No external name
      case s if isHead => s"$paramName: $paramType" // head param has no external name
      case s if s == paramName => s"$paramName: $paramType" // external name equals internal one
      case _ => s"$selector $paramName: $paramType"
    }
  }

  /**
   * Return method/parameter type on Swift rule.
   * Falls back to "AnyObject" when no type specifier is present; "void"
   * becomes the empty string (no return annotation).
   *
   * @param ctx the parse tree
   * @return Swift method type
   */
  override def visitMethod_type(ctx: Method_typeContext): String = {
    val retType = (for {
      x <- Option(ctx.type_name().specifier_qualifier_list())
      y <- Option(x.type_specifier())
    } yield y).map(concatType(_)).getOrElse("AnyObject")

    retType match {
      case "void" => ""
      case _ => retType
    }
  }

  /**
   * Returns method header text.
   * @param sctx method_selector_context tree
   * @param tctx method_type_context tree (Optional)
   * @return Translated text of method header contexts.
   */
  private def createMethodHeader(sctx: Method_selectorContext, tctx: Option[Method_typeContext]): String =
    tctx match {
      case Some(c) => visit(c) match {
        case "IBAction" => s"@IBAction func ${visit(sctx)}" // IBAction
        case s if !s.isEmpty => s"func ${visit(sctx)} -> $s"
        case _ => s"func ${visit(sctx)}" // void
      }
      case None => s"func ${visit(sctx)} -> AnyObject" // Default
    }

  /** Finds the method_definition in the corresponding @implementation (class
   *  or category) whose selector text matches this declaration, if any.
   *  Matching is done on the raw selector text, within the same
   *  instance/class method family as the declaration.
   */
  def findCorrespondingMethodDefinition(declCtx: Method_declarationContext): Option[Method_definitionContext] = {
    val selector = declCtx.method_selector.getText

    val implDefList = declCtx.parent.parent.parent match {
      case classCtx: Class_interfaceContext =>
        findCorrespondingClassImplementation(classCtx) match {
          case Some(implCtx) => implCtx.implementation_definition_list
          case None => return None
        }
      case catCtx: Category_interfaceContext =>
        findCorrespondingCategoryImplementation(catCtx) match {
          case Some(implCtx) => implCtx.implementation_definition_list
          case None => return None
        }
      case _ => return None
    }

    declCtx.parent match {
      case _: Instance_method_declarationContext =>
        implDefList.instance_method_definition.map(_.method_definition())
          .find(_.method_selector.getText == selector)
      case _: Class_method_declarationContext =>
        implDefList.class_method_definition.map(_.method_definition())
          .find(_.method_selector.getText == selector)
      case _ => None
    }
  }
}
| johndpope/objc2swift | src/main/scala/org/objc2swift/MethodVisitor.scala | Scala | mit | 7,412 |
// Copyright 2014,2015,2016,2017,2018,2019,2020 Commonwealth Bank of Australia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package commbank.grimlock.scalding.content
import commbank.grimlock.framework.{ Persist => FwPersist }
import commbank.grimlock.framework.content.{ Contents => FwContents, IndexedContents => FwIndexedContents, Content }
import commbank.grimlock.framework.environment.tuner.{ Default, Tuner }
import commbank.grimlock.framework.position.Position
import commbank.grimlock.scalding.environment.Context
import commbank.grimlock.scalding.Persist
import shapeless.HList
/** Rich wrapper around a `TypedPipe[Content]`. */
case class Contents(data: Context.U[Content]) extends FwContents[Context] with Persist[Content] {
  /**
   * Persists the contents as text by delegating to `Persist.saveText`.
   *
   * @param context the scalding operating context
   * @param file    name of the file to write to
   * @param writer  converts a `Content` into its textual representation
   * @param tuner   tuner for the save operation (defaults to `Default()`)
   * @return the pipe of `Content` (per the declared return type), enabling further chaining
   */
  def saveAsText[
    T <: Tuner
  ](
    context: Context,
    file: String,
    writer: FwPersist.TextWriter[Content],
    tuner: T = Default()
  )(implicit
    ev: FwPersist.SaveAsTextTuner[Context.U, T]
  ): Context.U[Content] = saveText(context, file, writer, tuner)
}
/** Rich wrapper around a `TypedPipe[(Position[P], Content)]`. */
case class IndexedContents[
  P <: HList
](
  data: Context.U[(Position[P], Content)]
) extends FwIndexedContents[P, Context]
  with Persist[(Position[P], Content)] {
  /**
   * Persists the indexed contents as text by delegating to `Persist.saveText`.
   *
   * @param context the scalding operating context
   * @param file    name of the file to write to
   * @param writer  converts a `(Position[P], Content)` pair into its textual representation
   * @param tuner   tuner for the save operation (defaults to `Default()`)
   * @return the pipe of `(Position[P], Content)` (per the declared return type), enabling further chaining
   */
  def saveAsText[
    T <: Tuner
  ](
    context: Context,
    file: String,
    writer: FwPersist.TextWriter[(Position[P], Content)],
    tuner: T = Default()
  )(implicit
    ev: FwPersist.SaveAsTextTuner[Context.U, T]
  ): Context.U[(Position[P], Content)] = saveText(context, file, writer, tuner)
}
| CommBank/grimlock | grimlock-core/src/main/scala/commbank/grimlock/scalding/Content.scala | Scala | apache-2.0 | 2,063 |
package java.security

/**
 * Javalib stub for `java.security.AccessControlException`.
 *
 * Mirrors the JDK type hierarchy: in the JDK this class extends
 * `SecurityException`, so code catching `SecurityException` must also catch
 * it. Previously this stub extended plain `Exception`, which broke that
 * contract. The constructor remains private, keeping the stub
 * non-instantiable, as before.
 */
final class AccessControlException private () extends SecurityException
| cedricviaccoz/scala-native | javalib/src/main/scala/java/security/AccessControlException.scala | Scala | bsd-3-clause | 87 |
package structures
import simulacrum.typeclass
/**
 * Type class that describes functors which have a lawful `Apply` instance and
 * that support a `pure` method which adheres to the laws described in
 * `structures.laws.ApplicativeLaws`.
 *
 * Also known as idioms.
 *
 * @see http://strictlypositive.org/IdiomLite.pdf
 */
@typeclass trait Applicative[F[_]] extends Any with Apply[F] { self =>
  /** Lifts the value `a` into the functor `F`. */
  def pure[A](a: A): F[A]
  // `map` is derivable here: lift `f` with `pure`, then apply it via `Apply#apply`.
  override def map[A, B](fa: F[A])(f: A => B): F[B] =
    apply(fa)(pure(f))
  /** Composes this applicative with `G`, yielding an applicative over nested `F[G[_]]` values. */
  def compose[G[_]: Applicative]: Applicative[Lambda[X => F[G[X]]]] =
    new Applicative.Composite[F, G] {
      def F = self
      def G = Applicative[G]
    }
}
object Applicative {
  /**
   * Applicative instance for the composition of two applicatives:
   * `pure` nests `G.pure` inside `F.pure`; the `Apply` operations
   * come from `Apply.Composite`.
   */
  trait Composite[F[_], G[_]] extends Any with Applicative[Lambda[X => F[G[X]]]] with Apply.Composite[F, G] {
    def F: Applicative[F]
    def G: Applicative[G]
    def pure[A](a: A): F[G[A]] = F.pure(G.pure(a))
  }
}
| mpilquist/Structures | core/shared/src/main/scala/structures/Applicative.scala | Scala | bsd-3-clause | 909 |
/* ____ __ ____ ____ ____,,___ ____ __ __ ____
* ( _ \\ /__\\ (_ )(_ _)( ___)/ __) ( _ \\( )( )( _ \\ Read
* ) / /(__)\\ / /_ _)(_ )__) \\__ \\ )___/ )(__)( ) _ < README.txt
* (_)\\_)(__)(__)(____)(____)(____)(___/ (__) (______)(____/ LICENSE.txt
*/
package razie.diesel.expr
import razie.diesel.dom.RDOM.P
import razie.diesel.dom._
/** A "function" call expression: built-in functions and msg functions (executed in the same engine, synchronously). */
case class LambdaFuncExpr(argName: String, ex: Expr, parms: List[RDOM.P] = Nil) extends Expr {

  override def getType: WType = ex.getType

  override def expr = s"$argName=>${ex.toDsl}"

  // Evaluate and unwrap to the raw calculated value.
  override def apply(v: Any)(implicit ctx: ECtx) = applyTyped(v).calculatedValue

  // Bind the lambda argument to `v` in a child context, then evaluate the body there.
  override def applyTyped(v: Any)(implicit ctx: ECtx): P = {
    val lambdaCtx = new StaticECtx(List(P.fromTypedValue(argName, v)), Some(ctx))
    ex.applyTyped(v)(lambdaCtx)
  }

  override def toDsl = expr + parms.mkString("(", ",", ")")

  override def toHtml = tokenValue(toDsl)
}
| razie/diesel-rx | diesel/src/main/scala/razie/diesel/expr/LambdaFuncExpr.scala | Scala | apache-2.0 | 1,046 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.hexagrammatic.cruciform
import StreamUtils.copyHandler
import StreamUtils.FunctionFilterStream
import StreamUtils.NullStreamHandler
import java.io.{ByteArrayOutputStream, InputStream, OutputStream}
import java.security._
import java.security.cert.Certificate
import javax.crypto.Cipher
import javax.crypto.Cipher._
import javax.crypto.CipherInputStream
import javax.crypto.SecretKey
import javax.crypto.spec.IvParameterSpec
trait Ciphers extends Core with StreamConversions {
  //Maps the default cipher type for a given key type
  private[this] val CipherForKeyType = Map(
    "AES" -> "AES/CBC/PKCS5Padding",
    "DES" -> "DES/CBC/PKCS5Padding",
    "RSA" -> "RSA/ECB/PKCS1Padding")
  //Maps the default signature type for a given key type
  private[this] val SignatureForKeyType = Map(
    "RSA" -> "SHA256withRSA",
    "DSA" -> "SHA1withDSA")
  /**
   * Looks up the default algorithm for the given key's algorithm in `map`.
   *
   * @throws NoSuchAlgorithmException when the key's algorithm has no default entry
   */
  private def algorithmForKey(key: Key, map: Map[String, String]): String =
    map getOrElse(key.getAlgorithm,
      throw new NoSuchAlgorithmException(s"Cipher not found for key algorithm " + key.getAlgorithm))
  /**
   * Creates a `Cipher` for the given key: uses `algorithm` when supplied,
   * otherwise the key-type default; obtained from the given JCE provider.
   */
  private def createCipher(algorithm: Option[String], key: Key, provider: OptionalProvider): Cipher = {
    val foundAlgorithm = algorithm getOrElse algorithmForKey(key, CipherForKeyType)
    fromProvider[Cipher](
      provider,
      (p: Provider) => Cipher getInstance(foundAlgorithm, p),
      (s: String) => Cipher getInstance(foundAlgorithm, s)
    )
  }
  /**
   * Creates a `Signature` for the given key: uses `algorithm` when supplied,
   * otherwise the key-type default; obtained from the given JCE provider.
   */
  private def createSignature(algorithm: Option[String], key: Key, provider: OptionalProvider): Signature = {
    val foundAlgorithm = algorithm getOrElse algorithmForKey(key, SignatureForKeyType)
    fromProvider[Signature](
      provider,
      (p: Provider) => Signature getInstance(foundAlgorithm, p),
      (s: String) => Signature getInstance(foundAlgorithm, s)
    )
  }
  /**
   * Wraps `data` in a filter stream that feeds every byte read through
   * `signer.update`, so signing/verifying happens as a side effect of
   * streaming the data.
   */
  private def makeSigningFilterStream(data: InputStream, signer: Signature): FunctionFilterStream = {
    new FunctionFilterStream(
      data,
      (b: Byte) => signer.update(b),
      Option((a: Array[Byte], off: Int, len: Int) => signer.update(a, off, len))
    )
  }
  /** Encryption operation for public-key (asymmetric) ciphers; run it via `to`/`asBytes`/`asString` from `Writeable`. */
  class AsymmetricEncryptOperation(
    data: InputStream,
    key: Key,
    algorithm: Option[String] = None,
    provider: OptionalProvider = DefaultProvider) extends Writeable {
    /** Runs the encryption, streaming ciphertext into `out`, and returns `out`. */
    def to[T <: OutputStream](out: T): T = {
      val cipher = createCipher(algorithm, key, provider)
      cipher init(ENCRYPT_MODE, key)
      copyHandler(out)(new CipherInputStream(data, cipher))
      out
    }
    /**
     * Sets the algorithm to use with this asymmetric encryption.
     */
    def withAlgorithm(algorithm: String): AsymmetricEncryptOperation =
      new AsymmetricEncryptOperation(data, key, Option(algorithm), provider)
    /**
     * Sets the JCE provider to use with this asymmetric encryption.
     */
    def withProvider(provider: OptionalProvider): AsymmetricEncryptOperation =
      new AsymmetricEncryptOperation(data, key, algorithm, provider)
  }
  /** Encryption operation for symmetric ciphers; results carry the cipher's init vector (when one was generated). */
  class SymmetricEncryptOperation(
    data: InputStream,
    key: Key,
    algorithm: Option[String] = None,
    provider: OptionalProvider = DefaultProvider) {
    /** Runs the encryption into `out`; returns `out` together with the IV the cipher generated, if any. */
    def to[T <: OutputStream](out: T): (T, Option[Array[Byte]]) = {
      val cipher = createCipher(algorithm, key, provider)
      cipher init(ENCRYPT_MODE, key)
      copyHandler(out)(new CipherInputStream(data, cipher))
      (out, Option(cipher.getIV))
    }
    /** Runs the encryption, returning the ciphertext bytes and the optional IV. */
    def asBytes:(Array[Byte], Option[Array[Byte]]) = {
      val (out, iv) = to(new ByteArrayOutputStream)
      (out.toByteArray, iv)
    }
    /** Runs the encryption, returning the ciphertext as a platform-default-charset String and the optional IV. */
    def asString:(String, Option[Array[Byte]]) = {
      val (bytes, iv) = asBytes
      (new String(bytes), iv)
    }
    /**
     * Sets the algorithm to use with this symmetric encryption.
     */
    def withAlgorithm(algorithm: String): SymmetricEncryptOperation =
      new SymmetricEncryptOperation(data, key, Option(algorithm), provider)
    /**
     * Sets the JCE provider to use with this symmetric encryption.
     */
    def withProvider(provider: OptionalProvider): SymmetricEncryptOperation =
      new SymmetricEncryptOperation(data, key, algorithm, provider)
  }
  /** Builder step: the data is set; choose the key (certificate, public key, secret key, or key pair). */
  class EncryptAskForKey(data: InputStream) {
    /**
     * Sets the certificate to use when performing this encryption operation and sets the operation
     * into asymmetric mode. Follow this statement with asymmetric encryption options.
     */
    def using(cert: Certificate): AsymmetricEncryptOperation = this using (cert.getPublicKey)
    /**
     * Sets the public to use when performing this encryption operation and sets the operation
     * into asymmetric mode. Follow this statement with asymmetric encryption options.
     */
    def using(key: PublicKey): AsymmetricEncryptOperation = new AsymmetricEncryptOperation(data, key)
    /**
     * Sets the symmetric key to use when performing this encryption operation and sets the operation
     * into symmetric mode. Follow this statement with symmetric encryption options.
     */
    def using(key: SecretKey): SymmetricEncryptOperation = new SymmetricEncryptOperation(data, key)
    /**
     * Sets the public key to use when performing this encryption operation from the provided keypair
     * and sets the into asymmetric mode. Follow this statement with asymmetric encryption options.
     */
    def using(pair: KeyPair): AsymmetricEncryptOperation = this using (pair.getPublic)
  }
  /** Builder step: an asymmetric key is set; supply the data to encrypt. */
  class AsymmetricEncryptAskForData(key: PublicKey) {
    /**
     * Sets the data to encrypt. Follow this statement with asymmetric encryption options.
     */
    def data(data: InputStream): AsymmetricEncryptOperation = new AsymmetricEncryptOperation(data, key)
  }
  /** Builder step: a symmetric key is set; supply the data to encrypt. */
  class SymmetricEncryptAskForData(key: SecretKey) {
    /**
     * Sets the data to encrypt. Follow this statement with symmetric encryption options.
     */
    def data(data: InputStream): SymmetricEncryptOperation = new SymmetricEncryptOperation(data, key)
  }
  /** Entry builder for `encrypt`: either the data or the key may be supplied first. */
  class EncryptAskForDataOrKey {
    /**
     * Sets the data to encrypt. Follow this statement with `using <key>`.
     */
    def data(data: InputStream): EncryptAskForKey = new EncryptAskForKey(data)
    /**
     * Sets the certificate to use when performing this encryption operation and sets the operation
     * into asymmetric mode. Follow this statement with `data <stream>`.
     */
    def using(cert: Certificate): AsymmetricEncryptAskForData = this using (cert.getPublicKey)
    /**
     * Sets the symmetric key to use when performing this encryption operation and sets the operation
     * into symmetric mode. Follow this statement with `data <stream>`.
     */
    def using(key: SecretKey): SymmetricEncryptAskForData = new SymmetricEncryptAskForData(key)
    /**
     * Sets the public key to use when performing this encryption operation and sets the operation
     * into asymmetric mode. Follow this statement with `data <stream>`.
     */
    def using(key: PublicKey): AsymmetricEncryptAskForData = new AsymmetricEncryptAskForData(key)
    /**
     * Sets the public key to use when performing this encryption operation from the provided keypair
     * and sets the operation into asymmetric mode. Follow this statement with `data <stream>`.
     */
    def using(pair: KeyPair): AsymmetricEncryptAskForData = this using (pair.getPublic)
  }
  /**
   * Starts an encryption operation. Follow this statement with `data <stream>` or `using <key>`.
   */
  def encrypt: EncryptAskForDataOrKey = new EncryptAskForDataOrKey
class DecryptOperation(
data: InputStream,
key: Key,
initVector: Option[Array[Byte]] = None,
algorithm: Option[String] = None,
provider: OptionalProvider = DefaultProvider) extends Writeable {
/**
* Performs the decryption and writes the plaintext out to the provided stream.
* Use `asBytes` or `asString` to return the plaintext as an Array[Byte] or Stirng, repsectively.
*/
def to[T <: OutputStream](out: T): T = {
val cipher = createCipher(algorithm, key, provider)
val spec = initVector match {
case Some(iv) => new IvParameterSpec(iv)
case None => null
}
cipher init(DECRYPT_MODE, key, spec)
copyHandler(out)(new CipherInputStream(data, cipher))
out
}
/**
* Sets the algorithm to use in this decryption.
*/
def withAlgorithm(algorithm: String): DecryptOperation =
new DecryptOperation(data, key, initVector, Option(algorithm), provider)
/**
* Sets the init vector to use in this decryption.
*/
def withInitVector(iv: Array[Byte]): DecryptOperation =
new DecryptOperation(data, key, Option(iv), algorithm, provider)
/**
* Sets the JCE provider to use in this decryption.
*/
def withProvider(provider: OptionalProvider): DecryptOperation =
new DecryptOperation(data, key, initVector, algorithm, provider)
}
  /** Builder step: the data is set; supply the key. */
  class DecryptAskForKey(data: InputStream) {
    /**
     * Sets the key to use in this decryption. Follow this statement with decrypt options.
     */
    def using(key: Key): DecryptOperation = new DecryptOperation(data, key)
    /**
     * Sets the key from a keypair to use in this decryption. Follow this statement with decrypt options.
     */
    def using(pair: KeyPair): DecryptOperation = this using pair.getPrivate
  }
  /** Builder step: the key is set; supply the data to decrypt. */
  class DecryptAskForData(key: Key) {
    /**
     * Sets the data to decrypt. Follow this statement with decrypt options.
     */
    def data(data: InputStream): DecryptOperation = new DecryptOperation(data, key)
  }
  /** Entry builder for `decrypt`: either the data or the key may be supplied first. */
  class DecryptAskForDataOrKey {
    /**
     * Sets the data to decrypt. Follow this statement with `using <key>`.
     */
    def data(data: InputStream): DecryptAskForKey = new DecryptAskForKey(data)
    /**
     * Sets the key to use in this decryption. Follow this statement with `data <stream>`.
     */
    def using(key: Key): DecryptAskForData = new DecryptAskForData(key)
    /**
     * Sets the key from a keypair to use in this decryption. Follow this statement with `data <stream>`.
     */
    def using(pair: KeyPair): DecryptAskForData = this using pair.getPrivate
  }
  /**
   * Starts a decryption operation. Follow this statement with `data <stream>` or `using <key>`.
   */
  def decrypt: DecryptAskForDataOrKey = new DecryptAskForDataOrKey
  /** Signing operation. Configure with `withAlgorithm`/`withProvider`; run with `to`, or `asBytes`/`asString` from `Writeable`. */
  class SignOperation(
    data: InputStream,
    key: PrivateKey,
    algorithm: Option[String] = None,
    provider: OptionalProvider = DefaultProvider) extends Writeable {
    /**
     * Completes the signing operation and writes out the signature bytes to the provided stream.
     * Use `asBytes` or `asString` to return the signature as an Array[Byte] or String, respectively.
     */
    def to[T <: OutputStream](out: T): T = {
      val signer = createSignature(algorithm, key, provider)
      signer initSign(key)
      NullStreamHandler(makeSigningFilterStream(data, signer))
      out write(signer.sign)
      out
    }
    /**
     * Sets the algorithm to use with this signing operation.
     */
    def withAlgorithm(algorithm: String): SignOperation =
      new SignOperation(data, key, Option(algorithm), provider)
    /**
     * Sets the JCE provider to use with this signing operation.
     */
    def withProvider(provider: OptionalProvider): SignOperation =
      new SignOperation(data, key, algorithm, provider)
  }
  /** Builder step: the data is set; supply the signing key. */
  class SignAskForKey(data: InputStream) {
    /**
     * Sets the private key to use when generating the signature. Follow this statement with signing options.
     */
    def using(key: PrivateKey): SignOperation = new SignOperation(data, key)
    /**
     * Sets the private key from a keypair to use when generating the signature.
     * Follow this statement with signing options.
     */
    def using(pair: KeyPair): SignOperation = this using pair.getPrivate
  }
  /** Builder step: the signing key is set; supply the data to sign. */
  class SignAskForData(key: PrivateKey) {
    /**
     * Sets the data to sign. Follow this statement with signing options.
     *
     * @param data the data, as an `InputStream` - implicits provide convenience conversions.
     **/
    def data(data: InputStream): SignOperation = new SignOperation(data, key)
  }
  /** Entry builder for `sign`: either the data or the key may be supplied first. */
  class SignAskForDataOrKey {
    /**
     * Sets the data to sign. Follow this statement with `using <key>`.
     *
     * @param data the data, as an `InputStream` - implicits provide convenience conversions.
     **/
    def data(data: InputStream): SignAskForKey = new SignAskForKey(data)
    /**
     * Sets the private key to use when generating the signature. Follow this statement with `data <stream>`.
     */
    def using(key: PrivateKey): SignAskForData = new SignAskForData(key)
    /**
     * Sets the private key from a keypair to use when generating the signature.
     * Follow this statement with `data <stream>`.
     */
    def using(pair: KeyPair): SignAskForData = this using pair.getPrivate
  }
  /**
   * Starts a signing operation, writing out the signature to another stream.
   *
   * Follow this statement with either `using <key>` or `data <stream>`.
   *
   */
  def sign: SignAskForDataOrKey = new SignAskForDataOrKey
class VerifyOperation(
signature: InputStream,
key: PublicKey,
algorithm: Option[String] = None,
provider: OptionalProvider = DefaultProvider) {
/**
* Sets the `InputStream` that contains the data to use in signature verification and then performs the
* verification, returning `true` if the verification succeeds.
* @param data the data, as an `InputStream` - implicits provide convenience conversions.
*/
def from(data: InputStream): Boolean = {
val sigbytes = new ByteArrayOutputStream
val signer = createSignature(algorithm, key, provider)
copyHandler(sigbytes)(signature)
signer initVerify(key)
NullStreamHandler(makeSigningFilterStream(data, signer))
try {
signer verify(sigbytes.toByteArray)
} catch {
case ex: SignatureException => false
}
}
/**
* Sets the algorithm to use when verifying this signature.
*/
def withAlgorithm(algorithm: String): VerifyOperation =
new VerifyOperation(signature, key, Option(algorithm), provider)
/**
* Sets the JCE provider to use when verifying this signature.
*/
def withProvider(provider: OptionalProvider): VerifyOperation =
new VerifyOperation(signature, key, algorithm, provider)
}
  /** Builder step: the signature is set; supply the verification key. */
  class VerifyAskForKey(signature: InputStream) {
    /**
     * Adds the certificate to use in verification. Follow this statement with signature options.
     */
    def using(cert: Certificate): VerifyOperation = this using cert.getPublicKey
    /**
     * Adds the public key to use in verification. Follow this statement with signature options.
     */
    def using(key: PublicKey): VerifyOperation = new VerifyOperation(signature, key)
    /**
     * Adds public key from a keypair to use in verification. Follow this statement with signature options.
     */
    def using(pair: KeyPair): VerifyOperation = this using pair.getPublic
  }
  /** Builder step: the verification key is set; supply the signature. */
  class VerifyAskForSignature(key: PublicKey) {
    /**
     * Adds the signature to verify. Follow this statement with signature options.
     * @param signature the signature, as an `InputStream` - implicits provide conversions.
     */
    def signature(signature: InputStream): VerifyOperation = new VerifyOperation(signature, key)
  }
  /** Entry builder for `verify`: either the signature or the key may be supplied first. */
  class VerifyAskForSignatureOrKey {
    /**
     * Adds the signature to verify. Follow this statement with `using <key>`
     * @param signature the signature, as an `InputStream` - implicits provide convenience conversions.
     */
    def signature(signature: InputStream): VerifyAskForKey = new VerifyAskForKey(signature)
    /**
     * Adds the certificate to use in verification. Follow this statement with `signature <stream>`.
     */
    def using(cert: Certificate): VerifyAskForSignature = this using cert.getPublicKey
    /**
     * Adds the public key to use in verification. Follow this statement with `signature <stream>`.
     */
    def using(key: PublicKey): VerifyAskForSignature = new VerifyAskForSignature(key)
    /**
     * Adds the public key from a keypair to use in verification. Follow this statement with `signature <stream>`.
     */
    def using(pair: KeyPair): VerifyAskForSignature = this using pair.getPublic
  }
  /**
   * Starts a verification operation, returning `true` if the verification is
   * successful, `false` otherwise.
   *
   * Follow this statement with either `using <key>` or `signature <stream>`.
   *
   */
  def verify: VerifyAskForSignatureOrKey = new VerifyAskForSignatureOrKey
} | bdimmick/cruciform | src/main/scala/com/hexagrammatic/cruciform/Ciphers.scala | Scala | apache-2.0 | 17,937 |
import sbt._
import Keys._
import sbtassembly.Plugin._
import AssemblyKeys._
import org.scalatra.sbt._
import org.scalatra.sbt.PluginKeys._
import com.mojolly.scalate.ScalatePlugin._
import ScalateKeys._
/**
 * sbt build definition (legacy `Build`-trait style) for the DefW project:
 * a shared `defw-util` library plus a Scalatra webapp and a Jetty
 * websocket module, both depending on the util module.
 */
object DefW extends Build {
  // Coordinates shared by every module.
  val Organization = "defw"
  val Name = "DefW"
  val Version = "0.4.0"
  val ScalaVersion = "2.10.2"
  // Settings common to all modules: coordinates, resolvers,
  // test dependencies and compiler flags.
  val baseSettings = Seq(
    version := Version,
    organization := Organization,
    scalaVersion := ScalaVersion,
    name := Name,
    resolvers += Classpaths.typesafeReleases,
    libraryDependencies ++= Seq(
      "junit" % "junit" % "4.8.1" % "test" withSources(),
      "org.scalatest" %% "scalatest" % "1.9.1" % "test",
      "org.scala-tools.testing" %% "specs" % "1.6.9" % "test" withSources()
    ),
    scalacOptions ++= Seq("-encoding", "utf8", "-unchecked", "-deprecation")
  )
  // Shared utility library: logging, IO, HTTP, JDBC, Redis, eval.
  lazy val defwUtil = Project(
    id = "defw-util",
    base = file("./util"),
    settings = Project.defaultSettings ++
      baseSettings ++
      assemblySettings
  ).settings(
    name := "defw-util",
    libraryDependencies ++= Seq(
      "org.slf4j" % "slf4j-api" % "1.7.5",
      "ch.qos.logback" % "logback-classic" % "1.0.13",
      "com.typesafe" %% "scalalogging-slf4j" % "1.0.1",
      "commons-io" % "commons-io" % "2.4",
      "org.apache.httpcomponents" % "httpclient" % "4.3.1",
      "postgresql" % "postgresql" % "9.1-901.jdbc4",
      "redis.clients" % "jedis" % "2.1.0",
      "com.twitter" % "util-eval_2.10" % "6.10.0"
    )
  )
  // Scalatra web application module with Scalate template configuration.
  lazy val defwWebapp = Project(
    id = "defw-webapp",
    base = file("./webapp"),
    settings = Project.defaultSettings ++
      baseSettings ++
      ScalatraPlugin.scalatraWithJRebel ++
      scalateSettings
  ).settings(
    name := "defw-webapp",
    libraryDependencies ++= Seq(
      "org.scalatra" %% "scalatra" % "2.2.2",
      "org.scalatra" %% "scalatra-scalate" % "2.2.2",
      "org.scalatra" %% "scalatra-specs2" % "2.2.2" % "test",
      "ch.qos.logback" % "logback-classic" % "1.0.6" % "runtime",
      "org.eclipse.jetty" % "jetty-webapp" % "8.1.8.v20121106" % "container",
      "org.eclipse.jetty.orbit" % "javax.servlet" % "3.0.0.v201112011016" % "container;provided;test" artifacts (Artifact("javax.servlet", "jar", "jar"))
    ),
    // Scalate template precompilation: templates live under WEB-INF/templates.
    scalateTemplateConfig in Compile <<= (sourceDirectory in Compile){ base =>
      Seq(
        TemplateConfig(
          base / "webapp" / "WEB-INF" / "templates",
          Seq.empty, /* default imports should be added here */
          Seq(
            Binding("context", "_root_.org.scalatra.scalate.ScalatraRenderContext", importMembers = true, isImplicit = true)
          ), /* add extra bindings here */
          Some("templates")
        )
      )
    }
  ).dependsOn(defwUtil)
  // Jetty websocket module.
  lazy val defwWebsocket = Project(
    id = "defw-websocket",
    base = file("./websocket"),
    settings = Project.defaultSettings ++
      baseSettings ++
      ScalatraPlugin.scalatraWithJRebel
  ).settings(
    name := "defw-websocket",
    libraryDependencies ++= Seq(
      "org.eclipse.jetty" % "jetty-webapp" % "8.1.12.v20130726" % "container",
      "org.eclipse.jetty" % "jetty-websocket" % "8.1.12.v20130726",
      "org.eclipse.jetty.orbit" % "javax.servlet" % "3.0.0.v201112011016" % "container;provided;test" artifacts (Artifact("javax.servlet", "jar", "jar"))
    )
  ).dependsOn(defwUtil)
}
| takahish0306/scala-defw | project/Build.scala | Scala | apache-2.0 | 3,343 |
package isabelle.eclipse.launch.tabs
import org.eclipse.jface.resource.{JFaceResources, LocalResourceManager}
import isabelle.eclipse.launch.{IsabelleLaunchImages, IsabelleLaunchPlugin}
/**
 * Tab to configure additional directories for Session lookup.
 *
 * Sets branding options only - the actual tab content is provided via LaunchComponents.
 *
 * @author Andrius Velykis
 */
class SessionDirsTab(components: List[LaunchComponent[_]])
  extends LaunchComponentTab(components) {
  /** Tab label shown in the launch configuration dialog. */
  override def getName = "Session Source"
  /** Unique tab identifier, namespaced under the plugin id. */
  override def getId = IsabelleLaunchPlugin.plugin.pluginId + ".sessionDirsTab"
  // cannot access a Control here, so dispose manually in #dispose()
  private val resourceManager = new LocalResourceManager(JFaceResources.getResources)
  /** Tab icon; created through resourceManager so it is released in dispose(). */
  override def getImage = resourceManager.createImageWithDefault(
    IsabelleLaunchImages.TAB_SESSION_DIRS)
  // Release resources created via resourceManager before delegating disposal.
  override def dispose() {
    resourceManager.dispose()
    super.dispose()
  }
}
| andriusvelykis/isabelle-eclipse | isabelle.eclipse.launch/src/isabelle/eclipse/launch/tabs/SessionDirsTab.scala | Scala | epl-1.0 | 970 |
package rocks.muki.graphql
import rocks.muki.graphql.codegen._
import rocks.muki.graphql.schema.SchemaLoader
import sbt.Keys._
import sbt.{Result => _, _}
import scala.meta._
/**
 * sbt auto-plugin that generates Scala code from GraphQL query documents,
 * validated against a GraphQL schema. Wires a source generator into both
 * the Compile and Test configurations.
 */
object GraphQLCodegenPlugin extends AutoPlugin {
  // Activates only where the base GraphQL plugin is enabled.
  override def requires: Plugins = GraphQLPlugin
  // Keys and aliases exposed to build definitions.
  object autoImport {
    val graphqlCodegenSchema = taskKey[File]("GraphQL schema file")
    val graphqlCodegenQueries = taskKey[Seq[File]]("GraphQL query documents")
    val graphqlCodegenJson =
      taskKey[JsonCodeGen]("Configure a json decoder code generator")
    val graphqlCodegenStyle =
      settingKey[CodeGenStyles.Style]("The resulting code generation style")
    val graphqlCodegenPackage =
      settingKey[String]("Package for the generated code")
    val graphqlCodegenImports =
      settingKey[Seq[String]]("Additional imports to add to the generated code")
    val graphqlCodegenPreProcessors =
      taskKey[Seq[PreProcessor]]("Preprocessors that should be applied before the graphql file is parsed")
    val graphqlCodegen = taskKey[Seq[File]]("Generate GraphQL API code")
    val Apollo = CodeGenStyles.Apollo
    val Sangria = CodeGenStyles.Sangria
    val JsonCodec = JsonCodeGens
  }
  import autoImport._
  /**
   * Builds the per-configuration settings: query discovery, target
   * directory, preprocessors and the code-generation task itself.
   */
  def codegenTask(config: Configuration) =
    inConfig(config)(
      Seq(
        sourceGenerators += graphqlCodegen.taskValue,
        sourceDirectory in graphqlCodegen := sourceDirectory.value / "graphql",
        sourceDirectories in graphqlCodegen := List((sourceDirectory in (config, graphqlCodegen)).value),
        target in graphqlCodegen := sourceManaged.value / "sbt-graphql",
        // Collect *.graphql files honoring the include/exclude filters.
        graphqlCodegenQueries := Defaults
          .collectFiles(
            sourceDirectories in graphqlCodegen,
            includeFilter in graphqlCodegen,
            excludeFilter in graphqlCodegen
          )
          .value,
        graphqlCodegenPreProcessors in config := List(
          PreProcessors.magicImports((sourceDirectories in (config, graphqlCodegen)).value)
        ),
        // The generator task: load schema, assemble the context and run the
        // configured code-generation style.
        graphqlCodegen in config := {
          val log = streams.value.log
          val targetDir = (target in (config, graphqlCodegen)).value
          //val generator = ScalametaGenerator((name in graphqlCodegen).value)
          val queries = graphqlCodegenQueries.value
          val schemaFile = graphqlCodegenSchema.value
          log.info(s"Generate code for ${queries.length} queries")
          log.info(s"Use schema $schemaFile for query validation")
          val packageName = graphqlCodegenPackage.value
          val schema =
            SchemaLoader.fromFile(schemaFile).loadSchema()
          val imports = graphqlCodegenImports.value
          val jsonCodeGen = graphqlCodegenJson.value
          val preProcessors = graphqlCodegenPreProcessors.value
          log.info(s"Generating json decoding with: ${jsonCodeGen.getClass.getSimpleName}")
          log.info(s"Adding imports: ${imports.mkString(",")}")
          val moduleName = (name in (config, graphqlCodegen)).value
          val context = CodeGenContext(
            schema,
            targetDir,
            queries,
            packageName,
            moduleName,
            jsonCodeGen,
            imports,
            preProcessors,
            log
          )
          graphqlCodegenStyle.value(context)
        }
      )
    )
  // Defaults (Apollo style, schema.graphql in resources, no json codec)
  // plus the codegen task wired for Compile and Test.
  override def projectSettings: Seq[Setting[_]] =
    Seq(
      graphqlCodegenStyle := Apollo,
      graphqlCodegenSchema := (resourceDirectory in Compile).value / "schema.graphql",
      graphqlCodegenJson := JsonCodeGens.None,
      includeFilter in graphqlCodegen := "*.graphql",
      excludeFilter in graphqlCodegen := HiddenFileFilter || "*.fragment.graphql",
      graphqlCodegenPackage := "graphql.codegen",
      graphqlCodegenImports := Seq.empty,
      name in graphqlCodegen := "GraphQLCodegen",
      graphqlCodegen := (graphqlCodegen in Compile).value
    ) ++ codegenTask(Compile) ++ codegenTask(Test)
}
| muuki88/sbt-graphql | src/main/scala/rocks/muki/graphql/GraphQLCodegenPlugin.scala | Scala | apache-2.0 | 3,947 |
package com.gilt.handlebars.scala
import java.io.File
import com.gilt.handlebars.scala.binding.{Binding, BindingFactory}
import com.gilt.handlebars.scala.helper.Helper
import com.gilt.handlebars.scala.parser.{HandlebarsGrammar, Program}
import com.gilt.handlebars.scala.partial.PartialHelper
import scala.io.Source
/**
 * A [[Handlebars]] that remembers where it was loaded from and can be
 * re-created from that source.
 */
trait CachingHandlebars[T] extends Handlebars[T] {
  /** Absolute path of the template file, when loaded from disk. */
  def sourceFile: Option[String]
  /** Rebuilds this handlebars from `sourceFile`. */
  def reload: Handlebars[T]
}
object CachingHandlebars {
  /**
   * Parses `template` and builds a caching [[Handlebars]] from it.
   *
   * @param template   raw handlebars template text
   * @param partials   partial templates referenced by the template
   * @param helpers    helper functions available to the template
   * @param sourceFile absolute path the template was loaded from, when known
   */
  def apply[T](
    template: String,
    partials: Map[String, Handlebars[T]],
    helpers: Map[String, Helper[T]],
    sourceFile: Option[String])(implicit f: BindingFactory[T]): Handlebars[T] = {
    val parseResult = HandlebarsGrammar(template)

    parseResult.map {
      program =>
        CachingHandlebarsImpl(program, partials, helpers, sourceFile)
    }.getOrElse(sys.error("Could not parse template:\n\n%s".format(parseResult.toString)))
  }

  /**
   * Loads a template (and any partials it references) from `file`.
   * Fails via `sys.error` when the file does not exist or cannot be loaded.
   */
  def apply[T](
    file: File,
    helpers: Map[String, Helper[T]] = Map.empty[String, Helper[T]])(implicit contextFactory: BindingFactory[T]): Handlebars[T] = {
    if (file.exists()) {
      try {
        val partials = PartialHelper.findAllPartials(file).mapValues(Handlebars(_))
        // Close the Source after reading; `Source.fromFile(file).mkString`
        // would leak the underlying file handle.
        val source = Source.fromFile(file)
        val template = try source.mkString finally source.close()
        apply(template, partials, helpers, Some(file.getAbsolutePath))
      } catch {
        case ex: Exception => sys.error("Error while loading template\n%s".format(ex))
      }
    } else {
      sys.error("Could not load template from file: %s".format(file.getAbsolutePath))
    }
  }
}
case class CachingHandlebarsImpl[T](
  program: Program,
  partials: Map[String, Handlebars[T]],
  helpers: Map[String, Helper[T]],
  sourceFile: Option[String])(implicit f: BindingFactory[T]) extends CachingHandlebars[T] {
  // TODO: Warn if we getOrElse is called. Didn't know how to re-load files.
  // TODO: Use handlebars builder to construct the new instance?
  // Reloads from the original file when a source path is known;
  // otherwise hands back this instance unchanged.
  def reload = sourceFile.map(file => CachingHandlebars.apply(new File(file))).getOrElse(this)
  // NOTE(review): stub — rendering is not implemented here and this always
  // returns the empty string (the HandlebarsVisitor call is still missing).
  def apply(
    context: Binding[T],
    data: Map[String, Binding[T]] = Map.empty[String, Binding[T]],
    partials: Map[String, Handlebars[T]] = Map.empty[String, Handlebars[T]],
    helpers: Map[String, Helper[T]] = Map.empty[String, Helper[T]])(implicit c: BindingFactory[T]): String = "" // Call to HandlebarsVisitor
} | QiaoBuTang/handlebars.scala | src/main/scala/com/gilt/handlebars/scala/CachingHandlebars.scala | Scala | apache-2.0 | 2,314 |
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.data.hibernate.model
import org.beangle.data.model.IntId
import org.beangle.commons.lang.time.WeekState
import org.beangle.commons.lang.time.HourMinute
import org.beangle.commons.lang.time.WeekDay
/** Bean exercising the Hibernate mappings of the time-related value types. */
class TimeBean extends IntId {
  // time of day as hour:minute
  var time: HourMinute = _
  // day of the week
  var weekday: WeekDay = _
  // weeks in which this time applies (see WeekState) — TODO confirm semantics
  var state: WeekState = _
}
| beangle/data | hibernate/src/test/scala/org/beangle/data/hibernate/model/TimeBean.scala | Scala | lgpl-3.0 | 1,058 |
/*
* Shadowsocks - A shadowsocks client for Android
* Copyright (C) 2015 <max.c.lv@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*
* ___====-_ _-====___
* _--^^^#####// \\\\#####^^^--_
* _-^##########// ( ) \\\\##########^-_
* -############// |\\^^/| \\\\############-
* _/############// (@::@) \\\\############\\_
* /#############(( \\\\// ))#############\\
* -###############\\\\ (oo) //###############-
* -#################\\\\ / VV \\ //#################-
* -###################\\\\/ \\//###################-
* _#/|##########/\\######( /\\ )######/\\##########|\\#_
* |/ |#/\\#/\\#/\\/ \\#/\\##\\ | | /##/\\#/ \\/\\#/\\#/\\#| \\|
* ` |/ V V ` V \\#\\| | | |/#/ V ' V V \\| '
* ` ` ` ` / | | | | \\ ' ' ' '
* ( | | | | )
* __\\ | | | | /__
* (vvv(VVV)(VVV)vvv)
*
* HERE BE DRAGONS
*
*/
package com.github.shadowsocks
import java.io.{File, FileDescriptor, IOException}
import java.util.concurrent.Executors
import android.net.{LocalServerSocket, LocalSocket, LocalSocketAddress}
import android.util.Log
import com.github.shadowsocks.ShadowsocksApplication.app
object ShadowsocksVpnThread {
  // Reflective handle to the hidden FileDescriptor.getInt$() accessor
  // (Android internal API), used to extract the raw integer fd received
  // as ancillary data over the local socket.
  val getInt = classOf[FileDescriptor].getDeclaredMethod("getInt$")
}
/**
 * Worker thread implementing the "protect path" protocol: native processes
 * connect to a Unix-domain socket at PATH and pass a file descriptor as
 * ancillary data; this thread calls VpnService.protect on it and writes a
 * single status byte back (0 = protected, 1 = failed).
 */
class ShadowsocksVpnThread(vpnService: ShadowsocksVpnService) extends Thread {
  import ShadowsocksVpnThread._
  val TAG = "ShadowsocksVpnService"
  // The socket file lives in the app's private data directory.
  lazy val PATH = vpnService.getApplicationInfo.dataDir + "/protect_path"
  @volatile var isRunning: Boolean = true
  @volatile var serverSocket: LocalServerSocket = _
  /** Closes and clears the server socket; safe to call repeatedly. */
  def closeServerSocket() {
    if (serverSocket != null) {
      try {
        serverSocket.close()
      } catch {
        case _: Exception => // ignore
      }
      serverSocket = null
    }
  }
  /** Requests shutdown: stops the accept loop and unblocks accept() by closing the socket. */
  def stopThread() {
    isRunning = false
    closeServerSocket()
  }
  override def run() {
    // Remove a stale socket file from a previous run before binding.
    new File(PATH).delete()
    try {
      val localSocket = new LocalSocket
      localSocket.bind(new LocalSocketAddress(PATH, LocalSocketAddress.Namespace.FILESYSTEM))
      serverSocket = new LocalServerSocket(localSocket.getFileDescriptor)
    } catch {
      case e: IOException =>
        Log.e(TAG, "unable to bind", e)
        app.track(e)
        return
    }
    // Handle up to 4 protect requests concurrently.
    val pool = Executors.newFixedThreadPool(4)
    while (isRunning) {
      try {
        val socket = serverSocket.accept()
        pool.execute(() => {
          try {
            val input = socket.getInputStream
            val output = socket.getOutputStream
            // Protocol: the client sends one byte; the fd to protect arrives
            // as ancillary data on that same message.
            input.read()
            val fds = socket.getAncillaryFileDescriptors
            if (fds.nonEmpty) {
              val fd = getInt.invoke(fds(0)).asInstanceOf[Int]
              val ret = vpnService.protect(fd)
              // Trick to close file descriptor
              System.jniclose(fd)
              if (ret) {
                output.write(0)
              } else {
                output.write(1)
              }
            }
            input.close()
            output.close()
          } catch {
            case e: Exception =>
              Log.e(TAG, "Error when protect socket", e)
              app.track(e)
          }
          // close socket
          try {
            socket.close()
          } catch {
            case _: Exception => // ignore
          }
        })
      } catch {
        case e: IOException =>
          Log.e(TAG, "Error when accept socket", e)
          app.track(e)
          return
      }
    }
  }
}
| liuminzhi/shadowsocks-android | src/main/scala/com/github/shadowsocks/ShadowsocksVpnThread.scala | Scala | gpl-3.0 | 4,384 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.twitter.zipkin.common
/**
* A subset of thrift base types, except [[AnnotationType.Bytes]].
*/
/**
 * A subset of thrift base types, except [[AnnotationType.Bytes]].
 */
object AnnotationType {
  /**
   * Set to 0x01 when key is [[com.twitter.zipkin.Constants.ClientAddr]] or [[com.twitter.zipkin.Constants.ServerAddr]].
   */
  val Bool = AnnotationType(0, "Bool")
  /** No encoding, or type is unknown. */
  val Bytes = AnnotationType(1, "Bytes")
  val I16 = AnnotationType(2, "I16")
  val I32 = AnnotationType(3, "I32")
  val I64 = AnnotationType(4, "I64")
  val Double = AnnotationType(5, "Double")
  /** The only type zipkin v1 supports search against. */
  val String = AnnotationType(6, "String")

  /** Resolves a type from its wire ordinal; unrecognized values fall back to [[String]]. */
  def fromInt(v: Int): AnnotationType = v match {
    case Bool.value => Bool
    case Bytes.value => Bytes
    case I16.value => I16
    case I32.value => I32
    case I64.value => I64
    case Double.value => Double
    case String.value => String
    case _ => String /* Uh... */
  }

  /**
   * Resolves a type from its name.
   *
   * Unrecognized names fall back to [[String]], mirroring [[fromInt]];
   * previously an unknown name threw a MatchError because the match
   * was non-exhaustive.
   */
  def fromName(v: String): AnnotationType = v match {
    case Bool.name => Bool
    case Bytes.name => Bytes
    case I16.name => I16
    case I32.name => I32
    case I64.name => I64
    case Double.name => Double
    case String.name => String
    case _ => String
  }
}

/** A thrift annotation value type: wire ordinal plus human-readable name. */
case class AnnotationType(value: Int, name: String)
| rocwzp/zipkin | zipkin-common/src/main/scala/com/twitter/zipkin/common/AnnotationType.scala | Scala | apache-2.0 | 1,864 |
package org.jetbrains.plugins.scala.codeInspection.collections
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import scala.annotation.tailrec
import scala.collection.mutable.ArrayBuffer
/**
 * Normalized representation of a method invocation: the whole expression,
 * an optional receiver (qualifier), an optional reference to the invoked
 * method, and the argument expressions.
 *
 * @author Nikolay.Tropin
 */
class MethodRepr private (val itself: ScExpression,
  val optionalBase: Option[ScExpression],
  val optionalMethodRef: Option[ScReferenceExpression],
  val args: Seq[ScExpression])
object MethodRepr {
  //method represented by optional base expression, optional method reference and arguments
  def unapply(expr: ScExpression): Option[(ScExpression, Option[ScExpression], Option[ScReferenceExpression], Seq[ScExpression])] = {
    expr match {
      case call: ScMethodCall =>
        // Collect the argument expressions, stripping wrapping expressions.
        val args = call.args match {
          case exprList: ScArgumentExprList => exprList.exprs.map(stripped)
          case _ => Nil
        }
        call.getEffectiveInvokedExpr match {
          // obj(...) sugar for obj.apply(...): the invoked expression itself is the base
          case baseExpr: ScExpression if call.isApplyOrUpdateCall && !call.isUpdateCall =>
            Some(expr, Some(baseExpr), None, args)
          case ref: ScReferenceExpression => Some(expr, ref.qualifier, Some(ref), args)
          case genericCall: ScGenericCall =>
            // e.g. obj.method[T](...): look through the type application
            genericCall.referencedExpr match {
              case ref: ScReferenceExpression => Some(expr, ref.qualifier, Some(ref), args)
              case other => Some(expr, None, None, args)
            }
          case methCall: ScMethodCall => Some(expr, Some(methCall), None, args)
          case other => Some(expr, None, None, args)
        }
      case infix: ScInfixExpr =>
        // a op (b, c): a tuple argument is treated as an argument list
        val args = infix.getArgExpr match {
          case tuple: ScTuple => tuple.exprs
          case _ => Seq(infix.getArgExpr)
        }
        Some(expr, Some(stripped(infix.getBaseExpr)), Some(infix.operation), args)
      case prefix: ScPrefixExpr => Some(expr, Some(stripped(prefix.getBaseExpr)), Some(prefix.operation), Seq())
      case postfix: ScPostfixExpr => Some(expr, Some(stripped(postfix.getBaseExpr)), Some(postfix.operation), Seq())
      case refExpr: ScReferenceExpression =>
        // A bare reference only matches when it is not the invoked part of an
        // enclosing call — those cases are handled via the parent expression.
        refExpr.getParent match {
          case _: ScMethodCall | _: ScGenericCall => None
          case ScInfixExpr(_, `refExpr`, _) => None
          case ScPostfixExpr(_, `refExpr`) => None
          case ScPrefixExpr(`refExpr`, _) => None
          case _ => Some(expr, refExpr.qualifier, Some(refExpr), Seq())
        }
      case genCall: ScGenericCall =>
        // A generic call that is itself invoked is handled by the ScMethodCall case.
        genCall.getParent match {
          case _: ScMethodCall => None
          case _ => genCall.referencedExpr match {
            case ref: ScReferenceExpression => Some(genCall, ref.qualifier, Some(ref), Seq.empty)
            case other => Some(genCall, None, None, Seq.empty)
          }
        }
      case _ => None
    }
  }
  /** Plain factory exposing the private constructor. */
  def apply(itself: ScExpression, optionalBase: Option[ScExpression], optionalMethodRef: Option[ScReferenceExpression], args: Seq[ScExpression]) = {
    new MethodRepr(itself, optionalBase, optionalMethodRef, args)
  }
}
object MethodSeq {
  /**
   * Extractor that collects the chain of method invocations rooted at `expr`,
   * outermost call first, by repeatedly descending into each call's base
   * expression. Yields None when `expr` is not an invocation at all.
   */
  def unapplySeq(expr: ScExpression): Option[Seq[MethodRepr]] = {
    val collected = ArrayBuffer.empty[MethodRepr]
    @tailrec
    def descend(current: ScExpression): Unit = current match {
      case MethodRepr(_, base, methodRef, arguments) =>
        collected += MethodRepr(current, base, methodRef, arguments)
        base match {
          case Some(ScParenthesisedExpr(inner)) => descend(stripped(inner))
          case Some(next) => descend(next)
          case None =>
        }
      case _ =>
    }
    descend(expr)
    if (collected.nonEmpty) Some(collected) else None
  }
}
package manu.tron.bot.impl
import manu.tron.bot._
import manu.tron.common.Vocabulary._
import manu.tron.service.impl._
/**
 * Cake-pattern component wiring a Tron bot that chooses moves with minimax
 * over a Voronoi-region heuristic.
 *
 * Created by manu on 1/26/14.
 */
trait BotDefinitionComponent {
  this: GameBasicLogicServiceComponent
    with GameOperatorServiceComponent
    with VoronoiServiceComponent
    with MinimaxServiceComponent =>

  val botDefinition: BotDefinitionImpl

  class BotDefinitionImpl extends BotDefinition {

    // Search depth for the minimax exploration.
    private val MinimaxDepth = 3

    /** Chooses the next direction by running minimax from the given status. */
    override def nextMove(status: GameStatus) =
      minimaxService.minimax(status, childsFinder, voronoiHeuristic, MinimaxDepth)

    /**
     * Expands a status into its child statuses, one per possible direction
     * of the next player to play; no children once the game is over.
     */
    private val childsFinder: GameStatus => Map[Direction, GameStatus] = { status =>
      if (gameOperatorService.isGameOver(status)) {
        Map.empty
      } else {
        val childPairs = gameBasicLogicService.allDirections.map { direction =>
          direction -> gameOperatorService.applyPlayerMove(status, status.nextPlayerToPlay.head, direction)
        }
        childPairs.toMap
      }
    }

    /**
     * Rates a status for a player: terminal statuses are +/- Int extremes
     * depending on who died; otherwise the score is the difference between
     * the sizes of the two players' Voronoi regions (flood-fill based).
     */
    private val voronoiHeuristic: (GameStatus, PlayerId) => Int = { (status, playerId) =>
      if (gameOperatorService.isGameOver(status)) {
        if (status.deadPlayers.contains(playerId)) Int.MinValue else Int.MaxValue
      } else {
        val regions = voronoiService.computeVoronoiRegions(status, None)
        regions(playerId).size - regions(gameBasicLogicService.otherPlayer(status, playerId)).size
      }
    }
  }
}
| implicitdef/tron | src/main/scala/manu/tron/bot/impl/BotDefinitionComponent.scala | Scala | mit | 1,748 |
/*
Copyright 2014 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding
import com.twitter.algebird.monad.Reader
import com.twitter.algebird.{ Monoid, Monad, Semigroup }
import com.twitter.scalding.cascading_interop.FlowListenerPromise
import com.twitter.scalding.Dsl.flowDefToRichFlowDef
import java.util.concurrent.{ ConcurrentHashMap, LinkedBlockingQueue }
import scala.concurrent.{ Await, Future, ExecutionContext => ConcurrentExecutionContext, Promise }
import scala.util.{ Failure, Success, Try }
import scala.util.control.NonFatal
import cascading.flow.{ FlowDef, Flow }
/**
* Execution[T] represents and computation that can be run and
* will produce a value T and keep track of counters incremented
* inside of TypedPipes using a Stat.
*
* Execution[T] is the recommended way to compose multistep computations
* that involve branching (if/then), intermediate calls to remote
* services, file operations, or looping (e.g. testing for convergence).
*
* Library functions are encouraged to implement functions from
* TypedPipes or ValuePipes to Execution[R] for some result R.
* Refrain from calling run in library code. Let the caller
* of your library call run.
*
* Note this is a Monad, meaning flatMap composes in series as you expect.
* It is also an applicative functor, which means zip (called join
* in some libraries) composes two Executions is parallel. Prefer
* zip to flatMap if you want to run two Executions in parallel.
*/
sealed trait Execution[+T] extends java.io.Serializable {
  import Execution.{ EvalCache, FlatMapped, GetCounters, ResetCounters, Mapped, OnComplete, RecoverWith, Zipped }
  /**
   * Scala uses the filter method in for syntax for pattern matches that can fail.
   * If this filter is false, the result of run will be an exception in the future
   */
  def filter(pred: T => Boolean): Execution[T] =
    flatMap {
      case good if pred(good) => Execution.from(good)
      case failed => Execution.from(sys.error("Filter failed on: " + failed.toString))
    }
  /**
   * First run this Execution, then move to the result
   * of the function
   */
  def flatMap[U](fn: T => Execution[U]): Execution[U] =
    FlatMapped(this, fn)
  /**
   * This is the same as flatMap(identity)
   */
  def flatten[U](implicit ev: T <:< Execution[U]): Execution[U] =
    flatMap(ev)
  /**
   * Apply a pure function to the result. This may not
   * be called if subsequently the result is discarded with .unit
   * For side effects see onComplete.
   */
  def map[U](fn: T => U): Execution[U] =
    Mapped(this, fn)
  /**
   * Reads the counters into the value, but does not reset them.
   * You may want .getAndResetCounters.
   */
  def getCounters: Execution[(T, ExecutionCounters)] =
    GetCounters(this)
  /**
   * Reads the counters and resets them to zero. Probably what
   * you want in a loop that is using counters to check for
   * convergence.
   */
  def getAndResetCounters: Execution[(T, ExecutionCounters)] =
    getCounters.resetCounters
  /**
   * This function is called when the current run is completed. This is
   * only a side effect (see unit return).
   *
   * ALSO You must .run the result. If
   * you throw away the result of this call, your fn will never be
   * called. When you run the result, the Future you get will not
   * be complete unless fn has completed running. If fn throws, it
   * will be handled be the scala.concurrent.ExecutionContext.reportFailure
   * NOT by returning a Failure in the Future.
   */
  def onComplete(fn: Try[T] => Unit): Execution[T] = OnComplete(this, fn)
  /**
   * This allows you to handle a failure by giving a replacement execution
   * in some cases. This execution may be a retry if you know that your
   * execution can have spurious errors, or it could be a constant or an
   * alternate way to compute. Be very careful creating looping retries that
   * could hammer your cluster when the data is missing or when
   * there is some real problem with your job logic.
   */
  def recoverWith[U >: T](rec: PartialFunction[Throwable, Execution[U]]): Execution[U] =
    RecoverWith(this, rec)
  /**
   * Resets the counters back to zero. This is useful if
   * you want to reset before a zip or a call to flatMap
   */
  def resetCounters: Execution[T] =
    ResetCounters(this)
  /**
   * This causes the Execution to occur. The result is not cached, so each call
   * to run will result in the computation being re-run. Avoid calling this
   * until the last possible moment by using flatMap, zip and recoverWith.
   *
   * Seriously: pro-style is for this to be called only once in a program.
   */
  final def run(conf: Config, mode: Mode)(implicit cec: ConcurrentExecutionContext): Future[T] = {
    val ec = new EvalCache
    // Tag the config with a fresh id so this run's jobs can be correlated.
    val confWithId = conf.setScaldingExecutionId(java.util.UUID.randomUUID.toString)
    val result = runStats(confWithId, mode, ec)(cec).map(_._1)
    // When the final future is complete we stop the submit thread
    result.onComplete { _ => ec.finished() }
    // wait till the end to start the thread in case the above throws
    ec.start()
    result
  }
  /**
   * This is the internal method that must be implemented
   * Given a config, mode, and cache of evaluations for this config and mode,
   * return the new cache with as much evaluation as possible before the future
   * completes, and a future of the result, counters and cache after the future
   * is complete
   */
  protected def runStats(conf: Config,
    mode: Mode,
    cache: EvalCache)(implicit cec: ConcurrentExecutionContext): Future[(T, ExecutionCounters)]
  /**
   * This is convenience for when we don't care about the result.
   * like .map(_ => ())
   */
  def unit: Execution[Unit] = map(_ => ())
  /**
   * This waits synchronously on run, using the global execution context
   * Avoid calling this if possible, preferring run or just Execution
   * composition. Every time someone calls this, be very suspect. It is
   * always code smell. Very seldom should you need to wait on a future.
   */
  def waitFor(conf: Config, mode: Mode): Try[T] =
    Try(Await.result(run(conf, mode)(ConcurrentExecutionContext.global),
      scala.concurrent.duration.Duration.Inf))
  /**
   * This is here to silence warnings in for comprehensions, but is
   * identical to .filter.
   *
   * Users should never directly call this method, call .filter
   */
  def withFilter(p: T => Boolean): Execution[T] = filter(p)
  /*
   * run this and that in parallel, without any dependency. This will
   * be done in a single cascading flow if possible.
   */
  def zip[U](that: Execution[U]): Execution[(T, U)] =
    Zipped(this, that)
}
/**
* Execution has many methods for creating Execution[T] instances, which
* are the preferred way to compose computations in scalding libraries.
*/
object Execution {
/**
 * This is an instance of Monad for execution so it can be used
 * in functions that apply to all Monads
 */
implicit object ExecutionMonad extends Monad[Execution] {
  override def apply[T](t: T): Execution[T] = Execution.from(t)
  override def map[T, U](e: Execution[T])(fn: T => U): Execution[U] = e.map(fn)
  override def flatMap[T, U](e: Execution[T])(fn: T => Execution[U]): Execution[U] = e.flatMap(fn)
  // join composes the two executions in parallel (applicative zip), not sequentially
  override def join[T, U](t: Execution[T], u: Execution[U]): Execution[(T, U)] = t.zip(u)
}
/**
 * This is the standard semigroup on an Applicative (zip, then inside the Execution do plus)
 */
implicit def semigroup[T: Semigroup]: Semigroup[Execution[T]] = Semigroup.from[Execution[T]] { (a, b) =>
  a.zip(b).map { case (ta, tb) => Semigroup.plus(ta, tb) }
}
/**
 * This is the standard monoid on an Applicative (zip, then inside the Execution do plus)
 * useful to combine unit Executions:
 * Monoid.sum(ex1, ex2, ex3, ex4): Execution[Unit]
 * where each are exi are Execution[Unit]
 */
implicit def monoid[T: Monoid]: Monoid[Execution[T]] = Monoid.from(Execution.from(Monoid.zero[T])) { (a, b) =>
  a.zip(b).map { case (ta, tb) => Monoid.plus(ta, tb) }
}
/**
 * This is a mutable state that is kept internal to an execution
 * as it is evaluating. It memoizes sub-Execution results and owns the
 * single thread from which all cascading flows are submitted.
 */
private[scalding] class EvalCache {
  // Memoizes each Execution node's (result, counters) future so shared
  // sub-graphs are evaluated at most once per run.
  private[this] val cache =
    new ConcurrentHashMap[Execution[Any], Future[(Any, ExecutionCounters)]]()
  // Memoizes individual sink writes so each ToWrite is scheduled only once.
  private[this] val toWriteCache =
    new ConcurrentHashMap[ToWrite, Future[ExecutionCounters]]()
  /**
   * We send messages from other threads into the submit thread here
   */
  sealed trait FlowDefAction
  case class RunFlowDef(conf: Config,
    mode: Mode,
    fd: FlowDef,
    result: Promise[JobStats]) extends FlowDefAction
  case object Stop extends FlowDefAction
  private val messageQueue = new LinkedBlockingQueue[FlowDefAction]()
  /**
   * Hadoop and/or cascading has some issues, it seems, with starting jobs
   * from multiple threads. This thread does all the Flow starting.
   */
  private val thread = new Thread(new Runnable {
    def run() {
      @annotation.tailrec
      def go(): Unit = messageQueue.take match {
        case Stop => ()
        case RunFlowDef(conf, mode, fd, promise) =>
          try {
            promise.completeWith(
              ExecutionContext.newContext(conf)(fd, mode).run)
          } catch {
            case t: Throwable =>
              // something bad happened, but this thread is a daemon
              // that should only stop if all others have stopped or
              // we have received the stop message.
              // Stopping this thread prematurely can deadlock
              // futures from the promise we have.
              // In a sense, this thread does not exist logically and
              // must forward all exceptions to threads that requested
              // this work be started.
              promise.tryFailure(t)
          }
          // Loop
          go()
      }
      // Now we actually run the recursive loop
      go()
    }
  })
  /** Enqueues a flow for the submit thread; returns a future of its stats. */
  def runFlowDef(conf: Config, mode: Mode, fd: FlowDef): Future[JobStats] =
    try {
      val promise = Promise[JobStats]()
      val fut = promise.future
      messageQueue.put(RunFlowDef(conf, mode, fd, promise))
      // Don't do any work after the .put call, we want no chance for exception
      // after the put
      fut
    } catch {
      case NonFatal(e) =>
        Future.failed(e)
    }
  def start(): Unit = {
    // Make sure this thread can't keep us running if all others are gone
    thread.setDaemon(true)
    thread.start()
  }
  /*
   * This is called after we are done submitting all jobs
   */
  def finished(): Unit = messageQueue.put(Stop)
  /**
   * Atomically claims a write: Left(promise) means the caller must perform
   * the write and fulfil the promise; Right(future) means another caller
   * already owns it and this is its pending result.
   */
  def getOrLock(write: ToWrite): Either[Promise[ExecutionCounters], Future[ExecutionCounters]] = {
    /*
     * Since we don't want to evaluate res twice, we make a promise
     * which we will use if it has not already been evaluated
     */
    val promise = Promise[ExecutionCounters]()
    val fut = promise.future
    toWriteCache.putIfAbsent(write, fut) match {
      case null =>
        Left(promise)
      case exists =>
        Right(exists)
    }
  }
  /**
   * Memoized evaluation of an Execution node; the Boolean is true when this
   * call actually started the computation (false when a cached future was reused).
   */
  def getOrElseInsertWithFeedback[T](ex: Execution[T],
    res: => Future[(T, ExecutionCounters)])(implicit ec: ConcurrentExecutionContext): (Boolean, Future[(T, ExecutionCounters)]) = {
    /*
     * Since we don't want to evaluate res twice, we make a promise
     * which we will use if it has not already been evaluated
     */
    val promise = Promise[(T, ExecutionCounters)]()
    val fut = promise.future
    cache.putIfAbsent(ex, fut) match {
      case null =>
        // note res is by-name, so we just evaluate it now:
        promise.completeWith(res)
        (true, fut)
      case exists => (false, exists.asInstanceOf[Future[(T, ExecutionCounters)]])
    }
  }
  /** Memoized evaluation without the started/reused feedback flag. */
  def getOrElseInsert[T](ex: Execution[T],
    res: => Future[(T, ExecutionCounters)])(implicit ec: ConcurrentExecutionContext): Future[(T, ExecutionCounters)] =
    getOrElseInsertWithFeedback(ex, res)._2
}
/** Runs an asynchronous function when evaluated; carries no counters. */
private case class FutureConst[T](get: ConcurrentExecutionContext => Future[T]) extends Execution[T] {
  def runStats(conf: Config, mode: Mode, cache: EvalCache)(implicit cec: ConcurrentExecutionContext) =
    cache.getOrElseInsert(this,
      for {
        futt <- toFuture(Try(get(cec)))
        t <- futt
      } yield (t, ExecutionCounters.empty))
  // Note that unit is not optimized away, since Futures are often used with side-effects, so,
  // we ensure that get is always called in contrast to Mapped, which assumes that fn is pure.
}
/** Sequential composition: counters of both stages are summed. */
private case class FlatMapped[S, T](prev: Execution[S], fn: S => Execution[T]) extends Execution[T] {
  def runStats(conf: Config, mode: Mode, cache: EvalCache)(implicit cec: ConcurrentExecutionContext) =
    cache.getOrElseInsert(this,
      for {
        (s, st1) <- prev.runStats(conf, mode, cache)
        next = fn(s)
        fut2 = next.runStats(conf, mode, cache)
        (t, st2) <- fut2
      } yield (t, Monoid.plus(st1, st2)))
}
/** Pure transformation of the previous result; counters pass through unchanged. */
private case class Mapped[S, T](prev: Execution[S], fn: S => T) extends Execution[T] {
  def runStats(conf: Config, mode: Mode, cache: EvalCache)(implicit cec: ConcurrentExecutionContext) =
    cache.getOrElseInsert(this,
      prev.runStats(conf, mode, cache)
        .map { case (s, stats) => (fn(s), stats) })
}
/** Exposes the accumulated counters alongside the value, without resetting them. */
private case class GetCounters[T](prev: Execution[T]) extends Execution[(T, ExecutionCounters)] {
  def runStats(conf: Config, mode: Mode, cache: EvalCache)(implicit cec: ConcurrentExecutionContext) =
    cache.getOrElseInsert(this,
      prev.runStats(conf, mode, cache).map { case tc @ (t, c) => (tc, c) })
}
/** Discards the accumulated counters, replacing them with the empty set. */
private case class ResetCounters[T](prev: Execution[T]) extends Execution[T] {
  def runStats(conf: Config, mode: Mode, cache: EvalCache)(implicit cec: ConcurrentExecutionContext) =
    cache.getOrElseInsert(this,
      prev.runStats(conf, mode, cache).map { case (t, _) => (t, ExecutionCounters.empty) })
}
/** Invokes a callback after prev finishes; the result completes only after the callback has run. */
private case class OnComplete[T](prev: Execution[T], fn: Try[T] => Unit) extends Execution[T] {
  def runStats(conf: Config, mode: Mode, cache: EvalCache)(implicit cec: ConcurrentExecutionContext) =
    cache.getOrElseInsert(this, {
      val res = prev.runStats(conf, mode, cache)
      /**
       * The result we give is only completed AFTER fn is run
       * so callers can wait on the result of this OnComplete
       */
      val finished = Promise[(T, ExecutionCounters)]()
      res.onComplete { tryT =>
        try {
          fn(tryT.map(_._1))
        } finally {
          // Do our best to signal when we are done
          finished.complete(tryT)
        }
      }
      finished.future
    })
}
/** Falls back to a recovery Execution when prev fails with a matching Throwable. */
private case class RecoverWith[T](prev: Execution[T], fn: PartialFunction[Throwable, Execution[T]]) extends Execution[T] {
  def runStats(conf: Config, mode: Mode, cache: EvalCache)(implicit cec: ConcurrentExecutionContext) =
    cache.getOrElseInsert(this,
      prev.runStats(conf, mode, cache)
        .recoverWith(fn.andThen(_.runStats(conf, mode, cache))))
}
/**
 * Use our internal faster failing zip function rather than the standard one due to waiting.
 * Sequences the futures left-to-right, failing as soon as any element fails.
 */
def failFastSequence[T](t: Iterable[Future[T]])(implicit cec: ConcurrentExecutionContext): Future[List[T]] = {
  // Fold prepends each result, so reverse at the end to restore input order.
  t.foldLeft(Future.successful(Nil: List[T])) { (f, i) =>
    failFastZip(f, i).map { case (tail, h) => h :: tail }
  }
    .map(_.reverse)
}
/**
 * Standard scala zip waits forever on the left side, even if the right side fails;
 * this version fails the pair as soon as EITHER future fails.
 */
def failFastZip[T, U](ft: Future[T], fu: Future[U])(implicit cec: ConcurrentExecutionContext): Future[(T, U)] = {
  // middleState races the two futures: the first success records its value plus
  // a promise for the other side; the first failure fails middleState directly.
  // The "already set" branches handle losing the race to the other callback.
  type State = Either[(T, Promise[U]), (U, Promise[T])]
  val middleState = Promise[State]()
  ft.onComplete {
    case f @ Failure(err) =>
      if (!middleState.tryFailure(err)) {
        // the right has already succeeded
        middleState.future.foreach {
          case Right((_, pt)) => pt.complete(f)
          case Left((t1, _)) => // This should never happen
            sys.error(s"Logic error: tried to set Failure($err) but Left($t1) already set")
        }
      }
    case Success(t) =>
      // Create the next promise:
      val pu = Promise[U]()
      if (!middleState.trySuccess(Left((t, pu)))) {
        // we can't set, so the other promise beat us here.
        middleState.future.foreach {
          case Right((_, pt)) => pt.success(t)
          case Left((t1, _)) => // This should never happen
            sys.error(s"Logic error: tried to set Left($t) but Left($t1) already set")
        }
      }
  }
  fu.onComplete {
    case f @ Failure(err) =>
      if (!middleState.tryFailure(err)) {
        // we can't set, so the other promise beat us here.
        middleState.future.foreach {
          case Left((_, pu)) => pu.complete(f)
          case Right((u1, _)) => // This should never happen
            sys.error(s"Logic error: tried to set Failure($err) but Right($u1) already set")
        }
      }
    case Success(u) =>
      // Create the next promise:
      val pt = Promise[T]()
      if (!middleState.trySuccess(Right((u, pt)))) {
        // we can't set, so the other promise beat us here.
        middleState.future.foreach {
          case Left((_, pu)) => pu.success(u)
          case Right((u1, _)) => // This should never happen
            sys.error(s"Logic error: tried to set Right($u) but Right($u1) already set")
        }
      }
  }
  // Whichever side won the race, wait for the loser's promise to finish the pair.
  middleState.future.flatMap {
    case Left((t, pu)) => pu.future.map((t, _))
    case Right((u, pt)) => pt.future.map((_, u))
  }
}
/** Parallel composition of two executions; counters of both sides are summed. */
private case class Zipped[S, T](one: Execution[S], two: Execution[T]) extends Execution[(S, T)] {
  def runStats(conf: Config, mode: Mode, cache: EvalCache)(implicit cec: ConcurrentExecutionContext) =
    cache.getOrElseInsert(this, {
      // Start both before zipping so they run concurrently.
      val f1 = one.runStats(conf, mode, cache)
      val f2 = two.runStats(conf, mode, cache)
      failFastZip(f1, f2)
        .map { case ((s, ss), (t, st)) => ((s, t), Monoid.plus(ss, st)) }
    })
}
/** Builds the wrapped Execution from a unique id taken from (and recorded in) the config. */
private case class UniqueIdExecution[T](fn: UniqueID => Execution[T]) extends Execution[T] {
  def runStats(conf: Config, mode: Mode, cache: EvalCache)(implicit cec: ConcurrentExecutionContext) =
    cache.getOrElseInsert(this, {
      val (uid, nextConf) = conf.ensureUniqueId
      fn(uid).runStats(nextConf, mode, cache)
    })
}
/*
 * This allows you to run any cascading flowDef as an Execution.
 */
private case class FlowDefExecution(result: (Config, Mode) => FlowDef) extends Execution[Unit] {
  def runStats(conf: Config, mode: Mode, cache: EvalCache)(implicit cec: ConcurrentExecutionContext) =
    cache.getOrElseInsert(this,
      for {
        // Building the FlowDef may throw; capture that in the future.
        flowDef <- toFuture(Try(result(conf, mode)))
        _ = FlowStateMap.validateSources(flowDef, mode)
        jobStats <- cache.runFlowDef(conf, mode, flowDef)
        _ = FlowStateMap.clear(flowDef)
      } yield ((), ExecutionCounters.fromJobStats(jobStats)))
}
/*
 * This is here so we can call without knowing the type T
 * but with proof that pipe matches sink
 */
private trait ToWrite {
  def write(config: Config, flowDef: FlowDef, mode: Mode): Unit
}
/** A matched pipe/sink pair; writing registers the sink on the given FlowDef. */
private case class SimpleWrite[T](pipe: TypedPipe[T], sink: TypedSink[T]) extends ToWrite {
  def write(config: Config, flowDef: FlowDef, mode: Mode): Unit = {
    // This has the side effect of mutating flowDef
    pipe.write(sink)(flowDef, mode)
    ()
  }
}
/** Defers building the SimpleWrite until config and mode are available. */
private case class PreparedWrite[T](fn: (Config, Mode) => SimpleWrite[T]) extends ToWrite {
  def write(config: Config, flowDef: FlowDef, mode: Mode): Unit =
    fn(config, mode).write(config, flowDef, mode)
}
/**
 * This is the fundamental execution that actually happens in TypedPipes, all the rest
 * are based on on this one. By keeping the Pipe and the Sink, can inspect the Execution
 * DAG and optimize it later (a goal, but not done yet).
 */
private case class WriteExecution[T](head: ToWrite, tail: List[ToWrite], fn: (Config, Mode) => T) extends Execution[T] {

  /**
   * Apply a pure function to the result. This may not
   * be called if subsequently the result is discarded with .unit
   * For side effects see onComplete.
   *
   * Here we inline the map operation into the presentation function so we can zip after map.
   */
  override def map[U](mapFn: T => U): Execution[U] =
    WriteExecution(head, tail, { (conf: Config, mode: Mode) => mapFn(fn(conf, mode)) })

  /* Run a list of ToWrite elements as a single cascading flow. */
  private[this] def scheduleToWrites(conf: Config,
    mode: Mode,
    cache: EvalCache,
    head: ToWrite,
    tail: List[ToWrite])(implicit cec: ConcurrentExecutionContext): Future[ExecutionCounters] = {
    for {
      flowDef <- toFuture(Try { val fd = new FlowDef; (head :: tail).foreach(_.write(conf, fd, mode)); fd })
      _ = FlowStateMap.validateSources(flowDef, mode)
      jobStats <- cache.runFlowDef(conf, mode, flowDef)
      _ = FlowStateMap.clear(flowDef)
    } yield (ExecutionCounters.fromJobStats(jobStats))
  }

  /**
   * Splits a list of (key, Either) pairs into the lefts and the rights,
   * preserving relative order within each output list.
   *
   * Implemented with a tail-recursive accumulator: the previous direct
   * recursion consumed one stack frame per element and could overflow the
   * stack for very long write lists.
   */
  def unwrapListEither[A, B, C](it: List[(A, Either[B, C])]): (List[(A, B)], List[(A, C)]) = {
    @annotation.tailrec
    def loop(rem: List[(A, Either[B, C])],
      lefts: List[(A, B)],
      rights: List[(A, C)]): (List[(A, B)], List[(A, C)]) = rem match {
      case (a, Left(b)) :: rest => loop(rest, (a, b) :: lefts, rights)
      case (a, Right(c)) :: rest => loop(rest, lefts, (a, c) :: rights)
      case Nil => (lefts.reverse, rights.reverse)
    }
    loop(it, Nil, Nil)
  }

  // We look up to see if any of our ToWrite elements have already been ran
  // if so we remove them from the cache.
  // Anything not already ran we run as part of a single flow def, using their combined counters for the others
  def runStats(conf: Config, mode: Mode, cache: EvalCache)(implicit cec: ConcurrentExecutionContext) =
    cache.getOrElseInsert(this, {
      val cacheLookup: List[(ToWrite, Either[Promise[ExecutionCounters], Future[ExecutionCounters]])] = (head :: tail).map{ tw => (tw, cache.getOrLock(tw)) }
      val (weDoOperation, someoneElseDoesOperation) = unwrapListEither(cacheLookup)
      val otherResult = failFastSequence(someoneElseDoesOperation.map(_._2))
      otherResult.value match {
        case Some(Failure(e)) => Future.failed(e)
        case _ => // Either successful or not completed yet
          val localFlowDefCountersFuture: Future[ExecutionCounters] =
            weDoOperation match {
              case all @ (h :: tail) =>
                val futCounters: Future[ExecutionCounters] = scheduleToWrites(conf, mode, cache, h._1, tail.map(_._1))
                // Complete all of the promises we put into the cache
                // with this future counters set
                weDoOperation.foreach {
                  case (toWrite, promise) =>
                    promise.completeWith(futCounters)
                }
                futCounters
              case Nil => Future.successful(ExecutionCounters.empty) // No work to do, provide a fulled set of 0 counters to operate on
            }
          failFastZip(otherResult, localFlowDefCountersFuture).map {
            case (lCounters, fdCounters) =>
              val summedCounters: ExecutionCounters = Monoid.sum(fdCounters :: lCounters)
              (fn(conf, mode), summedCounters)
          }
      }
    })

  /*
   * run this and that in parallel, without any dependency. This will
   * be done in a single cascading flow if possible.
   *
   * If both sides are write executions then merge them
   */
  override def zip[U](that: Execution[U]): Execution[(T, U)] =
    that match {
      case WriteExecution(h, t, otherFn) =>
        val newFn = { (conf: Config, mode: Mode) =>
          (fn(conf, mode), otherFn(conf, mode))
        }
        WriteExecution(head, h :: t ::: tail, newFn)
      case o => Zipped(this, that)
    }
}
/**
 * This is called Reader, because it just returns its input to run as the output
 */
private case object ReaderExecution extends Execution[(Config, Mode)] {
  // No work is performed: the (Config, Mode) handed to run is the result,
  // with an empty counter set.
  def runStats(conf: Config, mode: Mode, cache: EvalCache)(implicit cec: ConcurrentExecutionContext) =
    Future.successful(((conf, mode), ExecutionCounters.empty))
}
/** Lifts an already-completed [[scala.util.Try]] into an already-completed Future. */
private def toFuture[R](t: Try[R]): Future[R] =
  t match {
    case Failure(error) => Future.failed(error)
    case Success(value) => Future.successful(value)
  }
/**
 * This creates a definitely failed Execution.
 */
def failed(t: Throwable): Execution[Nothing] = fromTry(Failure(t))

/**
 * This makes a constant execution that runs no job.
 * Note this is a lazy parameter that is evaluated every
 * time run is called.
 */
def from[T](t: => T): Execution[T] = fromTry(Try(t))

// By-name parameter: the Try is re-evaluated on every run, mirroring `from`.
def fromTry[T](t: => Try[T]): Execution[T] = fromFuture { _ => toFuture(t) }
/**
 * The call to fn will happen when the run method on the result is called.
 * The ConcurrentExecutionContext will be the same one used on run.
 * This is intended for cases where you need to make asynchronous calls
 * in the middle or end of execution. Presumably this is used with flatMap
 * either before or after
 */
def fromFuture[T](fn: ConcurrentExecutionContext => Future[T]): Execution[T] = FutureConst(fn)

/** Returns a constant Execution[Unit] */
val unit: Execution[Unit] = from(())
/**
 * This converts a function into an Execution monad. The flowDef returned
 * is never mutated.
 */
def fromFn(fn: (Config, Mode) => FlowDef): Execution[Unit] =
  FlowDefExecution(fn)

/**
 * Creates an Execution to do a write
 *
 * This variant allows the user to supply a method using the config and mode to build a new
 * type U for the resultant execution.
 */
private[scalding] def write[T, U](pipe: TypedPipe[T], sink: TypedSink[T], generatorFn: (Config, Mode) => U): Execution[U] =
  WriteExecution(SimpleWrite(pipe, sink), Nil, generatorFn)

/**
 * The simplest form, just sink the typed pipe into the sink and get a unit execution back
 */
// `()` is not a Function2, so overload resolution picks the by-name
// presentType variant below with U = Unit.
private[scalding] def write[T](pipe: TypedPipe[T], sink: TypedSink[T]): Execution[Unit] =
  write(pipe, sink, ())

// Variant taking a lazily evaluated constant result value for the write.
private[scalding] def write[T, U](pipe: TypedPipe[T], sink: TypedSink[T], presentType: => U): Execution[U] =
  WriteExecution(SimpleWrite(pipe, sink), Nil, { (_: Config, _: Mode) => presentType })

/**
 * Here we allow both the targets to write and the sources to be generated from the config and mode.
 * This allows us to merge things looking for the config and mode without using flatmap.
 */
private[scalding] def write[T, U](fn: (Config, Mode) => (TypedPipe[T], TypedSink[T]), generatorFn: (Config, Mode) => U): Execution[U] =
  WriteExecution(PreparedWrite({ (cfg: Config, m: Mode) =>
    val r = fn(cfg, m)
    SimpleWrite(r._1, r._2)
  }), Nil, generatorFn)
/**
 * Convenience method to get the Args
 */
def getArgs: Execution[Args] = ReaderExecution.map(_._1.getArgs)

/**
 * Use this to read the configuration, which may contain Args or options
 * which describe input on which to run
 */
def getConfig: Execution[Config] = ReaderExecution.map(_._1)

/** Use this to get the mode, which may contain the job conf */
def getMode: Execution[Mode] = ReaderExecution.map(_._2)

/** Use this to get the config and mode. */
def getConfigMode: Execution[(Config, Mode)] = ReaderExecution

/**
 * This is convenience method only here to make it slightly cleaner
 * to get Args, which are in the Config
 */
def withArgs[T](fn: Args => Execution[T]): Execution[T] =
  getConfig.flatMap { conf => fn(conf.getArgs) }
/**
 * Use this to use counters/stats with Execution. You do this:
 * Execution.withId { implicit uid =>
 *   val myStat = Stat("myStat") // uid is implicitly pulled in
 *   pipe.map { t =>
 *     if(someCase(t)) myStat.inc
 *     fn(t)
 *   }
 *   .writeExecution(mySink)
 * }
 */
def withId[T](fn: UniqueID => Execution[T]): Execution[T] = UniqueIdExecution(fn)
/*
 * This runs a Flow using Cascading's built in threads. The resulting JobStats
 * are put into a promise when they are ready
 */
def run[C](flow: Flow[C]): Future[JobStats] =
  // This is in Java because of the cascading API's raw types on FlowListener
  FlowListenerPromise.start(flow, { f: Flow[C] => JobStats(f.getFlowStats) })

/*
 * This blocks the current thread until the job completes with either success or
 * failure.
 */
def waitFor[C](flow: Flow[C]): Try[JobStats] =
  Try {
    // complete blocks; any exception it throws becomes a Failure.
    flow.complete;
    JobStats(flow.getStats)
  }
/**
 * combine several executions and run them in parallel when .run is called
 */
def zip[A, B](ax: Execution[A], bx: Execution[B]): Execution[(A, B)] =
  ax.zip(bx)

/**
 * combine several executions and run them in parallel when .run is called
 */
// Higher arities are built by nesting binary zips and flattening the tuples.
def zip[A, B, C](ax: Execution[A], bx: Execution[B], cx: Execution[C]): Execution[(A, B, C)] =
  ax.zip(bx).zip(cx).map { case ((a, b), c) => (a, b, c) }

/**
 * combine several executions and run them in parallel when .run is called
 */
def zip[A, B, C, D](ax: Execution[A],
  bx: Execution[B],
  cx: Execution[C],
  dx: Execution[D]): Execution[(A, B, C, D)] =
  ax.zip(bx).zip(cx).zip(dx).map { case (((a, b), c), d) => (a, b, c, d) }

/**
 * combine several executions and run them in parallel when .run is called
 */
def zip[A, B, C, D, E](ax: Execution[A],
  bx: Execution[B],
  cx: Execution[C],
  dx: Execution[D],
  ex: Execution[E]): Execution[(A, B, C, D, E)] =
  ax.zip(bx).zip(cx).zip(dx).zip(ex).map { case ((((a, b), c), d), e) => (a, b, c, d, e) }
/*
 * If you have many Executions, it is better to combine them with
 * zip than flatMap (which is sequential). sequence just calls
 * zip on each item in the input sequence.
 *
 * Note, despite the name, which is taken from the standard scala Future API,
 * these executions are executed in parallel: run is called on all at the
 * same time, not one after the other.
 */
def sequence[T](exs: Seq[Execution[T]]): Execution[Seq[T]] = {
  // Fold the executions together with zip (which runs both sides in
  // parallel), prepending each result onto an accumulator list, then
  // reverse once at the end to restore the input order.
  val reversed = exs.foldLeft(from(List.empty[T]): Execution[List[T]]) { (acc, ex) =>
    ex.zip(acc).map { case (value, values) => value :: values }
  }
  reversed.map(_.reverse)
}
}
/**
 * This represents the counters portion of the JobStats that are returned.
 * Counters are just a vector of longs with counter name, group keys.
 */
trait ExecutionCounters {
  /** Immutable set of all keys with a recorded counter. */
  def keys: Set[StatKey]

  /**
   * Total for a key, defaulting to 0L when absent.
   * Note if a counter is never incremented, get returns None,
   * which is indistinguishable here from a counter that was
   * incremented and then decremented back to zero.
   */
  def apply(key: StatKey): Long = get(key).getOrElse(0L)

  /** If the counter is present, return it. */
  def get(key: StatKey): Option[Long]

  /** Materializes every key into a Map, missing values defaulting to 0L. */
  def toMap: Map[StatKey, Long] =
    keys.iterator.map(k => k -> apply(k)).toMap
}
/**
 * The companion gives several ways to create ExecutionCounters from
 * other CascadingStats, JobStats, or Maps
 */
object ExecutionCounters {
  /**
   * This is the zero of the ExecutionCounter Monoid
   */
  def empty: ExecutionCounters = new ExecutionCounters {
    def keys = Set.empty
    def get(key: StatKey) = None
    override def toMap = Map.empty
  }

  /**
   * Just gets the counters from the CascadingStats and ignores
   * all the other fields present
   */
  def fromCascading(cs: cascading.stats.CascadingStats): ExecutionCounters = new ExecutionCounters {
    import scala.collection.JavaConverters._
    // Snapshot all (counter, group) pairs once at construction time.
    val keys = (for {
      group <- cs.getCounterGroups.asScala
      counter <- cs.getCountersFor(group).asScala
    } yield StatKey(counter, group)).toSet
    def get(k: StatKey) =
      if (keys(k)) {
        // Yes, cascading is reversed from what we did in Stats. :/
        Some(cs.getCounterValue(k.group, k.counter))
      } else None
  }

  /**
   * Gets just the counters from the JobStats
   */
  def fromJobStats(js: JobStats): ExecutionCounters = {
    val counters = js.counters
    new ExecutionCounters {
      def keys = for {
        group <- counters.keySet
        counter <- counters(group).keys
      } yield StatKey(counter, group)
      def get(k: StatKey) = counters.get(k.group).flatMap(_.get(k.counter))
    }
  }

  /**
   * A Simple wrapper over a Map[StatKey, Long]
   */
  def fromMap(allValues: Map[StatKey, Long]): ExecutionCounters =
    new ExecutionCounters {
      def keys = allValues.keySet
      def get(k: StatKey) = allValues.get(k)
      override def toMap = allValues
    }

  /**
   * This allows us to merge the results of two computations. It just
   * does pointwise addition.
   */
  implicit def monoid: Monoid[ExecutionCounters] = new Monoid[ExecutionCounters] {
    override def isNonZero(that: ExecutionCounters) = that.keys.nonEmpty
    def zero = ExecutionCounters.empty
    def plus(left: ExecutionCounters, right: ExecutionCounters) = {
      // Union of both key sets; apply() defaults missing keys to 0L.
      fromMap((left.keys ++ right.keys)
        .map { k => (k, left(k) + right(k)) }
        .toMap)
    }
  }
}
| Chasego/scalding | scalding-core/src/main/scala/com/twitter/scalding/Execution.scala | Scala | apache-2.0 | 34,126 |
package sledtr.util
import sledtr.MyPreDef._
import sledtr.shelf._
import sledtr._
import sledtr.section._
import sledtr.actors._
/**
 * Base class for the visitors that MobiGenerator.generateBook drives over a
 * Book. Each subclass overrides the lifecycle hooks it cares about and
 * accumulates its output into the shared string buffer.
 */
abstract class MobiOutput {
  val sb = new StringBuffer

  /** Everything accumulated so far. */
  def contents: String = sb.toString

  // Lifecycle hooks, invoked in document order; all default to no-ops.
  def start(book: Book) = { }
  def startChapter(c: Chapter, n: Int) = { }
  def inSection(s: Section, n: Int, m: Int) = { }
  def endChapter() = { }
  def end() = { }
}
/**
 * Renders the main content HTML: an <h2> heading per chapter, an <h3>
 * heading per section, the section body, and a page break after each.
 * Heading ids are used as link targets by HtmlToc and Ncx.
 */
class OutputHtml extends MobiOutput {
  override def startChapter(c: Chapter, n: Int) = {
    sb.a("<h2 id=%s>%d. %s</h2>".format(c.title.p, n + 1, c.title))
    sb.a("<mbp:pagebreak />")
  }
  override def inSection(s: Section, n: Int, m: Int) = {
    // Fixed mismatched tags: this previously opened <h2> but closed </h3>,
    // producing invalid HTML. Sections sit under the chapter <h2> headings,
    // so <h3> is the intended level (matching the original closing tag).
    sb.a("<h3 id=%s>%d-%d. %s</h3>".format(s.url_list(0).p, n + 1, m + 1, s.title))
    // NOTE(review): this renders the url_list collection via its toString;
    // confirm that is the intended display format.
    sb.a("<p>%s</p>".format(s.url_list))
    sb.a(s.formatHtml)
    sb.a("<mbp:pagebreak />")
  }
}
/**
 * Renders the HTML table of contents: the book title followed by a nested
 * list of links whose anchors match the heading ids emitted by OutputHtml.
 */
class HtmlToc extends MobiOutput {
  override def start(book: Book) = {
    sb.a("<h1>%s</h1>".format(book.title))
    sb.a("<mbp:pagebreak />")
    sb.a("<h2 id=toc>Contents</h2>")
    sb.a("<ul>")
  }
  override def startChapter(c: Chapter, n: Int) = {
    sb.a("<li><a href=#%s>%d. %s</a></li>".format(c.title.p, n + 1, c.title))
    // Open a nested list for this chapter's sections.
    sb.a("<ul>")
  }
  override def inSection(s: Section, n: Int, m: Int) =
    sb.a("<li><a href=#%s>%d-%d. %s</a></li>".format(s.url_list(0).p, n + 1, m + 1, s.title))
  override def endChapter() = sb.a("</ul>")
  override def end() = {
    sb.a("</ul>")
    sb.a("<mbp:pagebreak />")
  }
}
/**
 * Emits an NCX <navPoint> entry per chapter and per section, targeting the
 * same anchors as the HTML headings produced by OutputHtml.
 */
class Ncx extends MobiOutput {
  override def startChapter(c: Chapter, n: Int) =
    sb.a(MobiGenerator.createNavi(c.title, c.title.p))
  override def inSection(s: Section, n: Int, m: Int) =
    sb.a(MobiGenerator.createNavi(s.title, s.url_list(0).p))
}
/**
 * Produces a plain-text diagnostics report (check.txt): for each section,
 * its title, its URLs, and — for SimpleSection — the HtmlInfo check output.
 */
class CheckText extends MobiOutput {
  override def inSection(s: Section, n: Int, m: Int) = {
    sb.a("===== " + s.title + " =====")
    s.url_list.foreach { u => sb.a(u) }
    import sledtr.section._
    s match {
      case f: SimpleSection =>
        sb.a(filter.HtmlInfo.check(f.src, f.findMaybeTarget))
      // Other section kinds produce no extra diagnostics.
      case _ =>
    }
  }
}
/**
 * Builds a Kindle (mobi) book from a [[Book]]: renders the content HTML,
 * an HTML table of contents, the NCX navigation map and the OPF package
 * file into the book's work directory, then invokes kindlegen on the OPF.
 */
object MobiGenerator {
  val opf_name = "book.opf"
  val ncx_name = "toc.ncx"
  val chk_name = "check.txt"
  val con_name = "contents.html"

  /** Walks the book once, feeding every chapter/section to all four outputs. */
  def generateBook(book: Book) = {
    val outputs = List(new OutputHtml, new HtmlToc, new Ncx, new CheckText)
    // Keep this destructuring in sync with the List above.
    val con :: toc :: ncx :: chk :: Nil = outputs
    outputs.foreach { _.start(book) }
    book.chapters.zipWithIndex.foreach { case (c, n) =>
      outputs.foreach { _.startChapter(c, n) }
      c.sections.zipWithIndex.foreach { case (s, m) => outputs.foreach { _.inSection(s, n, m) } }
      outputs.foreach { _.endChapter() }
    }
    outputs.foreach { _.end() }
    FileUtil.write(book.workDir + "/" + opf_name, createOpf(book.title, con_name))
    FileUtil.write(book.workDir + "/" + ncx_name, createToc(book.title, ncx.contents))
    FileUtil.write(book.workDir + "/" + chk_name, chk.contents)
    // Ensure any asynchronously downloaded images are on disk before kindlegen runs.
    ImageDownloadManager.waitDone()
    generate(book, toc.contents + con.contents)
  }

  /** Writes the final HTML and shells out to kindlegen on the OPF package. */
  def generate(book: Book, body: String): Unit = {
    FileUtil.write(book.workDir + "/" + con_name, formatHtml(book.title, body))
    // NOTE(review): Windows-style backslash separator kept as-is; confirm the
    // tool is Windows-only before changing it to File.separator.
    val ret = ExeExecutor.exec("%s %s".format(Environ.kindleGenPath, book.workDir + "\\\\" + opf_name))
    for (s <- ret) log(s)
  }

  /** Wraps the rendered body in a minimal UTF-8 HTML document. */
  def formatHtml(title: String, body: String): String = {
    """
    <html>
    <head>
    <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
    <title>%s</title>
    </head>
    <body>
    %s
    </body>
    </html>
    """.format(title, body)
  }

  /** Renders the OPF package file referencing the content HTML and the NCX. */
  def createOpf(title: String, html_name: String): String = {
    import java.util.Calendar
    val c = Calendar.getInstance()
    // dd/mm/yyyy; Calendar months are 0-based, hence the +1.
    val date = "%1$02d/%2$02d/%3$d".format(c.get(Calendar.DATE), c.get(Calendar.MONTH) + 1, c.get(Calendar.YEAR))
    """
    <?xml version="1.0" encoding="utf-8"?>
    <package unique-identifier="uid">
    <metadata>
    <dc-metadata
    xmlns:dc="http://purl.org/metadata/dublin_core"
    xmlns:oebpackage="http://openebook.org/namespaces/oeb-package/1.0/">
    <dc:Title>%1$s</dc:Title>
    <dc:Language>en-us</dc:Language>
    <dc:Creator>Internet</dc:Creator>
    <dc:Description>from the Internet</dc:Description>
    <dc:Date>%2$s</dc:Date>
    </dc-metadata>
    <x-metadata>
    <output encoding="utf-8" content-type="text/x-oeb1-document"></output>
    </x-metadata>
    </metadata>
    <manifest>
    <item id="item1" media-type="text/x-oeb1-document" href="%3$s"></item>
    <item id="toc" media-type="application/x-dtbncx+xml" href="toc.ncx"></item>
    </manifest>
    <spine toc="toc">
    <itemref idref="item1" />
    </spine>
    <tours></tours>
    <guide>
    <reference type="toc" title="Table of Contents" href="%3$s#toc"></reference>
    <reference type="start" title="Startup Page" href="%3$s"></reference>
    </guide>
    </package>
    """.format(title, date, html_name)
  }

  /**
   * Renders one NCX navPoint pointing at an anchor inside the content HTML.
   *
   * Generalized (backward-compatibly) with `id` and `playOrder` parameters:
   * the previous version hard-coded id="titlepage" and playOrder="1" for
   * every navPoint, which yields duplicate ids and a constant play order in
   * the NCX. Existing two-argument callers produce byte-identical output.
   */
  def createNavi(label: String, content: String, id: String = "titlepage", playOrder: Int = 1): String = {
    """
    <navPoint id="%4$s" playOrder="%5$d">
    <navLabel><text>%1$s</text></navLabel>
    <content src="%2$s#%3$s"/>
    </navPoint>
    """.format(label, con_name, content, id, playOrder)
  }

  /** Renders the NCX document wrapping the supplied navPoint entries. */
  def createToc(title: String, nav: String): String = {
    """
    <?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE ncx PUBLIC "-//NISO//DTD ncx 2005-1//EN"
    "http://www.daisy.org/z3986/2005/ncx-2005-1.dtd">
    <ncx xmlns="http://www.daisy.org/z3986/2005/ncx/" version="2005-1" xml:lang="en-US">
    <head>
    <meta name="dtb:uid" content="uid"/>
    <meta name="dtb:depth" content="1"/>
    <meta name="dtb:totalPageCount" content="0"/>
    <meta name="dtb:maxPageNumber" content="0"/>
    </head>
    <docTitle><text>%1$s</text></docTitle>
    <docAuthor><text>Internet</text></docAuthor>
    <navMap>
    %2$s
    </navMap>
    </ncx>
    """.format(title, nav)
  }
}
| K2Da/sledtr | src/main/scala/sledtr/util/MobiGenerator.scala | Scala | gpl-3.0 | 5,814 |
/*
* Copyright 2017 Datamountaineer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datamountaineer.streamreactor.connect.mongodb.sink
import java.util
import com.datamountaineer.streamreactor.connect.mongodb.config.{MongoConfig, MongoConfigConstants}
import com.datamountaineer.streamreactor.connect.utils.{ProgressCounter, JarManifest}
import com.typesafe.scalalogging.slf4j.StrictLogging
import org.apache.kafka.clients.consumer.OffsetAndMetadata
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.connect.errors.ConnectException
import org.apache.kafka.connect.sink.{SinkRecord, SinkTask}
import scala.collection.JavaConversions._
import scala.util.{Failure, Success, Try}
/**
 * <h1>MongoSinkTask</h1>
 *
 * Kafka Connect MongoDB sink task, invoked by the Connect framework to
 * deliver SinkRecords to the target Mongo collection via a MongoWriter.
 **/
class MongoSinkTask extends SinkTask with StrictLogging {
  // Created in start(); None until then, so put() can detect misuse.
  private var writer: Option[MongoWriter] = None
  private val manifest = JarManifest(getClass.getProtectionDomain.getCodeSource.getLocation)
  private val progressCounter = new ProgressCounter
  private var enableProgress: Boolean = false

  logger.info("Task initialising")

  /**
   * Parses the task configuration and sets up the writer.
   **/
  override def start(props: util.Map[String, String]): Unit = {
    val taskConfig = Try(MongoConfig(props)) match {
      case Success(config) => config
      case Failure(cause) =>
        throw new ConnectException("Couldn't start Mongo Sink due to configuration error.", cause)
    }
    logger.info(scala.io.Source.fromInputStream(getClass.getResourceAsStream("/mongo-ascii.txt")).mkString + s" v $version")
    logger.info(manifest.printManifest())
    writer = Some(MongoWriter(taskConfig, context = context))
    enableProgress = taskConfig.getBoolean(MongoConfigConstants.PROGRESS_COUNTER_ENABLED)
  }

  /**
   * Hands the batch of SinkRecords to the Mongo writer.
   **/
  override def put(records: util.Collection[SinkRecord]): Unit = {
    require(writer.nonEmpty, "Writer is not set!")
    val batch = records.toVector
    writer.foreach(_.write(batch))
    if (enableProgress) {
      progressCounter.update(batch)
    }
  }

  /** Closes the writer and resets the progress counter. */
  override def stop(): Unit = {
    logger.info("Stopping Mongo Database sink.")
    writer.foreach(_.close())
    progressCounter.empty
  }

  // Writes are flushed as they arrive; nothing to do on offset commit.
  override def flush(map: util.Map[TopicPartition, OffsetAndMetadata]): Unit = {}

  override def version: String = manifest.version()
}
| CodeSmell/stream-reactor | kafka-connect-mongodb/src/main/scala/com/datamountaineer/streamreactor/connect/mongodb/sink/MongoSinkTask.scala | Scala | apache-2.0 | 2,975 |
package com.ripjar.spark.process
import com.ripjar.spark.data._
import com.ripjar.zmq.ZMQWorker
import org.slf4j.Logger
import org.slf4j.LoggerFactory
import java.lang.Boolean
import org.apache.spark.streaming.dstream.DStream
import com.ripjar.spark.job.InstanceConfig
object ExternalZeroMQ {
  // Shared logger for all ExternalZeroMQ instances.
  val logger = LoggerFactory.getLogger(classOf[ExternalZeroMQ])
}
/*
 * Used to pass jobs to out of process methods such as ner
 *
 * Config parameters:
 *   connection string
 *
 * Task parameters:
 *   Just pass on the task block
 *
 */
// TODO: Handle timeouts / failures
class ExternalZeroMQ(config: InstanceConfig) extends Processor with Serializable {
  // ZMQ endpoint of the external worker; mandatory, so missing config fails fast.
  val connectStr = config.getMandatoryParameter("connectStr")

  /** Applies the per-item round trip to every element of the stream. */
  override def process(input: DStream[DataItem]): DStream[DataItem] = {
    input.map(process(_))
  }

  /** Serializes one item, sends it to the external process, parses the reply. */
  def process(input: DataItem): DataItem = {
    // NOTE(review): a fresh ZMQWorker is created for every single item;
    // confirm whether connection reuse/pooling is intended instead.
    val zmqWorker: ZMQWorker = new ZMQWorker
    val json = input.toString
    val rtn = zmqWorker.doWork(connectStr, json)
    DataItem.fromJSON(rtn)
  }
} | Yingmin-Li/spark-stream | src/main/scala/com/ripjar/spark/process/ExternalZeroMQ.scala | Scala | gpl-2.0 | 1,027 |
package looty
package model
import looty.poeapi.PoeTypes.AnyItem
import looty.model.WeaponTypes.WeaponType
//////////////////////////////////////////////////////////////
// Copyright (c) 2013 Ben Jackman, Jeff Gomberg
// All Rights Reserved
// please contact ben@jackman.biz or jeff@cgtanalytics.com
// for licensing inquiries
// Created by bjackman @ 12/14/13 1:06 PM
//////////////////////////////////////////////////////////////
/** A mutable [min, max] damage range accumulator. */
case class MinMaxDamage(var min: Double, var max: Double) {
  /** Midpoint of the range. */
  def avg: Double = (min + max) / 2.0

  /** Accumulates the given bounds into this range, component-wise. */
  def +=(min: Double, max: Double): Unit = {
    this.min = this.min + min
    this.max = this.max + max
  }

  /** Overwrites this range with the bounds of `that`. */
  def set(that: MinMaxDamage): Unit = {
    this.min = that.min
    this.max = that.max
  }
}
//object ComputedItem {
// implicit class ComputedItemExtensions(val item : ComputedItem) extends AnyVal {
//
// }
//
// def apply(item : AnyItem) : ComputedItem = {
// ???
// }
//}
/**
 * Wraps a raw [[AnyItem]] from the PoE API together with where it was found,
 * and derives the computed stats (damage, resistances, sockets, scores, UI
 * labels) that the rest of the application displays and filters on.
 * The nested objects group mutable accumulators that parsers fill in.
 */
class ComputedItem(val item: AnyItem, val containerId: LootContainerId, val locationName: String) {
  // Largest number of sockets sharing a single link group.
  lazy val maxLinks: Int = item.sockets.toOption.map(_.toList.map(_.group).groupBy(x => x).map(_._2.size).maxOptI.getOrElse(0)).getOrElse(0)

  object Scores {
    lazy val default: ItemScore = ItemScorer(ComputedItem.this).getOrElse(ItemScore(Nil, 0))
    lazy val custom: ItemScore = ItemScorer(ComputedItem.this).getOrElse(ItemScore(Nil, 0))
    lazy val custom2: HighScore = HighScorer(ComputedItem.this).getOrElse(HighScore(Nil, 0))
  }

  def maxResist = plusTo.resistance.all.max
  def magicFind = increased.quantityOfItemsFound + increased.rarityOfItemsFound

  // Equippable iff the item is none of the known non-gear categories.
  def isEquippable = !item.isGem &&
    !item.isCurrency &&
    !item.isMap &&
    !item.isQuest &&
    !item.isFragment &&
    !item.isHideoutItem &&
    !item.isJewel &&
    !item.isDivinationCard &&
    !item.isProphecy &&
    !item.isLeaguestone

  // Falls back to the type line when the item has no proper name.
  lazy val displayName = {
    var n = item.getName
    if (n.nullSafe.isEmpty || n.isEmpty) n = item.getTypeLine
    n
  }

  lazy val cleanTypeLine = item.getTypeLine

  // Builds the forum [linkItem ...] bb-code for this item's location.
  def forumLocationName = {
    //[linkItem location="Stash4" league="Rampage" x="0" y="0"]
    //[linkItem location="MainInventory" character="frostlarr" x="0" y="2"]
    for (x <- item.x.toOption; y <- item.y.toOption) yield {
      containerId match {
        case CharInvId(char) =>
          val slot = item.inventoryId.toOption.getOrElse("???")
          s"""[linkItem location="$slot" character="$char" x="$x" y="$y"]"""
        case StashTabIdx(idx) =>
          val league = item.league
          s"""[linkItem location="Stash${idx + 1}" league="$league" x="$x" y="$y"]"""
      }
    }
  }

  def locationId = item.locationId.toOption.getOrElse {
    console.error("Unable to find a location id", this.asJsAny, item)
    sys.error("Unable to find a location id")
  }

  //This location includes coordinates
  lazy val locAndCoords = {
    val l = Some(locationName)
    val s = containerId match {
      case StashTabIdx(i) => Some("s:" + i)
      case _ => None
    }
    // Coordinates are 0-based in the API; display them 1-based.
    val x = item.x.toOption.map(_ + 1).map("x:" + _)
    val y = item.y.toOption.map(_ + 1).map("y:" + _)
    List(l, s, x, y).flatten.mkString(" ")
  }

  // Short human-readable category used for grouping/sorting in the UI.
  lazy val typeName = {
    if (slots.isAmulet) "Amulet"
    else if (slots.isRing) "Ring"
    else if (slots.isHelmet) "Arm Helmet"
    else if (slots.isChest) "Arm Chest"
    else if (slots.isGloves) "Arm Gloves"
    else if (slots.isBoots) "Arm Boots"
    else if (slots.isBelt) "Arm Belt"
    else if (slots.isShield) "Arm Shield"
    else if (slots.isQuiver) "Quiver"
    else if (slots.isFlask) "Flask"
    else if (slots.isWeapon) "Wep " + properties.weaponType.toShortString
    else if (item.isHideoutItem) "Hideout"
    else if (item.isCurrency) "Currency"
    else if (item.isSupportGem) "Support Gem"
    else if (item.isSkillGem) "Skill Gem"
    else if (item.isMap) "Map"
    else if (item.isFragment) "Fragment"
    else if (item.isQuest) "QuestItem"
    else if (item.isJewel) "Jewel"
    else if (item.isDivinationCard) "DivCard"
    else if (item.isLeaguestone) "Leaguestone"
    else if (item.isProphecy) "SealedProphecy"
    else "UNKNOWN"
  }

  // "% increased ..." modifiers parsed off the item.
  object increased {
    val damage = Elements mutable 0.0
    var bleedingDamage = 0.0
    var burningDamage = 0.0
    var elementalAttackDamage = 0.0
    var stunDurationOnEnemies = 0.0
    var chillDurationOnEnemies = 0.0
    var attackSpeed = 0.0
    var globalCriticalStrikeMultiplier = 0.0
    var globalCriticalStrikeChance = 0.0
    var criticalStrikeChance = 0.0
    var criticalStrikeChanceForSpells = 0.0
    var armour = 0.0
    var evasion = 0.0
    var energyShield = 0.0
    var maximumEnergyShield = 0.0
    var quantityOfItemsFound = 0.0
    var rarityOfItemsFound = 0.0
    var movementSpeed = 0.0
    var blockAndStunRecovery = 0.0
    var spellDamage = 0.0
    var manaRegenerationRate = 0.0
    var elementalDamageWithWeapons = 0.0
    var lightRadius = 0.0
    var castSpeed = 0.0
    var projectileSpeed = 0.0
    var accuracyRating = 0.0
    var blockRecovery = 0.0
    var elementalDamage = 0.0
  }

  // Spell-damage view: generic "increased spell damage" plus the per-element bonus.
  object increasedSpell {
    def elemental = increased.spellDamage + increased.elementalDamage
    val elements = new Elements[Double] {
      override def physical: Double = increased.spellDamage + increased.damage.physical
      override def fire: Double = increased.spellDamage + increased.damage.fire
      override def cold: Double = increased.spellDamage + increased.damage.cold
      // Fixed: chaos and lightning were swapped here (chaos previously added
      // damage.lightning and lightning added damage.chaos).
      override def chaos: Double = increased.spellDamage + increased.damage.chaos
      override def lightning: Double = increased.spellDamage + increased.damage.lightning
    }
  }

  object reduced {
    var attributeRequirements = 0.0
    var enemyStunThreshold = 0.0
  }

  // Socket groups as parsed: one inner list per link group.
  var sockets: List[List[String]] = Nil

  // Display string: linked socket colors for gear, dominant attribute color for gems.
  lazy val socketColors = {
    if (sockets.nonEmpty) {
      sockets.map(_.mkString("-")).mkString(" ")
    } else if (item.isGem) {
      requirements.attribute.toMap.toList.filter(_._2 > 0).maxByOpt(_._2).map(_._1.color) match {
        case Some(color) => color.toOneLetter
        case None => "?"
      }
    } else {
      ""
    }
  }

  lazy val socketCnt: Int = sockets.map(_.size).sum
  lazy val maxLink = sockets.map(_.size).maxOpt.getOrElse(0)
  lazy val propLevel: Int = item.getLevel.getOrElse(0)
  lazy val mapLevel: Int = item.getMapLevel.getOrElse(0)
  lazy val countInStack: Int = item.getCountInStack.getOrElse(0)

  // One "most relevant number" for the item, by priority: stack size,
  // socket count, gem level, map level.
  lazy val misc: Double = {
    if (countInStack > 0) countInStack
    else if (socketCnt > 0) socketCnt
    else if (propLevel > 0) propLevel
    else if (mapLevel > 0) mapLevel
    else 0.0
  }

  object requirements {
    var level = 0.0
    var attribute = Attributes.mutable(0.0)
  }

  val damages = Elements of MinMaxDamage(0, 0)
  val damagesWithBows = Elements of MinMaxDamage(0, 0)
  val addDamagesToSpells = Elements of MinMaxDamage(0, 0)
  def addsDamageToSpellsTotal = addDamagesToSpells.all.map(_.avg).sum

  // "+X to ..." flat modifiers.
  object plusTo {
    val attribute = Attributes mutable 0.0
    val resistance = Elements mutable 0.0
    def totalResistance = resistance.all.sum
    def maxResistance = resistance.all.max
    val lifeAndMana = LifeAndMana mutable 0.0
    // Life from strength (0.5/str) and mana from intelligence (0.5/int) folded in.
    lazy val lifeAndManaWithStrInt = lifeAndMana.map2(_ + plusTo.attribute.strength * .5, _ + plusTo.attribute.intelligence * .5)
    var accuracyRating = 0.0
    // 2 accuracy per point of dexterity.
    lazy val accuracyRatingWithDex = accuracyRating + plusTo.attribute.dexterity * 2
    var evasionRating = 0.0
    var armour = 0.0
    var energyShield = 0.0
  }

  object leech { var physical = LifeAndMana mutable 0.0 }
  object onKill { var lifeAndMana = LifeAndMana mutable 0.0 }
  object onAttackHit { var lifeAndMana = LifeAndMana mutable 0.0 }

  // "+X to level of socketed ... gems" modifiers.
  object gemLevel {
    val element = Elements mutable 0.0
    val attribute = Attributes mutable 0.0
    var melee = 0.0
    var minion = 0.0
    var bow = 0.0
    var any = 0.0
    var support = 0.0
    // Note: `support` is intentionally excluded from addToAll and max below.
    def addToAll(n: Double) = {
      Elements.all.foreach(element +=(_, n))
      Attributes.all.foreach(attribute +=(_, n))
      melee += n
      minion += n
      bow += n
      any += n
    }
    def max = (List(melee, minion, bow) ::: attribute.all ::: element.all).max
  }

  // Aggregate/derived totals used for display and scoring.
  object total {
    lazy val dps = perElementDps.all.sum
    lazy val eDps = perElementDps.fire + perElementDps.cold + perElementDps.lightning
    lazy val avgDamage = properties.damages.all.map(_.avg).sum
    lazy val perElementDps = Elements calculatedWith { element =>
      if (slots.isWeapon) {
        properties.damages(element).avg * properties.attacksPerSecond
      } else {
        damages(element).avg
      }
    }
    lazy val avgDamages = Elements calculatedWith { element =>
      if (slots.isWeapon) {
        properties.damages(element).avg
      } else {
        damages(element).avg
      }
    }
    // Defensive totals fall back to flat "+to" mods when the base property is zero.
    def armour = properties.armour.oIf(_ == 0.0, x => plusTo.armour, x => x)
    def evasionRating = properties.evasionRating.oIf(_ == 0.0, x => plusTo.evasionRating, x => x)
    def energyShield = properties.energyShield.oIf(_ == 0.0, x => plusTo.energyShield, x => x)
    def globalEnergyShield = increased.maximumEnergyShield + plusTo.attribute.intelligence * .2
    def critChance = (100 + increased.globalCriticalStrikeChance) / 100.0 *
      properties.criticalStrikeChance
  }

  // Which equipment slot the item belongs to; flags set during parsing.
  object slots {
    def is1H: Boolean = properties.weaponType.is1H
    def is2H: Boolean = properties.weaponType.is2H
    def isWeapon: Boolean = properties.weaponType.isWeapon
    def isFlask = item.isFlask
    var isSpiritShield = false
    var isShield = false
    var isHelmet = false
    var isChest = false
    var isGloves = false
    var isAmulet = false
    var isRing = false
    var isBelt = false
    var isBoots = false
    var isQuiver = false
  }

  // Base item properties as shown on the item itself.
  object properties {
    var weaponType: WeaponType = WeaponTypes.NoWeaponType
    var armour = 0.0
    var energyShield = 0.0
    var evasionRating = 0.0
    val damages = Elements of MinMaxDamage(0, 0)
    var quality = 0.0
    var criticalStrikeChance = 0.0
    var attacksPerSecond = 0.0
    var chanceToBlock = 0.0
    var weaponRange = 0.0
    var stackSize = 0.0
  }

  var reflectsPhysicalDamageToAttackers = 0.0
  var blockChance = 0.0
  var numExplicitModSockets = 0.0
  var minusToManaCostOfSkills = 0.0
  var arrowPierceChance = 0.0
  var bleedingChance = 0.0
  var freezeChance = 0.0
  var shockChance = 0.0
  val regeneratedPerSecond = LifeAndMana mutable 0.0

  // Flask-specific modifiers.
  object flask {
    object increased {
      var lifeRecoveryRate = 0.0
      var effectDuration = 0.0
      var manaRecoveryRate = 0.0
      var flaskRecoverySpeed = 0.0
      var chargesGained = 0.0
      var chargeRecovery = 0.0
      var stunRecovery = 0.0
      var recoverySpeed = 0.0
      var amountRecovered = 0.0
      var recoveryOnLowLife = 0.0
      var lifeRecovered = 0.0
      var manaRecovered = 0.0
      var armour = 0.0
      var evasion = 0.0
      var movementSpeed = 0.0
    }
    object reduced {
      var amountRecovered = 0.0
      var recoverySpeed = 0.0
      var flaskChargesUsed = 0.0
    }
    var extraCharges = 0.0
    var amountAppliedInstantly = 0.0
    var chargesOnCriticalStrikeTaken = 0.0
    var chargesOnCriticalStrikeGiven = 0.0
    var lifeFromMana = 0.0
    var manaFromLife = 0.0
    var additionalResistances = 0.0
    var lifeRecoveryToMinions = 0.0
    var removesFrozenAndChilled = false
    var removesShocked = false
    var removesBurning = false
    var removesBleeding = false
    var removesCurses = false
    var removesPoison = false
    //    var frozenAndChilledImmunity = false
    //    var shockedImmunity = false
    //    var burningImmunity = false
    //    var bleedingImmunity = false
    //    var cursesImmunity = false
    var knockback = false
    var instantRecovery = false
    var instantRecoveryLowLife = false
  }
}
| mihailim/looty | looty/src/main/scala/looty/model/ComputedItem.scala | Scala | gpl-2.0 | 12,647 |
import snunit._
import upickle.default._
/** JSON payload rendered by the /json endpoint. */
case class Message(message: String)

object Message {
  // upickle serializer derived at compile time via macroRW.
  implicit val messageRW: ReadWriter[Message] = macroRW[Message]
}
/**
 * Benchmark HTTP server entry point: serves GET /plaintext and GET /json,
 * returning 404 for anything else.
 */
object Main {
  def main(args: Array[String]): Unit = {
    AsyncServerBuilder()
      .withRequestHandler(req =>
        if (req.method == Method.GET && req.path == "/plaintext")
          req.send(
            statusCode = StatusCode.OK,
            content = "Hello, World!",
            headers = Seq("Content-Type" -> "text/plain")
          )
        else if (req.method == Method.GET && req.path == "/json")
          // NOTE(review): no header is set here, unlike the plaintext branch;
          // presumably the `stream` content overload supplies
          // "Content-Type: application/json" — confirm against snunit's API.
          req.send(
            statusCode = StatusCode.OK,
            content = stream(Message("Hello, World!")),
            headers = Seq.empty
          )
        else
          req.send(
            statusCode = StatusCode.NotFound,
            content = "Not found",
            headers = Seq("Content-Type" -> "text/plain")
          )
      )
      .build()
  }
}
| sumeetchhetri/FrameworkBenchmarks | frameworks/Scala/snunit/src/main/scala/Main.scala | Scala | bsd-3-clause | 952 |
/*
* Copyright 2020 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.server.scalapb
import com.google.common.collect.ImmutableSet
import com.linecorp.armeria.internal.server.grpc.GrpcDocServicePlugin
import com.linecorp.armeria.server.docs.{DocServiceFilter, DocServicePlugin, ServiceSpecification}
import com.linecorp.armeria.server.{Service, ServiceConfig}
import java.util.{Map => JMap, Set => JSet}
import scalapb.GeneratedMessage
import scalapb.json4s.Printer
/**
 * A [[com.linecorp.armeria.server.docs.DocServicePlugin]] implementation that supports
 * [[scalapb.GeneratedMessage]] for [[com.linecorp.armeria.server.grpc.GrpcService]].
 */
class ScalaPbGrpcDocServicePlugin extends DocServicePlugin {
  // All spec/doc-string generation is delegated to the standard gRPC plugin;
  // this class only adds ScalaPB-aware example-request serialization.
  private val grpcDocServicePlugin: GrpcDocServicePlugin = new GrpcDocServicePlugin
  // Include default-valued fields so example requests show the full message shape.
  private val printer: Printer = new Printer().includingDefaultValueFields

  override def name = "armeria-scalapb"

  override def supportedServiceTypes(): JSet[Class[_ <: Service[_, _]]] =
    grpcDocServicePlugin.supportedServiceTypes

  override def generateSpecification(
      serviceConfigs: JSet[ServiceConfig],
      filter: DocServiceFilter): ServiceSpecification =
    grpcDocServicePlugin.generateSpecification(serviceConfigs, filter)

  override def loadDocStrings(serviceConfigs: JSet[ServiceConfig]): JMap[String, String] =
    grpcDocServicePlugin.loadDocStrings(serviceConfigs)

  override def supportedExampleRequestTypes: JSet[Class[_]] =
    ImmutableSet.of(classOf[GeneratedMessage])

  // Serializes a ScalaPB message to JSON for display in the doc service UI.
  override def serializeExampleRequest(serviceName: String, methodName: String, exampleRequest: Any): String =
    printer.print(exampleRequest.asInstanceOf[GeneratedMessage])
}
| line/armeria | scalapb/scalapb_2.13/src/main/scala/com/linecorp/armeria/server/scalapb/ScalaPbGrpcDocServicePlugin.scala | Scala | apache-2.0 | 2,274 |
package org.scalacoin.script.bitwise
import org.scalatest.{FlatSpec, MustMatchers}
/**
* Created by chris on 1/8/16.
*/
class BitwiseOperationsFactoryTest extends FlatSpec with MustMatchers with BitwiseOperationsFactory {
  // fromString is provided by the mixed-in BitwiseOperationsFactory under test.
  "BitwiseOperationsFactory" must "match strings with bitwise operations" in {
    // Known opcode names resolve to their singleton script operations.
    fromString("OP_EQUAL") must be (Some(OP_EQUAL))
    fromString("OP_EQUALVERIFY") must be (Some(OP_EQUALVERIFY))
    // Unrecognized tokens yield None rather than throwing.
    fromString("RANDOM") must be (None)
  }
}
| TomMcCabe/scalacoin | src/test/scala/org/scalacoin/script/bitwise/BitwiseOperationsFactoryTest.scala | Scala | mit | 468 |
package com.github.tarao
package bullet
/**
 * Sample domain model (cars, engines, wheels) used by the bullet tests.
 *
 * Invariants preserved from the original implementation:
 *  - only cars with an odd id have an engine;
 *  - ResolveEngine assigns engine id 1000 + car id;
 *  - CarHasAnEngine assigns engine ids positionally (the car at index i gets 1001 + i,
 *    even-id cars still advance the counter);
 *  - every car has exactly four wheels.
 */
object Example {

  type CarId = Long
  type EngineId = Long
  type WheelPosition = Int

  case class Car(id: CarId, name: String)
  case class Engine(id: EngineId, carId: CarId)
  case class Crankshaft(engineId: EngineId)
  case class Wheel(position: WheelPosition, carId: CarId)
  object Wheel {
    val frontLeft: WheelPosition = 1
    val frontRight: WheelPosition = 2
    val rearLeft: WheelPosition = 3
    val rearRight: WheelPosition = 4
  }

  type CarWithEngine = (Car, Engine)
  type CarWithAll = (Car, Engine, Seq[Wheel])

  /** Resolves a car's engine: odd-id cars map to Engine(1000 + id, id); even ids resolve to nothing. */
  class ResolveEngine(car: Car) extends Monad.Resolve[Engine, Car](car) {
    override protected[bullet]
    def run(ms: Seq[Monad.Resolve[Engine, Car]]): Seq[Engine] =
      ms.collect {
        case m if m.value.id % 2 != 0 => Engine(1000L + m.value.id, m.value.id)
      }
  }
  object ResolveEngine {
    def apply(car: Car): Monad.Resolve[Engine, Car] = new ResolveEngine(car)
  }

  /** Resolves the four wheels of each car. */
  class ResolveWheels(car: Car) extends Monad.Resolve[Seq[Wheel], Car](car) {
    override protected[bullet]
    def run(ms: Seq[Monad.Resolve[Seq[Wheel], Car]]): Seq[Seq[Wheel]] =
      ms.map { m =>
        val car = m.value
        Seq(
          Wheel(Wheel.frontLeft, car.id),
          Wheel(Wheel.frontRight, car.id),
          Wheel(Wheel.rearLeft, car.id),
          Wheel(Wheel.rearRight, car.id)
        )
      }
  }
  object ResolveWheels {
    def apply(car: Car): Monad.Resolve[Seq[Wheel], Car] = new ResolveWheels(car)
  }

  /**
   * Odd-id cars have an engine; ids are assigned positionally so the car at index i
   * receives engine id 1001 + i (even-id cars are skipped but still consume an id).
   */
  trait CarHasAnEngine extends HasA[Car, Engine] {
    def map(cars: Seq[Car]): Seq[Engine] =
      cars.zipWithIndex.collect {
        case (car, i) if car.id % 2 != 0 => Engine(1001L + i, car.id)
      }
  }
  object CarHasAnEngine extends CarHasAnEngine

  /** Every engine has exactly one crankshaft, keyed by the engine id. */
  trait EngineHasACrankshaft extends HasA[Engine, Crankshaft] {
    def map(engines: Seq[Engine]): Seq[Crankshaft] =
      engines.map { e => Crankshaft(e.id) }
  }
  object EngineHasACrankshaft extends EngineHasACrankshaft

  /** Joins a car with its engine by car id. */
  trait CarJoinsAnEngine extends CarHasAnEngine
      with Join[CarWithEngine, CarId, Car, Engine] {
    def leftKey(car: Car): CarId = car.id
    def rightKey(engine: Engine): CarId = engine.carId
    def merge(car: Car, engine: Engine) = (car, engine)
  }
  object CarJoinsAnEngine extends CarJoinsAnEngine

  /** Joins a (car, engine) pair with the car's four wheels. */
  trait CarJoinsWheels
      extends Join[CarWithAll, CarId, CarWithEngine, (CarId, Seq[Wheel])] {
    def map(cars: Seq[CarWithEngine]): Seq[(CarId, Seq[Wheel])] =
      cars.map { car =>
        val carId = car._1.id
        (carId, Seq(
          Wheel(Wheel.frontLeft, carId),
          Wheel(Wheel.frontRight, carId),
          Wheel(Wheel.rearLeft, carId),
          Wheel(Wheel.rearRight, carId)
        ))
      }
    def leftKey(car: CarWithEngine): CarId = car._1.id
    def rightKey(wheels: (CarId, Seq[Wheel])): CarId = wheels._1
    def merge(car: CarWithEngine, wheels: (CarId, Seq[Wheel])): CarWithAll =
      (car._1, car._2, wheels._2)
  }
  object CarJoinsWheels extends CarJoinsWheels

  /** Enrichment syntax for navigating between cars, engines and wheels. */
  object Implicits {
    implicit class CarRelation(val car: Car) extends AnyVal {
      def toEngine = HasA.Monadic(car, CarHasAnEngine)
      def withEngine = Join.Monadic(car, CarJoinsAnEngine)
    }
    implicit class CarWithEngineRelation(val car: CarWithEngine) extends AnyVal {
      def withWheels = Join.Monadic(car, CarJoinsWheels)
    }
    implicit class EngineRelation(val engine: Engine) extends AnyVal {
      def toCrankshaft = HasA.Monadic(engine, EngineHasACrankshaft)
    }
  }
}
| tarao/bullet-scala | src/test/scala/com/github/tarao/bullet/Example.scala | Scala | mit | 3,559 |
package memnets.ui
import java.net.URL
import memnets.model._
import memnets.utils._
import scala.beans._
object Skin extends Logging {
  /** Creates a skin via the implicit factory and applies the optional customizer before returning it. */
  def apply(init: SkinType => Unit = _ => ())(implicit sf: SF): SkinType = {
    val created = sf.create()
    init(created)
    created
  }
}
/**
 * Base skin: maps model elements to toolkit-specific UI objects and colors.
 * `T` is the toolkit node type and `COL` the toolkit color type; concrete subclasses
 * supply the color conversion (`convertToColHelper`) and the gradient/color-map factories.
 */
abstract class Skin[T, COL <: AnyRef] extends Comparable[Skin[T, _]] with Dsl with Logging {
  type UI <: TickableUI[T]
  // Converts a Colorf to the toolkit color type, caching the result on the Colorf itself.
  implicit def convertToCol(colorf: Colorf): COL = {
    if (colorf.converted == null)
      colorf.converted = convertToColHelper(colorf)
    colorf.converted.asInstanceOf[COL]
  }
  protected var _yGradientMap: YGradientMap[COL] = _
  protected var _yColorMap: Option[YColorMap[COL]] = None
  protected var _colorMap: Option[ColorMap[COL]] = None
  // Bean-style configuration knobs (exposed for Java/tooling access).
  @BeanProperty var name = ""
  @BeanProperty var backColor: Colorf = defaultBackCol
  @BeanProperty var backImage = SkinImage.TWO
  @BooleanBeanProperty var backImageOn = false
  @BooleanBeanProperty var gradientCustom = true
  @BeanProperty var vizLevel = Viz.Fade
  @BeanProperty var colorMapType = ColorMapType.HeatMap
  @BeanProperty var layerVizType = LayerVizType.Layer
  @BeanProperty var gridVizType = GridVizType.Image
  @BeanProperty var meshType = MeshType.Fill
  @BeanProperty var meshDivs = 64
  @BeanProperty var gradientHints = GradientHints()
  @BooleanBeanProperty var bestQuality: Boolean = true
  // NOTE(review): @BooleanBeanProperty on non-Boolean fields (Int / VariableType) below looks
  // like a copy-paste slip — @BeanProperty was probably intended; confirm before changing the
  // generated getter names.
  @BooleanBeanProperty var sparseLayer: Int = 1024
  @BooleanBeanProperty var sparseType: VariableType = VariableType.Continuous
  @BooleanBeanProperty var sparseHints: Boolean = false
  @BeanProperty var sparseWidth: Int = 1000
  @BeanProperty var sparseHeight: Int = 300
  @BooleanBeanProperty var sparseGlass: Boolean = true
  @BooleanBeanProperty var sparseEffect: Boolean = false
  @BeanProperty var chartSampling = 2
  @BooleanBeanProperty var chartOn = true
  @BooleanBeanProperty var chartDynamic = true
  @BooleanBeanProperty var chartGlassOn = true
  @BooleanBeanProperty var signalsOn = true
  @BooleanBeanProperty var titleOn = true
  @BooleanBeanProperty var fpsOn = true
  @BooleanBeanProperty var topPaneOn = true
  @BooleanBeanProperty var useElementDpad = false
  @BeanProperty var zoom = 1.0
  @BeanProperty var zoomAuto = true
  @BeanProperty var layoutAuto = true
  @BeanProperty var edgeScale = 1.0
  // Toolkit-converted background color (via the implicit convertToCol).
  def backColorConverted: COL = backColor
  // Selects the active color map according to colorMapType; CustomMap falls back to
  // defaultColorMap when no custom map has been installed.
  def colorMap: ColorMap[COL] = {
    import ColorMapType._
    colorMapType match {
      case HeatMap =>
        heatMap
      case OpacityMap =>
        opacityMap
      case GrayMap =>
        grayMap
      case InvertedGrayMap =>
        invertedGrayMap
      case CustomMap =>
        _colorMap.getOrElseP(defaultColorMap)
    }
  }
  // Installing a custom map switches colorMapType to CustomMap.
  def colorMap_=(map: Option[ColorMap[COL]]): Unit = {
    _colorMap = map
    if (map.isDefined)
      colorMapType = ColorMapType.CustomMap
  }
  def colorMapName: String = colorMapType.name()
  def yColorMap: YColorMap[COL] = _yColorMap.getOrElseP(_yGradientMap)
  // NOTE(review): comment says ascending, but other.rank - rank puts higher ranks first
  // (descending) — verify intent.
  def compareTo(other: Skin[T, _]): Int = other.rank - rank // ascending
  // Central factory: dispatches on the element's runtime type to the per-type create methods.
  def create(elem: Element): Option[UI] = {
    if (elementFilter(elem)) {
      elem match {
        case y: Y => createY(y)
        case s: Signal => createSignal(s)
        case f: F if elementFilter(f.owner) => createF(f)
        case yg: YGoal => createYGoal(yg)
        case g: Goal => createGoal(g)
        case lg: Plot => createPlot(lg)
        case p: PhasePlot => createPhasePlot(p)
        case gr: Grd => createGrid(gr)
        case l: LayerLike if !l.ui.plot.user => createLayer(l)
        case sc: Scene3D => create3d(sc)
        case o: Osc => createOsc(o)
        case tc: Tracer => createTracer(tc)
        case tr: Trigger => createTrigger(tr)
        case default => createHelper(elem)
      }
    } else
      None
  }
  /** true if should skin should create */
  def elementFilter(elem: Element): Boolean = elem.ui.viz >= vizLevel
  /** option to create whole system at once. helpful if create edge objects for small graph */
  def createSystem(system: DynamicSystem): Iterable[UI] = Iterable.empty
  // Fallback for element types with no dedicated create method; subclasses may override.
  def createHelper(elem: Element): Option[UI] = None
  def createY(y: Y): Option[UI]
  def createOsc(o: Osc): Option[UI] = None
  def createTrigger(tr: Trigger): Option[UI] = None
  def createTracer(tc: Tracer): Option[UI] = None
  def createF(f: F): Option[UI] = None
  def createYGoal(yg: YGoal): Option[UI] = None
  def createGoal(g: Goal): Option[UI] = None
  def create3d(sc: Scene3D): Option[UI] = None
  def createSignal(s: Signal): Option[UI] = None
  def createPlot(l: Plot): Option[UI] = None
  def createPhasePlot(pl: PhasePlot): Option[UI] = None
  def createImage(im: GridData): Option[UI] = None
  def createMesh(gd: GridData, meshType: MeshType = MeshType.Fill): Option[UI] = None
  // Renders a grid either as an image or a mesh, per gridVizType.
  def createGrid(gr: Grd): Option[UI] = {
    import GridVizType._
    gridVizType match {
      case Image =>
        createImage(gr)
      case Mesh =>
        createMesh(gr, meshType)
    }
  }
  // Renders a layer as a plot, a grid, or a sliding grid, per layerVizType.
  def createLayer(lay: LayerLike): Option[UI] = {
    layerVizType match {
      case LayerVizType.Layer =>
        createPlot(lay.ui.plot)
      case LayerVizType.Grid =>
        createGrid(Grid(lay))
      case LayerVizType.Sliding =>
        createGrid(new Sliding(lay))
    }
  }
  /**
   * is model.sparse already being visualized?
   * default checks if Plot.user or elements has YGrid.
   * if not shown, UI may show sparse as layer
   */
  def hasSparseViz(system: DynamicSystem): Boolean = {
    system.sparse.ui.plot.user || system.elements.exists(x => x.ui.isShown && x.isInstanceOf[YGrid])
  }
  // Default: no image resources; subclasses map SkinImage values to resource paths.
  def imageLookup(i: SkinImage): Option[String] = {
    logger.debug(s"skinImage= $i has no resource")
    None
  }
  def imageLookupURL(i: SkinImage): Option[URL] = imageLookup(i).map(_.asURL)
  // One-time setup against the model: picks a zoom, builds the gradient map and
  // pre-converts the background color.
  def init(system: DynamicSystem): Unit = {
    // `~` presumably approximate-equality from Dsl (only auto-zoom when zoom untouched) — confirm.
    if (1.0 ~ zoom && zoomAuto)
      zoom = zoomDefault(system)
    _yGradientMap = createYGradientMap(system)
    for (hints <- system.sparse.ui.gradient) {
      logger.trace("applying custom sparse hints")
      gradientHints = hints
    }
    _yGradientMap.hints = gradientHints
    backColor.converted = convertToCol(backColor)
  }
  /** adjust viz based on size or other props... */
  def isSmall(system: DynamicSystem): Boolean = {
    system.layers.isEmpty && system.variables.count(x => x.ui.isShown) <= 10
  }
  def isSuitable(system: DynamicSystem): Boolean = true
  // Positions elements when the model does not supply its own layout.
  def layout(system: DynamicSystem): Unit = {
    // todo : issue here w/ hidden layers...
    if (system.onLayout.isDefined)
      system.onLayout.get.layout()
    else if (layoutAuto) {
      // NOTE(review): `layers` is an Iterator but is queried repeatedly below
      // (isEmpty/nonEmpty/size/zipWithIndex), which consumes it — likely should be .toList; verify.
      val layers = system.layers.iterator.filter(x => x.ui.isShown && !x.ui.plot.user && x.ui.plot.autoLayout)
      val layouts = system.elements.iterator.filter(_.isInstanceOf[Layout]).map(_.asInstanceOf[Layout]).toList
      val hasPhase = system.elements.exists(_.isInstanceOf[PhasePlot])
      if (layouts.nonEmpty) {
        logger.debug("found layouts to run")
        for (lay <- layouts)
          lay.layout()
      } else if (system.layers.isEmpty && system.variables.length == 2 && hasPhase) {
        logger.debug("default 2nd order layout")
        val x = system.variables(0)
        val y = system.variables(1)
        x.ui.loc = Loc().left(140).down(40)
        y.ui.loc = x.ui.loc.left(160)
      } else if (layers.isEmpty && isSmall(system) && !hasPhase) {
        logger.debug("default small net layout")
        system.variables.toList.center(Loc())
      } else if (layers.nonEmpty && system.variables.isEmpty) {
        logger.debug("default layer layout")
        val h = Display.height
        val ySpacing = h / (layers.size + 1)
        for ((lay, i) <- layers.zipWithIndex) {
          lay.ui.loc = Loc(x = Loc().x, y = h - ySpacing * (i + 1) + lay.ui.plot.height / 2.0)
        }
      } else if (layers.isEmpty) {
        // try to use structure to help layout
        // todo....
      }
    }
  }
  def rank = 0
  /** special case when Skin has state... */
  def remove(elem: Element): Unit = {}
  // Clamps meshDivs to a sane minimum.
  def safeMeshDivs: Int = {
    val divs = meshDivs
    if (divs < 4) 4 else divs
  }
  // skins should be largely stateless, but here just in case
  def systemBuilt(): Unit = {}
  def zoomDefault(system: DynamicSystem): Double = if (!system.game && isSmall(system)) 1.3 else 1.0
  override def toString = s"Skin[name= $name, rank= $rank]"
  // Java
  def getColorMapName: String = colorMapName
  // Subclass hook: converts a Colorf into the toolkit's color type.
  protected def convertToColHelper(colorf: Colorf): COL
  // Random dark gray so successive runs look slightly different.
  protected def defaultBackCol: Colorf = {
    Colorf.gray(0.05 + 0.1 * Math.random())
    /* if want a bit more...
    val col = Colorf.hsb(Random.nextInt(360))
    val scale = 0.05 + 0.1*Math.random()
    Colorf(scale * col.r, scale * col.g, scale * col.b, 1.0)
     */
  }
  protected def createYGradientMap(model: DynamicSystem): YGradientMap[COL]
  protected def defaultColorMap: ColorMap[COL] = heatMap
  protected def heatMap: ColorMap[COL]
  protected def opacityMap: ColorMap[COL]
  protected def grayMap: ColorMap[COL]
  protected def invertedGrayMap: ColorMap[COL]
}
| MemoryNetworks/memnets | api/src/main/scala/memnets/ui/Skin.scala | Scala | apache-2.0 | 9,267 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.system.hdfs
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.samza.system.hdfs.writer.HdfsWriter
import org.apache.samza.system.{OutgoingMessageEnvelope, SystemProducer}
import org.apache.samza.util.{Logging, TimerUtils}
import scala.collection.mutable.{Map => MMap}
/**
 * A Samza SystemProducer that writes outgoing message envelopes to HDFS through
 * per-source [[HdfsWriter]] instances (one writer per registered source).
 *
 * All writer access is serialized on an internal lock, so one producer instance
 * may be shared by multiple threads. A failed flush/send closes the source's
 * writer and rethrows, letting the container handle the error.
 */
class HdfsSystemProducer(
  systemName: String, clientId: String, config: HdfsConfig, metrics: HdfsSystemProducerMetrics,
  val clock: () => Long = () => System.currentTimeMillis) extends SystemProducer with Logging with TimerUtils {
  // Dedicated HDFS handle for this producer (newInstance avoids the shared JVM-wide cache).
  val dfs = FileSystem.newInstance(new Configuration(true))
  // One writer per registered source.
  val writers: MMap[String, HdfsWriter[_]] = MMap.empty[String, HdfsWriter[_]]
  private val lock = new Object //synchronization lock for thread safe access

  /** No-op: writers are created lazily in register(). */
  def start(): Unit = {
    info("entering HdfsSystemProducer.start() call for system: " + systemName + ", client: " + clientId)
  }

  /** Closes every writer and the underlying file system handle. */
  def stop(): Unit = {
    info("entering HdfsSystemProducer.stop() for system: " + systemName + ", client: " + clientId)
    lock.synchronized {
      writers.values.foreach(_.close)
      dfs.close
    }
  }

  /** Creates a dedicated HdfsWriter for the given source. */
  def register(source: String): Unit = {
    info("entering HdfsSystemProducer.register(" + source + ") " +
      "call for system: " + systemName + ", client: " + clientId)
    lock.synchronized {
      writers += (source -> HdfsWriter.getInstance(dfs, systemName, config))
    }
  }

  /**
   * Flushes the writer for the given source.
   * Throws NoSuchElementException if the source was never registered;
   * on any failure the writer is closed and the exception rethrown.
   */
  def flush(source: String): Unit = {
    debug("entering HdfsSystemProducer.flush(" + source + ") " +
      "call for system: " + systemName + ", client: " + clientId)
    metrics.flushes.inc
    lock.synchronized {
      try {
        updateTimer(metrics.flushMs) {
          writers(source).flush
        }
      } catch {
        case e: Exception =>
          metrics.flushFailed.inc
          warn("Exception thrown while client " + clientId + " flushed HDFS out stream, msg: " + e.getMessage)
          debug("Detailed message from exception thrown by client " + clientId + " in HDFS flush: ", e)
          writers(source).close
          throw e
      }
    }
    metrics.flushSuccess.inc
  }

  /**
   * Writes one envelope via the source's writer.
   * Throws NoSuchElementException if the source was never registered;
   * on any failure the writer is closed and the exception rethrown.
   */
  def send(source: String, ome: OutgoingMessageEnvelope) = {
    debug("entering HdfsSystemProducer.send(source = " + source + ", envelope) " +
      "call for system: " + systemName + ", client: " + clientId)
    metrics.sends.inc
    lock.synchronized {
      try {
        updateTimer(metrics.sendMs) {
          writers(source).write(ome)
        }
      } catch {
        case e: Exception =>
          metrics.sendFailed.inc
          warn("Exception thrown while client " + clientId + " wrote to HDFS, msg: " + e.getMessage)
          debug("Detailed message from exception thrown by client " + clientId + " in HDFS write: ", e)
          writers(source).close
          throw e
      }
    }
    metrics.sendSuccess.inc
  }
}
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.convert.json
import com.google.gson.{JsonElement, JsonNull, JsonObject, JsonPrimitive}
import org.locationtech.jts.geom._
import org.locationtech.jts.geom.impl.CoordinateArraySequence
import org.locationtech.geomesa.utils.text.WKTUtils
/**
 * Mix-in for parsing GeoJSON elements (features, feature collections and geometries)
 * out of Gson JSON trees into JTS geometries.
 */
trait GeoJsonParsing {
  import GeoJsonParsing._
  import scala.collection.JavaConverters._
  // Shared JTS factory for all geometry construction (default precision model/SRID).
  private val factory = new GeometryFactory
  /**
   * Determines if the element is a geojson feature collection
   *
   * @param el element
   * @return
   */
  def isFeatureCollection(el: JsonElement): Boolean = isType(el, FeatureCollectionType)
  /**
   * Determines if the element is a geojson feature
   *
   * @param el element
   * @return
   */
  def isFeature(el: JsonElement): Boolean = isType(el, FeatureType)
  /**
   * Parse a geojson feature collection element
   *
   * @see `isFeatureCollection` to determine if this is likely to succeed
   *
   * @param el element
   * @return
   */
  def parseFeatureCollection(el: JsonElement): Seq[GeoJsonFeature] = {
    val features = el.getAsJsonObject.get(FeaturesKey).getAsJsonArray
    Seq.tabulate(features.size)(i => parseFeature(features.get(i)))
  }
  /**
   * Parse a geojson feature element
   *
   * @see `isFeature` to determine if this is likely to succeed
   *
   * @param el element
   * @return
   */
  def parseFeature(el: JsonElement): GeoJsonFeature = {
    val obj = el.getAsJsonObject
    // obj.get(IdKey) may be null (absent member); null falls through to the None branch.
    val id = obj.get(IdKey) match {
      case s: JsonPrimitive if s.isString => Some(s.getAsString)
      case _ => None
    }
    val geometry = parseGeometry(obj.get(GeometryKey))
    // Properties are flattened; keys are json-path expressions rooted at $['properties'].
    val props: Map[String, String] = obj.get(PropertiesKey) match {
      case o: JsonObject => parseProperties(o, s"$$['$PropertiesKey']")
      case _ => Map.empty
    }
    GeoJsonFeature(id, geometry, props)
  }
  /**
   * Parse a geometry element
   *
   * Accepts a GeoJSON geometry object, a WKT string primitive, or JSON null
   * (which intentionally yields a null Geometry).
   *
   * @param el element
   * @return
   */
  def parseGeometry(el: JsonElement): Geometry = el match {
    case o: JsonObject => parseGeometryObject(o)
    case o: JsonPrimitive => WKTUtils.read(o.getAsString)
    case _: JsonNull => null.asInstanceOf[Geometry]
    case _ => throw new IllegalArgumentException(s"Unknown geometry type: $el")
  }
  /**
   * Parse a geometry object
   *
   * Dispatches on the (case-insensitive) GeoJSON "type" member.
   *
   * @param obj object
   * @return
   */
  private def parseGeometryObject(obj: JsonObject): Geometry = {
    obj.get(TypeKey).getAsString.toLowerCase match {
      case "point" =>
        factory.createPoint(toPointCoords(obj.get(CoordinatesKey)))
      case "linestring" =>
        factory.createLineString(toCoordSeq(obj.get(CoordinatesKey)))
      case "polygon" =>
        toPolygon(obj.get(CoordinatesKey))
      case "multipoint" =>
        factory.createMultiPoint(toCoordSeq(obj.get(CoordinatesKey)))
      case "multilinestring" =>
        val coords = obj.get(CoordinatesKey).getAsJsonArray.asScala
            .map(c => factory.createLineString(toCoordSeq(c))).toArray
        factory.createMultiLineString(coords)
      case "multipolygon" =>
        factory.createMultiPolygon(obj.get(CoordinatesKey).getAsJsonArray.asScala.map(toPolygon).toArray)
      case "geometrycollection" =>
        factory.createGeometryCollection(obj.get(GeometriesKey).getAsJsonArray.asScala.map(parseGeometry).toArray)
      case unknown =>
        throw new NotImplementedError(s"Can't parse geometry type of $unknown")
    }
  }
  // Converts a coordinate array [x, y] or [x, y, z] into a JTS Coordinate.
  private def toPointCoords(el: JsonElement): Coordinate = {
    el.getAsJsonArray.asScala.map(_.getAsDouble).toSeq match {
      case Seq(x, y) => new Coordinate(x, y)
      case Seq(x, y, z) => new Coordinate(x, y, z)
      case s => throw new IllegalArgumentException(s"Invalid point - expected 2 or 3 values, got ${s.mkString(", ")}")
    }
  }
  // Converts an array of coordinate arrays into a JTS coordinate sequence.
  private def toCoordSeq(el: JsonElement): CoordinateSequence =
    new CoordinateArraySequence(el.getAsJsonArray.asScala.map(_.getAsJsonArray).map(toPointCoords).toArray)
  // First ring is the exterior shell; any remaining rings are interior holes.
  private def toPolygon(el: JsonElement): Polygon = {
    val rings = el.getAsJsonArray.iterator.asScala.map(c => factory.createLinearRing(toCoordSeq(c)))
    val shell = rings.next
    if (rings.hasNext) {
      factory.createPolygon(shell, rings.toArray)
    } else {
      factory.createPolygon(shell)
    }
  }
  // Recursively flattens a properties object to (json-path -> string) pairs;
  // non-primitive, non-object members (e.g. arrays) are skipped.
  private def parseProperties(o: JsonObject, path: String): Map[String, String] = {
    val builder = Map.newBuilder[String, String]
    o.entrySet().asScala.foreach { entry =>
      val p = s"$path['${entry.getKey}']"
      entry.getValue match {
        case e: JsonPrimitive => builder += p -> e.getAsString
        case e: JsonObject => builder ++= parseProperties(e, p)
        case _ => // no-op
      }
    }
    builder.result()
  }
  // True when the element is an object whose "type" member equals t.
  private def isType(el: JsonElement, t: String): Boolean = el match {
    case o: JsonObject => Option(o.get(TypeKey)).exists(e => e.isJsonPrimitive && e.getAsString == t)
    case _ => false
  }
}
object GeoJsonParsing {
  /**
   * Parsed geojson feature element
   *
   * @param id id, if present
   * @param geom geometry
   * @param properties 'properties' values - key is json path to value, value is a primitive converted to a string
   *                   nested elements will be flattened out, with a path pointing into the element
   */
  case class GeoJsonFeature(id: Option[String], geom: Geometry, properties: Map[String, String])
  // GeoJSON "type" discriminator values.
  private val FeatureType = "Feature"
  private val FeatureCollectionType = "FeatureCollection"
  // JSON member names used by the parser.
  private val TypeKey = "type"
  private val FeaturesKey = "features"
  private val CoordinatesKey = "coordinates"
  private val PropertiesKey = "properties"
  private val GeometryKey = "geometry"
  private val GeometriesKey = "geometries"
  private val IdKey = "id"
}
| locationtech/geomesa | geomesa-convert/geomesa-convert-json/src/main/scala/org/locationtech/geomesa/convert/json/GeoJsonParsing.scala | Scala | apache-2.0 | 6,197 |
package rtmp.status
import rtmp.amf.Serializer
/**
* Created by vim on 5/23/14.
*/
/**
 * RTMP "NetStream.Play.Start" status event for the given stream, sent to the
 * client identified by clientID when playback begins.
 */
class PlayStart(val streamName:String, val clientID:Int) extends Status(true, "NetStream.Play.Start", "status", s"Started playing $streamName.") {
  override def serialize(serializer: Serializer): Unit = {
    // Base status fields first, then stream/client identification.
    super.serialize(serializer)
    serializer.writeProperty("details", streamName)
    serializer.writeProperty("clientid", clientID)
  }
}
| vimvim/AkkaTest | src/main/scala/rtmp/status/PlayStart.scala | Scala | agpl-3.0 | 437 |
package actors
import akka.actor.{Props, ActorRef, Actor}
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import scala.collection.JavaConverters._
import play.Play;
import play.libs.Json;
import play.mvc.WebSocket;
/**
 * The broker between the WebSocket and the StockActor(s). The UserActor holds the
 * connection and pushes serialized JSON data to the client.
 */
class UserActor(out: WebSocket.Out[JsonNode]) extends Actor {

  // Symbols every new connection watches by default, read from application configuration.
  val defaultStocks: List[String] = Play.application().configuration().getStringList("default.stocks").asScala.toList

  init

  // Subscribes this actor to updates for each of the default stock symbols.
  def init: Unit = {
    val stocksActor = StocksActor.stocksActor
    for (symbol <- defaultStocks) {
      stocksActor.tell(new WatchStock(symbol), self)
    }
  }

  def receive = {
    // Push a single price update to the client.
    case update: StockUpdate =>
      val message = Json.newObject()
      message.put("type", "stockupdate")
      message.put("symbol", update.symbol)
      message.put("price", update.price.doubleValue)
      out.write(message)
    // Push the full price history for a symbol to the client.
    case history: StockHistory =>
      val message = Json.newObject()
      message.put("type", "stockhistory")
      message.put("symbol", history.symbol)
      val prices = message.putArray("history")
      history.history.foreach(price => prices.add(price.doubleValue()))
      out.write(message)
  }
}
| alanktwong/typesafe_activators | reactive-stocks/app/actors/UserActor.scala | Scala | mit | 1,613 |
/**
* Created on February 23, 2011
* Copyright (c) 2011, Wei-ju Wu
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Wei-ju Wu nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY WEI-JU WU ''AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL WEI-JU WU BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.dmpp.adf.app
import org.scalatest.FlatSpec
import org.scalatest.BeforeAndAfterEach
import org.scalatest.matchers.ShouldMatchers
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import java.io._
import java.util.Date
import org.dmpp.adf.app._
@RunWith(classOf[JUnitRunner])
/**
 * Specs for creating files on FFS and OFS Amiga disk volumes: each test writes a
 * payload into a freshly created double-density disk image and verifies the
 * on-disk metadata and the data read back.
 */
class FileWriteSpec extends FlatSpec with ShouldMatchers with BeforeAndAfterEach {
  // Fresh in-memory volumes, re-created before every test.
  var emptyDiskFFS: UserVolume = null
  var emptyDiskOFS: UserVolume = null
  override def beforeEach {
    emptyDiskFFS = UserVolumeFactory.createEmptyDoubleDensityDisk()
    emptyDiskOFS = UserVolumeFactory.createEmptyDoubleDensityDisk("OFSEmpty", "OFS")
  }
  "UserVolume" should "create an FFS file" in {
    // 4-byte payload 0xDEADBEEF.
    val data: Array[Byte] = Array(0xde.asInstanceOf[Byte],
                                  0xad.asInstanceOf[Byte],
                                  0xbe.asInstanceOf[Byte],
                                  0xef.asInstanceOf[Byte])
    emptyDiskFFS.rootDirectory.createFile("steak", data)
    emptyDiskFFS.logicalVolume.rootBlock.hashtableSize should equal (0x48)
    emptyDiskFFS.logicalVolume.rootBlock.bitmapIsValid should be (true)
    emptyDiskFFS.logicalVolume.rootBlock.checksumIsValid should be (true)
    shouldBeRecent(emptyDiskFFS.lastModificationTime)
    val rootdir = emptyDiskFFS.rootDirectory
    shouldBeRecent(rootdir.lastModificationTime)
    val file = rootdir.find("steak").get.asInstanceOf[UserFile]
    file.fileHeaderBlock.checksumIsValid should be (true)
    file.fileHeaderBlock.fileSize should equal (4)
    // 880 is the root block number on a double-density disk.
    file.fileHeaderBlock.parent should equal (880)
    shouldBeRecent(file.lastModificationTime)
    val resultData = file.dataBytes
    resultData.length should equal (4)
    resultData(0) & 0xff should equal (0xde)
    resultData(1) & 0xff should equal (0xad)
    resultData(2) & 0xff should equal (0xbe)
    resultData(3) & 0xff should equal (0xef)
  }
  it should "create an FFS file that spans two blocks" in {
    // 1 KB payload (two 512-byte blocks) of repeating 0xDEADBEEF.
    val data = new Array[Byte](1024)
    for (i <- 0 to 1020 by 4) {
      data(i) = 0xde.asInstanceOf[Byte]
      data(i + 1) = 0xad.asInstanceOf[Byte]
      data(i + 2) = 0xbe.asInstanceOf[Byte]
      data(i + 3) = 0xef.asInstanceOf[Byte]
    }
    emptyDiskFFS.rootDirectory.createFile("steak", data)
    val file = emptyDiskFFS.rootDirectory.find("steak").get.asInstanceOf[UserFile]
    file.fileHeaderBlock.checksumIsValid should be (true)
    file.fileHeaderBlock.fileSize should equal (1024)
    val resultData = file.dataBytes
    resultData.length should equal (1024)
    for (i <- 0 to 1020 by 4) {
      resultData(i + 0) & 0xff should equal (0xde)
      resultData(i + 1) & 0xff should equal (0xad)
      resultData(i + 2) & 0xff should equal (0xbe)
      resultData(i + 3) & 0xff should equal (0xef)
    }
  }
  it should "create an OFS file" in {
    val data: Array[Byte] = Array(0xde.asInstanceOf[Byte],
                                  0xad.asInstanceOf[Byte],
                                  0xbe.asInstanceOf[Byte],
                                  0xef.asInstanceOf[Byte])
    emptyDiskOFS.rootDirectory.createFile("steak", data)
    val file = emptyDiskOFS.rootDirectory.find("steak").get.asInstanceOf[UserFile]
    file.fileHeaderBlock.checksumIsValid should be (true)
    file.fileHeaderBlock.fileSize should equal (4)
    val resultData = file.dataBytes
    resultData.length should equal (4)
    resultData(0) & 0xff should equal (0xde)
    resultData(1) & 0xff should equal (0xad)
    resultData(2) & 0xff should equal (0xbe)
    resultData(3) & 0xff should equal (0xef)
  }
  // NOTE(review): appears unused within this spec — candidate for removal; confirm no external use.
  def formatted(date: Date) = {
    val dateFormat = new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS")
    dateFormat.format(date)
  }
  // Asserts the timestamp was set within the last second (i.e. by this test run).
  def shouldBeRecent(date: Date) = {
    (System.currentTimeMillis - date.getTime) should be < (1000l)
  }
}
| weiju/adf-tools | adf-core/src/test/scala/org/dmpp/adf/app/FileWriteTest.scala | Scala | bsd-3-clause | 5,487 |
package models.gitbucket
import org.joda.time.DateTime
import org.specs2.mutable._
import scalikejdbc._
import scalikejdbc.specs2.mutable.AutoRollback
/**
 * CRUD specs for the Activity ScalikeJDBC model. Each example runs inside an
 * auto-rolled-back transaction so the database is left untouched.
 */
class ActivitySpec extends Specification {

  "Activity" should {
    // NOTE(review): these examples assume the fixture DB already contains an ACTIVITY
    // row with activityId = 123 — confirm the test seed data.
    val a = Activity.syntax("a")
    "find by primary keys" in new AutoRollback {
      val maybeFound = Activity.find(123)
      maybeFound.isDefined should beTrue
    }
    "find by where clauses" in new AutoRollback {
      val maybeFound = Activity.findBy(sqls.eq(a.activityId, 123))
      maybeFound.isDefined should beTrue
    }
    "find all records" in new AutoRollback {
      val allResults = Activity.findAll()
      allResults.size should be_>(0)
    }
    "count all records" in new AutoRollback {
      val count = Activity.countAll()
      count should be_>(0L)
    }
    "find all by where clauses" in new AutoRollback {
      val results = Activity.findAllBy(sqls.eq(a.activityId, 123))
      results.size should be_>(0)
    }
    "count by where clauses" in new AutoRollback {
      val count = Activity.countBy(sqls.eq(a.activityId, 123))
      count should be_>(0L)
    }
    "create new record" in new AutoRollback {
      val created = Activity.create(userName = "MyString", repositoryName = "MyString", activityUserName = "MyString", activityType = "MyString", message = null, activityDate = DateTime.now)
      created should not beNull
    }
    "save a record" in new AutoRollback {
      val entity = Activity.findAll().head
      // TODO modify something
      val modified = entity
      val updated = Activity.save(modified)
      updated should not equalTo(entity)
    }
    "destroy a record" in new AutoRollback {
      val entity = Activity.findAll().head
      Activity.destroy(entity)
      val shouldBeNone = Activity.find(123)
      shouldBeNone.isDefined should beFalse
    }
  }
}
| thomaschoo/gitolite-to-gitbucket | src/test/scala/models/gitbucket/ActivitySpec.scala | Scala | mit | 1,843 |
package mist.api.internal
import shadedshapeless._
/**
* Converts Any or Tuple into HList
*/
trait HLister[A] {
  // The HList type produced for values of A.
  type Out
  // Converts the value into its HList representation.
  def apply(a: A): Out
}
/**
 * Lowest-priority fallback: any value A becomes the single-element HList A :: HNil.
 * More specific (tuple) instances — presumably in HListerInstances, defined in a
 * sibling file — take precedence via implicit prioritization; confirm.
 */
trait LowerPriorityHLister {
  // Aux pattern: exposes the dependent Out type as a second type parameter.
  type Aux[A, Out0] = HLister[A] {type Out = Out0 }
  implicit def forAny[A]: Aux[A, A :: HNil] = new HLister[A] {
    override type Out = A :: HNil
    override def apply(a: A): A :: HNil = a :: HNil
  }
}
object HLister extends HListerInstances
| Hydrospheredata/mist | mist-lib/src/main/scala/mist/api/internal/HLister.scala | Scala | apache-2.0 | 436 |
package org.example.similarproduct
import org.apache.predictionio.controller.PPreparator
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
/**
 * Pass-through preparator: wraps the training-data RDDs in a PreparedData
 * without any transformation.
 */
class Preparator extends PPreparator[TrainingData, PreparedData] {

  override def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData =
    new PreparedData(trainingData.users, trainingData.items, trainingData.viewEvents)
}
/**
 * Output of the Preparator: user and item RDDs keyed by entity id, plus the raw
 * view events consumed by the algorithm.
 */
class PreparedData(
  val users: RDD[(String, User)],
  val items: RDD[(String, Item)],
  val viewEvents: RDD[ViewEvent]
) extends Serializable
| minhtule/Tapster-iOS-Demo-Similar-Product-Engine | src/main/scala/Preparator.scala | Scala | apache-2.0 | 605 |
package wom.util
import scala.util.{Failure, Success, Try}
/** Helpers for working with simple delimited text files. */
object FileUtil {

  /**
   * Parses tab-separated text into a table of rows.
   *
   * The whole input is trimmed before being split into lines, and each line is
   * split on tabs. Fails with an UnsupportedOperationException when the rows do
   * not all have the same number of columns.
   */
  def parseTsv(tsv: String): Try[Array[Array[String]]] = {
    // String.split drops trailing empty fields, so trailing tabs shrink a row.
    val rows = tsv.trim.split("\\n").map(_.split("\\t"))
    val rowWidths = rows.iterator.map(_.length).toSet
    if (rowWidths.size > 1) Failure(new UnsupportedOperationException("TSV is not uniform"))
    else Success(rows)
  }
}
| ohsu-comp-bio/cromwell | wom/src/main/scala/wom/util/FileUtil.scala | Scala | bsd-3-clause | 370 |
/*
* Copyright 2008 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package ${package}.model
import _root_.java.io.Serializable
import _root_.java.sql.PreparedStatement
import _root_.java.sql.ResultSet
import _root_.java.sql.SQLException
import _root_.java.sql.Types
import _root_.org.hibernate.HibernateException
import _root_.org.hibernate.usertype.UserType
/**
* Helper class to translate money amount for hibernate
*/
/**
 * Hibernate [[UserType]] mapping a money amount of currency zone `CZ`
 * to a single NUMERIC column.
 */
abstract class CurrencyUserType[CZ <: CurrencyZone](cz: CZ) extends UserType {

  type MyCurrency = CZ#Currency

  // Backing column type: one NUMERIC column.
  val SQL_TYPES = Array(Types.NUMERIC.asInstanceOf[Int])

  override def sqlTypes() = SQL_TYPES

  override def returnedClass = cz.CurrencyUnit.getClass

  /**
   * Value equality as used by Hibernate for dirty checking.
   * Two nulls compare equal; previously equals(null, null) returned false,
   * which makes Hibernate consider an unchanged null property dirty.
   */
  override def equals(x: Object, y: Object): Boolean =
    (x eq y) || (x != null && y != null && x == y)

  override def hashCode(x: Object) = x.hashCode

  /** Reads the NUMERIC column; a SQL NULL amount is mapped to a zero amount. */
  override def nullSafeGet(resultSet: ResultSet, names: Array[String], owner: Object): Object = {
    val dollarVal = resultSet.getBigDecimal(names(0))
    if (resultSet.wasNull()) cz.make(0)
    else cz.make(new BigDecimal(dollarVal))
  }

  /** Writes the amount as NUMERIC, or SQL NULL for a null value. */
  override def nullSafeSet(statement: PreparedStatement, value: Object, index: Int): Unit = {
    if (value == null) {
      statement.setNull(index, Types.NUMERIC)
    } else {
      val dollarVal = value.asInstanceOf[MyCurrency]
      statement.setBigDecimal(index, dollarVal.amount.bigDecimal)
    }
  }

  // Currency values are treated as immutable, so instances can be shared.
  override def deepCopy(value: Object): Object = value

  override def isMutable() = false

  override def disassemble(value: Object) = value.asInstanceOf[Serializable]

  override def assemble(cached: Serializable, owner: Object): Serializable = cached

  override def replace(original: Object, target: Object, owner: Object) = original
}
| jeppenejsum/liftweb | archetypes/lift-archetype-jpa-basic/src/main/resources/archetype-resources/spa/src/main/scala/model/CurrencyUserType.scala | Scala | apache-2.0 | 2,320 |
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.iterators
import java.util
import com.vividsolutions.jts.geom.Point
import com.vividsolutions.jts.io.WKTReader
import org.apache.accumulo.core.data.{Key, Value}
import org.apache.accumulo.core.iterators.Combiner
import org.locationtech.geomesa.utils.geohash.BoundingBox
import scala.collection.JavaConversions._
/*
The BBOXCombiner is used to reduce known extents into a single Bounding Box
This is useful for figuring out the total extent of a raster pyramid layer as an example
*/
/**
 * Accumulo [[Combiner]] that collapses a set of serialized bounding boxes
 * under one key into a single covering bounding box.
 */
class BBOXCombiner extends Combiner {
  import org.locationtech.geomesa.accumulo.iterators.BBOXCombiner._

  override def reduce(p1: Key, p2: util.Iterator[Value]): Value =
    if (!p2.hasNext) new Value() // no values: emit an empty Value
    else bboxToValue(reduceValuesToBoundingBox(p2))
}
object BBOXCombiner {

  // One WKTReader per thread: JTS WKTReader instances are stateful.
  val wktReader = new ThreadLocal[WKTReader] {
    override def initialValue = new WKTReader()
  }

  /** Folds every serialized bbox in the iterator into one covering bbox. */
  def reduceValuesToBoundingBox(values: util.Iterator[Value]): BoundingBox =
    values.map(valueToBbox).reduce(BoundingBox.getCoveringBoundingBox)

  // bboxToValue and valueToBbox are inverses: "<ll WKT>:<ur WKT>" bytes.
  def bboxToValue(bbox: BoundingBox): Value =
    new Value(s"${bbox.ll}:${bbox.ur}".getBytes)

  def valueToBbox(value: Value): BoundingBox = {
    val parts = value.toString.split(":")
    val reader = wktReader.get
    BoundingBox(reader.read(parts(0)).asInstanceOf[Point], reader.read(parts(1)).asInstanceOf[Point])
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.regression
import scala.collection.mutable
import breeze.linalg.{DenseVector => BDV}
import breeze.optimize.{CachedDiffFunction, DiffFunction, LBFGS => BreezeLBFGS}
import org.apache.hadoop.fs.Path
import org.apache.spark.SparkException
import org.apache.spark.annotation.Since
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.internal.Logging
import org.apache.spark.ml.{Estimator, Model}
import org.apache.spark.ml.linalg.{BLAS, Vector, Vectors, VectorUDT}
import org.apache.spark.ml.param._
import org.apache.spark.ml.param.shared._
import org.apache.spark.ml.stat.SummaryBuilderImpl._
import org.apache.spark.ml.util._
import org.apache.spark.ml.util.Instrumentation.instrumented
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Column, DataFrame, Dataset, Row}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.{DoubleType, StructType}
import org.apache.spark.storage.StorageLevel
/**
* Params for accelerated failure time (AFT) regression.
*/
private[regression] trait AFTSurvivalRegressionParams extends Params
  with HasFeaturesCol with HasLabelCol with HasPredictionCol with HasMaxIter
  with HasTol with HasFitIntercept with HasAggregationDepth with Logging {

  /**
   * Param for censor column name.
   * The value of this column could be 0 or 1.
   * If the value is 1, it means the event has occurred i.e. uncensored; otherwise censored.
   * @group param
   */
  @Since("1.6.0")
  final val censorCol: Param[String] = new Param(this, "censorCol", "censor column name")

  /** @group getParam */
  @Since("1.6.0")
  def getCensorCol: String = $(censorCol)
  setDefault(censorCol -> "censor")

  /**
   * Param for quantile probabilities array.
   * Values of the quantile probabilities array should be in the range (0, 1)
   * and the array should be non-empty.
   * @group param
   */
  @Since("1.6.0")
  final val quantileProbabilities: DoubleArrayParam = new DoubleArrayParam(this,
    "quantileProbabilities", "quantile probabilities array",
    (t: Array[Double]) => t.forall(ParamValidators.inRange(0, 1, false, false)) && t.length > 0)

  /** @group getParam */
  @Since("1.6.0")
  def getQuantileProbabilities: Array[Double] = $(quantileProbabilities)
  // Default probabilities cover the quantiles commonly reported for survival curves.
  setDefault(quantileProbabilities -> Array(0.01, 0.05, 0.1, 0.25, 0.5, 0.75, 0.9, 0.95, 0.99))

  /**
   * Param for quantiles column name.
   * This column will output quantiles of corresponding quantileProbabilities if it is set.
   * @group param
   */
  @Since("1.6.0")
  final val quantilesCol: Param[String] = new Param(this, "quantilesCol", "quantiles column name")

  /** @group getParam */
  @Since("1.6.0")
  def getQuantilesCol: String = $(quantilesCol)

  /** Checks whether the input has quantiles column name (param set and non-empty). */
  private[regression] def hasQuantilesCol: Boolean = {
    isDefined(quantilesCol) && $(quantilesCol).nonEmpty
  }

  /**
   * Validates and transforms the input schema with the provided param map.
   * @param schema input schema
   * @param fitting whether this is in fitting or prediction
   * @return output schema
   */
  protected def validateAndTransformSchema(
      schema: StructType,
      fitting: Boolean): StructType = {
    SchemaUtils.checkColumnType(schema, $(featuresCol), new VectorUDT)
    if (fitting) {
      // Label and censor columns are only consumed while fitting.
      SchemaUtils.checkNumericType(schema, $(censorCol))
      SchemaUtils.checkNumericType(schema, $(labelCol))
    }
    val schemaWithQuantilesCol = if (hasQuantilesCol) {
      SchemaUtils.appendColumn(schema, $(quantilesCol), new VectorUDT)
    } else schema
    SchemaUtils.appendColumn(schemaWithQuantilesCol, $(predictionCol), DoubleType)
  }
}
/**
* Fit a parametric survival regression model named accelerated failure time (AFT) model
* (see <a href="https://en.wikipedia.org/wiki/Accelerated_failure_time_model">
* Accelerated failure time model (Wikipedia)</a>)
* based on the Weibull distribution of the survival time.
*/
@Since("1.6.0")
class AFTSurvivalRegression @Since("1.6.0") (@Since("1.6.0") override val uid: String)
  extends Estimator[AFTSurvivalRegressionModel] with AFTSurvivalRegressionParams
  with DefaultParamsWritable with Logging {

  @Since("1.6.0")
  def this() = this(Identifiable.randomUID("aftSurvReg"))

  /** @group setParam */
  @Since("1.6.0")
  def setFeaturesCol(value: String): this.type = set(featuresCol, value)

  /** @group setParam */
  @Since("1.6.0")
  def setLabelCol(value: String): this.type = set(labelCol, value)

  /** @group setParam */
  @Since("1.6.0")
  def setCensorCol(value: String): this.type = set(censorCol, value)

  /** @group setParam */
  @Since("1.6.0")
  def setPredictionCol(value: String): this.type = set(predictionCol, value)

  /** @group setParam */
  @Since("1.6.0")
  def setQuantileProbabilities(value: Array[Double]): this.type = set(quantileProbabilities, value)

  /** @group setParam */
  @Since("1.6.0")
  def setQuantilesCol(value: String): this.type = set(quantilesCol, value)

  /**
   * Set if we should fit the intercept
   * Default is true.
   * @group setParam
   */
  @Since("1.6.0")
  def setFitIntercept(value: Boolean): this.type = set(fitIntercept, value)
  setDefault(fitIntercept -> true)

  /**
   * Set the maximum number of iterations.
   * Default is 100.
   * @group setParam
   */
  @Since("1.6.0")
  def setMaxIter(value: Int): this.type = set(maxIter, value)
  setDefault(maxIter -> 100)

  /**
   * Set the convergence tolerance of iterations.
   * Smaller value will lead to higher accuracy with the cost of more iterations.
   * Default is 1E-6.
   * @group setParam
   */
  @Since("1.6.0")
  def setTol(value: Double): this.type = set(tol, value)
  setDefault(tol -> 1E-6)

  /**
   * Suggested depth for treeAggregate (greater than or equal to 2).
   * If the dimensions of features or the number of partitions are large,
   * this param could be adjusted to a larger size.
   * Default is 2.
   * @group expertSetParam
   */
  @Since("2.1.0")
  def setAggregationDepth(value: Int): this.type = set(aggregationDepth, value)
  setDefault(aggregationDepth -> 2)

  /**
   * Extract [[featuresCol]], [[labelCol]] and [[censorCol]] from input dataset,
   * and put it in an RDD with strong types.
   */
  protected[ml] def extractAFTPoints(dataset: Dataset[_]): RDD[AFTPoint] = {
    dataset.select(col($(featuresCol)), col($(labelCol)).cast(DoubleType),
      col($(censorCol)).cast(DoubleType)).rdd.map {
        case Row(features: Vector, label: Double, censor: Double) =>
          AFTPoint(features, label, censor)
      }
  }

  @Since("2.0.0")
  override def fit(dataset: Dataset[_]): AFTSurvivalRegressionModel = instrumented { instr =>
    transformSchema(dataset.schema, logging = true)
    val instances = extractAFTPoints(dataset)
    // Cache the points only if the caller has not: they are scanned once per
    // L-BFGS iteration below.
    val handlePersistence = dataset.storageLevel == StorageLevel.NONE
    if (handlePersistence) instances.persist(StorageLevel.MEMORY_AND_DISK)

    // One distributed pass collecting per-feature mean/variance/count,
    // used for feature standardization during optimization.
    val featuresSummarizer = instances.treeAggregate(
      createSummarizerBuffer("mean", "variance", "count"))(
      seqOp = (c: SummarizerBuffer, v: AFTPoint) => c.add(v.features),
      combOp = (c1: SummarizerBuffer, c2: SummarizerBuffer) => c1.merge(c2),
      depth = $(aggregationDepth)
    )

    val featuresStd = featuresSummarizer.variance.toArray.map(math.sqrt)
    val numFeatures = featuresStd.size

    instr.logPipelineStage(this)
    instr.logDataset(dataset)
    instr.logParams(this, labelCol, featuresCol, censorCol, predictionCol, quantilesCol,
      fitIntercept, maxIter, tol, aggregationDepth)
    instr.logNamedValue("quantileProbabilities.size", $(quantileProbabilities).length)
    instr.logNumFeatures(numFeatures)
    instr.logNumExamples(featuresSummarizer.count)

    // Constant nonzero columns get zero coefficients when no intercept is fit;
    // warn because R's survival::survreg behaves differently here.
    if (!$(fitIntercept) && (0 until numFeatures).exists { i =>
        featuresStd(i) == 0.0 && featuresSummarizer.mean(i) != 0.0 }) {
      instr.logWarning("Fitting AFTSurvivalRegressionModel without intercept on dataset with " +
        "constant nonzero column, Spark MLlib outputs zero coefficients for constant nonzero " +
        "columns. This behavior is different from R survival::survreg.")
    }

    val bcFeaturesStd = instances.context.broadcast(featuresStd)

    val costFun = new AFTCostFun(instances, $(fitIntercept), bcFeaturesStd, $(aggregationDepth))
    // L-BFGS with history size 10 over (log(sigma), intercept, coefficients).
    val optimizer = new BreezeLBFGS[BDV[Double]]($(maxIter), 10, $(tol))

    /*
       The parameters vector has three parts:
       the first element: Double, log(sigma), the log of scale parameter
       the second element: Double, intercept of the beta parameter
       the third to the end elements: Doubles, regression coefficients vector of the beta parameter
     */
    val initialParameters = Vectors.zeros(numFeatures + 2)

    val states = optimizer.iterations(new CachedDiffFunction(costFun),
      initialParameters.asBreeze.toDenseVector)

    // Drain the iterator: `state` ends up as the final optimizer state.
    val parameters = {
      val arrayBuilder = mutable.ArrayBuilder.make[Double]
      var state: optimizer.State = null
      while (states.hasNext) {
        state = states.next()
        arrayBuilder += state.adjustedValue
      }
      if (state == null) {
        val msg = s"${optimizer.getClass.getName} failed."
        throw new SparkException(msg)
      }
      state.x.toArray.clone()
    }

    bcFeaturesStd.destroy()
    if (handlePersistence) instances.unpersist()

    // Optimization ran in the standardized feature space; rescale the raw
    // coefficients back to the original space (constant columns stay zero).
    val rawCoefficients = parameters.slice(2, parameters.length)
    var i = 0
    while (i < numFeatures) {
      rawCoefficients(i) *= { if (featuresStd(i) != 0.0) 1.0 / featuresStd(i) else 0.0 }
      i += 1
    }
    val coefficients = Vectors.dense(rawCoefficients)
    val intercept = parameters(1)
    val scale = math.exp(parameters(0))
    copyValues(new AFTSurvivalRegressionModel(uid, coefficients, intercept, scale).setParent(this))
  }

  @Since("1.6.0")
  override def transformSchema(schema: StructType): StructType = {
    validateAndTransformSchema(schema, fitting = true)
  }

  @Since("1.6.0")
  override def copy(extra: ParamMap): AFTSurvivalRegression = defaultCopy(extra)
}
@Since("1.6.0")
object AFTSurvivalRegression extends DefaultParamsReadable[AFTSurvivalRegression] {

  /** Loads an [[AFTSurvivalRegression]] estimator previously saved with `write`. */
  @Since("1.6.0")
  override def load(path: String): AFTSurvivalRegression = super.load(path)
}
/**
* Model produced by [[AFTSurvivalRegression]].
*/
@Since("1.6.0")
class AFTSurvivalRegressionModel private[ml] (
    @Since("1.6.0") override val uid: String,
    @Since("2.0.0") val coefficients: Vector,
    @Since("1.6.0") val intercept: Double,
    @Since("1.6.0") val scale: Double)
  extends Model[AFTSurvivalRegressionModel] with AFTSurvivalRegressionParams with MLWritable {

  @Since("3.0.0")
  lazy val numFeatures: Int = coefficients.size

  /** @group setParam */
  @Since("1.6.0")
  def setFeaturesCol(value: String): this.type = set(featuresCol, value)

  /** @group setParam */
  @Since("1.6.0")
  def setPredictionCol(value: String): this.type = set(predictionCol, value)

  /** @group setParam */
  @Since("1.6.0")
  def setQuantileProbabilities(value: Array[Double]): this.type = set(quantileProbabilities, value)

  /** @group setParam */
  @Since("1.6.0")
  def setQuantilesCol(value: String): this.type = set(quantilesCol, value)

  /**
   * Predicted lifetime quantiles for the configured [[quantileProbabilities]].
   * Uses the Weibull quantile function Q(q) = lambda * (-ln(1 - q))^(1/k).
   */
  @Since("2.0.0")
  def predictQuantiles(features: Vector): Vector = {
    // scale parameter for the Weibull distribution of lifetime
    val lambda = math.exp(BLAS.dot(coefficients, features) + intercept)
    // shape parameter for the Weibull distribution of lifetime
    val k = 1 / scale
    val quantiles = $(quantileProbabilities).map {
      q => lambda * math.exp(math.log(-math.log1p(-q)) / k)
    }
    Vectors.dense(quantiles)
  }

  /** Predicted lifetime: exp(coefficients . features + intercept). */
  @Since("2.0.0")
  def predict(features: Vector): Double = {
    math.exp(BLAS.dot(coefficients, features) + intercept)
  }

  @Since("2.0.0")
  override def transform(dataset: Dataset[_]): DataFrame = {
    transformSchema(dataset.schema, logging = true)
    // Build the output columns lazily: prediction and/or quantiles,
    // depending on which output column params are set.
    var predictionColNames = Seq.empty[String]
    var predictionColumns = Seq.empty[Column]

    if ($(predictionCol).nonEmpty) {
      val predictUDF = udf { features: Vector => predict(features) }
      predictionColNames :+= $(predictionCol)
      predictionColumns :+= predictUDF(col($(featuresCol)))
    }

    if (hasQuantilesCol) {
      val predictQuantilesUDF = udf { features: Vector => predictQuantiles(features)}
      predictionColNames :+= $(quantilesCol)
      predictionColumns :+= predictQuantilesUDF(col($(featuresCol)))
    }

    if (predictionColNames.nonEmpty) {
      dataset.withColumns(predictionColNames, predictionColumns)
    } else {
      // No output columns requested: warn and return the input unchanged.
      this.logWarning(s"$uid: AFTSurvivalRegressionModel.transform() does nothing" +
        " because no output columns were set.")
      dataset.toDF()
    }
  }

  @Since("1.6.0")
  override def transformSchema(schema: StructType): StructType = {
    validateAndTransformSchema(schema, fitting = false)
  }

  @Since("1.6.0")
  override def copy(extra: ParamMap): AFTSurvivalRegressionModel = {
    copyValues(new AFTSurvivalRegressionModel(uid, coefficients, intercept, scale), extra)
      .setParent(parent)
  }

  @Since("1.6.0")
  override def write: MLWriter =
    new AFTSurvivalRegressionModel.AFTSurvivalRegressionModelWriter(this)

  @Since("3.0.0")
  override def toString: String = {
    s"AFTSurvivalRegressionModel: uid=$uid, numFeatures=$numFeatures"
  }
}
@Since("1.6.0")
object AFTSurvivalRegressionModel extends MLReadable[AFTSurvivalRegressionModel] {

  @Since("1.6.0")
  override def read: MLReader[AFTSurvivalRegressionModel] = new AFTSurvivalRegressionModelReader

  @Since("1.6.0")
  override def load(path: String): AFTSurvivalRegressionModel = super.load(path)

  /** [[MLWriter]] instance for [[AFTSurvivalRegressionModel]] */
  private[AFTSurvivalRegressionModel] class AFTSurvivalRegressionModelWriter (
      instance: AFTSurvivalRegressionModel
    ) extends MLWriter with Logging {

    // On-disk row layout for the Parquet file written under <path>/data.
    private case class Data(coefficients: Vector, intercept: Double, scale: Double)

    override protected def saveImpl(path: String): Unit = {
      // Save metadata and Params
      DefaultParamsWriter.saveMetadata(instance, path, sc)
      // Save model data: coefficients, intercept, scale
      val data = Data(instance.coefficients, instance.intercept, instance.scale)
      val dataPath = new Path(path, "data").toString
      // repartition(1): the model is a single row, keep it in one file.
      sparkSession.createDataFrame(Seq(data)).repartition(1).write.parquet(dataPath)
    }
  }

  private class AFTSurvivalRegressionModelReader extends MLReader[AFTSurvivalRegressionModel] {

    /** Checked against metadata when loading model */
    private val className = classOf[AFTSurvivalRegressionModel].getName

    override def load(path: String): AFTSurvivalRegressionModel = {
      val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
      val dataPath = new Path(path, "data").toString
      val data = sparkSession.read.parquet(dataPath)
      // convertVectorColumnsToML: accept models saved with the old mllib Vector type.
      val Row(coefficients: Vector, intercept: Double, scale: Double) =
        MLUtils.convertVectorColumnsToML(data, "coefficients")
          .select("coefficients", "intercept", "scale")
          .head()
      val model = new AFTSurvivalRegressionModel(metadata.uid, coefficients, intercept, scale)
      metadata.getAndSetParams(model)
      model
    }
  }
}
/**
* AFTAggregator computes the gradient and loss for a AFT loss function,
* as used in AFT survival regression for samples in sparse or dense vector in an online fashion.
*
* The loss function and likelihood function under the AFT model based on:
* Lawless, J. F., Statistical Models and Methods for Lifetime Data,
* New York: John Wiley & Sons, Inc. 2003.
*
* Two AFTAggregator can be merged together to have a summary of loss and gradient of
* the corresponding joint dataset.
*
* Given the values of the covariates $x^{'}$, for random lifetime $t_{i}$ of subjects i = 1,..,n,
* with possible right-censoring, the likelihood function under the AFT model is given as
*
* <blockquote>
* $$
* L(\\beta,\\sigma)=\\prod_{i=1}^n[\\frac{1}{\\sigma}f_{0}
* (\\frac{\\log{t_{i}}-x^{'}\\beta}{\\sigma})]^{\\delta_{i}}S_{0}
* (\\frac{\\log{t_{i}}-x^{'}\\beta}{\\sigma})^{1-\\delta_{i}}
* $$
* </blockquote>
*
* Where $\\delta_{i}$ is the indicator of the event has occurred i.e. uncensored or not.
* Using $\\epsilon_{i}=\\frac{\\log{t_{i}}-x^{'}\\beta}{\\sigma}$, the log-likelihood function
* assumes the form
*
* <blockquote>
* $$
* \\iota(\\beta,\\sigma)=\\sum_{i=1}^{n}[-\\delta_{i}\\log\\sigma+
* \\delta_{i}\\log{f_{0}}(\\epsilon_{i})+(1-\\delta_{i})\\log{S_{0}(\\epsilon_{i})}]
* $$
* </blockquote>
* Where $S_{0}(\\epsilon_{i})$ is the baseline survivor function,
* and $f_{0}(\\epsilon_{i})$ is corresponding density function.
*
* The most commonly used log-linear survival regression method is based on the Weibull
* distribution of the survival time. The Weibull distribution for lifetime corresponding
* to extreme value distribution for log of the lifetime,
* and the $S_{0}(\\epsilon)$ function is
*
* <blockquote>
* $$
* S_{0}(\\epsilon_{i})=\\exp(-e^{\\epsilon_{i}})
* $$
* </blockquote>
*
* and the $f_{0}(\\epsilon_{i})$ function is
*
* <blockquote>
* $$
* f_{0}(\\epsilon_{i})=e^{\\epsilon_{i}}\\exp(-e^{\\epsilon_{i}})
* $$
* </blockquote>
*
* The log-likelihood function for Weibull distribution of lifetime is
*
* <blockquote>
* $$
* \\iota(\\beta,\\sigma)=
* -\\sum_{i=1}^n[\\delta_{i}\\log\\sigma-\\delta_{i}\\epsilon_{i}+e^{\\epsilon_{i}}]
* $$
* </blockquote>
*
* Due to minimizing the negative log-likelihood equivalent to maximum a posteriori probability,
* the loss function we use to optimize is $-\\iota(\\beta,\\sigma)$.
* The gradient functions for $\\beta$ and $\\log\\sigma$ respectively are
*
* <blockquote>
* $$
* \\frac{\\partial (-\\iota)}{\\partial \\beta}=
* \\sum_{1=1}^{n}[\\delta_{i}-e^{\\epsilon_{i}}]\\frac{x_{i}}{\\sigma} \\\\
*
* \\frac{\\partial (-\\iota)}{\\partial (\\log\\sigma)}=
* \\sum_{i=1}^{n}[\\delta_{i}+(\\delta_{i}-e^{\\epsilon_{i}})\\epsilon_{i}]
* $$
* </blockquote>
*
* @param bcParameters The broadcasted value includes three part: The log of scale parameter,
* the intercept and regression coefficients corresponding to the features.
* @param fitIntercept Whether to fit an intercept term.
* @param bcFeaturesStd The broadcast standard deviation values of the features.
*/
private class AFTAggregator(
    bcParameters: Broadcast[BDV[Double]],
    fitIntercept: Boolean,
    bcFeaturesStd: Broadcast[Array[Double]]) extends Serializable {

  // Parameter vector layout: [0] = log(sigma), [1] = intercept, [2..] = coefficients.
  private val length = bcParameters.value.length
  // make transient so we do not serialize between aggregation stages
  @transient private lazy val parameters = bcParameters.value
  // the regression coefficients to the covariates
  @transient private lazy val coefficients = parameters.slice(2, length)
  @transient private lazy val intercept = parameters(1)
  // sigma is the scale parameter of the AFT model
  @transient private lazy val sigma = math.exp(parameters(0))

  private var totalCnt: Long = 0L
  private var lossSum = 0.0
  // Here we optimize loss function over log(sigma), intercept and coefficients
  private lazy val gradientSumArray = Array.ofDim[Double](length)

  def count: Long = totalCnt

  /** Mean negative log-likelihood over all instances added so far. */
  def loss: Double = {
    require(totalCnt > 0.0, s"The number of instances should be " +
      s"greater than 0.0, but got $totalCnt.")
    lossSum / totalCnt
  }

  /** Mean gradient; same layout as the parameter vector. */
  def gradient: BDV[Double] = {
    require(totalCnt > 0.0, s"The number of instances should be " +
      s"greater than 0.0, but got $totalCnt.")
    new BDV(gradientSumArray.map(_ / totalCnt.toDouble))
  }

  /**
   * Add a new training data to this AFTAggregator, and update the loss and gradient
   * of the objective function.
   *
   * @param data The AFTPoint representation for one data point to be added into this aggregator.
   * @return This AFTAggregator object.
   */
  def add(data: AFTPoint): this.type = {
    val xi = data.features
    val ti = data.label
    val delta = data.censor
    require(ti > 0.0, "The lifetime or label should be greater than 0.")

    val localFeaturesStd = bcFeaturesStd.value

    // margin = x' * beta + intercept, computed over standardized features;
    // features with zero std are skipped (treated as contributing zero).
    val margin = {
      var sum = 0.0
      xi.foreachActive { (index, value) =>
        if (localFeaturesStd(index) != 0.0 && value != 0.0) {
          sum += coefficients(index) * (value / localFeaturesStd(index))
        }
      }
      sum + intercept
    }
    val epsilon = (math.log(ti) - margin) / sigma

    lossSum += delta * math.log(sigma) - delta * epsilon + math.exp(epsilon)

    // Gradient slots follow the parameter layout:
    // [0] w.r.t. log(sigma), [1] w.r.t. intercept, [2..] w.r.t. coefficients.
    val multiplier = (delta - math.exp(epsilon)) / sigma

    gradientSumArray(0) += delta + multiplier * sigma * epsilon
    gradientSumArray(1) += { if (fitIntercept) multiplier else 0.0 }
    xi.foreachActive { (index, value) =>
      if (localFeaturesStd(index) != 0.0 && value != 0.0) {
        gradientSumArray(index + 2) += multiplier * (value / localFeaturesStd(index))
      }
    }

    totalCnt += 1
    this
  }

  /**
   * Merge another AFTAggregator, and update the loss and gradient
   * of the objective function.
   * (Note that it's in place merging; as a result, `this` object will be modified.)
   *
   * @param other The other AFTAggregator to be merged.
   * @return This AFTAggregator object.
   */
  def merge(other: AFTAggregator): this.type = {
    // Empty aggregators (count == 0) contribute nothing and are skipped.
    if (other.count != 0) {
      totalCnt += other.totalCnt
      lossSum += other.lossSum

      var i = 0
      while (i < length) {
        this.gradientSumArray(i) += other.gradientSumArray(i)
        i += 1
      }
    }
    this
  }
}
/**
* AFTCostFun implements Breeze's DiffFunction[T] for AFT cost.
* It returns the loss and gradient at a particular point (parameters).
* It's used in Breeze's convex optimization routines.
*/
/**
 * Breeze [[DiffFunction]] for the AFT objective: evaluates mean loss and
 * mean gradient at a given parameter vector via a distributed treeAggregate.
 */
private class AFTCostFun(
    data: RDD[AFTPoint],
    fitIntercept: Boolean,
    bcFeaturesStd: Broadcast[Array[Double]],
    aggregationDepth: Int) extends DiffFunction[BDV[Double]] {

  override def calculate(parameters: BDV[Double]): (Double, BDV[Double]) = {
    // Ship the current parameter vector to the executors for this evaluation.
    val bcParameters = data.context.broadcast(parameters)

    val aggregated = data.treeAggregate(
      new AFTAggregator(bcParameters, fitIntercept, bcFeaturesStd))(
      seqOp = (agg, point) => agg.add(point),
      combOp = (left, right) => left.merge(right),
      depth = aggregationDepth)

    bcParameters.destroy()
    (aggregated.loss, aggregated.gradient)
  }
}
/**
* Class that represents the (features, label, censor) of a data point.
*
* @param features List of features for this data point.
* @param label Label for this data point.
* @param censor Indicator of the event has occurred or not. If the value is 1, it means
* the event has occurred i.e. uncensored; otherwise censored.
*/
private[regression] case class AFTPoint(features: Vector, label: Double, censor: Double) {
  // censor is a binary indicator: 1.0 = event occurred (uncensored), 0.0 = censored.
  require(censor == 1.0 || censor == 0.0, "censor of class AFTPoint must be 1.0 or 0.0")
}
| caneGuy/spark | mllib/src/main/scala/org/apache/spark/ml/regression/AFTSurvivalRegression.scala | Scala | apache-2.0 | 23,901 |
package org.nephtys.keepaseat
import org.scalatest.{Matchers, WordSpec}
import akka.http.scaladsl.model.{HttpHeader, StatusCode, StatusCodes}
import akka.http.scaladsl.testkit.ScalatestRouteTest
import akka.http.scaladsl.server._
import Directives._
import akka.http.scaladsl.model.headers.{BasicHttpCredentials, HttpChallenge, HttpChallenges}
import akka.http.scaladsl.server.AuthenticationFailedRejection.CredentialsMissing
import org.nephtys.cmac.BasicAuthHelper.LoginData
import org.nephtys.cmac.MacSource
import org.nephtys.keepaseat.internal.{GetRetreiveRoute, LinkJWTRoute, PostChangesRoute, StaticRoute}
import org.nephtys.keepaseat.internal.configs.{PasswordConfig, ServerConfig}
import org.nephtys.keepaseat.internal.eventdata.{Event, EventElementBlock}
import org.nephtys.keepaseat.internal.linkkeys.{ConfirmationOrDeletion, ReservationRequest, SimpleConfirmationOrDeletion, SimpleReservation}
import org.nephtys.keepaseat.internal.testmocks.{MockDatabase, MockMailer}
import upickle.default._
import akka.http.scaladsl.model.HttpHeader.ParsingResult.Ok
import org.nephtys.keepaseat.filter.XSSCleaner
import org.nephtys.keepaseat.internal.posts.{SimpleSuperuserPost, SimpleUserPost, UserPost}
import org.nephtys.keepaseat.internal.validators.{BasicSuperuserPostValidator, SuperuserPostValidator, UserPostValidator}
import scala.concurrent.{Await, Future}
import scala.concurrent.duration.Duration
/**
* Created by nephtys on 9/28/16.
*/
class CompleteRouteSpec extends WordSpec with Matchers with ScalatestRouteTest {
val username = "john"
val superusername = "superjohn"
val userpassword = "12345"
val superuserpassword = "678910"
def readFile(filepath: String) = {
val source = scala.io.Source.fromFile(filepath, "utf-8")
val lines = try source.mkString finally source.close()
lines
}
val secretKey = org.nephtys.cmac.HmacHelper.keys.generateNewKey(256, "HmacSHA256")
implicit val macSource: MacSource = new MacSource(secretKey)
val indexHTMLString: String = readFile("""src/test/resources/web/index.html""")
assert(indexHTMLString.startsWith("""<!doctype html>"""))
val fileTxtString: String = readFile("""src/test/resources/web/file.txt""")
assert(fileTxtString.equals("""this is a txt file"""))
val deeperTxtString: String = readFile("""src/test/resources/web/subdirectory/deeper.txt""")
assert(deeperTxtString.equals("""I am so deep right now"""))
val indexHTMLRedirect: String = """The request, and all future requests should be repeated using <a href="./index.html">this URI</a>."""
implicit val notifier = new MockMailer
implicit val database = new MockDatabase
implicit val xss = new XSSCleaner()
implicit val validatorsUser: Seq[UserPostValidator] = Seq.empty
implicit val validatorsSuperuser: Seq[SuperuserPostValidator] = Seq(new BasicSuperuserPostValidator())
implicit val serverConfigSource: ServerConfig = new ServerConfig {
//assume "web" as default value
override def pathToStaticWebDirectory(rootdir : String): String = rootdir+"/web"
override def port: Int = 1234
override def filepathToDatabaseWithoutFileEnding: Option[String] = None //should not be used anyway
override def httpsPassword: Option[String] = None
}
implicit val passwordConfigSource: PasswordConfig = new PasswordConfig {
override def normalUser: LoginData = LoginData(username, userpassword)
override def superUser: LoginData = LoginData(superusername, superuserpassword)
override def realmForCredentials(): String = "security realm for unit tests"
override def useCSRFProtection: Boolean = true
}
val staticRoute = new StaticRoute("./src/test/resources").extractRoute
"The Static Route" should {
"require basic auth on accessing a file" in {
Get("/file.txt") ~> staticRoute ~> check {
rejection shouldEqual authmissingreject
}
}
"require basic auth on root" in {
Get() ~> staticRoute ~> check {
rejection shouldEqual authmissingreject
}
}
"return a txt-file in the web direcrory for /file.txt" in {
println(serverConfigSource.pathToStaticWebDirectory("./src/test/resources"))
Get("/file.txt") ~> addCredentials(BasicHttpCredentials(username, userpassword)) ~> staticRoute ~> check {
responseAs[String] shouldEqual fileTxtString
}
}
"return a txt-file inside a subdirectory of /web" in {
Get("/subdirectory/deeper.txt") ~> addCredentials(BasicHttpCredentials(username, userpassword)) ~> staticRoute ~> check {
responseAs[String] shouldEqual deeperTxtString
}
}
"return index.html if looking at path /index.html" in {
Get("/index.html") ~> addCredentials(BasicHttpCredentials(username, userpassword)) ~> staticRoute ~> check {
responseAs[String] shouldEqual indexHTMLString
}
}
"return index.html for empty path" in {
Get() ~> addCredentials(BasicHttpCredentials(username, userpassword)) ~> staticRoute ~> check {
responseAs[String] shouldEqual indexHTMLRedirect
}
}
}
val helloRt2 = path("hello") {
get {
complete {
"Hello world"
}
}
}
val jwtRouteContainer = new LinkJWTRoute()
val jwtRoute = jwtRouteContainer.extractRoute
def authmissingreject = AuthenticationFailedRejection.apply(AuthenticationFailedRejection.CredentialsMissing,
HttpChallenges.basic(passwordConfigSource.realmForCredentials))
def superauthmissingreject = AuthenticationFailedRejection.apply(AuthenticationFailedRejection.CredentialsMissing,
HttpChallenges.basic(passwordConfigSource.realmForCredentials+"-adminrealm"))
"The JWT-Link Route" should {
// Email-confirmation link for the example reservation (host prefix left empty).
def examplereservationlink: String =
  LinkJWTRoute.computeLinkCompletepathForEmailConfirmation("", examplereservation.toURLencodedJWT())

// A reservation starting shortly after "now" and lasting 24 hours on "Bed A".
def examplereservation: SimpleReservation = SimpleReservation("",
  elements = Seq(EventElementBlock("Bed A", System.currentTimeMillis() + 9999,
    System.currentTimeMillis() + 9999 + (1000 * 3600 * 24))),
  name = "chris",
  email = "chris@somwhere.org",
  telephone = "013264355523434",
  commentary = "Some comment to make",
  randomNumber = 1337L) // was `1337.asInstanceOf[Long]`; a Long literal is the idiomatic form

// The same reservation shifted to a fixed (non-"now") 24-hour window.
def examplereservation2 = examplereservation.copy(elements = Seq(EventElementBlock("Bed A", 9999 + 1000000,
  9999 + 1000000 + (1000 * 3600 * 24))))

// Superuser confirmation for a (id, token) pair — presumably (event id, secret); confirm against
// SimpleConfirmationOrDeletion's signature.
def exampleconfirm(combine: (Long, String)): ConfirmationOrDeletion =
  SimpleConfirmationOrDeletion("", combine._1, combine._2, confirmingThisReservation = true, 13)

// Superuser decline/deletion for the same kind of (id, token) pair.
def exampledecline(combine: (Long, String)): ConfirmationOrDeletion =
  SimpleConfirmationOrDeletion("", combine._1, combine._2, confirmingThisReservation = false, 14)
// Without credentials the email-confirmation link must reject with CredentialsMissing.
"require basic auth on confirm-email" in {
Get(LinkJWTRoute.computeLinkCompletepathForEmailConfirmation("",ReservationRequest.makeUrlencodedJWT
(examplereservation))) ~> jwtRoute ~> check {
rejection shouldEqual authmissingreject
}
}
// With normal-user credentials the same link completes and renders the success page.
"work with user auth on confirm-email" in {
//This does compile, red markers are intelliJ bugs
Get(LinkJWTRoute.computeLinkCompletepathForEmailConfirmation("",ReservationRequest.makeUrlencodedJWT
(examplereservation))) ~> addCredentials(BasicHttpCredentials(username, userpassword)) ~> jwtRoute ~> check {
responseAs[String] shouldEqual LinkJWTRoute.selfClosingHTML(LinkJWTRoute.emailConfirmSuccessText)
}
}
// A superuser confirm link without credentials must reject with the normal-user
// CredentialsMissing challenge. (Fixed typo "npormal" in the test name.)
"require normal user auth on confirm-reservation" in {
  // NOTE(review): this guard creates an event only when an unconfirmed one already exists,
  // yet the `.get` below requires one to exist — verify the condition should not be `.isEmpty`.
  if (database.getUnconfirmedEventID.isDefined) {
    database.create(examplereservation.toNewEventWithoutID)
  }
  Get(LinkJWTRoute.computeLinkCompletepathForSuperuserConfirmation("", ConfirmationOrDeletion.makeUrlencodedJWT(
    exampleconfirm(database.getUnconfirmedEventID.get)))) ~> jwtRoute ~> check {
    rejection shouldEqual authmissingreject
  }
}
// With superuser credentials the confirmation link completes and renders the confirmation text.
"work with superuser auth on confirm-reservation" in {
//This does compile, red markers are intelliJ bugs
// NOTE(review): guard creates an event only when an unconfirmed one already exists, although
// the `.get` below needs one — looks inverted; confirm against MockDatabase semantics.
if (database.getUnconfirmedEventID.isDefined) {
database.create(examplereservation.toNewEventWithoutID)
}
Get(LinkJWTRoute.computeLinkCompletepathForSuperuserConfirmation("", ConfirmationOrDeletion.makeUrlencodedJWT
(exampleconfirm(database.getUnconfirmedEventID.get)))) ~> addCredentials(BasicHttpCredentials(superusername,
superuserpassword)) ~> jwtRoute ~> check {
responseAs[String] shouldEqual LinkJWTRoute.selfClosingHTML(LinkJWTRoute.confirmReservationText)
}
}
// A truncated JWT must surface as a MalformedQueryParamRejection, not crash the route.
"be rejected if the jwt is incomplete" in {
//This does compile, red markers are intelliJ bugs
val long = LinkJWTRoute.computeLinkCompletepathForEmailConfirmation("", ReservationRequest.makeUrlencodedJWT
(examplereservation))
val short = long.substring(0, long.length / 2)
Get(short) ~> addCredentials(BasicHttpCredentials(username, userpassword)) ~> jwtRoute ~> check {
rejection.asInstanceOf[akka.http.scaladsl.server.MalformedQueryParamRejection].errorMsg.startsWith("jwt for " +
"email confirm " +
"unparsable") shouldBe true
}
}
//following: check mechanic of route
//test mechanic 1 - everything is right, but the user deletes after superuser confirmation
// Full happy path: email confirm creates the event, superuser confirm flags it, user delete
// removes it. Each step adds 2 notifications (one to user, one to superuser).
"work normally under typical workflow with superuser confirming and user deleting afterwards" in {
database.clearDatabase()
notifier.notifications.clear()
//get first email confirm link
val emailconfirmlink = examplereservationlink
Await.result(database.retrieve(), Duration(1, "second")).size shouldEqual 0
//call get on email confirm link (with auth) => should create event in db
Get(emailconfirmlink) ~> addCredentials(BasicHttpCredentials(username, userpassword)) ~> jwtRoute ~> check {
status.isSuccess() shouldEqual true
}
val databaseretreive1 = Await.result(database.retrieve(), Duration(1, "second"))
databaseretreive1.size shouldEqual 1
val eventAfterEmailConfirm = databaseretreive1.head
eventAfterEmailConfirm.confirmedBySupseruser shouldEqual false
//get user delete and superuser confirm link
notifier.notifications.size shouldEqual 2
val userdeletelink: String = notifier.notifications.find(a => !a.toSuperuserInsteadOfUser).get.links.head
val superuserconfirmlink: String = notifier.notifications.find(a => a.toSuperuserInsteadOfUser).get.links.head
//call get on superuser confirm link (with auth) => should update event in db
Get(superuserconfirmlink) ~> addCredentials(BasicHttpCredentials(superusername, superuserpassword)) ~> jwtRoute ~>
check {
status.isSuccess() shouldEqual true
}
val databaseretreive2 = Await.result(database.retrieve(), Duration(1, "second"))
databaseretreive2.size shouldEqual 1
notifier.notifications.size shouldEqual 4
val eventAfterSuperuserConfirm = databaseretreive2.head
eventAfterSuperuserConfirm.confirmedBySupseruser shouldEqual true
//call get on user delete link (with auth) => should delete event in db
Get(userdeletelink) ~> addCredentials(BasicHttpCredentials(username, userpassword)) ~> jwtRoute ~> check {
status.isSuccess() shouldEqual true
}
val databaseretreive3 = Await.result(database.retrieve(), Duration(1, "second"))
databaseretreive3.size shouldEqual 0
notifier.notifications.size shouldEqual 6
}
//test mechanic 2 - everything is right, but the user deletes before superuser confirmation (which fails)
// After the user deletes, the superuser's confirm link must be unhandled: no event is
// re-created and no further notifications are sent.
"work with user deleting and superuser trying to confirm afterwards" in {
database.clearDatabase()
notifier.notifications.clear()
//get first email confirm link
val emailconfirmlink = examplereservationlink
Await.result(database.retrieve(), Duration(1, "second")).size shouldEqual 0
//call get on email confirm link (with auth) => should create event in db
Get(emailconfirmlink) ~> addCredentials(BasicHttpCredentials(username, userpassword)) ~> jwtRoute ~> check {
status.isSuccess() shouldEqual true
}
val databaseretreive1 = Await.result(database.retrieve(), Duration(1, "second"))
databaseretreive1.size shouldEqual 1
val eventAfterEmailConfirm = databaseretreive1.head
eventAfterEmailConfirm.confirmedBySupseruser shouldEqual false
//get user delete and superuser confirm link
notifier.notifications.size shouldEqual 2
val userdeletelink: String = notifier.notifications.find(a => !a.toSuperuserInsteadOfUser).get.links.head
val superuserconfirmlink: String = notifier.notifications.find(a => a.toSuperuserInsteadOfUser).get.links.head
//call get on user delete link (with auth) => should delete event in db
Get(userdeletelink) ~> addCredentials(BasicHttpCredentials(username, userpassword)) ~> jwtRoute ~> check {
status.isSuccess() shouldEqual true
}
val databaseretreive3 = Await.result(database.retrieve(), Duration(1, "second"))
databaseretreive3.size shouldEqual 0
notifier.notifications.size shouldEqual 4
//call get on superuser confirm link (with auth) - should fail
Get(superuserconfirmlink) ~> addCredentials(BasicHttpCredentials(superusername, superuserpassword)) ~> jwtRoute ~> check {
handled shouldEqual false
}
val databaseretreive2 = Await.result(database.retrieve(), Duration(1, "second"))
databaseretreive2.size shouldEqual 0
notifier.notifications.size shouldEqual 4
}
//test mechanic 3 - everything is right, but the superuser declines
// Superuser decline removes the event; afterwards the user's delete link must be unhandled.
"work normally with the superuser declining" in {
database.clearDatabase()
notifier.notifications.clear()
//get first email confirm link
val emailconfirmlink = examplereservationlink
Await.result(database.retrieve(), Duration(1, "second")).size shouldEqual 0
//call get on email confirm link (with auth) => should create event in db
Get(emailconfirmlink) ~> addCredentials(BasicHttpCredentials(username, userpassword)) ~> jwtRoute ~> check {
status.isSuccess() shouldEqual true
}
val databaseretreive1 = Await.result(database.retrieve(), Duration(1, "second"))
databaseretreive1.size shouldEqual 1
val eventAfterEmailConfirm = databaseretreive1.head
eventAfterEmailConfirm.confirmedBySupseruser shouldEqual false
//get user delete and superuser delete link
notifier.notifications.size shouldEqual 2
val userdeletelink: String = notifier.notifications.find(a => !a.toSuperuserInsteadOfUser).get.links.head
// the superuser mail carries confirm and decline links; `.last` picks the decline one
val superuserdeletelink: String = notifier.notifications.find(a => a.toSuperuserInsteadOfUser).get.links.last
//call get on superuser delete link (with auth) => should delete event in db
Get(superuserdeletelink) ~> addCredentials(BasicHttpCredentials(superusername, superuserpassword)) ~> jwtRoute ~> check {
status.isSuccess() shouldEqual true
}
val databaseretreive4 = Await.result(database.retrieve(), Duration(1, "second"))
databaseretreive4.size shouldEqual 0
//call get on user delete link (with auth) - should fail
Get(userdeletelink) ~> addCredentials(BasicHttpCredentials(username, userpassword)) ~> jwtRoute ~> check {
handled shouldEqual false
}
val databaseretreive2 = Await.result(database.retrieve(), Duration(1, "second"))
databaseretreive2.size shouldEqual 0
notifier.notifications.size shouldEqual 4
}
}
// Route under test: event retrieval via GET with from/to query parameters.
val retreiveRouteContainer = new GetRetreiveRoute()
val retreiveRoute = retreiveRouteContainer.extractRoute
// Fixture events; id = -1 is a placeholder — presumably replaced by the database on insert
// (see fillDatabase, which returns the stored events).
val eventsWithoutIDs: Seq[Event] = Seq(
Event(-1, Seq(EventElementBlock("Bed A", 1000, 2000)), "tom", "tom@mouse.com", "telephone", "event 1", false),
Event(-1, Seq(EventElementBlock("Bed B", 1000, 2000)), "jerry", "jerry@cat.com", "telephone", "event 2", false),
Event(-1, Seq(EventElementBlock("Bed A", 3000, 4000), EventElementBlock("Bed B", 3000, 4000)), "tom",
"tom@mouse.com", "telephone", "event 3", false),
Event(-1, Seq(EventElementBlock("Bed B", 8000, 10000)), "jerry", "jerry@cat.com", "telephone", "event 4", false),
Event(-1, Seq(EventElementBlock("Bed A", 14000, 20000)), "tom", "tom@mouse.com", "telephone", "event 5", false)
)
/** Clears the given database, inserts all fixture events, and returns the stored events. */
def fillDatabase()(implicit db: MockDatabase): Seq[Event] = {
  db.clearDatabase()
  val pendingInserts = eventsWithoutIDs.map(db.create)
  Await.result(Future.sequence(pendingInserts), Duration(1, "minute"))
  db.getAll
}
"The Retreive Route" should {
val retreiveLink: String = retreiveRouteContainer.receivePath
// With an unbounded time window, every stored event must come back.
"retreive all events with min/max parameters" in {
val dbvals = fillDatabase()
val min: Long = Long.MinValue
val max: Long = Long.MaxValue
val mustVals = dbvals
Get(retreiveLink + "?from=" + min + "&to=" + max) ~>
addCredentials(BasicHttpCredentials(username, userpassword)) ~>
retreiveRoute ~>
check {
read[Seq[Event]](responseAs[String]).sortBy(_.id) shouldEqual mustVals.sortBy(_.id)
}
}
// Only events whose element time span intersects [from, to] are returned.
"retreive only a given period with correct from to parameters" in {
val dbvals = fillDatabase()
val min: Long = 2500
val max: Long = 9500
val mustVals: Seq[Event] = dbvals.filter(e => Databaseable.intersect(min, e.elements.map(_.from).min, max, e
.elements.map(_.to).max))
assert(mustVals.size == 2)
Get(retreiveLink + "?from=" + min + "&to=" + max) ~>
addCredentials(BasicHttpCredentials(username, userpassword)) ~>
retreiveRoute ~>
check {
read[Seq[Event]](responseAs[String]) shouldEqual mustVals
}
}
}
// Parses a raw (name, value) pair into an HttpHeader, failing loudly on invalid input.
// Replaces four copies of the same parse/match; the unused `errors` binding is dropped.
private def parseHeaderOrFail(name: String, value: String): HttpHeader =
  HttpHeader.parse(name, value) match {
    case Ok(header, _) => header
    case _ => throw new IllegalArgumentException(s"invalid header: $name")
  }

// CSRF/forwarding headers the POST routes expect on incoming requests.
def xforwardhost: HttpHeader = parseHeaderOrFail(PostChangesRoute.XForwardedHostHeader, "localhost:8000")
def rootpath: HttpHeader = parseHeaderOrFail(PostChangesRoute.RootPathHeader, "localhost:8000")
def origin: HttpHeader = parseHeaderOrFail(PostChangesRoute.OriginHeader, "localhost:8000")
def xrequestedwith: HttpHeader = parseHeaderOrFail(PostChangesRoute.XRequestedWithHeader, PostChangesRoute.XRequestedWithValue)
// Alias: the shared notifier doubles as the mailer inspected by the POST route tests.
def mailer = notifier
"The POST Routes" should {
import upickle.default._
// Route under test: combined user/superuser POST routes.
val routecontainer = new PostChangesRoute()
val route = routecontainer.extractRoute
// A well-formed user reservation request, serialized to JSON with upickle.
def correctUserpostJson: String = write(SimpleUserPost("john", "john@somewhere.org", "32525 555", "this is a " +
"comment",
Seq(EventElementBlock("Bed A", 24345, 50000))))
//Test Case 1 - correct userpostroute should lead to complete and confirm mail
// (Fixed duplicated article "a a" in the test name.)
"evolve a correct userpost to a mail with confirm link and a complete" in {
  val oldmailersize = mailer.notifications.size
  Post(PostChangesRoute.userPostPath, correctUserpostJson) ~> addHeaders(xforwardhost, rootpath, origin, xrequestedwith) ~>
    addCredentials(BasicHttpCredentials(username, userpassword)) ~> route ~> check {
    responseAs[String] shouldEqual PostChangesRoute.userresponsetext
  }
  mailer.notifications.size shouldEqual (oldmailersize + 1)
  mailer.notifications.last.sumOfFlags shouldEqual (1) //confirm email link notification
}
//Test Case 2 - nonsensical json should lead to reject
// Garbage payloads must be rejected (unhandled) rather than answered.
def nonsensicaluserpostjson: String = "{'hackidiy hack' : true}"
"reject nonsensical userposts" in {
Post(PostChangesRoute.userPostPath, nonsensicaluserpostjson) ~>
addCredentials(BasicHttpCredentials(username, userpassword)) ~> addHeaders(xforwardhost, rootpath, origin, xrequestedwith) ~> route ~> check {
handled shouldEqual false
}
}
//Test Case 3 - superuserroute with delete should lead to complete and delete from db and mails
// NOTE(review): unlike the sibling tests this one omits the `rootpath` header — confirm
// whether that is intentional for the superuser delete path.
"complete a superuserpost based delete" in {
val dbvals = fillDatabase()
def idToDeleteBySuperuser = dbvals.head.id
def superuserpostdeletejson: String = write(SimpleSuperuserPost(idToDeleteBySuperuser, Some(true), None))
val oldmailersize = mailer.notifications.size
Await.result(database.retrieveSpecific(idToDeleteBySuperuser), Duration(1, "second")).isDefined shouldEqual (true)
Post(PostChangesRoute.superuserPostPath, superuserpostdeletejson) ~>
addCredentials(BasicHttpCredentials(superusername, superuserpassword)) ~> addHeaders(xforwardhost, origin, xrequestedwith) ~> route ~> check {
responseAs[String] shouldEqual s"Event with ID = $idToDeleteBySuperuser was deleted"
}
Await.result(database.retrieveSpecific(idToDeleteBySuperuser), Duration(1, "second")) shouldEqual (None)
mailer.notifications.size shouldEqual (oldmailersize + 1)
mailer.notifications.last.sumOfFlags shouldEqual (4) //decline to user notification
}
//Test Case 4 - superuserroute with confirm should lead to complete and db changes and mails
// Confirming flips confirmedBySupseruser to true and sends two mails (flags 10 and 2).
"complete a superuserpost based confirm" in {
val dbvals = fillDatabase()
def idToConfirmBySuperuser = dbvals.head.id
def superuserpostconfirmjson: String = write(SimpleSuperuserPost(idToConfirmBySuperuser, None, Some(true)))
val oldmailersize = mailer.notifications.size
Await.result(database.retrieveSpecific(idToConfirmBySuperuser), Duration(1, "second")).get.confirmedBySupseruser
.shouldEqual(false)
Post(PostChangesRoute.superuserPostPath, superuserpostconfirmjson) ~>
addCredentials(BasicHttpCredentials(superusername, superuserpassword)) ~> addHeaders(xforwardhost, rootpath, origin, xrequestedwith) ~> route ~> check {
responseAs[String] shouldEqual s"Event with ID = ${idToConfirmBySuperuser} was confirmed"
}
Await.result(database.retrieveSpecific(idToConfirmBySuperuser), Duration(1, "second")).get.confirmedBySupseruser
.shouldEqual(true)
mailer.notifications.size shouldEqual (oldmailersize + 2)
mailer.notifications.apply(mailer.notifications.size - 2).sumOfFlags.shouldEqual(10)
mailer.notifications.last.sumOfFlags shouldEqual (2)
}
//Test Case 5 - superuserroute with unconfirm should lead to complete and db changes and mails
// Un-confirming a previously confirmed event flips the flag back and sends one mail (flag 0).
"complete a superuserpost based unconfirm" in {
val dbvals = fillDatabase()
def idToUnConfirmBySuperuser = dbvals.head.id
Await.result(database.updateConfirmation(idToUnConfirmBySuperuser, true), Duration(1, "second"))
def superuserpostunconfirmjson: String = write(SimpleSuperuserPost(idToUnConfirmBySuperuser, None, Some(false)))
val oldmailersize = mailer.notifications.size
Await.result(database.retrieveSpecific(idToUnConfirmBySuperuser), Duration(1, "second")).get.confirmedBySupseruser
.shouldEqual(true)
Post(PostChangesRoute.superuserPostPath, superuserpostunconfirmjson) ~>
addCredentials(BasicHttpCredentials(superusername, superuserpassword)) ~> addHeaders(xforwardhost, rootpath, origin, xrequestedwith) ~> route ~> check {
responseAs[String] shouldEqual s"Event with ID = ${idToUnConfirmBySuperuser} was set to unconfirmed"
}
Await.result(database.retrieveSpecific(idToUnConfirmBySuperuser), Duration(1, "second")).get.confirmedBySupseruser
.shouldEqual(false)
mailer.notifications.size shouldEqual (oldmailersize + 1)
mailer.notifications.last.sumOfFlags shouldEqual (0)
}
}
"The Complete Routeset" should {
//Test one case with normal reservation from start to finish (calling get route before, during, and after it)
// End-to-end happy path over the complete route set: initial GET, rejected POST on a
// blocked slot, accepted POST on a free slot, email confirm via link, superuser confirm
// via link — checking database contents after every step. Leftover debug println and
// commented-out debug lines were removed.
"allow a normal reservation from start to finish" in {
  import upickle.default._
  // combined route with its own isolated database and mailer
  implicit val db = new MockDatabase()
  implicit val mailer = new MockMailer()
  val (route, _, _, _, _) = KeepASeat.routeDefinitions()(userPostValidators = this.validatorsUser, superuserPostValidators =
    this.validatorsSuperuser, serverConfigSource = this.serverConfigSource,
    xssCleaner = this.xss, macSource = this.macSource,
    database = db, passwordConfigSource = this.passwordConfigSource,
    emailNotifier = mailer)
  // fill the fresh database with the fixture events
  val dbvals = fillDatabase()(db)
  // a post colliding with an existing event's time block — must be rejected
  val postBlocked: SimpleUserPost = SimpleUserPost("Eve", "Eve@somewhere.com", "0315235 2352432 23523", "just a normal" +
    " registration", dbvals.head.elements)
  // a post on a slot after every existing event — guaranteed to be free
  val postFree: SimpleUserPost = SimpleUserPost("Eve", "Eve@somewhere.com", "0315235 2352432 23523", "just a normal" +
    " registration", Seq(EventElementBlock("Bed A", dbvals.map(_.elements.map(_.to).max).max + 1000,
    dbvals.map(_.elements.map(_.to).max).max + 5000)))
  // the id the mock database will assign to the next inserted event — assumes sequential ids
  val freeid: Long = dbvals.map(_.id).max + 1
  val eventwithidUnconfirmed: Event = Event(freeid, postFree.elements, postFree.name, postFree.email,
    postFree.telephone, postFree.commentary, confirmedBySupseruser = false)
  val firsgetShouldResult: Seq[Event] = dbvals
  val secondgetShouldResult: Seq[Event] = firsgetShouldResult.+:(eventwithidUnconfirmed)
  val thirdgetShouldResult: Seq[Event] = firsgetShouldResult.+:(
    eventwithidUnconfirmed.copy(confirmedBySupseruser = true))
  // links are read lazily from the mock mailer; sumOfFlags encodes the notification type
  def emailconfirmlink: String = "http://" + mailer.notifications.filter(e => e.sumOfFlags == 1).last.links.head
  def superuserconfirmlink: String = "http://" + mailer.notifications.filter(e => e.sumOfFlags == 8).last.links.head
  def freepostresponsetext: String = "You have received an email containing a link. Press that link to confirm " +
    "your email address."
  def emailconfirmresponsetext: String = LinkJWTRoute.emailConfirmSuccessText
  def superuserconfirmresponsetext: String = LinkJWTRoute.confirmReservationText
  // order events by id so sequence comparisons are stable
  implicit val ordering: Ordering[Event] = new scala.math.Ordering[Event]() {
    override def compare(x: Event, y: Event): Int = x.id.compareTo(y.id)
  }
  // get and verify the initial database contents
  val min: Long = Long.MinValue
  val max: Long = Long.MaxValue
  Get("/events" + "?from=" + min + "&to=" + max) ~>
    addCredentials(BasicHttpCredentials(username, userpassword)) ~> route ~> check {
    read[Seq[Event]](responseAs[String]).sorted shouldEqual firsgetShouldResult.sorted
  }
  // userpost on blocked time (should be rejected)
  Post("/newevent", write(postBlocked)) ~>
    addCredentials(BasicHttpCredentials(username, userpassword)) ~> addHeaders(xforwardhost, rootpath, origin, xrequestedwith) ~> route ~> check {
    handled shouldEqual false
  }
  // userpost on time not blocked (should be accepted)
  Post("/newevent", write(postFree)) ~>
    addCredentials(BasicHttpCredentials(username, userpassword)) ~> addHeaders(xforwardhost, rootpath, origin, xrequestedwith) ~> route ~> check {
    responseAs[String] shouldEqual freepostresponsetext
  }
  // confirm the email address via the link extracted from the mailer
  Get(emailconfirmlink) ~>
    addCredentials(BasicHttpCredentials(username, userpassword)) ~> route ~> check {
    responseAs[String] shouldEqual LinkJWTRoute.selfClosingHTML(emailconfirmresponsetext)
  }
  // the event must now exist in the database, still unconfirmed
  Get("/events" + "?from=" + min + "&to=" + max) ~>
    addCredentials(BasicHttpCredentials(username, userpassword)) ~> route ~> check {
    read[Seq[Event]](responseAs[String]).sorted shouldEqual secondgetShouldResult.sorted
  }
  // superuser confirms via the link from the superuser notification
  Get(superuserconfirmlink) ~>
    addCredentials(BasicHttpCredentials(superusername, superuserpassword)) ~> route ~> check {
    responseAs[String] shouldEqual LinkJWTRoute.selfClosingHTML(superuserconfirmresponsetext)
  }
  // the event is now confirmed
  Get("/events" + "?from=" + min + "&to=" + max) ~>
    addCredentials(BasicHttpCredentials(username, userpassword)) ~> route ~> check {
    read[Seq[Event]](responseAs[String]).sorted shouldEqual thirdgetShouldResult.sorted
  }
  // last two notifications: confirmation to superuser (flag 10) and to user (flag 2)
  // NOTE(review): flag semantics inferred from sibling tests — confirm against MockMailer.
  mailer.notifications.apply(mailer.notifications.size - 2).sumOfFlags shouldEqual 10
  mailer.notifications.apply(mailer.notifications.size - 1).sumOfFlags shouldEqual 2
}
}
}
| n3phtys/keep-a-seat | src/test/scala/org/nephtys/keepaseat/CompleteRouteSpec.scala | Scala | mit | 28,918 |
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.fixture
import akka.actor._
import akka.testkit._
import org.ensime.api._
import org.ensime.core._
import org.ensime.vfs._
import org.ensime.indexer.SearchService
import org.scalatest._
/** Supplies a test body with an Analyzer actor and the EnsimeConfig it was built from. */
trait AnalyzerFixture {
def withAnalyzer(testCode: (EnsimeConfig, TestActorRef[Analyzer]) => Any): Any
}
object AnalyzerFixture {
  /** Builds an Analyzer TestActorRef wired to probe stand-ins for the project and indexer actors. */
  private[fixture] def create(search: SearchService)(implicit system: ActorSystem, config: EnsimeConfig, vfs: EnsimeVFS): TestActorRef[Analyzer] = {
    val indexerProbe = TestProbe()
    val projectProbe = TestProbe()
    TestActorRef(Analyzer(projectProbe.ref, indexerProbe.ref, search))
  }
}
/** Creates a fresh VFS, TestKit and SearchService per test invocation. */
trait IsolatedAnalyzerFixture
extends AnalyzerFixture
with IsolatedEnsimeVFSFixture
with IsolatedSearchServiceFixture
with IsolatedTestKitFixture {
override def withAnalyzer(testCode: (EnsimeConfig, TestActorRef[Analyzer]) => Any): Any = {
withVFS { implicit vfs =>
withTestKit { testkit =>
// brings the testkit's implicit ActorSystem into scope for AnalyzerFixture.create
import testkit._
withSearchService { (config, search) =>
implicit val c = config
testCode(config, AnalyzerFixture.create(search))
}
}
}
}
}
/** Shares a single Analyzer instance across all tests in a suite, created once in beforeAll. */
trait SharedAnalyzerFixture
extends AnalyzerFixture
with SharedTestKitFixture
with SharedSearchServiceFixture
with BeforeAndAfterAll {
// assigned in beforeAll; the one actor handed to every test body
private[fixture] var analyzer: TestActorRef[Analyzer] = _
override def beforeAll(): Unit = {
super.beforeAll()
implicit val sys = _testkit.system
implicit val config = _config
analyzer = AnalyzerFixture.create(_search)
}
override def withAnalyzer(testCode: (EnsimeConfig, TestActorRef[Analyzer]) => Any): Any = testCode(_config, analyzer)
}
| espinhogr/ensime-server | core/src/it/scala/org/ensime/fixture/AnalyzerFixture.scala | Scala | gpl-3.0 | 1,793 |
package org.jetbrains.sbt
package project
import java.io.File
import ProjectStructureDsl._
import org.jetbrains.plugins.scala.SlowTests
import org.jetbrains.plugins.scala.util.TestUtils
import org.junit.experimental.categories.Category
// Slow integration tests: import small sbt projects and assert the resulting IDE
// project structure (modules, content roots, libraries, module dependencies).
@Category(Array(classOf[SlowTests]))
class ProjectImportingTest extends ImportingTestCase with InexactMatch {
// local ivy cache used to resolve the expected scala-library jar path
def ivyCacheDir: File = new File(TestUtils.getIvyCachePath)
// One module plus its synthetic "-build" module; standard sbt source/resource layout.
def testSimple() = runTest(
new project("testSimple") {
lazy val scalaLibrary = new library("SBT: org.scala-lang:scala-library:2.11.6:jar") {
classes += (ivyCacheDir / "org.scala-lang" / "scala-library" / "jars" / "scala-library-2.11.6.jar").getAbsolutePath
}
libraries += scalaLibrary
modules += new module("simple") {
contentRoots += getProjectPath
ProjectStructureDsl.sources := Seq("src/main/scala", "src/main/java")
testSources := Seq("src/test/scala", "src/test/java")
resources := Seq("src/main/resources")
testResources := Seq("src/test/resources")
excluded := Seq("target")
libraryDependencies += scalaLibrary
}
modules += new module("simple-build") {
ProjectStructureDsl.sources := Seq("")
excluded := Seq("project/target", "target")
}
})
// foo dependsOn bar must yield an exported module dependency foo -> bar.
def testMultiModule() = runTest(
new project("testMultiModule") {
lazy val foo = new module("foo") {
moduleDependencies += new dependency(bar) {
isExported := true
}
}
lazy val bar = new module("bar")
lazy val root = new module("multiModule")
modules := Seq(root, foo, bar)
})
// Jars under lib/ appear as the synthetic "unmanaged-jars" library.
def testUnmanagedDependency() = runTest(
new project("testUnmanagedDependency") {
modules += new module("unmanagedDependency") {
lazy val unmanagedLibrary = new library("SBT: unmanaged-jars") {
classes += (testProjectDir / "lib" / "unmanaged.jar").getAbsolutePath
}
libraries += unmanagedLibrary
libraryDependencies += unmanagedLibrary
}
}
)
// Shared-source roots become an extra "-sources" module both consumers depend on.
def testSharedSources() = runTest(
new project("testSharedSources") {
lazy val sharedSourcesModule = new module("sharedSources-sources") {
contentRoots += getProjectPath + "/shared"
ProjectStructureDsl.sources += "src/main/scala"
}
lazy val foo = new module("foo") {
moduleDependencies += sharedSourcesModule
}
lazy val bar = new module("bar") {
moduleDependencies += sharedSourcesModule
}
modules := Seq(foo, bar, sharedSourcesModule)
}
)
}
| SergeevPavel/intellij-scala | test/org/jetbrains/sbt/project/ProjectImportingTest.scala | Scala | apache-2.0 | 2,585 |
// Compiler regression test (positive): an empty `s""` interpolator followed by another
// statement must parse without a spurious "end of statement expected" error (see note below).
object X {
val x = s""
val y = true
}
/* was:
4 | val y = true
| ^^^
| end of statement expected but 'val' found
*/
| som-snytt/dotty | tests/pos/t7919.scala | Scala | apache-2.0 | 129 |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.kudu.backup
import java.net.InetAddress
import org.apache.kudu.client.AsyncKuduClient
import org.apache.yetus.audience.InterfaceAudience
import org.apache.yetus.audience.InterfaceStability
import scopt.OptionParser
/** Options shared by the Kudu backup and restore Spark jobs. */
@InterfaceAudience.Private
@InterfaceStability.Unstable
trait CommonOptions {
val tables: Seq[String]
val rootPath: String
val kuduMasterAddresses: String
}
/**
 * Parsed options for the backup job. `toMs` defaults to "now"; `fromMs`/`forceFull`
 * control incremental vs. full backups. CLI semantics and defaults live in
 * [[BackupOptions.parser]].
 */
@InterfaceAudience.Private
@InterfaceStability.Unstable
case class BackupOptions(
tables: Seq[String],
rootPath: String,
kuduMasterAddresses: String = InetAddress.getLocalHost.getCanonicalHostName,
toMs: Long = System.currentTimeMillis(),
forceFull: Boolean = BackupOptions.DefaultForceFull,
fromMs: Long = BackupOptions.DefaultFromMS,
format: String = BackupOptions.DefaultFormat,
scanBatchSize: Int = BackupOptions.DefaultScanBatchSize,
scanRequestTimeoutMs: Long = BackupOptions.DefaultScanRequestTimeoutMs,
scanLeaderOnly: Boolean = BackupOptions.DefaultScanLeaderOnly,
scanPrefetching: Boolean = BackupOptions.DefaultScanPrefetching,
keepAlivePeriodMs: Long = BackupOptions.DefaultKeepAlivePeriodMs)
extends CommonOptions
/** Defaults and CLI parser for [[BackupOptions]]. */
object BackupOptions {
  val DefaultForceFull: Boolean = false
  val DefaultFromMS: Long = 0
  val DefaultFormat: String = "parquet"
  val DefaultScanBatchSize: Int = 1024 * 1024 * 20 // 20 MiB
  val DefaultScanRequestTimeoutMs: Long =
    AsyncKuduClient.DEFAULT_OPERATION_TIMEOUT_MS // 30 seconds
  val DefaultScanLeaderOnly: Boolean = false
  // TODO (KUDU-1260): Add a test and enable by default?
  val DefaultScanPrefetching: Boolean = false
  val DefaultKeepAlivePeriodMs: Long = AsyncKuduClient.DEFAULT_KEEP_ALIVE_PERIOD_MS

  // We use the program name to make the help output show the spark invocation required.
  val ClassName: String = KuduBackup.getClass.getCanonicalName.dropRight(1) // Remove trailing `$`
  val ProgramName: String = "spark-submit --class " + ClassName + " [spark-options] " +
    "<application-jar>"

  val parser: OptionParser[BackupOptions] =
    new OptionParser[BackupOptions](ProgramName) {
      opt[String]("rootPath")
        .action((v, o) => o.copy(rootPath = v))
        .text("The root path to output backup data. Accepts any Spark compatible path.")
        .required()
      opt[String]("kuduMasterAddresses")
        .action((v, o) => o.copy(kuduMasterAddresses = v))
        .text("Comma-separated addresses of Kudu masters. Default: localhost")
        .optional()
      opt[Boolean]("forceFull")
        .action((v, o) => o.copy(forceFull = v))
        .text("If true, this will be a full backup even if another full already exists. " +
          "Default: " + DefaultForceFull)
        .optional()
      opt[Long]("fromMs")
        .action((v, o) => o.copy(fromMs = v))
        .text(
          "A UNIX timestamp in milliseconds that defines the start time of an incremental " +
            "backup. If unset, the fromMs will be defined by previous backups in the root " +
            "directory.")
        .optional()
      opt[Long]("timestampMs")
        .action((v, o) => o.copy(toMs = v))
        // TODO (KUDU-2677): Document the limitations based on cluster configuration.
        .text("A UNIX timestamp in milliseconds since the epoch to execute scans at. " +
          "Default: `System.currentTimeMillis()`")
        .optional()
      opt[Int]("scanBatchSize")
        .action((v, o) => o.copy(scanBatchSize = v))
        .text("The maximum number of bytes returned by the scanner, on each batch. " +
          "Default: " + DefaultScanBatchSize)
        .optional()
      // Was opt[Int]: the target field is a Long, so the CLI silently capped values to
      // Int range; opt[Long] matches the field and keepAlivePeriodMs below.
      opt[Long]("scanRequestTimeoutMs")
        .action((v, o) => o.copy(scanRequestTimeoutMs = v))
        .text("Sets how long in milliseconds each scan request to a server can last. " +
          "Default: " + DefaultScanRequestTimeoutMs)
        .optional()
      opt[Long]("keepAlivePeriodMs")
        .action((v, o) => o.copy(keepAlivePeriodMs = v))
        .text("Sets the period at which to send keep-alive requests to the tablet server to " +
          "ensure that scanners do not time out. Default: " + DefaultKeepAlivePeriodMs)
        .optional()
      opt[Boolean]("scanLeaderOnly")
        .action((v, o) => o.copy(scanLeaderOnly = v))
        .text("If true scans will only use the leader replica, otherwise scans will take place " +
          "at the closest replica. Default: " + DefaultScanLeaderOnly)
        .hidden()
        .optional()
      opt[String]("format")
        .action((v, o) => o.copy(format = v))
        .text("The file format to use when writing the data. Default: " + DefaultFormat)
        .hidden()
        .optional()
      opt[Unit]("scanPrefetching")
        .action((_, o) => o.copy(scanPrefetching = true))
        .text("An experimental flag to enable pre-fetching data. " +
          "Default: " + DefaultScanPrefetching)
        .hidden()
        .optional()
      help("help").text("prints this usage text")
      arg[String]("<table>...")
        .unbounded()
        .action((v, o) => o.copy(tables = o.tables :+ v))
        .text("A list of tables to be backed up.")
    }

  /**
   * Parses the passed arguments into Some[KuduBackupOptions].
   *
   * If the arguments are bad, an error message is displayed
   * and None is returned.
   *
   * @param args The arguments to parse.
   * @return Some[KuduBackupOptions] if parsing was successful, None if not.
   */
  def parse(args: Seq[String]): Option[BackupOptions] = {
    parser.parse(args, BackupOptions(Seq(), null))
  }
}
/**
 * Parsed options for the restore job. `createTables` controls whether target tables are
 * created; `tableSuffix` renames restored tables; `timestampMs` bounds restore candidates.
 */
@InterfaceAudience.Private
@InterfaceStability.Unstable
case class RestoreOptions(
tables: Seq[String],
rootPath: String,
kuduMasterAddresses: String = InetAddress.getLocalHost.getCanonicalHostName,
tableSuffix: String = "",
createTables: Boolean = RestoreOptions.DefaultCreateTables,
timestampMs: Long = System.currentTimeMillis()
) extends CommonOptions
/** Defaults and CLI parser for [[RestoreOptions]]. */
object RestoreOptions {
val DefaultCreateTables: Boolean = true
// The program name makes the help output show the spark-submit invocation required.
val ClassName: String = KuduRestore.getClass.getCanonicalName.dropRight(1) // Remove trailing `$`
val ProgramName: String = "spark-submit --class " + ClassName + " [spark-options] " +
"<application-jar>"
val parser: OptionParser[RestoreOptions] =
new OptionParser[RestoreOptions](ProgramName) {
opt[String]("rootPath")
.action((v, o) => o.copy(rootPath = v))
.text("The root path to the backup data. Accepts any Spark compatible path.")
.required()
opt[String]("kuduMasterAddresses")
.action((v, o) => o.copy(kuduMasterAddresses = v))
.text("Comma-separated addresses of Kudu masters. Default: localhost")
.optional()
opt[Boolean]("createTables")
.action((v, o) => o.copy(createTables = v))
.text("If true, create the tables during restore. Set to false if the target tables " +
"already exist. Default: " + DefaultCreateTables)
.optional()
opt[String]("tableSuffix")
.action((v, o) => o.copy(tableSuffix = v))
.text("If set, the suffix to add to the restored table names. Only used when " +
"createTables is true.")
.optional()
opt[Long]("timestampMs")
.action((v, o) => o.copy(timestampMs = v))
.text("A UNIX timestamp in milliseconds that defines the latest time to use when " +
"selecting restore candidates. Default: `System.currentTimeMillis()`")
.optional()
help("help").text("prints this usage text")
arg[String]("<table>...")
.unbounded()
.action((v, o) => o.copy(tables = o.tables :+ v))
.text("A list of tables to be restored.")
}
/**
* Parses the passed arguments into Some[KuduRestoreOptions].
*
* If the arguments are bad, an error message is displayed
* and None is returned.
*
* @param args The arguments to parse.
* @return Some[KuduRestoreOptions] if parsing was successful, None if not.
*/
def parse(args: Seq[String]): Option[RestoreOptions] = {
parser.parse(args, RestoreOptions(Seq(), null))
}
}
| InspurUSA/kudu | java/kudu-backup/src/main/scala/org/apache/kudu/backup/Options.scala | Scala | apache-2.0 | 8,900 |
/*
* This file is part of Apparat.
*
* Copyright (C) 2010 Joa Ebert
* http://www.joa-ebert.com/
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package apparat.taas.backend.jbc
import apparat.utils.IO._
import java.io.{FileOutputStream => JFileOutputStream, File => JFile}
import apparat.log.SimpleLog
/**
* @author Joa Ebert
*/
class JbcClassWriter(map: Map[String, Array[Byte]]) extends SimpleLog {
  /**
   * Writes every entry of `map` below the given parent directory, creating
   * package subdirectories as needed. Keys are fully qualified class names,
   * values are the raw class-file bytes.
   */
  def write(parent: JFile): Unit = {
    parent.mkdirs()
    assume(parent.isDirectory)
    val parentPath = parent.getAbsolutePath + JFile.separator
    for ((name, data) <- map) {
      val dir = new JFile(parentPath + dirname(name))
      val file = new JFile(parentPath + filename(name))
      dir.mkdirs()
      log.debug("Writing %s to %s.", name, file.toString)
      using(new JFileOutputStream(file)) { _ write data }
    }
  }

  /**
   * Directory part of a fully qualified class name, e.g. "a.b.C" -> "a/b".
   * Uses replace(char, char) instead of the previous
   * replaceAll("\\.", "\\" + JFile.separator): regex replacement strings give
   * backslashes special meaning, so embedding the platform separator there was
   * fragile (and needless regex work for a plain character substitution).
   */
  def dirname(name: String): String = name.lastIndexOf('.') match {
    case -1 => ""
    case n => name.substring(0, n).replace('.', JFile.separatorChar)
  }

  /** Relative class-file path for a fully qualified name, e.g. "a.b.C" -> "a/b/C.class". */
  def filename(name: String): String = name.replace('.', JFile.separatorChar) + ".class"
}
| joa/apparat | apparat-taas/src/main/scala/apparat/taas/backend/jbc/JbcClassWriter.scala | Scala | lgpl-2.1 | 1,763 |
package sbtdocker.immutable
import sbtdocker.{StageFile, DockerfileLike, Instruction}
object Dockerfile {
  /** A Dockerfile with no instructions and no staged files. */
  def empty = Dockerfile()
}
/**
* Immutable Dockerfile.
*
* @example {{{
* val jarFile: File
*
* Dockerfile.empty
* .from("dockerfile/java")
* .add(jarFile, "/srv/app.jar")
* .workDir("/srv")
* .cmd("java", "-jar", "app.jar")
* }}}
*
* @param instructions Ordered sequence of Dockerfile instructions.
* @param stagedFiles Files and directories that should be copied to the stage directory.
*/
case class Dockerfile(instructions: Seq[Instruction] = Seq.empty,
                      stagedFiles: Seq[StageFile] = Seq.empty) extends DockerfileLike[Dockerfile] {
  /** Returns a new Dockerfile with `instruction` appended; staged files are untouched. */
  def addInstruction(instruction: Instruction) = copy(instructions = instructions :+ instruction)

  /** Returns a new Dockerfile with one more file registered for staging. */
  def stageFile(file: StageFile) = copy(stagedFiles = stagedFiles :+ file)

  /** Returns a new Dockerfile with a batch of files registered for staging. */
  def stageFiles(files: TraversableOnce[StageFile]) = copy(stagedFiles = stagedFiles ++ files)
}
| Banno/sbt-docker | src/main/scala/sbtdocker/immutable/Dockerfile.scala | Scala | mit | 988 |
package io.pathfinder.data
/**
 * Generic CRUD (create/read/update/delete) data-access interface.
 *
 * @tparam K the type of the model's primary key
 * @tparam M the type of the persisted model
 */
abstract class CrudDao[K,M] {
  /**
   * Adds the specified model to the database and returns the stored instance.
   */
  def create(model: M): M

  /**
   * Creates a model according to the specified resource; if the update returns
   * false, no model is created in the database and None is returned.
   */
  def create(create: Resource[M]): Option[M]

  /**
   * Applies the resource as an update to the model with the given id;
   * returns the updated model if it exists, otherwise returns None.
   */
  def update(id: K, update: Resource[M]): Option[M]

  /**
   * Updates the specified model; None indicates the model could not be updated.
   */
  def update(model: M): Option[M]

  /**
   * Deletes the model with the specified id, returning the deleted model if any.
   */
  def delete(id: K): Option[M]

  /**
   * Returns the model with the specified id; if the id does not exist,
   * None is returned instead.
   */
  def read(id: K): Option[M]

  /**
   * Returns all of the models as a Seq.
   */
  def readAll: Seq[M]
}
| CSSE497/pathfinder-server | app/io/pathfinder/data/CrudDao.scala | Scala | mit | 998 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.optimizer
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.Rule
/**
* Replaces [[ResolvedHint]] operators from the plan. Move the [[HintInfo]] to associated [[Join]]
* operators, otherwise remove it if no [[Join]] operator is matched.
*/
object EliminateResolvedHint extends Rule[LogicalPlan] {
  // This is also called in the beginning of the optimization phase, and as a result
  // is using transformUp rather than resolveOperators.
  def apply(plan: LogicalPlan): LogicalPlan = {
    // First pass: for every join, pull hints out of both children and attach
    // them to the join node itself.
    val pulledUp = plan transformUp {
      case j: Join =>
        val (newLeft, leftHints) = extractHintsFromPlan(j.left)
        val (newRight, rightHints) = extractHintsFromPlan(j.right)
        val newJoinHint = JoinHint(mergeHints(leftHints), mergeHints(rightHints))
        j.copy(left = newLeft, right = newRight, hint = newJoinHint)
    }
    // Second pass: any hint still present had no join to attach to; warn and
    // remove it by replacing the node with its child.
    pulledUp.transformUp {
      case h: ResolvedHint =>
        handleInvalidHintInfo(h.hints)
        h.child
    }
  }

  /**
   * Combine a list of [[HintInfo]]s into one [[HintInfo]].
   */
  private def mergeHints(hints: Seq[HintInfo]): Option[HintInfo] = {
    // Pairwise reduction; hints dropped during a merge are reported through
    // the callback. None when the list is empty.
    hints.reduceOption((h1, h2) => h1.merge(h2, handleOverriddenHintInfo))
  }

  /**
   * Extract all hints from the plan, returning a list of extracted hints and the transformed plan
   * with [[ResolvedHint]] nodes removed. The returned hint list comes in top-down order.
   * Note that hints can only be extracted from under certain nodes. Those that cannot be extracted
   * in this method will be cleaned up later by this rule, and may emit warnings depending on the
   * configurations.
   */
  private[sql] def extractHintsFromPlan(plan: LogicalPlan): (LogicalPlan, Seq[HintInfo]) = {
    plan match {
      case h: ResolvedHint =>
        val (plan, hints) = extractHintsFromPlan(h.child)
        (plan, h.hints +: hints)
      case u: UnaryNode =>
        val (plan, hints) = extractHintsFromPlan(u.child)
        (u.withNewChildren(Seq(plan)), hints)
      // TODO revisit this logic:
      // except and intersect are semi/anti-joins which won't return more data then
      // their left argument, so the broadcast hint should be propagated here
      case i: Intersect =>
        val (plan, hints) = extractHintsFromPlan(i.left)
        (i.copy(left = plan), hints)
      case e: Except =>
        val (plan, hints) = extractHintsFromPlan(e.left)
        (e.copy(left = plan), hints)
      // Any other node stops the extraction: hints below it stay in place.
      case p: LogicalPlan => (p, Seq.empty)
    }
  }

  // Warns about a hint that never reached a join.
  private def handleInvalidHintInfo(hint: HintInfo): Unit = {
    logWarning(s"A join hint $hint is specified but it is not part of a join relation.")
  }

  // Warns about a hint discarded during merging.
  private def handleOverriddenHintInfo(hint: HintInfo): Unit = {
    logWarning(s"Join hint $hint is overridden by another hint and will not take effect.")
  }
}
| highfei2011/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/EliminateResolvedHint.scala | Scala | apache-2.0 | 3,658 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.mvc
import play.api.http.HeaderNames
import play.api.mvc.request.RemoteConnection
import play.api.test.FakeRequest
/** Tests URL construction from Calls and charset parsing on request headers. */
class HttpSpec extends org.specs2.mutable.Specification {
  title("HTTP")

  "Absolute URL" should {
    val req = FakeRequest().withHeaders(HeaderNames.HOST -> "playframework.com")

    "have HTTP scheme" in {
      (Call("GET", "/playframework").absoluteURL()(req).
        aka("absolute URL 1") must_== "http://playframework.com/playframework").
        and(Call("GET", "/playframework").absoluteURL(secure = false)(req).
          aka("absolute URL 2") must_== (
            "http://playframework.com/playframework"))
    }

    "have HTTPS scheme" in {
      // A secure connection flips the scheme even without an explicit secure flag.
      (Call("GET", "/playframework").absoluteURL()(req.withConnection(RemoteConnection(req.connection.remoteAddress, true, req.connection.clientCertificateChain))).
        aka("absolute URL 1") must_== (
          "https://playframework.com/playframework")) and (
        Call("GET", "/playframework").absoluteURL(secure = true)(req).
          aka("absolute URL 2") must_== (
            "https://playframework.com/playframework"))
    }
  }

  "Web socket URL" should {
    val req = FakeRequest().withHeaders(
      HeaderNames.HOST -> "playframework.com")

    "have WS scheme" in {
      (Call("GET", "/playframework").webSocketURL()(req).
        aka("absolute URL 1") must_== "ws://playframework.com/playframework").
        and(Call("GET", "/playframework").webSocketURL(secure = false)(req).
          aka("absolute URL 2") must_== (
            "ws://playframework.com/playframework"))
    }

    "have WSS scheme" in {
      (Call("GET", "/playframework").webSocketURL()(req.withConnection(RemoteConnection(req.connection.remoteAddress, true, req.connection.clientCertificateChain))).
        aka("absolute URL 1") must_== (
          "wss://playframework.com/playframework")) and (
        Call("GET", "/playframework").webSocketURL(secure = true)(req).
          aka("absolute URL 2") must_== (
            "wss://playframework.com/playframework"))
    }
  }

  "RequestHeader" should {
    // These two examples previously shared the exact same description
    // ("parse quoted and unquoted charset"), which made their reports
    // indistinguishable; each now names the single form it actually covers.
    "parse quoted charset" in {
      FakeRequest().withHeaders(
        HeaderNames.CONTENT_TYPE -> """text/xml; charset="utf-8"""").
        charset aka "request charset" must beSome("utf-8")
    }

    "parse unquoted charset" in {
      FakeRequest().withHeaders(
        HeaderNames.CONTENT_TYPE -> "text/xml; charset=utf-8").
        charset aka "request charset" must beSome("utf-8")
    }
  }
}
| Shruti9520/playframework | framework/src/play-integration-test/src/test/scala/play/it/mvc/HttpSpec.scala | Scala | apache-2.0 | 2,614 |
import sbt._
import Keys._
import StringUtilities.normalize
object Util {
  // Task producing the set of Scala reserved words.
  lazy val scalaKeywords = TaskKey[Set[String]]("scala-keywords")
  // Task writing the generated ScalaKeywords source file and returning it.
  lazy val generateKeywords = TaskKey[File]("generateKeywords")

  // Settings for modules containing Java sources only (no Scala cross-building).
  lazy val javaOnlySettings = Seq[Setting[_]](
    crossPaths := false,
    compileOrder := CompileOrder.JavaThenScala,
    unmanagedSourceDirectories in Compile <<= Seq(javaSource in Compile).join,
    crossScalaVersions := Seq(Dependencies.scala211),
    autoScalaLibrary := false
  )

  // Asks the Scala compiler itself for its keyword table.
  def getScalaKeywords: Set[String] =
    {
      val g = new scala.tools.nsc.Global(new scala.tools.nsc.Settings)
      g.nme.keywords.map(_.toString)
    }

  // Generates the sbt.internal.util.ScalaKeywords source file under `base`,
  // embedding the keyword set as a Set literal, and returns the written file.
  def writeScalaKeywords(base: File, keywords: Set[String]): File =
    {
      val init = keywords.map(tn => '"' + tn + '"').mkString("Set(", ", ", ")")
      val ObjectName = "ScalaKeywords"
      val PackageName = "sbt.internal.util"
      val keywordsSrc =
        """package %s
object %s {
val values = %s
}""".format(PackageName, ObjectName, init)
      val out = base / PackageName.replace('.', '/') / (ObjectName + ".scala")
      IO.write(out, keywordsSrc)
      out
    }

  // Wires the two tasks into the Compile configuration as a source generator.
  def keywordsSettings: Seq[Setting[_]] = inConfig(Compile)(Seq(
    scalaKeywords := getScalaKeywords,
    generateKeywords <<= (sourceManaged, scalaKeywords) map writeScalaKeywords,
    sourceGenerators <+= generateKeywords map (x => Seq(x))
  ))
}
| Duhemm/util | project/Util.scala | Scala | bsd-3-clause | 1,387 |
package org.broadinstitute.sting.queue.util
/** Small helpers for building pluralized messages. */
object TextFormatUtils {
  // Shared suffix selection: "s" when the count is considered plural.
  private def suffix(isPlural: Boolean): String = if (isPlural) "s" else ""

  /** "s" when the count is strictly greater than 1, "" otherwise (including 0). */
  def plural(x: Int): String = suffix(x > 1)

  /** "s" when the count is strictly greater than 1, "" otherwise (including 0). */
  def plural(x: Long): String = suffix(x > 1)

  /** "s" whenever the value is not exactly 1 (so 0.5 and 2.0 both pluralize). */
  def plural(x: Float): String = suffix(x != 1)

  /** "s" whenever the value is not exactly 1 (so 0.5 and 2.0 both pluralize). */
  def plural(x: Double): String = suffix(x != 1)
}
| iontorrent/Torrent-Variant-Caller-stable | public/scala/src/org/broadinstitute/sting/queue/util/TextFormatUtils.scala | Scala | mit | 843 |
package justin.db.storage
import java.util.UUID
import justin.db.storage.PluggableStorageProtocol.{Ack, DataOriginality, StorageGetData}
import scala.concurrent.Future
/** Read side of the pluggable storage protocol. */
trait GetStorageProtocol {
  // resolveOriginality tells the storage whether the id belongs to a primary
  // partition or a replica on this node.
  def get(id: UUID)(resolveOriginality: UUID => DataOriginality): Future[StorageGetData]
}

/** Write side of the pluggable storage protocol. */
trait PutStorageProtocol {
  def put(data: JustinData)(resolveOriginality: UUID => DataOriginality): Future[Ack]
}

/** Full storage contract: both reads and writes. */
trait PluggableStorageProtocol extends GetStorageProtocol with PutStorageProtocol
object PluggableStorageProtocol {

  /** Result of a get: either a single stored record or nothing. */
  sealed trait StorageGetData
  object StorageGetData {
    case class Single(data: JustinData) extends StorageGetData
    case object None extends StorageGetData
  }

  /** Acknowledgement of a successful put. */
  sealed trait Ack
  case object Ack extends Ack {
    // Pre-built successful future so writers don't allocate one per call.
    val future: Future[Ack] = Future.successful(Ack)
  }

  /** Marks whether data belongs to this node as primary owner or as a replica. */
  sealed trait DataOriginality { def ringPartitionId: RingPartitionId }
  object DataOriginality {
    case class Primary(ringPartitionId: RingPartitionId) extends DataOriginality
    case class Replica(ringPartitionId: RingPartitionId) extends DataOriginality
  }
}
| speedcom/JustinDB | justin-storage-api/src/main/scala/justin/db/storage/PluggableStorageProtocol.scala | Scala | apache-2.0 | 1,090 |
package com.twitter.finagle.redis.integration
import com.twitter.finagle.redis._
import com.twitter.finagle.redis.tags.{RedisTest, ClientTest}
import com.twitter.util.{Await, Return}
/**
 * Integration tests for Redis connection-level commands; each test runs
 * against a client obtained from withRedisClient and only checks that the
 * command completes successfully (Return.Unit).
 */
final class ConnectionClientIntegrationSuite extends RedisClientTest {

  test("Correctly perform the SELECT command", RedisTest, ClientTest) {
    withRedisClient { client => assert(Await.result(client.select(1).liftToTry) == Return.Unit) }
  }

  test("Correctly perform the QUIT command", RedisTest, ClientTest) {
    withRedisClient { client => assert(Await.result(client.quit().liftToTry) == Return.Unit) }
  }

  test("Correctly perform the PING command without arguments", RedisTest, ClientTest) {
    withRedisClient { client => assert(Await.result(client.ping().liftToTry) == Return.Unit) }
  }
}
| twitter/finagle | finagle-redis/src/it/scala/com/twitter/finagle/redis/commands/connection/ConnectionClientIntegrationSuite.scala | Scala | apache-2.0 | 791 |
package it.polimi.genomics.spark.implementation.RegionsOperators
import it.polimi.genomics.core.DataTypes.{GRECORD, MetaType}
import it.polimi.genomics.core.GMQLLoader
import it.polimi.genomics.spark.implementation.loaders.Loaders._
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path, PathFilter}
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.slf4j.LoggerFactory
/**
* Created by abdulrahman Kaitoua on 25/05/15.
*/
object ReadRD {
  // Previously obtained via SelectRD.getClass, which attributed all of this
  // object's log output to SelectRD; use this object's own class instead.
  private final val logger = LoggerFactory.getLogger(this.getClass)

  /**
   * Builds an RDD of genomic regions from the given input paths.
   *
   * Directories are expanded to the data files they contain, keeping only
   * files that have a matching ".meta" companion file; plain file paths are
   * used as-is. All files are then loaded through the provided GMQL loader's
   * region parser.
   */
  def apply(paths: List[String], loader: GMQLLoader[Any, Any, Any, Any], sc: SparkContext): RDD[GRECORD] = {
    // Narrow the loader to the concrete parser signature used for region data.
    def parser(x: (Long, String)) =
      loader.asInstanceOf[GMQLLoader[(Long, String), Option[GRECORD], (Long, String), Option[MetaType]]].region_parser(x)

    val conf = new Configuration()
    val path = new org.apache.hadoop.fs.Path(paths.head)
    val fs = FileSystem.get(path.toUri(), conf)

    // `files` was a var but is never reassigned; now a val.
    val files = paths.flatMap { dirInput =>
      val file = new Path(dirInput)
      if (fs.isDirectory(file))
        fs.listStatus(file, new PathFilter {
          // Only data files accompanied by a ".meta" sidecar are loaded.
          override def accept(path: Path): Boolean = fs.exists(new Path(path.toString + ".meta"))
        }).map(x => x.getPath.toString).toList
      else List(dirInput)
    }

    sc.forPath(files.mkString(",")).LoadRegionsCombineFiles(parser)
  }
}
| DEIB-GECO/GMQL | GMQL-Spark/src/main/scala/it/polimi/genomics/spark/implementation/RegionsOperators/SelectRegions/ReadRD.scala | Scala | apache-2.0 | 1,402 |
package com.twitter.finagle
import com.twitter.util.Activity
import java.net.InetSocketAddress
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.{AssertionsForJUnit, JUnitRunner}
@RunWith(classOf[JUnitRunner])
class DtabTest extends FunSuite with AssertionsForJUnit {

  // Parses a NameTree of paths and lifts each path into a Name.
  def pathTree(t: String) =
    NameTree.read(t).map(Name(_))

  // Asserts equality via the Equiv type class, with a readable failure value.
  def assertEquiv[T: Equiv](left: T, right: T) = assert(
    if (Equiv[T].equiv(left, right)) None
    else Some(left + "!=" + right)
  )

  test("Lookup all prefixes in reverse order") {
    val dtab = Dtab.read("/foo/bar=>/xxx;/foo=>/yyy")

    assertEquiv(
      dtab.lookup(Path.read("/foo/bar/baz")).sample(),
      pathTree("/yyy/bar/baz | /xxx/baz"))
  }

  test("Expand names") {
    val dtab = Dtab.read("/foo/bar => /xxx|/yyy&/zzz")

    assertEquiv(
      dtab.lookup(Path.read("/foo/bar/baz")).sample(),
      pathTree("/xxx/baz | /yyy/baz & /zzz/baz"))
  }

  test("d1 ++ d2") {
    val d1 = Dtab.read("/foo => /bar")
    val d2 = Dtab.read("/foo=>/biz;/biz=>/$/inet/0/8080;/bar=>/$/inet/0/9090")

    // Concatenation preserves entry order.
    assert(d1++d2 === Dtab.read("""
/foo=>/bar;
/foo=>/biz;
/biz=>/$/inet/0/8080;
/bar=>/$/inet/0/9090
"""))

    // Binds `path` through the global namer plus `dtab` and compares the
    // resolved bound addresses against `expected`.
    def assertEval(dtab: Dtab, path: Path, expected: Name.Bound*) {
      val dtab2 = Dtab.read("/=>/#/com.twitter.finagle.namer.global") ++ dtab
      dtab2.bind(NameTree.Leaf(path)).sample().eval match {
        case Some(actual) => assert(actual.map(_.addr.sample) === expected.map(_.addr.sample).toSet)
        case _ => assert(false)
      }
    }

    // Later entries take precedence when binding.
    assertEval(d1 ++ d2, Path.read("/foo"), Name.bound(new InetSocketAddress(8080)))
    assertEval(d2 ++ d1, Path.read("/foo"), Name.bound(new InetSocketAddress(9090)))
  }

  test("d1 ++ Dtab.empty") {
    val d1 = Dtab.read("/foo=>/bar;/biz=>/baz")

    assert(d1 ++ Dtab.empty === d1)
  }

  test("Dtab.stripPrefix") {
    val d1, d2 = Dtab.read("/foo=>/bar;/baz=>/xxx/yyy")

    assert(d1.stripPrefix(d1).isEmpty)
    assert(d1.stripPrefix(d2).isEmpty)

    assertEquiv(
      (d1 + Dentry.read("/foo => /123")).stripPrefix(d1),
      Dtab.read("/foo=>/123"))

    assertEquiv(d1.stripPrefix(d1 + Dentry.read("/s => /b")), d1)
    assert(Dtab.empty.stripPrefix(d1).isEmpty)
  }

  // These are mostly just compilation tests.
  test("Dtab is a Scala collection") {
    val b = Dtab.newBuilder
    b += Dentry.read("/a => /b")
    b += Dentry.read("/c => /d")
    val dtab = b.result

    val dtab1: Dtab = dtab map { case Dentry(a, b) =>
      Dentry.read("%s=>%s".format(a.show.toUpperCase, b.show.toUpperCase))
    }

    assert(dtab1.size === 2)
    dtab1(0) match {
      case Dentry(a, b) =>
        assert(a === Path.Utf8("A"))
        assert(b === NameTree.Leaf(Path.Utf8("B")))
    }
  }

  test("Allows trailing semicolon") {
    val dtab = try {
      Dtab.read("""
/b => /c;
/a => /b;
""")
    } catch { case _: IllegalArgumentException => Dtab.empty }

    assert(dtab.length === 2)
  }

  test("/# path loads Namer class, rewrite is ignored") {
    assert(Dtab.read("/# => /foo").lookup(Path.read("/#/com.twitter.finagle.dtabtest.TestNamer")).sample()
      === NameTree.Leaf(Name.Path(Path.read("/plugh/xyzzy"))))
  }
}
package dtabtest {
  /** Namer loaded reflectively by DtabTest; rewrites every path to /plugh/xyzzy. */
  class TestNamer extends Namer {
    def lookup(path: Path) = Activity.value(NameTree.Leaf(Name.Path(Path.read("/plugh/xyzzy"))))
    def enum(prefix: Path) = throw new UnsupportedOperationException
  }
}
| jamescway/finagle | finagle-core/src/test/scala/com/twitter/finagle/DtabTest.scala | Scala | apache-2.0 | 3,473 |
package com.codahale.jerkson.deser
import language.higherKinds
import com.fasterxml.jackson.databind.JavaType
import com.fasterxml.jackson.databind.{ DeserializationContext, JsonDeserializer }
import com.fasterxml.jackson.core.{ JsonToken, JsonParser }
import collection.generic.MapFactory
import collection.MapLike
import com.fasterxml.jackson.databind.deser.ResolvableDeserializer
/**
 * Jackson deserializer producing immutable Scala maps with String keys.
 *
 * @param companion factory used to build the concrete map implementation
 * @param valueType Jackson type of the map's values
 */
class ImmutableMapDeserializer[CC[A, B] <: Map[A, B] with MapLike[A, B, CC[A, B]]](companion: MapFactory[CC],
                                                                                   valueType: JavaType)
  extends JsonDeserializer[Object] with ResolvableDeserializer {
  // Looked up in resolve() rather than the constructor, following Jackson's
  // ResolvableDeserializer contract (allows cyclic value types).
  var valueDeserializer: JsonDeserializer[Object] = _

  /** Reads a JSON object into an immutable map, delegating each value to valueDeserializer. */
  def deserialize(jp: JsonParser, ctxt: DeserializationContext): CC[String, Object] = {
    val builder = companion.newBuilder[String, Object]
    // Step past the opening brace if the parser is still positioned on it.
    if (jp.getCurrentToken == JsonToken.START_OBJECT) {
      jp.nextToken()
    }

    // Anything other than a field name or an immediately-closed object is a type error.
    if (jp.getCurrentToken != JsonToken.FIELD_NAME &&
      jp.getCurrentToken != JsonToken.END_OBJECT) {
      throw ctxt.mappingException(valueType.getRawClass)
    }

    // Each iteration consumes one field name plus its value.
    while (jp.getCurrentToken != JsonToken.END_OBJECT) {
      val name = jp.getCurrentName
      jp.nextToken()
      builder += ((name, valueDeserializer.deserialize(jp, ctxt)))
      jp.nextToken()
    }
    builder.result()
  }

  /**
   * Resolves the value deserializer once the context is able to provide it.
   * Previously used deprecated procedure syntax (no `: Unit =`).
   */
  def resolve(ctxt: DeserializationContext): Unit = {
    valueDeserializer = ctxt.findRootValueDeserializer(valueType)
  }

  // Safe to cache: behavior depends only on the fixed companion and value type.
  override def isCachable = true
}
| mDialog/jerkson | src/main/scala/com/codahale/jerkson/deser/ImmutableMapDeserializer.scala | Scala | mit | 1,426 |
/*
* Licensed to Intel Corporation under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Intel Corporation licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import breeze.linalg.sum
import com.intel.analytics.bigdl.nn.abstractnn.TensorModule
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import scala.reflect.ClassTag
/**
* Reshape with the support of infered size,
* Positive numbers are used directly, setting the corresponding dimension of the output tensor.
* In addition, two special values are accepted:
* 0 means "copy the respective dimension of the input".
* i.e., if the input has 2 as its 1st dimension,
* the output will have 2 as its 1st dimension as well
* -1 stands for "infer this from the other dimensions"
* this dimension is calculated to keep the overall element count the same as in the input.
* At most one -1 can be used in a reshape operation.
*
* For example, (4, 5, 6, 7) -> InferReshape (4, 0, 3, -1) -> (4, 5, 3, 14)
* with 1st and 3rd dim same as given size, with 2nd dim same as input, and the infered dim is 14
* @param size the target tensor size
* @param batchMode whether in batch mode
* @tparam T type
*/
class InferReshape[@specialized(Float, Double) T: ClassTag](
    size: Array[Int], var batchMode: Boolean = false)(
    implicit ev: TensorNumeric[T]) extends TensorModule[T] {
  // Resolved target sizes; slot 0 holds the batch dimension when batchMode is on.
  private var inferedSizes: Array[Int] = _
  // Offset where the user-supplied sizes start inside inferedSizes (1 in batch mode).
  private var startIndex = 0
  // Index of the single -1 ("infer this") entry inside inferedSizes.
  private var inferIndex = -1
  // Product of the explicitly given positive sizes.
  private var subTotal = 1

  init()

  /** Validates `size` and precomputes everything that does not depend on the input. */
  private def init(): Unit = {
    var minusOneCount = 0
    inferedSizes = if (batchMode) new Array[Int](size.length + 1) else new Array[Int](size.length)
    if (batchMode) startIndex = 1
    var i = 0
    while (i < size.length) {
      if (size(i) == -1) {
        minusOneCount += 1
        inferIndex = i + startIndex
      }
      else if (size(i) != 0) { // use the exact value in given size
        inferedSizes(i + startIndex) = size(i)
        subTotal *= size(i)
      }
      i += 1
    }
    // The check enforces exactly one -1 (zero occurrences are rejected too),
    // so the message now says "exactly one" instead of the old, misleading
    // "at most a single value of -1 may be specified".
    require(minusOneCount == 1, "exactly one value of -1 must be specified")
  }

  /** Resolves 0 and -1 placeholders against the input's sizes and returns a reshaped view. */
  override def updateOutput(input: Tensor[T]): Tensor[T] = {
    var total = subTotal
    var i = 0
    while (i < size.length) {
      if (size(i) == 0) { // use the same dim value as input
        inferedSizes(i + startIndex) = input.size(i + 1)
        total *= input.size(i + 1)
      }
      i += 1
    }
    require(total <= input.nElement(), "inferred size dim product must be <= total input #elements")
    if (inferIndex != -1) {
      inferedSizes(inferIndex) = input.nElement() / total
      // In batch mode the batch dimension is excluded from the inferred size.
      if (batchMode) inferedSizes(inferIndex) = inferedSizes(inferIndex) / input.size(1)
    }
    if (batchMode) {
      inferedSizes(0) = input.size(1)
    }
    // view() requires contiguous storage; copy only when necessary.
    if (input.isContiguous()) {
      output = input.view(inferedSizes)
    } else {
      output = input.contiguous().view(inferedSizes)
    }
    output
  }

  /** The gradient of a reshape is the incoming gradient reshaped back to the input's size. */
  override def updateGradInput(input: Tensor[T], gradOutput: Tensor[T]): Tensor[T] = {
    if (gradOutput.isContiguous()) {
      gradInput = gradOutput.view(input.size())
    } else {
      gradInput = gradOutput.contiguous().view(input.size())
    }
    gradInput
  }

  override def equals(obj: Any): Boolean = {
    if (!super.equals(obj)) {
      return false
    }
    // NOTE: generic parameter is erased at runtime, so this only checks the class.
    if (!obj.isInstanceOf[InferReshape[T]]) {
      return false
    }
    val other = obj.asInstanceOf[InferReshape[T]]
    if (this.eq(other)) {
      return true
    }
    var i = 0
    while (i < inferedSizes.length) {
      if (inferedSizes(i) != other.inferedSizes(i)) {
        return false
      }
      i += 1
    }
    batchMode == other.batchMode
  }

  override def hashCode(): Int = {
    val seed = 37
    var hash = super.hashCode()
    var i = 0
    while (i < inferedSizes.length) {
      hash = hash * seed + inferedSizes(i).hashCode()
      i += 1
    }
    hash = hash * seed + batchMode.hashCode()
    hash
  }

  override def toString(): String = {
    s"nn.InferReshape(${
      size.mkString("x")
    })"
  }
}
object InferReshape {
  /** Factory mirroring the class constructor; see [[InferReshape]] for parameter semantics. */
  def apply[@specialized(Float, Double) T: ClassTag](size: Array[Int], batchMode: Boolean = false)
                                                    (implicit ev: TensorNumeric[T]): InferReshape[T] =
    new InferReshape(size, batchMode)
}
| dding3/BigDL | dl/src/main/scala/com/intel/analytics/bigdl/nn/InferReshape.scala | Scala | apache-2.0 | 4,957 |
package your.pack
import akka.stream.alpakka.cassandra.CqlSessionProvider
import com.datastax.dse.driver.api.core.DseSession
import com.datastax.oss.driver.api.core.CqlSession
import scala.compat.java8.FutureConverters._
import scala.concurrent.{ ExecutionContext, Future }
//#dse-session-provider
/** A CqlSessionProvider that builds sessions through the DataStax Enterprise (DSE) driver. */
class DseSessionProvider extends CqlSessionProvider {
  // Asynchronously builds a DSE session and adapts the Java CompletionStage
  // to a Scala Future via toScala.
  override def connect()(implicit ec: ExecutionContext): Future[CqlSession] = {
    DseSession
      .builder()
      // .withAuthProvider() can add any DSE specific authentication here
      .buildAsync()
      .toScala
  }
}
//#dse-session-provider
| chbatey/akka-persistence-cassandra | dse-test/src/test/scala/your/pack/DseSessionProvider.scala | Scala | apache-2.0 | 606 |
package utils.auth
import com.mohiva.play.silhouette.api.Env
import com.mohiva.play.silhouette.impl.authenticators.JWTAuthenticator
import models.User
/**
* The default env.
*/
trait DefaultEnv extends Env {
  // Identity type carried by authenticated requests.
  type I = User
  // Authenticator implementation (JWT-based).
  type A = JWTAuthenticator
}
/** sbt 0.7 project definition for Guild, with publishing to repo.codahale.com. */
class Guild(info: sbt.ProjectInfo) extends sbt.DefaultProject(info) with posterous.Publish {
  // Warn on deprecated APIs and unchecked type tests.
  override def compileOptions = super.compileOptions ++ List(Deprecation, Unchecked)

  /**
   * Home Repo
   */
  override def managedStyle = sbt.ManagedStyle.Maven
  // Artifacts are first published into a local staging directory.
  val publishTo = sbt.Resolver.file("Local Cache", ("." / "target" / "repo").asFile)

  /**
   * Publish to a local temp repo, then rsync the files over to repo.codahale.com.
   */
  def publishToLocalRepoAction = super.publishAction
  override def publishAction = task {
    log.info("Uploading to repo.codahale.com")
    // NOTE(review): requires rsync on PATH and SSH access to codahale.com.
    sbt.Process("rsync", "-avz" :: "target/repo/" :: "codahale.com:/home/codahale/repo.codahale.com" :: Nil) ! log
    None
  } describedAs("what") dependsOn(test, publishToLocalRepoAction)

  /**
   * Publish the source as well as the class files.
   */
  override def packageSrcJar= defaultJarPath("-sources.jar")
  val sourceArtifact = sbt.Artifact(artifactID, "src", "jar", Some("sources"), Nil, None)
  override def packageToPublishActions = super.packageToPublishActions ++ Seq(packageSrc)

  /**
   * Repositories
   */
  val scalaToolsSnapshots = "scala-tools.org Snapshots" at "http://scala-tools.org/repo-snapshots"

  /**
   * Dependencies
   */
  val jetLang = "org.jetlang" % "jetlang" % "0.2.0" withSources()

  /**
   * Test Dependencies
   */
  // Pick the scalatest build matching the cross Scala version in use;
  // fail the build for any other version.
  val scalaTest = buildScalaVersion match {
    case "2.8.0.Beta1" => "org.scalatest" % "scalatest" % "1.0.1-for-scala-2.8.0.Beta1-with-test-interfaces-0.3-SNAPSHOT" % "test" withSources() intransitive()
    case "2.8.0.RC2" => "org.scalatest" % "scalatest" % "1.2-for-scala-2.8.0.RC2-SNAPSHOT" % "test" withSources() intransitive()
    case unknown => error("no known scalatest impl for %s".format(unknown))
  }
  val mockito = "org.mockito" % "mockito-all" % "1.8.4" % "test" withSources()
}
| codahale/guild | project/build/Guild.scala | Scala | mit | 1,833 |
package scodec.msgpack
import scala.util.control.NonFatal
import java.io.ByteArrayOutputStream
import org.msgpack.core.{MessagePack => JMessagePack, MessagePacker, MessageUnpacker}
import scodec._
import scodec.bits.BitVector
/** scodec Codecs implemented by delegating to the official Java MessagePack packer/unpacker. */
object JavaCodec {

  /** Creates a fresh unpacker positioned at the start of the given bits. */
  val bitVector2Unpacker: BitVector => MessageUnpacker =
    bits => JMessagePack.newDefaultUnpacker(bits.toByteArray)

  /** Runs `f` against a fresh packer and returns everything it wrote as a BitVector. */
  def withPacker(f: MessagePacker => Unit): Attempt[BitVector] = {
    val buffer = new ByteArrayOutputStream()
    val packer = JMessagePack.newDefaultPacker(buffer)
    f(packer)
    packer.close
    Attempt.successful(BitVector(buffer.toByteArray))
  }

  /** Evaluates `a`, converting any non-fatal exception into a decoding failure. */
  def fromTryCatchNonFatal[A](a: => DecodeResult[A]): Attempt[DecodeResult[A]] =
    try Attempt.successful(a)
    catch { case NonFatal(t) => Attempt.failure(Err(t.toString)) }

  /** Decodes a full bit vector with `f`; the remainder is always empty. */
  def withUnpacker[A](f: MessageUnpacker => A): BitVector => Attempt[DecodeResult[A]] = { bits =>
    val unpacker = bitVector2Unpacker(bits)
    val outcome = fromTryCatchNonFatal(DecodeResult(f(unpacker), BitVector.empty))
    unpacker.close()
    outcome
  }

  /** Builds a scodec Codec from a Java-style encode/decode function pair. */
  def javacodec[A](encoder: (MessagePacker, A) => Unit, decoder: MessageUnpacker => A): Codec[A] =
    Codec(
      value => withPacker(packer => encoder(packer, value)),
      withUnpacker(decoder)
    )

  val int: Codec[Int] = javacodec((p, v) => p.packInt(v), _.unpackInt)
  val long: Codec[Long] = javacodec((p, v) => p.packLong(v), _.unpackLong)
  val bool: Codec[Boolean] = javacodec((p, v) => p.packBoolean(v), _.unpackBoolean)
  val double: Codec[Double] = javacodec((p, v) => p.packDouble(v), _.unpackDouble)
  val float: Codec[Float] = javacodec((p, v) => p.packFloat(v), _.unpackFloat)
  val str: Codec[String] = javacodec((p, v) => p.packString(v), _.unpackString)
}
| pocketberserker/scodec-msgpack | jvm/src/test/scala/scodec/msgpack/JavaCodec.scala | Scala | mit | 1,775 |
/** Base type for all people; a person's string form is simply their name. */
abstract class Person(name: String) {
  override def toString: String = name
}

/** An adult male person. */
class Man(name: String) extends Person(name)

/** An adult female person. */
class Woman(name: String) extends Person(name)

/** A young person. */
class Child(name: String) extends Person(name)
| grzegorzbalcerek/scala-book-examples | examples/Person.scala | Scala | mit | 207 |
/*
Stratagem is a model checker for transition systems described using rewriting
rules and strategies.
Copyright (C) 2013 - SMV@Geneva University.
Program written by Edmundo Lopez Bobeda <edmundo [at] lopezbobeda.net>.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package ch.unige.cui.smv.stratagem.ts
/**
 * Represents a fix-point strategy: the repeated application of the wrapped
 * strategy (the exact repetition semantics are defined by the strategy
 * interpreter, not here).
 * @param S the strategy that will be repeated.
 * @author mundacho
 *
 */
case class FixPointStrategy(S: Strategy) extends NonVariableStrategy
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.sql.{Date, Timestamp}
import org.apache.spark.SparkConf
import org.apache.spark.sql.catalyst.plans.logical.Union
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.{ExamplePoint, ExamplePointUDT, SharedSparkSession}
import org.apache.spark.sql.test.SQLTestData.NullStrings
import org.apache.spark.sql.types._
class DataFrameSetOperationsSuite extends QueryTest with SharedSparkSession {
import testImplicits._
override def sparkConf: SparkConf =
super.sparkConf
.setAppName("test")
.set("spark.sql.parquet.columnarReaderBatchSize", "4096")
.set("spark.sql.sources.useV1SourceList", "avro")
.set("spark.sql.extensions", "com.intel.oap.ColumnarPlugin")
.set("spark.sql.execution.arrow.maxRecordsPerBatch", "4096")
//.set("spark.shuffle.manager", "org.apache.spark.shuffle.sort.ColumnarShuffleManager")
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "false")
.set("spark.sql.columnar.window", "false")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
test("except") {
checkAnswer(
lowerCaseData.except(upperCaseData),
Row(1, "a") ::
Row(2, "b") ::
Row(3, "c") ::
Row(4, "d") :: Nil)
checkAnswer(lowerCaseData.except(lowerCaseData), Nil)
checkAnswer(upperCaseData.except(upperCaseData), Nil)
// check null equality
checkAnswer(
nullInts.except(nullInts.filter("0 = 1")),
nullInts)
checkAnswer(
nullInts.except(nullInts),
Nil)
// check if values are de-duplicated
checkAnswer(
allNulls.except(allNulls.filter("0 = 1")),
Row(null) :: Nil)
checkAnswer(
allNulls.except(allNulls),
Nil)
// check if values are de-duplicated
val df = Seq(("id1", 1), ("id1", 1), ("id", 1), ("id1", 2)).toDF("id", "value")
checkAnswer(
df.except(df.filter("0 = 1")),
Row("id1", 1) ::
Row("id", 1) ::
Row("id1", 2) :: Nil)
// check if the empty set on the left side works
checkAnswer(
allNulls.filter("0 = 1").except(allNulls),
Nil)
}
ignore("SPARK-23274: except between two projects without references used in filter") {
val df = Seq((1, 2, 4), (1, 3, 5), (2, 2, 3), (2, 4, 5)).toDF("a", "b", "c")
val df1 = df.filter($"a" === 1)
val df2 = df.filter($"a" === 2)
checkAnswer(df1.select("b").except(df2.select("b")), Row(3) :: Nil)
checkAnswer(df1.select("b").except(df2.select("c")), Row(2) :: Nil)
}
ignore("except distinct - SQL compliance") {
val df_left = Seq(1, 2, 2, 3, 3, 4).toDF("id")
val df_right = Seq(1, 3).toDF("id")
checkAnswer(
df_left.except(df_right),
Row(2) :: Row(4) :: Nil
)
}
ignore("except - nullability") {
val nonNullableInts = Seq(Tuple1(11), Tuple1(3)).toDF()
assert(nonNullableInts.schema.forall(!_.nullable))
val df1 = nonNullableInts.except(nullInts)
checkAnswer(df1, Row(11) :: Nil)
assert(df1.schema.forall(!_.nullable))
val df2 = nullInts.except(nonNullableInts)
checkAnswer(df2, Row(1) :: Row(2) :: Row(null) :: Nil)
assert(df2.schema.forall(_.nullable))
val df3 = nullInts.except(nullInts)
checkAnswer(df3, Nil)
assert(df3.schema.forall(_.nullable))
val df4 = nonNullableInts.except(nonNullableInts)
checkAnswer(df4, Nil)
assert(df4.schema.forall(!_.nullable))
}
test("except all") {
checkAnswer(
lowerCaseData.exceptAll(upperCaseData),
Row(1, "a") ::
Row(2, "b") ::
Row(3, "c") ::
Row(4, "d") :: Nil)
checkAnswer(lowerCaseData.exceptAll(lowerCaseData), Nil)
checkAnswer(upperCaseData.exceptAll(upperCaseData), Nil)
// check null equality
checkAnswer(
nullInts.exceptAll(nullInts.filter("0 = 1")),
nullInts)
checkAnswer(
nullInts.exceptAll(nullInts),
Nil)
// check that duplicate values are preserved
checkAnswer(
allNulls.exceptAll(allNulls.filter("0 = 1")),
Row(null) :: Row(null) :: Row(null) :: Row(null) :: Nil)
checkAnswer(
allNulls.exceptAll(allNulls.limit(2)),
Row(null) :: Row(null) :: Nil)
// check that duplicates are retained.
val df = spark.sparkContext.parallelize(
NullStrings(1, "id1") ::
NullStrings(1, "id1") ::
NullStrings(2, "id1") ::
NullStrings(3, null) :: Nil).toDF("id", "value")
checkAnswer(
df.exceptAll(df.filter("0 = 1")),
Row(1, "id1") ::
Row(1, "id1") ::
Row(2, "id1") ::
Row(3, null) :: Nil)
// check if the empty set on the left side works
checkAnswer(
allNulls.filter("0 = 1").exceptAll(allNulls),
Nil)
}
ignore("exceptAll - nullability") {
val nonNullableInts = Seq(Tuple1(11), Tuple1(3)).toDF()
assert(nonNullableInts.schema.forall(!_.nullable))
val df1 = nonNullableInts.exceptAll(nullInts)
checkAnswer(df1, Row(11) :: Nil)
assert(df1.schema.forall(!_.nullable))
val df2 = nullInts.exceptAll(nonNullableInts)
checkAnswer(df2, Row(1) :: Row(2) :: Row(null) :: Nil)
assert(df2.schema.forall(_.nullable))
val df3 = nullInts.exceptAll(nullInts)
checkAnswer(df3, Nil)
assert(df3.schema.forall(_.nullable))
val df4 = nonNullableInts.exceptAll(nonNullableInts)
checkAnswer(df4, Nil)
assert(df4.schema.forall(!_.nullable))
}
ignore("intersect") {
checkAnswer(
lowerCaseData.intersect(lowerCaseData),
Row(1, "a") ::
Row(2, "b") ::
Row(3, "c") ::
Row(4, "d") :: Nil)
checkAnswer(lowerCaseData.intersect(upperCaseData), Nil)
// check null equality
checkAnswer(
nullInts.intersect(nullInts),
Row(1) ::
Row(2) ::
Row(3) ::
Row(null) :: Nil)
// check if values are de-duplicated
checkAnswer(
allNulls.intersect(allNulls),
Row(null) :: Nil)
// check if values are de-duplicated
val df = Seq(("id1", 1), ("id1", 1), ("id", 1), ("id1", 2)).toDF("id", "value")
checkAnswer(
df.intersect(df),
Row("id1", 1) ::
Row("id", 1) ::
Row("id1", 2) :: Nil)
}
ignore("intersect - nullability") {
val nonNullableInts = Seq(Tuple1(1), Tuple1(3)).toDF()
assert(nonNullableInts.schema.forall(!_.nullable))
val df1 = nonNullableInts.intersect(nullInts)
checkAnswer(df1, Row(1) :: Row(3) :: Nil)
assert(df1.schema.forall(!_.nullable))
val df2 = nullInts.intersect(nonNullableInts)
checkAnswer(df2, Row(1) :: Row(3) :: Nil)
assert(df2.schema.forall(!_.nullable))
val df3 = nullInts.intersect(nullInts)
checkAnswer(df3, Row(1) :: Row(2) :: Row(3) :: Row(null) :: Nil)
assert(df3.schema.forall(_.nullable))
val df4 = nonNullableInts.intersect(nonNullableInts)
checkAnswer(df4, Row(1) :: Row(3) :: Nil)
assert(df4.schema.forall(!_.nullable))
}
test("intersectAll") {
checkAnswer(
lowerCaseDataWithDuplicates.intersectAll(lowerCaseDataWithDuplicates),
Row(1, "a") ::
Row(2, "b") ::
Row(2, "b") ::
Row(3, "c") ::
Row(3, "c") ::
Row(3, "c") ::
Row(4, "d") :: Nil)
checkAnswer(lowerCaseData.intersectAll(upperCaseData), Nil)
// check null equality
checkAnswer(
nullInts.intersectAll(nullInts),
Row(1) ::
Row(2) ::
Row(3) ::
Row(null) :: Nil)
// Duplicate nulls are preserved.
checkAnswer(
allNulls.intersectAll(allNulls),
Row(null) :: Row(null) :: Row(null) :: Row(null) :: Nil)
val df_left = Seq(1, 2, 2, 3, 3, 4).toDF("id")
val df_right = Seq(1, 2, 2, 3).toDF("id")
checkAnswer(
df_left.intersectAll(df_right),
Row(1) :: Row(2) :: Row(2) :: Row(3) :: Nil)
}
ignore("intersectAll - nullability") {
val nonNullableInts = Seq(Tuple1(1), Tuple1(3)).toDF()
assert(nonNullableInts.schema.forall(!_.nullable))
val df1 = nonNullableInts.intersectAll(nullInts)
checkAnswer(df1, Row(1) :: Row(3) :: Nil)
assert(df1.schema.forall(!_.nullable))
val df2 = nullInts.intersectAll(nonNullableInts)
checkAnswer(df2, Row(1) :: Row(3) :: Nil)
assert(df2.schema.forall(!_.nullable))
val df3 = nullInts.intersectAll(nullInts)
checkAnswer(df3, Row(1) :: Row(2) :: Row(3) :: Row(null) :: Nil)
assert(df3.schema.forall(_.nullable))
val df4 = nonNullableInts.intersectAll(nonNullableInts)
checkAnswer(df4, Row(1) :: Row(3) :: Nil)
assert(df4.schema.forall(!_.nullable))
}
ignore("SPARK-10539: Project should not be pushed down through Intersect or Except") {
val df1 = (1 to 100).map(Tuple1.apply).toDF("i")
val df2 = (1 to 30).map(Tuple1.apply).toDF("i")
val intersect = df1.intersect(df2)
val except = df1.except(df2)
assert(intersect.count() === 30)
assert(except.count() === 70)
}
ignore("SPARK-10740: handle nondeterministic expressions correctly for set operations") {
val df1 = (1 to 20).map(Tuple1.apply).toDF("i")
val df2 = (1 to 10).map(Tuple1.apply).toDF("i")
// When generating expected results at here, we need to follow the implementation of
// Rand expression.
def expected(df: DataFrame): Seq[Row] = {
df.rdd.collectPartitions().zipWithIndex.flatMap {
case (data, index) =>
val rng = new org.apache.spark.util.random.XORShiftRandom(7 + index)
data.filter(_.getInt(0) < rng.nextDouble() * 10)
}
}
val union = df1.union(df2)
checkAnswer(
union.filter($"i" < rand(7) * 10),
expected(union)
)
checkAnswer(
union.select(rand(7)),
union.rdd.collectPartitions().zipWithIndex.flatMap {
case (data, index) =>
val rng = new org.apache.spark.util.random.XORShiftRandom(7 + index)
data.map(_ => rng.nextDouble()).map(i => Row(i))
}
)
val intersect = df1.intersect(df2)
checkAnswer(
intersect.filter($"i" < rand(7) * 10),
expected(intersect)
)
val except = df1.except(df2)
checkAnswer(
except.filter($"i" < rand(7) * 10),
expected(except)
)
}
ignore("SPARK-17123: Performing set operations that combine non-scala native types") {
val dates = Seq(
(new Date(0), BigDecimal.valueOf(1), new Timestamp(2)),
(new Date(3), BigDecimal.valueOf(4), new Timestamp(5))
).toDF("date", "timestamp", "decimal")
val widenTypedRows = Seq(
(new Timestamp(2), 10.5D, "string")
).toDF("date", "timestamp", "decimal")
dates.union(widenTypedRows).collect()
dates.except(widenTypedRows).collect()
dates.intersect(widenTypedRows).collect()
}
test("SPARK-19893: cannot run set operations with map type") {
val df = spark.range(1).select(map(lit("key"), $"id").as("m"))
val e = intercept[AnalysisException](df.intersect(df))
assert(e.message.contains(
"Cannot have map type columns in DataFrame which calls set operations"))
val e2 = intercept[AnalysisException](df.except(df))
assert(e2.message.contains(
"Cannot have map type columns in DataFrame which calls set operations"))
val e3 = intercept[AnalysisException](df.distinct())
assert(e3.message.contains(
"Cannot have map type columns in DataFrame which calls set operations"))
withTempView("v") {
df.createOrReplaceTempView("v")
val e4 = intercept[AnalysisException](sql("SELECT DISTINCT m FROM v"))
assert(e4.message.contains(
"Cannot have map type columns in DataFrame which calls set operations"))
}
}
test("union all") {
val unionDF = testData.union(testData).union(testData)
.union(testData).union(testData)
// Before optimizer, Union should be combined.
assert(unionDF.queryExecution.analyzed.collect {
case j: Union if j.children.size == 5 => j }.size === 1)
checkAnswer(
unionDF.agg(avg("key"), max("key"), min("key"), sum("key")),
Row(50.5, 100, 1, 25250) :: Nil
)
// unionAll is an alias of union
val unionAllDF = testData.unionAll(testData).unionAll(testData)
.unionAll(testData).unionAll(testData)
checkAnswer(unionDF, unionAllDF)
}
test("union should union DataFrames with UDTs (SPARK-13410)") {
val rowRDD1 = sparkContext.parallelize(Seq(Row(1, new ExamplePoint(1.0, 2.0))))
val schema1 = StructType(Array(StructField("label", IntegerType, false),
StructField("point", new ExamplePointUDT(), false)))
val rowRDD2 = sparkContext.parallelize(Seq(Row(2, new ExamplePoint(3.0, 4.0))))
val schema2 = StructType(Array(StructField("label", IntegerType, false),
StructField("point", new ExamplePointUDT(), false)))
val df1 = spark.createDataFrame(rowRDD1, schema1)
val df2 = spark.createDataFrame(rowRDD2, schema2)
checkAnswer(
df1.union(df2).orderBy("label"),
Seq(Row(1, new ExamplePoint(1.0, 2.0)), Row(2, new ExamplePoint(3.0, 4.0)))
)
}
test("union by name") {
var df1 = Seq((1, 2, 3)).toDF("a", "b", "c")
var df2 = Seq((3, 1, 2)).toDF("c", "a", "b")
val df3 = Seq((2, 3, 1)).toDF("b", "c", "a")
val unionDf = df1.unionByName(df2.unionByName(df3))
checkAnswer(unionDf,
Row(1, 2, 3) :: Row(1, 2, 3) :: Row(1, 2, 3) :: Nil
)
// Check if adjacent unions are combined into a single one
assert(unionDf.queryExecution.optimizedPlan.collect { case u: Union => true }.size == 1)
// Check failure cases
df1 = Seq((1, 2)).toDF("a", "c")
df2 = Seq((3, 4, 5)).toDF("a", "b", "c")
var errMsg = intercept[AnalysisException] {
df1.unionByName(df2)
}.getMessage
assert(errMsg.contains(
"Union can only be performed on tables with the same number of columns, " +
"but the first table has 2 columns and the second table has 3 columns"))
df1 = Seq((1, 2, 3)).toDF("a", "b", "c")
df2 = Seq((4, 5, 6)).toDF("a", "c", "d")
errMsg = intercept[AnalysisException] {
df1.unionByName(df2)
}.getMessage
assert(errMsg.contains("""Cannot resolve column name "b" among (a, c, d)"""))
}
test("union by name - type coercion") {
var df1 = Seq((1, "a")).toDF("c0", "c1")
var df2 = Seq((3, 1L)).toDF("c1", "c0")
checkAnswer(df1.unionByName(df2), Row(1L, "a") :: Row(1L, "3") :: Nil)
df1 = Seq((1, 1.0)).toDF("c0", "c1")
df2 = Seq((8L, 3.0)).toDF("c1", "c0")
checkAnswer(df1.unionByName(df2), Row(1.0, 1.0) :: Row(3.0, 8.0) :: Nil)
df1 = Seq((2.0f, 7.4)).toDF("c0", "c1")
df2 = Seq(("a", 4.0)).toDF("c1", "c0")
checkAnswer(df1.unionByName(df2), Row(2.0, "7.4") :: Row(4.0, "a") :: Nil)
df1 = Seq((1, "a", 3.0)).toDF("c0", "c1", "c2")
df2 = Seq((1.2, 2, "bc")).toDF("c2", "c0", "c1")
val df3 = Seq(("def", 1.2, 3)).toDF("c1", "c2", "c0")
checkAnswer(df1.unionByName(df2.unionByName(df3)),
Row(1, "a", 3.0) :: Row(2, "bc", 1.2) :: Row(3, "def", 1.2) :: Nil
)
}
test("union by name - check case sensitivity") {
def checkCaseSensitiveTest(): Unit = {
val df1 = Seq((1, 2, 3)).toDF("ab", "cd", "ef")
val df2 = Seq((4, 5, 6)).toDF("cd", "ef", "AB")
checkAnswer(df1.unionByName(df2), Row(1, 2, 3) :: Row(6, 4, 5) :: Nil)
}
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
val errMsg2 = intercept[AnalysisException] {
checkCaseSensitiveTest()
}.getMessage
assert(errMsg2.contains("""Cannot resolve column name "ab" among (cd, ef, AB)"""))
}
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
checkCaseSensitiveTest()
}
}
test("union by name - check name duplication") {
Seq((true, ("a", "a")), (false, ("aA", "Aa"))).foreach { case (caseSensitive, (c0, c1)) =>
withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive.toString) {
var df1 = Seq((1, 1)).toDF(c0, c1)
var df2 = Seq((1, 1)).toDF("c0", "c1")
var errMsg = intercept[AnalysisException] {
df1.unionByName(df2)
}.getMessage
assert(errMsg.contains("Found duplicate column(s) in the left attributes:"))
df1 = Seq((1, 1)).toDF("c0", "c1")
df2 = Seq((1, 1)).toDF(c0, c1)
errMsg = intercept[AnalysisException] {
df1.unionByName(df2)
}.getMessage
assert(errMsg.contains("Found duplicate column(s) in the right attributes:"))
}
}
}
test("SPARK-25368 Incorrect predicate pushdown returns wrong result") {
def check(newCol: Column, filter: Column, result: Seq[Row]): Unit = {
val df1 = spark.createDataFrame(Seq(
(1, 1)
)).toDF("a", "b").withColumn("c", newCol)
val df2 = df1.union(df1).withColumn("d", spark_partition_id).filter(filter)
checkAnswer(df2, result)
}
check(lit(null).cast("int"), $"c".isNull, Seq(Row(1, 1, null, 0), Row(1, 1, null, 1)))
check(lit(null).cast("int"), $"c".isNotNull, Seq())
check(lit(2).cast("int"), $"c".isNull, Seq())
check(lit(2).cast("int"), $"c".isNotNull, Seq(Row(1, 1, 2, 0), Row(1, 1, 2, 1)))
check(lit(2).cast("int"), $"c" === 2, Seq(Row(1, 1, 2, 0), Row(1, 1, 2, 1)))
check(lit(2).cast("int"), $"c" =!= 2, Seq())
}
}
| Intel-bigdata/OAP | oap-native-sql/core/src/test/scala/org/apache/spark/sql/DataFrameSetOperationsSuite.scala | Scala | apache-2.0 | 18,388 |
class a{
try {
}
finally {
<caret>
}
}
/*
abstract
case
class
def
do
false
final
for
if
implicit
import
lazy
new
null
object
override
private
protected
return
sealed
super
this
throw
trait
true
try
type
val
var
while
*/ | JetBrains/intellij-scala | scala/scala-impl/testdata/keywordCompletion/expressions/finally3.scala | Scala | apache-2.0 | 232 |
package com.typesafe.slick.testkit.tests
import com.typesafe.slick.testkit.util.{JdbcTestDB, AsyncTest}
import slick.jdbc.{ResultSetHoldability, ResultSetConcurrency, ResultSetType, JdbcBackend}
class JdbcMiscTest extends AsyncTest[JdbcTestDB] {
import tdb.profile.api._
def testNullability = {
class T1(tag: Tag) extends Table[String](tag, "t1") {
def a = column[String]("a")
def * = a
}
val t1 = TableQuery[T1]
class T3(tag: Tag) extends Table[Option[String]](tag, "t3") {
def a = column[Option[String]]("a")
def * = a
}
val t3 = TableQuery[T3]
seq(
(t1.schema ++ t3.schema).create,
t1 += "a",
t3 += Some("a"),
t3 += None,
(t1 += null.asInstanceOf[String]).failed
)
}
def testSimpleDBIO = {
val getAutoCommit = SimpleDBIO[Boolean](_.connection.getAutoCommit)
getAutoCommit.map(_ shouldBe true)
}
def testStatementParameters = {
def check(sp: JdbcBackend.StatementParameters) =
GetStatementParameters.map { csp => csp shouldBe sp }
DBIO.seq(
check(JdbcBackend.StatementParameters(ResultSetType.Auto, ResultSetConcurrency.Auto, ResultSetHoldability.Auto, null, 0)),
DBIO.seq(
check(JdbcBackend.StatementParameters(ResultSetType.ScrollInsensitive, ResultSetConcurrency.Auto, ResultSetHoldability.Auto, null, 0)),
check(JdbcBackend.StatementParameters(ResultSetType.ScrollInsensitive, ResultSetConcurrency.Auto, ResultSetHoldability.HoldCursorsOverCommit, null, 100)).
withStatementParameters(rsHoldability = ResultSetHoldability.HoldCursorsOverCommit, fetchSize = 100),
check(JdbcBackend.StatementParameters(ResultSetType.ScrollInsensitive, ResultSetConcurrency.Auto, ResultSetHoldability.Auto, null, 0))
).withStatementParameters(rsType = ResultSetType.ScrollInsensitive),
check(JdbcBackend.StatementParameters(ResultSetType.Auto, ResultSetConcurrency.Auto, ResultSetHoldability.Auto, null, 0))
)
}
def testOverrideStatements = {
class T(tag: Tag) extends Table[Int](tag, u"t") {
def id = column[Int]("a")
def * = id
}
val t = TableQuery[T]
val a1 = t.filter(_.id === 1)
val a2 = t.filter(_.id === 2)
seq(
t.schema.create,
t ++= Seq(1, 2, 3),
a1.result.map(_ shouldBe Seq(1)),
a1.result.overrideStatements(a2.result.statements).map(_ shouldBe Seq(2)),
a1.result.head.map(_ shouldBe 1),
a1.result.head.overrideStatements(a2.result.head.statements).map(_ shouldBe 2)
)
}
}
| jkutner/slick | slick-testkit/src/main/scala/com/typesafe/slick/testkit/tests/JdbcMiscTest.scala | Scala | bsd-2-clause | 2,547 |
package jp.ne.opt.chronoscala
import java.time.temporal.{ChronoUnit, TemporalAmount}
import java.time.{Duration, Instant, Period}
class RichInstant(val underlying: Instant) extends AnyVal with Ordered[Instant] {
def +(millis: Long): Instant = underlying.plus(millis, ChronoUnit.MILLIS)
def +(amount: TemporalAmount): Instant = underlying.plus(amount)
def +(duration: Duration): Instant = underlying.plus(duration)
def +(period: Period): Instant = underlying.plus(period)
def -(millis: Long): Instant = underlying.minus(millis, ChronoUnit.MILLIS)
def -(amount: TemporalAmount): Instant = underlying.minus(amount)
def -(duration: Duration): Instant = underlying.minus(duration)
def -(period: Period): Instant = underlying.minus(period)
def to(end: Instant): Interval = Interval(underlying, end)
def compare(that: Instant): Int = underlying.compareTo(that)
}
| opt-tech/chronoscala | shared/src/main/scala/jp/ne/opt/chronoscala/RichInstant.scala | Scala | mit | 889 |
package org.pfcoperez.dailyalgorithm.applications
import org.scalacheck.Gen
import org.scalatest.{ FlatSpec, Matchers }
class ConnectedComponentsSpec extends FlatSpec with Matchers {
def generateForest(nNodes: Int, nArchs: Int): (Set[Int], Seq[(Int, Int)]) = {
val nodes = Seq(1 to nNodes: _*)
val archGenerator: Gen[(Int, Int)] =
for {
from <- Gen.oneOf(nodes)
to <- Gen.oneOf(nodes.filterNot(_ == from))
} yield (from, to)
nodes.toSet -> Gen.listOfN(nArchs, archGenerator).sample.get
}
"Both implementations (Deep First Search & Disjoint Sets)" should "provide equivalent results" in {
val nNodes: Int = 1000
val archsRatio: Double = 0.8
val nArchs: Int = (nNodes * archsRatio) toInt
val (nodes, archs) = generateForest(nNodes, nArchs)
val traditionalCCs = RegularSetsConnectedComponentsOps.connectedNodes(nodes, archs).toSet
val disjointsetsCCs = DisjointSetsConnectedComponentsOps.connectedNodes(nodes, archs).toSet
traditionalCCs should equal(disjointsetsCCs)
}
}
| pfcoperez/algorithmaday | src/test/scala/org/pfcoperez/dailyalgorithm/applications/ConnectedComponentsSpec.scala | Scala | gpl-3.0 | 1,055 |
package texteditor.events
import javax.swing
import rescala._
import scala.swing.event.Event
case class TimerEvent(val source: Timer) extends Event
class Timer(delay0: Int) {
val peer: swing.Timer = new swing.Timer(delay0, null) {
override def fireActionPerformed(e: java.awt.event.ActionEvent) = fired.fire(())
}
def this(delay: Int, repeating: Boolean) {
this(delay)
this.repeating = repeating
}
def running = peer.isRunning
val fired = Evt[Unit] //#EVT
def delay = peer.getDelay
def delay_=(delay: Int) = peer.setDelay(delay)
def repeating = peer.isRepeats
def repeating_=(repeating: Boolean) = peer.setRepeats(repeating)
def restart = { peer.restart(); this }
def start = { peer.start(); this }
def stop = { peer.stop(); this }
}
| volkc/REScala | Examples/Editor/src/main/scala/texteditor/events/Timer.scala | Scala | apache-2.0 | 781 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package services.testdata.candidate.fsb
import factories.UUIDFactory
import javax.inject.{ Inject, Singleton }
import model.command.testdata.CreateEventRequest
import model.command.{ CandidateAllocation, CandidateAllocations }
import model.exchange.testdata.CreateCandidateResponse.CreateCandidateResponse
import model.persisted.eventschedules.EventType
import model.testdata.candidate.CreateCandidateData.CreateCandidateData
import model.{ AllocationStatuses, ProgressStatuses }
import play.api.mvc.RequestHeader
import repositories.SchemeRepository
import repositories.application.GeneralApplicationRepository
import services.allocation.CandidateAllocationService
import services.application.FsbService
import services.testdata.candidate.ConstructiveGenerator
import services.testdata.event.EventGenerator
import uk.gov.hmrc.http.HeaderCarrier
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
@Singleton
class FsbAllocationConfirmedStatusGenerator @Inject() (val previousStatusGenerator: FsbAwaitingAllocationStatusGenerator,
applicationRepository: GeneralApplicationRepository,
fsbTestGroupService: FsbService,
candidateAllocationService: CandidateAllocationService,
eventGenerator: EventGenerator,
schemeRepository: SchemeRepository,
uuidFactory: UUIDFactory
) extends ConstructiveGenerator {
def generate(generationId: Int, createCandidateData: CreateCandidateData)
(implicit hc: HeaderCarrier, rh: RequestHeader): Future[CreateCandidateResponse] = {
val allSchemes = schemeRepository.schemes.toList
for {
candidateInPreviousStatus <- previousStatusGenerator.generate(generationId, createCandidateData)
appId = candidateInPreviousStatus.applicationId.get
topSchemeId = candidateInPreviousStatus.schemePreferences.get.schemes.head
fsbType = allSchemes.find(_.id == topSchemeId).flatMap(_.fsbType)
event <- eventGenerator.createEvent(
generationId,
CreateEventRequest.random.copy(eventType = Some(EventType.FSB), description = fsbType.map(_.key))
).map(_.data)
_ <- candidateAllocationService.allocateCandidates(
CandidateAllocations(
uuidFactory.generateUUID().toString,
event.id,
event.sessions.head.id,
List(CandidateAllocation(appId, AllocationStatuses.CONFIRMED))
), append = false)
_ <- applicationRepository.addProgressStatusAndUpdateAppStatus(appId, ProgressStatuses.FSB_ALLOCATION_CONFIRMED)
} yield {
candidateInPreviousStatus
}
}
}
| hmrc/fset-faststream | app/services/testdata/candidate/fsb/FsbAllocationConfirmedStatusGenerator.scala | Scala | apache-2.0 | 3,543 |
/** Copyright 2016 - 2021 Martin Mauch (@nightscape)
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.crealytics.spark.v2.excel
import org.apache.spark.sql.catalyst.csv.CSVFilters
import org.apache.spark.sql.sources
import org.apache.spark.sql.types.StructType
/** Wrapping the API change between spark 3.0 vs 3.1 */
class ExcelFilters(filters: Seq[sources.Filter], requiredSchema: StructType)
extends CSVFilters(filters, requiredSchema) {}
object ExcelFilters {
def pushedFilters(filters: Array[sources.Filter], schema: StructType): Array[sources.Filter] =
CSVFilters.pushedFilters(filters, schema)
}
| crealytics/spark-excel | src/main/3.0/scala/com/crealytics/spark/v2/excel/ExcelFilters.scala | Scala | apache-2.0 | 1,137 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution
import java.io._
import java.util.Properties
import scala.collection.JavaConverters._
import scala.util.control.NonFatal
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hive.ql.exec.{RecordReader, RecordWriter}
import org.apache.hadoop.hive.serde.serdeConstants
import org.apache.hadoop.hive.serde2.AbstractSerDe
import org.apache.hadoop.hive.serde2.objectinspector._
import org.apache.hadoop.io.Writable
import org.apache.spark.TaskContext
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.execution._
import org.apache.spark.sql.hive.HiveInspectors
import org.apache.spark.sql.hive.HiveShim._
import org.apache.spark.sql.types.DataType
import org.apache.spark.util.{CircularBuffer, Utils}
/**
* Transforms the input by forking and running the specified script.
*
* @param input the set of expression that should be passed to the script.
* @param script the command that should be executed.
* @param output the attributes that are produced by the script.
* @param child logical plan whose output is transformed.
* @param ioschema the class set that defines how to handle input/output data.
*/
private[hive] case class HiveScriptTransformationExec(
input: Seq[Expression],
script: String,
output: Seq[Attribute],
child: SparkPlan,
ioschema: ScriptTransformationIOSchema)
extends BaseScriptTransformationExec {
import HiveScriptIOSchema._
private def createOutputIteratorWithSerde(
writerThread: BaseScriptTransformationWriterThread,
inputStream: InputStream,
proc: Process,
stderrBuffer: CircularBuffer,
outputSerde: AbstractSerDe,
outputSoi: StructObjectInspector,
hadoopConf: Configuration): Iterator[InternalRow] = {
new Iterator[InternalRow] with HiveInspectors {
var curLine: String = null
val scriptOutputStream = new DataInputStream(inputStream)
val scriptOutputReader =
recordReader(ioschema, scriptOutputStream, hadoopConf).orNull
var scriptOutputWritable: Writable = null
val reusedWritableObject = outputSerde.getSerializedClass.getConstructor().newInstance()
val mutableRow = new SpecificInternalRow(output.map(_.dataType))
@transient
lazy val unwrappers = outputSoi.getAllStructFieldRefs.asScala.map(unwrapperFor)
override def hasNext: Boolean = {
try {
if (scriptOutputWritable == null) {
scriptOutputWritable = reusedWritableObject
if (scriptOutputReader != null) {
if (scriptOutputReader.next(scriptOutputWritable) <= 0) {
checkFailureAndPropagate(writerThread, null, proc, stderrBuffer)
return false
}
} else {
try {
scriptOutputWritable.readFields(scriptOutputStream)
} catch {
case _: EOFException =>
// This means that the stdout of `proc` (i.e. TRANSFORM process) has exhausted.
// Ideally the proc should *not* be alive at this point but
// there can be a lag between EOF being written out and the process
// being terminated. So explicitly waiting for the process to be done.
checkFailureAndPropagate(writerThread, null, proc, stderrBuffer)
return false
}
}
}
true
} catch {
case NonFatal(e) =>
// If this exception is due to abrupt / unclean termination of `proc`,
// then detect it and propagate a better exception message for end users
checkFailureAndPropagate(writerThread, e, proc, stderrBuffer)
throw e
}
}
override def next(): InternalRow = {
if (!hasNext) {
throw new NoSuchElementException
}
val raw = outputSerde.deserialize(scriptOutputWritable)
scriptOutputWritable = null
val dataList = outputSoi.getStructFieldsDataAsList(raw)
var i = 0
while (i < dataList.size()) {
if (dataList.get(i) == null) {
mutableRow.setNullAt(i)
} else {
unwrappers(i)(dataList.get(i), mutableRow, i)
}
i += 1
}
mutableRow
}
}
}
override def processIterator(
inputIterator: Iterator[InternalRow],
hadoopConf: Configuration): Iterator[InternalRow] = {
val (outputStream, proc, inputStream, stderrBuffer) = initProc
val (inputSerde, inputSoi) = initInputSerDe(ioschema, input).getOrElse((null, null))
// For HiveScriptTransformationExec, if inputSerde == null, but outputSerde != null
// We will use StringBuffer to pass data, in this case, we should cast data as string too.
val finalInput = if (inputSerde == null) {
inputExpressionsWithoutSerde
} else {
input
}
val outputProjection = new InterpretedProjection(finalInput, child.output)
// This new thread will consume the ScriptTransformation's input rows and write them to the
// external process. That process's output will be read by this current thread.
val writerThread = HiveScriptTransformationWriterThread(
inputIterator.map(outputProjection),
finalInput.map(_.dataType),
inputSerde,
inputSoi,
ioschema,
outputStream,
proc,
stderrBuffer,
TaskContext.get(),
hadoopConf
)
val (outputSerde, outputSoi) = {
initOutputSerDe(ioschema, output).getOrElse((null, null))
}
val outputIterator = if (outputSerde == null) {
createOutputIteratorWithoutSerde(writerThread, inputStream, proc, stderrBuffer)
} else {
createOutputIteratorWithSerde(
writerThread, inputStream, proc, stderrBuffer, outputSerde, outputSoi, hadoopConf)
}
writerThread.start()
outputIterator
}
}
/**
 * Background thread that drains `iter` and feeds each row to the external script
 * process, either through a user-configured Hive `RecordWriter` or directly onto
 * the process's input stream. When an input SerDe is present, each row is first
 * converted to Hive's object representation and serialized with it; otherwise the
 * plain text protocol (`processRowsWithoutSerde`) is used.
 *
 * @param iter          rows to feed to the script (already projected to `inputSchema`)
 * @param inputSchema   Catalyst data types of the columns in `iter`
 * @param inputSerde    SerDe for the script's input, or null for the text protocol
 * @param inputSoi      struct inspector matching `inputSerde`'s layout (null iff serde is null)
 * @param ioSchema      script IO configuration (serde props, reader/writer classes)
 * @param outputStream  the script process's stdin
 * @param proc          the launched script process
 * @param stderrBuffer  circular buffer capturing the script's stderr
 * @param taskContext   task context of the owning task
 * @param conf          Hadoop configuration used to initialize the record writer
 */
private[hive] case class HiveScriptTransformationWriterThread(
    iter: Iterator[InternalRow],
    inputSchema: Seq[DataType],
    inputSerde: AbstractSerDe,
    inputSoi: StructObjectInspector,
    ioSchema: ScriptTransformationIOSchema,
    outputStream: OutputStream,
    proc: Process,
    stderrBuffer: CircularBuffer,
    taskContext: TaskContext,
    conf: Configuration)
  extends BaseScriptTransformationWriterThread with HiveInspectors {

  import HiveScriptIOSchema._

  override def processRows(): Unit = {
    val dataOut = new DataOutputStream(outputStream)
    // A user-supplied RecordWriter takes precedence over writing the raw Writable
    // to the stream ourselves; orNull keeps the hot loop branch-cheap.
    val userWriter = recordWriter(ioSchema, dataOut, conf).orNull

    if (inputSerde == null) {
      // No input SerDe configured: hand rows over via the plain text protocol.
      processRowsWithoutSerde()
    } else {
      // Reusable buffer holding one row converted to Hive's object representation.
      val rowBuffer = new Array[Any](inputSchema.length)
      val inspectors =
        inputSoi.getAllStructFieldRefs.asScala.map(_.getFieldObjectInspector).toArray
      // One Spark -> Hive value converter per column (HiveInspectors.wrapperFor).
      val converters =
        Array.tabulate[Any => Any](inspectors.length) { j =>
          wrapperFor(inspectors(j), inputSchema(j))
        }

      while (iter.hasNext) {
        val row = iter.next()
        var col = 0
        while (col < inspectors.length) {
          rowBuffer(col) =
            if (row.isNullAt(col)) null else converters(col)(row.get(col, inputSchema(col)))
          col += 1
        }
        val writable = inputSerde.serialize(rowBuffer, inputSoi)
        if (userWriter == null) {
          // Apply the user's SerDe properties to the Writable before emitting it.
          prepareWritable(writable, ioSchema.outputSerdeProps).write(dataOut)
        } else {
          userWriter.write(writable)
        }
      }
    }
  }
}
/**
 * Helpers for wiring Hive SerDes and script `RecordReader`/`RecordWriter`
 * implementations into script transformation, based on the class names and
 * properties carried by a [[ScriptTransformationIOSchema]].
 */
object HiveScriptIOSchema extends HiveInspectors {

  /**
   * Instantiates the input SerDe named in `ioschema`, if any, together with a
   * standard struct ObjectInspector built from the input expressions' types.
   * Returns `None` when no input SerDe class is configured.
   */
  def initInputSerDe(
      ioschema: ScriptTransformationIOSchema,
      input: Seq[Expression]): Option[(AbstractSerDe, StructObjectInspector)] = {
    ioschema.inputSerdeClass.map { serdeClass =>
      val (names, types) = parseAttrs(input)
      val serde = initSerDe(serdeClass, names, types, ioschema.inputSerdeProps)
      val inspectors = types.map(toInspector)
      val soi = ObjectInspectorFactory
        .getStandardStructObjectInspector(names.asJava, inspectors.asJava)
      (serde, soi)
    }
  }

  /**
   * Instantiates the output SerDe named in `ioschema`, if any. Unlike the input
   * side, the ObjectInspector is obtained from the SerDe itself rather than
   * constructed from the column types.
   */
  def initOutputSerDe(
      ioschema: ScriptTransformationIOSchema,
      output: Seq[Attribute]): Option[(AbstractSerDe, StructObjectInspector)] = {
    ioschema.outputSerdeClass.map { serdeClass =>
      val (names, types) = parseAttrs(output)
      val serde = initSerDe(serdeClass, names, types, ioschema.outputSerdeProps)
      (serde, serde.getObjectInspector().asInstanceOf[StructObjectInspector])
    }
  }

  /** Derives synthetic column names (`<prettyName>_<index>`) and the column types. */
  private def parseAttrs(attrs: Seq[Expression]): (Seq[String], Seq[DataType]) = {
    val names = attrs.zipWithIndex.map { case (expr, idx) => s"${expr.prettyName}_$idx" }
    (names, attrs.map(_.dataType))
  }

  /**
   * Reflectively creates a SerDe of class `serdeClassName` and initializes it with
   * the given column names/types plus any extra SerDe properties.
   */
  def initSerDe(
      serdeClassName: String,
      columns: Seq[String],
      columnTypes: Seq[DataType],
      serdeProps: Seq[(String, String)]): AbstractSerDe = {
    val serde = Utils.classForName[AbstractSerDe](serdeClassName).getConstructor().newInstance()
    val typeNames = columnTypes.map(_.toTypeInfo.getTypeName()).mkString(",")
    val allProps = serdeProps.toMap +
      (serdeConstants.LIST_COLUMNS -> columns.mkString(",")) +
      (serdeConstants.LIST_COLUMN_TYPES -> typeNames)
    val properties = new Properties()
    // Can not use properties.putAll(allProps.asJava) in scala-2.12
    // See https://github.com/scala/bug/issues/10418
    allProps.foreach { case (k, v) => properties.put(k, v) }
    // NOTE(review): a null Configuration is passed here deliberately, matching the
    // original code — presumably the SerDes in use tolerate it; confirm before changing.
    serde.initialize(null, properties)
    serde
  }

  /**
   * Instantiates the optional RecordReader that parses the script's output stream,
   * initialized with the output SerDe properties.
   */
  def recordReader(
      ioschema: ScriptTransformationIOSchema,
      inputStream: InputStream,
      conf: Configuration): Option[RecordReader] = {
    ioschema.recordReaderClass.map { klass =>
      val reader = Utils.classForName[RecordReader](klass).getConstructor().newInstance()
      val props = new Properties()
      // Can not use props.putAll(outputSerdeProps.toMap.asJava) in scala-2.12
      // See https://github.com/scala/bug/issues/10418
      ioschema.outputSerdeProps.toMap.foreach { case (k, v) => props.put(k, v) }
      reader.initialize(inputStream, conf, props)
      reader
    }
  }

  /** Instantiates the optional RecordWriter that feeds the script's input stream. */
  def recordWriter(
      ioschema: ScriptTransformationIOSchema,
      outputStream: OutputStream,
      conf: Configuration): Option[RecordWriter] = {
    ioschema.recordWriterClass.map { klass =>
      val writer = Utils.classForName[RecordWriter](klass).getConstructor().newInstance()
      writer.initialize(outputStream, conf)
      writer
    }
  }
}
| witgo/spark | sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveScriptTransformationExec.scala | Scala | apache-2.0 | 11,531 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.