| code (stringlengths 5-1M) | repo_name (stringlengths 5-109) | path (stringlengths 6-208) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 5-1M) |
|---|---|---|---|---|---|
package bootstrap.liftweb
import scala.xml.{Null, UnprefixedAttribute}
import javax.mail.internet.MimeMessage
import net.liftweb._
import common._
import http._
import util._
import util.Helpers._
import net.liftweb.squerylrecord.RecordTypeMode._
import mapmartadero.config._
import mapmartadero.model.{DbSchema, SystemUser, User}
import net.liftmodules.extras.{Gravatar, LiftExtras}
import net.liftmodules.mongoauth.MongoAuth
import mapmartadero.lib.DataRetriever
import org.quartz.{Trigger, JobDetail, Scheduler, SchedulerException}
import org.quartz.impl.StdSchedulerFactory
import org.quartz.JobBuilder._
import org.quartz.TriggerBuilder._
import org.quartz.SimpleScheduleBuilder._
import mapmartadero.lib.quartz.SyncJob
import mapmartadero.comet.{UpdateEventsNg, EventsServer}
/**
* A class that's instantiated early and run. It allows the application
* to modify lift's environment
*/
class Boot extends Loggable {
private def quartzInit {
// Grab the Scheduler instance from the Factory
val scheduler: Scheduler = StdSchedulerFactory.getDefaultScheduler();
// and start it off
scheduler.start()
// define the job and tie it to our SyncJob class
val job: JobDetail = newJob(classOf[SyncJob])
.withIdentity("job1", "group1")
.build()
// Trigger the job to run now, and then repeat every 300 seconds
val trigger: Trigger = newTrigger()
.withIdentity("trigger1", "group1")
.startNow()
.withSchedule(simpleSchedule().withIntervalInSeconds(300).repeatForever()).build()
// Tell quartz to schedule the job using our trigger
scheduler.scheduleJob(job, trigger);
}
def boot {
logger.info("Run Mode: "+Props.mode.toString)
// init auth-squeryl
SquerylConfig.init
S.addAround(new LoanWrapper {
override def apply[T](f: => T): T = {
val result = inTransaction {
try {
Right(f)
} catch {
case e: LiftFlowOfControlException => Left(e)
}
}
result match {
case Right(r) => r
case Left(exception) => throw exception
}
}
})
// init mongodb
MongoConfig.init()
// init auth-mongo
/*
MongoAuth.authUserMeta.default.set(User)
MongoAuth.loginTokenAfterUrl.default.set(Site.password.url)
MongoAuth.siteName.default.set("Mapa Martadero")
MongoAuth.systemEmail.default.set(SystemUser.user.email.get)
MongoAuth.systemUsername.default.set(SystemUser.user.name.get)*/
// For S.loggedIn_? and TestCond.loggedIn/Out builtin snippet
// LiftRules.loggedInTest = Full(() => User.isLoggedIn)
// checks for ExtSession cookie
// LiftRules.earlyInStateful.append(User.testForExtSession)
// Gravatar
//Gravatar.defaultImage.default.set("wavatar")
// config an email sender
SmtpMailer.init
// where to search snippet
LiftRules.addToPackages("mapmartadero")
// set the default htmlProperties
LiftRules.htmlProperties.default.set((r: Req) => new Html5Properties(r.userAgent))
// Build SiteMap
LiftRules.setSiteMap(Site.siteMap)
// Error handler
ErrorHandler.init
// 404 handler
LiftRules.uriNotFound.prepend(NamedPF("404handler") {
case (req, failure) =>
NotFoundAsTemplate(ParsePath(List("404"), "html", false, false))
})
// Show the spinny image when an Ajax call starts
LiftRules.ajaxStart =
Full(() => LiftRules.jsArtifacts.show("ajax-spinner").cmd)
// Make the spinny image go away when it ends
LiftRules.ajaxEnd =
Full(() => LiftRules.jsArtifacts.hide("ajax-spinner").cmd)
// Force the request to be UTF-8
LiftRules.early.append(_.setCharacterEncoding("UTF-8"))
// Init Extras
LiftExtras.init()
LiftRules.ajaxPostTimeout = 120000
//LiftRules.cometGetTimeout = 240000
//LiftRules.cometRenderTimeout = 120 seconds
// don't include the liftAjax.js code. It's served statically.
LiftRules.autoIncludeAjaxCalc.default.set(() => (session: LiftSession) => false)
// Mailer
Mailer.devModeSend.default.set((m: MimeMessage) => logger.info("Dev mode message:\n" + prettyPrintMime(m)))
Mailer.testModeSend.default.set((m: MimeMessage) => logger.info("Test mode message:\n" + prettyPrintMime(m)))
quartzInit
}
private def prettyPrintMime(m: MimeMessage): String = {
val buf = new StringBuilder
val hdrs = m.getAllHeaderLines
while (hdrs.hasMoreElements)
buf ++= hdrs.nextElement.toString + "\n"
val out =
"""
|%s
|====================================
|%s
""".format(buf.toString, m.getContent.toString).stripMargin
out
}
}
|
jgenso/mapamartadero
|
src/main/scala/bootstrap/liftweb/Boot.scala
|
Scala
|
apache-2.0
| 4,705
|
package io.sqooba.oss.timeseries.entity
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import scala.util.{Failure, Try}
class TsLabelSpec extends AnyFlatSpec with Matchers {
"TsLabel.unit" should "be derived from label name" in {
implicit val physicsParser: LabelUnitMapper = new LabelUnitMapper {
private val AMPERE_PATTERN = "(.*_A_.*)".r
private val HERTZ_PATTERN = "(.*_Hz_.*)".r
private val PHS_VOLT_PATTERN = "(.*_PhsV_.*)".r
private val TEMP_PATTERN = "(.*Temp_.*)".r
// scalastyle:off non.ascii.character.disallowed
def deriveUnit(in: TsLabel): Option[String] =
in.value match {
case AMPERE_PATTERN(_) => Some("A")
case HERTZ_PATTERN(_) => Some("Hz")
case PHS_VOLT_PATTERN(_) => Some("V")
case TEMP_PATTERN(_) => Some("°C")
case _ => None
}
}
Seq(
("MMCX_A_10m_Avg", Some("A")),
("MMCX_Hz_10m_Avg", Some("Hz")),
("MMCX_PhsV_PhsA_10m_Avg", Some("V")),
("WCNV_XXTemp_10m_Avg", Some("°C")),
("MMCX_PF_10m_Avg", None),
("this_is_an_unkown_unit", None)
).foreach {
case (label, unit) => TsLabel(label).unit shouldBe unit
}
}
// scalastyle:on non.ascii.character.disallowed
it should "return None if a case is not handled" in {
implicit val parser: LabelUnitMapper = (in: TsLabel) =>
in.value match {
case "not exhaustive" => Some("unit")
case _ => None
}
TsLabel("anything").unit shouldBe None
TsLabel("not exhaustive").unit shouldBe Some("unit")
}
}
|
Shastick/tslib
|
src/test/scala/io/sqooba/oss/timeseries/entity/TsLabelSpec.scala
|
Scala
|
mit
| 1,677
|
/*
* SpringMusic.scala
* (Configuration)
*
* Copyright (c) 2015 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss.configuration.video
import java.awt.Color
import java.awt.geom.AffineTransform
import java.awt.image.{AffineTransformOp, BufferedImage}
import javax.imageio.ImageIO
import com.jhlabs.image.ThresholdFilter
import de.sciss.file._
import de.sciss.numbers
import scala.concurrent.{ExecutionContext, Future}
import scala.swing.Swing._
import scala.swing.{Component, Graphics2D, MainFrame, SwingApplication}
object SpringMusic extends SwingApplication {
val IN_WIDTH = 640
val IN_HEIGHT = 480
val OUT_WIDTH = 1080
val OUT_HEIGHT = 1920
val FPS = 25
val NUM_FRAMES = 12673
val THRESH_LO = 170
val THRESH_HI = 180
val FADE_IN = 10.0
val FADE_OUT = 30.0
def startup(args: Array[String]): Unit = {
val scale = OUT_WIDTH.toDouble / IN_WIDTH
val bW = OUT_WIDTH
val bH = (IN_HEIGHT * scale).toInt
val bScale = new BufferedImage(bW, bH, BufferedImage.TYPE_INT_ARGB)
val atScale = AffineTransform.getScaleInstance(scale, scale)
val fScale = new AffineTransformOp(atScale, AffineTransformOp.TYPE_BICUBIC)
// val fEdge = new EdgeFilter
// fEdge.filter(bIn, bOut)
val fThresh = new ThresholdFilter
val fadeInFrames = (FADE_IN * FPS).toInt
val fadeOutFrames = (FADE_OUT * FPS).toInt
def perform(i: Int): Unit = {
val bIn = ImageIO.read(file("spring_music") / f"frame$i%03d.png")
val bOut = new BufferedImage(OUT_WIDTH, OUT_HEIGHT, BufferedImage.TYPE_INT_ARGB)
fScale.filter(bIn, bScale)
val g = bOut.createGraphics()
g.setColor(Color.black)
g.fillRect(0, 0, OUT_WIDTH, OUT_HEIGHT)
import numbers.Implicits._
val wIn = i.clip(0, fadeInFrames).linlin(0, fadeInFrames, 0.0, 1.0)
val wOut = i.clip(NUM_FRAMES - fadeOutFrames, NUM_FRAMES).linlin(NUM_FRAMES - fadeOutFrames, NUM_FRAMES, 1.0, 0.0)
val w = wIn * wOut
fThresh.setLowerThreshold(w.linlin(0, 1, 256, THRESH_LO).toInt)
fThresh.setUpperThreshold(w.linlin(0, 1, 256, THRESH_HI).toInt)
fThresh.filter(bScale, bScale)
g.drawImage(bScale, 0, (OUT_HEIGHT - bH)/2, null)
g.dispose()
ImageIO.write(bOut, "png", file("render") / f"frame$i%d.png")
}
// fThresh.filter(bScale, bScale)
// perform(fadeInFrames/2)
val view: Component = new Component {
preferredSize = (bW, bH)
override protected def paintComponent(g: Graphics2D): Unit =
g.drawImage(bScale, 0,0, peer)
}
// val icon = new ImageIcon(bScale)
val frame = new MainFrame {
contents = view
pack().centerOnScreen()
open()
}
import ExecutionContext.Implicits.global
val fut = Future {
var lastProg = -1
for (i <- 1 to NUM_FRAMES) {
perform(i)
onEDT {
view.repaint()
import numbers.Implicits._
val prog = i.linlin(1, NUM_FRAMES, 0, 100).toInt
if (lastProg < prog) {
lastProg = prog
frame.title = s"$prog%"
}
}
}
}
fut.onSuccess {
case _ => onEDT(frame.title = "COMPLETED")
}
}
}
|
Sciss/Configuration
|
video/src/main/scala/de/sciss/configuration/video/SpringMusic.scala
|
Scala
|
gpl-3.0
| 3,423
|
/*
* This file is part of Kiama.
*
* Copyright (C) 2008-2015 Anthony M Sloane, Macquarie University.
*
* Kiama is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* Kiama is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
* more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Kiama. (See files COPYING and COPYING.LESSER.) If not, see
* <http://www.gnu.org/licenses/>.
*/
package org.kiama
package attribution
/**
* An attribution module. Use an instance of this module to encapsulate
* related attributes. You should ensure that more than one circular
* attribute evaluation from a single module is not executing at the
* same time because the current implementation has shared state between
* related circular attributes. If your attributes are unrelated (i.e.,
* can't possibly call each other) you should base them on different
* attribution module instances and then it is safe for attributes from
* different collections to execute in parallel.
*/
class Attribution extends AttributionCore
|
solomono/kiama
|
library/src/org/kiama/attribution/Attribution.scala
|
Scala
|
gpl-3.0
| 1,436
|
/**
*
* @author Richard Li
*/
object puzzle7 extends App {
val seqInSeq = Seq(Seq("a", "b", "c"), Seq("d", "e", "f"), Seq("g", "h"), Seq("i", "j", "k"))
val seqList = for (Seq(x, y, z) <- seqInSeq) yield x + y + z
//val obj = seqInSeq.map{ case Seq(x, y, z) => x + y + z} // error
println(seqList)
// 0 to 10 is actually a generator, left hand side of assignment is a pattern
val x = for( i@j <- 0 to 10) yield i + j
println(x)
// val seqList = for (Seq(x, y, z) <- seqInSeq) yield x + y + z will be desugared to following statement
val eq = seqInSeq.withFilter{
case Seq(x, y, z) => true
case _ => false
}.map{
case Seq(x, y, z) => x + y + z
}
println(eq)
}
|
precompiler/scala-101
|
puzzles/src/main/scala/puzzle7.scala
|
Scala
|
apache-2.0
| 703
|
package guru.nidi.minecraft.core
import java.io._
import scala.collection.mutable
/**
*
*/
object NbtWriter {
def write(root: Tag): Array[Byte] = {
val writer = new NbtWriter
writer.writeTag(root)
writer.baos.toByteArray
}
}
private class NbtWriter {
val baos = new ByteArrayOutputStream()
private val out = new DataOutputStream(baos)
def writeByteArray(value: Array[Byte]) = {
out.writeInt(value.length)
out.write(value)
}
def writeIntArray(value: Array[Int]) = {
out.writeInt(value.length)
for (v <- value) {
out.writeInt(v)
}
}
def writeCompound(value: collection.Map[String, Tag]) = {
for (v <- value.values) {
writeTag(v)
}
writeTag(EndTag())
}
def writeList[T <: Tag](id: Byte, value: mutable.Buffer[T]) = {
out.writeByte(id)
out.writeInt(value.length)
for (v <- value) {
writeTagValue(v)
}
}
def writeTag(tag: Tag): Unit = {
out.writeByte(tag match {
case EndTag() => 0
case ByteTag(_, _) => 1
case ShortTag(_, _) => 2
case IntTag(_, _) => 3
case LongTag(_, _) => 4
case FloatTag(_, _) => 5
case DoubleTag(_, _) => 6
case ByteArrayTag(_, _) => 7
case StringTag(_, _) => 8
case ListTag(_, _, _) => 9
case CompoundTag(_, _) => 10
case IntArrayTag(_, _) => 11
case other => throw new IllegalArgumentException(s"unknown tag id $other")
})
tag match {
case EndTag() =>
case _ =>
out.writeUTF(tag.name)
writeTagValue(tag)
}
}
def writeTagValue(tag: Tag): Unit = {
tag match {
case ByteTag(_, value) => out.writeByte(value)
case ShortTag(_, value) => out.writeShort(value)
case IntTag(_, value) => out.writeInt(value)
case LongTag(_, value) => out.writeLong(value)
case FloatTag(_, value) => out.writeFloat(value)
case DoubleTag(_, value) => out.writeDouble(value)
case ByteArrayTag(_, value) => writeByteArray(value)
case StringTag(_, value) => out.writeUTF(value)
case ListTag(_, id, value) => writeList(id, value)
case CompoundTag(_, value) => writeCompound(value)
case IntArrayTag(_, value) => writeIntArray(value)
case other => throw new IllegalArgumentException(s"unknown tag id $other")
}
}
}
|
nidi3/mineedit
|
minecraft-core/src/main/scala/guru/nidi/minecraft/core/NbtWriter.scala
|
Scala
|
apache-2.0
| 2,327
|
package com.twitter.gizzard.nameserver
import com.twitter.conversions.time._
import com.twitter.gizzard.shards._
import com.twitter.gizzard.test.NameServerDatabase
import com.twitter.gizzard.ConfiguredSpecification
import org.specs.Specification
import org.specs.mock.{ClassMocker, JMocker}
class MemoryShardSpec extends ConfiguredSpecification with JMocker with ClassMocker {
"MemoryShard" should {
val SQL_SHARD = "com.example.SqlShard"
val forwardShardInfo = new ShardInfo(SQL_SHARD, "forward_table", "localhost")
val backwardShardInfo = new ShardInfo(SQL_SHARD, "backward_table", "localhost")
val forwardShardId = new ShardId("localhost", "forward_table")
val backwardShardId = new ShardId("localhost", "backward_table")
val shardId1 = new ShardId("localhost", "shard1")
val shardId2 = new ShardId("localhost", "shard2")
val shardId3 = new ShardId("localhost", "shard3")
val shardId4 = new ShardId("localhost", "shard4")
val shardId5 = new ShardId("localhost", "shard5")
var nsShard: ShardManagerSource = null
var remoteClusterShard: RemoteClusterManagerSource = null
doBefore {
nsShard = new MemoryShardManagerSource()
remoteClusterShard = new MemoryRemoteClusterManagerSource()
}
"create" in {
"a new shard" >> {
nsShard.createShard(forwardShardInfo)
nsShard.getShard(forwardShardId) mustEqual forwardShardInfo
}
"when the shard already exists" >> {
"when the shard matches existing data" >> {
val shardId = nsShard.createShard(forwardShardInfo)
nsShard.getShard(forwardShardId) mustEqual forwardShardInfo
nsShard.createShard(forwardShardInfo)
nsShard.getShard(forwardShardId) mustEqual forwardShardInfo
}
"when the shard contradicts existing data" >> {
nsShard.createShard(forwardShardInfo)
val otherShard = forwardShardInfo.clone()
otherShard.className = "garbage"
nsShard.createShard(otherShard) must throwA[InvalidShard]
}
}
}
"find" in {
"a created shard" >> {
nsShard.createShard(forwardShardInfo)
nsShard.getShard(forwardShardId).tablePrefix mustEqual forwardShardInfo.tablePrefix
}
"when the shard doesn't exist" >> {
nsShard.getShard(backwardShardId) must throwA[NonExistentShard]
}
}
"delete" in {
nsShard.createShard(forwardShardInfo)
nsShard.getShard(forwardShardId).tablePrefix mustEqual forwardShardInfo.tablePrefix
nsShard.deleteShard(forwardShardId)
nsShard.getShard(forwardShardId) must throwA[NonExistentShard]
}
"links" in {
"add & find" >> {
nsShard.addLink(shardId1, shardId2, 3)
nsShard.addLink(shardId1, shardId3, 2)
nsShard.addLink(shardId1, shardId4, 1)
nsShard.listDownwardLinks(shardId1) mustEqual
List(LinkInfo(shardId1, shardId2, 3), LinkInfo(shardId1, shardId3, 2),
LinkInfo(shardId1, shardId4, 1))
}
"remove" >> {
nsShard.addLink(shardId1, shardId2, 2)
nsShard.addLink(shardId1, shardId3, 2)
nsShard.addLink(shardId1, shardId4, 1)
nsShard.removeLink(shardId1, shardId3)
nsShard.listDownwardLinks(shardId1) mustEqual
List(LinkInfo(shardId1, shardId2, 2), LinkInfo(shardId1, shardId4, 1))
}
"add & remove, retaining order" >> {
nsShard.addLink(shardId1, shardId2, 5)
nsShard.addLink(shardId1, shardId3, 2)
nsShard.addLink(shardId1, shardId4, 1)
nsShard.removeLink(shardId1, shardId3)
nsShard.addLink(shardId1, shardId5, 8)
nsShard.listDownwardLinks(shardId1) mustEqual
List(LinkInfo(shardId1, shardId5, 8), LinkInfo(shardId1, shardId2, 5),
LinkInfo(shardId1, shardId4, 1))
}
}
"set shard busy" in {
nsShard.createShard(forwardShardInfo)
nsShard.markShardBusy(forwardShardId, Busy.Busy)
nsShard.getShard(forwardShardId).busy mustEqual Busy.Busy
}
"forwarding changes" in {
var forwarding: Forwarding = null
doBefore {
nsShard.createShard(forwardShardInfo)
forwarding = new Forwarding(1, 0L, forwardShardId)
}
"set and get for shard" in {
nsShard.setForwarding(forwarding)
nsShard.getForwardingForShard(forwarding.shardId) mustEqual forwarding
}
"replace" in {
nsShard.setForwarding(forwarding)
nsShard.replaceForwarding(forwarding.shardId, shardId2)
nsShard.getForwardingForShard(shardId2).shardId mustEqual shardId2
}
"set and get" in {
nsShard.setForwarding(forwarding)
nsShard.getForwarding(1, 0L).shardId mustEqual forwardShardId
}
"get all" in {
nsShard.setForwarding(forwarding)
nsShard.getForwardings() mustEqual List(forwarding)
}
}
"advanced shard navigation" in {
val shard1 = new ShardInfo(SQL_SHARD, "forward_1", "localhost")
val shard2 = new ShardInfo(SQL_SHARD, "forward_1_also", "localhost")
val shard3 = new ShardInfo(SQL_SHARD, "forward_1_too", "localhost")
val shard4 = new ShardInfo(SQL_SHARD, "forward_2", "localhost")
doBefore {
nsShard.createShard(shard1)
nsShard.createShard(shard2)
nsShard.createShard(shard3)
nsShard.createShard(shard4)
nsShard.addLink(shard1.id, shard2.id, 10)
nsShard.addLink(shard2.id, shard3.id, 10)
nsShard.setForwarding(Forwarding(0, 0, shard1.id))
nsShard.setForwarding(Forwarding(0, 1, shard2.id))
nsShard.setForwarding(Forwarding(1, 0, shard4.id))
}
"shardsForHostname" in {
nsShard.shardsForHostname("localhost").map { _.id }.toList mustEqual List(shard1.id, shard2.id, shard3.id, shard4.id)
}
"getBusyShards" in {
nsShard.getBusyShards() mustEqual List()
nsShard.markShardBusy(shard1.id, Busy.Busy)
nsShard.getBusyShards().map { _.id } mustEqual List(shard1.id)
}
"listUpwardLinks" in {
nsShard.listUpwardLinks(shard3.id).map { _.upId }.toList mustEqual List(shard2.id)
nsShard.listUpwardLinks(shard2.id).map { _.upId }.toList mustEqual List(shard1.id)
nsShard.listUpwardLinks(shard1.id).map { _.upId }.toList mustEqual List[ShardId]()
}
"list tables" in {
nsShard.listTables must haveTheSameElementsAs(List(0, 1))
}
}
"remote host config management" in {
val host1 = new Host("remoteapp1", 7777, "c1", HostStatus.Normal)
val host2 = new Host("remoteapp2", 7777, "c1", HostStatus.Normal)
val host3 = new Host("remoteapp3", 7777, "c2", HostStatus.Normal)
val host4 = new Host("remoteapp4", 7777, "c2", HostStatus.Normal)
doBefore { List(host1, host2, host3, host4).foreach(remoteClusterShard.addRemoteHost) }
"addRemoteHost" in {
val h = new Host("new_host", 7777, "c3", HostStatus.Normal)
val sql = "SELECT * FROM hosts WHERE hostname = 'new_host' AND port = 7777"
remoteClusterShard.addRemoteHost(h)
remoteClusterShard.getRemoteHost(h.hostname, h.port) mustEqual h
remoteClusterShard.addRemoteHost(h)
remoteClusterShard.listRemoteHosts().length mustEqual 5
}
"removeRemoteHost" in {
remoteClusterShard.getRemoteHost(host1.hostname, host1.port) mustEqual host1
remoteClusterShard.removeRemoteHost(host1.hostname, host1.port)
remoteClusterShard.getRemoteHost(host1.hostname, host1.port) must throwA[ShardException]
}
def reloadedHost(h: Host) = remoteClusterShard.getRemoteHost(h.hostname, h.port)
"setRemoteHostStatus" in {
remoteClusterShard.setRemoteHostStatus(host1.hostname, host1.port, HostStatus.Blocked)
reloadedHost(host1).status mustEqual HostStatus.Blocked
(Set() ++ List(host2, host3, host4).map(reloadedHost(_).status)) mustEqual Set(HostStatus.Normal)
}
"setRemoteClusterStatus" in {
remoteClusterShard.setRemoteClusterStatus("c2", HostStatus.Blackholed)
(Set() ++ List(host3, host4).map(reloadedHost(_).status)) mustEqual Set(HostStatus.Blackholed)
(Set() ++ List(host1, host2).map(reloadedHost(_).status)) mustEqual Set(HostStatus.Normal)
}
"getRemoteHost" in {
remoteClusterShard.getRemoteHost(host1.hostname, host1.port) mustEqual host1
}
"listRemoteClusters" in {
remoteClusterShard.listRemoteClusters mustEqual List("c1", "c2")
}
"listRemoteHosts" in {
remoteClusterShard.listRemoteHosts mustEqual List(host1, host2, host3, host4)
}
"listRemoteHostsInCluster" in {
remoteClusterShard.listRemoteHostsInCluster("c1") mustEqual List(host1, host2)
}
}
}
}
|
kmiku7/gizzard
|
src/test/scala/com/twitter/gizzard/nameserver/MemoryShardSpec.scala
|
Scala
|
apache-2.0
| 8,866
|
package pomf.core.actors
import akka.actor._
class UnhandledMessageListener extends CommonActor {
context.system.eventStream.subscribe(self, classOf[UnhandledMessage])
val unhandledReceived = metrics.meter("events")
override def receive = {
case message: UnhandledMessage ⇒
unhandledReceived.mark()
log.warning(s"actor ${message.getRecipient} did not handle message ${message.getMessage}")
}
}
object UnhandledMessageListener {
def props = Props(classOf[UnhandledMessageListener])
}
|
agourlay/post-on-my-fridge
|
src/main/scala/pomf/core/actors/UnhandledMessageListener.scala
|
Scala
|
apache-2.0
| 516
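A minimal wiring sketch for the listener above, assuming an ActorSystem and the metrics plumbing that CommonActor provides; the system and actor names are illustrative.

import akka.actor.ActorSystem
import pomf.core.actors.UnhandledMessageListener

object ListenerWiring extends App {
  // the listener subscribes itself to the event stream in its constructor,
  // so creating the actor is all that is needed to start logging unhandled messages
  val system = ActorSystem("pomf")
  system.actorOf(UnhandledMessageListener.props, "unhandled-message-listener")
}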
|
package com.xenopsconsulting.gamedayapi
import org.scalatest.junit.AssertionsForJUnit
import org.junit.Test
import org.junit.Assert._
import org.junit.Before
import java.text.SimpleDateFormat
import java.util.Date
class InningTest extends AssertionsForJUnit {
var game: Game = _
var innings: Innings = _
var inning: Inning = _
var date: Date = _
var team: String = _
@Before def initialize {
date = new SimpleDateFormat("yyy-MM-dd").parse("2011-08-13")
team = "sea"
game = new Game(date, team) with TestFetchStrategyProvider
innings = game.innings()
inning = innings.inning(1).get
}
@Test def testNum {
assertEquals("1", inning.num)
}
@Test def testAwayTeam {
assertEquals("bos", inning.awayTeam)
}
@Test def testHomeTeam {
assertEquals("sea", inning.homeTeam)
}
@Test def testNext {
assertEquals("Y", inning.next)
}
@Test def testTopExists {
assertNotNull(inning.top)
}
@Test def testBottomExists {
assertNotNull(inning.bottom)
}
@Test def testAtBats {
assertEquals(11, inning.atBats.size)
assertEquals("1", inning.atBats.head.num)
assertEquals("11", inning.atBats.last.num)
assertEquals(AtBat(<atbat_node/>, "top").getClass, inning.atBats.head.getClass)
}
@Test def testHomeTeamAtBats {
assertEquals(8, inning.homeTeamAtBats.size)
assertEquals("4", inning.homeTeamAtBats.head.num)
}
@Test def testAwayTeamAtBats {
assertEquals(3, inning.awayTeamAtBats.size)
assertEquals("1", inning.awayTeamAtBats.head.num)
}
@Test def testAtBatsNoBottom {
date = new SimpleDateFormat("yyy-MM-dd").parse("2011-08-13")
team = "sea"
game = new Game(date, team) with TestNoBottomInningFetchStrategyProvider
innings = game.innings()
inning = innings.inning(1).get
assertEquals(3, inning.atBats.size)
}
@Test def testPitches {
assertEquals(46, inning.pitches.size)
assertEquals("3", inning.pitches.head.id)
}
@Test def testHomeTeamPitches {
assertEquals(12, inning.homeTeamPitches.size)
assertEquals("3", inning.homeTeamPitches.head.id)
}
@Test def testAwayTeamPitches {
assertEquals(34, inning.awayTeamPitches.size)
assertEquals("25", inning.awayTeamPitches.head.id)
}
}
|
ecopony/scala-gameday-api
|
src/test/scala/com/xenopsconsulting/gamedayapi/InningTest.scala
|
Scala
|
mit
| 2,260
|
package net.scalaleafs.test
import net.scalaleafs.Template
import net.scalaleafs.Ident
import net.scalaleafs.Var
import net.scalaleafs.implicits._
import net.scalaleafs.Context
import net.scalaleafs.Noop
class BootstrapShowcase extends Template {
val isActive = Var(false)
def render =
"#biggest-buttons" #> {
bind(isActive) { isActive =>
"button" #> {
addClass("active").when(isActive) &
onclick(this.isActive.set(!isActive) & Noop)
}
}
}
}
|
scalastuff/scalaleafs
|
src/main/scala/net/scalaleafs/test/BootstrapShowcase.scala
|
Scala
|
apache-2.0
| 501
|
/*
* Copyright © 2014 TU Berlin (emma@dima.tu-berlin.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.emmalanguage
package api.flink
import api._
import api.alg._
import api.backend.ComprehensionCombinators
import api.backend.Runtime
import org.apache.flink.api.java.io.TypeSerializerInputFormat
import org.apache.flink.api.java.io.TypeSerializerOutputFormat
import org.apache.flink.api.scala.DataSet
import org.apache.flink.api.scala.{ExecutionEnvironment => FlinkEnv}
import org.apache.flink.core.fs.FileSystem
import java.net.URI
/** Flink backend operators. */
object FlinkOps extends ComprehensionCombinators[FlinkEnv] with Runtime[FlinkEnv] {
import FlinkDataSet.typeInfoForType
import FlinkDataSet.wrap
import Meta.Projections._
// ---------------------------------------------------------------------------
// ComprehensionCombinators
// ---------------------------------------------------------------------------
def cross[A: Meta, B: Meta](
xs: DataBag[A], ys: DataBag[B]
)(implicit flink: FlinkEnv): DataBag[(A, B)] = {
val datasetOf = new DataSetExtractor(flink)
(xs, ys) match {
case (datasetOf(xsDS), datasetOf(ysDS)) => xsDS cross ysDS
}
}
def equiJoin[A: Meta, B: Meta, K: Meta](
keyx: A => K, keyy: B => K)(xs: DataBag[A], ys: DataBag[B]
)(implicit flink: FlinkEnv): DataBag[(A, B)] = {
val datasetOf = new DataSetExtractor(flink)
(xs, ys) match {
case (datasetOf(xsDS), datasetOf(ysDS)) =>
(xsDS join ysDS) where keyx equalTo keyy
}
}
private class DataSetExtractor(flink: FlinkEnv) {
def unapply[A: Meta](bag: DataBag[A]): Option[DataSet[A]] = bag match {
case bag: FlinkDataSet[A] => Some(bag.rep)
case _ => Some(flink.fromCollection(bag.collect()))
}
}
// ---------------------------------------------------------------------------
// Runtime
// ---------------------------------------------------------------------------
def cache[A: Meta](xs: DataBag[A])(implicit flink: FlinkEnv): DataBag[A] =
xs match {
case xs: FlinkDataSet[A] =>
val sinkName = sink(xs.rep)
xs.env.execute(s"emma-cache-$sinkName")
source[A](sinkName)
case _ => xs
}
def foldGroup[A: Meta, B: Meta, K: Meta](
xs: DataBag[A], key: A => K, alg: Alg[A, B]
)(implicit flink: FlinkEnv): DataBag[Group[K, B]] = xs match {
case xs: FlinkDataSet[A] => xs.rep
.map(x => Group(key(x), alg.init(x))).groupBy("key")
.reduce((x, y) => Group(x.key, alg.plus(x.values, y.values)))
}
private def sink[A: Meta](xs: DataSet[A])(implicit flink: FlinkEnv): String = {
val typeInfo = typeInfoForType[A]
val tempName = tempNames.next()
val outFmt = new TypeSerializerOutputFormat[A]
outFmt.setInputType(typeInfo, flink.getConfig)
outFmt.setSerializer(typeInfo.createSerializer(flink.getConfig))
xs.write(outFmt, tempPath(tempName), FileSystem.WriteMode.OVERWRITE)
tempName
}
private def source[A: Meta](fileName: String)(implicit flink: FlinkEnv): DataSet[A] = {
val filePath = tempPath(fileName)
val typeInfo = typeInfoForType[A]
val inFmt = new TypeSerializerInputFormat[A](typeInfo)
inFmt.setFilePath(filePath)
flink.readFile(inFmt, filePath)
}
private val tempBase =
new URI(System.getProperty("emma.flink.temp-base", "file:///tmp/emma/flink-temp/"))
private[emmalanguage] val tempNames = Stream.iterate(0)(_ + 1)
.map(i => f"dataflow$i%03d")
.toIterator
private[emmalanguage] def tempPath(tempName: String): String =
tempBase.resolve(tempName).toURL.toString
}
|
aalexandrov/emma
|
emma-flink/src/main/scala/org/emmalanguage/api/flink/FlinkOps.scala
|
Scala
|
apache-2.0
| 4,142
|
/*
* Copyright (c) 2014 Dufresne Management Consulting LLC.
*/
package com.nickelsoftware.bettercare4me.hedis;
import org.joda.time.LocalDate
import org.scalatestplus.play.OneAppPerSuite
import org.scalatestplus.play.PlaySpec
import com.nickelsoftware.bettercare4me.models.MedClaim
import com.nickelsoftware.bettercare4me.models.Patient
import com.nickelsoftware.bettercare4me.models.PatientHistory
import com.nickelsoftware.bettercare4me.models.PatientHistoryFactory
import com.nickelsoftware.bettercare4me.models.RuleConfig
import com.nickelsoftware.bettercare4me.models.SimplePersistenceLayer
import com.nickelsoftware.bettercare4me.utils.NickelException
import org.joda.time.DateTime
import org.joda.time.Interval
import com.nickelsoftware.bettercare4me.utils.Utils
import scala.util.Random
object HEDISRulesTestSpec {
def setupTest(name: String, eligibleRate: java.lang.Integer, exclusionRate: java.lang.Integer, meetMeasureRate: java.lang.Integer): (Patient, PatientHistory, HEDISRule) = {
val dob = new LocalDate(1960, 9, 12).toDateTimeAtStartOfDay()
setupTest(name, "F", dob, eligibleRate, exclusionRate, meetMeasureRate)
}
def setupTest(name: String, gender: String, dob: DateTime, eligibleRate: java.lang.Integer, exclusionRate: java.lang.Integer, meetMeasureRate: java.lang.Integer): (Patient, PatientHistory, HEDISRule) = {
val persistenceLayer = new SimplePersistenceLayer(88)
val c = new RuleConfig(Map("name" -> name, "eligibleRate" -> eligibleRate, "exclusionRate" -> exclusionRate, "meetMeasureRate" -> meetMeasureRate))
val rule = HEDISRules.createRuleByName(c.name, c, new LocalDate(2014, 12, 31).toDateTimeAtStartOfDay())
val patient = persistenceLayer.createPatient("first", "last", gender, dob)
val claims = rule.generateClaims(persistenceLayer, patient, persistenceLayer.createProvider("first", "last"), Random.nextInt(100), Random.nextInt(100), Random.nextInt(100))
val patientHistory = PatientHistoryFactory.createPatientHistory(patient, claims)
(patient, patientHistory, rule)
}
}
class HEDISRulesTestSpec extends PlaySpec {
"The DateTime class" must {
"compute age from an HEDIS date" in {
val hedisDate = new LocalDate(2014, 12, 31).toDateTimeAtStartOfDay()
val dobMin = new LocalDate(2012, 1, 1).toDateTimeAtStartOfDay()
val dobMax = new LocalDate(2012, 12, 31).toDateTimeAtStartOfDay()
Patient("key1", "Michel", "Dufresne", "M", dobMin).age(hedisDate) mustBe 2
Patient("key1", "Michel", "Dufresne", "M", dobMax).age(hedisDate) mustBe 2
Patient("key1", "Michel", "Dufresne", "M", dobMin).ageInMonths(hedisDate) mustBe 35
Patient("key1", "Michel", "Dufresne", "M", dobMax).ageInMonths(hedisDate) mustBe 24
}
"compute intervale from an HEDIS date" in {
val hedisDate = new LocalDate(2014, 12, 31).toDateTimeAtStartOfDay()
val dobMin = new LocalDate(2014, 1, 1).toDateTimeAtStartOfDay()
val dobMax = new LocalDate(2014, 12, 31).toDateTimeAtStartOfDay()
Utils.getIntervalFromMonths(12, hedisDate).contains(dobMin) mustBe true
Utils.getIntervalFromMonths(12, hedisDate).contains(dobMax) mustBe true
Utils.getIntervalFromMonths(12, hedisDate).contains(dobMin.minusDays(1)) mustBe false
Utils.getIntervalFromMonths(12, hedisDate).contains(dobMax.minusDays(1)) mustBe true
Utils.getIntervalFromMonths(12, hedisDate).contains(dobMin.plusDays(1)) mustBe true
Utils.getIntervalFromMonths(12, hedisDate).contains(dobMax.plusDays(1)) mustBe false
Utils.getIntervalFromDays(10, hedisDate).contains(new LocalDate(2014, 12, 22).toDateTimeAtStartOfDay()) mustBe true
Utils.getIntervalFromDays(10, hedisDate).contains(new LocalDate(2014, 12, 21).toDateTimeAtStartOfDay()) mustBe false
Utils.getIntervalFromDays(10, hedisDate).contains(dobMax) mustBe true
}
}
"The HEDISRules class" must {
"create a TestRule properly from config" in {
val persistenceLayer = new SimplePersistenceLayer(88)
val c = new RuleConfig(Map("name" -> "TEST", "eligibleRate" -> new java.lang.Integer(40), "exclusionRate" -> new java.lang.Integer(5), "meetMeasureRate" -> new java.lang.Integer(92)))
val hedisDate = new LocalDate(2014, 12, 31).toDateTimeAtStartOfDay()
val rule = HEDISRules.createRuleByName(c.name, c, hedisDate)
rule.name mustBe "TEST"
rule.fullName mustBe "Test Rule"
rule.description mustBe "This rule is for testing."
val patient = persistenceLayer.createPatient("M", "D", "M", new LocalDate(1962, 7, 27).toDateTimeAtStartOfDay())
val provider = persistenceLayer.createProvider("M", "D")
val claims = rule.generateClaims(persistenceLayer, patient, provider, Random.nextInt(100), Random.nextInt(100), Random.nextInt(100))
claims.size mustBe 1
claims(0) match {
case claim: MedClaim =>
claim.patientID mustBe patient.patientID
claim.providerID mustBe provider.providerID
case _ => fail("Invalid claim class type!")
}
}
"contains a date withing a specified interval" in {
val c = new RuleConfig(Map("name" -> "TEST", "eligibleRate" -> new java.lang.Integer(40), "exclusionRate" -> new java.lang.Integer(5), "meetMeasureRate" -> new java.lang.Integer(92)))
val hedisDate = new LocalDate(2014, 12, 31).toDateTimeAtStartOfDay()
val rule = new TestRule(c, hedisDate)
rule.getIntervalFromMonths(6).contains(new LocalDate(2014, 7, 1).toDateTimeAtStartOfDay()) mustBe true
rule.getIntervalFromDays(31).contains(new LocalDate(2014, 12, 1).toDateTimeAtStartOfDay()) mustBe true
rule.getIntervalFromMonths(6).contains(new LocalDate(2014, 6, 30).toDateTimeAtStartOfDay()) mustBe false
rule.getIntervalFromDays(31).contains(new LocalDate(2014, 11, 30).toDateTimeAtStartOfDay()) mustBe false
}
"throw NickelException when try to create a rule with an unknown name" in {
val persistenceLayer = new SimplePersistenceLayer(88)
val c = new RuleConfig(Map("name" -> "Unknown Name", "eligibleRate" -> new java.lang.Integer(40), "exclusionRate" -> new java.lang.Integer(5), "meetMeasureRate" -> new java.lang.Integer(92)))
val hedisDate = new LocalDate(2015, 1, 1).toDateTimeAtStartOfDay()
a[NickelException] should be thrownBy {
HEDISRules.createRuleByName(c.name, c, hedisDate)
}
}
}
}
|
reactivecore01/bettercare4.me
|
play/test/com/nickelsoftware/bettercare4me/hedis/HEDISRulesTestSpec.scala
|
Scala
|
apache-2.0
| 6,516
|
package json.source
import java.io.File
import java.net.URI
import argonaut.Argonaut._
import argonaut.Json
import scalaz.{-\/, \/, \/-}
import scala.collection.mutable
import scala.util.control.NonFatal
import scalaz.syntax.std.either._
trait JsonSource[A] {
def uri(addr: A): URI
def json(addr: A): String \/ Json
}
object JsonSource {
implicit val json: JsonSource[Json] = new JsonSource[Json] {
override def uri(t: Json): URI = new URI("#")
override def json(t: Json): String \/ Json = \/-(t)
}
implicit val string: JsonSource[String] = new JsonSource[String] {
override def uri(t: String): URI = new URI("#")
override def json(t: String): String \/ Json = t.parse.disjunction
}
implicit val file: JsonSource[File] = new JsonSource[File] {
override def uri(t: File): URI = t.toURI
override def json(t: File): String \/ Json =
try scala.io.Source.fromFile(t).mkString.parse.disjunction
catch {
case NonFatal(e) => -\/(e.getMessage)
}
}
implicit val uri: JsonSource[URI] = new JsonSource[URI] {
override def uri(t: URI): URI = t
override def json(t: URI): String \/ Json =
try {
import scala.io.Source
val html = if (t.isAbsolute) Source.fromURL(t.toURL) else Source.fromURI(t)
val s = html.mkString
s.parse.disjunction
} catch {
case NonFatal(e) => -\/(e.getMessage)
}
}
def apply[T: JsonSource]: JsonSource[T] = implicitly[JsonSource[T]]
def withCaching[T](implicit wrapped: JsonSource[T]): JsonSource[T] =
new JsonSource[T] {
val cache = mutable.Map.empty[URI, String \/ Json]
override def uri(t: T): URI = wrapped.uri(t)
override def json(t: T): \/[String, Json] = {
// remove fragment, as whole document for that uri is cached
val key = uri(t).resolve("#")
cache.getOrElseUpdate(key, wrapped.json(t))
}
}
}
|
VoxSupplyChain/json-schema-parser
|
src/main/scala/json/source/JsonSource.scala
|
Scala
|
apache-2.0
| 1,932
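A short usage sketch for the JsonSource type class above, using the caching wrapper from the companion object; the file name is a hypothetical example.

import java.io.File
import json.source.JsonSource

object JsonSourceDemo extends App {
  // resolve a File through the implicit JsonSource[File], with whole-document caching;
  // parse errors come back on the left side of the disjunction
  val cached = JsonSource.withCaching[File]
  val schemaFile = new File("schema.json") // hypothetical path

  cached.json(schemaFile) match {
    case scalaz.\/-(json)  => println(json.spaces2)
    case scalaz.-\/(error) => println(s"could not parse: $error")
  }
}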
|
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import monix.execution.Ack.{Continue, Stop}
import scala.util.control.NonFatal
import monix.reactive.Observable.Operator
import monix.reactive.observers.Subscriber
private[reactive] final class FilterOperator[A](p: A => Boolean)
extends Operator[A,A] {
def apply(out: Subscriber[A]): Subscriber[A] =
new Subscriber[A] {
implicit val scheduler = out.scheduler
private[this] var isDone = false
def onNext(elem: A) = {
// Protects calls to user code from within the operator and
// streams the error downstream if it happens, but if the
// error happens because of calls to `onNext` or other
// protocol calls, then the behavior should be undefined.
var streamError = true
try {
if (p(elem)) {
streamError = false
out.onNext(elem)
}
else
Continue
}
catch {
case NonFatal(ex) if streamError =>
onError(ex)
Stop
}
}
def onError(ex: Throwable): Unit =
if (!isDone) {
isDone = true
out.onError(ex)
}
def onComplete(): Unit =
if (!isDone) {
isDone = true
out.onComplete()
}
}
}
|
Wogan/monix
|
monix-reactive/shared/src/main/scala/monix/reactive/internal/operators/FilterOperator.scala
|
Scala
|
apache-2.0
| 1,984
|
package de.qlextension
object StartNetLogo {
def main (args: Array[String]) {
// starting the NetLogo GUI
org.nlogo.app.App.main(args)
}
}
|
JZschache/NetLogo-QLearning-Extension
|
src/de/qlextension/StartNetLogo.scala
|
Scala
|
gpl-2.0
| 163
|
/*
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package test.utils
import akka.testkit.TestKit
import com.ibm.spark.boot.layer.SparkKernelDeployer
class NoArgSparkKernelTestKit
extends TestKit(SparkKernelDeployer.getNoArgSparkKernelActorSystem)
{
// Force initialization of no-arg Spark Kernel
SparkKernelDeployer.noArgKernelBootstrap
}
|
yeghishe/spark-kernel
|
kernel/src/test/scala/test/utils/NoArgSparkKernelTestKit.scala
|
Scala
|
apache-2.0
| 900
|
/*
* Artificial Intelligence for Humans
* Volume 2: Nature Inspired Algorithms
* Scala Version
* http://www.aifh.org
* http://www.jeffheaton.com
*
* Code repository:
* https://github.com/jeffheaton/aifh
*
* Copyright 2014 by Jeff Heaton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information on Heaton Research copyrights, licenses
* and trademarks visit:
* http://www.heatonresearch.com/copyright
*/
package com.heatonresearch.aifh.examples.util
import javax.swing._
import java.awt._
/**
* This class is used to display a grid. This grid is often used to simulate cellular automation.
*
* @param rows The number of rows in the grid.
* @param cols The number of columns in the grid.
* @param showGrid True, if the grid is to be shown.
*/
class WorldPanel(rows: Int, cols: Int, val showGrid: Boolean) extends JPanel {
/**
* The primary, displayed grid.
*/
private val primaryGrid = Array.ofDim[Boolean](rows, cols)
/**
* The backup grid.
*/
private val backupGrid = Array.ofDim[Boolean](rows, cols)
/**
* @return The number of rows.
*/
def getRows: Int = primaryGrid.length
/**
* @return The number of columns.
*/
def getCols: Int = primaryGrid(0).length
/**
* @return The primary grid.
*/
def getPrimaryGrid: Array[Array[Boolean]] = primaryGrid
/**
* @return The backup grid.
*/
def getBackupGrid: Array[Array[Boolean]] = backupGrid
override def paint(g: Graphics) {
super.paint(g)
val width: Int = this.getWidth
val height: Int = this.getHeight
val cellWidth: Double = width.asInstanceOf[Double] / getCols.asInstanceOf[Double]
val cellHeight: Double = height.asInstanceOf[Double] / getRows.asInstanceOf[Double]
g.setColor(Color.WHITE)
g.fillRect(0, 0, width, height)
if (this.showGrid) {
g.setColor(Color.black)
for(row <- 0 until getRows) {
val y: Int = (row * cellHeight).asInstanceOf[Int]
g.drawLine(0, y, width, y)
}
for(col <- 0 until getCols) {
val x: Int = (col * cellWidth).asInstanceOf[Int]
g.drawLine(x, 0, x, height)
}
}
for(row <- 0 until getRows;
col <- 0 until getCols) {
val x: Int = (col * cellWidth).asInstanceOf[Int]
val y: Int = (row * cellHeight).asInstanceOf[Int]
if (this.primaryGrid(row)(col)) {
g.setColor(Color.black)
g.fillRect(x, y, cellWidth.asInstanceOf[Int], cellHeight.asInstanceOf[Int])
}
}
}
/**
* Advance backup grid to primary.
*/
def advanceBackupGrid() {
for(row <- 0 until getRows) {
System.arraycopy(this.backupGrid(row), 0, this.primaryGrid(row), 0, getCols)
}
}
}
|
PeterLauris/aifh
|
vol2/vol2-scala-examples/src/main/scala/com/heatonresearch/aifh/examples/util/WorldPanel.scala
|
Scala
|
apache-2.0
| 3,217
|
package dielectric.spark
import org.apache.spark.rdd.RDD
import scala.reflect.ClassTag
import spire.algebra.Semigroup
trait RDDInstances {
implicit def rddInstance[A : ClassTag]: Semigroup[RDD[A]] =
new Semigroup[RDD[A]] {
def op(x: RDD[A], y: RDD[A]): RDD[A] = x ++ y
}
}
|
adelbertc/dielectric
|
src/main/scala/dielectric/spark/RDD.scala
|
Scala
|
apache-2.0
| 293
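A minimal sketch of combining two RDDs through the Semigroup instance above, assuming a local SparkContext; names and data are illustrative.

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import spire.algebra.Semigroup
import dielectric.spark.RDDInstances

object RDDSemigroupDemo extends App with RDDInstances {
  val sc = new SparkContext(new SparkConf().setMaster("local[*]").setAppName("rdd-semigroup"))
  val xs: RDD[Int] = sc.parallelize(Seq(1, 2, 3))
  val ys: RDD[Int] = sc.parallelize(Seq(4, 5))

  // op is defined as union (++), so the combined RDD contains all five elements
  val combined = Semigroup[RDD[Int]].op(xs, ys)
  println(combined.collect().toList)

  sc.stop()
}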
|
package scife.enumeration
package iterable
package lzy
object Empty extends scife.enumeration.Empty with ResetIter[Nothing] with Touchable[Nothing] {
override def next =
throw new RuntimeException
override def hasNext = false
}
|
kaptoxic/SciFe
|
src/main/scala/scife/enumeration/iterable/lzy/Empty.scala
|
Scala
|
gpl-2.0
| 249
|
package pl.newicom.dddd.delivery
import akka.actor.ActorPath
import akka.persistence.AtLeastOnceDelivery.AtLeastOnceDeliverySnapshot
import akka.persistence._
import pl.newicom.dddd.aggregate.EntityId
import pl.newicom.dddd.delivery.protocol.alod.Delivered
import pl.newicom.dddd.messaging.{AddressableMessage, Message}
import pl.newicom.dddd.persistence.{PersistentActorLogging, SaveSnapshotRequest}
case class DeliveryStateSnapshot(state: DeliveryState, alodSnapshot: AtLeastOnceDeliverySnapshot)
trait AtLeastOnceDeliverySupport extends PersistentActor with AtLeastOnceDelivery with PersistentActorLogging {
type DeliverableMessage = Message with AddressableMessage
def isSupporting_MustFollow_Attribute: Boolean = true
private var deliveryState: DeliveryState = InitialState
def destination(msg: Message): ActorPath
def recoveryCompleted(): Unit
def lastSentDeliveryId: Option[Long] = deliveryState.lastSentOpt
def oldestUnconfirmedDeliveryId: Option[Long] = deliveryState.oldestUnconfirmedDeliveryId
def unconfirmedNumber: Int = deliveryState.unconfirmedNumber
def deliver(msg: Message, deliveryId: Long): Unit =
persist(msg.withDeliveryId(deliveryId))(updateState)
def deliveryIdToMessage(msg: DeliverableMessage, destination: ActorPath): Long ⇒ Any = internalDeliveryId => {
val deliveryId = msg.deliveryId.get
val destinationId: EntityId = msg.destination.get
val lastSentToDestinationMsgId: Option[EntityId] = deliveryState.lastSentToDestinationMsgId(destinationId)
deliveryState = deliveryState.withSent(msg.id, internalDeliveryId, deliveryId, destinationId)
val msgToDeliver =
if (isSupporting_MustFollow_Attribute) msg.withMustFollow(lastSentToDestinationMsgId)
else msg
log.debug(s"[DELIVERY-ID: $deliveryId] Delivering: $msgToDeliver to $destination")
msgToDeliver
}
def updateState(msg: Any): Unit = msg match {
case message: DeliverableMessage =>
if (message.destination.isEmpty) {
log.warning(s"No entityId. Skipping $message")
} else {
val dest: ActorPath = destination(message)
deliver(dest)(deliveryIdToMessage(message, dest))
}
case receipt: Delivered =>
val deliveryId = receipt.deliveryId
deliveryState.internalDeliveryId(deliveryId).foreach { internalDeliveryId =>
log.debug(s"[DELIVERY-ID: $deliveryId] - Delivery confirmed")
if (confirmDelivery(internalDeliveryId)) {
deliveryState = deliveryState.withDelivered(deliveryId)
deliveryConfirmed(internalDeliveryId)
}
}
}
def deliveryStateReceive: Receive = {
case receipt: Delivered =>
persist(receipt)(updateState)
case SaveSnapshotRequest =>
val snapshot = DeliveryStateSnapshot(deliveryState, getDeliverySnapshot)
saveSnapshot(snapshot)
case SaveSnapshotSuccess(metadata) =>
log.debug("Snapshot saved successfully with metadata: {}", metadata)
case f @ SaveSnapshotFailure(metadata, reason) =>
log.error(s"$f")
throw reason
}
override def receiveRecover: Receive = {
case RecoveryCompleted =>
log.debug("Recovery completed")
recoveryCompleted()
case SnapshotOffer(metadata, DeliveryStateSnapshot(dState, alodSnapshot)) =>
setDeliverySnapshot(alodSnapshot)
deliveryState = dState
log.debug(s"Snapshot restored: $deliveryState")
case msg =>
updateState(msg)
}
def deliveryConfirmed(deliveryId: Long): Unit = {
// do nothing
}
}
|
pawelkaczor/akka-ddd
|
akka-ddd-core/src/main/scala/pl/newicom/dddd/delivery/AtLeastOnceDeliverySupport.scala
|
Scala
|
mit
| 3,610
|
package com.lynbrookrobotics.potassium.commons.drivetrain.twoSided
import com.lynbrookrobotics.potassium.Signal
import com.lynbrookrobotics.potassium.commons.drivetrain.unicycle.{UnicycleDrive, UnicycleSignal}
import com.lynbrookrobotics.potassium.streams.Stream
import com.lynbrookrobotics.potassium.units._
import squants.space.Length
import squants.{Dimensionless, Percent, Velocity}
/**
* A drivetrain with two side control (such as a tank drive)
*/
abstract class TwoSidedDrive extends UnicycleDrive {
self =>
type DriveSignal <: TwoSided[_]
type OpenLoopSignal = TwoSided[Dimensionless]
type Hardware <: TwoSidedDriveHardware
type Properties <: TwoSidedDriveProperties
protected def driveClosedLoop(
signal: Stream[OpenLoopSignal]
)(implicit hardware: Hardware, props: Signal[Properties]): Stream[DriveSignal] =
velocityControl(
signal.map(
d =>
TwoSided[Velocity](
props.get.maxLeftVelocity * d.left,
props.get.maxRightVelocity * d.right
)
)
)
def velocityControl(
target: Stream[TwoSided[Velocity]]
)(implicit hardware: Hardware, props: Signal[Properties]): Stream[DriveSignal]
def blendedVelocityControl(
arcadeSignal: Stream[UnicycleSignal],
curvature: Stream[Ratio[Dimensionless, Length]],
targetForwardVelocity: Stream[Velocity]
)(implicit hardware: Hardware, props: Signal[Properties]): Stream[DriveSignal] = {
val twoSidedSignal = arcadeSignal.map(unicycleToOpenLoopSignal)
val targetTankSpeeds = twoSidedSignal.map(expectedVelocity(_)(props.get))
val blendedVelocities = BlendedDriving.blendedDrive(
targetTankSpeeds,
targetForwardVelocity,
curvature
)
velocityControl(blendedVelocities)
}
/**
* Output the current signal to actuators with the hardware
*
* @param hardware the hardware to output with
* @param signal the signal to output
*/
protected def output(hardware: Hardware, signal: DriveSignal): Unit
protected def expectedVelocity(drive: OpenLoopSignal)(implicit props: Properties): TwoSided[Velocity] = {
TwoSided(
props.maxForwardVelocity * drive.left,
props.maxForwardVelocity * drive.right
)
}
override protected def unicycleToOpenLoopSignal(uni: UnicycleSignal): TwoSided[Dimensionless] = {
val left = uni.forward + uni.turn
val right = uni.forward - uni.turn
TwoSided(
left,
right
)
// if (left.abs > Percent(100) && right.abs <= Percent(100)) {
// TwoSided(
// Percent(100),
// Percent(100) + (right - left)
// )
// } else if(right.abs > Percent(100) && left.abs <= Percent(100)) {
// TwoSided(
// Percent(100) + (left - right),
// Percent(100)
// )
// } else if (left.abs > Percent(100) && right.abs > Percent(100)){
// // default to left at 100%
// TwoSided(
// Percent(100),
// Percent(100) + (right - left)
// )
// } else {
// TwoSided(left, right)
// }
}
}
|
Team846/potassium
|
commons/src/main/scala/com/lynbrookrobotics/potassium/commons/drivetrain/twoSided/TwoSidedDrive.scala
|
Scala
|
mit
| 3,033
|
package uk.gov.digital.ho.proving.financialstatus.domain
import java.time.{LocalDate, Period}
object CourseLengthCalculator {
val MONTHS_IN_YEAR = 12
def differenceInMonths(firstDate: LocalDate, secondDate: LocalDate): Int = {
val (startDate, endDate) = if (secondDate.isAfter(firstDate)) (firstDate, secondDate) else (secondDate, firstDate)
// Add 1 day to end date as we must include the end date
val period = Period.between(startDate, endDate.plusDays(1))
val months = period.getMonths + (MONTHS_IN_YEAR * period.getYears)
if (period.getDays > 0) months + 1 else months
}
}
|
UKHomeOffice/pttg-fs-api
|
src/main/scala/uk/gov/digital/ho/proving/financialstatus/domain/CourseLengthCalculator.scala
|
Scala
|
mit
| 608
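A brief usage sketch of the calculator above; the dates are made up for illustration.

import java.time.LocalDate
import uk.gov.digital.ho.proving.financialstatus.domain.CourseLengthCalculator.differenceInMonths

object CourseLengthDemo extends App {
  // 2016-01-01 to 2016-03-15 inclusive is two whole months plus a partial month,
  // so the remaining days round the result up to 3
  println(differenceInMonths(LocalDate.of(2016, 1, 1), LocalDate.of(2016, 3, 15))) // 3

  // argument order does not matter: the earlier date is always taken as the start
  println(differenceInMonths(LocalDate.of(2016, 3, 15), LocalDate.of(2016, 1, 1))) // 3
}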
|
/**
* Copyright (C) 2012 Typesafe, Inc. <http://www.typesafe.com>
*/
package org.pantsbuild.zinc
import java.io.File
import java.util.{ List => JList, Map => JMap }
import sbt.Logger
import sbt.Path._
import sbt.inc.{ Analysis, ZincPrivateAnalysis }
import scala.collection.JavaConverters._
import xsbti.compile.CompileOrder
/**
* All inputs for a compile run.
*/
case class Inputs(
classpath: Seq[File],
sources: Seq[File],
classesDirectory: File,
scalacOptions: Seq[String],
javacOptions: Seq[String],
cacheFile: File,
analysisMap: AnalysisMap,
javaOnly: Boolean,
compileOrder: CompileOrder,
incOptions: IncOptions)
object Inputs {
/**
* Create inputs based on command-line settings.
*/
def apply(log: Logger, settings: Settings): Inputs = {
import settings._
inputs(
log,
classpath,
sources,
classesDirectory,
scalacOptions,
javacOptions,
analysis.cache,
analysis.cacheMap,
javaOnly,
compileOrder,
incOptions)
}
/**
* Create normalised and defaulted Inputs.
*/
def inputs(
log: Logger,
classpath: Seq[File],
sources: Seq[File],
classesDirectory: File,
scalacOptions: Seq[String],
javacOptions: Seq[String],
analysisCache: Option[File],
analysisCacheMap: Map[File, File],
javaOnly: Boolean,
compileOrder: CompileOrder,
incOptions: IncOptions): Inputs =
{
val normalise: File => File = { _.getAbsoluteFile }
val cp = classpath map normalise
val srcs = sources map normalise
val classes = normalise(classesDirectory)
val cacheFile = normalise(analysisCache.getOrElse(defaultCacheLocation(classesDirectory)))
val analysisMap =
AnalysisMap.create(
analysisCacheMap.collect {
case (k, v) if normalise(k) != classes =>
(normalise(k), normalise(v))
},
log
)
val incOpts = updateIncOptions(incOptions, classesDirectory, normalise)
new Inputs(
cp, srcs, classes, scalacOptions, javacOptions, cacheFile, analysisMap,
javaOnly, compileOrder, incOpts
)
}
/**
* Java API for creating Inputs.
*/
def create(
log: Logger,
classpath: JList[File],
sources: JList[File],
classesDirectory: File,
scalacOptions: JList[String],
javacOptions: JList[String],
analysisCache: File,
analysisMap: JMap[File, File],
compileOrder: String,
incOptions: IncOptions): Inputs =
inputs(
log,
classpath.asScala,
sources.asScala,
classesDirectory,
scalacOptions.asScala,
javacOptions.asScala,
Option(analysisCache),
analysisMap.asScala.toMap,
javaOnly = false,
Settings.compileOrder(compileOrder),
incOptions
)
/**
* By default the cache location is relative to the classes directory (for example, target/classes/../cache/classes).
*/
def defaultCacheLocation(classesDir: File) = {
classesDir.getParentFile / "cache" / classesDir.getName
}
/**
* Normalise files and default the backup directory.
*/
def updateIncOptions(incOptions: IncOptions, classesDir: File, normalise: File => File): IncOptions = {
incOptions.copy(
apiDumpDirectory = incOptions.apiDumpDirectory map normalise,
backup = getBackupDirectory(incOptions, classesDir, normalise)
)
}
/**
* Get normalised, default if not specified, backup directory. If transactional.
*/
def getBackupDirectory(incOptions: IncOptions, classesDir: File, normalise: File => File): Option[File] = {
if (incOptions.transactional)
Some(normalise(incOptions.backup.getOrElse(defaultBackupLocation(classesDir))))
else
None
}
/**
* By default the backup location is relative to the classes directory (for example, target/classes/../backup/classes).
*/
def defaultBackupLocation(classesDir: File) = {
classesDir.getParentFile / "backup" / classesDir.getName
}
/**
* Verify inputs and update if necessary.
* Currently checks that the cache file is writable.
*/
def verify(inputs: Inputs): Inputs = {
inputs.copy(cacheFile = verifyCacheFile(inputs.cacheFile, inputs.classesDirectory))
}
/**
* Check that the cache file is writable.
* If not writable then the fallback is within the zinc cache directory.
*
*/
def verifyCacheFile(cacheFile: File, classesDir: File): File = {
if (Util.checkWritable(cacheFile)) cacheFile
else Setup.zincCacheDir / "analysis-cache" / Util.pathHash(classesDir)
}
/**
* Debug output for inputs.
*/
def debug(inputs: Inputs, log: xsbti.Logger): Unit = {
show(inputs, s => log.debug(sbt.Logger.f0(s)))
}
/**
* Debug output for inputs.
*/
def show(inputs: Inputs, output: String => Unit): Unit = {
import inputs._
val incOpts = Seq(
"transitive step" -> incOptions.transitiveStep,
"recompile all fraction" -> incOptions.recompileAllFraction,
"debug relations" -> incOptions.relationsDebug,
"debug api" -> incOptions.apiDebug,
"api dump" -> incOptions.apiDumpDirectory,
"api diff context size" -> incOptions.apiDiffContextSize,
"transactional" -> incOptions.transactional,
"backup directory" -> incOptions.backup,
"recompile on macro def" -> incOptions.recompileOnMacroDef,
"name hashing" -> incOptions.nameHashing
)
val values = Seq(
"classpath" -> classpath,
"sources" -> sources,
"output directory" -> classesDirectory,
"scalac options" -> scalacOptions,
"javac options" -> javacOptions,
"cache file" -> cacheFile,
"analysis map" -> analysisMap,
"java only" -> javaOnly,
"compile order" -> compileOrder,
"incremental compiler options" -> incOpts)
Util.show(("Inputs", values), output)
}
}
|
kwlzn/pants
|
src/scala/org/pantsbuild/zinc/Inputs.scala
|
Scala
|
apache-2.0
| 6,033
|
package com.olegych.scastie
package sbt
import akka.actor.Actor
import com.olegych.scastie.api.FormatRequest
import com.olegych.scastie.api.FormatResponse
import com.olegych.scastie.api.ScalaTarget
import org.scalafmt.Formatted
import org.scalafmt.Scalafmt
import org.scalafmt.config.ScalafmtConfig
import org.scalafmt.config.ScalafmtRunner
import org.scalafmt.config.ScalafmtRunner.Dialect
import org.slf4j.LoggerFactory
object FormatActor {
private[sbt] def format(code: String, isWorksheetMode: Boolean, scalaTarget: ScalaTarget): Either[String, String] = {
val config: ScalafmtConfig = {
val dialect =
if (scalaTarget.scalaVersion.startsWith("2.12")) ScalafmtRunner.Dialect.scala212
else if (scalaTarget.scalaVersion.startsWith("2.13")) ScalafmtRunner.Dialect.scala213
else if (scalaTarget.scalaVersion.startsWith("3")) scala.meta.dialects.Scala3
else ScalafmtRunner.Dialect.scala213
val runner = {
val tmp = ScalafmtRunner(dialect = dialect)
if (isWorksheetMode && scalaTarget.hasWorksheetMode)
tmp.forSbt
else tmp
}
ScalafmtConfig.default.copy(runner = runner)
}
Scalafmt.format(code, style = config) match {
case Formatted.Success(formattedCode) => Right(formattedCode)
case Formatted.Failure(failure) => Left(failure.toString)
}
}
}
class FormatActor() extends Actor {
import FormatActor._
private val log = LoggerFactory.getLogger(getClass)
override def receive: Receive = {
case FormatRequest(code, isWorksheetMode, scalaTarget) =>
log.info(s"format (isWorksheetMode: $isWorksheetMode)")
log.info(code)
sender() ! FormatResponse(format(code, isWorksheetMode, scalaTarget))
}
}
|
scalacenter/scastie
|
sbt-runner/src/main/scala/com.olegych.scastie.sbt/FormatActor.scala
|
Scala
|
apache-2.0
| 1,752
|
package ingraph.compiler.cypher2gplan
import java.util.concurrent.atomic.AtomicLong
import ingraph.compiler.cypher2gplan.util.TransformUtil
import ingraph.compiler.exceptions._
import ingraph.model.expr.{PropertyAttribute, ResolvableName}
import ingraph.model.expr.types.{TResolvedName, TSortOrder}
import ingraph.model.gplan.{GNode, Grouping, Projection, UnaryGNode}
import ingraph.model.{expr, gplan, misc}
import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, UnresolvedFunction, UnresolvedStar}
import org.apache.spark.sql.catalyst.expressions.{Expression, SortOrder}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.{expressions => cExpr}
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
object GPlanResolver {
/**
* structure to hold the name resolution cache:
* key is the lookup name (String)
* value is a pair of:
* - the result of the name resolution and
* - the expression that resulted in this particular resolution
*/
protected class TNameResolverCache {
type TKey = String
type TValue = (expr.types.TResolvedNameValue, cExpr.Expression)
private val impl = mutable.Map[TKey, TValue]()
def get(k: TKey): Option[TValue] = impl.get(k)
def put(k: TKey, v: TValue) = impl.put(k, v)
/** Put an entry under the key of the original base name stored in the resolved value rn */
def put(rn: expr.ResolvableName) = rn match {
case rne: cExpr.Expression => impl.put(rn.resolvedName.get.baseName, (rn.resolvedName.get, rne))
}
def update(k:TKey, v: TValue) = impl.update(k, v)
}
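// Illustrative sketch of the cache contract above (a hypothetical VertexAttribute bound to the lookup name "n"):
//   cache.put("n", (expr.types.TResolvedNameValue("n", "n#0"), vertexAttrForN))
//   cache.get("n")  // Some((TResolvedNameValue("n", "n#0"), vertexAttrForN))
// so later occurrences of the lookup name "n" within the same query part reuse the same resolved name.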
type TScopedNameResolver = (String, cExpr.Expression) => expr.types.TResolvedName
def resolveGPlan(unresolvedQueryPlan: gplan.GNode): gplan.GNode = {
// should there be other rule sets (partial functions), combine them using orElse,
// e.g. pfunc1 orElse pfunc2
val resolvedNames = resolveNamesForGNode(unresolvedQueryPlan)
val resolved = resolvedNames.transform(gplanResolver).asInstanceOf[gplan.GNode]
// val elements = unresolvedQueryPlan.flatMap {
// case gplan.GetVertices(v) => Some(v)
// case gplan.Expand(src, trg, edge, _, _) => Some(edge, trg)
// case _ => None
// }
resolved
}
/**
* This is to resolve the names in expressions of GPlan.
*/
private def resolveNamesForGNode(q: gplan.GNode): gplan.GNode = {
// this holds the GNodes still to be processed while building the reverse Polish notation of the tree
val qNodeStack = mutable.Stack[gplan.GNode](q)
// this holds the gplan in reverse Polish notation, i.e. the stack head has the leaves while its bottom element holds the root node
val gPlanPolishNotation = mutable.Stack[gplan.GNode]()
// this will hold the operand stack for the Polish Notation evaluation algorithm used to rebuild the tree
val operandStack = mutable.Stack[gplan.GNode]()
// create the reverse polish notation in the stack gPlanPolishNotation
while (qNodeStack.nonEmpty) {
val n = qNodeStack.pop
gPlanPolishNotation.push(n)
n match {
case u: gplan.UnaryGNode => qNodeStack.push(u.child)
/*
* Ensure that the right child comes out first, so it will be closer in gPlanPolishNotation to n itself,
* meaning that at "evaluation time" (i.e. for the sake of name resolution), the left child will be evaluated first.
*
* This is important as query parts (those that end at a WITH or RETURN clause) are assembled to form a left-deep tree of join nodes.
*
* This also means that name resolution builds on the operand order of joins, so before name resolution, joins are non-commutative.
*/
case b: gplan.BinaryGNode => qNodeStack.push(b.left, b.right)
case _: gplan.LeafGNode => {}
case x => throw new UnexpectedTypeException(x, "GPlan tree")
}
}
reAssembleTreeResolvingNames(gPlanPolishNotation, operandStack)
// return the re-constructed tree
operandStack.length match {
case 1 => operandStack.pop
case _ => throw new CompilerException(s"A single item expected in the stack after re-assembling the GNode tree at name resolution. Instead, it has ${operandStack.length} entries.")
}
}
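// Illustrative sketch of the traversal above for a two-leaf tree Join(A, B):
// Join is popped first and pushed to gPlanPolishNotation, then push(left, right) leaves B on top of qNodeStack,
// so gPlanPolishNotation ends up as [A, B, Join] (head first) and the re-assembly below resolves A (the left child) before B.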
protected def reAssembleTreeResolvingNames(gPlanPolishNotation: mutable.Stack[gplan.GNode], operandStack: mutable.Stack[gplan.GNode]) = {
// this will hold those names that are in scope, so no new resolution should be invented
var nameResolverCache = new TNameResolverCache
// HACK: this will hold the previous name resolving scope, which will be used for Sort and Top operators
var oldNameResolverScope: Option[TNameResolverCache] = None
// scoped name resolver shorthand
def r[T <: Expression](v: T): T = r2(v, nameResolverCache)
// scoped name resolver shorthand allowing to pass in the nameResolverCache
def r2[T <: Expression](v: T, nrs: TNameResolverCache): T = v.transform(expressionNameResolver(new SNR(nrs))).asInstanceOf[T]
// re-assemble the tree resolving names
while (gPlanPolishNotation.nonEmpty) {
val newGNode: gplan.GNode = gPlanPolishNotation.pop match {
case b: gplan.BinaryGNode => {
val rightChild = operandStack.pop
val leftChild = operandStack.pop
val newOp: gplan.BinaryGNode = b match {
case gplan.Union(all, _, _) => gplan.Union(all, leftChild, rightChild)
case gplan.Join(_, _) => gplan.Join(leftChild, rightChild)
case gplan.LeftOuterJoin(_, _) => gplan.LeftOuterJoin(leftChild, rightChild)
case gplan.ThetaLeftOuterJoin(_, _, condition) => gplan.ThetaLeftOuterJoin(leftChild, rightChild, r(condition))
// case AntiJoin skipped because it is introduced later in the beautification step
}
newOp
}
case u: gplan.UnaryGNode => {
val child = operandStack.pop
val newOp: gplan.UnaryGNode = u match {
case gplan.AllDifferent(e, _) => gplan.AllDifferent(e.map(r(_)), child)
case gplan.DuplicateElimination(_) => gplan.DuplicateElimination(child)
case gplan.Expand(src, trg, edge, dir, _) => gplan.Expand(r(src), r(trg), r(edge), dir, child)
// resolve names in listexpression, then resolve the unwindattribute itself
case gplan.Unwind(ua, _) => gplan.Unwind(r(expr.UnwindAttribute(r(ua.list), ua.name, ua.resolvedName)), child)
case gplan.Production(_) => gplan.Production(child)
case gplan.UnresolvedProjection(projectList, _, distinct, sortOrder, skipExpr, limitExpr, selectionCondition) => {
// initialize new namespace applicable after the projection operator
val nextQueryPartNameResolverCache = new TNameResolverCache
val nextSnr = new SNR(nextQueryPartNameResolverCache)
val resolvedProjectListBuf = ListBuffer.empty[expr.ReturnItem]
for (ri <- projectList) {
val resolvedChild = r(ri.child)
val resolvedName: expr.types.TResolvedName = ri.alias match {
case Some(alias) => nextSnr.resolve(alias, resolvedChild)
case None => resolvedChild match {
case rc: expr.ResolvableName => {
rc match {
case ea: expr.ElementAttribute => nextQueryPartNameResolverCache.put(rc)
case pa: expr.PropertyAttribute => nextQueryPartNameResolverCache.put(rc)
case ua: expr.UnwindAttribute => nextQueryPartNameResolverCache.put(rc)
case ea: expr.ExpressionAttribute => nextQueryPartNameResolverCache.put(rc)
case x => throw new UnexpectedTypeException(x, "return item position")
}
rc.resolvedName
}
case rc => nextSnr.resolve("_expr", rc)
}
}
resolvedProjectListBuf.append(expr.ReturnItem(resolvedChild, ri.alias, resolvedName))
}
val resolvedProjectList: expr.types.TProjectList = resolvedProjectListBuf.toSeq
// retain old name resolver scope
oldNameResolverScope = Some(nameResolverCache)
nameResolverCache = nextQueryPartNameResolverCache
val resolvedSortOrder: Option[TSortOrder] = sortOrder.map( (so) => so.map( _ match {
case cExpr.SortOrder(sortExpr, dir, no, se) => try {
cExpr.SortOrder(r(sortExpr), dir, no, se)
} catch {
// in case of name resolution problem, we fall back to the last name resolution scope, if available. If again can't resolve, we throw the exception
case nre: NameResolutionException => cExpr.SortOrder(r2(sortExpr, oldNameResolverScope.getOrElse(throw nre)), dir, no, se)
}
}))
// PLACEHOLDER: when we will allow expressions in skip/limit, it should be resolved here
val resolvedSelectionCondition: Option[Expression] = selectionCondition.flatMap( (cond) => Some(r(cond)))
gplan.UnresolvedProjection(resolvedProjectList, child, distinct, resolvedSortOrder, skipExpr, limitExpr, resolvedSelectionCondition)
}
// case {Projection, Grouping, Sort, Top} skipped because it is introduced in a later resolution stage when resolving UnresolvedProjection
// however, resolution for the sort keys and the possible selection condition is done
case gplan.Selection(condition, _) => gplan.Selection(r(condition), child)
case gplan.Create(attributes, _) => gplan.Create(attributes.map(r(_)), child)
case gplan.UnresolvedDelete(attributes, detach, _) => gplan.UnresolvedDelete(attributes, detach, child)
case gplan.Merge(attributes, _) => gplan.Merge(attributes.map(r(_)), child)
case gplan.SetNode(vertexLabelUpdates, _) => gplan.SetNode(vertexLabelUpdates, child)
case gplan.Remove(vertexLabelUpdates, _) => gplan.Remove(vertexLabelUpdates, child)
}
newOp
}
case l: gplan.LeafGNode => {
val newOp: gplan.LeafGNode = l match {
case gplan.GetVertices(v) => gplan.GetVertices(r(v))
case x => x
}
newOp
}
}
operandStack.push(newGNode)
}
}
def expressionNameResolver(snr: SNR): PartialFunction[Expression, Expression] = {
case rn: expr.ResolvableName =>
if (rn.resolvedName.isDefined) rn // do not resolve already resolved stuff again
else rn match {
case expr.VertexAttribute (name, labels, properties, isAnonymous, _) => expr.VertexAttribute(name, labels, properties.mapValues(_.transform(expressionNameResolver(snr))), isAnonymous, snr.resolve(name, rn))
case expr.EdgeAttribute(name, labels, properties, isAnonymous, _) => expr.EdgeAttribute(name, labels, properties.mapValues(_.transform(expressionNameResolver(snr))), isAnonymous, snr.resolve(name, rn))
case expr.RichEdgeAttribute(src, trg, edge, dir) => expr.RichEdgeAttribute(
src.transform(expressionNameResolver(snr)).asInstanceOf[expr.VertexAttribute],
trg.transform(expressionNameResolver(snr)).asInstanceOf[expr.VertexAttribute],
edge.transform(expressionNameResolver(snr)).asInstanceOf[expr.EdgeAttribute],
dir
)
case expr.EdgeListAttribute(name, labels, properties, isAnonymous, minHops, maxHops, _) => expr.EdgeListAttribute(name, labels, properties.mapValues(_.transform(expressionNameResolver(snr))), isAnonymous, minHops, maxHops, snr.resolve(name, rn))
case expr.PropertyAttribute(name, elementAttribute, _) => expr.PropertyAttribute(name,
// see "scoped name resolver shorthand" above
elementAttribute.transform(expressionNameResolver(snr)).asInstanceOf[expr.ElementAttribute],
snr.resolve(s"${elementAttribute.name}.${name}", rn))
case expr.UnwindAttribute(list, name, _) => expr.UnwindAttribute(list, name, snr.resolve(name, rn))
}
case UnresolvedAttribute(nameParts) => nameParts.length match {
case 1 | 2 => {
val elementName = nameParts(0) // should be OK as .length >= 1
val scopeCacheEntry = snr.nameResolverCache.get(elementName)
val elementAttribute = scopeCacheEntry match { //if (scopeCacheEntry.isDefined) {
case Some((rnString, entry)) => {
val rn = Some(rnString)
// copy the type with basic stuff only
entry match {
case expr.VertexAttribute(name, _, _, isAnonymous, _) => expr.VertexAttribute(name, isAnonymous = isAnonymous, resolvedName = rn)
case expr.EdgeAttribute(name, _, _, isAnonymous, _) => expr.EdgeAttribute(name, isAnonymous = isAnonymous, resolvedName = rn)
case expr.EdgeListAttribute(name, _, _, isAnonymous, minHops, maxHops, _) => expr.EdgeListAttribute(name, isAnonymous = isAnonymous, resolvedName = rn, minHops = minHops, maxHops = maxHops)
// handle PropertyAttribute chained from previous query part under some alias
case expr.PropertyAttribute(name, elementAttribute, _) => expr.PropertyAttribute(name, elementAttribute, resolvedName = rn)
case expr.UnwindAttribute(list, name, _) => expr.UnwindAttribute(list, name, resolvedName = rn)
case expr.ExpressionAttribute(e, name, _) => expr.ExpressionAttribute(e, name, resolvedName = rn)
// fallback for expressions: expression references get wrapped into ExpressionAttribute upon resolution
case e: cExpr.Expression => expr.ExpressionAttribute(e, elementName, resolvedName = rn)
}
}
case _ => throw new NameResolutionException(elementName)
}
if (nameParts.length == 1) {
elementAttribute
} else { // nameParts.length == 2
val propertyName = nameParts(1) // should be OK as .length == 2
elementAttribute match {
// if nameParts.length == 2, base should always be an ElementAttribute
case ea: expr.ElementAttribute =>
expr.PropertyAttribute(propertyName, ea, snr.resolve(s"${ea.name}.${propertyName}",
expr.PropertyAttribute(propertyName, ea)) // this is a dirty hack to tell the resolver that we are about to resolve a PropertyAttribute instance
)
case x => throw new UnexpectedTypeException(x, "basis position of property dereferencing")
}
}
}
case _ => throw new CompilerException(s"Unexpected number of name parts, namely ${nameParts.length} for ${nameParts}")
}
// fallback: no-op resolution.
case x => x
}
/**
* Scoped name resolver that holds the cache it utilizes.
* @param pNameResolverCache the name resolution cache to utilize
*/
class SNR(pNameResolverCache: TNameResolverCache) {
def nameResolverCache = pNameResolverCache
def resolve: TScopedNameResolver = (baseName, target) => {
val resolvedName: expr.types.TResolvedNameValue = pNameResolverCache.get(baseName) match {
// we don't have the result type info for UNWIND and single index lookup expressions
// so our best guess is to use the first type we encounter in the current query part
case Some((rn, expr.UnwindAttribute(_, _, _))) => {
pNameResolverCache.update(baseName, (rn, target))
rn
}
case Some((rn, expr.IndexLookupExpression(_, _))) => {
pNameResolverCache.update(baseName, (rn, target))
rn
}
case Some((rn, entry)) => if (entry.getClass != target.getClass) {
throw new CompilerException(s"Name collision across types: ${baseName}. In the cache, it is ${entry.getClass}, but now it was passed as ${target.getClass}")
} else {
rn
}
case None => {
val rn = expr.types.TResolvedNameValue(baseName, SNR.generateUniqueName(baseName))
pNameResolverCache.put(baseName, (rn, target))
rn
}
}
Some(resolvedName)
}
}
object SNR {
// always use .getAndIncrement on this object
private val generatedNameCounterMap = mutable.Map[String, AtomicLong]()
def resetResolverNameCounters_IKnowWhatImDoing = {
generatedNameCounterMap.clear()
}
def generateUniqueName(baseName: String): String = {
s"${baseName}#${generatedNameCounterMap.getOrElseUpdate(baseName, new AtomicLong).getAndIncrement}"
}
}
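// Illustrative sketch of the unique-name scheme implemented above:
//   SNR.generateUniqueName("n") // "n#0"
//   SNR.generateUniqueName("n") // "n#1"  (the per-baseName counter increments)
//   SNR.generateUniqueName("m") // "m#0"  (each base name gets its own counter)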
/**
* These are the resolver rules that apply to all unresolved GPlans.
*
* There are some nodes that do not need resolution: GetVertices, DuplicateElimination, Expand, Join, Union, etc.
*/
val gplanResolver: PartialFunction[LogicalPlan, LogicalPlan] = {
// Unary
case gplan.UnresolvedProjection(projectList, child, distinct, sortOrder, skipExpr, limitExpr, selectionCondition) => {
val resolvedSkipExpr = TransformUtil.transformOption(skipExpr, expressionResolver)
val resolvedLimitExpr = TransformUtil.transformOption(limitExpr, expressionResolver)
if (sortOrder.isEmpty && (resolvedSkipExpr.isDefined || resolvedLimitExpr.isDefined))
throw new IllegalSkipLimitUsageException
val resolvedProjectList = projectList.map( pi => expr.ReturnItem(pi.child.transform(expressionResolver), pi.alias, pi.resolvedName) )
val afterTopSortProjectOp: gplan.GNode = projectionResolveHelper(resolvedProjectList, child) match {
case g: Grouping => {
// DISTINCT is by definition useless when grouping
// TODO: check if ORDER BY refers only to the result of grouping
val afterSortOp = sortOrder.fold[GNode](g)( sortResolveHelper(_, g) )
wrapInTopOperatorHelper(resolvedSkipExpr, resolvedLimitExpr, afterSortOp)
}
case p: Projection => {
val afterDistinct = if (distinct) gplan.DuplicateElimination(p) else p
sortOrder.fold[gplan.GNode](afterDistinct)( (so) => {
val newSortOp = sortResolveHelper(so, afterDistinct)
// find resolved names that the sort involves but the projection hides
val additionalSortItems: TSortOrder = newSortOp.order.flatMap( so => so.child match {
// in case of sorting by a PropertyAttribute, it's enough to have itself or its base ElementAttribute in the projection list
case pa: PropertyAttribute => p.projectList.foldLeft[ Option[SortOrder] ]( Some(so) )(
(acc, ri) => if (ri.resolvedName == pa.resolvedName || ri.resolvedName == pa.elementAttribute.resolvedName) None else acc
)
// in case of sorting by other resolvable names, we need to have itself present after the projection
case rn: ResolvableName => p.projectList.foldLeft[ Option[SortOrder] ]( Some(so) )(
(acc, ri) => if (ri.resolvedName == rn.resolvedName) None else acc
)
case _ => None
})
if (additionalSortItems.isEmpty) {
// no resolved names needed for the sorting were hidden by the projection,
// so we pack that in a Top operator if needed, and return that
wrapInTopOperatorHelper(resolvedSkipExpr, resolvedLimitExpr, newSortOp)
} else {
// extra variables needed for the sorting, but DISTINCT was in place
if (distinct) {
// FIXME: possibly merge this with the creation of additionalSortItems
// see if all of them are legal
val allLegal: Boolean = additionalSortItems.map(_.child).foldLeft[Boolean](true)( (acc, sortExpr) => {
// this item appears aliased in the project list.
// Note: the ORDER BY requires this item unaliased, but such items will be added to the introduced projection when building the looser projection below
val appearAliased: Boolean = sortExpr match {
case sortRN: ResolvableName => projectList.find( ri => ri.child match {
case riRN: ResolvableName => sortRN.resolvedName == riRN.resolvedName
case _ => false
}).isDefined
case _ => false
}
// this is a PropertyAttribute whose elementAttribute appears aliased in the project list.
val baseAppearAliased: Boolean = sortExpr match {
case sortRN: PropertyAttribute => projectList.find( ri => ri.child match {
case riRN: ResolvableName => sortRN.elementAttribute.resolvedName == riRN.resolvedName
case _ => false
}).isDefined
case _ => false
}
acc && (appearAliased || baseAppearAliased)
})
if (!allLegal) {
throw new IllegalSortingAfterDistinctException(additionalSortItems.map(_.child.toString()).toString())
}
}
// we build a looser projection, then sort, then fall back to the projection originally requested.
val innerSortOp = gplan.Sort(newSortOp.order,
gplan.Projection((p.projectList ++ additionalSortItems.map(so => so.child match {
case rn: ResolvableName => expr.ReturnItem(so.child, None, rn.resolvedName)
case x => throw new UnexpectedTypeException(x, "we were filtering for resolvedNames in sort list")
}) ++ // add the aliased returnitems without aliasing to allow referencing it in the effective projection
p.projectList.flatMap( pi => pi.child match {
case rn: ResolvableName => if (pi.resolvedName == rn.resolvedName) None else Some(expr.ReturnItem(rn, None, rn.resolvedName))
case _ => None
})).distinct // this is to remove duplicates that might originate from e.g. adding aliased return items and items from sorting without using that alias
, p.child // note: we checked above not to have distinct
)
)
val afterTopOp = wrapInTopOperatorHelper(resolvedSkipExpr, resolvedLimitExpr, innerSortOp)
gplan.Projection(p.projectList, afterTopOp)
}
})
}
}
selectionCondition.fold[gplan.GNode](afterTopSortProjectOp)( (c) => gplan.Selection( c.transform(expressionResolver), afterTopSortProjectOp) )
}
case gplan.Selection(condition, child) => gplan.Selection(condition.transform(expressionResolver), child)
case gplan.Sort(order, child) => sortResolveHelper(order, child)
case gplan.ThetaLeftOuterJoin(left, right, condition) => gplan.ThetaLeftOuterJoin(left, right, condition.transform(expressionResolver))
case gplan.Top(skipExpr, limitExpr, child) => gplan.Top(
TransformUtil.transformOption(skipExpr, expressionResolver)
, TransformUtil.transformOption(limitExpr, expressionResolver)
, child)
case gplan.Unwind(expr.UnwindAttribute(collection, alias, resolvedName), child) => gplan.Unwind(expr.UnwindAttribute(collection.transform(expressionResolver), alias, resolvedName), child)
// DML
case gplan.UnresolvedDelete(attributes, detach, child) => gplan.Delete(resolveAttributes(attributes, child), detach, child)
case gplan.Create(attributes, child) => gplan.Create(filterForAttributesOfChildOutput(attributes, child, invert=true), child)
}
val expressionResolver: PartialFunction[Expression, Expression] = {
case expr.ExpressionAttribute(expression, name, rn) => expr.ExpressionAttribute(expression.transform(expressionResolver), name, rn)
case UnresolvedFunction(functionIdentifier, children, isDistinct) => expr.FunctionInvocation(misc.Function.fromCypherName(functionIdentifier.identifier, children.length, isDistinct), children, isDistinct)
}
/**
* Resolve attribute references according to the output schema of the child GNode
* @param attributes
* @param child
* @return
*/
protected def resolveAttributes(attributes: Seq[cExpr.NamedExpression], child: gplan.GNode): Seq[expr.ResolvableName] = {
val transformedAttributes = attributes.flatMap( a => child.output.find( co => co.name == a.name ) )
if (attributes.length != transformedAttributes.length) {
throw new CompilerException(s"Unable to resolve all attributes. Resolved ${transformedAttributes.length} out of ${attributes.length}")
}
transformedAttributes
}
/**
* Filters the attributes passed in for being included in the child.output schema
* @param attributes
* @param child
* @param invert iff true, match is inverted, i.e. only those are returned which were not found
* @return
*/
protected def filterForAttributesOfChildOutput(attributes: Seq[expr.ResolvableName], child: gplan.GNode, invert: Boolean = false): Seq[expr.ResolvableName] = {
attributes.flatMap( a => if ( invert.^(child.output.exists( co => co.name == a.name )) ) Some(a) else None )
}
private def sortResolveHelper(order: Seq[SortOrder], child: GNode) = {
gplan.Sort(
order.map(_.transform(expressionResolver) match {
case so: SortOrder => so
case x => throw new UnexpectedTypeException(x, "sort items after resolution")
})
, child)
}
private def wrapInTopOperatorHelper(resolvedSkipExpr: Option[Expression], resolvedLimitExpr: Option[Expression], content: GNode): GNode = {
if (resolvedSkipExpr.isDefined || resolvedLimitExpr.isDefined)
gplan.Top(resolvedSkipExpr, resolvedLimitExpr, content)
else
content
}
/**
* Creates either a Projection or a Grouping instance based on the expressions found in projectList.
*
* Grouping is returned iff we find at least one expression in projectList having a call to an aggregate function at its top-level.
*
* @param child
* @param projectList
* @return
*/
protected def projectionResolveHelper(projectList: expr.types.TProjectList, child: GNode): UnaryGNode with expr.ProjectionDescriptor = {
/**
* Returns true iff e is an expression having a call to an aggregation function at its top-level.
* @param e
* @return
*/
def isAggregatingFunctionInvocation(e: cExpr.Expression): Boolean = {
e match {
case expr.FunctionInvocation(f, _, _) => f.isAggregation
case _ => false
}
}
// look for aggregation functions in top-level position of return items
// those having no aggregation function in top-level position will form the aggregation criteria if at least one aggregation is seen
val aggregationCriteriaCandidate: Seq[cExpr.Expression] = projectList.flatMap( returnItem => {
val returnItemExpression = returnItem.child
// validate aggregation semantics: no aggregation function is allowed at non top-level
returnItemExpression.children.foreach( c => c.find(isAggregatingFunctionInvocation).fold[Unit](Unit)( e => throw new IllegalAggregationException(e.toString) ) )
// see if this return item is an aggregation
returnItemExpression match {
// FIXME: UnresolvedStar is also allowed until it is resolved
case UnresolvedStar(_) => Some(returnItemExpression)
case e: cExpr.Expression => if (isAggregatingFunctionInvocation(e)) {
None
} else {
Some(e)
}
// This should never be reached, as projectList is expr.types.TProjectList
case x => throw new UnexpectedTypeException(x, "return item position")
}
})
// FIXME: when resolving UnresolvedStar, this needs revising
if (aggregationCriteriaCandidate.length != projectList.length) {
gplan.Grouping(aggregationCriteriaCandidate, projectList, child)
} else {
gplan.Projection(projectList, child)
}
}
}
|
FTSRG/ingraph
|
compiler/src/main/scala/ingraph/compiler/cypher2gplan/GPlanResolver.scala
|
Scala
|
epl-1.0
| 28,263
|
package im.actor.server.api.http.groups
import scala.concurrent.duration._
import scala.concurrent.{ ExecutionContext, Future }
import scala.util.{ Failure, Success }
import akka.actor.ActorSystem
import akka.http.scaladsl.model.HttpResponse
import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import play.api.libs.json.Json
import slick.driver.PostgresDriver.api._
import im.actor.api.rpc.files.FileLocation
import im.actor.server.api.http.RoutesHandler
import im.actor.server.api.http.json.JsonImplicits.{ errorsFormat, groupInviteInfoFormat }
import im.actor.server.api.http.json.{ AvatarUrls, Errors, Group, GroupInviteInfo, User }
import im.actor.server.util.FileStorageAdapter
import im.actor.server.util.ImageUtils.getAvatar
import im.actor.server.{ models, persist }
class GroupsHandler()(
implicit
db: Database,
system: ActorSystem,
ec: ExecutionContext,
fsAdapter: FileStorageAdapter
) extends RoutesHandler {
override def routes: Route = path("groups" / "invites" / Segment) { token ⇒
get {
onComplete(retrieve(token)) {
case Success(Right(result)) ⇒
complete(HttpResponse(
status = OK,
entity = Json.stringify(Json.toJson(result))
))
case Success(Left(errors)) ⇒
complete(HttpResponse(
status = NotAcceptable,
entity = Json.stringify(Json.toJson(errors))
))
case Failure(e) ⇒ complete(HttpResponse(InternalServerError))
}
}
}
private def retrieve(token: String): Future[Either[Errors, GroupInviteInfo]] =
db.run {
for {
optToken ← persist.GroupInviteToken.findByToken(token)
result ← optToken.map { token ⇒
for {
groupTitle ← persist.Group.findTitle(token.groupId)
groupAvatar ← persist.AvatarData.findByGroupId(token.groupId)
groupAvatarUrls ← avatarUrls(groupAvatar)
inviterName ← persist.User.findName(token.creatorId)
inviterAvatar ← persist.AvatarData.findByUserId(token.creatorId).headOption
inviterAvatarUrls ← avatarUrls(inviterAvatar)
} yield Right(GroupInviteInfo(group = Group(groupTitle.getOrElse("Group"), groupAvatarUrls), inviter = User(inviterName.getOrElse("User"), inviterAvatarUrls)))
}.getOrElse(DBIO.successful(Left(Errors("Expired or invalid token"))))
} yield result
}
private def avatarUrls(optAvatar: Option[models.AvatarData]): DBIO[Option[AvatarUrls]] = {
optAvatar.map(getAvatar).map { avatar ⇒
for {
small ← avatar.smallImage.map(i ⇒ urlOrNone(i.fileLocation)).getOrElse(DBIO.successful(None)) //TODO: rewrite with shapeless
large ← avatar.largeImage.map(i ⇒ urlOrNone(i.fileLocation)).getOrElse(DBIO.successful(None))
full ← avatar.fullImage.map(i ⇒ urlOrNone(i.fileLocation)).getOrElse(DBIO.successful(None))
} yield Some(AvatarUrls(small, large, full))
}.getOrElse(DBIO.successful(None))
}
private def urlOrNone(location: FileLocation): DBIO[Option[String]] = {
implicit val timeout = 1.day
for {
fileOpt ← persist.File.find(location.fileId)
url ← fileOpt.map { file ⇒
DBIO.from(fsAdapter.getFileUrl(file, location.accessHash))
}.getOrElse(DBIO.successful(None))
} yield url
}
}
|
boneyao/actor-platform
|
actor-server/actor-http-api/src/main/scala/im/actor/server/api/http/groups/GroupsHandler.scala
|
Scala
|
mit
| 3,448
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mxnet
import org.slf4j.LoggerFactory
/**
* Learning rate scheduler, which adaptively changes the learning rate
* based on the training progress.
* @author Yuan Tang
*/
abstract class LRScheduler(var baseLR: Float = 0.01f) {
/**
* Base class of a learning rate scheduler
*
* The training progress is represented by `num_update`, which can be roughly
* viewed as the number of minibatches executed so far. Its value is
* non-decreasing, and increases at most by one.
*
* The exact value is the upper bound of the number of updates applied to
* a weight/index.
*
* @param numUpdate Int, the maximal number of updates applied to a weight.
*/
def apply(numUpdate: Int): Float
}
/**
* Class for reducing learning rate in factor
*
* Assume the weight has been updated by n times, then the learning rate will
* be base_lr * factor^^(floor(n/step))
*
* @param step Int, schedule learning rate after n updates
* @param factor Float, the factor for reducing the learning rate
*
*/
class FactorScheduler(protected var step: Int, protected var factor: Float) extends LRScheduler {
protected var count: Int = 0
private val logger = LoggerFactory.getLogger(classOf[FactorScheduler])
require(step >= 1, "Schedule step must be greater than or equal to 1 round")
require(factor < 1.0, "Factor must be less than 1 to make lr reduce")
def apply(numUpdate: Int): Float = {
if (numUpdate > this.count + this.step) {
this.count += this.step
this.baseLR *= this.factor
this.logger.info(s"Update$numUpdate: Change learning rate to ${this.baseLR}")
}
this.baseLR
}
}
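// Worked example (sketch) of the update rule above, with FactorScheduler(step = 100, factor = 0.5f) and the inherited default baseLR = 0.01f:
//   apply(50)  // 0.01f   -- no step boundary crossed yet
//   apply(101) // 0.005f  -- count becomes 100, learning rate halved once
//   apply(201) // 0.0025f -- count becomes 200, learning rate halved again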
|
indhub/mxnet
|
scala-package/core/src/main/scala/org/apache/mxnet/LRScheduler.scala
|
Scala
|
apache-2.0
| 2,456
|
package org.allenai.pnp
import ExecutionScore.ExecutionScore
import org.allenai.pnp.AuxiliaryLoss.AuxiliaryLoss
/** A training example for neural probabilistic programs. An example
* consists of a conditional and an unconditional program, and an
* environment in which these programs execute. An additional
* filter on environments may be provided to further restrict the set
* of conditional executions during inference.
*/
case class PnpExample[A](unconditional: Pnp[A], conditional: Pnp[A],
env: Env, conditionalExecutionScore: ExecutionScore,
auxiliaryLoss: AuxiliaryLoss = AuxiliaryLoss.Zero) {
}
object PnpExample {
def fromDistributions[A](unconditional: Pnp[A], conditional: Pnp[A]) = {
PnpExample[A](unconditional, conditional, Env.init, ExecutionScore.Zero)
}
}
|
jayantk/pnp
|
src/main/scala/org/allenai/pnp/PnpExample.scala
|
Scala
|
apache-2.0
| 800
|
/*
* The MIT License (MIT)
*
* Copyright (c) 2016 Ian McIntosh
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package f3.core
package object stats {}
|
cranst0n/f3
|
modules/core/src/main/scala/f3/core/stats/package.scala
|
Scala
|
mit
| 1,184
|
/*
* Copyright 2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package mongodb
import json._
import scala.util.matching.Regex
import java.util.{Date, UUID}
import java.util.regex.Pattern
import org.bson.types.ObjectId
object BsonDSL extends JsonDSL {
implicit def objectid2jvalue(oid: ObjectId): JValue = Meta.objectIdAsJValue(oid)
implicit def pattern2jvalue(p: Pattern): JValue = Meta.patternAsJValue(p)
implicit def regex2jvalue(r: Regex): JValue = Meta.patternAsJValue(r.pattern)
implicit def uuid2jvalue(u: UUID): JValue = Meta.uuidAsJValue(u)
implicit def date2jvalue(d: Date)(implicit formats: Formats): JValue = Meta.dateAsJValue(d, formats)
}
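// Illustrative usage sketch (the ~ operator and pair-to-JObject conversions come from the inherited JsonDSL;
// DefaultFormats is assumed in scope to satisfy the implicit Formats required by date2jvalue):
//   implicit val formats: Formats = DefaultFormats
//   val query: JObject = ("_id" -> ObjectId.get) ~ ("createdAt" -> new Date)
// The implicits defined above convert the ObjectId and Date values to JValues so they embed directly in the DSL expression.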
|
pbrant/framework
|
persistence/mongodb/src/main/scala/net/liftweb/mongodb/BsonDSL.scala
|
Scala
|
apache-2.0
| 1,235
|
package com.twitter.inject.server.tests
import com.twitter.finagle.stats.{
Counter,
Gauge,
InMemoryStatsReceiver,
MetricBuilder,
Stat,
StatsReceiver
}
import java.io.PrintStream
import scala.collection.mutable
/* Fake stats receiver for testing integration */
private[tests] class TestStatsReceiver extends StatsReceiver {
private[this] val underlying: InMemoryStatsReceiver = new InMemoryStatsReceiver
val counters: mutable.Map[Seq[String], Long] = underlying.counters
val stats: mutable.Map[Seq[String], Seq[Float]] = underlying.stats
val gauges: mutable.Map[Seq[String], () => Float] = underlying.gauges
/**
* Specifies the representative receiver. This is in order to
* expose an object we can use for comparison so that global stats
* are only reported once per receiver.
*/
override def repr: TestStatsReceiver = this
/**
* Get a [[Counter counter]] with the given `name`.
*/
def counter(metricBuilder: MetricBuilder): Counter =
underlying.counter(metricBuilder)
/**
* Get a [[Stat stat]] with the given name.
*/
def stat(metricBuilder: MetricBuilder): Stat =
underlying.stat(metricBuilder)
/**
* Add the function `f` as a [[Gauge gauge]] with the given name.
*
* The returned [[Gauge gauge]] value is only weakly referenced by the
* [[StatsReceiver]], and if garbage collected will eventually cease to
* be a part of this measurement: thus, it needs to be retained by the
* caller. Or put another way, the measurement is only guaranteed to exist
* as long as there exists a strong reference to the returned
* [[Gauge gauge]] and typically should be stored in a member variable.
*
* Measurements under the same name are added together.
*
* @see [[StatsReceiver.provideGauge]] when there is not a good location
* to store the returned [[Gauge gauge]] that can give the desired lifecycle.
* @see [[https://docs.oracle.com/javase/7/docs/api/java/lang/ref/WeakReference.html java.lang.ref.WeakReference]]
*/
def addGauge(metricBuilder: MetricBuilder)(f: => Float): Gauge =
underlying.addGauge(metricBuilder)(f)
override def toString: String = "TestStatsReceiver"
def print(p: PrintStream): Unit = print(p, includeHeaders = false)
def print(p: PrintStream, includeHeaders: Boolean): Unit = underlying.print(p, includeHeaders)
}
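// Illustrative usage sketch (the String-based counter/addGauge overloads inherited from StatsReceiver are assumed here;
// only the MetricBuilder variants are overridden above):
//   val sr = new TestStatsReceiver
//   sr.counter("requests").incr()               // afterwards visible via sr.counters(Seq("requests"))
//   val g = sr.addGauge("queue_depth") { 42f }  // keep a strong reference to `g`, per the gauge note above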
|
twitter/finatra
|
inject/inject-server/src/test/scala/com/twitter/inject/server/tests/TestStatsReceiver.scala
|
Scala
|
apache-2.0
| 2,370
|
/*
* Copyright 2015 - 2016 Red Bull Media House GmbH <http://www.redbullmediahouse.com> - all rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package doc
object ReliableDelivery extends App {
//#reliable-delivery
import scala.concurrent.duration._
import scala.util._
import akka.actor._
import com.rbmhtechnology.eventuate.EventsourcedActor
import com.rbmhtechnology.eventuate.ConfirmedDelivery
case class DeliverCommand(message: String)
case class DeliverEvent(message: String)
case class Confirmation(deliveryId: String)
case class ConfirmationEvent()
case class ReliableMessage(deliveryId: String, message: String)
case object Redeliver
class ExampleActor(destination: ActorPath,
override val id: String,
override val eventLog: ActorRef)
extends EventsourcedActor with ConfirmedDelivery {
import context.dispatcher
context.system.scheduler.schedule(
initialDelay = 10.seconds,
interval = 5.seconds,
receiver = self,
message = Redeliver)
override def onCommand = {
case DeliverCommand(message) =>
persist(DeliverEvent(message)) {
case Success(evt) => // ...
case Failure(err) => // ...
}
case Confirmation(deliveryId) if unconfirmed.contains(deliveryId) =>
persistConfirmation(ConfirmationEvent(), deliveryId) {
case Success(evt) => // ...
case Failure(err) => // ...
}
case Redeliver =>
redeliverUnconfirmed()
}
override def onEvent = {
case DeliverEvent(message) =>
val deliveryId = lastSequenceNr.toString
deliver(deliveryId, ReliableMessage(deliveryId, message), destination)
case ConfirmationEvent() =>
// handling of confirmation event is optional
}
}
//#
class Destination extends Actor {
import context.dispatcher
def receive = {
case r @ ReliableMessage(deliveryId, message) =>
context.system.scheduler.scheduleOnce(3.seconds, sender(), Confirmation(deliveryId))
}
}
import com.rbmhtechnology.eventuate.ReplicationConnection._
import com.rbmhtechnology.eventuate.log.leveldb.LeveldbEventLog
val system: ActorSystem = ActorSystem(DefaultRemoteSystemName)
val eventLog: ActorRef = system.actorOf(LeveldbEventLog.props("rd"))
val dest = system.actorOf(Props(new Destination))
val actor = system.actorOf(Props(new ExampleActor(dest.path, "r1", eventLog)))
actor ! DeliverCommand("test")
}
|
RBMHTechnology/eventuate
|
src/sphinx/code/ReliableDeliveryDoc.scala
|
Scala
|
apache-2.0
| 3,057
|
package intellij.haskell.alex.lang.psi.impl
import com.intellij.extapi.psi.ASTWrapperPsiElement
import com.intellij.lang.ASTNode
import intellij.haskell.alex.lang.psi.AlexElement
/**
* @author ice1000
*/
class AlexElementImpl private[impl](node: ASTNode) extends ASTWrapperPsiElement(node) with AlexElement {
}
|
rikvdkleij/intellij-haskell
|
src/main/scala/intellij/haskell/alex/lang/psi/impl/AlexElementImpl.scala
|
Scala
|
apache-2.0
| 317
|
package elea.term
import elea._
import elea.term.CriticalPair.Induction
import scalaz.{ICons, IList, ISet}
// Note: fix indices can now be made unique, since they are only used for the coupling check
case class CriticalPair(
path: IList[Case.Index],
action: CriticalPair.Action) {
def isFoldable: Boolean = action.isInstanceOf[Induction]
def :/(sub: Substitution): CriticalPair =
copy(action = action :/ sub)
def extendPathWithMatch(idx: Case.Index): CriticalPair =
copy(path = idx :: path)
/**
* Are the first elements of the two critical paths equal to each other
*/
def couplesWith(other: CriticalPair): Boolean =
(path, other.path) match {
case (ICons(x, _), ICons(y, _)) => x == y
case _ => false
}
/**
* Check whether the path of this pair is a sub-path of the path of an `other` pair.
* Used to check whether we should continue unfolding fixed-points in [[elea.rewrite.Supercompiler.supercompile()]].
*/
def embedsInto(other: CriticalPair): Boolean =
action.sameTypeAs(other.action) &&
path.embedsInto(other.path)
}
object CriticalPair {
def of(fix: Fix, args: IList[Term]): CriticalPair = {
val fixVar = Name.fresh("f")
val fixArgSubterms = ISet
.unions(args.toList.map(arg => arg.freeSubtermSet.insert(arg)))
.toList
val cp = fix.body.apply(Var(fixVar) :: args).reduce match {
case term: Case if fixArgSubterms.exists(_ =@= term.matchedTerm) =>
term.matchedTerm match {
case AppView(matchFix: Fix, matchArgs) if matchFix.fissionConstructorContext.isDefined =>
CriticalPair
.fission(matchFix, matchArgs)
.extendPathWithMatch(term.index)
case AppView(matchFix: Fix, matchArgs: IList[Term]) =>
CriticalPair
.of(matchFix, matchArgs)
.extendPathWithMatch(term.index)
case _ =>
CriticalPair
.induction(term)
.extendPathWithMatch(term.index)
}
case term: Case =>
CriticalPair
.caseSplit(term)
.extendPathWithMatch(term.index)
case _ =>
throw new IllegalArgumentException(s"Term does not have critical pair: ${fix.apply(args)}")
}
cp :/ (fix / fixVar)
}
def unapply(term: Term): Option[(Fix, IList[Term], CriticalPair)] =
term match {
case AppView(fix: Fix, args) if fix.argCount == args.length =>
Some(fix, args, CriticalPair.of(fix, args))
case _ =>
None
}
sealed trait Action {
def :/(sub: Substitution): Action
def shouldFold: Boolean
def apply(from: Term): Term
def sameTypeAs(other: Action): Boolean
}
case class Induction private(caseOf: Case) extends Action {
override def :/(sub: Substitution) = copy(caseOf = caseOf :/ sub)
override def shouldFold = true
override def apply(from: Term): Term =
C(_ => from).applyToBranches(caseOf)
override def sameTypeAs(other: Action): Boolean =
other.isInstanceOf[Induction]
}
case class CaseSplit private(caseOf: Case) extends Action {
override def :/(sub: Substitution) = copy(caseOf = caseOf :/ sub)
override def shouldFold = false
override def apply(from: Term): Term =
C(_ => from).applyToBranches(caseOf)
override def sameTypeAs(other: Action): Boolean =
other.isInstanceOf[CaseSplit]
}
case class Fission private(fix: Fix, args: IList[Term]) extends Action {
require(fix.fissionConstructorContext.isDefined)
override def :/(sub: Substitution) = copy(
fix = (fix :/ sub).asInstanceOf[Fix],
args = args.map(_ :/ sub))
override def shouldFold = false
override def apply(from: Term): Term =
from.replace(fix.apply(args), fix.fissionConstructorContext(args).get)
override def sameTypeAs(other: Action): Boolean =
other.isInstanceOf[Fission]
}
def induction(caseOf: Case): CriticalPair =
CriticalPair(IList.empty, Induction(caseOf))
def caseSplit(caseOf: Case): CriticalPair =
CriticalPair(IList.empty, CaseSplit(caseOf))
def fission(fix: Fix, args: IList[Term]): CriticalPair =
CriticalPair(IList.empty, Fission(fix, args))
}
|
wsonnex/elea
|
src/main/scala/elea/term/CriticalPair.scala
|
Scala
|
mit
| 4,369
|
package org.renci.blazegraph
import java.io.{BufferedOutputStream, File, FileOutputStream, OutputStream}
import com.bigdata.rdf.sail.BigdataSailRepositoryConnection
import org.backuity.clist._
import org.openrdf.query.QueryLanguage
import org.openrdf.query.resultio.TupleQueryResultWriter
import org.openrdf.query.resultio.sparqljson.SPARQLResultsJSONWriter
import org.openrdf.query.resultio.sparqlxml.SPARQLResultsXMLWriter
import org.openrdf.query.resultio.text.tsv.SPARQLResultsTSVWriter
import scala.io.Source
object Select extends Command(description = "SPARQL select") with Common {
var queryFile = arg[File](name = "query")
var output = arg[File]()
def createOutputWriter(out: OutputStream): TupleQueryResultWriter = outformat.getOrElse("tsv").toLowerCase match {
case "tsv" => new SPARQLResultsTSVWriter(out)
case "xml" => new SPARQLResultsXMLWriter(out)
case "json" => new SPARQLResultsJSONWriter(out)
case other => throw new IllegalArgumentException(s"Invalid SPARQL select output format: $other")
}
def runUsingConnection(blazegraph: BigdataSailRepositoryConnection): Unit = {
val query = blazegraph.prepareTupleQuery(QueryLanguage.SPARQL, Source.fromFile(queryFile, "utf-8").mkString)
val queryOutput = new BufferedOutputStream(new FileOutputStream(output))
query.evaluate(createOutputWriter(queryOutput))
queryOutput.close()
}
}
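// Illustrative invocation sketch (the subcommand name and the outformat option come from the surrounding CLI
// setup and the Common trait, so both are assumptions of this example):
//   blazegraph-runner select --outformat json query.rq results.json
// which makes createOutputWriter above pick the SPARQLResultsJSONWriter for the result file.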
|
balhoff/blazegraph-runner
|
src/main/scala/org/renci/blazegraph/Select.scala
|
Scala
|
bsd-3-clause
| 1,397
|
import sbt._
import Keys._
import org.json4s._
import org.json4s.jackson.JsonMethods._
object SbtAngularSeedKeys {
val AngularSeed = config("angularSeed") extend Compile
val targetFile = SettingKey[File]("target-file","where the AngularJS seed is compiled to; please make sure the extension is .js")
val jsonExpression = SettingKey[JValue]("json-expression","An expression that resolves to a JValue, whose result gets placed in the seed")
val angularModuleName = SettingKey[String]("angular-module-name", "The name of the angular js module")
val angularValueName = SettingKey[String]("angular-value-name","The name of the angular js value")
}
object SbtAngularSeedPlugin extends Plugin {
import SbtAngularSeedKeys._
val compileSeed = TaskKey[Unit]("compileSeed","Compiles the AngularJS seed file")
val compileSeedTask = Def.task {
val json = pretty(render(jsonExpression.value))
val moduleName = angularModuleName.value
val angularName = angularValueName.value
val firstString = """angular.module('""" + moduleName + """',[])""" + "\\n"
val secondString = """.value('""" + angularName + """',""" + "\\n"
val main = json + "\\n"
val end = """);"""
val finalString =
firstString + secondString + main + end
IO.write(targetFile.value,finalString)
}
val clean = TaskKey[Unit]("clean","Deletes the AngularJS seed file")
lazy val cleanTask = Def.task {
IO.delete(targetFile.value)
}
lazy val defaultSettings: Seq[Setting[_]] = Seq(
angularModuleName in AngularSeed := "angularSeed",
angularValueName in AngularSeed := (angularModuleName in AngularSeed).value.capitalize,
targetFile in AngularSeed <<= Def.settingDyn{
val fileName = (angularModuleName in AngularSeed).value
resourceManaged (_ / "main" / "js" / (fileName + ".js"))
},
jsonExpression in AngularSeed := {
JObject(List.empty)
},
compileSeed in AngularSeed := compileSeedTask.value,
clean in AngularSeed := cleanTask.value
)
}
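// Illustrative sketch of the seed file compileSeedTask writes with the default settings above
// (module "angularSeed", value "AngularSeed", empty JSON expression); the exact JSON pretty-printing may differ slightly:
//   angular.module('angularSeed',[])
//   .value('AngularSeed',
//   { }
//   );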
|
mdedetrich/sbt-angular-seed
|
src/main/scala/SbtAngularSeedPlugin.scala
|
Scala
|
bsd-2-clause
| 2,018
|
package mimir.ctables;
import optimus.optimization._
import optimus.algebra._
import optimus.algebra.AlgebraOps._
import optimus.optimization.enums._
import optimus.optimization.model._
import mimir.ctables._
import mimir.models._
import mimir.algebra._
class Oracle(time : Double, trcmap : collection.mutable.LinkedHashMap[(Model,Int,Seq[PrimitiveValue]),(Double,Double,Double)]) {
var T: Double = time
var trc: collection.mutable.LinkedHashMap[(Model,Int,Seq[PrimitiveValue]),(Double,Double,Double)] = trcmap
}
object CTPrioritizer {
def prioritize(reasons : Iterable[Reason]) : Option[(Array[Array[MPIntVar]],Array[MPFloatVar],Array[MPIntVar])]= {
implicit val problem = MPModel(SolverLib.oJSolver)
if(reasons.size != 0) {
val trcmap = collection.mutable.LinkedHashMap[(Model,Int,Seq[PrimitiveValue]),(Double,Double,Double)]()
var i = 0
var j = 0.0
var k = 0.0
val numI = reasons.size // Number of UDOs
val I = 0 until numI
val numJ = 1 // Number of Oracles
val J = 0 until numJ
var q = new Array[Double](numI) // Reliability of UDO prior to curation
for (reason <- reasons) {
i += 1
j += 0.5
k += 2
//trcmap += ("{{"+reason.model+";"+reason.idx+"["+reason.args.mkString(", ")+"]}}" -> (i,j,k))
trcmap += ((reason.model,reason.idx,reason.args) -> (i,j,k))
q(i-1) = reason.model.confidence(reason.idx,reason.args,reason.hints)
}
val oracle = new Oracle(10,trcmap)
// println(reasons)
// Data (filled randomly)
// var q = Array.tabulate(numI)(_*0.2) // Reliability of UDO prior to curation
var w = Array.tabulate(numI)(_=>1) // Temporarily set to 1
//var t = Array.tabulate(numI,numJ)((x,y)=>x+y) // Time
//var r = Array.tabulate(numI,numJ)((x,y)=>0.1*x+0.2*y) // Reliability
//var c = Array.tabulate(numI,numJ)((x,y)=>x+2*y) // Cost of Repair
//var T = Array.tabulate(numJ)(x => (x+2)*3) // Time Limit per Oracle
var t = Array.ofDim[Double](numI,numJ)
var r = Array.ofDim[Double](numI,numJ)
var c = Array.ofDim[Double](numI,numJ)
var T = Array.ofDim[Double](numJ)
for ( j <- J ) {
val trcIter = oracle.trc.iterator
var i = 0
T(j) = oracle.T
while (trcIter.hasNext) {
val triple = trcIter.next._2
t(i)(j) = triple._1
r(i)(j) = triple._2
c(i)(j) = triple._3
i += 1
}
}
var B = 50 // Budget
var M = 1000 // Large Number
//Variables
val x = Array.tabulate(numI,numJ)((i,j) => MPIntVar(s"x($i,$j)", 0 to 1))
val y = Array.tabulate(numI)(i=>MPFloatVar(s"y$i"))
val z = Array.tabulate(numI)(i=>MPIntVar(s"z$i", 0 to 1))
// Objective Function
maximize(sum(I,J){ (i,j) => w(i)*y(i) })
// Constraints
for ( j <- J ) {
add(sum(I)(i => t(i)(j)*x(i)(j))<:=T(j))
}
for ( i <- I ) {
add(y(i) <:= q(i) + M*z(i))
add(y(i) <:= sum(J)(j => c(i)(j)*x(i)(j)) + M*(1-z(i)))
add(sum(J)(j => x(i)(j)) <:= z(i))
}
add(sum(I,J){(i,j) => c(i)(j)*x(i)(j)} <:= B)
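// Model summary (sketch): maximize sum_i w(i)*y(i), the weighted post-curation credibility of the UDOs.
// Each oracle j has a time budget: sum_i t(i)(j)*x(i)(j) <= T(j). The big-M pair bounds y(i) by q(i) when z(i) = 0
// and by sum_j c(i)(j)*x(i)(j) when z(i) = 1; sum_j x(i)(j) <= z(i) assigns at most one oracle per UDO;
// the final constraint caps the total curation cost at the budget B.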
start()
println("objective: " + objectiveValue)
for ( i <- I ) {
for ( j <- J ) {
x(i)(j).value match {
case Some(value) => if(value==1) println ("Oracle "+(j+1)+" is assigned to UDO "+(i+1))
case None => None
}
}
}
for (i <- I) {
y(i).value match {
case Some(value) => println ("Credibility of UDO "+(i+1)+" after curation: "+value)
case None => None
}
z(i).value match {
case Some(value) => if(value==1) println ("UDO "+(i+1)+" was repaired by a Repairing Oracle ")
case None => None
}
}
release()
Some((x,y,z))
}
else None
}
}
|
UBOdin/mimir
|
src/main/scala/mimir/ctables/CTPrioritizer.scala
|
Scala
|
apache-2.0
| 3,592
|
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.api.java
import org.apache.spark.api.java.function.Function2
import org.apache.spark.rdd.RDD
import org.bdgenomics.adam.models.{
Coverage,
VariantContext
}
import org.bdgenomics.adam.rdd.{ ADAMContext, GenomicRDD }
import org.bdgenomics.adam.rdd.contig.NucleotideContigFragmentRDD
import org.bdgenomics.adam.rdd.feature.{ CoverageRDD, FeatureRDD }
import org.bdgenomics.adam.rdd.fragment.FragmentRDD
import org.bdgenomics.adam.rdd.read.AlignmentRecordRDD
import org.bdgenomics.adam.rdd.variant.{
VariantRDD,
GenotypeRDD,
VariantContextRDD
}
import org.bdgenomics.formats.avro._
sealed trait SameTypeConversion[T, U <: GenomicRDD[T, U]] extends Function2[U, RDD[T], U] {
def call(v1: U, v2: RDD[T]): U = {
ADAMContext.sameTypeConversionFn(v1, v2)
}
}
final class ContigsToContigsConverter extends SameTypeConversion[NucleotideContigFragment, NucleotideContigFragmentRDD] {
}
final class ContigsToCoverageConverter extends Function2[NucleotideContigFragmentRDD, RDD[Coverage], CoverageRDD] {
def call(v1: NucleotideContigFragmentRDD, v2: RDD[Coverage]): CoverageRDD = {
ADAMContext.contigsToCoverageConversionFn(v1, v2)
}
}
final class ContigsToFeaturesConverter extends Function2[NucleotideContigFragmentRDD, RDD[Feature], FeatureRDD] {
def call(v1: NucleotideContigFragmentRDD, v2: RDD[Feature]): FeatureRDD = {
ADAMContext.contigsToFeaturesConversionFn(v1, v2)
}
}
final class ContigsToFragmentsConverter extends Function2[NucleotideContigFragmentRDD, RDD[Fragment], FragmentRDD] {
def call(v1: NucleotideContigFragmentRDD, v2: RDD[Fragment]): FragmentRDD = {
ADAMContext.contigsToFragmentsConversionFn(v1, v2)
}
}
final class ContigsToAlignmentRecordsConverter extends Function2[NucleotideContigFragmentRDD, RDD[AlignmentRecord], AlignmentRecordRDD] {
def call(v1: NucleotideContigFragmentRDD, v2: RDD[AlignmentRecord]): AlignmentRecordRDD = {
ADAMContext.contigsToAlignmentRecordsConversionFn(v1, v2)
}
}
final class ContigsToGenotypesConverter extends Function2[NucleotideContigFragmentRDD, RDD[Genotype], GenotypeRDD] {
def call(v1: NucleotideContigFragmentRDD, v2: RDD[Genotype]): GenotypeRDD = {
ADAMContext.contigsToGenotypesConversionFn(v1, v2)
}
}
final class ContigsToVariantsConverter extends Function2[NucleotideContigFragmentRDD, RDD[Variant], VariantRDD] {
def call(v1: NucleotideContigFragmentRDD, v2: RDD[Variant]): VariantRDD = {
ADAMContext.contigsToVariantsConversionFn(v1, v2)
}
}
final class ContigsToVariantContextsConverter extends Function2[NucleotideContigFragmentRDD, RDD[VariantContext], VariantContextRDD] {
def call(v1: NucleotideContigFragmentRDD, v2: RDD[VariantContext]): VariantContextRDD = {
ADAMContext.contigsToVariantContextConversionFn(v1, v2)
}
}
final class CoverageToContigsConverter extends Function2[CoverageRDD, RDD[NucleotideContigFragment], NucleotideContigFragmentRDD] {
def call(v1: CoverageRDD, v2: RDD[NucleotideContigFragment]): NucleotideContigFragmentRDD = {
ADAMContext.coverageToContigsConversionFn(v1, v2)
}
}
final class CoverageToCoverageConverter extends SameTypeConversion[Coverage, CoverageRDD] {
}
final class CoverageToFeaturesConverter extends Function2[CoverageRDD, RDD[Feature], FeatureRDD] {
def call(v1: CoverageRDD, v2: RDD[Feature]): FeatureRDD = {
ADAMContext.coverageToFeaturesConversionFn(v1, v2)
}
}
final class CoverageToFragmentsConverter extends Function2[CoverageRDD, RDD[Fragment], FragmentRDD] {
def call(v1: CoverageRDD, v2: RDD[Fragment]): FragmentRDD = {
ADAMContext.coverageToFragmentsConversionFn(v1, v2)
}
}
final class CoverageToAlignmentRecordsConverter extends Function2[CoverageRDD, RDD[AlignmentRecord], AlignmentRecordRDD] {
def call(v1: CoverageRDD, v2: RDD[AlignmentRecord]): AlignmentRecordRDD = {
ADAMContext.coverageToAlignmentRecordsConversionFn(v1, v2)
}
}
final class CoverageToGenotypesConverter extends Function2[CoverageRDD, RDD[Genotype], GenotypeRDD] {
def call(v1: CoverageRDD, v2: RDD[Genotype]): GenotypeRDD = {
ADAMContext.coverageToGenotypesConversionFn(v1, v2)
}
}
final class CoverageToVariantsConverter extends Function2[CoverageRDD, RDD[Variant], VariantRDD] {
def call(v1: CoverageRDD, v2: RDD[Variant]): VariantRDD = {
ADAMContext.coverageToVariantsConversionFn(v1, v2)
}
}
final class CoverageToVariantContextConverter extends Function2[CoverageRDD, RDD[VariantContext], VariantContextRDD] {
def call(v1: CoverageRDD, v2: RDD[VariantContext]): VariantContextRDD = {
ADAMContext.coverageToVariantContextConversionFn(v1, v2)
}
}
final class FeaturesToContigsConverter extends Function2[FeatureRDD, RDD[NucleotideContigFragment], NucleotideContigFragmentRDD] {
def call(v1: FeatureRDD, v2: RDD[NucleotideContigFragment]): NucleotideContigFragmentRDD = {
ADAMContext.featuresToContigsConversionFn(v1, v2)
}
}
final class FeaturesToCoverageConverter extends Function2[FeatureRDD, RDD[Coverage], CoverageRDD] {
def call(v1: FeatureRDD, v2: RDD[Coverage]): CoverageRDD = {
ADAMContext.featuresToCoverageConversionFn(v1, v2)
}
}
final class FeaturesToFeatureConverter extends SameTypeConversion[Feature, FeatureRDD] {
}
final class FeaturesToFragmentsConverter extends Function2[FeatureRDD, RDD[Fragment], FragmentRDD] {
def call(v1: FeatureRDD, v2: RDD[Fragment]): FragmentRDD = {
ADAMContext.featuresToFragmentsConversionFn(v1, v2)
}
}
final class FeaturesToAlignmentRecordsConverter extends Function2[FeatureRDD, RDD[AlignmentRecord], AlignmentRecordRDD] {
def call(v1: FeatureRDD, v2: RDD[AlignmentRecord]): AlignmentRecordRDD = {
ADAMContext.featuresToAlignmentRecordsConversionFn(v1, v2)
}
}
final class FeaturesToGenotypesConverter extends Function2[FeatureRDD, RDD[Genotype], GenotypeRDD] {
def call(v1: FeatureRDD, v2: RDD[Genotype]): GenotypeRDD = {
ADAMContext.featuresToGenotypesConversionFn(v1, v2)
}
}
final class FeaturesToVariantsConverter extends Function2[FeatureRDD, RDD[Variant], VariantRDD] {
def call(v1: FeatureRDD, v2: RDD[Variant]): VariantRDD = {
ADAMContext.featuresToVariantsConversionFn(v1, v2)
}
}
final class FeaturesToVariantContextConverter extends Function2[FeatureRDD, RDD[VariantContext], VariantContextRDD] {
def call(v1: FeatureRDD, v2: RDD[VariantContext]): VariantContextRDD = {
ADAMContext.featuresToVariantContextConversionFn(v1, v2)
}
}
final class FragmentsToContigsConverter extends Function2[FragmentRDD, RDD[NucleotideContigFragment], NucleotideContigFragmentRDD] {
def call(v1: FragmentRDD, v2: RDD[NucleotideContigFragment]): NucleotideContigFragmentRDD = {
ADAMContext.fragmentsToContigsConversionFn(v1, v2)
}
}
final class FragmentsToCoverageConverter extends Function2[FragmentRDD, RDD[Coverage], CoverageRDD] {
def call(v1: FragmentRDD, v2: RDD[Coverage]): CoverageRDD = {
ADAMContext.fragmentsToCoverageConversionFn(v1, v2)
}
}
final class FragmentsToFeaturesConverter extends Function2[FragmentRDD, RDD[Feature], FeatureRDD] {
def call(v1: FragmentRDD, v2: RDD[Feature]): FeatureRDD = {
ADAMContext.fragmentsToFeaturesConversionFn(v1, v2)
}
}
final class FragmentsToFragmentConverter extends SameTypeConversion[Fragment, FragmentRDD] {
}
final class FragmentsToAlignmentRecordsConverter extends Function2[FragmentRDD, RDD[AlignmentRecord], AlignmentRecordRDD] {
def call(v1: FragmentRDD, v2: RDD[AlignmentRecord]): AlignmentRecordRDD = {
ADAMContext.fragmentsToAlignmentRecordsConversionFn(v1, v2)
}
}
final class FragmentsToGenotypesConverter extends Function2[FragmentRDD, RDD[Genotype], GenotypeRDD] {
def call(v1: FragmentRDD, v2: RDD[Genotype]): GenotypeRDD = {
ADAMContext.fragmentsToGenotypesConversionFn(v1, v2)
}
}
final class FragmentsToVariantsConverter extends Function2[FragmentRDD, RDD[Variant], VariantRDD] {
def call(v1: FragmentRDD, v2: RDD[Variant]): VariantRDD = {
ADAMContext.fragmentsToVariantsConversionFn(v1, v2)
}
}
final class FragmentsToVariantContextConverter extends Function2[FragmentRDD, RDD[VariantContext], VariantContextRDD] {
def call(v1: FragmentRDD, v2: RDD[VariantContext]): VariantContextRDD = {
ADAMContext.fragmentsToVariantContextConversionFn(v1, v2)
}
}
final class AlignmentRecordsToContigsConverter extends Function2[AlignmentRecordRDD, RDD[NucleotideContigFragment], NucleotideContigFragmentRDD] {
def call(v1: AlignmentRecordRDD, v2: RDD[NucleotideContigFragment]): NucleotideContigFragmentRDD = {
ADAMContext.alignmentRecordsToContigsConversionFn(v1, v2)
}
}
final class AlignmentRecordsToCoverageConverter extends Function2[AlignmentRecordRDD, RDD[Coverage], CoverageRDD] {
def call(v1: AlignmentRecordRDD, v2: RDD[Coverage]): CoverageRDD = {
ADAMContext.alignmentRecordsToCoverageConversionFn(v1, v2)
}
}
final class AlignmentRecordsToFeaturesConverter extends Function2[AlignmentRecordRDD, RDD[Feature], FeatureRDD] {
def call(v1: AlignmentRecordRDD, v2: RDD[Feature]): FeatureRDD = {
ADAMContext.alignmentRecordsToFeaturesConversionFn(v1, v2)
}
}
final class AlignmentRecordsToFragmentsConverter extends Function2[AlignmentRecordRDD, RDD[Fragment], FragmentRDD] {
def call(v1: AlignmentRecordRDD, v2: RDD[Fragment]): FragmentRDD = {
ADAMContext.alignmentRecordsToFragmentsConversionFn(v1, v2)
}
}
final class AlignmentRecordsToAlignmentRecordsConverter extends SameTypeConversion[AlignmentRecord, AlignmentRecordRDD] {
}
final class AlignmentRecordsToGenotypesConverter extends Function2[AlignmentRecordRDD, RDD[Genotype], GenotypeRDD] {
def call(v1: AlignmentRecordRDD, v2: RDD[Genotype]): GenotypeRDD = {
ADAMContext.alignmentRecordsToGenotypesConversionFn(v1, v2)
}
}
final class AlignmentRecordsToVariantsConverter extends Function2[AlignmentRecordRDD, RDD[Variant], VariantRDD] {
def call(v1: AlignmentRecordRDD, v2: RDD[Variant]): VariantRDD = {
ADAMContext.alignmentRecordsToVariantsConversionFn(v1, v2)
}
}
final class AlignmentRecordsToVariantContextConverter extends Function2[AlignmentRecordRDD, RDD[VariantContext], VariantContextRDD] {
def call(v1: AlignmentRecordRDD, v2: RDD[VariantContext]): VariantContextRDD = {
ADAMContext.alignmentRecordsToVariantContextConversionFn(v1, v2)
}
}
final class GenotypesToContigsConverter extends Function2[GenotypeRDD, RDD[NucleotideContigFragment], NucleotideContigFragmentRDD] {
def call(v1: GenotypeRDD, v2: RDD[NucleotideContigFragment]): NucleotideContigFragmentRDD = {
ADAMContext.genotypesToContigsConversionFn(v1, v2)
}
}
final class GenotypesToCoverageConverter extends Function2[GenotypeRDD, RDD[Coverage], CoverageRDD] {
def call(v1: GenotypeRDD, v2: RDD[Coverage]): CoverageRDD = {
ADAMContext.genotypesToCoverageConversionFn(v1, v2)
}
}
final class GenotypesToFeaturesConverter extends Function2[GenotypeRDD, RDD[Feature], FeatureRDD] {
def call(v1: GenotypeRDD, v2: RDD[Feature]): FeatureRDD = {
ADAMContext.genotypesToFeaturesConversionFn(v1, v2)
}
}
final class GenotypesToFragmentsConverter extends Function2[GenotypeRDD, RDD[Fragment], FragmentRDD] {
def call(v1: GenotypeRDD, v2: RDD[Fragment]): FragmentRDD = {
ADAMContext.genotypesToFragmentsConversionFn(v1, v2)
}
}
final class GenotypesToAlignmentRecordsConverter extends Function2[GenotypeRDD, RDD[AlignmentRecord], AlignmentRecordRDD] {
def call(v1: GenotypeRDD, v2: RDD[AlignmentRecord]): AlignmentRecordRDD = {
ADAMContext.genotypesToAlignmentRecordsConversionFn(v1, v2)
}
}
final class GenotypesToGenotypesConverter extends SameTypeConversion[Genotype, GenotypeRDD] {
}
final class GenotypesToVariantsConverter extends Function2[GenotypeRDD, RDD[Variant], VariantRDD] {
def call(v1: GenotypeRDD, v2: RDD[Variant]): VariantRDD = {
ADAMContext.genotypesToVariantsConversionFn(v1, v2)
}
}
final class GenotypesToVariantContextConverter extends Function2[GenotypeRDD, RDD[VariantContext], VariantContextRDD] {
def call(v1: GenotypeRDD, v2: RDD[VariantContext]): VariantContextRDD = {
ADAMContext.genotypesToVariantContextConversionFn(v1, v2)
}
}
final class VariantsToContigsConverter extends Function2[VariantRDD, RDD[NucleotideContigFragment], NucleotideContigFragmentRDD] {
def call(v1: VariantRDD, v2: RDD[NucleotideContigFragment]): NucleotideContigFragmentRDD = {
ADAMContext.variantsToContigsConversionFn(v1, v2)
}
}
final class VariantsToCoverageConverter extends Function2[VariantRDD, RDD[Coverage], CoverageRDD] {
def call(v1: VariantRDD, v2: RDD[Coverage]): CoverageRDD = {
ADAMContext.variantsToCoverageConversionFn(v1, v2)
}
}
final class VariantsToFeaturesConverter extends Function2[VariantRDD, RDD[Feature], FeatureRDD] {
def call(v1: VariantRDD, v2: RDD[Feature]): FeatureRDD = {
ADAMContext.variantsToFeaturesConversionFn(v1, v2)
}
}
final class VariantsToFragmentsConverter extends Function2[VariantRDD, RDD[Fragment], FragmentRDD] {
def call(v1: VariantRDD, v2: RDD[Fragment]): FragmentRDD = {
ADAMContext.variantsToFragmentsConversionFn(v1, v2)
}
}
final class VariantsToAlignmentRecordsConverter extends Function2[VariantRDD, RDD[AlignmentRecord], AlignmentRecordRDD] {
def call(v1: VariantRDD, v2: RDD[AlignmentRecord]): AlignmentRecordRDD = {
ADAMContext.variantsToAlignmentRecordsConversionFn(v1, v2)
}
}
final class VariantsToGenotypesConverter extends Function2[VariantRDD, RDD[Genotype], GenotypeRDD] {
def call(v1: VariantRDD, v2: RDD[Genotype]): GenotypeRDD = {
ADAMContext.variantsToGenotypesConversionFn(v1, v2)
}
}
final class VariantsToVariantsConverter extends SameTypeConversion[Variant, VariantRDD] {
}
final class VariantsToVariantContextConverter extends Function2[VariantRDD, RDD[VariantContext], VariantContextRDD] {
def call(v1: VariantRDD, v2: RDD[VariantContext]): VariantContextRDD = {
ADAMContext.variantsToVariantContextConversionFn(v1, v2)
}
}
final class VariantContextsToContigsConverter extends Function2[VariantContextRDD, RDD[NucleotideContigFragment], NucleotideContigFragmentRDD] {
def call(v1: VariantContextRDD, v2: RDD[NucleotideContigFragment]): NucleotideContigFragmentRDD = {
ADAMContext.variantContextsToContigsConversionFn(v1, v2)
}
}
final class VariantContextsToCoverageConverter extends Function2[VariantContextRDD, RDD[Coverage], CoverageRDD] {
def call(v1: VariantContextRDD, v2: RDD[Coverage]): CoverageRDD = {
ADAMContext.variantContextsToCoverageConversionFn(v1, v2)
}
}
final class VariantContextsToFeaturesConverter extends Function2[VariantContextRDD, RDD[Feature], FeatureRDD] {
def call(v1: VariantContextRDD, v2: RDD[Feature]): FeatureRDD = {
ADAMContext.variantContextsToFeaturesConversionFn(v1, v2)
}
}
final class VariantContextsToFragmentsConverter extends Function2[VariantContextRDD, RDD[Fragment], FragmentRDD] {
def call(v1: VariantContextRDD, v2: RDD[Fragment]): FragmentRDD = {
ADAMContext.variantContextsToFragmentsConversionFn(v1, v2)
}
}
final class VariantContextsToAlignmentRecordsConverter extends Function2[VariantContextRDD, RDD[AlignmentRecord], AlignmentRecordRDD] {
def call(v1: VariantContextRDD, v2: RDD[AlignmentRecord]): AlignmentRecordRDD = {
ADAMContext.variantContextsToAlignmentRecordsConversionFn(v1, v2)
}
}
final class VariantContextsToGenotypesConverter extends Function2[VariantContextRDD, RDD[Genotype], GenotypeRDD] {
def call(v1: VariantContextRDD, v2: RDD[Genotype]): GenotypeRDD = {
ADAMContext.variantContextsToGenotypesConversionFn(v1, v2)
}
}
final class VariantContextsToVariantsConverter extends Function2[VariantContextRDD, RDD[Variant], VariantRDD] {
def call(v1: VariantContextRDD, v2: RDD[Variant]): VariantRDD = {
ADAMContext.variantContextsToVariantsConversionFn(v1, v2)
}
}
final class VariantContextsToVariantContextConverter extends SameTypeConversion[VariantContext, VariantContextRDD] {
}
|
laserson/adam
|
adam-apis/src/main/scala/org/bdgenomics/adam/api/java/GenomicRDDConverters.scala
|
Scala
|
apache-2.0
| 16,773
|
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.avocado.postprocessing
import org.apache.commons.configuration.SubnodeConfiguration
import org.apache.spark.rdd.RDD
import org.bdgenomics.formats.avro.Genotype
import org.bdgenomics.adam.models.VariantContext
import org.bdgenomics.avocado.stats.AvocadoConfigAndStats
private[postprocessing] trait PostprocessingStage {
val stageName: String
def apply(rdd: RDD[VariantContext],
stats: AvocadoConfigAndStats,
config: SubnodeConfiguration): RDD[VariantContext]
}
private[postprocessing] trait GenotypeFilter extends Serializable {
/**
* Abstract method that must be implemented. Implements basic filtering on genotypes that
* are inside a single variant context.
*
* @param genotypes Genotypes to filter.
* @return Filtered genotypes.
*/
def filterGenotypes(genotypes: Seq[Genotype]): Seq[Genotype]
/**
* Applies filtering and creates a new variant context, if called genotypes still exist.
* If all genotypes have been filtered out, then an empty option (None) is returned.
*
* @param vc Variant context on which to filter.
* @return If not all genotypes have been filtered out, a new variant context, else none.
*/
def createNewVC(vc: VariantContext): Option[VariantContext] = {
val filteredGt = filterGenotypes(vc.genotypes.toSeq)
if (filteredGt.length > 0) {
Some(VariantContext.buildFromGenotypes(filteredGt))
} else {
None
}
}
/**
* Applies the filtering described above across a full RDD.
*
* @param rdd RDD of variant contexts.
* @return An RDD containing variant contexts after filtering.
*/
def filter(rdd: RDD[VariantContext]): RDD[VariantContext] = {
rdd.flatMap(vc => createNewVC(vc))
}
}
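// Illustrative sketch, not part of the original avocado source: the simplest useful
// GenotypeFilter is one driven by a caller-supplied predicate. It relies only on the
// members defined above, so it compiles against this file as-is; the predicate itself is
// whatever hard-filter rule the pipeline needs.
private[postprocessing] class PredicateGenotypeFilter(keep: Genotype => Boolean) extends GenotypeFilter {
  def filterGenotypes(genotypes: Seq[Genotype]): Seq[Genotype] = genotypes.filter(keep)
}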
|
tdanford/avocado
|
avocado-core/src/main/scala/org/bdgenomics/avocado/postprocessing/PostprocessingStage.scala
|
Scala
|
apache-2.0
| 2,553
|
package com.esri
import org.apache.spark.SparkConf
import org.slf4j.LoggerFactory
private[esri] abstract class AbstractNumeReader(name: String, index: Int, throwException: Boolean)
extends FieldReader {
val missingSeq: Seq[(String, Any)]
override def readField(splits: Array[String], lineno: Long): Seq[(String, Any)] = {
val aNume = splits(index).toLowerCase
if (aNume.isEmpty)
missingSeq
else if (aNume.startsWith("null"))
missingSeq
else if (aNume.startsWith("undefined"))
missingSeq
else
try {
Seq((name, aNume.toInt))
} catch {
case t: Throwable => {
log.error(s"Cannot parse $aNume for field $name at line $lineno")
if (throwException)
throw t
else
missingSeq
}
}
}
}
class NumeReader(name: String, index: Int, throwException: Boolean)
extends AbstractNumeReader(name, index, throwException) {
override val missingSeq = Seq.empty
}
class NumeMissingReader(name: String, index: Int, throwException: Boolean, missing: Int)
extends AbstractNumeReader(name, index, throwException) {
override val missingSeq = Seq(name -> missing)
}
class NumeReaderFactory(name: String, index: Int, throwException: Boolean)
extends FieldReaderFactory {
override def createFieldReader(): FieldReader = {
new NumeReader(name, index, throwException)
}
}
class NumeMissingReaderFactory(name: String, index: Int, throwException: Boolean, missing: Int)
extends FieldReaderFactory {
override def createFieldReader(): FieldReader = {
new NumeMissingReader(name, index, throwException, missing)
}
}
object NumeReaderFactory extends Serializable {
@transient lazy val log = LoggerFactory.getLogger(getClass.getName)
def apply(splits: Array[String], conf: SparkConf): FieldReaderFactory = {
val throwException = conf.getBoolean("error.exception", true)
splits match {
case Array(_, name, index) => new NumeReaderFactory(name, index.toInt, throwException)
case Array(_, name, index, missing) => new NumeMissingReaderFactory(name, index.toInt, throwException, missing.toInt)
case _ => {
log.warn("Skipping field - Invalid parameters {}", splits.mkString(","))
NoopReaderFactory()
}
}
}
}
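// Usage sketch, not part of the original source: how a tokenized field definition picks a
// factory. The sample tokens ("nume", "pop", column index 2, missing value -1) are made-up
// values for illustration only.
object NumeReaderFactoryExample {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().set("error.exception", "false")
    val plain = NumeReaderFactory(Array("nume", "pop", "2"), conf)             // NumeReaderFactory
    val withDefault = NumeReaderFactory(Array("nume", "pop", "2", "-1"), conf) // NumeMissingReaderFactory
    println(s"${plain.getClass.getSimpleName} / ${withDefault.getClass.getSimpleName}")
  }
}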
|
mraad/spark-csv-es
|
src/main/scala/com/esri/NumeReaderFactory.scala
|
Scala
|
apache-2.0
| 2,298
|
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.core.model
import munit.FunSuite
class DesSuite extends FunSuite {
val step = 60000L
val dataTags = Map("name" -> "cpu", "node" -> "i-1")
val alignedStream = List(
List(Datapoint(dataTags, 0L * step, 1.0)),
List(Datapoint(dataTags, 1L * step, 1.5)),
List(Datapoint(dataTags, 2L * step, 1.6)),
List(Datapoint(dataTags, 3L * step, 1.7)),
List(Datapoint(dataTags, 4L * step, 1.4)),
List(Datapoint(dataTags, 5L * step, 1.3)),
List(Datapoint(dataTags, 6L * step, 1.2)),
List(Datapoint(dataTags, 7L * step, 1.0)),
List(Datapoint(dataTags, 8L * step, 0.0)),
List(Datapoint(dataTags, 9L * step, 0.0)),
List(Datapoint(dataTags, 10L * step, 1.0)),
List(Datapoint(dataTags, 11L * step, 1.1)),
List(Datapoint(dataTags, 12L * step, 1.2)),
List(Datapoint(dataTags, 13L * step, 1.2))
)
val alignedInputTS = TimeSeries(
dataTags,
new ArrayTimeSeq(
DsType.Gauge,
0L,
step,
Array[Double](1.0, 1.5, 1.6, 1.7, 1.4, 1.3, 1.2, 1.0, 0.0, 0.0, 1.0, 1.1, 1.2, 1.2)
)
)
val unalignedStream = List(
List(Datapoint(dataTags, 1L * step, 1.5)),
List(Datapoint(dataTags, 2L * step, 1.6)),
List(Datapoint(dataTags, 3L * step, 1.7)),
List(Datapoint(dataTags, 4L * step, 1.4)),
List(Datapoint(dataTags, 5L * step, 1.3)),
List(Datapoint(dataTags, 6L * step, 1.2)),
List(Datapoint(dataTags, 7L * step, 1.0)),
List(Datapoint(dataTags, 8L * step, 0.0)),
List(Datapoint(dataTags, 9L * step, 0.0)),
List(Datapoint(dataTags, 10L * step, 1.0)),
List(Datapoint(dataTags, 11L * step, 1.1)),
List(Datapoint(dataTags, 12L * step, 1.2)),
List(Datapoint(dataTags, 13L * step, 1.2))
)
val unalignedInputTS = TimeSeries(
dataTags,
new ArrayTimeSeq(
DsType.Gauge,
1L * step,
step,
Array[Double](1.5, 1.6, 1.7, 1.4, 1.3, 1.2, 1.0, 0.0, 0.0, 1.0, 1.1, 1.2, 1.2)
)
)
val des = StatefulExpr.Des(DataExpr.Sum(Query.Equal("name", "cpu")), 2, 0.1, 0.02)
val sdes = StatefulExpr.SlidingDes(DataExpr.Sum(Query.Equal("name", "cpu")), 2, 0.1, 0.02)
def eval(expr: TimeSeriesExpr, data: List[List[Datapoint]]): List[List[TimeSeries]] = {
var state = Map.empty[StatefulExpr, Any]
data.map { ts =>
val t = ts.head.timestamp
val context = EvalContext(t, t + step, step, state)
val rs = expr.eval(context, ts)
state = rs.state
rs.data
}
}
test("des: incremental exec matches global") {
val s = 0L
val e = 14L * step
val context = EvalContext(s, e, step, Map.empty)
val expected = des.eval(context, List(alignedInputTS)).data.head.data.bounded(s, e).data
val result = eval(des, alignedStream)
result.zip(expected).zipWithIndex.foreach {
case ((ts, v), i) =>
assertEquals(ts.size, 1)
ts.foreach { t =>
val r = t.data(i * step)
if (i <= 1)
assert(r.isNaN)
else
assertEqualsDouble(v, r, 0.00001)
}
}
}
test("sdes: aligned incremental exec matches global") {
val s = 0L
val e = 14L * step
val context = EvalContext(s, e, step, Map.empty)
val expected = sdes.eval(context, List(alignedInputTS)).data.head.data.bounded(s, e).data
val result = eval(sdes, alignedStream)
result.zip(expected).zipWithIndex.foreach {
case ((ts, v), i) =>
assertEquals(ts.size, 1)
ts.foreach { t =>
val r = t.data(i * step)
if (i <= 1)
assert(r.isNaN)
else
assertEqualsDouble(v, r, 0.00001)
}
}
}
test("sdes: unaligned incremental exec matches global") {
val s = 1L * step // offset by one step, half a training window used
val e = 14L * step
val context = EvalContext(s, e, step, Map.empty)
val expected = sdes.eval(context, List(unalignedInputTS)).data.head.data.bounded(s, e).data
val result = eval(sdes, unalignedStream)
//println(expected.mkString(", "))
//println(result.map { case v => v(0).data.asInstanceOf[ArrayTimeSeq].data(0) }.mkString(", "))
result.zip(expected).zipWithIndex.foreach {
case ((ts, v), i) =>
assertEquals(ts.size, 1)
ts.foreach { t =>
val r = t.data((i + 1) * step) // offset step by our skipped data
if (i <= 2)
assert(r.isNaN)
else
assertEqualsDouble(v, r, 0.00001)
}
}
}
}
|
Netflix/atlas
|
atlas-core/src/test/scala/com/netflix/atlas/core/model/DesSuite.scala
|
Scala
|
apache-2.0
| 5,058
|
/*
* Copyright 2016 Nicolas Rinaudo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kantan.codecs.enumeratum.laws
package object discipline extends kantan.codecs.laws.discipline.DisciplinePackage
|
nrinaudo/kantan.codecs
|
enumeratum/laws/shared/src/main/scala/kantan/codecs/enumeratum/laws/discipline/package.scala
|
Scala
|
apache-2.0
| 721
|
package cn.gridx.scala.lang.types.traits
/**
* Created by tao on 11/20/15.
*/
trait Vehicle {
def getBrand():String
def getType(): String = "Type <vehicle>"
def getProduct():String
}
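// Illustrative only, not part of the original file: a concrete Vehicle supplies the two
// abstract members and inherits the default getType; the brand/product strings are
// placeholder values.
class ExampleCar extends Vehicle {
  def getBrand(): String = "ExampleBrand"
  def getProduct(): String = "ExampleModel"
}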
|
TaoXiao/Scala
|
lang/src/main/scala/cn/gridx/scala/lang/types/traits/Vehicle.scala
|
Scala
|
apache-2.0
| 198
|
package com.eevolution.context.dictionary.domain.api.repository
import com.eevolution.context.dictionary._
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: emeris.hernandez@e-evolution.com, http://www.e-evolution.com , http://github.com/EmerisScala
* Created by emeris.hernandez@e-evolution.com , www.e-evolution.com on 06/11/17.
*/
trait WorkflowBlockRepository [WorkflowBlock , Int] extends api.Repostory [WorkflowBlock , Int] {
}
|
adempiere/ADReactiveSystem
|
dictionary-api/src/main/scala/com/eevolution/context/dictionary/domain/api/repository/WorkflowBlockRepository.scala
|
Scala
|
gpl-3.0
| 1,142
|
package org.homermultitext.edmodel
import org.scalatest._
class MidOrthographySpec extends FlatSpec {
"The library" should "include case objects for all edition types" in {
    assert(HmtNamedEntityEdition.label == "named entities")
assert (HmtDiplomaticEdition.label == "diplomatic edition")
assert (HmtScribalNormalizedEdition.label == "scribal edition")
assert (HmtEditorsNormalizedEdition.label == "editorially normalized edition")
}
}
|
homermultitext/edmodel
|
src/test/scala/org/homermultitext/edmodel/MidOrthographySpec.scala
|
Scala
|
gpl-3.0
| 533
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler.cluster.mesos
import java.nio.ByteBuffer
import java.util
import java.util.Collections
import java.util.Arrays
import scala.collection.JavaConversions._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import org.apache.mesos.Protos.Value.Scalar
import org.apache.mesos.Protos._
import org.apache.mesos.SchedulerDriver
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.mockito.{ArgumentCaptor, Matchers}
import org.scalatest.mock.MockitoSugar
import org.apache.spark.executor.MesosExecutorBackend
import org.apache.spark.scheduler.cluster.ExecutorInfo
import org.apache.spark.scheduler.{LiveListenerBus, SparkListenerExecutorAdded,
TaskDescription, TaskSchedulerImpl, WorkerOffer}
import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite}
class MesosSchedulerBackendSuite extends SparkFunSuite with LocalSparkContext with MockitoSugar {
test("Use configured mesosExecutor.cores for ExecutorInfo") {
val mesosExecutorCores = 3
val conf = new SparkConf
conf.set("spark.mesos.mesosExecutor.cores", mesosExecutorCores.toString)
val listenerBus = mock[LiveListenerBus]
listenerBus.post(
SparkListenerExecutorAdded(anyLong, "s1", new ExecutorInfo("host1", 2, Map.empty)))
val sc = mock[SparkContext]
when(sc.getSparkHome()).thenReturn(Option("/spark-home"))
when(sc.conf).thenReturn(conf)
when(sc.executorEnvs).thenReturn(new mutable.HashMap[String, String])
when(sc.executorMemory).thenReturn(100)
when(sc.listenerBus).thenReturn(listenerBus)
val taskScheduler = mock[TaskSchedulerImpl]
when(taskScheduler.CPUS_PER_TASK).thenReturn(2)
val mesosSchedulerBackend = new MesosSchedulerBackend(taskScheduler, sc, "master")
val resources = Arrays.asList(
mesosSchedulerBackend.createResource("cpus", 4),
mesosSchedulerBackend.createResource("mem", 1024))
// uri is null.
val (executorInfo, _) = mesosSchedulerBackend.createExecutorInfo(resources, "test-id")
val executorResources = executorInfo.getResourcesList
val cpus = executorResources.find(_.getName.equals("cpus")).get.getScalar.getValue
assert(cpus === mesosExecutorCores)
}
test("check spark-class location correctly") {
val conf = new SparkConf
conf.set("spark.mesos.executor.home" , "/mesos-home")
val listenerBus = mock[LiveListenerBus]
listenerBus.post(
SparkListenerExecutorAdded(anyLong, "s1", new ExecutorInfo("host1", 2, Map.empty)))
val sc = mock[SparkContext]
when(sc.getSparkHome()).thenReturn(Option("/spark-home"))
when(sc.conf).thenReturn(conf)
when(sc.executorEnvs).thenReturn(new mutable.HashMap[String, String])
when(sc.executorMemory).thenReturn(100)
when(sc.listenerBus).thenReturn(listenerBus)
val taskScheduler = mock[TaskSchedulerImpl]
when(taskScheduler.CPUS_PER_TASK).thenReturn(2)
val mesosSchedulerBackend = new MesosSchedulerBackend(taskScheduler, sc, "master")
val resources = List(
mesosSchedulerBackend.createResource("cpus", 4),
mesosSchedulerBackend.createResource("mem", 1024))
// uri is null.
val (executorInfo, _) = mesosSchedulerBackend.createExecutorInfo(resources, "test-id")
assert(executorInfo.getCommand.getValue ===
s" /mesos-home/bin/spark-class ${classOf[MesosExecutorBackend].getName}")
// uri exists.
conf.set("spark.executor.uri", "hdfs:///test-app-1.0.0.tgz")
val (executorInfo1, _) = mesosSchedulerBackend.createExecutorInfo(resources, "test-id")
assert(executorInfo1.getCommand.getValue ===
s"cd test-app-1*; ./bin/spark-class ${classOf[MesosExecutorBackend].getName}")
}
test("spark docker properties correctly populate the DockerInfo message") {
val taskScheduler = mock[TaskSchedulerImpl]
val conf = new SparkConf()
.set("spark.mesos.executor.docker.image", "spark/mock")
.set("spark.mesos.executor.docker.volumes", "/a,/b:/b,/c:/c:rw,/d:ro,/e:/e:ro")
.set("spark.mesos.executor.docker.portmaps", "80:8080,53:53:tcp")
val listenerBus = mock[LiveListenerBus]
listenerBus.post(
SparkListenerExecutorAdded(anyLong, "s1", new ExecutorInfo("host1", 2, Map.empty)))
val sc = mock[SparkContext]
when(sc.executorMemory).thenReturn(100)
when(sc.getSparkHome()).thenReturn(Option("/spark-home"))
when(sc.executorEnvs).thenReturn(new mutable.HashMap[String, String])
when(sc.conf).thenReturn(conf)
when(sc.listenerBus).thenReturn(listenerBus)
val backend = new MesosSchedulerBackend(taskScheduler, sc, "master")
val (execInfo, _) = backend.createExecutorInfo(
List(backend.createResource("cpus", 4)), "mockExecutor")
assert(execInfo.getContainer.getDocker.getImage.equals("spark/mock"))
val portmaps = execInfo.getContainer.getDocker.getPortMappingsList
assert(portmaps.get(0).getHostPort.equals(80))
assert(portmaps.get(0).getContainerPort.equals(8080))
assert(portmaps.get(0).getProtocol.equals("tcp"))
assert(portmaps.get(1).getHostPort.equals(53))
assert(portmaps.get(1).getContainerPort.equals(53))
assert(portmaps.get(1).getProtocol.equals("tcp"))
val volumes = execInfo.getContainer.getVolumesList
assert(volumes.get(0).getContainerPath.equals("/a"))
assert(volumes.get(0).getMode.equals(Volume.Mode.RW))
assert(volumes.get(1).getContainerPath.equals("/b"))
assert(volumes.get(1).getHostPath.equals("/b"))
assert(volumes.get(1).getMode.equals(Volume.Mode.RW))
assert(volumes.get(2).getContainerPath.equals("/c"))
assert(volumes.get(2).getHostPath.equals("/c"))
assert(volumes.get(2).getMode.equals(Volume.Mode.RW))
assert(volumes.get(3).getContainerPath.equals("/d"))
assert(volumes.get(3).getMode.equals(Volume.Mode.RO))
assert(volumes.get(4).getContainerPath.equals("/e"))
assert(volumes.get(4).getHostPath.equals("/e"))
assert(volumes.get(4).getMode.equals(Volume.Mode.RO))
}
test("mesos resource offers result in launching tasks") {
def createOffer(id: Int, mem: Int, cpu: Int): Offer = {
val builder = Offer.newBuilder()
builder.addResourcesBuilder()
.setName("mem")
.setType(Value.Type.SCALAR)
.setScalar(Scalar.newBuilder().setValue(mem))
builder.addResourcesBuilder()
.setName("cpus")
.setType(Value.Type.SCALAR)
.setScalar(Scalar.newBuilder().setValue(cpu))
builder.setId(OfferID.newBuilder().setValue(s"o${id.toString}").build())
.setFrameworkId(FrameworkID.newBuilder().setValue("f1"))
.setSlaveId(SlaveID.newBuilder().setValue(s"s${id.toString}"))
.setHostname(s"host${id.toString}").build()
}
val driver = mock[SchedulerDriver]
val taskScheduler = mock[TaskSchedulerImpl]
val listenerBus = mock[LiveListenerBus]
listenerBus.post(
SparkListenerExecutorAdded(anyLong, "s1", new ExecutorInfo("host1", 2, Map.empty)))
val sc = mock[SparkContext]
when(sc.executorMemory).thenReturn(100)
when(sc.getSparkHome()).thenReturn(Option("/path"))
when(sc.executorEnvs).thenReturn(new mutable.HashMap[String, String])
when(sc.conf).thenReturn(new SparkConf)
when(sc.listenerBus).thenReturn(listenerBus)
val backend = new MesosSchedulerBackend(taskScheduler, sc, "master")
val minMem = backend.calculateTotalMemory(sc)
val minCpu = 4
val mesosOffers = new java.util.ArrayList[Offer]
mesosOffers.add(createOffer(1, minMem, minCpu))
mesosOffers.add(createOffer(2, minMem - 1, minCpu))
mesosOffers.add(createOffer(3, minMem, minCpu))
val expectedWorkerOffers = new ArrayBuffer[WorkerOffer](2)
expectedWorkerOffers.append(new WorkerOffer(
mesosOffers.get(0).getSlaveId.getValue,
mesosOffers.get(0).getHostname,
(minCpu - backend.mesosExecutorCores).toInt
))
expectedWorkerOffers.append(new WorkerOffer(
mesosOffers.get(2).getSlaveId.getValue,
mesosOffers.get(2).getHostname,
(minCpu - backend.mesosExecutorCores).toInt
))
val taskDesc = new TaskDescription(1L, 0, "s1", "n1", 0, ByteBuffer.wrap(new Array[Byte](0)))
when(taskScheduler.resourceOffers(expectedWorkerOffers)).thenReturn(Seq(Seq(taskDesc)))
when(taskScheduler.CPUS_PER_TASK).thenReturn(2)
val capture = ArgumentCaptor.forClass(classOf[util.Collection[TaskInfo]])
when(
driver.launchTasks(
Matchers.eq(Collections.singleton(mesosOffers.get(0).getId)),
capture.capture(),
any(classOf[Filters])
)
).thenReturn(Status.valueOf(1))
when(driver.declineOffer(mesosOffers.get(1).getId)).thenReturn(Status.valueOf(1))
when(driver.declineOffer(mesosOffers.get(2).getId)).thenReturn(Status.valueOf(1))
backend.resourceOffers(driver, mesosOffers)
verify(driver, times(1)).launchTasks(
Matchers.eq(Collections.singleton(mesosOffers.get(0).getId)),
capture.capture(),
any(classOf[Filters])
)
verify(driver, times(1)).declineOffer(mesosOffers.get(1).getId)
verify(driver, times(1)).declineOffer(mesosOffers.get(2).getId)
assert(capture.getValue.size() === 1)
val taskInfo = capture.getValue.iterator().next()
assert(taskInfo.getName.equals("n1"))
val cpus = taskInfo.getResourcesList.get(0)
assert(cpus.getName.equals("cpus"))
assert(cpus.getScalar.getValue.equals(2.0))
assert(taskInfo.getSlaveId.getValue.equals("s1"))
// Unwanted resources offered on an existing node. Make sure they are declined
val mesosOffers2 = new java.util.ArrayList[Offer]
mesosOffers2.add(createOffer(1, minMem, minCpu))
reset(taskScheduler)
reset(driver)
when(taskScheduler.resourceOffers(any(classOf[Seq[WorkerOffer]]))).thenReturn(Seq(Seq()))
when(taskScheduler.CPUS_PER_TASK).thenReturn(2)
when(driver.declineOffer(mesosOffers2.get(0).getId)).thenReturn(Status.valueOf(1))
backend.resourceOffers(driver, mesosOffers2)
verify(driver, times(1)).declineOffer(mesosOffers2.get(0).getId)
}
test("can handle multiple roles") {
val driver = mock[SchedulerDriver]
val taskScheduler = mock[TaskSchedulerImpl]
val listenerBus = mock[LiveListenerBus]
listenerBus.post(
SparkListenerExecutorAdded(anyLong, "s1", new ExecutorInfo("host1", 2, Map.empty)))
val sc = mock[SparkContext]
when(sc.executorMemory).thenReturn(100)
when(sc.getSparkHome()).thenReturn(Option("/path"))
when(sc.executorEnvs).thenReturn(new mutable.HashMap[String, String])
when(sc.conf).thenReturn(new SparkConf)
when(sc.listenerBus).thenReturn(listenerBus)
val id = 1
val builder = Offer.newBuilder()
builder.addResourcesBuilder()
.setName("mem")
.setType(Value.Type.SCALAR)
.setRole("prod")
.setScalar(Scalar.newBuilder().setValue(500))
builder.addResourcesBuilder()
.setName("cpus")
.setRole("prod")
.setType(Value.Type.SCALAR)
.setScalar(Scalar.newBuilder().setValue(1))
builder.addResourcesBuilder()
.setName("mem")
.setRole("dev")
.setType(Value.Type.SCALAR)
.setScalar(Scalar.newBuilder().setValue(600))
builder.addResourcesBuilder()
.setName("cpus")
.setRole("dev")
.setType(Value.Type.SCALAR)
.setScalar(Scalar.newBuilder().setValue(2))
val offer = builder.setId(OfferID.newBuilder().setValue(s"o${id.toString}").build())
.setFrameworkId(FrameworkID.newBuilder().setValue("f1"))
.setSlaveId(SlaveID.newBuilder().setValue(s"s${id.toString}"))
.setHostname(s"host${id.toString}").build()
val mesosOffers = new java.util.ArrayList[Offer]
mesosOffers.add(offer)
val backend = new MesosSchedulerBackend(taskScheduler, sc, "master")
val expectedWorkerOffers = new ArrayBuffer[WorkerOffer](1)
expectedWorkerOffers.append(new WorkerOffer(
mesosOffers.get(0).getSlaveId.getValue,
mesosOffers.get(0).getHostname,
2 // Deducting 1 for executor
))
val taskDesc = new TaskDescription(1L, 0, "s1", "n1", 0, ByteBuffer.wrap(new Array[Byte](0)))
when(taskScheduler.resourceOffers(expectedWorkerOffers)).thenReturn(Seq(Seq(taskDesc)))
when(taskScheduler.CPUS_PER_TASK).thenReturn(1)
val capture = ArgumentCaptor.forClass(classOf[util.Collection[TaskInfo]])
when(
driver.launchTasks(
Matchers.eq(Collections.singleton(mesosOffers.get(0).getId)),
capture.capture(),
any(classOf[Filters])
)
).thenReturn(Status.valueOf(1))
backend.resourceOffers(driver, mesosOffers)
verify(driver, times(1)).launchTasks(
Matchers.eq(Collections.singleton(mesosOffers.get(0).getId)),
capture.capture(),
any(classOf[Filters])
)
assert(capture.getValue.size() === 1)
val taskInfo = capture.getValue.iterator().next()
assert(taskInfo.getName.equals("n1"))
assert(taskInfo.getResourcesCount === 1)
val cpusDev = taskInfo.getResourcesList.get(0)
assert(cpusDev.getName.equals("cpus"))
assert(cpusDev.getScalar.getValue.equals(1.0))
assert(cpusDev.getRole.equals("dev"))
val executorResources = taskInfo.getExecutor.getResourcesList
assert(executorResources.exists { r =>
r.getName.equals("mem") && r.getScalar.getValue.equals(484.0) && r.getRole.equals("prod")
})
assert(executorResources.exists { r =>
r.getName.equals("cpus") && r.getScalar.getValue.equals(1.0) && r.getRole.equals("prod")
})
}
}
|
ArvinDevel/onlineAggregationOnSparkV2
|
core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackendSuite.scala
|
Scala
|
apache-2.0
| 14,298
|
// scalac: -Xsource:3
class K { def x(y: Int) = 0 }
class Test {
def bad = {
(new K)
x 42
}
}
|
scala/scala
|
test/files/neg/infixed.scala
|
Scala
|
apache-2.0
| 108
|
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.index
import org.apache.accumulo.core.security.Authorizations
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo.TestWithDataStore
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.index.index.z3.Z3Index
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes.Configs.IndexZShards
import org.locationtech.geomesa.utils.index.GeoMesaSchemaValidator
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
@RunWith(classOf[JUnitRunner])
class ConfigureShardsTest extends Specification with TestWithDataStore {
sequential
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
val spec = "name:String,dtg:Date,*geom:Point:srid=4326;geomesa.z.splits='8'"
val features: Seq[ScalaSimpleFeature] = {
(0 until 100).map { i =>
val sf = new ScalaSimpleFeature(sft, s"$i")
i match {
case a if a < 24 => sf.setAttributes(Array[AnyRef](s"name$i", s"2010-05-07T$i:00:00.000Z",
s"POINT(40 $i)"))
case b if b < 48 => sf.setAttributes(Array[AnyRef](s"name$i", s"2010-05-08T$i:00:00.000Z",
s"POINT(40 ${i - 24})"))
case c if c < 72 => sf.setAttributes(Array[AnyRef](s"name$i", s"2010-05-09T$i:00:00.000Z",
s"POINT(40 ${i - 48})"))
case d if d < 96 => sf.setAttributes(Array[AnyRef](s"name$i", s"2010-05-10T$i:00:00.000Z",
s"POINT(40 ${i - 72})"))
case e => sf.setAttributes(Array[AnyRef](s"name$i", s"2010-05-11T$i:00:00.000Z",
s"POINT(40 ${i - 96})"))
}
sf
}
}
"Indexes" should {
"configure from spec" >> {
addFeatures(features)
var shardSet: Set[Long] = Set[Long]()
val index = ds.manager.indices(sft).find(_.name == Z3Index.name)
index must beSome
index.get.getTableNames().foreach { table =>
ds.connector.createScanner(table, new Authorizations()).foreach { r =>
val bytes = r.getKey.getRow.getBytes
val shard = bytes(0).toInt
shardSet = shardSet + shard
}
}
shardSet must haveSize(8)
}
"throw exception" >> {
val sftPrivate = SimpleFeatureTypes.createType("private", spec)
sftPrivate.getUserData.put(IndexZShards, "128")
GeoMesaSchemaValidator.validate(sftPrivate) must throwAn[IllegalArgumentException]
}
}
}
|
elahrvivaz/geomesa
|
geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/index/ConfigureShardsTest.scala
|
Scala
|
apache-2.0
| 3,010
|
/**
* Copyright (C) 2016 Nicola Justus <nicola.justus@mni.thm.de>
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package de.thm.move.views.dialogs
import java.io.{PrintWriter, StringWriter}
import javafx.scene.control.Alert.AlertType
import javafx.scene.control._
import javafx.scene.layout.{GridPane, Priority}
import javafx.stage.FileChooser
import de.thm.move.Global._
import de.thm.move.implicits.FxHandlerImplicits._
import de.thm.move.util.converters.StringMarshaller
object Dialogs {
val allFilesFilter = new FileChooser.ExtensionFilter("All files", "*.*")
val moFileFilter = new FileChooser.ExtensionFilter("Modelica files (*.mo)", "*.mo")
val svgFileFilter = new FileChooser.ExtensionFilter("Svg files (*.svg)", "*.svg")
val pngFileFilter = new FileChooser.ExtensionFilter("Png files (*.png)", "*.png")
val bitmapFileFilter = new FileChooser.ExtensionFilter(
"Image files (jpg,jpeg,png,gif,bmp)", "*.jpg",
"*.jpeg","*.png","*.gif", "*.bmp")
def newExceptionDialog(ex:Throwable, aditionalInfo:String=""): Alert = {
val alert = new Alert(AlertType.ERROR)
alert.setTitle(fontBundle.getString("alert.exc.title"))
alert.setHeaderText(fontBundle.getString("alert.exc.header"))
alert.setContentText(s"${ex.getMessage} $aditionalInfo")
// Create expandable Exception.
val sw = new StringWriter()
val pw = new PrintWriter(sw)
ex.printStackTrace(pw)
val exceptionText = sw.toString()
val label = new Label(fontBundle.getString("alert.exc.stacktrace"))
val textArea = new TextArea(exceptionText)
textArea.setEditable(false)
textArea.setWrapText(true)
textArea.setMaxWidth(Double.MaxValue)
textArea.setMaxHeight(Double.MaxValue)
GridPane.setVgrow(textArea, Priority.ALWAYS)
GridPane.setHgrow(textArea, Priority.ALWAYS)
val expContent = new GridPane()
expContent.setMaxWidth(Double.MaxValue)
expContent.add(label, 0, 0)
expContent.add(textArea, 0, 1)
// Set expandable Exception into the dialog pane.
alert.getDialogPane().setExpandableContent(expContent)
alert
}
def newScaleDialog()(implicit marshaller:StringMarshaller[Int]): InputDialog[Int] = {
val dialog = new InputDialog(fontBundle.getString("scaledialog.inputfield") -> Some(1))
dialog.setTitle(fontBundle.getString("scaledialog.title"))
dialog.setHeaderText(fontBundle.getString("scaledialog.header"))
dialog.setContentText(fontBundle.getString("scaledialog.content"))
dialog
}
def newPaperSizeDialog(width:Double,height:Double)(implicit marshaller:StringMarshaller[Double]): InputDialog[Double] = {
val dialog = new InputDialog(fontBundle.getString("inputfield-width") -> Some(width), fontBundle.getString("papersizedialog.inputfield-height") -> Some(height))
dialog.setTitle(fontBundle.getString("papersizedialog.title"))
dialog.setHeaderText(fontBundle.getString("papersizedialog.header"))
dialog
}
def newGridSizeDialog(cellSize:Int)(implicit marshaller:StringMarshaller[Int]): InputDialog[Int] = {
val dialog = new InputDialog(fontBundle.getString("gridsizedialog.inputfield") -> Some(cellSize))
dialog.setTitle(fontBundle.getString("gridsizedialog.title"))
dialog.setHeaderText(fontBundle.getString("gridsizedialog.header"))
dialog
}
def newErrorDialog(msg:String): Alert = {
val dialog = new Alert(AlertType.ERROR)
dialog.setTitle(fontBundle.getString("alert.error.title"))
dialog.setHeaderText(fontBundle.getString("alert.error.header"))
dialog.setContentText(msg)
dialog
}
def newWarnDialog(msg:String): Alert = {
val dialog = new Alert(AlertType.WARNING)
dialog.setTitle(fontBundle.getString("alert.warning.title"))
dialog.setHeaderText(msg)
dialog
}
private def newFileChooser(
selectedFilter:FileChooser.ExtensionFilter)(
fileFilters:FileChooser.ExtensionFilter*): FileChooser = {
val chooser = new FileChooser()
chooser.getExtensionFilters().addAll(fileFilters:_*)
chooser.setSelectedExtensionFilter(selectedFilter)
chooser
}
def newModelicaFileChooser(): FileChooser =
newFileChooser(moFileFilter)(allFilesFilter, moFileFilter)
def newBitmapFileChooser(): FileChooser =
newFileChooser(bitmapFileFilter)(allFilesFilter, bitmapFileFilter)
def newSvgFileChooser(): FileChooser = newFileChooser(svgFileFilter)(allFilesFilter, svgFileFilter)
def newPngFileChooser(): FileChooser = newFileChooser(pngFileFilter)(allFilesFilter, pngFileFilter)
def newConfirmationDialog(additionalInfo:String = ""):Alert = {
val alert = new Alert(AlertType.CONFIRMATION)
alert.setTitle(fontBundle.getString("alert.confirmation.title"))
alert.setHeaderText(fontBundle.getString("alert.confirmation.header")+s"\n$additionalInfo")
alert
}
def newListDialog[A](xs:List[A], aditionalInfo:String=""): Alert = {
val alert = new Alert(AlertType.WARNING)
alert.setTitle(fontBundle.getString("alert.warning.title"))
alert.setHeaderText(aditionalInfo)
// Create expandable Exception.
val label = new Label(fontBundle.getString("alert.warning.list"))
val text = xs.mkString("\n")
val textArea = new TextArea(text)
textArea.setEditable(false)
textArea.setWrapText(true)
textArea.setMaxWidth(Double.MaxValue)
textArea.setMaxHeight(Double.MaxValue)
GridPane.setVgrow(textArea, Priority.ALWAYS)
GridPane.setHgrow(textArea, Priority.ALWAYS)
val listContent = new GridPane()
listContent.setMaxWidth(Double.MaxValue)
listContent.add(label, 0, 0)
listContent.add(textArea, 0, 1)
// Set expandable Exception into the dialog pane.
alert.getDialogPane.setExpandableContent(listContent)
alert.getDialogPane.setExpanded(true)
alert
}
}
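// Usage sketch, assumed rather than taken from MoVE: wrap a risky action and surface any
// failure through newExceptionDialog. showAndWait() blocks until the user closes the
// dialog and requires a running JavaFX toolkit.
object DialogsUsageExample {
  def reportingErrors[A](description: String)(action: => A): Option[A] =
    try Some(action) catch {
      case ex: Exception =>
        Dialogs.newExceptionDialog(ex, description).showAndWait()
        None
    }
}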
|
THM-MoTE/MoVE
|
src/main/scala/de/thm/move/views/dialogs/Dialogs.scala
|
Scala
|
mpl-2.0
| 5,935
|
package lila.report
import play.api.data.Form
import play.api.data.Forms.{ single, text }
import lila.common.Ints
import lila.memo.SettingStore.{ Formable, StringReader }
case class ScoreThresholds(mid: Int, high: Int)
private case class Thresholds(score: () => ScoreThresholds, discord: () => Int)
private object ReportThresholds {
private val defaultScoreThresholds = ScoreThresholds(40, 50)
val thresholdsIso = lila.common.Iso
.ints(",")
.map[ScoreThresholds](
{
case Ints(List(m, h)) => ScoreThresholds(m, h)
case _ => defaultScoreThresholds
},
t => Ints(List(t.mid, t.high))
)
implicit val scoreThresholdsBsonHandler = lila.db.dsl.isoHandler(thresholdsIso)
implicit val scoreThresholdsStringReader = StringReader.fromIso(thresholdsIso)
implicit val scoreThresholdsFormable =
new Formable[ScoreThresholds](t => Form(single("v" -> text)) fill thresholdsIso.to(t))
def makeScoreSetting(store: lila.memo.SettingStore.Builder) =
store[ScoreThresholds](
"reportScoreThresholds",
default = defaultScoreThresholds,
text = "Report score mid and high thresholds, separated with a comma.".some
)
def makeDiscordSetting(store: lila.memo.SettingStore.Builder) =
store[Int](
"discordScoreThreshold",
default = 80,
text = "Discord score threshold. Comm reports with higher scores are notified in Discord".some
)
}
|
luanlv/lila
|
modules/report/src/main/ReportThresholds.scala
|
Scala
|
mit
| 1,446
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.aliyun.helper
import org.apache.hadoop.io.compress.CompressionCodec
import org.apache.hadoop.io.{NullWritable, Text}
import org.apache.hadoop.mapred.TextOutputFormat
import org.apache.spark.rdd.RDD
import scala.reflect.ClassTag
/**
* Extend RDD to support more functions.
*/
class EMRExtendRDD[T](rdd: RDD[T]) {
def saveAsTextFileWithEncoding(path: String, encoding: String): Unit = {
val nullWritableClassTag = implicitly[ClassTag[NullWritable]]
val textClassTag = implicitly[ClassTag[Text]]
val r = rdd.mapPartitions { iter =>
val text = new Text()
iter.map { x =>
text.set(x.toString.getBytes(encoding))
(NullWritable.get(), text)
}
}
RDD.rddToPairRDDFunctions(r)(nullWritableClassTag, textClassTag, null)
.saveAsHadoopFile[TextOutputFormat[NullWritable, Text]](path)
}
def saveAsTextFileWithEncoding(path: String, codec: Class[_ <: CompressionCodec],
encoding: String): Unit = {
val nullWritableClassTag = implicitly[ClassTag[NullWritable]]
val textClassTag = implicitly[ClassTag[Text]]
val r = rdd.mapPartitions { iter =>
val text = new Text()
iter.map { x =>
text.set(x.toString.getBytes(encoding))
(NullWritable.get(), text)
}
}
RDD.rddToPairRDDFunctions(r)(nullWritableClassTag, textClassTag, null)
.saveAsHadoopFile[TextOutputFormat[NullWritable, Text]](path, codec)
}
}
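// Usage sketch, assumed rather than part of the SDK: wrap an existing RDD to write
// GBK-encoded text. The master, application name and output path are placeholder values.
object EMRExtendRDDExample {
  def main(args: Array[String]): Unit = {
    val sc = new org.apache.spark.SparkContext(
      new org.apache.spark.SparkConf().setAppName("encoding-demo").setMaster("local[1]"))
    try {
      new EMRExtendRDD(sc.parallelize(Seq("first line", "second line")))
        .saveAsTextFileWithEncoding("/tmp/encoding-demo", "GBK")
    } finally {
      sc.stop()
    }
  }
}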
|
aliyun/aliyun-spark-sdk
|
core/src/main/scala/org/apache/spark/aliyun/helper/EMRExtendRDD.scala
|
Scala
|
artistic-2.0
| 2,274
|
package org.scaladebugger.api.pipelines
import org.scaladebugger.test.helpers.ParallelMockFunSpec
class CloseOperationSpec extends ParallelMockFunSpec {
describe("CloseOperation") {
describe("#process") {
it("should invoke the constructor-provided close function") {
val mockCloseFunction = mockFunction[Unit]
val operation = new CloseOperation[Int](mockCloseFunction)
mockCloseFunction.expects().once()
operation.process(Seq(1, 2, 3)) should be (empty)
}
}
}
}
|
chipsenkbeil/scala-debugger
|
scala-debugger-api/src/test/scala/org/scaladebugger/api/pipelines/CloseOperationSpec.scala
|
Scala
|
apache-2.0
| 522
|
/*
* Copyright (c) 2015 Mind Eratosthenes Kft.
* License: AGPL v3
*/
package com.mind_era.underscore
import scala.scalajs._
import org.scalatest._
/**
* Tests Underscore
*
* @author Gabor Bakos
*/
class TestUnderscore extends FlatSpec with Matchers {
"Unique" should "return unique values" in {
val list/*: List[Int]*/ = js.Array(1, 2, 1, 3, 1, 4)
___.uniq(list) should be (js.Array(1, 2, 3, 4))
}
"Unique with single value" should "return that value" in {
val list/*: List[Int]*/ = scala.collection.mutable.IndexedSeq(1)
___.uniq(list) should be (collection.mutable.IndexedSeq(1))
}
}
|
aborg0/underscore.scala.js
|
src/test/scala/underscore/TestUnderscore.scala
|
Scala
|
mit
| 621
|
/*
* Ported from https://github.com/junit-team/junit
*/
package org.junit.runner
trait Describable {
def getDescription(): Description
}
|
nicolasstucki/scala-js-junit
|
runtime/src/main/scala/org/junit/runner/Describable.scala
|
Scala
|
bsd-3-clause
| 142
|
package com.tribbloids.spookystuff.utils
import java.io.File
import com.tribbloids.spookystuff.testutils.{FunSpecx, TestHelper}
import org.apache.spark.TaskContext
import org.apache.spark.rdd.RDD
import scala.collection.immutable.Seq
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.TimeoutException
import scala.util.Random
/**
* Created by peng on 11/1/14.
*/
class SpookyUtilsSuite extends FunSpecx {
import SpookyViews._
import scala.concurrent.duration._
it("canonizeUrn should clean ?:$&#") {
val url = SpookyUtils.canonizeUrn("http://abc.com?re#k2$si")
assert(url === "http/abc.com/re/k2/si")
}
it("asArray[Int]") {
assert(SpookyUtils.asArray[Int](2).toSeq == Seq(2))
assert(SpookyUtils.asArray[Int](Seq(1, 2, 3).iterator).toSeq == Seq(1, 2, 3))
assert(SpookyUtils.asArray[Int](Seq(1, 2.2, "b")).toSeq == Seq(1))
}
it("asIterable[Int]") {
assert(SpookyUtils.asIterable[Int](2) == Iterable(2))
assert(SpookyUtils.asIterable[Int](Seq(1, 2, 3).iterator).toSeq == Iterable(1, 2, 3))
assert(SpookyUtils.asIterable[Int](Seq(1, 2.2, "b")).toSeq == Iterable(1))
}
it("copyResourceToDirectory can extract a dependency's package in a jar") {
val src = SpookyUtils.getCPResource("org/apache/log4j/xml").get
val dst = CommonUtils.\\\\\\(CommonConst.USER_TEMP_DIR, "log4j")
SpookyUtils.extractResource(src, dst)
val dir = new File(dst)
assert(dir.list().nonEmpty)
}
it("copyResourceToDirectory can extract a package in file system") {
val src = SpookyUtils.getCPResource("com/tribbloids/spookystuff/utils").get
val dst = "temp/utils/"
SpookyUtils.extractResource(src, dst)
val dir = new File(dst)
assert(dir.list().nonEmpty)
}
it("withDeadline can write heartbeat info into log by default") {
val (_, time) = CommonUtils.timed {
TestHelper.intercept[TimeoutException] {
CommonUtils.withDeadline(10.seconds, Some(1.second))(
{
Thread.sleep(20000)
}
)
}
}
Predef.assert(time < 12000)
val (_, time2) = CommonUtils.timed {
CommonUtils.withDeadline(10.seconds, Some(1.second))(
{
Thread.sleep(5000)
}
)
}
assert(time2 < 6000)
}
it("withDeadline can execute heartbeat") {
var log = ArrayBuffer[Int]()
val (_, time) = CommonUtils.timed {
TestHelper.intercept[TimeoutException] {
CommonUtils.withDeadline(10.seconds, Some(1.second))(
{
Thread.sleep(20000)
},
Some { i: Int =>
val str = s"heartbeat: i=$i"
println(str)
log += i
}
)
}
}
Predef.assert(time < 12000)
Predef.assert(Seq(9, 10).contains(log.max))
log.clear()
val (_, time2) = CommonUtils.timed {
CommonUtils.withDeadline(10.seconds, Some(1.second))(
{
Thread.sleep(5000)
},
Some { i: Int =>
val str = s"heartbeat: i=$i"
println(str)
log += i
}
)
}
Predef.assert(time2 < 6000)
Predef.assert(Seq(4, 5).contains(log.max))
}
it("withDeadline won't be affected by scala concurrency global ForkJoin thread pool") {
TestHelper.TestSC.uuidSeed().mapOncePerCore { _ =>
println("partition-" + TaskContext.get().partitionId())
val (_, time) = CommonUtils.timed {
TestHelper.intercept[TimeoutException] {
CommonUtils.withDeadline(10.seconds, Some(1.second)) {
Thread.sleep(20000)
println("result 1")
}
}
}
Predef.assert(time < 11000, s"$time vs 11000")
val (_, time2) = CommonUtils.timed {
CommonUtils.withDeadline(10.seconds, Some(1.second)) {
Thread.sleep(3000)
println("result 2")
}
}
Predef.assert(time2 < 6000, s"$time2 vs 6000")
}
}
it("RDDs.batchReduce yield the same results as RDDs.map(_.reduce)") {
val src = TestHelper.TestSC.parallelize(1 to 10)
val rdds: Seq[RDD[Int]] = (1 to 10).map { i =>
val result = src.map { j =>
Random.nextInt(100)
}
result.persist()
}
val sum1 = rdds.zipWithIndex.map {
case (rdd, i) =>
rdd.reduce(_ + _)
}
val sum2 = SpookyUtils.RDDs.batchReduce(rdds)(_ + _)
val sum3 = SpookyUtils.RDDs.batchReduce(rdds)(_ + _)
assert(sum1 == sum2)
assert(sum3 == sum1)
}
it("RDDs.shufflePartitions can move data into random partitions") {
val src = TestHelper.TestSC.parallelize(1 to 100).persist()
val shuffled1 = src.shufflePartitions
val shuffled2 = src.shufflePartitions
val identical = shuffled1
.zipPartitions(shuffled2) { (i1, i2) =>
Iterator(i1.toSet == i2.toSet)
}
.collect()
assert(identical.length > identical.count(identity))
val clusters1 = shuffled1.collectPerPartition.toSet
val clusters2 = shuffled2.collectPerPartition.toSet
assert(clusters1 != clusters2)
}
}
|
tribbloid/spookystuff
|
core/src/test/scala/com/tribbloids/spookystuff/utils/SpookyUtilsSuite.scala
|
Scala
|
apache-2.0
| 5,045
|
/*
* Copyright 2011-2014 Chris de Vreeze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.ebpi.yaidom.queryapi
import nl.ebpi.yaidom.XmlStringUtils
/**
* Trait partly implementing the contract for elements as text containers.
* Typical element types are both an [[nl.ebpi.yaidom.queryapi.ElemLike]] as well as a [[nl.ebpi.yaidom.queryapi.HasText]].
*
* @author Chris de Vreeze
*/
trait HasText extends HasTextApi {
/** Returns `text.trim`. */
final def trimmedText: String = text.trim
/** Returns `XmlStringUtils.normalizeString(text)`. */
final def normalizedText: String = XmlStringUtils.normalizeString(text)
}
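// Minimal sketch, not part of yaidom: any text holder can mix in the trait to obtain the
// derived members. The exact whitespace-collapsing behaviour of normalizedText is assumed
// from the contract of XmlStringUtils.normalizeString referenced above.
final case class ExampleText(text: String) extends HasText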
|
EBPI/yaidom
|
src/main/scala/nl/ebpi/yaidom/queryapi/HasText.scala
|
Scala
|
apache-2.0
| 1,153
|
// if returns a value
object FizzBuzz02 {
def main(args: Array[String]) {
for (i <- 1 to 20) {
val line = if (i % 3 == 0 && i % 5 ==0) {
"FizzBuzz"
} else if (i % 3 == 0) {
"Fizz"
} else if (i % 5 == 0) {
"Buzz"
} else {
i
}
println(line)
}
}
}
|
mzkrelx/wakuwaku-scala1
|
FizzBuzz02.scala
|
Scala
|
mit
| 328
|
package drt.users
import java.util.UUID
import akka.http.scaladsl.model._
import drt.shared.KeyCloakApi.{KeyCloakGroup, KeyCloakUser}
import drt.users.KeyCloakUserParserProtocol._
import services.crunch.CrunchTestLike
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
class KeyCloakApiSpec extends CrunchTestLike {
val keyCloakUrl = "https://keycloak"
val userId1: UUID = UUID.fromString("e25f2a14-bdaa-11e8-a355-529269fb1459")
val userId2: UUID = UUID.fromString("e25f2dfc-bdaa-11e8-a355-529269fb1459")
val usersJson: String =
s"""[{
| "id": "$userId1",
| "createdTimestamp": 1516283371483,
| "username": "test1@digital.homeoffice.gov.uk",
| "enabled": true,
| "totp": false,
| "emailVerified": true,
| "firstName": "Man",
| "lastName": "One",
| "email": "test1@digital.homeoffice.gov.uk",
| "disableableCredentialTypes": [
| "password"
| ],
| "requiredActions": [],
| "notBefore": 0,
| "access": {
| "manageGroupMembership": true,
| "view": true,
| "mapRoles": true,
| "impersonate": true,
| "manage": true
| }
| },
| {
| "id": "$userId2",
| "createdTimestamp": 1516967531289,
| "username": "test2@homeoffice.gsi.gov.uk",
| "enabled": true,
| "totp": false,
| "emailVerified": true,
| "firstName": "Man",
| "lastName": "Two",
| "email": "test2@digital.homeoffice.gov.uk",
| "disableableCredentialTypes": [
| "password"
| ],
| "requiredActions": [],
| "notBefore": 0,
| "access": {
| "manageGroupMembership": true,
| "view": true,
| "mapRoles": true,
| "impersonate": true,
| "manage": true
| }
| }]""".stripMargin
val usersMissingOptionalFieldsJson: String =
s"""[{
| "id": "$userId1",
| "username": "test1@digital.homeoffice.gov.uk",
| "enabled": true,
| "emailVerified": true,
| "firstName": "Man",
| "lastName": "One",
| "email": "test1@digital.homeoffice.gov.uk",
| "requiredActions": [],
| "notBefore": 0
| },
| {
| "id": "$userId2",
| "createdTimestamp": 1516967531289,
| "username": "test2@homeoffice.gsi.gov.uk",
| "enabled": true,
| "totp": false,
| "emailVerified": true,
| "firstName": "Man",
| "lastName": "Two",
| "email": "test2@digital.homeoffice.gov.uk",
| "disableableCredentialTypes": [
| "password"
| ],
| "requiredActions": [],
| "notBefore": 0,
| "access": {
| "manageGroupMembership": true,
| "view": true,
| "mapRoles": true,
| "impersonate": true,
| "manage": true
| }
| }]""".stripMargin
private val user1 = KeyCloakUser(
userId1,
"test1@digital.homeoffice.gov.uk",
true,
true,
"Man",
"One",
"test1@digital.homeoffice.gov.uk"
)
private val user2 = KeyCloakUser(
userId2,
"test2@homeoffice.gsi.gov.uk",
true,
true,
"Man",
"Two",
"test2@digital.homeoffice.gov.uk"
)
val expectedUsers = List(
user1,
user2
)
"When parsing a JSON list of users from key cloak I should get back a list of KeyCloakUsers" >> {
import spray.json._
val result = usersJson.parseJson.convertTo[List[KeyCloakUser]]
result == expectedUsers
}
"When querying the keycloak API to get a list of all users " +
"Given an auth token then I should get back a list of users" >> {
val token = "testToken"
val kc = new KeyCloakClient(token, keyCloakUrl) {
def sendAndReceive: HttpRequest => Future[HttpResponse] = (_: HttpRequest) => {
Future(HttpResponse().withEntity(HttpEntity(ContentTypes.`application/json`, usersJson)))
}
}
val users: List[KeyCloakUser] = Await.result(kc.getUsers(), 30 seconds)
users === expectedUsers
}
val groupsJson =
""" [{
| "id": "id1",
| "name": "DRT Admin User",
| "path": "/DRT Admin User",
| "subGroups": []
| },
| {
| "id": "id2",
| "name": "LHR",
| "path": "/LHR",
| "subGroups": []
| }]""".stripMargin
private val lhrGroup = KeyCloakGroup("id2", "LHR", "/LHR")
val expectedGroups = List(
KeyCloakGroup("id1", "DRT Admin User", "/DRT Admin User"),
lhrGroup
)
"When parsing a JSON list of users from key cloak I should get back a list of KeyCloakUsers" >> {
import spray.json._
val result = usersJson.parseJson.convertTo[List[KeyCloakUser]]
result == expectedUsers
}
"When parsing a JSON list of users missing some optional fields the I should still get a list of Users" >> {
import spray.json._
val result = usersJson.parseJson.convertTo[List[KeyCloakUser]]
result == expectedUsers
}
"When querying the keycloak API to get a list of all groups I should get back a list of groups" >> {
val token = "testToken"
val kc = new KeyCloakClient(token, keyCloakUrl) {
def sendAndReceive: HttpRequest => Future[HttpResponse] = (_: HttpRequest) => {
Future(HttpResponse().withEntity(HttpEntity(ContentTypes.`application/json`, groupsJson)))
}
}
val groups: List[KeyCloakGroup] = Await.result(kc.getGroups, 30 seconds)
groups === expectedGroups
}
"When querying the keycloak API to get a list of users in LHR then I should get back user2" >> {
val token = "testToken"
val kc = new KeyCloakClient(token, keyCloakUrl) with MockServerForUsersInGroup
val users: List[KeyCloakUser] = Await.result(kc.getUsersInGroup("LHR"), 30 seconds)
users === List(user2)
}
"When asking for users not in LHR I should get back user1" >> {
val token = "testToken"
val kc = new KeyCloakClient(token, keyCloakUrl) with MockServerForUsersInGroup
val users: List[KeyCloakUser] = Await.result(kc.getUsersNotInGroup("LHR"), 30 seconds)
users === List(user1)
}
"When adding a user to a group the user and group should be posted to the correct keycloak endpoint" >> {
val token = "testToken"
val kc = new KeyCloakClient(token, keyCloakUrl) with MockServerForUsersInGroup
val res: HttpResponse = Await.result(kc.addUserToGroup(user1.id, lhrGroup.id), 30 seconds)
res.status === StatusCodes.NoContent
}
val lhrUsers: String = s"""
| [{
| "id": "$userId2",
| "createdTimestamp": 1516967531289,
| "username": "test2@homeoffice.gsi.gov.uk",
| "enabled": true,
| "totp": false,
| "emailVerified": true,
| "firstName": "Man",
| "lastName": "Two",
| "email": "test2@digital.homeoffice.gov.uk",
| "disableableCredentialTypes": [
| "password"
| ],
| "requiredActions": [],
| "notBefore": 0,
| "access": {
| "manageGroupMembership": true,
| "view": true,
| "mapRoles": true,
| "impersonate": true,
| "manage": true
| }
| }]
""".stripMargin
trait MockServerForUsersInGroup {
def sendAndReceive: HttpRequest => Future[HttpResponse] = (req: HttpRequest) => {
req.uri.toString.replace(keyCloakUrl, "") match {
case "/groups/id2/members?max=1000" =>
Future(HttpResponse().withEntity(HttpEntity(ContentTypes.`application/json`, lhrUsers)))
case "/groups" =>
Future(HttpResponse().withEntity(HttpEntity(ContentTypes.`application/json`, groupsJson)))
case "/users?max=100&first=0" =>
Future(HttpResponse().withEntity(HttpEntity(ContentTypes.`application/json`, usersJson)))
case "/users/e25f2a14-bdaa-11e8-a355-529269fb1459/groups/id2" =>
assert(req.method == HttpMethods.PUT)
Future(HttpResponse(StatusCodes.NoContent))
}
}
}
}
|
UKHomeOffice/drt-scalajs-spa-exploration
|
server/src/test/scala/drt/users/KeyCloakApiSpec.scala
|
Scala
|
apache-2.0
| 8,638
|
import org.jobimtext.run.SparkRunner
/*
*
* Copyright 2015.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/**
* Created by Steffen Remus.
*/
object run {
def main(args: Array[String]):Unit = {
SparkRunner.run(args)
}
}
|
tudarmstadt-lt/JoBimTextCT
|
org.jobimtext.ct/src/main/scala/run.scala
|
Scala
|
apache-2.0
| 764
|
// Copyright (c) 2011-2015 ScalaMock Contributors (https://github.com/paulbutcher/ScalaMock/graphs/contributors)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package org.scalamock.matchers
/**
* Can be extended to implement custom matchers.
*/
trait Matcher[T] extends MatcherBase {
override def toString: String = this.getClass.getSimpleName
override def canEqual(x: Any): Boolean = true // x.isInstanceOf[T] would not work anyway
override def equals(x: Any) = if (canEqual(x)) safeEquals(x.asInstanceOf[T]) else false
def safeEquals(that: T): Boolean
}
|
paulbutcher/ScalaMock
|
shared/src/main/scala/org/scalamock/matchers/Matcher.scala
|
Scala
|
mit
| 1,598
|
/*
* Copyright (c) 2020. StulSoft
*/
package com.stulsoft.ysps.pcollection.mutable
import scala.collection.mutable.{ArrayBuffer, ListBuffer}
/**
* @author Yuriy Stul
*/
object BufferExc1 extends App {
arrayBufferEx1()
listBufferEx1()
def arrayBufferEx1(): Unit = {
println("==>arrayBufferEx1")
val ab = ArrayBuffer[Int]()
ab += 1
ab += 2
ab += 3
ab += 4
println(ab)
}
def listBufferEx1(): Unit = {
println("==>listBufferEx1")
val ab = ListBuffer[Int]()
ab += 1
ab += 2
ab += 3
ab += 4
println(ab)
}
}
|
ysden123/ysps
|
src/main/scala/com/stulsoft/ysps/pcollection/mutable/BufferExc1.scala
|
Scala
|
mit
| 578
|
/*******************************************************************************
* Copyright (c) 2019. Carl Minden
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package com.anathema_roguelike
package entities.characters.player.perks.abilities.spells.inquisitor
import com.anathema_roguelike.entities.characters.perks.actions.TargetedPerk
import com.anathema_roguelike.entities.characters.player.classes.Inquisitor
import com.anathema_roguelike.entities.characters.player.perks.abilities.spells.Spell
import com.anathema_roguelike.environment.Location
class IllusoryWall() extends Spell[TargetedPerk[Location]](2, classOf[Inquisitor]) {
override protected def createPerk: TargetedPerk[Location] = ??? // TODO Auto-generated method stub
}
|
carlminden/anathema-roguelike
|
src/com/anathema_roguelike/entities/characters/player/perks/abilities/spells/inquisitor/IllusoryWall.scala
|
Scala
|
gpl-3.0
| 1,426
|
package com.github.jeroenr.bson.reader
import java.nio.ByteBuffer
import com.github.jeroenr.bson.element.BsonArray
object BsonArrayReader extends Reader[BsonArray] {
def read(buffer: ByteBuffer): Option[BsonArray] = {
val name = readCString(buffer)
BsonDocumentReader.read(buffer).map(BsonArray(name, _))
}
}
|
jeroenr/tepkin
|
bson/src/main/scala/com/github/jeroenr/bson/reader/BsonArrayReader.scala
|
Scala
|
apache-2.0
| 325
|
package org.http4s.ember.core
import org.specs2.mutable.Specification
import cats.implicits._
import cats.effect.{IO, Sync}
import org.http4s._
class EncoderSpec extends Specification {
private object Helpers {
def stripLines(s: String): String = s.replace("\r\n", "\n")
// Only for Use with Text Requests
def encodeRequestRig[F[_]: Sync](req: Request[F]): F[String] =
Encoder
.reqToBytes(req)
.through(fs2.text.utf8Decode[F])
.compile
.foldMonoid
.map(stripLines)
// Only for Use with Text Requests
def encodeResponseRig[F[_]: Sync](resp: Response[F]): F[String] =
Encoder
.respToBytes(resp)
.through(fs2.text.utf8Decode[F])
.compile
.foldMonoid
.map(stripLines)
}
"Encoder.reqToBytes" should {
"encode a no body request correctly" in {
val req = Request[IO](Method.GET, Uri.unsafeFromString("http://www.google.com"))
val expected =
"""GET http://www.google.com HTTP/1.1
|Host: www.google.com
|
|""".stripMargin
Helpers.encodeRequestRig(req).unsafeRunSync must_=== expected
}
"encode a request with a body correctly" in {
val req = Request[IO](Method.POST, Uri.unsafeFromString("http://www.google.com"))
.withEntity("Hello World!")
val expected =
"""POST http://www.google.com HTTP/1.1
|Host: www.google.com
|Content-Length: 12
|Content-Type: text/plain; charset=UTF-8
|
|Hello World!""".stripMargin
Helpers.encodeRequestRig(req).unsafeRunSync must_=== expected
}
}
"Encoder.respToBytes" should {
"encode a no body response correctly" in {
val resp = Response[IO](Status.Ok)
val expected =
"""HTTP/1.1 200 OK
|
|""".stripMargin
Helpers.encodeResponseRig(resp).unsafeRunSync must_=== expected
}
"encode a response with a body correctly" in {
val resp = Response[IO](Status.NotFound)
.withEntity("Not Found")
val expected =
"""HTTP/1.1 404 Not Found
|Content-Length: 9
|Content-Type: text/plain; charset=UTF-8
|
|Not Found""".stripMargin
Helpers.encodeResponseRig(resp).unsafeRunSync must_=== expected
}
}
}
|
ChristopherDavenport/http4s
|
ember-core/src/test/scala/org/http4s/ember/core/EncoderSpec.scala
|
Scala
|
apache-2.0
| 2,282
|
package com.twitter.zk.coordination
import com.twitter.util.Future
import com.twitter.concurrent.Permit
import org.apache.zookeeper.{CreateMode, KeeperException}
import java.util.concurrent.RejectedExecutionException
import ZkAsyncSemaphore.{PermitNodeException, PermitMismatchException, LackOfConsensusException}
import com.twitter.zk.{ZNode, ZkClient}
object ShardCoordinator {
case class SemaphoreError(err: Throwable) extends Exception("Exception from underlying semaphore.", err)
}
/**
* A rudimentary shard/partition coordinator. Provides ShardPermits by lowest available
* ID first (linear scan).
*
* {{{
* val shardCoordinator = new ShardCoordinator(zkClient, "/testing/twitter/service/something/shards", numShards)
* log.trace("Waiting for shard permit...")
* shardCoordinator.acquire flatMap { shard =>
* log.trace("Working as shard %d", shard.id)
* { // inside some Future
* if (Hashing.consistentHash(item, numShards) == shard.id) action
* } ensure { shard.release }
* }
* }}}
*/
class ShardCoordinator(zk: ZkClient, path: String, numShards: Int) {
import ShardCoordinator._
require(numShards > 0)
private[this] val separator = "/"
private[this] val semaphorePath = Seq(path, "sem").mkString(separator)
private[this] val shardPathPrefix = Seq(path, "shard-").mkString(separator)
private[this] val semaphore = new ZkAsyncSemaphore(zk, semaphorePath, numShards)
/**
* Acquire a permit for a shard (ShardPermit) asynchronously. A ShardPermit contains
* a zero-indexed shard ID. Be sure to call release() on the ShardPermit when your
* client is finished performing work for the shard.
*
* @return A Future of ShardPermit that is satisfied when a shard slot becomes available.
* @throws SemaphoreError when the underlying semaphore throws an exception.
* @throws RejectedExecutionException when a shard cannot be acquired due to an unexpected
* state in the zookeeper tree. Assumed to only happen if some
* zookeeper client clobbers the tree location for this
* ShardCoordinator.
*/
def acquire(): Future[ShardPermit] = {
semaphore.acquire flatMap { permit =>
shardNodes() map { nodes =>
nodes map { node => shardIdOf(node.path) }
} map { ids =>
(0 until numShards) filterNot { ids contains _ }
} flatMap { availableIds =>
// Iteratively (brute force) attempt to create a node for the next lowest available ID until
// a Shard is successfully created (race resolution).
availableIds.tail.foldLeft(createShardNode(availableIds.head, permit)) { (futureShardOption, id) =>
futureShardOption flatMap { shardOption =>
shardOption match {
case Some(shard) => Future.value(shardOption)
case None => createShardNode(id, permit)
}
}
}
} flatMap { shardOption =>
shardOption map { Future.value(_) } getOrElse {
Future.exception(new RejectedExecutionException("Could not get a shard, polluted zk tree?"))
}
} rescue {
case err: LackOfConsensusException => Future.exception(SemaphoreError(err))
case err: PermitMismatchException => Future.exception(SemaphoreError(err))
case err: PermitNodeException => Future.exception(SemaphoreError(err))
} onFailure { err =>
permit.release()
}
}
}
private[this] def createShardNode(id: Int, permit: Permit): Future[Option[Shard]] = {
zk(shardPath(id)).create(mode = CreateMode.EPHEMERAL) map { node =>
Some(Shard(id, node, permit))
} handle {
case err: KeeperException.NodeExistsException => None
}
}
private[this] def shardNodes(): Future[Seq[ZNode]] = {
zk(path).getChildren() map { zop =>
zop.children filter { child =>
child.path.startsWith(shardPathPrefix)
} sortBy(child => shardIdOf(child.path))
}
}
private[this] def shardIdOf(path: String): Int = {
path.substring(shardPathPrefix.length).toInt
}
private[this] def shardPath(id: Int) = Seq(path, "shard-" + id).mkString(separator)
}
sealed trait ShardPermit {
val id: Int
def release(): Unit
}
case class Shard(id: Int, private val node: ZNode, private val permit: Permit)
extends ShardPermit {
def release() = {
node.delete() ensure { permit.release() }
}
}
|
mosesn/util
|
util-zk/src/main/scala/com/twitter/zk/coordination/ShardCoordinator.scala
|
Scala
|
apache-2.0
| 4,457
|
/** MACHINE-GENERATED FROM AVRO SCHEMA. DO NOT EDIT DIRECTLY */
package avro.examples.flight
final case class Pilot(number: Int, first_name: String, last_name: String, nicknames: Seq[Handle])
|
julianpeeters/avrohugger
|
avrohugger-tools/src/test/compiler/output/Pilot.scala
|
Scala
|
apache-2.0
| 192
|
/**
* Copyright 2015 ICT.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.ac.ict.acs.netflow.rest
class RestServer {
}
|
DataSysLab/netflow
|
core/src/main/scala/cn/ac/ict/acs/netflow/rest/RestServer.scala
|
Scala
|
apache-2.0
| 886
|
package edu.berkeley.veloxms.util
object Utils extends Serializable {
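/** Maps `x % mod` into [0, mod); e.g. nonNegativeMod(-3, 5) == 2, whereas -3 % 5 == -3 in Scala. */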
def nonNegativeMod(x: Int, mod: Int): Int = {
val rawMod = x % mod
rawMod + (if (rawMod < 0) mod else 0)
}
}
|
kcompher/velox-modelserver
|
veloxms-core/src/main/scala/edu/berkeley/veloxms/util/Utils.scala
|
Scala
|
apache-2.0
| 192
|
object ch10 {
import scala.language.higherKinds
trait Monoid[A] {
def op(a1: A, a2: A): A
def zero: A
}
trait Foldable[F[_]] {
import Monoid._
def foldRight[A,B](as: F[A])(z: B)(f: (A,B) => B): B =
foldMap(as)(f.curried)(endoMonoid[B])(z)
def foldLeft[A,B](as: F[A])(z: B)(f: (B,A) => B): B =
foldMap(as)(a => (b: B) => f(b, a))(dual(endoMonoid[B]))(z)
def foldMap[A,B](as: F[A])(f: A => B)(mb: Monoid[B]): B =
foldRight(as)(mb.zero)((a, b) => mb.op(f(a), b))
def concatenate[A](as: F[A])(m: Monoid[A]): A =
foldLeft(as)(m.zero)(m.op)
def toList[A](fa: F[A]): List[A] =
foldRight(fa)(List[A]())(_ :: _)
}
object Monoid {
val intAddition: Monoid[Int] = new Monoid[Int] {
def op(a1: Int, a2: Int): Int = a1 + a2
def zero: Int = 0
}
val intMultiplication: Monoid[Int] = new Monoid[Int] {
def op(a1: Int, a2: Int): Int = a1 * a2
def zero: Int = 1
}
val booleanOr: Monoid[Boolean] = new Monoid[Boolean] {
def op(a1: Boolean, a2: Boolean): Boolean = a1 || a2
def zero: Boolean = false
}
val booleanAnd: Monoid[Boolean] = new Monoid[Boolean] {
def op(a1: Boolean, a2: Boolean): Boolean = a1 && a2
def zero: Boolean = true
}
def optionMonoid[A]: Monoid[Option[A]] = new Monoid[Option[A]] {
def op(a1: Option[A], a2: Option[A]): Option[A] = a1 orElse a2
def zero: Option[A] = None
}
def endoMonoid[A]: Monoid[A => A] = new Monoid[A => A] {
def op(f: A => A, g: A => A): A => A = f compose g
def zero: A => A = a => a
}
def dual[A](m: Monoid[A]): Monoid[A] = new Monoid[A] {
def op(x: A, y: A): A = m.op(y, x)
val zero = m.zero
}
def mapMergeMonoid[K,V](V: Monoid[V]): Monoid[Map[K, V]] = new Monoid[Map[K, V]] {
def zero = Map[K,V]()
def op(a: Map[K, V], b: Map[K, V]) =
(a.keySet ++ b.keySet).foldLeft(zero) { (acc,k) =>
acc.updated(k, V.op(a.getOrElse(k, V.zero),
b.getOrElse(k, V.zero)))
}
}
def productMonoid[A,B](A: Monoid[A], B: Monoid[B]): Monoid[(A, B)] =
new Monoid[(A, B)] {
def op(x: (A, B), y: (A, B)) =
(A.op(x._1, y._1), B.op(x._2, y._2))
val zero = (A.zero, B.zero)
}
def functionMonoid[A,B](B: Monoid[B]): Monoid[A => B] =
new Monoid[A => B] {
def op(f: A => B, g: A => B) = a => B.op(f(a), g(a))
val zero: A => B = a => B.zero
}
def foldMapV[A,B](v: IndexedSeq[A], m: Monoid[B])(f: A => B): B = {
if (v.length == 0) m.zero
else if (v.length == 1) f(v.head)
else {
val pairs = v.splitAt(v.length / 2)
m.op(foldMapV(pairs._1, m)(f), foldMapV(pairs._2, m)(f))
}
}
}
}
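// A minimal usage sketch for the abstractions above (not part of the original exercise
// file): a Foldable instance for the built-in List, plus foldMapV driving a balanced fold.
object ch10Examples {
  import scala.language.higherKinds
  import ch10._
  import ch10.Monoid._

  // Foldable[List] only needs foldRight/foldLeft; foldMap and concatenate come for free.
  val listFoldable: Foldable[List] = new Foldable[List] {
    override def foldRight[A, B](as: List[A])(z: B)(f: (A, B) => B): B = as.foldRight(z)(f)
    override def foldLeft[A, B](as: List[A])(z: B)(f: (B, A) => B): B = as.foldLeft(z)(f)
  }

  def main(args: Array[String]): Unit = {
    // Sum under the additive monoid: 1 + 2 + 3 = 6
    println(listFoldable.concatenate(List(1, 2, 3))(intAddition))
    // Balanced fold over an IndexedSeq: total length of "monoid" and "laws" = 10
    println(foldMapV(Vector("monoid", "laws"), intAddition)(_.length))
  }
}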
|
rucka/fpinscala
|
src/main/scala/fpinscala/ch10/Monoid.scala
|
Scala
|
gpl-2.0
| 2,777
|
trait IPerson {
def getName(): String
}
|
gradle/gradle
|
subprojects/scala/src/integTest/resources/org/gradle/scala/compile/IncrementalScalaCompileIntegrationTest/restoresClassesOnCompilationFailure/src/main/scala/IPerson.scala
|
Scala
|
apache-2.0
| 43
|
// Copyright: 2010 - 2018 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.core
import akka.actor.ActorSystem
import akka.testkit.TestActorRef
import org.ensime.api._
import org.ensime.fixture._
import org.ensime.util.EnsimeSpec
class DocResolverSpec
extends EnsimeSpec
with IsolatedEnsimeConfigFixture
with IsolatedTestKitFixture {
val original = EnsimeConfigFixture.DocsTestProject
def resolver(java: Option[String] = None)(implicit c: EnsimeConfig,
s: ActorSystem) =
TestActorRef[DocResolver](DocResolver(java = java)).underlyingActor
"DocResolver" should "support a wide range of queries" in withEnsimeConfig {
implicit c =>
withTestKit { tk =>
import tk._
val serv = resolver()
serv.resolve(
DocSigPair(
DocSig(DocFqn("scala", "Some"), Some("map[B](f:A=>B):Option[B]")),
DocSig(DocFqn("scala", "Some"), Some("map(scala.Function1)"))
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.Some@map[B](f:A=>B):Option[B]"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/Some.html#map[B](f:A=>B):Option[B]"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("scala", "Some"), None),
DocSig(DocFqn("scala", "Some"), None)
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.Some"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/Some.html"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("scala", "Some$"), None),
DocSig(DocFqn("scala", "Some"), None)
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.Some"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/Some.html"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("scala", "Boolean"), None),
DocSig(DocFqn("", "boolean"), None)
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.Boolean"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/Boolean.html"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("scala", "Option"), Some("isDefined:Boolean")),
DocSig(DocFqn("scala", "Option"), Some("isDefined"))
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.Option@isDefined:Boolean"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/Option.html#isDefined:Boolean"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("scala", "Some"),
Some("flatMap[B](f:A=>Option[B]):Option[B]")),
DocSig(DocFqn("scala", "Some"), Some("flatMap(scala.Function1)"))
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.Some@flatMap[B](f:A=>Option[B]):Option[B]"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/Some.html#flatMap[B](f:A=>Option[B]):Option[B]"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("scala", "Some"),
Some("flatten[B](implicitev:<:<[A,Option[B]]):Option[B]")),
DocSig(DocFqn("scala", "Some"), Some("flatten(scala.Predef.<:<)"))
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.Some@flatten[B](implicitev:<:<[A,Option[B]]):Option[B]"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/Some.html#flatten[B](implicitev:<:<[A,Option[B]]):Option[B]"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/Some.html#flatten[B](implicitev:A<:<Option[B]):Option[B]"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("scala", "Some"),
Some("fold[B](ifEmpty:=>B)(f:A=>B):B")),
DocSig(DocFqn("scala", "Some"),
Some("fold(scala.<byname>, scala.Function1)"))
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.Some@fold[B](ifEmpty:=>B)(f:A=>B):B"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/Some.html#fold[B](ifEmpty:=>B)(f:A=>B):B"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("scala", "Some"),
Some("mkString(start:String,sep:String,end:String):String")),
DocSig(
DocFqn("scala", "Some"),
Some(
"mkString(java.lang.String, java.lang.String, java.lang.String)"
)
)
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.Some@mkString(start:String,sep:String,end:String):String"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/Some.html#mkString(start:String,sep:String,end:String):String"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("scala", "Some"), Some("mkString:String")),
DocSig(DocFqn("scala", "Some"), Some("mkString"))
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.Some@mkString:String"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/Some.html#mkString:String"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("scala", "Some"),
Some("mkString(sep:String):String")),
DocSig(DocFqn("scala", "Some"), Some("mkString(java.lang.String)"))
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.Some@mkString(sep:String):String"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/Some.html#mkString(sep:String):String"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("scala", "Some"),
Some("getOrElse[B>:A](default:=>B):B")),
DocSig(DocFqn("scala", "Some"), Some("getOrElse(scala.<byname>)"))
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.Some@getOrElse[B>:A](default:=>B):B"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/Some.html#getOrElse[B>:A](default:=>B):B"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("scala", "Some"),
Some("grouped(size:Int):Iterator[Repr]")),
DocSig(DocFqn("scala", "Some"), Some("grouped(int)"))
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.Some@grouped(size:Int):Iterator[Repr]"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/Some.html#grouped(size:Int):Iterator[Repr]"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("scala.collection.immutable", "List$"),
Some("empty[A]:List[A]")),
DocSig(DocFqn("scala.collection.immutable", "List"), Some("empty"))
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.collection.immutable.List$@empty[A]:List[A]"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/collection/immutable/List$.html#empty[A]:List[A]"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("move(x$1:java.io.File,x$2:java.io.File):Unit")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("move(java.io.File, java.io.File)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#move(java.io.File, java.io.File)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(
DocFqn("com.google.common.io", "Files$"),
Some(
"asByteSource(x$1:java.io.File):com.google.common.io.ByteSource"
)
),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("asByteSource(java.io.File)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#asByteSource(java.io.File)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some(
"map(x$1:java.io.File,x$2:java.nio.channels.FileChannel.MapMode):java.nio.MappedByteBuffer"
)),
DocSig(
DocFqn("com.google.common.io", "Files"),
Some("map(java.io.File, java.nio.channels.FileChannel.MapMode)")
)
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#map(java.io.File, java.nio.channels.FileChannel.MapMode)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some(
"map(x$1:java.io.File,x$2:java.nio.channels.FileChannel.MapMode,x$3:Long):java.nio.MappedByteBuffer"
)),
DocSig(
DocFqn("com.google.common.io", "Files"),
Some(
"map(java.io.File, java.nio.channels.FileChannel.MapMode, long)"
)
)
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#map(java.io.File, java.nio.channels.FileChannel.MapMode, long)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("write(x$1:Array[Byte],x$2:java.io.File):Unit")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("write(byte[], java.io.File)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#write(byte[], java.io.File)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("scala", "Some"), None),
DocSig(DocFqn("scala", "Some"), None)
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.Some"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/Some.html"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("scala", "Int"), None),
DocSig(DocFqn("", "int"), None)
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.Int"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/Int.html"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("scala", "Predef$$DummyImplicit$"), None),
DocSig(DocFqn("scala", "Predef.DummyImplicit"), None)
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.Predef$$DummyImplicit$"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/Predef$$DummyImplicit$.html"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("scala.collection", ".package"), None),
DocSig(DocFqn("scala.collection", "package"), None)
)
) shouldBe None
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("scala", "package"), Some("Exception=Exception")),
DocSig(DocFqn("scala", "package"), Some("Exception"))
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.package@Exception=Exception"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/index.html#Exception=Exception"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(
DocFqn("scala", "Some"),
Some(
"++[B>:A,That](that:scala.collection.GenTraversableOnce[B])(implicitbf:scala.collection.generic.CanBuildFrom[Repr,B,That]):That"
)
),
DocSig(DocFqn("scala", "Some"),
Some(
"++(scala.collection.GenTraversableOnce, scala.collection.generic.CanBuildFrom)"
))
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.Some@++[B](that:scala.collection.GenTraversableOnce[B]):Option[B]"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/Some.html#++[B](that:scala.collection.GenTraversableOnce[B]):Option[B]"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/Some.html#++[B>:A,That](that:scala.collection.GenTraversableOnce[B])(implicitbf:scala.collection.generic.CanBuildFrom[Repr,B,That]):That"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(
DocFqn("scala.collection.immutable", "List"),
Some(
"flatMap[B,That](f:A=>scala.collection.GenTraversableOnce[B])(implicitbf:scala.collection.generic.CanBuildFrom[List[A],B,That]):That"
)
),
DocSig(DocFqn("scala.collection.immutable", "List"),
Some(
"flatMap(scala.Function1, scala.collection.generic.CanBuildFrom)"
))
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.collection.immutable.List@flatMap[B](f:A=>scala.collection.GenTraversableOnce[B]):List[B]"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/collection/immutable/List.html#flatMap[B](f:A=>scala.collection.GenTraversableOnce[B]):List[B]"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
serv.resolve(
DocSigPair(
DocSig(
DocFqn("scala.collection.immutable", "List"),
Some(
"collect[B,That](pf:PartialFunction[A,B])(implicitbf:scala.collection.generic.CanBuildFrom[List[A],B,That]):That"
)
),
DocSig(DocFqn("scala.collection.immutable", "List"),
Some(
"collect(scala.PartialFunction, scala.collection.generic.CanBuildFrom)"
))
)
) should (
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/index.html#scala.collection.immutable.List@collect[B](pf:PartialFunction[A,B]):List[B]"
)
) or
equal(
Some(
"docs/scala-library-" + c.scalaVersion + "-javadoc.jar/scala/collection/immutable/List.html#collect[B](pf:PartialFunction[A,B]):List[B]"
)
)
)
serv.resolve(
DocSigPair(
DocSig(DocFqn("com.google.common.io", "Files$"),
Some("simplifyPath(x$1:String):String")),
DocSig(DocFqn("com.google.common.io", "Files"),
Some("simplifyPath(java.lang.String)"))
)
) shouldBe Some(
"docs/guava-18.0-javadoc.jar/com/google/common/io/Files.html#simplifyPath(java.lang.String)"
)
}
}
it should "support Java 6 online docs" in withEnsimeConfig {
implicit config =>
withTestKit { tk =>
import tk._
val serv = resolver(Some("1.6"))
serv.resolve(
DocSig(DocFqn("java.io", "File"), None)
) shouldBe Some(
"http://docs.oracle.com/javase/6/docs/api/java/io/File.html"
)
serv.resolve(
DocSig(DocFqn("java.util", "Map.Entry"), None)
) shouldBe Some(
"http://docs.oracle.com/javase/6/docs/api/java/util/Map.Entry.html"
)
serv.resolve(
DocSig(DocFqn("java.util", "package"), None)
) shouldBe Some(
"http://docs.oracle.com/javase/6/docs/api/java/util/package-summary.html"
)
}
}
it should "support Java 8 docs" in withEnsimeConfig { implicit config =>
withTestKit { tk =>
import tk._
val serv = resolver(Some("1.8"))
// a local java 8 javadoc
serv.resolve(
DocSig(DocFqn("com.github.dvdme.ForecastIOLib", "ForecastIO"),
Some("getForecast(com.eclipsesource.json.JsonObject)"))
) shouldBe Some(
"docs/ForecastIOLib-1.5.1-javadoc.jar/com/github/dvdme/ForecastIOLib/ForecastIO.html#getForecast-com.eclipsesource.json.JsonObject-"
)
serv.resolve(
DocSig(DocFqn("java.io", "File"), Some("delete()"))
) shouldBe Some(
"http://docs.oracle.com/javase/8/docs/api/java/io/File.html#delete--"
)
serv.resolve(
DocSig(DocFqn("java.lang", "Math"), Some("max(int, int)"))
) shouldBe Some(
"http://docs.oracle.com/javase/8/docs/api/java/lang/Math.html#max-int-int-"
)
serv.resolve(
DocSig(DocFqn("java.util", "Arrays"), Some("binarySearch(int[],int)"))
) shouldBe Some(
"http://docs.oracle.com/javase/8/docs/api/java/util/Arrays.html#binarySearch-int:A-int-"
)
}
}
}
|
yyadavalli/ensime-server
|
core/src/it/scala/org/ensime/core/DocResolverSpec.scala
|
Scala
|
gpl-3.0
| 33,974
|
package uk.gov.dvla.vehicles.presentation.common.controllers.k2kacquire
import org.scalatest.mock.MockitoSugar
import play.api.data.Form
import play.api.mvc.{Request, Result}
import scala.collection.mutable.ArrayBuffer
import uk.gov.dvla.vehicles.presentation.common.clientsidesession.ClientSideSessionFactory
import uk.gov.dvla.vehicles.presentation.common.controllers.NewKeeperEnterAddressManuallyBase
import uk.gov.dvla.vehicles.presentation.common.model.CacheKeyPrefix
import uk.gov.dvla.vehicles.presentation.common.model.NewKeeperEnterAddressManuallyFormModel
import uk.gov.dvla.vehicles.presentation.common.model.VehicleAndKeeperDetailsModel
import uk.gov.dvla.vehicles.presentation.common.services.DateService
object NewKeeperEnterAddressManuallyTesting extends MockitoSugar {
import play.api.mvc.Results.{Ok, BadRequest}
val presentTestResult = Ok("presentResult")
val successTestResult = Ok("successResult")
val missingVehicleDetailsTestResult = Ok("missingVehicleDetailsResult")
val invalidFormTestResult = BadRequest("invalidFormResult")
}
class NewKeeperEnterAddressManuallyTesting(implicit override val clientSideSessionFactory: ClientSideSessionFactory,
dateService: DateService,
prefix: CacheKeyPrefix) extends NewKeeperEnterAddressManuallyBase {
import NewKeeperEnterAddressManuallyTesting._
val presentResultArgs = ArrayBuffer[(VehicleAndKeeperDetailsModel, Form[NewKeeperEnterAddressManuallyFormModel])]()
val invalidFormResultArgs = ArrayBuffer[(VehicleAndKeeperDetailsModel, Form[NewKeeperEnterAddressManuallyFormModel])]()
protected def presentResult(model: VehicleAndKeeperDetailsModel,
form: Form[NewKeeperEnterAddressManuallyFormModel])
(implicit request: Request[_]): Result = {
presentResultArgs.append((model, form))
presentTestResult
}
override protected def success(implicit request: Request[_]): Result = successTestResult
override protected def missingVehicleDetails(implicit request: Request[_]): Result = missingVehicleDetailsTestResult
protected def invalidFormResult(model: VehicleAndKeeperDetailsModel,
form: Form[NewKeeperEnterAddressManuallyFormModel])
(implicit request: Request[_]): Result = {
invalidFormResultArgs.append((model, form))
invalidFormTestResult
}
}
|
dvla/vehicles-presentation-common
|
test/uk/gov/dvla/vehicles/presentation/common/controllers/k2kacquire/NewKeeperEnterAddressManuallyTesting.scala
|
Scala
|
mit
| 2,453
|
package controller
import skinny.controller.SkinnyServlet
import skinny.controller.feature.{ CSRFProtectionFeature, FileUploadFeature }
/**
* Base class for controllers that handle file uploads via Ajax.
*/
class UploadController extends SkinnyServlet with FileUploadFeature with DiInjector with CSRFProtectionFeature {
addErrorFilter {
case e: Throwable =>
logger.error(e.getMessage, e)
halt(500)
}
}
|
nemuzuka/vss-kanban
|
src/main/scala/controller/UploadController.scala
|
Scala
|
mit
| 439
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.batch.table.stringexpr
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.planner.utils.{HierarchyTableFunction, PojoTableFunc, TableFunc1, TableFunc2, TableTestBase}
import org.junit.Test
class CorrelateStringExpressionTest extends TableTestBase {
private val util = batchTestUtil()
private val tab = util.addTableSource[(Int, Long, String)]("Table1", 'a, 'b, 'c)
private val func1 = new TableFunc1
util.addFunction("func1", func1)
private val func2 = new TableFunc2
util.addFunction("func2", func2)
@Test
def testCorrelateJoins1(): Unit = {
// test cross join
util.verifyExecPlan(tab.joinLateral(func1('c) as 's).select('c, 's))
}
@Test
def testCorrelateJoins2(): Unit = {
// test left outer join
util.verifyExecPlan(tab.leftOuterJoinLateral(func1('c) as 's).select('c, 's))
}
@Test
def testCorrelateJoins3(): Unit = {
// test overloading
util.verifyExecPlan(tab.joinLateral(func1('c, "$") as 's).select('c, 's))
}
@Test
def testCorrelateJoins4(): Unit = {
// test custom result type
util.verifyExecPlan(tab.joinLateral(func2('c) as('name, 'len)).select('c, 'name, 'len))
}
@Test
def testCorrelateJoins5(): Unit = {
// test hierarchy generic type
val hierarchy = new HierarchyTableFunction
util.addFunction("hierarchy", hierarchy)
val scalaTable = tab.joinLateral(
hierarchy('c) as('name, 'adult, 'len)).select('c, 'name, 'len, 'adult)
util.verifyExecPlan(scalaTable)
}
@Test
def testCorrelateJoins6(): Unit = {
// test pojo type
val pojo = new PojoTableFunc
util.addFunction("pojo", pojo)
val scalaTable = tab.joinLateral(pojo('c)).select('c, 'name, 'age)
util.verifyExecPlan(scalaTable)
}
@Test
def testCorrelateJoins7(): Unit = {
// test with filter
val scalaTable = tab.joinLateral(
func2('c) as('name, 'len)).select('c, 'name, 'len).filter('len > 2)
util.verifyExecPlan(scalaTable)
}
@Test
def testCorrelateJoins8(): Unit = {
// test with scalar function
val scalaTable = tab.joinLateral(func1('c.substring(2)) as 's).select(
'a, 'c, 's)
util.verifyExecPlan(scalaTable)
}
}
|
apache/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/batch/table/stringexpr/CorrelateStringExpressionTest.scala
|
Scala
|
apache-2.0
| 3,065
|
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.abstractnn.TensorModule
import com.intel.analytics.bigdl.tensor.{DenseTensorApply, Tensor, TensorFunc6}
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import scala.reflect.ClassTag
/**
* [[Masking]] Use a mask value to skip timesteps for a sequence
*
* @param maskValue mask value
*/
class Masking[T: ClassTag](maskValue: Double = 0.0)
(implicit ev: TensorNumeric[T]) extends TensorModule[T] {
val batchDim = 1
val timeDim = 2
override def updateOutput(input: Tensor[T]): Tensor[T] = {
output.resizeAs(input)
var timeIndex = 1
var batchIndex = 1
val fillValue = ev.fromType(0.0)
while(batchIndex <= input.size(batchDim)) {
val batchInput = input.select(batchDim, batchIndex)
val batchOutput = output.select(batchDim, batchIndex)
while(timeIndex <= input.size(timeDim)) {
val slicedTensor = batchInput.select(timeDim - 1, timeIndex)
if (!slicedTensor.notEqualValue(maskValue)) {
batchOutput.select(timeDim - 1, timeIndex).fill(fillValue)
} else {
batchOutput.select(timeDim - 1, timeIndex).copy(slicedTensor)
}
timeIndex += 1
}
batchIndex += 1
timeIndex = 1
}
output
}
override def updateGradInput(input: Tensor[T], gradOutput: Tensor[T]): Tensor[T] = {
require(input.isSameSizeAs(gradOutput),
"Input should have the same size as gradOutput" +
s"input size(${input.size().foreach(x => x)})" +
s"gradOutput size(${gradOutput.size().foreach(x => x)})")
gradInput.resizeAs(input)
var timeIndex = 1
var batchIndex = 1
val fillValue = ev.fromType(0.0)
while(batchIndex <= input.size(batchDim)) {
val batchInput = input.select(batchDim, batchIndex)
val batchgradOutput = gradOutput.select(batchDim, batchIndex)
val batchgradInput = gradInput.select(batchDim, batchIndex)
while(timeIndex <= input.size(timeDim)) {
val slicedTensor = batchInput.select(timeDim - 1, timeIndex)
if (!slicedTensor.notEqualValue(maskValue)) {
batchgradInput.select(timeDim - 1, timeIndex).fill(fillValue)
} else {
batchgradInput.select(timeDim - 1, timeIndex).copy(
batchgradOutput.select(timeDim - 1, timeIndex))
}
timeIndex += 1
}
batchIndex += 1
timeIndex = 1
}
gradInput
}
}
object Masking {
def apply[T : ClassTag](maskValue: Double)(implicit ev: TensorNumeric[T]): Masking[T]
= new Masking[T](maskValue)
}
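// Usage sketch (not part of the original file). The Tensor factory, rand(), select(),
// zero() and forward() calls below follow the usual BigDL API but are assumptions of
// this sketch rather than something defined above.
object MaskingExample {
  import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric.NumericFloat

  def main(args: Array[String]): Unit = {
    val layer = Masking[Float](maskValue = 0.0)
    // batch = 2, time = 3, features = 4
    val input = Tensor[Float](2, 3, 4).rand()
    // Force timestep 2 of the first sample to the mask value so it gets skipped.
    input.select(1, 1).select(1, 2).zero()
    val output = layer.forward(input)
    println(output) // the masked timestep stays filled with zeros
  }
}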
|
qiuxin2012/BigDL
|
spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/Masking.scala
|
Scala
|
apache-2.0
| 3,203
|
package mesosphere.marathon.core.launcher.impl
import org.apache.mesos.{ Protos => MesosProtos }
case class ResourceLabels(labels: Map[String, String]) {
lazy val mesosLabels: MesosProtos.Labels = {
val labelsBuilder = MesosProtos.Labels.newBuilder()
labels.foreach {
case (k, v) =>
labelsBuilder.addLabels(MesosProtos.Label.newBuilder().setKey(k).setValue(v))
}
labelsBuilder.build()
}
def get(key: String): Option[String] = labels.get(key)
override def toString: String = labels.map { case (k, v) => s"$k: $v" }.mkString(", ")
}
object ResourceLabels {
def empty: ResourceLabels = new ResourceLabels(Map.empty)
def apply(resource: MesosProtos.Resource): ResourceLabels = {
if (resource.hasReservation && resource.getReservation.hasLabels)
ResourceLabels(resource.getReservation.getLabels)
else
ResourceLabels.empty
}
def apply(resource: MesosProtos.Labels): ResourceLabels = {
import scala.collection.JavaConverters._
ResourceLabels(resource.getLabelsList.asScala.iterator.map(l => l.getKey -> l.getValue).toMap)
}
}
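// Usage sketch (illustrative, appended to the original file): round-trip a label map
// through the Mesos protobuf representation using only the API defined above.
object ResourceLabelsExample extends App {
  val labels = ResourceLabels(Map("owner" -> "marathon", "tier" -> "batch"))
  val proto: MesosProtos.Labels = labels.mesosLabels // Map -> protobuf Labels
  val roundTripped = ResourceLabels(proto)           // protobuf Labels -> Map
  println(roundTripped.get("owner"))                 // Some(marathon)
  println(roundTripped.get("missing"))               // None
}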
|
ss75710541/marathon
|
src/main/scala/mesosphere/marathon/core/launcher/impl/ResourceLabels.scala
|
Scala
|
apache-2.0
| 1,101
|
package mm4s.examples.proxy
import akka.actor.{Actor, ActorLogging, ActorRef}
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.model.ws.{Message, TextMessage}
import akka.http.scaladsl.server.Directives._
import akka.stream.OverflowStrategy
import akka.stream.scaladsl.{Flow, Sink, Source}
import mm4s.api.{Post, Posted}
import mm4s.bots.api.{Bot, BotID, Ready}
import net.codingwell.scalaguice.ScalaModule
import rxthings.webhooks.ActorWebApi
import spray.json.{DefaultJsonProtocol, RootJsonFormat}
/**
* Example of a bot acting as a pass-through for a third party ws consumer
*/
class ProxyBot extends Actor with Bot with ActorWebApi with ActorLogging {
override def config() = Option(actorSystem.settings.config)
def receive: Receive = {
case Ready(api, id) => context.become(ready(api, id))
}
def ready(api: ActorRef, bid: BotID): Receive = {
log.debug("ProxyBot [{}] ready, start HTTP", bid.username)
webstart(routes(self))
{
case WsConnected(id, ws) => context.become(connected(api, ws, id))
}
}
def connected(api: ActorRef, ws: ActorRef, id: String): Receive = {
log.debug("ProxyBot [{}] connected", id)
{
// incoming from http client, out to mm
case m: PostingDetails =>
println(s"${m.message} <<from client to mm>>")
api ! Post(m.message)
// incoming from http client ws, out to mm
case TextMessage.Strict(t) =>
println(s"$t <<from http ws>>")
api ! Post(t)
// incoming from mm, out to proxy client
case Posted(t) =>
ws ! TextMessage(s"$t <<from mm ws to proxy client>>")
case WsDisconnected(x) =>
println(s"proxy-client disconnected $x")
api ! Post(s"proxy-client disconnected $x")
}
}
def ws(id: String) = {
val source = Source.actorRef[Message](bufferSize = 5, OverflowStrategy.fail)
.mapMaterializedValue(a => context.self ! WsConnected(id, a))
val sink = Sink.actorRef(context.self, WsDisconnected(id))
Flow.fromSinkAndSource(sink, source)
}
case class WsConnected(id: String, ref: ActorRef)
case class WsDisconnected(id: String)
case class PostingDetails(username: String, channel: String, message: String)
object PostingDetailsProtocol extends DefaultJsonProtocol {
implicit val postingFormat: RootJsonFormat[PostingDetails] = jsonFormat3(PostingDetails)
}
def routes(a: ActorRef) = {
// the proxy client connects to
(get & pathPrefix("ws" / Segment)) { id =>
handleWebSocketMessages(ws(id))
} ~
// the proxy client can post restful messages to default channel
(post & path("/msg" / Segment)) { s =>
a ! s
complete(StatusCodes.OK)
} ~
// the proxy client can post restful messages to alternate channels
(post & path("/msg")) {
import PostingDetailsProtocol._
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
entity(as[PostingDetails]) { m =>
a ! m
complete(StatusCodes.OK)
}
}
}
}
class ProxyBotModule extends ScalaModule {
def configure() = bind[Bot].to[ProxyBot]
}
object ProxyBotBoot4dev extends App {
mm4s.bots.Boot.main(Array.empty)
}
|
jw3/mm4s-examples
|
proxybot/src/main/scala/mm4s/examples/proxy/ProxyBot.scala
|
Scala
|
apache-2.0
| 3,215
|
/*
* ============= Ryft-Customized BSD License ============
* Copyright (c) 2015, Ryft Systems, Inc.
* All rights reserved.
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software must display the following acknowledgement:
* This product includes software developed by Ryft Systems, Inc.
* 4. Neither the name of Ryft Systems, Inc. nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY RYFT SYSTEMS, INC. ''AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL RYFT SYSTEMS, INC. BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* ============
*/
package com.ryft.spark.connector.examples
import com.ryft.spark.connector._
import com.ryft.spark.connector.domain.RyftQueryOptions
import com.ryft.spark.connector.query.SimpleQuery
import com.ryft.spark.connector.query.value.DateValue
import com.ryft.spark.connector.query.value.model.{Format, Date}
import org.apache.spark.{SparkContext, SparkConf, Logging}
import scala.language.postfixOps
object DateQueryExample extends App with Logging {
val sparkConf = new SparkConf()
.setAppName("DateQueryExample")
val sc = new SparkContext(sparkConf)
val query = SimpleQuery(DateValue(Format("MM/DD/YYYY") > Date("01/01/2015")))
val queryOptions = RyftQueryOptions("chicago.pcrime", 10, 0 toByte)
val ryftRDD = sc.ryftRDD(Seq(query), queryOptions)
logInfo(s"RDD first: ${ryftRDD.first()}")
}
|
getryft/spark-ryft-connector
|
examples/src/main/scala/com/ryft/spark/connector/examples/DateQueryExample.scala
|
Scala
|
bsd-3-clause
| 2,603
|
package mesosphere.marathon
package raml
import mesosphere.marathon.core.pod
import mesosphere.marathon.stream.Implicits._
import mesosphere.mesos.protos.Implicits._
import org.apache.mesos.Protos.ContainerInfo.DockerInfo.{Network => DockerNetworkMode}
trait NetworkConversion {
import NetworkConversionMessages._
implicit val networkRamlReader: Reads[Network, pod.Network] =
Reads { raml =>
raml.mode match {
case NetworkMode.Host => pod.HostNetwork
case NetworkMode.ContainerBridge => pod.BridgeNetwork(raml.labels)
case NetworkMode.Container => pod.ContainerNetwork(
// we expect validation to catch this problem first. but it's possible that migration
// also runs into this problem so we handle it explicitly.
raml.name.getOrElse(throw SerializationFailedException(ContainerNetworkRequiresName)),
raml.labels
)
}
}
implicit val networkRamlWriter: Writes[pod.Network, Network] = Writes {
case cnet: pod.ContainerNetwork =>
Network(
name = Some(cnet.name),
mode = NetworkMode.Container,
labels = cnet.labels
)
case br: pod.BridgeNetwork =>
Network(
mode = NetworkMode.ContainerBridge,
labels = br.labels
)
case pod.HostNetwork => Network(mode = NetworkMode.Host)
}
implicit val protocolWrites: Writes[String, NetworkProtocol] = Writes { protocol =>
// Regression MARATHON-8575
if (protocol == "tcp,udp") {
NetworkProtocol.UdpTcp
} else {
NetworkProtocol.fromString(protocol).getOrElse(throw new IllegalStateException(s"unsupported protocol $protocol"))
}
}
implicit val portDefinitionWrites: Writes[state.PortDefinition, PortDefinition] = Writes { port =>
PortDefinition(port.port, port.labels, port.name, port.protocol.toRaml[NetworkProtocol])
}
implicit val portDefinitionRamlReader: Reads[PortDefinition, state.PortDefinition] = Reads { portDef =>
state.PortDefinition(
port = portDef.port,
protocol = portDef.protocol.value,
name = portDef.name,
labels = portDef.labels
)
}
implicit val portMappingWrites: Writes[state.Container.PortMapping, ContainerPortMapping] = Writes { portMapping =>
ContainerPortMapping(
networkNames = portMapping.networkNames,
containerPort = portMapping.containerPort,
hostPort = portMapping.hostPort,
labels = portMapping.labels,
name = portMapping.name,
protocol = portMapping.protocol.toRaml[NetworkProtocol],
servicePort = portMapping.servicePort
)
}
implicit val portMappingRamlReader: Reads[ContainerPortMapping, state.Container.PortMapping] = Reads {
case ContainerPortMapping(containerPort, hostPort, labels, name, protocol, servicePort, networkNames) =>
import state.Container.PortMapping._
state.Container.PortMapping(
containerPort = containerPort,
hostPort = hostPort.orElse(defaultInstance.hostPort),
servicePort = servicePort,
protocol = protocol.value,
name = name,
labels = labels,
networkNames = networkNames
)
}
implicit val containerPortMappingProtoRamlWriter: Writes[Protos.ExtendedContainerInfo.PortMapping, ContainerPortMapping] = Writes { mapping =>
ContainerPortMapping(
containerPort = mapping.whenOrElse(_.hasContainerPort, _.getContainerPort, ContainerPortMapping.DefaultContainerPort),
hostPort = mapping.when(_.hasHostPort, _.getHostPort).orElse(ContainerPortMapping.DefaultHostPort),
labels = mapping.whenOrElse(_.getLabelsCount > 0, _.getLabelsList.flatMap(_.fromProto)(collection.breakOut), ContainerPortMapping.DefaultLabels),
name = mapping.when(_.hasName, _.getName).orElse(ContainerPortMapping.DefaultName),
protocol = mapping.when(_.hasProtocol, _.getProtocol).flatMap(NetworkProtocol.fromString).getOrElse(ContainerPortMapping.DefaultProtocol),
servicePort = mapping.whenOrElse(_.hasServicePort, _.getServicePort, ContainerPortMapping.DefaultServicePort),
networkNames = mapping.whenOrElse(_.getNetworkNamesList.size > 0, _.getNetworkNamesList.toList, ContainerPortMapping.DefaultNetworkNames)
)
}
implicit val dockerPortMappingProtoRamlWriter: Writes[Protos.ExtendedContainerInfo.DockerInfo.ObsoleteDockerPortMapping, ContainerPortMapping] = Writes { mapping =>
ContainerPortMapping(
containerPort = mapping.whenOrElse(_.hasContainerPort, _.getContainerPort, ContainerPortMapping.DefaultContainerPort),
hostPort = mapping.when(_.hasHostPort, _.getHostPort).orElse(ContainerPortMapping.DefaultHostPort),
labels = mapping.whenOrElse(_.getLabelsCount > 0, _.getLabelsList.flatMap(_.fromProto)(collection.breakOut), ContainerPortMapping.DefaultLabels),
name = mapping.when(_.hasName, _.getName).orElse(ContainerPortMapping.DefaultName),
protocol = mapping.whenOrElse(_.hasProtocol, _.getProtocol.toRaml[NetworkProtocol], ContainerPortMapping.DefaultProtocol),
servicePort = mapping.whenOrElse(_.hasServicePort, _.getServicePort, ContainerPortMapping.DefaultServicePort)
)
}
implicit val dockerNetworkInfoWrites: Writes[DockerNetworkMode, DockerNetwork] = Writes {
case DockerNetworkMode.BRIDGE => DockerNetwork.Bridge
case DockerNetworkMode.HOST => DockerNetwork.Host
case DockerNetworkMode.USER => DockerNetwork.User
case DockerNetworkMode.NONE => DockerNetwork.None
}
implicit val networkProtoRamlWriter: Writes[Protos.NetworkDefinition, Network] = Writes { net =>
import Protos.NetworkDefinition.Mode._
val mode = net.getMode match {
case HOST => NetworkMode.Host
case BRIDGE => NetworkMode.ContainerBridge
case CONTAINER => NetworkMode.Container
case badMode => throw new IllegalStateException(s"unsupported network mode $badMode")
}
Network(
name = if (net.hasName) Option(net.getName) else Network.DefaultName,
mode = mode,
labels = if (net.getLabelsCount > 0) net.getLabelsList.to[Seq].fromProto else Network.DefaultLabels
)
}
}
object NetworkConversion extends NetworkConversion
object NetworkConversionMessages {
val ContainerNetworkRequiresName = "container network must specify a name"
}
|
gsantovena/marathon
|
src/main/scala/mesosphere/marathon/raml/NetworkConversion.scala
|
Scala
|
apache-2.0
| 6,262
|
package com.bitmotif.part_1
object Exercise_3_5 {
import fpinscala.datastructures._
@annotation.tailrec
def dropWhile[A](list: List[A], f: A => Boolean): List[A] =
list match {
case Nil => list
case Cons(head, tail) =>
if ( !f(head) ) list
else dropWhile(tail, f)
}
}
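// A quick usage sketch added for illustration (it assumes the standard
// fpinscala List/Cons/Nil constructors referenced by the import above):
// dropWhile removes only the leading run of matching elements.
object Exercise_3_5_Example extends App {
  import fpinscala.datastructures._
  val xs = Cons(1, Cons(2, Cons(3, Cons(1, Nil))))
  // The trailing 1 survives because it comes after the first non-matching element.
  println(Exercise_3_5.dropWhile(xs, (a: Int) => a < 3)) // Cons(3, Cons(1, Nil))
}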
|
pjberry/functional-programming-in-scala
|
src/com/bitmotif/part_1/Exercise_3_5.scala
|
Scala
|
mit
| 321
|
package breeze.util
/*
Copyright 2009 David Hall, Daniel Ramage
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import collection.{mutable, IterableProxy}
import collection.JavaConverters._
import scala.collection.mutable.{ArrayBuffer,HashMap}
import java.util.Arrays
import java.util
/**
* Trait that marks an O(1) bidirectional map between Ints (increasing from 0)
* and T's. This class is used, for example, to efficiently build unique
* vector space mappings for strings. The methods in this trait do not mutate
* the underlying index. Use either a MutableIndex or one of the companion
* object constructor methods to build an index.
*
* @author dlwh, dramage
*/
@SerialVersionUID(1L)
trait Index[T] extends Iterable[T] with (T=>Int) with Serializable {
/** Number of elements in this index. */
def size : Int
/**
* Returns the int id of the given element (0-based) or -1 if not
* found in the index. This method never changes the index (even
* in MutableIndex).
*/
def apply(t : T) : Int
/**
* Returns Some(t) if this int corresponds to some object,
* and None otherwise.
*/
def unapply(i : Int) : Option[T]
/** Returns true if this index contains the element t. */
def contains(t : T) : Boolean
= apply(t) >= 0
/** Returns Some(i) if the object has been indexed, or None. */
def indexOpt(t : T) : Option[Int] = {
val i = apply(t)
if (i >= 0) Some(i) else None
}
/** Override Iterable's linear-scan indexOf to use our apply method. */
def indexOf(t: T) : Int =
apply(t)
/** Returns the indexed items along with their indices */
def pairs: Iterator[(T,Int)]
/**
* Returns an object at the given position or throws
* IndexOutOfBoundsException if it's not found.
*/
def get(i : Int) : T =
unapply(i).getOrElse(throw new IndexOutOfBoundsException())
override def equals(other : Any) : Boolean = {
other match {
case that : Index[_] if this.size == that.size =>
this sameElements that
case _ => false
}
}
protected lazy val defaultHashCode =
(17 /: this)(_ * 41 + _.hashCode)
override def hashCode = defaultHashCode
override def toString = {
iterator.mkString("Index(",",",")")
}
def |[U](right: Index[U]) = new EitherIndex(this,right)
}
/**
* A proxy passing all calls to the underlying index instance.
*
* @author dramage
*/
trait IndexProxy[T] extends Index[T] with IterableProxy[T] {
override def self : Index[T]
override def size = self.size
override def apply(t : T) = self.apply(t)
override def unapply(i : Int) = self.unapply(i)
override def contains(t : T) = self.contains(t)
override def indexOpt(t : T) = self.indexOpt(t)
override def indexOf(t : T) = self.indexOf(t)
override def get(i : Int) = self.get(i)
override def equals(other : Any) = self.equals(other)
override def hashCode = self.hashCode
/** Returns the indexed items along with their indices */
def pairs: Iterator[(T,Int)] = self.pairs
}
/**
* Synchronized view of an Index for thread-safe access.
*
* @author dramage
*/
trait SynchronizedIndex[T] extends Index[T] {
abstract override def size = this synchronized super.size
abstract override def apply(t : T) = this synchronized super.apply(t)
abstract override def unapply(pos : Int) = this synchronized super.unapply(pos)
abstract override def contains(t : T) = this synchronized super.contains(t)
abstract override def indexOpt(t : T) = this synchronized super.indexOpt(t)
abstract override def indexOf(t : T) = this synchronized super.indexOf(t)
abstract override def get(pos : Int) = this synchronized super.get(pos)
abstract override def equals(other : Any) = this synchronized super.equals(other)
abstract override def hashCode = this synchronized super.hashCode
}
/**
* An Index that contains an extra method: <em>index</em> that adds the
* given element (if necessary), returning its (possibly new) position in
* the index.
*
* @author dramage
*/
trait MutableIndex[T] extends Index[T] {
/**
* Returns an integer index for the given object, adding it to the
* index if it is not already present.
*/
def index(t : T) : Int
}
/**
* A proxy for MutableIndex instances.
*
* @author dramage
*/
trait MutableIndexProxy[T] extends IndexProxy[T] with MutableIndex[T] {
override def self : MutableIndex[T]
override def index(t : T) = self.index(t)
}
/**
* A synchronized view of a MutableIndex.
*
* @author dramage
*/
trait SynchronizedMutableIndex[T] extends MutableIndex[T] with SynchronizedIndex[T] {
abstract override def index(t : T) = this synchronized super.index(t)
}
/**
* Class that builds a 1-to-1 mapping between Ints and T's, which
* is very useful for efficiency concerns.
*
* Two extra views are provided: the index.synchronized view
* enables thread-safe access and the index.immutable view
* prevents the view from being updated.
*
* @author dlwh, dramage
*/
@SerialVersionUID(-7655100457525569617L)
class HashIndex[T] extends MutableIndex[T] with Serializable {
/** Forward map from int to object */
private val objects = new ArrayBuffer[T]
/** Map from object back to int index */
private val indices = new util.HashMap[T, Int]().asScala
override def size =
indices.size
override def apply(t : T) : Int =
indices.getOrElse(t,-1)
override def unapply(pos : Int) : Option[T] =
if (pos >= 0 && pos < objects.length) Some(objects(pos)) else None
override def contains(t : T) =
indices contains t
override def indexOpt(t : T): Option[Int] =
indices.get(t)
override def get(pos : Int) =
objects(pos); // throws IndexOutOfBoundsException as required
override def iterator =
objects.iterator
/** Returns the position of T, adding it to the index if it's not there. */
override def index(t: T) = {
if(!indices.contains(t)) {
val ind = objects.size
objects += t
indices.put(t, ind)
ind
} else {
indices(t)
}
}
def pairs = indices.iterator
}
/**
* For use when we need an index, but we already have (densely packed) positive
* ints and don't want hash overhead.
*
* @author dlwh, dramage
*/
class DenseIntIndex(beg: Int, end: Int) extends Index[Int] {
def this(end: Int) = this(0, end)
require(beg >= 0)
require(end >= beg)
override def size = end - beg
override def apply(t : Int) = if(contains(t)) t - beg else -1
override def unapply(i : Int) = if (i >= 0 && i < size) Some(i + beg) else None
override def contains(t : Int) = t >= beg && t < end
override def indexOpt(t : Int) =
if (contains(t)) Some(t - beg) else None
override def get(i : Int) =
if (i >= 0 && i < size) i + beg else throw new IndexOutOfBoundsException()
override def iterator = (beg until end).iterator
def pairs = iterator zip iterator.map(_ - beg)
override def hashCode = beg + 37 * end
}
/**
* Utilities for manipulating and creating Index objects.
*/
object Index {
/** Constructs an empty index. */
import scala.reflect.ClassTag.{Char=>MChar}
import scala.reflect.OptManifest
def apply[T:OptManifest]() : MutableIndex[T] = implicitly[OptManifest[T]] match {
case _ => new HashIndex[T];
}
/** Constructs an Index from some iterator. */
def apply[T:OptManifest](iterator : Iterator[T]) : Index[T] = {
val index = Index[T]()
// read through all iterator now -- don't lazily defer evaluation
for (element <- iterator) {
index.index(element)
}
index
}
/** Constructs an Index from some iterator. */
def apply[T](iterable : Iterable[T]) : Index[T] = {
val index = Index[T]()
// read through all iterator now -- don't lazily defer evaluation
for (element <- iterable) {
index.index(element)
}
index
}
/**
* Loads a String index, one line per item with line
* numbers (starting at 0) as the indices.
*/
def load(source : {def getLines : Iterator[String]}) : Index[String] = {
apply(source.getLines.map(_.stripLineEnd))
}
}
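/**
 * A small usage sketch added for illustration, using only the constructors
 * defined in this file: lookups in both directions on an index built from a
 * collection, plus on-demand growth through a HashIndex.
 */
object IndexUsageExample {
  val colours: Index[String] = Index(List("red", "green", "blue"))
  val greenId: Int = colours("green") // 1
  val missing: Int = colours("magenta") // -1; apply never mutates the index
  val third: Option[String] = colours.unapply(2) // Some("blue")

  // A mutable index grows as elements are first seen.
  val mutable = new HashIndex[String]
  val a1 = mutable.index("alpha") // 0, newly added
  val a2 = mutable.index("alpha") // 0, already present
}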
/**
* An Index over two kinds of things. Layout is straightforward:
* The first left.size entries are from the left index, while the next
* right.size are from the right index. Values are wrapped in Left/Right
*
* @author dlwh
*/
class EitherIndex[L,R](left: Index[L], right: Index[R]) extends Index[Either[L,R]] {
def apply(t: Either[L, R]) = t match {
case Left(l) => left(l)
case Right(r) => right(r) + rightOffset
}
/**
* What you add to the indices from the rightIndex to get indices into this index
* @return
*/
def rightOffset = left.size
def unapply(i: Int) = {
if(i < 0 || i >= size) None
else if(i < left.size) Some(Left(left.get(i)))
else Some(Right(right.get(i-left.size)))
}
def pairs = left.pairs.map { case (l,i) => Left(l) -> i} ++ right.pairs.map { case (r,i) => Right(r) -> (i + left.size) }
def iterator = left.iterator.map{Left(_)} ++ right.map{Right(_)}
override def size:Int = left.size + right.size
}
/**
* Lifts an index of T into an index of Option[T]. The last element is None. Everything else is as you expect.
*
* @author dlwh
*/
class OptionIndex[T](inner: Index[T]) extends Index[Option[T]] {
def apply(t: Option[T]) = t match {
case Some(l) => inner(l)
case None => inner.size
}
def unapply(i: Int) = {
if(i < 0 || i >= size) None
else if(i < inner.size) Some(Some(inner.get(i))) // sic!
else Some(None) // sic!
}
override def get(i: Int): Option[T] = {
if(i < 0 || i >= size) throw new IndexOutOfBoundsException()
else if(i < inner.size) Some(inner.get(i))
else None
}
def pairs = inner.pairs.map { case (l,i) => Some(l) -> i} ++ Iterator(None -> inner.size)
def iterator = inner.iterator.map{Some(_)} ++ Iterator(None)
override def size:Int = inner.size + 1
}
/**
* An Index over N kinds of things. A little type unsafe.
*
* @author dlwh
*/
final class CompositeIndex[U](indices: Index[_ <:U]*) extends Index[(Int,U)] {
private val offsets:Array[Int] = indices.unfold(0){ (n,i) => n + i.size}.toArray
/** If you know which component, and which index in that component,
* you can quickly get its mapped value with this function.
*/
@inline
def mapIndex(component: Int, uIndex: Int) = {
if(uIndex < 0) -1
else offsets(component) + uIndex
}
def apply(t: (Int,U)) = {
if(t._1 >= indices.length || t._1 < 0) -1
else {
indices(t._1).asInstanceOf[Index[U]](t._2) + offsets(t._1)
}
}
def unapply(i: Int) = {
if(i < 0 || i >= size) None
else {
val index = {
val res = Arrays.binarySearch(offsets,i)
if(res >= 0) res
else -(res+2)
}
Some(index -> indices(index).get(i-offsets(index)))
}
}
def pairs = indices.iterator.zipWithIndex.flatMap { case (index,i) => index.iterator.map { t => (i,t:U)}}.zipWithIndex
def iterator = indices.iterator.zipWithIndex.flatMap { case (index,i) => index.iterator.map{ t => (i -> t)}}
override def size:Int = offsets(offsets.length-1)
}
object EnumerationIndex {
def apply[T<:Enumeration](t: T): Index[t.Value] = new Index[t.Value] {
/**
* Returns the int id of the given element (0-based) or -1 if not
* found in the index. This method never changes the index (even
* in MutableIndex).
*/
def apply(x: t.Value): Int = x.id
/**
* Returns Some(t) if this int corresponds to some object,
* and None otherwise.
*/
def unapply(i: Int): Option[t.Value] = Some[t.Value](t(i))
/** Returns the indexed items along with their indices */
def pairs: Iterator[(t.Value, Int)] = for(v <- t.values.iterator) yield v -> v.id
def iterator: Iterator[t.Value] = t.values.iterator
override def size: Int = t.maxId
}
}
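/**
 * A brief sketch for EnumerationIndex added for illustration; the Colour
 * enumeration below is hypothetical. The enumeration ids double as the indices.
 */
object EnumerationIndexExample {
  object Colour extends Enumeration { val Red, Green, Blue = Value }
  val index: Index[Colour.Value] = EnumerationIndex(Colour)
  val greenId: Int = index(Colour.Green) // 1, i.e. Colour.Green.id
  val third: Option[Colour.Value] = index.unapply(2) // Some(Blue)
  val count: Int = index.size // 3 == Colour.maxId
}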
|
wavelets/breeze
|
src/main/scala/breeze/util/Index.scala
|
Scala
|
apache-2.0
| 12,304
|
package com.supergloo.utils
import com.supergloo.models._
/**
* https://www.supergloo.com
*/
object Utils {
private val httpStatuses = List(
"100", "101", "103",
"200", "201", "202", "203", "204", "205", "206",
"300", "301", "302", "303", "304", "305", "306", "307", "308",
"400", "401", "402", "403", "404", "405", "406", "407", "408", "409", "410", "411", "412", "413", "414", "415", "416", "417",
"500", "501", "502", "503", "504", "505", "511"
)
def populateHttpStatusList(): List[HttpStatus] = {
httpStatuses map createHttpStatus
}
def createHttpStatus(status: String): HttpStatus = status match {
case status if (status.startsWith("1")) => HttpInfoStatus(status)
case status if (status.startsWith("2")) => HttpSuccessStatus(status)
case status if (status.startsWith("3")) => HttpRedirectStatus(status)
case status if (status.startsWith("4")) => HttpClientErrorStatus(status)
case status if (status.startsWith("5")) => HttpServerErrorStatus(status)
}
}
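// A brief usage sketch added for illustration (it assumes only the HttpStatus
// hierarchy brought in by the com.supergloo.models import above): classify two
// codes and count the 2xx entries in the full list.
object UtilsExample extends App {
  val ok = Utils.createHttpStatus("200") // HttpSuccessStatus("200")
  val notFound = Utils.createHttpStatus("404") // HttpClientErrorStatus("404")
  val successCount = Utils.populateHttpStatusList().count {
    case _: HttpSuccessStatus => true
    case _ => false
  }
  println(s"$ok, $notFound, 2xx codes known: $successCount")
}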
|
tmcgrath/spark-scala
|
accessloganalyzer/src/main/scala/com/supergloo/utils/Utils.scala
|
Scala
|
cc0-1.0
| 1,026
|
package com.twitter.finagle.memcached.partitioning
import com.twitter.concurrent.Broker
import com.twitter.finagle._
import com.twitter.finagle.addr.WeightedAddress
import com.twitter.finagle.liveness.FailureAccrualFactory
import com.twitter.finagle.loadbalancer.LoadBalancerFactory
import com.twitter.finagle.memcached._
import com.twitter.finagle.serverset2.addr.ZkMetadata
import com.twitter.finagle.service.FailedService
import com.twitter.hashing._
import com.twitter.util._
import java.net.InetSocketAddress
import scala.collection.{breakOut, mutable}
/**
* Helper class for managing the nodes in the Ketama ring. Note that it tracks all addresses
* as weighted addresses, which means a weight change for a given node will be considered a
* node restart. This way implementations can adjust their partitions if weight is a factor
* in partitioning.
*/
private[partitioning] class KetamaNodeManager[Req, Rep, Key](
underlying: Stack[ServiceFactory[Req, Rep]],
params: Stack.Params,
numReps: Int = KetamaPartitioningService.DefaultNumReps
) { self =>
private[this] val statsReceiver = {
val param.Stats(stats) = params[param.Stats]
stats.scope("partitioner")
}
private[this] val ejectionCount = statsReceiver.counter("ejections")
private[this] val revivalCount = statsReceiver.counter("revivals")
private[this] val nodeLeaveCount = statsReceiver.counter("leaves")
private[this] val nodeJoinCount = statsReceiver.counter("joins")
private[this] val keyRingRedistributeCount = statsReceiver.counter("redistributes")
// nodes in the ketama ring, representing the backend services
private[this] val nodes = mutable.Map[KetamaClientKey, Node]()
private[this] val liveNodesGauge = statsReceiver.addGauge("live_nodes") {
self.synchronized { nodes.count { case (_, Node(_, state)) => state == NodeState.Live } }
}
private[this] val deadNodesGauge = statsReceiver.addGauge("dead_nodes") {
self.synchronized { nodes.count { case (_, Node(_, state)) => state == NodeState.Ejected } }
}
// used when all cache nodes are ejected from the cache ring
private[this] val shardNotAvailableDistributor: Distributor[Future[Service[Req, Rep]]] =
new SingletonDistributor(Future.value(new FailedService(new ShardNotAvailableException)))
// We update those out of the request path so we need to make sure to synchronize on
// read-modify-write operations on `currentDistributor` and `distributor`.
// Note: Volatile-read from `partitionForKey` safety (not raciness) is guaranteed by JMM.
@volatile private[this] var currentDistributor: Distributor[Future[Service[Req, Rep]]] =
shardNotAvailableDistributor
private[this] type KetamaKeyAndNode = (KetamaClientKey, KetamaNode[Future[Service[Req, Rep]]])
// snapshot is used to detect new nodes when there is a change in bound addresses
@volatile private[this] var snapshot: Set[KetamaKeyAndNode] = Set.empty
// The nodeHealthBroker is used to track the health of the nodes. Optionally, when the param
// 'Memcached.param.EjectFailedHost' is true, unhealthy nodes are removed from the hash ring. It
// connects the KetamaFailureAccrualFactory with the partition service to communicate the
// health events.
private[this] val nodeHealthBroker = new Broker[NodeHealth]
// We also listen on a broker to eject/revive cache nodes.
nodeHealthBroker.recv.foreach {
case NodeMarkedDead(key) => ejectNode(key)
case NodeRevived(key) => reviveNode(key)
}
private[this] sealed trait NodeState
private[this] object NodeState {
case object Live extends NodeState
case object Ejected extends NodeState
}
// Node represents backend partition
private[this] case class Node(
node: KetamaNode[Future[Service[Req, Rep]]],
var state: NodeState
)
private[this] val ketamaNodesChanges: Event[Set[KetamaKeyAndNode]] = {
// Addresses in the current serverset that have been processed and have associated cache nodes.
// Access synchronized on `self`
var mapped: Map[Address, KetamaKeyAndNode] = Map.empty
// Last set Addrs that have been processed.
// Access synchronized on `self`
var prevAddrs: Set[Address] = Set.empty
// `map` is called on updates to `addrs`.
// Cache nodes must only be created for new additions to the set of addresses; therefore
// we must keep track of addresses in the current set that already have associated nodes
val nodes: Var[Set[KetamaKeyAndNode]] = {
// Intercept the params meant for Loadbalancer inserted by the BindingFactory
val LoadBalancerFactory.Dest(dest: Var[Addr]) = params[LoadBalancerFactory.Dest]
dest.map {
case Addr.Bound(currAddrs, _) =>
self.synchronized {
// Add new nodes for new addresses by finding the difference between the two sets
mapped ++= (currAddrs &~ prevAddrs).collect {
case weightedAddr @ WeightedAddress(addr @ Address.Inet(ia, metadata), w) =>
val (shardIdOpt: Option[String], boundAddress: Addr) =
metadata match {
case CacheNodeMetadata(_, shardId) =>
// This means the destination was resolved by TwitterCacheResolver.
twcacheConversion(shardId, ia)
case _ =>
ZkMetadata.fromAddrMetadata(metadata) match {
case Some(ZkMetadata(Some(shardId))) =>
(Some(shardId.toString), Addr.Bound(addr))
case _ =>
(None, Addr.Bound(addr))
}
}
val node = CacheNode(ia.getHostName, ia.getPort, w.asInstanceOf[Int], shardIdOpt)
val key = KetamaClientKey.fromCacheNode(node)
val service = mkService(boundAddress, key)
weightedAddr -> (
key -> KetamaNode[Future[Service[Req, Rep]]](
key.identifier,
node.weight,
service
)
)
}
// Remove old nodes no longer in the serverset.
mapped --= prevAddrs &~ currAddrs
prevAddrs = currAddrs
}
mapped.values.toSet
case _ =>
Set.empty
}
}
nodes.changes.filter(_.nonEmpty)
}
/**
* This code is needed to support the old "twcache" scheme. The TwitterCacheResolver uses
* CacheNodeMetadata instead of ZkMetadata for shardId, and the address may be unresolved, so we
* perform the necessary conversions here.
*/
private[this] def twcacheConversion(
shardId: Option[String],
ia: InetSocketAddress
): (Option[String], Addr) = {
val resolved = if (ia.isUnresolved) {
new InetSocketAddress(ia.getHostName, ia.getPort)
} else {
ia
}
// Convert CacheNodeMetadata to ZkMetadata
(
shardId,
Addr.Bound(
Address.Inet(resolved, ZkMetadata.toAddrMetadata(ZkMetadata(shardId.map(_.toInt))))
)
)
}
// We listen for changes to the set of nodes to update the cache ring.
private[this] val nodeWatcher: Closable = ketamaNodesChanges.respond(updateNodes)
private[this] def mkService(addr: Addr, key: KetamaClientKey): Future[Service[Req, Rep]] = {
val modifiedParams = params + LoadBalancerFactory.Dest(Var.value(addr))
val next = underlying
.replace(
FailureAccrualFactory.role,
KetamaFailureAccrualFactory.module[Req, Rep](key, nodeHealthBroker)
)
.make(modifiedParams)
next().map { svc =>
new ServiceProxy(svc) {
override def close(deadline: Time): Future[Unit] = {
Future.join(Seq(Closable.all(svc, next).close(deadline), super.close(deadline)))
}
}
}
}
private[this] def rebuildDistributor(): Unit = self.synchronized {
keyRingRedistributeCount.incr()
val liveNodes = nodes.collect({ case (_, Node(node, NodeState.Live)) => node })(breakOut)
currentDistributor = if (liveNodes.isEmpty) {
shardNotAvailableDistributor
} else {
new KetamaDistributor(liveNodes, numReps, false /*oldLibMemcachedVersionComplianceMode*/ )
}
}
private[this] def updateNodes(current: Set[KetamaKeyAndNode]): Unit = {
self.synchronized {
val old = snapshot
// remove old nodes and release clients
nodes --= (old &~ current).collect {
case (key, node) =>
node.handle.map { (service: Service[Req, Rep]) =>
service.close()
}
nodeLeaveCount.incr()
key
}
// new joined node appears as Live state
nodes ++= (current &~ old).collect {
case (key, node: KetamaNode[Future[Service[Req, Rep]]]) =>
nodeJoinCount.incr()
key -> Node(node, NodeState.Live)
}
snapshot = current
rebuildDistributor()
}
}
private[this] def ejectNode(key: KetamaClientKey) = self.synchronized {
nodes.get(key) match {
case Some(node) if node.state == NodeState.Live =>
node.state = NodeState.Ejected
rebuildDistributor()
ejectionCount.incr()
case _ =>
}
}
private[this] def reviveNode(key: KetamaClientKey) = self.synchronized {
nodes.get(key) match {
case Some(node) if node.state == NodeState.Ejected =>
node.state = NodeState.Live
rebuildDistributor()
revivalCount.incr()
case _ =>
}
}
def getServiceForHash(hash: Long): Future[Service[Req, Rep]] = {
currentDistributor.nodeForHash(hash)
}
def close(deadline: Time): Future[Unit] = {
nodeWatcher.close(deadline)
}
}
|
mkhq/finagle
|
finagle-memcached/src/main/scala/com/twitter/finagle/memcached/partitioning/KetamaNodeManager.scala
|
Scala
|
apache-2.0
| 9,699
|
package beamly.core.lang.future
package extensions
import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext, Future}
import scala.util.{Failure, Success, Try}
import scala.util.control.NonFatal
final class FutureW[+A](val underlying: Future[A]) extends AnyVal {
/**
* Maps a [[scala.util.Try]] to a value.
* @param f Function which maps the [[scala.util.Try]] to a value
* @param executor Execution context
* @tparam B The return type
* @return New [[scala.concurrent.Future]]
*/
@inline
def mapTry[B](f: Try[A] => B)(implicit executor: ExecutionContext): Future[B] = {
promising[B] { promise =>
underlying onComplete { x =>
try {
promise success f(x)
} catch {
case e if NonFatal(e) => promise failure e
}
}
}
}
/**
* Maps a [[scala.util.Try]] to a new [[scala.concurrent.Future]].
* @param f Function which maps the [[scala.util.Try]] to a value
* @param executor Execution context
* @tparam B The return type
* @return New [[scala.concurrent.Future]]
*/
@inline
def flatMapTry[B](f: Try[A] => Future[B])(implicit executor: ExecutionContext): Future[B] = {
promising[B] { promise =>
underlying onComplete { x =>
try {
promise completeWith f(x)
} catch {
case e if NonFatal(e) => promise failure e
}
}
}
}
/**
* @return The result from the [[scala.concurrent.Future]] after awaiting a result
*/
@inline
def get(): A = get(5.seconds) // Duration.Inf)
/**
* @param duration The amount of time to wait for the future to return
* @return The result from the [[scala.concurrent.Future]] after awaiting a result
*/
@inline
def get(duration: Duration): A = Await result (underlying, duration)
/**
* @return The [[scala.concurrent.Future]] after awaiting a result
*/
@inline
def await(): Future[A] = await(5.seconds) // Duration.Inf)
/**
* @return The [[scala.concurrent.Future]] after awaiting a result
*/
@inline
def await(duration: Duration): Future[A] = {
Await ready (underlying, duration)
}
/**
* Maps successful or failed values into a new [[scala.concurrent.Future]]
* Catches any exceptions from conversion and returns failed future.
*
* @param failed Function for converting a [[scala.Throwable]] to a successful value
* @param successful Function for converting a successful value to a new success
* @param ec The execution context
* @tparam X The new success type
* @return [[scala.concurrent.Future]] containing the new successful value
*/
@inline
def fold[X](failed: Throwable => X, successful: A => X)(implicit ec: ExecutionContext): Future[X] = {
promising[X] { promise =>
underlying onComplete {
case Success(a) => try promise success successful(a) catch { case e if NonFatal(e) => promise failure e }
case Failure(f) => try promise success failed(f) catch { case e if NonFatal(e) => promise failure e }
}
}
}
/**
* Maps successful or failed values into a new [[scala.concurrent.Future]]
* Catches any exceptions from conversion and returns failed future.
*
* @param failed Function for converting a [[scala.Throwable]] to a successful value
* @param successful Function for converting a successful value to a new success
* @param ec The execution context
* @tparam X The new success type
* @return [[scala.concurrent.Future]] containing the new successful value
*/
@inline
def flatFold[X](failed: Throwable => Future[X], successful: A => Future[X])(implicit ec: ExecutionContext): Future[X] = {
promising[X] { promise =>
underlying onComplete {
case Success(a) => try promise completeWith successful(a) catch { case e if NonFatal(e) => promise failure e }
case Failure(f) => try promise completeWith failed(f) catch { case e if NonFatal(e) => promise failure e }
}
}
}
}
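/**
 * A minimal usage sketch added for illustration. It wraps the class by hand
 * instead of relying on the package's implicit conversions (which live outside
 * this file), and folds both outcomes of a future into a string.
 */
object FutureWExample extends App {
  import scala.concurrent.ExecutionContext.Implicits.global
  val f: Future[Int] = Future.successful(41)
  val folded: Future[String] =
    new FutureW(f).fold(err => s"failed: ${err.getMessage}", n => s"got ${n + 1}")
  println(new FutureW(folded).get(1.second)) // prints "got 42"
}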
|
beamly/beamly.core.lang
|
src/main/scala/beamly/core/lang/future/extensions/FutureW.scala
|
Scala
|
apache-2.0
| 3,998
|
/*
* Artificial Intelligence for Humans
* Volume 2: Nature Inspired Algorithms
* Java Version
* http://www.aifh.org
* http://www.jeffheaton.com
*
* Code repository:
* https://github.com/jeffheaton/aifh
*
* Copyright 2014 by Jeff Heaton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information on Heaton Research copyrights, licenses
* and trademarks visit:
* http://www.heatonresearch.com/copyright
*/
package com.heatonresearch.aifh.evolutionary.genome
import com.heatonresearch.aifh.evolutionary.population.Population
import com.heatonresearch.aifh.evolutionary.species.Species
import java.io.Serializable
/**
* A basic abstract genome. Provides base functionality.
*/
@SerialVersionUID(1L)
abstract class BasicGenome extends Genome with Serializable {
override def toString: String = s"[${getClass.getSimpleName}: score=$score]"
/**
* The adjusted score. If unknown, it is set to NaN.
*/
var adjustedScore: Double = Double.NaN
/**
* The score of this genome.
*/
var score: Double = Double.NaN
/**
* The population this genome belongs to.
*/
var population: Population = null
/**
* The birth generation for this genome.
*/
var birthGeneration: Int = 0
/**
* The species of this genome.
*/
var species: Species = null
}
|
PeterLauris/aifh
|
vol2/vol2-scala-examples/src/main/scala/com/heatonresearch/aifh/evolutionary/genome/BasicGenome.scala
|
Scala
|
apache-2.0
| 1,818
|
package vultura.factor
import org.specs2._
/**
* Test parsing of *.uai files into [[vultura.factor.Problem]].
*/
class UAIParserTest extends Specification {
def is =
"parse all example uai files" ! (SampleProblems.examples must contain((p: SampleProblems.Example) => {
f"${p.filename} is ok" ==> (p.problem must not (throwAn[Exception]))
}).forall)
}
|
ziggystar/vultura-factor
|
src/test/scala/vultura/factor/UAIParserTest.scala
|
Scala
|
mit
| 371
|
package com.twitter.finagle.pushsession
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.pushsession.utils.MockChannelHandle
import com.twitter.finagle.{Failure, IndividualRequestTimeoutException => FinagleTimeoutException}
import com.twitter.finagle.stats.{InMemoryStatsReceiver, NullStatsReceiver}
import com.twitter.util.{Await, MockTimer, Promise, Time, TimeoutException => UtilTimeoutException}
import java.net.{InetSocketAddress, SocketAddress}
import org.mockito.Mockito.never
import org.scalatest.FunSuite
import org.scalatestplus.mockito.MockitoSugar
class PipeliningMockChannelHandle[In, Out] extends MockChannelHandle[In, Out] {
// The remote address is logged when the pipeline stalls
override val remoteAddress: SocketAddress = new InetSocketAddress("1.2.3.4", 100)
}
class PipeliningClientPushSessionTest extends FunSuite with MockitoSugar {
val exns = Seq(
("util", new UtilTimeoutException("boom!"), never()),
("finagle", new FinagleTimeoutException(1.second), never())
)
exns.foreach {
case (kind, exc, numClosed) =>
test(s"Should ignore $kind timeout interrupts immediately") {
val timer = new MockTimer
Time.withCurrentTimeFrozen { _ =>
val handle = new PipeliningMockChannelHandle[Unit, Unit]()
val session =
new PipeliningClientPushSession[Unit, Unit](
handle,
NullStatsReceiver,
10.seconds,
timer
).toService
val f = session(())
f.raise(exc)
assert(!handle.closedCalled)
}
}
}
test("Should not fail the request on an interrupt") {
val timer = new MockTimer
Time.withCurrentTimeFrozen { ctl =>
val handle = new PipeliningMockChannelHandle[Unit, Unit]()
val service =
new PipeliningClientPushSession[Unit, Unit](
handle,
NullStatsReceiver,
10.seconds,
timer
).toService
val f = service(())
f.raise(new UtilTimeoutException("boom!"))
assert(!f.isDefined)
}
}
test("Should handle timeout interrupts after waiting `stallTimeout`") {
val stallTimeout = 10.seconds
val timer = new MockTimer
Time.withCurrentTimeFrozen { ctl =>
val handle = new PipeliningMockChannelHandle[Unit, Unit]()
val service =
new PipeliningClientPushSession[Unit, Unit](
handle,
NullStatsReceiver,
stallTimeout,
timer
).toService
val f = service(())
f.raise(new UtilTimeoutException("boom!"))
assert(!handle.closedCalled)
ctl.advance(stallTimeout)
timer.tick()
handle.serialExecutor.executeAll()
assert(handle.closedCalled)
val failure = intercept[Failure] {
Await.result(f, 5.seconds)
}
assert(failure.why.contains("The connection pipeline could not make progress"))
}
}
test("Should not handle interrupts after waiting if the pipeline clears") {
val stallTimeout = 10.seconds
val timer = new MockTimer
Time.withCurrentTimeFrozen { ctl =>
val handle = new PipeliningMockChannelHandle[Unit, Unit]()
val session =
new PipeliningClientPushSession[Unit, Unit](
handle,
NullStatsReceiver,
stallTimeout,
timer
)
val service = session.toService
val f = service(())
f.raise(new UtilTimeoutException("boom!"))
assert(!handle.closedCalled)
handle.serialExecutor.executeAll()
session.receive(())
ctl.advance(stallTimeout)
timer.tick()
assert(!handle.closedCalled)
}
}
test("queue size") {
val stats = new InMemoryStatsReceiver()
val timer = new MockTimer
var p0, p1, p2 = new Promise[String]()
val handle = new PipeliningMockChannelHandle[String, String]()
val session =
new PipeliningClientPushSession[String, String](
handle,
stats,
10.seconds,
timer
)
val service = session.toService
assert(session.getQueueSize == 0)
service("0")
service("1")
service("2")
handle.serialExecutor.executeAll()
assert(session.getQueueSize == 3)
session.receive("resp")
assert(session.getQueueSize == 2)
session.receive("resp")
assert(session.getQueueSize == 1)
session.receive("resp")
assert(session.getQueueSize == 0)
}
}
|
luciferous/finagle
|
finagle-core/src/test/scala/com/twitter/finagle/pushsession/PipeliningClientPushSessionTest.scala
|
Scala
|
apache-2.0
| 4,436
|
package d04
trait Collect[T]
case class CollectS[T](tail: Collect[T], head: T) extends Collect[T]
case class CollectT[T]() extends Collect[T]
trait Number[A] {
def execute[T <: TypeContext](contexts: Context[T, A])(s: T#Parameter, t: T#toDataType): Collect[T#Result]
}
case class NumberS[A](tail: () => Number[A], head: A) extends Number[A] {
override def execute[T <: TypeContext](context: Context[T, A])(parameter: T#Parameter, t: T#toDataType): Collect[T#Result] = {
val newDataCtx = context.convert(t, tail())
context.bindS(newDataCtx, parameter, head)
}
}
case class NumberT[A](tail: () => Number[A]) extends Number[A] {
override def execute[T <: TypeContext](context: Context[T, A])(parameter: T#Parameter, t: T#toDataType): Collect[T#Result] = {
val newDataCtx = context.convert(t, tail())
context.bindT(newDataCtx, parameter)
}
}
trait Context[T <: TypeContext, A] {
def convert(t: T#toDataType, current: Number[A]): T#DataCtx
def bindS(number: T#DataCtx, parameter: T#Parameter, head: A): Collect[T#Result]
def bindT(number: T#DataCtx, parameter: T#Parameter): Collect[T#Result]
}
trait TypeContext {
type DataCtx
type toDataType
type Parameter
type Result
}
|
djx314/ubw
|
a58-jysg项目存档/src/main/scala/d04/Counter.scala
|
Scala
|
bsd-3-clause
| 1,237
|
/*
* Copyright 2012-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package laika.bundle
import laika.ast._
import laika.parse.Parser
import laika.parse.markup.DocumentParser.DocumentInput
/** Bundles a collection of all types of parsers used in a transformation.
*
* The parsers for text markup and configuration headers are meant to complement
* base parsers defined by the host language. If they fail for a given input the built-in parsers
* will still be tried for the same block, span or configuration header respectively.
*
* The parsers for stylesheets and templates on the other hand are meant to overwrite
* any previously installed parsers.
*
* @param blockParsers parsers for block elements in text markup, complementing the parsers of the host language
* @param spanParsers parsers for span elements in text markup, complementing the parsers of the host language
* @param syntaxHighlighters parsers for syntax highlighting of code blocks
* @param markupParserHooks hooks for markup parsers to control aspects beyond the individual span and block parsers
* @param configProvider parser for configuration headers in text markup and template documents and configuration documents
* @param templateParser parser for template documents
* @param styleSheetParser parser for CSS documents
*/
case class ParserBundle(blockParsers: Seq[BlockParserBuilder] = Nil,
spanParsers: Seq[SpanParserBuilder] = Nil,
syntaxHighlighters: Seq[SyntaxHighlighter] = Nil,
markupParserHooks: Option[ParserHooks] = None,
configProvider: Option[ConfigProvider] = None,
templateParser: Option[Parser[TemplateRoot]] = None,
styleSheetParser: Option[Parser[Set[StyleDeclaration]]] = None) {
/** Merges this instance with the specified base.
* Collections of parsers will be merged.
* Optional parsers in this instance will overwrite optional parsers
* in the base (if defined), with the base only serving as a fallback.
*/
def withBase (base: ParserBundle): ParserBundle =
ParserBundle(
blockParsers ++ base.blockParsers,
spanParsers ++ base.spanParsers,
syntaxHighlighters ++ base.syntaxHighlighters,
(markupParserHooks.toSeq ++ base.markupParserHooks.toSeq).reduceLeftOption(_ withBase _),
configProvider.orElse(base.configProvider),
templateParser.orElse(base.templateParser),
styleSheetParser.orElse(base.styleSheetParser)
)
/** Just the extensions for the text markup parser defined in this bundle.
* Fallback instances will be added where appropriate for parsers or hooks not defined
* in this bundle.
*/
def markupExtensions: MarkupExtensions =
MarkupExtensions(blockParsers, spanParsers, syntaxHighlighters, markupParserHooks.getOrElse(ParserHooks()))
}
/** Hooks for markup parsers to control aspects beyond the individual span and block
* parsers defined for the host language.
*
* @param postProcessBlocks function invoked for every block container, allowing post-processing of the result
* @param postProcessDocument function invoked after parsing but before rewriting, allowing to modify the document
* @param preProcessInput function invoked before parsing, allowing to pre-process the input
*/
case class ParserHooks(postProcessBlocks: Seq[Block] => Seq[Block] = identity,
postProcessDocument: UnresolvedDocument => UnresolvedDocument = identity,
preProcessInput: DocumentInput => DocumentInput = identity) {
/** Merges this instance with the specified base.
* The functions specified in the base are always invoked before
* the functions in this instance.
*/
def withBase (base: ParserHooks): ParserHooks = ParserHooks(
base.postProcessBlocks andThen postProcessBlocks,
base.postProcessDocument andThen postProcessDocument,
base.preProcessInput andThen preProcessInput
)
}
/** Bundles extensions for the text markup parsers defined for the host language to support additional
* syntax not recognized by the base parsers.
*
* When extension parsers fail for a given input the built-in parsers
* will still be tried for the same block or span respectively.
*
* @param blockParsers parsers for block elements in text markup, complementing the parsers of the host language
* @param spanParsers parsers for span elements in text markup, complementing the parsers of the host language
* @param syntaxHighlighters parsers for syntax highlighting of code blocks
* @param parserHooks hooks for markup parsers to control aspects beyond the individual span and block parsers
*/
case class MarkupExtensions (blockParsers: Seq[BlockParserBuilder],
spanParsers: Seq[SpanParserBuilder],
syntaxHighlighters: Seq[SyntaxHighlighter],
parserHooks: ParserHooks)
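/** A minimal merge sketch added for illustration, using only defaults from this file:
  * parser collections are concatenated, while optional parsers from the extension win
  * and the base is kept as a fallback.
  */
object ParserBundleMergeExample {
  val base = ParserBundle()
  val extension = ParserBundle(markupParserHooks = Some(ParserHooks()))
  val merged: ParserBundle = extension.withBase(base)
  // merged.markupParserHooks is defined (from the extension); merged.blockParsers stays empty.
}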
|
planet42/Laika
|
core/shared/src/main/scala/laika/bundle/ParserBundle.scala
|
Scala
|
apache-2.0
| 5,563
|
package model
import scala.slick.driver.MySQLDriver.simple._
import scala.slick.ast.ColumnOption.{Default, NotNull}
/**
* CREATE TABLE IF NOT EXISTS `wordnet31_snapshot`.`adjpositiontypes` (
`position` ENUM('a','p','ip') NOT NULL,
`positionname` VARCHAR(24) NOT NULL,
PRIMARY KEY (`position`))
ENGINE = InnoDB
DEFAULT CHARACTER SET = utf8
*/
case class adjpositiontypes(position:String,positionname:String)
class _adjpositiontypes(tag:Tag) extends Table[adjpositiontypes](tag,"adjpositiontypes"){
def position = column[String]("position", O.PrimaryKey, O.DBType("ENUM('a','p','ip')"), NotNull) // This is the primary key column
def positionname = column[String]("positionname",NotNull,O.DBType("VARCHAR(24)"))
def * = (position, positionname) <> (adjpositiontypes.tupled, adjpositiontypes.unapply)
}
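// A minimal query sketch added for illustration (Slick 2.x style, matching the
// import above; obtaining a Session from a configured Database is outside this file).
object adjpositiontypesQueries {
  val adjPositionTypes = TableQuery[_adjpositiontypes]
  def loadAll()(implicit session: Session): List[adjpositiontypes] =
    adjPositionTypes.list
}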
|
gaoyike/WordNet-Scala-Slick-Model
|
src/main/scala/model/adjpositiontypes.scala
|
Scala
|
bsd-2-clause
| 815
|
package pl.writeonly.re.main
import pl.writeonly.re.shared.core._
import slogging._
import slogging.TerminalLoggerFactory.TerminalControlCode
object Main extends App {
LoggerConfig.factory = TerminalLoggerFactory()
TerminalLoggerFactory.infoCode = TerminalControlCode.green
TerminalLoggerFactory.debugCode = TerminalControlCode.cyan
TerminalLoggerFactory.traceCode = TerminalControlCode.blue
LoggerConfig.level = LogLevel.TRACE
Core.apply("Native")
StrictLoggingCore.rainbow()
}
|
writeonly/resentiment
|
re/native/src/main/scala/pl/writeonly/re/main/Main.scala
|
Scala
|
mit
| 497
|
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala
import scala.collection.generic._
import scala.collection.{ mutable, immutable }
import mutable.{ ArrayBuilder, ArraySeq }
import scala.compat.Platform.arraycopy
import scala.reflect.ClassTag
import scala.runtime.ScalaRunTime.{ array_apply, array_update }
/** Contains a fallback builder for arrays when the element type
* does not have a class tag. In that case a generic array is built.
*/
class FallbackArrayBuilding {
/** A builder factory that generates a generic array.
* Called instead of `Array.newBuilder` if the element type of an array
* does not have a class tag. Note that fallbackBuilder factory
* needs an implicit parameter (otherwise it would not be dominated in
* implicit search by `Array.canBuildFrom`). We make sure that
* implicit search is always successful.
*/
implicit def fallbackCanBuildFrom[T](implicit m: DummyImplicit): CanBuildFrom[Array[_], T, ArraySeq[T]] =
new CanBuildFrom[Array[_], T, ArraySeq[T]] {
def apply(from: Array[_]) = ArraySeq.newBuilder[T]
def apply() = ArraySeq.newBuilder[T]
}
}
/** Utility methods for operating on arrays.
* For example:
* {{{
* val a = Array(1, 2)
* val b = Array.ofDim[Int](2)
* val c = Array.concat(a, b)
* }}}
* where the array objects `a`, `b` and `c` have respectively the values
* `Array(1, 2)`, `Array(0, 0)` and `Array(1, 2, 0, 0)`.
*
* @author Martin Odersky
* @version 1.0
*/
object Array extends FallbackArrayBuilding {
def emptyBooleanArray = EmptyArrays.emptyBooleanArray
def emptyByteArray = EmptyArrays.emptyByteArray
def emptyCharArray = EmptyArrays.emptyCharArray
def emptyDoubleArray = EmptyArrays.emptyDoubleArray
def emptyFloatArray = EmptyArrays.emptyFloatArray
def emptyIntArray = EmptyArrays.emptyIntArray
def emptyLongArray = EmptyArrays.emptyLongArray
def emptyShortArray = EmptyArrays.emptyShortArray
def emptyObjectArray = EmptyArrays.emptyObjectArray
private object EmptyArrays {
val emptyBooleanArray = new Array[Boolean](0)
val emptyByteArray = new Array[Byte](0)
val emptyCharArray = new Array[Char](0)
val emptyDoubleArray = new Array[Double](0)
val emptyFloatArray = new Array[Float](0)
val emptyIntArray = new Array[Int](0)
val emptyLongArray = new Array[Long](0)
val emptyShortArray = new Array[Short](0)
val emptyObjectArray = new Array[Object](0)
}
implicit def canBuildFrom[T](implicit t: ClassTag[T]): CanBuildFrom[Array[_], T, Array[T]] = {
@inline
class ArrayCanBuildFrom extends CanBuildFrom[Array[_], T, Array[T]] {
def apply(from: Array[_]) = ArrayBuilder.make[T]()(t)
def apply() = ArrayBuilder.make[T]()(t)
}
new ArrayCanBuildFrom
}
/**
* Returns a new [[scala.collection.mutable.ArrayBuilder]].
*/
def newBuilder[T](implicit t: ClassTag[T]): ArrayBuilder[T] = ArrayBuilder.make[T]()(t)
private def slowcopy(src : AnyRef,
srcPos : Int,
dest : AnyRef,
destPos : Int,
length : Int) {
var i = srcPos
var j = destPos
val srcUntil = srcPos + length
while (i < srcUntil) {
array_update(dest, j, array_apply(src, i))
i += 1
j += 1
}
}
/** Copy one array to another.
* Equivalent to Java's
* `System.arraycopy(src, srcPos, dest, destPos, length)`,
* except that this also works for polymorphic and boxed arrays.
*
* Note that the passed-in `dest` array will be modified by this call.
*
* @param src the source array.
* @param srcPos starting position in the source array.
* @param dest destination array.
* @param destPos starting position in the destination array.
* @param length the number of array elements to be copied.
*
* @see `java.lang.System#arraycopy`
*/
def copy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int) {
val srcClass = src.getClass
if (srcClass.isArray && dest.getClass.isAssignableFrom(srcClass))
arraycopy(src, srcPos, dest, destPos, length)
else
slowcopy(src, srcPos, dest, destPos, length)
}
/** Returns an array of length 0 */
def empty[T: ClassTag]: Array[T] = new Array[T](0)
/** Creates an array with given elements.
*
* @param xs the elements to put in the array
* @return an array containing all elements from xs.
*/
// Subject to a compiler optimization in Cleanup.
// Array(e0, ..., en) is translated to { val a = new Array(3); a(i) = ei; a }
def apply[T: ClassTag](xs: T*): Array[T] = {
val array = new Array[T](xs.length)
var i = 0
for (x <- xs.iterator) { array(i) = x; i += 1 }
array
}
/** Creates an array of `Boolean` objects */
// Subject to a compiler optimization in Cleanup, see above.
def apply(x: Boolean, xs: Boolean*): Array[Boolean] = {
val array = new Array[Boolean](xs.length + 1)
array(0) = x
var i = 1
for (x <- xs.iterator) { array(i) = x; i += 1 }
array
}
/** Creates an array of `Byte` objects */
// Subject to a compiler optimization in Cleanup, see above.
def apply(x: Byte, xs: Byte*): Array[Byte] = {
val array = new Array[Byte](xs.length + 1)
array(0) = x
var i = 1
for (x <- xs.iterator) { array(i) = x; i += 1 }
array
}
/** Creates an array of `Short` objects */
// Subject to a compiler optimization in Cleanup, see above.
def apply(x: Short, xs: Short*): Array[Short] = {
val array = new Array[Short](xs.length + 1)
array(0) = x
var i = 1
for (x <- xs.iterator) { array(i) = x; i += 1 }
array
}
/** Creates an array of `Char` objects */
// Subject to a compiler optimization in Cleanup, see above.
def apply(x: Char, xs: Char*): Array[Char] = {
val array = new Array[Char](xs.length + 1)
array(0) = x
var i = 1
for (x <- xs.iterator) { array(i) = x; i += 1 }
array
}
/** Creates an array of `Int` objects */
// Subject to a compiler optimization in Cleanup, see above.
def apply(x: Int, xs: Int*): Array[Int] = {
val array = new Array[Int](xs.length + 1)
array(0) = x
var i = 1
for (x <- xs.iterator) { array(i) = x; i += 1 }
array
}
/** Creates an array of `Long` objects */
// Subject to a compiler optimization in Cleanup, see above.
def apply(x: Long, xs: Long*): Array[Long] = {
val array = new Array[Long](xs.length + 1)
array(0) = x
var i = 1
for (x <- xs.iterator) { array(i) = x; i += 1 }
array
}
/** Creates an array of `Float` objects */
// Subject to a compiler optimization in Cleanup, see above.
def apply(x: Float, xs: Float*): Array[Float] = {
val array = new Array[Float](xs.length + 1)
array(0) = x
var i = 1
for (x <- xs.iterator) { array(i) = x; i += 1 }
array
}
/** Creates an array of `Double` objects */
// Subject to a compiler optimization in Cleanup, see above.
def apply(x: Double, xs: Double*): Array[Double] = {
val array = new Array[Double](xs.length + 1)
array(0) = x
var i = 1
for (x <- xs.iterator) { array(i) = x; i += 1 }
array
}
/** Creates an array of `Unit` objects */
def apply(x: Unit, xs: Unit*): Array[Unit] = {
val array = new Array[Unit](xs.length + 1)
array(0) = x
var i = 1
for (x <- xs.iterator) { array(i) = x; i += 1 }
array
}
/** Creates array with given dimensions */
def ofDim[T: ClassTag](n1: Int): Array[T] =
new Array[T](n1)
/** Creates a 2-dimensional array */
def ofDim[T: ClassTag](n1: Int, n2: Int): Array[Array[T]] = {
val arr: Array[Array[T]] = (new Array[Array[T]](n1): Array[Array[T]])
for (i <- 0 until n1) arr(i) = new Array[T](n2)
arr
// tabulate(n1)(_ => ofDim[T](n2))
}
/** Creates a 3-dimensional array */
def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int): Array[Array[Array[T]]] =
tabulate(n1)(_ => ofDim[T](n2, n3))
/** Creates a 4-dimensional array */
def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int): Array[Array[Array[Array[T]]]] =
tabulate(n1)(_ => ofDim[T](n2, n3, n4))
/** Creates a 5-dimensional array */
def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int): Array[Array[Array[Array[Array[T]]]]] =
tabulate(n1)(_ => ofDim[T](n2, n3, n4, n5))
/** Concatenates all arrays into a single array.
*
* @param xss the given arrays
* @return the array created from concatenating `xss`
*/
def concat[T: ClassTag](xss: Array[T]*): Array[T] = {
val b = newBuilder[T]
b.sizeHint(xss.map(_.length).sum)
for (xs <- xss) b ++= xs
b.result()
}
/** Returns an array that contains the results of some element computation a number
* of times.
*
* Note that this means that `elem` is computed a total of n times:
* {{{
* scala> Array.fill(3){ math.random }
* res3: Array[Double] = Array(0.365461167592537, 1.550395944913685E-4, 0.7907242137333306)
* }}}
*
* @param n the number of elements desired
* @param elem the element computation
* @return an Array of size n, where each element contains the result of computing
* `elem`.
*/
def fill[T: ClassTag](n: Int)(elem: => T): Array[T] = {
val b = newBuilder[T]
b.sizeHint(n)
var i = 0
while (i < n) {
b += elem
i += 1
}
b.result()
}
/** Returns a two-dimensional array that contains the results of some element
* computation a number of times.
*
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
* @param elem the element computation
*/
def fill[T: ClassTag](n1: Int, n2: Int)(elem: => T): Array[Array[T]] =
tabulate(n1)(_ => fill(n2)(elem))
/** Returns a three-dimensional array that contains the results of some element
* computation a number of times.
*
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
* @param n3 the number of elements in the 3rd dimension
* @param elem the element computation
*/
def fill[T: ClassTag](n1: Int, n2: Int, n3: Int)(elem: => T): Array[Array[Array[T]]] =
tabulate(n1)(_ => fill(n2, n3)(elem))
/** Returns a four-dimensional array that contains the results of some element
* computation a number of times.
*
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
* @param n3 the number of elements in the 3rd dimension
* @param n4 the number of elements in the 4th dimension
* @param elem the element computation
*/
def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => T): Array[Array[Array[Array[T]]]] =
tabulate(n1)(_ => fill(n2, n3, n4)(elem))
/** Returns a five-dimensional array that contains the results of some element
* computation a number of times.
*
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
* @param n3 the number of elements in the 3rd dimension
* @param n4 the number of elements in the 4th dimension
* @param n5 the number of elements in the 5th dimension
* @param elem the element computation
*/
def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => T): Array[Array[Array[Array[Array[T]]]]] =
tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem))
/** Returns an array containing values of a given function over a range of integer
* values starting from 0.
*
* @param n The number of elements in the array
* @param f The function computing element values
* @return An array consisting of elements `f(0), f(1), ..., f(n - 1)`
*/
def tabulate[T: ClassTag](n: Int)(f: Int => T): Array[T] = {
val b = newBuilder[T]
b.sizeHint(n)
var i = 0
while (i < n) {
b += f(i)
i += 1
}
b.result()
}
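// Illustrative usage (added for clarity, not part of the original source): each element
// is the value of `f` at its own index, e.g.
//   Array.tabulate(5)(i => i * i)   // Array(0, 1, 4, 9, 16)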
/** Returns a two-dimensional array containing values of a given function
* over ranges of integer values starting from `0`.
*
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
* @param f The function computing element values
*/
def tabulate[T: ClassTag](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] =
tabulate(n1)(i1 => tabulate(n2)(f(i1, _)))
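// Illustrative usage (added for clarity, not part of the original source):
//   Array.tabulate(3, 3)((i, j) => i * j)   // Array(Array(0, 0, 0), Array(0, 1, 2), Array(0, 2, 4))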
/** Returns a three-dimensional array containing values of a given function
* over ranges of integer values starting from `0`.
*
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
* @param n3 the number of elements in the 3rd dimension
* @param f The function computing element values
*/
def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => T): Array[Array[Array[T]]] =
tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _)))
/** Returns a four-dimensional array containing values of a given function
* over ranges of integer values starting from `0`.
*
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
* @param n3 the number of elements in the 3rd dimension
* @param n4 the number of elements in the 4th dimension
* @param f The function computing element values
*/
def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => T): Array[Array[Array[Array[T]]]] =
tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _)))
/** Returns a five-dimensional array containing values of a given function
* over ranges of integer values starting from `0`.
*
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
* @param n3 the number of elements in the 3rd dimension
* @param n4 the number of elements in the 4th dimension
* @param n5 the number of elements in the 5th dimension
* @param f The function computing element values
*/
def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => T): Array[Array[Array[Array[Array[T]]]]] =
tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _)))
/** Returns an array containing a sequence of increasing integers in a range.
*
* @param start the start value of the array
* @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned)
* @return the array with values in range `start, start + 1, ..., end - 1`
* up to, but excluding, `end`.
*/
def range(start: Int, end: Int): Array[Int] = range(start, end, 1)
/** Returns an array containing equally spaced values in some integer interval.
*
* @param start the start value of the array
* @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned)
* @param step the increment value of the array (may not be zero)
* @return the array with values in `start, start + step, ...` up to, but excluding `end`
*/
def range(start: Int, end: Int, step: Int): Array[Int] = {
if (step == 0) throw new IllegalArgumentException("zero step")
val b = newBuilder[Int]
b.sizeHint(immutable.Range.count(start, end, step, isInclusive = false))
var i = start
while (if (step < 0) end < i else i < end) {
b += i
i += step
}
b.result()
}
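// Illustrative usage (added for clarity, not part of the original source): the end value
// is excluded and a negative step counts downwards, e.g.
//   Array.range(1, 10, 3)   // Array(1, 4, 7)
//   Array.range(5, 0, -2)   // Array(5, 3, 1)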
/** Returns an array containing repeated applications of a function to a start value.
*
* @param start the start value of the array
* @param len the number of elements returned by the array
* @param f the function that is repeatedly applied
* @return the array returning `len` values in the sequence `start, f(start), f(f(start)), ...`
*/
def iterate[T: ClassTag](start: T, len: Int)(f: T => T): Array[T] = {
val b = newBuilder[T]
if (len > 0) {
b.sizeHint(len)
var acc = start
var i = 1
b += acc
while (i < len) {
acc = f(acc)
i += 1
b += acc
}
}
b.result()
}
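// Illustrative usage (added for clarity, not part of the original source): the start value
// itself is the first element, so `f` is applied len - 1 times, e.g.
//   Array.iterate(1, 5)(_ * 2)   // Array(1, 2, 4, 8, 16)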
/** Called in a pattern match like `{ case Array(x, y, z) => println("3 elements") }`.
*
* @param x the selector value
* @return the array's elements wrapped in a [[scala.Some]], or `None` if `x` is `null`
*/
def unapplySeq[T](x: Array[T]): Option[IndexedSeq[T]] =
if (x == null) None else Some(x.toIndexedSeq)
// !!! the null check should not be necessary, but without it test 2241 fails. Seems to be a bug
// in pattern matcher. @PP: I noted in #4364 I think the behavior is correct.
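// Illustrative usage (added for clarity, not part of the original source): `unapplySeq`
// is what lets arrays be taken apart in pattern matches, e.g.
//   Array(1, 2, 3) match { case Array(a, b, c) => a + b + c }   // 6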
}
/** Arrays are mutable, indexed collections of values. `Array[T]` is Scala's representation
* for Java's `T[]`.
*
* {{{
* val numbers = Array(1, 2, 3, 4)
* val first = numbers(0) // read the first element
* numbers(3) = 100 // replace the 4th array element with 100
* val biggerNumbers = numbers.map(_ * 2) // multiply all numbers by two
* }}}
*
* Arrays make use of two common pieces of Scala syntactic sugar, shown on lines 2 and 3 of the above
* example code.
* Line 2 is translated into a call to `apply(Int)`, while line 3 is translated into a call to
* `update(Int, T)`.
*
* Two implicit conversions exist in [[scala.Predef]] that are frequently applied to arrays: a conversion
* to [[scala.collection.mutable.ArrayOps]] (shown on line 4 of the example above) and a conversion
* to [[scala.collection.mutable.WrappedArray]] (a subtype of [[scala.collection.Seq]]).
* Both types make available many of the standard operations found in the Scala collections API.
* The conversion to `ArrayOps` is temporary, as all operations defined on `ArrayOps` return an `Array`,
* while the conversion to `WrappedArray` is permanent as all operations return a `WrappedArray`.
*
* The conversion to `ArrayOps` takes priority over the conversion to `WrappedArray`. For instance,
* consider the following code:
*
* {{{
* val arr = Array(1, 2, 3)
* val arrReversed = arr.reverse
* val seqReversed : Seq[Int] = arr.reverse
* }}}
*
* Value `arrReversed` will be of type `Array[Int]`, with an implicit conversion to `ArrayOps` occurring
* to perform the `reverse` operation. The value of `seqReversed`, on the other hand, will be computed
* by converting to `WrappedArray` first and invoking the variant of `reverse` that returns another
* `WrappedArray`.
*
* @author Martin Odersky
* @version 1.0
* @see [[http://www.scala-lang.org/files/archive/spec/2.11/ Scala Language Specification]], for in-depth information on the transformations the Scala compiler makes on Arrays (Sections 6.6 and 6.15 respectively.)
* @see [[http://docs.scala-lang.org/sips/completed/scala-2-8-arrays.html "Scala 2.8 Arrays"]] the Scala Improvement Document detailing arrays since Scala 2.8.
* @see [[http://docs.scala-lang.org/overviews/collections/arrays.html "The Scala 2.8 Collections' API"]] section on `Array` by Martin Odersky for more information.
* @define coll array
* @define Coll `Array`
* @define orderDependent
* @define orderDependentFold
* @define mayNotTerminateInf
* @define willNotTerminateInf
* @define collectExample
* @define undefinedorder
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is either `Array[B]` if a `ClassTag` is available for `B`, or `ArraySeq[B]` otherwise.
* @define zipthatinfo $thatinfo
* @define bfinfo an implicit value of class `CanBuildFrom` which determines the result class `That` from the current
* representation type `Repr` and the new element type `B`.
*/
final class Array[T](_length: Int) extends java.io.Serializable with java.lang.Cloneable {
/** The length of the array */
def length: Int = throw new Error()
/** The element at given index.
*
* Indices start at `0`; `xs.apply(0)` is the first element of array `xs`.
* Note the indexing syntax `xs(i)` is a shorthand for `xs.apply(i)`.
*
* @param i the index
* @return the element at the given index
* @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i`
*/
def apply(i: Int): T = throw new Error()
/** Update the element at given index.
*
* Indices start at `0`; `xs.update(i, x)` replaces the i^th^ element in the array.
* Note the syntax `xs(i) = x` is a shorthand for `xs.update(i, x)`.
*
* @param i the index
* @param x the value to be written at index `i`
* @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i`
*/
def update(i: Int, x: T) { throw new Error() }
/** Clone the Array.
*
* @return A clone of the Array.
*/
override def clone(): Array[T] = throw new Error()
}
|
jmnarloch/scala-js
|
scalalib/overrides/scala/Array.scala
|
Scala
|
bsd-3-clause
| 21,526
|
package com.giyeok.gitexplorer.model
import com.giyeok.commons.io.EndianRandomAccessFile
import com.giyeok.gitexplorer.Util._
trait GitPackfiles {
this: GitRepository =>
class GitPackfile(val idxpath: String, val packpath: String) extends GitObjectStore {
def this(path: String) = this(path + ".idx", path + ".pack")
// TODO Maybe make it load on demand (for small jobs)
val idx = {
var idx: EndianRandomAccessFile = null
var pack: EndianRandomAccessFile = null
try {
idx = new EndianRandomAccessFile(idxpath, "r")
pack = new EndianRandomAccessFile(packpath, "r")
// reading pack file
pack.setEndian(true)
pack.seek(0)
// validating pack file
// 1. signature
val packSignature = pack.readLength(4).toSeq
if (packSignature != Seq('P', 'A', 'C', 'K'))
throw InvalidFormat("Invalid Packfile")
// 2. version
val packVersion = pack.readInt()
packVersion match {
case 2 =>
case 3 => throw new NotImplementedError("Pack version 3 is not supported yet")
case _ => throw InvalidFormat(s"Unknown Pack version: $packVersion")
}
val objectsCount = pack.readInt()
// reading idx file
idx.setEndian(true)
idx.seek(0)
val firstValue = idx.readLength(4).toSeq
val idxversion = idx.readInt()
val idxfile = (firstValue, idxversion) match {
case (Seq(-1, 't', 'O', 'c'), 2) =>
// version 2
val fanout = (0 until 256) map { _ =>
idx.readInt()
}
// println(fanout)
val objectNames = (0 until objectsCount) map { _ =>
GitSHA1(idx.readLength(20))
}
// objectNames takeRight 100 foreach { x => println(x.string) }
val crc32 = (0 until objectsCount) map { _ =>
idx.readInt()
}
// println(crc32 take 100)
val offsets4 = (0 until objectsCount) map { _ => idx.readInt() }
val eights = offsets4 filter { x => (x & 0x80000000) != 0 }
val offsets8 = (0 until eights.length) map { _ => idx.readLong() }
val packSHA1 = GitSHA1(idx.readLength(20))
val idxSHA1 = GitSHA1(idx.readLength(20))
new IdxFile(objectsCount, fanout, objectNames, Some(crc32), offsets4, offsets8, pack.length() - 20)
case _ =>
// version 1
throw new NotImplementedError("idx version 1 is not implemented yet")
}
idxfile
} finally {
if (idx != null) idx.close()
if (pack != null) pack.close()
}
}
class IdxFile(
val objectsCount: Int,
val fanout: Seq[Int],
val objectNames: Seq[GitId],
val crc32: Option[Seq[Int]],
val offsets4: Seq[Int],
offsets8: Seq[Long],
packSize: Long) {
assert({
def isSorted[T <% Ordered[T]](l: List[T], canEqual: Boolean = false): Boolean = l match {
case (_ +: Nil) | Nil => true
case x1 +: x2 +: xs if x1 < x2 || (canEqual && x1 == x2) => isSorted(x2 +: xs, canEqual)
case _ => false
}
val fanoutOrdered = fanout.length == 256 && isSorted(fanout.toList, true)
val objectsOrdered = isSorted(objectNames.toList)
fanoutOrdered && objectsOrdered
})
assert(objectsCount == objectNames.length && objectsCount == offsets4.length)
val sizeFromOffset = {
val orderedOffsets = (offsets4 map { realOffset(_) }).sorted.toList
val mymap = scala.collection.mutable.Map[Long, Long]()
def diffOffset(offsets: List[Long]): Unit = {
offsets match {
case o1 +: o2 +: os =>
mymap(o1) = o2 - o1
diffOffset(o2 +: os)
case Seq(l) =>
mymap(l) = packSize - l
case Nil => // no offsets (empty pack): nothing to record
}
}
diffOffset(orderedOffsets)
Map(mymap.toList: _*)
}
val objectNameFromOffset = {
(offsets4 map { realOffset _ } zip objectNames).toMap
}
def findOffsetFor(id: GitId): Option[Long] = {
def binarySearch(left: Int, right: Int): Option[Int] = {
if (left > right) None
else {
val mid = (right + left) / 2
val midv = objectNames(mid)
if (id == midv) Some(mid)
else if (id < midv) binarySearch(left, mid - 1)
else binarySearch(mid + 1, right)
}
}
// TODO improve this using fanout
binarySearch(0, objectNames.length - 1) match {
case Some(offsetIndex) =>
Some(realOffset(offsets4(offsetIndex)))
case None => None
}
}
def realOffset(offset4: Int): Long = {
if ((offset4 & 0x80000000) != 0) {
// TODO offset8
throw new UnsupportedOperationException
} else {
offset4
}
}
}
case class GitUnknown(id: GitId, objType: Int, actualContent: Array[Byte] = new Array[Byte](0)) extends GitVirtualObject
case class GitDelta(id: GitId, original: GitId, delta: Array[Byte]) extends GitVirtualObject {
// NOTE Currently, assumes pack version 3
abstract class DeltaOp
case class DeltaInsert(content: Array[Byte]) extends DeltaOp {
override def toString = s"DeltaInsert(${content.toContent})"
}
case class DeltaCopy(offset: Long, size: Int) extends DeltaOp
lazy val (baseObjectLength, resultObjectLength, deltaOps) = {
// from patch-delta.c
var pointer = 0
def next = {
pointer += 1
last
}
def last = {
delta(pointer - 1)
}
def readLittleEndian128Int = {
var value = next & 0x7f
var counter = 7
while (last &? 0x80) {
value = value | ((next & 0x7f) << counter)
counter += 7
}
value
}
val baseObjectLength = readLittleEndian128Int
val resultObjectLength = readLittleEndian128Int
// println(baseObjectLength, resultObjectLength)
var deltaOps = List[DeltaOp]()
while (pointer < delta.length) {
// println(s"opcode ${opcode.toBinaryString}")
deltaOps +:= (next.toUB match {
case 0 =>
throw InvalidFormat("Delta opcode 0 is for future expansion: not supported")
case opcode if opcode &? 0x80 =>
// "copy"
// println(s"copy ${(opcode & 0x7f).toBinaryString}")
var (offset, size) = (0L, 0)
if (opcode &? 0x01) offset = next.toUB
if (opcode &? 0x02) offset |= next.toUB << 8
if (opcode &? 0x04) offset |= next.toUB << 16
if (opcode &? 0x08) offset |= next.toUB.toLong << 24
if (opcode &? 0x10) size = next.toUB
if (opcode &? 0x20) size |= next.toUB << 8
if (opcode &? 0x40) size |= next.toUB << 16
if (size == 0) size = 0x10000
DeltaCopy(offset, size)
case opcode =>
// "insert"
val inserted = delta slice (pointer, pointer + opcode)
pointer += opcode
DeltaInsert(inserted)
})
}
(baseObjectLength, resultObjectLength, deltaOps.reverse)
}
def actualContent: Array[Byte] = {
val source = getObject(original).getOrElse(throw InvalidFormat(s"Delta object refers to invalid object $original")).content.toSeq
val blocks = deltaOps flatMap {
case DeltaCopy(offset, size) =>
assert(offset < Integer.MAX_VALUE)
source slice (offset.toInt, offset.toInt + size)
case DeltaInsert(content) => content.toSeq
}
blocks.toArray
}
}
private def readFromOffset(pack: EndianRandomAccessFile, offset: Long, id: GitId): Option[GitVirtualObject] = {
pack.seek(offset)
var read = pack.readByte()
val objectType = (read & 0x70) >> 4
val size = {
var value = read & 0xf
var counter = 4
while (read &? 0x80) {
read = pack.readByte()
value = value | ((read & 0x7f) << counter)
counter += 7
}
value
}
val sizeInPack = idx.sizeFromOffset(offset)
if (size > Integer.MAX_VALUE || sizeInPack > Integer.MAX_VALUE)
throw new UnsupportedOperationException(s"Too big object: $id")
// println(s"$id $objectType $size $sizeInPack $offset")
val delta = objectType & 0x4
val realType = objectType & 0x3
try {
/* git - cache.h
* enum object_type {
OBJ_BAD = -1,
OBJ_NONE = 0,
OBJ_COMMIT = 1,
OBJ_TREE = 2,
OBJ_BLOB = 3,
OBJ_TAG = 4,
/* 5 for future expansion */
OBJ_OFS_DELTA = 6,
OBJ_REF_DELTA = 7,
OBJ_ANY,
OBJ_MAX
};
*
*/
def readAndInflate(size: Int) = {
val raw = pack.readLength(size.toInt)
inflate(raw)
}
objectType match {
case 0x1 =>
// raw commit
// println(s"$id commit $size $sizeInPack $offset")
val _content = readAndInflate(sizeInPack.toInt)
Some(new GitCommitExisting(id, _content))
case 0x2 =>
// tree
// println(s"$id tree $size $sizeInPack $offset")
val _content = readAndInflate(sizeInPack.toInt)
Some(new GitTreeExisting(id, _content))
case 0x3 =>
// blob
// println(s"$id blob $size $sizeInPack $offset")
val _content = readAndInflate(sizeInPack.toInt)
Some(new GitBlobExisting(id, _content))
case 0x4 =>
// tag
// println(s"$id tag $size $sizeInPack $offset")
val _content = readAndInflate(sizeInPack.toInt)
Some(new GitTagExisting(id, _content))
case 0x6 =>
// ofs_delta
val (negOffset, offsetLen) = {
var read = pack.readByte()
var value = read & 0x7f
var aug = 0
var len = 1
while (read &? 0x80) {
read = pack.readByte()
value = (value << 7) | (read & 0x7f)
aug = (aug << 7) | (1 << 7)
len += 1
}
(value + aug, len)
}
val originalOffset = offset - negOffset
val original = idx.objectNameFromOffset.getOrElse(originalOffset, { throw InvalidFormat("wrong ofs_delta offset") })
val inflated = readAndInflate(sizeInPack.toInt - offsetLen)
// println(s"$id delta $size $sizeInPack $offset \\\\ $original")
Some(new GitDelta(id, original, inflated))
case t =>
// unknown?
// println(s"$id unknown $objectType $size $sizeInPack $offset")
Some(GitUnknown(id, objectType))
}
} catch {
case x: Throwable =>
x.printStackTrace()
None
}
}
private val knownObjects = scala.collection.mutable.Map[GitId, GitVirtualObject]()
def registerObject(id: GitId, obj: GitVirtualObject): GitVirtualObject = {
knownObjects(id) = obj
obj
}
def getObject(pack: EndianRandomAccessFile, id: GitId): Option[GitVirtualObject] = {
(knownObjects get id) match {
case Some(obj) => Some(obj)
case None =>
val offset = idx.findOffsetFor(id)
offset match {
case Some(offset) =>
readFromOffset(pack, offset, id) match {
case Some(obj) => Some(registerObject(id, obj))
case None => None
}
case None => None
}
}
}
def getRawObject(id: GitId): Option[GitVirtualObject] = {
(knownObjects get id) match {
case Some(obj) => Some(obj)
case None =>
var pack: EndianRandomAccessFile = null
try {
pack = new EndianRandomAccessFile(packpath, "r")
getObject(pack, id)
} finally {
if (pack != null) pack.close()
}
}
}
private def getActualObject(id: GitId): GitObject = {
// TODO improve performance
allObjects(id)
}
def hasObject(id: GitId): Boolean = {
// TODO improve performance
getObject(id).isDefined
}
def getObject(id: GitId): Option[GitObject] = {
Some(getActualObject(id))
}
private def readAllObjects = {
var pack: EndianRandomAccessFile = null
try {
pack = new EndianRandomAccessFile(packpath, "r")
val objs = (idx.objectNames zip idx.offsets4) flatMap {
case (objId, _) if knownObjects contains objId =>
Some(knownObjects(objId))
case (objId, offset) =>
readFromOffset(pack, idx.realOffset(offset), objId)
}
// println(s"${objs.length - (objs count { _.isInstanceOf[GitDelta] })} non-delta objects")
objs
} finally {
if (pack != null) pack.close()
}
}
lazy val allObjects: Map[GitId, GitObject] = {
val objects = readAllObjects groupBy { _.id } map { case (key, value) => (key, value.head) ensuring value.length == 1 }
val roots = scala.collection.mutable.Map[GitId, GitObject]()
def rootOf(obj: GitVirtualObject): GitObject = obj match {
case GitDelta(id, original, _) =>
roots.get(id) match {
case Some(root) => root
case None =>
val root = rootOf(objects(original))
roots(id) = root
root
}
case x: GitObject => x
}
objects flatMap {
case (id: GitId, delta: GitDelta) =>
val root = rootOf(delta)
Some(id, GitObject.fromTypes(id, root.objectType, () => delta.content))
case (id: GitId, GitUnknown(_, objType, _)) =>
println(s"Unknown packfile object type: $id $objType")
None
case (id: GitId, o: GitObject) => Some(id, o)
}
}
lazy val allObjectIds = idx.objectNames.toSet
}
}
object PackfileTester {
def main(args: Array[String]): Unit = {
val repo = new GitRepository("samples/git/.git")
val packfile = new repo.GitPackfile("samples/git/.git/objects/pack/pack-144a2409b5481eff97e37a82cc508f347198e4cc")
println(s"Start loading ${packfile.idxpath}")
val all = packfile.allObjects
println(s"${all.size} objects")
}
}
|
Joonsoo/gitexplorer
|
src/main/scala/com/giyeok/gitexplorer/model/GitPackfiles.scala
|
Scala
|
mit
| 17,973
|
package integrationtest
import controller.Controllers
import skinny.test.{ SkinnyFlatSpec, SkinnyTestSupport }
import unit.DBSettings
class FileUploadControllerSpec extends SkinnyFlatSpec with SkinnyTestSupport with DBSettings {
addServlet(Controllers.fileUpload, "/*")
it should "redirect users as expected" in {
post("/fileupload/submit", "name" -> "foo") {
status should equal(302)
}
}
}
|
holycattle/skinny-framework
|
example/src/test/scala/integrationtest/FileUploadControllerSpec.scala
|
Scala
|
mit
| 416
|
package im.tox.antox.activities
import java.io.File
import android.content.Intent
import android.graphics.PorterDuff
import android.os.{Build, Bundle}
import android.support.v7.app.AppCompatActivity
import android.text.{Editable, TextWatcher}
import android.view.View
import android.widget.{EditText, TextView}
import com.shamanland.fab.FloatingActionButton
import de.hdodenhof.circleimageview.CircleImageView
import im.tox.antox.data.AntoxDB
import im.tox.antox.utils.BitmapManager
import im.tox.antoxnightly.R
class FriendProfileActivity extends AppCompatActivity {
var friendKey: String = null
var nickChanged: Boolean = false
override def onCreate(savedInstanceState: Bundle) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_friend_profile)
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB) {
getSupportActionBar.setIcon(R.drawable.ic_actionbar)
}
friendKey = getIntent.getStringExtra("key")
val db = new AntoxDB(this)
val friendNote = db.getContactStatusMessage(friendKey)
setTitle(getResources.getString(R.string.friend_profile_title, getIntent.getStringExtra("name")))
val editFriendAlias = findViewById(R.id.friendAlias).asInstanceOf[EditText]
editFriendAlias.setText(getIntent.getStringExtra("name"))
editFriendAlias.addTextChangedListener(new TextWatcher() {
override def afterTextChanged(s: Editable) {
/* Set nick changed to true in order to save change in onPause() */
nickChanged = true
/* Update title to reflect new nick */
setTitle(getResources.getString(R.string.friend_profile_title, editFriendAlias.getText.toString))
}
override def beforeTextChanged(s: CharSequence, start: Int, count: Int, after: Int) {}
override def onTextChanged(s: CharSequence, start: Int, before: Int, count: Int) {}
})
// Set cursor to end of edit text field
editFriendAlias.setSelection(editFriendAlias.length(), editFriendAlias.length())
val editFriendNote = findViewById(R.id.friendNoteText).asInstanceOf[TextView]
editFriendNote.setText("\"" + friendNote + "\"")
val avatar = getIntent.getSerializableExtra("avatar").asInstanceOf[Option[File]]
avatar.foreach(avatar => {
val avatarHolder = findViewById(R.id.avatar).asInstanceOf[CircleImageView]
BitmapManager.load(avatar, avatarHolder, isAvatar = true)
})
updateFab(db.getFriendInfo(friendKey).favorite)
}
override def onBackPressed() {
super.onBackPressed()
val intent = new Intent(FriendProfileActivity.this, classOf[MainActivity])
intent.addCategory(Intent.CATEGORY_HOME)
intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP)
FriendProfileActivity.this.startActivity(intent)
finish()
}
/**
* Override onPause() in order to save any nickname changes
*/
override def onPause() {
super.onPause()
/* Update friend alias after text has been changed */
if (nickChanged) {
val editFriendAlias = findViewById(R.id.friendAlias).asInstanceOf[EditText]
val db = new AntoxDB(getApplicationContext)
db.updateAlias(editFriendAlias.getText.toString, friendKey)
}
}
def onClickFavorite(view: View): Unit = {
val db = new AntoxDB(this)
val favorite = !db.getFriendInfo(friendKey).favorite
db.updateContactFavorite(friendKey, favorite)
updateFab(favorite)
}
def updateFab(favorite: Boolean): Unit = {
val fab = findViewById(R.id.favorite_button).asInstanceOf[FloatingActionButton]
fab.setSize(FloatingActionButton.SIZE_NORMAL)
fab.setColor(getResources.getColor(if (favorite) R.color.material_red_a700 else R.color.white))
if (favorite) {
val drawable = getResources.getDrawable(R.drawable.ic_star_black_24dp)
drawable.setColorFilter(R.color.brand_primary, PorterDuff.Mode.MULTIPLY)
fab.setImageDrawable(drawable)
} else {
fab.setImageDrawable(
getResources.getDrawable(R.drawable.ic_star_outline_black_24dp))
}
fab.initBackground()
}
}
|
Astonex/Antox
|
app/src/main/scala/im/tox/antox/activities/FriendProfileActivity.scala
|
Scala
|
gpl-3.0
| 4,043
|
package slate.bindings
import scala.scalajs.js
import scala.scalajs.js.annotation.JSName
import scala.scalajs.js.typedarray.Uint8Array
@js.native
@JSName("LZString")
object LZString extends js.Any {
@js.native
def compressToUint8Array(str: String): Uint8Array = js.native
@js.native
def decompressFromUint8Array(buf: Uint8Array): String = js.native
@js.native
def compressToUTF16(str: String): String = js.native
@js.native
def decompressFromUTF16(str: String): String = js.native
@js.native
def compress(str: String): String = js.native
@js.native
def decompress(str: String): String = js.native
}
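// Illustrative usage (added for clarity, not part of the original source; assumes the
// lz-string JavaScript library is loaded so the global `LZString` object exists at runtime):
//   val packed = LZString.compressToUTF16("hello, world")
//   LZString.decompressFromUTF16(packed)   // expected to round-trip back to "hello, world"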
|
edmundnoble/dashboarder
|
ui/src/main/scala/slate/bindings/LZString.scala
|
Scala
|
mit
| 630
|
package de.sciss.fscape
import de.sciss.kollflitz.Vec
import scala.concurrent.Promise
class ZipWindowNSpec extends UGenSpec {
"The ZipWindowN UGen" should "work as specified" in {
for {
winLen <- Seq(1, 4, 15, 16, 17)
seqLen <- Seq(0, 4, 15, 16, 17)
numCh <- Seq(1, 2, 3)
} {
val p = Promise[Vec[Int]]()
val g = Graph {
import graph._
val in: GE = Vector.tabulate(numCh) { ch =>
ArithmSeq(ch * 100, length = seqLen)
}
val z = ZipWindowN(in, winLen)
DebugIntPromise(z, p)
}
runGraph(g, 16)
assert(p.isCompleted)
val res = getPromiseVec(p)
val inSq = Vector.tabulate(numCh) { ch =>
(ch * 100) until (ch * 100 + seqLen)
}
val numW = (seqLen + winLen - 1) / winLen
val exp = (0 until numW).flatMap { wi =>
inSq.flatMap(_.slice(wi * winLen, (wi + 1) * winLen).padTo(winLen, 0))
}
assert (res === exp, s"; for winLen $winLen, seqLen $seqLen, numCh $numCh")
}
}
}
|
Sciss/FScape-next
|
core/jvm/src/test/scala/de/sciss/fscape/ZipWindowNSpec.scala
|
Scala
|
agpl-3.0
| 1,043
|
package org.jetbrains.sbt.shell
import java.io.File
import java.util
import java.util.UUID
import com.intellij.build.events.{SuccessResult, Warning}
import com.intellij.compiler.impl.CompilerUtil
import com.intellij.debugger.DebuggerManagerEx
import com.intellij.debugger.settings.DebuggerSettings
import com.intellij.debugger.ui.HotSwapUI
import com.intellij.execution.Executor
import com.intellij.execution.executors.DefaultRunExecutor
import com.intellij.execution.runners.ExecutionEnvironment
import com.intellij.openapi.compiler.ex.CompilerPathsEx
import com.intellij.openapi.externalSystem.model.ProjectKeys
import com.intellij.openapi.externalSystem.model.execution.ExternalSystemTaskExecutionSettings
import com.intellij.openapi.externalSystem.model.project.ExternalSystemSourceType
import com.intellij.openapi.externalSystem.service.project.ProjectDataManager
import com.intellij.openapi.externalSystem.util.{ExternalSystemUtil, ExternalSystemApiUtil => ES}
import com.intellij.openapi.fileEditor.FileDocumentManager
import com.intellij.openapi.module.{Module, ModuleType}
import com.intellij.openapi.progress.{PerformInBackgroundOption, ProgressIndicator, ProgressManager, Task}
import com.intellij.openapi.project.Project
import com.intellij.openapi.vfs.LocalFileSystem
import com.intellij.task._
import org.jetbrains.annotations.Nullable
import org.jetbrains.plugins.scala.build.{BuildMessages, BuildWarning, IndicatorReporter}
import org.jetbrains.plugins.scala.extensions
import org.jetbrains.sbt.SbtUtil
import org.jetbrains.sbt.project.SbtProjectSystem
import org.jetbrains.sbt.project.module.SbtModuleType
import org.jetbrains.sbt.settings.SbtSettings
import org.jetbrains.sbt.shell.SbtShellCommunication._
import scala.collection.JavaConverters._
import scala.concurrent.Await
import scala.concurrent.duration.Duration
import scala.util.{Failure, Success}
/**
* Created by jast on 2016-11-25.
*/
class SbtProjectTaskRunner extends ProjectTaskRunner {
// will override the usual jps build thingies
override def canRun(projectTask: ProjectTask): Boolean = projectTask match {
case task: ModuleBuildTask =>
val module = task.getModule
val project = task.getModule.getProject
val projectSettings = SbtSettings.getInstance(project).getLinkedProjectSettings(module)
projectSettings.exists(_.useSbtShellForBuild) &&
ES.isExternalSystemAwareModule(SbtProjectSystem.Id, module)
case _: ExecuteRunConfigurationTask =>
// TODO this includes tests (and what else?). sbt should handle it and test output should be parsed
false
case _ => false
}
override def run(project: Project,
context: ProjectTaskContext,
callback: ProjectTaskNotification,
tasks: util.Collection[_ <: ProjectTask]): Unit = {
val validTasks = tasks.asScala.collect {
// TODO Android AARs are currently imported as modules. need a way to filter them away before building
case task: ModuleBuildTask
// SbtModuleType actually denotes `-build` modules, which are not part of the regular build
if ModuleType.get(task.getModule).getId != SbtModuleType.Id =>
task
}
// the "build" button in IDEA always runs the build for all individual modules,
// and may work differently than just calling the products task from the main module in sbt
val moduleCommands = validTasks.flatMap(buildCommands)
val modules = validTasks.map(_.getModule)
// don't run anything if there's no module to run a build for
// TODO user feedback
val callbackOpt = Option(callback)
if (moduleCommands.isEmpty){
val taskResult = new ProjectTaskResult(false, 0, 0)
callbackOpt.foreach(_.finished(taskResult))
} else {
val command =
if (moduleCommands.size == 1) moduleCommands.head
else moduleCommands.mkString("all ", " ", "")
FileDocumentManager.getInstance().saveAllDocuments()
// run this as a task (which blocks a thread) because it seems non-trivial to just update indicators asynchronously?
val task = new CommandTask(project, modules.toArray, command, callbackOpt)
ProgressManager.getInstance().run(task)
}
}
private def buildCommands(task: ModuleBuildTask): Seq[String] = {
// TODO sensible way to find out what scopes to run it for besides compile and test?
// TODO make tasks should be user-configurable
SbtUtil.getSbtModuleData(task.getModule).toSeq.flatMap { sbtModuleData =>
val scope = SbtUtil.makeSbtProjectId(sbtModuleData)
// `products` task is a little more general than just `compile`
Seq(s"$scope/products", s"$scope/test:products")
}
}
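// Illustrative note (added for clarity, not part of the original source): for two hypothetical
// sbt project ids `foo` and `bar`, buildCommands yields
//   Seq("foo/products", "foo/test:products", "bar/products", "bar/test:products")
// which run() above joins into the single shell command
//   "all foo/products foo/test:products bar/products bar/test:products"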
@Nullable
override def createExecutionEnvironment(project: Project,
task: ExecuteRunConfigurationTask,
executor: Executor): ExecutionEnvironment = {
val taskSettings = new ExternalSystemTaskExecutionSettings
val executorId = Option(executor).map(_.getId).getOrElse(DefaultRunExecutor.EXECUTOR_ID)
ExternalSystemUtil.createExecutionEnvironment(
project,
SbtProjectSystem.Id,
taskSettings, executorId
)
}
}
private class CommandTask(project: Project, modules: Array[Module], command: String, callbackOpt: Option[ProjectTaskNotification]) extends
Task.Backgroundable(project, "sbt build", false, PerformInBackgroundOption.ALWAYS_BACKGROUND) {
import CommandTask._
private val taskId: UUID = UUID.randomUUID()
private val shellRunner: SbtShellRunner = SbtProcessManager.forProject(project).acquireShellRunner
private def showShell(): Unit =
shellRunner.openShell(false)
override def run(indicator: ProgressIndicator): Unit = {
import org.jetbrains.plugins.scala.lang.macros.expansion.ReflectExpansionsCollector
val report = new IndicatorReporter(indicator)
val shell = SbtShellCommunication.forProject(project)
val collector = ReflectExpansionsCollector.getInstance(project)
report.start()
collector.compilationStarted()
// TODO build events instead of indicator
val resultAggregator: (BuildMessages, ShellEvent) => BuildMessages = { (messages,event) =>
event match {
case TaskStart =>
// handled for main task
messages
case TaskComplete =>
// handled for main task
messages
case ErrorWaitForInput =>
// can only actually happen during reload, but handle it here to be sure
showShell()
report.error("build interrupted", None)
messages.addError("ERROR: build interrupted")
messages
case Output(raw) =>
val text = raw.trim
val messagesWithErrors = if (text startsWith ERROR_PREFIX) {
val msg = text.stripPrefix(ERROR_PREFIX)
// only report first error until we can get a good mapping message -> error
if (messages.errors.isEmpty) {
showShell()
report.error("errors in build", None)
}
messages.addError(msg)
} else if (text startsWith WARN_PREFIX) {
val msg = text.stripPrefix(WARN_PREFIX)
// only report first warning
if (messages.warnings.isEmpty) {
report.warning("warnings in build", None)
}
messages.addWarning(msg)
} else messages
collector.processCompilerMessage(text)
report.log(text)
messagesWithErrors.appendMessage(text)
}
}
// TODO consider running module build tasks separately
// may require collecting results individually and aggregating
val commandFuture = shell.command(command, BuildMessages.empty, resultAggregator, showShell = true)
// block thread to make indicator available :(
val buildMessages = Await.ready(commandFuture, Duration.Inf).value.get
// build effects
refreshRoots(modules, indicator)
// handle callback
buildMessages match {
case Success(messages) =>
val taskResult = new ProjectTaskResult(messages.aborted, messages.errors.size, messages.warnings.size)
callbackOpt.foreach(_.finished(taskResult))
case Failure(_) =>
val failedResult = new ProjectTaskResult(true, 1, 0)
callbackOpt.foreach(_.finished(failedResult))
}
// build state reporting
buildMessages match {
case Success(messages) => report.finish(messages)
case Failure(err) => report.finishWithFailure(err)
}
// reload changed classes
val debuggerSession = DebuggerManagerEx.getInstanceEx(project).getContext.getDebuggerSession
val debuggerSettings = DebuggerSettings.getInstance
if (debuggerSession != null &&
debuggerSession.isAttached &&
debuggerSettings.RUN_HOTSWAP_AFTER_COMPILE == DebuggerSettings.RUN_HOTSWAP_ALWAYS) {
extensions.invokeLater {
HotSwapUI.getInstance(project).reloadChangedClasses(debuggerSession, false)
}
}
collector.compilationFinished()
}
// remove this if/when external system handles this refresh on its own
private def refreshRoots(modules: Array[Module], indicator: ProgressIndicator): Unit = {
indicator.setText("Synchronizing output directories...")
// simply refresh all the source roots to catch any generated files -- this MAY have a performance impact
// in which case it might be necessary to receive the generated sources directly from sbt and refresh them (see BuildManager)
val info = ProjectDataManager.getInstance().getExternalProjectData(project,SbtProjectSystem.Id, project.getBasePath)
val allSourceRoots = ES.findAllRecursively(info.getExternalProjectStructure, ProjectKeys.CONTENT_ROOT)
val generatedSourceRoots = allSourceRoots.asScala.flatMap { node =>
val data = node.getData
// sbt-side generated sources are still imported as regular sources
val generated = data.getPaths(ExternalSystemSourceType.SOURCE_GENERATED).asScala
val regular = data.getPaths(ExternalSystemSourceType.SOURCE).asScala
generated ++ regular
}.map(_.getPath).toSeq.distinct
val outputRoots = CompilerPathsEx.getOutputPaths(modules)
val toRefresh = generatedSourceRoots ++ outputRoots
CompilerUtil.refreshOutputRoots(toRefresh.asJavaCollection)
val toRefreshFiles = toRefresh.map(new File(_)).asJava
LocalFileSystem.getInstance().refreshIoFiles(toRefreshFiles, true, true, null)
indicator.setText("")
}
}
object CommandTask {
// some code duplication here with SbtStructureDump
private val WARN_PREFIX = "[warn]"
private val ERROR_PREFIX = "[error]"
}
private case class SbtBuildResult(warnings: Seq[String] = Seq.empty) extends SuccessResult {
override def isUpToDate = false
override def getWarnings: util.List[Warning] = warnings.map(BuildWarning.apply(_) : Warning).asJava
}
|
jastice/intellij-scala
|
scala/scala-impl/src/org/jetbrains/sbt/shell/SbtProjectTaskRunner.scala
|
Scala
|
apache-2.0
| 10,949
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.catalog
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.expressions.{Expression, ExpressionInfo, Literal}
import org.apache.spark.sql.catalyst.plans.logical.{Range, SubqueryAlias}
/**
* Tests for [[SessionCatalog]] that assume that [[InMemoryCatalog]] is correctly implemented.
*
* Note: many of the methods here are very similar to the ones in [[ExternalCatalogSuite]].
* This is because [[SessionCatalog]] and [[ExternalCatalog]] share many similar method
* signatures but do not extend a common parent. This is largely by design but
* unfortunately leads to very similar test code in two places.
*/
class SessionCatalogSuite extends SparkFunSuite {
private val utils = new CatalogTestUtils {
override val tableInputFormat: String = "com.fruit.eyephone.CameraInputFormat"
override val tableOutputFormat: String = "com.fruit.eyephone.CameraOutputFormat"
override def newEmptyCatalog(): ExternalCatalog = new InMemoryCatalog
}
import utils._
// --------------------------------------------------------------------------
// Databases
// --------------------------------------------------------------------------
test("basic create and list databases") {
val catalog = new SessionCatalog(newEmptyCatalog())
catalog.createDatabase(newDb("default"), ignoreIfExists = true)
assert(catalog.databaseExists("default"))
assert(!catalog.databaseExists("testing"))
assert(!catalog.databaseExists("testing2"))
catalog.createDatabase(newDb("testing"), ignoreIfExists = false)
assert(catalog.databaseExists("testing"))
assert(catalog.listDatabases().toSet == Set("default", "testing"))
catalog.createDatabase(newDb("testing2"), ignoreIfExists = false)
assert(catalog.listDatabases().toSet == Set("default", "testing", "testing2"))
assert(catalog.databaseExists("testing2"))
assert(!catalog.databaseExists("does_not_exist"))
}
def testInvalidName(func: (String) => Unit) {
// scalastyle:off
// non ascii characters are not allowed in the source code, so we disable the scalastyle.
val name = "砖"
// scalastyle:on
val e = intercept[AnalysisException] {
func(name)
}.getMessage
assert(e.contains(s"`$name` is not a valid name for tables/databases."))
}
test("create databases using invalid names") {
val catalog = new SessionCatalog(newEmptyCatalog())
testInvalidName(name => catalog.createDatabase(newDb(name), ignoreIfExists = true))
}
test("get database when a database exists") {
val catalog = new SessionCatalog(newBasicCatalog())
val db1 = catalog.getDatabaseMetadata("db1")
assert(db1.name == "db1")
assert(db1.description.contains("db1"))
}
test("get database should throw exception when the database does not exist") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[NoSuchDatabaseException] {
catalog.getDatabaseMetadata("db_that_does_not_exist")
}
}
test("list databases without pattern") {
val catalog = new SessionCatalog(newBasicCatalog())
assert(catalog.listDatabases().toSet == Set("default", "db1", "db2"))
}
test("list databases with pattern") {
val catalog = new SessionCatalog(newBasicCatalog())
assert(catalog.listDatabases("db").toSet == Set.empty)
assert(catalog.listDatabases("db*").toSet == Set("db1", "db2"))
assert(catalog.listDatabases("*1").toSet == Set("db1"))
assert(catalog.listDatabases("db2").toSet == Set("db2"))
}
test("drop database") {
val catalog = new SessionCatalog(newBasicCatalog())
catalog.dropDatabase("db1", ignoreIfNotExists = false, cascade = false)
assert(catalog.listDatabases().toSet == Set("default", "db2"))
}
test("drop database when the database is not empty") {
// Throw exception if there are functions left
val externalCatalog1 = newBasicCatalog()
val sessionCatalog1 = new SessionCatalog(externalCatalog1)
externalCatalog1.dropTable("db2", "tbl1", ignoreIfNotExists = false, purge = false)
externalCatalog1.dropTable("db2", "tbl2", ignoreIfNotExists = false, purge = false)
intercept[AnalysisException] {
sessionCatalog1.dropDatabase("db2", ignoreIfNotExists = false, cascade = false)
}
// Throw exception if there are tables left
val externalCatalog2 = newBasicCatalog()
val sessionCatalog2 = new SessionCatalog(externalCatalog2)
externalCatalog2.dropFunction("db2", "func1")
intercept[AnalysisException] {
sessionCatalog2.dropDatabase("db2", ignoreIfNotExists = false, cascade = false)
}
// When cascade is true, it should drop them
val externalCatalog3 = newBasicCatalog()
val sessionCatalog3 = new SessionCatalog(externalCatalog3)
externalCatalog3.dropDatabase("db2", ignoreIfNotExists = false, cascade = true)
assert(sessionCatalog3.listDatabases().toSet == Set("default", "db1"))
}
test("drop database when the database does not exist") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[NoSuchDatabaseException] {
catalog.dropDatabase("db_that_does_not_exist", ignoreIfNotExists = false, cascade = false)
}
catalog.dropDatabase("db_that_does_not_exist", ignoreIfNotExists = true, cascade = false)
}
test("drop current database and drop default database") {
val catalog = new SessionCatalog(newBasicCatalog())
catalog.setCurrentDatabase("db1")
assert(catalog.getCurrentDatabase == "db1")
catalog.dropDatabase("db1", ignoreIfNotExists = false, cascade = true)
intercept[NoSuchDatabaseException] {
catalog.createTable(newTable("tbl1", "db1"), ignoreIfExists = false)
}
catalog.setCurrentDatabase("default")
assert(catalog.getCurrentDatabase == "default")
intercept[AnalysisException] {
catalog.dropDatabase("default", ignoreIfNotExists = false, cascade = true)
}
}
test("alter database") {
val catalog = new SessionCatalog(newBasicCatalog())
val db1 = catalog.getDatabaseMetadata("db1")
// Note: alter properties here because Hive does not support altering other fields
catalog.alterDatabase(db1.copy(properties = Map("k" -> "v3", "good" -> "true")))
val newDb1 = catalog.getDatabaseMetadata("db1")
assert(db1.properties.isEmpty)
assert(newDb1.properties.size == 2)
assert(newDb1.properties.get("k") == Some("v3"))
assert(newDb1.properties.get("good") == Some("true"))
}
test("alter database should throw exception when the database does not exist") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[NoSuchDatabaseException] {
catalog.alterDatabase(newDb("unknown_db"))
}
}
test("get/set current database") {
val catalog = new SessionCatalog(newBasicCatalog())
assert(catalog.getCurrentDatabase == "default")
catalog.setCurrentDatabase("db2")
assert(catalog.getCurrentDatabase == "db2")
intercept[NoSuchDatabaseException] {
catalog.setCurrentDatabase("deebo")
}
catalog.createDatabase(newDb("deebo"), ignoreIfExists = false)
catalog.setCurrentDatabase("deebo")
assert(catalog.getCurrentDatabase == "deebo")
}
// --------------------------------------------------------------------------
// Tables
// --------------------------------------------------------------------------
test("create table") {
val externalCatalog = newBasicCatalog()
val sessionCatalog = new SessionCatalog(externalCatalog)
assert(externalCatalog.listTables("db1").isEmpty)
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
sessionCatalog.createTable(newTable("tbl3", "db1"), ignoreIfExists = false)
sessionCatalog.createTable(newTable("tbl3", "db2"), ignoreIfExists = false)
assert(externalCatalog.listTables("db1").toSet == Set("tbl3"))
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2", "tbl3"))
// Create table without explicitly specifying database
sessionCatalog.setCurrentDatabase("db1")
sessionCatalog.createTable(newTable("tbl4"), ignoreIfExists = false)
assert(externalCatalog.listTables("db1").toSet == Set("tbl3", "tbl4"))
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2", "tbl3"))
}
test("create tables using invalid names") {
val catalog = new SessionCatalog(newEmptyCatalog())
testInvalidName(name => catalog.createTable(newTable(name, "db1"), ignoreIfExists = false))
}
test("create table when database does not exist") {
val catalog = new SessionCatalog(newBasicCatalog())
// Creating table in non-existent database should always fail
intercept[NoSuchDatabaseException] {
catalog.createTable(newTable("tbl1", "does_not_exist"), ignoreIfExists = false)
}
intercept[NoSuchDatabaseException] {
catalog.createTable(newTable("tbl1", "does_not_exist"), ignoreIfExists = true)
}
// Table already exists
intercept[TableAlreadyExistsException] {
catalog.createTable(newTable("tbl1", "db2"), ignoreIfExists = false)
}
catalog.createTable(newTable("tbl1", "db2"), ignoreIfExists = true)
}
test("create temp table") {
val catalog = new SessionCatalog(newBasicCatalog())
val tempTable1 = Range(1, 10, 1, 10)
val tempTable2 = Range(1, 20, 2, 10)
catalog.createTempView("tbl1", tempTable1, overrideIfExists = false)
catalog.createTempView("tbl2", tempTable2, overrideIfExists = false)
assert(catalog.getTempView("tbl1") == Option(tempTable1))
assert(catalog.getTempView("tbl2") == Option(tempTable2))
assert(catalog.getTempView("tbl3").isEmpty)
// Temporary table already exists
intercept[TempTableAlreadyExistsException] {
catalog.createTempView("tbl1", tempTable1, overrideIfExists = false)
}
// Temporary table already exists but we override it
catalog.createTempView("tbl1", tempTable2, overrideIfExists = true)
assert(catalog.getTempView("tbl1") == Option(tempTable2))
}
test("drop table") {
val externalCatalog = newBasicCatalog()
val sessionCatalog = new SessionCatalog(externalCatalog)
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
sessionCatalog.dropTable(TableIdentifier("tbl1", Some("db2")), ignoreIfNotExists = false,
purge = false)
assert(externalCatalog.listTables("db2").toSet == Set("tbl2"))
// Drop table without explicitly specifying database
sessionCatalog.setCurrentDatabase("db2")
sessionCatalog.dropTable(TableIdentifier("tbl2"), ignoreIfNotExists = false, purge = false)
assert(externalCatalog.listTables("db2").isEmpty)
}
test("drop table when database/table does not exist") {
val catalog = new SessionCatalog(newBasicCatalog())
// Should always throw exception when the database does not exist
intercept[NoSuchDatabaseException] {
catalog.dropTable(TableIdentifier("tbl1", Some("unknown_db")), ignoreIfNotExists = false,
purge = false)
}
intercept[NoSuchDatabaseException] {
catalog.dropTable(TableIdentifier("tbl1", Some("unknown_db")), ignoreIfNotExists = true,
purge = false)
}
intercept[NoSuchTableException] {
catalog.dropTable(TableIdentifier("unknown_table", Some("db2")), ignoreIfNotExists = false,
purge = false)
}
catalog.dropTable(TableIdentifier("unknown_table", Some("db2")), ignoreIfNotExists = true,
purge = false)
}
test("drop temp table") {
val externalCatalog = newBasicCatalog()
val sessionCatalog = new SessionCatalog(externalCatalog)
val tempTable = Range(1, 10, 2, 10)
sessionCatalog.createTempView("tbl1", tempTable, overrideIfExists = false)
sessionCatalog.setCurrentDatabase("db2")
assert(sessionCatalog.getTempView("tbl1") == Some(tempTable))
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
// If database is not specified, temp table should be dropped first
sessionCatalog.dropTable(TableIdentifier("tbl1"), ignoreIfNotExists = false, purge = false)
assert(sessionCatalog.getTempView("tbl1") == None)
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
// If temp table does not exist, the table in the current database should be dropped
sessionCatalog.dropTable(TableIdentifier("tbl1"), ignoreIfNotExists = false, purge = false)
assert(externalCatalog.listTables("db2").toSet == Set("tbl2"))
// If database is specified, temp tables are never dropped
sessionCatalog.createTempView("tbl1", tempTable, overrideIfExists = false)
sessionCatalog.createTable(newTable("tbl1", "db2"), ignoreIfExists = false)
sessionCatalog.dropTable(TableIdentifier("tbl1", Some("db2")), ignoreIfNotExists = false,
purge = false)
assert(sessionCatalog.getTempView("tbl1") == Some(tempTable))
assert(externalCatalog.listTables("db2").toSet == Set("tbl2"))
}
test("rename table") {
val externalCatalog = newBasicCatalog()
val sessionCatalog = new SessionCatalog(externalCatalog)
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
sessionCatalog.renameTable(TableIdentifier("tbl1", Some("db2")), TableIdentifier("tblone"))
assert(externalCatalog.listTables("db2").toSet == Set("tblone", "tbl2"))
sessionCatalog.renameTable(TableIdentifier("tbl2", Some("db2")), TableIdentifier("tbltwo"))
assert(externalCatalog.listTables("db2").toSet == Set("tblone", "tbltwo"))
// Rename table without explicitly specifying database
sessionCatalog.setCurrentDatabase("db2")
sessionCatalog.renameTable(TableIdentifier("tbltwo"), TableIdentifier("table_two"))
assert(externalCatalog.listTables("db2").toSet == Set("tblone", "table_two"))
// Renaming "db2.tblone" to "db1.tblones" should fail because databases don't match
intercept[AnalysisException] {
sessionCatalog.renameTable(
TableIdentifier("tblone", Some("db2")), TableIdentifier("tblones", Some("db1")))
}
// The new table already exists
intercept[TableAlreadyExistsException] {
sessionCatalog.renameTable(
TableIdentifier("tblone", Some("db2")),
TableIdentifier("table_two"))
}
}
test("rename tables to an invalid name") {
val catalog = new SessionCatalog(newBasicCatalog())
testInvalidName(
name => catalog.renameTable(TableIdentifier("tbl1", Some("db2")), TableIdentifier(name)))
}
test("rename table when database/table does not exist") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[NoSuchDatabaseException] {
catalog.renameTable(TableIdentifier("tbl1", Some("unknown_db")), TableIdentifier("tbl2"))
}
intercept[NoSuchTableException] {
catalog.renameTable(TableIdentifier("unknown_table", Some("db2")), TableIdentifier("tbl2"))
}
}
test("rename temp table") {
val externalCatalog = newBasicCatalog()
val sessionCatalog = new SessionCatalog(externalCatalog)
val tempTable = Range(1, 10, 2, 10)
sessionCatalog.createTempView("tbl1", tempTable, overrideIfExists = false)
sessionCatalog.setCurrentDatabase("db2")
assert(sessionCatalog.getTempView("tbl1") == Option(tempTable))
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
// If database is not specified, temp table should be renamed first
sessionCatalog.renameTable(TableIdentifier("tbl1"), TableIdentifier("tbl3"))
assert(sessionCatalog.getTempView("tbl1").isEmpty)
assert(sessionCatalog.getTempView("tbl3") == Option(tempTable))
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
// If database is specified, temp tables are never renamed
sessionCatalog.renameTable(TableIdentifier("tbl2", Some("db2")), TableIdentifier("tbl4"))
assert(sessionCatalog.getTempView("tbl3") == Option(tempTable))
assert(sessionCatalog.getTempView("tbl4").isEmpty)
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl4"))
}
test("alter table") {
val externalCatalog = newBasicCatalog()
val sessionCatalog = new SessionCatalog(externalCatalog)
val tbl1 = externalCatalog.getTable("db2", "tbl1")
sessionCatalog.alterTable(tbl1.copy(properties = Map("toh" -> "frem")))
val newTbl1 = externalCatalog.getTable("db2", "tbl1")
assert(!tbl1.properties.contains("toh"))
assert(newTbl1.properties.size == tbl1.properties.size + 1)
assert(newTbl1.properties.get("toh") == Some("frem"))
// Alter table without explicitly specifying database
sessionCatalog.setCurrentDatabase("db2")
sessionCatalog.alterTable(tbl1.copy(identifier = TableIdentifier("tbl1")))
val newestTbl1 = externalCatalog.getTable("db2", "tbl1")
assert(newestTbl1 == tbl1)
}
test("alter table when database/table does not exist") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[NoSuchDatabaseException] {
catalog.alterTable(newTable("tbl1", "unknown_db"))
}
intercept[NoSuchTableException] {
catalog.alterTable(newTable("unknown_table", "db2"))
}
}
test("get table") {
val externalCatalog = newBasicCatalog()
val sessionCatalog = new SessionCatalog(externalCatalog)
assert(sessionCatalog.getTableMetadata(TableIdentifier("tbl1", Some("db2")))
== externalCatalog.getTable("db2", "tbl1"))
// Get table without explicitly specifying database
sessionCatalog.setCurrentDatabase("db2")
assert(sessionCatalog.getTableMetadata(TableIdentifier("tbl1"))
== externalCatalog.getTable("db2", "tbl1"))
}
test("get table when database/table does not exist") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[NoSuchDatabaseException] {
catalog.getTableMetadata(TableIdentifier("tbl1", Some("unknown_db")))
}
intercept[NoSuchTableException] {
catalog.getTableMetadata(TableIdentifier("unknown_table", Some("db2")))
}
}
test("get option of table metadata") {
val externalCatalog = newBasicCatalog()
val catalog = new SessionCatalog(externalCatalog)
assert(catalog.getTableMetadataOption(TableIdentifier("tbl1", Some("db2")))
== Option(externalCatalog.getTable("db2", "tbl1")))
assert(catalog.getTableMetadataOption(TableIdentifier("unknown_table", Some("db2"))).isEmpty)
intercept[NoSuchDatabaseException] {
catalog.getTableMetadataOption(TableIdentifier("tbl1", Some("unknown_db")))
}
}
test("lookup table relation") {
val externalCatalog = newBasicCatalog()
val sessionCatalog = new SessionCatalog(externalCatalog)
val tempTable1 = Range(1, 10, 1, 10)
val metastoreTable1 = externalCatalog.getTable("db2", "tbl1")
sessionCatalog.createTempView("tbl1", tempTable1, overrideIfExists = false)
sessionCatalog.setCurrentDatabase("db2")
// If we explicitly specify the database, we'll look up the relation in that database
assert(sessionCatalog.lookupRelation(TableIdentifier("tbl1", Some("db2")))
== SubqueryAlias("tbl1", SimpleCatalogRelation("db2", metastoreTable1), None))
// Otherwise, we'll first look up a temporary table with the same name
assert(sessionCatalog.lookupRelation(TableIdentifier("tbl1"))
== SubqueryAlias("tbl1", tempTable1, Some(TableIdentifier("tbl1"))))
// Then, if that does not exist, look up the relation in the current database
sessionCatalog.dropTable(TableIdentifier("tbl1"), ignoreIfNotExists = false, purge = false)
assert(sessionCatalog.lookupRelation(TableIdentifier("tbl1"))
== SubqueryAlias("tbl1", SimpleCatalogRelation("db2", metastoreTable1), None))
}
test("lookup table relation with alias") {
val catalog = new SessionCatalog(newBasicCatalog())
val alias = "monster"
val tableMetadata = catalog.getTableMetadata(TableIdentifier("tbl1", Some("db2")))
val relation = SubqueryAlias("tbl1", SimpleCatalogRelation("db2", tableMetadata), None)
val relationWithAlias =
SubqueryAlias(alias,
SimpleCatalogRelation("db2", tableMetadata), None)
assert(catalog.lookupRelation(
TableIdentifier("tbl1", Some("db2")), alias = None) == relation)
assert(catalog.lookupRelation(
TableIdentifier("tbl1", Some("db2")), alias = Some(alias)) == relationWithAlias)
}
test("lookup view with view name in alias") {
val catalog = new SessionCatalog(newBasicCatalog())
val tmpView = Range(1, 10, 2, 10)
catalog.createTempView("vw1", tmpView, overrideIfExists = false)
val plan = catalog.lookupRelation(TableIdentifier("vw1"), Option("range"))
assert(plan == SubqueryAlias("range", tmpView, Option(TableIdentifier("vw1"))))
}
test("table exists") {
val catalog = new SessionCatalog(newBasicCatalog())
assert(catalog.tableExists(TableIdentifier("tbl1", Some("db2"))))
assert(catalog.tableExists(TableIdentifier("tbl2", Some("db2"))))
assert(!catalog.tableExists(TableIdentifier("tbl3", Some("db2"))))
assert(!catalog.tableExists(TableIdentifier("tbl1", Some("db1"))))
assert(!catalog.tableExists(TableIdentifier("tbl2", Some("db1"))))
// If database is explicitly specified, do not check temporary tables
val tempTable = Range(1, 10, 1, 10)
assert(!catalog.tableExists(TableIdentifier("tbl3", Some("db2"))))
// If database is not explicitly specified, check the current database
catalog.setCurrentDatabase("db2")
assert(catalog.tableExists(TableIdentifier("tbl1")))
assert(catalog.tableExists(TableIdentifier("tbl2")))
catalog.createTempView("tbl3", tempTable, overrideIfExists = false)
// tableExists should not check temp view.
assert(!catalog.tableExists(TableIdentifier("tbl3")))
}
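  // Hedged sketch (editor's addition, not from the original suite): the test above notes
  // that tableExists ignores temporary views. Code that needs to detect a temp view can
  // instead go through getTempViewOrPermanentTableMetadata, which resolves temp views
  // first when no database is given. Fixture helpers (newBasicCatalog, Range) are assumed
  // to behave exactly as in the surrounding tests.
  test("sketch: detecting a temp view that tableExists ignores") {
    val catalog = new SessionCatalog(newBasicCatalog())
    catalog.createTempView("tmp_only", Range(1, 10, 1, 10), overrideIfExists = false)
    // tableExists consults only the external catalog, so the temp view is invisible to it
    assert(!catalog.tableExists(TableIdentifier("tmp_only")))
    // the temp-view-aware lookup still finds it
    assert(catalog.getTempViewOrPermanentTableMetadata(
      TableIdentifier("tmp_only")).identifier.table == "tmp_only")
  }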
test("getTempViewOrPermanentTableMetadata on temporary views") {
val catalog = new SessionCatalog(newBasicCatalog())
val tempTable = Range(1, 10, 2, 10)
intercept[NoSuchTableException] {
catalog.getTempViewOrPermanentTableMetadata(TableIdentifier("view1"))
}.getMessage
intercept[NoSuchTableException] {
catalog.getTempViewOrPermanentTableMetadata(TableIdentifier("view1", Some("default")))
}.getMessage
catalog.createTempView("view1", tempTable, overrideIfExists = false)
assert(catalog.getTempViewOrPermanentTableMetadata(
TableIdentifier("view1")).identifier.table == "view1")
assert(catalog.getTempViewOrPermanentTableMetadata(
TableIdentifier("view1")).schema(0).name == "id")
intercept[NoSuchTableException] {
catalog.getTempViewOrPermanentTableMetadata(TableIdentifier("view1", Some("default")))
}.getMessage
}
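  // Hedged sketch (editor's addition): the test above covers the temp-view and qualified
  // cases; this sketch illustrates the expected fallback for an unqualified name with no
  // matching temp view, namely resolution against the current database, mirroring
  // getTableMetadata. Fixtures (newBasicCatalog, db2/tbl1) are reused from above.
  test("sketch: getTempViewOrPermanentTableMetadata falls back to the current database") {
    val catalog = new SessionCatalog(newBasicCatalog())
    catalog.setCurrentDatabase("db2")
    assert(catalog.getTempViewOrPermanentTableMetadata(
      TableIdentifier("tbl1")).identifier.table == "tbl1")
  }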
test("list tables without pattern") {
val catalog = new SessionCatalog(newBasicCatalog())
val tempTable = Range(1, 10, 2, 10)
catalog.createTempView("tbl1", tempTable, overrideIfExists = false)
catalog.createTempView("tbl4", tempTable, overrideIfExists = false)
assert(catalog.listTables("db1").toSet ==
Set(TableIdentifier("tbl1"), TableIdentifier("tbl4")))
assert(catalog.listTables("db2").toSet ==
Set(TableIdentifier("tbl1"),
TableIdentifier("tbl4"),
TableIdentifier("tbl1", Some("db2")),
TableIdentifier("tbl2", Some("db2"))))
intercept[NoSuchDatabaseException] {
catalog.listTables("unknown_db")
}
}
test("list tables with pattern") {
val catalog = new SessionCatalog(newBasicCatalog())
val tempTable = Range(1, 10, 2, 10)
catalog.createTempView("tbl1", tempTable, overrideIfExists = false)
catalog.createTempView("tbl4", tempTable, overrideIfExists = false)
assert(catalog.listTables("db1", "*").toSet == catalog.listTables("db1").toSet)
assert(catalog.listTables("db2", "*").toSet == catalog.listTables("db2").toSet)
assert(catalog.listTables("db2", "tbl*").toSet ==
Set(TableIdentifier("tbl1"),
TableIdentifier("tbl4"),
TableIdentifier("tbl1", Some("db2")),
TableIdentifier("tbl2", Some("db2"))))
assert(catalog.listTables("db2", "*1").toSet ==
Set(TableIdentifier("tbl1"), TableIdentifier("tbl1", Some("db2"))))
intercept[NoSuchDatabaseException] {
catalog.listTables("unknown_db", "*")
}
}
// --------------------------------------------------------------------------
// Partitions
// --------------------------------------------------------------------------
test("basic create and list partitions") {
val externalCatalog = newEmptyCatalog()
val sessionCatalog = new SessionCatalog(externalCatalog)
sessionCatalog.createDatabase(newDb("mydb"), ignoreIfExists = false)
sessionCatalog.createTable(newTable("tbl", "mydb"), ignoreIfExists = false)
sessionCatalog.createPartitions(
TableIdentifier("tbl", Some("mydb")), Seq(part1, part2), ignoreIfExists = false)
assert(catalogPartitionsEqual(externalCatalog.listPartitions("mydb", "tbl"), part1, part2))
// Create partitions without explicitly specifying database
sessionCatalog.setCurrentDatabase("mydb")
sessionCatalog.createPartitions(
TableIdentifier("tbl"), Seq(partWithMixedOrder), ignoreIfExists = false)
assert(catalogPartitionsEqual(
externalCatalog.listPartitions("mydb", "tbl"), part1, part2, partWithMixedOrder))
}
test("create partitions when database/table does not exist") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[NoSuchDatabaseException] {
catalog.createPartitions(
TableIdentifier("tbl1", Some("unknown_db")), Seq(), ignoreIfExists = false)
}
intercept[NoSuchTableException] {
catalog.createPartitions(
TableIdentifier("does_not_exist", Some("db2")), Seq(), ignoreIfExists = false)
}
}
test("create partitions that already exist") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[AnalysisException] {
catalog.createPartitions(
TableIdentifier("tbl2", Some("db2")), Seq(part1), ignoreIfExists = false)
}
catalog.createPartitions(
TableIdentifier("tbl2", Some("db2")), Seq(part1), ignoreIfExists = true)
}
test("create partitions with invalid part spec") {
val catalog = new SessionCatalog(newBasicCatalog())
var e = intercept[AnalysisException] {
catalog.createPartitions(
TableIdentifier("tbl2", Some("db2")),
Seq(part1, partWithLessColumns), ignoreIfExists = false)
}
assert(e.getMessage.contains("Partition spec is invalid. The spec (a) must match " +
"the partition spec (a, b) defined in table '`db2`.`tbl2`'"))
e = intercept[AnalysisException] {
catalog.createPartitions(
TableIdentifier("tbl2", Some("db2")),
Seq(part1, partWithMoreColumns), ignoreIfExists = true)
}
assert(e.getMessage.contains("Partition spec is invalid. The spec (a, b, c) must match " +
"the partition spec (a, b) defined in table '`db2`.`tbl2`'"))
e = intercept[AnalysisException] {
catalog.createPartitions(
TableIdentifier("tbl2", Some("db2")),
Seq(partWithUnknownColumns, part1), ignoreIfExists = true)
}
assert(e.getMessage.contains("Partition spec is invalid. The spec (a, unknown) must match " +
"the partition spec (a, b) defined in table '`db2`.`tbl2`'"))
}
test("drop partitions") {
val externalCatalog = newBasicCatalog()
val sessionCatalog = new SessionCatalog(externalCatalog)
assert(catalogPartitionsEqual(externalCatalog.listPartitions("db2", "tbl2"), part1, part2))
sessionCatalog.dropPartitions(
TableIdentifier("tbl2", Some("db2")),
Seq(part1.spec),
ignoreIfNotExists = false,
purge = false,
retainData = false)
assert(catalogPartitionsEqual(externalCatalog.listPartitions("db2", "tbl2"), part2))
// Drop partitions without explicitly specifying database
sessionCatalog.setCurrentDatabase("db2")
sessionCatalog.dropPartitions(
TableIdentifier("tbl2"),
Seq(part2.spec),
ignoreIfNotExists = false,
purge = false,
retainData = false)
assert(externalCatalog.listPartitions("db2", "tbl2").isEmpty)
// Drop multiple partitions at once
sessionCatalog.createPartitions(
TableIdentifier("tbl2", Some("db2")), Seq(part1, part2), ignoreIfExists = false)
assert(catalogPartitionsEqual(externalCatalog.listPartitions("db2", "tbl2"), part1, part2))
sessionCatalog.dropPartitions(
TableIdentifier("tbl2", Some("db2")),
Seq(part1.spec, part2.spec),
ignoreIfNotExists = false,
purge = false,
retainData = false)
assert(externalCatalog.listPartitions("db2", "tbl2").isEmpty)
}
test("drop partitions when database/table does not exist") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[NoSuchDatabaseException] {
catalog.dropPartitions(
TableIdentifier("tbl1", Some("unknown_db")),
Seq(),
ignoreIfNotExists = false,
purge = false,
retainData = false)
}
intercept[NoSuchTableException] {
catalog.dropPartitions(
TableIdentifier("does_not_exist", Some("db2")),
Seq(),
ignoreIfNotExists = false,
purge = false,
retainData = false)
}
}
test("drop partitions that do not exist") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[AnalysisException] {
catalog.dropPartitions(
TableIdentifier("tbl2", Some("db2")),
Seq(part3.spec),
ignoreIfNotExists = false,
purge = false,
retainData = false)
}
catalog.dropPartitions(
TableIdentifier("tbl2", Some("db2")),
Seq(part3.spec),
ignoreIfNotExists = true,
purge = false,
retainData = false)
}
test("drop partitions with invalid partition spec") {
val catalog = new SessionCatalog(newBasicCatalog())
var e = intercept[AnalysisException] {
catalog.dropPartitions(
TableIdentifier("tbl2", Some("db2")),
Seq(partWithMoreColumns.spec),
ignoreIfNotExists = false,
purge = false,
retainData = false)
}
assert(e.getMessage.contains(
"Partition spec is invalid. The spec (a, b, c) must be contained within " +
"the partition spec (a, b) defined in table '`db2`.`tbl2`'"))
e = intercept[AnalysisException] {
catalog.dropPartitions(
TableIdentifier("tbl2", Some("db2")),
Seq(partWithUnknownColumns.spec),
ignoreIfNotExists = false,
purge = false,
retainData = false)
}
assert(e.getMessage.contains(
"Partition spec is invalid. The spec (a, unknown) must be contained within " +
"the partition spec (a, b) defined in table '`db2`.`tbl2`'"))
}
test("get partition") {
val catalog = new SessionCatalog(newBasicCatalog())
assert(catalog.getPartition(
TableIdentifier("tbl2", Some("db2")), part1.spec).spec == part1.spec)
assert(catalog.getPartition(
TableIdentifier("tbl2", Some("db2")), part2.spec).spec == part2.spec)
// Get partition without explicitly specifying database
catalog.setCurrentDatabase("db2")
assert(catalog.getPartition(TableIdentifier("tbl2"), part1.spec).spec == part1.spec)
assert(catalog.getPartition(TableIdentifier("tbl2"), part2.spec).spec == part2.spec)
// Get non-existent partition
intercept[AnalysisException] {
catalog.getPartition(TableIdentifier("tbl2"), part3.spec)
}
}
test("get partition when database/table does not exist") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[NoSuchDatabaseException] {
catalog.getPartition(TableIdentifier("tbl1", Some("unknown_db")), part1.spec)
}
intercept[NoSuchTableException] {
catalog.getPartition(TableIdentifier("does_not_exist", Some("db2")), part1.spec)
}
}
test("get partition with invalid partition spec") {
val catalog = new SessionCatalog(newBasicCatalog())
var e = intercept[AnalysisException] {
catalog.getPartition(TableIdentifier("tbl1", Some("db2")), partWithLessColumns.spec)
}
assert(e.getMessage.contains("Partition spec is invalid. The spec (a) must match " +
"the partition spec (a, b) defined in table '`db2`.`tbl1`'"))
e = intercept[AnalysisException] {
catalog.getPartition(TableIdentifier("tbl1", Some("db2")), partWithMoreColumns.spec)
}
assert(e.getMessage.contains("Partition spec is invalid. The spec (a, b, c) must match " +
"the partition spec (a, b) defined in table '`db2`.`tbl1`'"))
e = intercept[AnalysisException] {
catalog.getPartition(TableIdentifier("tbl1", Some("db2")), partWithUnknownColumns.spec)
}
assert(e.getMessage.contains("Partition spec is invalid. The spec (a, unknown) must match " +
"the partition spec (a, b) defined in table '`db2`.`tbl1`'"))
}
test("rename partitions") {
val catalog = new SessionCatalog(newBasicCatalog())
val newPart1 = part1.copy(spec = Map("a" -> "100", "b" -> "101"))
val newPart2 = part2.copy(spec = Map("a" -> "200", "b" -> "201"))
val newSpecs = Seq(newPart1.spec, newPart2.spec)
catalog.renamePartitions(
TableIdentifier("tbl2", Some("db2")), Seq(part1.spec, part2.spec), newSpecs)
assert(catalog.getPartition(
TableIdentifier("tbl2", Some("db2")), newPart1.spec).spec === newPart1.spec)
assert(catalog.getPartition(
TableIdentifier("tbl2", Some("db2")), newPart2.spec).spec === newPart2.spec)
intercept[AnalysisException] {
catalog.getPartition(TableIdentifier("tbl2", Some("db2")), part1.spec)
}
intercept[AnalysisException] {
catalog.getPartition(TableIdentifier("tbl2", Some("db2")), part2.spec)
}
// Rename partitions without explicitly specifying database
catalog.setCurrentDatabase("db2")
catalog.renamePartitions(TableIdentifier("tbl2"), newSpecs, Seq(part1.spec, part2.spec))
assert(catalog.getPartition(TableIdentifier("tbl2"), part1.spec).spec === part1.spec)
assert(catalog.getPartition(TableIdentifier("tbl2"), part2.spec).spec === part2.spec)
intercept[AnalysisException] {
catalog.getPartition(TableIdentifier("tbl2"), newPart1.spec)
}
intercept[AnalysisException] {
catalog.getPartition(TableIdentifier("tbl2"), newPart2.spec)
}
}
test("rename partitions when database/table does not exist") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[NoSuchDatabaseException] {
catalog.renamePartitions(
TableIdentifier("tbl1", Some("unknown_db")), Seq(part1.spec), Seq(part2.spec))
}
intercept[NoSuchTableException] {
catalog.renamePartitions(
TableIdentifier("does_not_exist", Some("db2")), Seq(part1.spec), Seq(part2.spec))
}
}
test("rename partition with invalid partition spec") {
val catalog = new SessionCatalog(newBasicCatalog())
var e = intercept[AnalysisException] {
catalog.renamePartitions(
TableIdentifier("tbl1", Some("db2")),
Seq(part1.spec), Seq(partWithLessColumns.spec))
}
assert(e.getMessage.contains("Partition spec is invalid. The spec (a) must match " +
"the partition spec (a, b) defined in table '`db2`.`tbl1`'"))
e = intercept[AnalysisException] {
catalog.renamePartitions(
TableIdentifier("tbl1", Some("db2")),
Seq(part1.spec), Seq(partWithMoreColumns.spec))
}
assert(e.getMessage.contains("Partition spec is invalid. The spec (a, b, c) must match " +
"the partition spec (a, b) defined in table '`db2`.`tbl1`'"))
e = intercept[AnalysisException] {
catalog.renamePartitions(
TableIdentifier("tbl1", Some("db2")),
Seq(part1.spec), Seq(partWithUnknownColumns.spec))
}
assert(e.getMessage.contains("Partition spec is invalid. The spec (a, unknown) must match " +
"the partition spec (a, b) defined in table '`db2`.`tbl1`'"))
}
test("alter partitions") {
val catalog = new SessionCatalog(newBasicCatalog())
val newLocation = newUriForDatabase()
// Alter but keep spec the same
val oldPart1 = catalog.getPartition(TableIdentifier("tbl2", Some("db2")), part1.spec)
val oldPart2 = catalog.getPartition(TableIdentifier("tbl2", Some("db2")), part2.spec)
catalog.alterPartitions(TableIdentifier("tbl2", Some("db2")), Seq(
oldPart1.copy(storage = storageFormat.copy(locationUri = Some(newLocation))),
oldPart2.copy(storage = storageFormat.copy(locationUri = Some(newLocation)))))
val newPart1 = catalog.getPartition(TableIdentifier("tbl2", Some("db2")), part1.spec)
val newPart2 = catalog.getPartition(TableIdentifier("tbl2", Some("db2")), part2.spec)
assert(newPart1.storage.locationUri == Some(newLocation))
assert(newPart2.storage.locationUri == Some(newLocation))
assert(oldPart1.storage.locationUri != Some(newLocation))
assert(oldPart2.storage.locationUri != Some(newLocation))
// Alter partitions without explicitly specifying database
catalog.setCurrentDatabase("db2")
catalog.alterPartitions(TableIdentifier("tbl2"), Seq(oldPart1, oldPart2))
val newerPart1 = catalog.getPartition(TableIdentifier("tbl2"), part1.spec)
val newerPart2 = catalog.getPartition(TableIdentifier("tbl2"), part2.spec)
assert(oldPart1.storage.locationUri == newerPart1.storage.locationUri)
assert(oldPart2.storage.locationUri == newerPart2.storage.locationUri)
// Alter but change spec, should fail because new partition specs do not exist yet
val badPart1 = part1.copy(spec = Map("a" -> "v1", "b" -> "v2"))
val badPart2 = part2.copy(spec = Map("a" -> "v3", "b" -> "v4"))
intercept[AnalysisException] {
catalog.alterPartitions(TableIdentifier("tbl2", Some("db2")), Seq(badPart1, badPart2))
}
}
test("alter partitions when database/table does not exist") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[NoSuchDatabaseException] {
catalog.alterPartitions(TableIdentifier("tbl1", Some("unknown_db")), Seq(part1))
}
intercept[NoSuchTableException] {
catalog.alterPartitions(TableIdentifier("does_not_exist", Some("db2")), Seq(part1))
}
}
test("alter partition with invalid partition spec") {
val catalog = new SessionCatalog(newBasicCatalog())
var e = intercept[AnalysisException] {
catalog.alterPartitions(TableIdentifier("tbl1", Some("db2")), Seq(partWithLessColumns))
}
assert(e.getMessage.contains("Partition spec is invalid. The spec (a) must match " +
"the partition spec (a, b) defined in table '`db2`.`tbl1`'"))
e = intercept[AnalysisException] {
catalog.alterPartitions(TableIdentifier("tbl1", Some("db2")), Seq(partWithMoreColumns))
}
assert(e.getMessage.contains("Partition spec is invalid. The spec (a, b, c) must match " +
"the partition spec (a, b) defined in table '`db2`.`tbl1`'"))
e = intercept[AnalysisException] {
catalog.alterPartitions(TableIdentifier("tbl1", Some("db2")), Seq(partWithUnknownColumns))
}
assert(e.getMessage.contains("Partition spec is invalid. The spec (a, unknown) must match " +
"the partition spec (a, b) defined in table '`db2`.`tbl1`'"))
}
test("list partition names") {
val catalog = new SessionCatalog(newBasicCatalog())
val expectedPartitionNames = Seq("a=1/b=2", "a=3/b=4")
assert(catalog.listPartitionNames(TableIdentifier("tbl2", Some("db2"))) ==
expectedPartitionNames)
// List partition names without explicitly specifying database
catalog.setCurrentDatabase("db2")
assert(catalog.listPartitionNames(TableIdentifier("tbl2")) == expectedPartitionNames)
}
test("list partition names with partial partition spec") {
val catalog = new SessionCatalog(newBasicCatalog())
assert(
catalog.listPartitionNames(TableIdentifier("tbl2", Some("db2")), Some(Map("a" -> "1"))) ==
Seq("a=1/b=2"))
}
test("list partition names with invalid partial partition spec") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[AnalysisException] {
catalog.listPartitionNames(TableIdentifier("tbl2", Some("db2")),
Some(Map("unknown" -> "unknown")))
}
}
test("list partitions") {
val catalog = new SessionCatalog(newBasicCatalog())
assert(catalogPartitionsEqual(
catalog.listPartitions(TableIdentifier("tbl2", Some("db2"))), part1, part2))
// List partitions without explicitly specifying database
catalog.setCurrentDatabase("db2")
assert(catalogPartitionsEqual(catalog.listPartitions(TableIdentifier("tbl2")), part1, part2))
}
test("list partitions with partial partition spec") {
val catalog = new SessionCatalog(newBasicCatalog())
assert(catalogPartitionsEqual(
catalog.listPartitions(TableIdentifier("tbl2", Some("db2")), Some(Map("a" -> "1"))), part1))
}
test("list partitions with invalid partial partition spec") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[AnalysisException] {
catalog.listPartitions(
TableIdentifier("tbl2", Some("db2")), Some(Map("unknown" -> "unknown")))
}
}
test("list partitions when database/table does not exist") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[NoSuchDatabaseException] {
catalog.listPartitions(TableIdentifier("tbl1", Some("unknown_db")))
}
intercept[NoSuchTableException] {
catalog.listPartitions(TableIdentifier("does_not_exist", Some("db2")))
}
}
private def catalogPartitionsEqual(
actualParts: Seq[CatalogTablePartition],
expectedParts: CatalogTablePartition*): Boolean = {
// ExternalCatalog may set a default location for partitions, here we ignore the partition
// location when comparing them.
actualParts.map(p => p.copy(storage = p.storage.copy(locationUri = None))).toSet ==
expectedParts.map(p => p.copy(storage = p.storage.copy(locationUri = None))).toSet
}
// --------------------------------------------------------------------------
// Functions
// --------------------------------------------------------------------------
test("basic create and list functions") {
val externalCatalog = newEmptyCatalog()
val sessionCatalog = new SessionCatalog(externalCatalog)
sessionCatalog.createDatabase(newDb("mydb"), ignoreIfExists = false)
sessionCatalog.createFunction(newFunc("myfunc", Some("mydb")), ignoreIfExists = false)
assert(externalCatalog.listFunctions("mydb", "*").toSet == Set("myfunc"))
// Create function without explicitly specifying database
sessionCatalog.setCurrentDatabase("mydb")
sessionCatalog.createFunction(newFunc("myfunc2"), ignoreIfExists = false)
assert(externalCatalog.listFunctions("mydb", "*").toSet == Set("myfunc", "myfunc2"))
}
test("create function when database does not exist") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[NoSuchDatabaseException] {
catalog.createFunction(
newFunc("func5", Some("does_not_exist")), ignoreIfExists = false)
}
}
test("create function that already exists") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[FunctionAlreadyExistsException] {
catalog.createFunction(newFunc("func1", Some("db2")), ignoreIfExists = false)
}
catalog.createFunction(newFunc("func1", Some("db2")), ignoreIfExists = true)
}
test("create temp function") {
val catalog = new SessionCatalog(newBasicCatalog())
val tempFunc1 = (e: Seq[Expression]) => e.head
val tempFunc2 = (e: Seq[Expression]) => e.last
val info1 = new ExpressionInfo("tempFunc1", "temp1")
val info2 = new ExpressionInfo("tempFunc2", "temp2")
catalog.createTempFunction("temp1", info1, tempFunc1, ignoreIfExists = false)
catalog.createTempFunction("temp2", info2, tempFunc2, ignoreIfExists = false)
val arguments = Seq(Literal(1), Literal(2), Literal(3))
assert(catalog.lookupFunction(FunctionIdentifier("temp1"), arguments) === Literal(1))
assert(catalog.lookupFunction(FunctionIdentifier("temp2"), arguments) === Literal(3))
// Temporary function does not exist.
intercept[NoSuchFunctionException] {
catalog.lookupFunction(FunctionIdentifier("temp3"), arguments)
}
val tempFunc3 = (e: Seq[Expression]) => Literal(e.size)
val info3 = new ExpressionInfo("tempFunc3", "temp1")
// Temporary function already exists
intercept[TempFunctionAlreadyExistsException] {
catalog.createTempFunction("temp1", info3, tempFunc3, ignoreIfExists = false)
}
// Temporary function is overridden
catalog.createTempFunction("temp1", info3, tempFunc3, ignoreIfExists = true)
assert(
catalog.lookupFunction(FunctionIdentifier("temp1"), arguments) === Literal(arguments.length))
}
test("isTemporaryFunction") {
val externalCatalog = newBasicCatalog()
val sessionCatalog = new SessionCatalog(externalCatalog)
// Returns false when the function does not exist
assert(!sessionCatalog.isTemporaryFunction(FunctionIdentifier("temp1")))
val tempFunc1 = (e: Seq[Expression]) => e.head
val info1 = new ExpressionInfo("tempFunc1", "temp1")
sessionCatalog.createTempFunction("temp1", info1, tempFunc1, ignoreIfExists = false)
// Returns true when the function is temporary
assert(sessionCatalog.isTemporaryFunction(FunctionIdentifier("temp1")))
// Returns false when the function is permanent
assert(externalCatalog.listFunctions("db2", "*").toSet == Set("func1"))
assert(!sessionCatalog.isTemporaryFunction(FunctionIdentifier("func1", Some("db2"))))
assert(!sessionCatalog.isTemporaryFunction(FunctionIdentifier("db2.func1")))
sessionCatalog.setCurrentDatabase("db2")
assert(!sessionCatalog.isTemporaryFunction(FunctionIdentifier("func1")))
// Returns false when the function is built-in or hive
assert(FunctionRegistry.builtin.functionExists("sum"))
assert(!sessionCatalog.isTemporaryFunction(FunctionIdentifier("sum")))
assert(!sessionCatalog.isTemporaryFunction(FunctionIdentifier("histogram_numeric")))
}
test("drop function") {
val externalCatalog = newBasicCatalog()
val sessionCatalog = new SessionCatalog(externalCatalog)
assert(externalCatalog.listFunctions("db2", "*").toSet == Set("func1"))
sessionCatalog.dropFunction(
FunctionIdentifier("func1", Some("db2")), ignoreIfNotExists = false)
assert(externalCatalog.listFunctions("db2", "*").isEmpty)
// Drop function without explicitly specifying database
sessionCatalog.setCurrentDatabase("db2")
sessionCatalog.createFunction(newFunc("func2", Some("db2")), ignoreIfExists = false)
assert(externalCatalog.listFunctions("db2", "*").toSet == Set("func2"))
sessionCatalog.dropFunction(FunctionIdentifier("func2"), ignoreIfNotExists = false)
assert(externalCatalog.listFunctions("db2", "*").isEmpty)
}
test("drop function when database/function does not exist") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[NoSuchDatabaseException] {
catalog.dropFunction(
FunctionIdentifier("something", Some("unknown_db")), ignoreIfNotExists = false)
}
intercept[NoSuchFunctionException] {
catalog.dropFunction(FunctionIdentifier("does_not_exist"), ignoreIfNotExists = false)
}
catalog.dropFunction(FunctionIdentifier("does_not_exist"), ignoreIfNotExists = true)
}
test("drop temp function") {
val catalog = new SessionCatalog(newBasicCatalog())
val info = new ExpressionInfo("tempFunc", "func1")
val tempFunc = (e: Seq[Expression]) => e.head
catalog.createTempFunction("func1", info, tempFunc, ignoreIfExists = false)
val arguments = Seq(Literal(1), Literal(2), Literal(3))
assert(catalog.lookupFunction(FunctionIdentifier("func1"), arguments) === Literal(1))
catalog.dropTempFunction("func1", ignoreIfNotExists = false)
intercept[NoSuchFunctionException] {
catalog.lookupFunction(FunctionIdentifier("func1"), arguments)
}
intercept[NoSuchTempFunctionException] {
catalog.dropTempFunction("func1", ignoreIfNotExists = false)
}
catalog.dropTempFunction("func1", ignoreIfNotExists = true)
}
test("get function") {
val catalog = new SessionCatalog(newBasicCatalog())
val expected =
CatalogFunction(FunctionIdentifier("func1", Some("db2")), funcClass,
Seq.empty[FunctionResource])
assert(catalog.getFunctionMetadata(FunctionIdentifier("func1", Some("db2"))) == expected)
// Get function without explicitly specifying database
catalog.setCurrentDatabase("db2")
assert(catalog.getFunctionMetadata(FunctionIdentifier("func1")) == expected)
}
test("get function when database/function does not exist") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[NoSuchDatabaseException] {
catalog.getFunctionMetadata(FunctionIdentifier("func1", Some("unknown_db")))
}
intercept[NoSuchFunctionException] {
catalog.getFunctionMetadata(FunctionIdentifier("does_not_exist", Some("db2")))
}
}
test("lookup temp function") {
val catalog = new SessionCatalog(newBasicCatalog())
val info1 = new ExpressionInfo("tempFunc1", "func1")
val tempFunc1 = (e: Seq[Expression]) => e.head
catalog.createTempFunction("func1", info1, tempFunc1, ignoreIfExists = false)
assert(catalog.lookupFunction(
FunctionIdentifier("func1"), Seq(Literal(1), Literal(2), Literal(3))) == Literal(1))
catalog.dropTempFunction("func1", ignoreIfNotExists = false)
intercept[NoSuchFunctionException] {
catalog.lookupFunction(FunctionIdentifier("func1"), Seq(Literal(1), Literal(2), Literal(3)))
}
}
test("list functions") {
val catalog = new SessionCatalog(newBasicCatalog())
val info1 = new ExpressionInfo("tempFunc1", "func1")
val info2 = new ExpressionInfo("tempFunc2", "yes_me")
val tempFunc1 = (e: Seq[Expression]) => e.head
val tempFunc2 = (e: Seq[Expression]) => e.last
catalog.createFunction(newFunc("func2", Some("db2")), ignoreIfExists = false)
catalog.createFunction(newFunc("not_me", Some("db2")), ignoreIfExists = false)
catalog.createTempFunction("func1", info1, tempFunc1, ignoreIfExists = false)
catalog.createTempFunction("yes_me", info2, tempFunc2, ignoreIfExists = false)
assert(catalog.listFunctions("db1", "*").map(_._1).toSet ==
Set(FunctionIdentifier("func1"),
FunctionIdentifier("yes_me")))
assert(catalog.listFunctions("db2", "*").map(_._1).toSet ==
Set(FunctionIdentifier("func1"),
FunctionIdentifier("yes_me"),
FunctionIdentifier("func1", Some("db2")),
FunctionIdentifier("func2", Some("db2")),
FunctionIdentifier("not_me", Some("db2"))))
assert(catalog.listFunctions("db2", "func*").map(_._1).toSet ==
Set(FunctionIdentifier("func1"),
FunctionIdentifier("func1", Some("db2")),
FunctionIdentifier("func2", Some("db2"))))
}
test("list functions when database does not exist") {
val catalog = new SessionCatalog(newBasicCatalog())
intercept[NoSuchDatabaseException] {
catalog.listFunctions("unknown_db", "func*")
}
}
}
|
Panos-Bletsos/spark-cost-model-optimizer
|
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
|
Scala
|
apache-2.0
| 52,063
|
// A sealed hierarchy: every subclass must live in this file, so the compiler can check
// pattern matches over Base for exhaustiveness.
sealed abstract class Base
case class First(name: String) extends Base
case class Second(name: String) extends Base
case class Third(name: String) extends Base

val value: Base = Second("hello")

// Matching on the case classes extracts their fields by position.
value match {
  case First(name) => println("It was first! " + name)
  case Second(name) => println("It was second! " + name)
  // case Third(name) => println("It was third! " + name) // omitted: triggers a non-exhaustive match warning
}
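
// Hedged example (editor's addition): because Base is sealed, the compiler can verify the
// match above for exhaustiveness and warns that Third is unhandled; a Third value would
// throw a MatchError at runtime. A wildcard case is one way to keep the match total.
val another: Base = Third("bye")
another match {
  case First(name)  => println("It was first! " + name)
  case Second(name) => println("It was second! " + name)
  case other        => println("Something else: " + other)
}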
|
BinaryMuse/icebreaker
|
notes/case_class.scala
|
Scala
|
mit
| 386
|