code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Copyright 2010-2011 Vilius Normantas <code@norma.lt>
*
* This file is part of Crossbow library.
*
* Crossbow is free software: you can redistribute it and/or modify it under the terms of the GNU
* General Public License as published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* Crossbow is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with Crossbow. If not,
* see <http://www.gnu.org/licenses/>.
*/
package lt.norma.crossbow.indicators
import lt.norma.crossbow.messages._
/** Calculates exponential moving average of the specified indicator. In case indicator's value is
* unset, `EmaContinuous` retains the last value. */
class EmaContinuous(period: Int, indicator: Indicator[Double]) extends ListenerIndicator[Double] {
  def name = s"EMA_C($period; ${indicator.name})"
  if (period < 1)
    throw new IllegalArgumentException(s"Period of $name indicator cannot be less than 1")
  def dependencies = Set(indicator)
  // Smoothing factor: the weight given to the newest observation.
  val e = 2.0 / (period + 1)
  def receive = {
    case BarClose(_) =>
      indicator() match {
        case Some(current) =>
          optionalValue match {
            // Blend the new observation with the running average.
            case Some(previous) => set(e * current + (1.0 - e) * previous)
            // First observation seeds the average directly.
            case None => set(current)
          }
        // Indicator unset: keep the previously computed value.
        case None =>
      }
  }
}
| ViliusN/Crossbow | crossbow-core/src/lt/norma/crossbow/indicators/EmaContinuous.scala | Scala | gpl-3.0 | 1,516 |
/*
* Copyright 2016 Dennis Vriend
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package akka.persistence.jdbc
package journal.dao
import akka.persistence.jdbc.config.JournalTableConfiguration
import slick.jdbc.JdbcProfile
/**
 * Slick query and DML definitions for the journal table. Statements that are
 * issued repeatedly are pre-compiled with Slick's `Compiled` so the SQL is
 * generated once and reused.
 */
class JournalQueries(val profile: JdbcProfile, override val journalTableCfg: JournalTableConfiguration) extends JournalTables {
import profile.api._
// Pre-compiled insert target for the journal table.
private val JournalTableC = Compiled(JournalTable)
/** Batch-inserts the given rows, ordered by sequence number. */
def writeJournalRows(xs: Seq[JournalRow]) =
JournalTableC ++= xs.sortBy(_.sequenceNumber)
// All rows of one persistence id, highest sequence number first.
private def selectAllJournalForPersistenceId(persistenceId: Rep[String]) =
JournalTable.filter(_.persistenceId === persistenceId).sortBy(_.sequenceNumber.desc)
/** Physically deletes all events of `persistenceId` up to and including `toSequenceNr`. */
def delete(persistenceId: String, toSequenceNr: Long) = {
JournalTable
.filter(_.persistenceId === persistenceId)
.filter(_.sequenceNumber <= toSequenceNr)
.delete
}
/**
 * Updates (!) a payload stored in a specific events row.
 * Intended to be used sparingly, e.g. moving all events to their encrypted counterparts.
 */
def update(persistenceId: String, seqNr: Long, replacement: Array[Byte]) = {
val baseQuery = JournalTable
.filter(_.persistenceId === persistenceId)
.filter(_.sequenceNumber === seqNr)
baseQuery.map(_.message).update(replacement)
}
/** Logically deletes events up to `maxSequenceNr` by setting their `deleted` flag. */
def markJournalMessagesAsDeleted(persistenceId: String, maxSequenceNr: Long) =
JournalTable
.filter(_.persistenceId === persistenceId)
.filter(_.sequenceNumber <= maxSequenceNr)
.filter(_.deleted === false)
.map(_.deleted).update(true)
// Highest sequence number = first row of the descending-sorted selection.
private def _highestSequenceNrForPersistenceId(persistenceId: Rep[String]): Query[Rep[Long], Long, Seq] =
selectAllJournalForPersistenceId(persistenceId).map(_.sequenceNumber).take(1)
// NOTE(review): despite the name, this query yields *all* marked sequence
// numbers (in descending order); callers presumably take the first result —
// confirm at the call site.
private def _highestMarkedSequenceNrForPersistenceId(persistenceId: Rep[String]): Query[Rep[Long], Long, Seq] =
selectAllJournalForPersistenceId(persistenceId).filter(_.deleted === true).map(_.sequenceNumber)
val highestSequenceNrForPersistenceId = Compiled(_highestSequenceNrForPersistenceId _)
val highestMarkedSequenceNrForPersistenceId = Compiled(_highestMarkedSequenceNrForPersistenceId _)
// Events of one persistence id with sequence number <= max, descending.
private def _selectByPersistenceIdAndMaxSequenceNumber(persistenceId: Rep[String], maxSequenceNr: Rep[Long]) =
selectAllJournalForPersistenceId(persistenceId).filter(_.sequenceNumber <= maxSequenceNr)
val selectByPersistenceIdAndMaxSequenceNumber = Compiled(_selectByPersistenceIdAndMaxSequenceNumber _)
// Distinct persistence ids present in the journal.
private def _allPersistenceIdsDistinct: Query[Rep[String], String, Seq] =
JournalTable.map(_.persistenceId).distinct
val allPersistenceIdsDistinct = Compiled(_allPersistenceIdsDistinct)
/** Persistence-id column restricted to the given set (bound as parameters). */
def journalRowByPersistenceIds(persistenceIds: Iterable[String]): Query[Rep[String], String, Seq] = for {
query <- JournalTable.map(_.persistenceId)
if query inSetBind persistenceIds
} yield query
// Replay query: undeleted events of one persistence id within the sequence
// number range, ascending, capped at `max` rows.
private def _messagesQuery(persistenceId: Rep[String], fromSequenceNr: Rep[Long], toSequenceNr: Rep[Long], max: ConstColumn[Long]) =
JournalTable
.filter(_.persistenceId === persistenceId)
.filter(_.deleted === false)
.filter(_.sequenceNumber >= fromSequenceNr)
.filter(_.sequenceNumber <= toSequenceNr)
.sortBy(_.sequenceNumber.asc)
.take(max)
val messagesQuery = Compiled(_messagesQuery _)
}
| gavares/akka-persistence-jdbc | src/main/scala/akka/persistence/jdbc/journal/dao/JournalQueries.scala | Scala | apache-2.0 | 3,854 |
package sbt
package std
import Def.Initialize
import sbt.internal.util.Types.{ Id, idFun }
import sbt.internal.util.AList
import sbt.internal.util.appmacro.{
Convert,
Converted,
Instance,
LinterDSL,
MixedBuilder,
MonadInstance
}
/**
 * The monad operations for sbt's `Initialize`, delegating to the
 * corresponding combinators on `Def`. Consumed by the setting macros below.
 */
object InitializeInstance extends MonadInstance {
type M[x] = Initialize[x]
def app[K[L[x]], Z](in: K[Initialize], f: K[Id] => Z)(implicit a: AList[K]): Initialize[Z] =
Def.app[K, Z](in)(f)(a)
def map[S, T](in: Initialize[S], f: S => T): Initialize[T] = Def.map(in)(f)
// flatten is bind with the identity function.
def flatten[T](in: Initialize[Initialize[T]]): Initialize[T] = Def.bind(in)(idFun[Initialize[T]])
def pure[T](t: () => T): Initialize[T] = Def.pure(t)
}
import reflect.macros._
/**
 * Converts wrapper calls found in a setting macro body. Only plain
 * `Initialize` wrappers are convertible; task wrappers and `.previous`
 * wrappers are rejected with a compile error, since a setting cannot
 * depend on a task or on a task's previous value.
 */
object InitializeConvert extends Convert {
def apply[T: c.WeakTypeTag](c: blackbox.Context)(nme: String, in: c.Tree): Converted[c.type] =
nme match {
case InputWrapper.WrapInitName => convert[T](c)(in)
case InputWrapper.WrapTaskName | InputWrapper.WrapInitTaskName => failTask[c.type](c)(in.pos)
case InputWrapper.WrapPreviousName => failPrevious[c.type](c)(in.pos)
case _ => Converted.NotApplicable
}
// Re-types the tree as Initialize[T] via reify/splice and returns it as a success.
private def convert[T: c.WeakTypeTag](c: blackbox.Context)(in: c.Tree): Converted[c.type] = {
val i = c.Expr[Initialize[T]](in)
val t = c.universe.reify(i.splice).tree
Converted.Success(t)
}
// Failure raised for task wrappers inside a setting body.
private def failTask[C <: blackbox.Context with Singleton](c: C)(
pos: c.Position): Converted[c.type] =
Converted.Failure(pos, "A setting cannot depend on a task")
// Failure raised for `.previous` wrappers inside a setting body.
private def failPrevious[C <: blackbox.Context with Singleton](c: C)(
pos: c.Position): Converted[c.type] =
Converted.Failure(pos, "A setting cannot depend on a task's previous value.")
}
/** Macro implementations for settings, built on Instance.contImpl with the
 * Initialize instance/convert defined above and no lint checks. */
object SettingMacro {
import LinterDSL.{ Empty => EmptyLinter }
// Direct form: the macro body produces a plain T (passed as Left).
def settingMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[T]): c.Expr[Initialize[T]] =
Instance.contImpl[T, Id](c, InitializeInstance, InitializeConvert, MixedBuilder, EmptyLinter)(
Left(t),
Instance.idTransform[c.type])
// Dynamic form: the macro body itself produces an Initialize[T] (passed as Right).
def settingDynMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(
t: c.Expr[Initialize[T]]): c.Expr[Initialize[T]] =
Instance.contImpl[T, Id](c, InitializeInstance, InitializeConvert, MixedBuilder, EmptyLinter)(
Right(t),
Instance.idTransform[c.type])
}
| Duhemm/sbt | main-settings/src/main/scala/sbt/std/SettingMacro.scala | Scala | bsd-3-clause | 2,444 |
package au.com.dius.pact.provider.sbtsupport
import java.io.File
import au.com.dius.pact.model.RequestResponsePact
import au.com.dius.pact.provider.PactFileSource
import org.scalatest._
/** Entry points for loading pact files and running them as a provider-side test suite. */
object Main {
/** Loads the pact configuration from `configFile` and all pact files under `pactRoot`. */
def loadFiles(pactRoot: File, configFile: File) = {
val config = PactConfiguration.loadConfiguration(configFile)
(config, PactFileSource.loadFiles(pactRoot))
}
/** Wraps each pact in a PactSpec and runs them sequentially as one suite. */
def runPacts(t:(PactConfiguration, Seq[RequestResponsePact])) = t match { case (config, pacts) =>
val suite = new Sequential(pacts.map { pact =>
new PactSpec(config, pact)
}: _*)
// NOTE(review): `stats` is not defined in this file or its visible imports —
// presumably supplied elsewhere; verify this resolves and compiles.
stats.fullstacks.run(suite)
}
}
| flaregames/pact-jvm | pact-jvm-provider/src/main/scala/au/com/dius/pact/provider/sbtsupport/Main.scala | Scala | apache-2.0 | 609 |
/**
* Created with IntelliJ IDEA.
* User: rick
* Date: 6/20/14
* Time: 11:12 AM
*
*/
import org.scalatest.{FunSpec, BeforeAndAfter}
/**
 * Unit tests for Rational: addition, comparison, normalisation (gcd),
 * equality, multiplication and the implicit Int-to-Rational conversion.
 */
class RationalSpec extends FunSpec with BeforeAndAfter {
  var r1: Rational = _
  var r2: Rational = _

  // BeforeAndAfter#before takes a single by-name block. The original code
  // passed two comma-separated assignments, which only compiled through
  // deprecated argument auto-tupling; use an explicit block instead.
  before {
    r1 = new Rational(3, 5)
    r2 = new Rational(1, 2)
  }

  describe("RationalSpec") {
    it("add") {
      val s1 = r1.add(r2).toString()
      assert(s1 === "11/10")
      println("add done.")
    }
    it("lessThan") {
      val b = r2.lessThan(r1)
      println(b)
      assert(b)
      println("lessThan done.")
    }
    it("max") {
      assert(r1.max(r2) === r1)
      println("max done.")
    }
    // greatest common divisor: 2/4 should normalise to 1/2
    it("gcd") {
      val r3 = new Rational(2, 4)
      println(r3.number)
      println(r3.denom)
      assert(r2.equals(r3))
      println("gcd done.")
    }
    it("equal") {
      val r3 = new Rational(2, 4)
      println(r3)
      assert(r2 === r3)
      println("equal done.")
    }
    it("*") {
      val r4 = new Rational(3, 10)
      assert(r4 === r1 * r2)
      println("* done.")
    }
    it("implicit") {
      assert(r2 * 2 === new Rational(1))
      println("implicit done.")
    }
  }
}
| sunjun81/ggstudy | scalatest/RationalSpec.scala | Scala | mit | 1,188 |
package akka.duke.taskmanager
/** A task that can be temporarily suspended and later resumed. */
trait Pausable {
/** Suspends the task. */
def pause(): Unit
/** Resumes a previously paused task. */
def resume(): Unit
}
| Taerus/taskmanager | taskmanager-core/src/main/scala/akka/duke/taskmanager/Pausable.scala | Scala | bsd-3-clause | 94 |
package com.azavea.gtfs
import com.github.nscala_time.time.Imports._
import org.scalatest._
/**
 * Placeholder suite for the (not yet implemented) CompressTrips operation.
 * The intended test cases — detecting "sameish" trips that differ only by a
 * time offset, and binning trips accordingly — are kept below as comments.
 */
class CompressTripsSpec extends FlatSpec with Matchers {
// TODO: Implement these tests whenever we need to implement CompressTrips
// val trip1 = Trip("T1","SR1","R1",None,
// List(
// StopTime("S1","T1", 1, 0.seconds, 1.minute),
// StopTime("S2","T1", 2, 10.minutes, 11.minutes),
// StopTime("S3","T1", 3, 15.minutes, 16.minutes)
// )
// )
// val trip2 = Trip("T2","SR1","R1",None,
// List(
// StopTime("S1","T2", 1, 1.minute + 0.seconds, 1.minute + 1.minute),
// StopTime("S2","T2", 1, 1.minute + 10.minutes, 1.minute + 11.minutes),
// StopTime("S3","T2", 1, 1.minute + 15.minutes, 1.minute + 16.minutes)
// )
// )
// val trip3 = Trip("T3","SR1","R1",None,
// List(
// StopTime("S1","T3", 1, 1.minute + 0.seconds, 1.minute + 1.minute),
// StopTime("S2","T3", 1, 1.minute + 10.minutes, 1.hour + 11.minutes), //long break here
// StopTime("S3","T3", 1, 1.hour + 15.minutes, 1.hour + 16.minutes)
// )
// )
// it should "recognize a shameish trip" in {
// assert(trip1 sameish trip2, "Two trips that differ only in time offset are sameish")
// assert(! (trip1 sameish trip3), "Two trips that differ in stop intervals are not sameish")
// }
// it should "know how to segregate" in {
// val bins = Trip.bin(trip1 :: trip2 :: trip3 :: Nil)
// val big = bins.find(_.size == 2).get
// val small = bins.find(_.size == 1).get
// big should contain (trip1)
// big should contain (trip2)
// small should contain (trip3)
// }
}
| WorldBank-Transport/open-transit-indicators | scala/gtfs-test/src/test/scala/com/azavea/gtfs/op/CompressTripsSpec.scala | Scala | gpl-3.0 | 1,646 |
import java.io.File
package object common {
  /** Alias for `Nothing`, marking a type that should be filled in.
   *  (Note: the name shadows `Predef.???` when used as a type.)
   */
  type ??? = Nothing

  /** Alias for `Any`, marking a type that should be filled in. */
  type *** = Any

  /**
   * Resolve a descendant of `file` by walking down the given path segments;
   * e.g. subFile(homeDir, "b", "c") corresponds to ~/b/c.
   */
  def subFile(file: File, children: String*) =
    children.foldLeft(file)((parent, child) => new File(parent, child))

  /**
   * Load a resource from the `src/main/resources` directory. Eclipse does not
   * copy resources to the output directory, so the class loader cannot find
   * them; instead this walks three levels up from the classes directory to the
   * project root and reads the file from the source tree.
   */
  def resourceAsStreamFromSrc(resourcePath: List[String]): Option[java.io.InputStream] = {
    val classesDir = new File(getClass.getResource(".").toURI)
    val projectDir = classesDir.getParentFile.getParentFile.getParentFile
    val resourceFile = subFile(projectDir, ("src" :: "main" :: "resources" :: resourcePath): _*)
    if (resourceFile.exists) Some(new java.io.FileInputStream(resourceFile))
    else None
  }
}
| isc-carlos-gomez/scala-trial | forcomp/src/main/scala/common/package.scala | Scala | mit | 1,126 |
/* *\\
** \\ \\ / _) \\ \\ / \\ | **
** \\ \\ / | __ \\ _ \\ __| \\ \\ / |\\/ | **
** \\ \\ / | | | __/ | \\ \\ / | | **
** \\_/ _| .__/ \\___| _| \\_/ _| _| **
** _| **
** **
** ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ **
** **
** http://www.vipervm.org **
** GPLv3 **
\\* */
package org.vipervm.library.linearalgebra.kernels.jvm
import org.vipervm.library.linearalgebra.kernels.prototypes._
import org.vipervm.platform.jvm._
/**
 * JVM implementation of element-wise float matrix addition: c = a + b.
 * Reads/writes 32-bit floats (4 bytes each) in row-major order.
 */
object FloatMatrixAdditionJVM extends JVMKernel with FloatMatrixAdditionPrototype {
def fun(params:Seq[Any]): Unit = {
// NOTE(review): `width`, `height`, `a`, `b`, `c` come from the prototype
// trait, and the `.peer` / numeric usage of Seq[Any] elements presumably
// relies on conversions it provides — confirm against the prototype.
val (w,h) = (params(width), params(height))
val (m1,m2,m3) = (params(a).peer,params(b).peer,params(c).peer)
var i = 0L
while (i < h) {
var j = 0L
while (j < w) {
// Byte offset of element (i, j): 4 bytes per float, row-major layout.
val pos = (i*w+j)*4
m3.setFloat(pos,m1.getFloat(pos) + m2.getFloat(pos))
j += 1
}
i += 1
}
}
}
| hsyl20/Scala_ViperVM | src/main/scala/org/vipervm/library/linearalgebra/kernels/jvm/FloatMatrixAdditionJVM.scala | Scala | gpl-3.0 | 1,263 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.dataload
import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapreduce.lib.input.FileSplit
import org.apache.spark.rdd.{DataLoadPartitionCoalescer, RDD}
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.expressions.GenericRow
import org.apache.spark.sql.common.util.QueryTest
import org.apache.spark.{Partition, SerializableWritable, SparkContext, TaskContext}
import org.scalatest.BeforeAndAfterAll
/**
 * Tests for DataLoadPartitionCoalescer: coalescing input partitions onto a
 * fixed node list while honouring the partitions' preferred locations.
 * Correctness of each result is delegated to
 * DataLoadPartitionCoalescer.checkPartition.
 */
class TestDataLoadPartitionCoalescer extends QueryTest with BeforeAndAfterAll {
// The node list the coalescer distributes partitions over (set in beforeAll).
var nodeList: Array[String] = _
/** Partition wrapping a Hadoop FileSplit; the split's hosts serve as preferred locations. */
class DummyPartition(val index: Int,
rawSplit: FileSplit) extends Partition {
val serializableHadoopSplit = new SerializableWritable(rawSplit)
}
/** Minimal RDD exposing the given partitions; each partition yields a single empty row. */
class Dummy(sc: SparkContext, partitions: Array[Partition]) extends RDD[Row](sc, Nil) {
override def compute(split: Partition, context: TaskContext): Iterator[Row] = {
// One-shot iterator: hasNext is true exactly once.
new Iterator[Row] {
var isFirst = true;
override def hasNext: Boolean = isFirst;
override def next(): Row = {
isFirst = false
new GenericRow(Array[Any]())
}
}
}
override protected def getPartitions: Array[Partition] = partitions
// Preferred locations are taken from the wrapped FileSplit's host list.
override protected def getPreferredLocations(split: Partition): Seq[String] = {
split.asInstanceOf[DummyPartition].serializableHadoopSplit.value.getLocations.toSeq
}
}
override def beforeAll: Unit = {
nodeList = Array("host1", "host2", "host3")
}
// Builds a partition over `file` whose preferred hosts are `hosts`.
def createPartition(index: Int, file: String, hosts: Array[String]) : Partition = {
new DummyPartition(index, new FileSplit(new Path(file), 0, 1, hosts))
}
// Runs the coalescer over `parts` against the fixed 3-node list.
def repartition(parts: Array[Partition]): Array[Partition] = {
new DataLoadPartitionCoalescer(new Dummy(sqlContext.sparkContext, parts), nodeList).run
}
// Verifies the coalesced result via the coalescer's own checker.
def checkPartition(prevParts: Array[Partition], parts: Array[Partition]): Unit = {
DataLoadPartitionCoalescer.checkPartition(prevParts, parts)
}
// 5 partitions on 3 nodes: result is capped at the node count.
test("test number of partitions is more than nodes's") {
val prevParts = Array[Partition](
createPartition(0, "1.csv", Array("host1", "host2", "host3")),
createPartition(1, "2.csv", Array("host1", "host2", "host3")),
createPartition(2, "3.csv", Array("host1", "host2", "host3")),
createPartition(3, "4.csv", Array("host1", "host2", "host3")),
createPartition(4, "5.csv", Array("host1", "host2", "host3"))
)
val parts = repartition(prevParts)
assert(parts.size == 3)
checkPartition(prevParts, parts)
}
test("test number of partitions equals nodes's") {
val prevParts = Array[Partition](
createPartition(0, "1.csv", Array("host1", "host2", "host3")),
createPartition(1, "2.csv", Array("host1", "host2", "host3")),
createPartition(2, "3.csv", Array("host1", "host2", "host3"))
)
val parts = repartition(prevParts)
assert(parts.size == 3)
checkPartition(prevParts, parts)
}
// 2 partitions on 3 nodes: result keeps the partition count.
test("test number of partitions is less than nodes's") {
val prevParts = Array[Partition](
createPartition(0, "1.csv", Array("host1", "host2", "host3")),
createPartition(1, "2.csv", Array("host1", "host2", "host3"))
)
val parts = repartition(prevParts)
assert(parts.size == 2)
checkPartition(prevParts, parts)
}
// NOTE(review): this test uses data identical to the previous one; it likely
// was meant to exercise a distinct locality scenario.
test("all partitions are locality") {
val prevParts = Array[Partition](
createPartition(0, "1.csv", Array("host1", "host2", "host3")),
createPartition(1, "2.csv", Array("host1", "host2", "host3"))
)
val parts = repartition(prevParts)
assert(parts.size == 2)
checkPartition(prevParts, parts)
}
// Mixed locality: one partition's hosts are fully outside the node list.
test("part of partitions are locality1") {
val prevParts = Array[Partition](
createPartition(0, "1.csv", Array("host1", "host2", "host3")),
createPartition(1, "2.csv", Array("host1", "host2", "host4")),
createPartition(2, "3.csv", Array("host4", "host5", "host6"))
)
val parts = repartition(prevParts)
assert(parts.size == 3)
checkPartition(prevParts, parts)
}
test("part of partitions are locality2") {
val prevParts = Array[Partition](
createPartition(0, "1.csv", Array("host1", "host2", "host3")),
createPartition(1, "2.csv", Array("host1", "host2", "host4")),
createPartition(2, "3.csv", Array("host3", "host5", "host6"))
)
val parts = repartition(prevParts)
assert(parts.size == 3)
checkPartition(prevParts, parts)
}
test("part of partitions are locality3") {
val prevParts = Array[Partition](
createPartition(0, "1.csv", Array("host1", "host2", "host7")),
createPartition(1, "2.csv", Array("host1", "host2", "host4")),
createPartition(2, "3.csv", Array("host4", "host5", "host6"))
)
val parts = repartition(prevParts)
assert(parts.size == 3)
checkPartition(prevParts, parts)
}
// Some partitions have no preferred hosts at all.
test("all partition are not locality") {
val prevParts = Array[Partition](
createPartition(0, "1.csv", Array()),
createPartition(1, "2.csv", Array()),
createPartition(2, "3.csv", Array("host4", "host5", "host6"))
)
val parts = repartition(prevParts)
assert(parts.size == 3)
checkPartition(prevParts, parts)
}
override def afterAll {
}
}
| mohammadshahidkhan/incubator-carbondata | integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadPartitionCoalescer.scala | Scala | apache-2.0 | 6,044 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.external.incorporatedentityid
import play.api.libs.json.{Json, OFormat}
/**
 * Journey configuration payload for the incorporated-entity-identification
 * service. Serialised to/from JSON via the companion's `format`.
 */
case class IncorpIdJourneyConfig(continueUrl: String,
optServiceName: Option[String] = None,
deskProServiceId: String,
signOutUrl: String,
accessibilityUrl: String,
regime: String,
businessVerificationCheck: Boolean)
object IncorpIdJourneyConfig {
// Macro-derived JSON (de)serialiser matching the field names above.
implicit val format: OFormat[IncorpIdJourneyConfig] = Json.format[IncorpIdJourneyConfig]
}
| hmrc/vat-registration-frontend | app/models/external/incorporatedentityid/IncorpIdJourneyConfig.scala | Scala | apache-2.0 | 1,234 |
package org.andrewconner.spot.cmdrs.client
import org.andrewconner.spot.cmdrs.instance._
// Client-facing instance information
/**
 * Client-facing snapshot of one EC2 instance type in one availability zone,
 * combining static hardware specs with observed spot-price statistics.
 */
case class InstanceStats(
// Instance information, from EC2Instance
name: String,
prettyName: String,
memory: Double,
ecu: Double,
cores: Double,
storage: Option[EC2Storage],
arch: Seq[EC2Arch],
networkPerformance: EC2NetworkPerformance,
enhancedNetworking: Boolean,
vpcOnly: Boolean,
linuxVirtualizationType: Seq[EC2VirtualizationType],
ebsOptimized: Boolean,
// Choice information
zone: String,
region: EC2Region,
platform: EC2Platform,
prices: EC2Pricing,
// Spot info
spotStatsThreshhold: Double,
weeklySpotStats: SpotPriceStats,
allTimeSpotStats: SpotPriceStats,
pricePastDay: Option[Double],
pricePastWeek: Option[Double],
priceSamples: Int,
periods: Int
)
/** Aggregated spot-price statistics over one history window. */
case class SpotPriceStats(
avg: Double, // average price over all history window
max: Double, // max price over all history window
perECU: Double, // based on threshold cost
perGbRam: Double, // based on threshold cost
discount: Double, // discount of threshold cost
price: Double // price such that instance stays up at least threshold of the time
)
| andrewconner/spotsy | app/org/andrewconner/spot/cmdrs/client/InstanceStats.scala | Scala | mit | 1,210 |
package colang.ast.parsed.routines
import colang.ast.parsed._
import colang.ast.raw
import colang.issues.{Issue, Terms}
import colang.tokens.NativeKeyword
private[routines] object RegisterMethods {

  /**
   * "Registers" methods in their types, doesn't parse bodies.
   * @param types types to check
   * @return (new methods, encountered issues)
   */
  def registerMethods(types: Seq[Type]): (Seq[Method], Seq[Issue]) = {
    val perType = types map { type_ =>
      type_.definition match {
        case Some(raw.TypeDefinition(_, _, _, Some(raw.TypeBody(_, methodDefs, _)))) =>
          val registered = methodDefs map { registerMethod(type_, _) }
          (registered map { _._1 }, registered flatMap { _._2 })
        case _ => (Seq.empty, Seq.empty)
      }
    }
    val (methodSeqs, issueSeqs) = perType.unzip
    (methodSeqs.flatten, issueSeqs.flatten)
  }

  /** Registers a single method definition in `type_`, returning it with any issues found. */
  private def registerMethod(type_ : Type, methodDef: raw.FunctionDefinition): (Method, Seq[Issue]) = {
    val (returnType, returnTypeIssues) = Type.resolve(type_, methodDef.returnType)

    // The body's scope and context must exist before parameters, because the
    // parameter variables live in the body's inner scope.
    val context = LocalContext(applicableKind = Terms.Method, expectedReturnType = returnType)
    val body = new CodeBlock(new LocalScope(Some(type_)), context, methodDef.body)

    val resolvedParams = methodDef.parameterList.params map { rawParam =>
      val (paramType, paramTypeIssues) = Type.resolve(type_, rawParam.type_)
      val variable = Variable(
        name = rawParam.name.value,
        scope = Some(body.innerScope),
        type_ = paramType,
        definition = Some(rawParam))
      (variable, paramTypeIssues)
    }
    val (params, paramIssueSeqs) = resolvedParams.unzip

    val method = new Method(
      name = methodDef.name.value,
      container = type_,
      returnType = returnType,
      parameters = params,
      body = body,
      definition = Some(methodDef),
      native = methodDef.specifiers.has(classOf[NativeKeyword]))

    val additionIssues = type_.tryAddMethod(method)
    (method, returnTypeIssues ++ paramIssueSeqs.flatten ++ additionIssues)
  }
}
| merkispavel/colang | src/main/scala/colang/ast/parsed/routines/RegisterMethods.scala | Scala | mit | 2,205 |
package protocol.coap
import ch.ethz.inf.vs.californium.endpoint.ServerEndpoint
import ch.ethz.inf.vs.californium.endpoint.resources.LocalResource
import ch.ethz.inf.vs.californium.coap.GETRequest
import ch.ethz.inf.vs.californium.coap.registries.CodeRegistry
/** A CoAP server endpoint on the given port, exposing a single demo resource at "helloWorld". */
class CoapServer(port: Int) extends ServerEndpoint(port) {
addResource(new HelloWorldResource())
/** Resource answering GET requests with a constant "Hello World!" payload. */
class HelloWorldResource extends LocalResource("helloWorld") {
setTitle("Hello-World Resource")
override def performGET(request: GETRequest) = {
// respond to the request
request.respond(CodeRegistry.RESP_CONTENT, "Hello World!");
}
}
}
| liamjjmcnamara/sicsthsense | web/app/protocol/coap/CoapServer.scala | Scala | apache-2.0 | 630 |
package com.datastax.spark.connector.rdd
/** A row limit applied to reads, either per Cassandra partition or per Spark partition. */
sealed trait CassandraLimit

/** Caps rows returned from each Cassandra partition (CQL `PER PARTITION LIMIT`). */
case class CassandraPartitionLimit(rowsNumber: Long) extends CassandraLimit {
  require(rowsNumber > 0, s"$rowsNumber <= 0. Per Partition Limits must be greater than 0")
}

/** Caps rows returned for each Spark partition (CQL `LIMIT`). */
case class SparkPartitionLimit(rowsNumber: Long) extends CassandraLimit {
  require(rowsNumber > 0, s"$rowsNumber <= 0. Limits must be greater than 0")
}

object CassandraLimit {

  /** Renders the limit as its CQL clause; the empty string when no limit is set. */
  def limitToClause
  (limit: Option[CassandraLimit]): String = limit match {
    case Some(SparkPartitionLimit(n)) => s"LIMIT $n"
    case Some(CassandraPartitionLimit(n)) => s"PER PARTITION LIMIT $n"
    case None => ""
  }

  /** Row cap to enforce on the iterator side; only Spark-partition limits apply there. */
  def limitForIterator(limit: Option[CassandraLimit]): Option[Long] =
    limit flatMap {
      case SparkPartitionLimit(n) => Some(n)
      case _ => None
    }
}
| shashwat7/spark-cassandra-connector | spark-cassandra-connector/src/main/scala/com/datastax/spark/connector/rdd/CassandraLimit.scala | Scala | apache-2.0 | 834 |
package org.cloudio.morpheus.mail.traditional
import java.util.{Date, Calendar}
/**
* Created by zslajchrt on 24/08/15.
*/
/**
 * Demonstrates the traditional (trait-stacking) composition of the mail
 * services; the inline comments discuss the drawbacks of this approach.
 */
object App {
def main(args: Array[String]): Unit = {
// NOTE(review): both arguments are null here; adoptState below is presumably
// null-tolerant — confirm.
val userMail = initializeMailUser(null, null)
userMail.sendEmail(Message(List("pepa@gmail.com"), "Hello", "Hi, Pepa!", Nil))
// NOTE(review): this match is non-exhaustive — a MatchError is thrown at
// runtime when userMail is not a RegisteredUser with PremiumUser.
userMail match {
case r: RegisteredUser with PremiumUser =>
//...
}
}
/**
 * Builds a UserMail that delegates to the employee account between 8:00 and
 * 17:00 and to the registered-user account (premium variant when the premium
 * flag is set and validTo is in the future) at other times.
 */
def initializeMailUser(employee: Employee, registeredUser: RegisteredUser): UserMail = {
val employeeMail = new Employee() with
EmployeeAdapter with
DefaultUserMail with
EmployeeUserMail with
VirusDetector
employeeMail.adoptState(employee)
val regUserMail = new RegisteredUser() with
RegisteredUserAdapter with
DefaultUserMail with
RegisteredUserMail with
VirusDetector
regUserMail.adoptState(registeredUser)
val regUserMailPremium = new RegisteredUser() with PremiumUser with
RegisteredUserAdapter with
DefaultUserMail with
RegisteredUserMail with
VirusDetector with
DefaultFaxByMail
regUserMailPremium.adoptState(registeredUser)
// We still need to clone the state of both employee and registeredUser instances
// EmployeeUserMail and RegisteredUserMail are now more general since they extend UserMail and not DefaultUserMail
// VirusDetector no longer needs to override sendEmail; it validly extends DefaultUserMail; no schizophrenia.
// These adoption methods are annoying.
new AlternatingUserMail {
// Work hours (8:00-17:00) route to the employee mail; otherwise registered-user mail.
override protected def getDelegate: UserMail = {
val c = Calendar.getInstance()
def h = c.get(Calendar.HOUR_OF_DAY)
if (h >= 8 && h < 17) {
getEmployeeMail
} else {
getRegUserMail
}
}
def getEmployeeMail = {
employeeMail
}
// Premium account applies only while the subscription is valid.
def getRegUserMail = {
if (registeredUser.premium &&
registeredUser.validTo != null &&
registeredUser.validTo.after(new Date()))
regUserMailPremium
else
regUserMail
}
}
// The client must be fixed to AlternatingUserMail through which it can determine whether the service supports fax.
// VirusDetector trait is specified twice; this duplicity may cause some problems:
// 1) the virusCounter will exist in two copies, which may cause problems when monitoring the counter, for instance.
// 2) it may use more system resources
// 3) error-prone when refactoring the source code; easy to omit a VirusDetector's occurrence
// VirusDetector cannot be used as a trait of AlternatingUserMail, which could be suggested as a solution, since its
// validateEmail method would not be invoked from DefaultUserMail.sendEmail because of the delegation.
// The resulting instance still does not allow determining the user account type from the instance's type.
}
}
| zslajchrt/morpheus-tutor | src/main/scala/org/cloudio/morpheus/mail/traditional/App.scala | Scala | apache-2.0 | 2,928 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package utils
import cats.data.OptionT
import cats.implicits._
import connectors.{AmlsConnector, DataCacheConnector}
import models.businessmatching.BusinessMatching
import play.api.Logging
import services.StatusService
import uk.gov.hmrc.http.HeaderCarrier
import scala.concurrent.{ExecutionContext, Future}
/**
 * Helpers for resolving the registered business name: preferring the AMLS
 * registration details where a safeId is available, and falling back to the
 * locally cached BusinessMatching review details.
 */
object BusinessName extends Logging {
// Prefixes all warnings from this object for easier log filtering.
private val warn: String => Unit = msg => logger.warn(s"[BusinessName] $msg")
/** Business name from the cached BusinessMatching review details, if present. */
def getNameFromCache(credId: String)(implicit hc: HeaderCarrier, cache: DataCacheConnector, ec: ExecutionContext): OptionT[Future, String] =
for {
bm <- OptionT(cache.fetch[BusinessMatching](credId, BusinessMatching.key))
rd <- OptionT.fromOption[Future](bm.reviewDetails)
} yield {
// $COVERAGE-OFF$
logger.debug(s"Found business name in cache: ${rd.businessName}")
// $COVERAGE-ON$
rd.businessName
}
/** Business name from the AMLS registration details; empty on call failure (logged as a warning). */
def getNameFromAmls(accountTypeId: (String, String), safeId: String)
(implicit hc: HeaderCarrier, amls: AmlsConnector, ec: ExecutionContext, dc: DataCacheConnector) = {
OptionT(amls.registrationDetails(accountTypeId, safeId) map { r =>
Option(r.companyName)
} recover {
case ex =>
warn(s"Call to registrationDetails failed: ${ex.getMessage}. Falling back to cache..")
None
})
}
/** Resolves the name: AMLS first when a safeId exists (cache as fallback), otherwise cache only. */
def getName(credId: String, safeId: Option[String], accountTypeId: (String, String))
(implicit hc: HeaderCarrier, ec: ExecutionContext, cache: DataCacheConnector, amls: AmlsConnector) =
safeId.fold(getNameFromCache(credId))(v => getNameFromAmls(accountTypeId, v) orElse getNameFromCache(credId))
/** Resolves the name via the detailed AMLS status; yields "" when no name can be found. */
def getBusinessNameFromAmls(amlsRegistrationNumber: Option[String], accountTypeId: (String, String), cacheId: String)
(implicit hc: HeaderCarrier, amls: AmlsConnector, ec: ExecutionContext,
dc: DataCacheConnector, statusService: StatusService) = {
for {
(_, detailedStatus) <- OptionT.liftF(statusService.getDetailedStatus(amlsRegistrationNumber, accountTypeId, cacheId))
businessName <- detailedStatus.fold[OptionT[Future, String]](OptionT.some("")) { r =>
BusinessName.getName(cacheId, r.safeId, accountTypeId)
} orElse OptionT.some("")
} yield businessName
}
}
| hmrc/amls-frontend | app/utils/BusinessName.scala | Scala | apache-2.0 | 2,912 |
package xyz.hyperreal.bvm
import scala.util.parsing.input.{CharSequenceReader, Position, Positional}
import util.parsing.combinator.{PackratParsers, RegexParsers}
/** Convenience entry points for parsing a regular-expression string into a [[PatternAST]]. */
object PatternParser {
// Failure behaviour is defined by PatternParser.parseFromString (not shown here).
def parseRegex(regex: String): PatternAST = (new PatternParser).parseFromString(regex)
/** Like parseRegex, but reports failures as Left((position, message)). */
def parseRegexWithResult(regex: String): Either[(Position, String), PatternAST] =
(new PatternParser).parseFromStringWithResult(regex)
}
// Recursive-descent parser that turns a Java-style regular-expression string
// into a PatternAST.  Built on Scala parser combinators with packrat memoing;
// the ordering of alternatives inside each production is significant (longer
// quantifier suffixes must be tried before shorter ones, for example).
class PatternParser extends RegexParsers with PackratParsers {
  // Whitespace is significant inside a regex, so the default skipping is disabled.
  override val skipWhitespace = false

  // Captures the current input position without consuming input; used to record
  // where a repetition's upper bound was written, for later error reporting.
  lazy val pos: Parser[Position] = positioned(success(new Positional {})) ^^ (_.pos)

  // Decimal integer, used for repetition bounds like {2,5}.
  lazy val number: PackratParser[Int] = """\\d+""".r ^^ (_.toInt)

  // Named-group identifier: a letter followed by letters or digits.
  lazy val name: PackratParser[String] = regex("""[a-zA-Z][a-zA-Z0-9]*""".r)

  // Entry production: a whole regex is an alternation.
  lazy val regex: PackratParser[PatternAST] = altRegex

  // Alternation: '|'-separated branches; an absent branch (e.g. "a|" or "|a")
  // becomes EmptyPattern so "a|" still parses.
  lazy val altRegex: PackratParser[PatternAST] =
    opt(conRegex) ~ rep("|" ~> opt(conRegex)) ^^ {
      case None ~ Nil => EmptyPattern
      case Some(f) ~ Nil => f
      case Some(f) ~ r => AlternationPattern(f +: r.map(_.getOrElse(EmptyPattern)))
      case None ~ r => AlternationPattern(EmptyPattern +: r.map(_.getOrElse(EmptyPattern)))
    }

  // Concatenation: one or more quantified terms in sequence; a single term is
  // returned unwrapped.
  lazy val conRegex: PackratParser[PatternAST] =
    quantRegex ~ quantRegex.* ^^ {
      case f ~ Nil => f
      case f ~ r => ConcatenationPattern(f +: r)
    }

  // Applies quantifier constructor `q` to term `r`.  A multi-character literal
  // is split so the quantifier binds only its last character (e.g. in "ab*"
  // only 'b' repeats), matching standard regex semantics.
  def quant(r: PatternAST, q: PatternAST => PatternAST): PatternAST =
    r match {
      case LiteralPattern(s: String) if s.length > 1 =>
        ConcatenationPattern(List(LiteralPattern(s.init), q(LiteralPattern(s.last.toString))))
      case _ => q(r)
    }

  // Quantifiers.  Reluctant forms (?? +? *? {..}?) are tried first, then
  // possessive forms (?+ ++ *+ {..}+, modelled as atomic groups), then greedy
  // forms; the bare primary is the final fallback.  In the {n,m} cases the
  // recorded `pos` lets repeat-bound validation report a precise location.
  lazy val quantRegex: PackratParser[PatternAST] =
    primaryRegex <~ "??" ^^ (r => quant(r, ReluctantOptionalPattern)) |
      primaryRegex <~ "+?" ^^ (r => quant(r, ReluctantOneOrMorePattern)) |
      primaryRegex <~ "*?" ^^ (r => quant(r, ReluctantZeroOrMorePattern)) |
      primaryRegex ~ ("{" ~> number) ~ (opt("," ~> opt(pos ~ number)) <~ "}?") ^^ {
        case p ~ l ~ None => quant(p, ReluctantRepeatPattern(_, l, null, Some(l)))
        case p ~ l ~ Some(None) => quant(p, ReluctantRepeatPattern(_, l, null, None))
        case p ~ l ~ Some(Some(pos ~ u)) => quant(p, ReluctantRepeatPattern(_, l, pos, Some(u)))
      } |
      primaryRegex <~ "?+" ^^ (r => quant(r, s => AtomicPattern(OptionalPattern(s)))) |
      primaryRegex <~ "++" ^^ (r => quant(r, s => AtomicPattern(OneOrMorePattern(s)))) |
      primaryRegex <~ "*+" ^^ (r => quant(r, s => AtomicPattern(ZeroOrMorePattern(s)))) |
      primaryRegex ~ ("{" ~> number) ~ (opt("," ~> opt(pos ~ number)) <~ "}+") ^^ {
        case p ~ l ~ None => quant(p, r => AtomicPattern(RepeatPattern(r, l, null, Some(l))))
        case p ~ l ~ Some(None) => quant(p, r => AtomicPattern(RepeatPattern(r, l, null, None)))
        case p ~ l ~ Some(Some(pos ~ u)) => quant(p, r => AtomicPattern(RepeatPattern(r, l, pos, Some(u))))
      } |
      primaryRegex <~ "?" ^^ (r => quant(r, OptionalPattern)) |
      primaryRegex <~ "+" ^^ (r => quant(r, OneOrMorePattern)) |
      primaryRegex <~ "*" ^^ (r => quant(r, ZeroOrMorePattern)) |
      primaryRegex ~ ("{" ~> number) ~ (opt("," ~> opt(pos ~ number)) <~ "}") ^^ {
        case p ~ l ~ None => quant(p, RepeatPattern(_, l, null, Some(l)))
        case p ~ l ~ Some(None) => quant(p, RepeatPattern(_, l, null, None))
        case p ~ l ~ Some(Some(pos ~ u)) => quant(p, RepeatPattern(_, l, pos, Some(u)))
      } |
      primaryRegex

  // Character classes: [^...] negates the parsed set, [...] uses it directly.
  lazy val classRegex: PackratParser[PatternAST] =
    "[^" ~> charSet <~ "]" ^^ (c => LiteralClassPattern(Pattern.except(c))) |
      "[" ~> charSet <~ "]" ^^ LiteralClassPattern

  // Body of a character class: ranges (a-z), POSIX-ish [:lower:]/[:upper:]
  // shorthands, and single characters, all unioned.  An optional trailing
  // &&[...] intersects the set (note: the intersection branch does not expand
  // [:lower:]/[:upper:] shorthands).
  lazy val charSet: PackratParser[Set[Char]] =
    rep1(("""[^]]""".r <~ "-") ~ ".".r | "[:lower:]" | "[:upper:]" | """[^]]""".r) ~ opt("&&[" ~> charSet <~ "]") ^^ {
      case es ~ None =>
        es map {
          case (l: String) ~ (u: String) => l.head to u.head toSet
          case "[:lower:]" => 'a' to 'z' toSet
          case "[:upper:]" => 'A' to 'Z' toSet
          case c: String => c.toSet
        } reduce (_ union _)
      case es ~ Some(s) =>
        (es map {
          case (l: String) ~ (u: String) => l.head to u.head toSet
          case c: String => c.toSet
        } reduce (_ union _)) & s
    }

  // Translates a string of inline-flag letters (as in "(?idmsuxU)") into the
  // corresponding Pattern bit mask.  Unknown letters fall through and throw
  // a MatchError.
  private def chars2flags(s: String) = {
    var mask = 0
    s foreach {
      case 'i' => mask |= Pattern.CASE_INSENSITIVE
      case 'd' => mask |= Pattern.UNIX_LINES
      case 'm' => mask |= Pattern.MULTILINE
      case 's' => mask |= Pattern.DOTALL
      case 'u' => mask |= Pattern.UNICODE_CASE
      case 'x' => mask |= Pattern.COMMENTS
      case 'U' => mask |= Pattern.UNICODE_CHARACTER_CLASS
    }
    mask
  }

  // Atomic terms: literal runs (greedy, but not swallowing a character that a
  // following quantifier must bind to — hence the (?![+*?]) lookahead and the
  // single-character fallback), escapes, predefined classes, anchors,
  // back-references, and every flavour of group.  Group alternatives with
  // longer opening delimiters ("(?:", "(?<=", ...) precede plain "(".
  lazy val primaryRegex: PackratParser[PatternAST] =
    """[^\\\\^$.|?*+(){\\[ \\n\\t]+(?![+*?])""".r ^^ { s =>
      LiteralPattern(s)
    } |
      """[^\\\\^$.|?*+(){\\[ \\n\\t]""".r ^^ { s =>
        LiteralPattern(s)
      } |
      """\\""" ~> """[\\\\^$.|?*+(){}\\[\\] ]""".r ^^ { s =>
        LiteralPattern(s)
      } |
      """\\0""" ~> "(?:(?:[0-3])[0-7])[0-7]".r ^^ (o => LiteralPattern(Integer.parseInt(o, 8).toChar.toString)) |
      """\\x""" ~> "[0-9a-fA-F]{2}".r ^^ (o => LiteralPattern(Integer.parseInt(o, 16).toChar.toString)) |
      ("""\\""" ~ "u") ~> "[0-9a-fA-F]{4}".r ^^ (o => LiteralPattern(Integer.parseInt(o, 16).toChar.toString)) |
      """\\t""" ^^^ LiteralPattern("\\t") |
      """\\n""" ^^^ LiteralPattern("\\n") |
      """\\r""" ^^^ LiteralPattern("\\r") |
      """\\f""" ^^^ LiteralPattern("\\f") |
      """\\a""" ^^^ LiteralPattern("\\u0007") |
      """\\e""" ^^^ LiteralPattern("\\u001B") |
      """\\c""" ~> "[A-Z]".r ^^ (o => LiteralPattern((o.head - 'A').toChar.toString)) |
      classRegex |
      "." ^^^ DotPattern |
      """\\d""" ^^^ LiteralClassPattern(DIGIT_CLASS) |
      """\\D""" ^^^ LiteralClassPattern(Pattern.except(DIGIT_CLASS)) |
      """\\h""" ^^^ LiteralClassPattern(HORIZONTAL_WHITESPACE_CLASS) |
      """\\H""" ^^^ LiteralClassPattern(Pattern.except(HORIZONTAL_WHITESPACE_CLASS)) |
      """\\s""" ^^^ LiteralClassPattern(WHITESPACE_CLASS) |
      """\\S""" ^^^ LiteralClassPattern(Pattern.except(WHITESPACE_CLASS)) |
      """\\v""" ^^^ LiteralClassPattern(VERTICAL_WHITESPACE_CLASS) |
      """\\V""" ^^^ LiteralClassPattern(Pattern.except(VERTICAL_WHITESPACE_CLASS)) |
      """\\w""" ^^^ LiteralClassPattern(WORD_CLASS) |
      """\\W""" ^^^ LiteralClassPattern(Pattern.except(WORD_CLASS)) |
      """\\""" ~> """\\d""".r ^^ ReferencePattern |
      """\\k<""" ~> name <~ ">" ^^ ReferencePattern |
      """^""" ^^^ BeginningOfLinePattern |
      """$""" ^^^ EndOfLinePattern |
      """\\b""" ^^^ WordBoundaryPattern |
      """\\B""" ^^^ NonWordBoundaryPattern |
      """\\A""" ^^^ BeginningOfInputPattern |
      """\\z""" ^^^ EndOfInputPattern |
      """\\Z""" ^^^ EndOfInputBeforeFinalTerminatorPattern |
      """\\R""" ^^^ LineBreakPattern |
      "(?:" ~> optRegex <~ ")" ^^ GroupPattern |
      "(?>" ~> optRegex <~ ")" ^^ AtomicPattern |
      "(?=" ~> optRegex <~ ")" ^^ LookaheadPattern |
      "(?!" ~> optRegex <~ ")" ^^ NegationPattern |
      "(?<=" ~> optRegex <~ ")" ^^ LookbehindPattern |
      "(?<!" ~> optRegex <~ ")" ^^ (r => NegationPattern(LookbehindPattern(r))) |
      ("(?<" ~> name <~ ">") ~ (optRegex <~ ")") ^^ { case n ~ r => CapturePattern(n, r, null) } |
      ("(?" ~> "[idmsuxU]*".r <~ "-") ~ ("[idmsuxU]*".r <~ ")") ^^ {
        case s ~ c => SetFlagsPattern(chars2flags(s), chars2flags(c))
      } |
      ("(?" ~> "[idmsuxU]*".r <~ "-") ~ ("[idmsuxU]*".r <~ ":") ~ (optRegex <~ ")") ^^ {
        case s ~ c ~ r => SetFlagsGroupPattern(chars2flags(s), chars2flags(c), r)
      } |
      "(" ~> optRegex <~ ")" ^^ (r => NumberedCapturePattern(r))

  // An optionally-empty regex (used for group bodies, so "()" is legal); the
  // empty case becomes an empty concatenation.
  lazy val optRegex: PackratParser[PatternAST] =
    opt(regex) ^^ {
      case None => ConcatenationPattern(Nil)
      case Some(r) => r
    }

  // Aborts with an error message, prefixed with the line number when the
  // position is known and not on the first line.
  def problem(pos: Position, error: String): Nothing =
    if (pos eq null)
      sys.error(error)
    else if (pos.line == 1)
      sys.error(s"$error\\n${pos.longString}")
    else
      sys.error(s"${pos.line}: $error\\n${pos.longString}")

  // Parses the whole input, returning the AST or the failure position/message.
  // (The type parameter T is unused.)
  def parseFromStringWithResult[T](src: String): Either[(Position, String), PatternAST] =
    parseAll(regex, new CharSequenceReader(src)) match {
      case Success(tree, _) => Right(tree)
      case NoSuccess(error, rest) => Left((rest.pos, error))
    }

  // Parses the whole input, aborting via `problem` on failure.
  def parseFromString[T](src: String): PatternAST =
    parseFromStringWithResult(src) match {
      case Right(tree) => tree
      case Left((pos, error)) => problem(pos, error)
    }
}
| edadma/funl | bvm/src/main/scala/xyz/hyperreal/bvm/PatternParser.scala | Scala | mit | 8,665 |
package org.jetbrains.plugins.scala
package lang
package refactoring
package introduceParameter
import com.intellij.ide.util.SuperMethodWarningUtil
import com.intellij.internal.statistic.UsageTrigger
import com.intellij.openapi.actionSystem.DataContext
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.editor.markup.RangeHighlighter
import com.intellij.openapi.editor.{Editor, SelectionModel}
import com.intellij.openapi.project.Project
import com.intellij.openapi.util.TextRange
import com.intellij.psi._
import com.intellij.psi.codeStyle.CodeStyleManager
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.refactoring.util.CommonRefactoringUtil
import com.intellij.refactoring.{RefactoringActionHandler, RefactoringBundle}
import org.jetbrains.plugins.scala.codeInsight.intention.expression.IntroduceImplicitParameterIntention
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.base.{ScMethodLike, ScPrimaryConstructor}
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScFunction, ScFunctionDefinition}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScClass
import org.jetbrains.plugins.scala.lang.psi.dataFlow.impl.reachingDefs.{ReachingDefintionsCollector, VariableInfo}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.lang.psi.types.{ScFunctionType, ScType, StdType, Any => scTypeAny, Unit => scTypeUnit}
import org.jetbrains.plugins.scala.lang.refactoring.changeSignature.{ScalaMethodDescriptor, ScalaParameterInfo}
import org.jetbrains.plugins.scala.lang.refactoring.introduceParameter.ScalaIntroduceParameterHandler._
import org.jetbrains.plugins.scala.lang.refactoring.namesSuggester.NameSuggester
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaRefactoringUtil.{IntroduceException, showErrorHint}
import org.jetbrains.plugins.scala.lang.refactoring.util.{DialogConflictsReporter, ScalaRefactoringUtil, ScalaVariableValidator}
import scala.collection.mutable.ArrayBuffer
/**
* User: Alexander Podkhalyuzin
* Date: 11.06.2009
*/
// IntelliJ handler for the "Introduce Parameter" refactoring on Scala code.
// Orchestrates expression/element selection, choice of the enclosing method
// (or primary constructor), data collection, and the configuration dialog.
class ScalaIntroduceParameterHandler extends RefactoringActionHandler with DialogConflictsReporter {
  // Highlighters for other occurrences of the selected expression; cleared
  // when the dialog is cancelled.
  private var occurrenceHighlighters = Seq.empty[RangeHighlighter]

  // Editor entry point: validates the file, lets the user pick an expression
  // if nothing is selected, then delegates to the three-argument invoke.
  def invoke(project: Project, editor: Editor, file: PsiFile, dataContext: DataContext) {
    if (!file.isInstanceOf[ScalaFile]) return
    if (!ScalaRefactoringUtil.ensureFileWritable(project, file)) {
      showErrorHint(ScalaBundle.message("file.is.not.writable"), project, editor, REFACTORING_NAME)
      return
    }
    val canBeIntroduced: (ScExpression) => Boolean = ScalaRefactoringUtil.checkCanBeIntroduced(_)
    ScalaRefactoringUtil.afterExpressionChoosing(project, editor, file, dataContext, "Introduce Parameter", canBeIntroduced) {
      UsageTrigger.trigger(ScalaBundle.message("introduce.parameter.id"))
      invoke(project, editor, file)
    }
  }

  // Builds a function literal wrapping the selected elements, whose parameters
  // are the variables flowing into the selection (`input`).  Returns the
  // (reformatted) expression and its non-value type.  When possible the
  // explicit lambda is converted to an underscore-style expression.
  def functionalArg(elems: Seq[PsiElement], input: Iterable[VariableInfo], method: ScMethodLike): (ScExpression, ScType) = {
    val namesAndTypes = input.map { v =>
      val elem = v.element
      val typeText = elem match {
        case fun: ScFunction => fun.getType().getOrAny.canonicalText
        case named => ScType.ofNamedElement(v.element).getOrElse(scTypeAny).canonicalText
      }
      s"${elem.name}: $typeText"
    }
    val project = method.getProject
    val arrow = ScalaPsiUtil.functionArrow(project)
    val paramsText = namesAndTypes.mkString("(", ", ", ")")
    val funText = elems match {
      case Seq(single: ScExpression) =>
        val bodyText = single.getText
        s"$paramsText $arrow $bodyText"
      case _ =>
        // Multiple statements: wrap the selection in a block body.
        val bodyText = elems.map(_.getText).mkString
        s"$paramsText $arrow {\n$bodyText\n}"
    }
    val expr = ScalaPsiElementFactory.createExpressionWithContextFromText(funText, elems.head.getContext, elems.head).asInstanceOf[ScFunctionExpr]
    val toReturn = IntroduceImplicitParameterIntention.createExpressionToIntroduce(expr, withoutParameterTypes = true) match {
      case Left(e) => e
      case _ => expr
    }
    ScalaPsiUtil.adjustTypes(toReturn, addImports = false)
    (CodeStyleManager.getInstance(project).reformat(toReturn).asInstanceOf[ScExpression], expr.getNonValueType().getOrAny)
  }

  // Core flow: read the selection, let the user choose the target method, then
  // collect refactoring data and show the dialog.  On cancel, occurrence
  // highlighters are disposed.
  def invoke(project: Project, editor: Editor, file: PsiFile) {
    ScalaRefactoringUtil.trimSpacesAndComments(editor, file)
    PsiDocumentManager.getInstance(project).commitAllDocuments()
    val (exprWithTypes, elems) = selectedElements(file, project, editor) match {
      case Some((x, y)) => (x, y)
      case None => return
    }
    afterMethodChoosing(elems.head, editor) { methodLike =>
      val data = collectData(exprWithTypes, elems, methodLike, editor)
      data.foreach { d =>
        val dialog = createDialog(project, d)
        if (dialog.showAndGet) {
          invokeLater {
            if (editor != null && !editor.isDisposed)
              editor.getSelectionModel.removeSelection()
          }
        } else {
          occurrenceHighlighters.foreach(_.dispose())
          occurrenceHighlighters = Seq.empty
        }
      }
    }
  }

  // The selected expression with its possible types, when the selection is a
  // single expression (None when it is a statement sequence).
  private type ExprWithTypes = Option[(ScExpression, Array[ScType])]

  // Resolves the current editor selection to either a single expression (with
  // types) or a sequence of elements.  Returns None (possibly after showing a
  // hint) when the selection is empty, invalid, or contains a `return`.
  def selectedElements(file: PsiFile, project: Project, editor: Editor): Option[(ExprWithTypes, Seq[PsiElement])] = {
    try {
      val selModel: SelectionModel = editor.getSelectionModel
      if (!selModel.hasSelection) return None
      val (startOffset, endOffset) = (selModel.getSelectionStart, selModel.getSelectionEnd)
      ScalaRefactoringUtil.checkFile(file, project, editor, REFACTORING_NAME)
      val exprWithTypes = ScalaRefactoringUtil.getExpression(project, editor, file, startOffset, endOffset)
      val elems = exprWithTypes match {
        case Some((e, _)) => Seq(e)
        case None => ScalaRefactoringUtil.selectedElements(editor, file.asInstanceOf[ScalaFile], trimComments = false)
      }
      val hasWarnings = ScalaRefactoringUtil.showNotPossibleWarnings(elems, project, editor, REFACTORING_NAME)
      if (hasWarnings) return None
      if (haveReturnStmts(elems)) {
        showErrorHint("Refactoring is not supported: selection contains return statement", project, editor, REFACTORING_NAME)
        return None
      }
      Some((exprWithTypes, elems))
    }
    catch {
      case _: IntroduceException => None
    }
  }

  // Gathers everything the dialog needs: parameter types and default-argument
  // text (a function literal when the selection reads outer variables), the
  // method to search usages for (possibly a super method), a suggested name,
  // and the occurrence ranges to replace.  Returns None when the user aborts
  // or the target is read-only.
  def collectData(exprWithTypes: ExprWithTypes, elems: Seq[PsiElement], methodLike: ScMethodLike, editor: Editor): Option[ScalaIntroduceParameterData] = {
    val project = methodLike.getProject

    val info = ReachingDefintionsCollector.collectVariableInfo(elems, methodLike)
    val input = info.inputVariables
    val (types, argText, argClauseText) =
      if (input.nonEmpty || exprWithTypes.isEmpty) {
        // Selection depends on local variables (or is a statement sequence):
        // introduce a functional parameter and call it with those variables.
        val (funExpr, funType) = functionalArg(elems, input, methodLike)
        val argClauseText = input.map(_.element.name).mkString("(", ", ", ")")
        val allTypes = funType match {
          case ScFunctionType(retType, _) => Array(funType, retType, StdType.ANY)
          case _ => Array(funType, StdType.ANY)
        }
        (allTypes, funExpr.getText, argClauseText)
      }
      else (exprWithTypes.get._2, exprWithTypes.get._1.getText, "")

    val superMethod = methodLike.findDeepestSuperMethod() match {
      case null => methodLike
      case scMethod: ScMethodLike => SuperMethodWarningUtil.checkSuperMethod(methodLike, RefactoringBundle.message("to.refactor"))
      case _ => methodLike
    }
    val methodToSearchFor = superMethod match {
      case m: ScMethodLike => m
      case _ => return None
    }
    if (!CommonRefactoringUtil.checkReadOnlyStatus(project, superMethod)) return None

    val suggestedName = {
      val validator = new ScalaVariableValidator(this, project, elems.head, false, methodLike, methodLike)
      val possibleNames = elems match {
        case Seq(expr: ScExpression) => NameSuggester.suggestNames(expr, validator)
        case _ => NameSuggester.suggestNamesByType(types(0))
      }
      possibleNames(0)
    }

    val (occurrences, mainOcc) = elems match {
      case Seq(expr: ScExpression) =>
        val occurrencesScope = methodLike match {
          case ScFunctionDefinition.withBody(body) => body
          case pc: ScPrimaryConstructor => pc.containingClass.extendsBlock
          case _ => methodLike
        }
        val occurrences = ScalaRefactoringUtil.getOccurrenceRanges(ScalaRefactoringUtil.unparExpr(expr), occurrencesScope)
        if (occurrences.length > 1)
          occurrenceHighlighters = ScalaRefactoringUtil.highlightOccurrences(project, occurrences, editor)
        (occurrences, expr.getTextRange)
      case _ => (Array.empty[TextRange], elems.head.getTextRange.union(elems.last.getTextRange))
    }

    val data = ScalaIntroduceParameterData(methodLike, methodToSearchFor, elems,
      suggestedName, types, types(0), occurrences, mainOcc, replaceAll = false, argText, Some(argClauseText))
    Some(data)
  }

  // Non-editor entry point of RefactoringActionHandler; intentionally a no-op.
  def invoke(project: Project, elements: Array[PsiElement], dataContext: DataContext) {
    /*do nothing*/
  }

  // Walks outward from `expr`, collecting every enclosing function definition
  // (whose body contains the selection) and class primary constructor.  When
  // several candidates exist, those implementing library interfaces are
  // filtered out if anything else remains.
  private def getEnclosingMethods(expr: PsiElement): Seq[ScMethodLike] = {
    var enclosingMethods = new ArrayBuffer[ScMethodLike]
    var elem: PsiElement = expr
    while (elem != null) {
      val newFun = PsiTreeUtil.getContextOfType(elem, true, classOf[ScFunctionDefinition], classOf[ScClass])
      newFun match {
        case f@ScFunctionDefinition.withBody(body) if PsiTreeUtil.isContextAncestor(body, expr, false) =>
          enclosingMethods += f
        case cl: ScClass => enclosingMethods ++= cl.constructor
        case _ =>
      }
      elem = newFun
    }
    if (enclosingMethods.size > 1) {
      val methodsNotImplementingLibraryInterfaces = enclosingMethods.filter {
        case f: ScFunctionDefinition if f.superMethods.exists(isLibraryInterfaceMethod) => false
        case _ => true
      }
      if (methodsNotImplementingLibraryInterfaces.nonEmpty)
        return methodsNotImplementingLibraryInterfaces
    }
    enclosingMethods
  }

  // Builds the configuration dialog for the collected data.
  def createDialog(project: Project, data: ScalaIntroduceParameterData) = {
    val paramInfo = new ScalaParameterInfo(data.paramName, -1, data.tp, project, false, false, data.defaultArg, isIntroducedParameter = true)
    val descriptor = createMethodDescriptor(data.methodToSearchFor, paramInfo)
    new ScalaIntroduceParameterDialog(project, descriptor, data)
  }

  // Describes the target method with the new parameter appended to the first
  // clause — before a trailing repeated (vararg) parameter if there is one.
  def createMethodDescriptor(method: ScMethodLike, paramInfo: ScalaParameterInfo): ScalaMethodDescriptor = {
    new ScalaMethodDescriptor(method) {
      override def parametersInner: Seq[Seq[ScalaParameterInfo]] = {
        val params = super.parametersInner
        params.headOption match {
          case Some(seq) if seq.lastOption.exists(_.isRepeatedParameter) =>
            val newFirstClause = seq.dropRight(1) :+ paramInfo :+ seq.last
            newFirstClause +: params.tail
          case Some(seq) =>
            val newFirstClause = seq :+ paramInfo
            newFirstClause +: params.tail
          case None => Seq(Seq(paramInfo))
        }
      }
    }
  }

  // Label shown in the method chooser for each candidate.
  private def getTextForElement(method: ScMethodLike): String = {
    method match {
      case pc: ScPrimaryConstructor => s"${pc.containingClass.name} (primary constructor)"
      case (f: ScFunctionDefinition) && ContainingClass(c: ScNewTemplateDefinition) => s"${f.name} (in anonymous class)"
      case (f: ScFunctionDefinition) && ContainingClass(c) => s"${f.name} (in ${c.name})"
      case f: ScFunctionDefinition => s"${f.name}"
    }
  }

  // Element to highlight in the chooser: the extends block for a primary
  // constructor, the element itself otherwise.
  private def toHighlight(e: PsiElement) = e match {
    case pc: ScPrimaryConstructor => pc.containingClass.extendsBlock
    case _ => e
  }

  // Runs `action` on the chosen enclosing method: shows a chooser when there
  // are several candidates (in unit tests the first is picked), or an error
  // hint when there are none.
  def afterMethodChoosing(elem: PsiElement, editor: Editor)(action: ScMethodLike => Unit): Unit = {
    val validEnclosingMethods: Seq[ScMethodLike] = getEnclosingMethods(elem)
    if (validEnclosingMethods.size > 1 && !ApplicationManager.getApplication.isUnitTestMode) {
      ScalaRefactoringUtil.showChooser[ScMethodLike](editor, validEnclosingMethods.toArray, action,
        s"Choose function for $REFACTORING_NAME", getTextForElement, toHighlight)
    }
    else if (validEnclosingMethods.size == 1 || ApplicationManager.getApplication.isUnitTestMode) {
      action(validEnclosingMethods.head)
    } else {
      showErrorHint(ScalaBundle.message("cannot.refactor.no.function"), elem.getProject, editor, REFACTORING_NAME)
    }
  }

  // True for abstract (or Scala-defined) methods that live outside the current
  // project, i.e. library interface members we should not modify.
  private def isLibraryInterfaceMethod(method: PsiMethod): Boolean = {
    (method.hasModifierPropertyScala(PsiModifier.ABSTRACT) || method.isInstanceOf[ScFunctionDefinition]) &&
      !method.getManager.isInProject(method)
  }

  // True when the selection contains a `return` that exits a function outside
  // the selection itself (such selections cannot be extracted safely).
  private def haveReturnStmts(elems: Seq[PsiElement]): Boolean = {
    for {
      elem <- elems
      ret @ (r: ScReturnStmt) <- elem.depthFirst
    } {
      if (ret.returnFunction.isEmpty || !elem.isAncestorOf(ret.returnFunction.get))
        return true
    }
    false
  }
}
object ScalaIntroduceParameterHandler {
  // User-visible name of this refactoring, used in dialogs and error hints.
  val REFACTORING_NAME = ScalaBundle.message("introduce.parameter.title")
} | jeantil/intellij-scala | src/org/jetbrains/plugins/scala/lang/refactoring/introduceParameter/ScalaIntroduceParameterHandler.scala | Scala | apache-2.0 | 13,312 |
/**
* Copyright (C) 2012 LShift Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.lshift.diffa.agent.rest
import org.junit.Test
import org.junit.Assert._
import java.io.ByteArrayInputStream
import net.lshift.diffa.participant.scanning.ScanResultEntry
import org.joda.time.{DateTimeZone, DateTime}
import java.util.HashMap
import scala.collection.JavaConversions._
import net.lshift.diffa.kernel.frontend.InvalidInventoryException
// Unit tests for InventoryReader's CSV parsing: header handling, attribute
// columns, and the various rejection cases.
//
// Fix: each "shouldReject…" test previously wrapped parseCSV in try/catch with
// no fail() after the call, so the test passed vacuously when no exception was
// thrown.  A fail(...) now follows each call that is expected to throw.
class InventoryReaderTest {
  val reader = new InventoryReader
  // Expected attribute map for rows with no extra columns.
  val emptyAttrs = new HashMap[String, String]

  @Test
  def shouldParseAnAttributelessUpload() {
    val result = parseCSV(
      "id,version,updated",
      "a,v1,2012-03-07T12:31:00Z",
      "b,v4,2011-12-31T07:15:12Z"
    )

    assertEquals(Seq(
      ScanResultEntry.forEntity("a", "v1", new DateTime(2012, 3, 7, 12, 31, 0, 0, DateTimeZone.UTC), emptyAttrs),
      ScanResultEntry.forEntity("b", "v4", new DateTime(2011, 12, 31, 7, 15, 12, 0, DateTimeZone.UTC), emptyAttrs)),
      result.results)
  }

  @Test
  def shouldParseExcessColumnsIntoAttributes() {
    val result = parseCSV(
      "id,version,updated,foo,bar,pop",
      "a,v1,2012-03-07T12:31:00Z,p,q,r",
      "b,v4,2011-12-31T07:15:12Z,x,y,z"
    )

    assertEquals(Seq(
      ScanResultEntry.forEntity("a", "v1", new DateTime(2012, 3, 7, 12, 31, 0, 0, DateTimeZone.UTC),
        new HashMap[String,String](Map("foo" -> "p", "bar" -> "q", "pop" -> "r"))),
      ScanResultEntry.forEntity("b", "v4", new DateTime(2011, 12, 31, 7, 15, 12, 0, DateTimeZone.UTC),
        new HashMap[String,String](Map("foo" -> "x", "bar" -> "y", "pop" -> "z")))),
      result.results)
  }

  @Test
  def shouldRejectEmptyCSV() {
    try {
      parseCSV("")
      fail("Expected InvalidInventoryException for empty CSV")
    } catch {
      case e:InvalidInventoryException => assertEquals("CSV file appears to be empty. No header line was found", e.getMessage)
    }
  }

  @Test
  def shouldAcceptCSVWithHeaderOnly() {
    val result = parseCSV("id,version,updated")
    assertEquals(Seq(), result.results)
  }

  @Test
  def shouldRejectCSVWithMissingID() {
    try {
      parseCSV("version,updated", "v1,2012-03-07T12:31:00Z")
      fail("Expected InvalidInventoryException for missing 'id' column")
    } catch {
      case e:InvalidInventoryException => assertEquals("No 'id' field is defined in the header", e.getMessage)
    }
  }

  @Test
  def shouldRejectCSVWithMissingVsn() {
    try {
      parseCSV("id,updated", "a,2012-03-07T12:31:00Z")
      fail("Expected InvalidInventoryException for missing 'version' column")
    } catch {
      case e:InvalidInventoryException => assertEquals("No 'version' field is defined in the header", e.getMessage)
    }
  }

  @Test
  def shouldAcceptCSVWithoutUpdated() {
    val result = parseCSV("id,version", "a,v1", "b,v2")
    assertEquals(Seq(
      ScanResultEntry.forEntity("a", "v1", null, emptyAttrs),
      ScanResultEntry.forEntity("b", "v2", null, emptyAttrs)),
      result.results)
  }

  @Test
  def shouldRejectCSVWithAnInvalidUpdatedValue() {
    try {
      parseCSV("id,version,updated", "a,v1,2012-03-07T12:31:00Z", "b,v2,garbled")
      fail("Expected InvalidInventoryException for unparseable timestamp")
    } catch {
      case e:InvalidInventoryException => assertEquals("Invalid updated timestamp 'garbled' on line 3: Invalid format: \\"garbled\\"", e.getMessage)
    }
  }

  @Test
  def shouldRejectCSVWithIncompleteLine() {
    try {
      parseCSV("id,version,updated,foo,bar", "a,v1,2012-03-07T12:31:00Z,a,b", "b,v2,2012-03-07T12:31:00Z")
      fail("Expected InvalidInventoryException for row shorter than header")
    } catch {
      case e:InvalidInventoryException => assertEquals("Line 3 has 3 elements, but the header had 5", e.getMessage)
    }
  }

  @Test
  def shouldAcceptCSVWithEmptyValues() {
    val result = parseCSV("id,version,updated,foo,bar", "a,v1,2012-03-07T12:31:00Z,x,y", "b,v2,2011-12-31T07:15:12Z,,")
    assertEquals(Seq(
      ScanResultEntry.forEntity("a", "v1", new DateTime(2012, 3, 7, 12, 31, 0, 0, DateTimeZone.UTC),
        new HashMap[String,String](Map("foo" -> "x", "bar" -> "y"))),
      ScanResultEntry.forEntity("b", "v2", new DateTime(2011, 12, 31, 7, 15, 12, 0, DateTimeZone.UTC),
        new HashMap[String,String](Map("foo" -> "", "bar" -> "")))),
      result.results)
  }

  // Joins the given lines into one CSV document and feeds it to the reader.
  private def parseCSV(s:String*) =
    reader.readFrom(null, null, null, null, null, new ByteArrayInputStream(s.mkString("\\n").getBytes("UTF-8")))
}
package scalaz.stream
import collection.immutable.Queue
import concurrent.duration._
import scalaz.{\/, -\/, \/-}
import scalaz.\/._
import Process._
import scalaz.stream.ReceiveY._
import scalaz.stream.Process.Emit
import scalaz.stream.ReceiveY.ReceiveL
import scala.Some
import scalaz.stream.ReceiveY.ReceiveR
import scalaz.stream.Process.Halt
import scalaz.stream.Process.Env
import scalaz.stream.ReceiveY.HaltR
import scalaz.stream.Process.Await
import scalaz.stream.ReceiveY.HaltL
// Combinator library for `Wye` processes: two-input, nondeterministic stream
// transducers.  Mixed into the `wye` companion object below.
trait wye {
  import scalaz.stream.wye.{AwaitL, AwaitR, AwaitBoth}

  /** A `Wye` which emits values from its right branch, but allows up to `n`
    * elements from the left branch to enqueue unanswered before blocking
    * on the right branch. */
  def boundedQueue[I](n: Int): Wye[Any,I,I] =
    yipWithL(n)((i,i2) => i2)

  /**
   * After each input, dynamically determine whether to read from the left, right, or both,
   * for the subsequent input, using the provided functions `f` and `g`. The returned
   * `Wye` begins by reading from the left side and is left-biased--if a read of both branches
   * returns a `These(x,y)`, it uses the signal generated by `f` for its next step.
   */
  def dynamic[I,I2](f: I => wye.Request, g: I2 => wye.Request): Wye[I,I2,ReceiveY[I,I2]] = {
    import wye.Request._
    def go(signal: wye.Request): Wye[I,I2,ReceiveY[I,I2]] = signal match {
      case L => awaitL[I].flatMap { i => emit(ReceiveL(i)) fby go(f(i)) }
      case R => awaitR[I2].flatMap { i2 => emit(ReceiveR(i2)) fby go(g(i2)) }
      case Both => awaitBoth[I,I2].flatMap {
        case t@ReceiveL(i) => emit(t) fby go(f(i))
        case t@ReceiveR(i2) => emit(t) fby go(g(i2))
        // HaltL/HaltR: keep waiting with the same signal.
        case _ => go(signal)
      }
    }
    go(L)
  }

  /**
   * A `Wye` which echoes the right branch while draining the left,
   * taking care to make sure that the left branch is never more
   * than `maxUnacknowledged` behind the right. For example:
   * `src.connect(snk)(observe(10))` will output the same thing
   * as `src`, but will as a side effect direct output to `snk`,
   * blocking on `snk` if more than 10 elements have enqueued
   * without a response.
   */
  def drainL[I](maxUnacknowledged: Int): Wye[Any,I,I] =
    wye.flip(drainR(maxUnacknowledged))

  /**
   * A `Wye` which echoes the left branch while draining the right,
   * taking care to make sure that the right branch is never more
   * than `maxUnacknowledged` behind the left. For example:
   * `src.connect(snk)(observe(10))` will output the same thing
   * as `src`, but will as a side effect direct output to `snk`,
   * blocking on `snk` if more than 10 elements have enqueued
   * without a response.
   */
  def drainR[I](maxUnacknowledged: Int): Wye[I,Any,I] =
    yipWithL[I,Any,I](maxUnacknowledged)((i,i2) => i)

  /**
   * Invokes `dynamic` with `I == I2`, and produces a single `I` output. Output is
   * left-biased: if a `These(i1,i2)` is emitted, this is translated to an
   * `emitSeq(List(i1,i2))`.
   */
  def dynamic1[I](f: I => wye.Request): Wye[I,I,I] =
    dynamic(f, f).flatMap {
      case ReceiveL(i) => emit(i)
      case ReceiveR(i) => emit(i)
      case HaltR(_) => halt
      case HaltL(_) => halt
    }

  /**
   * Nondeterministic interleave of both inputs. Emits values whenever either
   * of the inputs is available.
   */
  def either[I,I2]: Wye[I,I2,I \/ I2] = {
    def go: Wye[I,I2,I \/ I2] =
      receiveBoth[I,I2,I \/ I2]({
        case ReceiveL(i) => emit(-\/(i)) fby go
        case ReceiveR(i) => emit(\/-(i)) fby go
        case other => go
      })
    go
  }

  /**
   * Continuous wye, that first reads from Left to get `A`,
   * Then when `A` is not available it reads from R echoing any `A` that was received from Left.
   * Halts when either branch halts.
   */
  def echoLeft[A]: Wye[A, Any, A] = {
    def go(a: A): Wye[A, Any, A] =
      receiveBoth({
        case ReceiveL(l) => emit(l) fby go(l)
        // Right input arrives: re-emit the last value seen on the left.
        case ReceiveR(_) => emit(a) fby go(a)
        case HaltOne(rsn) => Halt(rsn)
      })
    awaitL[A].flatMap(s => emit(s) fby go(s))
  }

  /**
   * Let through the right branch as long as the left branch is `false`,
   * listening asynchronously for the left branch to become `true`.
   * This halts as soon as the right branch halts.
   */
  def interrupt[I]: Wye[Boolean, I, I] = {
    def go[I]: Wye[Boolean, I, I] = awaitBoth[Boolean,I].flatMap {
      // NOTE(review): pattern-matches None against an unconstrained I — this
      // appears to special-case Option-typed streams; confirm intent.
      case ReceiveR(None) => halt
      case ReceiveR(i) => emit(i) ++ go
      case ReceiveL(kill) => if (kill) halt else go
      case HaltOne(e) => Halt(e)
    }
    go
  }

  /**
   * Nondeterministic interleave of both inputs. Emits values whenever either
   * of the inputs is available.
   */
  def merge[I]: Wye[I,I,I] = {
    def go: Wye[I,I,I] =
      receiveBoth[I,I,I]({
        case ReceiveL(i) => emit(i) fby go
        case ReceiveR(i) => emit(i) fby go
        case other => go
      }
    )
    go
  }

  /**
   * A `Wye` which blocks on the right side when either a) the age of the oldest unanswered
   * element from the left size exceeds the given duration, or b) the number of unanswered
   * elements from the left exceeds `maxSize`.
   */
  def timedQueue[I](d: Duration, maxSize: Int = Int.MaxValue): Wye[Duration,I,I] = {
    // `q` holds the arrival times of unanswered left-side elements.
    def go(q: Vector[Duration]): Wye[Duration,I,I] =
      awaitBoth[Duration,I].flatMap {
        case ReceiveL(d2) =>
          if (q.size >= maxSize || (d2 - q.headOption.getOrElse(d2) > d))
            awaitR[I].flatMap(i => emit(i) fby go(q.drop(1)))
          else
            go(q :+ d2)
        case ReceiveR(i) => emit(i) fby (go(q.drop(1)))
        case _ => go(q)
      }
    go(Vector())
  }

  /**
   * `Wye` which repeatedly awaits both branches, emitting any values
   * received from the right. Useful in conjunction with `connect`,
   * for instance `src.connect(snk)(unboundedQueue)`
   */
  def unboundedQueue[I]: Wye[Any,I,I] =
    awaitBoth[Any,I].flatMap {
      case ReceiveL(_) => halt
      case ReceiveR(i) => emit(i) fby unboundedQueue
      case _ => unboundedQueue
    }

  /** Nondeterministic version of `zip` which requests both sides in parallel. */
  def yip[I,I2]: Wye[I,I2,(I,I2)] = yipWith((_,_))

  /**
   * Left-biased, buffered version of `yip`. Allows up to `n` elements to enqueue on the
   * left unanswered before requiring a response from the right. If buffer is empty,
   * always reads from the left.
   */
  def yipL[I,I2](n: Int): Wye[I,I2,(I,I2)] =
    yipWithL(n)((_,_))

  /** Nondeterministic version of `zipWith` which requests both sides in parallel. */
  def yipWith[I,I2,O](f: (I,I2) => O): Wye[I,I2,O] =
    awaitBoth[I,I2].flatMap {
      case ReceiveL(i) => awaitR[I2].flatMap(i2 => emit(f(i,i2)) ++ yipWith(f))
      case ReceiveR(i2) => awaitL[I].flatMap(i => emit(f(i,i2)) ++ yipWith(f))
      case _ => halt
    }

  /**
   * Left-biased, buffered version of `yipWith`. Allows up to `n` elements to enqueue on the
   * left unanswered before requiring a response from the right. If buffer is empty,
   * always reads from the left.
   */
  def yipWithL[I,O,O2](n: Int)(f: (I,O) => O2): Wye[I,O,O2] = {
    // `buf` holds left-side elements not yet paired with a right-side element.
    def go(buf: Vector[I]): Wye[I,O,O2] =
      if (buf.size > n) awaitR[O].flatMap { o =>
        emit(f(buf.head,o)) ++ go(buf.tail)
      }
      else if (buf.isEmpty) awaitL[I].flatMap { i => go(buf :+ i) }
      else awaitBoth[I,O].flatMap {
        case ReceiveL(i) => go(buf :+ i)
        case ReceiveR(o) => emit(f(buf.head,o)) ++ go(buf.tail)
        case _ => halt
      }
    go(Vector())
  }
}
object wye extends wye {
// combinators that don't have globally unique names and
// shouldn't be mixed into `processes`
/**
* Transform the left input of the given `Wye` using a `Process1`.
*/
def attachL[I0,I,I2,O](p: Process1[I0,I])(w: Wye[I,I2,O]): Wye[I0,I2,O] = w match {
case h@Halt(_) => h
case Emit(h,t) => Emit(h, attachL(p)(t))
case AwaitL(recv, fb, c) =>
p match {
case Emit(h, t) => attachL(t)(wye.feedL(h)(w))
case Await1(recvp, fbp, cp) =>
await(L[I0]: Env[I0,I2]#Y[I0])(
recvp andThen (attachL(_)(w)),
attachL(fbp)(w),
attachL(cp)(w))
case h@Halt(_) => attachL(h)(fb)
}
case AwaitR(recv, fb, c) =>
awaitR[I2].flatMap(recv andThen (attachL(p)(_))).
orElse(attachL(p)(fb), attachL(p)(c))
case AwaitBoth(recv, fb, c) =>
p match {
case Emit(h, t) => attachL(t)(scalaz.stream.wye.feedL(h)(w))
case Await1(recvp, fbp, cp) =>
await(Both[I0,I2]: Env[I0,I2]#Y[ReceiveY[I0,I2]])(
{ case ReceiveL(i0) => attachL(p.feed1(i0))(w)
case ReceiveR(i2) => attachL(p)(feed1R(i2)(w))
case HaltL(End) => attachL(p.fallback)(w)
case HaltL(e) => attachL(p.causedBy(e))(haltL(e)(w))
case HaltR(e) => attachL(p)(haltR(e)(w))
},
attachL(fbp)(w),
attachL(cp)(w))
case h@Halt(End) => attachL(h)(fb)
case h@Halt(e) => attachL(h)(c.causedBy(e))
}
}
/**
* Transform the right input of the given `Wye` using a `Process1`.
*/
def attachR[I,I1,I2,O](p: Process1[I1,I2])(w: Wye[I,I2,O]): Wye[I,I1,O] =
flip(attachL(p)(flip(w)))
/**
* Feed a single `ReceiveY` value to a `Wye`.
*/
def feed1[I,I2,O](i: ReceiveY[I,I2])(w: Wye[I,I2,O]): Wye[I,I2,O] =
i match {
case ReceiveL(i) => feed1L(i)(w)
case ReceiveR(i2) => feed1R(i2)(w)
case HaltL(e) => haltL(e)(w)
case HaltR(e) => haltR(e)(w)
}
/** Feed a single value to the left branch of a `Wye`. */
def feed1L[I,I2,O](i: I)(w: Wye[I,I2,O]): Wye[I,I2,O] =
feedL(List(i))(w)
/** Feed a single value to the right branch of a `Wye`. */
def feed1R[I,I2,O](i2: I2)(w: Wye[I,I2,O]): Wye[I,I2,O] =
feedR(List(i2))(w)
  /** Feed a sequence of inputs to the left side of a `Wye`. */
  // (Doc fix: this operates on a `Wye`, not a `Tee`.)
  // Steps the wye with each input in turn, buffering anything it emits.
  // Left and Both awaits consume an input; a Right await suspends the
  // remaining inputs inside the continuation so they are fed later.
  def feedL[I,I2,O](i: Seq[I])(p: Wye[I,I2,O]): Wye[I,I2,O] = {
    @annotation.tailrec
    def go(in: Seq[I], out: Vector[Seq[O]], cur: Wye[I,I2,O]): Wye[I,I2,O] =
      if (in.nonEmpty) cur match {
        case h@Halt(_) => emitSeq(out.flatten, h)
        case Emit(h, t) => go(in, out :+ h, t)
        case AwaitL(recv, fb, c) =>
          val next =
            try recv(in.head)
            catch {
              // `End` signals normal termination: switch to the fallback branch.
              case End => fb
              case e: Throwable => c.causedBy(e)
            }
          go(in.tail, out, next)
        case AwaitBoth(recv, fb, c) =>
          val next =
            try recv(ReceiveY.ReceiveL(in.head))
            catch {
              case End => fb
              case e: Throwable => c.causedBy(e)
            }
          go(in.tail, out, next)
        case AwaitR(recv, fb, c) =>
          // Can't make progress without a right-side value; flush buffered
          // output and re-wrap so the remaining `in` is fed on every branch.
          emitSeq(out.flatten,
            await(R[I2]: Env[I,I2]#Y[I2])(recv andThen (feedL(in)), feedL(in)(fb), feedL(in)(c)))
      }
      else emitSeq(out.flatten, cur)
    go(i, Vector(), p)
  }
  /** Feed a sequence of inputs to the right side of a `Wye`. */
  // (Doc fix: this operates on a `Wye`, not a `Tee`.) Mirror image of [[feedL]].
  def feedR[I,I2,O](i: Seq[I2])(p: Wye[I,I2,O]): Wye[I,I2,O] = {
    @annotation.tailrec
    def go(in: Seq[I2], out: Vector[Seq[O]], cur: Wye[I,I2,O]): Wye[I,I2,O] =
      if (in.nonEmpty) cur match {
        case h@Halt(_) => emitSeq(out.flatten, h)
        case Emit(h, t) => go(in, out :+ h, t)
        case AwaitR(recv, fb, c) =>
          val next =
            try recv(in.head)
            catch {
              // `End` signals normal termination: switch to the fallback branch.
              case End => fb
              case e: Throwable => c.causedBy(e)
            }
          go(in.tail, out, next)
        case AwaitBoth(recv, fb, c) =>
          val next =
            try recv(ReceiveY.ReceiveR(in.head))
            catch {
              case End => fb
              case e: Throwable => c.causedBy(e)
            }
          go(in.tail, out, next)
        case AwaitL(recv, fb, c) =>
          // Blocked on a left-side value; flush output and re-feed `in` later.
          emitSeq(out.flatten,
            await(L[I]: Env[I,I2]#Y[I])(recv andThen (feedR(in)), feedR(in)(fb), feedR(in)(c)))
      }
      else emitSeq(out.flatten, cur)
    go(i, Vector(), p)
  }
  /** Signal to wye that left side has terminated **/
  // Flushes buffered emits, kills the wye (with `e`) if it was waiting
  // solely on the now-dead left side, and delivers a `HaltL` event to
  // `AwaitBoth` receivers so they can react. Right-only awaits survive.
  def haltL[I,I2,O](e:Throwable)(p:Wye[I,I2,O]):Wye[I,I2,O] = {
    p match {
      case h@Halt(_) => h
      case Emit(h, t) =>
        // Surface everything already emitted before propagating termination.
        val (nh,nt) = t.unemit
        Emit(h ++ nh, haltL(e)(nt))
      case AwaitL(rcv,fb,c) => p.killBy(e)
      case AwaitR(rcv,fb,c) => await(R[I2]: Env[I,I2]#Y[I2])(rcv, haltL(e)(fb), haltL(e)(c))
      case AwaitBoth(rcv,fb,c) =>
        try rcv(ReceiveY.HaltL(e))
        catch {
          case End => fb
          case e: Throwable => c.causedBy(e)
        }
    }
  }
  /** Signal to wye that right side has terminated. Mirror image of [[haltL]]. */
  def haltR[I,I2,O](e:Throwable)(p:Wye[I,I2,O]):Wye[I,I2,O] = {
    p match {
      case h@Halt(_) => h
      case Emit(h, t) =>
        // Surface everything already emitted before propagating termination.
        val (nh,nt) = t.unemit
        Emit(h ++ nh, haltR(e)(nt))
      case AwaitR(rcv,fb,c) => p.killBy(e)
      case AwaitL(rcv,fb,c) => await(L[I]: Env[I,I2]#Y[I])(rcv, haltR(e)(fb), haltR(e)(c))
      case AwaitBoth(rcv,fb,c) =>
        try rcv(ReceiveY.HaltR(e))
        catch {
          case End => fb
          case e: Throwable => c.causedBy(e)
        }
    }
  }
  /**
   * Convert right requests to left requests and vice versa.
   */
  // Structurally swaps the two input sides; output is unchanged.
  // `flip(flip(w)) == w`.
  def flip[I,I2,O](w: Wye[I,I2,O]): Wye[I2,I,O] = w match {
    case h@Halt(_) => h
    case Emit(h, t) => Emit(h, flip(t))
    case AwaitL(recv, fb, c) =>
      await(R[I]: Env[I2,I]#Y[I])(recv andThen (flip), flip(fb), flip(c))
    case AwaitR(recv, fb, c) =>
      await(L[I2]: Env[I2,I]#Y[I2])(recv andThen (flip), flip(fb), flip(c))
    case AwaitBoth(recv, fb, c) =>
      await(Both[I2,I])((t: ReceiveY[I2,I]) => flip(recv(t.flip)), flip(fb), flip(c))
  }
  /**
   * Lift a `Wye` to operate on the left side of an `\/`, passing
   * through any values it receives on the right from either branch.
   */
  // Defined in terms of [[liftR]] by swapping each `\/` on the way in and out.
  def liftL[I0,I,I2,O](w: Wye[I,I2,O]): Wye[I \/ I0, I2 \/ I0, O \/ I0] =
    liftR[I0,I,I2,O](w)
      .map(_.swap)
      .contramapL((e: I \/ I0) => e.swap)
      .contramapR((e: I2 \/ I0) => e.swap)
  /**
   * Lift a `Wye` to operate on the right side of an `\/`, passing
   * through any values it receives on the left from either branch.
   */
  // Left (`I0`) values are re-emitted unchanged (`emit(left(i0))`) and the
  // original wye `w` continues from where it was; right values are unwrapped
  // and fed to `w`'s receive function.
  def liftR[I0,I,I2,O](w: Wye[I,I2,O]): Wye[I0 \/ I, I0 \/ I2, I0 \/ O] =
    w match {
      case Emit(h, t) => Emit(h map right, liftR[I0,I,I2,O](t))
      case h@Halt(_) => h
      case AwaitL(recv, fb, c) =>
        val w2: Wye[I0 \/ I, I0 \/ I2, I0 \/ O] =
          awaitL[I0 \/ I].flatMap(_.fold(
            i0 => emit(left(i0)) ++ liftR(w),
            i => liftR[I0,I,I2,O](recv(i))
          ))
        val fb2 = liftR[I0,I,I2,O](fb)
        val c2 = liftR[I0,I,I2,O](c)
        w2.orElse(fb2, c2)
      case AwaitR(recv, fb, c) =>
        val w2: Wye[I0 \/ I, I0 \/ I2, I0 \/ O] =
          awaitR[I0 \/ I2].flatMap(_.fold(
            i0 => emit(left(i0)) ++ liftR(w),
            i => liftR[I0,I,I2,O](recv(i))
          ))
        val fb2 = liftR[I0,I,I2,O](fb)
        val c2 = liftR[I0,I,I2,O](c)
        w2.orElse(fb2, c2)
      case AwaitBoth(recv, fb, c) =>
        // Reduce the Both case to the single-sided cases above via feed1.
        // NOTE(review): Halt events restart `liftR(w)` rather than propagating
        // termination — presumably intentional (pass-through continues); confirm.
        val w2: Wye[I0 \/ I, I0 \/ I2, I0 \/ O] = awaitBoth[I0 \/ I, I0 \/ I2].flatMap {
          case ReceiveL(io) => feed1(ReceiveL(io))(liftR(AwaitL(recv compose ReceiveL.apply, fb, c)))
          case ReceiveR(io) => feed1(ReceiveR(io))(liftR(AwaitR(recv compose ReceiveR.apply, fb, c)))
          case HaltL(e) => liftR(w)
          case HaltR(e) => liftR(w)
        }
        val fb2 = liftR[I0,I,I2,O](fb)
        val c2 = liftR[I0,I,I2,O](c)
        w2.orElse(fb2, c2)
    }
  /** Simple enumeration for dynamically generated `Wye` request types. See `wye.dynamic`. */
  trait Request
  object Request {
    case object L extends Request
    case object R extends Request
    case object Both extends Request
  }
  /** Extractor/constructor for a wye awaiting its left input.
    * The request tag distinguishes the three await kinds: 0 = left,
    * 1 = right, 2 = both; the cast is justified by that tag check. */
  object AwaitL {
    def unapply[I,I2,O](self: Wye[I,I2,O]):
        Option[(I => Wye[I,I2,O], Wye[I,I2,O], Wye[I,I2,O])] = self match {
      case Await(req,recv,fb,c) if req.tag == 0 => Some((recv.asInstanceOf[I => Wye[I,I2,O]], fb, c))
      case _ => None
    }
    def apply[I,I2,O](recv: I => Wye[I,I2,O],
                      fallback: Wye[I,I2,O] = halt,
                      cleanup: Wye[I,I2,O] = halt): Wye[I,I2,O] =
      await(L[I]: Env[I,I2]#Y[I])(recv, fallback, cleanup)
  }
  /** Extractor/constructor for a wye awaiting its right input (request tag 1). */
  object AwaitR {
    def unapply[I,I2,O](self: Wye[I,I2,O]):
        Option[(I2 => Wye[I,I2,O], Wye[I,I2,O], Wye[I,I2,O])] = self match {
      case Await(req,recv,fb,c) if req.tag == 1 => Some((recv.asInstanceOf[I2 => Wye[I,I2,O]], fb, c))
      case _ => None
    }
    def apply[I,I2,O](recv: I2 => Wye[I,I2,O],
                      fallback: Wye[I,I2,O] = halt,
                      cleanup: Wye[I,I2,O] = halt): Wye[I,I2,O] =
      await(R[I2]: Env[I,I2]#Y[I2])(recv, fallback, cleanup)
  }
  /** Extractor/constructor for a wye awaiting either input (request tag 2). */
  object AwaitBoth {
    def unapply[I,I2,O](self: Wye[I,I2,O]):
        Option[(ReceiveY[I,I2] => Wye[I,I2,O], Wye[I,I2,O], Wye[I,I2,O])] = self match {
      case Await(req,recv,fb,c) if req.tag == 2 => Some((recv.asInstanceOf[ReceiveY[I,I2] => Wye[I,I2,O]], fb, c))
      case _ => None
    }
    def apply[I,I2,O](recv: ReceiveY[I,I2] => Wye[I,I2,O],
                      fallback: Wye[I,I2,O] = halt,
                      cleanup: Wye[I,I2,O] = halt): Wye[I,I2,O] =
      await(Both[I,I2])(recv, fallback, cleanup)
  }
}
| wemrysi/scalaz-stream | src/main/scala/scalaz/stream/wye.scala | Scala | mit | 17,197 |
package is.hail.methods
import breeze.linalg._
import is.hail.HailContext
import is.hail.annotations._
import is.hail.backend.ExecuteContext
import is.hail.expr.ir.functions.MatrixToTableFunction
import is.hail.expr.ir.{IntArrayBuilder, MatrixValue, TableValue}
import is.hail.types.virtual.{TArray, TFloat64, TStruct}
import is.hail.types.{MatrixType, TableType}
import is.hail.rvd.RVDType
import is.hail.stats._
import is.hail.utils._
/** Row-wise logistic regression over the entries of a matrix table.
  *
  * For each row, regresses each response vector in `yFields` on the entry
  * field `xField` plus the sample covariates `covFields`, using the test
  * named by `test` (looked up in `LogisticRegressionTest.tests`).
  * `passThrough` row fields are copied verbatim into the output table. */
case class LogisticRegression(
  test: String,
  yFields: Seq[String],
  xField: String,
  covFields: Seq[String],
  passThrough: Seq[String]) extends MatrixToTableFunction {
  // Output schema: row key ++ pass-through fields ++ one test-result struct
  // per phenotype under "logistic_regression".
  override def typ(childType: MatrixType): TableType = {
    val logRegTest = LogisticRegressionTest.tests(test)
    val multiPhenoSchema = TStruct(("logistic_regression", TArray(logRegTest.schema)))
    val passThroughType = TStruct(passThrough.map(f => f -> childType.rowType.field(f).typ): _*)
    TableType(childType.rowKeyStruct ++ passThroughType ++ multiPhenoSchema, childType.rowKey, TStruct.empty)
  }
  def preservesPartitionCounts: Boolean = true
  def execute(ctx: ExecuteContext, mv: MatrixValue): TableValue = {
    val logRegTest = LogisticRegressionTest.tests(test)
    val tableType = typ(mv.typ)
    val newRVDType = tableType.canonicalRVDType
    val multiPhenoSchema = TStruct(("logistic_regression", TArray(logRegTest.schema)))
    val (yVecs, cov, completeColIdx) = RegressionUtils.getPhenosCovCompleteSamples(mv, yFields.toArray, covFields.toArray)
    // Validate every phenotype is binary (0/1) and non-constant.
    (0 until yVecs.cols).foreach(col => {
      if (!yVecs(::, col).forall(yi => yi == 0d || yi == 1d))
        fatal(s"For logistic regression, y at index ${col} must be bool or numeric with all present values equal to 0 or 1")
      val sumY = sum(yVecs(::,col))
      if (sumY == 0d || sumY == yVecs(::,col).length)
        fatal(s"For logistic regression, y at index ${col} must be non-constant")
    })
    // Degrees of freedom: samples minus covariates minus the x column itself.
    val n = yVecs.rows
    val k = cov.cols
    val d = n - k - 1
    if (d < 1)
      fatal(s"$n samples and ${ k + 1 } ${ plural(k, "covariate") } (including x) implies $d degrees of freedom.")
    info(s"logistic_regression_rows: running $test on $n samples for response variable y,\\n"
      + s"    with input variable x, and ${ k } additional ${ plural(k, "covariate") }...")
    // Fit the covariates-only null model once per phenotype. For the Firth
    // test a non-converged fit is tolerated (intercept-only fallback);
    // otherwise failure to converge aborts the whole computation.
    val nullFits = (0 until yVecs.cols).map(col => {
      val nullModel = new LogisticRegressionModel(cov, yVecs(::, col))
      var nullFit = nullModel.fit()
      if (!nullFit.converged)
        if (logRegTest == LogisticFirthTest)
          nullFit = GLMFit(nullModel.bInterceptOnly(),
            None, None, 0, nullFit.nIter, exploded = nullFit.exploded, converged = false)
        else
          fatal("Failed to fit logistic regression null model (standard MLE with covariates only): " + (
            if (nullFit.exploded)
              s"exploded at Newton iteration ${nullFit.nIter}"
            else
              "Newton iteration failed to converge"))
      nullFit
    })
    // Broadcast the per-partition constants: null fits, phenotypes, and the
    // design matrix X (covariates plus one trailing column for x, filled in
    // per row below).
    val backend = HailContext.backend
    val completeColIdxBc = backend.broadcast(completeColIdx)
    val yVecsBc = backend.broadcast(yVecs)
    val XBc = backend.broadcast(new DenseMatrix[Double](n, k + 1, cov.toArray ++ Array.ofDim[Double](n)))
    val nullFitBc = backend.broadcast(nullFits)
    val logRegTestBc = backend.broadcast(logRegTest)
    val fullRowType = mv.rvRowPType
    val entryArrayType = mv.entryArrayPType
    val entryType = mv.entryPType
    val fieldType = entryType.field(xField).typ
    assert(fieldType.virtualType == TFloat64)
    val entryArrayIdx = mv.entriesIdx
    val fieldIdx = entryType.fieldIdx(xField)
    val copiedFieldIndices = (mv.typ.rowKey ++ passThrough).map(mv.rvRowType.fieldIdx(_)).toArray
    val newRVD = mv.rvd.mapPartitions(newRVDType) { (ctx, it) =>
      val rvb = ctx.rvb
      val missingCompleteCols = new IntArrayBuilder()
      val _nullFits = nullFitBc.value
      val _yVecs = yVecsBc.value
      // Local mutable copy of X: the last column is overwritten per row.
      val X = XBc.value.copy
      it.map { ptr =>
        // Fill X's trailing column with this row's x values, mean-imputing
        // missing entries.
        RegressionUtils.setMeanImputedDoubles(X.data, n * k, completeColIdxBc.value, missingCompleteCols,
          ptr, fullRowType, entryArrayType, entryType, entryArrayIdx, fieldIdx)
        val logregAnnotations = (0 until _yVecs.cols).map(col => {
          logRegTestBc.value.test(X, _yVecs(::,col), _nullFits(col), "logistic")
        })
        // Build the output row: key/pass-through fields, then one result
        // struct per phenotype.
        rvb.start(newRVDType.rowType)
        rvb.startStruct()
        rvb.addFields(fullRowType, ctx.r, ptr, copiedFieldIndices)
        rvb.startArray(_yVecs.cols)
        logregAnnotations.foreach(stats => {
          rvb.startStruct()
          stats.addToRVB(rvb)
          rvb.endStruct()
        })
        rvb.endArray()
        rvb.endStruct()
        rvb.end()
      }
    }
    TableValue(ctx, tableType, BroadcastRow.empty(ctx), newRVD)
  }
}
| hail-is/hail | hail/src/main/scala/is/hail/methods/LogisticRegression.scala | Scala | mit | 4,789 |
package de.neuland.reminder.postgres
import scala.slick.driver.PostgresDriver.simple._
import scala.slick.lifted.{Index, ProvenShape, Tag}
/** Slick table mapping for the "reminders" table.
  * Every column is NOT NULL; `id` is the primary key. The default projection
  * `*` maps a row to/from the `Reminder` case class in field order. */
class Reminders(tag: Tag) extends Table[Reminder](tag, "reminders") {
  def id: Column[String] = column[String]("id", O.PrimaryKey, O.NotNull)
  def author: Column[String] = column[String]("author", O.NotNull)
  def recipient: Column[String] = column[String]("recipient", O.NotNull)
  def message: Column[String] = column[String]("message", O.NotNull)
  // Serialized schedule specification; format defined by the caller.
  def schedules: Column[String] = column[String]("schedules", O.NotNull)
  // DB column name is all-lowercase "webhookkey", unlike the Scala accessor.
  def webhookKey: Column[String] = column[String]("webhookkey", O.NotNull)
  def * : ProvenShape[Reminder] = (id, author, recipient, message, schedules, webhookKey) <> (Reminder.tupled, Reminder.unapply)
  // Secondary indexes for the two lookup columns.
  def idx_author: Index = index("idx_author", author)
  def idx_recipient: Index = index("idx_recipient", recipient)
}
| neuland/matterminder | app/de/neuland/reminder/postgres/Reminders.scala | Scala | gpl-3.0 | 889 |
package ru.tolsi.matcher.naive
import scala.concurrent.{ExecutionContext, Future}
import ru.tolsi.matcher.{Client, ClientRepository, OrderType, ReverseOrders, ReverseOrdersExecutor}
private[naive] class SingleThreadOrderExecutor extends ReverseOrdersExecutor[Long] {
  /** Settles a matched pair of reverse orders by moving the traded asset and
    * the USD notional (price * qty) between the two creators' balances.
    * Fails the returned future if either creator cannot be found. */
  def execute(orders: ReverseOrders, clientRepository: ClientRepository[Long])
    (implicit ec: ExecutionContext): Future[Unit] = {
    val ReverseOrders(order, reverseOrder) = orders
    clientRepository.get(order.creator).flatMap { makerOpt =>
      clientRepository.get(reverseOrder.creator).map { takerOpt =>
        (makerOpt, takerOpt) match {
          case (Some(maker), Some(taker)) =>
            // Buyer receives the asset and pays USD; seller does the opposite.
            // todo currency?
            def settle(buyer: Client[Long], seller: Client[Long]): Unit = {
              buyer.addDeltaToBalance(order.asset, order.qty)
              buyer.addDeltaToBalance("USD", -order.price * order.qty)
              seller.addDeltaToBalance(order.asset, -order.qty)
              seller.addDeltaToBalance("USD", order.price * order.qty)
            }
            if (order.`type` == OrderType.Buy) settle(maker, taker)
            else settle(taker, maker)
          case _ =>
            throw new IllegalStateException(s"At least one of orders '$orders' creators are not found")
        }
      }
    }
  }
}
| Tolsi/matcher | src/main/scala/ru/tolsi/matcher/naive/SingleThreadOrderExecutor.scala | Scala | mit | 1,400 |
package com.overviewdocs.ingest.process
import akka.stream.{Graph,Materializer,Outlet,UniformFanOutShape}
import akka.stream.scaladsl.{Flow,GraphDSL,Partition,Sink}
import akka.util.ByteString
import java.util.concurrent.Callable
import scala.concurrent.{ExecutionContext,Future,blocking}
import com.overviewdocs.blobstorage.BlobStorage
import com.overviewdocs.ingest.model.WrittenFile2
import com.overviewdocs.models.BlobStorageRef
import com.overviewdocs.util.Logger
import org.overviewproject.mime_types.MimeTypeDetector
/** Ensures a valid `contentType` for each input WrittenFile2; emits to the
  * appropriate Step.
  *
  * Some Steps have special considerations:
  *
  * * "Canceled" is selected if the job is canceled
  * * "Unhandled" is the default Step
  * * "PdfOcr" is selected over "Pdf" if wantOcr is true
  *
  * This Graph has `steps.size` outlets: one per (hard-coded) Step. Outputs
  * on outlet 0 should connect to `Step.all(0)`'s inlet. Outputs on outlet 1
  * should connect to `Step.all(1)`'s inlet. And so on.
  *
  * We don't write our results to the database: we assume contentType
  * detection is relatively quick. Most of the time, it relies on filename
  * extension. Only rarely does it use magic numbers from BlobStorage. Plus,
  * if there's a large backlog of magic-number detection to handle, that means
  * downstream isn't processing files quickly enough.
  */
class Decider(
  steps: Vector[Step],
  blobStorage: BlobStorage,

  /** Number of magic-number detections to run in parallel.
    *
    * Usually, the slow part is reading from BlobStorage.
    */
  parallelism: Int = 2
) {
  private val logger = Logger.forClass(getClass)

  /** Resolves a file to the concrete Step it should be routed to. */
  sealed trait NextStep {
    def forFile(file: WrittenFile2): Step
  }
  object NextStep {
    /** Always routes to the step with the given id (must exist in `steps`). */
    case class SimpleStep(stepId: String) extends NextStep {
      val step: Step = steps.find(_.id == stepId).get
      override def forFile(file: WrittenFile2) = step
    }
    /** PDFs go to the OCR pipeline only when the caller asked for OCR. */
    case object PdfStep extends NextStep {
      val ocrStep = steps.find(_.id == "PdfOcr").get
      val noOcrStep = steps.find(_.id == "Pdf").get
      override def forFile(file: WrittenFile2) = {
        if (file.wantOcr) {
          ocrStep
        } else {
          noOcrStep
        }
      }
    }

    val Archive = SimpleStep("Archive")
    val Email = SimpleStep("Email")
    val Html = SimpleStep("Html")
    val Image = SimpleStep("Image")
    val Office = SimpleStep("Office")
    val Pdf = PdfStep
    val Pst = SimpleStep("Pst")
    val Text = SimpleStep("Text")
    val Canceled = SimpleStep("Canceled")
    val Unhandled = SimpleStep("Unhandled")
  }

  /** MIME type (or `type/*` wildcard) to handler. Anything absent here falls
    * through to NextStep.Unhandled. */
  private val handlers = Map[String,NextStep](
    "application/pdf" -> NextStep.Pdf,
    "application/bzip2" -> NextStep.Archive,
    "application/gzip" -> NextStep.Archive,
    "application/jar" -> NextStep.Archive,
    "application/tar" -> NextStep.Archive,
    "application/vnd.ms-cab-compressed" -> NextStep.Archive,
    "application/x-7z-compressed" -> NextStep.Archive,
    "application/x-bzip2" -> NextStep.Archive,
    "application/x-bzip2-compressed-tar" -> NextStep.Archive,
    "application/x-bzip" -> NextStep.Archive,
    "application/x-bzip-compressed-tar" -> NextStep.Archive,
    "application/x-compressed-tar" -> NextStep.Archive,
    "application/x-iso9660-image" -> NextStep.Archive,
    "application/x-rar-compressed" -> NextStep.Archive,
    "application/x-tar" -> NextStep.Archive,
    "application/x-xz" -> NextStep.Archive,
    "application/x-xz-compressed-tar" -> NextStep.Archive,
    "application/zip" -> NextStep.Archive,
    "message/rfc822" -> NextStep.Email,
    "image/jpeg" -> NextStep.Image,
    "image/png" -> NextStep.Image,
    "application/clarisworks" -> NextStep.Office,
    "application/excel" -> NextStep.Office,
    "application/macwriteii" -> NextStep.Office,
    "application/msexcel" -> NextStep.Office,
    "application/mspowerpoint" -> NextStep.Office,
    "application/msword" -> NextStep.Office,
    "application/prs.plucker" -> NextStep.Office,
    "application/tab-separated-values" -> NextStep.Office,
    "application/vnd.corel-draw" -> NextStep.Office,
    "application/vnd.lotus-1-2-3" -> NextStep.Office,
    "application/vnd.lotus-wordpro" -> NextStep.Office,
    "application/vnd.ms-excel" -> NextStep.Office,
    "application/vnd.ms-excel.sheet.binary.macroenabled.12" -> NextStep.Office,
    "application/vnd.ms-excel.sheet.macroenabled.12" -> NextStep.Office,
    "application/vnd.ms-excel.template.macroenabled.12" -> NextStep.Office,
    "application/vnd.ms-powerpoint" -> NextStep.Office,
    "application/vnd.ms-powerpoint.presentation.macroenabled.12" -> NextStep.Office,
    "application/vnd.ms-powerpoint.slideshow.macroEnabled.12" -> NextStep.Office,
    "application/vnd.ms-powerpoint.template.macroenabled.12" -> NextStep.Office,
    "application/vnd.ms-publisher" -> NextStep.Office,
    "application/vnd.ms-word" -> NextStep.Office,
    "application/vnd.ms-word.document.macroenabled.12" -> NextStep.Office,
    "application/vnd.ms-word.template.macroenabled.12" -> NextStep.Office,
    "application/vnd.ms-works" -> NextStep.Office,
    "application/vnd.oasis.opendocument.chart" -> NextStep.Office,
    "application/vnd.oasis.opendocument.chart-template" -> NextStep.Office,
    "application/vnd.oasis.opendocument.graphics" -> NextStep.Office,
    "application/vnd.oasis.opendocument.graphics-flat-xml" -> NextStep.Office,
    "application/vnd.oasis.opendocument.graphics-template" -> NextStep.Office,
    "application/vnd.oasis.opendocument.presentation" -> NextStep.Office,
    "application/vnd.oasis.opendocument.presentation-flat-xml" -> NextStep.Office,
    "application/vnd.oasis.opendocument.presentation-template" -> NextStep.Office,
    "application/vnd.oasis.opendocument.spreadsheet" -> NextStep.Office,
    "application/vnd.oasis.opendocument.spreadsheet-flat-xml" -> NextStep.Office,
    "application/vnd.oasis.opendocument.spreadsheet-template" -> NextStep.Office,
    "application/vnd.oasis.opendocument.text" -> NextStep.Office,
    "application/vnd.oasis.opendocument.text-flat-xml" -> NextStep.Office,
    "application/vnd.oasis.opendocument.text-master" -> NextStep.Office,
    "application/vnd.oasis.opendocument.text-template" -> NextStep.Office,
    "application/vnd.oasis.opendocument.text-web" -> NextStep.Office,
    "application/vnd.openxmlformats-officedocument.presentationml.presentation" -> NextStep.Office,
    "application/vnd.openxmlformats-officedocument.presentationml.slide" -> NextStep.Office,
    "application/vnd.openxmlformats-officedocument.presentationml.slideshow" -> NextStep.Office,
    "application/vnd.openxmlformats-officedocument.presentationml.template" -> NextStep.Office,
    "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" -> NextStep.Office,
    "application/vnd.openxmlformats-officedocument.spreadsheetml.template" -> NextStep.Office,
    "application/vnd.openxmlformats-officedocument.wordprocessingml.document" -> NextStep.Office,
    "application/vnd.openxmlformats-officedocument.wordprocessingml.template" -> NextStep.Office,
    "application/vnd.palm" -> NextStep.Office,
    "application/vnd.stardivision.writer-global" -> NextStep.Office,
    "application/vnd.sun.xml.calc" -> NextStep.Office,
    "application/vnd.sun.xml.calc.template" -> NextStep.Office,
    "application/vnd.sun.xml.draw" -> NextStep.Office,
    "application/vnd.sun.xml.draw.template" -> NextStep.Office,
    "application/vnd.sun.xml.impress" -> NextStep.Office,
    "application/vnd.sun.xml.impress.template" -> NextStep.Office,
    "application/vnd.sun.xml.writer" -> NextStep.Office,
    "application/vnd.sun.xml.writer.global" -> NextStep.Office,
    "application/vnd.sun.xml.writer.template" -> NextStep.Office,
    "application/vnd.visio" -> NextStep.Office,
    "application/vnd.wordperfect" -> NextStep.Office,
    "application/wordperfect" -> NextStep.Office,
    "application/x-123" -> NextStep.Office,
    "application/x-aportisdoc" -> NextStep.Office,
    "application/x-dbase" -> NextStep.Office,
    "application/x-dbf" -> NextStep.Office,
    "application/x-doc" -> NextStep.Office,
    "application/x-dos_ms_excel" -> NextStep.Office,
    "application/x-excel" -> NextStep.Office,
    "application/x-extension-txt" -> NextStep.Office,
    "application/x-fictionbook+xml" -> NextStep.Office,
    "application/x-hwp" -> NextStep.Office,
    "application/x-iwork-keynote-sffkey" -> NextStep.Office,
    "application/x-msexcel" -> NextStep.Office,
    "application/x-ms-excel" -> NextStep.Office,
    "application/x-quattropro" -> NextStep.Office,
    "application/x-t602" -> NextStep.Office,
    "application/x-wpg" -> NextStep.Office,
    "image/x-freehand" -> NextStep.Office,

    // One can imagine better than LibreOffice for CSV
    "application/csv" -> NextStep.Office,
    "text/csv" -> NextStep.Office,

    "application/vnd.ms-outlook" -> NextStep.Pst,
    "application/rtf" -> NextStep.Html,
    "text/html" -> NextStep.Html,
    "application/xhtml+xml" -> NextStep.Html,
    "application/javascript" -> NextStep.Text,
    "application/json" -> NextStep.Text,
    "application/x-python" -> NextStep.Text,
    "application/x-ruby" -> NextStep.Text,
    "application/x-shellscript" -> NextStep.Text,
    "application/x-yaml" -> NextStep.Text,
    "application/xml" -> NextStep.Text,
    "text/*" -> NextStep.Text // Lots of source code in this category
  )

  /** Fan-out graph: detect each file's Step (up to `parallelism` magic-number
    * detections in flight) and route it to the matching outlet. */
  def graph(implicit mat: Materializer): Graph[UniformFanOutShape[WrittenFile2, WrittenFile2], akka.NotUsed] = {
    implicit val ec = mat.executionContext

    GraphDSL.create() { implicit builder =>
      import GraphDSL.Implicits._

      val addOutletIndex = builder.add(Flow.apply[WrittenFile2].mapAsyncUnordered(parallelism) { file =>
        getOutletIndex(file).map(i => (file, i))
      })
      val partition = builder.add(Partition[(WrittenFile2,Int)](steps.size, t => t._2))
      val extractFiles = steps.map { _ => builder.add(Flow.apply[(WrittenFile2,Int)].map(t => t._1)) }

      addOutletIndex ~> partition
      extractFiles.foreach { extractFile =>
        partition ~> extractFile
      }

      // The compiler seems to need help typecasting outlets
      val outlets: Seq[Outlet[WrittenFile2]] = extractFiles.map(_.out)

      UniformFanOutShape(addOutletIndex.in, outlets: _*)
    }
  }

  /** Reads up to the detector's magic-number window from blob storage. */
  private def getBytes(
    blobLocation: String
  )(implicit mat: Materializer): Future[Array[Byte]] = {
    val maxNBytes = Decider.mimeTypeDetector.getMaxGetBytesLength
    blobStorage.getBytes(blobLocation, maxNBytes)
  }

  /** Sniffs a MIME type from filename extension, falling back to magic
    * numbers read lazily from the blob. */
  private def detectMimeType(
    filename: String,
    blob: BlobStorageRef
  )(implicit mat: Materializer): Future[String] = {
    import scala.compat.java8.FutureConverters.{toJava,toScala}
    toScala(Decider.mimeTypeDetector.detectMimeTypeAsync(filename, () => toJava(getBytes(blob.location))))
  }

  /** Returns the media type without parameters (e.g. "; charset=utf-8").
    * "application/octet-stream" (and anything unparseable) is re-detected. */
  protected[ingest] def getContentTypeNoParameters(
    input: WrittenFile2
  )(implicit mat: Materializer): Future[String] = {
    val MediaTypeRegex = "^([^;]+).*$".r

    input.contentType match {
      case "application/octet-stream" => detectMimeType(input.filename, input.blob.ref)
      case MediaTypeRegex(withoutParameter) => Future.successful(withoutParameter)
      case _ => detectMimeType(input.filename, input.blob.ref)
    }
  }

  /** Picks the Step for a file: Canceled short-circuits; otherwise look up the
    * detected type, then its "type/*" wildcard, then fall back to Unhandled.
    * (Rewritten as an if/else expression instead of an early `return`.) */
  protected[ingest] def getNextStep(
    input: WrittenFile2
  )(implicit mat: Materializer): Future[Step] = {
    implicit val ec = mat.executionContext

    if (input.isCanceled) {
      Future.successful(NextStep.Canceled.forFile(input))
    } else {
      getContentTypeNoParameters(input).map { detectedContentType =>
        val nextStep: NextStep = handlers.get(detectedContentType)
          .orElse(handlers.get(detectedContentType.replaceFirst("/.*", "/*")))
          .getOrElse(NextStep.Unhandled)
        nextStep.forFile(input)
      }
    }
  }

  /** Maps a file to the index (within `steps`) of its chosen Step, logging
    * the routing decision. */
  private def getOutletIndex(
    input: WrittenFile2
  )(implicit mat: Materializer): Future[Int] = {
    implicit val ec = mat.executionContext

    for {
      step <- getNextStep(input)
    } yield {
      logger.info("FileGroup {}: File {} '{}': ⇒ {}", input.fileGroupJob.fileGroupId, input.id, input.filename, step.id)
      steps.indexOf(step)
    }
  }
}
object Decider {
  // Single shared sniffer instance used by all Decider instances.
  // NOTE(review): assumed safe for concurrent use across stream stages —
  // confirm against the mime-types library's documentation.
  protected[ingest] val mimeTypeDetector = new MimeTypeDetector
}
| overview/overview-server | worker/src/main/scala/com/overviewdocs/ingest/process/Decider.scala | Scala | agpl-3.0 | 12,339 |
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.reflect.runtime.universe._
/** Checks that the deprecated manual Tree constructors (New, Apply, Super,
  * Throw, CaseDef, Try overloads) still build trees structurally equal (≈)
  * to their quasiquote equivalents. Uses deprecated APIs on purpose —
  * do not "modernize" these calls. */
object DeprecationProps extends QuasiquoteProperties("deprecation") {
  // Shared fixtures: a type name/tree, a concrete Type and its symbol,
  // and sample argument lists.
  val tname = TypeName("Foo")
  val tpt = tq"Foo"
  val tpe = typeOf[Int]
  val sym = tpe.typeSymbol.asType
  val argss = List(q"x") :: List(q"y") :: Nil
  val args = q"x" :: q"y" :: Nil
  property("new tpt argss") = test {
    assert(q"new $tpt(...$argss)" ≈ New(tpt, argss))
  }
  property("new tpe args") = test {
    assert(q"new $tpe(..$args)" ≈ New(tpe, args: _*))
  }
  property("new tpe args") = test {
    assert(q"new ${sym.toType}(..$args)" ≈ New(sym, args: _*))
  }
  property("apply sym args") = test {
    assert(q"$sym(..$args)" ≈ Apply(sym, args: _*))
  }
  property("applyconstructor") = test {
    assert(q"new $tpt(..$args)" ≈ ApplyConstructor(tpt, args))
  }
  property("super sym name") = test {
    assert(q"$sym.super[$tname].x".qualifier ≈ Super(sym, tname))
  }
  property("throw tpe args") = test {
    assert(q"throw new $tpe(..$args)" ≈ Throw(tpe, args: _*))
  }
  property("casedef pat body") = test {
    val pat = pq"foo"
    val body = q"bar"
    assert(cq"$pat => $body" ≈ CaseDef(pat, body))
  }
  property("try body cases") = test {
    val cases = (pq"a", q"b") :: (pq"c", q"d") :: Nil
    val newcases = cases.map { case (pat, body) => cq"$pat => $body" }
    val body = q"foo"
    assert(q"try $body catch { case ..$newcases }" ≈ Try(body, cases: _*))
  }
} | felixmulder/scala | test/files/scalacheck/quasiquotes/DeprecationProps.scala | Scala | bsd-3-clause | 1,485 |
/*
* Copyright 2020 Precog Data
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar
import slamdata.Predef._
import quasar.contrib.scalaz.{MonadListen_, MonadTell_}
import cats.Eval
import scalaz._
package object common {
  // Case-insensitive name; thin alias over CIString with a matching
  // apply/unapply facade.
  type CIName = CIString
  object CIName {
    def apply(value: String): CIName =
      CIString(value)
    def unapply(name: CIName): Option[String] =
      CIString.unapply(name)
  }
  // Accumulated compiler-phase results. Each entry is lazily rendered
  // (cats.Eval) so RenderTree work is only paid if the log is inspected.
  type PhaseResults = Vector[Eval[PhaseResult]]
  object PhaseResults {
    /** Appends a single (lazy) phase result to the writer log. */
    final def logPhase[M[_]]
      (pr: Eval[PhaseResult])
      (implicit MT: PhaseResultTell[M])
        : M[Unit] =
      MT.tell(Vector(pr))
  }
  // Writer stacks (scalaz and cats variants) carrying PhaseResults.
  type PhaseResultW[A] = Writer[PhaseResults, A]
  type PhaseResultT[F[_], A] = WriterT[F, PhaseResults, A]
  type PhaseResultCatsT[F[_], A] = cats.data.WriterT[F, PhaseResults, A]
  // MTL-style constraints: ability to tell/listen PhaseResults in F.
  type PhaseResultTell[F[_]] = MonadTell_[F, PhaseResults]
  object PhaseResultTell {
    def apply[F[_]](implicit F: PhaseResultTell[F]) = F
  }
  type PhaseResultListen[F[_]] = MonadListen_[F, PhaseResults]
  object PhaseResultListen {
    def apply[F[_]](implicit F: PhaseResultListen[F]) = F
  }
  /** Logs `a` as a rendered phase tree and returns it: `phase[F]("name", a)`. */
  object phase {
    def apply[F[_]] = new PartiallyApplied[F]
    final class PartiallyApplied[F[_]] {
      def apply[A: RenderTree](label: String, a: A)(implicit F: PhaseResultTell[F]): F[A] =
        F.writer(Vector(Eval.later(PhaseResult.tree(label, a))), a)
    }
  }
  /** Like [[phase]] but for an effectful value: logs the result of `fa`. */
  object phaseM {
    def apply[F[_]] = new PartiallyApplied[F]
    final class PartiallyApplied[F[_]] {
      def apply[A: RenderTree](label: String, fa: F[A])(implicit F0: PhaseResultTell[F], F1: Monad[F]): F[A] =
        F1.bind(fa)(phase[F](label, _))
    }
  }
}
| djspiewak/quasar | common/src/main/scala/quasar/common/package.scala | Scala | apache-2.0 | 2,192 |
package shredzzz.kirkwood
import com.simple.simplespec.Matchers
import shredzzz.kirkwood.traits.{TypeSpecificTester, ExceptionTester}
import shredzzz.kirkwood.cumath.CuValue
import shredzzz.kirkwood.cumath.tensor.{CuMatrix, CuVector}
import breeze.linalg.{DenseMatrix, DenseVector}
import breeze.storage.DefaultArrayValue
package object junit
{
  /** Tester that requires results fetched from the GPU to equal the expected
    * values exactly (suitable for integral/boolean element types). */
  trait EquivalentCuTester[V] extends TypeSpecificTester[V] with Matchers
  {
    def matchValue(res: CuValue[V], expected: V) {
      res.load() must be(expected)
    }
    def matchVector(res: CuVector[V], expected: Array[V]) {
      // Check both the raw array and the Breeze view of the same data.
      res.fetch() must matchers.be(expected)
      res.fetchBreeze() must matchers.be(DenseVector(expected))
    }
    def matchMatrix(res: CuMatrix[V], expected: Array[V])(implicit d: DefaultArrayValue[V]) {
      // `expected` is column-major, matching DenseMatrix.create.
      res.fetch() must matchers.be(expected)
      res.fetchBreeze() must matchers.be(DenseMatrix.create(res.rows, res.cols, expected))
    }
  }
  /** Tester that compares within a tolerance `eps` (for floating point). */
  abstract class ApproximateCuTester[V]()(implicit num: Numeric[V]) extends TypeSpecificTester[V] with Matchers
  {
    def eps: V
    def matchValue(res: CuValue[V], expected: V) {
      res.load() must approximately(expected, eps)
    }
    def matchVector(res: CuVector[V], expected: Array[V]) {
      res.fetch() must matchers.approximately(expected, eps)
      res.fetchBreeze() must matchers.approximately(DenseVector(expected), eps)
    }
    def matchMatrix(res: CuMatrix[V], expected: Array[V])(implicit d: DefaultArrayValue[V]) {
      res.fetch() must matchers.approximately(expected, eps)
      res.fetchBreeze() must matchers.approximately(DenseMatrix.create(res.rows, res.cols, expected), eps)
    }
  }
  /** Asserts that evaluating `f` throws UnsupportedOperationException. */
  object ExceptTester extends ExceptionTester with Matchers
  {
    def matchExceptionThrowing(f: => Any) {
      evaluating{f} must throwA[UnsupportedOperationException]
    }
  }
  // Concrete per-type testers; float/double use a small absolute tolerance.
  object BooleanTester extends EquivalentCuTester[Boolean]
  object IntTester extends EquivalentCuTester[Int]
  object FloatTester extends ApproximateCuTester[Float]
  {
    def eps = 0.0001f
  }
  object DoubleTester extends ApproximateCuTester[Double]
  {
    def eps = 0.0001
  }
  // Implicit instances picked up by the test harness.
  implicit val exceptionTester = ExceptTester
  implicit val intTester = IntTester
  implicit val floatTester = FloatTester
  implicit val booleanTester = BooleanTester
  implicit val doubleTester = DoubleTester
}
| shredzzz/kirkwood | src/test/scala/shredzzz/kirkwood/junit/package.scala | Scala | apache-2.0 | 2,340 |
/*
* Odessa State environmental University
* Copyright (C) 2014
*/
package ua.edu.odeku.ceem.mapRadar.tools.geoName.layer
import gov.nasa.worldwind.geom.Position
import gov.nasa.worldwind.render.{DrawContext, UserFacingText}
import ua.edu.odeku.ceem.mapRadar.tools.geoName.models.GeoName
/**
* User: Aleo Bakalov
* Date: 10.07.2014
* Time: 13:35
*/
class GeoNamesChunk(val geoNames: GeoNames) {
  /** All GeoName rows matching this chunk's country/class/code filter. */
  val geoNameList = ua.edu.odeku.ceem.mapRadar.tools.geoName.models.GeoNames.list(null, geoNames.country, geoNames.geoClass, geoNames.geoCode)

  /** One styled label per GeoName, with visibility and priority computed for
    * the current draw context. */
  def iterable(dc: DrawContext) = geoNameList.map { entry =>
    val position = GeoNamesChunk.mkPosition(entry)
    val label = new UserFacingText(GeoNamesChunk.mkText(entry), position)
    label.setFont(geoNames.font)
    label.setColor(geoNames.color)
    label.setBackgroundColor(geoNames.backgroundColor)
    label.setVisible(GeoNameLayer.isNameVisible(dc, geoNames, position))
    label.setPriority(geoNames.maxDisplayDistance)
    label
  }
}
object GeoNamesChunk {
  /** Display text: "Name" or "Name(Translation)" when a translation exists.
    * Bug fix: the original concatenated the Option itself, producing
    * "Name(Some(Translation))". */
  def mkText(geo: GeoName): String =
    geo.translateName.fold(geo.name)(t => geo.name + '(' + t + ')')

  /** Globe position of the entry at zero elevation. */
  def mkPosition(geo: GeoName) = Position.fromDegrees(geo.lat, geo.lon, 0)
}
| aleo72/ww-ceem-radar | src/main/scala/ua/edu/odeku/ceem/mapRadar/tools/geoName/layer/GeoNamesChunk.scala | Scala | apache-2.0 | 1,237 |
package redis.protocol
import akka.util.ByteString
import java.nio.charset.Charset
import scala.collection.mutable
import scala.Array
import scala.compat.Platform._
/** Encodes Redis protocol (RESP) requests as raw bytes.
  * Byte layout is built manually with a precomputed length and arraycopy,
  * so every offset below must stay in sync with the length calculation. */
object RedisProtocolRequest {
  val UTF8_CHARSET = Charset.forName("UTF-8")
  // RESP line separator: CRLF.
  val LS_STRING = "\\r\\n"
  val LS = LS_STRING.getBytes(UTF8_CHARSET)
  /** Builds a RESP multi-bulk request:
    * `*<argc+1>\r\n$<len>\r\n<command>\r\n` followed by one `$<len>\r\n<arg>\r\n`
    * per argument.
    * NOTE(review): the command's length header and copy both use
    * `command.length` (char count) while the bytes come from a UTF-8 encode —
    * correct only for ASCII command names; confirm callers never pass
    * non-ASCII commands. */
  def multiBulk(command: String, args: Seq[ByteString]): ByteString = {
    // First pass: compute the exact total byte length.
    val argsSizeString = (args.size + 1).toString
    var length: Int = 1 + argsSizeString.length + LS.length
    val cmdLenghtString = command.length.toString
    length += 1 + cmdLenghtString.length + LS.length + command.length + LS.length
    args.foreach(arg => {
      val argLengthString = arg.length.toString
      length += 1 + argLengthString.length + LS.length + arg.length + LS.length
    })
    // Second pass: write each field at offset `i`.
    val bytes: Array[Byte] = new Array(length)
    var i: Int = 0
    bytes.update(i, '*')
    i += 1
    arraycopy(argsSizeString.getBytes(UTF8_CHARSET), 0, bytes, i, argsSizeString.length)
    i += argsSizeString.length
    arraycopy(LS, 0, bytes, i, LS.length)
    i += LS.length
    bytes.update(i, '$')
    i += 1
    arraycopy(cmdLenghtString.getBytes(UTF8_CHARSET), 0, bytes, i, cmdLenghtString.length)
    i += cmdLenghtString.length
    arraycopy(LS, 0, bytes, i, LS.length)
    i += LS.length
    arraycopy(command.getBytes(UTF8_CHARSET), 0, bytes, i, command.length)
    i += command.length
    arraycopy(LS, 0, bytes, i, LS.length)
    i += LS.length
    args.foreach(arg => {
      bytes.update(i, '$')
      i += 1
      val argLengthString = arg.length.toString
      arraycopy(argLengthString.getBytes(UTF8_CHARSET), 0, bytes, i, argLengthString.length)
      i += argLengthString.length
      arraycopy(LS, 0, bytes, i, LS.length)
      i += LS.length
      val argArray = arg.toArray
      arraycopy(argArray, 0, bytes, i, argArray.length)
      i += argArray.length
      arraycopy(LS, 0, bytes, i, LS.length)
      i += LS.length
    })
    ByteString(bytes)
  }
  /** Old-style inline command: the command text terminated by CRLF. */
  def inline(command: String): ByteString = ByteString(command + LS_STRING)
}
| owynrichen/rediscala | src/main/scala/redis/protocol/RedisProtocolRequest.scala | Scala | apache-2.0 | 2,059 |
import sbt._
import sbt.Keys._
// sbt build definition: adds IntegrationTest and EndToEndTest configurations
// next to the standard Test configuration, plus an aggregate task running all
// three. NOTE(review): uses the legacy `<<=` operator — assumes an old sbt
// 0.13.x build; confirm before migrating to `:=`.
object Testing {
import Configs._
// Aggregate task: unit tests, then integration tests, then end-to-end tests.
lazy val testAll = TaskKey[Unit]("test-all")
// Integration tests: run in-process, sequentially, from src/it/scala.
private lazy val itSettings =
inConfig(IntegrationTest)(Defaults.testSettings) ++
Seq(
fork in IntegrationTest := false,
parallelExecution in IntegrationTest := false,
scalaSource in IntegrationTest := baseDirectory.value / "src/it/scala")
// End-to-end tests: run in-process, sequentially, from src/e2e/scala.
private lazy val e2eSettings =
inConfig(EndToEndTest)(Defaults.testSettings) ++
Seq(
fork in EndToEndTest := false,
parallelExecution in EndToEndTest := false,
scalaSource in EndToEndTest := baseDirectory.value / "src/e2e/scala")
// Settings to mix into the project; testAll chains Test -> IntegrationTest -> EndToEndTest.
lazy val settings = itSettings ++ e2eSettings ++ Seq(
testAll <<= (test in EndToEndTest).dependsOn((test in IntegrationTest).dependsOn(test in Test))
)
}
| dodie/time-admin | project/Testing.scala | Scala | apache-2.0 | 807 |
package scalariform.formatter
import scalariform.lexer._
import scalariform.formatter.preferences._
import scala.annotation.tailrec
/** Reformats comment tokens (Scaladoc and plain comments) during formatting. */
trait CommentFormatter { self: HasFormattingPreferences with ScalaFormatter ⇒

// Splits a block comment into its opening delimiter and its content lines,
// stripping each continuation line's leading "* " decoration and at most
// `afterStarSpaces` spaces after the asterisk.
private def getLines(comment: String, afterStarSpaces: Int): (String, List[String]) = {
// `find` is safe here: "/*" is always a prefix of a block-comment token.
val prefix = List("/** ", "/**", "/* ", "/*").find(comment.startsWith).get
val (start, rest) = comment.splitAt(prefix.length)
val (contents, _) = rest.splitAt(rest.length - "*/".length)
// Split on newlines, also consuming each line's leading "*" decoration
// (but not the "*/" closer, guarded by the negative lookahead).
val firstLine :: otherLines = contents.split("""\r?\n([ \t]*(\*(?!/))?)?""", Integer.MAX_VALUE).toList
val initialSpaces = firstLine takeWhile (_.isWhitespace)
val adjustedLines = dropInitialSpaces(firstLine, initialSpaces.size) :: (otherLines map { dropInitialSpaces(_, afterStarSpaces) })
// val adjustedLines map { line ⇒ if (line startsWith "*/") "*" + line else line }
(start, adjustedLines)
}

// Removes up to `maxSpacesToDrop` leading spaces from `s`.
@tailrec
private def dropInitialSpaces(s: String, maxSpacesToDrop: Int): String =
if (maxSpacesToDrop > 0 && s.startsWith(" "))
dropInitialSpaces(s drop 1, maxSpacesToDrop - 1)
else
s

private def removeTrailingWhitespace(s: String) = s.reverse.dropWhile(_.isWhitespace).reverse

// Drops a leading blank line, if any.
private def pruneEmptyInitial(lines: List[String]) = lines match {
case first :: rest if first.trim == "" ⇒ rest
case _ ⇒ lines
}

// Drops a trailing blank line, if any.
private def pruneEmptyFinal(lines: List[String]) = pruneEmptyInitial(lines.reverse).reverse

// Rewraps a multi-line Scaladoc comment: realigns the "*" gutter according
// to the active preferences and re-indents the body. Single-line comments
// are returned untouched.
def formatScaladocComment(comment: HiddenToken, indentLevel: Int): String =
if (comment.rawText contains '\n') {
val alignBeneathSecondAsterisk = formattingPreferences(PlaceScaladocAsterisksBeneathSecondAsterisk)
val startOnFirstLine = formattingPreferences(MultilineScaladocCommentsStartOnFirstLine)
// Comments with right-justified asterisks always get one space. Left-justified asterisks get
// two spaces only if they also start on the first line.
val afterStarSpaces = if (alignBeneathSecondAsterisk || !startOnFirstLine) 1 else 2
val (start, rawLines) = getLines(comment.rawText, afterStarSpaces)
val lines = pruneEmptyFinal(pruneEmptyInitial(rawLines))
val beforeStarSpacesString = if (alignBeneathSecondAsterisk) " " else " "
val afterStarSpacesString = " " * afterStarSpaces
val sb = new StringBuilder(start.trim)
var firstLine = true
for (line ← lines) {
val trimmedLine = removeTrailingWhitespace(line)
if (firstLine && startOnFirstLine) {
if (trimmedLine.nonEmpty)
sb.append(" ").append(trimmedLine)
} else {
sb.append(newlineSequence).indent(indentLevel).append(beforeStarSpacesString).append("*")
if (trimmedLine.nonEmpty)
sb.append(afterStarSpacesString).append(trimmedLine)
}
firstLine = false
}
sb.append(newlineSequence).indent(indentLevel).append(beforeStarSpacesString).append("*/")
sb.toString
} else
comment.rawText

/** Formats a non-Scaladoc comment by trimming trailing whitespace from each line. */
def formatNonScaladocComment(comment: HiddenToken, indentLevel: Int): String = {
comment.rawText.replaceAll("""\s+(\r?\n)""", "$1")
}
}
| jkinkead/scalariform | scalariform/src/main/scala/scalariform/formatter/CommentFormatter.scala | Scala | mit | 3,288 |
package com.seanshubin.contract.test
import java.io.InputStream
import java.net.URL
import java.util
import com.seanshubin.contract.domain.ClassLoaderContract
/** Stub implementation of [[ClassLoaderContract]] in which every member
  * throws `NotImplementedError` (via `???`). Test code mixes this in and
  * overrides only the members a particular test actually exercises, so any
  * unexpected call fails loudly instead of silently succeeding. */
trait ClassLoaderNotImplemented extends ClassLoaderContract {
override def loadClass(name: String): Class[_] = ???

override def getResource(name: String): URL = ???

override def getResources(name: String): util.Enumeration[URL] = ???

override def getSystemResource(name: String): URL = ???

override def getSystemResources(name: String): util.Enumeration[URL] = ???

override def getResourceAsStream(name: String): InputStream = ???

override def getSystemResourceAsStream(name: String): InputStream = ???

override def getParent: ClassLoader = ???

override def getSystemClassLoader: ClassLoader = ???

override def setDefaultAssertionStatus(enabled: Boolean): Unit = ???

override def setPackageAssertionStatus(packageName: String, enabled: Boolean): Unit = ???

override def setClassAssertionStatus(className: String, enabled: Boolean): Unit = ???

override def clearAssertionStatus(): Unit = ???
}
| SeanShubin/contract | test/src/main/scala/com/seanshubin/contract/test/ClassLoaderNotImplemented.scala | Scala | unlicense | 1,093 |
package lila.lobby
import org.joda.time.DateTime
import reactivemongo.bson.{ BSONDocument, BSONInteger, BSONRegex, BSONArray, BSONBoolean }
import reactivemongo.core.commands._
import scala.concurrent.duration._
import actorApi.LobbyUser
import lila.db.BSON.BSONJodaDateTimeHandler
import lila.db.Types.Coll
import lila.memo.AsyncCache
import lila.user.{ User, UserRepo }
// Persistence and querying of correspondence-game "seeks", backed by MongoDB.
// Results are cached for a few seconds to keep lobby rendering cheap; every
// mutation clears the cache.
final class SeekApi(
coll: Coll,
archiveColl: Coll,
blocking: String => Fu[Set[String]],
maxPerPage: Int,
maxPerUser: Int) {

// Cache keys: anonymous visitors share one capped list; logged-in users
// share the full list, which is then filtered per user.
private sealed trait CacheKey

private object ForAnon extends CacheKey

private object ForUser extends CacheKey

// Cursor over all seeks, newest first.
private def allCursor =
coll.find(BSONDocument())
.sort(BSONDocument("createdAt" -> -1))
.cursor[Seek]()

// 3-second async cache keyed by visitor kind.
private val cache = AsyncCache[CacheKey, List[Seek]](
f = {
case ForAnon => allCursor.collect[List](maxPerPage)
case ForUser => allCursor.collect[List]()
},
timeToLive = 3.seconds)

def forAnon = cache(ForAnon)

// Resolves the user's block list before filtering their visible seeks.
def forUser(user: User): Fu[List[Seek]] =
blocking(user.id) flatMap { blocking =>
forUser(LobbyUser.make(user, blocking))
}

// Seeks visible to a user: their own plus those they are allowed to join,
// deduplicated, capped at one page.
def forUser(user: LobbyUser): Fu[List[Seek]] = cache(ForUser) map { seeks =>
val filtered = seeks.filter { seek =>
seek.user.id == user.id || Biter.canJoin(seek, user)
}
noDupsFor(user, filtered) take maxPerPage
}

// Removes duplicate seeks (same variant/days/mode/color/owner), keeping the
// first occurrence; the viewing user's own seeks are never deduplicated.
private def noDupsFor(user: LobbyUser, seeks: List[Seek]) =
seeks.foldLeft(List[Seek]() -> Set[String]()) {
case ((res, h), seek) if seek.user.id == user.id => (seek :: res, h)
case ((res, h), seek) =>
val seekH = List(seek.variant, seek.daysPerTurn, seek.mode, seek.color, seek.user.id) mkString ","
if (h contains seekH) (res, h)
else (seek :: res, h + seekH)
}._1.reverse

def find(id: String): Fu[Option[Seek]] =
coll.find(BSONDocument("_id" -> id)).one[Seek]

// Inserts a seek, then removes the owner's oldest seeks beyond maxPerUser.
def insert(seek: Seek) = coll.insert(seek) >> findByUser(seek.user.id).flatMap {
case seeks if seeks.size <= maxPerUser => funit
case seeks =>
seeks.drop(maxPerUser).map(remove).sequenceFu
} >> cache.clear

// All seeks owned by a user, newest first.
def findByUser(userId: String): Fu[List[Seek]] =
coll.find(BSONDocument("user.id" -> userId))
.sort(BSONDocument("createdAt" -> -1))
.cursor[Seek]().collect[List]()

def remove(seek: Seek) =
coll.remove(BSONDocument("_id" -> seek.id)).void >> cache.clear

// Moves a seek into the archive collection once a game has started from it,
// recording the game id and archive timestamp.
def archive(seek: Seek, gameId: String) = {
val archiveDoc = Seek.seekBSONHandler.write(seek) ++ BSONDocument(
"gameId" -> gameId,
"archivedAt" -> DateTime.now)
coll.remove(BSONDocument("_id" -> seek.id)).void >>
cache.clear >>
archiveColl.insert(archiveDoc)
}

def findArchived(gameId: String): Fu[Option[Seek]] =
archiveColl.find(BSONDocument("gameId" -> gameId)).one[Seek]

// Removes a seek only if it is owned by the given user.
def removeBy(seekId: String, userId: String) =
coll.remove(BSONDocument(
"_id" -> seekId,
"user.id" -> userId
)).void >> cache.clear
}
| JimmyMow/lila | modules/lobby/src/main/SeekApi.scala | Scala | mit | 2,978 |
// There's not a real test here, but on compilation the
// switch should have the cases arranged in order from 1-30.
// Maps each integer in 1..30 to its decimal-string form. The cases are
// deliberately written in shuffled order: the point of this file (see the
// header comment) is that the compiler's emitted lookupswitch sorts them
// 1-30, so the case order below must NOT be "tidied up". Inputs outside
// 1..30 throw a MatchError — the match is intentionally non-exhaustive.
class A {
def f(x: Int) = x match {
case 6 => "6"
case 18 => "18"
case 7 => "7"
case 2 => "2"
case 13 => "13"
case 11 => "11"
case 26 => "26"
case 27 => "27"
case 29 => "29"
case 25 => "25"
case 9 => "9"
case 17 => "17"
case 16 => "16"
case 1 => "1"
case 30 => "30"
case 15 => "15"
case 22 => "22"
case 19 => "19"
case 23 => "23"
case 8 => "8"
case 28 => "28"
case 5 => "5"
case 12 => "12"
case 10 => "10"
case 21 => "21"
case 24 => "24"
case 4 => "4"
case 14 => "14"
case 3 => "3"
case 20 => "20"
}
}
| som-snytt/dotty | tests/pos/lookupswitch.scala | Scala | apache-2.0 | 752 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.integration.torch
import com.intel.analytics.bigdl.nn.Threshold
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.Engine
import scala.math._
@com.intel.analytics.bigdl.tags.Serial
class ThresholdSpec extends TorchSpec {
// Cross-checks BigDL's Threshold layer against the reference Torch
// implementation on a fixed 2x2x2 input: both forward output and backward
// gradient must agree element-wise within 1e-6.
"A Threshold Module " should "generate correct output and grad" in {
torchCheck()
// Threshold layer with threshold 1 and replacement value 0.8.
val module = new Threshold[Double](1, 0.8)
// Fixed input tensor (values straddle the threshold on purpose).
val input = Tensor[Double](2, 2, 2)
input(Array(1, 1, 1)) = -0.89699813351035
input(Array(1, 1, 2)) = 1.8529373928905
input(Array(1, 2, 1)) = 1.8799053365365
input(Array(1, 2, 2)) = 0.076761466450989
input(Array(2, 1, 1)) = 1.8863626234233
input(Array(2, 1, 2)) = 0.73405137099326
input(Array(2, 2, 1)) = 1.3404842875898
input(Array(2, 2, 2)) = -0.64910735283047
// Fixed upstream gradient for the backward pass.
val gradOutput = Tensor[Double](2, 2, 2)
gradOutput(Array(1, 1, 1)) = 0.31924905977212
gradOutput(Array(1, 1, 2)) = 0.22160539613105
gradOutput(Array(1, 2, 1)) = 0.19705923949368
gradOutput(Array(1, 2, 2)) = 0.386440459406
gradOutput(Array(2, 1, 1)) = 0.12920403806493
gradOutput(Array(2, 1, 2)) = 0.7669838971924
gradOutput(Array(2, 2, 1)) = 0.10939974407665
gradOutput(Array(2, 2, 2)) = 0.70845287665725
// BigDL forward/backward, timed for the comparison log below.
val start = System.nanoTime()
val output = module.forward(input)
val gradInput = module.backward(input, gradOutput)
val end = System.nanoTime()
val scalaTime = end - start
// Same computation executed in Torch via the TH bridge.
val code = "output = module:forward(input)\n" +
"gradInput = module:backward(input,gradOutput)"
val (luaTime, torchResult) = TH.run(code, Map("module" -> module, "input" -> input,
"gradOutput" -> gradOutput), Array("output", "gradInput"))
val luaOutput1 = torchResult("output").asInstanceOf[Tensor[Double]]
val luaOutput2 = torchResult("gradInput").asInstanceOf[Tensor[Double]]
// Element-wise comparison within 1e-6 tolerance.
luaOutput1.map(output, (v1, v2) => {
assert(abs(v1 - v2) < 1e-6);
v1
})
luaOutput2.map(gradInput, (v1, v2) => {
assert(abs(v1 - v2) < 1e-6);
v1
})
println("Test case : Threshold, Torch : " + luaTime + " s, Scala : " + scalaTime / 1e9 + " s")
}
}
| zhangxiaoli73/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/integration/torch/ThresholdSpec.scala | Scala | apache-2.0 | 2,769 |
/*
* The MIT License (MIT)
* <p>
* Copyright (c) 2017-2019
* <p>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p>
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* <p>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package io.techcode.streamy.json.component
import akka.NotUsed
import akka.util.ByteString
import com.typesafe.config.ConfigFactory
import io.techcode.streamy.component.TestTransformer
import io.techcode.streamy.event.StreamEvent
import io.techcode.streamy.json.component.JsonTransformer.{Bind, Config, Mode}
import io.techcode.streamy.util.json.{Json, _}
import pureconfig.ConfigSource
import pureconfig.error.ConfigReaderException
/**
* Json transformer spec.
*/
class JsonTransformerSpec extends TestTransformer {

// Config parsing: accepted modes/binds and the error messages for invalid ones.
"Json transformer config" should {
"be build with mode deserialize" in {
ConfigSource.fromConfig(ConfigFactory.parseString("""{"source":"/", "mode":"deserialize"}"""))
.loadOrThrow[JsonTransformer.Config]
}

"be build with mode serialize" in {
ConfigSource.fromConfig(ConfigFactory.parseString("""{"source":"/", "mode":"serialize"}"""))
.loadOrThrow[JsonTransformer.Config]
}

"raise an error with invalid mode" in {
intercept[ConfigReaderException[_]] {
ConfigSource.fromConfig(ConfigFactory.parseString("""{"source":"/", "mode":"invalid"}"""))
.loadOrThrow[JsonTransformer.Config]
}.getMessage() should include("Mode must be either 'serialize' or 'deserialize'")
}

"be build with bind bytes" in {
ConfigSource.fromConfig(ConfigFactory.parseString("""{"source":"/", "bind":"bytes"}"""))
.loadOrThrow[JsonTransformer.Config]
}

"be build with bind string" in {
ConfigSource.fromConfig(ConfigFactory.parseString("""{"source":"/", "bind":"string"}"""))
.loadOrThrow[JsonTransformer.Config]
}

"raise an error with invalid bind" in {
intercept[ConfigReaderException[_]] {
ConfigSource.fromConfig(ConfigFactory.parseString("""{"source":"/", "bind":"invalid"}"""))
.loadOrThrow[JsonTransformer.Config]
}.getMessage() should include("Bind must be either 'bytes' or 'string'")
}
}

// Transformation behavior: each case feeds an input event through a fixture
// transformer (see companion object) and checks the expected output; "skip"
// cases expect the event to pass through unchanged.
"Json transformer" should {
"deserialize correctly a packet inplace from string" in {
except(
JsonTransformerSpec.Transformer.DeserializeSource,
JsonTransformerSpec.Input.DeserializeInplace,
JsonTransformerSpec.Output.DeserializeInplace
)
}

"deserialize correctly a packet with a root target from string" in {
except(
JsonTransformerSpec.Transformer.DeserializeSourceToRoot,
JsonTransformerSpec.Input.DeserializeSourceToRoot,
JsonTransformerSpec.Output.DeserializeSourceToRoot
)
}

"deserialize correctly a packet with a root target equal to an existing field from string" in {
except(
JsonTransformerSpec.Transformer.DeserializeSourceToExistingRoot,
JsonTransformerSpec.Input.DeserializeSourceToExistingRoot,
JsonTransformerSpec.Output.DeserializeSourceToExistingRoot
)
}

"fast skip correctly a packet with an empty source field from string" in {
except(
JsonTransformerSpec.Transformer.DeserializeSource,
JsonTransformerSpec.Input.SkipEmptyStringSource,
JsonTransformerSpec.Input.SkipEmptyStringSource
)
}

"fast skip correctly a packet with a wrong source field from string" in {
except(
JsonTransformerSpec.Transformer.DeserializeSource,
JsonTransformerSpec.Input.SkipStringSource,
JsonTransformerSpec.Input.SkipStringSource
)
}

"skip correctly a packet with a wrong source field from string" in {
except(
JsonTransformerSpec.Transformer.DeserializeSource,
JsonTransformerSpec.Input.SkipWrongJsonSource,
JsonTransformerSpec.Input.SkipWrongJsonSource
)
}

"deserialize correctly a packet inplace from bytestring" in {
except(
JsonTransformerSpec.Transformer.DeserializeSourceBytes,
JsonTransformerSpec.Input.DeserializeInplaceByteString,
JsonTransformerSpec.Output.DeserializeInplace
)
}

"deserialize correctly a packet with a root target from bytestring" in {
except(
JsonTransformerSpec.Transformer.DeserializeSourceToRootBytes,
JsonTransformerSpec.Input.DeserializeSourceToRootByteString,
JsonTransformerSpec.Output.DeserializeSourceToRootByteString
)
}

"deserialize correctly a packet with a root target equal to an existing field from bytestring" in {
except(
JsonTransformerSpec.Transformer.DeserializeSourceToExistingRootBytes,
JsonTransformerSpec.Input.DeserializeSourceToExistingRootByteString,
JsonTransformerSpec.Output.DeserializeSourceToExistingRoot
)
}

"fast skip correctly a packet with an empty source field from bytestring" in {
except(
JsonTransformerSpec.Transformer.DeserializeSourceBytes,
JsonTransformerSpec.Input.SkipEmptyByteStringSource,
JsonTransformerSpec.Input.SkipEmptyByteStringSource
)
}

"fast skip correctly a packet with a wrong source field from bytestring" in {
except(
JsonTransformerSpec.Transformer.DeserializeSourceBytes,
JsonTransformerSpec.Input.SkipByteStringSource,
JsonTransformerSpec.Input.SkipByteStringSource
)
}

"skip correctly a packet with a wrong source field from bytestring" in {
except(
JsonTransformerSpec.Transformer.DeserializeSourceBytes,
JsonTransformerSpec.Input.SkipWrongJsonByteStringSource,
JsonTransformerSpec.Input.SkipWrongJsonByteStringSource
)
}

"serialize correctly a packet inplace" in {
except(
JsonTransformerSpec.Transformer.SerializeSource,
JsonTransformerSpec.Input.SerializeInplace,
JsonTransformerSpec.Output.SerializeInplace
)
}

"serialize correctly a packet inplace with bytes input" in {
except(
JsonTransformerSpec.Transformer.SerializeSourceBytes,
JsonTransformerSpec.Input.SerializeInplace,
JsonTransformerSpec.Output.SerializeInplaceBytes
)
}

"serialize correctly a packet with a root source" in {
except(
JsonTransformerSpec.Transformer.SerializeRootToTarget,
JsonTransformerSpec.Input.SerializeRootToTarget,
JsonTransformerSpec.Output.SerializeRootToTarget
)
}

"serialize correctly a packet with a root target equal to an existing field" in {
except(
JsonTransformerSpec.Transformer.SerializeRootToExistingTarget,
JsonTransformerSpec.Input.SerializeRootToExistingTarget,
JsonTransformerSpec.Output.SerializeRootToExistingTarget
)
}

"handle correctly unexpected input when expected string" in {
except(
JsonTransformerSpec.Transformer.WrongTypeString,
JsonTransformerSpec.Input.WrongType,
JsonTransformerSpec.Input.WrongType
)
}

"handle correctly unexpected input when expected bytes" in {
except(
JsonTransformerSpec.Transformer.WrongTypeBytes,
JsonTransformerSpec.Input.WrongType,
JsonTransformerSpec.Input.WrongType
)
}
}
}
object JsonTransformerSpec {
// Input fixtures: events fed into the transformers under test. "Skip"
// inputs are invalid or empty payloads that must pass through unchanged.
object Input {
val SkipEmptyStringSource: StreamEvent = StreamEvent(Json.obj("message" -> ""))
val SkipStringSource: StreamEvent = StreamEvent(Json.obj("message" -> "foobar"))
val SkipWrongJsonSource: StreamEvent = StreamEvent(Json.obj("message" -> "{foobar}"))
val SkipEmptyByteStringSource: StreamEvent = StreamEvent(Json.obj("message" -> ByteString()))
val SkipByteStringSource: StreamEvent = StreamEvent(Json.obj("message" -> ByteString("foobar")))
val SkipWrongJsonByteStringSource: StreamEvent = StreamEvent(Json.obj("message" -> ByteString("{foobar}")))
val DeserializeInplace: StreamEvent = StreamEvent(Json.obj("message" -> """{"message":"foobar"}"""))
val DeserializeSourceToRoot: StreamEvent = StreamEvent(Json.obj("message" -> """{"test":"foobar"}"""))
val DeserializeSourceToExistingRoot: StreamEvent = StreamEvent(Json.obj("message" -> """{"message":"foobar"}"""))
val DeserializeInplaceByteString: StreamEvent = StreamEvent(Json.obj("message" -> ByteString("""{"message":"foobar"}""")))
val DeserializeSourceToRootByteString: StreamEvent = StreamEvent(Json.obj("message" -> ByteString("""{"test":"foobar"}""")))
val DeserializeSourceToExistingRootByteString: StreamEvent = StreamEvent(Json.obj("message" -> ByteString("""{"message":"foobar"}""")))
val SerializeInplace: StreamEvent = StreamEvent(Json.obj("message" -> Json.obj("message" -> "foobar")))
val SerializeRootToTarget: StreamEvent = StreamEvent(Json.obj("test" -> "foobar"))
val SerializeRootToExistingTarget: StreamEvent = StreamEvent(Json.obj("test" -> "foobar"))
val WrongType: StreamEvent = StreamEvent(Json.obj("test" -> 10))
}
// Transformer fixtures: one pre-configured JsonTransformer per tested
// combination of source/target path, mode, and bind.
object Transformer {
val DeserializeSource = JsonTransformer[NotUsed](Config(Root / "message", mode = Mode.Deserialize))
val DeserializeSourceBytes = JsonTransformer[NotUsed](Config(Root / "message", mode = Mode.Deserialize, bind = Bind.Bytes))
val DeserializeSourceToRoot = JsonTransformer[NotUsed](Config(Root / "message", Some(Root), mode = Mode.Deserialize))
val DeserializeSourceToRootBytes = JsonTransformer[NotUsed](Config(Root / "message", Some(Root), mode = Mode.Deserialize, bind = Bind.Bytes))
val DeserializeSourceToExistingRoot = JsonTransformer[NotUsed](Config(Root / "message", Some(Root), mode = Mode.Deserialize))
val DeserializeSourceToExistingRootBytes = JsonTransformer[NotUsed](Config(Root / "message", Some(Root), mode = Mode.Deserialize, bind = Bind.Bytes))
val SerializeSource = JsonTransformer[NotUsed](Config(Root / "message", mode = Mode.Serialize))
val SerializeSourceBytes = JsonTransformer[NotUsed](Config(Root / "message", mode = Mode.Serialize, bind = Bind.Bytes))
val SerializeRootToTarget = JsonTransformer[NotUsed](Config(Root, Some(Root / "message"), mode = Mode.Serialize))
val SerializeRootToExistingTarget = JsonTransformer[NotUsed](Config(Root, Some(Root / "test"), mode = Mode.Serialize))
val WrongTypeString = JsonTransformer[NotUsed](Config(Root, Some(Root / "test"), mode = Mode.Deserialize))
val WrongTypeBytes = JsonTransformer[NotUsed](Config(Root, Some(Root / "test"), mode = Mode.Deserialize, bind = Bind.Bytes))
}
// Output fixtures: the events expected after each transformation.
object Output {
val DeserializeInplace: StreamEvent = StreamEvent(Json.obj("message" -> Json.obj("message" -> "foobar")))
val DeserializeSourceToRoot: StreamEvent = StreamEvent(Json.obj(
"message" -> """{"test":"foobar"}""",
"test" -> "foobar"
))
val DeserializeSourceToRootByteString: StreamEvent = StreamEvent(Json.obj(
"message" -> ByteString("""{"test":"foobar"}"""),
"test" -> "foobar"
))
val DeserializeSourceToExistingRoot: StreamEvent = StreamEvent(Json.obj("message" -> "foobar"))
val SerializeInplace: StreamEvent = StreamEvent(Json.obj("message" -> """{"message":"foobar"}"""))
val SerializeInplaceBytes: StreamEvent = StreamEvent(Json.obj("message" -> ByteString("""{"message":"foobar"}""")))
val SerializeRootToTarget: StreamEvent = StreamEvent(Json.obj(
"message" -> """{"test":"foobar"}""",
"test" -> "foobar"
))
val SerializeRootToExistingTarget: StreamEvent = StreamEvent(Json.obj("test" -> """{"test":"foobar"}"""))
}
} | amannocci/streamy | plugin-json/src/test/scala/io/techcode/streamy/json/component/JsonTransformerSpec.scala | Scala | mit | 12,534 |
/* Copyright 2009-2016 EPFL, Lausanne */
package leon
package termination
import purescala.Expressions._
import purescala.Common._
import purescala.Definitions._
import scala.concurrent.duration._
import leon.solvers._
// One stage of the termination-checking pipeline: given a Problem (a set of
// functions to analyze), it may produce termination Results for them.
trait Processor {

// Human-readable name used in reports and debugging output.
val name: String

val checker : TerminationChecker

implicit val debugSection = utils.DebugSectionTermination

val reporter = checker.context.reporter

// Returns Some(results) when this processor can decide (part of) the
// problem, None when it cannot and the next processor should be tried.
def run(problem: Problem): Option[Seq[Result]]
}
/** Mixin giving a termination processor access to an SMT solver. All queries
  * run with the postconditions of not-yet-proven-terminating functions
  * temporarily stripped, so the solver cannot rely on unproven contracts. */
trait Solvable extends Processor {

  val modules: Strengthener with StructuralSize

  // Short-timeout solver shared by every query of this processor.
  private val solver: SolverFactory[Solver] =
    SolverFactory.getFromSettings(checker.context, checker.program).withTimeout(1.seconds)

  type Solution = (Option[Boolean], Map[Identifier, Expr])

  /** Runs `block` with the postconditions of all functions whose termination
    * is not yet guaranteed temporarily removed, restoring them afterwards.
    *
    * Fix: the restore now happens in a `finally` clause. Previously it only
    * ran on normal completion, so an exception thrown by `block` (e.g. from
    * the solver) would leave the program's function definitions permanently
    * stripped of their postconditions.
    */
  private def withoutPosts[T](block: => T): T = {
    val dangerousFunDefs = checker.functions.filter(fd => !checker.terminates(fd).isGuaranteed)
    val backups = dangerousFunDefs.toList map { fd =>
      val p = fd.postcondition
      fd.postcondition = None
      () => fd.postcondition = p
    }
    try block finally backups.foreach(_())
  }

  /** True if `problem` may be satisfiable; an unknown solver answer (e.g. on
    * timeout) is conservatively treated as satisfiable. */
  def maybeSAT(problem: Expr): Boolean = withoutPosts {
    SimpleSolverAPI(solver).solveSAT(problem)._1 getOrElse true
  }

  /** True only if the solver definitively proves `problem` holds universally
    * (i.e. its negation is unsatisfiable); unknown answers yield false. */
  def definitiveALL(problem: Expr): Boolean = withoutPosts {
    SimpleSolverAPI(solver).solveSAT(Not(problem))._1.exists(!_)
  }

  /** Returns a satisfying model if the solver definitively finds `problem`
    * satisfiable, None on unsat or unknown. */
  def definitiveSATwithModel(problem: Expr): Option[Model] = withoutPosts {
    val (sat, model) = SimpleSolverAPI(solver).solveSAT(problem)
    if (sat.isDefined && sat.get) Some(model) else None
  }
}
| epfl-lara/leon | src/main/scala/leon/termination/Processor.scala | Scala | gpl-3.0 | 1,584 |
/*start*/Set(1, 2, "")/*end*/
//Set[Any] | ilinum/intellij-scala | testdata/typeInference/bugs2/SCL2024C.scala | Scala | apache-2.0 | 40 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.rdd.variant
import htsjdk.variant.vcf.{ VCFHeader, VCFHeaderLine }
import org.apache.hadoop.fs.Path
import org.apache.parquet.hadoop.metadata.CompressionCodecName
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{ Dataset, SQLContext }
import org.bdgenomics.adam.converters.DefaultHeaderLines
import org.bdgenomics.adam.models.{
ReferenceRegion,
ReferenceRegionSerializer,
SequenceDictionary,
VariantContext
}
import org.bdgenomics.adam.rdd.ADAMContext._
import org.bdgenomics.adam.rdd.{
AvroGenomicRDD,
VCFHeaderUtils
}
import org.bdgenomics.adam.serialization.AvroSerializer
import org.bdgenomics.adam.sql.{ Variant => VariantProduct }
import org.bdgenomics.formats.avro.{ Sample, Variant }
import org.bdgenomics.utils.interval.array.{
IntervalArray,
IntervalArraySerializer
}
import scala.collection.JavaConversions._
import scala.reflect.ClassTag
import scala.reflect.runtime.universe._
// Sorted interval array of (region, variant) pairs; the concrete
// IntervalArray used when broadcasting variants for region joins.
private[adam] case class VariantArray(
array: Array[(ReferenceRegion, Variant)],
maxIntervalWidth: Long) extends IntervalArray[ReferenceRegion, Variant] {

// Returns a copy of this interval array (backing array is shared).
def duplicate(): IntervalArray[ReferenceRegion, Variant] = {
copy()
}

// Rebuilds the interval array around a replacement backing array and width.
protected def replace(arr: Array[(ReferenceRegion, Variant)],
maxWidth: Long): IntervalArray[ReferenceRegion, Variant] = {
VariantArray(arr, maxWidth)
}
}
// Kryo serializer for VariantArray: regions use the dedicated region
// serializer, variants go through Avro.
private[adam] class VariantArraySerializer extends IntervalArraySerializer[ReferenceRegion, Variant, VariantArray] {

protected val kSerializer = new ReferenceRegionSerializer
protected val tSerializer = new AvroSerializer[Variant]

// Reconstructs a VariantArray from its deserialized components.
protected def builder(arr: Array[(ReferenceRegion, Variant)],
maxIntervalWidth: Long): VariantArray = {
VariantArray(arr, maxIntervalWidth)
}
}
// Factory methods building the RDD-bound and Dataset-bound concrete
// implementations of VariantRDD.
object VariantRDD extends Serializable {

/**
* Builds a VariantRDD without a partition map.
*
* @param rdd The underlying Variant RDD.
* @param sequences The sequence dictionary for the RDD.
* @param headerLines The header lines for the RDD.
* @return A new Variant RDD.
*/
def apply(rdd: RDD[Variant],
sequences: SequenceDictionary,
headerLines: Seq[VCFHeaderLine] = DefaultHeaderLines.allHeaderLines): VariantRDD = {
new RDDBoundVariantRDD(rdd, sequences, headerLines, None)
}

/**
* An dataset containing variants called against a given reference genome.
*
* @param ds Variants.
* @param sequences A dictionary describing the reference genome.
* @param headerLines The VCF header lines that cover all INFO/FORMAT fields
* needed to represent this RDD of Variants.
* @return A new Variant RDD backed by the given Dataset.
*/
def apply(ds: Dataset[VariantProduct],
sequences: SequenceDictionary,
headerLines: Seq[VCFHeaderLine]): VariantRDD = {
new DatasetBoundVariantRDD(ds, sequences, headerLines)
}
}
// VariantRDD backed by a Parquet file on disk: both the RDD and the Dataset
// views are materialized lazily on first access.
case class ParquetUnboundVariantRDD private[rdd] (
@transient private val sc: SparkContext,
private val parquetFilename: String,
sequences: SequenceDictionary,
@transient headerLines: Seq[VCFHeaderLine]) extends VariantRDD {

// Lazily loads the variants from Parquet as an RDD.
lazy val rdd: RDD[Variant] = {
sc.loadParquet(parquetFilename)
}

// Partition map persisted next to the Parquet data, if present.
protected lazy val optPartitionMap = sc.extractPartitionMap(parquetFilename)

// Lazily loads the variants from Parquet as a typed Dataset.
lazy val dataset = {
val sqlContext = SQLContext.getOrCreate(sc)
import sqlContext.implicits._
sqlContext.read.parquet(parquetFilename).as[VariantProduct]
}

// Returns a copy with a replacement sequence dictionary.
def replaceSequences(
newSequences: SequenceDictionary): VariantRDD = {
copy(sequences = newSequences)
}

// Returns a copy with replacement VCF header lines.
def replaceHeaderLines(newHeaderLines: Seq[VCFHeaderLine]): VariantRDD = {
copy(headerLines = newHeaderLines)
}
}
// VariantRDD backed by a Spark SQL Dataset; Dataset-level operations (save,
// transform) bypass the RDD API where possible.
case class DatasetBoundVariantRDD private[rdd] (
dataset: Dataset[VariantProduct],
sequences: SequenceDictionary,
@transient headerLines: Seq[VCFHeaderLine] = DefaultHeaderLines.allHeaderLines) extends VariantRDD {

// Dataset-bound RDDs carry no partition map.
protected lazy val optPartitionMap = None

// RDD view derived from the Dataset by converting back to Avro records.
lazy val rdd = dataset.rdd.map(_.toAvro)

// Saves via the Spark SQL Parquet writer; only the compression codec among
// the Parquet options is honored (block/page size and dictionary encoding
// are ignored, see the warning).
override def saveAsParquet(filePath: String,
blockSize: Int = 128 * 1024 * 1024,
pageSize: Int = 1 * 1024 * 1024,
compressCodec: CompressionCodecName = CompressionCodecName.GZIP,
disableDictionaryEncoding: Boolean = false) {
log.warn("Saving directly as Parquet from SQL. Options other than compression codec are ignored.")
dataset.toDF()
.write
.format("parquet")
.option("spark.sql.parquet.compression.codec", compressCodec.toString.toLowerCase())
.save(filePath)
saveMetadata(filePath)
}

// Applies a Dataset transformation without converting to an RDD.
override def transformDataset(
tFn: Dataset[VariantProduct] => Dataset[VariantProduct]): VariantRDD = {
copy(dataset = tFn(dataset))
}

// Returns a copy with a replacement sequence dictionary.
def replaceSequences(
newSequences: SequenceDictionary): VariantRDD = {
copy(sequences = newSequences)
}

// Returns a copy with replacement VCF header lines.
def replaceHeaderLines(newHeaderLines: Seq[VCFHeaderLine]): VariantRDD = {
copy(headerLines = newHeaderLines)
}
}
// VariantRDD backed by an RDD of Avro Variant records; the Dataset view is
// derived lazily from the RDD.
case class RDDBoundVariantRDD private[rdd] (
rdd: RDD[Variant],
sequences: SequenceDictionary,
@transient headerLines: Seq[VCFHeaderLine] = DefaultHeaderLines.allHeaderLines,
optPartitionMap: Option[Array[Option[(ReferenceRegion, ReferenceRegion)]]] = None) extends VariantRDD {

/**
* A SQL Dataset of reads, built lazily from the underlying RDD.
*/
lazy val dataset: Dataset[VariantProduct] = {
val sqlContext = SQLContext.getOrCreate(rdd.context)
import sqlContext.implicits._
sqlContext.createDataset(rdd.map(VariantProduct.fromAvro))
}

// Returns a copy with a replacement sequence dictionary.
def replaceSequences(
newSequences: SequenceDictionary): VariantRDD = {
copy(sequences = newSequences)
}

// Returns a copy with replacement VCF header lines.
def replaceHeaderLines(newHeaderLines: Seq[VCFHeaderLine]): VariantRDD = {
copy(headerLines = newHeaderLines)
}
}
// Genomic RDD of variants. Concrete representations are either Dataset-bound
// or RDD-bound; both carry a sequence dictionary and VCF header lines.
sealed abstract class VariantRDD extends AvroGenomicRDD[Variant, VariantProduct, VariantRDD] {
  // Type tag used by Spark SQL to derive an encoder for VariantProduct.
  @transient val uTag: TypeTag[VariantProduct] = typeTag[VariantProduct]
  // VCF header lines describing the fields attached to these variants.
  val headerLines: Seq[VCFHeaderLine]
  /**
   * Replaces the header lines attached to this RDD.
   *
   * @param newHeaderLines The new header lines to attach to this RDD.
   * @return A new RDD with the header lines replaced.
   */
  def replaceHeaderLines(newHeaderLines: Seq[VCFHeaderLine]): VariantRDD
  /**
   * Appends new header lines to the existing lines.
   *
   * @param headerLinesToAdd Zero or more header lines to add.
   * @return A new RDD with the new header lines added.
   */
  def addHeaderLines(headerLinesToAdd: Seq[VCFHeaderLine]): VariantRDD = {
    replaceHeaderLines(headerLines ++ headerLinesToAdd)
  }
  /**
   * Appends a new header line to the existing lines.
   *
   * @param headerLineToAdd A header line to add.
   * @return A new RDD with the new header line added.
   */
  def addHeaderLine(headerLineToAdd: VCFHeaderLine): VariantRDD = {
    addHeaderLines(Seq(headerLineToAdd))
  }
  /**
   * Save the VCF headers to disk.
   *
   * @param filePath The filepath to the file where we will save the VCF headers.
   */
  def saveVcfHeaders(filePath: String): Unit = {
    // write vcf headers to file
    VCFHeaderUtils.write(new VCFHeader(headerLines.toSet),
      new Path("%s/_header".format(filePath)),
      rdd.context.hadoopConfiguration,
      false,
      false)
  }
  // Persists the partition map, sequence dictionary and VCF headers alongside
  // the saved data.
  override protected def saveMetadata(filePath: String): Unit = {
    savePartitionMap(filePath)
    saveSequences(filePath)
    saveVcfHeaders(filePath)
  }
  // Builds an interval array over variants, used for region-based joins.
  protected def buildTree(rdd: RDD[(ReferenceRegion, Variant)])(
    implicit tTag: ClassTag[Variant]): IntervalArray[ReferenceRegion, Variant] = {
    IntervalArray(rdd, VariantArray.apply(_, _))
  }
  // Unions this RDD with the given RDDs, merging sequence dictionaries and
  // deduplicating header lines.
  def union(rdds: VariantRDD*): VariantRDD = {
    val iterableRdds = rdds.toSeq
    VariantRDD(rdd.context.union(rdd, iterableRdds.map(_.rdd): _*),
      iterableRdds.map(_.sequences).fold(sequences)(_ ++ _),
      (headerLines ++ iterableRdds.flatMap(_.headerLines)).distinct)
  }
  /**
   * Applies a function that transforms the underlying RDD into a new RDD using
   * the Spark SQL API.
   *
   * @param tFn A function that transforms the underlying RDD as a Dataset.
   * @return A new RDD where the RDD of genomic data has been replaced, but the
   *   metadata (sequence dictionary, and etc) is copied without modification.
   */
  def transformDataset(
    tFn: Dataset[VariantProduct] => Dataset[VariantProduct]): VariantRDD = {
    DatasetBoundVariantRDD(tFn(dataset), sequences, headerLines)
  }
  /**
   * @return Returns this VariantRDD as a VariantContextRDD.
   */
  def toVariantContextRDD(): VariantContextRDD = {
    VariantContextRDD(rdd.map(VariantContext(_)),
      sequences,
      Seq.empty[Sample],
      headerLines,
      optPartitionMap = optPartitionMap)
  }
  /**
   * @param newRdd An RDD to replace the underlying RDD with.
   * @return Returns a new VariantRDD with the underlying RDD replaced.
   */
  protected def replaceRdd(newRdd: RDD[Variant],
                           newPartitionMap: Option[Array[Option[(ReferenceRegion, ReferenceRegion)]]] = None): VariantRDD = {
    RDDBoundVariantRDD(newRdd, sequences, headerLines, newPartitionMap)
  }
  /**
   * @param elem The variant to get a reference region for.
   * @return Returns the singular region this variant covers.
   */
  protected def getReferenceRegions(elem: Variant): Seq[ReferenceRegion] = {
    Seq(ReferenceRegion(elem))
  }
}
| laserson/adam | adam-core/src/main/scala/org/bdgenomics/adam/rdd/variant/VariantRDD.scala | Scala | apache-2.0 | 10,222 |
import sbt._
object Dependencies extends Build {

  // Versions shared by several artifacts below.
  val sprayVersion = "1.2-RC2"
  val akkaVersion = "2.2.3"
  val json4sVersion = "3.2.5"

  // Extra resolvers needed on top of the default Maven/Ivy repositories.
  val resolutionRepos = Seq(
    "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/",
    "spray repo" at "http://repo.spray.io/",
    "Mark Schaake" at "http://markschaake.github.com/snapshots",
    "Sourceforge Releases" at "https://oss.sonatype.org/content/repositories/sourceforge-releases"
  )

  // spray (HTTP toolkit)
  val sprayCan = "io.spray" % "spray-can" % sprayVersion
  val sprayRouting = "io.spray" % "spray-routing" % sprayVersion
  val sprayTestkit = "io.spray" % "spray-testkit" % sprayVersion
  val sprayClient = "io.spray" % "spray-client" % sprayVersion

  // logging
  val slf4j = "org.slf4j" % "slf4j-api" % "1.7.5"
  val logback = "ch.qos.logback" % "logback-classic" % "1.0.13"

  // akka
  val akkaActor = "com.typesafe.akka" %% "akka-actor" % akkaVersion
  val akkaAgent = "com.typesafe.akka" %% "akka-agent" % akkaVersion
  val akkaSlf4j = "com.typesafe.akka" %% "akka-slf4j" % akkaVersion
  val akkaTestkit = "com.typesafe.akka" %% "akka-testkit" % akkaVersion

  // testing
  val scalaTest = "org.scalatest" %% "scalatest" % "2.0"

  // JSON
  val sprayJson = "io.spray" %% "spray-json" % "1.2.5"
  val json4sNative = "org.json4s" %% "json4s-native" % json4sVersion
  val json4sJackson = "org.json4s" %% "json4s-jackson" % json4sVersion
  val scalaDynamicJson = "com.lucho" %% "scala-dynamic-json" % "0.2"

  // Scopes a set of modules under a single ivy configuration.
  private def scoped(configuration: String)(deps: Seq[ModuleID]): Seq[ModuleID] =
    deps.map(_ % configuration)

  def compile(deps: ModuleID*): Seq[ModuleID] = scoped("compile")(deps)
  def provided(deps: ModuleID*): Seq[ModuleID] = scoped("provided")(deps)
  def test(deps: ModuleID*): Seq[ModuleID] = scoped("test")(deps)
  def runtime(deps: ModuleID*): Seq[ModuleID] = scoped("runtime")(deps)
  def container(deps: ModuleID*): Seq[ModuleID] = scoped("container")(deps)
}
| lukiano/scala-dynamic-json | example/project/Dependencies.scala | Scala | mit | 2,268 |
package org.firesocks
import java.nio.charset.Charset
import java.net.{URI, InetSocketAddress, InetAddress}
import akka.util.{Timeout, ByteIterator}
import scala.concurrent.Await
import scala.concurrent.duration._
import akka.actor.ActorRef
import akka.pattern.ask
import language.reflectiveCalls
import language.postfixOps
package object lang {

  /** UTF-8 charset instance shared across the project. */
  val CHARSET_UTF8 = Charset.forName("UTF-8")

  /**
   * Runs `code` and silently discards any `Exception` it throws.
   * Errors (e.g. OutOfMemoryError) still propagate.
   */
  def quietly[R](code: => R): Unit = {
    try {
      code
    }
    catch {
      case _: Exception => // intentionally ignored
    }
  }

  /**
   * Loan pattern: runs `process` on `closable` and closes it afterwards,
   * even if `process` throws.
   *
   * NOTE: the structural type incurs reflection at the `close()` call site.
   */
  def using[C <: {def close(): Unit}, R](closable: C)(process: C => R): R = {
    try {
      process(closable)
    }
    finally {
      closable.close()
    }
  }

  /** Enables `value as { v => ... }` — applies `code` to the wrapped value. */
  implicit class AsWord[T](value: T) {
    def as[R](code: T => R): R = code(value)
  }

  /** Convenience for reading a fixed number of bytes from a ByteIterator. */
  implicit class ByteStringIteratorEx(itr: ByteIterator) {
    def getBytes(n: Int): Array[Byte] = {
      val rv = new Array[Byte](n)
      itr.getBytes(rv)
      rv
    }
  }

  /**
   * Tuple-returning draws from an iterator. Scala evaluates tuple arguments
   * left-to-right, so elements appear in iteration order.
   */
  implicit class ItrEx[T](itr: Iterator[T]) {
    def next2(): (T, T) = (itr.next(), itr.next())
    def next3(): (T, T, T) = (itr.next(), itr.next(), itr.next())
    def next4(): (T, T, T, T) = (itr.next(), itr.next(), itr.next(), itr.next())
    def next5(): (T, T, T, T, T) =
      (itr.next(), itr.next(), itr.next(), itr.next(), itr.next())
    def next6(): (T, T, T, T, T, T) =
      (itr.next(), itr.next(), itr.next(), itr.next(), itr.next(), itr.next())
    def next7(): (T, T, T, T, T, T, T) =
      (itr.next(), itr.next(), itr.next(), itr.next(), itr.next(), itr.next(),
        itr.next())
    def next8(): (T, T, T, T, T, T, T, T) =
      (itr.next(), itr.next(), itr.next(), itr.next(), itr.next(), itr.next(),
        itr.next(), itr.next())
  }

  /**
   * Synchronous ask: `actor ?! msg` sends `msg` and BLOCKS the calling
   * thread up to `timeout` for the reply. Throws TimeoutException on expiry.
   */
  implicit class AskEx(actor: ActorRef)(implicit timeout: FiniteDuration) {
    private implicit val ASK_TIMEOUT: Timeout = timeout
    def ?! (message: Any): Any = {
      val f = actor ? message
      Await.result(f, timeout)
    }
  }

  /**
   * Builds a valid actor name from arbitrary tokens; '/' is replaced by ';'
   * in addresses and URIs since '/' is the actor-path separator.
   */
  def mkActorName(token: Any*): String = token.map {
    case c: Class[_] => c.getSimpleName
    case a: ActorRef => a.path.name
    case a: InetAddress => a.toString.replace('/', ';')
    case a: InetSocketAddress => a.toString.replace('/', ';')
    case u: URI => u.toString.replace('/', ';')
    case s => s.toString
  }.mkString
}
| fa08c/firesocks | modules/core/src/main/scala/org/firesocks/lang/package.scala | Scala | mit | 2,294 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iht.controllers.application.assets.insurancePolicy
import iht.config.AppConfig
import iht.controllers.application.ApplicationControllerTest
import iht.models.application.assets.InsurancePolicy
import iht.testhelpers.{CommonBuilder, ContentChecker, TestHelper}
import iht.utils.DeceasedInfoHelper
import iht.views.html.application.asset.insurancePolicy.insurance_policies_overview
import org.mockito.ArgumentMatchers._
import org.mockito.Mockito._
import play.api.mvc.MessagesControllerComponents
import play.api.test.Helpers._
import uk.gov.hmrc.play.bootstrap.frontend.controller.FrontendController
import scala.concurrent.Future
/**
*
* Created by Yasar Acar on 18/02/16.
*
*/
// Unit tests for InsurancePolicyOverviewController: verifies the overview
// page renders every insurance-policy question with the deceased's name.
class InsurancePolicyOverviewControllerTest extends ApplicationControllerTest {
  // Controller wired against the shared mock components from the base test.
  protected abstract class TestController extends FrontendController(mockControllerComponents) with InsurancePolicyOverviewController {
    override val cc: MessagesControllerComponents = mockControllerComponents
    override implicit val appConfig: AppConfig = mockAppConfig
    override val insurancePoliciesOverviewView: insurance_policies_overview = app.injector.instanceOf[insurance_policies_overview]
  }
  // Fresh controller instance per call, with mocked connectors.
  def insurancePolicyOverviewController = new TestController {
    override val authConnector = mockAuthConnector
    override val cachingConnector = mockCachingConnector
    override val ihtConnector = mockIhtConnector
  }
  // Registration fixture with deceased details and a fixed IHT reference.
  val registrationDetails = CommonBuilder.buildRegistrationDetails copy(
    deceasedDetails = Some(CommonBuilder.buildDeceasedDetails),
    ihtReference = Some("ABC123"))
  lazy val deceasedName = DeceasedInfoHelper.getDeceasedNameOrDefaultString(registrationDetails)
  // Assets fixture with every insurance-policy question answered.
  val allAssets = CommonBuilder.buildAllAssets copy (insurancePolicy = Some(InsurancePolicy(
    isAnnuitiesBought = Some(true),
    isInsurancePremiumsPayedForSomeoneElse = Some(true),
    value = Some(BigDecimal(7)),
    shareValue = Some(BigDecimal(8)),
    policyInDeceasedName = Some(true),
    isJointlyOwned = Some(true),
    isInTrust = Some(false),
    coveredByExemption = Some(true),
    sevenYearsBefore = Some(true),
    moreThanMaxValue = Some(false)
  )))
  val applicationDetails = CommonBuilder.buildApplicationDetails copy (allAssets = Some(allAssets))
  "InsurancePolicyOverviewController" must {
    "respond with OK and all questions on page load" in {
      // Stub the cached registration and stored application details.
      when(mockCachingConnector.getRegistrationDetails(any(), any()))
        .thenReturn(Future.successful(Some(registrationDetails)))
      when(mockIhtConnector.getApplication(any(), any(), any())(any()))
        .thenReturn(Future.successful(Some(applicationDetails)))
      val result = insurancePolicyOverviewController.onPageLoad(createFakeRequest())
      status(result) mustBe OK
      // Every question must be rendered, personalised with the deceased's name.
      ContentChecker.stripLineBreaks(contentAsString(result)) must include(messagesApi("iht.estateReport.insurancePolicies.jointlyHeld.question", deceasedName))
      ContentChecker.stripLineBreaks(contentAsString(result)) must include(messagesApi("iht.estateReport.assets.insurancePolicies.totalValueOfDeceasedsShare", deceasedName))
      ContentChecker.stripLineBreaks(contentAsString(result)) must include(messagesApi("iht.estateReport.insurancePolicies.ownName.question", deceasedName))
      ContentChecker.stripLineBreaks(contentAsString(result)) must include(messagesApi("iht.estateReport.assets.insurancePolicies.totalValueOwnedAndPayingOut", deceasedName))
      ContentChecker.stripLineBreaks(contentAsString(result)) must include(messagesApi("iht.estateReport.insurancePolicies.premiumsNotPayingOut.question", deceasedName))
      ContentChecker.stripLineBreaks(contentAsString(result)) must include(messagesApi("iht.estateReport.insurancePolicies.overLimitNotOwnEstate.question", deceasedName))
      ContentChecker.stripLineBreaks(contentAsString(result)) must include(messagesApi("iht.estateReport.assets.insurancePolicies.buyAnnuity.question", deceasedName))
      ContentChecker.stripLineBreaks(contentAsString(result)) must include(messagesApi("page.iht.application.assets.insurance.policies.overview.other.question4", deceasedName))
    }
    "respond with OK and correct question1 text on page load if deceased not married" in {
      // Same flow but with a single (unmarried) deceased.
      val deceasedDetailsTemp = CommonBuilder.buildDeceasedDetails copy (maritalStatus=Some(TestHelper.MaritalStatusSingle))
      when(mockCachingConnector.getRegistrationDetails(any(), any()))
        .thenReturn(Future.successful(Some(registrationDetails copy (deceasedDetails = Some(deceasedDetailsTemp)))))
      when(mockIhtConnector.getApplication(any(), any(), any())(any()))
        .thenReturn(Future.successful(Some(applicationDetails)))
      val result = insurancePolicyOverviewController.onPageLoad(createFakeRequest())
      status(result) mustBe OK
      ContentChecker.stripLineBreaks(contentAsString(result)) must include(messagesApi("iht.estateReport.insurancePolicies.premiumsNotPayingOut.question", deceasedName))
    }
    // Shared behaviour: page load without registration details redirects away.
    behave like controllerOnPageLoadWithNoExistingRegistrationDetails(mockCachingConnector,
      insurancePolicyOverviewController.onPageLoad(createFakeRequest()))
  }
}
| hmrc/iht-frontend | test/iht/controllers/application/assets/insurancePolicy/InsurancePolicyOverviewControllerTest.scala | Scala | apache-2.0 | 5,722 |
package picasso.frontend
import picasso.utils._
import picasso.utils.report._
import picasso.utils.tools.armc._
import picasso.utils.tools.flata._
import picasso.utils.tools.nts._
import picasso.model.dbp._
import picasso.model.integer._
import picasso.analysis._
/**
 * Termination analysis: builds a Karp-Miller cover of the depth-bounded
 * system, abstracts it into an integer program, and hands that program to an
 * external termination prover (Flata or ARMC, per `Config`).
 */
class Termination[P <: picasso.model.dbp.DBCT](
    fileName: String,
    content: String,
    parse: String => Option[(DepthBoundedProcess[P], DepthBoundedConf[P], Option[DepthBoundedConf[P]])]
) extends AnalysisCommon[P]("Termination", fileName, content, parse)
{

  /** Writes `prog` to `file` in the backend's input format (NTS for Flata,
   *  ARMC syntax otherwise). */
  protected def dump(file: String, prog: Program): Unit = {
    Logger("Termination", LogInfo, "saving integer program in " + file)
    if (Config.flata) IO.writeInFile(file, NTSPrinter(_, prog))
    else IO.writeInFile(file, prog.printForARMC(_))
  }

  /** Runs the configured external prover on `prog`, returning its output. */
  protected def analyse(prog: Program) =
    if (Config.flata) Stats("proving termination with Flata", Flata.withOutput(prog))
    else Stats("proving termination with ARMC", ARMC.withOutput(prog))

  /** Main analysis entry point; a target configuration is not supported. */
  protected def analysis(_process: DepthBoundedProcess[P], init: DepthBoundedConf[P], target: Option[DepthBoundedConf[P]]): Unit = {
    assert(target.isEmpty, "Termination analysis does not expect a target state")
    val process = new DepthBoundedProcess(_process) with KarpMillerTree with DBPTermination[P]
    // 1. Compute the coverability set.
    val (cover, _) = Stats("cover computation", process.computeTree(init))
    // 2. Render each cover element into the report.
    val coverReport = new List("Cover")
    for ((conf, idx) <- cover.zipWithIndex) {
      val gv = conf.toGraphviz("cover")
      coverReport.add(new GenericItem(
        "cover element " + idx,
        gv,
        Misc.graphvizToSvgFdp(gv)
      ))
    }
    report.add(coverReport)
    // 3. Abstract the cover into an integer program.
    val intProgram = Stats("extracting numerical abstraction", process.termination(cover))
    Stats.comment(s"numerical abstraction has ${intProgram.pcs.size} locations, ${intProgram.variables.size} variables, ${intProgram.transitions.size} transitions.")
    report.add(new PreformattedText("Numerical Abstraction", intProgram.printForARMC))
    // 4. Either run the prover now, or just dump the program for later use.
    if (Config.dumpIntProg.isEmpty) {
      val (_, out, _) = analyse(intProgram)
      report.add(new PreformattedText("Termination analysis", out))
    } else {
      dump(Config.dumpIntProg, intProgram)
    }
  }
}
| dzufferey/picasso | core/src/main/scala/picasso/frontend/Termination.scala | Scala | bsd-2-clause | 2,338 |
package io.buoyant.linkerd
package protocol
import com.twitter.concurrent.AsyncStream
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.{Http => FinagleHttp, Status => _, http => _, _}
import com.twitter.finagle.buoyant.linkerd.Headers
import com.twitter.finagle.http.Method._
import com.twitter.finagle.http.filter.{ClientDtabContextFilter, ServerDtabContextFilter}
import com.twitter.finagle.http.{param => _, _}
import com.twitter.finagle.service.ExpiringService
import com.twitter.finagle.stats.{InMemoryStatsReceiver, NullStatsReceiver}
import com.twitter.finagle.tracing.{Annotation, BufferingTracer, NullTracer}
import com.twitter.io.{Buf, Pipe}
import com.twitter.util._
import io.buoyant.router.StackRouter.Client.PerClientParams
import io.buoyant.test.{Awaits, BudgetedRetries}
import java.io.File
import java.net.InetSocketAddress
import org.scalatest.{FunSuite, MustMatchers, OptionValues}
import org.scalatest.tagobjects.Retryable
import org.scalatest.time.{Millis, Seconds, Span}
import scala.io.Source
import scala.util.Random
class HttpEndToEndTest
extends FunSuite
with Awaits
with MustMatchers
with OptionValues
with BudgetedRetries {
  // A backend server under test, plus a dtab entry routing /svc/<name> to it.
  case class Downstream(name: String, server: ListeningServer) {
    val address = server.boundAddress.asInstanceOf[InetSocketAddress]
    val port = address.getPort
    val dentry = Dentry(
      Path.read(s"/svc/$name"),
      NameTree.read(s"/$$/inet/127.1/$port")
    )
  }
  object Downstream {
    // Starts a plain Finagle HTTP server (linkerd context modules removed)
    // serving `f` on an ephemeral port.
    def mk(name: String)(f: Request=>Response): Downstream = {
      val service = Service.mk { req: Request => Future(f(req)) }
      val stack = FinagleHttp.server.stack
        .remove(Headers.Ctx.serverModule.role)
        .remove(ServerDtabContextFilter.role)
      val server = FinagleHttp.server.withStack(stack)
        .configured(param.Label(name))
        .configured(param.Tracer(NullTracer))
        .serve(":*", service)
      Downstream(name, server)
    }
    // Downstream that always answers with the given body and status.
    def const(name: String, value: String, status: Status = Status.Ok): Downstream =
      mk(name) { _ =>
        val rsp = Response()
        rsp.status = status
        rsp.contentString = value
        rsp
      }
  }
  // Builds a raw Finagle HTTP client (linkerd context modules removed)
  // pointed at the given server — simulates an external caller.
  def upstream(server: ListeningServer) = {
    val address = Address(server.boundAddress.asInstanceOf[InetSocketAddress])
    val name = Name.Bound(Var.value(Addr.Bound(address)), address)
    val stack = FinagleHttp.client.stack
      .remove(Headers.Ctx.clientModule.role)
      .remove(ClientDtabContextFilter.role)
    FinagleHttp.client.withStack(stack)
      .configured(param.Stats(NullStatsReceiver))
      .configured(param.Tracer(NullTracer))
      .newClient(name, "upstream").toService
  }
  // Minimal linkerd YAML config: one HTTP router with the given dtab,
  // listening on an ephemeral port.
  def basicConfig(dtab: Dtab, streaming: Boolean = false) =
    s"""|routers:
        |- protocol: http
        |  streamingEnabled: $streaming
        |  dtab: ${dtab.show}
        |  servers:
        |  - port: 0
        |""".stripMargin
  // Reduces trace annotations to short keys (cs/cr/ss/sr/ws/wr plus the
  // l5d success/retryable/failure markers) for compact assertions.
  def annotationKeys(annotations: Seq[Annotation]): Seq[String] =
    annotations.collect {
      case Annotation.ClientSend => "cs"
      case Annotation.ClientRecv => "cr"
      case Annotation.ServerSend => "ss"
      case Annotation.ServerRecv => "sr"
      case Annotation.WireSend => "ws"
      case Annotation.WireRecv => "wr"
      case Annotation.BinaryAnnotation(k, _) if k == "l5d.success" => k
      case Annotation.Message(m) if Seq("l5d.retryable", "l5d.failure").contains(m) => m
    }
  // End-to-end routing: known hosts route to the right downstream and emit the
  // expected trace; unknown/empty hosts produce 502/400 with an l5d-err header.
  test("linking", Retryable) {
    val stats = NullStatsReceiver
    val tracer = new BufferingTracer
    // Runs `f` over the buffered annotations, then resets the tracer.
    def withAnnotations(f: Seq[Annotation] => Unit): Unit = {
      f(tracer.iterator.map(_.annotation).toSeq)
      tracer.clear()
    }
    val cat = Downstream.const("cat", "meow")
    val dog = Downstream.const("dog", "woof")
    val dtab = Dtab.read(s"""
      /p/cat => /$$/inet/127.1/${cat.port} ;
      /p/dog => /$$/inet/127.1/${dog.port} ;
      /svc/felix => /p/cat ;
      /svc/clifford => /p/dog ;
    """)
    val linker = Linker.Initializers(Seq(HttpInitializer)).load(basicConfig(dtab))
      .configured(param.Stats(stats))
      .configured(param.Tracer(tracer))
    val router = linker.routers.head.initialize()
    val server = router.servers.head.serve()
    val client = upstream(server)
    // Issues a GET with the given Host header and checks the response via `f`.
    def get(host: String, path: String = "/")(f: Response => Unit): Unit = {
      val req = Request()
      req.host = host
      req.uri = path
      val rsp = await(client(req))
      f(rsp)
    }
    try {
      get("felix") { rsp =>
        assert(rsp.status == Status.Ok)
        assert(rsp.contentString == "meow")
        val path = "/svc/felix"
        val bound = s"/$$/inet/127.1/${cat.port}"
        withAnnotations { anns =>
          assert(annotationKeys(anns) == Seq("wr", "sr", "cs", "ws", "wr", "l5d.success", "cr", "ss", "ws"))
          assert(anns.contains(Annotation.BinaryAnnotation("service", path)))
          assert(anns.contains(Annotation.BinaryAnnotation("client", bound)))
          assert(anns.contains(Annotation.BinaryAnnotation("residual", "/")))
          ()
        }
      }
      // Unroutable host: gateway error with linkerd's error header.
      get("ralph-machio") { rsp =>
        assert(rsp.status == Status.BadGateway)
        assert(rsp.headerMap.contains(Headers.Err.Key))
        ()
      }
      // Missing host: client error with linkerd's error header.
      get("") { rsp =>
        assert(rsp.status == Status.BadRequest)
        assert(rsp.headerMap.contains(Headers.Err.Key))
        ()
      }
      // todo check stats
    } finally {
      await(client.close())
      await(cat.server.close())
      await(dog.server.close())
      await(server.close())
      await(router.close())
    }
  }
  // Default response classification: 2XX counts as success, 5XX as failure,
  // on both the server-side and client-side stat scopes.
  test("marks 5XX as failure by default", Retryable) {
    val stats = new InMemoryStatsReceiver
    val tracer = NullTracer
    val downstream = Downstream.mk("dog") {
      case req if req.path == "/woof" =>
        val rsp = Response()
        rsp.status = Status.Ok
        rsp.contentString = "woof"
        rsp
      case _ =>
        val rsp = Response()
        rsp.status = Status.InternalServerError
        rsp
    }
    val label = s"$$/inet/127.1/${downstream.port}"
    val dtab = Dtab.read(s"/svc/dog => /$label;")
    val linker = Linker.Initializers(Seq(HttpInitializer)).load(basicConfig(dtab))
      .configured(param.Stats(stats))
      .configured(param.Tracer(tracer))
    val router = linker.routers.head.initialize()
    val server = router.servers.head.serve()
    val client = upstream(server)
    try {
      // 200 response: success counters increment, failures stay at zero.
      val okreq = Request()
      okreq.host = "dog"
      okreq.uri = "/woof"
      val okrsp = await(client(okreq))
      assert(okrsp.status == Status.Ok)
      assert(stats.counters.get(Seq("rt", "http", "server", "127.0.0.1/0", "requests")) == Some(1))
      assert(stats.counters.get(Seq("rt", "http", "server", "127.0.0.1/0", "success")) == Some(1))
      assert(stats.counters.get(Seq("rt", "http", "server", "127.0.0.1/0", "failures")) == Some(0))
      assert(stats.counters.get(Seq("rt", "http", "client", label, "requests")) == Some(1))
      assert(stats.counters.get(Seq("rt", "http", "client", label, "success")) == Some(1))
      assert(stats.counters.get(Seq("rt", "http", "client", label, "failures")) == Some(0))
      // 500 response: failure counters increment on server and client scopes.
      val errreq = Request()
      errreq.host = "dog"
      val errrsp = await(client(errreq))
      assert(errrsp.status == Status.InternalServerError)
      assert(stats.counters.get(Seq("rt", "http", "server", "127.0.0.1/0", "requests")) == Some(2))
      assert(stats.counters.get(Seq("rt", "http", "server", "127.0.0.1/0", "success")) == Some(1))
      assert(stats.counters.get(Seq("rt", "http", "server", "127.0.0.1/0", "failures")) == Some(1))
      assert(stats.counters.get(Seq("rt", "http", "client", label, "requests")) == Some(2))
      assert(stats.counters.get(Seq("rt", "http", "client", label, "success")) == Some(1))
      assert(stats.counters.get(Seq("rt", "http", "client", label, "failures")) == Some(1))
    } finally {
      await(client.close())
      await(downstream.server.close())
      await(server.close())
      await(router.close())
    }
  }
  // Connection-level exceptions (downstream closed) surface as 502 and are
  // counted as failures in server and service stat scopes.
  test("marks exceptions as failure by default", Retryable) {
    val stats = new InMemoryStatsReceiver
    val tracer = NullTracer
    val downstream = Downstream.mk("dog") { req => ??? }
    val label = s"$$/inet/127.1/${downstream.port}"
    val dtab = Dtab.read(s"/svc/dog => /$label;")
    val linker = Linker.Initializers(Seq(HttpInitializer)).load(basicConfig(dtab))
      .configured(param.Stats(stats))
      .configured(param.Tracer(tracer))
    val router = linker.routers.head.initialize()
    val server = router.servers.head.serve()
    val client = upstream(server)
    // Just close the downstream right away to generate connection exceptions
    await(downstream.server.close())
    try {
      val req = Request()
      req.host = "dog"
      val rsp = await(client(req))
      assert(rsp.status == Status.BadGateway)
      assert(stats.counters.get(Seq("rt", "http", "server", "127.0.0.1/0", "requests")) == Some(1))
      assert(stats.counters.get(Seq("rt", "http", "server", "127.0.0.1/0", "success")).forall(_ == 0))
      assert(stats.counters.get(Seq("rt", "http", "server", "127.0.0.1/0", "failures")) == Some(1))
      assert(stats.counters.get(Seq("rt", "http", "service", "svc/dog", "requests")) == Some(1))
      assert(stats.counters.get(Seq("rt", "http", "service", "svc/dog", "success")).forall(_ == 0))
      assert(stats.counters.get(Seq("rt", "http", "service", "svc/dog", "failures")) == Some(1))
    } finally {
      await(client.close())
      await(downstream.server.close())
      await(server.close())
      await(router.close())
    }
  }
  // HTTP method groupings used by the retry-classifier tests below.
  val allMethods = Set[Method](Connect, Delete, Get, Head, Patch, Post, Put, Options, Trace)
  // Methods with no request side effects (safe to retry on read classifiers).
  val readMethods = Set[Method](Get, Head, Options, Trace)
  // Read methods plus methods whose repetition is harmless.
  val idempotentMethods = readMethods ++ Set[Method](Delete, Put)
  // Shared scenario for retry classifiers: the first downstream response is a
  // 500 and the retry succeeds. Methods in `methods` must be retried (client
  // sees two calls, service sees one success); all other methods must not be
  // retried (the 500 propagates as a service failure).
  def retryTest(kind: String, methods: Set[Method]): Unit = {
    val stats = new InMemoryStatsReceiver
    val tracer = new BufferingTracer
    def withAnnotations(f: Seq[Annotation] => Unit): Unit = {
      f(tracer.iterator.map(_.annotation).toSeq)
      tracer.clear()
    }
    // Downstream fails exactly one request when `failNext` is armed.
    @volatile var failNext = false
    val downstream = Downstream.mk("dog") { req =>
      val rsp = Response()
      rsp.status = if (failNext) Status.InternalServerError else Status.Ok
      failNext = false
      rsp
    }
    val label = s"$$/inet/127.1/${downstream.port}"
    val dtab = Dtab.read(s"/svc/dog => /$label;")
    // Router configured with the response classifier under test.
    val yaml =
      s"""|routers:
          |- protocol: http
          |  dtab: ${dtab.show}
          |  service:
          |    responseClassifier:
          |      kind: $kind
          |  servers:
          |  - port: 0
          |""".stripMargin
    val linker = Linker.load(yaml)
      .configured(param.Stats(stats))
      .configured(param.Tracer(tracer))
    val router = linker.routers.head.initialize()
    val server = router.servers.head.serve()
    val client = upstream(server)
    try {
      // retryable request, fails and is retried
      for (method <- methods) {
        val req = Request()
        req.method = method
        req.host = "dog"
        failNext = true
        stats.clear()
        val rsp = await(client(req))
        assert(rsp.status == Status.Ok)
        // Server scope sees one successful request; client scope sees both
        // the failed original and the successful retry.
        assert(stats.counters.get(Seq("rt", "http", "server", "127.0.0.1/0", "requests")) == Some(1))
        assert(stats.counters.get(Seq("rt", "http", "server", "127.0.0.1/0", "success")) == Some(1))
        assert(stats.counters.get(Seq("rt", "http", "server", "127.0.0.1/0", "failures")) == None)
        assert(stats.counters.get(Seq("rt", "http", "client", label, "requests")) == Some(2))
        assert(stats.counters.get(Seq("rt", "http", "client", label, "success")) == Some(1))
        assert(stats.counters.get(Seq("rt", "http", "client", label, "failures")) == Some(1))
        assert(stats.counters.get(Seq("rt", "http", "client", label, "status", "200")) == Some(1))
        assert(stats.counters.get(Seq("rt", "http", "client", label, "status", "500")) == Some(1))
        val name = "svc/dog"
        assert(stats.counters.get(Seq("rt", "http", "service", name, "requests")) == Some(1))
        assert(stats.counters.get(Seq("rt", "http", "service", name, "success")) == Some(1))
        assert(stats.counters.get(Seq("rt", "http", "service", name, "failures")).getOrElse(0) == 0)
        assert(stats.stats.get(Seq("rt", "http", "service", name, "retries", "per_request")) == Some(Seq(1.0)))
        assert(stats.counters.get(Seq("rt", "http", "service", name, "retries", "total")) == Some(1))
        withAnnotations { anns =>
          assert(annotationKeys(anns) == Seq("wr", "sr", "cs", "ws", "wr", "l5d.retryable", "cr", "cs", "ws", "wr", "l5d.success", "cr", "ss", "ws"))
          ()
        }
      }
      // non-retryable request, fails and is not retried
      for (method <- allMethods -- methods) {
        val req = Request()
        req.method = method
        req.host = "dog"
        failNext = true
        stats.clear()
        val rsp = await(client(req))
        assert(rsp.status == Status.InternalServerError)
        assert(stats.counters.get(Seq("rt", "http", "server", "127.0.0.1/0", "requests")) == Some(1))
        assert(stats.counters.get(Seq("rt", "http", "server", "127.0.0.1/0", "success")).forall(_ == 0))
        assert(stats.counters.get(Seq("rt", "http", "server", "127.0.0.1/0", "failures")) == Some(1))
        assert(stats.counters.get(Seq("rt", "http", "client", label, "requests")) == Some(1))
        assert(stats.counters.get(Seq("rt", "http", "client", label, "success")).forall(_ == 0))
        assert(stats.counters.get(Seq("rt", "http", "client", label, "failures")) == Some(1))
        assert(stats.counters.get(Seq("rt", "http", "client", label, "status", "200")).forall(_ == 0))
        assert(stats.counters.get(Seq("rt", "http", "client", label, "status", "500")) == Some(1))
        val name = s"svc/dog"
        assert(stats.counters.get(Seq("rt", "http", "service", name, "requests")) == Some(1))
        assert(stats.counters.get(Seq("rt", "http", "service", name, "success")).forall(_ == 0))
        assert(stats.counters.get(Seq("rt", "http", "service", name, "failures")) == Some(1))
        assert(stats.stats.get(Seq("rt", "http", "service", name, "retries", "per_request")) == Some(Seq(0.0)))
        assert(stats.counters.get(Seq("rt", "http", "service", name, "retries", "total")).forall(_ == 0))
        withAnnotations { anns =>
          assert(annotationKeys(anns) == Seq("wr", "sr", "cs", "ws", "wr", "l5d.failure", "cr", "ss", "ws"))
          ()
        }
      }
    } finally {
      await(client.close())
      await(downstream.server.close())
      await(server.close())
      await(router.close())
    }
  }
  // Each classifier retries exactly its advertised method set.
  test("retries retryableIdempotent5XX", Retryable) {
    retryTest("io.l5d.http.retryableIdempotent5XX", idempotentMethods)
  }
  test("retries retryablRead5XX", Retryable) {
    retryTest("io.l5d.http.retryableRead5XX", readMethods)
  }
  test("retries nonRetryable5XX", Retryable) {
    retryTest("io.l5d.http.nonRetryable5XX", Set.empty)
  }
  // Headers linkerd accepts dtab overrides from, and the single header it
  // propagates downstream.
  val dtabReadHeaders = Seq("l5d-dtab", "l5d-ctx-dtab")
  val dtabWriteHeader = "l5d-ctx-dtab"
  // A dtab supplied in either read header is forwarded downstream only via
  // the canonical l5d-ctx-dtab header; other dtab headers are stripped.
  for (readHeader <- dtabReadHeaders) test(s"dtab read from $readHeader header", Retryable) {
    val stats = NullStatsReceiver
    val tracer = new BufferingTracer
    // Captures the headers seen by the downstream service.
    @volatile var headers: HeaderMap = null
    val dog = Downstream.mk("dog") { req =>
      headers = req.headerMap
      Response()
    }
    val dtab = Dtab.read(s"""
      /svc/* => /$$/inet/127.1/${dog.port} ;
    """)
    val linker = Linker.Initializers(Seq(HttpInitializer)).load(basicConfig(dtab))
      .configured(param.Stats(stats))
      .configured(param.Tracer(tracer))
    val router = linker.routers.head.initialize()
    val server = router.servers.head.serve()
    val client = upstream(server)
    val req = Request()
    req.host = "dog"
    req.headerMap.set(readHeader, "/a=>/b")
    await(client(req))
    for (header <- dtabReadHeaders) {
      if (header == dtabWriteHeader) assert(headers(header) == "/a=>/b")
      else assert(!headers.contains(header))
    }
    assert(!headers.contains("dtab-local"))
  }
  // The legacy dtab-local header is neither interpreted nor stripped: it
  // passes through unchanged and no l5d-ctx-dtab is synthesized from it.
  test("dtab-local header is ignored", Retryable) {
    val stats = NullStatsReceiver
    val tracer = new BufferingTracer
    @volatile var headers: HeaderMap = null
    val dog = Downstream.mk("dog") { req =>
      headers = req.headerMap
      Response()
    }
    val dtab = Dtab.read(s"""
      /svc/* => /$$/inet/127.1/${dog.port} ;
    """)
    val linker = Linker.Initializers(Seq(HttpInitializer)).load(basicConfig(dtab))
      .configured(param.Stats(stats))
      .configured(param.Tracer(tracer))
    val router = linker.routers.head.initialize()
    val server = router.servers.head.serve()
    val client = upstream(server)
    val req = Request()
    req.host = "dog"
    req.headerMap.set("dtab-local", "/a=>/b")
    await(client(req))
    assert(headers("dtab-local") == "/a=>/b")
    assert(!headers.contains(dtabWriteHeader))
  }
  // With clearContext enabled, incoming l5d-* context headers are dropped and
  // only linkerd's own freshly-minted headers reach the downstream.
  test("with clearContext", Retryable) {
    // Downstream echoes every l5d-* request header in its response body.
    val downstream = Downstream.mk("dog") { req =>
      val rsp = Response()
      rsp.contentString = req.headerMap.collect {
        case (k, v) if k.startsWith("l5d-") => s"$k=$v"
      }.mkString(",")
      rsp
    }
    val localDtab = "/foo=>/bar"
    val req = Request()
    req.host = "test"
    req.headerMap("l5d-dtab") = localDtab
    req.headerMap("l5d-ctx-thing") = "yoooooo"
    val yaml =
      s"""|routers:
          |- protocol: http
          |  dtab: /svc/* => /$$/inet/127.1/${downstream.port}
          |  servers:
          |  - port: 0
          |    clearContext: true
          |""".stripMargin
    val linker = Linker.load(yaml)
    val router = linker.routers.head.initialize()
    val s = router.servers.head.serve()
    val body =
      try {
        val c = upstream(s)
        try await(c(req)).contentString
        finally await(c.close())
      } finally await(s.close())
    // Parse the echoed "k=v,k=v" body back into a header map.
    val headers =
      body.split(",").map { kv =>
        val Array(k, v) = kv.split("=", 2)
        k -> v
      }.toMap
    // Only linkerd-originated headers survive; the caller's dtab/context do not.
    assert(headers.keySet == Set(
      "l5d-dst-service",
      "l5d-dst-client",
      "l5d-reqid",
      "l5d-ctx-trace"
    ))
  }
  // With clearContext, linkerd's error headers and diagnostic body are
  // suppressed even when routing fails (no downstream is listening).
  test("clearContext will remove linkerd error headers and body", Retryable) {
    val yaml =
      s"""|routers:
          |- protocol: http
          |  dtab: /svc/* => /$$/inet/127.1/1234
          |  servers:
          |  - port: 0
          |    clearContext: true
          |""".stripMargin
    val linker = Linker.load(yaml)
    val router = linker.routers.head.initialize()
    val s = router.servers.head.serve()
    val req = Request()
    req.host = "test"
    val c = upstream(s)
    try {
      val resp = await(c(req))
      resp.headerMap.keys must not contain ("l5d-err", "l5d-success-class", "l5d-retryable")
      resp.contentString must be("")
    } finally {
      await(c.close())
      await(s.close())
    }
  }
  // Same suppression holds when a path identifier rejects the request
  // (path "/a" is too short for a two-segment identifier).
  test("clearContext will remove linkerd error headers and body when identifier is set", Retryable) {
    val yaml =
      s"""|routers:
          |- protocol: http
          |  dtab: /svc/a/b => /$$/inet/127.1/1234
          |  identifier:
          |    kind: io.l5d.path
          |    segments: 2
          |  servers:
          |  - port: 0
          |    clearContext: true
          |""".stripMargin
    val linker = Linker.load(yaml)
    val router = linker.routers.head.initialize()
    val s = router.servers.head.serve()
    val req = Request("/a")
    req.host = "test"
    val c = upstream(s)
    try {
      val resp = await(c(req))
      resp.headerMap.keys must not contain ("l5d-err", "l5d-success-class", "l5d-retryable")
      resp.contentString must be("")
    } finally {
      await(c.close())
      await(s.close())
    }
  }
  // When timestampHeader is configured, the downstream request carries that
  // header with a numeric (epoch) timestamp value.
  test("timestampHeader adds header", Retryable) {
    @volatile var headers: Option[HeaderMap] = None
    val downstream = Downstream.mk("test") {
      req =>
        headers = Some(req.headerMap)
        val rsp = Response()
        rsp.status = Status.Ok
        rsp
    }
    val yaml =
      s"""|routers:
          |- protocol: http
          |  dtab: ${downstream.dentry.show};
          |  servers:
          |  - port: 0
          |    timestampHeader: x-request-start
          |""".stripMargin
    val linker = Linker.load(yaml)
    val router = linker.routers.head.initialize()
    val s = router.servers.head.serve()
    val req = Request()
    req.host = "test"
    val c = upstream(s)
    try {
      val resp = await(c(req))
      resp.status must be (Status.Ok)
      headers.value.keys must contain ("x-request-start")
      // The header's value must parse as a Long.
      Try(headers.value.get("x-request-start").value.toLong) must be a 'return
    } finally {
      await(c.close())
      await(downstream.server.close())
      await(s.close())
    }
  }
test("no timestampHeader does not add timestamp header", Retryable) {
@volatile var headers: Option[HeaderMap] = None
val downstream = Downstream.mk("test") {
req =>
headers = Some(req.headerMap)
val rsp = Response()
rsp.status = Status.Ok
rsp
}
val yaml =
s"""|routers:
|- protocol: http
| dtab: ${downstream.dentry.show};
| servers:
| - port: 0
|""".stripMargin
val linker = Linker.load(yaml)
val router = linker.routers.head.initialize()
val s = router.servers.head.serve()
val req = Request()
req.host = "test"
val c = upstream(s)
try {
val resp = await(c(req))
resp.status must be (Status.Ok)
headers.value.keys must not contain "x-request-start"
} finally {
await(c.close())
await(downstream.server.close())
await(s.close())
}
}
  // With the default server config (no clearContext), the caller-supplied dtab
  // and custom l5d-ctx-* headers are propagated downstream alongside the
  // headers linkerd itself adds.
  test("without clearContext", Retryable) {
    val downstream = Downstream.mk("dog") { req =>
      val rsp = Response()
      // Echo back all l5d-* request headers so the test can inspect what the
      // proxy forwarded.
      rsp.contentString = req.headerMap.collect {
        case (k, v) if k.startsWith("l5d-") => s"$k=$v"
      }.mkString(",")
      rsp
    }
    val localDtab = "/foo=>/bar"
    val req = Request()
    req.host = "test"
    req.headerMap("l5d-dtab") = localDtab
    req.headerMap("l5d-ctx-thing") = "yoooooo"
    val yaml =
      s"""|routers:
          |- protocol: http
          |  dtab: /svc/* => /$$/inet/127.1/${downstream.port}
          |  servers:
          |  - port: 0
          |""".stripMargin
    val linker = Linker.load(yaml)
    val router = linker.routers.head.initialize()
    val s = router.servers.head.serve()
    val body =
      try {
        val c = upstream(s)
        try await(c(req)).contentString
        finally await(c.close())
      } finally await(s.close())
    // Parse the echoed "k=v,k=v" body back into a header map.
    val headers =
      body.split(",").map { kv =>
        val Array(k, v) = kv.split("=", 2)
        k -> v
      }.toMap
    // Caller context (the dtab and the custom l5d-ctx-thing header) survives.
    assert(headers.keySet == Set(
      "l5d-dst-service",
      "l5d-dst-client",
      "l5d-reqid",
      "l5d-ctx-dtab",
      "l5d-ctx-trace",
      "l5d-ctx-thing"
    ))
    assert(headers.get("l5d-ctx-dtab") == Some(localDtab))
  }
test("logs to correct files", Retryable) {
val downstream = Downstream.mk("test") {
req =>
val rsp = Response()
rsp.status = Status.Ok
rsp
}
val logs = Array(
File.createTempFile("access", "log0"),
File.createTempFile("access", "log1")
)
logs.foreach { log => log.deleteOnExit() }
val rand = new Random()
def randomPort = 32000 + (Random.nextDouble * 30000).toInt
val yaml =
s"""|routers:
|- protocol: http
| label: router0
| httpAccessLog: ${logs(0).getPath}
| dtab: ${downstream.dentry.show};
| servers:
| - port: ${randomPort}
|- protocol: http
| label: router1
| httpAccessLog: ${logs(1).getPath}
| dtab: ${downstream.dentry.show};
| servers:
| - port: ${randomPort}
|""".stripMargin
val routers = Linker.load(yaml).routers.map { router =>
router.initialize()
}
try {
Array("/path0", "/path1", "/path2", "/path3").zipWithIndex.foreach {
case (path, i) =>
val routerIndex = i%2
val req = Request()
req.host = "test"
req.uri = path
val s = routers(routerIndex).servers.head.serve()
val c = upstream(s)
try {
val resp = await(c(req))
resp.status must be (Status.Ok)
} finally {
await(c.close())
await(s.close())
}
val source = Source.fromFile(logs(routerIndex))
val lines = try source.mkString finally source.close()
assert(lines.contains(path))
}
} finally {
await(downstream.server.close())
routers.foreach { router => await(router.close()) }
}
}
  // A client configured with clientSession.idleTimeMs must tear down idle
  // downstream connections after the timeout and expose the configured
  // idle/life times via PerClientParams.
  test("clientSession idleTimeMs should close client connections") {
    val config =
      s"""|routers:
          |- protocol: http
          |  experimental: true
          |  dtab: |
          |    /p/fox => /$$/inet/127.1/{fox.port} ;
          |    /svc/century => /p/fox ;
          |  servers:
          |  - port: 0
          |  client:
          |    clientSession:
          |      idleTimeMs: 1500
          |""".stripMargin
    idleTimeMsBaseTest(config){ (router:Router.Initialized, stats:InMemoryStatsReceiver, foxPort:Int) =>
      // Assert
      def activeConnectionsCount = stats.gauges(Seq("rt", "http", "client", s"$$/inet/127.1/${foxPort}", "connections"))
      // An incoming request through the Http.Router will establish an active connection; We expect to see it here
      assert(activeConnectionsCount() == 1.0)
      // The connection must be reaped within the 1500ms idle timeout (polled
      // for up to 5s).
      eventually(timeout(Span(5, Seconds)), interval(Span(250, Millis))) {
        val cnt = activeConnectionsCount()
        assert(cnt == 0.0)
      }
      val clientSessionParams = router.params[PerClientParams].paramsFor(Path.read("/svc/century"))[ExpiringService.Param]
      assert(clientSessionParams.idleTime == 1500.milliseconds)
      assert(clientSessionParams.lifeTime == Duration.Top)
      ()
    }
  }
  // Same idle-timeout behavior, but with the clientSession configured through
  // the io.l5d.static client kind (per-prefix config) rather than directly.
  test("clientSession idleTimeMs should close client connections for static client") {
    val config =
      s"""|routers:
          |- protocol: http
          |  experimental: true
          |  dtab: |
          |    /p/fox => /$$/inet/127.1/{fox.port} ;
          |    /svc/century => /p/fox ;
          |  servers:
          |  - port: 0
          |  client:
          |    kind: io.l5d.static
          |    configs:
          |    - prefix: /*
          |      clientSession:
          |        idleTimeMs: 1500
          |""".stripMargin
    idleTimeMsBaseTest(config){ (router:Router.Initialized, stats:InMemoryStatsReceiver, foxPort:Int) =>
      // Assert
      def activeConnectionsCount = stats.gauges(Seq("rt", "http", "client", s"$$/inet/127.1/${foxPort}", "connections"))
      // An incoming request through the Http.Router will establish an active connection; We expect to see it here
      assert(activeConnectionsCount() == 1.0)
      // The idle connection must be reaped within the timeout window.
      eventually(timeout(Span(5, Seconds)), interval(Span(250, Millis))) {
        val cnt = activeConnectionsCount()
        assert(cnt == 0.0)
      }
      val clientSessionParams = router.params[PerClientParams].paramsFor(Path.read("/svc/century"))[ExpiringService.Param]
      assert(clientSessionParams.idleTime == 1500.milliseconds)
      assert(clientSessionParams.lifeTime == Duration.Top)
      ()
    }
  }
  // Without clientSession config, connections are kept open and both idle and
  // life times default to Duration.Top (i.e. never expire).
  test("clientSession idleTimeMs should not close client connections when isn't specified") {
    val config =
      s"""|routers:
          |- protocol: http
          |  experimental: true
          |  dtab: |
          |    /p/fox => /$$/inet/127.1/{fox.port} ;
          |    /svc/century => /p/fox ;
          |  servers:
          |  - port: 0
          |  client:
          |    forwardClientCert: false
          |""".stripMargin
    idleTimeMsBaseTest(config){ (router:Router.Initialized, stats:InMemoryStatsReceiver, foxPort:Int) =>
      // Assert
      def activeConnectionsCount = stats.gauges(Seq("rt", "http", "client", s"$$/inet/127.1/${foxPort}", "connections"))
      // An incoming request through the Http.Router will establish an active connection; We expect to see it here
      assert(activeConnectionsCount() == 1.0)
      val clientSessionParams = router.params[PerClientParams].paramsFor(Path.read("/svc/century"))[ExpiringService.Param]
      assert(clientSessionParams.idleTime == Duration.Top)
      assert(clientSessionParams.lifeTime == Duration.Top)
      ()
    }
  }
  // serverSession.idleTimeMs (1500ms, below the 3000ms lifetime) must reap the
  // idle server-side connection and bump the server "idle" counter.
  test("serverSession idleTimeMs should close server connections") {
    val config =
      s"""|routers:
          |- protocol: http
          |  experimental: true
          |  dtab: |
          |    /p/fox => /$$/inet/127.1/{fox.port} ;
          |    /svc/century => /p/fox ;
          |  servers:
          |  - port: 0
          |    serverSession:
          |      idleTimeMs: 1500
          |      lifeTimeMs: 3000
          |""".stripMargin
    serverIdleTimeMsBaseTest(config){ (router:Router.Initialized, stats:InMemoryStatsReceiver) =>
      // Assert
      def connectsCount = stats.counters(Seq("rt", "http", "server", "127.0.0.1/0", "connects"))
      def activeConnectionsCount = stats.gauges(Seq("rt", "http", "server", "127.0.0.1/0", "connections"))
      def idleConnectionsCount = () => stats.counters(Seq("rt", "http", "server", "127.0.0.1/0", "idle"))
      // An incoming request through the Http.Router will establish an active connection; We expect to see it here
      assert(connectsCount == 1.0)
      assert(activeConnectionsCount() == 1.0)
      // The idle timeout fires first, so the close is attributed to "idle".
      eventually(timeout(Span(5, Seconds)), interval(Span(250, Millis))) {
        assert(activeConnectionsCount() == 0.0)
        assert(idleConnectionsCount() == 1.0)
      }
      ()
    }
  }
  // serverSession.lifeTimeMs (1500ms, below the 3000ms idle timeout) must reap
  // the connection and bump the server "lifetime" counter instead of "idle".
  test("serverSession lifeTimeMs should close server connections") {
    val config =
      s"""|routers:
          |- protocol: http
          |  experimental: true
          |  dtab: |
          |    /p/fox => /$$/inet/127.1/{fox.port} ;
          |    /svc/century => /p/fox ;
          |  servers:
          |  - port: 0
          |    serverSession:
          |      idleTimeMs: 3000
          |      lifeTimeMs: 1500
          |""".stripMargin
    serverIdleTimeMsBaseTest(config){ (router:Router.Initialized, stats:InMemoryStatsReceiver) =>
      // Assert
      def connectsCount = stats.counters(Seq("rt", "http", "server", "127.0.0.1/0", "connects"))
      def activeConnectionsCount = stats.gauges(Seq("rt", "http", "server", "127.0.0.1/0", "connections"))
      def lifetimeConnectionsCount = () => stats.counters(Seq("rt", "http", "server", "127.0.0.1/0", "lifetime"))
      // An incoming request through the Http.Router will establish an active connection; We expect to see it here
      assert(connectsCount == 1.0)
      assert(activeConnectionsCount() == 1.0)
      // The lifetime cap fires before the idle timeout.
      eventually(timeout(Span(5, Seconds)), interval(Span(250, Millis))) {
        assert(activeConnectionsCount() == 0.0)
        assert(lifetimeConnectionsCount() == 1.0)
      }
      ()
    }
  }
  // Without serverSession config, server connections stay open and the server's
  // ExpiringService params default to Duration.Top (never expire).
  test("serverSession lifeTimeMs should not close server connections if not specified") {
    val config =
      s"""|routers:
          |- protocol: http
          |  experimental: true
          |  dtab: |
          |    /p/fox => /$$/inet/127.1/{fox.port} ;
          |    /svc/century => /p/fox ;
          |  servers:
          |  - port: 0
          |""".stripMargin
    serverIdleTimeMsBaseTest(config){ (router:Router.Initialized, stats:InMemoryStatsReceiver) =>
      // Assert
      def connectsCount = stats.counters(Seq("rt", "http", "server", "127.0.0.1/0", "connects"))
      def activeConnectionsCount = stats.gauges(Seq("rt", "http", "server", "127.0.0.1/0", "connections"))
      // An incoming request through the Http.Router will establish an active connection; We expect to see it here
      assert(connectsCount == 1.0)
      assert(activeConnectionsCount() == 1.0)
      assert(router.servers.head.params[ExpiringService.Param].idleTime == Duration.Top)
      assert(router.servers.head.params[ExpiringService.Param].lifeTime == Duration.Top)
      ()
    }
  }
  // A TRACE request carrying Max-Forwards and l5d-add-context must be proxied
  // downstream intact (method, both headers) rather than answered locally.
  test("requests with Max-Forwards header, l5d-add-context and method TRACE are sent downstream") {
    @volatile var headers: HeaderMap = null
    @volatile var method: Method = null
    val downstream = Downstream.mk("dog") { req =>
      // Capture what the downstream actually received for later assertions.
      headers = req.headerMap
      method = req.method
      val resp = Response()
      resp.contentString = "response from downstream"
      resp
    }
    val dtab = Dtab.read(s"""
      /svc/* => /$$/inet/127.1/${downstream.port} ;
    """)
    val linker = Linker.Initializers(Seq(HttpInitializer)).load(basicConfig(dtab))
    val router = linker.routers.head.initialize()
    val server = router.servers.head.serve()
    val client = upstream(server)
    val req = Request()
    req.host = "dog"
    req.headerMap.add("Max-Forwards", "5")
    req.headerMap.add("l5d-add-context", "true")
    req.method = Method.Trace
    val resp = await(client(req))
    assert(resp.contentString.contains("response from downstream"))
    assert(headers.contains("Max-Forwards"))
    assert(headers.contains("l5d-add-context"))
    assert(method == Method.Trace)
  }
  // A diagnostic TRACE (Max-Forwards + l5d-add-context) must append a
  // human-readable account of the dtab resolution chain to the response body.
  test("prints out human readable dtab resolution path"){
    val downstream = Downstream.mk("dog") { req =>
      Response()
    }
    // Two-step dtab so the trace shows an intermediate /srv rewrite.
    val dtab = Dtab.read(s"""
      /srv => /$$/inet/127.1/${downstream.port};
      /svc => /srv;
    """)
    val linker = Linker.Initializers(Seq(HttpInitializer)).load(basicConfig(dtab))
    val router = linker.routers.head.initialize()
    val server = router.servers.head.serve()
    val client = upstream(server)
    val req = Request()
    req.host = "dog"
    req.method = Method.Trace
    req.headerMap.add("Max-Forwards", "5")
    req.headerMap.add("l5d-add-context", "true")
    val resp = await(client(req))
    // Each dtab rewrite step appears with the rule that produced it.
    assert(resp.contentString.contains(
      s"""|client name: /$$/inet/127.1/${downstream.port}
          |addresses: [127.0.0.1:${downstream.port}]
          |selected address: 127.0.0.1:${downstream.port}
          |dtab resolution:
          |  /svc/dog
          |  /srv/dog (/svc=>/srv)
          |  /$$/inet/127.1/${downstream.port}/dog (/srv=>/$$/inet/127.1/${downstream.port})
          |""".stripMargin))
  }
  // During a diagnostic trace, a chunked (streaming) downstream response body
  // cannot be embedded in the trace output, so it is discarded with a notice.
  test("discards content from chunked server response during diagnostic trace"){
    val responseDiscardedMsg = "Diagnostic trace encountered chunked response. Response content discarded."
    val downstream = Downstream.mk("dog") { req =>
      // Serve a two-chunk streamed body via an fs2-style Pipe writer.
      val chunkedWriter = new Pipe[Buf]()
      AsyncStream[Buf](
        Seq("Chunked", "Response")
          .map(Buf.Utf8(_)): _*)
        .foreachF(chunkedWriter.write)
        .before(chunkedWriter.close())
      Response(req.version, Status.Ok, chunkedWriter)
    }
    val dtab = Dtab.read(s"""
      /srv => /$$/inet/127.1/${downstream.port};
      /svc => /srv;
    """)
    // streaming = true so the router does not buffer the chunked body.
    val linker = Linker.Initializers(Seq(HttpInitializer)).load(basicConfig(dtab, streaming = true))
    val router = linker.routers.head.initialize()
    val server = router.servers.head.serve()
    val client = upstream(server)
    val req = Request()
    req.host = "dog"
    req.method = Method.Trace
    req.headerMap.add("Max-Forwards", "5")
    req.headerMap.add("l5d-add-context", "true")
    val resp = await(client(req))
    assert(resp.contentString.contains(responseDiscardedMsg))
  }
test("returns 400 for requests that have more than allowed hops", Retryable) {
val yaml =
s"""|routers:
|- protocol: http
| servers:
| - port: 0
| maxCallDepth: 2
|""".stripMargin
val linker = Linker.load(yaml)
val router = linker.routers.head.initialize()
val s = router.servers.head.serve()
val req = Request()
req.headerMap.add(Fields.Via, "hop1, hop2, hop3")
val c = upstream(s)
try {
val resp = await(c(req))
resp.status must be (Status.BadRequest)
resp.contentString must be ("Maximum number of calls (2) has been exceeded. Please check for proxy loops.")
} finally {
await(c.close())
await(s.close())
}
}
  // Shared harness for the clientSession tests: boots a linker from `config`
  // (with {fox.port} substituted), issues one successful GET through it, then
  // hands the router + stats + downstream port to the caller's assertions.
  // All resources are closed afterwards regardless of assertion outcome.
  def idleTimeMsBaseTest(config:String)(assertionsF: (Router.Initialized, InMemoryStatsReceiver, Int) => Unit): Unit = {
    // Arrange
    val stats = new InMemoryStatsReceiver
    val fox = Downstream.const("fox", "what does the fox say?")
    val configWithPort = config.replace("{fox.port}", fox.port.toString)
    val linker = Linker.Initializers(Seq(HttpInitializer)).load(configWithPort)
      .configured(param.Stats(stats))
    val router = linker.routers.head.initialize()
    val server = router.servers.head.serve()
    val client = upstream(server)
    def get(host: String, path: String = "/")(f: Response => Unit): Unit = {
      val req = Request(path)
      req.host = host
      val rsp = await(client(req))
      f(rsp)
    }
    // Act
    try {
      get("century") { rsp =>
        assert(rsp.status == Status.Ok)
        assert(rsp.contentString == "what does the fox say?")
        ()
      }
      // Assert
      assertionsF(router, stats, fox.port)
    } finally {
      await(client.close())
      await(fox.server.close())
      await(server.close())
      await(router.close())
    }
  }
  // Shared harness for the serverSession tests; identical to
  // idleTimeMsBaseTest except the assertion callback does not receive the
  // downstream port. NOTE(review): the two harnesses duplicate all setup and
  // teardown; consider delegating one to the other.
  def serverIdleTimeMsBaseTest(config:String)(assertionsF: (Router.Initialized, InMemoryStatsReceiver) => Unit): Unit = {
    // Arrange
    val stats = new InMemoryStatsReceiver
    val fox = Downstream.const("fox", "what does the fox say?")
    val configWithPort = config.replace("{fox.port}", fox.port.toString)
    val linker = Linker.Initializers(Seq(HttpInitializer)).load(configWithPort)
      .configured(param.Stats(stats))
    val router = linker.routers.head.initialize()
    val server = router.servers.head.serve()
    val client = upstream(server)
    def get(host: String, path: String = "/")(f: Response => Unit): Unit = {
      val req = Request(path)
      req.host = host
      val rsp = await(client(req))
      f(rsp)
    }
    // Act
    try {
      get("century") { rsp =>
        assert(rsp.status == Status.Ok)
        assert(rsp.contentString == "what does the fox say?")
        ()
      }
      // Assert
      assertionsF(router, stats)
    } finally {
      await(client.close())
      await(fox.server.close())
      await(server.close())
      await(router.close())
    }
  }
}
| linkerd/linkerd | linkerd/protocol/http/src/e2e/scala/io/buoyant/linkerd/protocol/HttpEndToEndTest.scala | Scala | apache-2.0 | 38,595 |
import sbt._
// sbt 0.7-style build definition: packages this project as an Akka kernel
// deployment via the sbt-akka-bivy plugin.
class Project(info: ProjectInfo) extends DefaultProject(info) with AkkaProject with sbt_akka_bivy.AkkaKernelDeployment {
  // Compiler and distribution specific options
  override def compileOptions = super.compileOptions ++ Seq(Unchecked)
  override val artifactID = "breakybot"
  // Project dependencies
  val akkaKernel = akkaModule("kernel")
  val akkaSlf4j = akkaModule("slf4j")
  val testkit = akkaModule("testkit")
  val logback = "ch.qos.logback" % "logback-classic" % "0.9.28"
  // Exclude slf4j-simple transitively so logback-classic is the sole binding.
  override def ivyXML =
    <dependencies>
      <exclude module="slf4j-simple"/>
    </dependencies>
  val scalaTest = "org.scalatest" % "scalatest" % "1.4.RC2"
  // Bivy configuration
  override def akkaKernelBootClass = "akka.kernel.Main"
  // Launch script for the deployed kernel; records the JVM pid in akka.pid so
  // it can be stopped later. %s placeholders are the Scala version and jar name.
  override def akkaBootScript = """#!/bin/sh
SCALA_VERSION=%s
SCRIPT=$(readlink -f $0)
export AKKA_HOME=`dirname $SCRIPT`
java -jar -server -Xss128k $AKKA_HOME/%s&
echo $! > $AKKA_HOME/akka.pid
""".format(buildScalaVersion, defaultJarPath(".jar").name)
}
| blt/BreakyBot | project/build/Project.scala | Scala | mit | 1,000 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever
/** CT computations box CP24: repairs, renewals and maintenance expenditure.
  * Optional; when present the amount must be zero or positive. */
case class CP24(value: Option[Int]) extends CtBoxIdentifier(name = "Repairs, renewals and maintenance") with CtOptionalInteger with Input
  with ValidatableBox[ComputationsBoxRetriever] {
  // Rejects negative amounts; an absent value is valid.
  override def validate(boxRetriever: ComputationsBoxRetriever): Set[CtValidation] = {
    validateZeroOrPositiveInteger(this)
  }
}
object CP24 {
  /** Convenience factory wrapping a concrete amount in `Some`. */
  def apply(int: Int): CP24 = new CP24(Some(int))
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/computations/CP24.scala | Scala | apache-2.0 | 1,127 |
/*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package laika.preview
import java.io.InputStream
import cats.syntax.all._
import cats.effect.{Async, Resource, Sync}
import fs2.concurrent.Topic
import fs2.io.readInputStream
import laika.preview.ServerBuilder.Logger
import org.http4s.dsl.Http4sDsl
import org.http4s.{CacheDirective, EntityEncoder, Headers, HttpRoutes, MediaType, ServerSentEvent}
import org.http4s.headers.{`Cache-Control`, `Content-Type`}
import scala.concurrent.duration.DurationInt
/** Builds the http4s routes for the preview server: an SSE endpoint used for
  * live-reload notifications and a catch-all that serves rendered or static
  * site content from the shared cache. */
private [preview] class RouteBuilder[F[_]: Async](cache: Cache[F, SiteResults[F]],
                                                  sseTopic: Topic[F, String],
                                                  logger: Logger[F]) extends Http4sDsl[F] {
  // Streams an InputStream resource as a response body; closeAfterUse = false
  // because the surrounding Resource is responsible for closing the stream.
  implicit def inputStreamResourceEncoder[G[_]: Sync, IS <: InputStream]: EntityEncoder[G, Resource[G, IS]] =
    EntityEncoder.entityBodyEncoder[G].contramap { (in: Resource[G, IS]) =>
      fs2.Stream.resource(in).flatMap { stream =>
        readInputStream[G](Sync[G].pure(stream), 4096, closeAfterUse = false)
      }
    }
  // Looks up a media type for the suffix, retrying with the last dot-separated
  // segment for compound suffixes (e.g. "epub.xhtml").
  private def mediaTypeFor (suffix: String): Option[MediaType] =
    MediaType.forExtension(suffix).orElse(MediaType.forExtension(suffix.split("\\\\.").last))
  // Preview content changes on every edit, so responses must never be cached.
  private val noCache = `Cache-Control`(CacheDirective.`no-store`)
  def build: HttpRoutes[F] = HttpRoutes.of[F] {
    case GET -> Root / "laika" / "events" =>
      // Server-sent events for the live-reload client; periodic keepAlive
      // messages stop idle proxies from dropping the connection.
      val keepAlive = fs2.Stream.fixedRate(10.seconds).as("keepAlive")
      Ok(sseTopic.subscribe(10).merge(keepAlive).map(msg => ServerSentEvent(Some(msg))))
    case GET -> path =>
      // Fragments are dropped: cache keys are whole documents.
      val laikaPath = laika.ast.Path.parse(path.toString)
      cache.get.map(_.get(laikaPath.withoutFragment)).flatMap {
        case Some(RenderedResult(content)) =>
          logger(s"serving path $laikaPath - transformed markup") *>
            Ok(content).map(_
              .withHeaders(noCache)
              .withContentType(`Content-Type`(MediaType.text.html))
            )
        case Some(StaticResult(input)) =>
          logger(s"serving path $laikaPath - static input") *> {
            val mediaType = laikaPath.suffix.flatMap(mediaTypeFor).map(`Content-Type`(_))
            Ok(input).map(_.withHeaders(Headers(mediaType, noCache)))
          }
        case None =>
          logger(s"serving path $laikaPath - not found") *> NotFound()
      }
  }
}
| planet42/Laika | preview/src/main/scala/laika/preview/RouteBuilder.scala | Scala | apache-2.0 | 2,946 |
// IDE type-inference test fixture for SCL-9306: the /*start*/.../*end*/ markers
// delimit the expression whose inferred type is asserted (expected type is the
// trailing comment after this object). Do not change the marked expression.
object SCL9306B extends App {
  class A
  class B
  class C
  class D
  // Implicit view lifting an (A => B) function to a (C => D) function,
  // which is what makes `func1(new C)` type-check below.
  implicit def convert(f: A => B): (C => D) = { c: C => new D }
  def func1: (A => B) = { a: A => new B }
  /*start*/func1(new C)/*end*/
}
//SCL9306B.D | whorbowicz/intellij-scala | testdata/typeInference/bugs5/SCL9306A.scala | Scala | apache-2.0 | 223 |
package org.elasticsearch.spark.sql
import org.elasticsearch.hadoop.serialization.field.ConstantFieldExtractor
import org.elasticsearch.hadoop.serialization.MapFieldExtractor
import scala.collection.GenMapLike
import scala.collection.Map
import org.elasticsearch.hadoop.serialization.field.FieldExtractor
import org.apache.spark.sql.catalyst.types.StructType
import org.apache.spark.sql.catalyst.expressions.Row
import org.elasticsearch.spark.serialization.ScalaMapFieldExtractor
/** Field extractor for Spark SQL rows: resolves the configured field name
  * against a (Row, StructType) pair via the schema's field index, delegating
  * any other target shape to the Scala map extractor. */
class SchemaRDDFieldExtractor extends ScalaMapFieldExtractor {
  override protected def extractField(target: AnyRef): AnyRef = {
    target match {
      // FIX: pattern-match the tuple components directly. The previous
      // `case t: (Row, StructType)` is erasure-unchecked — it matched ANY
      // Tuple2 and then cast, risking ClassCastException; this form runtime-
      // checks each element and lets other tuples fall through to super.
      case (row: Row, struct: StructType) =>
        val index = struct.fieldNames.indexOf(getFieldName())
        if (index < 0) {
          FieldExtractor.NOT_FOUND
        } else {
          row(index).asInstanceOf[AnyRef]
        }
      case _ => super.extractField(target)
    }
  }
}
} | eliasah/elasticsearch-hadoop | spark/src/main/scala/org/elasticsearch/spark/sql/SchemaRDDFieldExtractor.scala | Scala | apache-2.0 | 962 |
package fa
/** Shared constants for talking to the Firebase REST API. */
object Core {
  val FirebaseTld = "firebaseIO.com"
  val Proto = "http://"
  // Firebase's REST API addresses resources as <path>.json
  val JSuffix = ".json"
  // Name of the environment variable holding the Firebase auth secret.
  val Secret = "Firebase_Secret"
}
/** Options for a REST read: `shallow` truncates nested objects to booleans; `printStyle` controls response formatting. */
case class ReadOptions(shallow: Boolean = false, printStyle: PrintStyle)
// Response formatting styles accepted by Firebase's `print` query parameter.
// NOTE(review): consider marking this trait `sealed` so matches over the
// variants are exhaustiveness-checked.
trait PrintStyle
object PrintStyle {
  case object Pretty extends PrintStyle
  case object Silent extends PrintStyle
  case object Standard extends PrintStyle
}
/** An opaque Firebase authentication token string. */
case class Token(v: String)
// Common shape of an authenticated REST call: every call carries a token and
// a print style. NOTE(review): consider sealing this hierarchy.
trait RestCall {
  val token: Token
  val print: PrintStyle
}
/** A REST read; `shallow` requests a truncated view of nested data. */
case class Read(token: Token, shallow: Boolean = false, print: PrintStyle) extends RestCall
/** Mints Firebase auth tokens from the secret held in the Firebase_Secret
  * environment variable. */
object Auth {
  import com.firebase.security.token.{TokenGenerator, TokenOptions}
  // NOTE(review): implicit JavaConversions is deprecated; prefer explicit
  // JavaConverters (.asJava) — mapAsJavaMap below is used explicitly anyway.
  import scala.collection.JavaConversions._
  import scala.util.Properties
  // None when the environment variable is unset; token() then returns None.
  private lazy val secret = Properties.envOrNone(Core.Secret)
  case class UserId(value: Int)
  case class AuthPayload(userId: UserId)
  private object AuthPayload{
    // Shape expected by TokenGenerator: Firebase requires a "uid" entry.
    implicit def asMap(pl: AuthPayload) =
      Map[String, Object](
        "uid" -> pl.userId.value
      )
  }
  // Builds a (possibly admin-scoped) token; None if no secret is configured.
  def token(p: AuthPayload, admin: Boolean = false): Option[Token] =
    secret.map(s => {
      val tg = new TokenGenerator(s)
      val opts = new TokenOptions()
      opts.setAdmin(admin)
      Token(tg.createToken(mapAsJavaMap(p), opts))
    })
}
// Marker type class for values that may appear in Firebase query parameters
// (ints, strings, numbers, booleans). NOTE(review): original author flagged
// this area as rough — instances carry no serialization logic yet.
trait WireType[A]
object WireType{
  implicit object i extends WireType[Int]
  implicit object s extends WireType[String]
  implicit object n extends WireType[BigDecimal]
  implicit object b extends WireType[Boolean]
}
object Querying{
  type or[A, B] = Either[A,B]

  /** A Firebase REST query: optional range bounds, equality filter, ordering
    * and a first/last limit. Every component is optional. */
  case class Query[A,B,C](before: Option[StartAt[A]],
                          after:Option[EndAt[B]],
                          equal: Option[EqualTo[C]],
                          ordering: Option[OrderBy],
                          limit: Option[First or Last] )

  object Query {
    /** Renders the query as a URL query string ("k=v&k=v", empty if no
      * components are set).
      *
      * FIX: the previous for-comprehension required every component to be
      * present and yielded an empty block (Option[Unit]) against the declared
      * String return type, so it could neither compile nor produce output.
      * Components are now rendered independently and joined with '&'.
      * NOTE(review): values are rendered via toString; Firebase expects
      * string values to be JSON-quoted — confirm at the call site. */
    def asQueryString[A,B,C](q: Query[A,B,C]): String = {
      val limitParam = q.limit.map {
        case Left(First(n)) => s"limitToFirst=$n"
        case Right(Last(n)) => s"limitToLast=$n"
      }
      val params = Seq(
        q.ordering.map(o => "orderBy=\"" + o.key + "\""),
        q.before.map(sa => s"startAt=${sa.value}"),
        q.after.map(ea => s"endAt=${ea.value}"),
        q.equal.map(et => s"equalTo=${et.value}"),
        limitParam
      ).flatten
      params.mkString("&")
    }
  }
  // Ordering modes supported by the Firebase REST API.
  trait FirebaseOrdering
  case class OrderBy(key: String) extends FirebaseOrdering
  case object OrderByKey extends FirebaseOrdering
  case object OrderByValue extends FirebaseOrdering
  case object OrderByPriority extends FirebaseOrdering
  case class StartAt[T: WireType](value: T)
  case class EndAt[T: WireType](value: T)
  case class EqualTo[T: WireType](value: T)
  case class First(i: Int)
  case class Last(i: Int)
}
| ChrisCoffey/ForwardAction | src/main/scala/Domain.scala | Scala | agpl-3.0 | 2,415 |
package frdomain.ch6
package streams
import java.util.Date
import scala.concurrent.duration._
import scala.concurrent.{ Future, ExecutionContext }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.collection.immutable._
import scalaz._
import Scalaz._
import common._
// Debit/credit marker for a transaction; sealed so matches are exhaustive.
sealed trait TransactionType
case object Debit extends TransactionType
case object Credit extends TransactionType
/** A single account movement; `date` defaults to the current day. */
case class Transaction(id: String, accountNo: String, debitCredit: TransactionType, amount: Amount, date: Date = today)
object Transaction {
  // Folds two transactions into their net movement: debits count negative,
  // credits positive; the sign of the sum picks the resulting direction.
  // NOTE(review): this instance violates the monoid identity law — appending
  // `zero` still produces a fresh random id and today's date, so
  // `zero |+| t != t`. Confirm callers rely only on amount/direction.
  implicit val TransactionMonoid = new Monoid[Transaction] {
    val zero = Transaction("", "", Debit, 0)
    def append(i: Transaction, j: => Transaction) = {
      val f = if (i.debitCredit == Debit) -i.amount else i.amount
      val s = if (j.debitCredit == Debit) -j.amount else j.amount
      val sum = f + s
      // Synthetic id for the folded transaction (nondeterministic).
      val id = util.Random.nextInt(Integer.MAX_VALUE).toString
      if (sum < 0) Transaction(id, j.accountNo, Debit, -sum) else Transaction(id, j.accountNo, Credit, sum)
    }
  }
}
/** Read-only account lookup by account number. */
trait AccountRepository {
  def query(no: String): Option[Account]
}
/** In-memory fixture implementation backed by a fixed set of accounts. */
object AccountRepository extends AccountRepository {
  private val accounts = Seq(
    Account("a-1", "dg", today.some),
    Account("a-2", "gh", today.some),
    Account("a-3", "tr", today.some))
  // Index by account number for O(1) lookup.
  val m = accounts.map(a => a.no -> a).toMap
  def query(no: String) = m.get(no)
}
/** Stubbed banking service used by the streaming examples: fixed account
  * numbers and a fixed transaction ledger. */
trait OnlineService {
  def allAccounts(implicit ec: ExecutionContext): Future[Seq[String]] = Future {
    Seq("a-1", "a-2", "a-3")
  }
  // Throws if the account number is unknown.
  def queryAccount(no: String, repo: AccountRepository) =
    repo.query(no).getOrElse { throw new RuntimeException("Invalid account number") }
  // Fixture ledger shared by the lookup methods below.
  val txns =
    Seq(
      Transaction("t-1", "a-1", Debit, 1000),
      Transaction("t-2", "a-2", Debit, 1000),
      Transaction("t-3", "a-3", Credit, 1000),
      Transaction("t-4", "a-1", Credit, 1000),
      Transaction("t-5", "a-1", Debit, 1000),
      Transaction("t-6", "a-2", Debit, 1000),
      Transaction("t-7", "a-3", Credit, 1000),
      Transaction("t-8", "a-3", Debit, 1000),
      Transaction("t-9", "a-2", Credit, 1000),
      Transaction("t-10", "a-2", Debit, 1000),
      Transaction("t-11", "a-1", Credit, 1000),
      Transaction("t-12", "a-3", Debit, 1000)
    )
  // NOTE(review): banking and settlement lookups are currently identical
  // filters; confirm whether settlement is meant to differ.
  def getBankingTransactions(a: Account) = txns.filter(_.accountNo == a.no)
  def getSettlementTransactions(a: Account) = txns.filter(_.accountNo == a.no)
  // Identity placeholder for a real validation step.
  def validate(t: Transaction) = t
  def allTransactions(implicit ec: ExecutionContext): Future[Seq[Transaction]] = Future { txns }
}
// Default singleton instance of the stub service.
object OnlineService extends OnlineService
| debasishg/frdomain | src/main/scala/frdomain/ch6/streams/domain.scala | Scala | apache-2.0 | 2,599 |
package com.wavesplatform.it.sync.transactions
import cats.syntax.option._
import com.wavesplatform.account.AddressScheme
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.common.utils._
import com.wavesplatform.it.api.BurnTransactionInfo
import com.wavesplatform.it.api.SyncHttpApi._
import com.wavesplatform.it.sync.{issueAmount, issueFee, _}
import com.wavesplatform.it.transactions.BaseTransactionSuite
import com.wavesplatform.transaction.Asset.IssuedAsset
import com.wavesplatform.transaction.TxVersion
import com.wavesplatform.transaction.assets.BurnTransaction
import play.api.libs.json.Json
/** End-to-end tests of BurnTransaction across all supported tx versions:
  * balance accounting, burning non-owned assets, over-burn rejection,
  * interaction with reissue, and the legacy `quantity` JSON field name. */
class BurnTransactionSuite extends BaseTransactionSuite {
  private val decimals: Byte = 2
  // Issues a non-reissuable asset, burns it in two halves, and checks asset
  // details, issuer balances, and (for v3+) the chainId round-trip.
  test("burning assets changes issuer's asset balance; issuer's waves balance is decreased by fee") {
    for (v <- burnTxSupportedVersions) {
      val (balance, effectiveBalance) = miner.accountBalances(firstAddress)
      val issuedAssetId =
        sender.issue(firstKeyPair, s"name+$v", "description", issueAmount, decimals, reissuable = false, fee = issueFee, waitForTx = true).id
      miner.assertBalances(firstAddress, balance - issueFee, effectiveBalance - issueFee)
      miner.assertAssetBalance(firstAddress, issuedAssetId, issueAmount)
      val details1 = miner.assetsDetails(issuedAssetId)
      assert(!details1.reissuable)
      assert(details1.quantity == issueAmount)
      assert(details1.minSponsoredAssetFee.isEmpty)
      // burn half of the coins and check balance
      val burnTx = sender.burn(firstKeyPair, issuedAssetId, issueAmount / 2, minFee, version = v, waitForTx = true)
      if (v > 2) {
        // Protobuf-era versions carry the chain id explicitly.
        burnTx.chainId shouldBe Some(AddressScheme.current.chainId)
        sender.transactionInfo[BurnTransactionInfo](burnTx.id).chainId shouldBe Some(AddressScheme.current.chainId)
      }
      sender.transactionInfo[BurnTransactionInfo](burnTx.id).amount shouldBe issueAmount / 2
      miner.assertBalances(firstAddress, balance - minFee - issueFee, effectiveBalance - minFee - issueFee)
      miner.assertAssetBalance(firstAddress, issuedAssetId, issueAmount / 2)
      val details2 = miner.assetsDetails(issuedAssetId)
      assert(!details2.reissuable)
      assert(details2.quantity == issueAmount - issueAmount / 2)
      val assetOpt = miner.assetsBalance(firstAddress).balances.find(_.assetId == issuedAssetId)
      assert(assetOpt.exists(_.balance == issueAmount / 2))
      // burn the rest and check again
      sender.burn(firstKeyPair, issuedAssetId, issueAmount / 2, minFee, version = v, waitForTx = true).id
      miner.assertAssetBalance(firstAddress, issuedAssetId, 0)
      val details3 = miner.assetsDetails(issuedAssetId)
      assert(!details3.reissuable)
      assert(details3.quantity == 0)
      // NOTE(review): likely intended `details3` here — details1 was already
      // asserted above. Confirm.
      assert(details1.minSponsoredAssetFee.isEmpty)
      // Fully-burned assets disappear from the portfolio listing.
      val assetOptRest = miner.assetsBalance(firstAddress).balances.find(_.assetId == issuedAssetId)
      assert(assetOptRest.isEmpty)
    }
    // Two burns per supported version were broadcast above.
    miner
      .transactionsByAddress(firstAddress, limit = 100)
      .count(_._type == BurnTransaction.typeId) shouldBe burnTxSupportedVersions.length * 2
  }
  // A non-issuer may burn tokens transferred to it; afterwards the issuer's
  // attempt to spend the (now unavailable) funds must fail.
  test("can burn non-owned asset; issuer asset balance decreased by transfer amount; burner balance decreased by burned amount") {
    for (v <- burnTxSupportedVersions) {
      val issuedQuantity      = issueAmount
      val transferredQuantity = issuedQuantity / 2
      val issuedAssetId =
        sender.issue(firstKeyPair, s"name+$v", "description", issuedQuantity, decimals, reissuable = false, issueFee, waitForTx = true).id
      sender.assertAssetBalance(firstAddress, issuedAssetId, issuedQuantity)
      sender.transfer(firstKeyPair, secondAddress, transferredQuantity, minFee, issuedAssetId.some, waitForTx = true).id
      sender.assertAssetBalance(firstAddress, issuedAssetId, issuedQuantity - transferredQuantity)
      sender.assertAssetBalance(secondAddress, issuedAssetId, transferredQuantity)
      sender.burn(secondKeyPair, issuedAssetId, transferredQuantity, minFee, v, waitForTx = true).id
      sender.assertAssetBalance(secondAddress, issuedAssetId, 0)
      val details = miner.assetsDetails(issuedAssetId)
      assert(!details.reissuable)
      assert(details.quantity == issuedQuantity - transferredQuantity)
      assert(details.minSponsoredAssetFee.isEmpty)
      assertBadRequestAndMessage(
        sender.transfer(secondKeyPair, firstAddress, transferredQuantity / 2, minFee, issuedAssetId.some).id,
        "Attempt to transfer unavailable funds"
      )
    }
  }
  // Burning more than the issued quantity must be rejected.
  test("issuer can't burn more tokens than he own") {
    for (v <- burnTxSupportedVersions) {
      val issuedQuantity = issueAmount
      val burnedQuantity = issuedQuantity * 2
      val issuedAssetId =
        sender.issue(firstKeyPair, s"name+$v", "description", issuedQuantity, decimals, reissuable = false, issueFee).id
      nodes.waitForHeightAriseAndTxPresent(issuedAssetId)
      sender.assertAssetBalance(firstAddress, issuedAssetId, issuedQuantity)
      assertBadRequestAndMessage(sender.burn(secondKeyPair, issuedAssetId, burnedQuantity, minFee, v).id, "Accounts balance errors")
    }
  }
  // A holder burning more than its own (transferred) balance must be rejected.
  test("user can't burn more tokens than he own") {
    for (v <- burnTxSupportedVersions) {
      val issuedQuantity      = issueAmount
      val transferredQuantity = issuedQuantity / 2
      val burnedQuantity      = transferredQuantity * 2
      val issuedAssetId = sender.issue(firstKeyPair, s"name+$v", "description", issuedQuantity, decimals, reissuable = false, issueFee).id
      miner.waitForTransaction(issuedAssetId)
      sender.assertAssetBalance(firstAddress, issuedAssetId, issuedQuantity)
      val transferId = sender.transfer(firstKeyPair, secondAddress, transferredQuantity, minFee, issuedAssetId.some).id
      miner.waitForTransaction(transferId)
      sender.assertAssetBalance(firstAddress, issuedAssetId, issuedQuantity - transferredQuantity)
      sender.assertAssetBalance(secondAddress, issuedAssetId, transferredQuantity)
      assertBadRequestAndMessage(sender.burn(secondKeyPair, issuedAssetId, burnedQuantity, minFee, v).id, "Accounts balance errors")
    }
  }
  // Full lifecycle: transfer, burn by issuer, reissue (closing reissuability),
  // burn by both parties down to zero, then verify reissue/transfer both fail.
  test("non-owner can burn asset after reissue") {
    for (v <- burnTxSupportedVersions) {
      val issuedQuantity      = issueAmount
      val transferredQuantity = issuedQuantity / 2
      val issuedAssetId = sender.issue(firstKeyPair, s"name+$v", "description", issuedQuantity, decimals, reissuable = true, issueFee).id
      miner.waitForTransaction(issuedAssetId)
      sender.assertAssetBalance(firstAddress, issuedAssetId, issuedQuantity)
      val transferId = sender.transfer(firstKeyPair, secondAddress, transferredQuantity, minFee, issuedAssetId.some).id
      miner.waitForTransaction(transferId)
      val burnOwnerTxTd = sender.burn(firstKeyPair, issuedAssetId, transferredQuantity, minFee, v).id
      miner.waitForTransaction(burnOwnerTxTd)
      sender.assertAssetBalance(firstAddress, issuedAssetId, 0)
      sender.assertAssetBalance(secondAddress, issuedAssetId, transferredQuantity)
      val details = miner.assetsDetails(issuedAssetId)
      assert(details.reissuable)
      assert(details.quantity == transferredQuantity)
      assert(details.minSponsoredAssetFee.isEmpty)
      // Reissue with reissuable = false, freezing the total supply.
      val reissueId = sender.reissue(firstKeyPair, issuedAssetId, issuedQuantity, false, issueFee).id
      miner.waitForTransaction(reissueId)
      val details1 = miner.assetsDetails(issuedAssetId)
      assert(!details1.reissuable)
      assert(details1.quantity == transferredQuantity + issuedQuantity)
      assert(details1.minSponsoredAssetFee.isEmpty)
      val burn1 = sender.burn(firstKeyPair, issuedAssetId, issuedQuantity, minFee, v).id
      miner.waitForTransaction(burn1)
      val burn2 = sender.burn(secondKeyPair, issuedAssetId, transferredQuantity, minFee, v).id
      miner.waitForTransaction(burn2)
      val details2 = miner.assetsDetails(issuedAssetId)
      assert(!details2.reissuable)
      assert(details2.quantity == 0)
      assert(details2.minSponsoredAssetFee.isEmpty)
      assertBadRequestAndMessage(sender.reissue(firstKeyPair, issuedAssetId, issuedQuantity, true, issueFee).id, "Asset is not reissuable")
      assertBadRequestAndMessage(
        sender.transfer(secondKeyPair, thirdAddress, transferredQuantity / 2, minFee, issuedAssetId.some).id,
        "Attempt to transfer unavailable funds"
      )
      assertBadRequestAndMessage(
        sender.transfer(firstKeyPair, thirdAddress, transferredQuantity / 2, minFee, issuedAssetId.some).id,
        "Attempt to transfer unavailable funds"
      )
    }
  }
  // The REST API historically accepted "quantity" instead of "amount"; a
  // hand-crafted JSON body with the legacy field must still broadcast.
  test("send burn with quantity field") {
    val issuedAssetId =
      sender.issue(firstKeyPair, "name", "description", issueAmount, decimals, reissuable = false, fee = issueFee, waitForTx = true).id
    val tx = BurnTransaction
      .selfSigned(TxVersion.V1, firstKeyPair, IssuedAsset(ByteStr.decodeBase58(issuedAssetId).get), 1, minFee, System.currentTimeMillis())
      .explicitGet()
    val json = tx.json() - "amount" ++ Json.obj("quantity" -> 1L)
    sender.signedBroadcast(json, waitForTx = true).id
  }
}
| wavesplatform/Waves | node-it/src/test/scala/com/wavesplatform/it/sync/transactions/BurnTransactionSuite.scala | Scala | mit | 9,118 |
package ghpages.examples
import ghpages.GhPagesMacros
import ghpages.examples.util.SingleSide
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.html_<^._
import org.scalajs.dom.html
// Demo page for CallbackOption: a keyboard-driven box that moves inside a fixed
// outer frame. The EXAMPLE:START/END region below is extracted verbatim by
// GhPagesMacros.exampleSource and displayed as the page's source listing.
object CallbackOptionExample {

  def content = SingleSide.Content(source, Main2())

  // Literal source text of the EXAMPLE region below, captured at compile time.
  val source = GhPagesMacros.exampleSource

  // Static wrapper: explanatory prose plus the live demo component.
  def Main2 = ScalaComponent.static(
    <.div(
      <.p(
        <.code("CallbackOption"), " is a ", <.code("Callback"), " that you can compose so that steps can abort the rest of the process.",
        <.br,
        "It makes it easy to work with conditions."),
      <.br,
      <.p(
        "Press ←↑↓→ to move the box. Hold ctrl to move to the edges.",
        <.br,
        "Notice that PageDown still scrolls the page but ↓ doesn't? That's because", <.code("preventDefault"), "is only called when a key is matched."),
      Main()))

  // EXAMPLE:START

  import org.scalajs.dom.ext.KeyCode

  // Demo geometry in pixels: outer frame, inner box, and per-keypress travel.
  val OuterX    = 600
  val OuterY    = 240
  val InnerSize = 24
  val MoveDist  = 24

  // Top-left corner of the inner box, relative to the outer frame.
  case class State(x: Int, y: Int)

  // Start with the inner box centred inside the outer frame.
  def initState = State((OuterX - InnerSize) / 2, (OuterY - InnerSize) / 2)

  val OuterDiv =
    <.div(
      ^.tabIndex := 0,
      ^.width  := OuterX.px,
      ^.height := OuterY.px,
      ^.border := "solid 1px #333",
      ^.background := "#ddd")

  val InnerDiv =
    <.div(
      ^.position.relative,
      ^.width  := InnerSize.px,
      ^.height := InnerSize.px,
      ^.background := "#800")

  // Move `steps` increments along one axis, clamped so the box stays inside [0, max - InnerSize].
  def moveOneAxis(pos: Int, steps: Int, max: Int): Int =
    (pos + steps * MoveDist) min (max - InnerSize) max 0

  class Backend($: BackendScope[Unit, State]) {

    private val outerRef = Ref[html.Element]

    // Focus the outer div on mount so it receives key events immediately.
    def init: Callback =
      outerRef.foreach(_.focus())

    def move(dx: Int, dy: Int): Callback =
      $.modState(s => s.copy(
        x = moveOneAxis(s.x, dx, OuterX),
        y = moveOneAxis(s.y, dy, OuterY)))

    def handleKey(e: ReactKeyboardEvent): Callback = {

      def plainKey: CallbackOption[Unit] = // CallbackOption will stop if a key isn't matched
        CallbackOption.keyCodeSwitch(e) {
          case KeyCode.Up    => move(0, -1)
          case KeyCode.Down  => move(0, 1)
          case KeyCode.Left  => move(-1, 0)
          case KeyCode.Right => move( 1, 0)
        }

      def ctrlKey: CallbackOption[Unit] = // Like above but if ctrlKey is pressed
        CallbackOption.keyCodeSwitch(e, ctrlKey = true) {
          case KeyCode.Up    => move(0, -OuterY)
          case KeyCode.Down  => move(0, OuterY)
          case KeyCode.Left  => move(-OuterX, 0)
          case KeyCode.Right => move( OuterX, 0)
        }

      (plainKey orElse ctrlKey) >> e.preventDefaultCB // This is the interesting part.
      //
      // orElse joins CallbackOptions so if one fails, it tries the other.
      //
      // The >> means "and then run" but only if the left side passes.
      // This means preventDefault only runs if a valid key is pressed.
    }

    def render(s: State) =
      OuterDiv.withRef(outerRef)(
        ^.onKeyDown ==> handleKey,
        InnerDiv(
          ^.left := s.x.px,
          ^.top  := s.y.px))
  }

  val Main = ScalaComponent.builder[Unit]
    .initialState(initState)
    .renderBackend[Backend]
    .componentDidMount(_.backend.init)
    .build

  // EXAMPLE:END
}
| japgolly/scalajs-react | ghpages/src/main/scala/ghpages/examples/CallbackOptionExample.scala | Scala | apache-2.0 | 3,611 |
package nl.svanwouw.trending.util
import java.text.SimpleDateFormat
import java.util.Date
import org.json4s.DateFormat
import org.json4s.ParserUtil.ParseException
/**
 * Make simple date format serializable for Spark.
 *
 * A fresh [[SimpleDateFormat]] is built on every call because SimpleDateFormat
 * is neither thread-safe nor serializable; only the pattern string is captured.
 */
class SerializableDateFormat(pattern: String) extends DateFormat {

  private[this] def formatter = new SimpleDateFormat(pattern)

  /**
   * Parses `s`, returning None when it does not match the pattern.
   *
   * Bug fix: `SimpleDateFormat.parse` throws `java.text.ParseException`, but the
   * previous handler matched `org.json4s.ParserUtil.ParseException` (brought in
   * by this file's import), which can never be thrown here — so parse failures
   * propagated instead of yielding None.
   */
  override def parse(s: String) = try {
    Some(formatter.parse(s))
  } catch {
    case _: java.text.ParseException => None
  }

  override def format(d: Date) = formatter.format(d)
}
| stefanvanwouw/spark-based-trending-topics-extraction | src/main/scala/nl/svanwouw/trending/util/SerializableDateFormat.scala | Scala | mit | 534 |
/*
* ******************************************************************************
* * Copyright (C) 2013 Christopher Harris (Itszuvalex)
* * Itszuvalex@gmail.com
* *
* * This program is free software; you can redistribute it and/or
* * modify it under the terms of the GNU General Public License
* * as published by the Free Software Foundation; either version 2
* * of the License, or (at your option) any later version.
* *
* * This program is distributed in the hope that it will be useful,
* * but WITHOUT ANY WARRANTY; without even the implied warranty of
* * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* * GNU General Public License for more details.
* *
* * You should have received a copy of the GNU General Public License
* * along with this program; if not, write to the Free Software
* * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* *****************************************************************************
*/
package com.itszuvalex.femtocraft.power.tiles
import com.itszuvalex.femtocraft.api.core.{Configurable, Saveable}
import com.itszuvalex.femtocraft.core.tiles.TileEntityBase
import com.itszuvalex.femtocraft.core.traits.tile.{Inventory, MultiBlockComponent}
import com.itszuvalex.femtocraft.network.FemtocraftPacketHandler
import com.itszuvalex.femtocraft.network.messages.MessageFissionReactorCore
import com.itszuvalex.femtocraft.power.FissionReactorRegistry
import com.itszuvalex.femtocraft.power.multiblock.MultiBlockNanoFissionReactor
import com.itszuvalex.femtocraft.power.tiles.TileEntityNanoFissionReactorCore._
import com.itszuvalex.femtocraft.utils.BaseInventory
import com.itszuvalex.femtocraft.{Femtocraft, FemtocraftGuiConstants}
import net.minecraft.entity.player.EntityPlayer
import net.minecraft.item.ItemStack
import net.minecraft.tileentity.TileEntity
import net.minecraftforge.common.util.ForgeDirection
import net.minecraftforge.fluids._
import org.apache.logging.log4j.Level
object TileEntityNanoFissionReactorCore {
  /** GUI action opcodes exchanged with the server via MessageFissionReactorCore. */
  val incrementAction: Byte = 0
  val decrementAction: Byte = 1
  val abortAction: Byte = 2

  /** Inventory slot indices for heat reagents, solid salt, and thorium sources. */
  val heatSlot = 0
  val thoriumSlot = 2
  val saltSlot = 1

  //***********************************************************
  // Balance values below are exposed to the mod's config file via @Configurable.
  @Configurable(comment = "Amount of cooled salt converted to molten salt per tick.")
  val cooledSaltConversionPerTick: Int = 100
  @Configurable(comment = "Contaminated salt converts to cooled salt at this ratio.")
  val contaminatedSaltLossRatio: Float = .7f
  // Config-comment typo fixed below ("Contamianted" -> "Contaminated").
  @Configurable(comment = "Contaminated salt consumes thorium at this ratio.")
  val contaminatedThoriumLossRatio: Float = .3f
  @Configurable(comment = "When the plus or minus button is hit, increment thorium concentration target by this amount.")
  val thoriumConcentrationTargetIncrementAmount: Float = .01f
  @Configurable(comment = "Minimum thorium concentration before salt is melted.")
  val minimumThoriumConcentrationToMeltSalt: Float = .01f
  @Configurable(comment = "Minimum heat required before the reactor melts anything.")
  val minimumHeat: Int = 100
  @Configurable(comment = "Heat required per mB of cooled salt to convert it to molten salt.")
  val cooledSaltConversionHeatRatio: Double = 1D
  @Configurable(comment = "Amount of heat generated per mB of cooled salt per tick.")
  val cooledSaltHeatMultiplier: Double = .001
  @Configurable(comment = "Amount of heat generated per mB of molten salt per tick.")
  val moltenSaltHeatMultiplier: Double = .002
  @Configurable(comment = "% of maximum heat that represents the minimum of the UNSTABLE threshold.")
  val unstableTemperatureThreshold: Double = .66
  @Configurable(comment = "% of maximum heat that represents the minimum of the CRITICAL threshold.")
  val criticalTemperatureThreshold: Double = .83
  @Configurable(comment = "Amount of solid salt needed per thorium")
  val solidSaltToThoriumRatio: Double = .2
  @Configurable(comment = "Heat is multiplied by this amount every tick.")
  val enviroHeatLossMultiplier: Double = .99
  @Configurable(comment = "Tank size for cooled salt.")
  val cooledSaltTankMaxAmount: Int = 100000
  @Configurable(comment = "Tank size for molten salt.")
  val moltenSaltTankMaxAmount: Int = 100000
  @Configurable(comment = "Amount of stored thorium maximum.")
  val thoriumStoreMax: Int = 100000
  @Configurable(comment = "Maximum temperature.")
  val temperatureMaxAmount: Int = 3000

  /** Coarse reactor condition; see the tile's getState for the temperature mapping. */
  object ReactorState extends Enumeration {
    type ReactorState = Value
    val INACTIVE, ACTIVE, UNSTABLE, CRITICAL = Value
  }
}
@Configurable class TileEntityNanoFissionReactorCore
  extends TileEntityBase with Inventory with IFluidHandler with MultiBlockComponent {

  // Persisted (@Saveable) reactor state: fluid tanks, raw thorium store, heat,
  // and the player-selected thorium concentration target.
  @Saveable private val cooledSaltTank = new FluidTank(cooledSaltTankMaxAmount)
  @Saveable private val moltenSaltTank = new FluidTank(moltenSaltTankMaxAmount)
  @Saveable private var thoriumStoreCurrent = 0
  private var temperatureMax = temperatureMaxAmount
  @Saveable private var temperatureCurrent = 0f
  @Saveable private var thoriumConcentrationTarget = 0f

  override def defaultInventory: BaseInventory = new BaseInventory(3)

  /** Coarse reactor state (INACTIVE/ACTIVE/UNSTABLE/CRITICAL) derived from current heat. */
  def getState = {
    val temp = getTemperatureCurrent
    val max = getTemperatureMax
    temp match {
      case _ if temp <= minimumHeat                       => ReactorState.INACTIVE
      case _ if temp < max * unstableTemperatureThreshold => ReactorState.ACTIVE
      case _ if temp < max * criticalTemperatureThreshold => ReactorState.UNSTABLE
      case _                                              => ReactorState.CRITICAL
    }
  }

  /** Opens the reactor GUI on activation, but only once the multiblock is formed. */
  override def onSideActivate(par5EntityPlayer: EntityPlayer, side: Int): Boolean = {
    if (isValidMultiBlock) {
      val te: TileEntity = worldObj.getTileEntity(info.x, info.y, info.z)
      if (te == null) {
        return false
      }
      par5EntityPlayer.openGui(getMod, getGuiID, worldObj, info.x, info.y, info.z)
      return true
    }
    false
  }

  override def getGuiID = FemtocraftGuiConstants.NanoFissionReactorGuiID

  /** Per-tick server simulation: cool down, heat up, then consume reagents. */
  override def femtocraftServerUpdate() {
    super.femtocraftServerUpdate()
    loseHeat()
    gainHeat()
    meltThorium()
    meltSalt()
    meltWorld()
  }

  // Stub: presumably intended to damage the surroundings at UNSTABLE/CRITICAL
  // temperatures — all branches are currently empty.
  private def meltWorld() {
    if (temperatureCurrent > temperatureMax) {
    }
    else if (temperatureCurrent > temperatureMax * criticalTemperatureThreshold) {
    }
    else if (temperatureCurrent > temperatureMax * unstableTemperatureThreshold) {
    }
  }

  /**
   * Consumes one thorium-source item stack (when hot enough and below the target
   * concentration), converting it into stored thorium at the cost of heat.
   */
  private def meltThorium() {
    if (getTemperatureCurrent < minimumHeat) {
      return
    }
    if (getThoriumConcentration < getThoriumConcentrationTarget) {
      val item: ItemStack = getStackInSlot(thoriumSlot)
      if (item == null) {
        return
      }
      val reagent: FissionReactorRegistry.FissionReactorReagent = FissionReactorRegistry.getThoriumSource(item)
      if (reagent == null) {
        return
      }
      // Require enough items, enough heat, and enough free thorium capacity.
      if (reagent.item.stackSize <= item.stackSize &&
          getTemperatureCurrent >= reagent.temp &&
          getTemperatureCurrent > minimumHeat &&
          (thoriumStoreMax - thoriumStoreCurrent) >= reagent.amount) {
        decrStackSize(thoriumSlot, reagent.item.stackSize)
        setTemperatureCurrent(getTemperatureCurrent - reagent.temp)
        thoriumStoreCurrent += reagent.amount
        setModified()
      }
    }
  }

  def getThoriumConcentrationTarget = thoriumConcentrationTarget

  /**
   * Converts cooled salt (preferred) or solid salt items into molten salt,
   * consuming heat — and, for solid salt, stored thorium as well.
   */
  private def meltSalt() {
    if (getThoriumConcentration < minimumThoriumConcentrationToMeltSalt) {
      return
    }
    if (getTemperatureCurrent < minimumHeat) {
      return
    }
    var saltAmount: Int = Math.min(getCooledSaltAmount, cooledSaltConversionPerTick)
    if (saltAmount > 0) {
      // Cap by free space in the molten tank, then by available heat.
      saltAmount = Math.min(saltAmount, moltenSaltTank.getCapacity - getMoltenSaltAmount)
      val heatAmount: Int = (saltAmount * cooledSaltConversionHeatRatio).toInt
      // NOTE(review): heatAmount (heat units) is min'd against a salt amount here;
      // the two only coincide while cooledSaltConversionHeatRatio == 1.0 — verify intent.
      saltAmount = Math.min(heatAmount, getTemperatureCurrent / cooledSaltConversionHeatRatio).toInt
      if (saltAmount > 0) {
        cooledSaltTank.drain(saltAmount, true)
        setTemperatureCurrent((getTemperatureCurrent - saltAmount * cooledSaltConversionHeatRatio).toFloat)
        addMoltenSalt(saltAmount)
        setModified()
      }
    }
    else {
      // No cooled salt available: try melting a solid-salt item instead.
      val item: ItemStack = getStackInSlot(saltSlot)
      if (item == null) {
        return
      }
      val reagent: FissionReactorRegistry.FissionReactorReagent = FissionReactorRegistry.getSaltSource(item)
      if (reagent != null) {
        if (reagent.item.stackSize <= item.stackSize &&
            getTemperatureCurrent >= reagent.temp &&
            (moltenSaltTank.getCapacity - getMoltenSaltAmount) >= reagent.amount &&
            thoriumStoreCurrent >= (reagent.amount * solidSaltToThoriumRatio)) {
          decrStackSize(saltSlot, reagent.item.stackSize)
          setTemperatureCurrent(getTemperatureCurrent - reagent.temp)
          addMoltenSalt(reagent.amount)
          thoriumStoreCurrent -= (reagent.amount * solidSaltToThoriumRatio).toInt
          setModified()
        }
      }
    }
  }

  /** Fills the molten tank directly; returns the amount accepted and flags the tile dirty. */
  def addMoltenSalt(amount: Int): Int = {
    val ret = moltenSaltTank.fill(new FluidStack(Femtocraft.fluidMoltenSalt, amount), true)
    if (ret > 0) {
      setModified()
    }
    ret
  }

  /** Fraction [0, 1] of the thorium store currently filled. */
  def getThoriumConcentration = thoriumStoreCurrent.toFloat / thoriumStoreMax.toFloat

  def getCooledSaltAmount = cooledSaltTank.getFluidAmount

  def getMoltenSaltAmount = moltenSaltTank.getFluidAmount

  /** Generates heat from both salt tanks (scaled by concentration) and from heat-source items. */
  private def gainHeat() {
    setTemperatureCurrent(
      (getTemperatureCurrent + (getCooledSaltAmount.toFloat * cooledSaltHeatMultiplier * getThoriumConcentration)).toFloat)
    setTemperatureCurrent(
      (getTemperatureCurrent + (getMoltenSaltAmount.toFloat * moltenSaltHeatMultiplier * getThoriumConcentration)).toFloat)
    val heatItem: ItemStack = inventory.getStackInSlot(heatSlot)
    if (heatItem != null) {
      val result: FissionReactorRegistry.FissionReactorReagent = FissionReactorRegistry.getHeatSource(heatItem)
      if (result != null) {
        // Consume the reagent only when enough items are present and the temperature
        // delta fits: heating must stay under the max, cooling must not go below zero.
        if (result.item.stackSize <= heatItem.stackSize &&
            ((result.temp > 0 && (getTemperatureMax - getTemperatureCurrent) >= result.temp) ||
             (result.temp < 0 && Math.abs(result.temp) <= getTemperatureCurrent))) {
          decrStackSize(heatSlot, result.item.stackSize)
          setTemperatureCurrent(getTemperatureCurrent + result.temp)
          setModified()
        }
      }
    }
  }

  def getTemperatureMax = temperatureMax

  def setTemperatureMax(temperatureMax: Int) {
    this.temperatureMax = temperatureMax
  }

  // Passive cooling: heat decays geometrically every tick.
  private def loseHeat() {
    setTemperatureCurrent((getTemperatureCurrent * enviroHeatLossMultiplier).toFloat)
  }

  def getTemperatureCurrent = temperatureCurrent

  def setTemperatureCurrent(temperatureCurrent: Float) {
    this.temperatureCurrent = temperatureCurrent
  }

  override def hasGUI = isValidMultiBlock

  /** Fills the cooled tank directly; returns the amount accepted and flags the tile dirty. */
  def addCooledSalt(amount: Int): Int = {
    val ret: Int = cooledSaltTank.fill(new FluidStack(Femtocraft.fluidCooledMoltenSalt, amount), true)
    if (ret > 0) {
      setModified()
    }
    ret
  }

  /**
   * IFluidHandler fill: accepts cooled (contaminated or clean) molten salt.
   * Contaminated salt is decontaminated on the way in — it consumes stored
   * thorium and only a fraction of it survives as cooled salt.
   */
  def fill(from: ForgeDirection, resource: FluidStack, doFill: Boolean): Int = {
    var fill: FluidStack = null
    if (resource.getFluid eq Femtocraft.fluidCooledContaminatedMoltenSalt) {
      var amount: Int = resource.amount
      // NOTE(review): an Int thorium store is compared against a Float
      // concentration threshold here — verify the intended units.
      amount = Math.min(amount,
                        Math.min(Math.max(getThoriumStoreCurrent - minimumThoriumConcentrationToMeltSalt, 0),
                                 resource.amount * contaminatedThoriumLossRatio) / contaminatedThoriumLossRatio).toInt
      amount = Math.min(amount, (moltenSaltTank.getCapacity - getMoltenSaltAmount) / contaminatedSaltLossRatio).toInt
      fill = new FluidStack(Femtocraft.fluidCooledMoltenSalt, amount)
      if (doFill) {
        thoriumStoreCurrent -= (amount * contaminatedThoriumLossRatio).toInt
      }
    }
    else if (resource.getFluid eq Femtocraft.fluidCooledMoltenSalt) {
      fill = resource
    }
    else {
      return 0
    }
    val result: Int = cooledSaltTank.fill(fill, doFill)
    if (result > 0) {
      setModified()
    }
    result
  }

  def getThoriumStoreCurrent = thoriumStoreCurrent

  def setThoriumStoreCurrent(thoriumStoreCurrent: Int) {
    this.thoriumStoreCurrent = thoriumStoreCurrent
  }

  /** IFluidHandler drain by stack: only molten salt may be drained. */
  def drain(from: ForgeDirection, resource: FluidStack, doDrain: Boolean): FluidStack = {
    if (resource.getFluid ne Femtocraft.fluidMoltenSalt) return null
    drain(from, resource.amount, doDrain)
  }

  def drain(from: ForgeDirection, maxDrain: Int, doDrain: Boolean): FluidStack = {
    val result: FluidStack = moltenSaltTank.drain(maxDrain, doDrain)
    if (result != null && result.amount > 0) {
      setModified()
    }
    result
  }

  def canFill(from: ForgeDirection, fluid: Fluid) =
    fluid == Femtocraft.fluidCooledContaminatedMoltenSalt || fluid == Femtocraft.fluidCooledMoltenSalt

  def canDrain(from: ForgeDirection, fluid: Fluid) = fluid == Femtocraft.fluidMoltenSalt

  def getTankInfo(from: ForgeDirection): Array[FluidTankInfo] =
    Array[FluidTankInfo](cooledSaltTank.getInfo, moltenSaltTank.getInfo)

  override def markDirty() {
    // Keep the multiblock controller in sync with inventory changes.
    MultiBlockNanoFissionReactor.onMultiblockInventoryChanged(worldObj, info.x, info.y, info.z)
    super.markDirty()
  }

  /** Slot filtering: each slot only accepts items registered for its reagent role. */
  override def isItemValidForSlot(i: Int, itemstack: ItemStack): Boolean = i match {
    case `heatSlot`    => FissionReactorRegistry.getHeatSource(itemstack) != null
    case `saltSlot`    => FissionReactorRegistry.getSaltSource(itemstack) != null
    case `thoriumSlot` => FissionReactorRegistry.getThoriumSource(itemstack) != null
    case _             => false
  }

  // Setters used when loading/syncing tank contents without the fill() bookkeeping.
  def setCooledMoltenSalt(cooledMoltenSalt: Int) {
    if (cooledSaltTank.getFluid == null) {
      cooledSaltTank.fill(new FluidStack(Femtocraft.fluidCooledMoltenSalt, cooledMoltenSalt), true)
    }
    else {
      cooledSaltTank.getFluid.amount = cooledMoltenSalt
    }
  }

  def setMoltenSalt(moltenSalt: Int) {
    if (moltenSaltTank.getFluid == null) {
      moltenSaltTank.fill(new FluidStack(Femtocraft.fluidMoltenSalt, moltenSalt), true)
    }
    else {
      moltenSaltTank.getFluid.amount = moltenSalt
    }
  }

  def getCooledSaltTank = cooledSaltTank

  def getMoltenSaltTank = moltenSaltTank

  // Client-side GUI buttons: forward the action byte to the server.
  def onIncrementClick() {
    onClick(incrementAction)
  }

  def onDecrementClick() {
    onClick(decrementAction)
  }

  def onAbortClick() {
    onClick(abortAction)
  }

  private def onClick(action: Byte) {
    FemtocraftPacketHandler.INSTANCE
      .sendToServer(new MessageFissionReactorCore(xCoord, yCoord, zCoord, worldObj.provider.dimensionId, action))
  }

  /** Server-side dispatch of a GUI action byte received from the client. */
  def handleAction(action: Byte) {
    action match {
      case `incrementAction` => incrementThoriumConcentrationTarget()
      case `decrementAction` => decrementThoriumConcentrationTarget()
      case `abortAction`     => abortReaction()
      case _                 =>
        // Log message fixed: this tile is the fission reactor, not the fusion reactor.
        Femtocraft.log(Level.ERROR,
                       "Received invalid action for Fission Reactor at x-" + xCoord + " y-" + yCoord + " z-" + zCoord +
                       " at dimension-" + worldObj.provider.dimensionId + ".")
    }
  }

  def incrementThoriumConcentrationTarget() {
    setThoriumConcentrationTarget(getThoriumConcentrationTarget + thoriumConcentrationTargetIncrementAmount)
  }

  /** Clamps the requested target concentration to [0, 1]. */
  def setThoriumConcentrationTarget(thoriumConcentrationTarget: Float) {
    this.thoriumConcentrationTarget = Math.min(Math.max(thoriumConcentrationTarget, 0f), 1f)
    setModified()
  }

  def decrementThoriumConcentrationTarget() {
    setThoriumConcentrationTarget(getThoriumConcentrationTarget - thoriumConcentrationTargetIncrementAmount)
  }

  /** Emergency stop: dumps all stored thorium and both salt tanks. */
  def abortReaction() {
    thoriumStoreCurrent = 0
    cooledSaltTank.setFluid(null)
    moltenSaltTank.setFluid(null)
    setUpdate()
    setModified()
  }
}
| Itszuvalex/Femtocraft-alpha-1 | src/main/java/com/itszuvalex/femtocraft/power/tiles/TileEntityNanoFissionReactorCore.scala | Scala | gpl-2.0 | 18,254 |
package filodb.core.query
import scala.reflect.runtime.universe._
import com.typesafe.scalalogging.StrictLogging
import monix.eval.Task
import org.joda.time.DateTime
import filodb.core.binaryrecord2.RecordSchema
import filodb.core.metadata.Column
import filodb.core.metadata.Column.ColumnType
import filodb.core.store.ChunkScanMethod
import filodb.memory.format.RowReader
/**
* Some basic info about a single Partition
*/
final case class PartitionInfo(schema: RecordSchema, base: Array[Byte], offset: Long, shardNo: Int) {
  /** Renders as "/shard:&lt;n&gt;/&lt;stringified partition key&gt;". */
  override def toString: String = s"/shard:$shardNo/${schema.stringify(base, offset)}"

  /** Serialized partition-key bytes extracted from (base, offset) using the record schema. */
  def partKeyBytes: Array[Byte] = schema.asByteArray(base, offset)
}
/**
 * Describes column/field name and type.
 */
final case class ColumnInfo(name: String, colType: Column.ColumnType)

object ColumnInfo {
  // Convenience constructor: derive name/type from a full Column definition.
  def apply(col: Column): ColumnInfo = ColumnInfo(col.name, col.columnType)
}
/**
* Describes the full schema of result types, including how many initial columns are for row keys.
* The first ColumnInfo in the schema describes the first vector in Vectors and first field in Tuples, etc.
* @param brSchemas if any of the columns is a BinaryRecord: map of colNo -> inner BinaryRecord schema
* @param numRowKeyColumns the number of row key columns at the start of columnns
* @param fixedVectorLen if defined, each vector is guaranteed to have exactly this many output elements.
* See PeriodicSampleMapper for an example of how this is used.
* @param colIDs the column IDs of the columns, used to access additional columns if needed
*/
final case class ResultSchema(columns: Seq[ColumnInfo], numRowKeyColumns: Int,
                              brSchemas: Map[Int, RecordSchema] = Map.empty,
                              fixedVectorLen: Option[Int] = None,
                              colIDs: Seq[Int] = Nil) {
  import Column.ColumnType._

  /** Total number of columns. */
  def length: Int = columns.size

  /** True when no columns are present at all. */
  def isEmpty: Boolean = columns.isEmpty

  /** True when there is a single row key of Long or Timestamp type. */
  def isTimeSeries: Boolean =
    numRowKeyColumns == 1 && columns.headOption.exists { col =>
      col.colType == LongColumn || col.colType == TimestampColumn
    }

  /** True if the main column is a Histogram followed by an extra Double column. */
  def isHistDouble: Boolean =
    columns.size == 3 && columns(1).colType == HistogramColumn && columns(2).colType == DoubleColumn

  /** True for the plain two-column (key, histogram) layout. */
  def isHistogram: Boolean =
    columns.size == 2 && columns(1).colType == HistogramColumn

  /** Avg aggregator results carry a third column named "count". */
  def isAvgAggregator: Boolean = columns.size == 3 && columns(2).name == "count"

  /** Stdval aggregator results carry a "mean" column at index 2 of four. */
  def isStdValAggregator: Boolean = columns.size == 4 && columns(2).name == "mean"

  /** Compares columns, row-key count and BR schemas; ignores fixedVectorLen and colIDs. */
  def hasSameColumnsAs(other: ResultSchema): Boolean =
    columns == other.columns &&
      numRowKeyColumns == other.numRowKeyColumns &&
      brSchemas == other.brSchemas

  /** Compares only the column types, disregarding column names. */
  def hasSameColumnTypes(other: ResultSchema): Boolean =
    columns.map(_.colType) == other.columns.map(_.colType)
}
object ResultSchema {
  // Sentinel schema: no columns, one (implicit) row key column.
  val empty = ResultSchema(Nil, 1)

  /**
   * Returns the type of the value column (column index 1) of a time-series schema.
   * Fails with IllegalArgumentException (via require) when the schema is not
   * time series based or has fewer than two columns.
   */
  def valueColumnType(schema: ResultSchema): ColumnType = {
    require(schema.isTimeSeries, s"Schema $schema is not time series based, cannot continue query")
    require(schema.columns.size >= 2, s"Schema $schema has less than 2 columns, cannot continue query")
    schema.columns(1).colType
  }
}
/**
* There are three types of final query results.
* - a list of raw (or via function, transformed) time series samples, with an optional key range
* - a list of aggregates
* - a final aggregate
*/
// NOTE: the Serializable is needed for Akka to choose a more specific serializer (eg Kryo)
sealed trait Result extends java.io.Serializable {
  /** Schema describing the columns of every row in this result. */
  def schema: ResultSchema

  /**
   * Returns an Iterator of (Option[PartitionInfo], Seq[RowReader]) which helps with serialization. Basically each
   * element of the returned Seq contains partition info (optional), plus a Seq of RowReaders. Each RowReader
   * can then be converted to pretty print text, JSON, etc. etc.
   */
  def toRowReaders: Iterator[(Option[PartitionInfo], Seq[RowReader])]

  /**
   * Pretty prints all the elements into strings. Returns an iterator to avoid memory bloat.
   *
   * @param formatTime when true and the schema is time series based, the first (timestamp) column is
   *                   rendered as a human-readable date plus its age relative to now
   * @param partitionRowLimit at most this many rows are printed per partition
   */
  def prettyPrint(formatTime: Boolean = true, partitionRowLimit: Int = 50): Iterator[String] = {
    val curTime = System.currentTimeMillis
    toRowReaders.map { case (partInfoOpt, rowReaders) =>
      partInfoOpt.map(_.toString).getOrElse("") + "\\n\\t" +
      rowReaders.take(partitionRowLimit).map {
        case reader =>
          val firstCol = if (formatTime && schema.isTimeSeries) {
            val timeStamp = reader.getLong(0)
            s"${new DateTime(timeStamp).toString()} (${(curTime - timeStamp)/1000}s ago)"
          } else {
            reader.getAny(0).toString
          }
          // Remaining columns are rendered via their toString, tab-separated.
          (firstCol +: (1 until schema.length).map(reader.getAny(_).toString)).mkString("\\t")
      }.mkString("\\n\\t") + "\\n"
    }
  }
}
/**
* Converts various types to result types
* TODO: consider collapsing into Result
*/
abstract class ResultMaker[A: TypeTag] {
  /**
   * Converts a source type like a Vector or Tuple to a result, with the given schema.
   * @param schema the schema of the result
   * @param chunkMethod used only for the VectorListResult to filter rows from the vectors
   * @param limit for Observables, limits the number of items to take
   */
  def toResult(input: A, schema: ResultSchema, chunkMethod: ChunkScanMethod, limit: Int): Task[Result]

  /** Inverse of toResult: recovers the source value from a Result. */
  def fromResult(res: Result): A

  /**
   * Full type info including erased inner class info, needed to discern the inner type of
   * Observables. Rendered as a short string without package namespaces, e.g. "Observable[Vector]".
   */
  def typeInfo: String = {
    val tpe = typeOf[A]
    val argNames = tpe.typeArgs.map(_.typeSymbol.name)
    s"${tpe.typeSymbol.name}[${argNames.mkString(",")}]"
  }
}
object ResultMaker extends StrictLogging {
  implicit object UnitMaker extends ResultMaker[Unit] {
    // Unit should NEVER be the output of an ExecPlan. Create an empty result if we ever desire that.
    // ??? makes any accidental call fail fast with NotImplementedError.
    def toResult(u: Unit,
                 schema: ResultSchema,
                 chunkMethod: ChunkScanMethod,
                 limit: Int = 1000): Task[Result] = ???
    def fromResult(res: Result): Unit = {}
  }
}
/** Signals that the service cannot currently serve the request; `message` describes the cause. */
class ServiceUnavailableException(message: String) extends RuntimeException(message)
| filodb/FiloDB | core/src/main/scala/filodb.core/query/ResultTypes.scala | Scala | apache-2.0 | 6,398 |
import sbt._
import Keys._
import PlayProject._
// sbt build definition for the grom-rest Play application (legacy Build.scala style).
object ApplicationBuild extends Build {

  // Artifact identity for the generated Play project.
  val appName = "grom-rest"
  val appVersion = "1.0-SNAPSHOT"

  // Managed dependencies. mime-util excludes slf4j-log4j12 to avoid pulling a
  // second slf4j binding onto the classpath.
  val appDependencies = Seq(
    "com.amazonaws" % "aws-java-sdk" % "1.0.002",
    "org.mockito" % "mockito-core" % "1.9.0",
    "org.specs2" %% "specs2" % "1.8.2" % "test",
    "org.openoffice" % "juh" % "3.2.1",
    "org.openoffice" % "ridl" % "3.2.1",
    "org.openoffice" % "unoil" % "3.2.1",
    "org.json" % "json" % "20090211",
    "commons-io" % "commons-io" % "2.1",
    "eu.medsea.mimeutil" % "mime-util" % "2.1.3" exclude("org.slf4j", "slf4j-log4j12")
  )

  // NOTE(review): the result of this expression is discarded, so the
  // sbt-dependency-graph settings are likely never applied to the project —
  // they would need to be added to the PlayProject's settings. Verify.
  seq(net.virtualvoid.sbt.graph.Plugin.graphSettings: _*)

  val main = PlayProject(appName, appVersion, appDependencies, mainLang = SCALA).settings ()

}
| vba/grom | rest/project/Build.scala | Scala | apache-2.0 | 750 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.network.yarn
import scala.collection.JavaConverters._
import org.apache.hadoop.metrics2.MetricsRecordBuilder
import org.mockito.ArgumentMatchers.{any, anyDouble, anyInt, anyLong}
import org.mockito.Mockito.{mock, times, verify, when}
import org.scalatest.Matchers
import org.apache.spark.SparkFunSuite
import org.apache.spark.network.server.OneForOneStreamManager
import org.apache.spark.network.shuffle.{ExternalBlockHandler, ExternalShuffleBlockResolver}
class YarnShuffleServiceMetricsSuite extends SparkFunSuite with Matchers {

  // Resolver is mocked so registeredExecutorsSize has a fixed, known value (42).
  val streamManager = mock(classOf[OneForOneStreamManager])
  val blockResolver = mock(classOf[ExternalShuffleBlockResolver])
  when(blockResolver.getRegisteredExecutorsSize).thenReturn(42)

  // All metrics exposed by the external shuffle service's block handler.
  val metrics = new ExternalBlockHandler(streamManager, blockResolver).getAllMetrics

  test("metrics named as expected") {
    val allMetrics = Set(
      "openBlockRequestLatencyMillis", "registerExecutorRequestLatencyMillis",
      "blockTransferRateBytes", "registeredExecutorsSize", "numActiveConnections",
      "numCaughtExceptions")

    metrics.getMetrics.keySet().asScala should be (allMetrics)
  }

  // these three metrics have the same effect on the collector
  for (testname <- Seq("openBlockRequestLatencyMillis",
      "registerExecutorRequestLatencyMillis",
      "blockTransferRateBytes")) {
    test(s"$testname - collector receives correct types") {
      val builder = mock(classOf[MetricsRecordBuilder])
      // Builder calls are chained, so each stub must return the builder itself.
      when(builder.addCounter(any(), anyLong())).thenReturn(builder)
      when(builder.addGauge(any(), anyDouble())).thenReturn(builder)

      YarnShuffleServiceMetrics.collectMetric(builder, testname,
        metrics.getMetrics.get(testname))

      // One counter plus four gauge values are emitted per timer/meter metric.
      verify(builder).addCounter(any(), anyLong())
      verify(builder, times(4)).addGauge(any(), anyDouble())
    }
  }

  // this metric writes only one gauge to the collector
  test("registeredExecutorsSize - collector receives correct types") {
    val builder = mock(classOf[MetricsRecordBuilder])

    YarnShuffleServiceMetrics.collectMetric(builder, "registeredExecutorsSize",
      metrics.getMetrics.get("registeredExecutorsSize"))

    // only one
    verify(builder).addGauge(any(), anyInt())
  }
}
| spark-test/spark | resource-managers/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceMetricsSuite.scala | Scala | apache-2.0 | 3,038 |
package com.twitter.finagle.netty4.ssl.client
import com.twitter.finagle.{Address, SslVerificationFailedException}
import com.twitter.finagle.ssl.client.{SslClientConfiguration, SslClientSessionVerifier}
import io.netty.channel.Channel
import io.netty.channel.embedded.EmbeddedChannel
import io.netty.handler.ssl.SslHandler
import io.netty.util.concurrent.DefaultPromise
import java.net.InetSocketAddress
import javax.net.ssl.{SSLEngine, SSLSession}
import org.mockito.Mockito._
import org.scalatest.{FunSuite, OneInstancePerTest}
import org.scalatestplus.mockito.MockitoSugar
// Each test gets a fresh fixture (OneInstancePerTest): an EmbeddedChannel plus a
// mocked SslHandler whose handshake future we complete by hand, letting tests
// drive the handshake/verification sequence deterministically.
class SslClientVerificationHandlerTest extends FunSuite with MockitoSugar with OneInstancePerTest {

  val fakeAddress = InetSocketAddress.createUnresolved("ssl", 8081)
  val address = Address.Inet(fakeAddress, Map.empty)
  val config = SslClientConfiguration()

  // Verifier whose outcome (or thrown exception) is controlled by the by-name `result`.
  class TestVerifier(result: => Boolean) extends SslClientSessionVerifier {
    def apply(address: Address, config: SslClientConfiguration, session: SSLSession): Boolean =
      result
  }

  val (channel, sslHandler, handshakePromise) = {
    val ch = new EmbeddedChannel()
    val hd = mock[SslHandler]
    val e = mock[SSLEngine]
    val hp = new DefaultPromise[Channel](ch.eventLoop())
    when(hd.handshakeFuture()).thenReturn(hp)
    when(hd.engine).thenReturn(e)
    when(e.getSession).thenReturn(mock[SSLSession])
    (ch, hd, hp)
  }

  // Writes are buffered until the handshake succeeds, then flushed and the
  // connect promise is satisfied.
  test("success") {
    channel
      .pipeline()
      .addFirst(
        new SslClientVerificationHandler(
          sslHandler,
          address,
          config,
          SslClientSessionVerifier.AlwaysValid
        )
      )

    val connectPromise = channel.connect(fakeAddress)
    assert(!connectPromise.isDone)

    channel.writeOutbound("pending write")
    assert(channel.outboundMessages().size() == 0)

    handshakePromise.setSuccess(channel)
    assert(connectPromise.isSuccess)
    assert(channel.readOutbound[String]() == "pending write")
    channel.finishAndReleaseAll()
  }

  // A verifier returning false fails the connect with SslVerificationFailedException.
  test("session verification failed") {
    channel
      .pipeline()
      .addFirst(
        new SslClientVerificationHandler(
          sslHandler,
          address,
          config,
          new TestVerifier(false)
        )
      )

    val connectPromise = channel.connect(fakeAddress)
    assert(!connectPromise.isDone)

    channel.writeOutbound("pending write")
    assert(channel.outboundMessages().size() == 0)

    handshakePromise.setSuccess(channel)
    assert(connectPromise.cause().isInstanceOf[SslVerificationFailedException])
    assert(
      intercept[Exception](channel.checkException()).isInstanceOf[SslVerificationFailedException]
    )
    channel.finishAndReleaseAll()
  }

  // A verifier that throws: the thrown cause fails the connect promise, while
  // the channel-level exception is still an SslVerificationFailedException.
  test("failed session validation") {
    val e = new Exception("whoa")
    channel
      .pipeline()
      .addFirst(
        new SslClientVerificationHandler(
          sslHandler,
          address,
          config,
          new TestVerifier(throw e)
        )
      )

    val connectPromise = channel.connect(fakeAddress)
    assert(!connectPromise.isDone)

    channel.writeOutbound("pending write")
    assert(channel.outboundMessages().size() == 0)

    handshakePromise.setSuccess(channel)
    assert(connectPromise.cause.getMessage.startsWith("whoa"))
    assert(
      intercept[Exception](channel.checkException()).isInstanceOf[SslVerificationFailedException]
    )
    channel.finishAndReleaseAll()
  }

  // Cancelling the pending connect closes the channel.
  test("cancelled after connected") {
    channel
      .pipeline()
      .addFirst(
        new SslClientVerificationHandler(
          sslHandler,
          address,
          config,
          SslClientSessionVerifier.AlwaysValid
        )
      )

    val connectPromise = channel.connect(fakeAddress)
    assert(!connectPromise.isDone)

    assert(connectPromise.cancel(true))
    assert(!channel.isActive)
    channel.finishAndReleaseAll()
  }

  // A failed handshake propagates its cause to both the connect promise and the channel.
  test("failed handshake") {
    channel
      .pipeline()
      .addFirst(
        new SslClientVerificationHandler(
          sslHandler,
          address,
          config,
          SslClientSessionVerifier.AlwaysValid
        )
      )

    val connectPromise = channel.connect(fakeAddress)
    assert(!connectPromise.isDone)

    channel.writeOutbound("pending write")
    assert(channel.outboundMessages().size() == 0)

    val e = new Exception("not so good")
    handshakePromise.setFailure(e)
    assert(!connectPromise.isSuccess)
    assert(intercept[Exception](channel.checkException()) == e)
    channel.finishAndReleaseAll()
  }

  // Verification failure is surfaced even when no connect was ever issued.
  test("session verification failed without connect") {
    channel
      .pipeline()
      .addFirst(
        new SslClientVerificationHandler(
          sslHandler,
          address,
          config,
          new TestVerifier(false)
        )
      )

    channel.writeOutbound("pending write")
    assert(channel.outboundMessages().size() == 0)
    assert(channel.isOpen)

    handshakePromise.setSuccess(channel)
    assert(
      intercept[Exception](channel.checkException()).isInstanceOf[SslVerificationFailedException]
    )
    channel.finishAndReleaseAll()
  }
}
| luciferous/finagle | finagle-netty4/src/test/scala/com/twitter/finagle/netty4/ssl/client/SslClientVerificationHandlerTest.scala | Scala | apache-2.0 | 5,080 |
package edu.berkeley.nlp.coref.config
import edu.berkeley.nlp.coref.NumberGenderComputer
import edu.berkeley.nlp.coref.lang.CorefLanguagePack
import scala.beans.BeanProperty
/**
 * A coref system configuration suitable for predictions.
 *
 * Both fields are mutable bean properties so the configuration can be
 * populated from Java code.
 *
 * @param languagePack A language pack providing language-specific information about words and POS tags
 * @param numberGenderComputer number/gender computer to use by the coref system
 *
 * @author Tim Feuerbach
 */
class PredictionCorefSystemConfiguration(@BeanProperty var languagePack: CorefLanguagePack,
                                         @BeanProperty var numberGenderComputer: NumberGenderComputer) extends DefaultCorefSystemConfiguration {
}
| timfeu/berkeleycoref-thesaurus | src/main/java/edu/berkeley/nlp/coref/config/PredictionCorefSystemConfiguration.scala | Scala | gpl-3.0 | 699 |
package ml.sparkling.graph.operators.algorithms.shortestpaths.pathprocessors.fastutils
import java.util.function.{BiConsumer, Consumer}
import it.unimi.dsi.fastutil.longs._
import it.unimi.dsi.fastutil.objects._
import ml.sparkling.graph.api.operators.algorithms.shortestpaths.ShortestPathsTypes
import ml.sparkling.graph.api.operators.algorithms.shortestpaths.ShortestPathsTypes._
import ml.sparkling.graph.operators.algorithms.shortestpaths.pathprocessors.PathProcessor
import org.apache.spark.graphx.VertexId
/**
 * Created by Roman Bartusiak (roman.bartusiak@pwr.edu.pl http://riomus.github.io).
 * Path processor that utilizes it.unimi.dsi.fastutil as data store, and computes all paths with their structure
 */
class FastUtilWithPath[VD,ED]() extends PathProcessor[VD,ED,WithPathContainer]{
  // Concrete fastutil-backed representations:
  //  - SinglePath: one path stored as [totalWeight, hop1, hop2, ...]; slot 0 is
  //    the accumulated distance, the remaining slots are visited vertex ids.
  //  - PathsSet:   set of such paths (equal-cost alternatives).
  //  - PathsMap:   destination vertex id -> PathsSet.
  private type PathsSet=ObjectOpenHashSet[JPath]
  private type PathsMap=Long2ObjectOpenHashMap[JPath]
  private type SinglePath=ObjectArrayList[JDouble]
  private val DEFAULT_CONTAINER_SIZE=64

  def EMPTY_CONTAINER=getNewContainerForPaths(0)

  def getNewContainerForPaths() ={
    // 0.25 load factor trades memory for fewer rehashes during merging.
    new PathsMap(DEFAULT_CONTAINER_SIZE,0.25f).asInstanceOf[WithPathContainer]
  }

  def getNewContainerForPaths(size:Int) ={
    new PathsMap(size,0.25f).asInstanceOf[WithPathContainer]
  }

  // Adds a fresh one-hop path of the given weight towards `to`. The incoming
  // map is cloned first, so callers see copy-on-write semantics.
  def putNewPath(map:WithPathContainer,to:VertexId,weight:ED)(implicit num:Numeric[ED]): WithPathContainer={
    val existingPaths =getPathsContainer()
    val newPath=new SinglePath()
    newPath.add(num.toDouble(weight))
    existingPaths.add(newPath)
    val out=map.asInstanceOf[PathsMap].clone().asInstanceOf[WithPathContainer];
    out.put(to,existingPaths)
    out
  }

  // Merge variant that leaves both inputs untouched (map2 is cloned).
  def processNewMessages(map1:WithPathContainer,map2:WithPathContainer)(implicit num:Numeric[ED]):WithPathContainer={
    val out=map2.asInstanceOf[PathsMap].clone().asInstanceOf[WithPathContainer]
    mergeMessages(map1,out)
  }

  // Merges map1 into map2 in place: for every destination, keeps the cheaper
  // path set (or the union when costs tie) — see mergePathSets.
  override def mergeMessages(map1:WithPathContainer, map2:WithPathContainer)(implicit num:Numeric[ED]):WithPathContainer={
    val out=map2
    map1.forEach(new BiConsumer[JLong,JPathCollection](){
      def accept(key: JLong, u: JPathCollection) = {
        val map2Value: JPathCollection =Option(map2.get(key)).getOrElse(ObjectSets.EMPTY_SET.asInstanceOf[JPathCollection])
        val map1Value: JPathCollection =u
        val value=mergePathSets(map1Value,map2Value)
        out.put(key,value)
      }
    })
    out
  }

  // Extends every path in `map` by the edge (vertexId, distance) and merges the
  // result into a clone of `map2`. Entries keyed by targetVertexId itself are
  // skipped (no self-paths).
  def extendPathsMerging(targetVertexId:VertexId,map:WithPathContainer,vertexId:VertexId,distance:ED,map2:WithPathContainer)(implicit num:Numeric[ED]): WithPathContainer ={
    val out=map2.asInstanceOf[PathsMap].clone().asInstanceOf[WithPathContainer]
    map.forEach(new BiConsumer[JLong,JPathCollection](){
      def accept(k: JLong, u: JPathCollection) = {
        if (!targetVertexId.equals(k)) {
          val map2Value: JPathCollection =Option(map2.get(k)).getOrElse(ObjectSets.EMPTY_SET.asInstanceOf[JPathCollection])
          val coll=extendPathsSet(targetVertexId,map.get(k), vertexId, distance)
          val value=mergePathSets(coll,map2Value)
          out.put(k,value)
        }
      }
    })
    out
  }

  // Appends `vertexId` to each path and adds `distance` to the weight in slot 0.
  // Paths that already visited targetVertexId are dropped (cycle guard):
  // indexOf(...) < 1 means "not found, or only found at slot 0" — slot 0 holds
  // the weight, so a weight numerically equal to the id does not count as a visit.
  private def extendPathsSet(targetVertexId:VertexId,set:JPathCollection,vertexId:VertexId,distance:ED)(implicit num:Numeric[ED]):JPathCollection={
    val out =getPathsContainer(set.size())
    val javaTarget:JDouble=targetVertexId.toDouble;
    set.forEach(new Consumer[JPath](){
      def accept( l: JPath) = {
        if(l.indexOf(javaTarget)<1){
          val lClone=l.asInstanceOf[SinglePath].clone()
          lClone.add(vertexId.toDouble)
          lClone.set(0,lClone.get(0)+num.toDouble(distance))
          out.add(lClone)
        }
      }
    })
    out
  }

  // Keeps the set whose paths are cheaper; on a tie, returns the union.
  // Only the first path of each set is inspected, so this assumes every path
  // within a set shares the same total weight (slot 0) — TODO confirm that
  // invariant is maintained by all callers.
  private def mergePathSets(set1:JPathCollection,set2:JPathCollection)(implicit num:Numeric[ED]): JPathCollection ={
    val firstSetLength=if(set1.size()==0) {java.lang.Double.MAX_VALUE.asInstanceOf[JDouble]} else {set1.iterator().next().get(0)}
    val secondSetLength=if(set2.size()==0) {java.lang.Double.MAX_VALUE.asInstanceOf[JDouble]} else { set2.iterator().next().get(0)}
    firstSetLength compareTo secondSetLength signum match{
      case 0=>{
        val set1Clone= set1.asInstanceOf[PathsSet].clone()
        set1Clone.addAll(set2)
        set1Clone
      }
      case 1 => set2.asInstanceOf[PathsSet].clone()
      case -1 => set1.asInstanceOf[PathsSet].clone()
    }
  }

  private def getPathsContainer(size:Int=DEFAULT_CONTAINER_SIZE): JPathCollection ={
    new PathsSet(size,1)
  }
}
package com.benkolera.Rt.Parser
import scalaz._
import org.joda.time.format.{DateTimeFormat,DateTimeFormatter}
import org.joda.time.DateTimeZone
object Read {
  // Should turn this into a typeclass. It is pretty clunky.

  /** Parses `s` as an Int, or an error message on the left. */
  def readInt(s:String): String \\/ Int = {
    \\/.fromTryCatchNonFatal( s.toInt ).leftMap( _ => s"$s is not an int" )
  }

  /** Parses `s` as a Long, or an error message on the left. */
  def readLong(s:String): String \\/ Long = {
    // Fixed copy-pasted message: this parses a Long, not an Int.
    \\/.fromTryCatchNonFatal( s.toLong ).leftMap( _ => s"$s is not a long" )
  }

  /** Parses `s` as a Double, or an error message on the left. */
  def readDouble(s:String): String \\/ Double = {
    // Fixed copy-pasted message: this parses a Double, not an Int.
    \\/.fromTryCatchNonFatal( s.toDouble ).leftMap( _ => s"$s is not a double" )
  }

  /** Splits a comma-separated string into trimmed elements. */
  def readList(s:String): List[String] = {
    s.split(",").toList.map( _.trim )
  }

  /**
   * Builds a datetime parser bound to the given format and time zone.
   * Returns a function so the zone-adjusted formatter is constructed once.
   */
  def readDateTime(format: DateTimeFormatter, tz: DateTimeZone ) = {
    val tzFormat = format.withZone(tz)
    def read(s:String) = \\/.fromTryCatchNonFatal(
      tzFormat.parseDateTime(s)
    ).leftMap( t => s"$s is not a datetime. Err: ${t.getMessage}" )
    read _
  }

  /** Like readDateTime, but maps RT's "unset" sentinels ("" / "Not set") to None. */
  def readOptDateTime(format: DateTimeFormatter,tz:DateTimeZone)(s:String) =
    s match {
      case "" => \\/-(None)
      case "Not set" => \\/-(None)
      case str => readDateTime(format,tz)(str).map(Some(_))
    }
}
| benkolera/scala-rt | src/main/scala/Rt/Parser/Read.scala | Scala | mit | 1,195 |
/*
* This file is part of Jetdoc.
*
* Jetdoc is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Jetdoc is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Jetdoc. If not, see <http://www.gnu.org/licenses/>.
*/
package jetdoc
import unfiltered.request._
import unfiltered.response._
import unfiltered.netty._
object typeRegexps {
  // Regular expressions used to sniff a response content type from the
  // extension of a requested path. Each pattern captures the whole path so it
  // can be used as an extractor (e.g. `case HtmlReg(_) => ...`).
  val HtmlReg: scala.util.matching.Regex = "(^.*\\\\.html$)".r
  val CssReg: scala.util.matching.Regex = "(^.*\\\\.css$)".r
  val GifReg: scala.util.matching.Regex = "(^.*\\\\.gif$)".r
  val JpgReg: scala.util.matching.Regex = "(^.*\\\\.jpe?g$)".r
  val PngReg: scala.util.matching.Regex = "(^.*\\\\.png$)".r
}
// Async unfiltered plan that serves static files straight out of a jar/zip.
// Directory-style requests ("" or trailing "/") are rewritten to index.html.
class JarServer(jar: java.io.File) extends async.Plan
  with ServerErrorResponse {
  import java.util.jar._
  import java.util.zip._
  import scala.reflect.ClassTag
  import typeRegexps._

  // NOTE(review): appears unused in this file — candidate for removal.
  def f[T](v: T)(implicit ev: ClassTag[T]) = ev.toString

  // Maps a request path to a content type by extension; unknown extensions
  // are served as plain text.
  def filetype(p: String): BaseContentType = p match {
    case HtmlReg(_) => HtmlContent
    case CssReg(_) => CssContent
    case GifReg(_) => ContentType("image/gif")
    case JpgReg(_) => ContentType("image/jpg")
    case PngReg(_) => ContentType("image/png")
    case _ => PlainTextContent
  }

  // Matches paths ending with "/" (directory requests).
  val SlashReg = "(^.*/$)".r
  // Opened once per server instance and shared across requests.
  val jarFile = new ZipFile(jar)
  val logger = org.clapper.avsl.Logger(getClass)
  logger.info("Using file " + jar.getName)

  def intent = async.Intent {
    case r @ Path(p) => r match {
      case GET(_) =>
        // Rewrite directory requests to their index.html.
        val path = p match {
          case "" => "/index.html"
          case SlashReg(pp) => pp + "index.html"
          case pp => pp
        }
        // Drop the leading "/" to obtain the zip entry name.
        Option(jarFile.getEntry(path.substring(1,path.length))).flatMap(ent => {
          if(ent.isDirectory)
            None
          else
            Option(jarFile.getInputStream(ent)).map(is => {
              // Reads the entire entry into memory.
              // NOTE(review): the input stream is never closed here — confirm
              // whether jarFile.close() is relied upon to release it.
              Iterator continually is.read takeWhile (-1 !=) map (_.toByte) toArray
            })
        }).map(arr =>
          r respond Ok ~> filetype(path) ~> ResponseBytes(arr)
        ).getOrElse(r respond NotFound ~> ResponseString("Not found"))
      case _ => r respond MethodNotAllowed
    }
  }
}
object JarServer {
  /** Convenience factory mirroring the primary constructor. */
  def apply(jar: java.io.File): JarServer = {
    new JarServer(jar)
  }
}
| judu/jetdoc | src/main/scala/JarServer.scala | Scala | gpl-3.0 | 2,384 |
/*
* Copyright 2017-2020 47 Degrees Open Source <https://www.47deg.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package higherkindness.mu.rpc.internal.metrics
import higherkindness.mu.rpc.internal.interceptors.GrpcMethodInfo
import io.grpc.MethodDescriptor.MethodType
import io.grpc.Status
import org.scalacheck.Gen
import org.scalacheck.Gen.alphaLowerChar
object MetricsOpsGenerators {

  /** Generates a non-empty string of lowercase alphabetic characters. */
  def nonEmptyStrGen: Gen[String] =
    Gen.nonEmptyListOf(alphaLowerChar).map(chars => chars.mkString)

  /** Generates a [[GrpcMethodInfo]] with random names and a random method type. */
  def methodInfoGen: Gen[GrpcMethodInfo] = {
    val methodTypeGen: Gen[MethodType] =
      Gen.oneOf(
        MethodType.BIDI_STREAMING,
        MethodType.CLIENT_STREAMING,
        MethodType.SERVER_STREAMING,
        MethodType.UNARY,
        MethodType.UNKNOWN
      )
    for {
      service  <- nonEmptyStrGen
      fullName <- nonEmptyStrGen
      method   <- nonEmptyStrGen
      tpe      <- methodTypeGen
    } yield GrpcMethodInfo(service, fullName, method, tpe)
  }

  /** Picks uniformly among the standard gRPC status codes. */
  def statusGen: Gen[Status] =
    Gen.oneOf(
      Seq(
        Status.ABORTED,
        Status.ALREADY_EXISTS,
        Status.CANCELLED,
        Status.DATA_LOSS,
        Status.DEADLINE_EXCEEDED,
        Status.FAILED_PRECONDITION,
        Status.INTERNAL,
        Status.INVALID_ARGUMENT,
        Status.NOT_FOUND,
        Status.OK,
        Status.OUT_OF_RANGE,
        Status.PERMISSION_DENIED,
        Status.RESOURCE_EXHAUSTED,
        Status.UNAUTHENTICATED,
        Status.UNAVAILABLE,
        Status.UNIMPLEMENTED,
        Status.UNKNOWN
      )
    )
}
| frees-io/freestyle-rpc | modules/tests/src/test/scala/higherkindness/mu/rpc/internal/metrics/MetricsOpsGenerators.scala | Scala | apache-2.0 | 2,029 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations.calculations
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.CATO01
import uk.gov.hmrc.ct.computations.{CP118, CP997, CP998, CPQ19}
// CP998 = losses (CP118) set against other profits (CATO01), optionally reduced
// by losses already used elsewhere (CP997); only computed when CPQ19 is true.
class LossesSetAgainstOtherProfitsCalculatorSpec extends WordSpec with Matchers {

  "Losses Set Against Other Profits Calculator" should {
    // Losses smaller than profits: the whole loss is used.
    "return CP118 when CP118 is less than CATO01 and CP118 is positive" in new LossesSetAgainstOtherProfitsCalculator {
      calculateLossesSetAgainstProfits(cato01 = CATO01(10), cp997 = CP997(None), cp118 = CP118(9), cpq19 = CPQ19(Some(true))) shouldBe CP998(Some(9))
    }
    // Losses larger than profits: capped at the available profits.
    "return CATO01 when CATO01 is less CP118, CP997 is None and CP118 is positive" in new LossesSetAgainstOtherProfitsCalculator {
      calculateLossesSetAgainstProfits(cato01 = CATO01(10), cp997 = CP997(None), cp118 = CP118(19), cpq19 = CPQ19(Some(true))) shouldBe CP998(Some(10))
    }
    // CP997 of zero behaves like CP997 of None.
    "return CATO01 when CATO01 is less CP118, CP997 is 0 and CP118 is positive" in new LossesSetAgainstOtherProfitsCalculator {
      calculateLossesSetAgainstProfits(cato01 = CATO01(10), cp997 = CP997(Some(0)), cp118 = CP118(19), cpq19 = CPQ19(Some(true))) shouldBe CP998(Some(10))
    }
    // A positive CP997 reduces the profits available to absorb losses.
    "return CATO01 minus CP997 when CATO01 is less CP118, CP997 is positive and CP118 is positive" in new LossesSetAgainstOtherProfitsCalculator {
      calculateLossesSetAgainstProfits(cato01 = CATO01(10), cp997 = CP997(Some(1)), cp118 = CP118(19), cpq19 = CPQ19(Some(true))) shouldBe CP998(Some(9))
    }
    "return CATO01 when CATO01 equals absolute CP118 and CP118 is positive" in new LossesSetAgainstOtherProfitsCalculator {
      calculateLossesSetAgainstProfits(cato01 = CATO01(10), cp997 = CP997(None), cp118 = CP118(10), cpq19 = CPQ19(Some(true))) shouldBe CP998(Some(10))
    }
    // Unanswered or negative CPQ19 means no losses are set against profits.
    "return None when CPQ19 is None" in new LossesSetAgainstOtherProfitsCalculator {
      calculateLossesSetAgainstProfits(cato01 = CATO01(10), cp997 = CP997(None), cp118 = CP118(10), cpq19 = CPQ19(None)) shouldBe CP998(None)
    }
    "return None when CPQ19 is false" in new LossesSetAgainstOtherProfitsCalculator {
      calculateLossesSetAgainstProfits(cato01 = CATO01(10), cp997 = CP997(None), cp118 = CP118(10), cpq19 = CPQ19(Some(false))) shouldBe CP998(None)
    }
  }
}
| hmrc/ct-calculations | src/test/scala/uk/gov/hmrc/ct/computations/calculations/LossesSetAgainstOtherProfitsCalculatorSpec.scala | Scala | apache-2.0 | 2,854 |
package konto
// Current account (Girokonto) with an overdraft facility of `kreditrahmenCt`
// cents: the balance may go negative down to -kreditrahmenCt.
class GiroKonto(inhaber: Person, val kreditrahmenCt: Long) extends Konto(inhaber) {
  // The overdraft limit must be strictly positive.
  require(kreditrahmenCt > 0)

  // Transfers may draw the balance down into the overdraft, but not beyond it.
  override def ueberweisen(zielKonto: Konto, betragCt: Long): Unit = {
    require(saldoCt + kreditrahmenCt >= betragCt)
    super.ueberweisen(zielKonto, betragCt)
  }

  // Uses the credit interest rate (presumably per annum — confirm in Konto).
  override protected def zinssatz(): Double = Konto.kreditzinssatzPA

  // Positive balances earn nothing here; non-positive balances are charged
  // interest, which is added to the balance and returned.
  override def verzinsen(anzahlTage: Int): Long = {
    if (saldoCt > 0) 0
    else {
      val zinsen: Long = super.verzinsen(anzahlTage)
      _saldoCt += zinsen
      zinsen
    }
  }
}
package uk.co.grahamcox.oauth
import org.specs2.mutable._
// Tests for OAuth percent-encoding: unreserved characters pass through,
// everything else is %XX-escaped (UTF-8, uppercase hex).
class PercentEncoderSpec extends SpecificationWithJUnit {
  "PercentEncoder" should {
    // Digits are unreserved and must be left untouched.
    "do nothing to a safe string" in {
      val input = "0123456789"
      val output = PercentEncoder(input)
      output must beEqualTo("0123456789")
    }
    // Reserved/multi-byte characters (note £ encodes as two bytes: %C2%A3).
    "convert an unsafe string" in {
      val input = "!\\"£$%^&*()"
      val output = PercentEncoder(input)
      output must beEqualTo("%21%22%C2%A3%24%25%5E%26%2A%28%29")
    }
    // Base64 padding and '/' must be escaped, the rest kept verbatim.
    "convert a Base64 String" in {
      val input = "tnnArxj06cWHq44gCs1OSKk/jLY="
      val output = PercentEncoder(input)
      output must beEqualTo("tnnArxj06cWHq44gCs1OSKk%2FjLY%3D")
    }
  }
}
| sazzer/books | oauth/src/test/scala/uk/co/grahamcox/oauth/PercentEncoderSpec.scala | Scala | gpl-3.0 | 689 |
package quisp
import spray.json.DefaultJsonProtocol._
import spray.json._
import java.awt.Color
/**
 * General-purpose JSON formats.
 *
 * All formats here are write-only: reading back is unsupported.
 *
 * @author rodneykinney
 */
object GeneralJson {
  import scala.language.implicitConversions

  // Adapts a write-only JsonWriter into the JsonFormat spray-json expects.
  // `read` deliberately throws NotImplementedError (???) if ever invoked.
  implicit def writerToFormat[T](writer: JsonWriter[T]) = new JsonFormat[T] {
    override def write(obj: T): JsValue = writer.write(obj)
    override def read(json: JsValue): T = ???
  }

  // Serializes a Point as the most compact tuple/scalar its populated fields
  // allow. Note: when X is absent the name is emitted first, i.e. (name, y).
  implicit val pointJS: JsonFormat[Point] = new JsonWriter[Point] {
    def write(p: Point) = (p.X, p.Y, p.Name) match {
      case (Some(x), Some(y), Some(s)) => (x, y, s).toJson
      case (None, Some(y), Some(s)) => (s, y).toJson
      case (Some(x), None, Some(s)) => (x, s).toJson
      case (None, None, Some(s)) => s.toJson
      case (Some(x), Some(y), None) => (x, y).toJson
      case (Some(x), None, None) => x.toJson
      case (None, Some(y), None) => y.toJson
      case (None, None, None) => "".toJson
    }
  }

  // Fully opaque colors render as "#rrggbb"; translucent ones as
  // "rgba(r,g,b,alpha)" with alpha scaled to 0..1.
  implicit val colorJS: JsonFormat[Color] =
    new JsonWriter[Color] {
      def write(c: Color) = c.getAlpha match {
        case 255 => "#%02x%02x%02x".format(c.getRed, c.getGreen, c.getBlue).toJson
        case a => s"rgba(${c.getRed},${c.getGreen},${c.getBlue},${a.toDouble / 255})".toJson
      }
    }
}
| rodneykinney/quisp | src/main/scala/quisp/GeneralJson.scala | Scala | apache-2.0 | 1,262 |
package org.bitcoins.core.script
import org.bitcoins.core.script.constant._
import org.bitcoins.testkitcore.util.TestUtil
import org.bitcoins.testkitcore.util.BitcoinSUnitTest
/** Created by chris on 2/6/16.
*/
class ScriptProgramTest extends BitcoinSUnitTest {

  "ScriptProgram" must "determine if the stack top is true" in {
    // A non-zero script number on top of the stack counts as true.
    val stack = List(ScriptNumber(1))
    val script = List()
    val program =
      TestUtil.testProgramExecutionInProgress.updateStackAndScript(stack,
                                                                   script)
    program.stackTopIsTrue must be(true)
  }

  it must "determine if the stack stop is false" in {
    // Zero, OP_0 and negative zero must all evaluate as false.
    val stack = List(ScriptNumber.zero)
    val script = List()
    val program =
      TestUtil.testProgramExecutionInProgress.updateStackAndScript(stack,
                                                                   script)
    program.stackTopIsTrue must be(false)
    val program2 = program.updateStack(List(OP_0))
    program2.stackTopIsTrue must be(false)
    //stack top should not be true for negative zero
    val program3 = program.updateStack(List(ScriptNumber.negativeZero))
    program3.stackTopIsTrue must be(false)
  }
}
| bitcoin-s/bitcoin-s | core-test/src/test/scala/org/bitcoins/core/script/ScriptProgramTest.scala | Scala | mit | 1,204 |
// Exercises the scalatest assertCompile / assertNotCompile macros: each string
// literal is a code snippet type-checked (or expected to fail) in the current
// lexical scope.
object Test {
  import scalatest._

  trait Eq[T]
  implicit val eq: Eq[Int] = new Eq[Int] {}

  implicit class AnyOps[T](x: T) {
    def === (y: T)(implicit c: Eq[T]) = x == y
  }

  def main(args: Array[String]): Unit = {
    // === requires an Eq instance: present for Int, absent for Double.
    assertCompile("5 === 5")
    assertNotCompile("5.6 === 7.7")

    // Local vals are visible inside the checked snippet; unknown names are not.
    val x: Int = 5
    assertCompile("x + 3")
    assertNotCompile("y + 3")

    import scala.util.Left
    assertCompile("Left(3)")
    // Deliberate misspelling of "Right" — must fail to resolve.
    assertNotCompile("Rigth(3)")

    def f(x: Int): Int = x * x
    assertCompile("f(3)")
    assertNotCompile("g(3)")

    // Abstract type members are in scope, but cannot be instantiated;
    // malformed snippets (unknown type S, missing body) must not compile.
    type T
    assertCompile("def foo(x: T): T = x")
    assertNotCompile("foo(???)")
    assertNotCompile("def foo(x: S): S = x")
    assertNotCompile("def test(x: Int) =")

    // Multi-statement snippets are supported as well.
    assertCompile(
      """
class EqString extends Eq[String]
new EqString
"""
    )
  }
}
| som-snytt/dotty | tests/run-macros/reflect-typeChecks/test_2.scala | Scala | apache-2.0 | 829 |
package shield.implicits
import com.typesafe.scalalogging.LazyLogging
import spray.http.HttpHeaders.RawHeader
import spray.http.Uri.Path
import spray.http._
import scala.language.implicitConversions
// todo: Do not like/want. Do this better
// Enriches HttpResponse with header-manipulation helpers.
class ImplicitHttpResponse(msg: HttpResponse) extends LazyLogging {
  /** Prepends the given headers to the response's existing headers. */
  def withAdditionalHeaders(header: HttpHeader*) : HttpResponse = {
    msg.withHeaders(header.toList ++ msg.headers)
  }

  /** Adds the given headers, replacing any existing header with the same
    * (case-insensitive) name. */
  def withReplacedHeaders(header: HttpHeader*) : HttpResponse = {
    val headerNames = header.map(_.lowercaseName).toSet
    msg.withHeaders(header.toList ++ msg.headers.filterNot(h => headerNames.contains(h.lowercaseName)))
  }

  /** Removes every header whose name appears in `headers`; matching is
    * case-insensitive (the set entries are lowercased before comparing). */
  def withStrippedHeaders(headers: Set[String]) : HttpResponse = {
    msg.withHeaders(msg.headers.filterNot(h => headers.exists(s => s.toLowerCase.equals(h.lowercaseName))))
  }
}
// Enriches HttpRequest with helpers for header manipulation, honouring proxy
// forwarding headers, and stripping file extensions from the URI path.
class ImplicitHttpRequest(msg: HttpRequest) extends LazyLogging {
  /** Prepends the given headers to the request's existing headers. */
  def withAdditionalHeaders(header: HttpHeader*): HttpRequest = {
    msg.withHeaders(header.toList ++ msg.headers)
  }

  /** Adds the given headers, replacing any existing header with the same
    * (case-insensitive) name. */
  def withReplacedHeaders(header: HttpHeader*): HttpRequest = {
    val headerNames = header.map(_.lowercaseName).toSet
    msg.withHeaders(header.toList ++ msg.headers.filterNot(h => headerNames.contains(h.lowercaseName)))
  }

  /**
   * Removes every header whose name appears in `headers`.
   *
   * Matching is now case-insensitive, consistent with
   * ImplicitHttpResponse.withStrippedHeaders; previously only set entries
   * that were already lowercase would match anything.
   */
  def withStrippedHeaders(headers: Set[String]): HttpRequest = {
    val lowercasedNames = headers.map(_.toLowerCase)
    msg.withHeaders(msg.headers.filterNot(h => lowercasedNames.contains(h.lowercaseName)))
  }

  /**
   * Derives a `client-address` header from X-Forwarded-For, trusting at most
   * `trustProxies` proxy hops. Index 0 of the trust-ordered list is the
   * directly-connected peer (Remote-Address), index 1 the most recent
   * X-Forwarded-For entry, and so on.
   */
  def withTrustXForwardedFor(trustProxies: Int): HttpRequest = {
    val forwardedList: Array[String] = msg.headers.find(_.lowercaseName == "x-forwarded-for").map(_.value.split(",")).getOrElse(Array())
    val remoteHeader = msg.headers.find(_.lowercaseName == "remote-address").map(_.value).getOrElse("127.0.0.1")
    // [Remote-Address header, most recent x-forwarded-for, 2nd most recent x-forwarded-for, etc]
    val combinedList = (forwardedList :+ remoteHeader).reverse
    // Clamp into range: out-of-range trust levels (including negative values,
    // which previously threw IndexOutOfBoundsException) fall back to the
    // nearest available hop.
    val index = math.max(0, math.min(trustProxies, combinedList.length - 1))
    val clientAddress = RawHeader("client-address", combinedList(index).trim)
    withReplacedHeaders(clientAddress)
  }

  /**
   * When `trustProto` is true, rewrites the request URI's scheme from the
   * X-Forwarded-Proto header (falling back to the current scheme). An invalid
   * scheme is logged and the original request returned unchanged.
   */
  def withTrustXForwardedProto(trustProto: Boolean): HttpRequest = {
    if (trustProto) {
      val proto = msg.headers.find(_.lowercaseName == "x-forwarded-proto").map(_.value).getOrElse(msg.uri.scheme)
      try {
        msg.copy(uri = msg.uri.copy(scheme = proto))
      } catch {
        case e: spray.http.IllegalUriException =>
          logger.error("Received invalid protocol \\"" + proto + "\\" in the 'X-Forwarded-Proto' header, using original request.",e)
          msg
      }
    } else {
      msg
    }
  }

  /** Removes the path's file extension when it appears in `extensions`.
    * Entries must be lowercase and include the leading dot (e.g. ".json"),
    * matching what getExtension produces. */
  def withStrippedExtensions(extensions: Set[String]): HttpRequest = {
    val trimmedPath = getExtension(msg.uri.path.toString) match {
      case (path, Some(extension)) if extensions.contains(extension) => Path(path)
      case _ => msg.uri.path
    }
    msg.copy(uri = msg.uri.copy(path = trimmedPath))
  }

  /**
   * Splits `path` into (path without extension, Some(extension)), where the
   * extension keeps its leading dot and is lowercased — e.g.
   * "/a/b.HTML" -> ("/a/b", Some(".html")). Returns (path, None) when the last
   * dot precedes the last '/' (i.e. there is no file extension).
   */
  protected def getExtension(path: String): (String, Option[String]) = {
    val extensionPos = path.lastIndexOf('.')
    val lastDirSeparator = path.lastIndexOf('/')
    if (lastDirSeparator < extensionPos) {
      val t = path.splitAt(extensionPos)
      t._1 -> Some(t._2.toLowerCase())
    } else {
      path -> None
    }
  }
}
object HttpImplicits {
  // Converts e.g. "get" to the corresponding HttpMethod. Note: throws
  // NoSuchElementException (via .get) for unrecognised method names.
  implicit def toHttpMethod(s: String) : HttpMethod = HttpMethods.getForKey(s.toUpperCase).get
  // Pimps enabling the helper methods defined above on plain spray messages.
  implicit def betterResponse(response: HttpResponse) : ImplicitHttpResponse = new ImplicitHttpResponse(response)
  implicit def betterRequest(request: HttpRequest) : ImplicitHttpRequest = new ImplicitHttpRequest(request)
}
| RetailMeNot/shield | src/main/scala/shield/implicits/HttpImplicits.scala | Scala | mit | 3,754 |
//package com.git.huanghaifeng.basis
///**
// * @describe
// * Trait 为重用代码的一个基本单位
// * @author HHF
// * @email huanghaifengdyx@163.com
// * @date 2016年5月21日
// */
//
//object Traits {
// trait ForEachAble[A] {
// def iterator: java.util.Iterator[A]
// def foreach(f: A => Unit) = {
// val iter = iterator
// while (iter.hasNext)
// f(iter.next)
// }
// }
//
// trait JsonAble {
// def toJson() = scala.util.parsing.json.JSONFormat.defaultFormatter(this)
// }
//
// def main(args:Array[String]){
// val list = new java.util.ArrayList[Int]() with ForEachAble[Int]
// list.add(1)
// list.add(2)
// println("For each: ");
// list.foreach(x => println(x))
//
// val list1 = new java.util.ArrayList[Int]() with JsonAble
// list1.add(1)
// list1.add(2)
// println("For each: ");
// println("Json: " + list1.toJson())
// }
//} | prucehuang/quickly-start-scala | src/main/scala/com/git/huanghaifeng/basis/Traits.scala | Scala | apache-2.0 | 1,020 |
package ca.uwo.eng.sel.cepsim.placement
import ca.uwo.eng.sel.cepsim.query.Query
/** Strategy interface for mapping query operators (vertices) onto virtual machines. */
trait OpPlacementStrategy {
  /**
   * Define placement for all vertices from the queries.
   * @param queries Set of queries from which the placement should be executed.
   * @return Set of placements (mappings of vertices to virtual machines).
   */
  def execute(queries: Query*): Set[Placement]
}
package mesosphere.marathon.event.http
import akka.actor.{ Actor, ActorLogging }
import akka.pattern.pipe
import mesosphere.marathon.event.{
MarathonSubscriptionEvent,
Unsubscribe,
Subscribe
}
import mesosphere.marathon.event.http.SubscribersKeeperActor._
import mesosphere.marathon.state.MarathonStore
import scala.concurrent.Future
// Actor owning the persisted set of HTTP event-callback URLs. Subscribe and
// Unsubscribe mutate the store and echo the triggering event back to the
// sender once the store confirms the change.
class SubscribersKeeperActor(val store: MarathonStore[EventSubscribers]) extends Actor with ActorLogging {

  implicit val ec = HttpEventModule.executionContext

  override def receive = {
    case event @ Subscribe(_, callbackUrl, _, _) =>
      val addResult: Future[Option[EventSubscribers]] = add(callbackUrl)
      // collect is partial: a None store result leaves the future failed, so
      // nothing successful is piped back in that case.
      val subscription: Future[MarathonSubscriptionEvent] =
        addResult.collect {
          case Some(subscribers) =>
            if (subscribers.urls.contains(callbackUrl))
              log.info("Callback [%s] subscribed." format callbackUrl)
            event
        }
      subscription pipeTo sender()

    case event @ Unsubscribe(_, callbackUrl, _, _) =>
      val removeResult: Future[Option[EventSubscribers]] = remove(callbackUrl)
      val subscription: Future[MarathonSubscriptionEvent] =
        removeResult.collect {
          case Some(subscribers) =>
            if (!subscribers.urls.contains(callbackUrl))
              log.info("Callback [%s] unsubscribed." format callbackUrl)
            event
        }
      subscription pipeTo sender()

    case GetSubscribers =>
      // Missing store entry is treated as an empty subscriber set.
      val subscription = store.fetch(Subscribers).map(_.getOrElse(EventSubscribers()))
      subscription pipeTo sender()
  }

  // Adds the URL to the stored set; re-subscribing an existing URL is a no-op
  // that is only logged.
  protected[this] def add(callbackUrl: String): Future[Option[EventSubscribers]] =
    store.modify(Subscribers) { deserialize =>
      val existingSubscribers = deserialize()
      if (existingSubscribers.urls.contains(callbackUrl)) {
        log.info("Existing callback [%s] resubscribed." format callbackUrl)
        existingSubscribers
      }
      else EventSubscribers(existingSubscribers.urls + callbackUrl)
    }

  // Removes the URL from the stored set; unsubscribing an unknown URL is a
  // no-op that is logged as a warning.
  protected[this] def remove(callbackUrl: String): Future[Option[EventSubscribers]] =
    store.modify(Subscribers) { deserialize =>
      val existingSubscribers = deserialize()
      if (existingSubscribers.urls.contains(callbackUrl))
        EventSubscribers(existingSubscribers.urls - callbackUrl)
      else {
        log.warning("Attempted to unsubscribe nonexistent callback [%s]." format callbackUrl)
        existingSubscribers
      }
    }
}
object SubscribersKeeperActor {
  /** Message requesting the current set of subscribers. */
  case object GetSubscribers

  // Store key under which the subscriber list is persisted.
  final val Subscribers = "http_event_subscribers"
}
| EvanKrall/marathon | src/main/scala/mesosphere/marathon/event/http/SubscribersKeeperActor.scala | Scala | apache-2.0 | 2,580 |
package chess
// Trie of chess move sequences used to name the opening of a game.
object OpeningExplorer {

  private type Move = String

  // Wildcard move: matches any actual move at its position in the tree.
  private val any: Move = "**"

  case class Branch(
      moves: Map[Move, Branch] = Map.empty,
      opening: Option[Opening] = None) {

    // Looks up `move`, also consulting the "**" wildcard child. When both a
    // precise and a wildcard branch exist they are merged, the precise branch
    // winning on conflicts (see `merge`).
    def get(move: Move): Option[Branch] = {
      val precise = moves get move
      val wildcard = moves get any
      wildcard.fold(precise) { wild =>
        precise.fold(wild)(_ merge wild).some
      }
    }

    // Like `get`, but yields an empty branch when the move is unknown.
    def apply(move: Move) = get(move) getOrElse Branch()

    // Records `opening` at the end of the move path, creating intermediate
    // branches as needed.
    def add(moves: List[Move], opening: Opening): Branch = moves match {
      case Nil => this
      case move :: Nil => this.updated(move, apply(move) set opening)
      case move :: others => this.updated(move, apply(move).add(others, opening))
    }

    def updated(k: Move, v: Branch) = copy(moves = moves.updated(k, v))

    def set(o: Opening) = copy(opening = Some(o))

    // Debug rendering of the subtree, one move per line, indented by depth.
    def render(margin: String = ""): String =
      " " + toString + "\\n" + (moves map {
        case (m, b) => margin + m + b.render(margin + " ")
      } mkString "\\n")

    // `this` takes precedence: its children and opening override `other`'s.
    private def merge(other: Branch) = Branch(
      moves = other.moves ++ moves,
      opening = opening orElse other.opening)

    override def toString = "branch " + opening.fold("")(_.toString) + moves.keys
  }

  // Walks the tree along `moves`, remembering the deepest named opening seen;
  // unknown continuations keep the last match rather than discarding it.
  def openingOf(moves: List[String]): Option[Opening] = {
    def next(
        branch: Branch,
        moves: List[Move],
        last: Option[Opening]): Option[Opening] =
      moves match {
        case Nil => branch.opening orElse last
        case m :: ms => (branch get m).fold(last) { b =>
          next(b, ms, b.opening orElse last)
        }
      }
    next(tree, moves, none)
  }

  // Built once from the opening database by inserting every opening's move list.
  val tree: Branch = OpeningDB.db.foldLeft(Branch()) {
    case (tree, opening) => tree.add(opening.moveList, opening)
  }
}
| psuter/scalachess | src/main/scala/OpeningExplorer.scala | Scala | mit | 1,777 |
/*
* Copyright 2015 Nicolas Rinaudo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kantan.csv.scalaz
import kantan.codecs.scalaz.laws.discipline.ScalazDisciplineSuite
import kantan.csv.{DecodeError, RowDecoder, RowEncoder}
import kantan.csv.scalaz.arbitrary._
import kantan.csv.scalaz.equality._
import org.scalacheck.{Arbitrary, Gen}
import scalaz.Equal
import scalaz.scalacheck.ScalazProperties.{contravariant, monadError, plus}
import scalaz.std.anyVal._
import scalaz.std.list._
import scalaz.std.string._
// Law-checks the scalaz instances provided for kantan.csv row codecs.
class RowCodecInstancesTests extends ScalazDisciplineSuite {

  // Limits the size of rows to 10 - using the default size makes these tests prohibitively long in some contexts
  // (in particular, travis will timeout on the scala.js execution of these tests).
  implicit def arbSeq[A: Arbitrary]: Arbitrary[Seq[A]] = Arbitrary(Gen.listOfN(10, implicitly[Arbitrary[A]].arbitrary))

  // scalaz doesn't provide an Eq[Seq] instance, mostly because Seq isn't a very meaningful type.
  implicit def seqEq[A: Equal]: Equal[Seq[A]] = Equal[List[A]].contramap(_.toList)

  checkAll("RowDecoder", monadError.laws[RowDecoder, DecodeError])
  checkAll("RowDecoder", plus.laws[RowDecoder])
  checkAll("RowEncoder", contravariant.laws[RowEncoder])
}
| nrinaudo/tabulate | scalaz/shared/src/test/scala/kantan/csv/scalaz/RowCodecInstancesTests.scala | Scala | mit | 1,776 |
package sbt.testing
/**
* Information in addition to a test class name that identifies the suite or test about which an
* event was fired.
*
* <p>
* This class has five subtypes:
* </p>
*
* <ul>
* <li><code>SuiteSelector</code> - indicates an event is about an entire suite of tests whose
* class was reported as <code>fullyQualifiedName</code> in the <code>Event</code></li>
* <li><code>TestSelector</code> - indicates an event is about a single test directly contained
* in the suite whose class was reported as <code>fullyQualifiedName</code> in the <code>Event</code></li>
* <li><code>NestedSuiteSelector</code> - indicates an event is about an entire nested suite of tests whose
* top-level, "nesting" class was reported as <code>fullyQualifiedName</code> in the <code>Event</code></li>
* <li><code>NestedTestSelector</code> - indicates an event is about a single test contained
* in a nested suite whose top-level, "nesting" class was reported as <code>fullyQualifiedName</code> in the <code>Event</code></li>
* <li><code>TestWildcardSelector</code> - indicates an event is about zero to many tests directly contained
* in the suite whose class was reported as <code>fullyQualifiedName</code> in the <code>Event</code></li>
* </ul>
*/
abstract sealed class Selector // sealed: the five concrete subtypes documented above are the only selectors
/**
* Indicates an event was about the entire suite whose class had the fully qualified name specified as
* the <code>fullyQualifiedName</code> attribute the event.
*/
final class SuiteSelector extends Selector with Serializable {
  // All SuiteSelector instances are interchangeable: equality is purely a
  // type test and the hash code is a fixed constant.
  override def equals(o: Any): Boolean = o match {
    case _: SuiteSelector => true
    case _                => false
  }
  override def hashCode(): Int = 29
  override def toString(): String = "SuiteSelector"
}
/**
* Information in addition to a test class name that identifies a test directly contained in the suite
* whose class had the fully qualified name specified as the <code>fullyQualifiedName</code> attribute
* passed to the event.
*/
final class TestSelector(_testName: String) extends Selector with Serializable {
  if (_testName == null)
    throw new NullPointerException("testName was null")

  /**
   * The name of the test about which an event was fired.
   *
   * @return the name of the test
   */
  def testName(): String = _testName

  // Two TestSelectors are equal iff they carry the same test name.
  override def equals(that: Any): Boolean = that match {
    case other: TestSelector => other.testName == testName
    case _                   => false
  }

  override def hashCode(): Int = testName.hashCode()

  override def toString(): String = "TestSelector(" + testName + ")"
}
/**
* Information in addition to a test class name that identifies a nested suite about which an
* event was fired.
*/
final class NestedSuiteSelector(_suiteId: String) extends Selector with Serializable {
  if (_suiteId == null)
    throw new NullPointerException("suiteId was null")

  /**
   * The id that, together with the top-level test class name, identifies the
   * nested suite about which an event was fired.
   *
   * @return the id of the nested suite
   */
  def suiteId(): String = _suiteId

  // Two NestedSuiteSelectors are equal iff they carry the same suite id.
  override def equals(that: Any): Boolean = that match {
    case other: NestedSuiteSelector => other.suiteId == suiteId
    case _                          => false
  }

  override def hashCode(): Int = suiteId.hashCode()

  override def toString(): String = "NestedSuiteSelector(" + suiteId + ")"
}
/**
* Information in addition to a test class name that identifies a test in a nested suite about which an
* event was fired.
*/
final class NestedTestSelector(_suiteId: String,
                               _testName: String) extends Selector with Serializable {
  if (_suiteId == null)
    throw new NullPointerException("suiteId was null")
  if (_testName == null)
    throw new NullPointerException("testName was null")

  /**
   * The id that, together with the top-level test class name, identifies the
   * nested suite containing the test about which an event was fired.
   *
   * @return the id of the nested suite containing the test
   */
  def suiteId(): String = _suiteId

  /**
   * The name of the test in the nested suite about which an event was fired.
   *
   * @return the name of the test in the nested suite identified by the id returned by <code>suiteId</code>.
   */
  def testName(): String = _testName

  // Equal iff both the suite id and the test name match.
  override def equals(that: Any): Boolean = that match {
    case other: NestedTestSelector =>
      suiteId == other.suiteId && testName == other.testName
    case _ => false
  }

  // Same value as the classic 17/31 accumulation, folded into one expression.
  override def hashCode(): Int =
    31 * (31 * 17 + suiteId.hashCode()) + testName.hashCode()

  override def toString(): String =
    "NestedTestSelector(" + suiteId + ", " + testName + ")"
}
/**
* Information that identifies zero to many tests directly contained in a test class.
*
* <p>
* The <code>testWildcard</code> is a simple string, <em>i.e.</em>, not a glob or regular expression.
* Any test whose name includes the <code>testWildcard</code> string as a substring will be selected.
* </p>
*/
final class TestWildcardSelector(
    _testWildcard: String) extends Selector with Serializable {

  // Fail fast on construction: a null wildcard would only surface later.
  if (_testWildcard == null)
    throw new NullPointerException("testWildcard was null")

  /**
   * A test wildcard string used to select tests.
   *
   * <p>
   * The <code>testWildcard</code> is a simple string, <em>i.e.</em>, not a glob or regular
   * expression. Any test whose name includes the <code>testWildcard</code> string as a
   * substring will be selected.
   * </p>
   *
   * @return the test wildcard string used to select tests.
   */
  def testWildcard(): String = _testWildcard

  override def equals(that: Any): Boolean =
    that match {
      case other: TestWildcardSelector => testWildcard == other.testWildcard
      case _                           => false
    }

  override def hashCode(): Int = testWildcard.hashCode()

  override def toString(): String = "TestWildcardSelector(" + testWildcard + ")"
}
| matthughes/scala-js | test-interface/src/main/scala/sbt/testing/Selectors.scala | Scala | bsd-3-clause | 5,811 |
package bootstrap
import com.google.inject.AbstractModule
/**
 * Guice module for application bootstrap wiring.
 *
 * Created by ismet on 16/12/15.
 */
class MongoModule extends AbstractModule {

  // Bind InitSetup as an eager singleton so it is instantiated as soon as the
  // injector is created, i.e. its setup work runs at startup rather than on
  // first injection.
  protected def configure(): Unit =
    bind(classOf[InitSetup]).asEagerSingleton()
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution
import org.apache.spark.sql.{AnalysisException, Row}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType}
import org.apache.spark.sql.execution.SQLViewSuite
import org.apache.spark.sql.hive.test.TestHiveSingleton
import org.apache.spark.sql.types.{NullType, StructType}
/**
* A test suite for Hive view related functionality.
*/
class HiveSQLViewSuite extends SQLViewSuite with TestHiveSingleton {
  import testImplicits._
  // A view (permanent or temporary) must be able to reference a permanent UDF,
  // built-in functions regardless of letter case, and a Hive aggregate/window function.
  test("create a permanent/temp view using a hive, built-in, and permanent user function") {
    val permanentFuncName = "myUpper"
    val permanentFuncClass =
      classOf[org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper].getCanonicalName
    val builtInFuncNameInLowerCase = "abs"
    val builtInFuncNameInMixedCase = "aBs"
    val hiveFuncName = "histogram_numeric"
    withUserDefinedFunction(permanentFuncName -> false) {
      sql(s"CREATE FUNCTION $permanentFuncName AS '$permanentFuncClass'")
      withTable("tab1") {
        (1 to 10).map(i => (s"$i", i)).toDF("str", "id").write.saveAsTable("tab1")
        // Exercise both permanent and temporary view creation over the same query.
        Seq("VIEW", "TEMPORARY VIEW").foreach { viewMode =>
          withView("view1") {
            sql(
              s"""
                |CREATE $viewMode view1
                |AS SELECT
                |$permanentFuncName(str),
                |$builtInFuncNameInLowerCase(id),
                |$builtInFuncNameInMixedCase(id) as aBs,
                |$hiveFuncName(id, 5) over()
                |FROM tab1
              """.stripMargin)
            // All ten source rows must be visible through the view.
            checkAnswer(sql("select count(*) FROM view1"), Row(10))
          }
        }
      }
    }
  }
  // Temporary functions may only be referenced from temporary views; creating a
  // permanent view over one must fail analysis with a descriptive error.
  test("create a permanent/temp view using a temporary function") {
    val tempFunctionName = "temp"
    val functionClass =
      classOf[org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper].getCanonicalName
    withUserDefinedFunction(tempFunctionName -> true) {
      sql(s"CREATE TEMPORARY FUNCTION $tempFunctionName AS '$functionClass'")
      withView("view1") {
        withTempView("tempView1") {
          withTable("tab1") {
            (1 to 10).map(i => s"$i").toDF("id").write.saveAsTable("tab1")
            // temporary view
            sql(s"CREATE TEMPORARY VIEW tempView1 AS SELECT $tempFunctionName(id) from tab1")
            checkAnswer(sql("select count(*) FROM tempView1"), Row(10))
            // permanent view
            val e = intercept[AnalysisException] {
              sql(s"CREATE VIEW view1 AS SELECT $tempFunctionName(id) from tab1")
            }.getMessage
            assert(e.contains("Not allowed to create a permanent view `default`.`view1` by " +
              s"referencing a temporary function `$tempFunctionName`"))
          }
        }
      }
    }
  }
  // Regression test: a view over a Hive parquet-stored table must return the table's rows.
  test("SPARK-14933 - create view from hive parquet table") {
    withTable("t_part") {
      withView("v_part") {
        spark.sql("create table t_part stored as parquet as select 1 as a, 2 as b")
        spark.sql("create view v_part as select * from t_part")
        checkAnswer(
          sql("select * from t_part"),
          sql("select * from v_part"))
      }
    }
  }
  // Regression test: same as above for a Hive ORC-stored table.
  test("SPARK-14933 - create view from hive orc table") {
    withTable("t_orc") {
      withView("v_orc") {
        spark.sql("create table t_orc stored as orc as select 1 as a, 2 as b")
        spark.sql("create view v_orc as select * from t_orc")
        checkAnswer(
          sql("select * from t_orc"),
          sql("select * from v_orc"))
      }
    }
  }
  // A catalog entry shaped like a pre-2.2 view (empty view default database, fully
  // qualified relations in viewText) must still resolve and produce correct output.
  test("make sure we can resolve view created by old version of Spark") {
    withTable("hive_table") {
      withView("old_view") {
        spark.sql("CREATE TABLE hive_table AS SELECT 1 AS a, 2 AS b")
        // The views defined by older versions of Spark(before 2.2) will have empty view default
        // database name, and all the relations referenced in the viewText will have database part
        // defined.
        val view = CatalogTable(
          identifier = TableIdentifier("old_view"),
          tableType = CatalogTableType.VIEW,
          storage = CatalogStorageFormat.empty,
          schema = new StructType().add("a", "int").add("b", "int"),
          viewText = Some("SELECT `gen_attr_0` AS `a`, `gen_attr_1` AS `b` FROM (SELECT " +
            "`gen_attr_0`, `gen_attr_1` FROM (SELECT `a` AS `gen_attr_0`, `b` AS " +
            "`gen_attr_1` FROM hive_table) AS gen_subquery_0) AS hive_table")
        )
        hiveContext.sessionState.catalog.createTable(view, ignoreIfExists = false)
        val df = sql("SELECT * FROM old_view")
        // Check the output rows.
        checkAnswer(df, Row(1, 2))
        // Check the output schema.
        assert(df.schema.sameType(view.schema))
      }
    }
  }
  // Hive's `void` column type (produced by `select null`) must map to Spark's NullType,
  // both at view creation and after redefinition via ALTER VIEW.
  test("SPARK-20680: Add HiveVoidType to compatible with Hive void type") {
    withView("v1") {
      sql("create view v1 as select null as c")
      val df = sql("select * from v1")
      assert(df.schema.fields.head.dataType == NullType)
      checkAnswer(
        df,
        Row(null)
      )
      sql("alter view v1 as select null as c1, 1 as c2")
      val df2 = sql("select * from v1")
      assert(df2.schema.fields.head.dataType == NullType)
      checkAnswer(
        df2,
        Row(null, 1)
      )
    }
  }
}
| witgo/spark | sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSQLViewSuite.scala | Scala | apache-2.0 | 6,167 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel
package scala
package dsl
import languages.LanguageFunction
import org.apache.camel.Exchange
import org.apache.camel.model._
import org.apache.camel.processor.aggregate.AggregationStrategy
import org.apache.camel.scala.dsl.builder.RouteBuilder
import reflect.Manifest
import java.lang.String
import java.util.Comparator
import spi.{Language, Policy}
abstract class SAbstractDefinition[P <: ProcessorDefinition[_]] extends DSL with Wrapper[P] with Block {
  // The wrapped Java DSL processor definition that every method below delegates to.
  val target: P
  val unwrap = target
  implicit val builder: RouteBuilder
  // Lift plain Scala closures on Exchange into Camel predicate/expression objects so
  // the DSL methods below can accept function literals directly.
  implicit def predicateBuilder(predicate: Exchange => Any) = new ScalaPredicate(predicate)
  implicit def expressionBuilder(expression: Exchange => Any) = new ScalaExpression(expression)
  // Evaluates `block` within this definition via the route builder, then returns
  // this definition so calls can be chained.
  def apply(block: => Unit) = {
    builder.build(this, block)
    this
  }
  /**
   * Helper method to return this Scala type instead of creating another wrapper type for the processor
   */
  def wrap(block: => Unit): SAbstractDefinition[_] = {
    block
    this
  }
  // EIPs
  //-----------------------------------------------------------------
  def aggregate(expression: Exchange => Any, strategy: AggregationStrategy) = SAggregateDefinition(target.aggregate(expression, strategy))
  def as[Target](toType: Class[Target]) = wrap(target.convertBodyTo(toType))
  def attempt: STryDefinition = STryDefinition(target.doTry)
  // Dispatches on the runtime type of `bean`: a Class to instantiate, a registry
  // reference looked up by name, or an already-constructed bean instance.
  def bean(bean: Any) = bean match {
    case cls: Class[_] => wrap(target.bean(cls))
    case ref: String => wrap(target.beanRef(ref))
    case obj: Any => wrap(target.bean(obj))
  }
  def choice = SChoiceDefinition(target.choice)
  def delay(period: Period) = SDelayDefinition(target.delay(period.milliseconds))
  def dynamicRouter(expression: Exchange => Any) = wrap(target.dynamicRouter(expression))
  def enrich(uri: String, strategy: AggregationStrategy) = wrap(target.enrich(uri, strategy))
  def filter(predicate: Exchange => Any) = SFilterDefinition(target.filter(predicateBuilder(predicate)))
  // Registers an onException handler for exception type E (taken from the implicit manifest).
  def handle[E](block: => Unit)(implicit manifest: Manifest[E]) = SOnExceptionDefinition(target.onException(manifest.erasure)).apply(block)
  def id(id : String) = wrap(target.id(id))
  def idempotentConsumer(expression: Exchange => Any) = SIdempotentConsumerDefinition(target.idempotentConsumer(expression, null))
  def inOnly = wrap(target.inOnly)
  def inOut = wrap(target.inOut)
  def loadbalance = SLoadBalanceDefinition(target.loadBalance)
  def log(message: String) = wrap(target.log(message))
  def log(level: LoggingLevel, message: String) = wrap(target.log(level, message))
  def log(level: LoggingLevel, logName: String, message: String) = wrap(target.log(level, logName, message))
  def loop(expression: Exchange => Any) = SLoopDefinition(target.loop(expression))
  def marshal(format: DataFormatDefinition) = wrap(target.marshal(format))
  def multicast = SMulticastDefinition(target.multicast)
  def onCompletion: SOnCompletionDefinition = {
    var completion = SOnCompletionDefinition(target.onCompletion)
    // let's end the block in the Java DSL, we have a better way of handling blocks here
    completion.target.end
    completion
  }
  def onCompletion(predicate: Exchange => Boolean) = onCompletion().when(predicate).asInstanceOf[SOnCompletionDefinition]
  def onCompletion(config: Config[SOnCompletionDefinition]) = {
    val completion = onCompletion().asInstanceOf[SOnCompletionDefinition]
    config.configure(completion)
    completion
  }
  // `otherwise` is only meaningful inside a choice; SChoiceDefinition overrides it.
  def otherwise: SChoiceDefinition = throw new Exception("otherwise is only supported in a choice block or after a when statement")
  def pipeline = SPipelineDefinition(target.pipeline)
  def policy(policy: Policy) = wrap(target.policy(policy))
  def pollEnrich(uri: String, strategy: AggregationStrategy = null, timeout: Long = 0) =
    wrap(target.pollEnrich(uri, timeout, strategy))
  def process(function: Exchange => Unit) = wrap(target.process(new ScalaProcessor(function)))
  def process(processor: Processor) = wrap(target.process(processor))
  def recipients(expression: Exchange => Any) = wrap(target.recipientList(expression))
  def resequence(expression: Exchange => Any) = SResequenceDefinition(target.resequence(expression))
  def rollback = wrap(target.rollback)
  def routeId(routeId: String) = wrap(target.routeId(routeId))
  def routingSlip(header: String) = wrap(target.routingSlip(header))
  def routingSlip(header: String, separator: String) = wrap(target.routingSlip(header, separator))
  def routingSlip(expression: Exchange => Any) = wrap(target.routingSlip(expression))
  def setBody(expression: Exchange => Any) = wrap(target.setBody(expression))
  def setFaultBody(expression: Exchange => Any) = wrap(target.setFaultBody(expression))
  def setHeader(name: String, expression: Exchange => Any) = wrap(target.setHeader(name, expression))
  def sort[T](expression: (Exchange) => Any, comparator: Comparator[T] = null) = wrap(target.sort(expression, comparator))
  def split(expression: Exchange => Any) = SSplitDefinition(target.split(expression))
  def stop = wrap(target.stop)
  def threads = SThreadsDefinition(target.threads)
  def throttle(frequency: Frequency) = SThrottleDefinition(target.throttle(frequency.count).timePeriodMillis(frequency.period.milliseconds))
  def throwException(exception: Exception) = wrap(target.throwException(exception))
  def transacted = wrap(target.transacted)
  def transacted(ref: String) = wrap(target.transacted(ref))
  def transform(expression: Exchange => Any) = wrap(target.transform(expression))
  def unmarshal(format: DataFormatDefinition) = wrap(target.unmarshal(format))
  def validate(expression: Exchange => Any) = wrap(target.validate(predicateBuilder(expression)))
  def when(filter: Exchange => Any): DSL with Block = SChoiceDefinition(target.choice).when(filter)
  def wireTap(uri: String) = wrap(target.wireTap(uri))
  def wireTap(uri: String, expression: Exchange => Any) = wrap(target.wireTap(uri).newExchangeBody(expression))
  // Arrow aliases for `to`.
  def -->(pattern: ExchangePattern, uri: String) = wrap(target.to(pattern, uri))
  def -->(uris: String*) = to(uris:_*)
  def to(pattern: ExchangePattern, uri: String) = wrap(target.to(pattern, uri))
  // A single uri becomes a plain `to`; multiple uris are sent via an implicit multicast.
  def to(uris: String*) = {
    uris.length match {
      case 1 => target.to(uris(0))
      case _ => {
        val multi = multicast
        uris.foreach(multi.to(_))
      }
    }
    this
  }
}
| cexbrayat/camel | components/camel-scala/src/main/scala/org/apache/camel/scala/dsl/SAbstractDefinition.scala | Scala | apache-2.0 | 7,180 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.network.netty
import java.io.IOException
import scala.concurrent.{ExecutionContext, Future}
import scala.reflect.ClassTag
import scala.util.Random
import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.{mock, times, verify, when}
import org.scalatest.BeforeAndAfterEach
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.{ExecutorDeadException, SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.network.BlockDataManager
import org.apache.spark.network.client.{TransportClient, TransportClientFactory}
import org.apache.spark.network.shuffle.{BlockFetchingListener, DownloadFileManager}
import org.apache.spark.rpc.{RpcAddress, RpcEndpointRef, RpcTimeout}
class NettyBlockTransferServiceSuite
  extends SparkFunSuite
  with BeforeAndAfterEach
  with Matchers {
  private var service0: NettyBlockTransferService = _
  private var service1: NettyBlockTransferService = _
  // Close any services a test created so their ports are released between tests.
  override def afterEach(): Unit = {
    try {
      if (service0 != null) {
        service0.close()
        service0 = null
      }
      if (service1 != null) {
        service1.close()
        service1 = null
      }
    } finally {
      super.afterEach()
    }
  }
  test("can bind to a random port") {
    service0 = createService(port = 0)
    service0.port should not be 0
  }
  test("can bind to two random ports") {
    service0 = createService(port = 0)
    service1 = createService(port = 0)
    service0.port should not be service1.port
  }
  test("can bind to a specific port") {
    // Randomize the requested port to avoid collisions between concurrent test runs.
    val port = 17634 + Random.nextInt(10000)
    logInfo("random port for test: " + port)
    service0 = createService(port)
    verifyServicePort(expectedPort = port, actualPort = service0.port)
  }
  test("can bind to a specific port twice and the second increments") {
    val port = 17634 + Random.nextInt(10000)
    logInfo("random port for test: " + port)
    service0 = createService(port)
    verifyServicePort(expectedPort = port, actualPort = service0.port)
    service1 = createService(service0.port)
    // `service0.port` is occupied, so `service1.port` should not be `service0.port`
    verifyServicePort(expectedPort = service0.port + 1, actualPort = service1.port)
  }
  // When the remote executor is dead (driver's liveness check returns false) and the
  // fetch hits an IOException, the listener must receive an ExecutorDeadException.
  test("SPARK-27637: test fetch block with executor dead") {
    implicit val exectionContext = ExecutionContext.global
    val port = 17634 + Random.nextInt(10000)
    logInfo("random port for test: " + port)
    val driverEndpointRef = new RpcEndpointRef(new SparkConf()) {
      override def address: RpcAddress = null
      override def name: String = "test"
      override def send(message: Any): Unit = {}
      // This rpcEndPointRef always return false for unit test to touch ExecutorDeadException.
      override def ask[T: ClassTag](message: Any, timeout: RpcTimeout): Future[T] = {
        Future{false.asInstanceOf[T]}
      }
    }
    val clientFactory = mock(classOf[TransportClientFactory])
    val client = mock(classOf[TransportClient])
    // This is used to touch an IOException during fetching block.
    when(client.sendRpc(any(), any())).thenAnswer(_ => {throw new IOException()})
    var createClientCount = 0
    when(clientFactory.createClient(any(), any(), any())).thenAnswer(_ => {
      createClientCount += 1
      client
    })
    val listener = mock(classOf[BlockFetchingListener])
    var hitExecutorDeadException = false
    when(listener.onBlockFetchFailure(any(), any(classOf[ExecutorDeadException])))
      .thenAnswer(_ => {hitExecutorDeadException = true})
    service0 = createService(port, driverEndpointRef)
    // Swap in the mocked client factory via reflection.
    // NOTE(review): this uses `getField` with the Scala-mangled qualified-private name;
    // verify the mangled name (and field visibility) matches the compiled class.
    val clientFactoryField = service0.getClass.getField(
      "org$apache$spark$network$netty$NettyBlockTransferService$$clientFactory")
    clientFactoryField.setAccessible(true)
    clientFactoryField.set(service0, clientFactory)
    service0.fetchBlocks("localhost", port, "exec1",
      Array("block1"), listener, mock(classOf[DownloadFileManager]))
    assert(createClientCount === 1)
    assert(hitExecutorDeadException)
  }
  // The service may bind above the requested port (port-retry), but only within the
  // retry budget.
  private def verifyServicePort(expectedPort: Int, actualPort: Int): Unit = {
    actualPort should be >= expectedPort
    // avoid testing equality in case of simultaneous tests
    // if `spark.testing` is true,
    // the default value for `spark.port.maxRetries` is 100 under test
    actualPort should be <= (expectedPort + 100)
  }
  // Builds a NettyBlockTransferService bound to `port`, backed by a mocked
  // BlockDataManager, optionally wired to a driver endpoint for liveness checks.
  private def createService(
      port: Int,
      rpcEndpointRef: RpcEndpointRef = null): NettyBlockTransferService = {
    val conf = new SparkConf()
      .set("spark.app.id", s"test-${getClass.getName}")
    val securityManager = new SecurityManager(conf)
    val blockDataManager = mock(classOf[BlockDataManager])
    val service = new NettyBlockTransferService(conf, securityManager, "localhost", "localhost",
      port, 1, rpcEndpointRef)
    service.init(blockDataManager)
    service
  }
}
| dbtsai/spark | core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala | Scala | apache-2.0 | 5,711 |
/*
* Cerres (c) 2012-2014 EPFL, Lausanne
*/
import cerres.macros._
import ceres.common.{QuadDouble => QD}
import ceres.smartfloat.SmartFloat
import ceres.common._
import ceres.affine._
/**
* Regression tests for macros.
*/
object MacroTest extends App {
  import Macros._
  import math._
  // Only testDerivativeN is currently enabled; the other checks are kept for
  // manual runs and are commented out below.
  /*testDerivative1
  testDerivative2
  testDerivative3*/
  testDerivativeN
  /*
  testUnary
  testBinary
  testTernary
  testAnyN*/
  //testFunctionTransform
  // Checks that Double-based functions can be lifted to Interval and AffineForm
  // arithmetic; prints lifted vs. plain results side by side for eyeballing.
  def testFunctionTransform = {
    val f1 = (x: Double) => sin(x + 0.4) + log(3*x)/(x*x)
    val f2 = (x: Double, y: Double) => sin(x + y) / log(x*x) + 1
    val f1Integ = double2Interval(f1)
    println(f1Integ(Interval(0.3)).mid + " " + f1(0.3))
    println(f1Integ(Interval(0.45)).mid + " " + f1(0.45))
    val f2Integ = double2Interval(f2)
    println(f2Integ(Interval(0.3), Interval(0.75)).mid + " " + f2(0.3, 0.75))
    println(f2Integ(Interval(0.45), Interval(0.98)).mid + " " + f2(0.45, 0.98))
    val f1Aff = double2Affine(f1)
    println(f1Aff(AffineForm(0.3)).x0 + " " + f1(0.3))
    println(f1Aff(AffineForm(0.45)).x0 + " " + f1(0.45))
    val f2Aff = double2Affine(f2)
    println(f2Aff(AffineForm(0.3), AffineForm(0.75)).x0 + " " + f2(0.3, 0.75))
    println(f2Aff(AffineForm(0.45), AffineForm(0.98)).x0 + " " + f2(0.45, 0.98))
  }
  // Regression check: derivative of a unary function; prints `true` when the
  // macro-generated derivative reproduces the recorded reference value exactly.
  def testDerivative1 = {
    val a1 = 10.0; val a2 = 13.0; val a3 = 8.0; val a4 = 10.0; val b = 0.4
    val f = (x: Double) =>
      a1/a2 * cos(b) - a1/a4 * cos(x) - cos(b - x) +
        (a1*a1 + a2*a2 - a3*a3 + a4*a4)/(2*a2*a4)
    val fPrime = derivative(f)
    println("\\ntestDerivative1")
    println(fPrime(0.356) == 0.30454203373225586)
  }
  // Regression check: 2x2 Jacobian of a binary system against recorded values.
  def testDerivative2 = {
    val f1 = (x1: Double, x2: Double) => x1 * cos(x2) + x2 * cos(x1) - 0.9
    val f2 = (x1: Double, x2: Double) => x1 * sin(x2) + x2 * sin(x1) - 0.1
    val j = jacobian(f1, f2)
    println("\\ntestDerivative2")
    println(j(0)(0)(0.33, 0.33) == 0.8391081441580804)
    println(j(0)(1)(0.25, 0.37) == 0.8785085637194042)
    println(j(1)(0)(0.33, 0.33) == 0.636237001759236)
    println(j(1)(1)(0.33, 0.77) == 0.5609535493664797)
  }
  // Regression check: 3x3 Jacobian of a ternary system against recorded values.
  def testDerivative3 = {
    import math._
    import scala.{Double => D}
    val f1 = (v: D, w: D, r: D) => 3 + 2/(r*r) - 0.125*(3-2*v)*(w*w*r*r)/(1-v) - 4.5
    val f2 = (v: D, w: D, r: D) => 6*v - 0.5 * v * (w*w*r*r) / (1-v) - 2.5
    val f3 = (v: D, w: D, r: D) => 3 - 2/(r*r) - 0.125 * (1+2*v) * (w*w*r*r) / (1-v) - 0.5
    val j = jacobian(f1, f2, f3)
    println("\\ntestDerivative3")
    println(j(0)(0)(0.33, 0.33, 0.33) == -0.00330229728224549)
    println(j(0)(1)(0.25, 0.37, 0.264) == -0.021489600000000005)
    println(j(0)(2)(0.25, 0.37, 0.001) == -4.000000000000114E9)
    println(j(1)(0)(0.33, 0.33, 0.33) == 5.986790810871018)
    println(j(1)(1)(0.25, 0.37, 0.264) == -0.00859584)
    println(j(1)(2)(0.25, 0.37, 0.001) == -4.563333333333333E-5)
    println(j(2)(0)(0.33, 0.33, 0.33) == -0.00990689184673647)
    println(j(2)(1)(0.25, 0.37, 0.264) == -0.012893759999999999)
    println(j(2)(2)(0.25, 0.37, 0.001) == 3.9999999999999313E9)
  }
  // Regression check: Jacobian built from a List of functions (arbitrary arity path).
  def testDerivativeN = {
    val f1 = (x1: Double, x2: Double, x3: Double, x4: Double) =>
      x1 * cos(x2) + x2 * cos(x1) - 0.9 + cos(2*x3) - cos(2*x4) - 0.4
    val f2 = (x1: Double, x2: Double, x3: Double, x4: Double) =>
      x1 * sin(x2) + x2 * sin(x1) - 0.1 + 2*(x4 - x3) + sin(2*x4) - sin(2*x3) - 1.2
    val f3 = (x1: Double, x2: Double, x3: Double, x4: Double) =>
      x3 * cos(x4) + x4 * cos(x3) - 0.9 + cos(2*x1) - cos(2*x2) - 0.4
    val f4 = (x1: Double, x2: Double, x3: Double, x4: Double) =>
      x3 * sin(x4) + x4 * sin(x3) - 0.1 + 2*(x2 - x1) + sin(2*x2) - sin(2*x1) - 1.2
    val j = jacobian(List(f1, f2, f3, f4))
    println("\\ntestDerivative X")
    println(j(0)(0)(0.33, 0.33, 0.11, 0.33) == 0.8391081441580804)
    println(j(0)(1)(0.25, 0.37, 0.11, 0.33) == 0.8785085637194042)
    println(j(1)(0)(0.33, 0.11, 0.33, 0.33) == 0.2138429586252974)
    println(j(1)(1)(0.33, 0.77, 0.25, 0.77) == 0.5609535493664797)
  }
  // Regression check: error bounds for a unary root against recorded interval strings.
  def testUnary = {
    val r = 4.0; val K = 1.11; val x0 = 0.1; val tol = 1e-9
    val x = 3.329999999902176
    val err = errorBound((x: Double) => r / (1 + (x/K)) - 1, x, tol)
    val err2 = assertBound((x: Double) => r / (1 + (x/K)) - 1, x, tol)
    val err3 = certify((x: Double) => r / (1 + (x/K)) - 1, x, tol)
    println("\\n Unary test")
    println(err.toString == "[-9.7827e-11,-9.7823e-11]")
    println(err2.toString == "[-9.7827e-11,-9.7823e-11]")
    println(err3.toString == "[3.329999999804349,3.330000000000003] (1.110223024690388E-16)")
  }
  // Regression check: error bounds for a binary system's roots.
  def testBinary = {
    import math._
    val tol = 1e-9
    val f1 = (x1: Double, x2: Double) => cos(2*x1) - cos(2*x2) - 0.4
    val f2 = (x1: Double, x2: Double) => 2*(x2 - x1) + sin(2*x2) - sin(2*x1) - 1.2
    //val x0 = Array(0.1, 0.5)
    val roots = Array(0.1565200696473004, 0.4933763741817787)
    val errors = errorBound(f1, f2, roots(0), roots(1), tol)
    val errors2 = assertBound(f1, f2, roots(0), roots(1), tol)
    //val errors3 = certify(f1, f2, roots(0), roots(1), tol)
    println("\\n Binary test")
    println(errors.deep.mkString(", ") == "[3.5835e-11,3.5836e-11], [4.1466e-11,4.1467e-11]")
    println(errors2.deep.mkString(", ") == "[3.5835e-11,3.5836e-11], [4.1466e-11,4.1467e-11]")
    //println(errors3.deep.mkString(", "))
  }
  // Regression check: error bounds for a ternary system's roots.
  def testTernary = {
    import math._
    import scala.{Double => D}
    val tol = 1e-9
    val f1 = (v: D, w: D, r: D) =>
      3 + 2/(r*r) - 0.125*(3-2*v)*(w*w*r*r)/(1-v) - 4.5
    val f2 = (v: D, w: D, r: D) =>
      6*v - 0.5 * v * (w*w*r*r) / (1-v) - 2.5
    val f3 = (v: D, w: D, r: D) =>
      3 - 2/(r*r) - 0.125 * (1+2*v) * (w*w*r*r) / (1-v) - 0.5
    //val x0 = Array(0.75, 0.5, 0.5)
    val roots = Array(0.5, 1.0000000000018743, 0.9999999999970013)
    val errors = errorBound(f1, f2, f3, roots(0), roots(1), roots(2), tol)
    val errors2 = assertBound(f1, f2, f3, roots(0), roots(1), roots(2), tol)
    //val errors3 = certify(f1, f2, f3, roots(0), roots(1), roots(2), tol)
    println("\\n Ternary test")
    println(errors.deep.mkString(", ") == "[-4.2403e-16,5.0730e-16], [-1.8757e-12,-1.8730e-12], [2.9983e-12,2.9991e-12]")
    println(errors2.deep.mkString(", ") == "[-4.2403e-16,5.0730e-16], [-1.8757e-12,-1.8730e-12], [2.9983e-12,2.9991e-12]")
  }
  // Regression check: the List-based (arbitrary arity) error-bound entry points agree
  // with the fixed-arity ternary results above.
  def testAnyN = {
    import math._
    import scala.{Double => D}
    val tol = 1e-9
    val f1 = (v: D, w: D, r: D) =>
      3 + 2/(r*r) - 0.125*(3-2*v)*(w*w*r*r)/(1-v) - 4.5
    val f2 = (v: D, w: D, r: D) =>
      6*v - 0.5 * v * (w*w*r*r) / (1-v) - 2.5
    val f3 = (v: D, w: D, r: D) =>
      3 - 2/(r*r) - 0.125 * (1+2*v) * (w*w*r*r) / (1-v) - 0.5
    //val x0 = Array(0.75, 0.5, 0.5)
    val roots = Array(0.5, 1.0000000000018743, 0.9999999999970013)
    val errors = errorBoundX(List(f1, f2, f3), roots, tol)
    val errors2 = assertBoundX(List(f1, f2, f3), roots, tol)
    //val errors3 = certifyX(f1, f2, f3, roots(0), roots(1), roots(2), tol)
    println("\\n Any N test")
    println(errors.deep.mkString(", ") == "[-4.2403e-16,5.0730e-16], [-1.8757e-12,-1.8730e-12], [2.9983e-12,2.9991e-12]")
    println(errors2.deep.mkString(", ") == "[-4.2403e-16,5.0730e-16], [-1.8757e-12,-1.8730e-12], [2.9983e-12,2.9991e-12]")
  }
}
| malyzajko/cassia | macros/test/src/MacroTest.scala | Scala | bsd-3-clause | 7,286 |
package benchmarks
import java.util.concurrent.TimeUnit
import org.openjdk.jmh.annotations._
import org.scalafmt.Scalafmt
import org.scalafmt.benchmarks.FormatBenchmark
import scala.collection.GenIterable
import scala.meta.testkit.Corpus
import scala.util.Try
/** Formats filename at with scalafmt.
*
* To run benchmark:
*
* > benchmarks/jmh:run -i 10 -wi 10 -f1 -t1 org.scalafmt.*
*/
@org.openjdk.jmh.annotations.State(Scope.Benchmark)
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.SECONDS)
abstract class MacroBenchmark(parallel: Boolean, maxFiles: Int)
    extends FormatBenchmark {
  // Corpus file contents to format; populated by setup() before measurement begins.
  var files: GenIterable[String] = _
  override def toString = s"${this.getClass.getName}(parallel=$parallel)"
  // Downloads/reads up to `maxFiles` scala-js files from the fastparse corpus; when
  // `parallel` is set the resulting collection is a parallel collection, so each
  // benchmark invocation formats the files concurrently.
  @Setup
  def setup(): Unit = {
    files = {
      val x = Corpus
        .files(
          Corpus.fastparse.copy(
            // TODO(olafur) remove once testkit 1.7 is out
            url = Corpus.fastparse.url.replace("olafurpg", "scalameta")
          )
        )
        .filter { f => f.projectUrl.contains("scala-js") }
        .take(maxFiles)
        .map(_.read)
        .toBuffer
      if (parallel) x.par
      else x
    }
  }
  // Convenience entry point for running the benchmark body outside JMH.
  def testMe(): Unit = {
    setup()
    scalafmt()
  }
  // Failures are swallowed via Try: the benchmark measures formatting throughput,
  // not parse success.
  @Benchmark
  def scalafmt(): Unit = {
    files.foreach { file => Try(Scalafmt.format(file)) }
  }
  @Benchmark
  def scalafmt_rewrite(): Unit = {
    files.foreach { file => Try(formatRewrite(file)) }
  }
}
/** Macro benchmark over a small corpus sample (10 files). */
object MacroSmall {
  val size: Int = 10

  class Parallel extends MacroBenchmark(parallel = true, maxFiles = size)
  // class Synchronous extends MacroBenchmark(parallel = false, size)
}
/** Macro benchmark over a large corpus sample (10000 files). */
object MacroHuge {
  val size: Int = 10000

  class Parallel extends MacroBenchmark(parallel = true, maxFiles = size)
  // class Synchronous extends MacroBenchmark(parallel = false, size)
}
| scalameta/scalafmt | scalafmt-benchmarks/src/main/scala-2.12/benchmarks/MacroBenchmark.scala | Scala | apache-2.0 | 1,918 |
package com.sk.app.proxmock
import java.util.concurrent.TimeUnit
import com.sk.app.proxmock.application.ProxmockApplication
import com.sk.app.proxmock.console.ArgsParser
import org.slf4j.LoggerFactory
import scalafx.application.JFXApp
import scalafx.application.JFXApp.PrimaryStage
import scalafx.scene.Scene
import scalafx.scene.paint.Color._
/**
* Created by Szymon on 15.05.2016.
*/
object Main extends JFXApp {
  // Primary JavaFX window; only shown when no console command matches (see main below).
  stage = new PrimaryStage {
    title = "ScalaFX Hello World"
    scene = new Scene {
      fill = Black
//      content = new HBox {
//        padding = Insets(20)
//        children = Seq(
//          new Text {
//            text = "Hello "
//            style = "-fx-font-size: 48pt"
//            fill = new LinearGradient(
//              endX = 0,
//              stops = Stops(PaleGreen, SeaGreen))
//          },
//          new Text {
//            text = "World!!!"
//            style = "-fx-font-size: 48pt"
//            fill = new LinearGradient(
//              endX = 0,
//              stops = Stops(Cyan, DodgerBlue)
//            )
//            effect = new DropShadow {
//              color = DodgerBlue
//              radius = 25
//              spread = 0.25
//            }
//          }
//        )
//      }
    }
  }
  // Prints command-line usage to stdout.
  def showHelp() = {
    println(
      """
        |list of available commands:
        |   list          - list all running instances
        |   stop n        - stop instance with name: n
        |   help          - displays this information
        |   run filepath  - runs proxymock in background with configuration fetched from file under filepath
        |                   This command accepts also additional parameters (--name) used by spring boot which
        |                   can be used to change proxmock behaviour. i.e.:
        |                       proxmock run /file.yaml --server.port=9090
        |                   See spring boot documentation for more information about available properties
        |
        |i.e.:
        |proxmock.jar list
        |- above command will list all running instances
        |
        |proxmoc.jar stop baka
        |- above command will stop instance with name "baka"
      """.stripMargin)
  }
  // Asks running instances to report themselves; the sleep gives their replies
  // time to arrive before the process exits.
  def listRemote() = {
    ProxmockApplication.listRemote()
    TimeUnit.SECONDS.sleep(1)
  }
  // Asks the named instance to shut down; same grace period as listRemote.
  def closeRemote(name: String) = {
    ProxmockApplication.closeRemote(name)
    TimeUnit.SECONDS.sleep(1)
  }
  // Starts a proxmock instance from the given config file, forwarding extra args.
  def runMock(filePath: String, metaArgs: Array[String]) = {
    ProxmockApplication.run(filePath, metaArgs)
  }
  // Console dispatch: known commands are handled here; anything else falls through
  // to JFXApp's main, which launches the GUI defined above. Parse errors print the
  // stack trace and the usage text.
  override def main(args: Array[String]) = {
    ArgsParser
      .operation("help", _ => showHelp())
      .operation("list", _ => listRemote())
      .unaryOperation("stop", (name, _) => closeRemote(name))
      .unaryOperation("run", (filePath, metaArgs) => runMock(filePath, metaArgs))
      .defaultOperation(_ => super.main(args))
      .error(exception => {
        exception.printStackTrace(System.out)
        showHelp()
      })
      .parse(args)
  }
}
| szymonkudzia/proxmock | sources/src/main/scala/com/sk/app/proxmock/Main.scala | Scala | mit | 3,157 |
package eu.svez.backpressuredemo.B_http
import java.nio.file.Paths
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.stream.scaladsl.FileIO
import eu.svez.backpressuredemo.Flows._
import eu.svez.backpressuredemo.StreamDemo
object FileClient extends StreamDemo {
  // File bytes throttled through a rate valve and metered, so backpressure through
  // the HTTP connection can be observed at the source.
  val byteSource = FileIO
    .fromPath(Paths.get("/tmp/bigfile.zip"))
    .via(valve(sourceRate.get))
    .via(meter("sourceHttp"))
  val host = "localhost"
  val port = 8080
  // Streamed request entity: the server's consumption rate backpressures the file source.
  val request = HttpRequest(
    uri = Uri(s"http://$host:$port/file"),
    entity = HttpEntity(ContentTypes.`application/octet-stream`, byteSource)
  )
  // Initial source rate of 5; adjustable at runtime from stdin below.
  sourceRate.send(5)
  Http().singleRequest(request).onComplete{ _ =>
    println("All sent!")
  }
  // Blocks reading rate updates from stdin until the demo is terminated.
  readRatesFromStdIn()
}
| svezfaz/akka-backpressure-scala-central-talk | demo/src/main/scala/eu/svez/backpressuredemo/B_http/FileClient.scala | Scala | apache-2.0 | 751 |
/**
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.remote.security.provider
import org.uncommons.maths.random.{ AESCounterRNG }
import SeedSize.Seed256
/**
 * INTERNAL API
 * A [[java.security.SecureRandomSpi]] backed by the 256-bit AESCounterRNG
 * from http://maths.uncommons.org/. Seed material comes from
 * InternetSeedGenerator, which draws (depending on availability) from
 * random.org, /dev/random, or Java's SecureRandom.
 * Netty's SSL integration only ever calls engineNextBytes(bytes).
 */
class AES256CounterInetRNG extends java.security.SecureRandomSpi {
  // Counter-mode AES generator, seeded once with 256 bits at construction time.
  private val generator = new AESCounterRNG(engineGenerateSeed(Seed256))

  /**
   * Deliberate no-op: reseeding is managed internally by AESCounterRNG.
   */
  override protected def engineSetSeed(seed: Array[Byte]): Unit = ()

  /**
   * Fills the given array with random bytes.
   *
   * @param bytes the array to be filled in with random bytes.
   */
  override protected def engineNextBytes(bytes: Array[Byte]): Unit =
    generator.nextBytes(bytes)

  /**
   * Returns the given number of seed bytes. This call may be used to
   * seed other random number generators.
   *
   * @param numBytes the number of seed bytes to generate.
   * @return the seed bytes.
   */
  override protected def engineGenerateSeed(numBytes: Int): Array[Byte] =
    InternetSeedGenerator.getInstance.generateSeed(numBytes)
}
| Fincore/org.spark-project.akka | remote/src/main/scala/akka/remote/security/provider/AES256CounterInetRNG.scala | Scala | mit | 1,436 |
package phenan.prj.declaration
import java.io._
import phenan.prj.ir._
import scala.util.Try
/** Compiles declaration source files into [[IRFile]]s.
  * Requires an [[IRs]] mixin (self-type) to construct the IR. */
trait DeclarationCompiler {
  this: IRs =>

  /** Compiles the declaration file at the given path. */
  def compileDeclaration(file: String): Try[IRFile] = compileDeclaration(new FileReader(file), file)

  /** Compiles the given declaration file. */
  def compileDeclaration(file: File): Try[IRFile] = compileDeclaration(new FileReader(file), file.getPath)

  /** Parses a compilation unit from the reader and wraps it into an [[IRFile]].
    * The file name is only used for error reporting and the resulting IRFile. */
  def compileDeclaration(reader: Reader, file: String): Try[IRFile] = {
    val buffered = new BufferedReader(reader)
    DeclarationParsers.tryParse(DeclarationParsers.compilationUnit, buffered, file).map(IRFile(_, file))
  }
}
| csg-tokyo/proteaj2 | src/main/scala/phenan/prj/declaration/DeclarationCompiler.scala | Scala | mit | 557 |
package remotely.transport.aeron.tests
import remotely.{Field, Protocol, Signature, Type, codecs}
import scodec.Codec
/** Remotely protocol used by the aeron transport tests: zero-argument and
  * identity endpoints for Int, Long and String.
  * NOTE(review): the declaration order of codecs/endpoints presumably matters
  * for protocol negotiation — keep it stable; confirm against remotely docs. */
object Test1Protocol {
  import remotely.codecs._
  // Codec for the signature list exchanged when describing the protocol.
  implicit lazy val sigCodec: Codec[List[Signature]] = codecs.list(Signature.signatureCodec)
  val definition = Protocol.empty
    .codec[Int]
    .codec[Long]
    .codec[String]
    .specify0("zeroInt", Type[Int])
    .specify0("zeroLong", Type[Long])
    .specify0("zeroString", Type[String])
    .specify1("idInt", Field.strict[Int]("a"), Type[Int])
    .specify1("idLong", Field.strict[Long]("a"), Type[Long])
    .specify1("idString", Field.strict[String]("a"), Type[String])
    //awaiting fix in remotely Signatures.scala
    //.specify2("addInt", Field.strict[Int]("a"), Field.strict[Int]("b"), Type[Int])
}
| LGLO/remotely-aeron | test-protocols/src/main/scala/remotely/transport/aeron/tests/Test1Protocol.scala | Scala | apache-2.0 | 791 |
package org.jetbrains.plugins.scala.lang.scaladoc.psi.impl
import com.intellij.lang.ASTNode
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementImpl
import org.jetbrains.plugins.scala.lang.scaladoc.lexer.ScalaDocTokenType
import org.jetbrains.plugins.scala.lang.scaladoc.psi.api.{ScDocListItem, ScPsiDocToken}
/** PSI implementation of a single ScalaDoc list item (see [[ScDocListItem]]). */
class ScDocListItemImpl(node: ASTNode) extends ScalaPsiElementImpl(node) with ScDocListItem {
  override def toString: String = "ScDocListItem"
  /** The list-item head token of this item (DOC_LIST_ITEM_HEAD). */
  override def headToken: ScPsiDocToken =
    findChildByType[ScPsiDocToken](ScalaDocTokenType.DOC_LIST_ITEM_HEAD)
} | JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/scaladoc/psi/impl/ScDocListItemImpl.scala | Scala | apache-2.0 | 585 |
/*
* Copyright (c) 2011 Guardian Media Group
*
* This file is part of Zapush.
*
* Zapush is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Zapush is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Foobar. If not, see <http://www.gnu.org/licenses/>.
*/
package zapush
import net.liftweb.util.Helpers._
import java.net.Socket
import net.liftweb.json._
import scalax.io.Resource
import scalax.io.Codec
import net.liftweb.common.{Empty, Full, Failure, Loggable}
/**
 * Collects configured JMX MBean values and pushes them to a Zabbix server
 * using the zabbix_sender JSON wire protocol.
 */
object Sender extends Loggable {
  implicit val jsonFormat = DefaultFormats
  implicit val codec = Codec("ASCII")
  /**
   * Sends all configured MBean values to Zabbix, if Zabbix is configured.
   * Failures are logged rather than propagated.
   */
  def sendNow() {
    for (zabbixConf <- Config.zabbix) {
      val sendResult = tryo { send(zabbixConf) }
      sendResult match {
        case Failure(msg, _, _) => logger.warn("Failed: " + msg)
        case Full(response) => logger.info(response.response + ": " + response.info)
        case Empty => logger.error("Something unexpected happened")
      }
    }
  }
  /**
   * Gathers all MBean property values, sends them over a fresh TCP connection
   * to the Zabbix server and returns the parsed server response.
   * The socket is always closed, even if writing, reading or parsing fails.
   */
  def send(zabbixConfig: ZabbixConfig) = {
    def data: List[Data] =
      for {
        mbean <- MBeans.all
        property <- mbean.properties
        value <- tryo { mbean(property.propertyName) }
      } yield {
        Data(zabbixConfig.hostname, property.zabbixName, value.toString)
      }
    val toSend = ZabbixSenderData(data = data)
    logger.debug("data to send = " + toSend.data.map(d => d.key + " -> " + d.value).mkString("\\n"))
    val socket = new Socket(zabbixConfig.server, zabbixConfig.serverPort)
    try {
      // NB: must use compact json rendering here (i.e. no whitespace) - zabbix
      // fails silently when encountering whitespace in json :(
      val m = ZabbixMessage(compact(render(Extraction.decompose(toSend))))
      logger.debug("writing to zabbix")
      socket.getOutputStream.write(m.asBytes.toArray)
      logger.debug("reading response")
      val input = Resource.fromInputStream(socket.getInputStream)
      val binaryResult = input.byteArray.toList
      logger.debug("binary result: " + binaryResult)
      val result = ZabbixMessage.parse(binaryResult)
      Serialization.read[ZabbixSenderDataResponse](result)
    } finally {
      // Previously the socket was never closed explicitly and leaked on any
      // failure after connect. Close errors are deliberately ignored.
      tryo { socket.close() }
    }
  }
}
| guardian/zapush | src/main/scala/zapush/Sender.scala | Scala | gpl-3.0 | 2,554 |
/*
* Copyright (C) 2011, Mysema Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mysema.scalagen
import org.junit.Assert.assertEquals
import japa.parser.ParseException
import japa.parser.ast.CompilationUnit
import java.io.FileNotFoundException
import org.junit.Test
import com.mysema.examples._
/** JUnit tests for the Constructors transformation: checks how constructor
  * members are removed or merged when converting parsed Java sources. */
class ConstructorsTest extends AbstractParserTest {
  @Test
  def Empty_Constructor_Are_Ignored() {
    val parsed = getCompilationUnit(classOf[WithStatic])
    assertEquals(2, parsed.getTypes.get(0).getMembers.size)
    // Transforming must not change the member count: the empty ctor is ignored.
    val transformed = Constructors.transform(parsed)
    assertEquals(2, transformed.getTypes.get(0).getMembers.size)
  }
  @Test
  def Body_Is_Extracted() {
    val parsed = getCompilationUnit(classOf[Immutable])
    assertEquals(6, parsed.getTypes.get(0).getMembers.size)
    // The constructor bodies are folded away: 6 members become 4.
    val transformed = Constructors.transform(parsed)
    assertEquals(4, transformed.getTypes.get(0).getMembers.size)
  }
  @Test
  def Immutable2 {
    // Only checks that the transformation runs without throwing.
    Constructors.transform(getCompilationUnit(classOf[Immutable2]))
    // TODO
  }
}
| paddymahoney/scalagen | scalagen/src/test/scala/com/mysema/scalagen/ConstructorsTest.scala | Scala | apache-2.0 | 1,530 |
package org.smartpony.core.config.store
import java.io.InputStream
import org.smartpony.common.error.FormattedExceptionLogger
import org.smartpony.core.CLASSPATH_CONFIG_ERROR
import org.smartpony.core.config.store.api.ConfigStoreType
import scala.util.{Failure, Success, Try}
/** ConfigStore that reads documents from the context classloader's classpath. */
class ClassPathConfigStore extends api.ConfigStore {
  /**
   * Reads `documentName` from the classpath.
   *
   * - Missing resource: returns Right(notFound(...)).
   * - Resource found: runs `processor` (first argument is None — no store path)
   *   and returns Right(result), or logs and returns Left on failure.
   * The stream is always closed, even when the processor throws (previously it
   * leaked on processor failure); close errors are ignored.
   */
  override def read[T](documentName: String)(notFound: String => T,
      processor: (Option[String], InputStream) => T): Either[Throwable, T] = {
    type Result = Either[Throwable, T]
    val resource = Option(Thread.currentThread().getContextClassLoader.getResourceAsStream(documentName))
    resource.fold(Right(notFound(s"classpath: $documentName")): Result) { str =>
      Try {
        try {
          processor(None, str)
        } finally {
          Try(str.close())
        }
      } match {
        case Success(result) => Right(result)
        case Failure(x) => {
          FormattedExceptionLogger.logException(CLASSPATH_CONFIG_ERROR, x, showStackTrace = true,
            Option(s"Error occurred when reading $documentName from the classpath"))
          Left(x)
        }
      }
    }
  }
  override def path = "classpath"
  override def storeType = ConfigStoreType.CLASSPATH
}
| Dextaa/smartpony | core/src/main/scala/org/smartpony/core/config/store/ClassPathConfigStore.scala | Scala | mit | 1,266 |
/*
* Licensed to Intel Corporation under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Intel Corporation licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.tensor
import java.io.Serializable
import scala.collection.mutable.{ArrayBuffer, Map}
import breeze.linalg.{DenseMatrix => BrzDenseMatrix, DenseVector => BrzDenseVector}
import com.intel.analytics.bigdl.nn.abstractnn.Activity
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.{File, Table}
import org.apache.spark.mllib.linalg.{DenseMatrix, DenseVector, Matrix, Vector}
import scala.reflect.ClassTag
/**
* It is the class for handling numeric data.
*
* @tparam T should be Double or Float
*/
trait Tensor[T] extends Serializable with TensorMath[T] with Activity {
  /**
   * Dimension number of the tensor. For empty tensor, its dimension number is 0
   *
   * @return dimension number
   */
  def nDimension(): Int
  /**
   * A shortcut of nDimension()
   *
   * @see nDimension()
   */
  def dim(): Int
  /**
   * Size of tensor. Return an array of which each value represent the size on the
   * dimension(i + 1), i is the index of the corresponding value
   * It will generate a new array each time you invoke the method
   *
   * @return size array
   */
  def size(): Array[Int]
  /**
   * size of the tensor on the given dimension
   *
   * @param dim dimension, count from 1
   * @return size
   */
  def size(dim: Int): Int
  /**
   * Jumps between element on the each dimension in the storage.
   * It will generate a new array each time you invoke the method
   *
   * @return strides array
   */
  def stride(): Array[Int]
  /**
   * Jumps between element on the given dimension in the storage.
   *
   * @param dim dimension, count from 1
   * @return jump
   */
  def stride(dim: Int): Int
  /**
   * Fill with a given value. It will change the value of the current tensor and return itself
   *
   * @param v value to fill the tensor
   * @return current tensor
   */
  def fill(v: T): Tensor[T]
  /**
   * Fill with zero. It will change the value of the current tensor and return itself
   *
   * @return current tensor
   */
  def zero(): Tensor[T]
  /**
   * Fill with random value(normal gaussian distribution).
   * It will change the value of the current tensor and return itself
   *
   * @return current tensor
   */
  def randn(): Tensor[T]
  /**
   * Fill with random value(uniform distribution).
   * It will change the value of the current tensor and return itself
   *
   * @return current tensor
   */
  def rand(): Tensor[T]
  /**
   * Fill with random value(bernoulli distribution).
   * It will change the value of the current tensor and return itself
   *
   * @return current tensor
   */
  def bernoulli(p: Double): Tensor[T]
  /**
   * Create a new tensor which exchanges the given dimensions of the current tensor
   *
   * @param dim1 dimension to be exchanged, count from one
   * @param dim2 dimension to be exchanged, count from one
   * @return new tensor
   */
  def transpose(dim1: Int, dim2: Int): Tensor[T]
  /**
   * Shortcut of transpose(1, 2) for 2D tensor
   *
   * @see transpose()
   */
  def t(): Tensor[T]
  /**
   * Query tensor on a given index. Tensor should not be empty
   *
   * @param index count from 1
   * @return
   */
  def apply(index: Int): Tensor[T]
  /**
   * Query the value on a given index. Tensor should not be empty
   *
   * @param indexes the indexes length should be same as the tensor dimension length and each
   *                value count from 1
   * @return the value on the given index
   */
  def apply(indexes: Array[Int]): T
  /**
   * Query the value on a given position. The number of parameters
   * should be equal to the dimension number of the tensor.
   * Tensor should not be empty.
   *
   * @param d1,( d2, d3, d4, d5) the given position
   * @return the value on a given position
   */
  def valueAt(d1: Int): T
  def valueAt(d1: Int, d2: Int): T
  def valueAt(d1: Int, d2: Int, d3: Int): T
  def valueAt(d1: Int, d2: Int, d3: Int, d4: Int): T
  def valueAt(d1: Int, d2: Int, d3: Int, d4: Int, d5: Int): T
  /**
   * Subset the tensor by apply the element of the given table to corresponding dimension of the
   * tensor. The element of the given table can be an Int or another Table.
   * An Int means select on current dimension; A table means narrow on current dimension,
   * the table should has two elements, of which the first is start index and
   * the second is the end index. An empty table is equals to Table(1, size_of_current_dimension)
   * If the table length is less than the tensor dimension, the missing dimension is applied by
   * an empty table
   *
   * @see select
   * @see narrow
   * @param t The table length should be less than or equal to the tensor dimensions
   * @return
   */
  def apply(t: Table): Tensor[T]
  /**
   * For tensor(i) = value. If tensor(i) is another tensor, it will fill the selected subset by
   * the given value
   *
   * @param index index
   * @param value value to write
   */
  def update(index: Int, value: T): Unit
  /**
   * Copy the give tensor value to the select subset of the current tensor by the given index.
   * The subset should
   * has the same size of the given tensor
   *
   * @param index index
   * @param src tensor to write
   */
  def update(index: Int, src: Tensor[T]): Unit
  /**
   * Write the value to the value indexed by the given index array
   *
   * @param indexes index array. It should has same length with the tensor dimension
   * @param value value to write
   */
  def update(indexes: Array[Int], value: T): Unit
  /**
   * Write the value on a given position. The number of parameters
   * should be equal to the dimension number of the tensor.
   *
   * @param d1,( d2, d3, d4, d5) the given position
   * @param value the written value
   * @return
   */
  def setValue(d1: Int, value: T): this.type
  def setValue(d1: Int, d2: Int, value: T): this.type
  def setValue(d1: Int, d2: Int, d3: Int, value: T): this.type
  def setValue(d1: Int, d2: Int, d3: Int, d4: Int, value: T): this.type
  def setValue(d1: Int, d2: Int, d3: Int, d4: Int, d5: Int, value: T): this.type
  /**
   * Fill the select subset of the current tensor with the given value.
   * The element of the given table can be an Int or another Table. An Int means select on current
   * dimension; A tablemeans narrow on current dimension, the table should has two elements,
   * of which the first is start index and the second is the end index. An empty table is equals
   * to Table(1, size_of_current_dimension) If the table length is less than the tensor dimension,
   * the missing dimension is applied by an empty table
   *
   * @param t subset table
   * @param value value to write
   */
  def update(t: Table, value: T): Unit
  /**
   * Copy the given tensor value to the select subset of the current tensor
   * The element of the given table can be an Int or another Table. An Int means select on current
   * dimension; A table means narrow on current dimension, the table should has two elements,
   * of which the first is start index and the second is the end index. An empty table is equals
   * to Table(1, size_of_current_dimension) If the table length is less than the tensor dimension,
   * the missing dimension is applied by an empty table
   *
   * @param t subset table
   * @param src tensor to copy
   */
  def update(t: Table, src: Tensor[T]): Unit
  /**
   * Update the value meeting the filter criteria with the give value
   *
   * @param filter filter
   * @param value value to update
   */
  def update(filter: T => Boolean, value: T): Unit
  /**
   * Check if the tensor is contiguous on the storage
   *
   * @return true if it's contiguous
   */
  def isContiguous(): Boolean
  /**
   * Get a contiguous tensor from current tensor
   *
   * @return the current tensor if it's contiguous; or a new contiguous tensor with separated
   *         storage
   */
  def contiguous(): Tensor[T]
  /**
   * Check if the size is same with the give tensor
   *
   * @param other tensor to be compared
   * @return true if they have same size
   */
  def isSameSizeAs(other: Tensor[_]): Boolean
  /**
   * Get a new tensor with same value and different storage
   *
   * @return new tensor
   */
  override def clone(): Tensor[T] = {
    // NOTE(review): contrary to the scaladoc above, this default implementation
    // returns `this` without copying; concrete tensors are expected to override
    // it with a real deep copy — confirm before relying on a copy being made.
    this
  }
  /**
   * Resize the current tensor to the same size of the given tensor. It will still use the same
   * storage if the storage
   * is sufficient for the new size
   *
   * @param src target tensor
   * @return current tensor
   */
  def resizeAs(src: Tensor[_]): Tensor[T]
  /**
   * Resize the current tensor to the give shape
   *
   * @param sizes Array describe the size
   * @param strides Array describe the jumps
   * @return
   */
  def resize(sizes: Array[Int], strides: Array[Int] = null): Tensor[T]
  def resize(size1: Int): Tensor[T]
  def resize(size1: Int, size2: Int): Tensor[T]
  def resize(size1: Int, size2: Int, size3: Int): Tensor[T]
  def resize(size1: Int, size2: Int, size3: Int, size4: Int): Tensor[T]
  def resize(size1: Int, size2: Int, size3: Int, size4: Int, size5: Int): Tensor[T]
  // def repeatTensor(result: Tensor, tensor: Tensor, size: Int*)
  /**
   * Element number
   *
   * @return element number
   */
  def nElement(): Int
  /**
   * Remove the dim-th dimension and return the subset part. For instance
   * tensor =
   * 1 2 3
   * 4 5 6
   * tensor.select(1, 1) is [1 2 3]
   * tensor.select(1, 2) is [4 5 6]
   * tensor.select(2, 3) is [3 6]
   *
   * @param dim
   * @param index
   * @return
   */
  def select(dim: Int, index: Int): Tensor[T]
  /**
   * Get the storage
   *
   * @return storage
   */
  def storage(): Storage[T]
  /**
   * tensor offset on the storage
   *
   * @return storage offset, count from 1
   */
  def storageOffset(): Int
  /**
   * The Tensor is now going to "view" the same storage as the given tensor. As the result,
   * any modification in the elements of the Tensor will have an impact on the elements of the
   * given tensor, and vice-versa. This is an efficient method, as there is no memory copy!
   *
   * @param other the given tensor
   * @return current tensor
   */
  def set(other: Tensor[T]): Tensor[T]
  /**
   * The Tensor is now going to "view" the given storage, starting at position storageOffset (>=1)
   * with the given dimension sizes and the optional given strides. As the result, any
   * modification in the elements of the Storage will have an impact on the elements of the Tensor,
   * and vice-versa. This is an efficient method, as there is no memory copy!
   *
   * If only storage is provided, the whole storage will be viewed as a 1D Tensor.
   *
   * @param storage
   * @param storageOffset
   * @param sizes
   * @param strides
   * @return current tensor
   */
  def set(storage: Storage[T], storageOffset: Int = 1, sizes: Array[Int] = null,
    strides: Array[Int] = null): Tensor[T]
  /**
   * Shrunk the size of the storage to 0, and also the tensor size
   *
   * @return
   */
  def set(): Tensor[T]
  /**
   * Get a subset of the tensor on dim-th dimension. The offset is given by index, and length is
   * give by size. The important difference with select is that it will not reduce the dimension
   * number. For Instance
   * tensor =
   * 1 2 3
   * 4 5 6
   * tensor.narrow(1, 1, 1) is [1 2 3]
   * tensor.narrow(2, 2, 3) is
   * 2 3
   * 5 6
   *
   * @param dim
   * @param index
   * @param size
   * @return
   */
  def narrow(dim: Int, index: Int, size: Int): Tensor[T]
  /**
   * Copy the value of the given tensor to the current. They should have same size. It will use
   * the old storage
   *
   * @param other source tensor
   * @return current tensor
   */
  def copy(other: Tensor[T]): Tensor[T]
  /**
   * Apply a function to each element of the tensor and modified it value if it return a double
   *
   * @param func applied function
   * @return current tensor
   */
  def apply1(func: T => T): Tensor[T]
  /**
   * Map value of another tensor to corresponding value of current tensor and apply function on
   * the two value and change the value of the current tensor
   * The another tensor should has the same size of the current tensor
   *
   * @param other another tensor
   * @param func applied function
   * @return current tensor
   */
  def map(other: Tensor[T], func: (T, T) => T): Tensor[T]
  /**
   * Removes all singleton dimensions of the tensor
   *
   * @return current tensor
   */
  def squeeze(): Tensor[T]
  /**
   * Removes given dimensions of the tensor if it's singleton
   *
   * @return current tensor
   */
  def squeeze(dim: Int): Tensor[T]
  /**
   * Return a new tensor with specified sizes. The input tensor must be contiguous, and the
   * elements number in the given sizes must be equal to the current tensor
   *
   * @param sizes
   * @return new tensor
   */
  def view(sizes: Int*): Tensor[T] = {
    view(sizes.toArray)
  }
  def view(sizes: Array[Int]): Tensor[T]
  /**
   *
   * Returns a tensor which contains all slices of size @param size
   * in the dimension @param dim. Step between two slices is given by @param step.
   *
   * @param dim
   * @param size
   * @param step Step between two slices
   * @return new tensor
   */
  def unfold(dim: Int, size: Int, step: Int): Tensor[T]
  /**
   * Repeating a tensor allocates new memory, unless result is provided, in which case its memory
   * is resized. sizes specify the number of times the tensor is repeated in each dimension.
   *
   * @param sizes
   * @return
   */
  def repeatTensor(sizes: Array[Int]): Tensor[T]
  /**
   * This is equivalent to this.expand(template.size())
   *
   * @param template the given tensor
   * @return
   */
  def expandAs(template: Tensor[T]): Tensor[T]
  /**
   * Expanding a tensor does not allocate new memory for the data: singleton
   * dimensions are expanded to multiple ones by setting the stride to 0.
   * Any dimension that has size 1 can be expanded to an arbitrary value without
   * new memory allocation. Attempting to expand along a dimension that
   * does not have size 1 will result in an error.
   *
   * @param sizes the size that tensor will expend to
   * @return
   */
  def expand(sizes: Array[Int]): Tensor[T]
  /**
   * Splits current tensor along dimension dim into a result table of Tensors of size size
   * (a number) or less (in the case of the last Tensor). The sizes of the non-dim dimensions
   * remain unchanged. Internally, a series of narrows are performed along dimensions dim.
   * Argument dim defaults to 1.
   *
   * @param size
   * @param dim
   * @return
   */
  def split(size: Int, dim: Int = 1): Array[Tensor[T]]
  /**
   * convert the tensor to BreezeVector, the dimension of the tensor need to be 1.
   *
   * @return BrzDenseVector
   */
  def toBreezeVector(): BrzDenseVector[T]
  /**
   * convert the tensor to MLlibVector, the dimension of the
   * tensor need to be 1, and tensor need to be continuous.
   *
   * @return Vector
   */
  def toMLlibVector(): Vector
  /**
   * convert the tensor to BreezeMatrix, the dimension of the tensor need to be 2.
   *
   * @return BrzDenseMatrix
   */
  def toBreezeMatrix(): BrzDenseMatrix[T]
  /**
   * convert the tensor to MLlibMatrix, the dimension of the
   * tensor need to be 2, and tensor need to be continuous.
   *
   * @return Matrix
   */
  def toMLlibMatrix(): Matrix
  /**
   * return the tensor datatype( DoubleType or FloatType)
   *
   * @return
   */
  def getType(): TensorDataType
  /**
   * Compare and print differences between two tensors
   *
   * @param other
   * @param count
   * @return true if there's difference, vice versa
   */
  def diff(other: Tensor[T], count: Int = 1, reverse: Boolean = false): Boolean
  /**
   * view this.tensor and add a Singleton Dimension to `dim` dimension
   *
   * @param t source tensor
   * @param dim the specific dimension, default is 1
   * @return this
   */
  def addSingletonDimension(t: Tensor[T] = this, dim: Int = 1): Tensor[T]
  /**
   * create a new tensor without any change of the tensor
   *
   * @param sizes the size of the new Tensor
   * @return
   */
  def reshape(sizes: Array[Int]): Tensor[T]
  /**
   * Save the tensor to given path
   *
   * @param path
   * @param overWrite
   * @return
   */
  def save(path : String, overWrite : Boolean = false) : this.type
  // A Tensor is an Activity but can never be viewed as a Table.
  override def toTable: Table =
    throw new IllegalArgumentException("Tensor cannot be cast to Table")
}
/** Runtime tag describing a tensor's element type; returned by Tensor.getType(). */
sealed trait TensorDataType
/** Tag for tensors of Double. */
object DoubleType extends TensorDataType
/** Tag for tensors of Float. */
object FloatType extends TensorDataType
object Tensor {
/**
* Returns an empty tensor.
*
* @param ev
* @tparam T
* @return
*/
def apply[@specialized(Float, Double) T: ClassTag]()(
implicit ev: TensorNumeric[T]): Tensor[T] = new DenseTensor[T]()
/**
* Create a tensor up to 5 dimensions. The tensor size will be `d1 x d2 x d3 x d4 x d5`.
*
* @param d1,(d2, d3, d4, d5)
* @param ev
* @tparam T
* @return
*/
def apply[@specialized(Float, Double) T: ClassTag](d1: Int)(
implicit ev: TensorNumeric[T]): Tensor[T] = new DenseTensor[T](d1)
def apply[@specialized(Float, Double) T: ClassTag](d1: Int, d2: Int)(
implicit ev: TensorNumeric[T]): Tensor[T] = new DenseTensor[T](d1, d2)
def apply[@specialized(Float, Double) T: ClassTag](d1: Int, d2: Int, d3: Int)(
implicit ev: TensorNumeric[T]): Tensor[T] = new DenseTensor[T](d1, d2, d3)
def apply[@specialized(Float, Double) T: ClassTag](d1: Int, d2: Int, d3: Int, d4: Int)(
implicit ev: TensorNumeric[T]): Tensor[T] = new DenseTensor[T](d1, d2, d3, d4)
def apply[@specialized(Float, Double) T: ClassTag](d1: Int, d2: Int, d3: Int, d4: Int, d5: Int)(
implicit ev: TensorNumeric[T]): Tensor[T] = new DenseTensor[T](d1, d2, d3, d4, d5)
/**
* Create a tensor with a table
* @param xs the table contains a multi-dimensional numbers
* @return a new Tensor
*/
def apply[@specialized(Float, Double) T: ClassTag](xs : Table)(
implicit ev: TensorNumeric[T]): Tensor[T] = {
val map = xs.flatten().getState().asInstanceOf[Map[Int, T]]
val content = new Array[T](map.values.size)
var i = 1
for (i <- 1 to content.size) {
content(i - 1) = map(i)
}
val dims = new ArrayBuffer[Int]()
def getDims(xs: Table): ArrayBuffer[Int] = xs match {
case _ if xs.length() != 0 =>
dims.append(xs.length())
if (xs(1).isInstanceOf[Table]) {
getDims(xs(1))
}
dims
case otherwise => dims
}
getDims(xs)
new DenseTensor[T](
new ArrayStorage[T](content), 0, dims.toArray,
DenseTensor.size2Stride(dims.toArray), dims.length)
}
/**
* Create a tensor on given dimensions. The tensor size will be the product of dims
*
* @param dims
* @param ev
* @tparam T
* @return
*/
def apply[@specialized(Float, Double) T: ClassTag](dims: Int*)(
implicit ev: TensorNumeric[T]): Tensor[T] =
new DenseTensor[T](new ArrayStorage[T](new Array[T](dims.product)), 0, dims.toArray,
DenseTensor.size2Stride(dims.toArray), dims.length)
/**
* Create a tensor on given sizes. The tensor size will be the product of sizes
*
* @param sizes
* @param ev
* @tparam T
* @return
*/
def apply[@specialized(Float, Double) T: ClassTag](sizes: Array[Int])(
implicit ev: TensorNumeric[T]): Tensor[T] =
new DenseTensor(new ArrayStorage[T](new Array[T](sizes.product)), 0, sizes.clone(),
DenseTensor.size2Stride(sizes.clone()), sizes.length)
/**
* Returns a tensor which uses the existing Storage storage.
*
* @param storage the given storage
* @param ev
* @tparam T
* @return
*/
def apply[@specialized(Float, Double) T: ClassTag](storage: Storage[T])(
implicit ev: TensorNumeric[T]): Tensor[T] = {
new DenseTensor(storage.asInstanceOf[Storage[T]])
}
/**
* Returns a tensor with the given array and shape
*
* @param data the given storage
* @param shape the given shape
* @param ev
* @tparam T
* @return
*/
def apply[@specialized(Float, Double) T: ClassTag](data: Array[T],
shape: Array[Int])(implicit ev: TensorNumeric[T]): Tensor[T] = {
new DenseTensor[T]().set(Storage[T](data), storageOffset = 1, sizes = shape)
}
/**
* Returns a tensor which uses the existing Storage storage, starting at
* position storageOffset (>=1). The size of each dimension of the tensor
* is given by the optional Array size. If not given, the size will be computed
* as the length of storage. The jump necessary to go from one element to the
* next one in each dimension is given by the optional Array stride. If not
* given, the stride() will be computed such that the tensor is as contiguous
* as possible in memory.
*
* @param storage
* @param storageOffset
* @param size
* @param stride
* @param ev
* @tparam T
* @return
*/
def apply[@specialized(Float, Double) T: ClassTag](storage: Storage[T],
storageOffset: Int,
size: Array[Int] = null,
stride: Array[Int] = null)
(implicit ev: TensorNumeric[T]): Tensor[T] = {
new DenseTensor(storage.asInstanceOf[Storage[T]], storageOffset, size, stride)
}
/**
* create a tensor with a given tensor. The tensor will have same size
* with the given tensor.
*
* @param other the given tensor
* @param ev
* @tparam T
* @return
*/
def apply[@specialized(Float, Double) T: ClassTag](other: Tensor[T])(
implicit ev: TensorNumeric[T]): Tensor[T] = new DenseTensor(other)
/**
* create a tensor with a given breeze vector. The tensor will have the same size
* with the given breeze vector.
*
* @param vector the given breeze vector
* @param ev
* @tparam T
* @return
*/
def apply[@specialized(Float, Double) T: ClassTag](vector: BrzDenseVector[T])(
implicit ev: TensorNumeric[T]): Tensor[T] = apply(Storage(vector.data),
vector.offset + 1, Array(vector.length), Array(vector.stride))
/**
* create a tensor with a given spark Densevector. The tensor will have the same size
* with the given spark Densevector.
*
* @param vector the given spark Densevector
* @return
*/
def apply(vector: DenseVector): Tensor[Double] =
apply[Double](Storage(vector.toArray))
/**
* create a tensor with a given breeze matrix. The tensor will have the same size with
* the given breeze matrix.
*
* @param matrix the given breeze matrix
* @param ev
* @tparam T
* @return
*/
def apply[@specialized(Float, Double) T: ClassTag](matrix: BrzDenseMatrix[T])(
implicit ev: TensorNumeric[T]): Tensor[T] = apply(Storage(matrix.data),
matrix.offset + 1, Array(matrix.rows, matrix.cols),
if (matrix.isTranspose) Array(matrix.majorStride, 1) else Array(1, matrix.majorStride))
/**
* create a tensor with a given spark Densematrix. The tensor will have the same size with
* the given spark Densematrix.
*
* @param matrix
* @return
*/
def apply(matrix: DenseMatrix): Tensor[Double] = {
val strides = if (matrix.isTransposed) {
Array(matrix.numCols, 1)
} else {
Array(1, matrix.numRows) // column major
}
apply(Storage(matrix.toArray), 1, Array(matrix.numRows, matrix.numCols), strides)
}
/**
* This is equivalent to DenseTensor.randperm[T](size)
*
* @param size
* @param ev
* @tparam T
* @return
*/
def randperm[@specialized(Float, Double) T: ClassTag](size: Int)(
implicit ev: TensorNumeric[T]): Tensor[T] = DenseTensor.randperm[T](size)
/**
* This is equivalent to tensor.expand(sizes.toArray)
*
* @param tensor
* @param sizes
* @tparam T
* @return
*/
def expand[T](tensor: Tensor[T], sizes: Int*): Tensor[T] = tensor.expand(sizes.toArray)
/**
* This is equivalent to tensor.expandAs(template)
*
* @param tensor
* @param template
* @tparam T
* @return
*/
def expandAs[T](tensor: Tensor[T], template: Tensor[T]): Tensor[T] = tensor.expandAs(template)
/**
* This is equivalent to tensor.repeatTensor(sizes.toArray)
*
* @param tensor
* @param sizes
* @tparam T
* @return
*/
def repeatTensor[T](tensor: Tensor[T], sizes: Int*): Tensor[T] =
tensor.repeatTensor(sizes.toArray)
def load[T](path : String) : Tensor[T] = {
File.load[Tensor[T]](path)
}
/**
 * Creates a tensor spanning `xmin` to `xmax` in increments of `step`;
 * equivalent to `DenseTensor.range[T](xmin, xmax, step)`.
 *
 * @param xmin start of the range
 * @param xmax end of the range
 * @param step increment between consecutive elements (defaults to 1)
 * @return a tensor holding the range values
 */
def range[@specialized(Float, Double) T: ClassTag](xmin: Double, xmax: Double, step: Int = 1)(
  implicit ev: TensorNumeric[T]): Tensor[T] = {
  DenseTensor.range[T](xmin, xmax, step)
}
/**
 * Creates a tensor of the given sizes with every element set to 1.
 *
 * @param sizes size of each dimension
 * @param ev tensor numeric operations for the element type
 * @tparam T element type of the tensor
 * @return a tensor filled with ones
 */
def ones[@specialized(Float, Double) T: ClassTag](sizes: Int*)(
  implicit ev: TensorNumeric[T]): Tensor[T] = DenseTensor.ones[T](sizes.toArray)
/**
 * Builds a 1D Gaussian kernel of length `size` with the given mean and
 * standard deviation, delegating to `DenseTensor.gaussian1D`.
 *
 * @param size length of the kernel (ignored when `tensor` is supplied)
 * @param sigma standard deviation of the Gaussian
 * @param amplitude peak amplitude of the kernel
 * @param normalize whether to normalize the kernel
 * @param mean mean of the Gaussian, as a fraction of the kernel length
 * @param tensor if non-null, the result is written into this tensor instead
 * @return the Gaussian kernel tensor
 */
def gaussian1D[@specialized(Float, Double) T: ClassTag](
  size: Int = 3,
  sigma: Double = 0.25,
  amplitude: Int = 1,
  normalize: Boolean = false,
  mean: Double = 0.5,
  tensor: Tensor[T] = null)(implicit ev: TensorNumeric[T]): Tensor[T] =
  DenseTensor.gaussian1D[T](size, sigma, amplitude, normalize, mean, tensor)
}
| zhichao-li/BigDL | dl/src/main/scala/com/intel/analytics/bigdl/tensor/Tensor.scala | Scala | apache-2.0 | 26,604 |
package api.route
import api.domain.SessionCookie
import api.CommonTraits
import spray.httpx.PlayTwirlSupport
import html._
private[api]
trait Root extends {self: CommonTraits with Misc with PlayTwirlSupport =>

  /** Serves the application shell at the site root (`GET /`). */
  def rootRoute(implicit session: SessionCookie) = {
    get {
      path("") {
        val userId = session("id")
        // Issue a fresh CSRF token for this user before rendering the page.
        generateCsrfToken(userId) { csrfToken =>
          complete(page(app(csrfToken, userId)))
        }
      }
    }
  }
}
| onurzdg/spray-app | src/main/scala/api/route/Root.scala | Scala | apache-2.0 | 484 |
package extracells.part
import java.util
import appeng.api.AEApi
import appeng.api.config.Actionable
import appeng.api.storage.data.IAEFluidStack
import cpw.mods.fml.common.Optional
import extracells.integration.Integration
import extracells.util.GasUtil
import mekanism.api.gas.IGasHandler
import net.minecraftforge.fluids.{Fluid, FluidStack, IFluidHandler}
/**
 * AE2 export-bus part that exports Mekanism gases from the ME network into the
 * gas handler the part is facing. Builds on the fluid export part and converts
 * the network's fluid representation to a gas stack on the way out.
 */
class PartGasExport extends PartFluidExport{

  // True when the optional Mekanism gas integration is loaded at runtime.
  private val isMekanismEnabled = Integration.Mods.MEKANISMGAS.isEnabled

  // Only attempt gas export when Mekanism is present; otherwise do nothing.
  override def doWork(rate: Int, tickSinceLastCall: Int): Boolean ={
    if (isMekanismEnabled)
      work(rate, tickSinceLastCall)
    else
      false
  }

  /**
   * Performs one export tick: collects the configured filter fluids in priority
   * order and pushes the first exportable one into the facing gas tank.
   *
   * @param rate base amount to move per tick
   * @param ticksSinceLastCall ticks elapsed since the previous call; scales the amount
   * @return true if any gas was transferred
   */
  @Optional.Method(modid = "MekanismAPI|gas")
  protected def work(rate: Int, ticksSinceLastCall: Int): Boolean ={
    val facingTank: IGasHandler = getFacingGasTank
    if (facingTank == null || !isActive) return false
    // Filter slots form a 3x3 grid (indices 0-8). Priority order: centre slot
    // (4) first, then edge slots (1, 3, 5, 7) when at least one upgrade ring is
    // available, then corner slots (0, 2, 6, 8) when two rings are available.
    val filter = new util.ArrayList[Fluid]
    filter.add(this.filterFluids(4))
    if (this.filterSize >= 1) {
      {
        var i: Byte = 1
        while (i < 9) {
          {
            // NOTE(review): i is always odd here, so this guard never fires.
            if (i != 4) {
              filter.add(this.filterFluids(i))
            }
          }
          i = (i + 2).toByte
        }
      }
    }
    if (this.filterSize >= 2) {
      {
        var i: Byte = 0
        while (i < 9) {
          {
            // Skip the centre slot; it was already added first.
            if (i != 4) {
              filter.add(this.filterFluids(i))
            }
          }
          i = (i + 2).toByte
        }
      }
    }
    import scala.collection.JavaConversions._
    for (fluid <- filter) {
      if (fluid != null) {
        // Simulate extracting up to rate * ticks worth of this fluid from the network.
        val stack: IAEFluidStack = extractFluid(AEApi.instance.storage.createFluidStack(new FluidStack(fluid, rate * ticksSinceLastCall)), Actionable.SIMULATE)
        if (stack != null) {
          val gasStack = GasUtil.getGasStack(stack.getFluidStack)
          if (gasStack != null && facingTank.canReceiveGas(getSide.getOpposite, gasStack.getGas)) {
            val filled: Int = facingTank.receiveGas(getSide.getOpposite, gasStack, true)
            if (filled > 0) {
              // The tank accepted some gas: extract exactly that amount for real.
              extractFluid(AEApi.instance.storage.createFluidStack(new FluidStack(fluid, filled)), Actionable.MODULATE)
              return true
            }
          }
        }
      }
    }
    return false
  }
}
| AmethystAir/ExtraCells2 | src/main/scala/extracells/part/PartGasExport.scala | Scala | mit | 2,245 |
// Copyright: 2010 - 2018 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/lgpl-3.0.en.html
package org.ensime.sexp.formats
/**
 * Default S-expression serialisation protocol: stacks the basic, standard,
 * collection and legacy product formats into one mixable trait.
 */
trait DefaultSexpProtocol
  extends BasicFormats
  with StandardFormats
  with CollectionFormats
  with LegacyProductFormats
object DefaultSexpProtocol extends DefaultSexpProtocol
| yyadavalli/ensime-server | s-express/src/main/scala/org/ensime/sexp/formats/DefaultSexpProtocol.scala | Scala | gpl-3.0 | 352 |
/*
* Copyright 2014 Rik van der Kleij
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.powertuple.intellij.haskell.code
import com.intellij.codeInsight.editorActions.SimpleTokenSetQuoteHandler
import com.intellij.psi.TokenType
import com.powertuple.intellij.haskell.psi.HaskellTypes
class HaskellQuoteHandler extends SimpleTokenSetQuoteHandler(HaskellTypes.HS_STRING_LITERAL, HaskellTypes.HS_QUOTE, HaskellTypes.HS_BACKQUOTE, TokenType.BAD_CHARACTER)
| ericssmith/intellij-haskell | src/com/powertuple/intellij/haskell/code/HaskellQuoteHandler.scala | Scala | apache-2.0 | 975 |
package omnibus.it.perf
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import scala.concurrent.duration._
import omnibus.it.OmnibusSimulation
/**
 * Gatling stress test for topic subscriptions: one virtual user creates a
 * topic over HTTP, then 100 users open WebSocket subscriptions to it over a
 * 10-second ramp.
 */
class SubStress extends OmnibusSimulation {

  // Creates the "batman" topic and verifies it exists (HTTP 201 then 200).
  val scenarioCreateTopic = scenario("Create topic")
    .pause(5)
    .exec(
      http("create topic")
        .post("/topics/batman")
        .check(status.is(201)))
    .exec(
      http("topic existence")
        .get("/topics/batman")
        .check(status.is(200)))

  // Opens a WebSocket stream subscription to the topic created above.
  // NOTE(review): "Subcription" typo kept — the name appears in Gatling reports.
  val scenarioOmnibus = scenario("Subcription")
    .pause(6)  // pause long enough for the create-topic scenario to complete first
    .exec(ws("Subscribe to topic").open("/streams/topics/batman"))

  setUp(
    scenarioCreateTopic.inject(atOnceUsers(1)),
    scenarioOmnibus.inject(rampUsers(100) over (10 seconds)))
    .protocols(
      // HTTP API on 8080, WebSocket streams on 8081.
      http.baseURL("http://localhost:8080")
        .wsBaseURL("ws://localhost:8081")
        .warmUp("http://localhost:8080/stats/metrics")
    )
    .assertions(
      // Thresholds inherited from OmnibusSimulation.
      global.successfulRequests.percent.greaterThan(minSuccessPercentage),
      global.responseTime.percentile1.lessThan(maxResponseTimePercentile1)
    )
}
| agourlay/omnibus | src/it/scala/omnibus/perf/SubStress.scala | Scala | apache-2.0 | 1,068 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.evaluation
import org.apache.spark.SparkFunSuite
import org.apache.spark.ml.param.ParamsSuite
import org.apache.spark.ml.util.DefaultReadWriteTest
import org.apache.spark.ml.util.TestingUtils._
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.types.IntegerType
/** Tests for [[ClusteringEvaluator]]. */
class ClusteringEvaluatorSuite
  extends SparkFunSuite with MLlibTestSparkContext with DefaultReadWriteTest {

  import testImplicits._

  test("params") {
    ParamsSuite.checkParams(new ClusteringEvaluator)
  }

  test("read/write") {
    // Persist and reload an evaluator configured with non-default columns.
    val original = new ClusteringEvaluator()
      .setPredictionCol("myPrediction")
      .setFeaturesCol("myLabel")
    testDefaultReadWrite(original)
  }

  /*
   Expected value computed with scikit-learn:

     from sklearn import datasets
     from sklearn.metrics import silhouette_score
     iris = datasets.load_iris()
     round(silhouette_score(iris.data, iris.target, metric='sqeuclidean'), 10)

   0.6564679231
   */
  test("squared euclidean Silhouette") {
    val dataset = ClusteringEvaluatorSuite.irisDataset(spark)
    val score = new ClusteringEvaluator()
      .setFeaturesCol("features")
      .setPredictionCol("label")
      .evaluate(dataset)
    assert(score ~== 0.6564679231 relTol 1e-5)
  }

  test("number of clusters must be greater than one") {
    // Keep a single class so the evaluator sees only one cluster.
    val singleCluster = ClusteringEvaluatorSuite.irisDataset(spark)
      .where($"label" === 0.0)
    val evaluator = new ClusteringEvaluator()
      .setFeaturesCol("features")
      .setPredictionCol("label")
    val thrown = intercept[AssertionError] {
      evaluator.evaluate(singleCluster)
    }
    assert(thrown.getMessage.contains("Number of clusters must be greater than one"))
  }
}
object ClusteringEvaluatorSuite {

  /** Loads the iris dataset bundled in the test resources as a DataFrame. */
  def irisDataset(spark: SparkSession): DataFrame = {
    val path = Thread
      .currentThread()
      .getContextClassLoader
      .getResource("test-data/iris.libsvm")
      .toString
    spark.read.format("libsvm").load(path)
  }
}
| minixalpha/spark | mllib/src/test/scala/org/apache/spark/ml/evaluation/ClusteringEvaluatorSuite.scala | Scala | apache-2.0 | 2,885 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters the dataset down to a sample of code snippets that meet specific criteria, giving a quick, surface-level view of the dataset's contents rather than a deeper analysis.