| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import scala.collection.mutable.ArrayBuffer
import scala.math.abs
import org.scalatest.PrivateMethodTester
import org.apache.spark.rdd.RDD
import org.apache.spark.util.StatCounter
class PartitioningSuite extends SparkFunSuite with SharedSparkContext with PrivateMethodTester {
test("HashPartitioner equality") {
val p2 = new HashPartitioner(2)
val p4 = new HashPartitioner(4)
val anotherP4 = new HashPartitioner(4)
assert(p2 === p2)
assert(p4 === p4)
assert(p2 != p4)
assert(p4 != p2)
assert(p4 === anotherP4)
assert(anotherP4 === p4)
}
test("RangePartitioner equality") {
// Make an RDD where all the elements are the same so that the partition range bounds
// are deterministically all the same.
val rdd = sc.parallelize(Seq(1, 1, 1, 1)).map(x => (x, x))
val p2 = new RangePartitioner(2, rdd)
val p4 = new RangePartitioner(4, rdd)
val anotherP4 = new RangePartitioner(4, rdd)
val descendingP2 = new RangePartitioner(2, rdd, false)
val descendingP4 = new RangePartitioner(4, rdd, false)
assert(p2 === p2)
assert(p4 === p4)
assert(p2 === p4)
assert(p4 === anotherP4)
assert(anotherP4 === p4)
assert(descendingP2 === descendingP2)
assert(descendingP4 === descendingP4)
assert(descendingP2 === descendingP4)
assert(p2 != descendingP2)
assert(p4 != descendingP4)
assert(descendingP2 != p2)
assert(descendingP4 != p4)
}
test("RangePartitioner getPartition") {
val rdd = sc.parallelize(1.to(2000)).map(x => (x, x))
// getPartition behaves differently for partitioners with fewer than 1000 partitions
// than for those with 1000 or more.
val partitionSizes = List(1, 2, 10, 100, 500, 1000, 1500)
val partitioners = partitionSizes.map(p => (p, new RangePartitioner(p, rdd)))
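// Reach into RangePartitioner's private rangeBounds array via ScalaTest's PrivateMethodTester.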
val decoratedRangeBounds = PrivateMethod[Array[Int]]('rangeBounds)
partitioners.foreach { case (numPartitions, partitioner) =>
val rangeBounds = partitioner.invokePrivate(decoratedRangeBounds())
for (element <- 1 to 1000) {
val partition = partitioner.getPartition(element)
if (numPartitions > 1) {
if (partition < rangeBounds.size) {
assert(element <= rangeBounds(partition))
}
if (partition > 0) {
assert(element > rangeBounds(partition - 1))
}
} else {
assert(partition === 0)
}
}
}
}
test("RangePartitioner for keys that are not Comparable (but with Ordering)") {
// Item does not extend Comparable, but has an implicit Ordering defined.
implicit object RowOrdering extends Ordering[Item] {
override def compare(x: Item, y: Item): Int = x.value - y.value
}
val rdd = sc.parallelize(1 to 4500).map(x => (Item(x), Item(x)))
val partitioner = new RangePartitioner(1500, rdd)
partitioner.getPartition(Item(100))
}
test("RangPartitioner.sketch") {
val rdd = sc.makeRDD(0 until 20, 20).flatMap { i =>
val random = new java.util.Random(i)
Iterator.fill(i)(random.nextDouble())
}.cache()
val sampleSizePerPartition = 10
val (count, sketched) = RangePartitioner.sketch(rdd, sampleSizePerPartition)
assert(count === rdd.count())
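// Partition i was built with exactly i elements, so each sketched triple should satisfy n == idx.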
sketched.foreach { case (idx, n, sample) =>
assert(n === idx)
assert(sample.size === math.min(n, sampleSizePerPartition))
}
}
test("RangePartitioner.determineBounds") {
assert(RangePartitioner.determineBounds(ArrayBuffer.empty[(Int, Float)], 10).isEmpty,
"Bounds on an empty candidates set should be empty.")
val candidates = ArrayBuffer(
(0.7, 2.0f), (0.1, 1.0f), (0.4, 1.0f), (0.3, 1.0f), (0.2, 1.0f), (0.5, 1.0f), (1.0, 3.0f))
assert(RangePartitioner.determineBounds(candidates, 3) === Array(0.4, 0.7))
}
test("RangePartitioner should run only one job if data is roughly balanced") {
val rdd = sc.makeRDD(0 until 20, 20).flatMap { i =>
val random = new java.util.Random(i)
Iterator.fill(5000 * i)((random.nextDouble() + i, i))
}.cache()
for (numPartitions <- Seq(10, 20, 40)) {
val partitioner = new RangePartitioner(numPartitions, rdd)
assert(partitioner.numPartitions === numPartitions)
val counts = rdd.keys.map(key => partitioner.getPartition(key)).countByValue().values
assert(counts.max < 3.0 * counts.min)
}
}
test("RangePartitioner should work well on unbalanced data") {
val rdd = sc.makeRDD(0 until 20, 20).flatMap { i =>
val random = new java.util.Random(i)
Iterator.fill(20 * i * i * i)((random.nextDouble() + i, i))
}.cache()
for (numPartitions <- Seq(2, 4, 8)) {
val partitioner = new RangePartitioner(numPartitions, rdd)
assert(partitioner.numPartitions === numPartitions)
val counts = rdd.keys.map(key => partitioner.getPartition(key)).countByValue().values
assert(counts.max < 3.0 * counts.min)
}
}
test("RangePartitioner should return a single partition for empty RDDs") {
val empty1 = sc.emptyRDD[(Int, Double)]
val partitioner1 = new RangePartitioner(0, empty1)
assert(partitioner1.numPartitions === 1)
val empty2 = sc.makeRDD(0 until 2, 2).flatMap(i => Seq.empty[(Int, Double)])
val partitioner2 = new RangePartitioner(2, empty2)
assert(partitioner2.numPartitions === 1)
}
test("HashPartitioner not equal to RangePartitioner") {
val rdd = sc.parallelize(1 to 10).map(x => (x, x))
val rangeP2 = new RangePartitioner(2, rdd)
val hashP2 = new HashPartitioner(2)
assert(rangeP2 === rangeP2)
assert(hashP2 === hashP2)
assert(hashP2 !== rangeP2)
assert(rangeP2 !== hashP2)
}
test("partitioner preservation") {
val rdd = sc.parallelize(1 to 10, 4).map(x => (x, x))
val grouped2 = rdd.groupByKey(2)
val grouped4 = rdd.groupByKey(4)
val reduced2 = rdd.reduceByKey(_ + _, 2)
val reduced4 = rdd.reduceByKey(_ + _, 4)
assert(rdd.partitioner === None)
assert(grouped2.partitioner === Some(new HashPartitioner(2)))
assert(grouped4.partitioner === Some(new HashPartitioner(4)))
assert(reduced2.partitioner === Some(new HashPartitioner(2)))
assert(reduced4.partitioner === Some(new HashPartitioner(4)))
assert(grouped2.groupByKey().partitioner === grouped2.partitioner)
assert(grouped2.groupByKey(3).partitioner != grouped2.partitioner)
assert(grouped2.groupByKey(2).partitioner === grouped2.partitioner)
assert(grouped4.groupByKey().partitioner === grouped4.partitioner)
assert(grouped4.groupByKey(3).partitioner != grouped4.partitioner)
assert(grouped4.groupByKey(4).partitioner === grouped4.partitioner)
assert(grouped2.join(grouped4).partitioner === grouped4.partitioner)
assert(grouped2.leftOuterJoin(grouped4).partitioner === grouped4.partitioner)
assert(grouped2.rightOuterJoin(grouped4).partitioner === grouped4.partitioner)
assert(grouped2.fullOuterJoin(grouped4).partitioner === grouped4.partitioner)
assert(grouped2.cogroup(grouped4).partitioner === grouped4.partitioner)
assert(grouped2.join(reduced2).partitioner === grouped2.partitioner)
assert(grouped2.leftOuterJoin(reduced2).partitioner === grouped2.partitioner)
assert(grouped2.rightOuterJoin(reduced2).partitioner === grouped2.partitioner)
assert(grouped2.fullOuterJoin(reduced2).partitioner === grouped2.partitioner)
assert(grouped2.cogroup(reduced2).partitioner === grouped2.partitioner)
assert(grouped2.map(_ => 1).partitioner === None)
assert(grouped2.mapValues(_ => 1).partitioner === grouped2.partitioner)
assert(grouped2.flatMapValues(_ => Seq(1)).partitioner === grouped2.partitioner)
assert(grouped2.filter(_._1 > 4).partitioner === grouped2.partitioner)
}
test("partitioning Java arrays should fail") {
val arrs: RDD[Array[Int]] = sc.parallelize(Array(1, 2, 3, 4), 2).map(x => Array(x))
val arrPairs: RDD[(Array[Int], Int)] =
sc.parallelize(Array(1, 2, 3, 4), 2).map(x => (Array(x), x))
def verify(testFun: => Unit): Unit = {
assert(intercept[SparkException](testFun).getMessage.contains("array"))
}
verify(arrs.distinct())
// We can't catch all usages of arrays, since they might occur inside other collections:
// assert(fails { arrPairs.distinct() })
verify(arrPairs.partitionBy(new HashPartitioner(2)))
verify(arrPairs.join(arrPairs))
verify(arrPairs.leftOuterJoin(arrPairs))
verify(arrPairs.rightOuterJoin(arrPairs))
verify(arrPairs.fullOuterJoin(arrPairs))
verify(arrPairs.groupByKey())
verify(arrPairs.countByKey())
verify(arrPairs.countByKeyApprox(1))
verify(arrPairs.cogroup(arrPairs))
verify(arrPairs.reduceByKeyLocally(_ + _))
verify(arrPairs.reduceByKey(_ + _))
}
test("zero-length partitions should be correctly handled") {
// Create RDD with some consecutive empty partitions (including the "first" one)
val rdd: RDD[Double] = sc
.parallelize(Array(-1.0, -1.0, -1.0, -1.0, 2.0, 4.0, -1.0, -1.0), 8)
.filter(_ >= 0.0)
// Run the partitions, including the consecutive empty ones, through StatCounter
val stats: StatCounter = rdd.stats()
assert(abs(6.0 - stats.sum) < 0.01)
assert(abs(6.0/2 - rdd.mean) < 0.01)
assert(abs(1.0 - rdd.variance) < 0.01)
assert(abs(1.0 - rdd.stdev) < 0.01)
assert(abs(rdd.variance - rdd.popVariance) < 1e-14)
assert(abs(rdd.stdev - rdd.popStdev) < 1e-14)
assert(abs(2.0 - rdd.sampleVariance) < 1e-14)
assert(abs(Math.sqrt(2.0) - rdd.sampleStdev) < 1e-14)
assert(stats.max === 4.0)
assert(stats.min === 2.0)
// Add other tests here for classes that should be able to handle empty partitions correctly
}
test("Number of elements in RDD is less than number of partitions") {
val rdd = sc.parallelize(1 to 3).map(x => (x, x))
val partitioner = new RangePartitioner(22, rdd)
assert(partitioner.numPartitions === 3)
}
test("defaultPartitioner") {
val rdd1 = sc.parallelize((1 to 1000).map(x => (x, x)), 150)
val rdd2 = sc.parallelize(Array((1, 2), (2, 3), (2, 4), (3, 4)))
.partitionBy(new HashPartitioner(10))
val rdd3 = sc.parallelize(Array((1, 6), (7, 8), (3, 10), (5, 12), (13, 14)))
.partitionBy(new HashPartitioner(100))
val rdd4 = sc.parallelize(Array((1, 2), (2, 3), (2, 4), (3, 4)))
.partitionBy(new HashPartitioner(9))
val rdd5 = sc.parallelize((1 to 10).map(x => (x, x)), 11)
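// An existing partitioner is reused when its partition count is comparable (within an
// order of magnitude) to the largest upstream count; otherwise the largest count wins.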
val partitioner1 = Partitioner.defaultPartitioner(rdd1, rdd2)
val partitioner2 = Partitioner.defaultPartitioner(rdd2, rdd3)
val partitioner3 = Partitioner.defaultPartitioner(rdd3, rdd1)
val partitioner4 = Partitioner.defaultPartitioner(rdd1, rdd2, rdd3)
val partitioner5 = Partitioner.defaultPartitioner(rdd4, rdd5)
assert(partitioner1.numPartitions == rdd1.getNumPartitions)
assert(partitioner2.numPartitions == rdd3.getNumPartitions)
assert(partitioner3.numPartitions == rdd3.getNumPartitions)
assert(partitioner4.numPartitions == rdd3.getNumPartitions)
assert(partitioner5.numPartitions == rdd4.getNumPartitions)
}
test("defaultPartitioner when defaultParallelism is set") {
assert(!sc.conf.contains("spark.default.parallelism"))
try {
sc.conf.set("spark.default.parallelism", "4")
val rdd1 = sc.parallelize((1 to 1000).map(x => (x, x)), 150)
val rdd2 = sc.parallelize(Array((1, 2), (2, 3), (2, 4), (3, 4)))
.partitionBy(new HashPartitioner(10))
val rdd3 = sc.parallelize(Array((1, 6), (7, 8), (3, 10), (5, 12), (13, 14)))
.partitionBy(new HashPartitioner(100))
val rdd4 = sc.parallelize(Array((1, 2), (2, 3), (2, 4), (3, 4)))
.partitionBy(new HashPartitioner(9))
val rdd5 = sc.parallelize((1 to 10).map(x => (x, x)), 11)
val rdd6 = sc.parallelize(Array((1, 2), (2, 3), (2, 4), (3, 4)))
.partitionBy(new HashPartitioner(3))
val partitioner1 = Partitioner.defaultPartitioner(rdd1, rdd2)
val partitioner2 = Partitioner.defaultPartitioner(rdd2, rdd3)
val partitioner3 = Partitioner.defaultPartitioner(rdd3, rdd1)
val partitioner4 = Partitioner.defaultPartitioner(rdd1, rdd2, rdd3)
val partitioner5 = Partitioner.defaultPartitioner(rdd4, rdd5)
val partitioner6 = Partitioner.defaultPartitioner(rdd5, rdd5)
val partitioner7 = Partitioner.defaultPartitioner(rdd1, rdd6)
assert(partitioner1.numPartitions == rdd2.getNumPartitions)
assert(partitioner2.numPartitions == rdd3.getNumPartitions)
assert(partitioner3.numPartitions == rdd3.getNumPartitions)
assert(partitioner4.numPartitions == rdd3.getNumPartitions)
assert(partitioner5.numPartitions == rdd4.getNumPartitions)
assert(partitioner6.numPartitions == sc.defaultParallelism)
assert(partitioner7.numPartitions == sc.defaultParallelism)
} finally {
sc.conf.remove("spark.default.parallelism")
}
}
}
private sealed case class Item(value: Int)
| bravo-zhang/spark | core/src/test/scala/org/apache/spark/PartitioningSuite.scala | Scala | apache-2.0 | 13,796 |
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.data.model.pojo
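/**
 * Adds a mutable name property to a model class.
 */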
trait Named {
var name: String = _
}
| beangle/data | model/src/main/scala/org/beangle/data/model/pojo/Named.scala | Scala | lgpl-3.0 | 798 |
package org.clulab.agiga
import java.io._
import com.typesafe.config.ConfigFactory
import com.typesafe.scalalogging.LazyLogging
import scala.collection.parallel.ForkJoinTaskSupport
object AgigaReader extends App with LazyLogging {
val config = ConfigFactory.load()
val inDir = new File(config.getString("inputDir"))
val outDir = new File(config.getString("outDir"))
val view = config.getString("view")
val nthreads = config.getInt("nthreads")
def mkOutput(f: File, outDir: File, view: String): Unit = {
// create a Processors Document
val docs = toDocuments(f.getAbsolutePath)
for (doc <- docs) {
// get output representation
val output = view.toLowerCase match {
case "words" =>
doc.sentences.map(_.words.mkString(" ")).mkString("\n")
case "tags" =>
doc.sentences.map(_.tags.get.mkString(" ")).mkString("\n")
case "lemmas" =>
doc.sentences.map(_.lemmas.get.mkString(" ")).mkString("\n")
case entities if entities == "entities" || entities == "ner" =>
doc.sentences.map(_.entities.get.mkString(" ")).mkString("\n")
// these are unordered
case "lemma-deps" =>
doc.sentences.map { s =>
val deps = s.dependencies.get
val lemmas = s.lemmas.get
depsToString(deps, lemmas)
}.mkString("\n")
case "tag-deps" =>
doc.sentences.map { s =>
val deps = s.dependencies.get
val tags = s.tags.get
depsToString(deps, tags)
}.mkString("\n")
case "entity-deps" =>
doc.sentences.map { s =>
val deps = s.dependencies.get
val entities = s.entities.get
depsToString(deps, entities)
}.mkString("\n")
case "dep" =>
doc.sentences.map { s =>
val deps = s.dependencies.get
val words = s.words
depsToString(deps, words)
}.mkString("\n")
}
// prepare output file
//val fName = f.getName.replace(".xml.gz", "")
val outFile = new File(outDir, s"${doc.id.get}-$view.txt")
val pw = new PrintWriter(outFile)
// write processed text to file
pw.write(output)
pw.close()
// compress file
compress(outFile)
// delete the uncompressed output file
outFile.delete
logger.info(s"Successfully processed ${f.getName}")
}
}
// create dir if it doesn't exist...
outDir.mkdirs()
logger.info(s"Input: $inDir")
logger.info(s"Output folder: $outDir")
logger.info(s"View: $view")
val files = inDir.listFiles
// filter out any non- *.xml.gz files in the directory
.filter(_.getName.endsWith(".xml.gz"))
// and parallelize the Array of valid Files...
.par
logger.info(s"Files to process: ${files.size}")
// limit parallelization
files.tasksupport = new ForkJoinTaskSupport(new scala.concurrent.forkjoin.ForkJoinPool(nthreads))
logger.info(s"Threads to use: $nthreads")
// process files
files.foreach(f => mkOutput(f, outDir, view))
}
| myedibleenso/processors-agiga | src/main/scala/org/clulab/agiga/AgigaReader.scala | Scala | apache-2.0 | 3,083 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.scala.dsl
import java.util.concurrent.ExecutorService
import org.apache.camel.model.MulticastDefinition
import org.apache.camel.scala.dsl.builder.RouteBuilder
import org.apache.camel.Exchange
import org.apache.camel.processor.aggregate.AggregationStrategy
case class SMulticastDefinition(override val target: MulticastDefinition)(implicit val builder: RouteBuilder) extends SAbstractDefinition[MulticastDefinition] {
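// Adapts a plain (Exchange, Exchange) => Exchange function to Camel's AggregationStrategy.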
def strategy(function: (Exchange, Exchange) => Exchange) = {
target.setAggregationStrategy(
new AggregationStrategy() {
def aggregate(oldExchange: Exchange, newExchange: Exchange) = function(oldExchange, newExchange)
}
)
this
}
def strategy(strategy: AggregationStrategy) = wrap(target.setAggregationStrategy(strategy))
@Deprecated
def parallel = wrap(target.parallelProcessing)
def parallelProcessing = wrap(target.parallelProcessing)
def parallelAggregate = wrap(target.parallelAggregate)
def streaming = wrap(target.streaming)
def stopOnException = wrap(target.stopOnException())
def executorService(executorService: ExecutorService) = wrap(target.setExecutorService(executorService))
def executorServiceRef(ref: String) = wrap(target.setExecutorServiceRef(ref))
def timeout(timeout: Long) = wrap(target.timeout(timeout))
}
| YMartsynkevych/camel | components/camel-scala/src/main/scala/org/apache/camel/scala/dsl/SMulticastDefinition.scala | Scala | apache-2.0 | 2,149 |
/*
* Copyright 2009-2010 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb {
package transaction {
import _root_.javax.persistence.{EntityManager, EntityManagerFactory}
import _root_.javax.transaction.{Transaction, Status, TransactionManager}
import _root_.net.liftweb.common.Loggable
import _root_.org.scala_libs.jpa.{ScalaEntityManager, ScalaEMFactory}
/**
* Base monad for the transaction monad implementations.
*
* @author <a href="http://jonasboner.com">Jonas Bonér</a>
*/
trait TransactionMonad {
// -----------------------------
// Monadic definitions
// -----------------------------
def map[T](f: TransactionMonad => T): T
def flatMap[T](f: TransactionMonad => T): T
def foreach(f: TransactionMonad => Unit): Unit
def filter(f: TransactionMonad => Boolean): TransactionMonad =
if (f(this)) this else TransactionContext.NoOpTransactionMonad
// -----------------------------
// JTA Transaction definitions
// -----------------------------
/**
* Returns the current Transaction.
*/
def getTransaction: Transaction = TransactionContext.getTransactionManager.getTransaction
/**
* Marks the current transaction as doomed.
*/
def setRollbackOnly = TransactionContext.setRollbackOnly
/**
* Marks the current transaction as doomed.
*/
def doom = TransactionContext.setRollbackOnly
/**
* Checks if the current transaction is doomed.
*/
def isRollbackOnly = TransactionContext.isRollbackOnly
/**
* Checks that the current transaction is NOT doomed.
*/
def isNotDoomed = !TransactionContext.isRollbackOnly
// -----------------------------
// JPA EntityManager definitions
// -----------------------------
/**
* Returns the current EntityManager.
*/
def getEntityManager: EntityManager = TransactionContext.getEntityManager
/**
* Checks if an EntityManager exists in current context.
*/
//def hasEntityManager: Boolean = TransactionContext.hasEntityManager
/**
* Closes and removes the current EntityManager.
* <p/>
* IMPORTANT: This method must always be used to close the EntityManager, never use em.close directly.
*/
def closeEntityManager = TransactionContext.closeEntityManager
}
/**
* Manages a thread-local stack of TransactionContexts.
* <p/>
* Choose TransactionService implementation by implicit definition of the implementation of choice,
* e.g. <code>implicit val txService = TransactionServices.AtomikosTransactionService</code>.
* <p/>
* Example usage 1:
* <pre>
* for {
* ctx <- TransactionContext.Required
* entity <- updatedEntities
* if !ctx.isRollbackOnly
* } {
* // transactional stuff
* ctx.getEntityManager.merge(entity)
* }
* </pre>
* Example usage 2:
* <pre>
* val users = for {
* ctx <- TransactionContext.Required
* name <- userNames
* } yield {
* // transactional stuff
* val query = ctx.getEntityManager.createNamedQuery("findUserByName")
* query.setParameter("userName", name)
* query.getSingleResult
* }
* </pre>
*
* @author <a href="http://jonasboner.com">Jonas Bonér</a>
*/
object TransactionContext extends TransactionProtocol with Loggable {
// FIXME: make configurable
private implicit val defaultTransactionService = atomikos.AtomikosTransactionService
private[TransactionContext] val stack = new scala.util.DynamicVariable(new TransactionContext)
object Required extends TransactionMonad {
def map[T](f: TransactionMonad => T): T = withTxRequired { f(this) }
def flatMap[T](f: TransactionMonad => T): T = withTxRequired { f(this) }
def foreach(f: TransactionMonad => Unit): Unit = withTxRequired { f(this) }
}
object RequiresNew extends TransactionMonad {
def map[T](f: TransactionMonad => T): T = withTxRequiresNew { f(this) }
def flatMap[T](f: TransactionMonad => T): T = withTxRequiresNew { f(this) }
def foreach(f: TransactionMonad => Unit): Unit = withTxRequiresNew { f(this) }
}
object Supports extends TransactionMonad {
def map[T](f: TransactionMonad => T): T = withTxSupports { f(this) }
def flatMap[T](f: TransactionMonad => T): T = withTxSupports { f(this) }
def foreach(f: TransactionMonad => Unit): Unit = withTxSupports { f(this) }
}
object Mandatory extends TransactionMonad {
def map[T](f: TransactionMonad => T): T = withTxMandatory { f(this) }
def flatMap[T](f: TransactionMonad => T): T = withTxMandatory { f(this) }
def foreach(f: TransactionMonad => Unit): Unit = withTxMandatory { f(this) }
}
object Never extends TransactionMonad {
def map[T](f: TransactionMonad => T): T = withTxNever { f(this) }
def flatMap[T](f: TransactionMonad => T): T = withTxNever { f(this) }
def foreach(f: TransactionMonad => Unit): Unit = withTxNever { f(this) }
}
object NoOpTransactionMonad extends TransactionMonad {
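// Applies the given function directly, without starting, joining or suspending any transaction.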
def map[T](f: TransactionMonad => T): T = f(this)
def flatMap[T](f: TransactionMonad => T): T = f(this)
def foreach(f: TransactionMonad => Unit): Unit = f(this)
override def filter(f: TransactionMonad => Boolean): TransactionMonad = this
}
private[transaction] def setRollbackOnly = current.setRollbackOnly
private[transaction] def isRollbackOnly = current.isRollbackOnly
private[transaction] def getTransactionManager: TransactionManager = current.getTransactionManager
private[transaction] def getTransaction: Transaction = current.getTransactionManager.getTransaction
private[transaction] def getEntityManager: EntityManager = current.getEntityManager
private[transaction] def closeEntityManager = current.closeEntityManager
private[this] def current = stack.value
/**
* Continues with the invocation defined in 'body' in a brand new TransactionContext; the old
* context is pushed onto the stack and automatically comes back into scope when the method exits.
* <p/>
* Suspends and resumes the current JTA transaction.
*/
private[transaction] def withNewContext[T](body: => T): T = {
val suspendedTx: Option[Transaction] =
if (isInExistingTransaction(getTransactionManager)) {
logger.debug("Suspending TX")
Some(getTransactionManager.suspend)
} else None
val result = stack.withValue(new TransactionContext) { body }
if (suspendedTx.isDefined) {
logger.debug("Resuming TX")
getTransactionManager.resume(suspendedTx.get)
}
result
}
}
/**
* Transaction context, holds the EntityManager and the TransactionManager.
*
* @author <a href="http://jonasboner.com">Jonas Bonér</a>
*/
class TransactionContext(private implicit val transactionService: TransactionService)
extends ScalaEntityManager with ScalaEMFactory {
val em: EntityManager = transactionService.entityManagerFactory.createEntityManager
val tm: TransactionManager = transactionService.transactionManager
private def setRollbackOnly = tm.setRollbackOnly
protected def getUnitName = "N/A"
private def isRollbackOnly: Boolean = tm.getStatus == Status.STATUS_MARKED_ROLLBACK
private def getTransactionManager: TransactionManager = tm
private def getEntityManager: EntityManager = em
private def closeEntityManager = em.close
// ---------------------------------
// To make ScalaEMFactory happy
val factory = this
def openEM: javax.persistence.EntityManager = em
def closeEM(e: javax.persistence.EntityManager) = closeEntityManager
}
}
}
| wsaccaco/lift | framework/lift-modules/lift-jta/src/main/scala/net/liftweb/transaction/TransactionContext.scala | Scala | apache-2.0 | 8,037 |
package com.cerner.beadledom.lifecycle.legacy
import com.google.inject.{AbstractModule, Guice}
import javax.annotation.PostConstruct
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{FunSpec, MustMatchers}
/**
* Unit tests for [[BeadledomLifecycleModule]].
*
* @author John Leacox
*/
class BeadledomLifecycleModuleSpec extends FunSpec with MustMatchers with MockitoSugar {
describe("BeadledomLifecycleModule") {
it("provides a binding for LifecycleProvisionListener") {
val shutdownManager = mock[LifecycleShutdownManager]
val injector = Guice.createInjector(
new BeadledomLifecycleModule,
new AbstractModule() {
protected override def configure() {
bind(classOf[LifecycleShutdownManager]).toInstance(shutdownManager)
}
})
injector.getInstance(classOf[LifecycleProvisionListener]) must not be null
}
it("binds LifecycleProvisionListener for provisioning events") {
val shutdownManager = mock[LifecycleShutdownManager]
val injector = Guice.createInjector(
new BeadledomLifecycleModule,
new AbstractModule() {
protected override def configure() {
bind(classOf[LifecycleShutdownManager]).toInstance(shutdownManager)
}
})
// Use a PostConstruct method executed by the provision listener to show the listener is
// registered
injector.getInstance(classOf[ProvisionedStartupHook]).hasExecutedStartup mustBe true
}
}
}
class ProvisionedStartupHook {
var hasExecutedStartup = false
@PostConstruct
def startup(): Unit = {
hasExecutedStartup = true
}
}
| bbaugher/beadledom | lifecycle/src/test/scala/com/cerner/beadledom/lifecycle/legacy/BeadledomLifecycleModuleSpec.scala | Scala | apache-2.0 | 1,666 |
package jsentric
import org.scalatest.{FunSuite, Matchers}
import argonaut._
import Argonaut._
import scalaz._
class ValidatorTests extends FunSuite with Matchers {
import Jsentric._
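// scalaz disjunction convention in these tests: -\/ carries validation failures, \/- carries the validated JSON.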
private def toSet(dis: \/[NonEmptyList[(String, Path)], Json]): \/[Set[(String, Path)], Json] =
dis.leftMap(_.list.toList.toSet)
test("Property validation") {
object StrValid extends Contract {
val expected = \[String]("expected")
val maybe = \?[Int]("maybe")
val default = \![Boolean]("default", false)
val option = \?[Option[String]]("option")
}
StrValid.$validate(jEmptyObject) should be (-\/(NonEmptyList("Value required." -> Path("expected"))))
val json1 = Json("expected" := "value")
StrValid.$validate(json1) should be (\/-(json1))
StrValid.$validate(Json("expected" := 3)) should be (-\/(NonEmptyList("Unexpected type 'JNumber'." -> Path("expected"))))
val json2 = Json("expected" := "value", "maybe" := 4)
StrValid.$validate(json2) should be (\/-(json2))
StrValid.$validate(Json("expected" := "value", "maybe" := 4.6)) should be (-\/(NonEmptyList("Unexpected type 'JNumber'." -> Path("maybe"))))
val json3 = Json("expected" := "value", "default" := true)
StrValid.$validate(json3) should be (\/-(json3))
StrValid.$validate(Json("expected" := "value", "default" := 4.6)) should be (-\/(NonEmptyList("Unexpected type 'JNumber'." -> Path("default"))))
val json4 = Json("expected" := "value", "option" := "value")
StrValid.$validate(json4) should be (\/-(json4))
val json5 = Json("expected" := "value", "maybe" := jNull, "option" := jNull)
StrValid.$validate(json5) should be (\/-(json5))
StrValid.$validate(Json("expected" := "value", "option" := false)) should be (-\/(NonEmptyList("Unexpected type 'JBool'." -> Path("option"))))
}
test("Nested validation") {
object NestValid extends Contract {
val value1 = \[String]("value1")
val nest1 = new \\("nest1") {
val value2 = \[String]("value2")
val value3 = \[String]("value3")
}
val nest2 = new \\?("nest2") {
val nest3 = new \\("nest3") {
val value4 = \[String]("value4")
}
val value5 = \[String]("value5")
}
}
val json1 = Json("value1" := "V", "nest1" := Json("value2" := "V", "value3" := "V"))
NestValid.$validate(json1) should be (\/-(json1))
val json2 = Json("value1" := "V", "nest1" := Json("value2" := "V", "value3" := "V"), "nest2" -> Json("nest3" -> Json("value4" := "V"), "value5" := "V"))
NestValid.$validate(json2) should be (\/-(json2))
toSet(NestValid.$validate(Json("value1" := "V", "nest1" := Json("value3" := 3)))) should
be (-\/(Set("Value required." -> "nest1"\"value2", "Unexpected type 'JNumber'." -> "nest1"\"value3")))
toSet(NestValid.$validate(Json("value1" := "V", "nest2" := jEmptyObject))) should
be (-\/(Set("Value required." -> Path("nest1"), "Value required." -> "nest2"\"nest3", "Value required." -> "nest2"\"value5")))
}
test("Internal and reserved validators") {
object IRValid extends Contract {
val reserve = \?[String]("reserve", reserved)
val defaultReserve = \![Int]("defaultReserve", 0, reserved)
val intern = \?[Boolean]("intern", internal)
val internReserve = \![Int]("internReserve", 0, internal)
}
IRValid.$validate(jEmptyObject) should be (\/-(jEmptyObject))
IRValid.$validate(Json("reserve" := "check")) should be (-\/(NonEmptyList("Value is reserved and cannot be provided." -> Path("reserve"))))
}
test("Custom validator") {
object Custom extends Contract {
val values = \:?[(String, Int)]("values", forall(custom((t: (String, Int)) => t._2 > 0, "Int must be greater than zero")))
val compare = \?[Int]("compare", customCompare[Int]((d, c) => math.abs(d - c) < 3, "Cannot change by more than three"))
}
val success = Json("values" -> jArrayElements(jArrayElements("one".asJson, 1.asJson)))
val failure = Json("values" -> jArrayElements(jArrayElements("one".asJson, 1.asJson), jArrayElements("negative".asJson, (-1).asJson)))
Custom.$validate(success) should be (\/-(success))
Custom.$validate(failure) should be (-\/(NonEmptyList("Int must be greater than zero" -> "values"\1)))
val compare = Json("compare" := 5)
Custom.$validate(compare, Json("compare" := 7)) should be (\/-(compare))
Custom.$validate(compare, Json("compare" := 0)) should be (-\/(NonEmptyList("Cannot change by more than three" -> Path("compare"))))
}
test("Delta validation") {
object Delta extends Contract {
val expected = \[String]("expected")
val immute = \[Boolean]("immute", immutable)
val maybe = \?[Int]("maybe")
val reserve = \?[Float]("reserve", reserved)
}
val replaceExpected = Json("expected" := "replace")
val replaceImmute = Json("immute" := true)
val replaceMaybe = Json("maybe" := 123)
val clearMaybe = Json("maybe" -> jNull)
val replaceReserve = Json("reserve" := 12.3)
Delta.$validate(replaceExpected, Json("expected" := "original", "immute" := false)) should be (\/-(replaceExpected))
Delta.$validate(replaceImmute, Json("expected" := "original", "immute" := false)) should be (-\/(NonEmptyList("Value is immutable and cannot be changed." -> Path("immute"))))
Delta.$validate(replaceImmute, Json("expected" := "original", "immute" := true)) should be (\/-(replaceImmute))
Delta.$validate(replaceMaybe, Json("expected" := "original", "immute" := false)) should be (\/-(replaceMaybe))
Delta.$validate(replaceMaybe, Json("expected" := "original", "immute" := false, "maybe" := 1)) should be (\/-(replaceMaybe))
Delta.$validate(clearMaybe, Json("expected" := "original", "immute" := false)) should be (\/-(clearMaybe))
Delta.$validate(clearMaybe, Json("expected" := "original", "immute" := false, "maybe" := 1)) should be (\/-(clearMaybe))
Delta.$validate(replaceReserve, Json("expected" := "original", "immute" := false, "maybe" := 1)) should be (-\/(NonEmptyList("Value is reserved and cannot be provided." -> Path("reserve"))))
}
}
| HigherState/jsentric | src/test/scala/jsentric/ValidatorTests.scala | Scala | apache-2.0 | 6,159 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.box.CtValidation
import uk.gov.hmrc.ct.version.calculations.ComputationsBoxRetrieverForTest
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
class CP503Spec extends WordSpec with MockitoSugar with Matchers {
"validateNotExceedingCP501" should {
"return validation error if value is more that CP501" in {
val mockBoxRetriever = mock[ComputationsBoxRetrieverForTest]
when(mockBoxRetriever.cp501()).thenReturn(CP501(Some(0)))
CP503(Some(2)).validate(mockBoxRetriever) shouldBe Set(CtValidation(Some("CP503"), "error.CP503.propertyExpensesExceedsIncome", None))
}
"return validation error if value is set but CP501 is not set" in {
val mockBoxRetriever = mock[ComputationsBoxRetrieverForTest]
when(mockBoxRetriever.cp501()).thenReturn(CP501(None))
CP503(Some(2)).validate(mockBoxRetriever) shouldBe Set(CtValidation(Some("CP503"), "error.CP503.propertyExpensesExceedsIncome", None))
}
"return no validation error if value is less that CP501" in {
val mockBoxRetriever = mock[ComputationsBoxRetrieverForTest]
when(mockBoxRetriever.cp501()).thenReturn(CP501(Some(3)))
CP503(Some(2)).validate(mockBoxRetriever) shouldBe Set()
}
"return no validation error if value is equal CP501" in {
val mockBoxRetriever = mock[ComputationsBoxRetrieverForTest]
when(mockBoxRetriever.cp501()).thenReturn(CP501(Some(1)))
CP503(Some(1)).validate(mockBoxRetriever) shouldBe Set()
}
"return no validation error if value is not set but CP501 is" in {
val mockBoxRetriever = mock[ComputationsBoxRetrieverForTest]
when(mockBoxRetriever.cp501()).thenReturn(CP501(Some(1)))
CP503(None).validate(mockBoxRetriever) shouldBe Set()
}
"return no validation error if value is not set and CP501 is also not set" in {
val mockBoxRetriever = mock[ComputationsBoxRetrieverForTest]
when(mockBoxRetriever.cp501()).thenReturn(CP501(None))
CP503(None).validate(mockBoxRetriever) shouldBe Set()
}
"return no validation error if value is 0 and CP501 is not set" in {
val mockBoxRetriever = mock[ComputationsBoxRetrieverForTest]
when(mockBoxRetriever.cp501()).thenReturn(CP501(None))
CP503(Some(0)).validate(mockBoxRetriever) shouldBe Set()
}
}
}
| pncampbell/ct-calculations | src/test/scala/uk/gov/hmrc/ct/computations/CP503Spec.scala | Scala | apache-2.0 | 3,016 |
package org.openurp.edu.eams.teach.lesson.dao.hibernate.internal
import java.io.Serializable
import scala.collection.JavaConversions._
import org.beangle.commons.collection.Collections
import org.beangle.commons.collection.page.Page
import org.beangle.commons.collection.page.PageLimit
import org.beangle.commons.lang.annotation.description
import org.beangle.data.jpa.dao.OqlBuilder
import org.beangle.data.jpa.hibernate.HibernateEntityDao
import org.beangle.data.jpa.hibernate.QuerySupport
import org.beangle.data.model.dao.Conditions
import org.hibernate.FlushMode
import org.hibernate.SessionFactory
import org.openurp.base.Semester
import org.openurp.edu.base.States
import org.openurp.edu.eams.classroom.util.RoomUseridGenerator
import org.openurp.edu.eams.classroom.util.RoomUseridGenerator.Usage
import org.openurp.edu.eams.teach.lesson.dao.LessonDao
import org.openurp.edu.eams.teach.lesson.dao.LessonSeqNoGenerator
import org.openurp.edu.eams.teach.lesson.service.LessonFilterStrategy
import org.openurp.edu.teach.lesson.Lesson
import org.openurp.edu.teach.plan.MajorPlan
import org.openurp.lg.room.Occupancy
class LessonDaoHibernate(sf: SessionFactory) extends HibernateEntityDao(sf) with LessonDao {
var lessonSeqNoGenerator: LessonSeqNoGenerator = _
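// Evict all Lesson entities from Hibernate's second-level cache so bulk updates are not masked by stale cached state.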
private def evictLessonRegion() {
val cache = sessionFactory.getCache
if (null != cache) {
cache.evictEntityRegion(classOf[Lesson])
}
}
def getLessonsByCategory(sid: Serializable,
strategy: LessonFilterStrategy,
semester: Semester,
pageNo: Int,
pageSize: Int): Page[Lesson] = {
val params = Collections.newMap[String, Any]
val id = if (strategy.name == "teacher") "%" + sid + "%" else sid
params.put("id", id)
params.put("semesterId", semester.id)
val queryStr = strategy.queryString(null, " and task.semester.id= :semesterId ")
val lessons = search(queryStr, params, new PageLimit(pageNo, pageSize), false)
lessons.asInstanceOf[Page[Lesson]]
}
def getLessonsByCategory(id: Serializable, strategy: LessonFilterStrategy, semesters: Iterable[Semester]): Seq[Lesson] = {
val taskQuery = strategy.createQuery(currentSession, "select distinct task.id from Lesson as task ",
" and task.semester in (:semesters) ")
taskQuery.setParameter("id", id)
taskQuery.setParameterList("semesters", semesters)
find(classOf[Lesson], taskQuery.list().toArray().asInstanceOf[Array[java.lang.Long]])
}
def getLessonsOfStd(stdId: Serializable, semesters: List[Semester]): Seq[Lesson] = {
val queryBuilder = OqlBuilder.from(classOf[Lesson], "lesson")
queryBuilder.join("lesson.teachClass.courseTakes", "courseTake")
queryBuilder.where("courseTake.std.id =:stdId", stdId)
queryBuilder.where("lesson.semester in (:semesters)", semesters)
search(queryBuilder)
}
def updateLessonByCategory(attr: String,
value: AnyRef,
id: java.lang.Long,
strategy: LessonFilterStrategy,
semester: Semester): Int = {
evictLessonRegion()
val queryStr = strategy.queryString("update TeachTask set " + attr + " = :value ", " and semester.id = :semesterId")
executeUpdate(queryStr, Array(value, semester.id))
}
private def getUpdateQueryString(attr: String,
value: AnyRef,
task: Lesson,
stdTypeIds: Array[Integer],
departIds: Array[Long],
newParamsMap: collection.mutable.Map[String, Any]): String = {
val entityQuery = OqlBuilder.from(classOf[Lesson], "task")
entityQuery.where(Conditions.extractConditions("task", task))
if (null != stdTypeIds && 0 != stdTypeIds.length) {
entityQuery.where("task.teachClass.stdType.id in (:stdTypeIds) ", stdTypeIds)
}
if (null != departIds && 0 != departIds.length) {
entityQuery.where("task.teachDepart.id in (:departIds) ", departIds)
}
val updateSql = new StringBuffer("update TeachTask set " + attr + "=(:" + attr + ") where id in (")
updateSql.append(entityQuery.build().statement).append(")")
newParamsMap.put(attr, value)
newParamsMap ++= entityQuery.params
updateSql.toString
}
def updateLessonByCriteria(attr: String,
value: AnyRef,
task: Lesson,
stdTypeIds: Array[Integer],
departIds: Array[Long]): Int = {
evictLessonRegion()
val newParamsMap = Collections.newMap[String, Any]
val updateSql = getUpdateQueryString(attr, value, task, stdTypeIds, departIds, newParamsMap)
val query = currentSession.createQuery(updateSql)
QuerySupport.setParameters(query, newParamsMap)
query.executeUpdate()
}
def countLesson(sid: Serializable, strategy: LessonFilterStrategy, semester: Semester): Int = {
val countQuery = strategy.createQuery(currentSession, "select count(task.id) from TeachTask as task ",
" and task.semester.id = :semesterId")
val id = if (strategy.name == "teacher") "%" + sid + "%" else sid
countQuery.setParameter("id", id)
countQuery.setParameter("semesterId", semester.id)
val rsList = countQuery.list()
rsList.get(0).asInstanceOf[Number].intValue()
}
def saveMergeResult(lessons: Array[Lesson], index: Int) {
saveOrUpdate(lessons(index))
for (i <- 0 until lessons.length) {
if (i != index) {
remove(lessons(i))
}
}
}
def remove(lesson: Lesson) {
val removeEntities = Collections.newBuffer[Any]
val occupancies = getOccupancies(lesson)
removeEntities.addAll(occupancies)
removeEntities.add(lesson)
super.remove(removeEntities)
}
def getOccupancies(lesson: Lesson): Seq[Occupancy] = {
val builder = OqlBuilder.from(classOf[Occupancy], "occupancy").where("occupancy.userid in( :lessonIds)",
RoomUseridGenerator.gen(lesson, Usage.COURSE, Usage.EXAM))
search(builder)
}
def saveGenResult(plan: MajorPlan,
semester: Semester,
lessons: collection.mutable.Buffer[Lesson],
removeExists: Boolean) {
currentSession.setFlushMode(FlushMode.COMMIT)
lessonSeqNoGenerator.genLessonSeqNos(lessons)
for (lesson <- lessons) {
lesson.state = States.Draft
super.saveOrUpdate(lesson)
}
currentSession.flush()
}
override def saveOrUpdate[E](entities: Iterable[E]): Unit = {
for (entity <- entities) {
val lesson = entity.asInstanceOf[Lesson]
val iter = lesson.teachClass.limitGroups.iterator
while (iter.hasNext) {
if (Collections.isEmpty(iter.next().items)) {
iter.remove()
}
}
lessonSeqNoGenerator.genLessonSeqNo(lesson)
super.saveOrUpdate(lesson)
}
}
}
| openurp/edu-eams-webapp | core/src/main/scala/org/openurp/edu/eams/teach/lesson/dao/hibernate/internal/LessonDaoHibernate.scala | Scala | gpl-3.0 | 6,532 |
/*
* Copyright (c) 2012, TU Berlin
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the TU Berlin nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL TU Berlin BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* */
package de.tuberlin.uebb.sl2.impl
import scala.collection.mutable.ListBuffer
import scala.text.Document
import scala.text.DocText
import de.tuberlin.uebb.sl2.modules._
import java.io.File
import java.io.PrintWriter
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import scala.io.Source
/*
* This driver has been superseded by the MultiDriver.
*/
trait SimpleDriver extends Driver {
self: Parser with CodeGenerator with Syntax with ProgramChecker with JsSyntax
with Errors with SignatureSerializer with DebugOutput with Configs
with ModuleResolver with ModuleNormalizer =>
override def run(inpCfg: Config): Either[Error, String] = {
val input = inpCfg.sources
//TODO: implement for multiple files! at the moment only the first
// will be handled
// load input file
val file = new File(input.head)
val name = file.getName()
// if no destination has been specified, the output goes to the folder of the input file.
val destination = if (inpCfg.destination == null) file.getParentFile() else inpCfg.destination
val config = inpCfg.copy(mainName = file.getName, mainParent = file.getParentFile, destination = destination)
val source = scala.io.Source.fromFile(file)
val code = source.mkString
source.close()
// parse the syntax
fileName = name
val ast = parseAst(code)
debugPrint(ast.toString());
for (
mo <- ast.right;
// check and load dependencies
imports <- inferDependencies(mo, config).right;
// type check the program
_ <- checkProgram(mo, normalizeModules(imports)).right;
// qualify references to unqualified module and synthesize
res <- compile(qualifyUnqualifiedModules(mo.asInstanceOf[Program], imports), name, imports, config).right
) yield res
}
def compile(program: Program, name: String, imports: List[ResolvedImport], config: Config): Either[Error, String] = {
val compiled = astToJs(program)
// Create modules directory, if necessary
val modulesDir = config.destination; // new File(config.destination, "modules")
if(!modulesDir.exists()) {
if(modulesDir.mkdirs()) {
println("Created directory "+modulesDir)
} else {
println("Could not create directory"+modulesDir)
}
} else if(!modulesDir.isDirectory()) {
println(modulesDir+" is not a directory")
}
/*
// copy .js and .signature of imported modules from classpath to modules/ directory
for(i <- imports.filter(_.isInstanceOf[ResolvedModuleImport])) {
val imp = i.asInstanceOf[ResolvedModuleImport]
copy(Paths.get(imp.file.toURI), Paths.get(modulesDir.getAbsolutePath(), imp.path+".sl.signature"))
copy(Paths.get(imp.jsFile.toURI), Paths.get(modulesDir.getAbsolutePath(), imp.path+".sl.js"))
}
*/
val tarJs = new File(modulesDir, name + ".js")
println("compiling "+name+" to "+tarJs)
val moduleTemplate = Source.fromURL(getClass().getResource("/js/module_template.js")).getLines.mkString("\n")
val moduleWriter = new PrintWriter(tarJs)
for(i <- imports.filter(_.isInstanceOf[ResolvedExternImport])) {
val imp = i.asInstanceOf[ResolvedExternImport]
val includedCode = imp.file.contents
moduleWriter.println("/***********************************/")
moduleWriter.println("// included from: "+imp.file.path)
moduleWriter.println("/***********************************/")
moduleWriter.println(includedCode)
moduleWriter.println("/***********************************/")
}
moduleWriter.println("/***********************************/")
moduleWriter.println("// generated from: "+name)
moduleWriter.println("/***********************************/")
val requires = imports.filter(_.isInstanceOf[ResolvedModuleImport]).map(
x => JsDef(x.asInstanceOf[ResolvedModuleImport].name,
JsFunctionCall(JsName("require"),JsStr(x.asInstanceOf[ResolvedModuleImport].path+".sl"))
))
moduleWriter.write(moduleTemplate.replace("%%MODULE_BODY%%", JsPrettyPrinter.pretty(requires)+"\n\n"
+JsPrettyPrinter.pretty(dataDefsToJs(program.dataDefs)
& functionDefsExternToJs(program.functionDefsExtern)
& functionDefsToJs(program.functionDefs)
& functionSigsToJs(program.signatures))));
moduleWriter.close();
val signatureFile = new File(modulesDir, name + ".signature")
println("writing signature of "+name+" to "+signatureFile)
val writerSig = new PrintWriter(signatureFile)
writerSig.write(serialize(program))
writerSig.close()
// create main.js only if a main function is declared
if(program.functionDefs.contains("main")) {
val mainWriter = new PrintWriter(new File(config.destination, "main.js"))
val paths = JsObject(List((JsName(standardLibName), JsStr(getLibResource("").toString))))
for(i <- imports.filter(_.isInstanceOf[ResolvedExternImport])) {
val imp = i.asInstanceOf[ResolvedExternImport]
val includedCode = imp.file.contents
mainWriter.println("/***********************************/")
mainWriter.println("// included from: "+imp.file.path)
mainWriter.println("/***********************************/")
mainWriter.println(includedCode)
mainWriter.println("/***********************************/")
}
mainWriter.println("/***********************************/")
mainWriter.println("// generated from: "+name)
mainWriter.println("/***********************************/")
val mainTemplate = Source.fromURL(getClass.getResource("/js/main_template.js")).getLines.mkString("\n")
mainWriter.write(mainTemplate.replace("%%MODULE_PATHS_LIST%%", "\""+name+"\"")
.replace("%%PATHS%%", JsPrettyPrinter.pretty(paths))
.replace("%%MODULE_NAMES_LIST%%", "$$$"+name.substring(0, name.length()-3)) // FIXME! the -3 here cuts off the .sl extension. should be done less hacky
.replace("%%MAIN%%", JsPrettyPrinter.pretty(JsFunctionCall("$$$"+name.substring(0, name.length()-3)+".$main"))))
mainWriter.close()
// copy index.html, require.js to config.destination
copy(Paths.get(getClass().getResource("/js/index.html").toURI()),
Paths.get(config.destination.getAbsolutePath(), "index.html"))
copy(Paths.get(getClass().getResource("/js/require.js").toURI()),
Paths.get(config.destination.getAbsolutePath(), "require.js"))
}
return Right("compilation successful")
}
def copy(from: Path, to: Path) = {
if(!to.getParent.toFile.exists) {
if(!to.getParent.toFile.mkdirs) {
// TODO: return an error
println("Could not create directory: "+to.getParent)
}
} else if (to.getParent.toFile.exists && !to.getParent.toFile.isDirectory) {
// TODO: return an error
println("Not a directory: "+to.getParent)
}
val target = Files.copy(from, to, StandardCopyOption.REPLACE_EXISTING)
println("copied "+from+" to "+to);
target
}
def mergeAst(a: Program, b: Program): Either[Error, Program] =
{
for (
sigs <- mergeMap(a.signatures, b.signatures).right;
funs <- mergeMap(a.functionDefs, b.functionDefs).right;
funsEx <- mergeMap(a.functionDefsExtern, b.functionDefsExtern).right
) yield {
val defs = a.dataDefs ++ b.dataDefs
Program(List(), sigs, funs, funsEx, defs)
}
}
def mergeMap[A, B](a: Map[A, B], b: Map[A, B]): Either[Error, Map[A, B]] =
{
val intersect = a.keySet & b.keySet
if (intersect.isEmpty)
Right(a ++ b)
else
Left(DuplicateError("Duplicated definition: " + intersect.mkString(", "), "", Nil))
}
}
| choeger/sl2 | src/main/scala/de/tuberlin/uebb/sl2/impl/SimpleDriver.scala | Scala | bsd-3-clause | 9,405 |
package com.azavea.opentransit.database
import com.azavea.gtfs._
import com.azavea.opentransit.indicators._
import com.azavea.opentransit.service._
import com.azavea.opentransit.JobStatus
import spray.json.DefaultJsonProtocol._
import com.github.nscala_time.time.Imports._
import scala.slick.driver.{JdbcDriver, JdbcProfile, PostgresDriver}
import scala.slick.jdbc.{StaticQuery => Q}
case class FullIndicatorJob(
id: Int,
calcStatus: String,
jobStatus: String,
errorType: String,
creatorId: Int,
scenarioId: Int,
cityName: String
)
/**
* Trait for providing the indicatorjobs table
*/
trait IndicatorJobsTable {
import PostgresDriver.simple._
class IndicatorJobs(tag: Tag) extends Table[FullIndicatorJob](tag, "transit_indicators_indicatorjob") {
def id = column[Int]("id")
def calcStatus = column[String]("calculation_status")
def jobStatus = column[String]("job_status")
def errorType = column[String]("error_type")
def creatorId = column[Int]("created_by_id")
def scenarioId = column[Int]("scenario_id")
def cityName = column[String]("city_name")
def * = (id, calcStatus, jobStatus, errorType, creatorId, scenarioId, cityName) <> (FullIndicatorJob.tupled, FullIndicatorJob.unapply)
}
def indicatorJobsTable = TableQuery[IndicatorJobs]
// Fail out all processing jobs
def failProcessingJobs(reason: String)(implicit session: Session): Unit = {
indicatorJobsTable.filter(_.jobStatus === "processing").map { fullJob =>
fullJob.errorType
}.update(reason)
indicatorJobsTable.filter(_.jobStatus === "processing").map { fullJob =>
fullJob.jobStatus
}.update("error")
}
// Partially applied failProcessingJobs, used when the Scala service restarts
def failOOMError(implicit session: Session): Unit = failProcessingJobs("scala_unknown_error")
// Set job to processing
def updateJobStatus(id: Int, jobStatus: String)(implicit session: Session): Unit =
indicatorJobsTable.filter(_.id === id).map { fullJob =>
fullJob.jobStatus
}.update(jobStatus)
// Set a job to failure
def failJob(id: Int, reason: String = "")(implicit session: Session): Unit = {
indicatorJobsTable.filter(_.id === id).map { fullJob =>
fullJob.errorType
}.update(reason)
indicatorJobsTable.filter(_.id === id).map { fullJob =>
fullJob.jobStatus
}.update("error")
}
// Function for arbitrarily adding to the error-tracking column
def updateErrorType(id: Int, errorType: String)(implicit session: Session): Unit =
indicatorJobsTable.filter(_.id === id).map { fullJob =>
fullJob.errorType
}.update(errorType)
// The indicator job parameter here is NOT the same as 'FullIndicatorJob' above!
def updateCalcStatus(job: IndicatorJob)(implicit session: Session): Unit = {
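// A job counts as complete once no indicator in any period is still submitted or processing.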
val hasComplete =
job.status.map { case (period, indicatorResult) =>
indicatorResult.forall { case (indicatorName, state) =>
state != JobStatus.Processing && state != JobStatus.Submitted
}
}.foldLeft(true)(_ && _)
val noFailed =
!job.status.map { case (period, indicatorResult) =>
indicatorResult.exists { case (indicatorName, state) => state == JobStatus.Failed }
}.foldLeft(false)(_ || _)
val jobStatus =
if (hasComplete) {
if (noFailed) JobStatus.Complete else JobStatus.Failed
} else {
JobStatus.Processing
}
val status = job.status.map { case (periodType, indicatorStatus) =>
(periodType -> indicatorStatus.map { case (indicatorName, status) =>
(indicatorName -> status.getJsonWithMsg)
}.toMap)
}.toMap
updateJobStatus(job.id, jobStatus.toString)
indicatorJobsTable.filter(_.id === job.id).map { fullJob =>
fullJob.calcStatus
}.update(status.toJson.toString)
}
}
| flibbertigibbet/open-transit-indicators | scala/opentransit/src/main/scala/com/azavea/opentransit/database/IndicatorJobsTable.scala | Scala | gpl-3.0 | 3,839 |
package smarthouse.restapi.http.routes
import java.util.Date
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Directives._
import de.heikoseeberger.akkahttpcirce.CirceSupport
import io.circe.{Decoder, Encoder}
import io.circe.generic.auto._
import io.circe.syntax._
import smarthouse.restapi.http.SecurityDirectives
import smarthouse.restapi.models.DeviceEntity
import smarthouse.restapi.services.{AuthService, DevicesService}
import scala.concurrent.ExecutionContext
class DevicesServiceRoute(val authService: AuthService,
val eventService: DevicesService)
(implicit executionContext: ExecutionContext) extends CirceSupport with SecurityDirectives {
import StatusCodes._
import eventService._
implicit val dateTimeEncoder: Encoder[Date] = Encoder.instance(a => a.getTime.asJson)
implicit val dateTimeDecoder: Decoder[Date] = Decoder.instance(a => a.as[Long].map(new Date(_)))
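// GET /devices returns all devices as JSON; POST /devices creates a device from the JSON body and returns 201 Created.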
val route = pathPrefix("devices") {
pathEndOrSingleSlash {
get {
complete(getDevices().map(_.asJson))
} ~ post {
entity(as[DeviceEntity]) { item =>
complete(Created -> createDevice(item).map(_.asJson))
}
}
}
}
}
| andrewobukhov/smart-house | src/main/scala/smarthouse/restapi/http/routes/DevicesServiceRoute.scala | Scala | mit | 1,250 |
/*
* Copyright 2010-2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scalaz.camel.core
import java.util.concurrent.{BlockingQueue, CountDownLatch, TimeUnit}
import java.util.concurrent.atomic.AtomicInteger
import org.apache.camel.{Exchange, AsyncCallback, AsyncProcessor}
import scalaz._
import Scalaz._
import Message._
import concurrent.Promise
import concurrent.Strategy
/**
* Message processors representing enterprise integration patterns (EIPs).
*
* @author Martin Krasser
*/
trait DslEip { this: Conv =>
/**
* Name of the position message header used by multicast and scatter-gather to
* preserve the order of messages that are distributed to destinations.
*/
val Position = "scalaz.camel.multicast.position"
/**
* Concurrency strategy for distributing messages to destinations
* with the multicast and scatter-gather EIPs.
*/
protected def multicastStrategy: Strategy
/**
* Creates a message processor that sets the message context's oneway field to true.
*/
def oneway: MessageProcessor = oneway(true)
/**
* Creates a message processor that sets the message context's oneway field to given value.
*/
def oneway(oneway: Boolean): MessageProcessor = messageProcessor { m: Message => m.setOneway(oneway) }
/**
* Creates a message processor that routes messages based on pattern matching. Implements
* the content-based router EIP.
*/
def choose(f: PartialFunction[Message, MessageRoute]): MessageProcessor =
(m: Message, k: MessageValidation => Unit) => {
f.lift(m) match {
case Some(r) => messageProcessor(r)(m, k)
case None => k(m.success)
}
}
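// A minimal usage sketch (route names and header key are assumed, not from the original
// source; Message.header is used as at gatherFunction below):
//
// val router = choose {
// case m if m.header("type") == Some("order") => orderRoute
// case m if m.header("type") == Some("invoice") => invoiceRoute
// }
//
// Messages matching neither case pass through unchanged (k(m.success) above).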
/**
* Creates a message processor that distributes messages to given destinations. The created
* processor applies the concurrency strategy returned by <code>multicastStrategy</code>
* to distribute messages. Distributed messages are not combined; instead, n responses
* are sent, where n is the number of destinations. Implements the static recipient-list EIP.
*/
def multicast(destinations: MessageRoute*): MessageProcessor =
(m: Message, k: MessageValidation => Unit) => {
0 until destinations.size foreach { i =>
multicastStrategy.apply {
destinations(i) apply m.success respond { mv => k(mv ∘ (_.addHeader(Position -> i))) }
}
}
}
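// Usage sketch (destination routes assumed): multicast(route1, route2) sends the input
// message to both routes concurrently (via multicastStrategy) and emits two responses,
// each tagged with a Position header (0 and 1) recording its destination index.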
/**
* Creates a message processor that generates a sequence of messages using <code>f</code> and
* sends n responses taken from the generated message sequence. Implements the splitter EIP.
*/
def split(f: Message => Seq[Message]): MessageProcessor =
(m: Message, k: MessageValidation => Unit) => {
try {
f(m) foreach { r => k(r.success) }
} catch {
case e: Exception => k(m.setException(e).fail)
}
}
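// Usage sketch (assumes the message body is a comma-separated String and that Message
// exposes a setBody update; names illustrative only):
//
// val splitter = split { m =>
// m.body.toString.split(",").toSeq.map(part => m.setBody(part))
// }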
/**
* Creates a message processor that filters messages if <code>f</code> returns None and sends
* a response if <code>f</code> returns Some message. Allows providers of <code>f</code> to
* aggregate messages and continue processing with a combined message, for example.
* Implements the aggregator EIP.
*/
def aggregate(f: Message => Option[Message]): MessageProcessor =
(m: Message, k: MessageValidation => Unit) => {
try {
f(m) match {
case Some(r) => k(r.success)
case None => { /* do not continue */ }
}
} catch {
case e: Exception => k(m.setException(e).fail)
}
}
/**
* Creates a message processor that filters messages by evaluating predicate <code>p</code>. If
* <code>p</code> evaluates to <code>true</code> a response is sent, otherwise the message is
* filtered. Implements the filter EIP.
*/
def filter(p: Message => Boolean) = aggregate { m: Message =>
if (p(m)) Some(m) else None
}
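// Usage sketch: drop messages with an empty body (body representation assumed):
//
// val nonEmpty = filter { m => m.body.toString.nonEmpty }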
/**
* Creates a builder for a scatter-gather processor. Implements the scatter-gather EIP.
*
* @see ScatterDefinition
*/
def scatter(destinations: MessageRoute*) = new ScatterDefinition(destinations: _*)
/**
* Builder for a scatter-gather processor.
*
* @see scatter
*/
class ScatterDefinition(destinations: MessageRoute*) {
/**
* Creates a message processor that scatters messages to <code>destinations</code> and
* gathers and combines them using <code>combine</code>. Messages are scattered to
* <code>destinations</code> using the concurrency strategy returned by
* <code>multicastStrategy</code>. Implements the scatter-gather EIP.
*
* @see scatter
*/
def gather(combine: (Message, Message) => Message): MessageRoute = {
val mcp = multicastProcessor(destinations.toList, combine)
messageRoute((m: Message, k: MessageValidation => Unit) => mcp(m, k))
}
}
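// Usage sketch (supplier routes assumed; combine function is schematic): scatter the
// request to two services and merge the two responses into one message:
//
// scatter(supplier1, supplier2) gather { (m1, m2) =>
// m1.setBody(m1.body.toString + "," + m2.body.toString) // setBody assumed on Message
// }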
/**
* Creates a message processor that sets exception <code>e</code> on the input message and
* generates a failure.
*/
def failWith(e: Exception): MessageProcessor =
(m: Message, k: MessageValidation => Unit) => k(m.setException(e).fail)
// ------------------------------------------
// Internal
// ------------------------------------------
/**
* Creates a message processor that distributes messages to destinations (using multicast) and gathers
* and combines the responses using an aggregator with <code>gatherFunction</code>.
*/
private def multicastProcessor(destinations: List[MessageRoute], combine: (Message, Message) => Message): MessageProcessor = {
(m: Message, k: MessageValidation => Unit) => {
val sgm = multicast(destinations: _*)
val sga = aggregate(gatherFunction(combine, destinations.size))
messageRoute(sgm) >=> messageRoute(sga) apply m.success respond k
}
}
/**
* Creates an aggregation function that gathers and combines multicast responses.
*/
private def gatherFunction(combine: (Message, Message) => Message, count: Int): Message => Option[Message] = {
val ct = new AtomicInteger(count)
val ma = Array.fill[Message](count)(null)
(m: Message) => {
for (pos <- m.header(Position).asInstanceOf[Option[Int]]) {
ma.synchronized(ma(pos) = m)
}
if (ct.decrementAndGet == 0) {
val ml = ma.synchronized(ma.toList)
Some(ml.tail.foldLeft(ml.head)((m1, m2) => combine(m1, m2).removeHeader(Position)))
} else {
None
}
}
}
}
trait DslAttempt { this: Conv =>
type AttemptHandler1 = PartialFunction[Exception, MessageRoute]
type AttemptHandlerN = PartialFunction[(Exception, RetryState), MessageRoute]
/**
* Captures the state of (repeated) message routing attempts. A retry state is defined by
* <ul>
* <li>the attempted route <code>r</code></li>
* <li>the fallback routes returned by <code>h</code></li>
* <li>the remaining number of retries (can be modified by application code)</li>
* <li>the original message <code>orig</code> used as input for the attempted route</li>
* </ul>
*/
case class RetryState(r: MessageRoute, h: AttemptHandlerN, count: Int, orig: Message) {
def next = RetryState(r, h, count - 1, orig)
}
/**
* Creates a message processor that extracts the original message from retry state <code>s</code>.
*/
def orig(s: RetryState): MessageProcessor =
(m: Message, k: MessageValidation => Unit) => k(s.orig.success)
/**
* Creates a builder for an attempt-fallback processor. The processor makes a single attempt
* to apply route <code>r</code> to an input message.
*/
def attempt(r: MessageRoute) = new AttemptDefinition0(r)
/**
* Creates a builder for an attempt(n)-fallback processor. The processor can be used to make n
* attempts to apply route <code>r</code> to an input message.
*
* @see orig
* @see retry
*/
def attempt(count: Int)(r: MessageRoute) = new AttemptDefinitionN(r, count - 1)
/**
* Creates a message processor that makes an additional attempt to apply <code>s.r</code>
* (the initially attempted route) to its input message. The message processor decreases
* <code>s.count</code> (the retry count) by one. A retry attempt is only made if an exception
* is set on the message and <code>s.h</code> (a retry handler) is defined for that exception.
*/
def retry(s: RetryState): MessageProcessor =
(m: Message, k: MessageValidation => Unit) => {
s.r apply m.success respond { mv => mv match {
case Success(_) => k(mv)
case Failure(m) => {
for {
e <- m.exception
r <- s.h.lift(e, s.next)
} {
if (s.count > 0) r apply m.exceptionHandled.success respond k else k(mv)
}
}
}}
}
/**
* Builder for an attempt-retry processor.
*/
class AttemptDefinition0(r: MessageRoute) {
/**
* Creates an attempt-retry processor using retry handlers defined by <code>h</code>.
*/
def fallback(h: AttemptHandler1): MessageProcessor =
(m: Message, k: MessageValidation => Unit) => {
r apply m.success respond { mv => mv match {
case Success(_) => k(mv)
case Failure(m) => {
for {
e <- m.exception
r <- h.lift(e)
} {
r apply m.exceptionHandled.success respond k
}
}
}}
}
}
/**
* Builder for an attempt(n)-retry processor.
*/
class AttemptDefinitionN(r: MessageRoute, count: Int) {
/**
* Creates an attempt(n)-retry processor using retry handlers defined by <code>h</code>.
*/
def fallback(h: AttemptHandlerN): MessageProcessor =
(m: Message, k: MessageValidation => Unit) => {
retry(new RetryState(r, h, count, m))(m, k)
}
}
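// Usage sketch (route names and exception type assumed; relies on the implicit
// processor-to-route conversions provided elsewhere by Conv/Camel):
//
// attempt(3) { unreliableRoute } fallback {
// case (e: java.io.IOException, s) => retry(s) // try unreliableRoute again, up to 3 times
// case (e, s) => orig(s) >=> deadLetterRoute // anything else: restore input and divert
// }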
}
/**
* DSL for endpoint management.
*
* @author Martin Krasser
*/
trait DslEndpoint { this: Conv =>
/**
* Creates a consumer for an endpoint represented by <code>uri</code> and connects it to the route
* <code>r</code>. This method registers the created consumer at the Camel context for lifecycle
* management.
*/
def from(uri: String)(r: MessageRoute)(implicit em: EndpointMgnt, cm: ContextMgnt): Unit =
em.createConsumer(uri, new RouteProcessor(r))
/**
* Creates a CPS processor that acts as a producer to the endpoint represented by <code>uri</code>.
* This method registers the created producer at the Camel context for lifecycle management.
*/
def to(uri: String)(implicit em: EndpointMgnt, cm: ContextMgnt): MessageProcessor = messageProcessor(uri, em, cm)
private class RouteProcessor(val p: MessageRoute) extends AsyncProcessor {
import RouteProcessor._
/**
* Synchronous message processing.
*/
def process(exchange: Exchange) = {
val latch = new CountDownLatch(1)
process(exchange, new AsyncCallback() {
def done(doneSync: Boolean) = {
latch.countDown
}
})
latch.await
}
/**
* Asynchronous message processing (may be synchronous as well if all message processors are
* synchronous and all concurrency strategies are configured to be <code>Sequential</code>).
*/
def process(exchange: Exchange, callback: AsyncCallback) =
if (exchange.getPattern.isOutCapable) processInOut(exchange, callback) else processInOnly(exchange, callback)
private def processInOut(exchange: Exchange, callback: AsyncCallback) = {
route(exchange.getIn.toMessage, once(respondTo(exchange, callback)))
false
}
private def processInOnly(exchange: Exchange, callback: AsyncCallback) = {
route(exchange.getIn.toMessage.setOneway(true), ((mv: MessageValidation) => { /* ignore any result */ }))
callback.done(true)
true
}
private def route(message: Message, k: MessageValidation => Unit): Unit = p apply message.success respond k
}
private object RouteProcessor {
def respondTo(exchange: Exchange, callback: AsyncCallback): MessageValidation => Unit = (mv: MessageValidation ) => mv match {
case Success(m) => respond(m, exchange, callback)
case Failure(m) => respond(m, exchange, callback)
}
def respond(message: Message, exchange: Exchange, callback: AsyncCallback): Unit = {
message.exception ∘ (exchange.setException(_))
exchange.getIn.fromMessage(message)
exchange.getOut.fromMessage(message)
callback.done(false)
}
def once(k: MessageValidation => Unit): MessageValidation => Unit = {
val done = new java.util.concurrent.atomic.AtomicBoolean(false)
(mv: MessageValidation) => if (!done.getAndSet(true)) k(mv)
}
}
}
/**
* DSL support classes for applying message validation responders and message processing routes.
*
* @see Camel.responderToResponderApplication
* @see Camel.routeToRouteApplication
*
* @author Martin Krasser
*/
trait DslApply { this: Conv =>
/**
* Applies a message validation responder <code>r</code>.
*
* @see Camel.responderToResponderApplication
*/
class ResponderApplication(r: Responder[MessageValidation]) {
/** Apply responder r and wait for response */
def response: MessageValidation = responseQueue.take
/** Apply responder r and wait for response with timeout */
def response(timeout: Long, unit: TimeUnit): MessageValidation = responseQueue.poll(timeout, unit)
/** Apply responder r and get response promise */
def responsePromise(implicit s: Strategy): Promise[MessageValidation] = promise(responseQueue.take)
/** Apply responder r and get response queue */
def responseQueue: BlockingQueue[MessageValidation] = {
val queue = new java.util.concurrent.LinkedBlockingQueue[MessageValidation](10)
r respond { mv => queue.put(mv) }
queue
}
}
/**
* Applies a message processing route <code>r</code>.
*
* @see Camel.routeToRouteApplication
*/
class RouteApplication(r: MessageRoute) {
/** Apply route r to message m and wait for response */
def process(m: Message) =
new ResponderApplication(r apply m.success).response
/** Apply route r to message m and wait for response with timeout */
def process(m: Message, timeout: Long, unit: TimeUnit) =
new ResponderApplication(r apply m.success).response(timeout, unit)
/** Apply route r to message m and get response promise */
def submit(m: Message)(implicit s: Strategy) =
new ResponderApplication(r apply m.success).responsePromise
/** Apply route r to message m and get response queue */
def submitN(m: Message) =
new ResponderApplication(r apply m.success).responseQueue
/** Apply route r to messages ms and wait for (first) response */
def process(ms: Seq[Message]) =
submitN(ms).take
/** Apply route r to messages ms and wait for (first) response with timeout */
def process(ms: Seq[Message], timeout: Long, unit: TimeUnit) =
submitN(ms).poll(timeout, unit)
/** Apply route r to messages ms and get response promise */
def submit(ms: Seq[Message])(implicit s: Strategy) =
promise(submitN(ms).take)
/** Apply route r to messages ms and get response queue */
def submitN(ms: Seq[Message]) = {
val queue = new java.util.concurrent.LinkedBlockingQueue[MessageValidation]
for (m <- ms) r apply m.success respond { mv => queue.put(mv) }
queue
}
}
}
|
krasserm/scalaz-camel
|
scalaz-camel-core/src/main/scala/scalaz/camel/core/Dsl.scala
|
Scala
|
apache-2.0
| 15,950
|
/*
* PlotFrame.scala
* (SysSon)
*
* Copyright (c) 2013-2017 Institute of Electronic Music and Acoustics, Graz.
* Copyright (c) 2014-2019 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package at.iem.sysson.gui
import at.iem.sysson.Plot
import at.iem.sysson.gui.impl.{PlotFrameImpl => Impl}
import de.sciss.lucre.stm.Sys
import de.sciss.lucre.swing.Window
import de.sciss.synth.proc.Universe
object PlotFrame {
def apply[S <: Sys[S]](plot: Plot[S])
(implicit tx: S#Tx, universe: Universe[S]): PlotFrame[S] =
Impl(plot)
def apply[S <: Sys[S]](plot: Plot[S], parent: SonificationView[S])
(implicit tx: S#Tx, universe: Universe[S]): PlotFrame[S] =
Impl(plot, parent)
def spreadsheet[S <: Sys[S]](plot: Plot[S], parent: Option[SonificationView[S]] = None)
(implicit tx: S#Tx, universe: Universe[S]): PlotFrame[S] =
Impl.spreadsheet(plot, parent)
}
trait PlotFrame[S <: Sys[S]] extends Window[S] {
def view: PlotView[S]
}
|
iem-projects/sysson
|
src/main/scala/at/iem/sysson/gui/PlotFrame.scala
|
Scala
|
gpl-3.0
| 1,189
|
/**
* See <a href="https://www.codeeval.com/open_challenges/118/">Seat Your Team</a>
*/
object SeatYourTeam extends Challenge {
val lines = scala.io.Source.fromFile(args(0)).getLines().filter(_.length > 0)
lines.collect {
case Input(favorites, available) =>
eval(favorites, Set())
} foreach {
case true => println("Yes")
case false => println("No")
}
def eval(favorites: List[List[Int]], path: Set[Int]): Boolean =
favorites match {
case Nil => true
case x :: xs => x.exists(p => !path.contains(p) && eval(xs, path + p))
}
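// Worked example for the sample line documented in Input below:
// "4; 1:[1, 3, 2], 2:[1], 3:[4, 3], 4:[4, 3]" parses to
// favorites = List(List(1, 3, 2), List(1), List(4, 3), List(4, 3)).
// Person 2 must take seat 1, and persons 3 and 4 need seats {4, 3}, so person 1 is
// forced onto seat 2; eval finds 1->2, 2->1, 3->4, 4->3 and the program prints "Yes".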
object Input {
// 4; 1:[1, 3, 2], 2:[1], 3:[4, 3], 4:[4, 3]
def unapply(line: String) = line.split("; ").toList match {
case n :: places :: Nil =>
val available = 1 to n.toInt
val favorites = places.split("(\\],|]$)").map(_.split("(:\\[|, )").toList).map(_.tail.map(_.toInt))
Some(favorites.toList, available.toSet)
case _ => None
}
}
}
|
zelca/codeeval
|
src/SeatYourTeam.scala
|
Scala
|
mit
| 970
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.examples.freespec.getfixture
import org.scalatest.FreeSpec
import collection.mutable.ListBuffer
class ExampleSpec extends FreeSpec {
class Fixture {
val builder = new StringBuilder("ScalaTest is ")
val buffer = new ListBuffer[String]
}
def fixture = new Fixture
"Testing" - {
"should be easy" in {
val f = fixture
f.builder.append("easy!")
assert(f.builder.toString === "ScalaTest is easy!")
assert(f.buffer.isEmpty)
f.buffer += "sweet"
}
"should be fun" in {
val f = fixture
f.builder.append("fun!")
assert(f.builder.toString === "ScalaTest is fun!")
assert(f.buffer.isEmpty)
}
}
}
|
dotty-staging/scalatest
|
examples/src/test/scala/org/scalatest/examples/freespec/getfixture/ExampleSpec.scala
|
Scala
|
apache-2.0
| 1,306
|
/*
* Copyright 2012-2014 Broad Institute, Inc.
*
* This file is part of Pilon.
*
* Pilon is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2
* as published by the Free Software Foundation.
*
* Pilon is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Pilon. If not, see <http://www.gnu.org/licenses/>.
*/
package org.broadinstitute.pilon
import collection.mutable.ArrayBuffer
import math.pow
// Calculates the moments of a sample distribution.
class NormalDistribution(values: Array[Double], nMoments: Int) {
require(values.size > 0, "can't compute moments of empty distribution")
require(nMoments > 1, "assumes at least two moments (mean, stddev)")
val mean = values.sum / values.size
val median = {
val sorted = values.clone.sorted
val n = sorted.size
if (n % 2 == 0)
(sorted(n/2) + sorted(n/2 - 1)) / 2.0
else
sorted(n/2)
}
val moments = new Array[Double](nMoments)
moments(0) = mean
for (n <- 1 until nMoments) {
for (v <- values) moments(n) += pow(v - mean, n + 1)
moments(n) = pow(moments(n) / values.size, 1.0 / (n + 1))
}
def toSigma(value: Double, maxSigma: Double = 5.0) = {
val sigma = (value - moments(0)) / moments(1)
// clamp to [-maxSigma, maxSigma]; previously maxSigma was accepted but ignored
math.max(-maxSigma, math.min(maxSigma, sigma))
}
def toSigma10x(value: Double) = (toSigma(value) * 10.0).round.toInt
def fromSigma(sigma: Double) = moments(0) + sigma * moments(1)
def this(ivalues: Array[Int], nMoments: Int) =
this(ivalues.map(_.toDouble), nMoments)
def this(ivalues: Array[Short], nMoments: Int) =
this(ivalues.map(_.toDouble), nMoments)
def this(ivalues: Array[Byte], nMoments: Int) =
this(ivalues.map(_.toDouble), nMoments)
override def toString = "<moments: n=" + values.size + ",moments=" + (moments mkString ",") + ">"
}
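// Illustrative usage (values assumed):
// val dist = new NormalDistribution(Array(1.0, 2.0, 3.0, 4.0), 2)
// dist.mean // 2.5
// dist.median // 2.5
// dist.moments(1) // population standard deviation, ~1.118
// dist.toSigma(4.0) // (4.0 - 2.5) / 1.118, ~1.342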
|
AbeelLab/pilon
|
src/main/scala/org/broadinstitute/pilon/NormalDistribution.scala
|
Scala
|
gpl-2.0
| 2,130
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import SharedHelpers.thisLineNumber
import enablers.Definition
import Matchers._
import exceptions.TestFailedException
import org.scalactic.Prettifier
class ShouldBeDefinedImplicitSpec extends FunSpec {
private val prettifier = Prettifier.default
trait Thing {
def isDefined: Boolean
}
val something = new Thing {
val isDefined = true
}
val nothing = new Thing {
val isDefined = false
}
implicit def definitionOfThing[T <: Thing]: Definition[T] =
new Definition[T] {
def isDefined(thing: T): Boolean = thing.isDefined
}
val fileName: String = "ShouldBeDefinedImplicitSpec.scala"
def wasNotDefined(left: Any): String =
FailureMessages.wasNotDefined(prettifier, left)
def wasDefined(left: Any): String =
FailureMessages.wasDefined(prettifier, left)
def allError(left: Any, message: String, lineNumber: Int): String = {
val messageWithIndex = UnquotedString(" " + FailureMessages.forAssertionsGenTraversableMessageWithStackDepth(prettifier, 0, UnquotedString(message), UnquotedString(fileName + ":" + lineNumber)))
FailureMessages.allShorthandFailed(prettifier, messageWithIndex, left)
}
describe("Defined matcher") {
describe("when work with 'thing should be (defined)'") {
it("should do nothing when thing is defined") {
something should be (defined)
}
it("should throw TestFailedException with correct stack depth when thing is not defined") {
val caught1 = intercept[TestFailedException] {
nothing should be (defined)
}
assert(caught1.message === Some(wasNotDefined(nothing)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
describe("when work with 'thing should not be defined'") {
it("should do nothing when thing is not defined") {
nothing should not be defined
}
it("should throw TestFailedException with correct stack depth when thing is defined") {
val caught1 = intercept[TestFailedException] {
something should not be defined
}
assert(caught1.message === Some(wasDefined(something)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
describe("when work with 'thing shouldBe defined'") {
it("should do nothing when thing is defined") {
something shouldBe defined
}
it("should throw TestFailedException with correct stack depth when thing is not defined") {
val caught1 = intercept[TestFailedException] {
nothing shouldBe defined
}
assert(caught1.message === Some(wasNotDefined(nothing)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
describe("when work with 'thing shouldNot be (defined)'") {
it("should do nothing when thing is not defined") {
nothing shouldNot be (defined)
}
it("should throw TestFailedException with correct stack depth when thing is defined") {
val caught1 = intercept[TestFailedException] {
something shouldNot be (defined)
}
assert(caught1.message === Some(wasDefined(something)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
describe("when work with 'all(xs) should be (defined)'") {
it("should do nothing when all(xs) is defined") {
all(List(something)) should be (defined)
}
it("should throw TestFailedException with correct stack depth when all(xs) is not defined") {
val left1 = List(nothing)
val caught1 = intercept[TestFailedException] {
all(left1) should be (defined)
}
assert(caught1.message === Some(allError(left1, wasNotDefined(nothing), thisLineNumber - 2)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
describe("when work with 'all(xs) should not be defined'") {
it("should do nothing when all(xs) is not defined") {
all(List(nothing)) should not be defined
}
it("should throw TestFailedException with correct stack depth when all(xs) is defined") {
val left1 = List(something)
val caught1 = intercept[TestFailedException] {
all(left1) should not be defined
}
assert(caught1.message === Some(allError(left1, wasDefined(something), thisLineNumber - 2)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
describe("when work with 'all(xs) shouldBe defined'") {
it("should do nothing when all(xs) is defined") {
all(List(something)) shouldBe defined
}
it("should throw TestFailedException with correct stack depth when all(xs) is not defined") {
val left1 = List(nothing)
val caught1 = intercept[TestFailedException] {
all(left1) shouldBe defined
}
assert(caught1.message === Some(allError(left1, wasNotDefined(nothing), thisLineNumber - 2)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
describe("when work with 'all(xs) shouldNot be (defined)'") {
it("should do nothing when all(xs) is not defined") {
all(List(nothing)) shouldNot be (defined)
}
it("should throw TestFailedException with correct stack depth when all(xs) is defined") {
val left1 = List(something)
val caught1 = intercept[TestFailedException] {
all(left1) shouldNot be (defined)
}
assert(caught1.message === Some(allError(left1, wasDefined(something), thisLineNumber - 2)))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
}
}
|
dotty-staging/scalatest
|
scalatest-test/src/test/scala/org/scalatest/ShouldBeDefinedImplicitSpec.scala
|
Scala
|
apache-2.0
| 7,037
|
package formation.warp10.warp10
import formation.warp10.{Configuration, Magasin, Warp10Api}
import kneelnrise.warp10scala.model.{GTS, GTSStringValue}
class MagasinWarp10(warp10Api: Warp10Api, configuration: Configuration) {
def registerAll(magasins: Seq[Magasin]): Unit = {
warp10Api.pushAll(magasins.map(magasinToGTS))
}
def register(magasin: Magasin): Unit = {
warp10Api.push(magasinToGTS(magasin))
}
private def magasinToGTS(magasin: Magasin): GTS = {
GTS(
ts = Some(configuration.nowAtOneTime),
coordinates = Some(magasin.coordinates),
elev = None,
name = "org.test.plain.store",
labels = Map("id" -> magasin.id),
value = GTSStringValue(magasin.name)
)
}
}
|
kneelnrise/formation-warp10
|
src/main/scala/formation/warp10/warp10/MagasinWarp10.scala
|
Scala
|
mit
| 729
|
package com.monsanto.engineering_blog.testing_without_mocking
import com.monsanto.engineering_blog.testing_without_mocking.JsonStuff._
import scala.concurrent.Future
object RealAccessTokenService {
def reallyCheckAccessToken(jsonClient: JsonClient)(accessToken: String): Future[JsonResponse] =
jsonClient.getWithoutSession(
Path("identity"),
Params("access_token" -> accessToken)
)
}
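// Usage sketch: the curried shape allows pre-binding the client once (client value assumed):
// val checkAccessToken: String => Future[JsonResponse] = reallyCheckAccessToken(jsonClient)
// checkAccessToken("some-token") // GET identity?access_token=some-token, no session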
|
samus42/engineering-blog
|
examples/testing-without-mocking/src/main/scala/com/monsanto/engineering_blog/testing_without_mocking/RealAccessTokenService.scala
|
Scala
|
bsd-3-clause
| 401
|
package com.github.cocagne.composable_paxos
class Learner[T](val networkUid: NetworkUID, val quorumSize: Int) {
private case class ProposalStatus(var acceptCount: Int, var retainCount: Int,
var acceptors: Set[NetworkUID], val value: T)
private var proposals: Map[ProposalID, ProposalStatus] = Map() // proposal_id -> [accept_count, retain_count, value]
private var acceptors: Map[NetworkUID, ProposalID] = Map() // from_uid -> last_accepted_proposal_id
private var _finalValue: Option[T] = None
private var _finalAcceptors: Set[NetworkUID] = Set() // Contains UIDs of all acceptors known to have the final value
private var _finalProposalId: Option[ProposalID] = None
def finalValue = _finalValue
def finalAcceptors = _finalAcceptors
def finalProposalId = _finalProposalId
def receive(msg: Accepted[T]): Option[Resolution[T]] = {
_finalValue match {
case Some(value) =>
if (msg.proposalId >= _finalProposalId.get && msg.proposalValue == value)
_finalAcceptors += msg.networkUid
return Some(Resolution(networkUid, value))
case None =>
}
val last = acceptors.get(msg.networkUid)
last match {
case Some(lastPid) if msg.proposalId <= lastPid => return None // Old message
case _ =>
}
acceptors += (msg.networkUid -> msg.proposalId)
last match {
case Some(lastPid) =>
val pstatus = proposals(lastPid)
pstatus.retainCount -= 1
pstatus.acceptors -= msg.networkUid
if (pstatus.retainCount == 0)
proposals -= lastPid
case _ =>
}
if (!proposals.contains(msg.proposalId))
proposals += (msg.proposalId -> ProposalStatus(0, 0, Set[NetworkUID](), msg.proposalValue))
val pstatus = proposals(msg.proposalId)
pstatus.acceptCount += 1
pstatus.retainCount += 1
pstatus.acceptors += msg.networkUid
if (pstatus.acceptCount == quorumSize) {
_finalProposalId = Some(msg.proposalId)
_finalValue = Some(msg.proposalValue)
_finalAcceptors = pstatus.acceptors
proposals = proposals.empty
acceptors = acceptors.empty
Some(Resolution(networkUid, msg.proposalValue))
}
else
None
}
}
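// Illustrative flow (UIDs, proposal ids, and the Accepted constructor shape assumed):
// with quorumSize = 2, learner.receive(Accepted(acceptorA, pid1, v)) returns None
// (1 of 2 votes), a subsequent learner.receive(Accepted(acceptorB, pid1, v)) reaches the
// quorum and returns Some(Resolution(networkUid, v)); any later Accepted for the final
// value gets the same Resolution back.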
|
cocagne/scala-composable-paxos
|
src/main/scala/com/github/cocagne/composable_paxos/Learner.scala
|
Scala
|
mit
| 2,356
|
package com.sksamuel.scapegoat.io
import java.io.{ BufferedWriter, File, FileWriter }
import com.sksamuel.scapegoat.Feedback
/**
* @author Stephen Samuel
* @author Eugene Sypachev (Axblade)
*/
object IOUtils {
private val XmlFile = "scapegoat.xml"
private val ScalastyleXmlFile = "scapegoat-scalastyle.xml"
private val HtmlFile = "scapegoat.html"
def serialize(file: File, str: String) = {
val out = new BufferedWriter(new FileWriter(file))
// close the writer even if write throws
try out.write(str)
finally out.close()
}
def writeHTMLReport(targetDir: File, reporter: Feedback): File = {
val html = HtmlReportWriter.generate(reporter).toString()
writeFile(targetDir, reporter, html, HtmlFile)
}
def writeXMLReport(targetDir: File, reporter: Feedback): File = {
val xml = XmlReportWriter.toXML(reporter).toString()
writeFile(targetDir, reporter, xml, XmlFile)
}
def writeScalastyleReport(targetDir: File, reporter: Feedback): File = {
val xml = ScalastyleReportWriter.toXML(reporter).toString()
writeFile(targetDir, reporter, xml, ScalastyleXmlFile)
}
private def writeFile(targetDir: File, reporter: Feedback, data: String, fileName: String) = {
targetDir.mkdirs()
val file = new File(targetDir, fileName)
serialize(file, data)
file
}
}
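// Usage sketch (feedback value assumed):
// val reportFile = IOUtils.writeHTMLReport(new File("target/scapegoat"), feedback)
// // reportFile -> target/scapegoat/scapegoat.html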
|
pwwpche/scalac-scapegoat-plugin
|
src/main/scala/com/sksamuel/scapegoat/io/IOUtils.scala
|
Scala
|
apache-2.0
| 1,304
|
// Copyright 2014 Commonwealth Bank of Australia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package au.com.cba.omnia.maestro.core
package data
/**
* Represents a field of `A` with type `B`. It has the name of the field and a getter given an
* instance of `A`.
*/
case class Field[A : Manifest, B : Manifest](name: String, get: A => B) {
// runtimeClass gives the erased class of A/B; getClass would only give the class of the Manifest itself
val structType = manifest[A].runtimeClass
val columnType = manifest[B].runtimeClass
/**
* Fields are considered equal if the name and the type of the Thrift struct are equal and the column types are equal.
* Note that this function will only work correctly with Fields generated by FieldsMacro.
* Do not try to use it with custom created fields.
*
* @throws RuntimeException when it encounters two fields with the same name from the same Thrift struct but with
* different column types; the intention is to indicate a serious error in the logic of your program.
*/
override def equals(other: Any): Boolean = other match {
case f: Field[_, _] => equalityTest(f)
case _ => false
}
private def equalityTest(f: Field[_, _]): Boolean = {
val equalFields = structType == f.structType && name == f.name
if (equalFields && columnType != f.columnType) {
throw new RuntimeException("Can't have two columns with the same name from the same Thrift structure with different column type")
}
equalFields
}
override def hashCode: Int = name.hashCode * 41 + structType.hashCode
}
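// Illustrative sketch (hypothetical Thrift-generated struct, not from the original source):
// case class Customer(id: String, age: Int)
// val customerId = Field[Customer, String]("id", _.id)
// customerId.get(Customer("c-42", 31)) // "c-42"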
|
toddmowen/maestro
|
maestro-core/src/main/scala/au/com/cba/omnia/maestro/core/data/Field.scala
|
Scala
|
apache-2.0
| 2,009
|
// Copyright (C) 2019 MapRoulette contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
package org.maproulette.models.dal
import java.sql.Connection
import anorm.SqlParser._
import anorm._
import javax.inject.Inject
import org.joda.time.DateTime
import org.maproulette.Config
import org.maproulette.cache.CacheManager
import org.maproulette.data.{Actions, TaskType, VirtualChallengeType}
import org.maproulette.exception.InvalidException
import org.maproulette.models._
import org.maproulette.models.utils.DALHelper
import org.maproulette.permissions.Permission
import org.maproulette.session.{SearchLocation, SearchParameters, SearchChallengeParameters, User}
import play.api.db.Database
import play.api.libs.json.JodaReads._
import play.api.libs.json.{JsString, JsValue, Json}
/**
* @author mcuthbert
*/
class VirtualChallengeDAL @Inject()(override val db: Database,
override val permission: Permission,
val taskDAL: TaskDAL,
val config: Config)
extends BaseDAL[Long, VirtualChallenge] with DALHelper with Locking[Task] {
override val cacheManager = new CacheManager[Long, VirtualChallenge](config, Config.CACHE_ID_VIRTUAL_CHALLENGES)
override val tableName: String = "virtual_challenges"
implicit val searchLocationWrites = Json.writes[SearchLocation]
implicit val searchLocationReads = Json.reads[SearchLocation]
override val parser: RowParser[VirtualChallenge] = {
get[Long]("virtual_challenges.id") ~
get[String]("virtual_challenges.name") ~
get[DateTime]("virtual_challenges.created") ~
get[DateTime]("virtual_challenges.modified") ~
get[Option[String]]("virtual_challenges.description") ~
get[Long]("virtual_challenges.owner_id") ~
get[String]("virtual_challenges.search_parameters") ~
get[DateTime]("virtual_challenges.expiry") map {
case id ~ name ~ created ~ modified ~ description ~ ownerId ~ searchParameters ~ expiry =>
new VirtualChallenge(id, name, created, modified, description, ownerId, Json.parse(searchParameters).as[SearchParameters], expiry)
}
}
/**
* The insert function for virtual challenges needs to create a new challenge and then find all the
* tasks based on the search parameters in the virtual challenge
*
* @param element The element that you are inserting to the database
* @param user The user executing the task
* @return The object that was inserted into the database. This will include the newly created id
*/
override def insert(element: VirtualChallenge, user: User)(implicit c: Option[Connection] = None): VirtualChallenge = {
this.cacheManager.withOptionCaching { () =>
withMRTransaction { implicit c =>
// check if any virtual challenges with the same name need to expire
// calling the retrieve function will also remove any expired virtual challenges
this.retrieveListByName(List(element.name))
val validParameters = element.taskIdList match {
case Some(ids) => ids.nonEmpty
case None => element.searchParameters.location match {
case Some(box) if (box.right - box.left) * (box.top - box.bottom) < config.virtualChallengeLimit => true
// covers both a missing location and a box that fails the area guard above
case _ => false
}
}
if (validParameters) {
val query =
"""INSERT INTO virtual_challenges (owner_id, name, description, search_parameters, expiry)
VALUES ({owner}, {name}, {description}, {parameters}, {expiry}::timestamp)
RETURNING *"""
val newChallenge = SQL(query).on(
'owner -> user.osmProfile.id,
'name -> element.name,
'description -> element.description,
'parameters -> Json.toJson(element.searchParameters).toString(),
'expiry -> ToParameterValue.apply[String].apply(String.valueOf(element.expiry))
).as(this.parser.single)
c.commit()
element.taskIdList match {
case Some(ids) => this.createVirtualChallengeFromIds(newChallenge.id, ids)
case None => this.rebuildVirtualChallenge(newChallenge.id, element.searchParameters, user)
}
Some(newChallenge)
} else {
throw new InvalidException(s"Bounding Box that has an area smaller than ${config.virtualChallengeLimit} required to create virtual challenge.")
}
}
}.get
}
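// Illustrative check of the area guard above (numbers assumed): with
// virtualChallengeLimit = 0.25, a location box of left = 0.0, right = 0.4, bottom = 0.0,
// top = 0.5 has area 0.2 and passes, while a 1.0 x 1.0 box fails and raises the
// InvalidException in insert above.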
/**
* This function will rebuild the virtual challenge based on the search parameters that have been stored with the object
*
* @param id The id of the virtual challenge
* @param params The search parameters stored with the virtual challenge
* @param user The user making the request
* @param c implicit connection
* @return
*/
def rebuildVirtualChallenge(id: Long, params: SearchParameters, user: User)(implicit c: Option[Connection] = None): Unit = {
permission.hasWriteAccess(VirtualChallengeType(), user)(id)
withMRTransaction { implicit c =>
val (count, result) = this.taskDAL.getTasksInBoundingBox(user, params, -1, 0)
result.grouped(config.virtualChallengeBatchSize).foreach(batch => {
val insertRows = batch.map(point => s"(${point.id}, $id)").mkString(",")
SQL"""
INSERT INTO virtual_challenge_tasks (task_id, virtual_challenge_id) VALUES #$insertRows
""".execute()
c.commit()
})
}
}
private def createVirtualChallengeFromIds(id: Long, idList: List[Long])(implicit c: Option[Connection] = None): Unit = {
withMRTransaction { implicit c =>
val insertRows = idList.map(taskId => s"($taskId, $id)").mkString(",")
SQL"""
INSERT INTO virtual_challenge_tasks (task_id, virtual_challenge_id) VALUES #$insertRows
""".execute()
c.commit()
}
}
override def retrieveListByName(implicit names: List[String], parentId: Long, c: Option[Connection] = None): List[VirtualChallenge] =
this.removeExpiredFromList(super.retrieveListByName)
/**
* The update function is limited in that you can only update the superficial elements, name
* and description. The only value of consequence that you can update is expiry, which by default
* is set to a day but can be extended by the user.
*
* @param updates The updates in json form
* @param user The user executing the task
* @param id The id of the object that you are updating
* @param c
* @return An optional object, it will return None if no object found with a matching id that was supplied
*/
override def update(updates: JsValue, user: User)(implicit id: Long, c: Option[Connection] = None): Option[VirtualChallenge] = {
permission.hasWriteAccess(VirtualChallengeType(), user)
this.cacheManager.withUpdatingCache(Long => retrieveById) { implicit cachedItem =>
withMRTransaction { implicit c =>
// first if the virtual challenge has expired then just delete it
val expiry = (updates \ "expiry").asOpt[DateTime].getOrElse(cachedItem.expiry)
if (DateTime.now().isAfter(expiry)) {
this.delete(cachedItem.id, user)
throw new InvalidException("Could not update virtual challenge as it has already expired")
} else {
val name = (updates \ "name").asOpt[String].getOrElse(cachedItem.name)
val description = (updates \ "description").asOpt[String].getOrElse(cachedItem.description.getOrElse(""))
val query =
"""UPDATE virtual_challenges
SET name = {name}, description = {description}, expiry = {expiry}::timestamp
WHERE id = {id} RETURNING *"""
SQL(query)
.on(
'name -> name,
'description -> description,
'expiry -> ToParameterValue.apply[String].apply(String.valueOf(expiry)),
'id -> id
).as(this.parser.*).headOption
}
}
}
}
// --- FOLLOWING FUNCTION OVERRIDE BASE FUNCTION TO SIMPLY REMOVE ANY RETRIEVED VIRTUAL CHALLENGES
// --- THAT ARE EXPIRED
override def retrieveById(implicit id: Long, c: Option[Connection] = None): Option[VirtualChallenge] = {
super.retrieveById match {
case Some(vc) if vc.isExpired =>
this.delete(id, User.superUser)
None
case x => x
}
}
def listTasks(id: Long, user: User, limit: Int, offset: Int)(implicit c: Option[Connection] = None): List[Task] = {
permission.hasReadAccess(VirtualChallengeType(), user)(id)
withMRTransaction { implicit c =>
SQL"""SELECT tasks.#${taskDAL.retrieveColumnsWithReview} FROM tasks
LEFT OUTER JOIN task_review ON task_review.task_id = tasks.id
INNER JOIN virtual_challenge_tasks vct ON vct.task_id = tasks.id
WHERE virtual_challenge_id = $id
LIMIT #${sqlLimit(limit)} OFFSET $offset""".as(taskDAL.parser.*)
}
}
/**
* Gets a random task from the list of tasks associated with the virtual challenge
*
* @param id The id of the virtual challenge
* @param params The search parameters, most of the parameters will not be used
* @param user The user making the request
* @param proximityId Id of the task to find the closest next task
* @param c
* @return An optional Task, None if no tasks available
*/
def getRandomTask(id: Long, params: SearchParameters, user: User, proximityId: Option[Long] = None)
(implicit c: Option[Connection] = None): Option[Task] = {
permission.hasReadAccess(VirtualChallengeType(), user)(id)
// The default where clause will check to see if the parents are enabled, that the task is
// not locked (or if it is, it is locked by the current user) and that the status of the task
// is either Created or Skipped
val taskStatusList = params.taskStatus match {
case Some(l) if l.nonEmpty => l
case _ =>
if (config.skipTooHard)
List(Task.STATUS_CREATED, Task.STATUS_SKIPPED)
else
List(Task.STATUS_CREATED, Task.STATUS_SKIPPED, Task.STATUS_TOO_HARD)
}
val whereClause = new StringBuilder(
s"""
WHERE vct.virtual_challenge_id = $id AND
(l.id IS NULL OR l.user_id = ${user.id}) AND
tasks.status IN ({statusList})
""")
val proximityOrdering = proximityId match {
case Some(proximity) =>
appendInWhereClause(whereClause, s"tasks.id != $proximity")
s"ST_Distance(tasks.location, (SELECT location FROM tasks WHERE id = $proximity)),"
case None => ""
}
val query =
s"""SELECT tasks.${taskDAL.retrieveColumnsWithReview} FROM tasks
LEFT JOIN locked l ON l.item_id = tasks.id
LEFT OUTER JOIN task_review ON task_review.task_id = tasks.id
INNER JOIN virtual_challenge_tasks vct ON vct.task_id = tasks.id
${whereClause.toString}
ORDER BY $proximityOrdering tasks.status, RANDOM() LIMIT 1"""
this.withSingleLocking(user, Some(TaskType())) { () =>
withMRTransaction { implicit c =>
SQL(query)
.on(
'statusList -> ToParameterValue.apply[List[Int]].apply(taskStatusList)
).as(taskDAL.parser.*).headOption
}
}
}
/**
* Simple query to retrieve the next task in the sequence
*
* @param id The parent of the task
* @param currentTaskId The current task that we are basing our query from
* @return An optional task, if no more tasks in the list will retrieve the first task
*/
def getSequentialNextTask(id: Long, currentTaskId: Long)(implicit c: Option[Connection] = None): Option[(Task, Lock)] = {
this.withMRConnection { implicit c =>
val lp = for {
task <- taskDAL.parser
lock <- lockedParser
} yield task -> lock
val query =
s"""SELECT locked.*, tasks.${taskDAL.retrieveColumnsWithReview} FROM tasks
LEFT JOIN locked ON locked.item_id = tasks.id
LEFT OUTER JOIN task_review ON task_review.task_id = tasks.id
WHERE tasks.id = (SELECT task_id
FROM virtual_challenge_tasks
WHERE task_id > $currentTaskId AND virtual_challenge_id = $id
LIMIT 1)
"""
SQL(query).as(lp.*).headOption match {
case Some(t) => Some(t)
case None =>
val loopQuery =
s"""SELECT locked.*, tasks.${taskDAL.retrieveColumnsWithReview} FROM tasks
LEFT JOIN locked ON locked.item_id = tasks.id
LEFT OUTER JOIN task_review ON task_review.task_id = tasks.id
WHERE tasks.id = (SELECT task_id
FROM virtual_challenge_tasks
WHERE virtual_challenge_id = $id
AND task_id != $currentTaskId
ORDER BY id ASC LIMIT 1)
"""
SQL(loopQuery).as(lp.*).headOption
}
}
}
/**
* Simple query to retrieve the previous task in the sequence
*
* @param id The parent of the task
* @param currentTaskId The current task that we are basing our query from
* @return An optional task, if no more tasks in the list will retrieve the last task
*/
def getSequentialPreviousTask(id: Long, currentTaskId: Long)(implicit c: Option[Connection] = None): Option[(Task, Lock)] = {
this.withMRConnection { implicit c =>
val lp = for {
task <- taskDAL.parser
lock <- lockedParser
} yield task -> lock
val query =
s"""SELECT locked.*, tasks.${taskDAL.retrieveColumnsWithReview} FROM tasks
LEFT JOIN locked ON locked.item_id = tasks.id
LEFT OUTER JOIN task_review ON task_review.task_id = tasks.id
WHERE tasks.id = (SELECT task_id
FROM virtual_challenge_tasks
WHERE task_id < $currentTaskId AND virtual_challenge_id = $id
LIMIT 1)
"""
SQL(query).as(lp.*).headOption match {
case Some(t) => Some(t)
case None =>
val loopQuery =
s"""SELECT locked.*, tasks.${taskDAL.retrieveColumnsWithReview} FROM tasks
LEFT JOIN locked ON locked.item_id = tasks.id
LEFT OUTER JOIN task_review ON task_review.task_id = tasks.id
WHERE tasks.id = (SELECT task_id
FROM virtual_challenge_tasks
WHERE virtual_challenge_id = $id
AND task_id != $currentTaskId
ORDER BY id DESC LIMIT 1)
"""
SQL(loopQuery).as(lp.*).headOption
}
}
}
/**
* Retrieve tasks geographically closest to the given task id within the
* given virtual challenge. Ignores tasks that are complete, locked by other
* users, or that the current user has worked on in the last hour
*/
def getNearbyTasks(user: User, challengeId: Long, proximityId: Long, limit: Int = 5)
(implicit c: Option[Connection] = None): List[Task] = {
val query = s"""SELECT tasks.${taskDAL.retrieveColumnsWithReview} FROM tasks
LEFT JOIN locked l ON l.item_id = tasks.id
LEFT JOIN virtual_challenge_tasks vct on vct.task_id = tasks.id
LEFT OUTER JOIN task_review ON task_review.task_id = tasks.id
WHERE tasks.id <> $proximityId AND
vct.virtual_challenge_id = $challengeId AND
(l.id IS NULL OR l.user_id = ${user.id}) AND
tasks.status IN (0, 3, 6) AND
NOT tasks.id IN (
SELECT task_id FROM status_actions
WHERE osm_user_id = ${user.osmProfile.id} AND created >= NOW() - '1 hour'::INTERVAL)
ORDER BY ST_Distance(tasks.location, (SELECT location FROM tasks WHERE id = $proximityId)), tasks.status, RANDOM()
LIMIT ${this.sqlLimit(limit)}"""
this.withMRTransaction { implicit c =>
SQL(query).as(taskDAL.parser.*)
}
}
/**
* Gets the combined geometry of all the tasks that are associated with the virtual challenge
* NOTE: Due to the way this function finds the geometries, it could be quite slow.
*
* @param challengeId The id for the virtual challenge
* @param statusFilter To view the geojson for only tasks with a specific status
* @param c The implicit connection for the function
* @return A JSON string representing the geometry
*/
def getChallengeGeometry(challengeId: Long, statusFilter: Option[List[Int]] = None)(implicit c: Option[Connection] = None): String = {
this.withMRConnection { implicit c =>
val filter = statusFilter match {
case Some(s) => s"AND status IN (${s.mkString(",")}"
case None => ""
}
SQL"""SELECT ROW_TO_JSON(f)::TEXT AS geometries
FROM (
SELECT 'FeatureCollection' AS type, ARRAY_TO_JSON(ARRAY_AGG(ST_ASGEOJSON(geom)::JSON)) AS features
FROM tasks WHERE id IN
(SELECT task_id FROM virtual_challenge_tasks
WHERE virtual_challenge_id = $challengeId)
#$filter
) as f""".as(str("geometries").single)
}
}
/**
* Retrieves the json that contains the central points for all the tasks in the virtual challenge.
* One caveat to Virtual Challenges: if a project or challenge that has tasks in the virtual
* challenge is flagged as deleted, those tasks will remain part of the virtual challenge
* until the tasks are cleared from the database.
*
* @param challengeId The id of the virtual challenge
* @param statusFilter Filter the displayed task cluster points by their status
* @return A list of clustered point objects
*/
def getClusteredPoints(challengeId: Long, statusFilter: Option[List[Int]] = None)
(implicit c: Option[Connection] = None): List[ClusteredPoint] = {
this.withMRConnection { implicit c =>
val filter = statusFilter match {
case Some(s) => s"AND status IN (${s.mkString(",")}"
case None => ""
}
val pointParser = long("id") ~ str("name") ~ str("instruction") ~ str("location") ~
int("status") ~ get[Option[String]]("suggested_fix") ~ get[Option[DateTime]]("mapped_on") ~
get[Option[Int]]("review_status") ~ get[Option[Int]]("review_requested_by") ~
get[Option[Int]]("reviewed_by") ~ get[Option[DateTime]]("reviewed_at") ~
get[Option[DateTime]]("review_started_at") ~ int("priority") ~
get[Option[Long]]("bundle_id") ~ get[Option[Boolean]]("is_bundle_primary") map {
case id ~ name ~ instruction ~ location ~ status ~ suggestedFix ~ mappedOn ~ reviewStatus ~ reviewRequestedBy ~
reviewedBy ~ reviewedAt ~ reviewStartedAt ~ priority ~ bundleId ~ isBundlePrimary =>
val locationJSON = Json.parse(location)
val coordinates = (locationJSON \ "coordinates").as[List[Double]]
val point = Point(coordinates(1), coordinates.head)
val pointReview = PointReview(reviewStatus, reviewRequestedBy, reviewedBy, reviewedAt, reviewStartedAt)
ClusteredPoint(id, -1, "", name, -1, "", point, JsString(""),
instruction, DateTime.now(), -1, Actions.ITEM_TYPE_TASK, status, suggestedFix, mappedOn,
pointReview, priority, bundleId, isBundlePrimary)
}
SQL"""SELECT tasks.id, name, instruction, status, suggestedfix_geojson::TEXT as suggested_fix,
mapped_on, review_status, review_requested_by,
reviewed_by, reviewed_at, review_started_at, ST_AsGeoJSON(location) AS location, priority,
bundle_id, is_bundle_primary
FROM tasks LEFT OUTER JOIN task_review ON task_review.task_id = tasks.id
WHERE tasks.id IN
(SELECT task_id FROM virtual_challenge_tasks
WHERE virtual_challenge_id = $challengeId) #$filter"""
.as(pointParser.*)
}
}
/**
* For Virtual Challenges the retrieveByName function won't quite work as expected: there may be
* multiple Virtual Challenges with the same name, so this function simply returns the first one.
* Generally retrieveListByName should be used instead.
*
* @param name The name you are looking up by
* @param parentId
* @param c
* @return The object that you are looking up, None if not found
*/
override def retrieveByName(implicit name: String, parentId: Long, c: Option[Connection] = None): Option[VirtualChallenge] = {
super.retrieveByName match {
case Some(vc) if vc.isExpired =>
this.delete(vc.id, User.superUser)
None
case x => x
}
}
override def retrieveListById(limit: Int, offset: Int)(implicit ids: List[Long], c: Option[Connection] = None): List[VirtualChallenge] =
this.removeExpiredFromList(super.retrieveListById(limit, offset))
private def removeExpiredFromList(superList: List[VirtualChallenge]): List[VirtualChallenge] = {
superList.flatMap(vc => {
if (vc.isExpired) {
this.delete(vc.id, User.superUser)
None
} else {
Some(vc)
}
})
}
override def retrieveListByPrefix(prefix: String, limit: Int, offset: Int, onlyEnabled: Boolean, orderColumn: String, orderDirection: String)
(implicit parentId: Long, c: Option[Connection] = None): List[VirtualChallenge] =
this.removeExpiredFromList(super.retrieveListByPrefix(prefix, limit, offset, onlyEnabled, orderColumn, orderDirection))
override def find(searchString: String, limit: Int, offset: Int, onlyEnabled: Boolean, orderColumn: String, orderDirection: String)
(implicit parentId: Long, c: Option[Connection] = None): List[VirtualChallenge] =
this.removeExpiredFromList(super.find(searchString, limit, offset, onlyEnabled, orderColumn, orderDirection))
override def list(limit: Int, offset: Int, onlyEnabled: Boolean, searchString: String, orderColumn: String, orderDirection: String)
(implicit parentId: Long, c: Option[Connection] = None): List[VirtualChallenge] =
this.removeExpiredFromList(super.list(limit, offset, onlyEnabled, searchString, orderColumn, orderDirection))
// --- END OF OVERRIDDEN FUNCTIONS TO FILTER OUT ANY EXPIRED VIRTUAL CHALLENGES
}
|
mvexel/maproulette2
|
app/org/maproulette/models/dal/VirtualChallengeDAL.scala
|
Scala
|
apache-2.0
| 23,063
|
import scala.util.Try
import scalaz.Scalaz._
import scalaz.effect.IO
import scalaz.effect.IO._
object Main extends App {
import Game._
def play(): IO[Unit] = for {
_ <- greetings()
game <- setup()
_ <- gameLoop(game)
} yield ()
def greetings(): IO[Unit] = {
val greet =
""" ●▬▬▬▬ஜ۩۞۩ஜ▬▬▬▬●
|░░░▒▒▒▒▒▓▓▓▒▒▒▒▒░░░
|░╔╦╦╦═╦╗╔═╦═╦══╦═╗░
|░║║║║╩╣╚╣═╣║║║║║╩╣░
|░╚══╩═╩═╩═╩═╩╩╩╩═╝░
|░░░▒▒▒▒▒▓▓▓▒▒▒▒▒░░░
| ●▬▬▬▬ஜ۩۞۩ஜ▬▬▬▬●
""".stripMargin
for {
_ <- putStrLn(greet)
_ <- putStrLn(initGame.show())
_ <- remindMoves() // sequence the reminder; yielding it would only produce an unrun IO
} yield ()
}
def showResults(game: Game): IO[Unit] = for {
_ <- showGame(game)
_ <- putStrLn("Game over")
} yield ()
def showGame(game: Game): IO[Unit] = putStrLn(game.show())
def gameLoop(game: Game): IO[Unit] = {
if (isGameOver(game)) for {
_ <- showResults(game)
game <- setup()
_ <- gameLoop(game)
} yield ()
else for {
_ <- showGame(game)
move <- askForMove()
_ <- reactOnMove(game, move)
} yield ()
}
def parseQuery(input: String): Option[Query] = input match {
case "up" => Play(Up).some
case "u" => Play(Up).some
case "down" => Play(Down).some
case "d" => Play(Down).some
case "left" => Play(Left).some
case "l" => Play(Left).some
case "right" => Play(Right).some
case "r" => Play(Right).some
case "quit" => Quit.some
case "q" => Quit.some
case str if str.startsWith("new") =>
str.split(" ").tail.headOption
.flatMap(n => readInt(n))
.map(NewGame)
case _ => none
}
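// Examples of the mapping above: parseQuery("l") == Some(Play(Left)),
// parseQuery("new 20") == Some(NewGame(20)), parseQuery("xyz") == None.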
def showAsk(): IO[Unit] = putStrLn("Your move: ")
def remindMoves(): IO[Unit] = {
val reminder =
"""Possible moves of the empty cell:
| left or l -- move left
| right or r -- move right
| up or u -- move up
| down or d -- move down
|
| Other actions:
| new [Int] -- start new game with difficulty [Int]
| quit or q -- quit the game
""".stripMargin
putStrLn(reminder)
}
def wrongMove(): IO[Unit] = for {
_ <- putStrLn("Can't recognize move.")
_ <- remindMoves()
} yield ()
def askAgain(): IO[Query] = for {
move <- askForMove()
} yield move
def askForMove(): IO[Query] = for {
_ <- showAsk()
in <- readLn
move <- parseQuery(in).fold(wrongMove().flatMap(_ => askAgain()))(q => IO(q))
} yield move
def quit(): IO[Unit] = putStrLn("See you again, stranger!")
def reactOnMove(game: Game, query: Query): IO[Unit] = query match {
case Quit => quit()
case NewGame(difficulty) => shuffle(difficulty) flatMap gameLoop
case Play(move) => gameLoop(doMove(move, game))
}
def setup(): IO[Game] = for {
_ <- putStrLn("Start new game?")
_ <- putStrLn("Choose difficulty: ")
in <- readLn
game <- readInt(in).fold(setup())(diff => shuffle(diff))
} yield game
def readInt(str: String): Option[Int] = Try(str.toInt).toOption
play().unsafePerformIO()
}
|
nikdon/15puzzle
|
src/main/scala/Main.scala
|
Scala
|
mit
| 3,363
|
package com.mesosphere.cosmos.rpc.v1.model
case class SearchResponse(packages: Seq[SearchResult])
|
movicha/cosmos
|
cosmos-model/src/main/scala/com/mesosphere/cosmos/rpc/v1/model/SearchResponse.scala
|
Scala
|
apache-2.0
| 99
|
package org.jetbrains.plugins.scala
package lang
package surroundWith
package surrounders
package expression
/**
* @author: Dmitry Krasilschikov
*/
import com.intellij.lang.ASTNode
import com.intellij.openapi.util.TextRange
import com.intellij.psi.PsiElement
/*
* ScalaWithBracesSurrounder is responsible for surrounders which enclose an expression in braces: { Expression }
*/
class ScalaWithBracesSurrounder extends ScalaExpressionSurrounder {
override def getTemplateAsString(elements: Array[PsiElement]): String =
"{" + super.getTemplateAsString(elements) + "}"
override def getTemplateDescription = "{ }"
override def getSurroundSelectionRange (expr : ASTNode) : TextRange = {
val offset = expr.getTextRange.getEndOffset
new TextRange(offset, offset)
}
override def needParenthesis(elements: Array[PsiElement]) = false
}
|
LPTK/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/surroundWith/surrounders/expression/ScalaWithBracesSurrounder.scala
|
Scala
|
apache-2.0
| 907
|
package org.psliwa.idea.composerJson.composer.model.repository
import org.junit.Assert._
import org.junit.Test
class RepositoryProviderWrapperTest {
val innerRepository: Repository[String] = Repository.inMemory[String](List("package1"))
val innerRepositoryProvider: RepositoryProvider[String] = new RepositoryProvider[String] {
override def repositoryFor(file: String): Repository[String] = innerRepository
override def updateRepository(file: String, info: RepositoryInfo) = false
override def hasDefaultRepository(file: String): Boolean = false
}
val defaultRepository: Repository[String] = Repository.inMemory[String](List("package2"))
@Test
def defaultRepositoryShouldBeReturnedWhenPredicateIsTrue(): Unit = {
//given
val defaultFile = "defaultFile"
val predicate = (file: String) => file == defaultFile
val repositoryProvider =
new RepositoryProviderWrapper[String](innerRepositoryProvider, defaultRepository, predicate)
//when & then
assertEquals(defaultRepository, repositoryProvider.repositoryFor(defaultFile))
assertEquals(innerRepository, repositoryProvider.repositoryFor("different-file"))
}
}
|
psliwa/idea-composer-plugin
|
src/test/scala/org/psliwa/idea/composerJson/composer/model/repository/RepositoryProviderWrapperTest.scala
|
Scala
|
mit
| 1,172
|
package controllers
import org.json4s._
package object helpers {
private implicit val format = DefaultFormats
def Json(o: Any) = Extraction.decompose(o)
}
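// Usage sketch (illustrative, not part of the original file): Json decomposes
// any value into a json4s JValue using the implicit DefaultFormats above, e.g.
//
//   case class User(name: String, age: Int)
//   helpers.Json(User("Ada", 36))
//   // JObject(List(("name", JString("Ada")), ("age", JInt(36))))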
|
maximx1/roma_quantum
|
app/controllers/helpers/package.scala
|
Scala
|
mit
| 160
|
package ml.wolfe.ui
import java.util.UUID
import ml.wolfe.term._
import org.sameersingh.htmlgen.{HTML, RawHTML}
/**
* @author riedel
*/
object D3Term {
implicit def render(term:AnyTerm):HTML = {
val id = "term" + UUID.randomUUID().toString
def nodeName(term:AnyTerm):String = {
term match {
case c:Constant[_] => c.value.toString
case v:AnyVar => v.varName
case d:ProductDom#DomTermImpl => d.domain.productName
case t:Tuple2Dom[_,_]#Tuple2Constructor => "Tuple2"
case t => t.getClass.getSimpleName
}
}
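// Recursively renders the term as nested {"name", "parent", "children"} JSON
// objects consumed by the D3 tree layout below; proxy terms are unwrapped.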
def treeData(term:AnyTerm, parent:AnyTerm = null):String = {
term match {
case p:ProxyTerm[_] => treeData(p.self,parent)
case _ =>
val name = nodeName(term)
val parentName = if (parent != null) nodeName(parent) else null
val children = term match {
case n:NAry => n.arguments map (treeData(_,term))
case _ => Nil
}
val childrenString = children.mkString(",\n")
val data =
s"""
|{
| "name": "$name",
| "parent": "$parentName",
| "children": [$childrenString]
|}
""".stripMargin
data
}
}
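// Height of the term tree, used below to size the SVG; proxies are transparent.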
def depth(term:AnyTerm):Int = term match {
case p:ProxyTerm[_] => depth(p.self)
case n:NAry => (n.arguments map depth).max + 1
case _ => 1
}
val termAsData = treeData(term)
val termDepth = depth(term)
val html = s"""
|<div id = "$id" class="term">
|<svg></svg>
|</div>
|<script>
|
|var treeData = [
| $termAsData
|];
|
|var depth = $termDepth + 1
|
|// ************** Generate the tree diagram *****************
|var margin = {top: 40, right: 120, bottom: 20, left: 120},
| width = 960 - margin.right - margin.left,
| height = depth * 70 - margin.top - margin.bottom;
|
|var i = 0;
|
|var tree = d3.layout.tree()
| .size([height, width]);
|
|var diagonal = d3.svg.diagonal()
| .projection(function(d) { return [d.x, d.y]; });
|
|var svg = d3.select("#$id svg")
| .attr("width", width + margin.right + margin.left)
| .attr("height", height + margin.top + margin.bottom)
| .append("g")
| .attr("transform", "translate(" + margin.left + "," + margin.top + ")");
|
|root = treeData[0];
|
|update(root);
|
|function update(source) {
|
| // Compute the new tree layout.
| var nodes = tree.nodes(source).reverse(),
| links = tree.links(nodes);
|
| // Normalize for fixed-depth.
| nodes.forEach(function(d) { d.y = d.depth * 50; });
|
| // Declare the nodes…
| var node = svg.selectAll("g.node")
| .data(nodes, function(d) { return d.id || (d.id = ++i); });
|
| // Enter the nodes.
| var nodeEnter = node.enter().append("g")
| .attr("class", "node")
| .attr("transform", function(d) {
| return "translate(" + d.x + "," + d.y + ")"; });
|
| nodeEnter.append("circle")
| .attr("r", 10);
|
| nodeEnter.append("text")
| .attr("y", function(d) {
| return d.children || d._children ? -18 : 18; })
| .attr("dy", ".35em")
| .attr("text-anchor", "middle")
| .text(function(d) { return d.name; })
| .style("fill-opacity", 1);
|
| // Declare the links…
| var link = svg.selectAll("path.link")
| .data(links, function(d) { return d.target.id; });
|
| // Enter the links.
| link.enter().insert("path", "g")
| .attr("class", "link")
| .attr("d", diagonal);
|
|}
|
|</script>
""".stripMargin
RawHTML(html)
}
}
|
wolfe-pack/wolfe
|
wolfe-ui/src/main/scala/ml/wolfe/ui/D3Term.scala
|
Scala
|
apache-2.0
| 3,942
|
package com.twitter.finagle.http
import com.twitter.conversions.storage._
import com.twitter.conversions.time._
import com.twitter.finagle._
import com.twitter.finagle.builder.{ClientBuilder, ServerBuilder}
import com.twitter.finagle.param.Stats
import com.twitter.finagle.service.FailureAccrualFactory
import com.twitter.finagle.stats.{InMemoryStatsReceiver, StatsReceiver}
import com.twitter.finagle.tracing.Trace
import com.twitter.io.{Buf, Reader, Writer}
import com.twitter.util.{Await, Closable, Future, JavaTimer, Promise, Return, Throw, Time}
import java.io.{PrintWriter, StringWriter}
import java.net.{InetAddress, InetSocketAddress}
import org.junit.runner.RunWith
import org.scalatest.{BeforeAndAfter, FunSuite}
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class EndToEndTest extends FunSuite with BeforeAndAfter {
var saveBase: Dtab = Dtab.empty
before {
saveBase = Dtab.base
Dtab.base = Dtab.read("/foo=>/bar; /baz=>/biz")
}
after {
Dtab.base = saveBase
}
type HttpService = Service[Request, Response]
type HttpTest = String => (HttpService => HttpService) => Unit
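// Writes "*" to the writer forever; the tests below use it to observe when a
// discarded or closed stream cuts off its producer.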
def drip(w: Writer): Future[Unit] = w.write(buf("*")) before drip(w)
def buf(msg: String): Buf = Buf.Utf8(msg)
/**
 * Read up to `n` bytes from the byte stream represented by `r`, returning
 * early with whatever has been read if the stream ends first.
 */
def readNBytes(n: Int, r: Reader): Future[Buf] = {
def loop(left: Buf): Future[Buf] = (n - left.length) match {
case x if x > 0 =>
r.read(x) flatMap {
case Some(right) => loop(left concat right)
case None => Future.value(left)
}
case _ => Future.value(left)
}
loop(Buf.Empty)
}
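// Usage sketch (illustrative, not part of the original test): readNBytes keeps
// reading until `n` bytes have accumulated or the stream ends, e.g.
//
//   val rw = Reader.writable()
//   rw.write(buf("hello"))
//   Await.result(readNBytes(3, rw)) // Buf.Utf8("hel")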
def run(name: String)(tests: HttpTest*)(connect: HttpService => HttpService): Unit = {
tests.foreach(t => t(name)(connect))
}
def standardErrors(name: String)(connect: HttpService => HttpService): Unit = {
test(name + ": request uri too long") {
val service = new HttpService {
def apply(request: Request) = Future.value(Response())
}
val client = connect(service)
val request = Request("/" + "a" * 4096)
val response = Await.result(client(request))
assert(response.status == Status.RequestURITooLong)
client.close()
}
test(name + ": request header fields too large") {
val service = new HttpService {
def apply(request: Request) = Future.value(Response())
}
val client = connect(service)
val request = Request()
request.headers().add("header", "a" * 8192)
val response = Await.result(client(request))
assert(response.status == Status.RequestHeaderFieldsTooLarge)
client.close()
}
test(name + ": unhandled exceptions are converted into 500s") {
val service = new HttpService {
def apply(request: Request) = Future.exception(new IllegalArgumentException("bad news"))
}
val client = connect(service)
val response = Await.result(client(Request()))
assert(response.status == Status.InternalServerError)
client.close()
}
test(name + ": return 413s for requests with too large payloads") {
val service = new HttpService {
def apply(request: Request) = Future.value(Response())
}
val client = connect(service)
val tooBig = Request("/")
tooBig.content = Buf.ByteArray.Owned(new Array[Byte](200))
val justRight = Request("/")
justRight.content = Buf.ByteArray.Owned(new Array[Byte](100))
assert(Await.result(client(tooBig)).status == Status.RequestEntityTooLarge)
assert(Await.result(client(justRight)).status == Status.Ok)
client.close()
}
}
def standardBehaviour(name: String)(connect: HttpService => HttpService): Unit = {
test(name + ": client stack observes max header size") {
val service = new HttpService {
def apply(req: Request) = {
val res = Response()
res.headerMap.put("Foo", ("*" * 8192) + "Bar: a")
Future.value(res)
}
}
val client = connect(service)
// Whether or not this fails (that depends on the max header size configured
// on the client), there should definitely be no "Bar" header.
val hasBar = client(Request()).transform {
case Throw(_) => Future.False
case Return(res) =>
val names = res.headerMap.keys
Future.value(names.exists(_.contains("Bar")))
}
assert(!Await.result(hasBar))
client.close()
}
test(name + ": client sets content length") {
val service = new HttpService {
def apply(request: Request) = {
val response = Response()
val len = request.headerMap.get(Fields.ContentLength)
response.contentString = len.getOrElse("")
Future.value(response)
}
}
val body = "hello"
val client = connect(service)
val req = Request()
req.contentString = body
assert(Await.result(client(req)).contentString == body.length.toString)
client.close()
}
test(name + ": echo") {
val service = new HttpService {
def apply(request: Request) = {
val response = Response()
response.contentString = request.uri
Future.value(response)
}
}
val client = connect(service)
val response = client(Request("123"))
assert(Await.result(response).contentString == "123")
client.close()
}
test(name + ": dtab") {
val service = new HttpService {
def apply(request: Request) = {
val stringer = new StringWriter
val printer = new PrintWriter(stringer)
Dtab.local.print(printer)
val response = Response(request)
response.contentString = stringer.toString
Future.value(response)
}
}
val client = connect(service)
Dtab.unwind {
Dtab.local ++= Dtab.read("/a=>/b; /c=>/d")
val res = Await.result(client(Request("/")))
assert(res.contentString == "Dtab(2)\n\t/a => /b\n\t/c => /d\n")
}
client.close()
}
test(name + ": (no) dtab") {
val service = new HttpService {
def apply(request: Request) = {
val stringer = new StringWriter
val response = Response(request)
response.contentString = "%d".format(Dtab.local.length)
Future.value(response)
}
}
val client = connect(service)
val res = Await.result(client(Request("/")))
assert(res.contentString == "0")
client.close()
}
test(name + ": stream") {
def service(r: Reader) = new HttpService {
def apply(request: Request) = {
val response = Response()
response.setChunked(true)
response.writer.write(buf("hello")) before
response.writer.write(buf("world")) before
response.close()
Future.value(response)
}
}
val writer = Reader.writable()
val client = connect(service(writer))
val response = Await.result(client(Request()))
assert(response.contentString == "helloworld")
client.close()
}
test(name + ": client abort") {
import com.twitter.conversions.time._
val timer = new JavaTimer
val promise = new Promise[Response]
val service = new HttpService {
def apply(request: Request) = promise
}
val client = connect(service)
client(Request())
Await.ready(timer.doLater(20.milliseconds) {
Await.ready(client.close())
intercept[CancelledRequestException] {
promise.isInterrupted match {
case Some(intr) => throw intr
case _ =>
}
}
})
}
}
def streaming(name: String)(connect: HttpService => HttpService): Unit = {
def service(r: Reader) = new HttpService {
def apply(request: Request) = {
val response = new Response {
final val httpResponse = request.response.httpResponse
override def reader = r
}
response.setChunked(true)
Future.value(response)
}
}
test(name + ": symmetric reader and getContent") {
val s = Service.mk[Request, Response] { req =>
val buf = Await.result(Reader.readAll(req.reader))
assert(buf == Buf.Utf8("hello"))
assert(req.contentString == "hello")
req.response.content = req.content
Future.value(req.response)
}
val req = Request()
req.contentString = "hello"
req.headerMap.put("Content-Length", "5")
val client = connect(s)
val res = Await.result(client(req))
val buf = Await.result(Reader.readAll(res.reader))
assert(buf == Buf.Utf8("hello"))
assert(res.contentString == "hello")
}
test(name + ": stream") {
val writer = Reader.writable()
val client = connect(service(writer))
val reader = Await.result(client(Request())).reader
Await.result(writer.write(buf("hello")))
assert(Await.result(readNBytes(5, reader)) == Buf.Utf8("hello"))
Await.result(writer.write(buf("world")))
assert(Await.result(readNBytes(5, reader)) == Buf.Utf8("world"))
client.close()
}
test(name + ": transport closure propagates to request stream reader") {
val p = new Promise[Buf]
val s = Service.mk[Request, Response] { req =>
p.become(Reader.readAll(req.reader))
Future.value(Response())
}
val client = connect(s)
val req = Request()
req.setChunked(true)
Await.result(client(req))
client.close()
intercept[ChannelClosedException] { Await.result(p) }
}
test(name + ": transport closure propagates to request stream producer") {
val s = Service.mk[Request, Response] { _ => Future.value(Response()) }
val client = connect(s)
val req = Request()
req.setChunked(true)
client(req)
client.close()
intercept[Reader.ReaderDiscarded] { Await.result(drip(req.writer), 5.seconds) }
}
test(name + ": request discard terminates remote stream producer") {
val s = Service.mk[Request, Response] { req =>
val res = Response()
res.setChunked(true)
def go = for {
Some(c) <- req.reader.read(Int.MaxValue)
_ <- res.writer.write(c)
_ <- res.close()
} yield ()
// discard the reader, which should terminate the drip.
go ensure req.reader.discard()
Future.value(res)
}
val client = connect(s)
val req = Request()
req.setChunked(true)
val resf = client(req)
Await.result(req.writer.write(buf("hello")))
val contentf = resf flatMap { res => Reader.readAll(res.reader) }
assert(Await.result(contentf) == Buf.Utf8("hello"))
// drip should terminate because the request is discarded.
intercept[Reader.ReaderDiscarded] { Await.result(drip(req.writer)) }
}
test(name + ": client discard terminates stream and frees up the connection") {
val s = new Service[Request, Response] {
var rep: Response = null
def apply(req: Request) = {
rep = Response()
rep.setChunked(true)
// Make sure the body is fully read.
// Then we hang forever.
val body = Reader.readAll(req.reader)
Future.value(rep)
}
}
val client = connect(s)
val rep = Await.result(client(Request()), 10.seconds)
assert(s.rep != null)
rep.reader.discard()
s.rep = null
// Now, make sure the connection doesn't clog up.
Await.result(client(Request()), 10.seconds)
assert(s.rep != null)
}
test(name + ": two fixed-length requests") {
val svc = Service.mk[Request, Response] { _ => Future.value(Response()) }
val client = connect(svc)
Await.result(client(Request()))
Await.result(client(Request()))
client.close()
}
}
def tracing(name: String)(connect: HttpService => HttpService): Unit = {
test(name + ": trace") {
var (outerTrace, outerSpan) = ("", "")
val inner = connect(new HttpService {
def apply(request: Request) = {
val response = Response(request)
response.contentString = Seq(
Trace.id.traceId.toString,
Trace.id.spanId.toString,
Trace.id.parentId.toString
).mkString(".")
Future.value(response)
}
})
val outer = connect(new HttpService {
def apply(request: Request) = {
outerTrace = Trace.id.traceId.toString
outerSpan = Trace.id.spanId.toString
inner(request)
}
})
val response = Await.result(outer(Request()))
val Seq(innerTrace, innerSpan, innerParent) =
response.contentString.split('.').toSeq
assert(innerTrace == outerTrace, "traceId")
assert(outerSpan == innerParent, "outer span vs inner parent")
assert(innerSpan != outerSpan, "inner (%s) vs outer (%s) spanId".format(innerSpan, outerSpan))
outer.close()
inner.close()
}
}
run("ClientBuilder")(standardErrors, standardBehaviour) {
service =>
val server = ServerBuilder()
.codec(Http().maxRequestSize(100.bytes))
.bindTo(new InetSocketAddress(InetAddress.getLoopbackAddress, 0))
.name("server")
.build(service)
val client = ClientBuilder()
.codec(Http())
.hosts(Seq(server.boundAddress))
.hostConnectionLimit(1)
.name("client")
.build()
new ServiceProxy(client) {
override def close(deadline: Time) =
Closable.all(client, server).close(deadline)
}
}
run("Client/Server")(standardErrors, standardBehaviour, tracing) {
service =>
import com.twitter.finagle
val server = finagle.Http.server.withMaxRequestSize(100.bytes).serve("localhost:*", service)
val addr = server.boundAddress.asInstanceOf[InetSocketAddress]
val client = finagle.Http.newService("%s:%d".format(addr.getHostName, addr.getPort))
new ServiceProxy(client) {
override def close(deadline: Time) =
Closable.all(client, server).close(deadline)
}
}
run("ClientBuilder (streaming)")(streaming) {
service =>
val server = ServerBuilder()
.codec(Http().streaming(true))
.bindTo(new InetSocketAddress(InetAddress.getLoopbackAddress, 0))
.name("server")
.build(service)
val client = ClientBuilder()
.codec(Http().streaming(true))
.hosts(Seq(server.boundAddress))
.hostConnectionLimit(1)
.name("client")
.build()
new ServiceProxy(client) {
override def close(deadline: Time) =
Closable.all(client, server).close(deadline)
}
}
run("ClientBuilder (tracing)")(tracing) {
service =>
val server = ServerBuilder()
.codec(Http().enableTracing(true))
.bindTo(new InetSocketAddress(InetAddress.getLoopbackAddress, 0))
.name("server")
.build(service)
val client = ClientBuilder()
.codec(Http().enableTracing(true))
.hosts(Seq(server.boundAddress))
.hostConnectionLimit(1)
.name("client")
.build()
new ServiceProxy(client) {
override def close(deadline: Time) =
Closable.all(client, server).close(deadline)
}
}
// use 1 less than the requeue limit so that we trigger failure accrual
// before we run into the requeue limit.
private val failureAccrualFailures = 19
def status(name: String)(connect: (HttpService, StatsReceiver, String) => (HttpService)): Unit = {
test(name + ": Status.busy propagates along the Stack") {
val st = new InMemoryStatsReceiver
val clientName = "http"
val failService = new HttpService {
def apply(req: Request): Future[Response] =
Future.exception(Failure.rejected("unhappy"))
}
val client = connect(failService, st, clientName)
intercept[Exception](Await.result(client(Request())))
assert(st.counters(Seq(clientName, "failure_accrual", "removals")) == 1)
assert(st.counters(Seq(clientName, "retries", "requeues")) == failureAccrualFailures - 1)
assert(st.counters(Seq(clientName, "failures", "restartable")) == failureAccrualFailures)
client.close()
}
}
status("ClientBuilder") {
(service, st, name) =>
val server = ServerBuilder()
.codec(Http())
.bindTo(new InetSocketAddress(InetAddress.getLoopbackAddress, 0))
.name("server")
.build(service)
val client = ClientBuilder()
.codec(Http())
.hosts(Seq(server.boundAddress))
.hostConnectionLimit(1)
.name(name)
.failureAccrualParams((failureAccrualFailures, 1.minute))
.reportTo(st)
.build()
new ServiceProxy(client) {
override def close(deadline: Time) =
Closable.all(client, server).close(deadline)
}
}
status("Client/Server") {
(service, st, name) =>
import com.twitter.finagle
val server = finagle.Http.serve(new InetSocketAddress(0), service)
val client = finagle.Http.client
.configured(Stats(st))
.configured(FailureAccrualFactory.Param(failureAccrualFailures, () => 1.minute))
.newService(Name.bound(server.boundAddress), name)
new ServiceProxy(client) {
override def close(deadline: Time) =
Closable.all(client, server).close(deadline)
}
}
test("codec should require a message size be less than 2Gb") {
intercept[IllegalArgumentException](Http().maxRequestSize(2.gigabytes))
intercept[IllegalArgumentException](Http(_maxResponseSize = 100.gigabytes))
intercept[IllegalArgumentException] {
com.twitter.finagle.Http.server.withMaxRequestSize(2049.megabytes)
}
intercept[IllegalArgumentException] {
com.twitter.finagle.Http.client.withMaxResponseSize(3000.megabytes)
}
}
}
|
a-manumohan/finagle
|
finagle-http/src/test/scala/com/twitter/finagle/http/EndToEndTest.scala
|
Scala
|
apache-2.0
| 18,107
|
package com.softwaremill.codebrag.dao.finders.views
import com.softwaremill.codebrag.domain.{FileDiffStats, DiffLine, CommitFileDiff}
case class CommitDetailsView(
commit: CommitView,
diff: List[FileDiffView],
supressedFiles: List[SupressedFileView],
reactions: ReactionsView,
lineReactions: Map[String, Map[String, ReactionsView]])
case class FileDiffView(filename: String, status: String, lines: List[DiffLineView], diffStats: FileDiffStatsView)
case class SupressedFileView(filename: String, status: String, diffStats: FileDiffStatsView)
case class FileDiffStatsView(added: Int, removed: Int)
case class DiffLineView(line: String, lineNumberOriginal: String, lineNumberChanged: String, lineType: String)
object DiffLineView {
val HeaderLine = ("...", "...")
def fromDiffLine(diffLine: DiffLine) = {
DiffLineView(diffLine.line, diffLine.lineNumberOriginal, diffLine.lineNumberChanged, diffLine.lineType)
}
def apply(line: String, lineNumberOriginal: Int, lineNumberChanged: Int, lineType: String) = {
def lineAdded(num: Int) = ("", num.toString)
def lineRemoved(num: Int) = (num.toString, "")
def lineNotChanged(orig: Int, changed: Int) = (orig.toString, changed.toString)
val lines = (lineNumberOriginal, lineNumberChanged) match {
case (-1, -1) => HeaderLine
case (-1, num) => lineAdded(num)
case (num, -1) => lineRemoved(num)
case (orig, changed) => lineNotChanged(orig, changed)
}
new DiffLineView(line, lines._1, lines._2, lineType)
}
}
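// Illustrative examples of the -1 sentinel mapping above (not in the original
// source):
//   DiffLineView("+x", -1,  7, "added")   // ("", "7"): line added
//   DiffLineView("-x",  5, -1, "removed") // ("5", ""): line removed
//   DiffLineView("@@", -1, -1, "header")  // ("...", "..."): hunk header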
object CommitDetailsView {
val MaxAcceptableDiffLinesCount = 600
def buildFrom(commit: CommitView, reactions: CommitReactionsView, diffs: List[CommitFileDiff]) = {
val (smallerDiffs, largerDiffs) = diffs.partition(_.lines.size < MaxAcceptableDiffLinesCount)
CommitDetailsView(
commit,
buildDiffView(smallerDiffs),
buildSupressedDiffView(largerDiffs),
reactions.entireCommitReactions,
reactions.inlineReactions)
}
private def buildDiffView(diffs: List[CommitFileDiff]) = {
diffs.map({fileDiff =>
val lineViews = fileDiff.lines.map(DiffLineView.fromDiffLine(_))
FileDiffView(fileDiff.filename, fileDiff.status, lineViews, FileDiffStatsView(fileDiff.diffStats))
})
}
private def buildSupressedDiffView(diffs: List[CommitFileDiff]) = {
diffs.map({fileDiff =>
SupressedFileView(fileDiff.filename, fileDiff.status, FileDiffStatsView(fileDiff.diffStats))
})
}
}
object FileDiffStatsView {
def apply(diffStats: FileDiffStats) = {
new FileDiffStatsView(diffStats.added, diffStats.removed)
}
}
|
softwaremill/codebrag
|
codebrag-dao/src/main/scala/com/softwaremill/codebrag/dao/finders/views/CommitDetailsView.scala
|
Scala
|
agpl-3.0
| 2,755
|
package im.actor.server.api.rpc.service.sequence
import scala.concurrent.duration._
import scala.concurrent.{ ExecutionContext, Future }
import scala.util.Success
import akka.actor.ActorSystem
import akka.stream.Materializer
import akka.util.Timeout
import slick.driver.PostgresDriver.api._
import im.actor.api.rpc._
import im.actor.api.rpc.misc.{ ResponseSeq, ResponseVoid }
import im.actor.api.rpc.peers.{ GroupOutPeer, UserOutPeer }
import im.actor.api.rpc.sequence.{ DifferenceUpdate, ResponseGetDifference, SequenceService }
import im.actor.server.db.DbExtension
import im.actor.server.group.{ GroupViewRegion, GroupExtension, GroupOffice }
import im.actor.server.models
import im.actor.server.push.{ SeqUpdatesExtension, SeqUpdatesManager }
import im.actor.server.session._
import im.actor.server.user.{ UserViewRegion, UserExtension, UserOffice }
final class SequenceServiceImpl(config: SequenceServiceConfig)(
implicit
sessionRegion: SessionRegion,
actorSystem: ActorSystem,
materializer: Materializer
) extends SequenceService {
import SeqUpdatesManager._
protected override implicit val ec: ExecutionContext = actorSystem.dispatcher
private implicit val timeout: Timeout = Timeout(30.seconds)
private implicit val db: Database = DbExtension(actorSystem).db
private implicit val seqUpdExt: SeqUpdatesExtension = SeqUpdatesExtension(actorSystem)
private implicit val userViewRegion: UserViewRegion = UserExtension(actorSystem).viewRegion
private implicit val groupViewRegion: GroupViewRegion = GroupExtension(actorSystem).viewRegion
private val maxDifferenceSize: Long = config.maxDifferenceSize
override def jhandleGetState(clientData: ClientData): Future[HandlerResult[ResponseSeq]] = {
val authorizedAction = requireAuth(clientData).map { implicit client ⇒
for {
seqstate ← getSeqState(client.authId)
} yield Ok(ResponseSeq(seqstate.seq, seqstate.state.toByteArray))
}
db.run(toDBIOAction(authorizedAction))
}
override def jhandleGetDifference(seq: Int, state: Array[Byte], clientData: ClientData): Future[HandlerResult[ResponseGetDifference]] = {
val authorizedAction = requireAuth(clientData).map { implicit client ⇒
for {
// FIXME: would new updates between getSeqState and getDifference break client state?
(updates, needMore) ← getDifference(client.authId, bytesToTimestamp(state), maxDifferenceSize)
(diffUpdates, userIds, groupIds) = extractDiff(updates)
(users, groups) ← getUsersGroups(userIds, groupIds)
} yield {
val (newSeq, newState) = updates.lastOption map { u ⇒ u.seq → timestampToBytes(u.timestamp) } getOrElse (seq → state)
Ok(ResponseGetDifference(
seq = newSeq,
state = newState,
updates = diffUpdates,
needMore = needMore,
users = users.toVector,
groups = groups.toVector
))
}
}
db.run(toDBIOAction(authorizedAction))
}
override def jhandleSubscribeToOnline(users: Vector[UserOutPeer], clientData: ClientData): Future[HandlerResult[ResponseVoid]] = {
val authorizedAction = requireAuth(clientData).map { client ⇒
DBIO.successful(Ok(ResponseVoid))
}
db.run(toDBIOAction(authorizedAction)) andThen {
case Success(_) ⇒
// FIXME: #security check access hashes
val userIds = users.map(_.userId).toSet
sessionRegion.ref ! SessionEnvelope(clientData.authId, clientData.sessionId)
.withSubscribeToOnline((SubscribeToOnline(userIds.toSeq)))
}
}
override def jhandleSubscribeFromOnline(users: Vector[UserOutPeer], clientData: ClientData): Future[HandlerResult[ResponseVoid]] = {
val authorizedAction = requireAuth(clientData).map { client ⇒
DBIO.successful(Ok(ResponseVoid))
}
db.run(toDBIOAction(authorizedAction)) andThen {
case Success(_) ⇒
// FIXME: #security check access hashes
val userIds = users.map(_.userId).toSet
sessionRegion.ref ! SessionEnvelope(clientData.authId, clientData.sessionId)
.withSubscribeFromOnline(SubscribeFromOnline(userIds.toSeq))
}
}
override def jhandleSubscribeToGroupOnline(groups: Vector[GroupOutPeer], clientData: ClientData): Future[HandlerResult[ResponseVoid]] = {
Future.successful(Ok(ResponseVoid)) andThen {
case _ ⇒
// FIXME: #security check access hashes
sessionRegion.ref ! SessionEnvelope(clientData.authId, clientData.sessionId)
.withSubscribeToGroupOnline(SubscribeToGroupOnline(groups.map(_.groupId)))
}
}
override def jhandleSubscribeFromGroupOnline(groups: Vector[GroupOutPeer], clientData: ClientData): Future[HandlerResult[ResponseVoid]] = {
Future.successful(Ok(ResponseVoid)) andThen {
case _ ⇒
// FIXME: #security check access hashes
sessionRegion.ref ! SessionEnvelope(clientData.authId, clientData.sessionId)
.withSubscribeFromGroupOnline(SubscribeFromGroupOnline(groups.map(_.groupId)))
}
}
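// Flattens the raw seq updates into wire-format DifferenceUpdates while
// collecting the user and group ids they reference.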
private def extractDiff(updates: Vector[models.sequence.SeqUpdate]): (Vector[DifferenceUpdate], Set[Int], Set[Int]) = {
updates.foldLeft[(Vector[DifferenceUpdate], Set[Int], Set[Int])](Vector.empty, Set.empty, Set.empty) {
case ((updates, userIds, groupIds), update) ⇒
(updates :+ DifferenceUpdate(update.header, update.serializedData),
userIds ++ update.userIds,
groupIds ++ update.groupIds)
}
}
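// Resolves api structs for the referenced groups, then for every referenced
// user, including each group's members, their inviters and the creator.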
private def getUsersGroups(userIds: Set[Int], groupIds: Set[Int])(implicit client: AuthorizedClientData) = {
DBIO.from(for {
groups ← Future.sequence(groupIds map (GroupOffice.getApiStruct(_, client.userId)))
// TODO: #perf optimize collection operations
allUserIds = userIds ++ groups.foldLeft(Set.empty[Int]) { (ids, g) ⇒ ids ++ g.members.map(m ⇒ Seq(m.userId, m.inviterUserId)).flatten + g.creatorUserId }
users ← Future.sequence(allUserIds map (UserOffice.getApiStruct(_, client.userId, client.authId)))
} yield (users, groups))
}
}
|
x303597316/actor-platform
|
actor-server/actor-rpc-api/src/main/scala/im/actor/server/api/rpc/service/sequence/SequenceServiceImpl.scala
|
Scala
|
mit
| 6,083
|
import java.io.{FileOutputStream, File}
import java.nio.charset.Charset
import sbt.{Info, Pure, Task}
import scala.xml.{NodeSeq, Elem}
import scala.collection.immutable.{Map ⇒ ▶:}
object Documentation {
def generate(version: String): Task[File] =
Task[File](Info[File](), Pure[File](() ⇒ generateDoc(version), inline = false))
private def generateDoc(version: String): File = {
println("Generating documentation")
val file: File = new File("docs/index.html")
val fos = new FileOutputStream(file, false)
fos.write(template(version, Map("A" → List(
Partial("calc", "(A ⇒ B) ⇒ B", "Applies a function",
"12.calc(_ + 3)" → "15", " 2.calc(_ * 3)" → "6"
),
Partial("|>", "(A ⇒ B) ⇒ B", "Applies a function",
"12 |> (_ + 3)" → "15", " 2 |> (_ * 3)" → "6"
),
Partial("calcIf", "Predicate[A] ⇒ (A ⇒ B) ⇒ Option[B]", "Applies a function if a predicate holds",
"1.calcIf(_ > 1)(_ * 10)" → "None", "2.calcIf(_ > 1)(_ * 10)" → "Some(20)"
),
Partial("calcUnless", "Predicate[A] ⇒ (A ⇒ B) ⇒ Option[B]", "Applies a function if a predicate does not hold",
"1.calcUnless(_ > 1)(_ * 10)" → "Some(10)", "2.calcUnless(_ > 1)(_ * 10)" → "None"
),
Partial("calcPF", "PartialFunction[A, B] ⇒ Option[B]", "Applies a partial function",
"1 calcPF { case 2 ⇒ 20 }" → "None", "2 calcPF { case 2 ⇒ 20 }" → "Some(20)"
),
Partial("transform", "PartialFunction[A, A] ⇒ A", "Applies a partial function",
"1 transform { case 2 ⇒ 20 }" → "1", "2 transform { case 2 ⇒ 20 }" → "20"
),
Partial("tapIf", "Predicate[A] ⇒ (A ⇒ Discarded)*) ⇒ A", "Performs some actions if a predicate holds",
"1.tapIf(_ > 1)(print)" → "Does nothing", "2.tapIf(_ > 1)(print)" → "prints \"2\""
),
Partial("tapUnless", "Predicate[A] ⇒ (A ⇒ Discarded)*) ⇒ A", "Performs some actions if a predicate does not hold",
"1.tapUnless(_ > 1)(print)" → "prints \"1\"", "2.tapUnless(_ > 1)(print)" → "Does nothing"
),
Partial("tapPF", "PartialFunction[A, Discarded] ⇒ A", "Performs an action",
"1 tapPF { case 2 ⇒ println(\"two !\") }" → "Does nothing",
"2 tapPF { case 2 ⇒ println(\"two !\") }" → "Prints \"two !\""
),
Partial("attempt", "(A ⇒ B) ⇒ Try[B]", "Applies a function that can fail",
"1.attempt(_ ⇒ sys.error(\"boom !\"))" → "Failure(Exception(\"boom !\"))", "1.attempt(_ * 2)" → "Success(2)"
),
Partial("partialMatch", "PartialFunction[A, B] ⇒ Option[B]", "Applies a partial function",
"1 partialMatch { case 2 ⇒ \"two\" }" → "None", "2 partialMatch { case 2 ⇒ \"two\" }" → "Some(\"two\")"
),
Partial("lpair", "(A ⇒ B) ⇒ (B, A)", "Applies a function & retain original value",
"1.lpair(_ * 2)" → "(2, 1)"
),
Partial("rpair", "(A ⇒ B) ⇒ (A, B)", "Retain original value & applies a function",
"1.rpair(_ * 2)" → "(1, 2)"
),
Partial("filterSelf", "Predicate[A] ⇒ Option[A]", "Retain original value if a predicate holds",
"1.filterSelf(_ > 1)" → "None", "2.filterSelf(_ > 1)" → "Some(2)"
),
Partial("ifSelf", "Predicate[A] ⇒ Option[A]", "Retain original value if a predicate holds",
"1.ifSelf(_ > 1)" → "None", "2.ifSelf(_ > 1)" → "Some(2)"
),
Partial("filterNotSelf", "Predicate[A] ⇒ Option[A]", "Retain original value if a predicate does not hold",
"1.filterNotSelf(_ > 1)" → "Some(1)", "2.filterNotSelf(_ > 1)" → "None"
),
Partial("unlessSelf", "Predicate[A] ⇒ Option[A]", "Retain original value if a predicate does not hold",
"1.unlessSelf(_ > 1)" → "Some(1)", "2.unlessSelf(_ > 1)" → "None"
),
Partial("isOneOf", "A* ⇒ Boolean", "Test if value is contained in a sequence",
"1.isOneOf(1, 2, 3)" → "true", "4.isOneOf(1, 2, 3)" → "false"
),
Partial("isNotOneOf", "A* ⇒ Boolean", "Test if value is missing from a sequence",
"1.isNotOneOf(1, 2, 3)" → "false", "4.isNotOneOf(1, 2, 3)" → "true"
),
Partial("containedIn", "Set[A] ⇒ Boolean", "Test if value is contained in a set",
"1.containedIn(Set(1, 2, 3()" → "true", "4.containedIn(Set(1, 2, 3))" → "false"
),
Partial("notContainedIn", "Set[A] ⇒ Boolean", "Test if value is missing from a set",
"1.notContainedIn(Set(1, 2, 3))" → "false", "4.notContainedIn(Set(1, 2, 3))" → "true"
),
Partial("withFinally", "(A ⇒ Unit) ⇒ (A ⇒ B) ⇒ B", "Applies a function and perform a cleanup action",
"1.withFinally(println)(_ * 2)" → "2 (and prints 1)"
),
Partial("tryFinally", "(A ⇒ B) ⇒ (A ⇒ Unit) ⇒ B", "Applies a function and perform a cleanup action",
"1.tryFinally(_ * 2)(println)" → "2 (and prints 1)"
),
Partial("cond", "Predicate[A] ⇒ ((A ⇒ B), (A ⇒ B)) ⇒ B", "Applies a choice of functions depending on whether a predicate holds",
"1.cond(_ > 1)(\"true: \" + _)(\"false: \" + _)" → "false: 1",
"2.cond(_ > 1)(\"true: \" + _)(\"false: \" + _)" → "true: 2"
),
Partial("addTo", "Growable[A] ⇒ A", "Adds value into a growable",
"1.addTo(ints)" → "ints += 1"
),
Partial("removeFrom", "Shrinkable[A] ⇒ A", "Removed value from a shrinkable",
"1.removeFrom(ints)" → "ints -= 1"
),
Partial("unfold", "(A ⇒ Option[(B, A)]) ⇒ Stream[B]", "Builds a stream by repeatedly applying a function",
"64.unfold(i ⇒ if (i > 1) Some((i, i/2)) else None)" → "Stream(64, 32, 16, 8, 4, 2)"
),
Partial("update", "(A ⇒ Discarded)* ⇒ A", "Performs some actions",
"1.tap(println)" → "Prints 1"
),
Partial("withSideEffect", "(A ⇒ Discarded)* ⇒ A", "Performs some actions",
"1.withSideEffect(println)" → "Prints 1"
),
Partial("tap", "(A ⇒ Discarded)* ⇒ A", "Performs some actions",
"1.update(println)" → "Prints 1"
),
Partial("bounded", "(A, A) ⇒ (implicit Numeric[A]) ⇒ A", "Constrain value to be between two others",
"0.bounded(1, 3)" → "1", "2.bounded(1, 3)" → "2", "5.bounded(1, 3)" → "3"
),
Partial("ensure", "( ⇒ E) ⇒ Predicate[A] ⇒ Validation[E, A]", "Validates predicate",
"1.ensure(\"<= 1\")(_ > 1)" → "Failure(\"<= 1\")", "2.ensure(\"<= 1\")(_ > 1)" → "Success(2)"
),
Partial("ensureNel", "( ⇒ E) ⇒ Predicate[A] ⇒ ValidationNel[E, A]", "",
"1.ensureNel(\"<= 1\")(_ > 1)" → "Failure(NonEmptyList(\"<= 1\"))", "2.ensureNel(\"<= 1\")(_ > 1)" → "Success(2)"
)
// passes, fails
), "(A, B)" → List(
Partial("addTo", "(Growable[A], Growable[B]) ⇒ (A, B)", "Adds values into growables",
"(1, \"foo\").addTo(ints, strings)" → "ints += 1; strings += \"foo\""
),
Partial("removeFrom", "(Shrinkable[A], Shrinkable[B]) ⇒ (A, B)", "Removes values from a shrinkables",
"(1, \"foo\").removeFrom(ints, strings)" → "ints -= 1; strings -= \"foo\""
),
Partial("tap", "(A ⇒ B ⇒ Discarded) ⇒ (A, B)", "Performs an action",
"(1, \"foo\").tap(i ⇒ s ⇒ println(s\"int: $i, string: $s\")" → "Prints \"int: 1, string: foo\""
),
Partial("calc", "((A, B) ⇒ C) ⇒ C", "Applies a function",
"(1, \"foo\").calc { case (i, s) ⇒ s\"int: $i, string: $s\" }" → "\"int: 1, string: foo\""
),
Partial("calcC", "(A ⇒ B ⇒ C) ⇒ C", "Applies a curried function",
"(1, \"foo\").calc(i ⇒ s ⇒ s\"int: $i, string: $s\")" → "\"int: 1, string: foo\""
),
Partial("to", "(implicit A ⇒ C, B ⇒ C) ⇒ (C, C)", "Converts all elements to a common type",
"(123, 456.0).to[String]" → "(\"123\", \"456.0\")"
),
Partial("tmap", "(A ⇒ C, B ⇒ D) ⇒ (C, D)", "Applies functions to each element",
"(1, \"foo\").tmap(_ * 2, _.reverse)" → "(2, \"oof\")"
)
), "Boolean" → List(
Partial("asInt", "Int", "Converts to int",
"false.asInt" → "0", "true.asInt" → "1"
),
Partial("either.or", "R ⇒ L ⇒ Either[L, R]", "Construct a Right if true, Left if false",
"false.either(1).or(\"foo\")" → "Left(\"foo\")", "true.either(1).or(\"foo\")" → "Right(1)"
),
Partial("option", "A ⇒ Option[A]", "Construct a Some if true, None if false",
"false.option(1)" → "None", "true.option(1)" → "Some(1)"
),
Partial("cond", "(⇒ A, ⇒ A) ⇒ A", "Returns first value if true, second if false",
"false.cond(123, 456)" → "456", "true.cond(123, 456)" → "123"
),
Partial("implies", "Boolean ⇒ Boolean", "Logical implication",
"false implies false" → "true", "false implies true" → "true",
"true implies false" → "false", "true implies true" → "true"
),
Partial("nor", "Boolean ⇒ Boolean", "Logical NOR",
"false nor false" → "true", "false nor true" → "false",
"true nor false" → "false", "true nor true" → "false"
),
Partial("nand", "Boolean ⇒ Boolean", "Logical NAND",
"false nand false" → "true", "false nand true" → "true",
"true nand false" → "true", "true nand true" → "false"
)
), "Option[A]" → List(
Partial("getOrThrow", "String ⇒ A", "Return value or throw exception with message",
"None.getOrThrow(\"No potatoes!\")" → "throws NoSuchElementException(\"No potatoes\")",
"Some(\"Potatoes\").getOrThrow(\"No potatoes\")" → "\"Potatoes\""
),
Partial("getOrThrow", "(⇒ Exception) ⇒ A", "Return value or throw exception",
"None.getOrThrow(new PotatoException(\"No potatoes!\"))" → "throws PotatoException(\"No potatoes\")",
"Some(\"Potatoes\").getOrThrow(new PotatoException(\"No potatoes\"))" → "\"Potatoes\""
),
Partial("invert", "A ⇒ Option[A]", "Convert Some to None and vice versa",
"None.invert(456)" → "Some(456)", "Some(123).invert(456)" → "None"
),
Partial("toTry", "Try[A]", "Convert Some to Success and None to Failure",
"None.toTry" → "Failure(new NoSuchElementException)",
"Some(123).toTry" → "Success(123)"
),
Partial("tap", "(⇒ Discarded, A ⇒ Discarded) ⇒ Option[A]", "Perform one action or another",
"None.tap(println(\"none\"), i ⇒ println(s\"some: $i\"))" → "prints \"none\"",
"Some(123).tap(println(\"none\"), i ⇒ println(s\"some: $i\"))" → "prints \"some: 123\""
),
Partial("tapNone", "(⇒ Discarded) ⇒ Option[A]", "Perform action if None",
"None.tapNone(println(\"none\"))" → "prints \"none\"",
"Some(123).tapNone(println(\"none\"))" → "Does nothing"
),
Partial("tapSome", "(A ⇒ Discarded) ⇒ Option[A]", "Perform action if Some",
"None.tapSome(i ⇒ println(s\"some: $i\"))" → "Does nothing",
"Some(123).tapSome(i ⇒ println(s\"some: $i\"))" → "prints \"some: 123\""
),
Partial("amass", "PartialFunction[A, Option[B]] ⇒ Option[B]", "Applies partial function if Some",
" None amass { case 123 ⇒ Some(456) }" → "None",
"Some(321) amass { case 123 ⇒ Some(456) }" → "None",
"Some(123) amass { case 123 ⇒ Some(456) }" → "Some(456)"
),
Partial("toSuccessNel", "(⇒ E) ⇒ ValidationNel[E, A]", "Converts Some to Success & None to FailureNel",
"Some(1).toSuccessNel(\"fail\")" → "Success(1)",
"None.toSuccessNel(\"fail\")" → "Failure(NonEmptyList(\"fail\"))"
)
), "Option[E]" → List(
Partial("toFailureNel", "(⇒ A) ⇒ ValidationNel[E, A]", "Converts Some to FailureNel & None to Success",
"Some(1).toFailureNel(\"succeed\")" → "Failure(NonEmptyList(1))",
"None.toFailureNel(\"succeed\")" → "Success(\"succeed\")"
)
), "Either[L, R]" → List(
Partial("tap", "(L ⇒ Discarded, R ⇒ Discarded) ⇒ Either[L, R]", "Perform one action or another",
" Left(1).tap(l ⇒ print(\"left: \" + l), r ⇒ print(\"right: \" + r))" → "Prints \"left: 1\"",
"Right(true).tap(l ⇒ print(\"left: \" + l), r ⇒ print(\"right: \" + r))" → "Prints \"right: true\""
),
Partial("tapLeft", "(L ⇒ Discarded) ⇒ Either[L, R]", "Perform action if Left",
" Left(1).tapLeft(l ⇒ print(\"left: \" + l))" → "Prints \"left: 1\"",
"Right(true).tapLeft(l ⇒ print(\"left: \" + l))" → "Does nothing"
),
Partial("tapRight", "(R ⇒ Discarded) ⇒ Either[L, R]", "Perform action if Right",
" Left(1).tap(r ⇒ print(\"right: \" + r))" → "Does nothing",
"Right(true).tap(r ⇒ print(\"right: \" + r))" → "Prints \"right: true\""
),
Partial("addTo", "(Growable[L], Growable[R]) ⇒ Either[L, R]", "Adds values into growables",
" Left(1).addTo(ints, strings)" → "ints += 1",
"Right(\"foo\").addTo(ints, strings)" → "strings += \"foo\""
),
Partial("removeFrom", "(Shrinkable[L], Shrinkable[R]) ⇒ Either[L, R]", "Removes values from a shrinkables",
" Left(1).removeFrom(ints, strings)" → "ints -= 1",
"Right(\"foo\").removeFrom(ints, strings)" → "strings -= \"foo\""
)
), "Try[A]" → List(
Partial("fold", "(Throwable ⇒ B, A ⇒ B) ⇒ B", "Convert to B",
"Failure(Throwable(\"boom\")).fold(_.getMessage, \"message: \" + _)" → "\"boom\"",
"Success(123).fold(_.getMessage, \"message: \" + _)" → "\"message: 123\""
),
Partial("getMessage", "Option[String]", "None if Success, Some if Failure",
"Failure(Throwable(\"boom\")).getMessage" → "Some(\"boom\")", "Success(123).getMessage" → "None"
),
Partial("toEither", "Either[Throwable, A]", "Convert to Either",
"Failure(Throwable(\"boom\")).toEither" → "Left(Throwable(\"boom\"))",
"Success(123).toEither" → "Right(123)"
),
Partial("toDisjunction", "Throwable \\/ A", "Convert to Disjunction",
"Failure(Throwable(\"boom\")).toDisjunction" → "-\\/(Throwable(\"boom\"))",
"Success(123).toDisjunction" → "\\/-(123)"
)
), "FilterMonadic[(K, V)]" → List(
Partial("toMultiMap", "MultiMap[List, K, V]", "Convert to MultiMap",
"List((1, 11), (2, 22), (1, 111)).toMultiMap[Set]" → "Map(1 → Set(11, 111), 2 → Set(22))"
)
), "List[A]" → List(
Partial("batchBy", "(A ⇒ B) → List[List[A]]", "Split into batches whenever result of function changes",
"List(1, 2, -1, -2, 3, -3, -4).batchBy(_ >= 0)" → "List(List(1, 2), List(-1, -2), List(3), List(-3, -4))"
),
Partial("countBy", "MultiMap[List, Int, A]", "Group by number of occurrences of function result",
"List(1, 2, -1, -2, 3, -3, -4).countBy(_ >= 0)" → "Map(3 → List(1, 2, 3), 4 → List(-1, -2, -3, -4))"
),
Partial("distinctBy", "(A ⇒ B) ⇒ List[A]", "Retain only the first occurrence of an element whose function result is repeated",
"List(\"foo\", \"food\", \"oof\", \"foods\").distinctBy(_.length))" → "List(\"food\", \"foods\")"
),
Partial("duplicatesBy", "(A ⇒ B) ⇒ List[A]", "Retain only those elements whose function result is repeated",
"List(\"foo\", \"food\", \"oof\", \"foods\", \"doof\").duplicatesBy(_.length))" → "List(\"foo\", \"food\", \"oof\", \"doof\")"
),
Partial("duplicates", "List[A]", "Retain only those elements which are repeated",
"List(1, 2, 3, 1, 4, 3).duplicates" → "LIst(1, 3)"
),
Partial("const", "B ⇒ List[A]", "Replace all elements with a constant value",
"List(1, 2, 3).const(\"foo\")" → "List(\"foo\", \"foo\", \"foo\")"
),
Partial("lpair", "A => B => List[(B, A)]", "Pair up each element with the result of f",
"List(1, 2, 3).lpair(_ * 2)" → "List((2,1), (4,2), (6,3)))"
),
Partial("rpair", "A => B => List[(A, B)]", "Pair up each element with the result of f",
"List(1, 2, 3).lpair(_ * 2)" → "List((1,2), (2,4), (3,6)))"
),
Partial("countWithSize", "Predicate[A] ⇒ Option[(Int, Int)]", "Count number of times a predicate passes, along with the list size",
"Nil.countWithSize(_ >= 0)" → "None",
"List(1, 2, -2, 3, -4, 5).countWithSize(_ >= 0)" → "Some((4, 6))"
),
Partial("sizeGT", "Int ⇒ Boolean", "Determine if the list size exceeds some value",
"List(1, 2, 3).sizeGT(2)" → "true", "List(1, 2, 3).sizeGT(3)" → "false"
),
Partial("fraction", "Predicate[A] ⇒ Double", "Determine what fraction of the list passes some predicate",
"Nil.fraction(_ >= 0)" → "Double.NaN",
"List(1, 2, -2, 3, -4, 5).fracton(_ >= 0)" → "Some(0.667)"
),
Partial("emptyTo", "List[A] ⇒ List[A]", "Replace empty list with supplied value",
"List(1, 2).emptyTo(List(3, 4))" → "List(1, 2)",
"Nil.emptyTo(List(3, 4))" → "List(3, 4)"
),
Partial("headTail", "(A, List[A])", "The head & tail if the list is non empty, throws otherwise",
"List(1, 2, 3).headTail" → "(1, List(2, 3))",
"Nil.headTail" → "throws \"headTail of empty list\""
),
Partial("headTailOption", "Option[(A, List[A])]", "The head & tail if the list is non empty, None otherwise",
"List(1, 2, 3).headTailOPtion" → "Some((1, List(2, 3)))", "Nil.headTailOption" → "None"
),
Partial("initOption", "Option[List[A]]", "The init if the list is non empty, None otherwise",
"List(1, 2, 3).initOption" → "Some(List(1, 2))", "Nil.initOption" → "None"
),
Partial("tailOption", "Option[List[A]]", "The tail if the list is non empty, None otherwise",
"List(1, 2, 3).tailOption" → "Some(List(2, 3))", "Nil.tailOption" → "None"
),
Partial("initLast", "(List[A], A)", "The init & last if the list is non empty, throws otherwise",
"List(1, 2, 3).initLast" → "(List(1, 2), 3)",
"Nil.initLast" → "throws \"initLast of empty list\""
),
Partial("initLastOption", "Option[(List[A], A)]", "The init & last if the list is non empty, None otherwise",
"List(1, 2, 3).initLastOption" → "Some((List(1, 2), 3))",
"Nil.initLastOption" → "None"
),
Partial("prefixPadTo", "(Int, A) ⇒ List[A]",
"Prefix lists smaller than provide size with repetitions of the provided element",
"List(1, 2).prefixPadTo(1, 99)" → "List(1, 2)",
"List(1, 2).prefixPadTo(4, 99)" → "List(99, 99, 1, 2)"
),
Partial("sharedPrefix", "List[A] ⇒ (implicit A ⇒ A ⇒ Boolean) ⇒ (List[A], List[A], List[A])",
"Split list into parts shared with the beginning of another, along with the remainder of each",
"List(1, 2, 3, 4).sharedPrefix(List(1, 2, 4, 3))" → "(List(1, 2), List(3, 4), List(4, 3))"
),
Partial("amass", "PartialFunction[A, List[B]] ⇒ List[B]", "filter andThen flatMap",
"List(1, 2, 3, 4).amass { case i if i % 2 == 0 ⇒ List(i, -i) }" → "List(2, -2, 4, -4)"
),
Partial("calcIfNonEmpty", "(List[A] ⇒ B) ⇒ Option[B]", "Calculate result if non empty, None otherwise",
"Nil.calcIfNonEmpty(_.length)" → "None", "List(1, 2, 3).calcifNonEmpty(_.length)" → "Some(3)"
),
Partial("mapIfNonEmpty", "(A ⇒ B) ⇒ Option[List[B]]", "Map if non empty, None otherwise",
"Nil.mapIfNonEmpty(_ * 10)" → "None", "List(1, 2, 3).mapIfNonEmpty(_ * 10)" → "Some(List(10, 20, 30))"
),
Partial("onlyDisjunction", "List[A] \\/ A", "\\/- if list has 1 element, -\\/ otherwise",
"Nil.onlyDisjunction" → "-\\/(Nil)", "List(1).onlyDisjunction" → "\\/-(1)", "List(1, 2).onlyDisjunction" → "-\\/(List(1, 2))"
),
Partial("uncons", "(⇒ B, List[A] ⇒ B) ⇒ B", "Convert to B depending on whether the list is empty or not",
" Nil.uncons(\"[]\", l ⇒ s\"[${l.length} elements]\"" → "\"[]\"",
"List(1, 2, 3).uncons(\"[]\", l ⇒ s\"[${l.length} elements]\"" → "\"[3 elements]\""
),
Partial("unconsC", "(⇒ B, A ⇒ List[A] ⇒ B) ⇒ B", "Convert to B with head & tail if non empty, default otherwise",
" Nil.unconsC(\"[]\", h ⇒ t ⇒ s\"[$h .. ${t.length} more]\"" → "\"[]\"",
"List(1, 2, 3).unconsC(\"[]\", h ⇒ t ⇒ s\"[$h .. ${t.length} more]\"" → "\"[1, 2 more]\""
),
Partial("unsnocC", "(⇒ B, List[A] ⇒ A ⇒ B) ⇒ B", "Convert to B with init & last if non empty, default otherwise",
" Nil.unsnocC(\"[]\", i ⇒ l ⇒ s\"[${i.length} previous, $l]\"" → "\"[]\"",
"List(1, 2, 3).unsnocC(\"[]\", i ⇒ l ⇒ s\"[${i.length} previous, $l]\"" → "\"[2 previous, 3]\""
),
Partial("tapNonEmpty", "List[A] ⇒ Discarded", "Perform an action if the list is non empty",
" Nil.tapNonEmpty(l ⇒ print(\"non-empty: \" + l))" → "Does nothing",
"List(1).tapNonEmpty(l ⇒ print(\"non-empty: \" + l))" → "Prints \"non-empty: List(1)\""
),
Partial("tapEmpty", "(⇒ Discarded) ⇒ List[A]", "Perform an action if the list is empty",
" Nil.tapEmpty(print(\"empty\"))" → "Prints \"empty\"",
"List(1).tapEmpty(print(\"empty\"))" → "Does nothing"
),
Partial("tap", "(⇒ Discarded, List[A] ⇒ Discarded) ⇒ List[A]",
"Perform an action depending on whether the list is empty or not",
" Nil.tap(print(\"empty\"), l ⇒ print(\"non-empty: \" + l))" → "Prints \"empty\"",
"List(1).tap(print(\"empty\"), l ⇒ print(\"non-empty: \" + l))" → "Prints \"non-empty: List(1)\""
),
Partial("zipWith", "List[B] ⇒ ((A, B) ⇒ C) ⇒ List[C]", "Combine with another list element-wise",
"List(1, 2).zipWith(List('z', 'x')) { case (i, c) ⇒ s\"i: $i, c: $c\"}" → "List(\"i: 1, c: z\", \"i: 2, c: x\")",
" List(1).zipWith(List('z', 'x')) { case (i, c) ⇒ s\"i: $i, c: $c\"}" → "List(\"i: 1, c: z\")",
"List(1, 2).zipWith(List('z')) { case (i, c) ⇒ s\"i: $i, c: $c\"}" → "List(\"i: 1, c: z\")"
),
Partial("zipToMap", "List[V] ⇒ Map[A, V]", "Combine with another list element-wise to form a Map",
"List(1, 2).zipToMap(List('z', 'x'))" → "Map(1 → 'z', 2 → 'x')",
" List(1).zipToMap(List('z', 'x'))" → "Map(1 → 'z')",
"List(1, 2).zipToMap(List('z')) " → "Map(1 → 'z')"
),
Partial("zipExact", "List[B] ⇒ (List[(A, B)], Option[Either[List[A], List[B]]])",
"Losslessly combine with another list element-wise",
"List(1, 2).zipExact(List('z', 'x'))" → "(List((1, 'z'), (2, 'x')), None)",
" List(1).zipExact(List('z', 'x'))" → "(List((1, 'z')), Some(Right('x')))",
"List(1, 2).zipExact(List('z')) " → "(List((1, 'z')), Some(Left(1)))"
),
Partial("zipExactWith", "List[B] ⇒ ((A, B) ⇒ C) ⇒ (List[C], Option[Either[List[A], List[B]]])",
"Losslessly combine with another list element-wise",
"List(1, 2).zipExactWith(List(10.0, 20.0))(_ + _)" → "(List(11.0, 22.0), None)",
" List(1).zipExactWith(List(10.0, 20.0))(_ + _)" → "(List(11.0), Some(Right(20.0))",
"List(1, 2).zipExactWith(List(10.0 ))(_ + _)" → "(List(11.0), Some(Left(2)))"
),
Partial("sortPromoting", "A* ⇒ (implicit Ordering[A]) ⇒ List[A]",
"When sorting promote specified elements, otherwise use existing ordering",
"List(\"red\", \"green\", \"blue\", \"cyan\").sortPromoting(\"cyan\", \"green\")" → "List(\"cyan\", \"green\", \"blue\", \"red\")"
),
Partial("sortDemoting", "A* ⇒ (implicit Ordering[A]) ⇒ List[A]",
"When sorting demote specified elements, otherwise use existing ordering",
"List(\"blue\", \"red\", \"cyan\", \"green\").sortDemoting(\"green\", \"blue\")" → "List(\"cyan\", \"red\", \"green\", \"blue\")"
),
Partial("toNel", "Option[NonEmptyList[A]]", "Convert to non-empty list, if possible",
"Nil.toNel" → "None", "List(1, 2).toNel" → "Some(NonEmptyList(1, 2))"
)
), "List[(A, B)" → List(
Partial("mapC", "A => B => C => List[C]", "Curried map method",
"List((1, 2), (2, 3)).mapC(a ⇒ b ⇒ a * b)" → "List(2, 6)"
)
), "List[List[A]]" → List(
Partial("cartesianProduct", "List[List[A]]", "Every permutation of elements choosen from each list",
"List(List(1,2,3), List(8,9)).cartesianProduct" → "List(List(1,8), List(1,9), List(2,8), List(2,9), List(3,8), List(3,9))"
)
), "Set[A]" → List(
Partial("notContains", "A ⇒ Boolean", "Tests if some element is absent in this set",
"Set(1, 2).notContains(1)" → "false", "Set(1, 2).notContains(3)" → "true"
),
Partial("mutable", "mutable.Set[A]", "Convert to mutable set"),
Partial("toMutable", "mutable.Set[A]", "Convert to mutable set"),
Partial("powerSet", "Set[Set[A]]", "All possble subsets of the set",
"Set(1, 2, 3).powerSet" → "Set(1, 2, 3), Set(1, 2), Set(1, 3), Set(2, 3), Set(1), Set(2), Set(3), Set())"
)
), "Stream[A]" → List(
Partial("tailOption", "Option[Stream[A]]", "The tail of the stream if it is not empty, None otherwise",
"Stream.empty[Int].tailOption" → "None"
),
Partial("uncons", "(⇒ B, Stream[A] ⇒ B) ⇒ B", "Convert to B depending on whether the stream is empty or not",
" Stream.empty.uncons(\"[]\", s ⇒ s\"[${s.length} elements]\"" → "\"[]\"",
"Stream(1, 2, 3).uncons(\"[]\", s ⇒ s\"[${s.length} elements]\"" → "\"[3 elements]\""
),
Partial("unconsC", "(⇒ B, A ⇒ (⇒ Stream[A]) ⇒ B) ⇒ B", "Convert to B with head & tail if non empty, default otherwise",
" Stream.empty.unconsC(\"[]\", h ⇒ t ⇒ s\"[$h .. ${t.length} more]\"" → "\"[]\"",
"Stream(1, 2, 3).unconsC(\"[]\", h ⇒ t ⇒ s\"[$h .. ${t.length} more]\"" → "\"[1, 2 more]\""
),
Partial("lazyScanLeft", "B ⇒ ((B, A) ⇒ B) ⇒ Stream[B]", "scanLeft that works on infinite or blocking streams",
"(1 #:: {synchronized(wait(0)); Stream(2)}).lazyScanLeft(0)(_ + _).take(1).toList" → "List(1)"
),
Partial("reverseInits", "Stream[Stream[A]]", "inits that works on infinite or blocking streams",
"Stream.iterate(0)(_ + 1).reverseInits.take(3)" → "Stream(Stream.empty, Stream(0), Stream(0, 1))"
)
), "Array[A]" → List(
Partial("copyTo", "(Int, Array[A], Int, Int) ⇒ Array[A]", "Copy subset of an array to another",
"Array(1, 2, 3, 4, 5).copyTo(2, Array(0, 0, 0, 0, 0), 1, 3)" → "Array(0, 3, 4, 5, 0)"
)
), "Array[Byte]" → List(
Partial("toHex", "String", "Convert to hex string",
"Array[Byte](126, 87, -85, 30).toHex" → "7e57ab1e"
),
Partial("toHex", "Int ⇒ String", "Convert to hex & prefix up to a certain length",
"Array[Byte](126, 87, -85, 30).toHex(10)" → "007e57ab1e"
),
Partial("copyUpToN", "(Long, InputStream, OutputStream) ⇒ Int",
"Use array as buffer to copy up to n bytes from an InputStream to an OutputStream"
),
Partial("readUpToN", "(Long, InputStream) ⇒ Int", "Read up to n bytes from an InputStream")
), "GTL[A]" → List(
Partial("collectHistogram", "PartialFunction[A, B] ⇒ Map[B, Int]",
"Calculate how many occurences of a property of each element",
"""|List("foo", "food", "bar", "oo").collectAttributeCounts {
| case word if word.size > 2 ⇒ word.size
|}""" → """|// 2 words of length 3, 1 of length 4
|Map(3 → 2, 4 → 1)"""
),
Partial("optHistogram", "(A ⇒ Option[B]) ⇒ Map[B, Int]",
"Calculate how many occurences of a property of each element",
"""|List("foo", "food", "bar", "oo").optAttributeCounts(
| word ⇒ if (word.size <= 2) None else Some(word.size)
|)""" → """|// 2 words of length 3, 1 of length 4
|Map(3 → 2, 4 → 1)"""
),
Partial("histogram", "(A ⇒ B) ⇒ Map[B, Int]",
"Calculate how many occurences of a property of each element",
"""|List("foo", "food", "bar", "oo").attributeCounts(
| word ⇒ word.size
|)""" →
"""|// 1 word length 1, 2 of length 3, 1 of length 4
|Map(2 → 1, 3 → 2, 4 → 1)"""
),
Partial("onlyOption", "Option[A]", "Head if gtl has 1 element, None otherwise",
"Nil.onlyOption" → "None", "Set(1).onlyOption" → "Some(1)", "Vector(1, 2).onlyOption" → "None"
),
Partial("onlyOrThrow", "(CC[A] ⇒ Exception) ⇒ A", "Head if list gtl 1 element, throws otherwise",
"Nil.onlyOrThrow(l ⇒ new Throwable(\"Not singleton: \"l.length)" → "throws \"Not singleton: 0\"",
"Set(1).onlyOrThrow(s ⇒ new Throwable(\"Not singleton: \"s.length)" → "1",
"Vector(1, 2).onlyOrThrow(v ⇒ new Throwable(\"Not singleton: \"v.length)" → "throws \"Not singleton: 2\""
),
Partial("onlyEither", "Either[CC[A], A]", "Right if gtl has 1 element, Left otherwise",
"Nil.onlyEither" → "Left(Nil)", "Set(1).onlyEither" → "Right(1)", "Vector(1, 2).onlyEither" → "Left(Vector(1, 2))"
),
Partial("asMap.withEntries", "(A ⇒ (K, V)) ⇒ Map[K, V]", "Build a map by specifying entries",
"List(2, 4).asMap.withEntries(i ⇒ (i/2, i*2))" → "Map(1 → 4, 2 → 8)"
),
Partial("asMap.withEntries", "(A ⇒ K, A ⇒ V) ⇒ Map[K, V]", "Build a map by specifying keys & values",
"List(2, 4).asMap.withEntries(i ⇒ i/2, i ⇒ i*2)" → "Map(1 → 4, 2 → 8)"
),
Partial("asMap.withEntries", "(A ⇒ K1, A ⇒ K2, A ⇒ V) ⇒ Map[K1, Map[K2, V]]",
"Build a nested map by specifying keys & values",
"List(2, 4).asMap.withEntries(i ⇒ i/2, i ⇒ i, i ⇒ i*2)" → "Map(1 → Map(2 → 4), 2 → Map(4 → 8))"
),
Partial("asMap.withEntries", "(A ⇒ K1, A ⇒ K2, A ⇒ K3, A ⇒ V) ⇒ Map[K1, Map[K2, Map[K3, V]]]",
"Build a nested map by specifying keys & values",
"""List(2, 4).asMap.withEntries(
| i ⇒ i/2, i ⇒ i, i ⇒ i*10, i ⇒ i*2
|)""" → """|Map(
| 1 → Map(2 → Map(20 → 4)),
| 2 → Map(4 → Map(40 → 8))
|)"""
),
Partial("asMap.withSomeEntries", "(A ⇒ Option[(K, V)]) ⇒ Map[K, V]",
"Build a map by optionally specifying entries",
"List(2, 4).asMap.withSomeEntries(i ⇒ if (i == 2) Some((i/2, i*2)) else None)" → "Map(1, 4)"
),
Partial("asMap.withSomeEntries", "(A ⇒ Option[K], A ⇒ Option[V]) ⇒ Map[K, V]",
"Build a map by optionally specifying keys & values",
"""List(3, 5, 15).asMap.withSomeEntries(
| i ⇒ if (i%3 == 0) Some(i/3) else None,
| i ⇒ if (i%5 == 0) Some(i/5) else None
|)""" → "Map(5 → 3)"
),
Partial("asMap.withPFEntries", "PartialFunction[A, (K, V)] ⇒ Map[K, V]",
"Build a map by partially specifying entries",
"List(2, 4).asMap.withPFEntries { case 2 ⇒ (22, 222) }" → "Map(22 → 222)"
),
Partial("asMap.withPFEntries", "(PartialFunction[A, K], PartialFunction[A, V]) ⇒ Map[K, V]",
"Build a map by partially specifying keys & values",
"""List(1, 2, 3).asMap.withPFEntries({
| case 1 ⇒ 11
| case 3 ⇒ 33
|}, {
| case 2 ⇒ 222
| case 3 ⇒ 333
|})""" → "Map(33 → 333)"
),
Partial("asMultiMap.withEntries", "(A ⇒ (K, V)) ⇒ MultiMap[GTL, [K, V]",
"Build a multi map by specifying entries",
"List(1, 2, 3).asMultiMap[List].withEntries(i ⇒ (i%2, i))" → "Map(0 → List(2), 1 → List(1, 3))",
" List(1, 2, 3).asMultiMap[Set].withEntries(i ⇒ (i%2, i))" → "Map(0 → Set(2), 1 → Set(1, 3))"
),
Partial("asMultiMap.withEntries", "(A ⇒ K, A ⇒ V) ⇒ MultiMap[GTL, K, V]",
"Build a multi map by specifying keys & values",
"List(1, 2, 3).asMultiMap[List].withEntries(i ⇒ i%2, i ⇒ i))" → "Map(0 → List(2), 1 → List(1, 3))",
" List(1, 2, 3).asMultiMap[Set].withEntries(i ⇒ i%2, i ⇒ i))" → "Map(0 → Set(2), 1 → Set(1, 3))"
),
Partial("asMultiMap.withEntries", "(A ⇒ K1, A ⇒ K2, A ⇒ V) ⇒ Map[K1, MultiMap[GTL, K2, V]]",
"Builds a nested multi map by specifying keys & values",
"""|List(1, 2, 3, 4).asMultiMap[List].withEntries(
| i ⇒ i%2, i ⇒ i/2, i ⇒ i
|)""" →
"""|Map(
| 0 -> Map(1 -> List(2), 2 -> List(4)),
| 1 -> Map(0 -> List(1), 1 -> List(3))
|)"""
),
Partial("asMultiMap.withEntries", "(A ⇒ K1, A ⇒ K2, A ⇒ K3, A ⇒ V) ⇒ Map[K1, Map[K2, MultiMap[GTL, K3, V]]]",
"Builds a nested multi map by specifying keys & values...",
"""|List(1, 2, 3, 4).asMultiMap[List].withEntries(
| i ⇒ i%2, i ⇒ i/2, i ⇒ i*10, i ⇒ i
|)""" →
"""|Map(
| 0 -> Map(2 -> Map(40 -> List(4)), 1 -> Map(20 -> List(2))),
| 1 -> Map(1 -> Map(30 -> List(3)), 0 -> Map(10 -> List(1)))
|)""",
"""|List(1, 2, 3, 4).asMultiMap[Set].withEntries(
| i ⇒ i%2, i ⇒ i/2, i ⇒ i*10, i ⇒ i
|)""" →
"""|Map(
| 0 -> Map(2 -> Map(40 -> Set(4)), 1 -> Map(20 -> Set(2))),
| 1 -> Map(1 -> Map(30 -> Set(3)), 0 -> Map(10 -> Set(1)))
|)"""
),
Partial("asMultiMap.withSomeEntries", "(A ⇒ Option[(K, V)]) ⇒ MultiMap[GTL, K, V]", ""),
Partial("asMultiMap.withSomeEntries", "(A ⇒ Option[K], A ⇒ Option[V]) ⇒ MultiMap[GTL, K, V]", ""),
Partial("asMultiMap.withPFEntries", "PartialFunction[A, (K, V)] ⇒ MultiMap[GTL, K, V]", ""),
Partial("asMultiMap.withPFEntries", "(PartialFunction[A, K], PartialFunction[A, V]) ⇒ MultiMap[GTL, K, V]", ""),
Partial("ungroupBy", "(A ⇒ B) ⇒ GTL[GTL[A]]", ""),
Partial("partitionByPF", "PartialFunction[A, B] ⇒ (GTL[A], GTL[B])", ""),
Partial("all", "A ⇒ Boolean", ""),
Partial("none", "A ⇒ Boolean", ""),
Partial("seqFold", "B ⇒ ((B, A) ⇒ Option[B]) ⇒ Option[B]", ""),
Partial("seqMap", "(A ⇒ Option[B]) ⇒ (implicit CanBuildFrom[Nothing, B, To]) ⇒ Option[To]", ""),
Partial("apoFold", "B ⇒ ((B, A) ⇒ Either[C, B]) ⇒ Either[C, B]", "")
), "GTL[V]" → List(
Partial("asMap.withKeys", "(V ⇒ K) ⇒ Map[K, V]", ""),
Partial("asMap.withSomeKeys", "(V ⇒ Option[K]) ⇒ Map[K, V]", ""),
Partial("asMap.withManyKeys", "(V ⇒ List[K]) ⇒ Map[K, V]", ""),
Partial("asMap.withUniqueKeys", "(V ⇒ K) ⇒ Option[Map[K, V]]", ""),
Partial("asMap.withPFKeys", "PartialFunction[V, K] ⇒ Map[K, V]", ""),
Partial("asMultiMap.withKeys", "(V ⇒ K) ⇒ MultiMap[GTL, K, V]", ""),
Partial("asMultiMap.withSomeKeys", "(V ⇒ Option[K]) ⇒ MultiMap[GTL, K, V]", ""),
Partial("asMultiMap.withManyKeys", "(V ⇒ List[K]) ⇒ MultiMap[GTL, K, V]", ""),
Partial("asMultiMap.withUniqueKeys", "(V ⇒ K) ⇒ Option[MultiMap[GTL, K, V]]", ""),
Partial("asMultiMap.withPFKeys", "PartialFunction[V, K] ⇒ MultiMap[GTL, K, V]", "")
), "GTL[K]" → List(
Partial("asMap.withValues", "(K ⇒ V) ⇒ Map[K, V]", ""),
Partial("asMap.withSomeValues", "(K ⇒ Option[V]) ⇒ Map[K, V]", ""),
Partial("asMap.withPFValues", "PartialFunction[K, V] ⇒ Map[K, V]", ""),
Partial("asMap.withConstValue", "V ⇒ Map[K, V]", ""),
Partial("asMultiMap.withValues", "(K ⇒ V) ⇒ MultiMap[GTL, K, V]", ""),
Partial("asMultiMap.withSomeValues", "(K ⇒ Option[V]) ⇒ MultiMap[GTL, K, V]", ""),
Partial("asMultiMap.withPFValues", "PartialFunction[K, V] ⇒ MultiMap[GTL, K, V]", ""),
Partial("asMultiMap.withConstValue", "V ⇒ MultiMap[GTL, K, V]", "")
// as[F]
), "GTL[(K, V)]" → List(
Partial("toMultiMap", "MultiMap[F, K, V]", "")
), "GTL[Either[L, R]]" → List(
Partial("partitionEithers", "(GTL[L], GTL[R])", "Partition eithers into lefts & rights",
"List(Right(3.0), Left(1), Right(4.0), Left(1)).partitionEithers" → "(List(1, 1), List(3.0, 4.0))"
)
), "GTL[L \\/ R]" → List(
Partial("partitionDisjunctions", "(GTL[L], GTL[R])", "Partition disjunctions into lefts & rights",
"List(\\/-(3.0), -\\/(1), \\/-(4.0), -\\/(1)).partitionDisjunctions" → "(List(1, 1), List(3.0, 4.0))"
)
), "Map[K, V]" → List(
Partial("getOrThrow", "(K, String) ⇒ V", "Retrieve value associated with key or throw exception with message",
"Map(1 → 2).getOrThrow(1, \"Alas, no 1\")" → "2",
"Map(2 → 3).getOrThrow(1, \"Alas, no 1\")" → "throw new IllegalArgumentException(\"Alas no 1\")"
),
Partial("getOrThrow", "(K, ⇒ Exception) ⇒ V", "Retrieve value associated with key or throw exception",
"Map(1 → 2).getOrThrow(1, new Exception(\"No such element\"))" → "2",
"Map(2 → 3).getOrThrow(1, new Exception(\"No such element\"))" → "throw new Exception(\"No such element\")"
),
Partial("mapKeysEagerly", "(K ⇒ C) ⇒ Map[C, V]", "Eagerly applies a function to each key",
"Map(1 → 2, 2 → 3).mapKeysEagerly(_ * 10)" → "Map(10 → 2, 20 → 3)"
),
Partial("mapValuesEagerly", "(V ⇒ W) ⇒ Map[K, W]", "Eagerly applies a function to each value",
"Map(1 → 2, 2 → 3).mapValuesEagerly(_ * 10)" → "Map(1 → 20, 2 → 30)"
),
Partial("mapEntries", "(K ⇒ V ⇒ (C, W)) ⇒ Map[C, W]", "Applies a function to each entry, result must be a Tuple2",
"Map(1 → 2, 2 → 3).mapEntries(k ⇒ v ⇒ (k * 10, v + 0.5))" → "Map(10 → 2.5, 20 → 3.5)"
),
Partial("seqMapKeys", "(K ⇒ Option[C]) ⇒ Option[Map[C, V]]", "Eagerly applies a function to each key, produces a Map if it never equals None",
"Map(2 → 4, 4 → 6).seqMapKeys(k ⇒ (k % 2 == 0).option(k / 2)))" → "Some(Map(1 → 4, 2 → 6))",
" Map(1 → 3).seqMapKeys(k ⇒ (k % 2 == 0).option(k / 2)))" → "None"
),
Partial("seqMapValues", "(V ⇒ Option[W]) ⇒ Option[Map[K, W]]", "Eagerly applies a function to each value, produces a Map if it never equals None",
"Map(2 → 4, 4 → 6).seqMapValues(v ⇒ (v % 2 == 0).option(v / 2)))" → "Some(Map(2 → 2, 4 → 3))",
" Map(1 → 3).seqMapValues(v ⇒ (v % 2 == 0).option(v / 2)))" → "None"
),
Partial("seqMapEntries", "(K ⇒ V ⇒ Option[(C, W)]) ⇒ Option[Map[C, W]]", "Applies a function to each entry, produces a Map if it never equals None",
"Map(2 → 4, 4 → 6).seqMapEntries(k ⇒ v => (k % 2 == 0).option((k / 2) -> (v / 2)))" → "Some(Map(1 → 2, 2 → 3))",
" Map(1 → 3).seqMapEntries(k ⇒ v => (k % 2 == 0).option((k / 2) -> (v / 2)))" → "None"
),
Partial("collectKeys", "PartialFunction[K, C] ⇒ Map[C, V]", "Applies a partial function to each key",
"Map(1 → 2, 2 → 3).collectKeys { case 2 → 20 }" → "Map(20 → 3)" // TODO [28 Oct 2015] Remove this or updateKeys
),
Partial("updateKeys", "PartialFunction[K, C] ⇒ Map[C, V]", "Applies a partial function to each key",
"Map(1 → 2, 2 → 3).updateKeys { case 2 → 20 }" → "Map(20 → 3)"
),
Partial("updateKeys", "(K ⇒ Option[C]) ⇒ Map[C, V]", "Applies a function to each key, retains only Somes",
"Map(1 → 2, 2 → 3).updateKeys(k ⇒ if (k == 2) Some(20) else None)" → "Map(20 → 3)"
),
Partial("updateValue", "(K, V ⇒ Option[V]) ⇒ Map[K, V]", "Update or remove a single entry",
"Map(1 → 2, 2 → 3).updateValue(1, _ ⇒ None)" → "Map(2 → 3)"
),
Partial("collectValues", "PartialFunction[V, W] ⇒ Map[K, W]", "Applies a partial function to each value",
"Map(1 → 2, 2 → 3).collectValues { case 2 ⇒ 20 }" → "Map(1 → 20)" // TODO [28 Oct 2015] Remove this or updateValues
),
Partial("updateValues", "PartialFunction[V, W] ⇒ Map[K, W]", "Applies a partial function to each value",
"Map(1 → 2, 2 → 3).updateValues { case 2 ⇒ 20 }" → "Map(1 → 20)"
),
Partial("updateValues", "(V ⇒ Option[W]) ⇒ Map[K, W]", "Applies a function to each value, retains only Somes",
"Map(1 → 2, 2 → 3).updateValues(v ⇒ if (v == 2) Some(20) else None)" → "Map(1 → 20)"
),
Partial("keyExists", "Predicate[K] ⇒ Boolean", "Determine if any key matches the predicate",
"Map(1 → 2).keyExists(_ > 1)" → "false", "Map(2 → 3).keyExists(_ > 1)" → "true"
),
Partial("valueExists", "Predicate[V] ⇒ Boolean", "Determine if any value matches the predicate",
"Map(1 → 2).valueExists(_ > 2)" → "false", "Map(2 → 3).valueExists(_ > 2)" → "true"
),
Partial("filterValues", "Predicate[V] ⇒ Map[K, V]", "Retain those values that match a predicate",
"Map(1 → 2, 2 → 3).filterValues(_ > 2)" → "Map(2 → 3)"
),
Partial("filterValuesNot", "Predicate[V] ⇒ Map[K, V]", "Discard those value that match a predicate",
"Map(1 → 2, 2 → 3).filterValuesNot(_ > 2)" → "Map(1 → 2)"
),
Partial("filterKeysNot", "Predicate[K] ⇒ Map[K, V]", "Discard those keys that match a predicate",
"Map(1 → 2, 2 → 3).filterKeysNot(_ > 1)" → "Map(1 → 2)"
),
Partial("findValue", "Predicate[V] ⇒ Option[V]", "Search for a value that matches a predicate",
"Map(1 → 2).findValue(_ > 2)" → "None", "Map(2 → 3).findValue(_ > 2)" → "Some(3)"
),
Partial("findKey", "Predicate[K] ⇒ Option[K]", "Search for a key that matches a predicate",
"Map(1 → 2).findValue(_ > 1)" → "None", "Map(2 → 3).findValue(_ > 1)" → "Some(2)"
),
Partial("sorted", "implicit Ordering[K] ⇒ SortedMap[K, V]", "Sort map by keys",
"Map(1 → 2, 2 → 3).sorted(Ordering.Int.reverse)" → "SortedMap(2 → 3, 1 → 2)"
),
Partial("reverse", "(Set[K] ⇒ K) ⇒ Map[V, K]", "Reverse the keys & values, using the function to choose between keys",
"Map(1 → 2, 2 → 3, 3 → 3).reverse(_.min)" → "Map(2 → 1, 3 → 2)"
),
Partial("reverseToMultiMap", "MultiMap[Set, V, K]", "Reverse the keys & values, retaining all data",
"Map(1 → 2, 2 → 3, 3 → 3).reverseToMultiMap" → "Map(2 → Set(1), 3 → Set(2, 3))"
),
Partial("containsAll", "Option[K] ⇒ Boolean", "Determine if the map contains all the provided keys",
"Map(1 → 2).containsAll(None)" → "true", "Map(1 → 2).containsAll(Some(10))" → "false",
"Map(1 → 2).containsAll(Some(1))" → "true"
),
Partial("containsAll", "GTL[K] ⇒ Boolean", "Determine if the map contains all the provided keys",
"Map(1 → 2).containsAll(Nil)" → "true", "Map(1 → 2).containsAll(List(10))" → "false",
"Map(1 → 2).containsAll(List(1))" → "true"
),
Partial("containsAny", "Option[K] ⇒ Boolean", "Determine if the map contains any of the provided keys",
"Map(1 → 2).containsAny(None)" → "false", "Map(1 → 2).containsAny(Some(10))" → "false",
"Map(1 → 2).containsAny(Some(1))" → "true"
),
Partial("containsAny", "GTL[K] ⇒ Boolean", "Determine if the map contains any of the provided keys",
"Map(1 → 2).containsAny(Nil)" → "true", "Map(1 → 2).containsAny(List(10))" → "false",
"Map(1 → 2).containsAny(List(1))" → "true"
),
Partial("containsEntry", "(K, V) ⇒ Boolean", "Determine if the map contains the provided entry",
"Map(1 → 2).containsEntry(1, 2)" → "true", "Map(1 → 2).containsEntry(1, 1)" → "false"
),
Partial("containsEntry", "((K, V)) ⇒ Boolean", "Determine if the map contains the provided entry",
"Map(1 → 2).containsEntry((1, 2))" → "true", "Map(1 → 2).containsEntry((1, 1))" → "false"
),
Partial("get", "Option[K] ⇒ Option[V]", "Lookup the provided key if Some",
"Map(1 → 2).get(None)" → "None", "Map(1 → 2).get(Some(2))" → "None", "Map(1 → 2).get(Some(1))" → "Some(2)"
),
Partial("emptyTo", "Map[K, V] ⇒ Map[K, V]", "Converts empty map to provided value, leaves other maps alone",
"Map(1 → 2).emptyTo(Map(2 → 3))" → "Map(1 → 2)", "Map().emptyTo(Map(2 → 3))" → "Map(2 → 3)"
),
Partial("uncons", "(A, Map[K, V] ⇒ A) ⇒ A", "Convert to A depending on whether the map is empty or not",
" Map().uncons(\"empty\", m ⇒ \"non-empty: \" + m.size)" → "\"empty\"",
"Map(1 → 2).uncons(\"empty\", m ⇒ \"non-empty: \" + m.size)" → "\"non-empty: 1\""
),
Partial("entryFor.minKey", "implicit Ordering[K] ⇒ Option[(K, V)]", "The entry with the minimum key",
"Map(1 → 2, 2 → 3).entryFor.minKey" → "Some((1, 2))",
"Map(1 → 2, 2 → 3).entryFor.minKey(Ordering.Int.reverse)" → "Some((2, 3))"
),
Partial("entryFor.maxKey", "implicit Ordering[K] ⇒ Option[(K, V)]", "The entry with the maximum key",
"Map(1 → 2, 2 → 3).entryFor.maxKey" → "Some((2, 3))",
"Map(1 → 2, 2 → 3).entryFor.maxKey(Ordering.Int.reverse)" → "Some((1, 2))"
),
Partial("entryFor.minValue", "implicit Ordering[V] ⇒ Option[(K, V)]", "The entry with the minimum value",
"Map(1 → 2, 2 → 3).entryFor.minValue" → "Some((1, 2))",
"Map(1 → 2, 2 → 3).entryFor.minValue(Ordering.Int.reverse)" → "Some((2, 3))"
),
Partial("entryFor.maxValue", "implicit Ordering[V] ⇒ Option[(K, V)]", "The entry with the maximum value",
"Map(1 → 2, 2 → 3).entryFor.maxValue" → "Some((2, 3))",
"Map(1 → 2, 2 → 3).entryFor.maxValue(Ordering.Int.reverse)" → "Some((1, 2))"
),
Partial("entryFor.matchingKey", "Predicate[K] ⇒ Option[(K, V)]", "The entry whose key matches the predicate",
"Map(1 → 2, 2 → 3).entryFor.matchingKey(_ > 2)" → "Some((2, 3))"
),
Partial("entryFor.matchingValue", "Predicate[V] ⇒ Option[(K, V)]", "The entry whose value matches the predicate",
"Map(1 → 2, 2 → 3).entryFor.matchingValue(_ > 3)" → "Some((2, 3))"
),
Partial("keyFor.minValue", "implicit Ordering[V] ⇒ Option[K]", "The key whose entry has the minimum value",
"Map(1 → 2, 2 → 3).keyFor.minValue" → "Some(1)",
"Map(1 → 2, 2 → 3).keyFor.minValue(Ordering.Int.reverse)" → "Some(2)"
),
Partial("keyFor.maxValue", "implicit Ordering[V] ⇒ Option[K]", "The key whose entry has the maximum value",
"Map(1 → 2, 2 → 3).keyFor.maxValue" → "Some(2)",
"Map(1 → 2, 2 → 3).keyFor.maxValue(Ordering.Int.reverse)" → "Some(1)"
),
Partial("valueFor.minKey", "implicit Ordering[K] ⇒ Option[V]", "The value whose entry has the minimum key",
"Map(1 → 2, 2 → 3).valueFor.minKey" → "Some(2)",
"Map(1 → 2, 2 → 3).valueFor.minKey(Ordering.Int.reverse)" → "Some(3)"
),
Partial("valueFor.maxKey", "implicit Ordering[K] ⇒ Option[V]", "The value whose entry has the maximum key",
"Map(1 → 2, 2 → 3).valueFor.maxKey" → "Some(3)",
"Map(1 → 2, 2 → 3).valueFor.maxKey(Ordering.Int.reverse)" → "Some(2)"
),
Partial("andThenM", "Map[V, W] ⇒ Map[K, W]", "Join the values of the map with the keys of another",
"Map(1 → 2, 2 → 3).andThenM(Map(2 → \"two\"))" → "Map(1 → \"two\")"
),
Partial("composeM", "Map[C, K] ⇒ Map[C, V]", "Join the keys of the map with the values of another",
"Map(1 → 2, 2 → 3).composeM(Map(\"two\" → 2))" → "Map(\"two\" → 3)"
),
Partial("partitionKeysBy", "PartialFunction[K, C] ⇒ (Map[K, V], Map[C, V])",
"Applies a partial function to each key but retain the subset of the input where the function isn't defined",
"Map(1 → 2, 2 → 3).partitionKeysBy { case 2 ⇒ \"two\" }" → "(Map(1 → 2), Map(\"two\" → 3))"
),
Partial("partitionValuesBy", "PartialFunction[V, W] ⇒ (Map[K, V], Map[K, W])",
"Applies a partial function to each value but retain the subset of the input where the function isn't defined",
"Map(1 → 2, 2 → 3).partitionValuesBy { case 2 ⇒ \"two\" }" → "(Map(2 → 3), Map(1 → \"two\"))"
),
Partial("partitionEntriesBy", "PartialFunction[(K, V), (C, W)] ⇒ (Map[K, V], Map[C, W])",
"Applies a partial function to each entry but retain the subset of the input where the function isn't defined",
"Map(1 → 2, 2 → 3).partitionValuesBy { case (1, 2) ⇒ (\"one\", \"two\") }" → "(Map(2 → 3), Map(\"one\" → \"two\"))"
),
Partial("calcIfNonEmpty", "(Map[K, V] ⇒ B) ⇒ Option[B]", "Calculate result if non empty, None otherwise",
"Map(1 → 2, 2 → 3).calcIfNonEmpty(_.length)" → "Some(2)", "Map().calcIfNonEmpty(_.length)" → "None"
),
Partial("mutable", "mutable.Map[K, V]", "Convert to mutable Map"),
Partial("toMutable", "mutable.Map[K, V]", "Convert to mutable Map")
), "MultiMap[F, K, V]" → List(
Partial("select", "F[V] ⇒ W ⇒ Map[K, W]", "Choose a slice of the multimap, equivalent to Map[K, F[V]].mapValuesEagerly",
"Map(1 → List(1, 11), 2 → List(2, 22)).select(_.last)" → "Map(1 → 11, 2 → 22)"
),
Partial("merge", "MultiMap[F, K, V] ⇒ MultiMap[F, K, V]", "Combines two multimaps by concatenating their values",
"Map(1 → List(1), 3 → List(3)).merge(Map(2 → List(2), 3 → List(33))" →
"Map(1 → List(1), 2 → List(2), 3 → List(3, 33))"
),
Partial("append", "(K, F[V]) ⇒ MultiMap[F, K, V]", "Appends a single entry, concatenating if already present",
"Map(1 → List(11)).append(2, List(22))" → "Map(1 → List(11), 2 → List(22))",
"Map(1 → List(11)).append(1, List(111))" → "Map(1 → List(11, 111))"
),
Partial("multiMap.head", "Map[K, V]", "Selects the head element of each value",
"Map(1 → List(11), 2 → List(22, 222)).multiMap.head" → "Map(1 → 11, 2 → 22)"
),
Partial("multiMap.tail", "MultiMap[F, K, V]", "Selects the tail of each value, only retains non-empty values",
"Map(1 → List(1, 11), 2 → List(22), 3 → Nil).multiMap.tail" → "Map(1 → List(11))"
),
Partial("multiMap.reverse", "MultiMap[F, V, K]", "Reverses the multimap",
"Map('a' → List(1, 2), 'b' → List(2, 3)).reverse" → "Map(1 → List('a'), 2 → List('a', 'b'), 3 → List('b'))"
),
Partial("multiMap.mapEntries", "(K ⇒ F[V] ⇒ (C, F[W])) ⇒ MultiMap[F, C, W]", "Map over each entry to produce another multimap",
"Map(2 → List(2, 22)).mapEntries(key ⇒ values ⇒ (key * 10, List(values.sum)))" → "Map(20 → List(24))"
),
Partial("multiMap.mapEntriesU", "(K ⇒ F[V] ⇒ (C, G[W])) ⇒ MultiMap[G, C, W]", "Map over each entry to produce another type of multimap",
"Map(2 → List(2, 22)).mapEntriesU(key ⇒ values ⇒ (key * 10, Set(values.sum)))" → "Map(20 → Set(24))"
),
Partial("multiMap.values", "F[V]", "Concatenate values",
"Map(1 → List(1), 2 → List(2, 22)).multiMap.values" → "List(1, 2, 22)"
),
Partial("multiMap.sliding", "Int ⇒ F[MultiMap[F, K, V]]", "Produce a sequence of multimaps that slide across the original",
"Map(1 → List(11, 12, 13), 2 → List(21, 22, 23)).multiMap.sliding(2)" ->
"List(Map(1 → List(11, 12), 2 → List(21, 22)), Map(1 → List(12, 13), 2 → List(22, 23)))"
),
Partial("pop", "K ⇒ MultiMap[F, K, V]", "Removes head value associated with key, discarding empty values entirely",
"Map(1 → List(2, 3), 2 → List(3)).pop(1)" → "Map(1 → List(3), 2 → List(3))",
"Map(1 → List(2, 3), 2 → List(3)).pop(2)" → "Map(1 → List(2, 3))",
"Map(1 → List(2, 3), 2 → List(3)).pop(3)" → "Map(1 → List(2, 3), 2 → List(3)))"
),
Partial("sequence", "F[Map[K, V]]", "Convert a multimap to a sequence of maps",
"Map(1 → List(10, 11), 2 → List(20, 21)).sequence" → "List(Map(1 → 10, 2 → 20), Map(1 → 11, 2 → 21))"
),
Partial("headTailOption", "Option[(Map[K, V], MultiMap[F, K, V])]",
"The head & tail if the multimap is non empty, None otherwise",
"Map(1 → List(10, 11), 2 → List(20)).headTailOption" → "Some(Map(1 → 10, 2 → 20), Map(1 → List(11)))",
"Map(1 → Nil, 2 → List(20)).headTailOption" → "Some(Map(2 → 20), Map())",
"Map(1 → (Nil: List[Int])).headTailOption" → "None",
"MultiMap.empty[List, Int, Int]" → "None"
),
Partial("flatMapValues", "(V ⇒ F[W]) ⇒ MultiMap[F, K, W]", "Expand each value to a collection of values",
"Map(0 → List(1, 2)).flatMapValues(v ⇒ List(v, -v))" → "Map(0 → List(1, -1, 2, -2))"
),
Partial("flatMapValuesU", "(V ⇒ G[W]) ⇒ MultiMap[G, K, W]", "Expand each value to a different type of collection of values",
"Map(0 → List(1, 2)).flatMapValuesU(v ⇒ Set(v, -v))" → "Map(0 → Set(1, -1, 2, -2))"
),
Partial("getOrEmpty", "K ⇒ F[V]", "Retrive the values associated with a key or an empty collection",
"Map(1 → List(2)).getOrEmpty(1)" → "List(2)", "Map(1 → List(2)).getOrEmpty(2)" → "Nil"
),
Partial("onlyOption", "Option[Map[K, V]]", "Returns Some map if the multimap only contains empty or singleton containers, None otherwise",
"Map(1 → Nil, 2 → List(22)).onlyOption" → "Some(Map(2 → 22))", "Map(1 → List(1, 11)).onlyOption" → "None"
)
), "NestedMap[K1, K2, V]" → List(
Partial("append", "(K1, K2, V) ⇒ NestedMap[K1, K2, V]", "Append a value",
"Map(1 → Map(2 → 3)).append(1, 2, 4)" → "Map(1 → Map(2 → 4))",
"Map(1 → Map(2 → 3)).append(1, 3, 4)" → "Map(1 → Map(2 → 3, 3 → 4))",
"Map(1 → Map(2 → 3)).append(2, 3, 4)" → "Map(1 → Map(2 → 3), 2 → Map(3 → 4))"
),
Partial("+", "((K1, K2, V)) ⇒ NestedMap[K1, K2, V]", "Append a value",
"Map(1 → Map(2 → 3)) + ((1, 2, 4))" → "Map(1 → Map(2 → 4))",
"Map(1 → Map(2 → 3)) + ((1, 3, 4))" → "Map(1 → Map(2 → 3, 3 → 4))",
"Map(1 → Map(2 → 3)) + ((2, 3, 4))" → "Map(1 → Map(2 → 3), 2 → Map(3 → 4))"
),
Partial("flipNesting", "NestedMap[K2, K1, V]", "Flips the inner & outer keys",
"Map(1 → Map(2 → 3), 6 → Map(3 → 4, 4 → 5)).flipNesting" → "Map(2 → Map(1 → 3), 3 → Map(6 → 4), 4 → Map(6 → 5))"
),
Partial("getOrEmpty", "K1 ⇒ Map[K2, V]", "Retrive the values associated with a key or an empty collection",
"Map(1 → Map(2 → 3)).getOrEmpty(1)" → "Map(2 → 3)", "Map(1 → Map(2 → 3)).getOrEmpty(2)" → "Map()"
),
Partial("nestedMap.mapValuesEagerly", "(V ⇒ W) ⇒ NestedMap[K1, K2, W]", "Eagerly applies a function to each value",
"Map(1 → Map(2 → 3)).nestedMap.mapValueEagerly(_ * 10)" → "Map(1 → Map(2 → 30))"
),
Partial("nestedMap.mapKeysEagerly", "(K2 ⇒ C) ⇒ NestedMap[K1, C, V]", "Eagerly applies a function to each inner key",
"Map(1 → Map(2 → 3)).nestedMap.mapKeysEagerly(_ * 10)" → "Map(1 → Map(20 → 3))"
)
), "Predicate[A]" → List(
Partial("ifSome", "Predicate[Option[A]]", "Lift predicate to apply to Option",
"((i: Int) ⇒ i == 2).ifSome.apply(None)" → "false",
"((i: Int) ⇒ i == 2).ifSome.apply(Some(1))" → "false",
"((i: Int) ⇒ i == 2).ifSome.apply(Some(2))" → "true"
),
Partial("and", "Predicate[A] ⇒ Predicate[A]", "A predicate that's true only when both input predicates are true",
"((i: Int) ⇒ i % 3 == 0).and((i: Int) ⇒ i % 5 == 0).apply(1)" → "false",
"((i: Int) ⇒ i % 3 == 0).and((i: Int) ⇒ i % 5 == 0).apply(3)" → "false",
"((i: Int) ⇒ i % 3 == 0).and((i: Int) ⇒ i % 5 == 0).apply(5)" → "false",
"((i: Int) ⇒ i % 3 == 0).and((i: Int) ⇒ i % 5 == 0).apply(15)" → "true"
),
Partial("or", "Predicate[A] ⇒ Predicate[A]", "A predicate that's true when either input predicates are",
"((i: Int) ⇒ i % 3 == 0).or((i: Int) ⇒ i % 5 == 0).apply(1)" → "false",
"((i: Int) ⇒ i % 3 == 0).or((i: Int) ⇒ i % 5 == 0).apply(3)" → "true",
"((i: Int) ⇒ i % 3 == 0).or((i: Int) ⇒ i % 5 == 0).apply(5)" → "true",
"((i: Int) ⇒ i % 3 == 0).or((i: Int) ⇒ i % 5 == 0).apply(15)" → "true"
),
Partial("exists", "Predicate[List[A]]", "A predicate that's true when there exists an element that passes",
"((i: Int) ⇒ i == 2).exists.apply(Nil)" → "false",
"((i: Int) ⇒ i == 2).exists.apply(List(1))" → "false",
"((i: Int) ⇒ i == 2).exists.apply(List(2))" → "true",
"((i: Int) ⇒ i == 2).exists.apply(List(1, 2))" → "true"
),
Partial("guard", "(A ⇒ B) ⇒ PartialFunction[A, B]", "Limit a function by a predicate",
"((i: Int) ⇒ i == 2).guard(_ * 10).lift(1)" → "None",
"((i: Int) ⇒ i == 2).guard(_ * 10).lift(2)" → "Some(20)"
),
Partial("cond", "(⇒ B, ⇒ B) ⇒ (A ⇒ B)", "")
), "(A ⇒ B)" → List(
Partial("attempt", "(A ⇒ Try[B])", ""),
Partial("guardWith", "Predicate[A] ⇒ PartialFunction[A, B]", "")
), "(A ⇒ B ⇒ C)" → List(
Partial("tupled", "((A, B)) ⇒ C", "Convert curried function to a tupled one")
), "PartialFunction[A, B]" → List(
), "(A ⇒ Option[B])" → List(
Partial("unlift", "PartialFunction[A, B]", "Create partial function")
), "PartialFunction[A, B]" → List(
Partial("toLeft", "A ⇒ Either[B, A]", ""),
Partial("toRight", "A ⇒ Either[A, B]", ""),
Partial("either", "A ⇒ Either[A, B]", ""),
Partial("partition", "CC[A] ⇒ (CC[A], CC[B])", ""),
Partial("isUndefinedAt", "A ⇒ Boolean", ""),
Partial("first", "PartialFunction[(A, C), (B, C)]", ""),
Partial("second", "PartialFunction[(C, A), (C, B)]", ""),
Partial("unify", "A ⇒ A", ""),
Partial("map", "(B ⇒ C): PartialFunction[A, C]", ""),
Partial("contramap", "(C ⇒ A)", "PartialFunction[C, B]"),
Partial("***", "PartialFunction[C, D] ⇒ PartialFunction[(A, C), (B, D)]", ""),
Partial("&&&", "PartialFunction[A, C] ⇒ PartialFunction[A, (B, C)]", ""),
Partial("|||", "PartialFunction[C, B] ⇒ PartialFunction[Either[A, C], B]", ""),
Partial("\\/", "PartialFunction[C, B] ⇒ PartialFunction[A \\/ C, B]", "")
), "Ordering[A]" → List(
Partial("promote", "A* ⇒ Ordering[A]", "Promote occurences of the provided values when sorting",
"List(10, 7, 5, 2, 7).sorted(Ordering.Int.promote(5, 7))" → "List(5, 7, 7, 2, 10)"
),
Partial("demote", "A* ⇒ Ordering[A]", "Demote occurences of the provided values when sorting",
"List(10, 7, 5, 2, 7).sorted(Ordering.Int.demote(5, 7))" → "List(2, 10, 5, 7, 7)"
)
), "Numeric[A]" → List(
Partial("xmap", "(A ⇒ B, B ⇒ A) ⇒ Numeric[B]", "Converts to operate on a new type",
"implicitly[Numeric[Int]].xmap[String](_.toString, Integer.parseInt).plus(\"2\", \"3\")" → "\"5\""
)
), "String" → List(
Partial("emptyTo", "String ⇒ String", "Convert empty strings to another",
""""".emptyTo("replacement")""" → """"replacement"""",
""""non-empty".emptyTo("replacement")""" → """"non-empty""""
),
Partial("prefixPadTo", "(Int, Char) ⇒ String", ""),
Partial("suffixWith", "String ⇒ String", "Add suffix to string if absent",
""""file".suffixWith(".txt")""" → """"file.txt"""",
""""file.txt".suffixWith(".txt")""" → """"file.txt""""
),
Partial("prefixWith", "String ⇒ String", "Add prefix to string if absent",
""""file".prefixWith("dir/")""" → """"dir/file"""",
""""dir/file".prefixWith("dir/")""" → """"dir/file""""
),
Partial("affixWith", "String => String ⇒ String", "Add prefix & suffix to string if absent",
""""file".affixWith("dir/", ".txt")""" → """"dir/file.txt"""",
""""file.txt".affixWith("dir/", ".txt")""" → """"dir/file.txt"""",
""""dir/file".affixWith("dir/", ".txt")""" → """"dir/file.txt"""",
""""dir/file.txt".affixWith("dir/", ".txt")""" → """"dir/file.txt""""
),
Partial("stripAffixes", "String => String ⇒ String", "Remove prefix & suffix from string",
""""file".stripAffixes("dir/", ".txt")""" → """"file"""",
""""file.txt".stripAffixes("dir/", ".txt")""" → """"file"""",
""""dir/file".stripAffixes("dir/", ".txt")""" → """"file"""",
""""dir/file.txt".stripAffixes("dir/", ".txt")""" → """"file""""
),
Partial("sharedPrefix", "String ⇒ (String, String, String)",
"Split string into parts shared with the beginning of another, along with the remainder of each",
"\"1234\".sharedPrefix(\"1243\")" -> "(\"12\", \"34\", \"43\")"
),
Partial("md5", "String", ""),
Partial("toByteArray", "Array[Byte]", ""),
Partial("toByteArray", "CharSet ⇒ Array[Byte]", "")
), "Random" → List(
Partial("between", "(Int, Int) ⇒ Int", "Generate a random number between two others",
"Random.between(0, 10)" → "9"
)
), "ThreadFactory" → List(
Partial("naming", "(Int ⇒ String) ⇒ ThreadFactory", "Adapt threads created by factory to be named",
"threadFactory.naming(i ⇒ s\"Thread: $i\").newThread(...).getName" → "Thread: 0"
)
), "Callable[A]" → List(
Partial("attempt", "Callable[Try[A]]", "Wrap result in Try")
), "Date" → List(
Partial("addDay", "Int ⇒ Date", "Adds n days to date",
"Date().addDay(2)" → ""
)
), "InputStream" → List(
Partial("closeAfter", "(InputStream ⇒ A) ⇒ A", "Close after applying function",
"new ByteArrayInputStream(Array(123)).closeAfter(_.read())" → "123"
),
Partial("attemptClose", "Try[Unit]", "Close & catch exceptions"),
Partial("closeIf", "Boolean ⇒ InputStream", "Close if boolean is true"),
Partial("closeUnless", "Boolean ⇒ InputStream", "Close if boolean is false"),
Partial("drain", "(OutputStream, Boolean, Boolean) ⇒ InputStream",
"Drain all bytes into an OutputStream, optionally closing afterwards"
),
Partial(">>", "OutputStream ⇒ InputStream", "Drain all bytes into an OutputStream"),
Partial("buffered", "BufferedInputStream", "Wrap in a BufferedInputStream"),
Partial("gunzip", "GZIPInputStream", "Wrap in a GZIPInputStream"),
Partial("readN", "(OutputStream, Long) ⇒ InputStream", "Read n bytes into an OutputStream & throw if unable to"),
Partial("readUpToN", "(OutputStream, Long) ⇒ Long", "Read up to n bytes into an OutputStream"),
Partial("toByteArray", "Array[Byte]", "Convert to an array of bytes")
), "OutputStream" → List(
Partial("closeAfter", "(OutputStream ⇒ A) ⇒ A", "Close after applying a function"),
Partial("attemptClose", "Try[Unit]", "Close & catch exceptions"),
Partial("closeIf", "Boolean ⇒ OutputStream", "Close if boolean is true"),
Partial("closeUnless", "Boolean ⇒ OutputStream", "Close if boolean is false"),
Partial("drain", "(InputStream, Boolean, Boolean) ⇒ OutputStream",
"Drain all bytes from an InputStream, optionally closing each afterwards"
),
Partial("<<", "InputStream ⇒ OutputStream", "Drain all bytes from an InputStream"),
Partial("buffered", "BufferedOutputStream", "Wrap in a BufferedOutputStream"),
Partial("gzip", "GZIPOutputStream", "Wrap in a GZIPOutputStream"),
Partial("writeN", "(InputStream, Long) ⇒ OutputStream", "Write n bytes into an InputStream & throw if unable to"),
Partial("writeUpToN", "(InputStream, Long) ⇒ Long", "Write up to n bytes into an InputStream")
), "File" → List(
Partial("isJar", "Boolean", "Check if file has '.jar' extension"),
Partial("isClass", "Boolean", "Check if file has '.class' extension"),
Partial("isJava", "Boolean", "Check if file has '.java' extension"),
Partial("isScala", "Boolean", "Check if file has '.scala' extension"),
Partial("missing", "Boolean", "Check if file is missing"),
Partial("isChildOf", "File ⇒ Boolean", "Check if file is a child of another",
"new File(\"/etc/password\").isChildOf(\"/etc\")" → "true",
"new File(\"/etc/password\").isChildOf(\"/\")" → "false"
),
Partial("isParentOf", "File ⇒ Boolean", "Check if file is parent of another",
"new File(\"/etc\").isParentOf(\"/etc/password\")" → "true",
" new File(\"/\").isParentOf(\"/etc/password\")" → "false"
),
Partial("isAncestorOf", "File ⇒ Boolean", ""),
Partial("isContainedIn", "File ⇒ Boolean", ""),
Partial("contains", "File ⇒ Boolean", ""),
Partial("hasExtension", "String ⇒ Boolean", ""),
Partial("/", "String ⇒ File", ""),
Partial("named", "String ⇒ File", ""),
Partial("canon", "File", ""),
Partial("relativeTo", "File ⇒ File", ""),
Partial("writeLines", "(List[String], Boolean) ⇒ File", ""),
Partial("writeBytes", "(Array[Byte], Boolean) ⇒ File", ""),
Partial("write", "(String, Boolean) ⇒ File", ""),
Partial("deleteRecursively", "File", ""),
Partial("deleteRecursivelyOnExit", "File", ""),
Partial("changeToDirectory", "File", ""),
Partial("create", "Boolean ⇒ File", ""),
Partial("touch", "File", ""),
Partial("children", "Stream[File]", ""),
Partial("childDirs", "Stream[File]", ""),
Partial("tree", "Stream[File]", ""),
Partial("ancestors", "Stream[File]", ""),
Partial("path", "List[String]", ""),
Partial("outputStream", "Boolean ⇒ FileOutputStream", ""),
Partial("source", "implicit Codec ⇒ BufferedSource", ""),
Partial("readString", "implicit Codec ⇒ String", ""),
Partial("readBytes", "Array[Byte]", ""),
Partial("readLines", "implicit Codec ⇒ List[String]", ""),
Partial("className", "File ⇒ String", ""),
Partial("md5", "String", "")
), "mutable.Builder[A, B]" → List(
Partial("+++=", "TraversableOnce[TraversableOnce[A]] ⇒ mutable.Builder[A, B]",
"Adds the elements of several traversables",
"(new ListBuffer[Int] +++= List(List(1, 2), List(3), List(4))).result()" → "List(1, 2, 3, 4)"
),
Partial("on", "(C ⇒ A) ⇒ mutable.Builder[C, B]", "Change the type of element consumed by the builder",
"(new ListBuffer[Double].on[Int](_.toDouble)) += 3).result()" → "List(3.0)"
),
Partial("reset", "B", "Return result & clear"),
Partial("run", "(mutable.Builder[A, B] ⇒ Discarded)* ⇒ B", "Update the builder & produce a result",
"new ListBufer[Int].run(_ += 1, _ ++= List(2, 3)" → "List(1, 2, 3)"
)
), "mutable.Map[K, V]" → List(
Partial("retainKeys", "Predicate[K] ⇒ mutable.Map[K, V]", "Retain entries whose keys match the predicate"),
Partial("retainValues", "Predicate[V] ⇒ mutable.Map[K, V]", "Retain entries whose values match the predicate")
), "CodecJson[A]" → List(
Partial("beforeDecode", "(Json ⇒ Json) ⇒ CodecJson[A]", "Pre-processes json before decoding"),
Partial("afterDecode", "(A ⇒ A) ⇒ CodecJson[A]", "Converts after decoding"),
Partial("beforeEncode", "(A ⇒ A) ⇒ CodecJson[A]", "Converts before encoding"),
Partial("afterEncode", "(Json ⇒ Json) ⇒ CodecJson[A]", "Post-processes json after encoding"),
Partial("andThen", "(Json ⇒ Json) ⇒ CodecJson[A]", "Post-processes json after encoding"),
Partial("compose", "(Json ⇒ Json) ⇒ CodecJson[A]", "Pre-processes json before decoding")
), "CodecJson[Map[K, V]]" → List(
Partial("xmapKeys", "(K ⇒ C) ⇒ (C ⇒ K) ⇒ CodecJson[Map[C, V]]", "Converts map keys before encoding & after decoding"),
Partial("xmapValues", "(V ⇒ W) ⇒ (W ⇒ V) ⇒ CodecJson[Map[K, W]]", "Converts map values before encoding & after decoding")
), "DecodeJson[A]" → List(
Partial("beforeDecode", "(Json ⇒ Json) ⇒ DecodeJson[A]", "Pre-processes json before decoding"),
Partial("compose", "(Json ⇒ Json) ⇒ DecodeJson[A]", "Pre-processes json before decoding"),
Partial("upcast", "DecodeJson[B]", "Upcasts results after decoding")
), "DecodeJson[Map[K, V]]" → List(
Partial("mapKeys", "(K ⇒ C) ⇒ DecodeJson[Map[C, V]]", "Converts map keys after decoding",
"""mapDecoder.mapKeys(_.reverse).decodeJson({ "foo" → "bar" })""" → """Map("oof" → "bar")"""
),
Partial("mapValues", "(V ⇒ W) ⇒ DecodeJson[Map[K, W]]", "Converts map values after decoding",
"""mapDecoder.mapValues(_.reverse).decodeJson({ "foo" → "bar" })""" → """Map("foo" → "rab")"""
)
), "EncodeJson[A]" → List(
Partial("afterEncode", "(Json ⇒ Json) ⇒ EncodeJson[A]", "Post-processes json after encoding"),
Partial("andThen", "(Json ⇒ Json) ⇒ EncodeJson[A]", "Post-processes json after encoding"),
Partial("downcast", "EncodeJson[B]", "Downcasts values before encoding")
), "EncodeJson[Map[K, V]]" → List(
Partial("contramapKeys", "(C ⇒ K) ⇒ EncodeJson[Map[C, V]]", "Converts map keys before encoding",
"""mapEncoder.contramapKeys(_.reverse).encodeJson(Map("foo" → "bar"))""" → """{ "oof" → "bar" }"""
),
Partial("contramapValues", "(W ⇒ V) ⇒ EncodeJson[Map[K, W]]", "Converts map values before encoding",
"""mapEncoder.contramapValues(_.reverse).encodeJson(Map("foo" → "bar"))""" → """{ "foo" → "rab" }"""
)
), "Json" → List(
Partial("filterNulls", "Json", "Recursively removes null values",
"""null""" → """null""",
"""{ "a": null, "b": 3 }""" → """{ "b": 3 }""",
"""[ "a", null, "b" ]""" → """[ "a", "b" ]""",
"""{ "o": [ "a", null, "b" ] }""" → """{ "o": [ "a", "b" ] }""",
"""[ { "a": null, "b": 3 } ]""" → """[ { "b": 3 } ]"""
)
), "Traversal[Json, Json]" → List(
Partial("string", "Traversal[Json, String]", "compose with string prism",
"""Traversal.id[Json].string.getAll(Json.jString("foo"))""" → """List("foo")""",
"""Traversal.id[Json].string.getAll(Json.jNumber(3))""" → """Nil"""
),
Partial("int", "Traversal[Json, String]", "compose with int prism",
"""Traversal.id[Json].int.getAll(Json.jString("foo"))""" → """Nil""",
"""Traversal.id[Json].int.getAll(Json.jNumber(3))""" → """List(3)"""
)
), "(L \\/ R)" → List(
Partial("tap", "(L ⇒ Discarded, R ⇒ Discarded) ⇒ L \\/ R", "Perform one action or another",
" -\\/(1).tap(l ⇒ print(\"left: \" + l), r ⇒ print(\"right: \" + r))" → "Prints \"left: 1\"",
"\\/-(true).tap(l ⇒ print(\"left: \" + l), r ⇒ print(\"right: \" + r))" → "Prints \"right: true\""
),
Partial("tapLeft", "(L ⇒ Discarded) ⇒ L \\/ R", "Perform action if Left",
" -\\/(1).tapLeft(l ⇒ print(\"left: \" + l))" → "Prints \"left: 1\"",
"\\/-(true).tapLeft(l ⇒ print(\"left: \" + l))" → "Does nothing"
),
Partial("tapRight", "(R ⇒ Discarded) ⇒ L \\/ R", "Perform action if Right",
" -\\/(1).tap(r ⇒ print(\"right: \" + r))" → "Does nothing",
"\\/-(true).tap(r ⇒ print(\"right: \" + r))" → "Prints \"right: true\""
),
Partial("addTo", "(Growable[L], Growable[R]) ⇒ L \\/ R", "Adds values into growables",
" -\\/(1).addTo(ints, strings)" → "ints += 1",
"\\/-(\"foo\").addTo(ints, strings)" → "strings += \"foo\""
),
Partial("removeFrom", "(Shrinkable[L], Shrinkable[R]) ⇒ L \\/ R", "Removes values from a shrinkables",
" -\\/(1).removeFrom(ints, strings)" → "ints -= 1",
"\\/-(\"foo\").removeFrom(ints, strings)" → "strings -= \"foo\""
),
Partial("leftFlatMap", "L => L \\/ R => L \\/ R", "flatMap over the left, equivalent to swap.flatMap.swap",
" \\/-(123).leftFlatMap(in => in match { case \"left\" => \\/-(100); case _ => -\\/(in) }" → "\\/-(123)",
"-\\/(\"left\").leftFlatMap(in => in match { case \"left\" => \\/-(100); case _ => -\\/(in) }" → "\\/-(100)",
" -\\/(\"101\").leftFlatMap(in => in match { case \"left\" => \\/-(100); case _ => -\\/(in) }" → "-\\/(\"101\")"
)
), "(L \\/ (L \\/ R))" → List(
Partial("flatten", "L \\/ R", "Flattens two level disjunction into one",
"-\\/(1).flatten" → "-\\/(1)", "\\/-(-\\/(2)).flatten" → "-\\/(2)", "\\/-(\\/-(\"s\"))).flatten" → "\\/-(\"s\")"
)
), "((L \\/ R) \\/ R)" → List(
Partial("flatten", "L \\/ R", "Flattens two level disjunction into one",
"-\\/(-\\/(1)).flatten" → "-\\/(1)", "-\\/(\\/-(\"s\")).flatten" → "\\/-(\"s\")", "\\/-(\"s\").flatten" → "\\/-(\"s\")"
)
), "NonEmptyList[A]" → List(
Partial("unique", "NonEmptyList[A]", "Remove duplicates",
"NonEmptyList(3, 1, 4, 3, 4).unique" → "NonEmptyList(3, 1, 4)"
),
Partial("uniqueBy", "(A ⇒ B) ⇒ NonEmptyList[A]", "Remove items with duplicate properties",
"NonEmptyList(\"foo\", \"bar\", \"bare\", \"food\").uniqueBy(_.length)" → "NonEmptyList(\"foo\", \"bare\")"
),
Partial("filter", "Predicate[A]", "filters the NonEmptyList",
" NonEmptyList(1).filter(_ % 2 == 0)" → "None",
"NonEmptyList(1, 2).filter(_ % 2 == 0)" → "Some(NonEmptyList(2))"
),
Partial("filterNot", "Predicate[A]", "filters the NonEmptyList",
" NonEmptyList(2).filterNot(_ % 2 == 0)" → "None",
"NonEmptyList(1, 2).filterNot(_ % 2 == 0)" → "Some(NonEmptyList(1))"
),
Partial("onlyOption", "Option[A]", "Head if non-empty list has 1 element, None otherwise",
"NonEmptyList(1).onlyOption" → "Some(1)", "NonEmptyList(1, 2).onlyOption" → "None"
),
Partial("onlyEither", "Either[NonEmptyList[A], A]", "Right if non-empty list has 1 element, Left otherwise",
"NonEmptyList(1).onlyEither" → "Right(1)", "NonEmptyList(1, 2).onlyEither" → "Left(NonEmptyList(1, 2))"
),
Partial("onlyDisjunction", "NonEmptyList[A] \\/ A", "\\/- if list has 1 element, -\\/ otherwise",
"NonEmptyList(1).onlyDisjunction" → "\\/-(1)", "NonEmptyList(1, 2).onlyDisjunction" → "-\\/(NonEmptyList(1, 2))"
)
), "NonEmptyList[A: Order]" → List(
Partial("max", "A", "Maximum value",
"NonEmptyList(3, 1, 4).max" → "4"
),
Partial("min", "A", "Minimum value",
"NonEmptyList(3, 1, 4).min" → "1"
)
), "NonEmptyList[V]" → List(
Partial("asMap...", "", ""),
Partial("asMultiMap...", "", "")
), "NonEmptyList[K]" → List(
Partial("as[F]", "", "")
))).toString().getBytes(Charset.forName("UTF-8")))
fos.flush()
fos.close()
null // returning null => Generate in 1s, return file => 30s !?
}
object Partial {
def apply(name: String, partSignature: String, description: String, examples0: (String, String)*): Partial =
new Partial(name, partSignature, description, examples0.toList, Nil)
}
class Partial(val name: String, signature: String, val description: String, examples: List[(String, String)], seeOthers: List[String]) {
def seeOther(names: String*) = new Partial(name, signature, description, examples, names.toList ++ seeOthers)
def pimp(category: String) = Pimp(category, name, signature, description, examples, seeOthers)
}
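// Sketch of how these pieces compose (illustration only): binding a Partial to
// its category produces a Pimp whose full signature prefixes the category, e.g.
//   Partial("emptyTo", "String ⇒ String", "...").pimp("String").signature
//     == "String ⇒ String ⇒ String"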
case class Pimp(
category: String, name: String, partSignature: String, description: String,
examples0: List[(String, String)] = Nil, seeOthers: List[String] = Nil
) {
def signature = category + " ⇒ " + partSignature
def dataName = category + "_" + name
def idName = "id_" + dataName
def href = "#" + dataName
def src = "https://github.com/stacycurl/pimpathon/blob/master/core/src/main/scala/pimpathon/any.scala#L13"
def seeAlso: NodeSeq = NodeSeq.Empty
// <div class="see">
// See also
// <a href="#subtract">|></a>.
// </div>
def examples = if (examples0.isEmpty) NodeSeq.Empty else {
// val maxExpressionLength = examples0.flatMap(_._1.split("\n").toList).map(_.length).max
examples0.flatMap {
case (expression, result) ⇒ {
<div style="display: flex">
<pre style="flex: 1"><code class="hljs javascript">{expression.stripMargin}</code></pre>
<pre><code class="hljs-comment">{result.stripMargin}</code></pre>
</div>
}
}
// <pre><code class="hljs javascript">{examples0.flatMap {
// case (expressions, result0) ⇒ {
// expressions.split("\n").zip(result0 #:: Stream.continually("")).map {
// case (expression, result) if result != "" ⇒ {
// val padding = " " * (maxExpressionLength - expression.length)
//
// <span>{Text(expression)}</span><span class="hljs-comment"> {padding}// => {result}</span><br/>
// }
// case (expression, "") ⇒ <span>{Text(expression)}</span><br/>
// }
// }
// }}</code></pre>
}
//<pre><code class="hljs javascript"></code></pre>
}
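// Note: "▶:" below is read here as pimpathon's infix type alias for Map, so
// "String ▶: List[Partial]" means Map[String, List[Partial]] (an assumption
// inferred from how partials0 is traversed as key/value pairs below).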
def template(version: String, partials0: String ▶: List[Partial]): Elem = {
val partials = partials0
// .filterKeys(_ == "GTL[A]").mapValues(_.filter(p ⇒ p.name == "asMultiMap.withEntries" && p.description.contains("nested")))
template(version, partials.flatMap {
case (category, ps) ⇒ if (ps.isEmpty) List(Pimp(category, "todo", "todo", "todo")) else ps.map(_.pimp(category))
})
}
def template(version: String, pimps: Iterable[Pimp]): Elem =
<html class="docs-page">
<head>
<meta charset="UTF-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1"/>
<title>Pimpathon Documentation</title>
<link rel="stylesheet" type="text/css" href="bootstrap-3.3.5.css"/>
</head>
<body>
<input type="checkbox" id="open-nav"/>
<header class="navbar navbar-fixed-top navbar-inverse container-fluid">
<div class="navbar-header">
<label class="open-nav" for="open-nav"></label>
<a class="navbar-brand" href="/">
<strong>Pimpathon</strong>
<span class="version">{version}</span>
</a>
</div>
<div class="navbar-left">
<ul class="nav navbar-nav">
<li><a href="https://github.com/stacycurl/pimpathon">GitHub</a></li>
</ul>
</div>
</header>
<aside class="sidebar container-fluid">
<div class="form-group has-feedback filter">
<input class="form-control"
tabindex="1"
id="name-filter"
placeholder="Filter"
type="text"
data-bind="textInput: filter"
autofocus="autoFocus"
/>
<span class="form-control-feedback">
<span></span>
</span>
</div>
<ul class="nav nav-pills nav-stacked toc">{pimps.map(pimp ⇒ {
<li class="func" data-name={pimp.dataName} data-category={pimp.category}>
<a href={pimp.href}>{pimp.name}<span data-category={pimp.category} class="label label-category pull-right">{pimp.category}</span></a>
</li>
})}</ul>
</aside>
<main class="container-fluid">{pimps.map(pimp ⇒ {
<section class="card" id={pimp.dataName}>
<h2>
<a tabindex="2" class="name" href={pimp.href} id={pimp.idName}>{pimp.name}</a>
<span class="pull-right">
<span class="label label-category">{pimp.category}</span>
<a target="_blank" title="View source on GitHub" href={pimp.src}><small>GH</small></a>
</span>
</h2>
<div class="sig btn btn-link"><span class="caret rotated"></span>{pimp.signature}</div>
<div class="description"><p>{pimp.description}</p></div>
{pimp.seeAlso}
{pimp.examples}
</section>
})}</main>
<script src="main.js"></script>
</body>
</html>
}
|
stacycurl/pimpathon
|
project/Documentation.scala
|
Scala
|
apache-2.0
| 81,861
|
package jadecrawler.dto.website
import jadeutils.mongo._
/* ================================================================= */
/* Models for iciba crawler */
/* ================================================================= */
/**
* iciba model
*/
case class IcibaS2Dto(s1: String, s2: String) {
def this() = this(null, null)
@MongoField var str1 = s1
@MongoField var str2 = s2
override def toString = "{%s, %s}".format(str1, str2)
}
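// Pattern note (an assumption based on usage, not documented here): the no-arg
// auxiliary constructors and mutable @MongoField vars in these DTOs appear to
// exist so the jadeutils Mongo mapper can instantiate them reflectively and set
// fields afterwards; the case-class parameters only seed the initial values.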
case class IcibaS3Dto(s1: String, s2: String, s3: String) {
def this() = this(null, null, null)
@MongoField var str1 = s1
@MongoField var str2 = s2
@MongoField var str3 = s3
override def toString = "{%s, %s, %s}".format(str1, str2, str3)
}
case class Opposite(s: String, ws: java.util.List[String]) {
def this() = this(null, null)
@MongoField var str = s
@MongoField(ElemType = classOf[String]) var words = ws
override def toString = "{%s, %s}".format(str,
if (null == words) "{}" else words.toString)
}
case class IcibaHomoDto(s: String, dto: java.util.List[IcibaS2Dto]) {
def this() = this(null, null)
@MongoField var str = s
@MongoField(ElemType = classOf[IcibaS2Dto]) var s2dto = dto
override def toString = "{%s, %s}".format(str,
if (null == s2dto) "{}" else s2dto.toString)
}
@MongoDocument(databaseName = "crawler", collectionName = "iciba")
case class IcibaDto(w: String, prons: java.util.List[IcibaS3Dto],
exps: java.util.List[(IcibaS2Dto)], rlts: java.util.List[IcibaS2Dto],
exmps: java.util.List[IcibaS3Dto], homs: java.util.List[IcibaHomoDto],
samws: java.util.List[Opposite], oppws: java.util.List[Opposite],
snys: java.util.List[IcibaS3Dto], phws: java.util.List[IcibaS3Dto]) extends MongoModel
{
def this() = this(null, null, null, null, null, null, null, null, null, null)
@MongoField var word = w
@MongoField(ElemType = classOf[IcibaS3Dto]) var pronunciations = prons
@MongoField(ElemType = classOf[IcibaS2Dto]) var explantions = exps
@MongoField(ElemType = classOf[IcibaS2Dto]) var relatedWords = rlts
@MongoField(ElemType = classOf[IcibaS3Dto]) var examples = exmps
@MongoField(ElemType = classOf[IcibaHomoDto]) var homoionyms = homs
@MongoField(ElemType = classOf[Opposite]) var sameWrds = samws
@MongoField(ElemType = classOf[Opposite]) var oppsites = oppws
@MongoField(ElemType = classOf[IcibaS3Dto]) var slangys = snys
@MongoField(ElemType = classOf[IcibaS3Dto]) var phrases = phws
override def toString = ( "{IcibaDto: {word=%s, pronunciations=%s, " +
"explantions=%s, relatedWords=%s, examples=%s, homoionyms=%s, " +
"sameWrds=%s, oppsites=%s, phrases=%s, slangys=%s}}").format(word
,if (null != pronunciations) pronunciations.toString else ""
,if (null != explantions ) explantions.toString else ""
,if (null != relatedWords ) relatedWords.toString else ""
,if (null != examples ) examples.toString else ""
,if (null != homoionyms ) homoionyms.toString else ""
,if (null != sameWrds ) sameWrds.toString else ""
,if (null != oppsites ) oppsites.toString else ""
,if (null != slangys ) slangys.toString else ""
,if (null != phrases ) phrases.toString else "")
}
case class NewWord(w: String, c: Int) {
def this() = this(null, 1)
@MongoField var word = w
@MongoField var count = c
override def toString = "{NewWord: {word=%s, count=%d}}".format(word, count)
}
@MongoDocument(databaseName = "crawler", collectionName = "newwordbook")
case class NewWordBook(u: String, p: String, ws: java.util.List[NewWord])
extends MongoModel
{
def this() = this(null, null, null)
@MongoField var username = u
@MongoField var password = p
@MongoField(ElemType = classOf[NewWord]) var words = ws
override def toString =
"{NewWordBook: {username=%s, password=%s, words=%s}}".format(
username, password, if (null != words) words.toString else "")
}
/* ================================================================= */
/* Models for YYets crawler */
/* ================================================================= */
class YyetsRecListDto(val id: String, val name: String) extends MongoModel {
override def toString = "{YyetsRecListDto: {id=%s, name=%s}}".format(id, name)
}
class YyetsLink(@MongoField linkType: String, @MongoField link: String) {
override def toString = "{%s, %s}".format(linkType, link)
}
@MongoDocument(databaseName = "crawler", collectionName = "yyets")
class YyetsRecInfoDto(@MongoField id: String, @MongoField name: String,
@MongoField season: String, @MongoField episode: String,
@MongoField format: String, @MongoField filename: String,
@MongoField size: String, @MongoField links: java.util.List[YyetsLink]
) extends MongoModel
{
def getId = id
def getName = name
def getSeason = season
def getEpisode = episode
def getFormat = format
def getFilename = filename
def getSize = size
def getLinks = links
override def toString = ("{className=YyetsRecInfoDto id=%s, name=%s, " +
"season=%s, episode=%s, format=%s, filename=%s, size=%s, links=%s}}"
).format( id, name, season, episode, format, filename, size,
if(null!=links) links.toString else "")
}
|
Jade-Shan/Jade-crawler
|
crawler-dto/src/main/scala/website/dto.scala
|
Scala
|
gpl-3.0
| 5,387
|
/*
* The MIT License (MIT)
*
* Copyright (c) 2016 rainysoft
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.rainysoft.quantlib.math.optimization
import java.util.concurrent.TimeUnit
import org.openjdk.jmh.annotations._
/** Performance test of Brent-Dekker optimization
*
* @author mikael.ohman
*/
@BenchmarkMode(Array(Mode.Throughput, Mode.AverageTime)) // Measure both calls per time unit and average time per call
@OutputTimeUnit(TimeUnit.MICROSECONDS) // per microsecond (e.g. 10 calls per µs, avg. call time of 0.1 µs)
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(1)
@State(Scope.Benchmark)
class BrentDekkerPTest {
val f = (x: Double) => 2 * x + 3
@Benchmark
def search() = {
// Return the result so JMH consumes it and the JIT cannot
// dead-code-eliminate the search under test.
BrentDekker.search(-10, 10, f)
}
}
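// Presumably run via the sbt-jmh plugin (inferred from the src/jmh source
// layout), e.g.: sbt "jmh:run .*BrentDekkerPTest.*"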
|
MikaelUmaN/quantlib
|
src/jmh/scala/com/rainysoft/quantlib/math/optimization/BrentDekkerPTest.scala
|
Scala
|
mit
| 1,954
|
package backend.flowNetwork
import akka.actor._
import backend.NextFlowUID
import backend.flowNetwork.sinks.{FlowAccumulator, FlowFrequency, FlowTrace, FlowCounter}
import backend.flowNetwork.sources.{FlowTwitterSource, FlowIpsumSource, FlowNumberSource}
import backend.flowNetwork.transformations._
import play.api.libs.iteratee.Concurrent
import play.api.libs.json.JsValue
/** Registers an observer for node and connection configuration updates */
case class Register(observer: ActorRef)
/** Unregisters an observer for node and connection configuration updates */
case class Unregister(observer: ActorRef)
/** Triggers a complete re-discovery of the whole network configuration */
case object DetectConfiguration
/** Requests Some[ActorRef] of the connection between given source and target from Supervisor */
case class LookupConnection(sourceId: Long, targetId: Long)
/** Requests Some[ActorRef] of the node with the given id */
case class LookupObj(id: Long)
/** Requests Some[Long] ID of the node with the given ActorRef */
case class LookupId(obj: ActorRef)
/** Requests a list of possible flow objects */
case object GetFlowObjectTypes
/** Requests a set of all object IDs */
case object GetFlowObjects
/** Requests the creation of a flow object.
* Answers with Some((id, ActorRef)) on success. None on failure.
*
* @param what NodeType to create
* @param x X-Position in visualisation
* @param y Y-Position in visualisation
*/
case class CreateFlowObject(what: String, x: Int, y: Int)
/** Requests deletion of the flow object with the given ID
* Answers Some(id) on success, None on failure
*/
case class DeleteFlowObject(id: Long)
/** Requests a list of (sourceId, targetId) tuples */
case object GetConnections
/** Requests creation of a connection between source and target.
* Answers with Some(((sourceId, targetId), connection:ActorRef)) on success. None on failure.
*
* @param sourceId Source ID
* @param targetId Target ID
* @param attributes Connection attributes freely settable by the client (e.g. for connector positioning)
*/
case class Connect(sourceId: Long, targetId: Long, attributes: Map[String, String])
/** Requests disconnect of sourceId and targetId
* Answers with Some((sourceId, targetId)) on success. None on failure.
*/
case class Disconnect(sourceId: Long, targetId: Long)
/** Used by FlowSupervisor to request shutdown from its child actors */
case object Shutdown
/** Used by child actors to acknowledge supervisor shutdown request */
case object Stopping
/** Requests an Enumerator[JsValue] subscribed to all network configuration updates.
* Receives the full current configuration of the network (nodes then connections)
* before streaming of configuration updates starts.
*/
case object RequestEnumerator
/** Used for representation and update of the (key -> value) configuration in a Node/Connection */
case class Configuration(config: Map[String, String])
/** Requests configuration from a FlowNode/Connection */
case object GetConfiguration
object FlowSupervisor {
def props(): Props = Props(new FlowSupervisor)
}
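/** A minimal usage sketch of the message protocol above (hypothetical helper,
  * not part of the production code): the node type strings, system name and
  * timeout are assumptions for illustration only.
  */
object FlowSupervisorUsageSketch {
  import akka.pattern.ask
  import akka.util.Timeout
  import scala.concurrent.Future
  import scala.concurrent.duration._

  def demo(): Unit = {
    val system = ActorSystem("flow-demo") // assumed system name
    import system.dispatcher
    implicit val timeout: Timeout = Timeout(5.seconds)
    val supervisor = system.actorOf(FlowSupervisor.props(), "flowSupervisor")

    // Create two nodes, then connect them; replies follow the documented
    // protocol: Some((id, ref)) on success, None on failure.
    val wired: Future[Boolean] = for {
      source <- (supervisor ? CreateFlowObject("FlowNumberSource", 0, 0)).mapTo[Option[(Long, ActorRef)]]
      target <- (supervisor ? CreateFlowObject("FlowCounter", 100, 0)).mapTo[Option[(Long, ActorRef)]]
      connected <- (source, target) match {
        case (Some((sourceId, _)), Some((targetId, _))) =>
          (supervisor ? Connect(sourceId, targetId, Map.empty)).map(_ => true)
        case _ => Future.successful(false)
      }
    } yield connected

    wired.foreach(ok => println(s"network wired: $ok"))
  }
}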
/** The FlowSupervisor handles central supervision and control tasks for the Flow network.
* It is the central instance to talk to for network reconfiguration or
* observation.
*/
class FlowSupervisor extends Actor with ActorLogging {
val ordinaryFlowObjects = Map[String, (Long, String, Int, Int) => Props](
FlowNumberSource.nodeType -> FlowNumberSource.props,
FlowCrossbar.nodeType -> FlowCrossbar.props,
FlowFilter.nodeType -> FlowFilter.props,
FlowTokenizer.nodeType -> FlowTokenizer.props,
FlowFrequency.nodeType -> FlowFrequency.props,
FlowSentiment.nodeType -> FlowSentiment.props,
FlowAccumulator.nodeType -> FlowAccumulator.props,
FlowIpsumSource.nodeType -> FlowIpsumSource.props,
FlowCounter.nodeType -> FlowCounter.props,
FlowTrace.nodeType -> FlowTrace.props,
FlowTwitterSource.nodeType -> FlowTwitterSource.props,
FlowMultiplier.nodeType -> FlowMultiplier.props,
FlowStopwordFilter.nodeType -> FlowStopwordFilter.props
)
/** Helper object which generates name sequences when called repeatedly.
* e.g. generates Name1, Name2, ... for newActorName("Name")
*/
object newActorName {
var objectCounts = scala.collection.mutable.Map[String, Int]().withDefaultValue(0)
def apply(name: String) = {
objectCounts(name) += 1
s"$name${objectCounts(name)}"
}
}
var flowIdToObject = scala.collection.mutable.Map.empty[Long, ActorRef]
var flowObjectToId = scala.collection.mutable.Map.empty[ActorRef, Long]
var connectionObjToIds = scala.collection.mutable.Map.empty[ActorRef, (Long, Long)]
var connectionIdsToObj = scala.collection.mutable.Map.empty[(Long, Long), ActorRef]
var currentNodeConfigurations = scala.collection.mutable.Map.empty[Long, Configuration]
var currentConnectionConfigurations = scala.collection.mutable.Map.empty[(Long, Long), Configuration]
var translators = scala.collection.mutable.Map.empty[Long, ActorRef]
var observers = scala.collection.mutable.Set.empty[ActorRef]
/** Returns all connections related to the given node id */
private def connectionsFor(id: Long): scala.collection.mutable.Map[(Long, Long), ActorRef] =
connectionIdsToObj.filter { case ((sourceId, targetId),_) => sourceId == id || targetId == id }
/** Broadcasts the given message to all registered observers */
private def notifyObservers[T](message: T) = observers map { _ ! message }
/** Removes an existing connection between two nodes.
*
* @param sourceId Source node ID
* @param targetId Target node ID
* @return true on success. False on failure.
*/
private def disconnect(sourceId: Long, targetId: Long): Boolean = {
connectionIdsToObj.remove((sourceId, targetId)) match {
case Some(connection) =>
log.info(s"Disconnecting $sourceId and $targetId")
flowIdToObject.get(sourceId) match {
case Some(source) => source ! RemoveTarget(connection)
case None => // Fine, already gone
}
currentConnectionConfigurations.remove((sourceId, targetId))
notifyObservers((sourceId, targetId), None)
connection ! Shutdown
true
case _ =>
log.warning(s"Asked to disconnect $sourceId from $targetId but have no connection")
false
}
}
def receive = {
case GetFlowObjectTypes => sender() ! ordinaryFlowObjects.keys.toList
case RequestEnumerator =>
import play.api.libs.concurrent.Execution.Implicits.defaultContext
val uid = NextFlowUID()
log.info(s"Translator reservation $uid")
val shutdownTranslator = () => {
translators.get(uid) match {
case Some(translator) =>
log.info(s"Shutting down translator $uid")
translators.remove(uid)
self ! Unregister(translator)
translator ! Shutdown
case None => log.info(s"Translator $uid already gone")
}
}
val enumerator = Concurrent.unicast[JsValue](
onStart = (channel) => {
// Someone actually connected, so start a translator for this session
log.info(s"Starting new translator for client session $uid")
val translator = context.actorOf(MessageTranslator.props(channel), name = s"messageTranslator$uid")
translators += uid -> translator
self ! Register(translator)
// Push current state
currentNodeConfigurations map { case (k,v) => translator ! (k, v) }
currentConnectionConfigurations map { case (k,v) => translator ! (k,v) }
},
onComplete = { shutdownTranslator() },
onError = (_,_) => shutdownTranslator()
).onDoneEnumerating(
callback = { shutdownTranslator() }
)
sender() ! enumerator
case Register(observer) =>
log.info(s"$observer registered for updates")
observers += observer
case Unregister(observer) =>
log.info(s"$observer removed from updates")
observers -= observer
case DetectConfiguration =>
log.info(s"${sender()} triggered configuration update")
// Notify about current configuration state
currentNodeConfigurations map { case (k,v) => notifyObservers(k, v) }
currentConnectionConfigurations map { case (k,v) => notifyObservers(k,v) }
case CreateFlowObject(objectType, x, y) =>
ordinaryFlowObjects.get(objectType) match {
case Some(_) =>
log.info(s"Creating new $objectType for ${
sender()
}")
val name = newActorName(objectType)
val id = NextFlowUID()
val obj = context.actorOf(ordinaryFlowObjects(objectType)(id, name, x, y), name = name)
flowIdToObject += id -> obj
flowObjectToId += obj -> id
obj ! GetConfiguration // Pull configuration
sender() ! Some((id, obj))
case None =>
log.warning(s"Asked to create unknown flow object type $objectType")
sender() ! None
}
case DeleteFlowObject(id: Long) =>
flowIdToObject.get(id) match {
case Some(obj) =>
log.info(s"Deleting node $id")
// First disconnect
connectionsFor(id) map { case ((sourceId, targetId), _) => disconnect(sourceId, targetId)}
// Then delete
obj ! Shutdown
flowIdToObject.remove(id)
flowObjectToId.remove(obj)
sender() ! Some(id) // Ack delete of ID
currentNodeConfigurations.remove(id)
notifyObservers(id, None)
case None =>
log.warning(s"Asked to delete unknown object $id")
sender() ! None
}
case Configuration(data) =>
flowObjectToId.get(sender()) match {
case Some(id) =>
currentNodeConfigurations(id) = Configuration(data)
notifyObservers(id, Configuration(data))
case None =>
connectionObjToIds.get(sender()) match {
case Some((sourceId, targetId)) =>
currentConnectionConfigurations((sourceId, targetId)) = Configuration(data)
notifyObservers((sourceId, targetId), Configuration(data))
case None => log.error(s"Received configuration update for untracked actor ${sender()}")
}
}
case (id: Long, Configuration(data)) =>
// Forward config to addressed actor
flowIdToObject.get(id) match {
case Some(obj) =>
obj ! Configuration(data)
sender() ! true
case None =>
log.error(s"Asked to forward configuration for unknown id $id")
sender() ! false
}
case LookupConnection(sourceId, targetId) =>
connectionIdsToObj.get((sourceId, targetId)) match {
case Some(connection) => sender() ! Some(connection)
case None => sender() ! None
}
case LookupObj(id) =>
flowIdToObject.get(id) match {
case Some(obj) => sender() ! Some(obj)
case None => sender() ! None
}
case LookupId(obj) =>
flowObjectToId.get(obj) match {
case Some(id) => sender() ! Some(id)
case None => sender() ! None
}
case GetConnections =>
sender() ! connectionIdsToObj.keySet.toSet
case GetFlowObjects =>
sender() ! flowIdToObject.keySet.toSet
case Connect(sourceId, targetId, attributes) =>
(flowIdToObject.get(sourceId), flowIdToObject.get(targetId)) match {
case (Some(source), Some(target)) if !connectionIdsToObj.contains((sourceId, targetId)) =>
log.info(s"Creating new connection from $source to $target")
val connection = context.actorOf(
FlowConnection.props(source, sourceId, target, targetId, attributes),
name = newActorName("FlowConnection"))
connectionObjToIds += connection -> (sourceId, targetId)
connectionIdsToObj += (sourceId, targetId) -> connection
source ! AddTarget(connection)
sender() ! Some(((sourceId, targetId), connection))
connection ! GetConfiguration
case _ =>
        sender() ! None
        log.warning(s"Asked to connect $sourceId with $targetId, at least one of which is unknown, or which are already connected")
}
case Disconnect(sourceId, targetId) =>
if (disconnect(sourceId, targetId)) {
sender() ! Some((sourceId, targetId))
} else {
sender() ! None
}
}
}
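// ---------------------------------------------------------------------------
// Editor's hedged usage sketch, not part of the supervisor: one way a client
// could drive the message protocol above via the ask pattern. The object type
// names "Source" and "Filter", the coordinates and the empty attribute map
// are illustrative assumptions only.
// ---------------------------------------------------------------------------
object FlowSupervisorClientSketch {
  import akka.actor.ActorRef
  import akka.pattern.ask
  import akka.util.Timeout
  import scala.concurrent.ExecutionContext.Implicits.global
  import scala.concurrent.duration._

  def wireTwoNodes(supervisor: ActorRef): Unit = {
    implicit val timeout: Timeout = Timeout(5.seconds)
    for {
      // CreateFlowObject replies with Some((id, actorRef)) on success
      Some((sourceId, _)) <- (supervisor ? CreateFlowObject("Source", 0, 0)).mapTo[Option[(Long, ActorRef)]]
      Some((targetId, _)) <- (supervisor ? CreateFlowObject("Filter", 100, 0)).mapTo[Option[(Long, ActorRef)]]
      // Connect replies with Some(((sourceId, targetId), connectionRef)) on success
      connected <- supervisor ? Connect(sourceId, targetId, Map.empty)
    } println(s"connected: $connected")
  }
}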
|
hacst/reactiveStreamPlay
|
app/backend/flowNetwork/FlowSupervisor.scala
|
Scala
|
bsd-3-clause
| 12,448
|
package com.twitter.concurrent
import org.specs.SpecificationWithJUnit
import com.twitter.util.{Return, Throw}
class AsyncQueueSpec extends SpecificationWithJUnit {
"AsyncQueue" should {
val q = new AsyncQueue[Int]
"queue pollers" in {
val p0 = q.poll()
val p1 = q.poll()
val p2 = q.poll()
p0.isDefined must beFalse
p1.isDefined must beFalse
p2.isDefined must beFalse
q.offer(1)
p0.poll must beSome(Return(1))
p1.isDefined must beFalse
p2.isDefined must beFalse
q.offer(2)
p1.poll must beSome(Return(2))
p2.isDefined must beFalse
q.offer(3)
p2.poll must beSome(Return(3))
}
"queue offers" in {
q.offer(1)
q.offer(2)
q.offer(3)
q.poll().poll must beSome(Return(1))
q.poll().poll must beSome(Return(2))
q.poll().poll must beSome(Return(3))
}
"into idle state and back" in {
q.offer(1)
q.poll().poll must beSome(Return(1))
val p = q.poll()
p.isDefined must beFalse
q.offer(2)
p.poll must beSome(Return(2))
q.offer(3)
q.poll().poll must beSome(Return(3))
}
"fail pending and new pollers" in {
val exc = new Exception("sad panda")
val p0 = q.poll()
val p1 = q.poll()
p0.isDefined must beFalse
p1.isDefined must beFalse
q.fail(exc)
p0.poll must beSome(Throw(exc))
p1.poll must beSome(Throw(exc))
q.poll().poll must beSome(Throw(exc))
}
"fail doesn't blow up offer" in {
val exc = new Exception
q.fail(exc)
q.offer(1) mustNot throwA[Throwable]
q.poll().poll must beSome(Throw(exc))
}
}
}
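// Editor's hedged usage sketch: the behaviour exercised by the spec above,
// outside a test. A poll() issued while the queue is empty returns an
// unsatisfied Future that the next offer() fulfils.
object AsyncQueueUsageSketch {
  import com.twitter.util.Await

  def main(args: Array[String]) {
    val q = new AsyncQueue[String]
    val pending = q.poll()         // queue is empty: future not yet satisfied
    q.offer("hello")               // satisfies the pending poll
    println(Await.result(pending)) // prints "hello"
  }
}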
|
mosesn/util
|
util-core/src/test/scala/com/twitter/concurrent/AsyncQueueSpec.scala
|
Scala
|
apache-2.0
| 1,698
|
package coursier.cli.install
import java.nio.file.{Path, Paths}
final case class ListParams(
installPath: Path
)
object ListParams {
def apply(options: ListOptions): ListParams = {
val dir = options.installDir.filter(_.nonEmpty) match {
case Some(d) => Paths.get(d)
case None => SharedInstallParams.defaultDir
}
ListParams(dir)
}
}
|
alexarchambault/coursier
|
modules/cli/src/main/scala/coursier/cli/install/ListParams.scala
|
Scala
|
apache-2.0
| 368
|
package com.sksamuel.elastic4s.requests.ingest
import com.sksamuel.elastic4s.json.{XContentBuilder, XContentFactory}
import com.sksamuel.elastic4s._
import com.sksamuel.elastic4s.handlers.ElasticErrorParser
trait IngestHandlers {
implicit object GetPipelineRequestHandler extends Handler[GetPipelineRequest, GetPipelineResponse] {
override def build(request: GetPipelineRequest): ElasticRequest = {
val endpoint = s"/_ingest/pipeline/${request.id}"
ElasticRequest("GET", endpoint)
}
override def responseHandler: ResponseHandler[GetPipelineResponse] = new ResponseHandler[GetPipelineResponse] {
override def handle(response: HttpResponse): Either[ElasticError, GetPipelineResponse] = response.statusCode match {
case 200 =>
val raw = ResponseHandler.fromResponse[Map[String, Map[String, Any]]](response)
val resp = raw.map { case (id, types) =>
GetPipelineResponse(
id,
types("description").asInstanceOf[String],
types.get("version").asInstanceOf[Option[Int]],
types("processors").asInstanceOf[Seq[Map[String, Map[String, Any]]]].map { processor =>
val name = processor.keys.head
name match {
case GeoIPProcessor.name =>
val mapping = processor(name)
GeoIPProcessor(
mapping("field").asInstanceOf[String],
mapping.get("target_field").asInstanceOf[Option[String]],
mapping.get("database_file").asInstanceOf[Option[String]],
mapping.get("properties").asInstanceOf[Option[Seq[String]]],
mapping.get("ignore_missing").asInstanceOf[Option[Boolean]],
mapping.get("first_only").asInstanceOf[Option[Boolean]]
)
case _ =>
val b = XContentFactory.jsonBuilder()
processor(name).foreach { case (k, v) =>
b.autofield(k, v)
}
b.endObject()
CustomProcessor(name, b.string())
}
}
)
}
Right(resp.head)
case _ =>
Left(ElasticErrorParser.parse(response))
}
}
}
implicit object PutPipelineRequestHandler extends Handler[PutPipelineRequest, PutPipelineResponse] {
private def processorToXContent(p: Processor): XContentBuilder = {
val xcb = XContentFactory.jsonBuilder()
xcb.rawField(p.name, p.buildProcessorBody())
xcb
}
override def build(request: PutPipelineRequest): ElasticRequest = {
val xcb = XContentFactory.jsonBuilder()
xcb.field("description", request.description)
request.version.map(xcb.field("version", _))
xcb.array("processors", request.processors.map(processorToXContent).toArray)
xcb.endObject()
ElasticRequest("PUT", s"_ingest/pipeline/${request.id}", HttpEntity(xcb.string()))
}
}
implicit object DeletePipelineRequestHandler extends Handler[DeletePipelineRequest, DeletePipelineResponse] {
override def build(request: DeletePipelineRequest): ElasticRequest = {
val endpoint = s"/_ingest/pipeline/${request.id}"
ElasticRequest("DELETE", endpoint)
}
}
}
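// Editor's hedged sketch: how a single processor ends up in the PUT body.
// It reuses only calls already made above (CustomProcessor, rawField,
// buildProcessorBody, endObject); the processor name "set" and its JSON body
// are made-up examples.
object IngestHandlersSketch {
  def main(args: Array[String]): Unit = {
    val p = CustomProcessor("set", """{"field":"foo","value":"bar"}""")
    val xcb = XContentFactory.jsonBuilder()
    xcb.rawField(p.name, p.buildProcessorBody()) // embed the raw body under the processor's name
    xcb.endObject()
    println(xcb.string()) // expected shape: {"set":{"field":"foo","value":"bar"}}
  }
}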
|
sksamuel/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/ingest/IngestHandlers.scala
|
Scala
|
apache-2.0
| 3,360
|
package com.twitter.finagle.netty4
import com.twitter.app.GlobalFlag
/**
* An experimental option that enables pooling for receive buffers.
*
* Since we always copy onto the heap (see `DirectToHeapInboundHandler`), the receive
* buffers never leave the pipeline hence can safely be pooled.
* In its current form, this will preallocate at least N * 2 mb (chunk size) of
* direct memory at the application startup, where N is the number of worker threads
* Finagle uses.
*
* Example:
*
* On a 16 core machine, the lower bound for the pool size will be 16 * 2 * 2mb = 64mb.
*
* @note This will likely be a default for finagle-netty4.
*/
object poolReceiveBuffers extends GlobalFlag(false, "enables/disables pooling of receive buffers")
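// Editor's illustration of the arithmetic in the comment above. It assumes
// Finagle's usual default of 2 worker threads per core; both that and the
// 2 mb chunk size are stated assumptions, not values read from this flag.
object PoolReceiveBufferSizing {
  def lowerBoundBytes(cores: Int, threadsPerCore: Int = 2, chunkMb: Int = 2): Long =
    cores.toLong * threadsPerCore * chunkMb * 1024 * 1024

  def main(args: Array[String]): Unit =
    println(lowerBoundBytes(16)) // 67108864 bytes = 64mb, matching the example
}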
|
BuoyantIO/finagle
|
finagle-netty4/src/main/scala/com/twitter/finagle/netty4/poolReceiveBuffers.scala
|
Scala
|
apache-2.0
| 748
|
package streaming.perf.util
import java.io.PrintStream
import scala.collection.immutable.IndexedSeq
import org.apache.spark.util.StatCounter
/**
* Util for getting some stats from a small sample of numeric values, with some handy
* summary functions. This has been copied verbatim from org.apache.spark.util.Distribution
*
* Entirely in memory, not intended as a good way to compute stats over large data sets.
*
* Assumes you are giving it a non-empty set of data
*/
class Distribution(val data: Array[Double], val startIdx: Int, val endIdx: Int) {
require(startIdx < endIdx)
def this(data: Traversable[Double]) = this(data.toArray, 0, data.size)
java.util.Arrays.sort(data, startIdx, endIdx)
val length = endIdx - startIdx
val defaultProbabilities = Array(0,0.25,0.5,0.75,1.0)
/**
* Get the value of the distribution at the given probabilities. Probabilities should be
* given from 0 to 1
   * @param probabilities the probabilities at which to take quantiles, each between 0 and 1
*/
def getQuantiles(probabilities: Traversable[Double] = defaultProbabilities)
: IndexedSeq[Double] = {
probabilities.toIndexedSeq.map{p:Double => data(closestIndex(p))}
}
private def closestIndex(p: Double) = {
math.min((p * length).toInt + startIdx, endIdx - 1)
}
def showQuantiles(out: PrintStream = System.out): Unit = {
out.println("min\\t25%\\t50%\\t75%\\tmax")
getQuantiles(defaultProbabilities).foreach{q => out.print(q + "\\t")}
out.println
}
def statCounter = StatCounter(data.slice(startIdx, endIdx))
/**
* print a summary of this distribution to the given PrintStream.
   * @param out the PrintStream to write the summary to
*/
def summary(out: PrintStream = System.out) {
out.println(statCounter)
showQuantiles(out)
}
}
object Distribution {
def apply(data: Traversable[Double]): Option[Distribution] = {
if (data.size > 0) {
Some(new Distribution(data))
} else {
None
}
}
def showQuantiles(out: PrintStream = System.out, quantiles: Traversable[Double]) {
out.println("min\\t25%\\t50%\\t75%\\tmax")
quantiles.foreach{q => out.print(q + "\\t")}
out.println
}
}
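// Editor's usage sketch for the Distribution above; the sample values are
// made up. Distribution(...) yields None for empty input, so the match is total.
object DistributionExample {
  def main(args: Array[String]) {
    val latenciesMs = Seq(12.0, 15.0, 9.0, 31.0, 22.0)
    Distribution(latenciesMs) match {
      case Some(d) => d.summary() // prints the StatCounter line, then the quantiles
      case None => println("no data")
    }
  }
}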
|
XiaoqingWang/spark-perf
|
streaming-tests/src/main/scala/streaming/perf/util/Distribution.scala
|
Scala
|
apache-2.0
| 2,088
|
/**
* © 2019 Refinitiv. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.tools.data.downloader
import cmwell.tools.data.downloader.streams.Downloader
import cmwell.tools.data.utils.ArgsManipulations._
import cmwell.tools.data.utils.akka.Implicits._
import cmwell.tools.data.utils.akka._
import cmwell.tools.data.utils.ops._
import cmwell.tools.data.utils.text.Files._
import nl.grons.metrics4.scala._
import org.apache.commons.lang3.time.DurationFormatUtils
import org.rogach.scallop.ScallopConf
import org.slf4j.LoggerFactory
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}
object StreamsMain extends App with InstrumentedBuilder {
object Opts extends ScallopConf(args) {
version(s"cm-well downloader ${getVersionFromManifest()} (c) 2015")
val host = opt[String]("host", descr = "cm-well host name server", required = true)
val path = opt[String]("path", short = 'p', descr = "path in cm-well", default = Some("/"))
    val recursive =
      opt[Boolean]("recursive", short = 'r', default = Some(false), descr = "flag to download data recursively")
val length = opt[String]("length",
short = 'l',
descr = "max number of records to download (i.e., 1, 100, all)",
default = Some("50"))
val format = opt[String](
"format",
short = 'f',
descr = "desired record format (i.e., json, jsonld, jsonldq, n3, ntriples, nquads, trig, rdfxml)",
default = Some("trig")
)
val params = opt[String]("params", descr = "params string in cm-well", default = Some(""))
val op = opt[String]("op", descr = "operation type (stream, nstream, mstream, sstream)", default = Some("stream"))
val qp = opt[String]("qp", descr = "query params in cm-well", default = Some(""))
val fromUuids = opt[Boolean]("from-uuids",
descr = "download data from uuids input stream provided by stdin",
default = Some(false))
val fromPaths = opt[Boolean]("from-paths",
descr = "download data from paths input stream provided by stdin",
default = Some(false))
val fromQuery = opt[Boolean]("from-query", descr = "download data from query to cm-well", default = Some(false))
val numConnections = opt[Int]("num-connections", descr = "number of http connections to open")
mutuallyExclusive(fromUuids, fromPaths)
conflicts(fromUuids, List(path, recursive, length, params, op, qp))
conflicts(fromPaths, List(path, recursive, length, params, op, qp))
verify()
}
val allowedOps = Set("stream", "nstream", "mstream", "sstream")
if (!allowedOps.contains(Opts.op())) {
Opts.printHelp()
System.exit(1)
}
// resize akka http connection pool
  Opts.numConnections.toOption.foreach { numConnections =>
System.setProperty("akka.http.host-connection-pool.max-connections", numConnections.toString)
}
val length = Opts.length() match {
case "ALL" | "all" | "All" => None
case num => Some(num.toInt)
}
val metricRegistry = new com.codahale.metrics.MetricRegistry()
val metricDownloading = metrics.timer("downloading")
val totalDownloadedBytes = metrics.counter("received-bytes")
var bytesInWindow = 0L
val metricRateBytes = metrics.meter("rate-bytes")
var nextTimeToReport = 0L
val start = System.currentTimeMillis()
// check which download function to execute
val source = if (Opts.fromUuids()) {
// download from uuids
Downloader.createSourceFromUuidInputStream(baseUrl = formatHost(Opts.host()),
format = Opts.format(),
in = System.in)
} else if (Opts.fromPaths()) {
Downloader.createSourceFromPathsInputStream(baseUrl = formatHost(Opts.host()),
format = Opts.format(),
in = System.in)
} else {
// download from query
Downloader.createSourceFromQuery(
baseUrl = formatHost(Opts.host()),
path = formatPath(Opts.path()),
params = Opts.params(),
qp = Opts.qp(),
format = Opts.format(),
op = Opts.op(),
length = length,
recursive = Opts.recursive()
)
}
var lastTime = 0L
var lastMessageSize = 0
val result = source.runForeach { data =>
// print data to standard output
print(data.utf8String)
// calculate download rate and statistics
val bytesRead = data.size
bytesInWindow += bytesRead
totalDownloadedBytes += bytesRead
metricRateBytes.mark(bytesRead)
val now = System.currentTimeMillis()
// throttle statistics report messages
if (now > nextTimeToReport) {
val rate = toHumanReadable(bytesInWindow * 1000 / (now - lastTime))
val message =
s"received=${toHumanReadable(totalDownloadedBytes.count)}".padTo(20, ' ') +
s"mean rate=${toHumanReadable(metricRateBytes.meanRate)}/sec".padTo(30, ' ') +
// s"rate=${toHumanReadable(metricRateBytes.oneMinuteRate)}/sec".padTo(30, ' ') +
s"rate =${rate}/sec".padTo(30, ' ') +
s"[${DurationFormatUtils.formatDurationWords(now - start, true, true)}]"
System.err.print("\\r" * lastMessageSize + message)
nextTimeToReport = now + 1000
bytesInWindow = 0
lastTime = now
lastMessageSize = message.size
}
}
result.onComplete { x =>
val time = (System.currentTimeMillis() - start) / 1000.0
System.err.println(s"\\ntotal execution time: $time seconds")
System.err.println("status: " + x)
cleanup()
x match {
case Success(v) => LoggerFactory.getLogger(getClass).info("value: " + v)
case Failure(err) => LoggerFactory.getLogger(getClass).error("error: ", err)
}
}
}
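// Editor's illustration: a typical invocation of this tool (the jar name,
// host and path below are hypothetical). Statistics go to stderr, so
// redirecting stdout captures only the downloaded records:
//
//   java -jar cmwell-downloader.jar --host cmwell.example.com \
//        --path /example.org --recursive --format ntriples \
//        --length all --op stream > data.nt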
|
dudi3001/CM-Well
|
server/cmwell-data-tools-app/src/main/scala/cmwell/tools/data/downloader/StreamsMain.scala
|
Scala
|
apache-2.0
| 6,497
|
import org.specs2.execute.Pending
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._
import play.api.test._
import play.api.test.Helpers._
/**
 * Add your integration spec here.
* An integration test will fire up a whole play application in a real (or headless) browser
*/
@RunWith(classOf[JUnitRunner])
class IntegrationSpec extends Specification {
"TaskForm" should {
"return only content with onlyContent=true" in {
"by GET" in new WithBrowser {
Pending("to research test with session.")
/*
browser.goTo("http://localhost:" + port + "/task/form?onlyContent=true")
browser.pageSource must not contain("<html>")
browser.pageSource must not contain("<head>")
browser.pageSource must not contain("</head>")
browser.pageSource must not contain("<body>")
browser.pageSource must not contain("</body>")
browser.pageSource must not contain("</html>")
browser.pageSource must contain("<form>")
browser.pageSource must contain("</form>")
*/
}
}
}
}
|
Kuchitama/MyTime
|
test/IntegrationSpec.scala
|
Scala
|
apache-2.0
| 1,101
|
package io.koff.hll
/**
* Simple example of using HLL from stream
*/
object SimpleStreamExample {
import com.clearspring.analytics.stream.cardinality.HyperLogLogPlus
def main(args: Array[String]) {
//define test data
val data = Seq("aaa", "bbb", "ccc")
    // create the HLL object to which we will add our data;
    // the precision parameters are set in the constructor
val merged = new HyperLogLogPlus(5, 25)
    // add each element to the HLL
    data.foreach{ elem =>
      // explicitly specify the encoding when converting the string to bytes
      val bytes = elem.getBytes("utf-8")
merged.offer(bytes)
}
    // print the cardinality estimate
println("estimate count: " + merged.cardinality())
}
}
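/**
 * Editor's follow-up sketch: HLL sketches built independently can be merged
 * to estimate the cardinality of the union. Assumes stream-lib's addAll,
 * which merges sketches created with the same (p, sp) parameters.
 */
object MergeStreamExample {
  import com.clearspring.analytics.stream.cardinality.HyperLogLogPlus
  def main(args: Array[String]) {
    val left = new HyperLogLogPlus(5, 25)
    val right = new HyperLogLogPlus(5, 25)
    Seq("aaa", "bbb").foreach(s => left.offer(s.getBytes("utf-8")))
    Seq("bbb", "ccc").foreach(s => right.offer(s.getBytes("utf-8")))
    left.addAll(right) // merge right into left; both sketches keep the same parameters
    println("estimated union count: " + left.cardinality()) // ~3, "bbb" counted once
  }
}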
|
coffius/koffio-hll
|
src/main/scala/io/koff/hll/SimpleStreamExample.scala
|
Scala
|
mit
| 745
|
package pgep.GeneticOperators.Mutators
abstract class Mutator(selection: GenotypeSelector) extends GeneticOperator(1, 1, selection){
def apply(genotypes: Array[Genotype]) {
for (sel <- selection.select(genotypes)) {
assert(sel.length == 1)
apply(sel.head)
}
}
protected[Mutators] def apply(gt: Genotype)
}
|
khernyo/PGEP
|
src/pgep/GeneticOperators/Mutators/Mutator.scala
|
Scala
|
gpl-3.0
| 327
|
package lazybenchmarks
import leon.lazyeval._
import leon.lang._
import leon.annotation._
import leon.instrumentation._
//import leon.invariant._
object MergeSort {
// TODO: making this parametric will break many things. Fix them
sealed abstract class LList {
def size: BigInt = {
this match {
case SNil() => BigInt(0)
case SCons(x, t) => 1 + ssize(t)
}
} ensuring (_ >= 0)
}
case class SCons(x: BigInt, tail: $[LList]) extends LList
case class SNil() extends LList
def ssize(l: $[LList]): BigInt = (l*).size
sealed abstract class List {
def size: BigInt = {
this match {
case Cons(_, xs) => 1 + xs.size
case _ => BigInt(0)
}
} ensuring (_ >= 0)
}
case class Cons(x: BigInt, tail: List) extends List
case class Nil() extends List
def length(l: List): BigInt = {
l match {
case Nil() => BigInt(0)
case Cons(x, xs) => 1 + length(xs)
}
} ensuring (res => res >= 0 && res == l.size && stack <= 14 * l.size + 15)
def split(l: List, n: BigInt): (List, List) = {
require(n > 0 && n < l.size)
l match {
case Nil() => (Nil(), l)
case Cons(x, xs) =>
if (n == 1) {
(Cons(x, Nil()), xs)
} else {
val (fst, snd) = split(xs, n - 1)
(Cons(x, fst), snd)
}
}
} ensuring (res => res._2.size == l.size - n && res._1.size == n && stack <= 25 * l.size - 1)
/*
* Proving standalone bound for merge requires preconditions.
*/
def merge(a: $[LList], b: $[LList]): LList = (b.value match {
case SNil() => a.value
case bl @ SCons(x, xs) =>
a.value match {
case SNil() => bl
case SCons(y, ys) =>
if (y < x)
SCons(y, $(merge(ys, b)))
else
SCons(x, $(merge(a, xs)))
}
}) //ensuring (res => ssize(a) + ssize(b) == res.size)
/**
   * Note that the time is not O(n) but O(n log n), since the running time
   * satisfies the recurrence T(n) = 2T(n/2) + O(n)
*/
def mergeSort(l: List): LList = (l match {
case Nil() => SNil()
case Cons(x, Nil()) => SCons(x, $(SNil()))
case _ =>
val (fst, snd) = split(l, length(l) / 2)
merge($(mergeSort(fst)), $(mergeSort(snd)))
}) ensuring (res => stack <= 81 * l.size + 35) // res.size == l.size
}
|
regb/leon
|
testcases/lazy-datastructures/withconst/LazyMegeSort.scala
|
Scala
|
gpl-3.0
| 2,344
|
/*
* Copyright (c) 2014-2015 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.storage.kinesis.s3
// AWS libs
import com.amazonaws.services.kinesis.model.Record
// AWS Kinesis Connector libs
import com.amazonaws.services.kinesis.connectors.interfaces.ITransformer
// Thrift libs
import org.apache.thrift.{TSerializer,TDeserializer}
// Logging
import org.apache.commons.logging.LogFactory
// Scalaz
import scalaz._
import Scalaz._
/**
* Thrift serializer/deserializer class
*/
class RawEventTransformer extends ITransformer[ValidatedRecord, EmitterInput] {
val log = LogFactory.getLog(getClass)
lazy val serializer = new TSerializer()
lazy val deserializer = new TDeserializer()
override def toClass(record: Record): ValidatedRecord = {
log.info("Converting one record to EmitterInput before adding it to the buffer")
record.getData.array.success
}
override def fromClass(record: ValidatedRecord) = record
}
|
RetentionGrid/kinesis-s3
|
src/main/scala/com.snowplowanalytics.snowplow.storage.kinesis/s3/RawEventTransformer.scala
|
Scala
|
apache-2.0
| 1,607
|
/*
* scala-swing (https://www.scala-lang.org)
*
* Copyright EPFL, Lightbend, Inc., contributors
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.swing
package event
abstract class PopupMenuEvent extends ComponentEvent
case class PopupMenuCanceled(source: PopupMenu) extends PopupMenuEvent
case class PopupMenuWillBecomeInvisible(source: PopupMenu) extends PopupMenuEvent
case class PopupMenuWillBecomeVisible(source: PopupMenu) extends PopupMenuEvent
|
scala/scala-swing
|
src/main/scala/scala/swing/event/PopupMenuEvent.scala
|
Scala
|
apache-2.0
| 628
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.sources
import scala.collection.JavaConversions._
import java.io.File
import com.google.common.io.Files
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapreduce.{JobContext, TaskAttemptContext}
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
import parquet.hadoop.ParquetOutputCommitter
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.sql._
import org.apache.spark.sql.hive.test.TestHive
import org.apache.spark.sql.test.SQLTestUtils
import org.apache.spark.sql.types._
import org.apache.spark.{SparkException, SparkFunSuite}
abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils {
override val sqlContext: SQLContext = TestHive
import sqlContext._
import sqlContext.implicits._
val dataSourceName: String
val dataSchema =
StructType(
Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", StringType, nullable = false)))
val testDF = (1 to 3).map(i => (i, s"val_$i")).toDF("a", "b")
val partitionedTestDF1 = (for {
i <- 1 to 3
p2 <- Seq("foo", "bar")
} yield (i, s"val_$i", 1, p2)).toDF("a", "b", "p1", "p2")
val partitionedTestDF2 = (for {
i <- 1 to 3
p2 <- Seq("foo", "bar")
} yield (i, s"val_$i", 2, p2)).toDF("a", "b", "p1", "p2")
val partitionedTestDF = partitionedTestDF1.unionAll(partitionedTestDF2)
def checkQueries(df: DataFrame): Unit = {
// Selects everything
checkAnswer(
df,
for (i <- 1 to 3; p1 <- 1 to 2; p2 <- Seq("foo", "bar")) yield Row(i, s"val_$i", p1, p2))
// Simple filtering and partition pruning
checkAnswer(
df.filter('a > 1 && 'p1 === 2),
for (i <- 2 to 3; p2 <- Seq("foo", "bar")) yield Row(i, s"val_$i", 2, p2))
// Simple projection and filtering
checkAnswer(
df.filter('a > 1).select('b, 'a + 1),
for (i <- 2 to 3; _ <- 1 to 2; _ <- Seq("foo", "bar")) yield Row(s"val_$i", i + 1))
// Simple projection and partition pruning
checkAnswer(
df.filter('a > 1 && 'p1 < 2).select('b, 'p1),
for (i <- 2 to 3; _ <- Seq("foo", "bar")) yield Row(s"val_$i", 1))
// Project many copies of columns with different types (reproduction for SPARK-7858)
checkAnswer(
df.filter('a > 1 && 'p1 < 2).select('b, 'b, 'b, 'b, 'p1, 'p1, 'p1, 'p1),
for (i <- 2 to 3; _ <- Seq("foo", "bar"))
yield Row(s"val_$i", s"val_$i", s"val_$i", s"val_$i", 1, 1, 1, 1))
// Self-join
df.registerTempTable("t")
withTempTable("t") {
checkAnswer(
sql(
"""SELECT l.a, r.b, l.p1, r.p2
|FROM t l JOIN t r
|ON l.a = r.a AND l.p1 = r.p1 AND l.p2 = r.p2
""".stripMargin),
for (i <- 1 to 3; p1 <- 1 to 2; p2 <- Seq("foo", "bar")) yield Row(i, s"val_$i", p1, p2))
}
}
test("save()/load() - non-partitioned table - Overwrite") {
withTempPath { file =>
testDF.write.mode(SaveMode.Overwrite).format(dataSourceName).save(file.getCanonicalPath)
testDF.write.mode(SaveMode.Overwrite).format(dataSourceName).save(file.getCanonicalPath)
checkAnswer(
read.format(dataSourceName)
.option("path", file.getCanonicalPath)
.option("dataSchema", dataSchema.json)
.load(),
testDF.collect())
}
}
test("save()/load() - non-partitioned table - Append") {
withTempPath { file =>
testDF.write.mode(SaveMode.Overwrite).format(dataSourceName).save(file.getCanonicalPath)
testDF.write.mode(SaveMode.Append).format(dataSourceName).save(file.getCanonicalPath)
checkAnswer(
read.format(dataSourceName)
.option("dataSchema", dataSchema.json)
.load(file.getCanonicalPath).orderBy("a"),
testDF.unionAll(testDF).orderBy("a").collect())
}
}
test("save()/load() - non-partitioned table - ErrorIfExists") {
withTempDir { file =>
intercept[RuntimeException] {
testDF.write.format(dataSourceName).mode(SaveMode.ErrorIfExists).save(file.getCanonicalPath)
}
}
}
test("save()/load() - non-partitioned table - Ignore") {
withTempDir { file =>
testDF.write.mode(SaveMode.Ignore).format(dataSourceName).save(file.getCanonicalPath)
val path = new Path(file.getCanonicalPath)
val fs = path.getFileSystem(sqlContext.sparkContext.hadoopConfiguration)
assert(fs.listStatus(path).isEmpty)
}
}
test("save()/load() - partitioned table - simple queries") {
withTempPath { file =>
partitionedTestDF.write
.format(dataSourceName)
.mode(SaveMode.ErrorIfExists)
.partitionBy("p1", "p2")
.save(file.getCanonicalPath)
checkQueries(
read.format(dataSourceName)
.option("dataSchema", dataSchema.json)
.load(file.getCanonicalPath))
}
}
test("save()/load() - partitioned table - Overwrite") {
withTempPath { file =>
partitionedTestDF.write
.format(dataSourceName)
.mode(SaveMode.Overwrite)
.partitionBy("p1", "p2")
.save(file.getCanonicalPath)
partitionedTestDF.write
.format(dataSourceName)
.mode(SaveMode.Overwrite)
.partitionBy("p1", "p2")
.save(file.getCanonicalPath)
checkAnswer(
read.format(dataSourceName)
.option("dataSchema", dataSchema.json)
.load(file.getCanonicalPath),
partitionedTestDF.collect())
}
}
test("save()/load() - partitioned table - Append") {
withTempPath { file =>
partitionedTestDF.write
.format(dataSourceName)
.mode(SaveMode.Overwrite)
.partitionBy("p1", "p2")
.save(file.getCanonicalPath)
partitionedTestDF.write
.format(dataSourceName)
.mode(SaveMode.Append)
.partitionBy("p1", "p2")
.save(file.getCanonicalPath)
checkAnswer(
read.format(dataSourceName)
.option("dataSchema", dataSchema.json)
.load(file.getCanonicalPath),
partitionedTestDF.unionAll(partitionedTestDF).collect())
}
}
test("save()/load() - partitioned table - Append - new partition values") {
withTempPath { file =>
partitionedTestDF1.write
.format(dataSourceName)
.mode(SaveMode.Overwrite)
.partitionBy("p1", "p2")
.save(file.getCanonicalPath)
partitionedTestDF2.write
.format(dataSourceName)
.mode(SaveMode.Append)
.partitionBy("p1", "p2")
.save(file.getCanonicalPath)
checkAnswer(
read.format(dataSourceName)
.option("dataSchema", dataSchema.json)
.load(file.getCanonicalPath),
partitionedTestDF.collect())
}
}
test("save()/load() - partitioned table - ErrorIfExists") {
withTempDir { file =>
intercept[RuntimeException] {
partitionedTestDF.write
.format(dataSourceName)
.mode(SaveMode.ErrorIfExists)
.partitionBy("p1", "p2")
.save(file.getCanonicalPath)
}
}
}
test("save()/load() - partitioned table - Ignore") {
withTempDir { file =>
partitionedTestDF.write
.format(dataSourceName).mode(SaveMode.Ignore).save(file.getCanonicalPath)
val path = new Path(file.getCanonicalPath)
val fs = path.getFileSystem(SparkHadoopUtil.get.conf)
assert(fs.listStatus(path).isEmpty)
}
}
test("saveAsTable()/load() - non-partitioned table - Overwrite") {
testDF.write.format(dataSourceName).mode(SaveMode.Overwrite)
.option("dataSchema", dataSchema.json)
.saveAsTable("t")
withTable("t") {
checkAnswer(table("t"), testDF.collect())
}
}
test("saveAsTable()/load() - non-partitioned table - Append") {
testDF.write.format(dataSourceName).mode(SaveMode.Overwrite).saveAsTable("t")
testDF.write.format(dataSourceName).mode(SaveMode.Append).saveAsTable("t")
withTable("t") {
checkAnswer(table("t"), testDF.unionAll(testDF).orderBy("a").collect())
}
}
test("saveAsTable()/load() - non-partitioned table - ErrorIfExists") {
Seq.empty[(Int, String)].toDF().registerTempTable("t")
withTempTable("t") {
intercept[AnalysisException] {
testDF.write.format(dataSourceName).mode(SaveMode.ErrorIfExists).saveAsTable("t")
}
}
}
test("saveAsTable()/load() - non-partitioned table - Ignore") {
Seq.empty[(Int, String)].toDF().registerTempTable("t")
withTempTable("t") {
testDF.write.format(dataSourceName).mode(SaveMode.Ignore).saveAsTable("t")
assert(table("t").collect().isEmpty)
}
}
test("saveAsTable()/load() - partitioned table - simple queries") {
partitionedTestDF.write.format(dataSourceName)
.mode(SaveMode.Overwrite)
.option("dataSchema", dataSchema.json)
.saveAsTable("t")
withTable("t") {
checkQueries(table("t"))
}
}
test("saveAsTable()/load() - partitioned table - Overwrite") {
partitionedTestDF.write
.format(dataSourceName)
.mode(SaveMode.Overwrite)
.option("dataSchema", dataSchema.json)
.partitionBy("p1", "p2")
.saveAsTable("t")
partitionedTestDF.write
.format(dataSourceName)
.mode(SaveMode.Overwrite)
.option("dataSchema", dataSchema.json)
.partitionBy("p1", "p2")
.saveAsTable("t")
withTable("t") {
checkAnswer(table("t"), partitionedTestDF.collect())
}
}
test("saveAsTable()/load() - partitioned table - Append") {
partitionedTestDF.write
.format(dataSourceName)
.mode(SaveMode.Overwrite)
.option("dataSchema", dataSchema.json)
.partitionBy("p1", "p2")
.saveAsTable("t")
partitionedTestDF.write
.format(dataSourceName)
.mode(SaveMode.Append)
.option("dataSchema", dataSchema.json)
.partitionBy("p1", "p2")
.saveAsTable("t")
withTable("t") {
checkAnswer(table("t"), partitionedTestDF.unionAll(partitionedTestDF).collect())
}
}
test("saveAsTable()/load() - partitioned table - Append - new partition values") {
partitionedTestDF1.write
.format(dataSourceName)
.mode(SaveMode.Overwrite)
.option("dataSchema", dataSchema.json)
.partitionBy("p1", "p2")
.saveAsTable("t")
partitionedTestDF2.write
.format(dataSourceName)
.mode(SaveMode.Append)
.option("dataSchema", dataSchema.json)
.partitionBy("p1", "p2")
.saveAsTable("t")
withTable("t") {
checkAnswer(table("t"), partitionedTestDF.collect())
}
}
test("saveAsTable()/load() - partitioned table - Append - mismatched partition columns") {
partitionedTestDF1.write
.format(dataSourceName)
.mode(SaveMode.Overwrite)
.option("dataSchema", dataSchema.json)
.partitionBy("p1", "p2")
.saveAsTable("t")
// Using only a subset of all partition columns
intercept[Throwable] {
partitionedTestDF2.write
.format(dataSourceName)
.mode(SaveMode.Append)
.option("dataSchema", dataSchema.json)
.partitionBy("p1")
.saveAsTable("t")
}
}
test("saveAsTable()/load() - partitioned table - ErrorIfExists") {
Seq.empty[(Int, String)].toDF().registerTempTable("t")
withTempTable("t") {
intercept[AnalysisException] {
partitionedTestDF.write
.format(dataSourceName)
.mode(SaveMode.ErrorIfExists)
.option("dataSchema", dataSchema.json)
.partitionBy("p1", "p2")
.saveAsTable("t")
}
}
}
test("saveAsTable()/load() - partitioned table - Ignore") {
Seq.empty[(Int, String)].toDF().registerTempTable("t")
withTempTable("t") {
partitionedTestDF.write
.format(dataSourceName)
.mode(SaveMode.Ignore)
.option("dataSchema", dataSchema.json)
.partitionBy("p1", "p2")
.saveAsTable("t")
assert(table("t").collect().isEmpty)
}
}
test("Hadoop style globbing") {
withTempPath { file =>
partitionedTestDF.write
.format(dataSourceName)
.mode(SaveMode.Overwrite)
.partitionBy("p1", "p2")
.save(file.getCanonicalPath)
val df = read
.format(dataSourceName)
.option("dataSchema", dataSchema.json)
.load(s"${file.getCanonicalPath}/p1=*/p2=???")
val expectedPaths = Set(
s"${file.getCanonicalFile}/p1=1/p2=foo",
s"${file.getCanonicalFile}/p1=2/p2=foo",
s"${file.getCanonicalFile}/p1=1/p2=bar",
s"${file.getCanonicalFile}/p1=2/p2=bar"
).map { p =>
val path = new Path(p)
val fs = path.getFileSystem(sqlContext.sparkContext.hadoopConfiguration)
path.makeQualified(fs.getUri, fs.getWorkingDirectory).toString
}
val actualPaths = df.queryExecution.analyzed.collectFirst {
case LogicalRelation(relation: HadoopFsRelation) =>
relation.paths.toSet
}.getOrElse {
fail("Expect an FSBasedRelation, but none could be found")
}
assert(actualPaths === expectedPaths)
checkAnswer(df, partitionedTestDF.collect())
}
}
test("Partition column type casting") {
withTempPath { file =>
val input = partitionedTestDF.select('a, 'b, 'p1.cast(StringType).as('ps), 'p2)
input
.write
.format(dataSourceName)
.mode(SaveMode.Overwrite)
.partitionBy("ps", "p2")
.saveAsTable("t")
withTempTable("t") {
checkAnswer(table("t"), input.collect())
}
}
}
test("SPARK-7616: adjust column name order accordingly when saving partitioned table") {
val df = (1 to 3).map(i => (i, s"val_$i", i * 2)).toDF("a", "b", "c")
df.write
.format(dataSourceName)
.mode(SaveMode.Overwrite)
.partitionBy("c", "a")
.saveAsTable("t")
withTable("t") {
checkAnswer(table("t"), df.select('b, 'c, 'a).collect())
}
}
// NOTE: This test suite is not super deterministic. On nodes with only relatively few cores
// (4 or even 1), it's hard to reproduce the data loss issue. But on nodes with for example 8 or
// more cores, the issue can be reproduced steadily. Fortunately our Jenkins builder meets this
// requirement. We probably want to move this test case to spark-integration-tests or spark-perf
// later.
test("SPARK-8406: Avoids name collision while writing files") {
withTempPath { dir =>
val path = dir.getCanonicalPath
sqlContext
.range(10000)
.repartition(250)
.write
.mode(SaveMode.Overwrite)
.format(dataSourceName)
.save(path)
assertResult(10000) {
sqlContext
.read
.format(dataSourceName)
.option("dataSchema", StructType(StructField("id", LongType) :: Nil).json)
.load(path)
.count()
}
}
}
test("SPARK-8578 specified custom output committer will not be used to append data") {
val clonedConf = new Configuration(configuration)
try {
val df = sqlContext.range(1, 10).toDF("i")
withTempPath { dir =>
df.write.mode("append").format(dataSourceName).save(dir.getCanonicalPath)
configuration.set(
SQLConf.OUTPUT_COMMITTER_CLASS,
classOf[AlwaysFailOutputCommitter].getName)
// Since Parquet has its own output committer setting, also set it
// to AlwaysFailParquetOutputCommitter at here.
configuration.set("spark.sql.parquet.output.committer.class",
classOf[AlwaysFailParquetOutputCommitter].getName)
        // Because the data already exists,
        // this append should succeed: we will use the output committer associated
        // with the file format, and AlwaysFailOutputCommitter will not be used.
df.write.mode("append").format(dataSourceName).save(dir.getCanonicalPath)
checkAnswer(
sqlContext.read
.format(dataSourceName)
.option("dataSchema", df.schema.json)
.load(dir.getCanonicalPath),
df.unionAll(df))
        // This will fail because AlwaysFailOutputCommitter is used when we overwrite existing data.
intercept[Exception] {
df.write.mode("overwrite").format(dataSourceName).save(dir.getCanonicalPath)
}
}
withTempPath { dir =>
configuration.set(
SQLConf.OUTPUT_COMMITTER_CLASS,
classOf[AlwaysFailOutputCommitter].getName)
// Since Parquet has its own output committer setting, also set it
// to AlwaysFailParquetOutputCommitter at here.
configuration.set("spark.sql.parquet.output.committer.class",
classOf[AlwaysFailParquetOutputCommitter].getName)
        // Because there is no existing data, this append will fail:
        // AlwaysFailOutputCommitter is used when there is no existing data
        // to append to.
intercept[Exception] {
df.write.mode("append").format(dataSourceName).save(dir.getCanonicalPath)
}
}
} finally {
// Hadoop 1 doesn't have `Configuration.unset`
configuration.clear()
clonedConf.foreach(entry => configuration.set(entry.getKey, entry.getValue))
}
}
}
// This class is used to test SPARK-8578. We should not use any custom output committer when
// we actually append data to an existing dir.
class AlwaysFailOutputCommitter(
outputPath: Path,
context: TaskAttemptContext)
extends FileOutputCommitter(outputPath, context) {
override def commitJob(context: JobContext): Unit = {
sys.error("Intentional job commitment failure for testing purpose.")
}
}
// This class is used to test SPARK-8578. We should not use any custom output committer when
// we actually append data to an existing dir.
class AlwaysFailParquetOutputCommitter(
outputPath: Path,
context: TaskAttemptContext)
extends ParquetOutputCommitter(outputPath, context) {
override def commitJob(context: JobContext): Unit = {
sys.error("Intentional job commitment failure for testing purpose.")
}
}
class SimpleTextHadoopFsRelationSuite extends HadoopFsRelationTest {
override val dataSourceName: String = classOf[SimpleTextSource].getCanonicalName
import sqlContext._
test("save()/load() - partitioned table - simple queries - partition columns in data") {
withTempDir { file =>
val basePath = new Path(file.getCanonicalPath)
val fs = basePath.getFileSystem(SparkHadoopUtil.get.conf)
val qualifiedBasePath = fs.makeQualified(basePath)
for (p1 <- 1 to 2; p2 <- Seq("foo", "bar")) {
val partitionDir = new Path(qualifiedBasePath, s"p1=$p1/p2=$p2")
sparkContext
.parallelize(for (i <- 1 to 3) yield s"$i,val_$i,$p1")
.saveAsTextFile(partitionDir.toString)
}
val dataSchemaWithPartition =
StructType(dataSchema.fields :+ StructField("p1", IntegerType, nullable = true))
checkQueries(
read.format(dataSourceName)
.option("dataSchema", dataSchemaWithPartition.json)
.load(file.getCanonicalPath))
}
}
}
class CommitFailureTestRelationSuite extends SparkFunSuite with SQLTestUtils {
override val sqlContext = TestHive
// When committing a task, `CommitFailureTestSource` throws an exception for testing purpose.
val dataSourceName: String = classOf[CommitFailureTestSource].getCanonicalName
test("SPARK-7684: commitTask() failure should fallback to abortTask()") {
withTempPath { file =>
      // Here we coalesce to a single partition to ensure that only a single task is issued. This
      // prevents the race condition that happens when FileOutputCommitter tries to remove the
      // `_temporary` directory while committing/aborting the job. See SPARK-8513 for more details.
val df = sqlContext.range(0, 10).coalesce(1)
intercept[SparkException] {
df.write.format(dataSourceName).save(file.getCanonicalPath)
}
val fs = new Path(file.getCanonicalPath).getFileSystem(SparkHadoopUtil.get.conf)
assert(!fs.exists(new Path(file.getCanonicalPath, "_temporary")))
}
}
}
class ParquetHadoopFsRelationSuite extends HadoopFsRelationTest {
override val dataSourceName: String = classOf[parquet.DefaultSource].getCanonicalName
import sqlContext._
import sqlContext.implicits._
test("save()/load() - partitioned table - simple queries - partition columns in data") {
withTempDir { file =>
val basePath = new Path(file.getCanonicalPath)
val fs = basePath.getFileSystem(SparkHadoopUtil.get.conf)
val qualifiedBasePath = fs.makeQualified(basePath)
for (p1 <- 1 to 2; p2 <- Seq("foo", "bar")) {
val partitionDir = new Path(qualifiedBasePath, s"p1=$p1/p2=$p2")
sparkContext
.parallelize(for (i <- 1 to 3) yield (i, s"val_$i", p1))
.toDF("a", "b", "p1")
.write.parquet(partitionDir.toString)
}
val dataSchemaWithPartition =
StructType(dataSchema.fields :+ StructField("p1", IntegerType, nullable = true))
checkQueries(
read.format(dataSourceName)
.option("dataSchema", dataSchemaWithPartition.json)
.load(file.getCanonicalPath))
}
}
test("SPARK-7868: _temporary directories should be ignored") {
withTempPath { dir =>
val df = Seq("a", "b", "c").zipWithIndex.toDF()
df.write
.format("parquet")
.save(dir.getCanonicalPath)
df.write
.format("parquet")
.save(s"${dir.getCanonicalPath}/_temporary")
checkAnswer(read.format("parquet").load(dir.getCanonicalPath), df.collect())
}
}
test("SPARK-8014: Avoid scanning output directory when SaveMode isn't SaveMode.Append") {
withTempDir { dir =>
val path = dir.getCanonicalPath
val df = Seq(1 -> "a").toDF()
// Creates an arbitrary file. If this directory gets scanned, ParquetRelation2 will throw
// since it's not a valid Parquet file.
val emptyFile = new File(path, "empty")
Files.createParentDirs(emptyFile)
Files.touch(emptyFile)
// This shouldn't throw anything.
df.write.format("parquet").mode(SaveMode.Ignore).save(path)
// This should only complain that the destination directory already exists, rather than file
// "empty" is not a Parquet file.
assert {
intercept[RuntimeException] {
df.write.format("parquet").mode(SaveMode.ErrorIfExists).save(path)
}.getMessage.contains("already exists")
}
// This shouldn't throw anything.
df.write.format("parquet").mode(SaveMode.Overwrite).save(path)
checkAnswer(read.format("parquet").load(path), df)
}
}
test("SPARK-8079: Avoid NPE thrown from BaseWriterContainer.abortJob") {
withTempPath { dir =>
intercept[AnalysisException] {
        // Parquet doesn't allow field names with spaces. Here we intentionally make an
        // exception be thrown from the `ParquetRelation2.prepareForWriteJob()` method to trigger
        // the bug. Please refer to SPARK-8079 for more details.
range(1, 10)
.withColumnRenamed("id", "a b")
.write
.format("parquet")
.save(dir.getCanonicalPath)
}
}
}
test("SPARK-8604: Parquet data source should write summary file while doing appending") {
withTempPath { dir =>
val path = dir.getCanonicalPath
val df = sqlContext.range(0, 5)
df.write.mode(SaveMode.Overwrite).parquet(path)
val summaryPath = new Path(path, "_metadata")
val commonSummaryPath = new Path(path, "_common_metadata")
val fs = summaryPath.getFileSystem(configuration)
fs.delete(summaryPath, true)
fs.delete(commonSummaryPath, true)
df.write.mode(SaveMode.Append).parquet(path)
checkAnswer(sqlContext.read.parquet(path), df.unionAll(df))
assert(fs.exists(summaryPath))
assert(fs.exists(commonSummaryPath))
}
}
}
|
andrewor14/iolap
|
sql/hive/src/test/scala/org/apache/spark/sql/sources/hadoopFsRelationSuites.scala
|
Scala
|
apache-2.0
| 24,798
|
/*
* Copyright (C) 2009-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api
import java.io.{ InputStream, File }
/**
* The environment for the application.
*
* Captures concerns relating to the classloader and the filesystem for the application.
*
* @param rootPath The root path that the application is deployed at.
* @param classLoader The classloader that all application classes and resources can be loaded from.
* @param mode The mode of the application.
*/
case class Environment(
rootPath: File,
classLoader: ClassLoader,
mode: Mode) {
/**
* Retrieves a file relative to the application root path.
*
* Note that it is up to you to manage the files in the application root path in production. By default, there will
* be nothing available in the application root path.
*
* For example, to retrieve some deployment specific data file:
* {{{
* val myDataFile = application.getFile("data/data.xml")
* }}}
*
* @param relativePath relative path of the file to fetch
* @return a file instance; it is not guaranteed that the file exists
*/
def getFile(relativePath: String): File = new File(rootPath, relativePath)
/**
* Retrieves a file relative to the application root path.
* This method returns an Option[File], using None if the file was not found.
*
* Note that it is up to you to manage the files in the application root path in production. By default, there will
* be nothing available in the application root path.
*
* For example, to retrieve some deployment specific data file:
* {{{
* val myDataFile = application.getExistingFile("data/data.xml")
* }}}
*
* @param relativePath the relative path of the file to fetch
* @return an existing file
*/
def getExistingFile(relativePath: String): Option[File] = Some(getFile(relativePath)).filter(_.exists)
/**
* Scans the application classloader to retrieve a resource.
*
* The conf directory is included on the classpath, so this may be used to look up resources, relative to the conf
* directory.
*
* For example, to retrieve the conf/logback.xml configuration file:
* {{{
* val maybeConf = application.resource("logback.xml")
* }}}
*
* @param name the absolute name of the resource (from the classpath root)
* @return the resource URL, if found
*/
def resource(name: String): Option[java.net.URL] = {
val n = name.stripPrefix("/")
Option(classLoader.getResource(n))
}
/**
* Scans the application classloader to retrieve a resource’s contents as a stream.
*
* The conf directory is included on the classpath, so this may be used to look up resources, relative to the conf
* directory.
*
* For example, to retrieve the conf/logback.xml configuration file:
* {{{
* val maybeConf = application.resourceAsStream("logback.xml")
* }}}
*
* @param name the absolute name of the resource (from the classpath root)
* @return a stream, if found
*/
def resourceAsStream(name: String): Option[InputStream] = {
val n = name.stripPrefix("/")
Option(classLoader.getResourceAsStream(n))
}
/**
   * @return the Java representation of this environment.
*/
def asJava: play.Environment = new play.Environment(this)
}
object Environment {
/**
* A simple environment.
*
* Uses the same classloader that the environment classloader is defined in, and the current working directory as the
* path.
*/
def simple(path: File = new File("."), mode: Mode = Mode.Test) = Environment(path, Environment.getClass.getClassLoader, mode)
}
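// Editor's usage sketch for the Environment above: build a simple test
// environment and resolve a file and a classpath resource. The file and
// resource names are hypothetical.
object EnvironmentUsageExample {
  def main(args: Array[String]): Unit = {
    val env = Environment.simple() // CWD as root, this classloader, Mode.Test
    println(env.getFile("data/data.xml")) // a File handle; may not exist on disk
    println(env.resource("logback.xml"))  // None when absent from the classpath
  }
}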
|
Shenker93/playframework
|
framework/src/play/src/main/scala/play/api/Environment.scala
|
Scala
|
apache-2.0
| 3,635
|
/*
Copyright 2018 Jo Pol
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see http://www.gnu.org/licenses/gpl.html dibl
*/package dibl
import dibl.Force.Point
import dibl.proto.TilesConfig
import scala.scalajs.js.annotation.{ JSExport, JSExportTopLevel }
@JSExportTopLevel("NewPairDiagram") object NewPairDiagram {
@JSExport
def create(config: TilesConfig): Diagram = {
val itemMatrix = config.getItemMatrix
val rows: Int = itemMatrix.length
val cols: Int = itemMatrix.head.length
var seqNr = 0
def toPoint(row: Double, col: Double) = {
Point(x = 30 + 15 * col, y = 30 + 15 * row)
}
val northEastNode = toPoint(0, 0)
val southWestNode = toPoint(rows - 1, cols - 1)
def isFringe(startPoint: Point): Boolean = {
startPoint.x < northEastNode.x || startPoint.x > southWestNode.x || startPoint.y < northEastNode.y
}
    /** @param point one of the tips of a V; the legs may lie flat but never collapse
      * @param target the bottom of the V, which makes the point unique when we have to search for it
      * @return a SimpleNode at the given point, pointing at the target of the V
      */
def toSimpleNode(point: Point, target: Node) = {
seqNr += 1
SimpleNode(seqNr, point, target.target)
}
def toNodeSeq(row: Int, col: Int): Seq[ConnectedNode] = {
val item = itemMatrix(row)(col)
if(item.relativeSources.isEmpty) return Seq.empty
val Array((leftRow, leftCol), (rightRow, rightCol)) = item.relativeSources
val sourceLeft = toPoint(row + leftRow, col + leftCol)
val sourceRight = toPoint(row + rightRow, col + rightCol)
val target = toPoint(row, col)
val colorName = Stitches.defaultColorName(item.stitch)
seqNr += 1
val node = Node(seqNr, target, sourceLeft, sourceRight, item.stitch, item.id, colorName)
(isFringe(sourceLeft), isFringe(sourceRight)) match {
case (false, false) => Seq(node)
case (true, false) => Seq(node, toSimpleNode(sourceLeft, node))
case (false, true) => Seq(node, toSimpleNode(sourceRight, node))
case (true, true) => Seq(node, toSimpleNode(sourceLeft, node), toSimpleNode(sourceRight, node))
}
}
val nodes: Seq[ConnectedNode] = Seq(SimpleNode(0, Point(0, 0), Point(0, 0))) ++
(0 until rows).flatMap(row =>
(0 until cols).map(col =>
toNodeSeq(row, col)
)
).flatten
// lookup table
val nodeMap: Map[(Point, Point), Int] = nodes.map {
case n: Node => (n.target, n.target) -> n.seqNr
case n: SimpleNode => (n.source, n.target) -> n.seqNr
}.toMap
def findNode(source: Point, target: Point): ConnectedNode = {
// try to find a SimpleNode, if not found, try a Node, fall back to first dummy node
nodes(nodeMap.getOrElse((source, target), nodeMap.getOrElse((source, source), 0)))
}
def toPairLinks(target: Node) = {
val leftNode = findNode(target.srcLeft, target.target)
val rightNode = findNode(target.srcRight, target.target)
Seq(
LinkProps.pairLink(
source = leftNode.seqNr,
target = target.seqNr,
start = leftNode.color,
mid = leftNode.twistsToLeftOf(target) - 1,
end = target.color),
LinkProps.pairLink(
source = rightNode.seqNr,
target = target.seqNr,
start = rightNode.color,
mid = rightNode.twistsToRightOf(target) - 1,
end = target.color)
)
}
var pairNr = 0
Diagram(
nodes.map {
case SimpleNode(_, Point(x, y), _) =>
pairNr += 1 // TODO why does it start with 3?
NodeProps.node(s"Pair $pairNr", x, y)
case Node(_, Point(x, y), _, _, stitch, id, color) => NodeProps.node(s"$stitch - $id", color, x, y)
},
nodes.withFilter(_.isInstanceOf[Node])
.flatMap { case target: Node => toPairLinks(target) }
)
}
private trait ConnectedNode {
val seqNr: Int
val target: Point
val color: String
def twistsToLeftOf(target: Node) = 0
def twistsToRightOf(target: Node) = 0
}
private case class SimpleNode(override val seqNr: Int,
source: Point,
override val target: Point
) extends ConnectedNode {
override val color = "pair" // the "color" is a named marker alias customized "arrow head"
}
private case class Node(override val seqNr: Int,
override val target: Point,
srcLeft: Point,
srcRight: Point,
stitch: String,
id: String,
override val color: String) extends ConnectedNode {
private val openingTwists: String = stitch.replaceAll("c.*", "").replaceAll("t", "lr")
private val closingTwists = stitch.replaceAll(".*c", "").replaceAll("t", "lr")
private val openingTwistsLeft: Int = openingTwists.count(_ == 'l')
private val openingTwistsRight: Int = openingTwists.count(_ == 'r')
private val closingTwistsLeft: Int = closingTwists.count(_ == 'l')
private val closingTwistsRight: Int = closingTwists.count(_ == 'r')
// stitches (X's) arranged in ascii art:
// X-X source
// -X- target
// the caller knows if this (the source) is the left or right stitch
override def twistsToLeftOf(target: Node): Int = closingTwistsRight + target.openingTwistsLeft
override def twistsToRightOf(target: Node): Int = closingTwistsLeft + target.openingTwistsRight
}
}
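// Editor's standalone sketch of the twist-counting trick used by Node above:
// the opening twists are everything before the first cross 'c', the closing
// twists everything after the last one, and each 't' twists both the left and
// the right thread pair ("lr"). The stitch string "tct" is a made-up example.
object TwistCountSketch {
  def main(args: Array[String]): Unit = {
    val stitch = "tct"
    val opening = stitch.replaceAll("c.*", "").replaceAll("t", "lr")
    val closing = stitch.replaceAll(".*c", "").replaceAll("t", "lr")
    println(s"opening: left=${opening.count(_ == 'l')} right=${opening.count(_ == 'r')}")
    println(s"closing: left=${closing.count(_ == 'l')} right=${closing.count(_ == 'r')}")
  }
}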
|
d-bl/GroundForge
|
src/main/scala/dibl/NewPairDiagram.scala
|
Scala
|
gpl-3.0
| 6,126
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rdd
import scala.reflect.ClassTag
import org.apache.spark.{Partitioner, RangePartitioner}
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.internal.Logging
/**
* Extra functions available on RDDs of (key, value) pairs where the key is sortable through
* an implicit conversion. They will work with any key type `K` that has an implicit `Ordering[K]`
* in scope. Ordering objects already exist for all of the standard primitive types. Users can also
* define their own orderings for custom types, or to override the default ordering. The implicit
* ordering that is in the closest scope will be used.
*
* {{{
* import org.apache.spark.SparkContext._
*
* val rdd: RDD[(String, Int)] = ...
* implicit val caseInsensitiveOrdering = new Ordering[String] {
* override def compare(a: String, b: String) =
* a.toLowerCase(Locale.ROOT).compare(b.toLowerCase(Locale.ROOT))
* }
*
* // Sort by key, using the above case insensitive ordering.
* rdd.sortByKey()
* }}}
*/
class OrderedRDDFunctions[K : Ordering : ClassTag,
V: ClassTag,
P <: Product2[K, V] : ClassTag] @DeveloperApi() (
self: RDD[P])
extends Logging with Serializable {
private val ordering = implicitly[Ordering[K]]
/**
* Sort the RDD by key, so that each partition contains a sorted range of the elements. Calling
* `collect` or `save` on the resulting RDD will return or output an ordered list of records
* (in the `save` case, they will be written to multiple `part-X` files in the filesystem, in
* order of the keys).
*/
// TODO: this currently doesn't work on P other than Tuple2!
def sortByKey(ascending: Boolean = true, numPartitions: Int = self.partitions.length)
: RDD[(K, V)] = self.withScope
{
val part = new RangePartitioner(numPartitions, self, ascending)
new ShuffledRDD[K, V, V](self, part)
.setKeyOrdering(if (ascending) ordering else ordering.reverse)
}
/**
* Repartition the RDD according to the given partitioner and, within each resulting partition,
* sort records by their keys.
*
* This is more efficient than calling `repartition` and then sorting within each partition
* because it can push the sorting down into the shuffle machinery.
*/
def repartitionAndSortWithinPartitions(partitioner: Partitioner): RDD[(K, V)] = self.withScope {
new ShuffledRDD[K, V, V](self, partitioner).setKeyOrdering(ordering)
}
/**
* Returns an RDD containing only the elements in the inclusive range `lower` to `upper`.
* If the RDD has been partitioned using a `RangePartitioner`, then this operation can be
* performed efficiently by only scanning the partitions that might contain matching elements.
* Otherwise, a standard `filter` is applied to all partitions.
*/
def filterByRange(lower: K, upper: K): RDD[P] = self.withScope {
def inRange(k: K): Boolean = ordering.gteq(k, lower) && ordering.lteq(k, upper)
val rddToFilter: RDD[P] = self.partitioner match {
case Some(rp: RangePartitioner[K, V]) =>
        val partitionIndices = (rp.getPartition(lower), rp.getPartition(upper)) match {
          case (l, u) => Math.min(l, u) to Math.max(l, u)
        }
        PartitionPruningRDD.create(self, partitionIndices.contains)
case _ =>
self
}
rddToFilter.filter { case (k, v) => inRange(k) }
}
}
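// Editor's hedged usage sketch for filterByRange: after sortByKey the RDD
// carries a RangePartitioner, so the range filter only scans partitions that
// can contain keys in [3, 7]. The master and app name are illustrative.
object FilterByRangeExample {
  import org.apache.spark.{SparkConf, SparkContext}

  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setMaster("local[2]").setAppName("filterByRange-demo"))
    val sorted = sc.parallelize(Seq(5 -> "e", 1 -> "a", 9 -> "i")).sortByKey()
    println(sorted.filterByRange(3, 7).collect().mkString(", ")) // keeps only (5,e)
    sc.stop()
  }
}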
|
pgandhi999/spark
|
core/src/main/scala/org/apache/spark/rdd/OrderedRDDFunctions.scala
|
Scala
|
apache-2.0
| 4,248
|
package travelling
import junit.framework.TestCase
import org.junit.Test
import org.junit.Assert._
class VecTest {
@Test
def testClamp() {
assertVectorEquals(Vec(1, 1), Vec(100, 100).clamp(1))
assertVectorEquals(Vec(1, 0.5f), Vec(100, 50).clamp(1))
assertVectorEquals(Vec(1, 1), Vec(1, 1).clamp(1))
assertVectorEquals(Vec(-1, 0.5f), Vec(-100, 50).clamp(1))
}
@Test
  def testShortestDistance() {
val a = Vec(0, 0)
val b = Vec(0, 100)
// Distance at the edges == 0
assertEquals(0, Vec.shortestDistance(a, b, Vec(0, 0)), 0.001f)
assertEquals(0, Vec.shortestDistance(a, b, Vec(0, 100)), 0.001f)
// Distance on the segment == 0
assertEquals(0, Vec.shortestDistance(a, b, Vec(0, 50)), 0.001f)
// Distance perpendicular from the segment
assertEquals(5, Vec.shortestDistance(a, b, Vec(5, 0)), 0.001f)
assertEquals(5, Vec.shortestDistance(a, b, Vec(5, 100)), 0.001f)
assertEquals(5, Vec.shortestDistance(a, b, Vec(5, 50)), 0.001f)
// Distance after the edges
assertEquals(100, Vec.shortestDistance(a, b, Vec(0, 200)), 0.001f)
assertEquals(75, Vec.shortestDistance(a, b, Vec(0, -75)), 0.001f)
}
  def assertVectorEquals(v1: Vec, v2: Vec) {
assertEquals(v1.x, v2.x, 0.00001f)
assertEquals(v1.y, v2.y, 0.00001f)
}
}
|
fdb/travelling_letters
|
src/test/scala/travelling/VecTest.scala
|
Scala
|
lgpl-3.0
| 1,312
|
/*
* Copyright 2016 Miroslav Janíček
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.classdump.luna.test.fragments
import org.classdump.luna.Table
import org.classdump.luna.runtime.LuaFunction
import org.classdump.luna.test.{FragmentBundle, FragmentExpectations, OneLiners}
object BasicLibFragments extends FragmentBundle with FragmentExpectations with OneLiners {
in(BasicContext) {
about("type") {
program("return type(nil)") succeedsWith "nil"
program("return type(true)") succeedsWith "boolean"
program("return type(false)") succeedsWith "boolean"
program("return type(0)") succeedsWith "number"
program("return type(0.1)") succeedsWith "number"
program("return type(\\"\\")") succeedsWith "string"
program("return type(\\"hello\\")") succeedsWith "string"
program("return type(\\"2\\")") succeedsWith "string"
program("return type(\\"0.2\\")") succeedsWith "string"
program("return type(function() end)") succeedsWith "function"
program("return type(type)") succeedsWith "function"
program("return type({})") succeedsWith "table"
program("return type()") failsWith "" << "bad argument #1 to 'type' (value expected)"
}
about("print") {
fragment("print retrieves tostring once") {
"""local n = 0
|
|local tos = tostring
|local function cf(x)
| return function(y) return '['..x..'|'..tos(y)..']' end
|end
|setmetatable(_ENV, {__index=function(t,k) n = n + 1; return cf(n) end})
|
|tostring = nil
|
|print(nil, 10, "x")
|return n
"""
} in BasicContext succeedsWith (1)
}
about("tostring") {
program("return tostring(nil)") succeedsWith "nil"
program("return tostring(true)") succeedsWith "true"
program("return tostring(false)") succeedsWith "false"
program("return tostring(0)") succeedsWith "0"
program("return tostring(-0)") succeedsWith "0"
program("return tostring(0.0)") succeedsWith "0.0"
program("return tostring(-0.0)") succeedsWith "-0.0"
program("return tostring(\\"\\")") succeedsWith ""
program("return tostring(\\"1\\")") succeedsWith "1"
program("return tostring(\\"1.00\\")") succeedsWith "1.00"
program("return tostring(1 / 0)") succeedsWith "inf"
program("return tostring(-1 / 0)") succeedsWith "-inf"
program("return tostring(0 / 0)") succeedsWith "nan"
program("return tostring(function() end)") succeedsWith (stringStartingWith("function: "))
program("return tostring(tostring)") succeedsWith (stringStartingWith("function: "))
program("return tostring({})") succeedsWith (stringStartingWith("table: "))
program("return tostring()") failsWith "" << "bad argument #1 to 'tostring' (value expected)"
}
about("_VERSION") {
program("return _VERSION") succeedsWith "Lua 5.3"
}
about("tonumber") {
program("return tonumber(nil)") succeedsWith null
program("return tonumber(1)") succeedsWith 1
program("return tonumber(0.3)") succeedsWith 0.3
program("return tonumber(0)") succeedsWith 0
program("return tonumber(0.0)") succeedsWith 0.0
program("return tonumber(\\"x\\")") succeedsWith null
program("return tonumber(\\"2\\")") succeedsWith 2
program("return tonumber(\\"0.4\\")") succeedsWith 0.4
program("return tonumber(\\"3.0\\")") succeedsWith 3.0
program("return tonumber({})") succeedsWith null
program("tonumber(1, \\"x\\")") failsWith "" << "bad argument #2 to 'tonumber' (number expected, got string)"
program("tonumber(\\"1\\", 1)") failsWith "" << "bad argument #2 to 'tonumber' (base out of range)"
program("tonumber(\\"1\\", 37)") failsWith "" << "bad argument #2 to 'tonumber' (base out of range)"
program("tonumber(1, 1)") failsWith "" << "bad argument #1 to 'tonumber' (string expected, got number)"
program("tonumber(nil, 10)") failsWith "" << "bad argument #1 to 'tonumber' (string expected, got nil)"
program("tonumber(nil, 1)") failsWith "" << "bad argument #1 to 'tonumber' (string expected, got nil)"
program("return tonumber(\\"-AbCd\\", 14)") succeedsWith -29777
program("return tonumber(\\"+Hello\\", 36)") succeedsWith 29234652
program("return tonumber(\\" spaces \\", 36)") succeedsWith 1735525972
program("return tonumber(\\"spaces\\", 36)") succeedsWith 1735525972
program("return tonumber(\\"A0\\", 10)") succeedsWith null
program("return tonumber(\\"99\\", 9)") succeedsWith null
program("return tonumber(\\"zzz\\", 36)") succeedsWith 46655
program("return tonumber(1 / 0, 36)") failsWith "" << "bad argument #1 to 'tonumber' (string expected, got number)"
program("return tonumber(0 / 0, 36)") failsWith "" << "bad argument #1 to 'tonumber' (string expected, got number)"
program("return tonumber(0.2, 10)") failsWith "" << "bad argument #1 to 'tonumber' (string expected, got number)"
program("return tonumber()") failsWith "" << "bad argument #1 to 'tonumber' (value expected)"
}
about("getmetatable") {
program("return getmetatable(nil)") succeedsWith null
program("return getmetatable(true)") succeedsWith null
program("return getmetatable(0)") succeedsWith null
program("return getmetatable(\\"hurray\\")") succeedsWith null // defined by the string library
program("getmetatable()") failsWith "" << "bad argument #1 to 'getmetatable' (value expected)"
}
about("setmetatable") {
program("setmetatable(0, nil)") failsWith "" << "bad argument #1 to 'setmetatable' (table expected, got number)"
program("setmetatable({}, 0)") failsWith "" << "bad argument #2 to 'setmetatable' (nil or table expected)"
program("setmetatable({})") failsWith "" << "bad argument #2 to 'setmetatable' (nil or table expected)"
val SetMetatableReturnsItsFirstArgument = fragment("setmetatable returns its first argument") {
"""local x = {}
|local y = setmetatable(x, {})
|return x == y, x == {}
"""
}
SetMetatableReturnsItsFirstArgument in thisContext succeedsWith(true, false)
val SetMetatableAndGetMetatable = fragment("setmetatable and getmetatable") {
"""local t = {}
|local mt0 = getmetatable(t)
|local mt1 = {}
|setmetatable(t, mt1)
|local mt2 = getmetatable(t)
|setmetatable(t, nil)
|local mt3 = getmetatable(t)
|return mt0 == nil, mt2 == mt1, mt2 == {}, mt3 == nil
"""
}
SetMetatableAndGetMetatable in thisContext succeedsWith(true, true, false, true)
program(
"""mt = {}
|t = {}
|setmetatable(t, mt)
|mt.__metatable = 'hello'
|return getmetatable(t)
""") succeedsWith "hello"
}
about("pcall") {
program("return pcall(nil)") succeedsWith(false, "attempt to call a nil value")
program("return pcall(function() end)") succeedsWith true
program("return pcall(pcall)") succeedsWith(false, "bad argument #1 to 'pcall' (value expected)")
program("return pcall(pcall, pcall, pcall)") succeedsWith(true, true, false, "bad argument #1 to 'pcall' (value expected)")
program("pcall()") failsWith "" << "bad argument #1 to 'pcall' (value expected)"
val PCallHonoursTheCallMetamethod = fragment("pcall honours the __call metamethod") {
"""function callable()
| local mt = {}
| mt.__call = function() return 42 end
| local t = {}
| setmetatable(t, mt)
| return t
|end
|
|x = callable()
|return pcall(x)
"""
}
PCallHonoursTheCallMetamethod in thisContext succeedsWith(true, 42)
val PCallCatchesErrorInACallMetamethod = fragment("pcall catches error in a __call metamethod") {
"""function callable()
| local mt = {}
| mt.__call = function() error('kaboom') end
| local t = {}
| setmetatable(t, mt)
| return t
|end
|
|x = callable()
|return pcall(x)
"""
}
// FIXME: the error object should actually be "stdin:3: kaboom"
PCallCatchesErrorInACallMetamethod in thisContext succeedsWith(false, "kaboom")
}
about("xpcall") {
program("xpcall()") failsWith "" << "bad argument #2 to 'xpcall' (function expected, got no value)"
program("return xpcall(nil)") failsWith "" << "bad argument #2 to 'xpcall' (function expected, got no value)"
program("return xpcall(function() end)") failsWith "" << "bad argument #2 to 'xpcall' (function expected, got no value)"
program("return xpcall(nil, nil)") failsWith "" << "bad argument #2 to 'xpcall' (function expected, got nil)"
program("return xpcall(nil, function(...) return ... end)") succeedsWith(false, "attempt to call a nil value")
program("return xpcall(xpcall, pcall)") succeedsWith(false, false)
program("return xpcall(pcall, xpcall)") succeedsWith(false, "error in error handling")
program(
"""count = 0
|function handle(eo)
| count = count + 1
| error(eo)
|end
|xpcall(nil, handle)
|return count
""") succeedsWith 220
}
about("error") {
program("return error()") failsWithLuaError (null)
program("return error(nil)") failsWithLuaError (null)
program("error(1)") failsWithLuaError (java.lang.Long.valueOf(1))
program("error(1.0)") failsWithLuaError (java.lang.Double.valueOf(1.0))
program("error(\\"boom\\")") failsWithLuaError "boom"
program("return pcall(error)") succeedsWith(false, null)
}
about("assert") {
program("assert(nil)") failsWith "" << "assertion failed!"
program("assert(false, 'boom')") failsWith "" << "boom"
program("return assert(true)") succeedsWith true
program("return assert(1, false, 'x')") succeedsWith(1, false, "x")
program("assert()") failsWith "" << "bad argument #1 to 'assert' (value expected)"
program("assert(pcall(error, 'boom'))") failsWith "" << "boom"
}
about("rawequal") {
program("return rawequal()") failsWith "" << "bad argument #1 to 'rawequal' (value expected)"
program("return rawequal(nil)") failsWith "" << "bad argument #2 to 'rawequal' (value expected)"
program("return rawequal(nil, nil)") succeedsWith true
program("return rawequal(0, 0)") succeedsWith true
program("return rawequal(0 / 0, 0 / 0)") succeedsWith false
// TODO: add tests for values that do have the __eq metamethod
}
about("rawget") {
program("rawget()") failsWith "" << "bad argument #1 to 'rawget' (table expected, got no value)"
program("rawget(nil)") failsWith "" << "bad argument #1 to 'rawget' (table expected, got nil)"
program("rawget('x')") failsWith "" << "bad argument #1 to 'rawget' (table expected, got string)"
program(
"""x = {}
|x.hello = 'world'
|return rawget(x, 'hello')
""") succeedsWith "world"
// TODO: add tests for values that do have the __index metamethod
}
about("rawset") {
program("rawset()") failsWith "" << "bad argument #1 to 'rawset' (table expected, got no value)"
program("rawset(nil)") failsWith "" << "bad argument #1 to 'rawset' (table expected, got nil)"
program("rawset('x')") failsWith "" << "bad argument #1 to 'rawset' (table expected, got string)"
program("rawset({}, nil)") failsWith "" << "bad argument #3 to 'rawset' (value expected)"
program(
"""x = {}
|rawset(x, 'hello', 'world')
|return x.hello
""") succeedsWith "world"
program("rawset({}, nil, 1)") failsWith "" << "table index is nil"
program("rawset({}, 0 / 0, 1)") failsWith "" << "table index is NaN"
program(
"""x = {}
|y = rawset(x, 0, 'hi')
|return x == y
""") succeedsWith true
// TODO: add tests for values that do have the __newindex metamethod
}
about("rawlen") {
program("rawlen()") failsWith "" << "bad argument #1 to 'rawlen' (table or string expected)"
program("rawlen(1)") failsWith "" << "bad argument #1 to 'rawlen' (table or string expected)"
program("return rawlen('x')") succeedsWith 1
program("return rawlen({'x', 1, true})") succeedsWith 3
// TODO: add tests for values that do have the __len metamethod
}
about("select") {
program("select()") failsWith "" << "bad argument #1 to 'select' (number expected, got no value)"
program("select('x')") failsWith "" << "bad argument #1 to 'select' (number expected, got string)"
program("select(' #')") failsWith "" << "bad argument #1 to 'select' (number expected, got string)"
program("select(' # ')") failsWith "" << "bad argument #1 to 'select' (number expected, got string)"
program("return select('#')") succeedsWith 0
program("return select('#', nil)") succeedsWith 1
program("return select('#', 1, 2, 3, 4, 5)") succeedsWith 5
program("return select('+1', true, false)") succeedsWith(true, false)
program("return select('-1', true, false)") succeedsWith (false)
program("return select(7, true, false)") succeedsWith()
program("select(0, true, false)") failsWith "" << "bad argument #1 to 'select' (index out of range)"
program("select(-3, true, false)") failsWith "" << "bad argument #1 to 'select' (index out of range)"
program("select(1.5, true, false)") failsWith "" << "bad argument #1 to 'select' (number has no integer representation)"
program("return select(1, 1, 2, 3, 4, 5)") succeedsWith(1, 2, 3, 4, 5)
program("return select(-1, 1, 2, 3, 4, 5)") succeedsWith (5)
program("return select(2, 1, 2, 3, 4, 5)") succeedsWith(2, 3, 4, 5)
program("return select(3, 1, 2, 3, 4, 5)") succeedsWith(3, 4, 5)
program("return select(-2, 1, 2, 3, 4, 5)") succeedsWith(4, 5)
program("return select(-3, 1, 2, 3, 4, 5)") succeedsWith(3, 4, 5)
}
about("load") {
program("load()") failsWith "" << "bad argument #1 to 'load' (function expected, got no value)"
program("load({})") failsWith "" << "bad argument #1 to 'load' (function expected, got table)"
program("load(nil)") failsWith "" << "bad argument #1 to 'load' (function expected, got nil)"
program("return load(42)") succeedsWith(null, classOf[String])
program("return load(42, 42, 42)") succeedsWith(null, "attempt to load a text chunk (mode is '42')")
program("return load('return nil', nil)") succeedsWith (classOf[LuaFunction[_, _, _, _, _]])
program("return load('return 1 + 2')()") succeedsWith (3)
program("return load('return x', nil, 't', {x = 10})()") succeedsWith (10)
program("local x = 20; return load('return x')()") succeedsWith (null)
program("x = 20; return load('return x')()") succeedsWith (20)
program("local e = load('return _ENV')(); return _ENV, e, _ENV == e") succeedsWith(classOf[Table], classOf[Table], true)
// error reporting: FIXME: actual loader error messages are different from PUC-Lua!
program("""return load("x(")""") succeedsWith(null, "[string \\"x(\\"]:?: function call expected at line 1")
program("""return load("'")""") succeedsWith(null, "[string \\"'\\"]:?: Lexical error at line 1, column 2. Encountered: <EOF> after : \\"\\"")
program("""return load('"')""") succeedsWith(null, "[string \\"\\"\\"]:?: Lexical error at line 1, column 2. Encountered: <EOF> after : \\"\\"")
program(
"""local n = 5
|local i = 0
|local function f()
| i = i + 1
| if i == 1 then return "return "
| elseif i < n+1 then return (i-1) .. ","
| elseif i == n+1 then return (i-1)
| else return nil
| end
|end
|return load(f)()
""") succeedsWith(1, 2, 3, 4, 5)
}
}
}
|
kroepke/luna
|
luna-tests/src/test/scala/org/classdump/luna/test/fragments/BasicLibFragments.scala
|
Scala
|
apache-2.0
| 16,760
|
package org.wartremover
package contrib.test
import org.scalatest.FunSuite
import org.wartremover.contrib.warts.SomeApply
import org.wartremover.test.WartTestTraverser
class SomeApplyTest extends FunSuite with ResultAssertions {
test("can't use Some.apply with null") {
val result = WartTestTraverser(SomeApply) {
Some(null)
}
assertError(result)("Some.apply is disabled - use Option.apply instead")
}
test("can't use Some.apply with a literal") {
val result = WartTestTraverser(SomeApply) {
Some(1)
}
assertError(result)("Some.apply is disabled - use Option.apply instead")
}
test("can't use Some.apply with an identifier") {
val result = WartTestTraverser(SomeApply) {
val x = 1
Some(x)
}
assertError(result)("Some.apply is disabled - use Option.apply instead")
}
test("can use Some.unapply in pattern matching") {
val result = WartTestTraverser(SomeApply) {
Option("test") match {
case Some(test) => println(test)
case None => println("not gonna happen")
}
}
assertEmpty(result)
}
test("obeys SuppressWarnings") {
val result = WartTestTraverser(SomeApply) {
@SuppressWarnings(Array("org.wartremover.contrib.warts.SomeApply"))
val x = Some(null)
}
assertEmpty(result)
}
}
|
tim-zh/wartremover-contrib
|
core/src/test/scala/wartremover/contrib/warts/SomeApplyTest.scala
|
Scala
|
apache-2.0
| 1,325
|
/*
* Copyright (C) 2005, The OpenURP Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openurp.qos.evaluation.clazz.model
import org.beangle.data.orm.MappingModule
class DefaultMapping extends MappingModule {
def binding(): Unit = {
//clazz result
bind[EvaluateResult].declare { e =>
e.clazz & e.student & e.department & e.questionnaire are notnull
e.questionResults is depends("result")
}
bind[QuestionResult]
bind[QuestionnaireClazz]
bind[FinalTeacherScore]
//course stat
bind[CourseEvalStat].declare { e =>
e.questionStats is depends(classOf[CourseQuestionStat], "stat")
e.indicatorStats is depends(classOf[CourseIndicatorStat], "stat")
}
bind[CourseOptionStat].declare { e =>
e.questionStat is target[CourseQuestionStat]
}
bind[CourseQuestionStat].declare { e =>
e.stat is target[CourseEvalStat]
e.optionStats is depends(classOf[CourseOptionStat], "questionStat")
}
bind[CourseIndicatorStat].declare { e =>
e.stat is target[CourseEvalStat]
}
//category
bind[CategoryEvalStat].declare { e =>
e.ranges is depends("stat")
e.grades is depends("stat")
}
bind[CategoryStatRange]
bind[CategoryStatGrade]
bind[Feedback].declare { e =>
e.contents is length(500)
}
bind[FinalComment].declare { e =>
e.contents is length(500)
}
bind[DepartEvalStat]
bind[SchoolEvalStat]
}
}
|
openurp/api
|
qos/src/main/scala/org/openurp/qos/evaluation/clazz/model/mapping.scala
|
Scala
|
lgpl-3.0
| 2,099
|
/**
* Rectangle.
*
* @author Yujian Zhang <yujian{dot}zhang[at]gmail(dot)com>
*
* License:
* GNU General Public License v2
* http://www.gnu.org/licenses/gpl-2.0.html
* Copyright (C) 2015 Yujian Zhang
*/
package net.whily.scasci.geo
/** Rectangle with edges parallel to axes.
*/
class Rect(val x1: Double, val y1: Double, val x2: Double, val y2: Double) {
assert((x1 <= x2) && (y1 <= y2))
/** Returns true if the rectangle contains the point `p`. */
def contains(p: Point) = {
(x1 <= p.x) && (p.x <= x2) && (y1 <= p.y) && (p.y <= y2)
}
  /** Returns true if the rectangle intersects `that` rectangle. */
def intersects(that: Rect) = {
(Math.max(x1, that.x1) <= Math.min(x2, that.x2)) &&
(Math.max(y1, that.y1) <= Math.min(y2, that.y2))
}
}
/** Factory for [[net.whily.scasci.geo.Rect]] instance. */
object Rect {
/** Creates a Rect. */
def apply(x1: Double, y1: Double, x2: Double, y2: Double) = new Rect(x1, y1, x2, y2)
}
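// Quick illustration (not in the original file; assumes the companion `Point`
// factory from this package):
//   val a = Rect(0, 0, 2, 2)
//   val b = Rect(1, 1, 3, 3)
//   a.contains(Point(1, 1)) // true: inside both edge ranges
//   a.intersects(b)         // true: the overlap is [1, 2] x [1, 2]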
|
whily/scasci
|
src/main/scala/geo/Rect.scala
|
Scala
|
gpl-2.0
| 972
|
package model.coin
import model.coin.utils.EmbeddedMariaDb
import model.coin.utils.WorkEntryTestUtil._
import models.coin.WorkEntry
import models.common.Person
import models.common.Person.PersonId
import org.specs2.matcher.Matchers
import play.api.test.{PlaySpecification, WithApplication}
import scala.language.postfixOps
/**
* Created by julianliebl on 29.01.17.
*/
class PersonSpec extends PlaySpecification with Matchers with EmbeddedMariaDb {
sequential
"Database" should {
"create and find a person by id" in new WithApplication() {
val id: PersonId = Person.create("testuid")
val person: Person = Person.findById(id).get
person.uid must be equalTo "testuid"
}
"create and find a person by uid" in new WithApplication() {
val id: PersonId = Person.create("testuid")
val person: Person = Person.findByUid("testuid").get
      person.id === id
      person.uid === "testuid"
}
"find and create a not existent person by uid" in new WithApplication() {
val person1: Person = Person.findOrCreateByUid("testuid")
val person2: Person = Person.findOrCreateByUid("testuid")
      person1.id === 1
      person1.uid === "testuid"
      person2.id === 1
      person2.uid === "testuid"
}
"not create a person with the same uid" in new WithApplication() {
val id1First: PersonId = Person.create("test1")
Person.create("test1") must throwA[com.mysql.jdbc.exceptions.jdbc4.MySQLIntegrityConstraintViolationException]
id1First must be equalTo 1
}
"add work entries and find them by uid" in new WithApplication() {
val personId1: PersonId = Person.create("personId1")
val personId2: PersonId = Person.create("personId2")
createDummyWorkEntry(dummyWorkEntry.copy(personId = personId1))
createDummyWorkEntry(dummyWorkEntry.copy(personId = personId1))
createDummyWorkEntry(dummyWorkEntry.copy(personId = personId2))
val workEntries1: Seq[WorkEntry] = Person.findWorkEntries(personId1)
val workEntries2: Seq[WorkEntry] = Person.findWorkEntries(personId2)
workEntries1.size === 2
workEntries2.size === 1
}
"add work entries and count them" in new WithApplication() {
val personId1: PersonId = Person.create("personId1")
val personId2: PersonId = Person.create("personId2")
createDummyWorkEntry(dummyWorkEntry.copy(personId = personId1))
createDummyWorkEntry(dummyWorkEntry.copy(personId = personId1))
createDummyWorkEntry(dummyWorkEntry.copy(personId = personId2))
val workEntriesCount1: Long = Person.countWorkEntries(personId1)
val workEntriesCount2: Long = Person.countWorkEntries(personId2)
      workEntriesCount1 === 2
      workEntriesCount2 === 1
}
}
}
|
wirvomgut/curo
|
test/model/coin/PersonSpec.scala
|
Scala
|
apache-2.0
| 2,772
|
// Copyright (c) 2014 David Miguel Antunes <davidmiguel {at} antunes.net>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package com.github.david04.liftutils.algo
class Bitmap extends Serializable {
private var ranges = List[(Long, Long)]()
  def update(idx: Long, v: Boolean) = { ranges = if (v) set(idx, ranges) else unset(idx, ranges); this }
def apply(idx: Long): Boolean = valueAt(idx, ranges)
private def valueAt(idx: Long, r: List[(Long, Long)]): Boolean =
if (r.isEmpty) false
else if (idx < r.head._1 || idx > r.last._2) false
else if (r.size == 1) idx >= r.head._1 && idx <= r.head._2
else r.splitAt(r.size / 2) match {
case (r1, r2) if idx < r2.head._1 => valueAt(idx, r1)
case (r1, r2) => valueAt(idx, r2)
}
private def join(r1: List[(Long, Long)], r2: List[(Long, Long)]) =
if (r1.last._2 != r2.head._1 - 1) r1 ::: r2
else r1.dropRight(1) ::: List((r1.last._1, r2.head._2)) ::: r2.tail
private def set(idx: Long, r: List[(Long, Long)]): List[(Long, Long)] =
if (r.isEmpty) List((idx, idx))
else if (idx < r.head._1) join(List((idx, idx)), r)
else if (idx > r.last._2) join(r, List((idx, idx)))
else if (r.size == 1) r // Already set
else r.splitAt(r.size / 2) match {
case (r1, r2) if idx < r2.head._1 => join(set(idx, r1), r2)
case (r1, r2) => join(r1, set(idx, r2))
}
private def unset(idx: Long, r: List[(Long, Long)]): List[(Long, Long)] =
if (r.isEmpty) r
else if (idx < r.head._1) r
else if (idx > r.last._2) r
else if (r.size == 1)
(if (r.head._1 < idx) Some((r.head._1, idx - 1)) else None) ::
(if (r.head._2 > idx) Some((idx + 1, r.head._2)) else None) ::
Nil flatten
else r.splitAt(r.size / 2) match {
case (r1, r2) if idx < r2.head._1 => unset(idx, r1) ::: r2
case (r1, r2) => r1 ::: unset(idx, r2)
}
}
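// Illustrative usage (not part of the original file): set bits are stored as a
// sorted list of inclusive (start, end) ranges, so long runs cost O(1) space.
//   val bm = new Bitmap
//   (10L to 20L).foreach(i => bm(i) = true) // coalesces into the single range (10, 20)
//   bm(15L) = false                         // splits it into (10, 14) and (16, 20)
//   // bm(14) == true, bm(15) == false, bm(16) == true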
|
david04/liftutils
|
src/main/scala/com/github/david04/liftutils/algo/Bitmap.scala
|
Scala
|
mit
| 2,935
|
package ohnosequences.awstools.test
import ohnosequences.awstools._
import com.amazonaws.services.autoscaling.AmazonAutoScaling
class AutoScaling extends org.scalatest.FunSuite with org.scalatest.BeforeAndAfterAll {
lazy val as: AmazonAutoScaling = autoscaling.defaultClient
// override def beforeAll() = {
// }
//
// override def afterAll() = {
// }
}
|
ohnosequences/aws-scala-tools
|
src/test/scala/ohnosequences/awstools/autoscaling.scala
|
Scala
|
agpl-3.0
| 371
|
package backend.distributor
import akka.actor.{ActorRef, Terminated}
import akka.stream.scaladsl.Flow
import akka.stream.stage._
import akka.stream.{Attributes, FlowShape, Inlet, Outlet}
import backend._
import backend.PricerMsg._
import backend.distributor.StreamLinkApi.{Payload, Demand, DistributorStreamRef, PricerStreamRef}
import backend.shared.Currencies
import com.typesafe.scalalogging.StrictLogging
import scala.collection.immutable.Queue
import scala.concurrent.duration._
import scala.language.postfixOps
object PricerStreamEndpointStage {
def apply(parentRef: ActorRef) = Flow.fromGraph(new PricerStreamEndpointStage(parentRef))
}
/**
  * This is the last stage of the pricer stream. It links with the distributor stream to form an end-to-end flow.
*/
private class PricerStreamEndpointStage(monitorRef: ActorRef) extends GraphStage[FlowShape[PricerMsg, PricerMsg]] {
val in: Inlet[PricerMsg] = Inlet("ClientBound")
val out: Outlet[PricerMsg] = Outlet("PricerBound")
override val shape: FlowShape[PricerMsg, PricerMsg] = FlowShape(in, out)
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new TimerGraphStageLogic(shape) with StrictLogging {
case object TimerKey
lazy val self = getStageActorRef(onMessage) // available only when stage is started
    var pendingToPricer: Queue[StreamHead] = Queue() // elements waiting to be pushed to the Pricer
var activeDistributorStreams: Set[ActorRef] = Set() // set of active distributor streams (one for each connected client)
var openSubscriptions: Map[Short, List[ActorRef]] = Map() // list of all open unique subscriptions with Pricer
setHandler(in, new InHandler {
// this method is called every time next element is available for consumption
override def onPush(): Unit = {
grab(in) match {
case m@PriceUpdate(cId, _, _) =>
openSubscriptions get cId foreach (_.foreach(_ ! m)) // forward price update to all streams subscribed to that id
case m: Ping => activeDistributorStreams foreach (_ ! m) // forward to all live streams
}
pull(in) // pull next element
}
})
setHandler(out, new OutHandler {
// this method is called every time downstream is ready for the next element
override def onPull(): Unit = pushToPricer()
})
override def preStart(): Unit = {
monitorRef ! PricerStreamRef(self) // publish location of the endpoint
pull(in) // pull the first element
}
override protected def onTimer(timerKey: Any): Unit = {
openSubscriptions = openSubscriptions filter { // subscription maintenance routine...
case (cId, list) if list.isEmpty =>
pendingToPricer = pendingToPricer :+ StreamHead(None, StreamCancel(cId)) // unsubscribe if no interested parties
false
case _ => true
}
pushToPricer() // push cancels if possible
if (openSubscriptions.isEmpty) cancelTimer(TimerKey) // cancel timer if no active subscriptions
}
private def onMessage(x: (ActorRef, Any)): Unit = x match {
      case (_, DistributorStreamRef(ref)) => // location of the distributor stream ..
logger.info(s"Linked with distributor stream at $ref")
activeDistributorStreams += ref // add to the list ..
self.watch(ref) // and watch it
ref ! Demand(self) // kickoff the flow with a demand request
case (_, Terminated(ref)) => // distributor stream terminated ..
logger.info(s"Broken link with $ref")
pendingToPricer = pendingToPricer.filterNot(_.maybeRef.contains(ref)) // remove all related requests
activeDistributorStreams -= ref
openSubscriptions = openSubscriptions map { // remove from the open subscriptions lists
case (cId, subscribers) if subscribers.contains(ref) => cId -> subscribers.filterNot(_ == ref)
case other => other
}
case (_, Payload(ref, m@StreamRequest(cId))) => // subscription request
val ccy = Currencies.all(cId.toInt)
logger.info(s"Subscription request for $ccy")
if (!openSubscriptions.contains(cId)) { // no subscription with pricer yet?
logger.info(s"Opening pricer subscription for $ccy")
pendingToPricer = pendingToPricer :+ StreamHead(Some(ref), m)
pushToPricer()
if (openSubscriptions.isEmpty) schedulePeriodicallyWithInitialDelay(TimerKey, 5 seconds, 5 seconds)
} else {
ref ! Demand(self) // request next element
logger.info(s"Sharing $ccy stream, total ${openSubscriptions.get(cId).get.size + 1} subscribers")
}
if (!openSubscriptions.get(cId).exists(_.contains(ref))) // keep track of stream interest
openSubscriptions += cId -> (openSubscriptions.getOrElse(cId, List()) :+ ref)
case (_, Payload(ref, m: ServerToClient)) =>
pendingToPricer = StreamHead(Some(ref), m) +: pendingToPricer
pushToPricer()
case (_, el) => logger.warn(s"Unexpected: $el")
}
private def pushToPricer() = if (isAvailable(out) && pendingToPricer.nonEmpty)
pendingToPricer.dequeue match {
case (StreamHead(ref, el), queue) =>
push(out, el)
pendingToPricer = queue
ref foreach (_ ! Demand(self))
}
}
case class StreamHead(maybeRef: Option[ActorRef], element: PricerMsg)
}
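// Hedged wiring sketch (everything except PricerStreamEndpointStage is assumed):
//   val pricerFlow = PricerStreamEndpointStage(monitorActor) // Flow[PricerMsg, PricerMsg, NotUsed]
//   // Materialising the flow makes preStart publish PricerStreamRef(self) to
//   // monitorActor; a distributor stream answers with DistributorStreamRef(ref),
//   // and the two sides stay linked via the Demand/Payload protocol above.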
|
intelix/activator-reactive-fx
|
app/backend/distributor/PricerStreamEndpointStage.scala
|
Scala
|
apache-2.0
| 5,947
|
package com.evnm.markovprocessor
import scala.io.Source
import org.scalatest.FunSuite
import com.evnm.markovprocessor.{MarkovProcessor => MP}
class MarkovProcessorSuite extends FunSuite {
/*
* Test groupWords.
*/
test("groupWords should return Nil when input list is Nil") {
assert(MP.groupWords(0, Nil) == Nil)
assert(MP.groupWords(3, Nil) == Nil)
}
test("groupWords should return Nil when n > length of input list") {
assert(MP.groupWords(1, Nil) == Nil)
assert(MP.groupWords(2, List("a")) == Nil)
assert(MP.groupWords(3, List("a", "b")) == Nil)
}
test("groupWords should return appropriate list on valid input") {
val lst = List((List("a", "b"), "c"), (List("b", "c"), "d"), (List("c", "d"), "e"))
assert(MP.groupWords(3, List("a", "b", "c", "d", "e")) == lst)
}
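  // In other words (illustrative): groupWords(n, ws) slides a window of n words
  // over ws, pairing each run of n-1 leading words with the word that follows, e.g.
  //   groupWords(2, List("a", "b", "c")) == List((List("a"), "b"), (List("b"), "c"))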
test("Grouping massive lists should not generate stack overflows") {
val words = Source.fromFile("src/test/resources/hamlet.txt").mkString.split("( |\\n)+").toList
MP.groupWords(4, words)
}
/*
* Test buildTree.
*/
test("buildTree should return an EmptyNode when given an empty string") {
MP.buildTree(3, "") match {
case EmptyNode => assert(true)
case _ => assert(false, "tree built from empty string should be an EmptyNode")
}
}
test("buildTree should return a single leaf Node when given a string of n=3 words") {
MP.buildTree(3, "oh hai there") match {
case Node(Ngram(lw, c), EmptyNode, EmptyNode) => {
assert(lw == List("oh", "hai"))
assert(c == Map("there" -> 1))
}
case _ => assert(false, "tree built was invalid")
}
}
test("buildTree should return an appropriate tree when given a non-trivial string") {
MP.buildTree(4, "four score and seven years ago") match {
case Node(Ngram(lw, c), left, right) => {
assert(lw == List("four", "score", "and"))
assert(c == Map("seven" -> 1))
right match {
case Node(Ngram(lw, c), left, EmptyNode) => {
assert(lw == List("score", "and", "seven"))
assert(c == Map("years" -> 1))
}
case _ => assert(false, "right should not be empty")
}
left match {
case Node(Ngram(lw, c), l, r) => {
assert(lw == List("and", "seven", "years"))
assert(c == Map("ago" -> 1))
}
case _ => assert(false, "left should not be empty")
}
}
case _ => assert(false, "tree built was invalid")
}
}
/*
* Test find.
*/
test("find should return None on an empty tree") {
assert(!MP.find(List(), EmptyNode).isDefined)
}
test("find should return a valid ngram option on a non-empty tree") {
val result =
MP.find(List("foo"), Node(Ngram(List("foo"),Map("bar" -> 1)), EmptyNode, EmptyNode))
assert(result.isDefined)
assert(result.get.leading_words == List("foo"))
assert(result.get.choices == Map("bar" -> 1))
}
}
|
evnm/scala-markov-processor
|
src/test/scala/MarkovProcessorSuite.scala
|
Scala
|
mit
| 2,961
|
import sbt._
import Keys._
import sbtrelease.ReleasePlugin._
import sbtbuildinfo.Plugin._
import scala.xml.Group
object build extends Build {
val manifestSetting = packageOptions <+= (name, version, organization) map {
(title, version, vendor) =>
Package.ManifestAttributes(
"Created-By" -> "Simple Build Tool",
"Built-By" -> System.getProperty("user.name"),
"Build-Jdk" -> System.getProperty("java.version"),
"Specification-Title" -> title,
"Specification-Version" -> version,
"Specification-Vendor" -> vendor,
"Implementation-Title" -> title,
"Implementation-Version" -> version,
"Implementation-Vendor-Id" -> vendor,
"Implementation-Vendor" -> vendor)
}
val publishSettings: Seq[Setting[_]] = Seq(
// publishTo <<= (version) { version: String =>
// val res =
// if (version.trim.endsWith("SNAPSHOT"))
// Opts.resolver.sonatypeSnapshots
// else
// Opts.resolver.sonatypeStaging
// Some(res)
// },
publishTo <<= (version) { version: String =>
val artifactory = "https://ci.aws.wordnik.com/artifactory/m2-"
if (version.trim.endsWith("SNAPSHOT"))
Some("snapshots" at artifactory + "snapshots")
else
Some("releases" at artifactory + "releases")
},
publishMavenStyle := true,
publishArtifact in Test := false,
pomIncludeRepository := { x => false }
)
val mavenCentralFrouFrou = Seq(
homepage := Some(new URL("https://developers.helloreverb.com/swagger/")),
startYear := Some(2009),
licenses := Seq(("ASL", new URL("http://github.com/wordnik/swagger-async-httpclient/raw/HEAD/LICENSE"))),
pomExtra <<= (pomExtra, name, description) {(pom, name, desc) => pom ++ Group(
<scm>
<url>http://github.com/wordnik/swagger-async-httpclient</url>
<connection>scm:git:git://github.com/wordnik/swagger-async-httpclient.git</connection>
</scm>
<developers>
<developer>
<id>casualjim</id>
<name>Ivan Porto Carrero</name>
<url>http://flanders.co.nz/</url>
</developer>
</developers>
)}
)
def versionSpecificSourcesIn(c: Configuration) =
unmanagedSourceDirectories in c <+= (scalaVersion, sourceDirectory in c) {
case (v, dir) if v startsWith "2.9" => dir / "scala_2.9"
case (v, dir) if v startsWith "2.10" => dir / "scala_2.10"
}
val projectSettings = Seq(
organization := "com.wordnik.swagger",
name := "swagger-async-httpclient",
scalaVersion := "2.10.0",
crossScalaVersions := Seq("2.9.1", "2.9.1-1", "2.9.2", "2.9.3", "2.10.0"),
scalacOptions ++= Seq("-unchecked", "-deprecation", "-optimize", "-Xcheckinit", "-encoding", "utf8", "-P:continuations:enable"),
scalacOptions in Compile <++= scalaVersion map ({
case v if v startsWith "2.10" => Seq("-language:implicitConversions", "-language:reflectiveCalls")
case _ => Seq.empty
}),
javacOptions in compile ++= Seq("-target", "1.6", "-source", "1.6", "-Xlint:deprecation"),
manifestSetting,
autoCompilerPlugins := true,
libraryDependencies <+= scalaVersion(sv => compilerPlugin("org.scala-lang.plugins" % "continuations" % sv)),
parallelExecution in Test := false,
commands += Command.args("s", "<shell command>") { (state, args) =>
args.mkString(" ") ! state.log
state
},
TaskKey[Unit]("gc", "runs garbage collector") <<= streams map { s =>
s.log.info("requesting garbage collection")
System.gc()
}
)
val buildInfoConfig: Seq[Setting[_]] = buildInfoSettings ++ Seq(
sourceGenerators in Compile <+= buildInfo,
buildInfoKeys := Seq[BuildInfoKey](name, version, scalaVersion, sbtVersion),
buildInfoPackage <<= organization(_ + ".client.async")
)
val defaultSettings =
Defaults.defaultSettings ++ releaseSettings ++ buildInfoConfig ++ projectSettings ++ publishSettings ++ mavenCentralFrouFrou
lazy val root = Project(
id = "swagger-async-httpclient",
base = file("."),
settings = defaultSettings ++ Seq(
libraryDependencies ++= Seq(
"org.scalatra.rl" %% "rl" % "0.4.8",
"org.slf4j" % "slf4j-api" % "1.7.5",
"ch.qos.logback" % "logback-classic" % "1.0.13" % "provided",
"org.json4s" %% "json4s-jackson" % "3.2.5",
"com.googlecode.juniversalchardet" % "juniversalchardet" % "1.0.3",
"eu.medsea.mimeutil" % "mime-util" % "2.1.3" exclude("org.slf4j", "slf4j-log4j12") exclude("log4j", "log4j"),
"com.ning" % "async-http-client" % "1.7.19"
),
libraryDependencies <+= scalaVersion {
case "2.9.3" => "org.clapper" % "grizzled-slf4j_2.9.2" % "0.6.10" exclude("org.scala-lang", "scala-library")
case v if v startsWith "2.9" => "org.clapper" %% "grizzled-slf4j" % "0.6.10"
case v => "com.typesafe" %% "scalalogging-slf4j" % "1.0.1"
},
libraryDependencies <++= scalaVersion {
case v if v startsWith "2.9" => Seq("com.typesafe.akka" % "akka-actor" % "2.0.5")
case v => Seq.empty
},
resolvers <++= scalaVersion {
case v if v startsWith "2.9" => Seq("Typesafe Repository" at "http://repo.typesafe.com/typesafe/releases/")
case v => Seq.empty
},
versionSpecificSourcesIn(Compile)
)
)
}
|
casualjim/swagger-async-httpclient
|
project/build.scala
|
Scala
|
apache-2.0
| 5,358
|
package org.odfi.indesign.core.module.ui.www.pdf
import org.odfi.indesign.core.module.ui.www.external.ExternalBuilder
import com.idyria.osi.vui.html.HTMLNode
import org.w3c.dom.html.HTMLElement
import java.net.URI
trait PDFBuilder extends ExternalBuilder {
// Current Page
//----------------
var pdfPage = 1
def pdfCanvas(url: String, id: String = "canvas") = {
var c = canvas {
this.id("pdfjs-" + id)
+@("data-url" -> url)
+@("page" -> pdfPage.toString)
/*+@("width" -> "600")
+@("height" -> "300")*/
}
this.registerAction("pdfjs.updatePage")(c) {
n =>
println(s"Updating Page "+request.get.getURLParameter("page"))
pdfPage = request.get.getURLParameter("page").get.toInt
}
c
}
override def externalAdd(targetNode: HTMLNode[HTMLElement, Any]): Unit = {
super.externalAdd(targetNode)
switchToNode(targetNode, {
      // Extra scripts
stylesheet(new URI(createSpecialPath("resources", "pdfjs/web/locale/locale.properties"))) {
+@("rel"->"resource")
+@("type"->"application/l10n")
}
script(new URI(createSpecialPath("resources", "pdfjs/web/l10n.js"))) {
}
script(new URI(createSpecialPath("resources", "pdfjs/web/compatibility.js"))) {
}
script(new URI(createSpecialPath("resources", "pdfjs/build/pdf.js"))) {
}
script(new URI(createSpecialPath("resources", "pdfjs/indesign-pdfjs.js"))) {
}
/*$(<link rel="resource" type="application/l10n" href="/resources/pdfjs/web/locale/locale.properties"/>)
$(<script src="/resources/pdfjs/web/l10n.js"></script>)
$(<script src="/resources/pdfjs/web/compatibility.js"></script>)
$(<script src="/resources/pdfjs/build/pdf.js"></script>)
$(<script src="/resources/pdfjs/indesign-pdfjs.js"></script>)*/
})
//super.externalAdd(targetNode)
}
}
|
opendesignflow/indesign
|
indesign-wwwui/src/main/scala/org/odfi/indesign/core/module/ui/www/pdf/PDFBuilder.scala
|
Scala
|
gpl-3.0
| 2,036
|
package com.github.caiiiycuk.async4s.request
import com.ning.http.client.Response
import com.github.caiiiycuk.async4s.response.ResponseType
import java.util.HashMap
import scala.collection.mutable.ListBuffer
case class RequestParam(key: String, value: Any)
case class RequestUrl[T](url: String, rType: ResponseType[T],
params: ListBuffer[RequestParam] = new ListBuffer[RequestParam]) {
def as[T2](rType: ResponseType[T2]) = {
new RequestUrl[T2](url, rType, params)
}
def ~(param: RequestParam) = {
params += param
this
}
def ~(param: (String, Any)) = {
params += RequestParam(param._1, param._2)
this
}
def r2T(response: Response): T = rType.r2T(response)
}
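// Illustrative usage sketch (`StringResponse` is a hypothetical ResponseType):
// `~` accumulates query parameters in order, and `as` swaps the response decoding.
//   val req = RequestUrl("http://example.com/search", StringResponse) ~
//     ("q" -> "scala") ~ RequestParam("page", 2)
//   // req.params now holds RequestParam("q", "scala") and RequestParam("page", 2)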
|
caiiiycuk/async4s-http-client
|
src/main/scala/com/github/caiiiycuk/async4s/request/RequestUrl.scala
|
Scala
|
mit
| 714
|
import sbt._
import Keys._
import ls.Plugin.LsKeys
object Unfiltered extends Build {
import Common._
import java.lang.{ Boolean => JBoolean }
def id(name: String) = "unfiltered-%s" format name
def local(name: String) = LocalProject(id(name))
def srcPathSetting(projectId: String, rootPkg: String) =
mappings in (LocalProject(projectId), Compile, packageSrc) ~= {
defaults: Seq[(File,String)] =>
defaults.map { case(file, path) =>
(file, rootPkg + "/" + path)
}
}
private def ciSettings: Seq[Def.Setting[_]] = {
if (JBoolean.parseBoolean(
sys.env.getOrElse("TRAVIS", "false"))) Seq(
logLevel in Global := Level.Warn,
logLevel in Compile := Level.Warn,
logLevel in Test := Level.Info
) else Seq.empty[Def.Setting[_]]
}
private def module(moduleName: String)(
projectId: String = "unfiltered-" + moduleName,
dirName: String = moduleName,
srcPath: String = "unfiltered/" + moduleName.replace("-","/")
) = Project(projectId, file(dirName),
settings = (Common.settings ++
ls.Plugin.lsSettings ++
ciSettings ++
srcPathSetting(projectId, srcPath)
))
lazy val unfiltered =
Project("unfiltered-all",
file("."),
settings = Common.settings
).aggregate(
library, filters, filtersAsync , uploads, filterUploads,
nettyUploads, util, jetty,
netty, nettyServer, json4s,
specs2Helpers, scalaTestHelpers, websockets, oauth, mac,
oauth2, agents, directives)
lazy val library: Project =
module("unfiltered")(
dirName = "library",
projectId = "unfiltered"
).dependsOn(util)
lazy val directives =
module("directives")().dependsOn(library)
lazy val filters = module("filter")().dependsOn(library)
lazy val filtersAsync = module("filter-async")().dependsOn(filters)
lazy val agents =
module("agents")(
srcPath = "unfiltered/request"
).dependsOn(library)
lazy val uploads =
module("uploads")(
srcPath = "unfiltered/request"
).dependsOn(library)
lazy val filterUploads =
module("filter-uploads")(
srcPath = "unfiltered/request"
).dependsOn(uploads, filters)
lazy val util = module("util")()
lazy val jetty = module("jetty")().dependsOn(util)
lazy val nettyServer =
module("netty-server")(
srcPath = "unfiltered/netty"
).dependsOn(netty, util)
lazy val netty = module("netty")().dependsOn(library)
lazy val specs2Helpers =
module("specs2")().dependsOn(filters, jetty, nettyServer)
lazy val scalaTestHelpers =
module("scalatest")().dependsOn(filters, jetty, nettyServer)
lazy val json4s =
module("json4s")(
srcPath = "unfiltered"
).dependsOn(library)
lazy val websockets =
module("netty-websockets")().dependsOn(nettyServer)
lazy val oauth = module("oauth")().dependsOn(jetty, filters, directives)
lazy val mac = module("mac")().dependsOn(library)
lazy val oauth2 = module("oauth2")().dependsOn(jetty, filters, mac, directives)
lazy val nettyUploads = module("netty-uploads")().dependsOn(nettyServer, uploads)
}
|
peel/unfiltered
|
project/build.scala
|
Scala
|
mit
| 3,252
|
package chat.tox.antox.tox
import java.io._
import android.content.Context
import android.preference.PreferenceManager
import android.util.Log
import chat.tox.antox.utils.FileUtils
import im.tox.tox4j.core.options.SaveDataOptions
import im.tox.tox4j.core.options.SaveDataOptions.ToxSave
class ToxDataFile(ctx: Context, fileName: String) {
def this(context: Context) = this(context, {
val preferences = PreferenceManager.getDefaultSharedPreferences(context)
preferences.getString("active_account", "")
})
def doesFileExist(): Boolean = {
if (ctx == null) {
Log.d("ToxDataFile", "Context is null!")
}
Log.d("ToxDataFile", "fileName: " + fileName)
val myFile = ctx.getFileStreamPath(fileName)
if (myFile == null) {
Log.d("ToxDataFile", "myFile is null!")
}
myFile.exists()
}
def exportFile(dest: File): Unit = {
if (!dest.exists()) {
throw new IllegalArgumentException("dest must exist")
}
FileUtils.copy(ctx.getFileStreamPath(fileName), new File(dest + "/" + fileName + ".tox"))
}
def deleteFile() {
ctx.deleteFile(fileName)
}
def loadFile(): Array[Byte] = {
var fin: FileInputStream = null
val file = ctx.getFileStreamPath(fileName)
var data: Array[Byte] = null
try {
      fin = new FileInputStream(file)
      data = Array.ofDim[Byte](file.length.toInt)
      // read() may return fewer bytes than requested, so loop until the buffer is full
      var offset = 0
      while (offset < data.length) {
        val count = fin.read(data, offset, data.length - offset)
        if (count == -1) throw new IOException("Unexpected end of file: " + fileName)
        offset += count
      }
} catch {
case e: FileNotFoundException => e.printStackTrace()
case e: IOException => e.printStackTrace()
} finally {
try {
if (fin != null) {
fin.close()
}
} catch {
case ioe: IOException => ioe.printStackTrace()
}
}
data
}
def loadAsSaveType(): SaveDataOptions = {
if (doesFileExist()) {
ToxSave(loadFile())
} else {
SaveDataOptions.None
}
}
def saveFile(dataToBeSaved: Array[Byte]) {
val myFile = ctx.getFileStreamPath(fileName)
try {
myFile.createNewFile()
} catch {
case e1: IOException => e1.printStackTrace()
}
try {
val output = new FileOutputStream(myFile)
output.write(dataToBeSaved, 0, dataToBeSaved.length)
output.close()
} catch {
case e: IOException => e.printStackTrace()
}
}
}
|
gale320/Antox
|
app/src/main/scala/chat/tox/antox/tox/ToxDataFile.scala
|
Scala
|
gpl-3.0
| 2,262
|
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600j.v3
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.ct600.v3.retriever.CT600BoxRetriever
abstract class SchemeReferenceNumberBox extends CtBoxIdentifier("Scheme reference number") with CtOptionalString with Input with ValidatableBox[CT600BoxRetriever] {
def validateSchemeReferenceNumber(previousSchemeNumberBox: CtOptionalString, previousSchemeDateBox: CtOptionalDate, schemeDateBox: CtOptionalDate) = (previousSchemeNumberBox.value, previousSchemeDateBox.value, schemeDateBox.value) match {
case (None, None, _) => validateStringAsBlank(id, this)
case (_, _, Some(_)) => validateAsMandatory(this) ++ validateOptionalStringByRegex(id, this, taxAvoidanceSchemeNumberRegex)
case _ => validateOptionalStringByRegex(id, this, taxAvoidanceSchemeNumberRegex)
}
}
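// Reading the validation above (explanatory note, not in the original source):
//  - previous scheme number and previous date both absent (first case wins):
//    this box must be blank;
//  - otherwise, if this box's own scheme date is present: the number is
//    mandatory and must match taxAvoidanceSchemeNumberRegex;
//  - otherwise: only the regex check applies.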
|
ahudspith-equalexperts/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/ct600j/v3/SchemeReferenceNumberBox.scala
|
Scala
|
apache-2.0
| 1,413
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.carbondata.spark.testsuite.dataload
import java.io.BufferedWriter
import java.io.File
import java.io.FileWriter
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat
import org.apache.spark.sql.common.util.CarbonHiveContext
import org.apache.spark.sql.common.util.CarbonHiveContext.sql
import org.apache.spark.sql.common.util.QueryTest
import org.scalatest.BeforeAndAfterAll
/**
* Test Case for new defaultsource: com.databricks.spark.csv.newapi
*
 * @date Apr 10, 2016 10:34:58 PM
 * @see org.apache.carbondata.spark.util.GlobalDictionaryUtil
*/
class DefaultSourceTestCase extends QueryTest with BeforeAndAfterAll {
var filePath: String = _
override def beforeAll {
buildTestData
buildTable
}
def buildTestData() = {
val workDirectory = new File(this.getClass.getResource("/").getPath + "/../../").getCanonicalPath
filePath = workDirectory + "/target/defaultsource.csv"
val file = new File(filePath)
val writer = new BufferedWriter(new FileWriter(file))
writer.write("c1,c2,c3,c4")
writer.newLine()
for (i <- 0 until 2000000) {
writer.write(" aaaaaaa" + i + " , " +
"bbbbbbb" + i % 1000 + "," +
        i % 1000000 + "," + i % 10000 + "\n")
}
writer.close
}
def buildTable() = {
try {
sql("drop table if exists defaultsource")
sql("""create table if not exists defaultsource
(c1 string, c2 string, c3 int, c4 int)
STORED BY 'org.apache.carbondata.format'""")
} catch {
      case ex: Throwable => logError(ex.getMessage + "\r\n" + ex.getStackTraceString)
}
}
test("test new defaultsource: com.databricks.spark.csv.newapi") {
val df1 = CarbonHiveContext.read
.format("com.databricks.spark.csv.newapi")
.option("header", "true")
.option("delimiter", ",")
.option("parserLib", "univocity")
.option("ignoreLeadingWhiteSpace", "true")
.option("ignoreTrailingWhiteSpace", "true")
.load(filePath)
assert(!df1.first().getString(0).startsWith(" "))
assert(df1.count() == 2000000)
assert(df1.rdd.partitions.length == 3)
}
test("test defaultsource: com.databricks.spark.csv") {
val df2 = CarbonHiveContext.read
.format("com.databricks.spark.csv")
.option("header", "true")
.option("delimiter", ",")
.option("parserLib", "univocity")
.option("ignoreLeadingWhiteSpace", "true")
.option("ignoreTrailingWhiteSpace", "true")
.load(filePath)
assert(!df2.first().getString(0).startsWith(" "))
assert(df2.count() == 2000000)
assert(df2.rdd.partitions.length == 3)
}
}
|
foryou2030/incubator-carbondata
|
integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/DefaultSourceTestCase.scala
|
Scala
|
apache-2.0
| 3,519
|
/*
* Copyright 2022 Typelevel
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.typelevel.sbt.gha
import sbt.Keys._
import sbt._
import java.nio.file.FileSystems
import scala.io.Source
object GenerativePlugin extends AutoPlugin {
override def requires = plugins.JvmPlugin
override def trigger = allRequirements
object autoImport extends GenerativeKeys {
type WorkflowJob = org.typelevel.sbt.gha.WorkflowJob
val WorkflowJob = org.typelevel.sbt.gha.WorkflowJob
type JobContainer = org.typelevel.sbt.gha.JobContainer
val JobContainer = org.typelevel.sbt.gha.JobContainer
type WorkflowStep = org.typelevel.sbt.gha.WorkflowStep
val WorkflowStep = org.typelevel.sbt.gha.WorkflowStep
type RefPredicate = org.typelevel.sbt.gha.RefPredicate
val RefPredicate = org.typelevel.sbt.gha.RefPredicate
type Ref = org.typelevel.sbt.gha.Ref
val Ref = org.typelevel.sbt.gha.Ref
type UseRef = org.typelevel.sbt.gha.UseRef
val UseRef = org.typelevel.sbt.gha.UseRef
type PREventType = org.typelevel.sbt.gha.PREventType
val PREventType = org.typelevel.sbt.gha.PREventType
type MatrixInclude = org.typelevel.sbt.gha.MatrixInclude
val MatrixInclude = org.typelevel.sbt.gha.MatrixInclude
type MatrixExclude = org.typelevel.sbt.gha.MatrixExclude
val MatrixExclude = org.typelevel.sbt.gha.MatrixExclude
type Paths = org.typelevel.sbt.gha.Paths
val Paths = org.typelevel.sbt.gha.Paths
type JavaSpec = org.typelevel.sbt.gha.JavaSpec
val JavaSpec = org.typelevel.sbt.gha.JavaSpec
}
import autoImport._
private def indent(output: String, level: Int): String = {
val space = (0 until level * 2).map(_ => ' ').mkString
(space + output.replace("\n", s"\n$space")).replaceAll("""\n[ ]+\n""", "\n\n")
}
private def isSafeString(str: String): Boolean =
!(str.indexOf(':') >= 0 || // pretend colon is illegal everywhere for simplicity
str.indexOf('#') >= 0 || // same for comment
str.indexOf('!') == 0 ||
str.indexOf('*') == 0 ||
str.indexOf('-') == 0 ||
str.indexOf('?') == 0 ||
str.indexOf('{') == 0 ||
str.indexOf('}') == 0 ||
str.indexOf('[') == 0 ||
str.indexOf(']') == 0 ||
str.indexOf(',') == 0 ||
str.indexOf('|') == 0 ||
str.indexOf('>') == 0 ||
str.indexOf('@') == 0 ||
str.indexOf('`') == 0 ||
str.indexOf('"') == 0 ||
str.indexOf('\'') == 0 ||
str.indexOf('&') == 0)
private def wrap(str: String): String =
if (str.indexOf('\n') >= 0)
"|\n" + indent(str, 1)
else if (isSafeString(str))
str
else
s"'${str.replace("'", "''")}'"
def compileList(items: List[String], level: Int): String = {
val rendered = items.map(wrap)
if (rendered.map(_.length).sum < 40) // just arbitrarily...
rendered.mkString(" [", ", ", "]")
else
"\n" + indent(rendered.map("- " + _).mkString("\n"), level)
}
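  // e.g. compileList(List("ubuntu-latest"), 1) == " [ubuntu-latest]" (inline form);
  // lists whose rendered items total 40+ characters fall back to an indented "- item" block.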
def compileListOfSimpleDicts(items: List[Map[String, String]]): String =
items map { dict =>
val rendered = dict map { case (key, value) => s"$key: $value" } mkString "\n"
"-" + indent(rendered, 1).substring(1)
} mkString "\n"
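  // e.g. compileListOfSimpleDicts(List(Map("os" -> "ubuntu-latest", "java" -> "8")))
  //   yields "- os: ubuntu-latest\n  java: 8" (illustrative; values are not wrapped)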
def compilePREventType(tpe: PREventType): String = {
import PREventType._
tpe match {
case Assigned => "assigned"
case Unassigned => "unassigned"
case Labeled => "labeled"
case Unlabeled => "unlabeled"
case Opened => "opened"
case Edited => "edited"
case Closed => "closed"
case Reopened => "reopened"
case Synchronize => "synchronize"
case ReadyForReview => "ready_for_review"
case Locked => "locked"
case Unlocked => "unlocked"
case ReviewRequested => "review_requested"
case ReviewRequestRemoved => "review_request_removed"
}
}
def compileRef(ref: Ref): String = ref match {
case Ref.Branch(name) => s"refs/heads/$name"
case Ref.Tag(name) => s"refs/tags/$name"
}
def compileBranchPredicate(target: String, pred: RefPredicate): String = pred match {
case RefPredicate.Equals(ref) =>
s"$target == '${compileRef(ref)}'"
case RefPredicate.Contains(Ref.Tag(name)) =>
s"(startsWith($target, 'refs/tags/') && contains($target, '$name'))"
case RefPredicate.Contains(Ref.Branch(name)) =>
s"(startsWith($target, 'refs/heads/') && contains($target, '$name'))"
case RefPredicate.StartsWith(ref) =>
s"startsWith($target, '${compileRef(ref)}')"
case RefPredicate.EndsWith(Ref.Tag(name)) =>
s"(startsWith($target, 'refs/tags/') && endsWith($target, '$name'))"
case RefPredicate.EndsWith(Ref.Branch(name)) =>
s"(startsWith($target, 'refs/heads/') && endsWith($target, '$name'))"
}
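  // Example expansions (illustrative), with target = "github.ref":
  //   RefPredicate.Equals(Ref.Branch("main"))  => github.ref == 'refs/heads/main'
  //   RefPredicate.StartsWith(Ref.Tag("v"))    => startsWith(github.ref, 'refs/tags/v')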
def compileEnvironment(environment: JobEnvironment): String =
environment.url match {
case Some(url) =>
val fields = s"""name: ${wrap(environment.name)}
|url: ${wrap(url.toString)}""".stripMargin
s"""environment:
|${indent(fields, 1)}""".stripMargin
case None =>
s"environment: ${wrap(environment.name)}"
}
def compileEnv(env: Map[String, String], prefix: String = "env"): String =
if (env.isEmpty) {
""
} else {
val rendered = env map {
case (key, value) =>
if (!isSafeString(key) || key.indexOf(' ') >= 0)
sys.error(s"'$key' is not a valid environment variable name")
s"""$key: ${wrap(value)}"""
}
s"""$prefix:
${indent(rendered.mkString("\n"), 1)}"""
}
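  // e.g. compileEnv(Map("FOO" -> "bar")) renders (illustrative):
  //   env:
  //     FOO: bar
  // Keys are validated as environment-variable names; values go through wrap.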
def compileStep(
step: WorkflowStep,
sbt: String,
sbtStepPreamble: List[String],
declareShell: Boolean = false): String = {
import WorkflowStep._
val renderedName = step.name.map(wrap).map("name: " + _ + "\n").getOrElse("")
val renderedId = step.id.map(wrap).map("id: " + _ + "\n").getOrElse("")
val renderedCond = step.cond.map(wrap).map("if: " + _ + "\n").getOrElse("")
val renderedShell = if (declareShell) "shell: bash\n" else ""
val renderedEnvPre = compileEnv(step.env)
val renderedEnv =
if (renderedEnvPre.isEmpty)
""
else
renderedEnvPre + "\n"
val preamblePre = renderedName + renderedId + renderedCond + renderedEnv
val preamble =
if (preamblePre.isEmpty)
""
else
preamblePre
val body = step match {
case run: Run =>
renderRunBody(run.commands, run.params, renderedShell)
case sbtStep: Sbt =>
import sbtStep.commands
val sbtClientMode = sbt.matches("""sbt.* --client($| .*)""")
val safeCommands =
if (sbtClientMode)
s"'${(sbtStepPreamble ::: commands).mkString("; ")}'"
else
(sbtStepPreamble ::: commands)
.map { c =>
if (c.indexOf(' ') >= 0)
s"'$c'"
else
c
}
.mkString(" ")
renderRunBody(
commands = List(s"$sbt $safeCommands"),
params = sbtStep.params,
renderedShell = renderedShell
)
case use: Use =>
import use.{ref, params}
val decl = ref match {
case UseRef.Public(owner, repo, ref) =>
s"uses: $owner/$repo@$ref"
case UseRef.Local(path) =>
val cleaned =
if (path.startsWith("./"))
path
else
"./" + path
s"uses: $cleaned"
case UseRef.Docker(image, tag, Some(host)) =>
s"uses: docker://$host/$image:$tag"
case UseRef.Docker(image, tag, None) =>
s"uses: docker://$image:$tag"
}
decl + renderParams(params)
}
indent(preamble + body, 1).updated(0, '-')
}
def renderRunBody(
commands: List[String],
params: Map[String, String],
renderedShell: String) =
renderedShell + "run: " + wrap(commands.mkString("\n")) + renderParams(params)
def renderParams(params: Map[String, String]): String = {
val renderedParamsPre = compileEnv(params, prefix = "with")
val renderedParams =
if (renderedParamsPre.isEmpty)
""
else
"\n" + renderedParamsPre
renderedParams
}
def compileJob(job: WorkflowJob, sbt: String): String = {
val renderedNeeds =
if (job.needs.isEmpty)
""
else
s"\nneeds: [${job.needs.mkString(", ")}]"
val renderedEnvironment =
job.environment.map(compileEnvironment).map("\n" + _).getOrElse("")
val renderedCond = job.cond.map(wrap).map("\nif: " + _).getOrElse("")
val renderedContainer = job.container match {
case Some(JobContainer(image, credentials, env, volumes, ports, options)) =>
if (credentials.isEmpty && env.isEmpty && volumes.isEmpty && ports.isEmpty && options.isEmpty) {
"\n" + s"container: ${wrap(image)}"
} else {
val renderedImage = s"image: ${wrap(image)}"
val renderedCredentials = credentials match {
case Some((username, password)) =>
s"\ncredentials:\n${indent(s"username: ${wrap(username)}\npassword: ${wrap(password)}", 1)}"
case None =>
""
}
val renderedEnv =
if (!env.isEmpty)
"\n" + compileEnv(env)
else
""
val renderedVolumes =
if (!volumes.isEmpty)
s"\nvolumes:${compileList(volumes.toList map { case (l, r) => s"$l:$r" }, 1)}"
else
""
val renderedPorts =
if (!ports.isEmpty)
s"\nports:${compileList(ports.map(_.toString), 1)}"
else
""
val renderedOptions =
if (!options.isEmpty)
s"\noptions: ${wrap(options.mkString(" "))}"
else
""
s"\ncontainer:\n${indent(renderedImage + renderedCredentials + renderedEnv + renderedVolumes + renderedPorts + renderedOptions, 1)}"
}
case None =>
""
}
val renderedEnvPre = compileEnv(job.env)
val renderedEnv =
if (renderedEnvPre.isEmpty)
""
else
"\n" + renderedEnvPre
List("include", "exclude") foreach { key =>
if (job.matrixAdds.contains(key)) {
sys.error(s"key `$key` is reserved and cannot be used in an Actions matrix definition")
}
}
val renderedMatricesPre = job.matrixAdds map {
case (key, values) => s"$key: ${values.map(wrap).mkString("[", ", ", "]")}"
} mkString "\n"
// TODO refactor all of this stuff to use whitelist instead
val whitelist = Map(
"os" -> job.oses,
"scala" -> job.scalas,
"java" -> job.javas.map(_.render)) ++ job.matrixAdds
def checkMatching(matching: Map[String, String]): Unit = {
matching foreach {
case (key, value) =>
if (!whitelist.contains(key)) {
sys.error(s"inclusion key `$key` was not found in matrix")
}
if (!whitelist(key).contains(value)) {
sys.error(
s"inclusion key `$key` was present in matrix, but value `$value` was not in ${whitelist(key)}")
}
}
}
val renderedIncludesPre = if (job.matrixIncs.isEmpty) {
renderedMatricesPre
} else {
job.matrixIncs.foreach(inc => checkMatching(inc.matching))
val rendered = compileListOfSimpleDicts(
job.matrixIncs.map(i => i.matching ++ i.additions))
val renderedMatrices =
if (renderedMatricesPre.isEmpty)
""
else
renderedMatricesPre + "\n"
s"${renderedMatrices}include:\n${indent(rendered, 1)}"
}
val renderedExcludesPre = if (job.matrixExcs.isEmpty) {
renderedIncludesPre
} else {
job.matrixExcs.foreach(exc => checkMatching(exc.matching))
val rendered = compileListOfSimpleDicts(job.matrixExcs.map(_.matching))
val renderedIncludes =
if (renderedIncludesPre.isEmpty)
""
else
renderedIncludesPre + "\n"
s"${renderedIncludes}exclude:\n${indent(rendered, 1)}"
}
val renderedMatrices =
if (renderedExcludesPre.isEmpty)
""
else
"\n" + indent(renderedExcludesPre, 2)
val declareShell = job.oses.exists(_.contains("windows"))
val runsOn =
if (job.runsOnExtraLabels.isEmpty)
s"$${{ matrix.os }}"
else
job.runsOnExtraLabels.mkString(s"""[ "$${{ matrix.os }}", """, ", ", " ]")
val renderedFailFast = job.matrixFailFast.fold("")("\n fail-fast: " + _)
// format: off
val body = s"""name: ${wrap(job.name)}${renderedNeeds}${renderedCond}
strategy:${renderedFailFast}
matrix:
os:${compileList(job.oses, 3)}
scala:${compileList(job.scalas, 3)}
java:${compileList(job.javas.map(_.render), 3)}${renderedMatrices}
runs-on: ${runsOn}${renderedEnvironment}${renderedContainer}${renderedEnv}
steps:
${indent(job.steps.map(compileStep(_, sbt, job.sbtStepPreamble, declareShell = declareShell)).mkString("\n\n"), 1)}"""
// format: on
s"${job.id}:\n${indent(body, 1)}"
}
def compileWorkflow(
name: String,
branches: List[String],
tags: List[String],
paths: Paths,
prEventTypes: List[PREventType],
env: Map[String, String],
jobs: List[WorkflowJob],
sbt: String): String = {
val renderedEnvPre = compileEnv(env)
val renderedEnv =
if (renderedEnvPre.isEmpty)
""
else
renderedEnvPre + "\n\n"
val renderedTypesPre = prEventTypes.map(compilePREventType).mkString("[", ", ", "]")
val renderedTypes =
if (prEventTypes.sortBy(_.toString) == PREventType.Defaults)
""
else
"\n" + indent("types: " + renderedTypesPre, 2)
val renderedTags =
if (tags.isEmpty)
""
else
s"""
tags: [${tags.map(wrap).mkString(", ")}]"""
val renderedPaths = paths match {
case Paths.None =>
""
case Paths.Include(paths) =>
"\n" + indent(s"""paths: [${paths.map(wrap).mkString(", ")}]""", 2)
case Paths.Ignore(paths) =>
"\n" + indent(s"""paths-ignore: [${paths.map(wrap).mkString(", ")}]""", 2)
}
s"""# This file was automatically generated by sbt-github-actions using the
# githubWorkflowGenerate task. You should add and commit this file to
# your git repository. It goes without saying that you shouldn't edit
# this file by hand! Instead, if you wish to make changes, you should
# change your sbt build configuration to revise the workflow description
# to meet your needs, then regenerate this file.
name: ${wrap(name)}
on:
pull_request:
branches: [${branches.map(wrap).mkString(", ")}]$renderedTypes$renderedPaths
push:
branches: [${branches.map(wrap).mkString(", ")}]$renderedTags$renderedPaths
${renderedEnv}jobs:
${indent(jobs.map(compileJob(_, sbt)).mkString("\n\n"), 1)}
"""
}
val settingDefaults = Seq(
githubWorkflowSbtCommand := "sbt",
githubWorkflowIncludeClean := true,
// This is currently set to false because of https://github.com/sbt/sbt/issues/6468. When a new sbt version
// that fixes this issue is released, check for that version (or higher) and set this to true.
githubWorkflowUseSbtThinClient := false,
githubWorkflowBuildMatrixFailFast := None,
githubWorkflowBuildMatrixAdditions := Map(),
githubWorkflowBuildMatrixInclusions := Seq(),
githubWorkflowBuildMatrixExclusions := Seq(),
githubWorkflowBuildRunsOnExtraLabels := Seq(),
githubWorkflowBuildPreamble := Seq(),
githubWorkflowBuildPostamble := Seq(),
githubWorkflowBuildSbtStepPreamble := Seq(s"++$${{ matrix.scala }}"),
githubWorkflowBuild := Seq(WorkflowStep.Sbt(List("test"), name = Some("Build project"))),
githubWorkflowPublishPreamble := Seq(),
githubWorkflowPublishPostamble := Seq(),
githubWorkflowPublish := Seq(
WorkflowStep.Sbt(List("+publish"), name = Some("Publish project"))),
githubWorkflowPublishTargetBranches := Seq(RefPredicate.Equals(Ref.Branch("main"))),
githubWorkflowPublishCond := None,
githubWorkflowJavaVersions := Seq(JavaSpec.temurin("11")),
githubWorkflowScalaVersions := crossScalaVersions.value,
githubWorkflowOSes := Seq("ubuntu-latest"),
githubWorkflowDependencyPatterns := Seq("**/*.sbt", "project/build.properties"),
githubWorkflowTargetBranches := Seq("**"),
githubWorkflowTargetTags := Seq(),
githubWorkflowTargetPaths := Paths.None,
githubWorkflowEnv := Map("GITHUB_TOKEN" -> s"$${{ secrets.GITHUB_TOKEN }}"),
githubWorkflowAddedJobs := Seq()
)
private lazy val internalTargetAggregation =
settingKey[Seq[File]]("Aggregates target directories from all subprojects")
private val windowsGuard = Some("contains(runner.os, 'windows')")
private val PlatformSep = FileSystems.getDefault.getSeparator
private def normalizeSeparators(pathStr: String): String = {
pathStr.replace(PlatformSep, "/") // *force* unix separators
}
private val pathStrs = Def setting {
val base = (ThisBuild / baseDirectory).value.toPath
internalTargetAggregation.value map { file =>
val path = file.toPath
if (path.isAbsolute)
normalizeSeparators(base.relativize(path).toString)
else
normalizeSeparators(path.toString)
}
}
override def globalSettings =
Seq(internalTargetAggregation := Seq(), githubWorkflowArtifactUpload := true)
override def buildSettings = settingDefaults ++ Seq(
githubWorkflowPREventTypes := PREventType.Defaults,
githubWorkflowArtifactDownloadExtraKeys := Set.empty,
githubWorkflowGeneratedUploadSteps := {
val generate =
githubWorkflowArtifactUpload.value &&
githubWorkflowPublishTargetBranches.value.nonEmpty
if (generate) {
val sanitized = pathStrs.value map { str =>
if (str.indexOf(' ') >= 0) // TODO be less naive
s"'$str'"
else
str
}
val mkdir = WorkflowStep.Run(
List(s"mkdir -p ${sanitized.mkString(" ")} project/target"),
name = Some("Make target directories"),
cond = Some(publicationCond.value))
val tar = WorkflowStep.Run(
List(s"tar cf targets.tar ${sanitized.mkString(" ")} project/target"),
name = Some("Compress target directories"),
cond = Some(publicationCond.value))
val keys = githubWorkflowBuildMatrixAdditions.value.keys.toList.sorted
val artifactId =
(List("os", "java", "scala") ::: keys).map(k => s"$${{ matrix.$k }}").mkString("-")
val upload = WorkflowStep.Use(
UseRef.Public("actions", "upload-artifact", "v2"),
name = Some(s"Upload target directories"),
params = Map("name" -> s"target-$artifactId", "path" -> "targets.tar"),
cond = Some(publicationCond.value)
)
Seq(mkdir, tar, upload)
} else {
Seq()
}
},
githubWorkflowGeneratedDownloadSteps := {
val extraKeys = githubWorkflowArtifactDownloadExtraKeys.value
val additions = githubWorkflowBuildMatrixAdditions.value
val matrixAdds = additions.map {
case (key, values) =>
if (extraKeys(key))
key -> values // we want to iterate over all values
else
key -> values.take(1) // we only want the primary value
}
val keys = "scala" :: additions.keys.toList.sorted
val oses = githubWorkflowOSes.value.toList
val scalas = githubWorkflowScalaVersions.value.toList
val javas = githubWorkflowJavaVersions.value.toList
val exclusions = githubWorkflowBuildMatrixExclusions.value.toList
// build the list of artifacts by iterating over all combinations of keys
val artifacts =
expandMatrix(
oses,
scalas,
javas,
matrixAdds,
Nil,
exclusions
).map {
case _ :: scala :: _ :: tail => scala :: tail
case _ => sys.error("Bug generating artifact download steps") // shouldn't happen
}
if (githubWorkflowArtifactUpload.value) {
artifacts flatMap { v =>
val pretty = v.mkString(", ")
val download = WorkflowStep.Use(
UseRef.Public("actions", "download-artifact", "v2"),
name = Some(s"Download target directories ($pretty)"),
params =
Map("name" -> s"target-$${{ matrix.os }}-$${{ matrix.java }}-${v.mkString("-")}")
)
val untar = WorkflowStep.Run(
List("tar xf targets.tar", "rm targets.tar"),
name = Some(s"Inflate target directories ($pretty)"))
Seq(download, untar)
}
} else {
Seq()
}
},
githubWorkflowGeneratedCacheSteps := {
val hashes = githubWorkflowDependencyPatterns.value map { glob =>
s"$${{ hashFiles('$glob') }}"
}
Seq(
WorkflowStep.Use(
UseRef.Public("actions", "cache", "v2"),
name = Some("Cache sbt"),
params = Map(
"path" -> Seq(
"~/.sbt",
"~/.ivy2/cache",
"~/.coursier/cache/v1",
"~/.cache/coursier/v1",
"~/AppData/Local/Coursier/Cache/v1",
"~/Library/Caches/Coursier/v1"
).mkString("\n"),
"key" -> s"$${{ runner.os }}-sbt-cache-v2-${hashes.mkString("-")}"
)
)
)
},
githubWorkflowJobSetup := {
val autoCrlfOpt = if (githubWorkflowOSes.value.exists(_.contains("windows"))) {
List(
WorkflowStep.Run(
List("git config --global core.autocrlf false"),
name = Some("Ignore line ending differences in git"),
cond = windowsGuard))
} else {
Nil
}
autoCrlfOpt :::
List(WorkflowStep.CheckoutFull) :::
WorkflowStep.SetupJava(githubWorkflowJavaVersions.value.toList) :::
githubWorkflowGeneratedCacheSteps.value.toList
},
githubWorkflowGeneratedCI := {
val uploadStepsOpt =
if (githubWorkflowPublishTargetBranches
.value
.isEmpty && githubWorkflowAddedJobs.value.isEmpty)
Nil
else
githubWorkflowGeneratedUploadSteps.value.toList
val publishJobOpt = Seq(
WorkflowJob(
"publish",
"Publish Artifacts",
githubWorkflowJobSetup.value.toList :::
githubWorkflowGeneratedDownloadSteps.value.toList :::
githubWorkflowPublishPreamble.value.toList :::
githubWorkflowPublish.value.toList :::
githubWorkflowPublishPostamble.value.toList,
cond = Some(publicationCond.value),
scalas = List(scalaVersion.value),
javas = List(githubWorkflowJavaVersions.value.head),
needs = List("build")
)).filter(_ => !githubWorkflowPublishTargetBranches.value.isEmpty)
Seq(
WorkflowJob(
"build",
"Build and Test",
githubWorkflowJobSetup.value.toList :::
githubWorkflowBuildPreamble.value.toList :::
WorkflowStep.Sbt(
List("project /", "githubWorkflowCheck"),
name = Some("Check that workflows are up to date")) ::
githubWorkflowBuild.value.toList :::
githubWorkflowBuildPostamble.value.toList :::
uploadStepsOpt,
sbtStepPreamble = githubWorkflowBuildSbtStepPreamble.value.toList,
oses = githubWorkflowOSes.value.toList,
scalas = githubWorkflowScalaVersions.value.toList,
javas = githubWorkflowJavaVersions.value.toList,
matrixFailFast = githubWorkflowBuildMatrixFailFast.value,
matrixAdds = githubWorkflowBuildMatrixAdditions.value,
matrixIncs = githubWorkflowBuildMatrixInclusions.value.toList,
matrixExcs = githubWorkflowBuildMatrixExclusions.value.toList,
runsOnExtraLabels = githubWorkflowBuildRunsOnExtraLabels.value.toList
)) ++ publishJobOpt ++ githubWorkflowAddedJobs.value
}
)
private val publicationCond = Def setting {
val publicationCondPre =
githubWorkflowPublishTargetBranches
.value
.map(compileBranchPredicate("github.ref", _))
.mkString("(", " || ", ")")
val publicationCond = githubWorkflowPublishCond.value match {
case Some(cond) => publicationCondPre + " && (" + cond + ")"
case None => publicationCondPre
}
s"github.event_name != 'pull_request' && $publicationCond"
}
private val generateCiContents = Def task {
val sbt = if (githubWorkflowUseSbtThinClient.value) {
githubWorkflowSbtCommand.value + " --client"
} else {
githubWorkflowSbtCommand.value
}
compileWorkflow(
"Continuous Integration",
githubWorkflowTargetBranches.value.toList,
githubWorkflowTargetTags.value.toList,
githubWorkflowTargetPaths.value,
githubWorkflowPREventTypes.value.toList,
githubWorkflowEnv.value,
githubWorkflowGeneratedCI.value.toList,
sbt
)
}
private val readCleanContents = Def task {
val src = Source.fromURL(getClass.getResource("/clean.yml"))
try {
src.mkString
} finally {
src.close()
}
}
private val workflowsDirTask = Def task {
val githubDir = baseDirectory.value / ".github"
val workflowsDir = githubDir / "workflows"
if (!githubDir.exists()) {
githubDir.mkdir()
}
if (!workflowsDir.exists()) {
workflowsDir.mkdir()
}
workflowsDir
}
private val ciYmlFile = Def task {
workflowsDirTask.value / "ci.yml"
}
private val cleanYmlFile = Def task {
workflowsDirTask.value / "clean.yml"
}
override def projectSettings = Seq(
Global / internalTargetAggregation ++= {
if (githubWorkflowArtifactUpload.value)
Seq(target.value)
else
Seq()
},
githubWorkflowGenerate / aggregate := false,
githubWorkflowCheck / aggregate := false,
githubWorkflowGenerate := {
val ciContents = generateCiContents.value
val includeClean = githubWorkflowIncludeClean.value
val cleanContents = readCleanContents.value
val ciYml = ciYmlFile.value
val cleanYml = cleanYmlFile.value
IO.write(ciYml, ciContents)
if (includeClean)
IO.write(cleanYml, cleanContents)
},
githubWorkflowCheck := {
val expectedCiContents = generateCiContents.value
val includeClean = githubWorkflowIncludeClean.value
val expectedCleanContents = readCleanContents.value
val ciYml = ciYmlFile.value
val cleanYml = cleanYmlFile.value
val log = state.value.log
def reportMismatch(file: File, expected: String, actual: String): Unit = {
log.error(s"Expected:\n$expected")
log.error(s"Actual:\n${diff(expected, actual)}")
sys.error(
s"${file.getName} does not contain contents that would have been generated by sbt-github-actions; try running githubWorkflowGenerate")
}
def compare(file: File, expected: String): Unit = {
val actual = IO.read(file)
if (expected != actual) {
reportMismatch(file, expected, actual)
}
}
compare(ciYml, expectedCiContents)
if (includeClean)
compare(cleanYml, expectedCleanContents)
}
)
private[sbt] def expandMatrix(
oses: List[String],
scalas: List[String],
javas: List[JavaSpec],
matrixAdds: Map[String, List[String]],
includes: List[MatrixInclude],
excludes: List[MatrixExclude]
): List[List[String]] = {
val keys = "os" :: "scala" :: "java" :: matrixAdds.keys.toList.sorted
val matrix =
matrixAdds + ("os" -> oses) + ("scala" -> scalas) + ("java" -> javas.map(_.render))
// expand the matrix
keys
.foldLeft(List(List.empty[String])) { (cells, key) =>
val values = matrix.getOrElse(key, Nil)
cells.flatMap { cell => values.map(v => cell ::: v :: Nil) }
}
.filterNot { cell => // remove the excludes
val job = keys.zip(cell).toMap
excludes.exists { // there is an exclude that matches the current job
case MatrixExclude(matching) => matching.toSet.subsetOf(job.toSet)
}
} ::: includes.map { // add the includes
case MatrixInclude(matching, additions) =>
// yoloing here, but let's wait for the bug report
keys.map(matching) ::: additions.values.toList
}
}
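// Illustrative sketch of the expansion (example inputs assumed, not from the
// original source): with no additions, inclusions, or exclusions,
//   expandMatrix(List("ubuntu-latest"), List("2.12.15", "2.13.8"),
//     List(JavaSpec.temurin("11")), Map(), Nil, Nil)
// folds the keys os :: scala :: java over the cross product and yields
//   List(List("ubuntu-latest", "2.12.15", "temurin@11"),
//        List("ubuntu-latest", "2.13.8", "temurin@11"))
// assuming JavaSpec.temurin("11").render == "temurin@11". An exclude drops a
// cell when its key/value pairs form a subset of that cell; includes are
// appended verbatim after the fold.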
private[sbt] def diff(expected: String, actual: String): String = {
val expectedLines = expected.split("\n", -1)
val actualLines = actual.split("\n", -1)
val (lines, _) =
expectedLines.zipAll(actualLines, "", "").foldLeft((Vector.empty[String], false)) {
case ((acc, foundDifference), (expectedLine, actualLine))
if expectedLine == actualLine =>
(acc :+ actualLine, foundDifference)
case ((acc, false), ("", actualLine)) =>
val previousLineLength = acc.lastOption.map(_.length).getOrElse(0)
val padding = " " * previousLineLength
val highlight = s"$padding^ (additional lines)"
(acc :+ highlight :+ actualLine, true)
case ((acc, false), (_, "")) =>
val previousLineLength = acc.lastOption.map(_.length).getOrElse(0)
val padding = " " * previousLineLength
val highlight = s"$padding^ (missing lines)"
(acc :+ highlight, true)
case ((acc, false), (expectedLine, actualLine)) =>
val sameCount =
expectedLine.zip(actualLine).takeWhile { case (a, b) => a == b }.length
val padding = " " * sameCount
val highlight = s"$padding^ (different character)"
(acc :+ actualLine :+ highlight, true)
case ((acc, true), (_, "")) =>
(acc, true)
case ((acc, true), (_, actualLine)) =>
(acc :+ actualLine, true)
}
lines.mkString("\n")
}
}
|
typelevel/sbt-typelevel
|
github-actions/src/main/scala/org/typelevel/sbt/gha/GenerativePlugin.scala
|
Scala
|
apache-2.0
| 30,696
|
/**
* Copyright 2009 Jorge Ortiz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**/
package org.scala_tools.time
import org.joda.time._
class RichDateMidnight(underlying: DateMidnight) {
def -(duration: Long): DateMidnight =
underlying.minus(duration)
def -(duration: ReadableDuration): DateMidnight =
underlying.minus(duration)
def -(period: ReadablePeriod): DateMidnight =
underlying.minus(period)
def -(builder: DurationBuilder): DateMidnight =
underlying.minus(builder.underlying)
def +(duration: Long): DateMidnight =
underlying.plus(duration)
def +(duration: ReadableDuration): DateMidnight =
underlying.plus(duration)
def +(period: ReadablePeriod): DateMidnight =
underlying.plus(period)
def +(builder: DurationBuilder): DateMidnight =
underlying.plus(builder.underlying)
}
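// Illustrative usage (assumes the implicit DateMidnight => RichDateMidnight
// conversion defined elsewhere in this library is in scope):
//   new DateMidnight(2015, 1, 1) + Period.days(7)             // 2015-01-08 at midnight
//   new DateMidnight(2015, 1, 1) - Duration.standardHours(24) // 2014-12-31 at midnight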
|
jorgeortiz85/scala-time
|
src/main/scala/org/scala_tools/time/RichDateMidnight.scala
|
Scala
|
apache-2.0
| 1,346
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.examples
import org.apache.spark.SparkContext
object BroadcastTest {
def main(args: Array[String]) {
if (args.length == 0) {
System.err.println("Usage: BroadcastTest <master> [slices] [numElem] [broadcastAlgo] [blockSize]")
System.exit(1)
}
val bcName = if (args.length > 3) args(3) else "Http"
val blockSize = if (args.length > 4) args(4) else "4096"
System.setProperty("spark.broadcast.factory", "org.apache.spark.broadcast." + bcName + "BroadcastFactory")
System.setProperty("spark.broadcast.blockSize", blockSize)
val sc = new SparkContext(args(0), "Broadcast Test",
System.getenv("SPARK_HOME"), SparkContext.jarOfClass(this.getClass))
val slices = if (args.length > 1) args(1).toInt else 2
val num = if (args.length > 2) args(2).toInt else 1000000
val arr1 = new Array[Int](num)
for (i <- 0 until arr1.length) {
arr1(i) = i
}
for (i <- 0 until 3) {
println("Iteration " + i)
println("===========")
val startTime = System.nanoTime
val barr1 = sc.broadcast(arr1)
val observedSizes = sc.parallelize(1 to 10, slices).map(_ => barr1.value.size)
// Collect the small RDD so we can print the observed sizes locally.
observedSizes.collect().foreach(i => println(i))
println("Iteration %d took %.0f milliseconds".format(i, (System.nanoTime - startTime) / 1E6))
}
System.exit(0)
}
}
|
dotunolafunmiloye/spark
|
examples/src/main/scala/org/apache/spark/examples/BroadcastTest.scala
|
Scala
|
apache-2.0
| 2,264
|
/*
* Copyright 2014 Databricks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.databricks.spark.csv
import org.apache.hadoop.fs.Path
import org.apache.spark.sql.{DataFrame, SaveMode, SQLContext}
import org.apache.spark.sql.sources._
import org.apache.spark.sql.types.StructType
import com.databricks.spark.csv.util.{ParserLibs, TextFile, TypeCast}
/**
* Provides access to CSV data from pure SQL statements (i.e. for users of the
* JDBC server).
*/
class DefaultSource
extends RelationProvider with SchemaRelationProvider with CreatableRelationProvider {
private def checkPath(parameters: Map[String, String]): String = {
parameters.getOrElse("path", sys.error("'path' must be specified for CSV data."))
}
/**
* Creates a new relation for data stored in CSV, given parameters.
* Parameters have to include 'path', and optionally 'delimiter', 'quote', and 'header'.
*/
override def createRelation(sqlContext: SQLContext, parameters: Map[String, String]) = {
createRelation(sqlContext, parameters, null)
}
/**
* Creates a new relation for data stored in CSV, given parameters and a user-supplied schema.
* Parameters have to include 'path', and optionally 'delimiter', 'quote', and 'header'.
*/
override def createRelation(
sqlContext: SQLContext,
parameters: Map[String, String],
schema: StructType) = {
val path = checkPath(parameters)
val delimiter = TypeCast.toChar(parameters.getOrElse("delimiter", ","))
val quote = parameters.getOrElse("quote", "\"")
val quoteChar = if (quote.length == 1) {
quote.charAt(0)
} else {
throw new Exception("Quotation cannot be more than one character.")
}
val escape = parameters.getOrElse("escape", null)
val escapeChar: Character = if (escape == null) {
null
} else if (escape.length == 1) {
escape.charAt(0)
} else {
throw new Exception("Escape character cannot be more than one character.")
}
val parseMode = parameters.getOrElse("mode", "PERMISSIVE")
val useHeader = parameters.getOrElse("header", "false")
val headerFlag = if (useHeader == "true") {
true
} else if (useHeader == "false") {
false
} else {
throw new Exception("Header flag can be true or false")
}
val parserLib = parameters.getOrElse("parserLib", ParserLibs.DEFAULT)
val ignoreLeadingWhiteSpace = parameters.getOrElse("ignoreLeadingWhiteSpace", "false")
val ignoreLeadingWhiteSpaceFlag = if(ignoreLeadingWhiteSpace == "false") {
false
} else if(ignoreLeadingWhiteSpace == "true") {
if(!ParserLibs.isUnivocityLib(parserLib)) {
throw new Exception("Ignore whitesspace supported for Univocity parser only")
}
true
} else {
throw new Exception("Ignore white space flag can be true or false")
}
val ignoreTrailingWhiteSpace = parameters.getOrElse("ignoreTrailingWhiteSpace", "false")
val ignoreTrailingWhiteSpaceFlag = if(ignoreTrailingWhiteSpace == "false") {
false
} else if(ignoreTrailingWhiteSpace == "true") {
if(!ParserLibs.isUnivocityLib(parserLib)) {
throw new Exception("Ignore whitespace supported for the Univocity parser only")
}
true
} else {
throw new Exception("Ignore white space flag can be true or false")
}
val charset = parameters.getOrElse("charset", TextFile.DEFAULT_CHARSET.name())
// TODO validate charset?
CsvRelation(path,
headerFlag,
delimiter,
quoteChar,
escapeChar,
parseMode,
parserLib,
ignoreLeadingWhiteSpaceFlag,
ignoreTrailingWhiteSpaceFlag,
schema,
charset)(sqlContext)
}
override def createRelation(
sqlContext: SQLContext,
mode: SaveMode,
parameters: Map[String, String],
data: DataFrame): BaseRelation = {
val path = checkPath(parameters)
val filesystemPath = new Path(path)
val fs = filesystemPath.getFileSystem(sqlContext.sparkContext.hadoopConfiguration)
val doSave = if (fs.exists(filesystemPath)) {
mode match {
case SaveMode.Append =>
sys.error(s"Append mode is not supported by ${this.getClass.getCanonicalName}")
case SaveMode.Overwrite =>
fs.delete(filesystemPath, true)
true
case SaveMode.ErrorIfExists =>
sys.error(s"path $path already exists.")
case SaveMode.Ignore => false
}
} else {
true
}
if (doSave) {
// Only save data when the save mode is not ignore.
data.saveAsCsvFile(path, parameters)
}
createRelation(sqlContext, parameters, data.schema)
}
}
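// Illustrative usage from SQL, per the class doc above (file name and option
// values assumed for the example):
//   CREATE TEMPORARY TABLE cars
//   USING com.databricks.spark.csv
//   OPTIONS (path "cars.csv", header "true", delimiter ",")
// which reaches createRelation with those options as `parameters`.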
|
yl2695/spark-csv
|
src/main/scala/com/databricks/spark/csv/DefaultSource.scala
|
Scala
|
apache-2.0
| 5,178
|
/*
* Copyright 2015 Mediative
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mediative.sparrow
import org.apache.spark.sql.Row
import org.apache.spark.sql.types._
import org.scalatest._
import com.github.nscala_time.time.Imports._
import RowConverter._
import RowConverter.syntax._
class DateTimeOptionsSpec extends FreeSpec {
import ConverterTester._
case class DateTimeHolder(
name: String,
dateTime: DateTime)
object DateTimeHolder {
implicit val schema = (
field[String]("name") and
field[DateTime]("dateTime")(DatePattern("dd/MM/yyyy HH:mm:ss"))
)(apply _)
implicit val tpe: Tpe[DateTimeHolder] = StructType(List(
StructField("name", StringType, nullable = false),
StructField("dateTime", StringType, nullable = false)
))
}
case class LocalDateHolder(
name: String,
dateTime: LocalDate)
object LocalDateHolder {
implicit val schema = (
field[String]("name") and
field[LocalDate]("dateTime")(DatePattern("dd/MM/yyyy"))
)(apply _)
implicit val tpe: Tpe[LocalDateHolder] = StructType(List(
StructField("name", StringType, nullable = false),
StructField("dateTime", StringType, nullable = false)
))
}
"DateTimeRowConverter" - {
"should allow define a custom date format for DateTime fields" in {
test(Row("Hello", "25/12/2015 14:40:00"), DateTimeHolder("Hello", DateTime.parse("2015-12-25T14:40:00.00")))
}
"should throw an exception if the DateTime value doesn't have the correct format" in {
val ex = intercept[IllegalArgumentException] {
test(Row("Hello", "2/212/2015 14:40:00"), DateTimeHolder("Hello", DateTime.parse("2015-12-25T14:40:00.00")))
}
assert(ex.getMessage === "Invalid format: \"2/212/2015 14:40:00\" is malformed at \"2/2015 14:40:00\"")
}
"should allow define a custom date format for LocalDate fields" in {
test(Row("Hello", "25/12/2015"), LocalDateHolder("Hello", LocalDate.parse("2015-12-25")))
}
}
}
|
jonas/sparrow
|
core/src/test/scala/com.mediative.sparrow/DateTimeOptionsSpec.scala
|
Scala
|
apache-2.0
| 2,542
|
package c2.w4.futures
import scala.concurrent.Future
trait Request {}
object Request {
def apply(bytes: Array[Byte]): Request = ???
}
trait Response {
def isOK: Boolean
def body: Array[Byte]
}
object Response {
def apply(bytes: Array[Byte]): Response = ???
}
object Http {
def apply(url: String, request: Request): Future[Response] = ???
}
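// Hypothetical composition sketch (not in the original source): once the stubs
// above are implemented, a request/response round trip might look like this.
object SendExample {
  import scala.concurrent.ExecutionContext.Implicits.global
  def send(url: String, payload: Array[Byte]): Future[Array[Byte]] =
    Http(url, Request(payload)).map { response =>
      // read the body on success, fall back to an empty payload otherwise
      if (response.isOK) response.body
      else Array.emptyByteArray
    }
}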
|
lwo/lwo.github.io
|
src/main/scala/c2/w4/futures/Send.scala
|
Scala
|
gpl-3.0
| 357
|
package infra.piece.html
import infra.piece.core.{Piece, PieceKind}
import play.api.Plugin
import play.api.libs.json.{Json, Format}
import play.api.templates.Html
import scala.concurrent.{Future, ExecutionContext}
import org.jsoup.Jsoup
import org.jsoup.safety.Whitelist
/**
* @author alari (name.alari@gmail.com)
* @since 07.05.14 14:45
*/
class HtmlKind(app: play.api.Application) extends PieceKind("html") with Plugin{
override type P = HtmlPiece
override def html(piece: P): Html = infra.piece.html.html.html(piece)
override val format: Format[P] = Json.format[P]
override def handlePiece(implicit ec: ExecutionContext): PartialFunction[Piece, Future[P]] = {
case p: P => Future(p.copy(content = Jsoup.clean(p.content, Whitelist.relaxed)))
}
}
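// Illustrative effect of the Jsoup cleaning above: Whitelist.relaxed keeps
// common text markup but strips scripts and event-handler attributes, e.g.
//   Jsoup.clean("""<p onclick="x()">hi</p><script>evil()</script>""", Whitelist.relaxed)
//   // => "<p>hi</p>"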
|
alari/play-content
|
module-code/app/infra/piece/html/HtmlKind.scala
|
Scala
|
mit
| 770
|
package com.feynmanliang.optala
import scala.util.{Failure, Success, Try}
import breeze.linalg._
private[optala] case class GradientBasedSolution(
override val point: DenseVector[Double],
override val f: Vector[Double] => Double,
grad: DenseVector[Double],
normGrad: Double) extends Solution(f, point)
private[optala] case class GradientBasedRunResult (
override val stateTrace: List[GradientBasedSolution],
override val numObjEval: Long,
override val numGradEval: Long) extends RunResult[GradientBasedSolution] {
override val bestSolution = stateTrace.minBy(_.objVal)
}
/** Methods for minimizing objective functions with analytical gradients as well as quadratic forms.
*
* @param maxSteps maximum number of steps before termination
* @param tol defines convergence when norm(gradient) < tol
*/
class GradientOptimizer(
var maxSteps: Int = 50000,
var tol: Double = 1E-6) {
import com.feynmanliang.optala.GradientAlgorithm._
import com.feynmanliang.optala.LineSearchConfig._
/** Minimizes a quadratic form 0.5 x'Ax - b'x.
*
* @param A hessian
* @param b gradient
* @param x0 starting point
* @param gradientAlgorithm algorithm to use for choosing the step direction
* @param lineSearchConfig algorithm to use for choosing the step size
*/
def minQuadraticForm(
A: Matrix[Double],
b: Vector[Double],
x0: Vector[Double],
gradientAlgorithm: GradientAlgorithm.GradientAlgorithm,
lineSearchConfig: LineSearchConfig.LineSearchConfig): Try[GradientBasedRunResult] = {
val fCnt = new FunctionWithCounter[Vector[Double], Double](x => 0.5D * (x.t * (A * x)) - b.t * x)
val dfCnt = new FunctionWithCounter[Vector[Double], Vector[Double]](x => A * x - b)
val lineSearch: (Vector[Double], Vector[Double]) => Option[Double] = lineSearchConfig match {
case Exact => (x, p) => LineSearch.exactLineSearch(A, dfCnt(x), x, p)
case CubicInterpolation => (x, p) => LineSearch.chooseStepSize(fCnt, dfCnt, x, p)
}
val xValues = (gradientAlgorithm match {
case SteepestDescent => steepestDescent(lineSearch, fCnt, dfCnt, x0.toDenseVector)
case ConjugateGradient => conjugateGradient(lineSearch, fCnt, dfCnt, x0.toDenseVector)
}).take(maxSteps)
.toSeq
xValues.find(_.normGrad < tol) match {
case Some(xStar) =>
val trace = (xValues.takeWhile(_.normGrad >= tol) :+ xStar).toList
Success(GradientBasedRunResult(trace, fCnt.numCalls, dfCnt.numCalls))
case None => Failure(new RuntimeException("Did not converge!")) // sys.error would throw rather than return a Failure
}
}
/** Overload of `minimize` for accepting scalar-argument functions.
*
* @param f scalar argument function
* @param df derivative of f
* @param x0 starting point
* @param gradientAlgorithm algorithm to use for choosing the step direction
* @param lineSearchConfig algorithm to use for choosing the step size
*/
def minimize(
f: Double => Double,
df: Double => Double,
x0: Double,
gradientAlgorithm: GradientAlgorithm.GradientAlgorithm,
lineSearchConfig: LineSearchConfig.LineSearchConfig): Try[GradientBasedRunResult] = {
val vecF: Vector[Double] => Double = v => {
require(v.size == 1, s"vectorized f expected dimension 1 input but got ${v.size}")
f(v(0))
}
val vecDf: Vector[Double] => Vector[Double] = v => {
require(v.size == 1, s"vectorized f expected dimension 1 input but got ${v.size}")
DenseVector(df(v(0)))
}
minimize(vecF, vecDf, DenseVector(x0), gradientAlgorithm, lineSearchConfig)
}
/** Minimize a smooth convex function.
*
* @param f objective function
* @param df gradient
* @param x0 starting point
* @param gradientAlgorithm algorithm to use for choosing the step direction
* @param lineSearchConfig algorithm to use for choosing the step size
*/
def minimize(
f: Vector[Double] => Double,
df: Vector[Double] => Vector[Double],
x0: Vector[Double],
gradientAlgorithm: GradientAlgorithm,
lineSearchConfig: LineSearchConfig): Try[GradientBasedRunResult] = {
val fCnt = new FunctionWithCounter(f)
val dfCnt = new FunctionWithCounter(df)
val lineSearch: (Vector[Double], Vector[Double]) => Option[Double] = (x, p) => {
LineSearch.chooseStepSize(fCnt, dfCnt, x, p)
}
val xValues = (gradientAlgorithm match {
case SteepestDescent => steepestDescent(lineSearch, fCnt, dfCnt, x0.toDenseVector)
case ConjugateGradient => conjugateGradient(lineSearch, fCnt, dfCnt, x0.toDenseVector)
}).take(maxSteps)
.toSeq
xValues.find(_.normGrad < tol) match {
case Some(xStar) =>
val trace = (xValues.takeWhile(_.normGrad >= tol) :+ xStar).toList
Success(GradientBasedRunResult(trace, fCnt.numCalls, dfCnt.numCalls))
case None => Failure(new RuntimeException("Did not converge!")) // sys.error would throw rather than return a Failure
}
}
private def steepestDescent(
lineSearch: (Vector[Double], Vector[Double]) => Option[Double],
f: Vector[Double] => Double,
df: Vector[Double] => Vector[Double],
x0: DenseVector[Double]): Stream[GradientBasedSolution] = {
/** Computes a Stream of x values along steepest descent direction */
def improve(x: DenseVector[Double]): Stream[GradientBasedSolution] = {
val grad = df(x).toDenseVector
val currSolution = GradientBasedSolution(x, f, grad, norm(grad))
if (currSolution.normGrad == 0D) {
currSolution #:: Stream.Empty
} else {
val p = -grad / norm(grad.toDenseVector) // steepest descent direction
lineSearch(x, p) match {
case Some(alpha) => currSolution #:: improve(x + alpha * p)
case None => currSolution #:: Stream.Empty
}
}
}
improve(x0)
}
/** Conjugate Gradient using Fletcher-Reeves rule. */
private def conjugateGradient(
lineSearch: (Vector[Double], Vector[Double]) => Option[Double],
f: Vector[Double] => Double,
df: Vector[Double] => Vector[Double],
x0: DenseVector[Double]): Stream[GradientBasedSolution] = {
/** Compute a Stream of x values using CG minimizing `f`. */
def improve(
x: DenseVector[Double],
grad: DenseVector[Double],
p: DenseVector[Double]): Stream[GradientBasedSolution] = {
val currSolution = GradientBasedSolution(x, f, grad, norm(grad))
if (currSolution.normGrad == 0) {
currSolution #:: Stream.Empty
} else {
lineSearch(x, p) match {
case Some(alpha) =>
val newX = x + alpha * p
val newGrad = df(newX).toDenseVector
val beta = (newGrad dot newGrad) / (grad dot grad) // Fletcher-Reeves rule
val newP = -newGrad + beta * p
currSolution #:: improve(newX, newGrad, newP)
case None => currSolution #:: Stream.Empty
}
}
}
val dfx0 = df(x0).toDenseVector
improve(x0, dfx0, -dfx0)
}
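// The Fletcher-Reeves update used in `improve` above, written out:
//   beta_k = (g_{k+1} . g_{k+1}) / (g_k . g_k),  p_{k+1} = -g_{k+1} + beta_k * p_k
// which is exactly the `beta` / `newP` computation in the Some(alpha) branch.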
}
object GradientAlgorithm extends Enumeration {
type GradientAlgorithm = Value
val SteepestDescent = Value("Steepest Descent")
val ConjugateGradient = Value("Conjugate Gradient")
}
object LineSearchConfig extends Enumeration {
type LineSearchConfig = Value
val CubicInterpolation = Value("Cubic Interpolation")
val Exact = Value("Exact Line Search")
}
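// Illustrative usage sketch (not in the original source): minimizing the
// quadratic 0.5 x'Ax - b'x with the API above. For A = diag(2, 4) and
// b = (2, -8), the minimizer solves Ax = b, i.e. x = (1, -2).
object GradientOptimizerExample {
  def main(args: Array[String]): Unit = {
    val A = DenseMatrix((2D, 0D), (0D, 4D))
    val b = DenseVector(2D, -8D)
    val opt = new GradientOptimizer(maxSteps = 1000, tol = 1E-8)
    opt.minQuadraticForm(A, b, DenseVector.zeros[Double](2),
      GradientAlgorithm.ConjugateGradient, LineSearchConfig.Exact) match {
      case Success(result) => println(result.bestSolution.point) // ~ DenseVector(1.0, -2.0)
      case Failure(e) => println(s"did not converge: ${e.getMessage}")
    }
  }
}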
|
feynmanliang/optala
|
src/main/scala/com/feynmanliang/optala/GradientOptimizer.scala
|
Scala
|
mit
| 7,309
|
package mesosphere.marathon.core
import mesosphere.marathon.core.auth.AuthModule
import mesosphere.marathon.core.launcher.LauncherModule
import mesosphere.marathon.core.launchqueue.LaunchQueueModule
import mesosphere.marathon.core.leadership.LeadershipModule
import mesosphere.marathon.core.plugin.PluginModule
import mesosphere.marathon.core.task.bus.TaskBusModule
import mesosphere.marathon.core.task.tracker.TaskTrackerModule
/**
* The exported interface of the [[CoreModuleImpl]].
*
* This is necessary to allow Guice to introduce proxies to break cyclic dependencies
* (as long as we have them).
*/
trait CoreModule {
def leadershipModule: LeadershipModule
def taskBusModule: TaskBusModule
def taskTrackerModule: TaskTrackerModule
def launcherModule: LauncherModule
def appOfferMatcherModule: LaunchQueueModule
def pluginModule: PluginModule
def authModule: AuthModule
}
|
matsluni/marathon
|
src/main/scala/mesosphere/marathon/core/CoreModule.scala
|
Scala
|
apache-2.0
| 902
|
package views.html.helpers
import replsampler.formatting.Formatter
import play.api.templates.Html
import replsampler.Runner.Result
import replsampler.{Runner, ReplSampler}
import utils.Hash
import play.api.cache.Cache
import play.api.Play.current
object sample {
object KodknackningFormatter extends Formatter[Html]{
override def apply(in: Seq[Result]): Html = sampleFormatter(in)
}
def apply(in: String): Html = {
val hash = Hash.SHA1(in).toHexString
Cache.getOrElse[Html](s"sample.$hash") {
ReplSampler.runAndFormat(in, KodknackningFormatter)
}
}
def status2cssclass(r: Runner.ResultStatus) = r match {
case Runner.CompileFail => "error"
case Runner.RuntimeFail => "error"
case Runner.Success => "success"
}
}
|
teozkr/kodknackning
|
app/views/helpers/sample.scala
|
Scala
|
bsd-2-clause
| 762
|
/*
* Copyright 2017 Datamountaineer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datamountaineer.streamreactor.connect.cassandra.sink
import com.datamountaineer.streamreactor.connect.cassandra.TestConfig
import com.datamountaineer.streamreactor.connect.cassandra.config.CassandraConfigConstants
import org.scalatest.{BeforeAndAfter, Matchers, WordSpec}
import scala.collection.JavaConverters._
class TestCassandraSinkConnector extends WordSpec with BeforeAndAfter with Matchers with TestConfig {
"Should start a Cassandra Sink Connector" in {
val props = Map(
"topics" -> s"$TOPIC1, $TOPIC2",
CassandraConfigConstants.CONTACT_POINTS -> CONTACT_POINT,
CassandraConfigConstants.KEY_SPACE -> CASSANDRA_SINK_KEYSPACE,
CassandraConfigConstants.USERNAME -> USERNAME,
CassandraConfigConstants.PASSWD -> PASSWD,
CassandraConfigConstants.KCQL -> QUERY_ALL
).asJava
val connector = new CassandraSinkConnector()
connector.start(props)
val taskConfigs = connector.taskConfigs(1)
taskConfigs.asScala.head.get(CassandraConfigConstants.KCQL) shouldBe QUERY_ALL
taskConfigs.asScala.head.get(CassandraConfigConstants.CONTACT_POINTS) shouldBe CONTACT_POINT
taskConfigs.asScala.head.get(CassandraConfigConstants.KEY_SPACE) shouldBe TOPIC1
taskConfigs.size() shouldBe 1
connector.taskClass() shouldBe classOf[CassandraSinkTask]
//connector.version() shouldBe ""
connector.stop()
}
}
|
CodeSmell/stream-reactor
|
kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/sink/TestCassandraSinkConnector.scala
|
Scala
|
apache-2.0
| 1,984
|
/*
Copyright 2013 Stephen K Samuel
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.sksamuel.scrimage.filter
import com.sksamuel.scrimage.BufferedOpFilter
/** @author Stephen Samuel */
class LensBlurFilter(radius: Float, bloom: Float, bloomThreshold: Float, sides: Int, angle: Float)
extends BufferedOpFilter {
val op = new thirdparty.jhlabs.image.LensBlurFilter()
op.setSides(sides)
op.setBloomThreshold(bloomThreshold)
op.setBloom(bloom)
op.setRadius(radius)
}
object LensBlurFilter {
def apply() = new LensBlurFilter(5, 2, 255, 5, 0)
}
|
carlosFattor/scrimage
|
scrimage-filters/src/main/scala/com/sksamuel/scrimage/filter/LensBlurFitler.scala
|
Scala
|
apache-2.0
| 1,079
|
package com.arcusys.valamis.lesson.scorm.model
import com.arcusys.valamis.lesson.scorm.model.manifest.Metadata
import org.scalatest.FlatSpec
import org.scalatest.matchers.ShouldMatchers
class MetadataTest extends FlatSpec with ShouldMatchers {
"Metadata" can "be constructed" in {
val externalLocations = Seq("Loc1", "Loc2")
val inline = Seq("<inlineM>MM</inlineM>")
val metadata = new Metadata(externalLocations, inline)
metadata.externalMetadataLocations should equal(externalLocations)
metadata.inlineMetadata should equal(inline)
}
it can "be constructed with empty external locations" in {
val inline = Seq("<inlineM>MM</inlineM>")
val metadata = new Metadata(Nil, inline)
metadata.externalMetadataLocations should equal(Nil)
metadata.inlineMetadata should equal(inline)
}
it can "be constructed with empty inline" in {
val externalLocations = Seq("Loc1", "Loc2")
val metadata = new Metadata(externalLocations, Nil)
metadata.externalMetadataLocations should equal(externalLocations)
metadata.inlineMetadata should equal(Nil)
}
it can "be constructed with empty external locations and inline" in {
val metadata = new Metadata(Nil, Nil)
metadata.externalMetadataLocations should equal(Nil)
metadata.inlineMetadata should equal(Nil)
}
}
|
ViLPy/Valamis
|
valamis-scorm-lesson/src/test/scala/com/arcusys/valamis/lesson/scorm/model/MetadataTest.scala
|
Scala
|
lgpl-3.0
| 1,325
|
package com.raquo
package object xstream extends StreamConversions
|
raquo/XStream.scala
|
src/main/scala/com/raquo/xstream/package.scala
|
Scala
|
mit
| 68
|
package controllers
import play.api._
import play.api.mvc._
import util.Location
object Page extends Controller {
val pageModel = new models.Pages
def page(slug: String) = Action {
val pageOption = pageModel.getPage(slug)
pageOption match {
case Some(pageEntity) => {
Location.set(pageEntity.title)
Ok(views.html.page.view(pageEntity))
}
case None => Ok(views.html.page.notfound())
}
}
}
|
icambridge-old/inspector
|
app/controllers/Pages.scala
|
Scala
|
mit
| 448
|
package scalaz.stream.mongodb
import scalaz.stream.mongodb.query.{QueryEnums, QuerySyntax}
import scalaz.stream.mongodb.bson.{BSONValuesImplicits, BSONValues}
import scalaz.stream.mongodb.index.CollectionIndexSyntax
import com.mongodb.DBCollection
import scalaz.concurrent.Task
import scalaz.stream.Process
import scalaz.stream.Process._
import scala.language.implicitConversions
import scalaz.stream.mongodb.update.{UpdateSyntax, FindAndModifySyntax}
import scalaz.syntax.monad._
import scalaz.stream.mongodb.filesystem.FileSystemSyntax
import scalaz.stream.mongodb.aggregate.AggregationSyntax
trait Collection {
implicit def dbCollection2Process(c: DBCollection): Process[Task, DBCollection] = emit(Task.now(c)).eval
def use(c: DBCollection): Process[Task, DBCollection] = emit(Task.now(c)).eval
implicit class DBCollectionSyntax(c: DBCollection) {
def through[A](f: Channel[Task, DBCollection, Process[Task, A]]): Process[Task, A] =
(eval(Task.now(c)) through f).join
def >>>[A](f: Channel[Task, DBCollection, Process[Task, A]]): Process[Task, A] = through(f)
}
}
/**
* Generic implicit that has to be injected to get our collection-related functionality in scope
*/
object collectionSyntax extends Collection
with QuerySyntax with QueryEnums
with CollectionIndexSyntax
with channel.ChannelResultSyntax
with UpdateSyntax with FindAndModifySyntax
with FileSystemSyntax
with AggregationSyntax
with BSONValues with BSONValuesImplicits
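// Hypothetical usage sketch (names like `query` come from QuerySyntax and are
// assumed here, not shown in this file): with collectionSyntax._ in scope, a
// DBCollection can be piped through a channel built by the DSL, e.g.
//   import collectionSyntax._
//   val out: Process[Task, DBObject] = collection through query("name" -> "x")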
|
Spinoco/scalaz-stream-mongodb
|
core/src/main/scala/scalaz/stream/mongodb/collection.scala
|
Scala
|
mit
| 1,700
|
package scala.c.engine
import java.nio.file.Paths
import better.files.File
class TinyExprTest extends StandardTest {
// "tinyexpr test 2" should "print the correct results" in {
// val code = """
// void main() {
//
// #include "tinyexpr.h"
//
// int i;
// int err;
// const double r = te_interp("1*(4+5)", &err);
// printf("%f\\n", r);
// return 0;
// }"""
//
// val tinyExpr = Paths.get("tests", "scala", "c", "engine", "tinyexpr", "tinyexpr.c")
// val strtod = Paths.get("tests", "scala", "c", "engine", "tinyexpr", "strtod.c")
// //val testC = Paths.get("tests", "scala", "c", "engine", "tinyexpr", "test.c")
//
// val allCode = Seq(File(strtod).contentAsString, File(tinyExpr).contentAsString, code)
//
// checkResults2(allCode, includePaths = List(raw"./tests/scala/c/engine/tinyexpr"))
// }
}
|
bdwashbu/cEngine
|
tests/scala/c/engine/TinyExprTest.scala
|
Scala
|
apache-2.0
| 868
|
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This file is part of Rudder.
*
* Rudder is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU General Public License version 3, the copyright holders add
* the following Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU General
* Public License version 3, when you create a Related Module, this
* Related Module is not considered as a part of the work and may be
* distributed under the license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* Rudder is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Rudder. If not, see <http://www.gnu.org/licenses/>.
*
*************************************************************************************
*/
package com.normation.rudder.web.components.popup
import com.normation.rudder.domain.nodes._
import com.normation.inventory.domain.NodeId
import org.slf4j.LoggerFactory
import com.normation.rudder.domain.nodes.NodeInfo
import com.normation.rudder.domain.queries.Query
import com.normation.rudder.domain.queries.NodeReturnType
import net.liftweb.http.LocalSnippet
import net.liftweb.http.Templates
import net.liftweb.http.js._
import JsCmds._
import com.normation.utils.StringUuidGenerator
import JE._
import net.liftweb.common._
import net.liftweb.http.{SHtml,S,DispatchSnippet,Templates}
import scala.xml._
import net.liftweb.util.Helpers
import net.liftweb.util.Helpers._
import com.normation.rudder.web.model.{
WBTextField, FormTracker, WBTextAreaField, WBSelectField, WBRadioField
}
import com.normation.rudder.repository._
import com.normation.rudder.services.nodes.NodeInfoService
import com.normation.rudder.web.model.CurrentUser
import com.normation.rudder.domain.queries.DitQueryData
import com.normation.rudder.domain.queries.And
import com.normation.inventory.ldap.core.LDAPConstants._
import com.normation.rudder.domain.queries.CriterionLine
import com.normation.rudder.web.services.UserPropertyService
import com.normation.eventlog.ModificationId
import com.normation.rudder.web.services.CategoryHierarchyDisplayer
import bootstrap.liftweb.RudderConfig
import com.normation.rudder.web.ChooseTemplate
/**
* Create a group or a category.
* This popup allows the creation of a group or a category; if a group is passed
* as an argument, it forces the creation of a new group based on the query that
* group contains.
*/
class CreateCategoryOrGroupPopup(
groupGenerator : Option[NodeGroup]
, rootCategory : FullNodeGroupCategory
, selectedCategory : Option[NodeGroupCategoryId]
, onSuccessCategory: (NodeGroupCategory) => JsCmd
, onSuccessGroup : (NodeGroup, NodeGroupCategoryId) => JsCmd
, onSuccessCallback: (String) => JsCmd = { _ => Noop }
, onFailureCallback: () => JsCmd = { () => Noop }
) extends DispatchSnippet with Loggable {
// Load the template from the popup
def popupTemplate = ChooseTemplate(
List("templates-hidden", "Popup", "createCategoryOrGroup")
, "groups-creategrouppopup"
)
private[this] val woNodeGroupRepository = RudderConfig.woNodeGroupRepository
private[this] val nodeInfoService = RudderConfig.nodeInfoService
private[this] val categoryHierarchyDisplayer = RudderConfig.categoryHierarchyDisplayer
private[this] val uuidGen = RudderConfig.stringUuidGenerator
private[this] val userPropertyService = RudderConfig.userPropertyService
private[this] val ditQueryData = RudderConfig.ditQueryData
var createContainer = false //issue #1190 always create a group by default
def dispatch = {
case "popupContent" => { _ => popupContent }
}
/**
* If we create a category, the info about the group is hidden (default), otherwise we show it
*/
private[this] def initJs : JsCmd = {
JsRaw("""
if($('input[value="Group"]').get(':checked')){
$('#createGroupHiddable').removeClass('nodisplay');
$('#itemTitle').text('Group');
}else{
$('#itemTitle').text('Category');
}
$('input[value="Group"]').click(
function() {
$('#createGroupHiddable').removeClass('nodisplay');
$('#itemTitle').text('Group');
}
);
$('input[value="Category"]').click(
function() {
$('#createGroupHiddable').addClass('nodisplay');
$('#itemTitle').text('Category');
}
);
$('input[value="Group"]').click();
""")
}
def popupContent() : NodeSeq = {
val f = SHtml.ajaxForm(
(
"item-itemtype" #> {
groupGenerator match {
case None => piItemType.toForm_!
case Some(x) => NodeSeq.Empty
}
}
& "item-itemname" #> piName.toForm_!
& "item-itemcontainer" #> piContainer.toForm_!
& "item-itemdescription" #> piDescription.toForm_!
& "item-notifications" #> updateAndDisplayNotifications()
& "item-grouptype" #> piStatic.toForm_!
& "item-itemreason" #> { piReasons.map { f =>
<div>
<h4 class="col-lg-12 col-sm-12 col-xs-12 audit-title">Change Audit Log</h4>
{f.toForm_!}
</div>
} }
& "item-cancel" #> ( SHtml.ajaxButton("Cancel", { () => closePopup() }) % ("tabindex","6") % ("class","btn btn-default") )
& "item-save" #> ( SHtml.ajaxSubmit("Create", onSubmit _) % ("id","createCOGSaveButton") % ("tabindex","5") % ("class","btn btn-success") )
)(popupTemplate)
) ++ Script(OnLoad(initJs))
f
}
///////////// fields for category settings ///////////////////
private[this] val piName = new WBTextField("Name", "") {
override def setFilter = notNull _ :: trim _ :: Nil
override def errorClassName = "col-lg-12 errors-container"
override def inputField = super.inputField %("onkeydown" , "return processKey(event , 'createCOGSaveButton')") % ("tabindex","2")
override def validations =
valMinLen(3, "The name must have at least 3 characters.") _ :: Nil
}
private[this] val piDescription = new WBTextAreaField("Description", "") {
override def setFilter = notNull _ :: trim _ :: Nil
override def inputField = super.inputField % ("style" -> "height:5em") % ("tabindex","4")
override def errorClassName = "col-lg-12 errors-container"
override def validations = Nil
}
private[this] val piStatic = new WBRadioField("Group type", Seq("static", "dynamic"), "dynamic", {
// how to display the label? Capitalize, and add a tooltip
case "static" =>
<span title="The list of member nodes is defined at creation and will not change automatically.">
Static
</span>
case "dynamic" =>
<span title="Nodes will be automatically added and removed so that the list of members always matches this group's search criteria.">Dynamic</span>
},Some(5)) {
override def setFilter = notNull _ :: trim _ :: Nil
override def className = "align-radio-generate-input"
override def errorClassName = "col-lg-12 errors-container"
override def inputField = super.inputField %("onkeydown" , "return processKey(event , 'createCOGSaveButton')")
override def validations =
valMinLen(1, "Please choose a group type.") _ :: Nil
}
private[this] val piItemType = {
new WBRadioField(
"Item to create",
Seq("Group", "Category"),
"Group",
{case "Group" =>
<span id="textGroupRadio">Group</span>
case "Category" =>
<span id="textCategoryRadio">Category</span>
}, Some(1)) {
override def setFilter = notNull _ :: trim _ :: Nil
override def className = "align-radio-generate-input"
override def errorClassName = "col-lg-12 errors-container"
override def inputField = super.inputField %("onkeydown" , "return processKey(event , 'createCOGSaveButton')")
override def validations =
valMinLen(1, "Please choose between group or category.") _ :: Nil
}
}
private[this] val piContainer = new WBSelectField(
"Parent category"
, (categoryHierarchyDisplayer.getCategoriesHierarchy(rootCategory, None).map { case (id, name) => (id.value -> name)})
, selectedCategory.map(_.value).getOrElse("")) {
override def errorClassName = "col-lg-12 errors-container"
override def className = "col-lg-12 col-sm-12 col-xs-12 form-control"
override def inputField =
super.inputField % ("onkeydown" , "return processKey(event , 'createCOGSaveButton')") %
("tabindex","3")
override def validations =
valMinLen(1, "Please select a category") _ :: Nil
}
private[this] val formTracker = new FormTracker(piName, piDescription, piContainer, piStatic)
private[this] var notifications = List.empty[NodeSeq]
private[this] def error(msg:String) = Text(msg)
private[this] def closePopup() : JsCmd = {
JsRaw(""" $('#createGroupPopup').bsModal('hide');""")
}
/**
* Update the form when something happened
*/
private[this] def updateFormClientSide() : JsCmd = {
SetHtml("createGroupContainer", popupContent())
}
private[this] def onSubmit() : JsCmd = {
if(formTracker.hasErrors) {
onFailure & onFailureCallback()
} else {
val createCategory = piItemType.get match {
case "Group" => false
case "Category" => true
}
if (createCategory) {
woNodeGroupRepository.addGroupCategorytoCategory(
new NodeGroupCategory(
NodeGroupCategoryId(uuidGen.newUuid),
piName.get,
piDescription.get,
Nil,
Nil
)
, NodeGroupCategoryId(piContainer.get)
, ModificationId(uuidGen.newUuid)
, CurrentUser.getActor
, piReasons.map(_.get)
) match {
case Full(x) => closePopup() & onSuccessCallback(x.id.value) & onSuccessCategory(x)
case Empty =>
logger.error("An error occurred while saving the category")
formTracker.addFormError(error("An error occurred while saving the category"))
onFailure & onFailureCallback()
case Failure(m,_,_) =>
logger.error("An error occurred while saving the category:" + m)
formTracker.addFormError(error(m))
onFailure & onFailureCallback()
}
} else {
val defaultLine = CriterionLine(
objectType = ditQueryData.criteriaMap(OC_NODE)
, attribute = ditQueryData.criteriaMap(OC_NODE).criteria(0)
, comparator = ditQueryData.criteriaMap(OC_NODE).criteria(0).cType.comparators(0)
, value = "Linux"
)
val query = Some(groupGenerator.flatMap(_.query).getOrElse(Query(NodeReturnType,And,Seq(defaultLine))))
val isDynamic = piStatic.get match { case "dynamic" => true ; case _ => false }
val srvList = groupGenerator.map(_.serverList).getOrElse(Set[NodeId]())
val nodeId = NodeGroupId(uuidGen.newUuid)
val nodeGroup = NodeGroup(nodeId,piName.get,piDescription.get,query,isDynamic,srvList,true)
woNodeGroupRepository.create(
nodeGroup
, NodeGroupCategoryId(piContainer.get)
, ModificationId(uuidGen.newUuid)
, CurrentUser.getActor
, piReasons.map(_.get)
) match {
case Full(x) =>
closePopup() &
onSuccessCallback(x.group.id.value) & onSuccessGroup(x.group, NodeGroupCategoryId(piContainer.get))
case Empty =>
logger.error("An error occurred while saving the group")
formTracker.addFormError(error("An error occurred while saving the group"))
onFailure & onFailureCallback()
case Failure(m,_,_) =>
logger.error("An error occurred while saving the group: " + m)
formTracker.addFormError(error(m))
onFailure & onFailureCallback()
}
}
}
}
private[this] val piReasons = {
import com.normation.rudder.web.services.ReasonBehavior._
userPropertyService.reasonsFieldBehavior match {
case Disabled => None
case Mandatory => Some(buildReasonField(true, "subContainerReasonField"))
case Optionnal => Some(buildReasonField(false, "subContainerReasonField"))
}
}
def buildReasonField(mandatory:Boolean, containerClass:String = "twoCol") = {
new WBTextAreaField("Change audit message", "") {
override def setFilter = notNull _ :: trim _ :: Nil
override def inputField = super.inputField %
("style" -> "height:5em;") % ("placeholder" -> {userPropertyService.reasonsFieldExplanation})
override def errorClassName = "col-lg-12 errors-container"
override def validations() = {
if(mandatory){
valMinLen(5, "The reason must have at least 5 characters.") _ :: Nil
} else {
Nil
}
}
}
}
private[this] def onCreateSuccess : JsCmd = {
notifications ::= <span class="greenscala">The group was successfully created</span>
updateFormClientSide
}
private[this] def onUpdateSuccess : JsCmd = {
notifications ::= <span class="greenscala">The group was successfully updated</span>
updateFormClientSide
}
private[this] def onFailure : JsCmd = {
updateFormClientSide()
}
private[this] def updateAndDisplayNotifications() : NodeSeq = {
notifications :::= formTracker.formErrors
formTracker.cleanErrors
if(notifications.isEmpty) NodeSeq.Empty
else {
val html =
<div id="notifications" class="alert alert-danger text-center col-lg-12 col-xs-12 col-sm-12" role="alert">
<ul class="text-danger">{notifications.map(n => <li>{n}</li>)}</ul>
</div>
notifications = Nil
html
}
}
}
|
armeniaca/rudder
|
rudder-web/src/main/scala/com/normation/rudder/web/components/popup/CreateCategoryOrGroupPopup.scala
|
Scala
|
gpl-3.0
| 14,737
|
import quoted.*
import scala.quoted.staging.*
object Test {
given Compiler = Compiler.make(getClass.getClassLoader)
def main(args: Array[String]): Unit = withQuotes {
val q = '{ (q: Quotes) ?=>
val t = Type.of[String]
t
}
println(q.show)
}
}
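// Editor's note: withQuotes supplies the Quotes context needed at staging
// level 0; q.show pretty-prints the staged lambda, whose body captures a
// nested Type.of[String] inside the quoted expression.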
|
dotty-staging/dotty
|
tests/run-staging/quote-nested-4.scala
|
Scala
|
apache-2.0
| 275
|
package com.gilt.sbt.artifactory
import sbt._
import sbt.Keys._
object GiltArtifactory extends AutoPlugin {
override def trigger = allRequirements
override def requires = empty
private lazy val defaultIvyPattern = Patterns(
ivyPatterns = Vector("[organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/ivy-[revision].xml"),
artifactPatterns = Vector("[organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[artifact]-[revision](-[classifier]).[ext]"),
isMavenCompatible = true,
descriptorOptional = true,
skipConsistencyCheck = false
)
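// Editor's note: this pattern mirrors sbt's standard ivy layout, with the
// optional scala_/sbt_ folders covering cross-built plugins; descriptorOptional
// tolerates artifacts published without an ivy.xml descriptor.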
override def projectSettings = Seq(
credentials += Credentials("Artifactory Realm", "giltgroupe.artifactoryonline.com", System.getenv("ART_USER"), System.getenv("ART_PASS")),
resolvers ++= Seq(
DefaultMavenRepository,
Resolver.url("Typesafe Cache", url("https://giltgroupe.artifactoryonline.com/giltgroupe/typesafe.releases")),
Resolver.url("Gilt Common Cache", url("https://giltgroupe.artifactoryonline.com/giltgroupe/gilt.common"))(defaultIvyPattern),
Resolver.url("Gilt Internal Releases Cache", url("https://giltgroupe.artifactoryonline.com/giltgroupe/gilt.internal.releases"))(defaultIvyPattern),
Resolver.url("Gilt Internal Snapshots Cache", url("https://giltgroupe.artifactoryonline.com/giltgroupe/gilt.internal.snapshots"))(defaultIvyPattern),
Resolver.url("SBT Plugin Releases Cache", url("https://giltgroupe.artifactoryonline.com/giltgroupe/sbt-plugin-releases"))(Patterns(Vector("[organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]"), Vector("[organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]"), isMavenCompatible = false, descriptorOptional = true, skipConsistencyCheck = false)),
Resolver.url("giltgroupe-sbt-plugin-releases", url("https://dl.bintray.com/content/giltgroupe/sbt-plugin-releases/"))(defaultIvyPattern),
Resolver.typesafeRepo("releases"),
Resolver.typesafeIvyRepo("releases"),
Resolver.bintrayRepo("giltgroupe", "maven"),
Resolver.bintrayRepo("scalaz", "releases")
)
)
}
|
myyk/gilt-sbt-artifactory
|
src/main/scala/com/gilt/sbt/artifactory/GiltArtifactory.scala
|
Scala
|
mit
| 2,225
|
/*
* Artificial Intelligence for Humans
* Volume 2: Nature Inspired Algorithms
* Java Version
* http://www.aifh.org
* http://www.jeffheaton.com
*
* Code repository:
* https://github.com/jeffheaton/aifh
*
* Copyright 2014 by Jeff Heaton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information on Heaton Research copyrights, licenses
* and trademarks visit:
* http://www.heatonresearch.com/copyright
*/
package com.heatonresearch.aifh.examples.ga.iris
import com.heatonresearch.aifh.evolutionary.population.BasicPopulation
import com.heatonresearch.aifh.evolutionary.population.Population
import com.heatonresearch.aifh.evolutionary.species.BasicSpecies
import com.heatonresearch.aifh.evolutionary.train.basic.BasicEA
import com.heatonresearch.aifh.examples.util.SimpleLearn
import com.heatonresearch.aifh.general.data.BasicData
import com.heatonresearch.aifh.genetic.crossover.Splice
import com.heatonresearch.aifh.genetic.genome.DoubleArrayGenome
import com.heatonresearch.aifh.genetic.genome.DoubleArrayGenomeFactory
import com.heatonresearch.aifh.genetic.mutate.MutatePerturb
import com.heatonresearch.aifh.learning.RBFNetwork
import com.heatonresearch.aifh.learning.RBFNetworkGenomeCODEC
import com.heatonresearch.aifh.learning.score.ScoreFunction
import com.heatonresearch.aifh.learning.score.ScoreRegressionData
import com.heatonresearch.aifh.normalize.DataSet
import com.heatonresearch.aifh.randomize.GenerateRandom
import com.heatonresearch.aifh.randomize.MersenneTwisterGenerateRandom
import java.io.InputStream
/**
* Learn the Iris data set with a RBF network trained by a genetic algorithm.
*/
object ModelIris {
/**
* Create an initial population.
*
* @param rnd Random number generator.
* @param codec The codec, the type of network to use.
* @return The population.
*/
def initPopulation(rnd: GenerateRandom, codec: RBFNetworkGenomeCODEC): Population = {
val network = RBFNetwork(codec.getInputCount, codec.getRbfCount, codec.getOutputCount)
val size = network.getLongTermMemory.length
val result = new BasicPopulation(POPULATION_SIZE, new DoubleArrayGenomeFactory(size))
val defaultSpecies = new BasicSpecies(result)
result.speciesList += defaultSpecies
for(i <- 0 until POPULATION_SIZE) {
val genome = new DoubleArrayGenome(size)
network.reset(rnd)
System.arraycopy(network.getLongTermMemory, 0, genome.getData, 0, size)
defaultSpecies.add(genome)
}
result.genomeFactory = new DoubleArrayGenomeFactory(size)
result
}
def main(args: Array[String]) {
val prg = new ModelIris
prg.process()
}
/**
* The size of the population.
*/
val POPULATION_SIZE: Int = 1000
}
class ModelIris extends SimpleLearn {
import ModelIris._
/**
* Run the example.
*/
def process() {
try {
val iStream: InputStream = this.getClass.getResourceAsStream("/iris.csv")
if (iStream == null) {
println("Cannot access data set, make sure the resources are available.")
System.exit(1)
}
val rnd: GenerateRandom = new MersenneTwisterGenerateRandom
val ds = DataSet.load(iStream)
ds.normalizeRange(0, -1, 1)
ds.normalizeRange(1, -1, 1)
ds.normalizeRange(2, -1, 1)
ds.normalizeRange(3, -1, 1)
val species = ds.encodeOneOfN(4)
iStream.close()
val codec = new RBFNetworkGenomeCODEC(4, 4, 3)
val trainingData = ds.extractSupervised(0, codec.getInputCount, codec.getRbfCount, codec.getOutputCount)
val pop = initPopulation(rnd, codec)
val score: ScoreFunction = new ScoreRegressionData(trainingData)
val genetic = new BasicEA(pop, score)
genetic.codec = codec
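// Evolutionary operators (comments are an editor's addition): splice
// crossover fires with probability 0.7, exchanging segments of one fifth
// of the genome; perturb mutation fires with probability 0.3, nudging
// genes by a 0.1 perturbation ratio.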
genetic.addOperation(0.7, new Splice(genetic,codec.size / 5))
genetic.addOperation(0.3, new MutatePerturb(genetic,0.1))
performIterations(genetic, 100000, 0.05, shouldMinimize = true)
val winner = codec.decode(genetic.getBestGenome).asInstanceOf[RBFNetwork]
SimpleLearn.queryOneOfNOld(winner, trainingData, species)
}
catch {
case t: Throwable =>
t.printStackTrace()
}
}
}
|
PeterLauris/aifh
|
vol2/vol2-scala-examples/src/main/scala/com/heatonresearch/aifh/examples/ga/iris/ModelIris.scala
|
Scala
|
apache-2.0
| 4,655
|
/* The Computer Language Benchmarks Game
http://benchmarksgame.alioth.debian.org/
Contributed by The Anh Tran
Updated for 2.8 by Rex Kerr
Modified by Michael Peng for 2.10
*/
import scala.concurrent.duration.Duration
import java.util.regex.Pattern
import scala.concurrent._
import ExecutionContext.Implicits.global
import scala.io.Source
object regexdna {
def main(args : Array[String]) {
// load data from stdin
val initInput = Source.stdin.mkString
val init_len = initInput length
// strip header & newline
val input = ">.*\\n|\\n".r replaceAllIn(initInput, "")
val strip_len = input length
// counting patterns
val patterns = Seq(
"agggtaaa|tttaccct" ,
"[cgt]gggtaaa|tttaccc[acg]",
"a[act]ggtaaa|tttacc[agt]t",
"ag[act]gtaaa|tttac[agt]ct",
"agg[act]taaa|ttta[agt]cct",
"aggg[acg]aaa|ttt[cgt]ccct",
"agggt[cgt]aa|tt[acg]accct",
"agggta[cgt]a|t[acg]taccct",
"agggtaa[cgt]|[acg]ttaccct")
// queue tasks, each task is handled in a separate thread
val count_results = patterns map( pt =>
future(
(pt, pt.r.findAllIn(input).length)
)
)
// replace IUB
val iub = Map(
"B" -> "(c|g|t)",
"D" -> "(a|g|t)",
"H" -> "(a|c|t)",
"K" -> "(g|t)",
"M" -> "(a|c)",
"N" -> "(a|c|g|t)",
"R" -> "(a|g)",
"S" -> "(c|g)",
"V" -> "(a|c|g)",
"W" -> "(a|t)",
"Y" -> "(c|t)")
val replace_result = {
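// Classic java.util.regex idiom: find each IUB ambiguity code, copy the
// text before it plus its expansion via appendReplacement, then flush the
// remainder with appendTail. Only the resulting length is kept.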
val buffer = new StringBuffer((input.length * 3) / 2)
val matcher = Pattern compile "[BDHKMNRSVWY]" matcher input
while ( matcher find )
matcher appendReplacement( buffer, iub(matcher group))
matcher appendTail buffer
buffer length
}
// print results
Await.result(Future.sequence(count_results), Duration.Inf) foreach (v => printf("%s %d\n", v._1, v._2))
printf( "\n%d\n%d\n%d\n", init_len, strip_len, replace_result )
}
}
|
dc-uba/metrika_benchs_game
|
benchs/regexdna/regexdna.scala-2.scala
|
Scala
|
mit
| 1,977
|
package com.shorrockin.cascal.model
import java.nio.ByteBuffer
/**
* Provides the high-level abstraction for the keyspace. It can be thought
* of as a DB schema, and can be considered the 1st dimension of the
* Cassandra map. This class can be used in the following ways to construct
* paths to various endpoints in the Cassandra namespace:
*
* "ExampleKeyspace" \\\\ "TheSuperFamily" \\ "SuperKey" \\ "StandardKey"
* "ExampleKeyspace" \\ "ColumnFamily" \\ "Key"
*
* @author Chris Shorrock
*/
case class Keyspace(val value:String) extends StringValue {
def \(value:String):StandardColumnFamily = new StandardColumnFamily(value, this)
def \#(value:String):CounterStandardColumnFamily = new CounterStandardColumnFamily(value, this)
def \\(value:String):SuperColumnFamily = new SuperColumnFamily(value, this)
def \\#(value:String):CounterSuperColumnFamily = new CounterSuperColumnFamily(value, this)
override def toString = "Keyspace(value = %s)".format(value)
}
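// A minimal usage sketch (editor's addition; the column-family types are the
// ones constructed above). Each operator selects the next path dimension:
// val superPath = Keyspace("ExampleKeyspace") \\ "TheSuperFamily" // SuperColumnFamily
// val stdPath = Keyspace("ExampleKeyspace") \ "ColumnFamily" // StandardColumnFamily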
|
Shimi/cascal
|
src/main/scala/com/shorrockin/cascal/model/Keyspace.scala
|
Scala
|
apache-2.0
| 977
|
/*
* Copyright 2015 Nicolas Rinaudo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kantan.csv.laws.discipline
import kantan.csv.engine.ReaderEngine
import kantan.csv.laws.ReaderEngineLaws
import org.scalacheck.Prop
import org.scalacheck.Prop.forAll
trait ReaderEngineTests
extends RfcReaderTests with SpectrumReaderTests with KnownFormatsReaderTests with VersionSpecificReaderEngineTests {
def laws: ReaderEngineLaws
def readerEngine: RuleSet = new RuleSet {
def name: String = "readerEngine"
def bases: Seq[(String, RuleSet)] = Nil
def parents: Seq[RuleSet] = Seq(rfc4180, csvSpectrum, knownFormats)
def props: Seq[(String, Prop)] = Seq(
"drop" -> forAll(laws.drop _),
"dropWhile" -> forAll(laws.dropWhile _),
"take" -> forAll(laws.take _),
"forall" -> forAll(laws.forall _),
"map" -> forAll(laws.map _),
"flatMap" -> forAll(laws.flatMap _),
"find" -> forAll(laws.find _),
"exists" -> forAll(laws.exists _),
"filter" -> forAll(laws.filter _),
"next on empty" -> forAll(laws.nextOnEmpty _),
"next on empty (take)" -> forAll(laws.nextOnEmptyTake _),
"hasDefiniteSize" -> forAll(laws.hasDefiniteSize _),
"isEmpty" -> forAll(laws.isEmpty _),
"isTraversableAgain" -> forAll(laws.isTraversableAgain _)
)
}
}
object ReaderEngineTests {
def apply(engine: ReaderEngine): ReaderEngineTests = new ReaderEngineTests {
override def laws: ReaderEngineLaws = ReaderEngineLaws(engine)
}
}
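// A minimal wiring sketch (editor's addition; the suite name and engine value
// are assumptions, not part of this file): the rule set is exercised through a
// Discipline-enabled spec, along the lines of
// class MyEngineSpec extends DisciplineSuite {
// checkAll("MyEngine.readerEngine", ReaderEngineTests(myEngine).readerEngine)
// }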
|
nrinaudo/tabulate
|
laws/shared/src/main/scala/kantan/csv/laws/discipline/ReaderEngineTests.scala
|
Scala
|
mit
| 2,208
|
package cn.dunn
import cn.dunn.mode.User
import cn.dunn.mongo.UserRepository
import cn.dunn.util.MD5Util
import com.alibaba.fastjson.{JSON, JSONObject}
import org.springframework.context.support.ClassPathXmlApplicationContext
/**
* Created by Administrator on 2016/9/19.
*/
object RepositoryTest extends App {
val context = new ClassPathXmlApplicationContext("applicationContext.xml")
val userRepository = context.getBean(classOf[UserRepository])
val user: User = userRepository.getByUsernameAndPassword("422450455@qq.com",MD5Util.MD5("1234561"))
println(user.getId)
}
|
weimeittx/IM
|
runner/src/test/scala/cn/dunn/RepositoryTest.scala
|
Scala
|
apache-2.0
| 587
|
package monocle
import scalaz.{Applicative, Category, Equal, Maybe, Monoid, Traverse, \/}
import scalaz.std.option._
import scalaz.syntax.std.option._
/**
* A [[PPrism]] can be seen as a pair of functions:
* - `getOrModify: S => T \/ A`
* - `reverseGet : B => T`
*
* A [[PPrism]] could also be defined as a weaker [[PIso]] where get can fail.
*
* Typically a [[PPrism]] or [[Prism]] encodes the relation between a Sum or
* CoProduct type (e.g. sealed trait) and one of its elements.
*
* [[PPrism]] stands for Polymorphic Prism as its set and modify methods change
* a type `A` to `B` and `S` to `T`.
* [[Prism]] is a type alias for [[PPrism]] where the type of target cannot be modified:
* {{{
* type Prism[S, A] = PPrism[S, S, A, A]
* }}}
*
* A [[PPrism]] is also a valid [[Fold]], [[POptional]], [[PTraversal]] and [[PSetter]]
*
* @see [[monocle.law.PrismLaws]]
*
* @tparam S the source of a [[PPrism]]
* @tparam T the modified source of a [[PPrism]]
* @tparam A the target of a [[PPrism]]
* @tparam B the modified target of a [[PPrism]]
*/
abstract class PPrism[S, T, A, B] extends Serializable { self =>
/** get the target of a [[PPrism]] or return the original value while allowing the type to change if it does not match */
def getOrModify(s: S): T \/ A
/** get the modified source of a [[PPrism]] */
def reverseGet(b: B): T
/** get the target of a [[PPrism]] or nothing if there is no target */
def getOption(s: S): Option[A]
/** modify polymorphically the target of a [[PPrism]] with an Applicative function */
@inline final def modifyF[F[_] : Applicative](f: A => F[B])(s: S): F[T] =
getOrModify(s).fold(
t => Applicative[F].point(t),
a => Applicative[F].map(f(a))(reverseGet)
)
/** modify polymorphically the target of a [[PPrism]] with a function */
@inline final def modify(f: A => B): S => T =
getOrModify(_).fold(identity,a => reverseGet(f(a)))
/**
* modify polymorphically the target of a [[PPrism]] with a function.
* return empty if the [[PPrism]] does not match
*/
@inline final def modifyOption(f: A => B): S => Option[T] =
s => getOption(s).map(a => reverseGet(f(a)))
/** set polymorphically the target of a [[PPrism]] with a value */
@inline final def set(b: B): S => T =
modify(_ => b)
/**
* set polymorphically the target of a [[PPrism]] with a value.
* return empty if the [[PPrism]] does not match
*/
@inline final def setOption(b: B): S => Option[T] =
modifyOption(_ => b)
/** check if a [[PPrism]] has a target */
@inline final def isMatching(s: S): Boolean =
getOption(s).isDefined
/** create a [[Getter]] from the modified target to the modified source of a [[PPrism]] */
@inline final def re: Getter[B, T] =
Getter(reverseGet)
@inline final def first[C]: PPrism[(S, C), (T, C), (A, C), (B, C)] =
PPrism[(S, C), (T, C), (A, C), (B, C)]{
case (s, c) => getOrModify(s).bimap(_ -> c, _ -> c)
}{
case (b, c) => (reverseGet(b), c)
}
@inline final def second[C]: PPrism[(C, S), (C, T), (C, A), (C, B)] =
PPrism[(C, S), (C, T), (C, A), (C, B)]{
case (c, s) => getOrModify(s).bimap(c -> _, c -> _)
}{
case (c, b) => (c, reverseGet(b))
}
@inline final def left[C] : PPrism[S \/ C, T \/ C, A \/ C, B \/ C] =
PPrism[S \/ C, T \/ C, A \/ C, B \/ C](
_.fold(getOrModify(_).bimap(\/.left, \/.left), c => \/.right(\/.right(c)))
)(_.leftMap(reverseGet))
@inline final def right[C]: PPrism[C \/ S, C \/ T, C \/ A, C \/ B] =
PPrism[C \/ S, C \/ T, C \/ A, C \/ B](
_.fold(c => \/.right(\/.left(c)), getOrModify(_).bimap(\/.right, \/.right))
)(_.map(reverseGet))
@deprecated("use getOption", since = "1.1.0")
@inline final def getMaybe(s: S): Maybe[A] =
getOption(s).toMaybe
@deprecated("use modifyOption", since = "1.1.0")
@inline final def modifyMaybe(f: A => B): S => Maybe[T] =
s => modifyOption(f)(s).toMaybe
@deprecated("use setOption", since = "1.1.0")
@inline final def setMaybe(b: B): S => Maybe[T] =
s => setOption(b)(s).toMaybe
/************************************************************/
/** Compose methods between a [[PPrism]] and another Optics */
/************************************************************/
/** compose a [[PPrism]] with a [[Fold]] */
@inline final def composeFold[C](other: Fold[A, C]): Fold[S, C] =
asFold composeFold other
/** compose a [[PPrism]] with a [[Getter]] */
@inline final def composeGetter[C](other: Getter[A, C]): Fold[S, C] =
asFold composeGetter other
/** compose a [[PPrism]] with a [[PSetter]] */
@inline final def composeSetter[C, D](other: PSetter[A, B, C, D]): PSetter[S, T, C, D] =
asSetter composeSetter other
/** compose a [[PPrism]] with a [[PTraversal]] */
@inline final def composeTraversal[C, D](other: PTraversal[A, B, C, D]): PTraversal[S, T, C, D] =
asTraversal composeTraversal other
/** compose a [[PPrism]] with a [[POptional]] */
@inline final def composeOptional[C, D](other: POptional[A, B, C, D]): POptional[S, T, C, D] =
asOptional composeOptional other
/** compose a [[PPrism]] with a [[PLens]] */
@inline final def composeLens[C, D](other: PLens[A, B, C, D]): POptional[S, T, C, D] =
asOptional composeOptional other.asOptional
/** compose a [[PPrism]] with a [[PPrism]] */
@inline final def composePrism[C, D](other: PPrism[A, B, C, D]): PPrism[S, T, C, D] =
new PPrism[S, T, C, D]{
def getOrModify(s: S): T \/ C =
self.getOrModify(s).flatMap(a => other.getOrModify(a).bimap(self.set(_)(s), identity))
def reverseGet(d: D): T =
self.reverseGet(other.reverseGet(d))
def getOption(s: S): Option[C] =
self.getOption(s) flatMap other.getOption
}
/** compose a [[PPrism]] with a [[PIso]] */
@inline final def composeIso[C, D](other: PIso[A, B, C, D]): PPrism[S, T, C, D] =
composePrism(other.asPrism)
/********************************************/
/** Experimental aliases of compose methods */
/********************************************/
/** alias to composeTraversal */
@inline final def ^|->>[C, D](other: PTraversal[A, B, C, D]): PTraversal[S, T, C, D] =
composeTraversal(other)
/** alias to composeOptional */
@inline final def ^|-?[C, D](other: POptional[A, B, C, D]): POptional[S, T, C, D] =
composeOptional(other)
/** alias to composePrism */
@inline final def ^<-?[C, D](other: PPrism[A, B, C, D]): PPrism[S, T, C, D] =
composePrism(other)
/** alias to composeLens */
@inline final def ^|->[C, D](other: PLens[A, B, C, D]): POptional[S, T, C, D] =
composeLens(other)
/** alias to composeIso */
@inline final def ^<->[C, D](other: PIso[A, B, C, D]): PPrism[S, T, C, D] =
composeIso(other)
/******************************************************************/
/** Transformation methods to view a [[PPrism]] as another Optics */
/******************************************************************/
/** view a [[PPrism]] as a [[Fold]] */
@inline final def asFold: Fold[S, A] = new Fold[S, A]{
def foldMap[M: Monoid](f: A => M)(s: S): M =
getOption(s) map f getOrElse Monoid[M].zero
}
/** view a [[PPrism]] as a [[Setter]] */
@inline final def asSetter: PSetter[S, T, A, B] =
new PSetter[S, T, A, B]{
def modify(f: A => B): S => T =
self.modify(f)
def set(b: B): S => T =
self.set(b)
}
/** view a [[PPrism]] as a [[PTraversal]] */
@inline final def asTraversal: PTraversal[S, T, A, B] =
new PTraversal[S, T, A, B] {
def modifyF[F[_]: Applicative](f: A => F[B])(s: S): F[T] =
self.modifyF(f)(s)
}
/** view a [[PPrism]] as a [[POptional]] */
@inline final def asOptional: POptional[S, T, A, B] =
new POptional[S, T, A, B]{
def getOrModify(s: S): T \/ A =
self.getOrModify(s)
def set(b: B): S => T =
self.set(b)
def getOption(s: S): Option[A] =
self.getOption(s)
def modify(f: A => B): S => T =
self.modify(f)
def modifyF[F[_]: Applicative](f: A => F[B])(s: S): F[T] =
self.modifyF(f)(s)
}
}
object PPrism extends PrismInstances {
def id[S, T]: PPrism[S, T, S, T] =
PIso.id[S, T].asPrism
/** create a [[PPrism]] using the canonical functions: getOrModify and reverseGet */
def apply[S, T, A, B](_getOrModify: S => T \/ A)(_reverseGet: B => T): PPrism[S, T, A, B] =
new PPrism[S, T, A, B]{
def getOrModify(s: S): T \/ A =
_getOrModify(s)
def reverseGet(b: B): T =
_reverseGet(b)
def getOption(s: S): Option[A] =
_getOrModify(s).toOption
}
implicit def prismSyntax[S, A](self: Prism[S, A]): PrismSyntax[S, A] =
new PrismSyntax(self)
}
object Prism {
def id[A]: Prism[A, A] =
Iso.id[A].asPrism
/** alias for [[PPrism]] apply restricted to monomorphic update */
def apply[S, A](_getOption: S => Option[A])(_reverseGet: A => S): Prism[S, A] =
new Prism[S, A]{
def getOrModify(s: S): S \/ A =
_getOption(s).fold[S \/ A](\/.left(s))(\/.right)
def reverseGet(b: A): S =
_reverseGet(b)
def getOption(s: S): Option[A] =
_getOption(s)
}
/** a [[Prism]] that checks for equality with a given value */
def only[A](a: A)(implicit A: Equal[A]): Prism[A, Unit] =
Prism[A, Unit](a2 => if(A.equal(a, a2)) Some(()) else None)(_ => a)
}
sealed abstract class PrismInstances {
implicit val prismCategory: Category[Prism] = new Category[Prism] {
def id[A]: Prism[A, A] =
Prism.id
def compose[A, B, C](f: Prism[B, C], g: Prism[A, B]): Prism[A, C] =
g composePrism f
}
}
final case class PrismSyntax[S, A](self: Prism[S, A]) extends AnyVal {
/** lift a [[Prism]] such that it only matches if all elements of `F[S]` match */
def below[F[_]](implicit F: Traverse[F]): Prism[F[S], F[A]] =
Prism[F[S], F[A]](F.traverse(_)(self.getOption))(F.map(_)(self.reverseGet))
}
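/**
 * A minimal usage sketch (editor's addition, not part of the original Monocle
 * sources): a monomorphic Prism into the non-negative Ints, built with the
 * Prism.apply constructor defined above.
 */
private[monocle] object PrismUsageSketch {
val nonNegative: Prism[Int, Int] =
Prism[Int, Int](i => if (i >= 0) Some(i) else None)(identity)
def demo(): Unit = {
// getOption succeeds only when the source matches the Prism.
assert(nonNegative.getOption(5) == Some(5))
assert(nonNegative.getOption(-1) == None)
// modify rewrites a matching source and leaves anything else untouched.
assert(nonNegative.modify(_ + 1)(5) == 6)
assert(nonNegative.modify(_ + 1)(-1) == -1)
}
}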
|
NightRa/Monocle
|
core/src/main/scala/monocle/Prism.scala
|
Scala
|
mit
| 10,036
|
package com.phasmid.hedge_fund.rules
import scala.collection.MapLike
import scala.collection.GenMapLike
/**
* @author robinhillyard
*/
trait Candidate extends Function1[String, Option[Any]] {
def identifier: String
def ++(m: Map[String, Any]): Candidate
}
case class MapCandidate(id: String, map: Map[String, Any]) extends Candidate {
def identifier = id
def ++(m: Map[String, Any]) = MapCandidate(id, map ++ m)
def apply(s: String) = map.get(s)
}
|
rchillyard/Scalaprof
|
hedge-fund/src/main/scala/com/phasmid/hedge_fund/rules/Candidate.scala
|
Scala
|
gpl-2.0
| 461
|
package scwebapp
import scutil.lang.*
object ChannelState {
final case class Initial[T]() extends ChannelState[T]
final case class HasValue[T](value:T) extends ChannelState[T]
final case class HasHandler[T](handler:Effect[T]) extends ChannelState[T]
final case class Final[T]() extends ChannelState[T]
}
sealed trait ChannelState[T]
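// Editor's note (inferred from the constructor names, not documented here): a
// channel starts Initial, becomes HasValue when a value arrives before any
// handler or HasHandler when a handler registers first, and reaches Final once
// value and handler have met.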
|
ritschwumm/scwebapp
|
modules/core/src/main/scala/scwebapp/ChannelState.scala
|
Scala
|
bsd-2-clause
| 354
|
package com.twitter.finagle.exp.zookeeper.integration.standalone.v3_4.command
import java.util
import com.twitter.finagle.exp.zookeeper.Zookeeper
import com.twitter.finagle.exp.zookeeper.ZookeeperDefs.CreateMode
import com.twitter.finagle.exp.zookeeper.data.Ids
import com.twitter.finagle.exp.zookeeper.integration.standalone.StandaloneIntegrationConfig
import com.twitter.util.Await
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class ChrootTest extends FunSuite with StandaloneIntegrationConfig {
test("Chroot works") {
val clientWCh = Some(
Zookeeper.client
.withAutoReconnect()
.withZkConfiguration(chroot = "/ch1")
.newRichClient(ipAddress + ":" + port)
)
val client = Some(
Zookeeper.client
.withAutoReconnect()
.newRichClient(ipAddress + ":" + port)
)
Await.ready(client.get.connect())
Await.result(client.get.create(
"/ch1",
"hello".getBytes,
Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT
))
Await.ready(clientWCh.get.connect())
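// With chroot "/ch1", every path used by clientWCh is resolved against /ch1
// on the server: its "/ch2" below is the very znode the un-chrooted client
// addresses as "/ch1/ch2".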
val rep = for {
_ <- clientWCh.get.create(
"/ch2",
"hello".getBytes,
Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT)
exists <- client.get.exists("/ch1", true)
exists2 <- client.get.exists("/ch1/ch2", true)
existsW <- clientWCh.get.exists("/ch2", true)
getChildren <- client.get.getChildren("/ch1", true)
getChildrenW <- clientWCh.get.getChildren("/", true)
_ <- client.get.setData("/ch1", "HELLO".getBytes, -1)
_ <- clientWCh.get.setData("/ch2", "HELLO1".getBytes, -1)
getData <- client.get.getData("/ch1", false)
getData2 <- client.get.getData("/ch1/ch2", false)
getDataW <- clientWCh.get.getData("/ch2", false)
_ <- clientWCh.get.delete("/ch2", -1)
_ <- client.get.delete("/ch1", -1)
} yield (exists, exists2, existsW, getChildren,
getChildrenW, getData,
getData2, getDataW)
val (exists, exists2, existsW, getChildren,
getChildrenW, getData,
getData2, getDataW) = Await.result(rep)
assert(exists.stat.isDefined)
assert(exists.watcher.isDefined)
assert(exists2.stat.isDefined)
assert(exists2.watcher.isDefined)
assert(existsW.stat.isDefined)
assert(existsW.watcher.isDefined)
Await.ready(exists.watcher.get.event)
Await.ready(exists2.watcher.get.event)
Await.ready(existsW.watcher.get.event)
assert(util.Arrays.equals(getData.data, "HELLO".getBytes))
assert(util.Arrays.equals(getData2.data, "HELLO1".getBytes))
assert(util.Arrays.equals(getDataW.data, "HELLO1".getBytes))
assert(getChildren.watcher.isDefined)
assert(getChildrenW.watcher.isDefined)
Await.ready(getChildren.watcher.get.event)
Await.ready(getChildrenW.watcher.get.event)
Await.ready(clientWCh.get.disconnect())
Await.ready(client.get.disconnect())
Await.ready(clientWCh.get.close())
Await.ready(client.get.close())
}
}
|
finagle/finagle-zookeeper
|
integration/src/test/scala/com/twitter/finagle/exp/zookeeper/integration/standalone/v3_4/command/ChrootTest.scala
|
Scala
|
apache-2.0
| 3,053
|
package org.openmole.buildsystem
import org.apache.commons.compress.archivers.tar.{ TarArchiveEntry, TarArchiveOutputStream }
import java.io.{ BufferedOutputStream, FileOutputStream }
import java.util.zip.GZIPOutputStream
import sbt._
import Keys._
import resource._
import scala.io.Source
object TarPlugin extends AutoPlugin {
object autoImport {
val tar = TaskKey[File]("tar", "Tar file produced by the assembly project")
val tarInnerFolder = SettingKey[String]("tar-inner-folder", "All files in tar will be put under this folder")
val tarName = SettingKey[String]("tar-name")
val tarPath = SettingKey[File]("tar-path")
val tarFolder = TaskKey[File]("tar-folder", "The folder to tar.")
}
import autoImport._
override lazy val projectSettings = Seq(
tarName := "assemble.tar.gz",
tarPath := target.value / tarName.value,
tarInnerFolder := "",
tar := tarImpl(tarFolder.value, tarPath.value, target.value, tarInnerFolder.value, streams.value))
def tarImpl(folder: File, tarFile: File, target: File, innerFolder: String, streams: TaskStreams): File = {
val out = tarFile
val tgzOS = managed {
val tos = new TarArchiveOutputStream(new BufferedOutputStream(new GZIPOutputStream(new FileOutputStream(out))))
tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU)
tos
}
def findFiles(f: File): Set[File] = if (f.isDirectory) (f.listFiles map findFiles flatten).toSet else Set(f)
val files: Set[File] = findFiles(folder).toSet
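// FileFunction.cached memoises on the inputs' last-modified times and
// existence (state kept under target/zip-cache), so the archive is rebuilt
// only when something beneath `folder` actually changed.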
val fn = FileFunction.cached(target / "zip-cache", FilesInfo.lastModified, FilesInfo.exists) {
fileSet ⇒
streams.log.info("Zipping:\\n\\t")
val lCP = folder
for {
os ← tgzOS
file ← fileSet
is ← managed(Source.fromFile(file)(scala.io.Codec.ISO8859))
} {
val relativeFile = innerFolder + "/" + (file relativeTo lCP).get.getPath
streams.log.info("\\t - " + relativeFile)
val entry = new TarArchiveEntry(file, relativeFile)
entry.setSize(file.length)
if (file.canExecute) entry.setMode(TarArchiveEntry.DEFAULT_FILE_MODE | 73) // 73 == 0o111: the three execute bits (the decimal literal 111 also set stray write bits)
os.putArchiveEntry(entry)
for (c ← is.iter) {
os.write(c.toByte)
}
os.closeArchiveEntry()
}
Set(out)
}
fn(files).head
}
}
|
openmole/openmole
|
build-system/src/main/scala/org/openmole/buildsystem/TarPlugin.scala
|
Scala
|
agpl-3.0
| 2,376